Changeset 99686 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: May 8, 2023 10:44:25 PM
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
This changeset appends two helper functions to IEMAll.cpp, immediately after the existing `#endif /* IEM_WITH_SETJMP */` block (the diff's line-number columns are omitted here; everything below is added code plus the surrounding context lines):

```cpp
#endif /* IEM_WITH_SETJMP */


/**
 * Calculates the effective address of a ModR/M memory operand, extended version
 * for use in the recompilers.
 *
 * Meant to be used via IEM_MC_CALC_RM_EFF_ADDR.
 *
 * @return  Strict VBox status code.
 * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
 * @param   bRm                 The ModRM byte.
 * @param   cbImmAndRspOffset   - First byte: The size of any immediate
 *                                following the effective address opcode bytes
 *                                (only for RIP relative addressing).
 *                              - Second byte: RSP displacement (for POP [ESP]).
 * @param   pGCPtrEff           Where to return the effective address.
 * @param   puInfo              Extra info: 32-bit displacement (bits 31:0) and
 *                              SIB byte (bits 39:32).
 */
VBOXSTRICTRC iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset,
                                     PRTGCPTR pGCPtrEff, uint64_t *puInfo) RT_NOEXCEPT
{
    Log5(("iemOpHlpCalcRmEffAddr: bRm=%#x\n", bRm));
# define SET_SS_DEF() \
    do \
    { \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SEG_MASK)) \
            pVCpu->iem.s.iEffSeg = X86_SREG_SS; \
    } while (0)

    uint64_t uInfo;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        /** @todo Check the effective address size crap! */
        if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_16BIT)
        {
            uint16_t u16EffAddr;

            /* Handle the disp16 form with no registers first. */
            if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 6)
            {
                IEM_OPCODE_GET_NEXT_U16(&u16EffAddr);
                uInfo = u16EffAddr;
            }
            else
            {
                /* Get the displacement. */
                switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
                {
                    case 0:  u16EffAddr = 0;                             break;
                    case 1:  IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16EffAddr); break;
                    case 2:  IEM_OPCODE_GET_NEXT_U16(&u16EffAddr);       break;
                    default: AssertFailedReturn(VERR_IEM_IPE_1); /* (caller checked for these) */
                }
                uInfo = u16EffAddr;

                /* Add the base and index registers to the disp. */
                switch (bRm & X86_MODRM_RM_MASK)
                {
                    case 0: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.si; break;
                    case 1: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.di; break;
                    case 2: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.si; SET_SS_DEF(); break;
                    case 3: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.di; SET_SS_DEF(); break;
                    case 4: u16EffAddr += pVCpu->cpum.GstCtx.si; break;
                    case 5: u16EffAddr += pVCpu->cpum.GstCtx.di; break;
                    case 6: u16EffAddr += pVCpu->cpum.GstCtx.bp; SET_SS_DEF(); break;
                    case 7: u16EffAddr += pVCpu->cpum.GstCtx.bx; break;
                }
            }

            *pGCPtrEff = u16EffAddr;
        }
        else
        {
            Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT);
            uint32_t u32EffAddr;

            /* Handle the disp32 form with no registers first. */
            if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5)
            {
                IEM_OPCODE_GET_NEXT_U32(&u32EffAddr);
                uInfo = u32EffAddr;
            }
            else
            {
                /* Get the register (or SIB) value. */
                uInfo = 0;
                switch (bRm & X86_MODRM_RM_MASK)
                {
                    case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break;
                    case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break;
                    case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break;
                    case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break;
                    case 4: /* SIB */
                    {
                        uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib);
                        uInfo = (uint64_t)bSib << 32;

                        /* Get the index and scale it. */
                        switch ((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK)
                        {
                            case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break;
                            case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break;
                            case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break;
                            case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break;
                            case 4: u32EffAddr = 0; /* none */ break;
                            case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; break;
                            case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break;
                            case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break;
                            IEM_NOT_REACHED_DEFAULT_CASE_RET();
                        }
                        u32EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK;

                        /* add base */
                        switch (bSib & X86_SIB_BASE_MASK)
                        {
                            case 0: u32EffAddr += pVCpu->cpum.GstCtx.eax; break;
                            case 1: u32EffAddr += pVCpu->cpum.GstCtx.ecx; break;
                            case 2: u32EffAddr += pVCpu->cpum.GstCtx.edx; break;
                            case 3: u32EffAddr += pVCpu->cpum.GstCtx.ebx; break;
                            case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break;
                            case 5:
                                if ((bRm & X86_MODRM_MOD_MASK) != 0)
                                {
                                    u32EffAddr += pVCpu->cpum.GstCtx.ebp;
                                    SET_SS_DEF();
                                }
                                else
                                {
                                    uint32_t u32Disp;
                                    IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                                    u32EffAddr += u32Disp;
                                    uInfo      |= u32Disp;
                                }
                                break;
                            case 6: u32EffAddr += pVCpu->cpum.GstCtx.esi; break;
                            case 7: u32EffAddr += pVCpu->cpum.GstCtx.edi; break;
                            IEM_NOT_REACHED_DEFAULT_CASE_RET();
                        }
                        break;
                    }
                    case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; SET_SS_DEF(); break;
                    case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break;
                    case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break;
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }

                /* Get and add the displacement. */
                switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
                {
                    case 0:
                        break;
                    case 1:
                    {
                        int8_t i8Disp; IEM_OPCODE_GET_NEXT_S8(&i8Disp);
                        u32EffAddr += i8Disp;
                        uInfo      |= (uint32_t)(int32_t)i8Disp;
                        break;
                    }
                    case 2:
                    {
                        uint32_t u32Disp; IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                        u32EffAddr += u32Disp;
                        uInfo      |= u32Disp;
                        break;
                    }
                    default:
                        AssertFailedReturn(VERR_IEM_IPE_2); /* (caller checked for these) */
                }
            }

            if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT)
                *pGCPtrEff = u32EffAddr;
            else
            {
                Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_16BIT);
                *pGCPtrEff = u32EffAddr & UINT16_MAX;
            }
        }
    }
    else
    {
        uint64_t u64EffAddr;

        /* Handle the rip+disp32 form with no registers first. */
        if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5)
        {
            IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64EffAddr);
            uInfo = (uint32_t)u64EffAddr;
            u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (cbImmAndRspOffset & UINT32_C(0xff));
        }
        else
        {
            /* Get the register (or SIB) value. */
            uInfo = 0;
            switch ((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB)
            {
                case  0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break;
                case  1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break;
                case  2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break;
                case  3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break;
                case  5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; SET_SS_DEF(); break;
                case  6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break;
                case  7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break;
                case  8: u64EffAddr = pVCpu->cpum.GstCtx.r8;  break;
                case  9: u64EffAddr = pVCpu->cpum.GstCtx.r9;  break;
                case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break;
                case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break;
                case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break;
                case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break;
                case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break;
                /* SIB */
                case 4:
                case 12:
                {
                    uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib);
                    uInfo = (uint64_t)bSib << 32;

                    /* Get the index and scale it. */
                    switch (((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK) | pVCpu->iem.s.uRexIndex)
                    {
                        case  0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break;
                        case  1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break;
                        case  2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break;
                        case  3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break;
                        case  4: u64EffAddr = 0; /* none */ break;
                        case  5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; break;
                        case  6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break;
                        case  7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break;
                        case  8: u64EffAddr = pVCpu->cpum.GstCtx.r8;  break;
                        case  9: u64EffAddr = pVCpu->cpum.GstCtx.r9;  break;
                        case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break;
                        case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break;
                        case 12: u64EffAddr = pVCpu->cpum.GstCtx.r12; break;
                        case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break;
                        case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break;
                        case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break;
                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
                    }
                    u64EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK;

                    /* add base */
                    switch ((bSib & X86_SIB_BASE_MASK) | pVCpu->iem.s.uRexB)
                    {
                        case  0: u64EffAddr += pVCpu->cpum.GstCtx.rax; break;
                        case  1: u64EffAddr += pVCpu->cpum.GstCtx.rcx; break;
                        case  2: u64EffAddr += pVCpu->cpum.GstCtx.rdx; break;
                        case  3: u64EffAddr += pVCpu->cpum.GstCtx.rbx; break;
                        case  4: u64EffAddr += pVCpu->cpum.GstCtx.rsp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break;
                        case  6: u64EffAddr += pVCpu->cpum.GstCtx.rsi; break;
                        case  7: u64EffAddr += pVCpu->cpum.GstCtx.rdi; break;
                        case  8: u64EffAddr += pVCpu->cpum.GstCtx.r8;  break;
                        case  9: u64EffAddr += pVCpu->cpum.GstCtx.r9;  break;
                        case 10: u64EffAddr += pVCpu->cpum.GstCtx.r10; break;
                        case 11: u64EffAddr += pVCpu->cpum.GstCtx.r11; break;
                        case 12: u64EffAddr += pVCpu->cpum.GstCtx.r12; break;
                        case 14: u64EffAddr += pVCpu->cpum.GstCtx.r14; break;
                        case 15: u64EffAddr += pVCpu->cpum.GstCtx.r15; break;
                        /* complicated encodings */
                        case 5:
                        case 13:
                            if ((bRm & X86_MODRM_MOD_MASK) != 0)
                            {
                                if (!pVCpu->iem.s.uRexB)
                                {
                                    u64EffAddr += pVCpu->cpum.GstCtx.rbp;
                                    SET_SS_DEF();
                                }
                                else
                                    u64EffAddr += pVCpu->cpum.GstCtx.r13;
                            }
                            else
                            {
                                uint32_t u32Disp;
                                IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                                u64EffAddr += (int32_t)u32Disp;
                                uInfo      |= u32Disp;
                            }
                            break;
                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
                    }
                    break;
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

            /* Get and add the displacement. */
            switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
            {
                case 0:
                    break;
                case 1:
                {
                    int8_t i8Disp;
                    IEM_OPCODE_GET_NEXT_S8(&i8Disp);
                    u64EffAddr += i8Disp;
                    uInfo      |= (uint32_t)(int32_t)i8Disp;
                    break;
                }
                case 2:
                {
                    uint32_t u32Disp;
                    IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                    u64EffAddr += (int32_t)u32Disp;
                    uInfo      |= u32Disp;
                    break;
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* (caller checked for these) */
            }
        }

        if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_64BIT)
            *pGCPtrEff = u64EffAddr;
        else
        {
            Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT);
            *pGCPtrEff = u64EffAddr & UINT32_MAX;
        }
    }
    *puInfo = uInfo;

    Log5(("iemOpHlpCalcRmEffAddrEx: EffAddr=%#010RGv uInfo=%RX64\n", *pGCPtrEff, uInfo));
    return VINF_SUCCESS;
}


#ifdef IEM_WITH_SETJMP
/**
 * Calculates the effective address of a ModR/M memory operand, extended version
 * for use in the recompilers.
 *
 * Meant to be used via IEM_MC_CALC_RM_EFF_ADDR.
 *
 * May longjmp on internal error.
 *
 * @return  The effective address.
 * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
 * @param   bRm                 The ModRM byte.
 * @param   cbImmAndRspOffset   - First byte: The size of any immediate
 *                                following the effective address opcode bytes
 *                                (only for RIP relative addressing).
 *                              - Second byte: RSP displacement (for POP [ESP]).
 * @param   puInfo              Extra info: 32-bit displacement (bits 31:0) and
 *                              SIB byte (bits 39:32).
 */
RTGCPTR iemOpHlpCalcRmEffAddrJmpEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset,
                                   uint64_t *puInfo) IEM_NOEXCEPT_MAY_LONGJMP
{
    Log5(("iemOpHlpCalcRmEffAddrJmp: bRm=%#x\n", bRm));
# define SET_SS_DEF() \
    do \
    { \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SEG_MASK)) \
            pVCpu->iem.s.iEffSeg = X86_SREG_SS; \
    } while (0)

    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        /** @todo Check the effective address size crap! */
        if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_16BIT)
        {
            uint16_t u16EffAddr;

            /* Handle the disp16 form with no registers first. */
            if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 6)
            {
                IEM_OPCODE_GET_NEXT_U16(&u16EffAddr);
                *puInfo = u16EffAddr;
            }
            else
            {
                /* Get the displacement. */
                switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
                {
                    case 0:  u16EffAddr = 0;                             break;
                    case 1:  IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16EffAddr); break;
                    case 2:  IEM_OPCODE_GET_NEXT_U16(&u16EffAddr);       break;
                    default: AssertFailedStmt(IEM_DO_LONGJMP(pVCpu, VERR_IEM_IPE_1)); /* (caller checked for these) */
                }
                *puInfo = u16EffAddr;

                /* Add the base and index registers to the disp. */
                switch (bRm & X86_MODRM_RM_MASK)
                {
                    case 0: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.si; break;
                    case 1: u16EffAddr += pVCpu->cpum.GstCtx.bx + pVCpu->cpum.GstCtx.di; break;
                    case 2: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.si; SET_SS_DEF(); break;
                    case 3: u16EffAddr += pVCpu->cpum.GstCtx.bp + pVCpu->cpum.GstCtx.di; SET_SS_DEF(); break;
                    case 4: u16EffAddr += pVCpu->cpum.GstCtx.si; break;
                    case 5: u16EffAddr += pVCpu->cpum.GstCtx.di; break;
                    case 6: u16EffAddr += pVCpu->cpum.GstCtx.bp; SET_SS_DEF(); break;
                    case 7: u16EffAddr += pVCpu->cpum.GstCtx.bx; break;
                }
            }

            Log5(("iemOpHlpCalcRmEffAddrJmp: EffAddr=%#06RX16 uInfo=%#RX64\n", u16EffAddr, *puInfo));
            return u16EffAddr;
        }

        Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT);
        uint32_t u32EffAddr;
        uint64_t uInfo;

        /* Handle the disp32 form with no registers first. */
        if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5)
        {
            IEM_OPCODE_GET_NEXT_U32(&u32EffAddr);
            uInfo = u32EffAddr;
        }
        else
        {
            /* Get the register (or SIB) value. */
            uInfo = 0;
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break;
                case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break;
                case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break;
                case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break;
                case 4: /* SIB */
                {
                    uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib);
                    uInfo = (uint64_t)bSib << 32;

                    /* Get the index and scale it. */
                    switch ((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK)
                    {
                        case 0: u32EffAddr = pVCpu->cpum.GstCtx.eax; break;
                        case 1: u32EffAddr = pVCpu->cpum.GstCtx.ecx; break;
                        case 2: u32EffAddr = pVCpu->cpum.GstCtx.edx; break;
                        case 3: u32EffAddr = pVCpu->cpum.GstCtx.ebx; break;
                        case 4: u32EffAddr = 0; /* none */ break;
                        case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; break;
                        case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break;
                        case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break;
                        IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
                    }
                    u32EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK;

                    /* add base */
                    switch (bSib & X86_SIB_BASE_MASK)
                    {
                        case 0: u32EffAddr += pVCpu->cpum.GstCtx.eax; break;
                        case 1: u32EffAddr += pVCpu->cpum.GstCtx.ecx; break;
                        case 2: u32EffAddr += pVCpu->cpum.GstCtx.edx; break;
                        case 3: u32EffAddr += pVCpu->cpum.GstCtx.ebx; break;
                        case 4: u32EffAddr += pVCpu->cpum.GstCtx.esp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break;
                        case 5:
                            if ((bRm & X86_MODRM_MOD_MASK) != 0)
                            {
                                u32EffAddr += pVCpu->cpum.GstCtx.ebp;
                                SET_SS_DEF();
                            }
                            else
                            {
                                uint32_t u32Disp;
                                IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                                u32EffAddr += u32Disp;
                                uInfo      |= u32Disp;
                            }
                            break;
                        case 6: u32EffAddr += pVCpu->cpum.GstCtx.esi; break;
                        case 7: u32EffAddr += pVCpu->cpum.GstCtx.edi; break;
                        IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
                    }
                    break;
                }
                case 5: u32EffAddr = pVCpu->cpum.GstCtx.ebp; SET_SS_DEF(); break;
                case 6: u32EffAddr = pVCpu->cpum.GstCtx.esi; break;
                case 7: u32EffAddr = pVCpu->cpum.GstCtx.edi; break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
            }

            /* Get and add the displacement. */
            switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
            {
                case 0:
                    break;
                case 1:
                {
                    int8_t i8Disp; IEM_OPCODE_GET_NEXT_S8(&i8Disp);
                    u32EffAddr += i8Disp;
                    uInfo      |= (uint32_t)(int32_t)i8Disp;
                    break;
                }
                case 2:
                {
                    uint32_t u32Disp; IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                    u32EffAddr += u32Disp;
                    uInfo      |= u32Disp;
                    break;
                }
                default:
                    AssertFailedStmt(IEM_DO_LONGJMP(pVCpu, VERR_IEM_IPE_2)); /* (caller checked for these) */
            }
        }

        *puInfo = uInfo;
        Log5(("iemOpHlpCalcRmEffAddrJmp: EffAddr=%#010RX32 uInfo=%#RX64\n", u32EffAddr, uInfo));
        return u32EffAddr;
    }

    uint64_t u64EffAddr;
    uint64_t uInfo;

    /* Handle the rip+disp32 form with no registers first. */
    if ((bRm & (X86_MODRM_MOD_MASK | X86_MODRM_RM_MASK)) == 5)
    {
        IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64EffAddr);
        uInfo = (uint32_t)u64EffAddr;
        u64EffAddr += pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (cbImmAndRspOffset & UINT32_C(0xff));
    }
    else
    {
        /* Get the register (or SIB) value. */
        uInfo = 0;
        switch ((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB)
        {
            case  0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break;
            case  1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break;
            case  2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break;
            case  3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break;
            case  5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; SET_SS_DEF(); break;
            case  6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break;
            case  7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break;
            case  8: u64EffAddr = pVCpu->cpum.GstCtx.r8;  break;
            case  9: u64EffAddr = pVCpu->cpum.GstCtx.r9;  break;
            case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break;
            case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break;
            case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break;
            case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break;
            case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break;
            /* SIB */
            case 4:
            case 12:
            {
                uint8_t bSib; IEM_OPCODE_GET_NEXT_U8(&bSib);
                uInfo = (uint64_t)bSib << 32;

                /* Get the index and scale it. */
                switch (((bSib >> X86_SIB_INDEX_SHIFT) & X86_SIB_INDEX_SMASK) | pVCpu->iem.s.uRexIndex)
                {
                    case  0: u64EffAddr = pVCpu->cpum.GstCtx.rax; break;
                    case  1: u64EffAddr = pVCpu->cpum.GstCtx.rcx; break;
                    case  2: u64EffAddr = pVCpu->cpum.GstCtx.rdx; break;
                    case  3: u64EffAddr = pVCpu->cpum.GstCtx.rbx; break;
                    case  4: u64EffAddr = 0; /* none */ break;
                    case  5: u64EffAddr = pVCpu->cpum.GstCtx.rbp; break;
                    case  6: u64EffAddr = pVCpu->cpum.GstCtx.rsi; break;
                    case  7: u64EffAddr = pVCpu->cpum.GstCtx.rdi; break;
                    case  8: u64EffAddr = pVCpu->cpum.GstCtx.r8;  break;
                    case  9: u64EffAddr = pVCpu->cpum.GstCtx.r9;  break;
                    case 10: u64EffAddr = pVCpu->cpum.GstCtx.r10; break;
                    case 11: u64EffAddr = pVCpu->cpum.GstCtx.r11; break;
                    case 12: u64EffAddr = pVCpu->cpum.GstCtx.r12; break;
                    case 13: u64EffAddr = pVCpu->cpum.GstCtx.r13; break;
                    case 14: u64EffAddr = pVCpu->cpum.GstCtx.r14; break;
                    case 15: u64EffAddr = pVCpu->cpum.GstCtx.r15; break;
                    IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
                }
                u64EffAddr <<= (bSib >> X86_SIB_SCALE_SHIFT) & X86_SIB_SCALE_SMASK;

                /* add base */
                switch ((bSib & X86_SIB_BASE_MASK) | pVCpu->iem.s.uRexB)
                {
                    case  0: u64EffAddr += pVCpu->cpum.GstCtx.rax; break;
                    case  1: u64EffAddr += pVCpu->cpum.GstCtx.rcx; break;
                    case  2: u64EffAddr += pVCpu->cpum.GstCtx.rdx; break;
                    case  3: u64EffAddr += pVCpu->cpum.GstCtx.rbx; break;
                    case  4: u64EffAddr += pVCpu->cpum.GstCtx.rsp + (cbImmAndRspOffset >> 8); SET_SS_DEF(); break;
                    case  6: u64EffAddr += pVCpu->cpum.GstCtx.rsi; break;
                    case  7: u64EffAddr += pVCpu->cpum.GstCtx.rdi; break;
                    case  8: u64EffAddr += pVCpu->cpum.GstCtx.r8;  break;
                    case  9: u64EffAddr += pVCpu->cpum.GstCtx.r9;  break;
                    case 10: u64EffAddr += pVCpu->cpum.GstCtx.r10; break;
                    case 11: u64EffAddr += pVCpu->cpum.GstCtx.r11; break;
                    case 12: u64EffAddr += pVCpu->cpum.GstCtx.r12; break;
                    case 14: u64EffAddr += pVCpu->cpum.GstCtx.r14; break;
                    case 15: u64EffAddr += pVCpu->cpum.GstCtx.r15; break;
                    /* complicated encodings */
                    case 5:
                    case 13:
                        if ((bRm & X86_MODRM_MOD_MASK) != 0)
                        {
                            if (!pVCpu->iem.s.uRexB)
                            {
                                u64EffAddr += pVCpu->cpum.GstCtx.rbp;
                                SET_SS_DEF();
                            }
                            else
                                u64EffAddr += pVCpu->cpum.GstCtx.r13;
                        }
                        else
                        {
                            uint32_t u32Disp;
                            IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                            u64EffAddr += (int32_t)u32Disp;
                            uInfo      |= u32Disp;
                        }
                        break;
                    IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
                }
                break;
            }
            IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX);
        }

        /* Get and add the displacement. */
        switch ((bRm >> X86_MODRM_MOD_SHIFT) & X86_MODRM_MOD_SMASK)
        {
            case 0:
                break;
            case 1:
            {
                int8_t i8Disp;
                IEM_OPCODE_GET_NEXT_S8(&i8Disp);
                u64EffAddr += i8Disp;
                uInfo      |= (uint32_t)(int32_t)i8Disp;
                break;
            }
            case 2:
            {
                uint32_t u32Disp;
                IEM_OPCODE_GET_NEXT_U32(&u32Disp);
                u64EffAddr += (int32_t)u32Disp;
                uInfo      |= u32Disp;
                break;
            }
            IEM_NOT_REACHED_DEFAULT_CASE_RET2(RTGCPTR_MAX); /* (caller checked for these) */
        }
    }

    *puInfo = uInfo;
    if (pVCpu->iem.s.enmEffAddrMode == IEMMODE_64BIT)
    {
        Log5(("iemOpHlpCalcRmEffAddrJmp: EffAddr=%#010RGv uInfo=%#RX64\n", u64EffAddr, uInfo));
        return u64EffAddr;
    }
    Assert(pVCpu->iem.s.enmEffAddrMode == IEMMODE_32BIT);
    Log5(("iemOpHlpCalcRmEffAddrJmp: EffAddr=%#010RGv uInfo=%#RX64\n", u64EffAddr & UINT32_MAX, uInfo));
    return u64EffAddr & UINT32_MAX;
}
#endif /* IEM_WITH_SETJMP */

/** @} */
```
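The packing of the extra info is the key new contract here: bits 31:0 carry the 32-bit displacement and bits 39:32 carry the raw SIB byte. A minimal sketch of that contract follows; the iemEffAddrInfo* helper names are hypothetical and not part of the changeset, only the bit layout comes from the doc comment above:

```c
#include <stdint.h>

/* Pack: 32-bit displacement in bits 31:0, SIB byte in bits 39:32,
 * matching the uInfo value the helpers above produce. */
static inline uint64_t iemEffAddrInfoPack(uint32_t u32Disp, uint8_t bSib)
{
    return (uint64_t)u32Disp | ((uint64_t)bSib << 32);
}

/* Unpack, mirroring the casts the Python generator emits (see below). */
static inline uint32_t iemEffAddrInfoDisp(uint64_t uInfo) /* bits 31:0  */
{
    return (uint32_t)uInfo;
}

static inline uint8_t iemEffAddrInfoSib(uint64_t uInfo)   /* bits 39:32 */
{
    return (uint8_t)(uInfo >> 32);
}
```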
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThreadedRecompiler.cpp
In the threaded recompiler, IEM_MC_CALC_RM_EFF_ADDR is redefined to route through the new extended helpers, so that besides the effective address it also leaves the packed displacement/SIB info in a local uEffAddrInfo (the surrounding IEM_MC2_EMIT_CALL_1 context lines are shown for placement):

```cpp
#undef IEM_MC_CALC_RM_EFF_ADDR
#ifndef IEM_WITH_SETJMP
# define IEM_MC_CALC_RM_EFF_ADDR(a_GCPtrEff, bRm, cbImm) \
    uint64_t uEffAddrInfo; \
    IEM_MC_RETURN_ON_FAILURE(iemOpHlpCalcRmEffAddrEx(pVCpu, (bRm), (cbImm), &(a_GCPtrEff), &uEffAddrInfo))
#else
# define IEM_MC_CALC_RM_EFF_ADDR(a_GCPtrEff, bRm, cbImm) \
    uint64_t uEffAddrInfo; \
    ((a_GCPtrEff) = iemOpHlpCalcRmEffAddrJmpEx(pVCpu, (bRm), (cbImm), &uEffAddrInfo))
#endif

#define IEM_MC2_EMIT_CALL_1(a_enmFunction, a_uArg0) do { \
        IEMTHREADEDFUNCS const enmFunctionCheck = a_enmFunction; RT_NOREF(enmFunctionCheck); \
```
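Since the macro now expands to a declaration plus a statement, uEffAddrInfo stays in scope for the IEM_MC2_EMIT_CALL_* statements that follow it. A hedged sketch of a use site, assuming a generated decoder body where pVCpu and bRm are in scope; the threaded-function enum value is hypothetical:

```c
/* Sketch only: kIemThreadedFunc_Example is a hypothetical enum value. */
RTGCPTR GCPtrEff;
IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0 /*cbImm*/);
/* uEffAddrInfo, declared by the macro, now holds disp32 (bits 31:0) and
 * the SIB byte (bits 39:32) for the threaded-call emitter to pick apart: */
IEM_MC2_EMIT_CALL_1(kIemThreadedFunc_Example, (uint32_t)uEffAddrInfo /* u32Disp */);
```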
trunk/src/VBox/VMM/VMMAll/IEMAllThreadedPython.py
In the Python generator, the ModR/M parameter references for the address-calculation variations are switched from standalone decoder locals to expressions over the packed uEffAddrInfo, with sStdRef preserving the original reference names:

```diff
         if self.sVariation == self.ksVariation_Addr16:
             self.aoParamRefs.append(ThreadedParamRef('bRm', 'uint8_t', oStmt));
-            self.aoParamRefs.append(ThreadedParamRef('u16Disp', 'uint16_t', oStmt));
+            self.aoParamRefs.append(ThreadedParamRef('(uint16_t)uEffAddrInfo',
+                                                     'uint16_t', oStmt, sStdRef = 'u16Disp'));
         elif self.sVariation in (self.ksVariation_Addr32, self.ksVariation_Addr32Flat):
             self.aoParamRefs.append(ThreadedParamRef('bRm', 'uint8_t', oStmt));
-            self.aoParamRefs.append(ThreadedParamRef('bSib', 'uint8_t', oStmt));
-            self.aoParamRefs.append(ThreadedParamRef('u32Disp', 'uint32_t', oStmt));
+            self.aoParamRefs.append(ThreadedParamRef('(uint8_t)(uEffAddrInfo >> 32)',
+                                                     'uint8_t', oStmt, sStdRef = 'bSib'));
+            self.aoParamRefs.append(ThreadedParamRef('(uint32_t)uEffAddrInfo',
+                                                     'uint32_t', oStmt, sStdRef = 'u32Disp'));
         else:
             assert self.sVariation in (self.ksVariation_Addr64, self.ksVariation_Addr64_32);
-            self.aoParamRefs.append(ThreadedParamRef('bRmEx', 'uint8_t', oStmt));
-            self.aoParamRefs.append(ThreadedParamRef('bSib', 'uint8_t', oStmt));
-            self.aoParamRefs.append(ThreadedParamRef('u32Disp', 'uint32_t', oStmt));
+            self.aoParamRefs.append(ThreadedParamRef('IEM_GET_MODRM_EX(pVCpu, bRm)',
+                                                     'uint8_t', oStmt, sStdRef = 'bRmEx'));
+            self.aoParamRefs.append(ThreadedParamRef('(uint8_t)(uEffAddrInfo >> 32)',
+                                                     'uint8_t', oStmt, sStdRef = 'bSib'));
+            self.aoParamRefs.append(ThreadedParamRef('(uint32_t)uEffAddrInfo',
+                                                     'uint32_t', oStmt, sStdRef = 'u32Disp'));
         self.aoParamRefs.append(ThreadedParamRef('IEM_GET_INSTR_LEN(pVCpu)', 'uint4_t', oStmt, sStdRef = 'cbInstr'));
         assert len(oStmt.asParams) == 3;
```
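The effect of these ThreadedParamRef changes is that the generator no longer expects standalone u16Disp/bSib/u32Disp/bRmEx locals in the instruction body; each reference is rewritten into an expression over bRm and the packed uEffAddrInfo, keyed by sStdRef. A hedged sketch of the expressions the generated C would evaluate, assuming pVCpu, bRm and uEffAddrInfo are in scope (the surrounding generated function is not shown):

```c
/* Variable names mirror the sStdRef keys used above. */
uint8_t  const bRmEx   = IEM_GET_MODRM_EX(pVCpu, bRm);  /* 64-bit variations      */
uint8_t  const bSib    = (uint8_t)(uEffAddrInfo >> 32); /* SIB byte, bits 39:32   */
uint32_t const u32Disp = (uint32_t)uEffAddrInfo;        /* displacement, bits 31:0 */
uint16_t const u16Disp = (uint16_t)uEffAddrInfo;        /* Addr16 variation       */
```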