Changeset 54898 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
- Timestamp: Mar 22, 2015 11:47:07 PM
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
--- trunk/src/VBox/VMM/VMMAll/IEMAll.cpp    (r54892)
+++ trunk/src/VBox/VMM/VMMAll/IEMAll.cpp    (r54898)

@@ -5229 +5229 @@
 DECLINLINE(void) iemFpuUpdateOpcodeAndIpWorker(PIEMCPU pIemCpu, PCPUMCTX pCtx)
 {
-    pCtx->fpu.FOP = pIemCpu->abOpcode[pIemCpu->offFpuOpcode]
+    pCtx->XState.x87.FOP = pIemCpu->abOpcode[pIemCpu->offFpuOpcode]
                   | ((uint16_t)(pIemCpu->abOpcode[pIemCpu->offFpuOpcode - 1] & 0x7) << 8);
-    /** @todo FPU.CS and FPUIP needs to be kept seperately. */
+    /** @todo XState.x87.CS and FPUIP needs to be kept seperately. */
     if (IEM_IS_REAL_OR_V86_MODE(pIemCpu))
     {
         /** @todo Testcase: making assumptions about how FPUIP and FPUDP are handled
          *        happens in real mode here based on the fnsave and fnstenv images. */
-        pCtx->fpu.CS    = 0;
-        pCtx->fpu.FPUIP = pCtx->eip | ((uint32_t)pCtx->cs.Sel << 4);
+        pCtx->XState.x87.CS    = 0;
+        pCtx->XState.x87.FPUIP = pCtx->eip | ((uint32_t)pCtx->cs.Sel << 4);
     }
     else
     {
-        pCtx->fpu.CS    = pCtx->cs.Sel;
-        pCtx->fpu.FPUIP = pCtx->rip;
-    }
-}
-
-
-/**
- * Updates the FPU.DS and FPUDP registers.
+        pCtx->XState.x87.CS    = pCtx->cs.Sel;
+        pCtx->XState.x87.FPUIP = pCtx->rip;
+    }
+}
+
+
+/**
+ * Updates the XState.x87.DS and FPUDP registers.
  *
  * @param   pIemCpu             The IEM per CPU data.

@@ -5270 +5270 @@
         sel = pCtx->ds.Sel;
     }
-    /** @todo FPU.DS and FPUDP needs to be kept seperately. */
+    /** @todo XState.x87.DS and FPUDP needs to be kept seperately. */
     if (IEM_IS_REAL_OR_V86_MODE(pIemCpu))
     {
-        pCtx->fpu.DS    = 0;
-        pCtx->fpu.FPUDP = (uint32_t)GCPtrEff | ((uint32_t)sel << 4);
+        pCtx->XState.x87.DS    = 0;
+        pCtx->XState.x87.FPUDP = (uint32_t)GCPtrEff | ((uint32_t)sel << 4);
     }
     else
     {
-        pCtx->fpu.DS    = sel;
-        pCtx->fpu.FPUDP = GCPtrEff;
+        pCtx->XState.x87.DS    = sel;
+        pCtx->XState.x87.FPUDP = GCPtrEff;
     }
 }

@@ -5293 +5293 @@
 DECLINLINE(void) iemFpuRotateStackPush(PCPUMCTX pCtx)
 {
-    RTFLOAT80U r80Tmp = pCtx->fpu.aRegs[7].r80;
-    pCtx->fpu.aRegs[7].r80 = pCtx->fpu.aRegs[6].r80;
-    pCtx->fpu.aRegs[6].r80 = pCtx->fpu.aRegs[5].r80;
-    pCtx->fpu.aRegs[5].r80 = pCtx->fpu.aRegs[4].r80;
-    pCtx->fpu.aRegs[4].r80 = pCtx->fpu.aRegs[3].r80;
-    pCtx->fpu.aRegs[3].r80 = pCtx->fpu.aRegs[2].r80;
-    pCtx->fpu.aRegs[2].r80 = pCtx->fpu.aRegs[1].r80;
-    pCtx->fpu.aRegs[1].r80 = pCtx->fpu.aRegs[0].r80;
-    pCtx->fpu.aRegs[0].r80 = r80Tmp;
+    RTFLOAT80U r80Tmp = pCtx->XState.x87.aRegs[7].r80;
+    pCtx->XState.x87.aRegs[7].r80 = pCtx->XState.x87.aRegs[6].r80;
+    pCtx->XState.x87.aRegs[6].r80 = pCtx->XState.x87.aRegs[5].r80;
+    pCtx->XState.x87.aRegs[5].r80 = pCtx->XState.x87.aRegs[4].r80;
+    pCtx->XState.x87.aRegs[4].r80 = pCtx->XState.x87.aRegs[3].r80;
+    pCtx->XState.x87.aRegs[3].r80 = pCtx->XState.x87.aRegs[2].r80;
+    pCtx->XState.x87.aRegs[2].r80 = pCtx->XState.x87.aRegs[1].r80;
+    pCtx->XState.x87.aRegs[1].r80 = pCtx->XState.x87.aRegs[0].r80;
+    pCtx->XState.x87.aRegs[0].r80 = r80Tmp;
 }

@@ -5314 +5314 @@
 DECLINLINE(void) iemFpuRotateStackPop(PCPUMCTX pCtx)
 {
-    RTFLOAT80U r80Tmp = pCtx->fpu.aRegs[0].r80;
-    pCtx->fpu.aRegs[0].r80 = pCtx->fpu.aRegs[1].r80;
-    pCtx->fpu.aRegs[1].r80 = pCtx->fpu.aRegs[2].r80;
-    pCtx->fpu.aRegs[2].r80 = pCtx->fpu.aRegs[3].r80;
-    pCtx->fpu.aRegs[3].r80 = pCtx->fpu.aRegs[4].r80;
-    pCtx->fpu.aRegs[4].r80 = pCtx->fpu.aRegs[5].r80;
-    pCtx->fpu.aRegs[5].r80 = pCtx->fpu.aRegs[6].r80;
-    pCtx->fpu.aRegs[6].r80 = pCtx->fpu.aRegs[7].r80;
-    pCtx->fpu.aRegs[7].r80 = r80Tmp;
+    RTFLOAT80U r80Tmp = pCtx->XState.x87.aRegs[0].r80;
+    pCtx->XState.x87.aRegs[0].r80 = pCtx->XState.x87.aRegs[1].r80;
+    pCtx->XState.x87.aRegs[1].r80 = pCtx->XState.x87.aRegs[2].r80;
+    pCtx->XState.x87.aRegs[2].r80 = pCtx->XState.x87.aRegs[3].r80;
+    pCtx->XState.x87.aRegs[3].r80 = pCtx->XState.x87.aRegs[4].r80;
+    pCtx->XState.x87.aRegs[4].r80 = pCtx->XState.x87.aRegs[5].r80;
+    pCtx->XState.x87.aRegs[5].r80 = pCtx->XState.x87.aRegs[6].r80;
+    pCtx->XState.x87.aRegs[6].r80 = pCtx->XState.x87.aRegs[7].r80;
+    pCtx->XState.x87.aRegs[7].r80 = r80Tmp;
 }

@@ -5337 +5337 @@
 {
     /* Update FSW and bail if there are pending exceptions afterwards. */
-    uint16_t fFsw = pCtx->fpu.FSW & ~X86_FSW_C_MASK;
+    uint16_t fFsw = pCtx->XState.x87.FSW & ~X86_FSW_C_MASK;
     fFsw |= pResult->FSW & ~X86_FSW_TOP_MASK;
-    if (   (fFsw          & (X86_FSW_IE | X86_FSW_ZE | X86_FSW_DE))
-        & ~(pCtx->fpu.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
-    {
-        pCtx->fpu.FSW = fFsw;
+    if (   (fFsw                 & (X86_FSW_IE | X86_FSW_ZE | X86_FSW_DE))
+        & ~(pCtx->XState.x87.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
+    {
+        pCtx->XState.x87.FSW = fFsw;
         return;
     }

     uint16_t iNewTop = (X86_FSW_TOP_GET(fFsw) + 7) & X86_FSW_TOP_SMASK;
-    if (!(pCtx->fpu.FTW & RT_BIT(iNewTop)))
+    if (!(pCtx->XState.x87.FTW & RT_BIT(iNewTop)))
     {
         /* All is fine, push the actual value. */
-        pCtx->fpu.FTW |= RT_BIT(iNewTop);
-        pCtx->fpu.aRegs[7].r80 = pResult->r80Result;
-    }
-    else if (pCtx->fpu.FCW & X86_FCW_IM)
+        pCtx->XState.x87.FTW |= RT_BIT(iNewTop);
+        pCtx->XState.x87.aRegs[7].r80 = pResult->r80Result;
+    }
+    else if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked stack overflow, push QNaN. */
         fFsw |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1;
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[7].r80);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[7].r80);
     }
     else
     {
         /* Raise stack overflow, don't push anything. */
-        pCtx->fpu.FSW |= pResult->FSW & ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1 | X86_FSW_B | X86_FSW_ES;
+        pCtx->XState.x87.FSW |= pResult->FSW & ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1 | X86_FSW_B | X86_FSW_ES;
         return;
     }

@@ -5369 +5369 @@
     fFsw &= ~X86_FSW_TOP_MASK;
     fFsw |= iNewTop << X86_FSW_TOP_SHIFT;
-    pCtx->fpu.FSW = fFsw;
+    pCtx->XState.x87.FSW = fFsw;

     iemFpuRotateStackPush(pCtx);

@@ -5386 +5386 @@
 {
     Assert(iStReg < 8);
-    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + iStReg) & X86_FSW_TOP_SMASK;
-    pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-    pCtx->fpu.FSW |= pResult->FSW & ~X86_FSW_TOP_MASK;
-    pCtx->fpu.FTW |= RT_BIT(iReg);
-    pCtx->fpu.aRegs[iStReg].r80 = pResult->r80Result;
+    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + iStReg) & X86_FSW_TOP_SMASK;
+    pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+    pCtx->XState.x87.FSW |= pResult->FSW & ~X86_FSW_TOP_MASK;
+    pCtx->XState.x87.FTW |= RT_BIT(iReg);
+    pCtx->XState.x87.aRegs[iStReg].r80 = pResult->r80Result;
 }

@@ -5403 +5403 @@
 static void iemFpuUpdateFSWOnly(PCPUMCTX pCtx, uint16_t u16FSW)
 {
-    pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-    pCtx->fpu.FSW |= u16FSW & ~X86_FSW_TOP_MASK;
+    pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+    pCtx->XState.x87.FSW |= u16FSW & ~X86_FSW_TOP_MASK;
 }

@@ -5416 +5416 @@
 {
     /* Check pending exceptions. */
-    uint16_t uFSW = pCtx->fpu.FSW;
-    if (   (pCtx->fpu.FSW & (X86_FSW_IE | X86_FSW_ZE | X86_FSW_DE))
-        & ~(pCtx->fpu.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
+    uint16_t uFSW = pCtx->XState.x87.FSW;
+    if (   (pCtx->XState.x87.FSW & (X86_FSW_IE | X86_FSW_ZE | X86_FSW_DE))
+        & ~(pCtx->XState.x87.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
         return;

@@ -5425 +5425 @@
     uFSW &= ~X86_FSW_TOP_MASK;
     uFSW |= (iOldTop + (UINT16_C(9) << X86_FSW_TOP_SHIFT)) & X86_FSW_TOP_MASK;
-    pCtx->fpu.FSW = uFSW;
+    pCtx->XState.x87.FSW = uFSW;

     /* Mark the previous ST0 as empty. */
     iOldTop >>= X86_FSW_TOP_SHIFT;
-    pCtx->fpu.FTW &= ~RT_BIT(iOldTop);
+    pCtx->XState.x87.FTW &= ~RT_BIT(iOldTop);

     /* Rotate the registers. */

@@ -5481 +5481 @@

     /* Update FSW and bail if there are pending exceptions afterwards. */
-    uint16_t fFsw = pCtx->fpu.FSW & ~X86_FSW_C_MASK;
+    uint16_t fFsw = pCtx->XState.x87.FSW & ~X86_FSW_C_MASK;
     fFsw |= pResult->FSW & ~X86_FSW_TOP_MASK;
     if (   (fFsw                 & (X86_FSW_IE | X86_FSW_ZE | X86_FSW_DE))
-        & ~(pCtx->fpu.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
-    {
-        pCtx->fpu.FSW = fFsw;
+        & ~(pCtx->XState.x87.FCW & (X86_FCW_IM | X86_FCW_ZM | X86_FCW_DM)))
+    {
+        pCtx->XState.x87.FSW = fFsw;
         return;
     }

     uint16_t iNewTop = (X86_FSW_TOP_GET(fFsw) + 7) & X86_FSW_TOP_SMASK;
-    if (!(pCtx->fpu.FTW & RT_BIT(iNewTop)))
+    if (!(pCtx->XState.x87.FTW & RT_BIT(iNewTop)))
     {
         /* All is fine, push the actual value. */
-        pCtx->fpu.FTW |= RT_BIT(iNewTop);
-        pCtx->fpu.aRegs[0].r80 = pResult->r80Result1;
-        pCtx->fpu.aRegs[7].r80 = pResult->r80Result2;
-    }
-    else if (pCtx->fpu.FCW & X86_FCW_IM)
+        pCtx->XState.x87.FTW |= RT_BIT(iNewTop);
+        pCtx->XState.x87.aRegs[0].r80 = pResult->r80Result1;
+        pCtx->XState.x87.aRegs[7].r80 = pResult->r80Result2;
+    }
+    else if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked stack overflow, push QNaN. */
         fFsw |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1;
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[0].r80);
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[7].r80);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[0].r80);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[7].r80);
     }
     else
     {
         /* Raise stack overflow, don't push anything. */
-        pCtx->fpu.FSW |= pResult->FSW & ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1 | X86_FSW_B | X86_FSW_ES;
+        pCtx->XState.x87.FSW |= pResult->FSW & ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_C1 | X86_FSW_B | X86_FSW_ES;
         return;
     }

@@ -5515 +5515 @@
     fFsw &= ~X86_FSW_TOP_MASK;
     fFsw |= iNewTop << X86_FSW_TOP_SHIFT;
-    pCtx->fpu.FSW = fFsw;
+    pCtx->XState.x87.FSW = fFsw;

     iemFpuRotateStackPush(pCtx);

@@ -5619 +5619 @@
     Assert(iStReg < 8);
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint8_t iReg = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + iStReg) & X86_FSW_TOP_SMASK;
-    pCtx->fpu.FTW &= ~RT_BIT(iReg);
+    uint8_t iReg = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + iStReg) & X86_FSW_TOP_SMASK;
+    pCtx->XState.x87.FTW &= ~RT_BIT(iReg);
 }

@@ -5632 +5632 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t uFsw = pCtx->fpu.FSW;
+    uint16_t uFsw = pCtx->XState.x87.FSW;
     uint16_t uTop = uFsw & X86_FSW_TOP_MASK;
     uTop = (uTop + (1 << X86_FSW_TOP_SHIFT)) & X86_FSW_TOP_MASK;
     uFsw &= ~X86_FSW_TOP_MASK;
     uFsw |= uTop;
-    pCtx->fpu.FSW = uFsw;
+    pCtx->XState.x87.FSW = uFsw;
 }

@@ -5649 +5649 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t uFsw = pCtx->fpu.FSW;
+    uint16_t uFsw = pCtx->XState.x87.FSW;
     uint16_t uTop = uFsw & X86_FSW_TOP_MASK;
     uTop = (uTop + (7 << X86_FSW_TOP_SHIFT)) & X86_FSW_TOP_MASK;
     uFsw &= ~X86_FSW_TOP_MASK;
     uFsw |= uTop;
-    pCtx->fpu.FSW = uFsw;
+    pCtx->XState.x87.FSW = uFsw;
 }

@@ -5748 +5748 @@
 {
     Assert(iStReg < 8 || iStReg == UINT8_MAX);
-    if (pCtx->fpu.FCW & X86_FCW_IM)
+    if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked underflow. */
-        pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF;
-        uint16_t iReg = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + iStReg) & X86_FSW_TOP_SMASK;
+        pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF;
+        uint16_t iReg = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + iStReg) & X86_FSW_TOP_SMASK;
         if (iStReg != UINT8_MAX)
         {
-            pCtx->fpu.FTW |= RT_BIT(iReg);
-            iemFpuStoreQNan(&pCtx->fpu.aRegs[iStReg].r80);
+            pCtx->XState.x87.FTW |= RT_BIT(iReg);
+            iemFpuStoreQNan(&pCtx->XState.x87.aRegs[iStReg].r80);
         }
     }
     else
     {
-        pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
+        pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
     }
 }

@@ -5830 +5830 @@
     iemFpuUpdateOpcodeAndIpWorker(pIemCpu, pCtx);

-    if (pCtx->fpu.FCW & X86_FCW_IM)
+    if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked overflow - Push QNaN. */
-        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + 7) & X86_FSW_TOP_SMASK;
-        pCtx->fpu.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF;
-        pCtx->fpu.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
-        pCtx->fpu.FTW |= RT_BIT(iNewTop);
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[7].r80);
+        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + 7) & X86_FSW_TOP_SMASK;
+        pCtx->XState.x87.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF;
+        pCtx->XState.x87.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
+        pCtx->XState.x87.FTW |= RT_BIT(iNewTop);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[7].r80);
         iemFpuRotateStackPush(pCtx);
     }

@@ -5844 +5844 @@
     {
         /* Exception pending - don't change TOP or the register stack. */
-        pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
+        pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
     }
 }

@@ -5856 +5856 @@
     iemFpuUpdateOpcodeAndIpWorker(pIemCpu, pCtx);

-    if (pCtx->fpu.FCW & X86_FCW_IM)
+    if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked overflow - Push QNaN. */
-        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + 7) & X86_FSW_TOP_SMASK;
-        pCtx->fpu.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF;
-        pCtx->fpu.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
-        pCtx->fpu.FTW |= RT_BIT(iNewTop);
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[0].r80);
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[7].r80);
+        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + 7) & X86_FSW_TOP_SMASK;
+        pCtx->XState.x87.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF;
+        pCtx->XState.x87.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
+        pCtx->XState.x87.FTW |= RT_BIT(iNewTop);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[0].r80);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[7].r80);
         iemFpuRotateStackPush(pCtx);
     }

@@ -5871 +5871 @@
     {
         /* Exception pending - don't change TOP or the register stack. */
-        pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
+        pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
     }
 }

@@ -5885 +5885 @@
 static void iemFpuStackPushOverflowOnly(PIEMCPU pIemCpu, PCPUMCTX pCtx)
 {
-    if (pCtx->fpu.FCW & X86_FCW_IM)
+    if (pCtx->XState.x87.FCW & X86_FCW_IM)
     {
         /* Masked overflow. */
-        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + 7) & X86_FSW_TOP_SMASK;
-        pCtx->fpu.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
-        pCtx->fpu.FSW |= X86_FSW_C1 | X86_FSW_IE | X86_FSW_SF;
-        pCtx->fpu.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
-        pCtx->fpu.FTW |= RT_BIT(iNewTop);
-        iemFpuStoreQNan(&pCtx->fpu.aRegs[7].r80);
+        uint16_t iNewTop = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + 7) & X86_FSW_TOP_SMASK;
+        pCtx->XState.x87.FSW &= ~(X86_FSW_TOP_MASK | X86_FSW_C_MASK);
+        pCtx->XState.x87.FSW |= X86_FSW_C1 | X86_FSW_IE | X86_FSW_SF;
+        pCtx->XState.x87.FSW |= iNewTop << X86_FSW_TOP_SHIFT;
+        pCtx->XState.x87.FTW |= RT_BIT(iNewTop);
+        iemFpuStoreQNan(&pCtx->XState.x87.aRegs[7].r80);
         iemFpuRotateStackPush(pCtx);
     }

@@ -5899 +5899 @@
     {
         /* Exception pending - don't change TOP or the register stack. */
-        pCtx->fpu.FSW &= ~X86_FSW_C_MASK;
-        pCtx->fpu.FSW |= X86_FSW_C1 | X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
+        pCtx->XState.x87.FSW &= ~X86_FSW_C_MASK;
+        pCtx->XState.x87.FSW |= X86_FSW_C1 | X86_FSW_IE | X86_FSW_SF | X86_FSW_ES | X86_FSW_B;
     }
 }

@@ -5938 +5938 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + iStReg) & X86_FSW_TOP_SMASK;
-    if (pCtx->fpu.FTW & RT_BIT(iReg))
+    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + iStReg) & X86_FSW_TOP_SMASK;
+    if (pCtx->XState.x87.FTW & RT_BIT(iReg))
         return VINF_SUCCESS;
     return VERR_NOT_FOUND;

@@ -5948 +5948 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->fpu.FSW) + iStReg) & X86_FSW_TOP_SMASK;
-    if (pCtx->fpu.FTW & RT_BIT(iReg))
-    {
-        *ppRef = &pCtx->fpu.aRegs[iStReg].r80;
+    uint16_t iReg = (X86_FSW_TOP_GET(pCtx->XState.x87.FSW) + iStReg) & X86_FSW_TOP_SMASK;
+    if (pCtx->XState.x87.FTW & RT_BIT(iReg))
+    {
+        *ppRef = &pCtx->XState.x87.aRegs[iStReg].r80;
         return VINF_SUCCESS;
     }

@@ -5962 +5962 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t iTop = X86_FSW_TOP_GET(pCtx->fpu.FSW);
+    uint16_t iTop = X86_FSW_TOP_GET(pCtx->XState.x87.FSW);
     uint16_t iReg0 = (iTop + iStReg0) & X86_FSW_TOP_SMASK;
     uint16_t iReg1 = (iTop + iStReg1) & X86_FSW_TOP_SMASK;
-    if ((pCtx->fpu.FTW & (RT_BIT(iReg0) | RT_BIT(iReg1))) == (RT_BIT(iReg0) | RT_BIT(iReg1)))
-    {
-        *ppRef0 = &pCtx->fpu.aRegs[iStReg0].r80;
-        *ppRef1 = &pCtx->fpu.aRegs[iStReg1].r80;
+    if ((pCtx->XState.x87.FTW & (RT_BIT(iReg0) | RT_BIT(iReg1))) == (RT_BIT(iReg0) | RT_BIT(iReg1)))
+    {
+        *ppRef0 = &pCtx->XState.x87.aRegs[iStReg0].r80;
+        *ppRef1 = &pCtx->XState.x87.aRegs[iStReg1].r80;
         return VINF_SUCCESS;
     }

@@ -5978 +5978 @@
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
-    uint16_t iTop = X86_FSW_TOP_GET(pCtx->fpu.FSW);
+    uint16_t iTop = X86_FSW_TOP_GET(pCtx->XState.x87.FSW);
     uint16_t iReg0 = (iTop + iStReg0) & X86_FSW_TOP_SMASK;
     uint16_t iReg1 = (iTop + iStReg1) & X86_FSW_TOP_SMASK;
-    if ((pCtx->fpu.FTW & (RT_BIT(iReg0) | RT_BIT(iReg1))) == (RT_BIT(iReg0) | RT_BIT(iReg1)))
-    {
-        *ppRef0 = &pCtx->fpu.aRegs[iStReg0].r80;
+    if ((pCtx->XState.x87.FTW & (RT_BIT(iReg0) | RT_BIT(iReg1))) == (RT_BIT(iReg0) | RT_BIT(iReg1)))
+    {
+        *ppRef0 = &pCtx->XState.x87.aRegs[iStReg0].r80;
         return VINF_SUCCESS;
     }

@@ -5997 +5997 @@
 static void iemFpuRecalcExceptionStatus(PCPUMCTX pCtx)
 {
-    uint16_t u16Fsw = pCtx->fpu.FSW;
-    if ((u16Fsw & X86_FSW_XCPT_MASK) & ~(pCtx->fpu.FCW & X86_FCW_XCPT_MASK))
+    uint16_t u16Fsw = pCtx->XState.x87.FSW;
+    if ((u16Fsw & X86_FSW_XCPT_MASK) & ~(pCtx->XState.x87.FCW & X86_FCW_XCPT_MASK))
         u16Fsw |= X86_FSW_ES | X86_FSW_B;
     else
         u16Fsw &= ~(X86_FSW_ES | X86_FSW_B);
-    pCtx->fpu.FSW = u16Fsw;
+    pCtx->XState.x87.FSW = u16Fsw;
 }

@@ -6014 +6014 @@
 static uint16_t iemFpuCalcFullFtw(PCCPUMCTX pCtx)
 {
-    uint8_t const u8Ftw = (uint8_t)pCtx->fpu.FTW;
+    uint8_t const u8Ftw = (uint8_t)pCtx->XState.x87.FTW;
     uint16_t u16Ftw = 0;
-    unsigned const iTop = X86_FSW_TOP_GET(pCtx->fpu.FSW);
+    unsigned const iTop = X86_FSW_TOP_GET(pCtx->XState.x87.FSW);
     for (unsigned iSt = 0; iSt < 8; iSt++)
     {

@@ -6025 +6025 @@
         {
             uint16_t uTag;
-            PCRTFLOAT80U const pr80Reg = &pCtx->fpu.aRegs[iSt].r80;
+            PCRTFLOAT80U const pr80Reg = &pCtx->XState.x87.aRegs[iSt].r80;
             if (pr80Reg->s.uExponent == 0x7fff)
                 uTag = 2; /* Exponent is all 1's => Special. */

@@ -7172 +7172 @@
     /* The lazy approach for now... */
     /** @todo testcase: Ordering of \#SS(0) vs \#GP() vs \#PF on SSE stuff. */
-    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->fpu.MXCSR & X86_MXSCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
+    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->XState.x87.MXCSR & X86_MXSCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
         return iemRaiseGeneralProtectionFault0(pIemCpu);

@@ -7372 +7372 @@
 {
     /* The lazy approach for now... */
-    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->fpu.MXCSR & X86_MXSCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
+    if ((GCPtrMem & 15) && !(pIemCpu->CTX_SUFF(pCtx)->XState.x87.MXCSR & X86_MXSCR_MM)) /** @todo should probably check this *after* applying seg.u64Base... Check real HW. */
         return iemRaiseGeneralProtectionFault0(pIemCpu);

@@ -8304 +8304 @@
 #define IEM_MC_MAYBE_RAISE_FPU_XCPT() \
     do { \
-        if ((pIemCpu)->CTX_SUFF(pCtx)->fpu.FSW & X86_FSW_ES) \
+        if ((pIemCpu)->CTX_SUFF(pCtx)->XState.x87.FSW & X86_FSW_ES) \
             return iemRaiseMathFault(pIemCpu); \
     } while (0)

@@ -8387 +8387 @@
 #define IEM_MC_FETCH_EFLAGS(a_EFlags) (a_EFlags) = (pIemCpu)->CTX_SUFF(pCtx)->eflags.u
 #define IEM_MC_FETCH_EFLAGS_U8(a_EFlags) (a_EFlags) = (uint8_t)(pIemCpu)->CTX_SUFF(pCtx)->eflags.u
-#define IEM_MC_FETCH_FSW(a_u16Fsw) (a_u16Fsw) = pIemCpu->CTX_SUFF(pCtx)->fpu.FSW
-#define IEM_MC_FETCH_FCW(a_u16Fcw) (a_u16Fcw) = pIemCpu->CTX_SUFF(pCtx)->fpu.FCW
+#define IEM_MC_FETCH_FSW(a_u16Fsw) (a_u16Fsw) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.FSW
+#define IEM_MC_FETCH_FCW(a_u16Fcw) (a_u16Fcw) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.FCW

 #define IEM_MC_STORE_GREG_U8(a_iGReg, a_u8Value) *iemGRegRefU8(pIemCpu, (a_iGReg)) = (a_u8Value)

@@ -8401 +8401 @@
 #define IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(a_pu32Dst) do { (a_pu32Dst)[1] = 0; } while (0)
 #define IEM_MC_STORE_FPUREG_R80_SRC_REF(a_iSt, a_pr80Src) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[a_iSt].r80 = *(a_pr80Src); } while (0)
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[a_iSt].r80 = *(a_pr80Src); } while (0)

 #define IEM_MC_REF_GREG_U8(a_pu8Dst, a_iGReg) (a_pu8Dst) = iemGRegRefU8(pIemCpu, (a_iGReg))

@@ -8492 +8492 @@
 #define IEM_MC_FLIP_EFL_BIT(a_fBit) do { (pIemCpu)->CTX_SUFF(pCtx)->eflags.u ^= (a_fBit); } while (0)

-#define IEM_MC_CLEAR_FSW_EX() do { (pIemCpu)->CTX_SUFF(pCtx)->fpu.FSW &= X86_FSW_C_MASK | X86_FSW_TOP_MASK; } while (0)
+#define IEM_MC_CLEAR_FSW_EX() do { (pIemCpu)->CTX_SUFF(pCtx)->XState.x87.FSW &= X86_FSW_C_MASK | X86_FSW_TOP_MASK; } while (0)


 #define IEM_MC_FETCH_MREG_U64(a_u64Value, a_iMReg) \
-    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx; } while (0)
+    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx; } while (0)
 #define IEM_MC_FETCH_MREG_U32(a_u32Value, a_iMReg) \
-    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].au32[0]; } while (0)
+    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].au32[0]; } while (0)
 #define IEM_MC_STORE_MREG_U64(a_iMReg, a_u64Value) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx = (a_u64Value); } while (0)
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx = (a_u64Value); } while (0)
 #define IEM_MC_STORE_MREG_U32_ZX_U64(a_iMReg, a_u32Value) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx = (uint32_t)(a_u32Value); } while (0)
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx = (uint32_t)(a_u32Value); } while (0)
 #define IEM_MC_REF_MREG_U64(a_pu64Dst, a_iMReg) \
-    (a_pu64Dst) = (&pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx)
+    (a_pu64Dst) = (&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx)
 #define IEM_MC_REF_MREG_U64_CONST(a_pu64Dst, a_iMReg) \
-    (a_pu64Dst) = ((uint64_t const *)&pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx)
+    (a_pu64Dst) = ((uint64_t const *)&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx)
 #define IEM_MC_REF_MREG_U32_CONST(a_pu32Dst, a_iMReg) \
-    (a_pu32Dst) = ((uint32_t const *)&pIemCpu->CTX_SUFF(pCtx)->fpu.aRegs[(a_iMReg)].mmx)
+    (a_pu32Dst) = ((uint32_t const *)&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aRegs[(a_iMReg)].mmx)

 #define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) \
-    do { (a_u128Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm; } while (0)
+    do { (a_u128Value) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].xmm; } while (0)
 #define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg) \
-    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0]; } while (0)
+    do { (a_u64Value) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[0]; } while (0)
 #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg) \
-    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au32[0]; } while (0)
+    do { (a_u32Value) = pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au32[0]; } while (0)
 #define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm = (a_u128Value); } while (0)
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].xmm = (a_u128Value); } while (0)
 #define IEM_MC_STORE_XREG_U64_ZX_U128(a_iXReg, a_u64Value) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0] = (a_u64Value); \
-         pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[1] = 0; \
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[0] = (a_u64Value); \
+         pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[1] = 0; \
     } while (0)
 #define IEM_MC_STORE_XREG_U32_ZX_U128(a_iXReg, a_u32Value) \
-    do { pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0] = (uint32_t)(a_u32Value); \
-         pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[1] = 0; \
+    do { pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[0] = (uint32_t)(a_u32Value); \
+         pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[1] = 0; \
     } while (0)
 #define IEM_MC_REF_XREG_U128(a_pu128Dst, a_iXReg) \
-    (a_pu128Dst) = (&pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm)
+    (a_pu128Dst) = (&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].xmm)
 #define IEM_MC_REF_XREG_U128_CONST(a_pu128Dst, a_iXReg) \
-    (a_pu128Dst) = ((uint128_t const *)&pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].xmm)
+    (a_pu128Dst) = ((uint128_t const *)&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].xmm)
 #define IEM_MC_REF_XREG_U64_CONST(a_pu64Dst, a_iXReg) \
-    (a_pu64Dst) = ((uint64_t const *)&pIemCpu->CTX_SUFF(pCtx)->fpu.aXMM[(a_iXReg)].au64[0])
+    (a_pu64Dst) = ((uint64_t const *)&pIemCpu->CTX_SUFF(pCtx)->XState.x87.aXMM[(a_iXReg)].au64[0])

 #define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \

@@ -8739 +8739 @@
         if (   !(a_u16FSW & X86_FSW_ES) \
             || !(  (a_u16FSW & (X86_FSW_UE | X86_FSW_OE | X86_FSW_IE)) \
-                 & ~(pIemCpu->CTX_SUFF(pCtx)->fpu.FCW & X86_FCW_MASK_ALL) ) ) \
+                 & ~(pIemCpu->CTX_SUFF(pCtx)->XState.x87.FCW & X86_FCW_MASK_ALL) ) ) \
             IEM_MC_RETURN_ON_FAILURE(iemMemCommitAndUnmap(pIemCpu, (a_pvMem), (a_fAccess))); \
     } while (0)

@@ -8875 +8875 @@
     do { \
         iemFpuPrepareUsage(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0)); \
     } while (0)

@@ -8888 +8888 @@
     do { \
         iemFpuPrepareUsage(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1)); \
     } while (0)

@@ -8902 +8902 @@
     do { \
        iemFpuPrepareUsage(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1), (a2)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1), (a2)); \
     } while (0)

@@ -9019 +9019 @@
     do { \
         iemFpuPrepareUsage(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1)); \
     } while (0)

@@ -9033 +9033 @@
     do { \
         iemFpuPrepareUsage(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1), (a2)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1), (a2)); \
     } while (0)

@@ -9047 +9047 @@
     do { \
         iemFpuPrepareUsageSse(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1)); \
     } while (0)

@@ -9061 +9061 @@
     do { \
         iemFpuPrepareUsageSse(pIemCpu); \
-        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->fpu, (a0), (a1), (a2)); \
+        a_pfnAImpl(&pIemCpu->CTX_SUFF(pCtx)->XState.x87, (a0), (a1), (a2)); \
     } while (0)

@@ -9131 +9131 @@
     if (iemFpu2StRegsNotEmptyRefFirst(pIemCpu, (a_iSt0), &(a_pr80Dst0), (a_iSt1)) == VINF_SUCCESS) {
 #define IEM_MC_IF_FCW_IM() \
-    if (pIemCpu->CTX_SUFF(pCtx)->fpu.FCW & X86_FCW_IM) {
+    if (pIemCpu->CTX_SUFF(pCtx)->XState.x87.FCW & X86_FCW_IM) {

 #define IEM_MC_ELSE() } else {

@@ -10219 +10219 @@
         if (fRem)
         {
-            pOrgCtx->fpu.FOP        = pDebugCtx->fpu.FOP;
-            pOrgCtx->fpu.FPUIP      = pDebugCtx->fpu.FPUIP;
-            pOrgCtx->fpu.CS         = pDebugCtx->fpu.CS;
-            pOrgCtx->fpu.Rsrvd1     = pDebugCtx->fpu.Rsrvd1;
-            pOrgCtx->fpu.FPUDP      = pDebugCtx->fpu.FPUDP;
-            pOrgCtx->fpu.DS         = pDebugCtx->fpu.DS;
-            pOrgCtx->fpu.Rsrvd2     = pDebugCtx->fpu.Rsrvd2;
-            //pOrgCtx->fpu.MXCSR_MASK = pDebugCtx->fpu.MXCSR_MASK;
-            if ((pOrgCtx->fpu.FSW & X86_FSW_TOP_MASK) == (pDebugCtx->fpu.FSW & X86_FSW_TOP_MASK))
-                pOrgCtx->fpu.FSW = pDebugCtx->fpu.FSW;
+            pOrgCtx->XState.x87.FOP        = pDebugCtx->XState.x87.FOP;
+            pOrgCtx->XState.x87.FPUIP      = pDebugCtx->XState.x87.FPUIP;
+            pOrgCtx->XState.x87.CS         = pDebugCtx->XState.x87.CS;
+            pOrgCtx->XState.x87.Rsrvd1     = pDebugCtx->XState.x87.Rsrvd1;
+            pOrgCtx->XState.x87.FPUDP      = pDebugCtx->XState.x87.FPUDP;
+            pOrgCtx->XState.x87.DS         = pDebugCtx->XState.x87.DS;
+            pOrgCtx->XState.x87.Rsrvd2     = pDebugCtx->XState.x87.Rsrvd2;
+            //pOrgCtx->XState.x87.MXCSR_MASK = pDebugCtx->XState.x87.MXCSR_MASK;
+            if ((pOrgCtx->XState.x87.FSW & X86_FSW_TOP_MASK) == (pDebugCtx->XState.x87.FSW & X86_FSW_TOP_MASK))
+                pOrgCtx->XState.x87.FSW = pDebugCtx->XState.x87.FSW;
         }
 #endif
-        if (memcmp(&pOrgCtx->fpu, &pDebugCtx->fpu, sizeof(pDebugCtx->fpu)))
+        if (memcmp(&pOrgCtx->XState.x87, &pDebugCtx->XState.x87, sizeof(pDebugCtx->XState.x87)))
         {
             RTAssertMsg2Weak(" the FPU state differs\n");
             cDiffs++;
-            CHECK_FIELD(fpu.FCW);
-            CHECK_FIELD(fpu.FSW);
-            CHECK_FIELD(fpu.FTW);
-            CHECK_FIELD(fpu.FOP);
-            CHECK_FIELD(fpu.FPUIP);
-            CHECK_FIELD(fpu.CS);
-            CHECK_FIELD(fpu.Rsrvd1);
-            CHECK_FIELD(fpu.FPUDP);
-            CHECK_FIELD(fpu.DS);
-            CHECK_FIELD(fpu.Rsrvd2);
-            CHECK_FIELD(fpu.MXCSR);
-            CHECK_FIELD(fpu.MXCSR_MASK);
-            CHECK_FIELD(fpu.aRegs[0].au64[0]); CHECK_FIELD(fpu.aRegs[0].au64[1]);
-            CHECK_FIELD(fpu.aRegs[1].au64[0]); CHECK_FIELD(fpu.aRegs[1].au64[1]);
-            CHECK_FIELD(fpu.aRegs[2].au64[0]); CHECK_FIELD(fpu.aRegs[2].au64[1]);
-            CHECK_FIELD(fpu.aRegs[3].au64[0]); CHECK_FIELD(fpu.aRegs[3].au64[1]);
-            CHECK_FIELD(fpu.aRegs[4].au64[0]); CHECK_FIELD(fpu.aRegs[4].au64[1]);
-            CHECK_FIELD(fpu.aRegs[5].au64[0]); CHECK_FIELD(fpu.aRegs[5].au64[1]);
-            CHECK_FIELD(fpu.aRegs[6].au64[0]); CHECK_FIELD(fpu.aRegs[6].au64[1]);
-            CHECK_FIELD(fpu.aRegs[7].au64[0]); CHECK_FIELD(fpu.aRegs[7].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 0].au64[0]); CHECK_FIELD(fpu.aXMM[ 0].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 1].au64[0]); CHECK_FIELD(fpu.aXMM[ 1].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 2].au64[0]); CHECK_FIELD(fpu.aXMM[ 2].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 3].au64[0]); CHECK_FIELD(fpu.aXMM[ 3].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 4].au64[0]); CHECK_FIELD(fpu.aXMM[ 4].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 5].au64[0]); CHECK_FIELD(fpu.aXMM[ 5].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 6].au64[0]); CHECK_FIELD(fpu.aXMM[ 6].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 7].au64[0]); CHECK_FIELD(fpu.aXMM[ 7].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 8].au64[0]); CHECK_FIELD(fpu.aXMM[ 8].au64[1]);
-            CHECK_FIELD(fpu.aXMM[ 9].au64[0]); CHECK_FIELD(fpu.aXMM[ 9].au64[1]);
-            CHECK_FIELD(fpu.aXMM[10].au64[0]); CHECK_FIELD(fpu.aXMM[10].au64[1]);
-            CHECK_FIELD(fpu.aXMM[11].au64[0]); CHECK_FIELD(fpu.aXMM[11].au64[1]);
-            CHECK_FIELD(fpu.aXMM[12].au64[0]); CHECK_FIELD(fpu.aXMM[12].au64[1]);
-            CHECK_FIELD(fpu.aXMM[13].au64[0]); CHECK_FIELD(fpu.aXMM[13].au64[1]);
-            CHECK_FIELD(fpu.aXMM[14].au64[0]); CHECK_FIELD(fpu.aXMM[14].au64[1]);
-            CHECK_FIELD(fpu.aXMM[15].au64[0]); CHECK_FIELD(fpu.aXMM[15].au64[1]);
-            for (unsigned i = 0; i < RT_ELEMENTS(pOrgCtx->fpu.au32RsrvdRest); i++)
-                CHECK_FIELD(fpu.au32RsrvdRest[i]);
+            CHECK_FIELD(XState.x87.FCW);
+            CHECK_FIELD(XState.x87.FSW);
+            CHECK_FIELD(XState.x87.FTW);
+            CHECK_FIELD(XState.x87.FOP);
+            CHECK_FIELD(XState.x87.FPUIP);
+            CHECK_FIELD(XState.x87.CS);
+            CHECK_FIELD(XState.x87.Rsrvd1);
+            CHECK_FIELD(XState.x87.FPUDP);
+            CHECK_FIELD(XState.x87.DS);
+            CHECK_FIELD(XState.x87.Rsrvd2);
+            CHECK_FIELD(XState.x87.MXCSR);
+            CHECK_FIELD(XState.x87.MXCSR_MASK);
+            CHECK_FIELD(XState.x87.aRegs[0].au64[0]); CHECK_FIELD(XState.x87.aRegs[0].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[1].au64[0]); CHECK_FIELD(XState.x87.aRegs[1].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[2].au64[0]); CHECK_FIELD(XState.x87.aRegs[2].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[3].au64[0]); CHECK_FIELD(XState.x87.aRegs[3].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[4].au64[0]); CHECK_FIELD(XState.x87.aRegs[4].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[5].au64[0]); CHECK_FIELD(XState.x87.aRegs[5].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[6].au64[0]); CHECK_FIELD(XState.x87.aRegs[6].au64[1]);
+            CHECK_FIELD(XState.x87.aRegs[7].au64[0]); CHECK_FIELD(XState.x87.aRegs[7].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 0].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 0].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 1].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 1].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 2].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 2].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 3].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 3].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 4].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 4].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 5].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 5].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 6].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 6].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 7].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 7].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 8].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 8].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[ 9].au64[0]); CHECK_FIELD(XState.x87.aXMM[ 9].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[10].au64[0]); CHECK_FIELD(XState.x87.aXMM[10].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[11].au64[0]); CHECK_FIELD(XState.x87.aXMM[11].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[12].au64[0]); CHECK_FIELD(XState.x87.aXMM[12].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[13].au64[0]); CHECK_FIELD(XState.x87.aXMM[13].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[14].au64[0]); CHECK_FIELD(XState.x87.aXMM[14].au64[1]);
+            CHECK_FIELD(XState.x87.aXMM[15].au64[0]); CHECK_FIELD(XState.x87.aXMM[15].au64[1]);
+            for (unsigned i = 0; i < RT_ELEMENTS(pOrgCtx->XState.x87.au32RsrvdRest); i++)
+                CHECK_FIELD(XState.x87.au32RsrvdRest[i]);
         }
         CHECK_FIELD(rip);

@@ -10529 +10529 @@
                 pCtx->cs.Sel, pCtx->ss.Sel, pCtx->ds.Sel, pCtx->es.Sel,
                 pCtx->fs.Sel, pCtx->gs.Sel, pCtx->eflags.u,
-                pCtx->fpu.FSW, pCtx->fpu.FCW, pCtx->fpu.FTW, pCtx->fpu.MXCSR, pCtx->fpu.MXCSR_MASK,
+                pCtx->XState.x87.FSW, pCtx->XState.x87.FCW, pCtx->XState.x87.FTW, pCtx->XState.x87.MXCSR, pCtx->XState.x87.MXCSR_MASK,
                 szInstr));