Changeset 102437 in vbox
Timestamp: Dec 3, 2023, 11:27:41 AM
Location:  trunk/src/VBox/VMM/VMMAll
Files:     2 edited
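Every hunk below applies one dispatch idea: an instruction body uses the interlocked worker only when a LOCK prefix was decoded and the execution mode does not carry IEM_F_X86_DISREGARD_LOCK; otherwise it uses the plain worker. The following self-contained C sketch illustrates that selection shape only. It is not VirtualBox code: the flag values and the myXchg* / myEmulate* names are hypothetical stand-ins, and it assumes a GCC/Clang-style compiler for the __atomic_exchange_n builtin.

#include <stdint.h>

#define MY_OP_PRF_LOCK     0x01u   /* stand-in for IEM_OP_PRF_LOCK (value is made up) */
#define MY_DISREGARD_LOCK  0x02u   /* stand-in for IEM_F_X86_DISREGARD_LOCK (value is made up) */

/* Interlocked byte exchange: honours LOCK semantics. */
static void myXchgU8Locked(uint8_t *pu8Mem, uint8_t *pu8Reg)
{
    *pu8Reg = __atomic_exchange_n(pu8Mem, *pu8Reg, __ATOMIC_SEQ_CST);
}

/* Plain byte exchange: no atomicity guarantee needed. */
static void myXchgU8Unlocked(uint8_t *pu8Mem, uint8_t *pu8Reg)
{
    uint8_t const bTmp = *pu8Mem;
    *pu8Mem = *pu8Reg;
    *pu8Reg = bTmp;
}

/* Worker selection in the same shape as the new checks in this changeset:
   take the unlocked path when no LOCK prefix was decoded *or* the execution
   flags say the prefix may be disregarded. */
static void myEmulateXchgMem8(uint32_t fPrefixes, uint32_t fExec,
                              uint8_t *pu8Mem, uint8_t *pu8Reg)
{
    if (!(fPrefixes & MY_OP_PRF_LOCK) || (fExec & MY_DISREGARD_LOCK))
        myXchgU8Unlocked(pu8Mem, pu8Reg);
    else
        myXchgU8Locked(pu8Mem, pu8Reg);
}

In the changeset itself the same effect is obtained by hoisting each instruction body into a macro that takes the worker as a parameter, so the locked/unlocked decision is made once, up front, instead of inside the body.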
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h
r102433 → r102437

The lock-prefix check is widened in the same way in twelve places, at (old = new) lines 94, 154, 332, 550, 3976, 4047, 4269, 4507, 4793 and 5014, and further down at old 13003 → new 13011 and old 13101 → new 13109:

-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
+        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \

XCHG r/m8, r8, memory form (old lines 5269-5292 → new lines 5269-5299): the open-coded body, which previously called iemAImpl_xchg_u8_locked or iemAImpl_xchg_u8_unlocked inline, is hoisted into a macro taking the worker as a parameter, and the IEM_F_X86_DISREGARD_LOCK check now only selects which worker to instantiate it with:

#define IEMOP_XCHG_BYTE(a_fnWorker) \
    IEM_MC_BEGIN(2, 4, 0, 0); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
    IEM_MC_LOCAL(uint8_t, uTmpReg); \
    IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
    IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
    \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
    IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
    IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()

    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked);
    }
    else
    {
        IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked);
    }

XCHG r/m16/32/64, reg, memory form (old lines 5361-5436 → new lines 5368-5445): the per-operand-size bodies, which previously chose between iemAImpl_xchg_uNN_locked and iemAImpl_xchg_uNN_unlocked inline, are hoisted into IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64). Only the 16-bit case is reproduced here; the 32-bit and 64-bit cases (new lines 5394-5432) mirror it with IEM_MC_F_MIN_386 and IEM_MC_F_64BIT:

#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 4, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(uint16_t, uTmpReg); \
                IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                … \
            case IEMMODE_64BIT: \
                … \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)

    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked);
    }
    else
    {
        IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked);
    }

The lock prefix decoder (old lines 12893-12898 → new lines 12902-12906) now records the prefix unconditionally; the IEM_F_X86_DISREGARD_LOCK handling moves into the individual instruction bodies:

     {
         IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
-        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
-            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
+        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

         uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h
r102433 → r102437

The lock-prefix check gets the same widening as in IEMAllInstOneByte.cpp.h in four bit-instruction memory paths, at old 9028 → new 9028, old 9284 → new 9284, old 10832 → new 10804 and old 11053 → new 11025:

         /* memory destination. */ \
         /** @todo test negative bit offsets! */ \
-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
+        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
         { \

CMPXCHG r/m8, r8, register form (old lines 10238-10257 → new lines 10238-10254): decoding switches to IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() and the inline locked/unlocked selection is dropped in favour of the plain worker:

     {
         IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
-        IEMOP_HLP_DONE_DECODING();
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         IEM_MC_ARG(uint8_t *, pu8Dst, 0);
         IEM_MC_ARG(uint8_t *, pu8Al, 1);
…
         IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
         IEM_MC_REF_EFLAGS(pEFlags);
-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
-        else
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
+        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);

         IEM_MC_ADVANCE_RIP_AND_FINISH();
         IEM_MC_END();
     }

CMPXCHG r/m8, r8, memory form (old lines 10258-10289 → new lines 10255-10293): the open-coded body, which picked iemAImpl_cmpxchg_u8 or iemAImpl_cmpxchg_u8_locked inline on IEM_OP_PRF_LOCK, becomes IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker) with the worker chosen up front:

#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker) \
        IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG(uint8_t, u8Src, 2); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        \
        IEM_MC_LOCAL(uint8_t, u8Al); \
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
        \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()

        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8);
        }
        else
        {
            IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked);
        }

CMPXCHG r/m16/32/64, register forms (old lines 10302-10386 → new lines 10306-10366): each operand-size case switches to IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() and always calls the plain worker. The 16-bit case:

     case IEMMODE_16BIT:
         IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
-        IEMOP_HLP_DONE_DECODING();
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
…
         IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
         IEM_MC_REF_EFLAGS(pEFlags);
-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
-        else
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
+        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);

The 32-bit case is changed the same way; the 64-bit case additionally drops the RT_ARCH_X86 #ifdef variants and a duplicated break:

     case IEMMODE_64BIT:
         IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
-        IEMOP_HLP_DONE_DECODING();
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         IEM_MC_ARG(uint64_t *, pu64Dst, 0);
         IEM_MC_ARG(uint64_t *, pu64Rax, 1);
-#ifdef RT_ARCH_X86
-        IEM_MC_ARG(uint64_t *, pu64Src, 2);
-#else
         IEM_MC_ARG(uint64_t, u64Src, 2);
-#endif
         IEM_MC_ARG(uint32_t *, pEFlags, 3);

-#ifndef RT_ARCH_X86
         IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
-#endif
         IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
         IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
         IEM_MC_REF_EFLAGS(pEFlags);
-#ifdef RT_ARCH_X86
-        IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
-        else
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
-#else
-        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
-        else
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
-#endif
+        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);

         IEM_MC_ADVANCE_RIP_AND_FINISH();
         IEM_MC_END();
         break;
-        break;

CMPXCHG r/m16/32/64, memory forms (old lines 10389-10505 → new lines 10369-10477): the open-coded per-size bodies are hoisted into IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64). Only the 16-bit case and the new worker selection are reproduced here; the 32-bit and 64-bit cases mirror it:

#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 2); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                \
                IEM_MC_LOCAL(uint16_t, u16Ax); \
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
                \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                … \
            case IEMMODE_64BIT: \
                … \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)

    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64);
    }
    else
    {
        IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked);
    }

XADD r/m8, r8, memory form (old lines 11618-11646 → new lines 11590-11624): same treatment; the body becomes IEMOP_BODY_XADD_BYTE(a_fnWorker) and the lock check picks the worker:

#define IEMOP_BODY_XADD_BYTE(a_fnWorker) \
    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING(); \
    \
    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    \
    IEM_MC_LOCAL(uint8_t, u8RegCopy); \
    IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
    IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
    \
    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
    IEM_MC_FETCH_EFLAGS(EFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
    \
    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
    IEM_MC_COMMIT_EFLAGS(EFlags); \
    IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()

    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8);
    }
    else
    {
        IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked);
    }

XADD r/m16/32/64, memory forms (old lines 11719-11809 → new lines 11697-11790): hoisted into IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64), whose three operand-size cases map the memory destination, call the given worker via IEM_MC_CALL_VOID_AIMPL_3(a_fnWorkerNN, puNNDst, puNNReg, pEFlags) and store the register copy back with IEM_MC_STORE_GREG_UNN, followed by the same worker selection:

    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64);
    }
    else
    {
        IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked);
    }

CMPXCHG8B Mq (old lines 12433-12469 → new lines 12414-12454): the body becomes IEMOP_BODY_CMPXCHG8B(a_fnWorker); the old inline selection, which used "if (!(fExec & IEM_F_X86_DISREGARD_LOCK) && (fPrefixes & IEM_OP_PRF_LOCK))" to pick iemAImpl_cmpxchg8b_locked, is replaced by the same up-front check as elsewhere:

#define IEMOP_BODY_CMPXCHG8B(a_fnWorker) \
    IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0); \
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
    IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
    \
    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
    IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
    \
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
    IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
    IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
    \
    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
    IEM_MC_FETCH_EFLAGS(EFlags); \
    IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
    \
    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
    IEM_MC_COMMIT_EFLAGS(EFlags); \
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
        IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    \
    IEM_MC_END()

    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b);
    }
    else
    {
        IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked);
    }

CMPXCHG16B (old lines 12515-12529 → new lines 12500-12513): the condition is rewritten to the new form and the plain and locked workers swap branches to match:

     if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
     {
-        if (   !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
-            && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
+        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
         {
             BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo));
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
+            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
             BODY_CMPXCHG16B_TAIL;
         }
…
         {
             BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo));
-            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
+            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
             BODY_CMPXCHG16B_TAIL;
         }

And on the RT_ARCH_ARM64 path (old lines 12551-12555 → new lines 12535-12539):

 #elif defined(RT_ARCH_ARM64)
     /** @todo may require fallback for unaligned accesses... */
-    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
+    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
     {
         BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo));
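The CMPXCHG, XADD and CMPXCHG8B/16B hunks above pick their workers the same way; only the operation differs. As a rough, self-contained C illustration of a locked versus unlocked compare-and-exchange pair (hypothetical names; the EFLAGS updates done by the real iemAImpl_cmpxchg_* workers are omitted; GCC/Clang __atomic builtins assumed):

#include <stdbool.h>
#include <stdint.h>

/* Interlocked compare-and-exchange: *pu8Dst is replaced by u8Src only if it
   still equals *pu8Al; on failure *pu8Al receives the current memory value. */
static bool myCmpXchgU8Locked(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src)
{
    return __atomic_compare_exchange_n(pu8Dst, pu8Al, u8Src,
                                       false /*weak*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

/* Plain variant: same semantics, but without any atomicity guarantee. */
static bool myCmpXchgU8Unlocked(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src)
{
    uint8_t const u8Old = *pu8Dst;
    if (u8Old == *pu8Al)
    {
        *pu8Dst = u8Src;
        return true;
    }
    *pu8Al = u8Old;
    return false;
}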