Changeset 103678 in vbox
- Timestamp: Mar 5, 2024 9:56:20 AM
- svn:sync-xref-src-repo-rev: 162040
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstCommonBodyMacros.h (diff r103642 → r103678)

  */
 
+/**
+ * Special case body for word/dword/qword instructions like SUB and XOR that
+ * can be used to zero a register.
+ *
+ * This can be used both for the rv_rm and rm_rv forms since it's working on
+ * the same register.
+ */
+#define IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(a_bRm) \
+    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
+        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
+    { \
+        switch (pVCpu->iem.s.enmEffOpSize) \
+        { \
+            case IEMMODE_16BIT: \
+                IEM_MC_BEGIN(1, 0, 0, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
+                IEM_MC_LOCAL(uint32_t, fEFlags); \
+                IEM_MC_FETCH_EFLAGS(fEFlags); \
+                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
+                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
+                IEM_MC_COMMIT_EFLAGS(fEFlags); \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+            \
+            case IEMMODE_32BIT: \
+                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
+                IEM_MC_LOCAL(uint32_t, fEFlags); \
+                IEM_MC_FETCH_EFLAGS(fEFlags); \
+                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
+                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
+                IEM_MC_COMMIT_EFLAGS(fEFlags); \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+            \
+            case IEMMODE_64BIT: \
+                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0); \
+                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, a_bRm), 0); \
+                IEM_MC_LOCAL(uint32_t, fEFlags); \
+                IEM_MC_FETCH_EFLAGS(fEFlags); \
+                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
+                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
+                IEM_MC_COMMIT_EFLAGS(fEFlags); \
+                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+                IEM_MC_END(); \
+                break; \
+            \
+            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
+        } \
+    } ((void)0)
+
 /**
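The guard at the top of the new macro is worth a note: shifting the ModR/M byte right by the reg shift leaves the mod and reg fields, and the right-hand side rebuilds that value from the r/m field with mod forced to register mode, so the comparison succeeds exactly when mod=11b and reg == r/m; the uRexReg/uRexB comparison extends the check to REX-prefixed encodings. Below is a minimal standalone C sketch of just the byte test (the X86_* constants are local stand-ins mirroring the values VBox defines in x86.h; the REX half of the check is omitted):

#include <stdint.h>
#include <stdio.h>

/* Local stand-ins for the VBox x86.h constants used by the macro. */
#define X86_MODRM_RM_MASK    UINT8_C(0x07)  /* r/m field: bits 0-2           */
#define X86_MODRM_REG_SHIFT  3              /* reg field starts at bit 3     */
#define X86_MOD_REG          3              /* mod=11b: register operand     */

/* Same test as IEMOP_BODY_BINARY_rv_SAME_REG_ZERO: after shifting the mod
   and reg fields down, the byte equals (rm | mod=11b) exactly when the
   instruction is register-to-register with reg == r/m. */
static int isSameRegRegForm(uint8_t bRm)
{
    return (bRm >> X86_MODRM_REG_SHIFT)
        == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT));
}

int main(void)
{
    printf("0xC0 -> %d\n", isSameRegRegForm(0xC0)); /* mod=11b, reg=rm=eax: 1 */
    printf("0xC8 -> %d\n", isSameRegRegForm(0xC8)); /* reg=ecx, rm=eax:     0 */
    printf("0x00 -> %d\n", isSameRegRegForm(0x00)); /* memory operand form: 0 */
    return 0;
}

Run as written, this reports 1 only for ModR/M 0xC0 (e.g. the 'xor eax,eax' encoding 31 C0), and 0 for mixed-register and memory forms.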
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h (diff r103677 → r103678)

 /**
+ * Special case body for byte instructions like SUB and XOR that can be used
+ * to zero a register.
+ *
+ * This can be used both for the r8_rm and rm_r8 forms since it's working on
+ * the same register.
+ */
+#define IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(a_bRm) \
+    if (   (a_bRm >> X86_MODRM_REG_SHIFT) == ((a_bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT)) \
+        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB) \
+    { \
+        IEM_MC_BEGIN(0, 1, 0, 0); \
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
+        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_REG(pVCpu, a_bRm), 0); \
+        IEM_MC_LOCAL(uint32_t, fEFlags); \
+        IEM_MC_FETCH_EFLAGS(fEFlags); \
+        IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS); \
+        IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF); \
+        IEM_MC_COMMIT_EFLAGS(fEFlags); \
+        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+        IEM_MC_END(); \
+    } ((void)0)
+
+/**
  * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
  * memory/register as the destination.
  */
-#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
-    \
+#define IEMOP_BODY_BINARY_rm_r8_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
     /* \
      * If rm is denoting a register, no more instruction bytes. \
      */ \
-    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
     { \
         IEM_MC_BEGIN(3, 0, 0, 0); \
         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
         IEM_MC_ARG(uint8_t, u8Src, 1); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
             IEM_MC_LOCAL(uint8_t, u8Dst); \
-            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
             IEM_MC_FETCH_EFLAGS(uEFlags); \
-            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
-            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
+            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
+            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, a_bRm), u8Dst); \
             IEM_MC_COMMIT_EFLAGS(uEFlags); \
         } IEM_MC_NATIVE_ELSE() { \
             IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
-            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
             IEM_MC_ARG(uint32_t *, pEFlags, 2); \
             IEM_MC_REF_EFLAGS(pEFlags); \
-            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
+            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
         } IEM_MC_NATIVE_ENDIF(); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
         IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
         \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING(); \
         IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         IEM_MC_FETCH_EFLAGS(EFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
+        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
         \
         IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
…
         IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
         \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING(); \
         IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
         IEM_MC_FETCH_EFLAGS(EFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
+        IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8_locked), pu8Dst, u8Src, pEFlags); \
         \
         IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
…
  * destination.
  */
-#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
-    \
+#define IEMOP_BODY_BINARY_r8_rm(a_bRm, a_InsNm, a_fNativeArchs) \
     /* \
      * If rm is denoting a register, no more instruction bytes. \
      */ \
-    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
     { \
         IEM_MC_BEGIN(3, 0, 0, 0); \
         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
         IEM_MC_ARG(uint8_t, u8Src, 1); \
-        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
             IEM_MC_LOCAL(uint8_t, u8Dst); \
-            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
             IEM_MC_FETCH_EFLAGS(uEFlags); \
-            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
-            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
+            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
+            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
             IEM_MC_COMMIT_EFLAGS(uEFlags); \
         } IEM_MC_NATIVE_ELSE() { \
             IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
-            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             IEM_MC_ARG(uint32_t *, pEFlags, 2); \
             IEM_MC_REF_EFLAGS(pEFlags); \
-            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
+            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
         } IEM_MC_NATIVE_ENDIF(); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
         IEM_MC_BEGIN(3, 1, 0, 0); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
         IEM_MC_ARG(uint8_t, u8Src, 1); \
…
         IEM_MC_NATIVE_IF(a_fNativeArchs) { \
             IEM_MC_LOCAL(uint8_t, u8Dst); \
-            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
             IEM_MC_LOCAL(uint32_t, uEFlags); \
             IEM_MC_FETCH_EFLAGS(uEFlags); \
-            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
-            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
+            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
+            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, a_bRm), u8Dst); \
             IEM_MC_COMMIT_EFLAGS(uEFlags); \
         } IEM_MC_NATIVE_ELSE() { \
             IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
-            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
             IEM_MC_ARG(uint32_t *, pEFlags, 2); \
             IEM_MC_REF_EFLAGS(pEFlags); \
-            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
+            IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u8), pu8Dst, u8Src, pEFlags); \
         } IEM_MC_NATIVE_ENDIF(); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
  * memory/register as the destination.
  */
-#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
-    \
+#define IEMOP_BODY_BINARY_rm_rv_RW(a_bRm, a_InsNm, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
     /* \
      * If rm is denoting a register, no more instruction bytes. \
      */ \
-    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
     { \
         switch (pVCpu->iem.s.enmEffOpSize) \
         { \
             case IEMMODE_16BIT: \
                 IEM_MC_BEGIN(3, 0, 0, 0); \
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint16_t, u16Src, 1); \
-                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                     IEM_MC_LOCAL(uint16_t, u16Dst); \
-                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
                     IEM_MC_FETCH_EFLAGS(uEFlags); \
-                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
-                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
+                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
+                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, a_bRm), u16Dst); \
                     IEM_MC_COMMIT_EFLAGS(uEFlags); \
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
-                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
-                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
+                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                 } IEM_MC_NATIVE_ENDIF(); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint32_t, u32Src, 1); \
-                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                     IEM_MC_LOCAL(uint32_t, u32Dst); \
-                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
                     IEM_MC_FETCH_EFLAGS(uEFlags); \
-                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
-                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
+                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
+                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, a_bRm), u32Dst); \
                     IEM_MC_COMMIT_EFLAGS(uEFlags); \
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
-                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
-                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
-                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
+                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                 } IEM_MC_NATIVE_ENDIF(); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                 IEM_MC_ARG(uint64_t, u64Src, 1); \
-                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                     IEM_MC_LOCAL(uint64_t, u64Dst); \
-                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                     IEM_MC_LOCAL(uint32_t, uEFlags); \
                     IEM_MC_FETCH_EFLAGS(uEFlags); \
-                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
-                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
+                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_InsNm,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
+                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, a_bRm), u64Dst); \
                     IEM_MC_COMMIT_EFLAGS(uEFlags); \
                 } IEM_MC_NATIVE_ELSE() { \
                     IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
-                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
                     IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                     IEM_MC_REF_EFLAGS(pEFlags); \
-                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
+                    IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                 } IEM_MC_NATIVE_ENDIF(); \
                 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16), pu16Dst, u16Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32), pu32Dst, u32Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64), pu64Dst, u64Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
…
     (void)0
 /* Separate macro to work around parsing issue in IEMAllInstPython.py */
-#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
+#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_bRm, a_InsNm) \
     switch (pVCpu->iem.s.enmEffOpSize) \
     { \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u16_locked), pu16Dst, u16Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u32_locked), pu32Dst, u32Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
…
                 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                 \
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
                 IEMOP_HLP_DONE_DECODING(); \
                 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
-                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
                 IEM_MC_FETCH_EFLAGS(EFlags); \
-                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
+                IEM_MC_CALL_VOID_AIMPL_3(RT_CONCAT3(iemAImpl_,a_InsNm,_u64_locked), pu64Dst, u64Src, pEFlags); \
                 \
                 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
…
 {
     IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked, add, 0, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, add, 0, 0);
 }
…
     IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, add, 0, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, add, 0, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, add);
 }
…
     IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8, add, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, add, 0);
 }
…
     IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 }
…
     IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, or);
 }
…
     IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked, adc, 0, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, adc, 0, 0);
 }
…
     IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, adc, 0, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, adc, 0, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, adc);
 }
…
     IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8, adc, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, adc, 0);
 }
…
     IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked, sbb, 0, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sbb, 0, 0);
 }
…
     IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, sbb, 0, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, sbb, 0, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sbb);
 }
…
     IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8, sbb, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, sbb, 0);
 }
…
     IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 }
…
     IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, and);
 }
…
     IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 }
…
     IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    IEMOP_BODY_BINARY_rm_rv_LOCKED(bRm, sub);
 }
…
 FNIEMOP_DEF(iemOp_sub_Gb_Eb)
 {
-    /** @todo sub reg,samreg */
     IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_r8_rm(bRm, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: sub samereg, samereg - zeros samereg and sets EFLAGS to known value */
     IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_rm_r8_RW(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
 }
…
     IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
-    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_rm_rv_RW(    bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
+    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_rm_rv_LOCKED( bRm, xor);
 }
…
     IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    /** @todo xor al,al optimization */
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
+    IEMOP_BODY_BINARY_r8_rm(bRm, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-
-    /*
-     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
-     */
-    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
-        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
-    {
-        switch (pVCpu->iem.s.enmEffOpSize)
-        {
-            /* ... 16-, 32- and 64-bit cases removed here; their bodies are
-               identical to the new IEMOP_BODY_BINARY_rv_SAME_REG_ZERO macro
-               in IEMAllInstCommonBodyMacros.h above, with bRm for a_bRm ... */
-            IEM_NOT_REACHED_DEFAULT_CASE_RET();
-        }
-    }
-
+    IEMOP_BODY_BINARY_rv_SAME_REG_ZERO(bRm); /* Special case: xor samereg, samereg - zeros samereg and sets EFLAGS to known value */
     IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
     IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
-    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_BODY_BINARY_r8_rm(bRm, cmp, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
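Both SAME_REG_ZERO bodies implement the same flags contract: every status flag is cleared and then PF and ZF are set, which is the architectural outcome of xor-ing or sub-ing a register with itself (a zero result has ZF=1 and even parity, while CF, OF, SF and AF end up clear). Here is a small self-contained sketch of that flag transform; the EFLAGS bit masks are written out locally as stand-ins for VBox's X86_EFL_* constants (the values match the architectural EFLAGS layout):

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Local stand-ins for the X86_EFL_* constants (architectural bit positions). */
#define X86_EFL_CF  UINT32_C(0x0001)
#define X86_EFL_PF  UINT32_C(0x0004)
#define X86_EFL_AF  UINT32_C(0x0010)
#define X86_EFL_ZF  UINT32_C(0x0040)
#define X86_EFL_SF  UINT32_C(0x0080)
#define X86_EFL_OF  UINT32_C(0x0800)
#define X86_EFL_STATUS_BITS \
    (X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF)

/* The update performed by the SAME_REG_ZERO bodies: drop all status flags,
   then set the two a zero result implies (zero flag and even parity). */
static uint32_t zeroIdiomEFlags(uint32_t fEFlags)
{
    fEFlags &= ~X86_EFL_STATUS_BITS;              /* IEM_MC_AND_LOCAL_U32 */
    fEFlags |= X86_EFL_PF | X86_EFL_ZF;           /* IEM_MC_OR_LOCAL_U32  */
    return fEFlags;
}

int main(void)
{
    /* Whatever was set before, only PF|ZF survive among the status bits. */
    printf("%#" PRIx32 "\n", zeroIdiomEFlags(X86_EFL_CF | X86_EFL_SF)); /* 0x44 */
    return 0;
}

So regardless of the incoming flag state, the idiom leaves exactly PF|ZF (0x44) in the status bits, mirroring what the IEM_MC_AND_LOCAL_U32/IEM_MC_OR_LOCAL_U32 pair does in the macros above.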
trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py (diff r103613 → r103678)

             self.aoParamRefs.append(ThreadedParamRef(oStmt.sFn, self.analyzeCallToType(oStmt.sFn), oStmt, oStmt.idxFn));
         elif (    oStmt.sFn[0] != 'i'
+              and not oStmt.sFn.startswith('RT_CONCAT3')
               and not oStmt.sFn.startswith('IEMTARGETCPU_EFL_BEHAVIOR_SELECT')
               and not oStmt.sFn.startswith('IEM_SELECT_HOST_OR_FALLBACK') ):
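The Python change follows from the macro rework: the instruction bodies no longer receive pre-formed worker names like iemAImpl_add_u8 but assemble them with RT_CONCAT3, so the threaded-code generator must accept RT_CONCAT3(...) wherever a function name is expected. Below is a rough illustration of the token pasting involved, using a local MY_CONCAT3 layered the way IPRT-style concatenation macros commonly are (a helper indirection so arguments are macro-expanded before pasting); the worker functions are hypothetical stubs, not the real iemAImpl_* implementations:

#include <stdio.h>

/* Two-level paste so arguments expand before ## is applied. */
#define MY_CONCAT3_HLP(a,b,c)  a##b##c
#define MY_CONCAT3(a,b,c)      MY_CONCAT3_HLP(a,b,c)

/* Hypothetical stand-ins for the real iemAImpl_*_u8 workers. */
static void iemAImpl_add_u8(void) { puts("add_u8 worker"); }
static void iemAImpl_xor_u8(void) { puts("xor_u8 worker"); }

/* What the reworked bodies do: build a worker name from the instruction
   name passed as a_InsNm, e.g. RT_CONCAT3(iemAImpl_,add,_u8). */
#define CALL_WORKER(a_InsNm)   MY_CONCAT3(iemAImpl_,a_InsNm,_u8)()

int main(void)
{
    CALL_WORKER(add);   /* expands to iemAImpl_add_u8() */
    CALL_WORKER(xor);   /* expands to iemAImpl_xor_u8() */
    return 0;
}

Since the generator scans call statements textually, a name beginning with "RT_CONCAT3(" would otherwise fail the "starts with 'i'" heuristic, hence the extra startswith() exemption.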