- Timestamp: Feb 4, 2024, 11:26:35 PM (16 months ago)
- svn:sync-xref-src-repo-rev: 161455
- Location: trunk/src/VBox/VMM
- Files: 5 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h
Diff r103185 → r103190 (condensed; the change restructures the six Group 2 shift/rotate decoders):

- iemOp_Grp2_Eb_Ib (opcode 0xc0) and iemOp_Grp2_Ev_Ib (opcode 0xc1): the common register/memory emulation code that used to follow the ModR/M /reg switch is moved into new GRP2_BODY_Eb_Ib(a_pImplExpr) and GRP2_BODY_Ev_Ib(a_pImplExpr) body macros, "since the EFLAGS behaviour differs between the shifts, rotates and rotate w/ carry".  The PCIEMOPSHIFTSIZES pImpl variable becomes a const local inside the macro, and the macro is #undef'ed at the end of each function.  Each case (/0 rol, /1 ror, /2 rcl, /3 rcr, /4 shl, /5 shr, /7 sar) now carries @opdone, @opmaps grp2_c0 or grp2_c1, @opcode /N and @opflclass doc tags (rotate_count, rotate_carry_count or shift_count), uses IEMOP_MNEMONIC2(MI, ...), and invokes the body macro with the matching IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_*_eflags) implementation table; /6 still raises #UD.  For 0xc0 each case keeps its own IEMOP_VERIFICATION_UNDEFINED_EFLAGS call (X86_EFL_OF for the rotates and rotates-through-carry, X86_EFL_OF | X86_EFL_AF for the shifts); for the other opcodes the old shared call is dropped.  The macro bodies keep the existing MC blocks: register operands via IEM_MC_REF_GREG_U8/U16/U32/U64, memory operands via IEM_MC_CALC_RM_EFF_ADDR plus IEM_MC_MEM_MAP_*_RW, EFLAGS fetch/commit, and IEM_MC_ADVANCE_RIP_AND_FINISH.

- iemOp_Grp2_Eb_1 (opcode 0xd0) and iemOp_Grp2_Ev_1 (opcode 0xd1): the same restructuring with GRP2_BODY_Eb_1() and GRP2_BODY_Ev_1() and a fixed shift count of 1 (IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1)); the cases use IEMOP_MNEMONIC2(M1, ...) under @opmaps grp2_d0 / grp2_d1 with the rotate_1, rotate_carry_1 and shift_1 flag classes.

- iemOp_Grp2_Eb_CL (opcode 0xd2) and iemOp_Grp2_Ev_CL (opcode 0xd3): the same restructuring with GRP2_BODY_Eb_CL() and GRP2_BODY_Ev_CL(), fetching the shift count from CL via IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); the cases use IEMOP_MNEMONIC2EX(..., M_CL, ..., REG_CL, ...) with the rotate_count, rotate_carry_count and shift_count flag classes.
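To make the new decoder layout easier to follow, here is a minimal, standalone C sketch of the switch-plus-body-macro pattern the changeset introduces.  It is not VirtualBox code: all names (GRP2_BODY_DEMO, demoDispatchGrp2, the worker functions) are invented for illustration, only two sub-opcodes are shown, and the EFLAGS handling is reduced to an unused placeholder.

#include <stdint.h>
#include <stdio.h>

typedef uint8_t (*PFNSHIFTU8)(uint8_t uValue, uint8_t cShift, uint32_t *pfEFlags);

/* Two toy workers standing in for the g_iemAImpl_*_eflags tables. */
static uint8_t demoRolU8(uint8_t uValue, uint8_t cShift, uint32_t *pfEFlags)
{
    (void)pfEFlags;
    cShift &= 7;
    return (uint8_t)((uValue << cShift) | (uValue >> ((8 - cShift) & 7)));
}

static uint8_t demoShlU8(uint8_t uValue, uint8_t cShift, uint32_t *pfEFlags)
{
    (void)pfEFlags;
    return (uint8_t)(uValue << cShift);
}

/* Shared body macro, selected once per sub-opcode like GRP2_BODY_Eb_Ib. */
#define GRP2_BODY_DEMO(a_pfnImpl) \
    do { \
        PFNSHIFTU8 const pfnImpl = (a_pfnImpl); \
        uint32_t fEFlags = 0; /* placeholder for the real EFLAGS plumbing */ \
        uValue = pfnImpl(uValue, cShift, &fEFlags); \
        (void)fEFlags; \
    } while (0)

static uint8_t demoDispatchGrp2(uint8_t iReg, uint8_t uValue, uint8_t cShift)
{
    switch (iReg)
    {
        case 0: GRP2_BODY_DEMO(demoRolU8); break; /* /0: rol */
        case 4: GRP2_BODY_DEMO(demoShlU8); break; /* /4: shl */
        default: break;                           /* other sub-opcodes omitted */
    }
    return uValue;
}
#undef GRP2_BODY_DEMO

int main(void)
{
    printf("rol 0x81,1 -> 0x%02x\n", demoDispatchGrp2(0, 0x81, 1)); /* 0x03 */
    printf("shl 0x81,1 -> 0x%02x\n", demoDispatchGrp2(4, 0x81, 1)); /* 0x02 */
    return 0;
}

The point of the pattern is that the switch decides only which worker and which flag behaviour applies, while the shared macro body expands the operand fetch, compute and write-back sequence exactly once per function.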
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r103185 r103190
202 202     'rAX': [],
203 203     'rDX': [],
204         'CL': [],
204 205     'rSI': [],
205 206     'rDI': [],
… …
211 212     'GS': [],
212 213     'SS': [],
214
215         # fixed valures.
216         '1': [],
213 217 };
214 218
… …
353 357     # Fixed registers.
354 358     'AL':       ( 'IDX_ParseFixedReg',  'AL',   'al',   'REG_AL',   '', ),
359         'REG_CL':   ( 'IDX_ParseFixedReg',  'CL',   'cl',   'REG_CL',   '', ),
355 360     'rAX':      ( 'IDX_ParseFixedReg',  'rAX',  '%eAX', 'REG_EAX',  '', ),
356 361     'rDX':      ( 'IDX_ParseFixedReg',  'rDX',  '%eDX', 'REG_EDX',  '', ),
… …
361 366     'GS':       ( 'IDX_ParseFixedReg',  'GS',   'gs',   'REG_GS',   '', ),
362 367     'SS':       ( 'IDX_ParseFixedReg',  'SS',   'ss',   'REG_SS',   '', ),
368
369         # Fixed values.
370         '1':        ( '',                   '1',    '1',    '1',        '', ),
363 371 };
364 372
… …
384 392     'M_REG':    ( 'ModR/M', [ 'rm', ], '', ),
385 393     'M_MEM':    ( 'ModR/M', [ 'rm', ], '', ),
394         'M1':       ( 'ModR/M', [ 'rm', '1' ], '', ),
395         'M_CL':     ( 'ModR/M', [ 'rm', 'CL' ], '', ),      # shl/rcl/ror/++
396         'MI':       ( 'ModR/M', [ 'rm', 'imm' ], '', ),
397         'MI_REG':   ( 'ModR/M', [ 'rm', 'imm' ], '11 mr/reg', ),
398         'MI_MEM':   ( 'ModR/M', [ 'rm', 'imm' ], '!11 mr/reg', ),
386 399     'R':        ( 'ModR/M', [ 'reg', ], '', ),
387 400
… …
4336 4349        'asFlUndefined': [ 'af', ],
4337 4350    },
4351         'rotate_1': {   # rol and ror with fixed 1 shift count
4352             'asFlTest':      [],
4353             'asFlModify':    [ 'cf', 'of', ],
4354             'asFlClear':     [],
4355             'asFlSet':       [],
4356             'asFlUndefined': [],
4357         },
4358         'rotate_count': {   # rol and ror w/o fixed 1 shift count
4359             'asFlTest':      [],
4360             'asFlModify':    [ 'cf', 'of', ],
4361             'asFlClear':     [],
4362             'asFlSet':       [],
4363             'asFlUndefined': [ 'of', ],
4364         },
4365         'rotate_carry_1': {   # rcl and rcr with fixed 1 shift count
4366             'asFlTest':      [ 'cf', ],
4367             'asFlModify':    [ 'cf', 'of', ],
4368             'asFlClear':     [],
4369             'asFlSet':       [],
4370             'asFlUndefined': [],
4371         },
4372         'rotate_carry_count': {   # rcl and rcr w/o fixed 1 shift count
4373             'asFlTest':      [ 'cf', ],
4374             'asFlModify':    [ 'cf', 'of', ],
4375             'asFlClear':     [],
4376             'asFlSet':       [],
4377             'asFlUndefined': [ 'of', ],
4378         },
4379         'shift_1': {   # shl, shr or sar with fixed 1 count.
4380             'asFlTest':      [],
4381             'asFlModify':    [ 'cf', 'pf', 'af', 'zf', 'sf', 'of', ],
4382             'asFlClear':     [],
4383             'asFlSet':       [],
4384             'asFlUndefined': [ 'af', ],
4385         },
4386         'shift_count': {   # shl, shr or sar w/o fixed 1 shift count
4387             'asFlTest':      [],
4388             'asFlModify':    [ 'cf', 'pf', 'af', 'zf', 'sf', 'of', ],
4389             'asFlClear':     [],
4390             'asFlSet':       [],
4391             'asFlUndefined': [ 'af', 'of', ],
4392         },
4338 4393   'bitmap': {   # bt, btc, btr, btc
4339 4394       'asFlTest':      [],
… …
4350 4405       'asFlUndefined': [],
4351 4406   },
4352      }
4407      };
4353 4408  def parseTagOpEFlagsClass(self, sTag, aasSections, iTagLine, iEndLine):
4354 4409      """
4355 4410      Tags: @opflclass
4356          Value: arithmetic, logical
4411          Value: arithmetic, logical, ...
4357 4412
4358 4413      """
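The new EFLAGS classes above split the shift/rotate family by flag behaviour; rotate_count, for example, says rol/ror modify only CF and OF, with OF undefined unless the count is 1. A rough stand-alone C model of ROL r/m8, CL along those lines (illustration only, not VirtualBox code; RefRolU8 is an invented name):

#include <stdint.h>
#include <stdbool.h>

/* Sketch of ROL r/m8, CL matching the 'rotate_count' class: only CF and OF are
   written, and OF is left untouched (undefined) for counts other than 1. */
static uint8_t RefRolU8(uint8_t uDst, uint8_t bCl, bool *pfCf, bool *pfOf)
{
    unsigned const cMasked = bCl & 31;          /* the count is masked to 5 bits first */
    if (cMasked == 0)
        return uDst;                            /* count 0: no flags are changed */

    unsigned const cRot = cMasked & 7;          /* an 8-bit operand rotates modulo 8 */
    uint8_t const uResult = (uint8_t)((uDst << cRot) | (uDst >> ((8 - cRot) & 7)));
    *pfCf = (uResult & 1) != 0;                 /* CF = the bit rotated into bit 0 */
    if (cMasked == 1)
        *pfOf = ((uResult >> 7) & 1) != (uResult & 1);   /* OF = MSB(result) ^ CF, count 1 only */
    return uResult;
}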
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h
r103185 r103190
2298 2298
2299 2299
2300 2300 /** Opcode VEX.66.0F38 0xf7 (vex only). */
2300      /**
2301       * @opcode 0xf7
2302       * @oppfx 0x66
2303       * @opflclass unchanged
2304       */
2301 2305 FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
2302 2306 {
… …
2306 2310
2307 2311
2308      /** Opcode VEX.F3.0F38 0xf7 (vex only). */
2312      /**
2313       * @opcode 0xf7
2314       * @oppfx 0xf3
2315       * @opflclass unchanged
2316       */
2309 2317 FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
2310 2318 {
… …
2314 2322
2315 2323
2316      /** Opcode VEX.F2.0F38 0xf7 (vex only). */
2324      /**
2325       * @opcode 0xf7
2326       * @oppfx 0xf2
2327       * @opflclass unchanged
2328       */
2317 2329 FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
2318 2330 {
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h
r102331 r103190
1118 1118
1119 1119
1120 1120 /** Opcode VEX.F2.0F3A (vex only) */
1120      /**
1121       * @opcode 0xf0
1122       * @oppfx 0xf2
1123       * @opflclass unchanged
1124       */
1121 1125 FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1122 1126 {
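The @opflclass unchanged tag added to rorx here (and to shlx/sarx/shrx in the map-2 file above) reflects that the BMI2 shift/rotate forms never write EFLAGS, unlike their legacy counterparts. A one-line illustrative model of the 32-bit shrx data path (not VirtualBox code, just the architectural behaviour):

#include <stdint.h>

/* shrx (32-bit form): plain logical shift, count masked to 5 bits, no flag side effects. */
static uint32_t RefShrxU32(uint32_t uSrc, uint32_t uCount)
{
    return uSrc >> (uCount & 31);
}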
trunk/src/VBox/VMM/include/IEMInternal.h
r103182 r103190
2065 2065 /** ModR/M: reg, r/m (memory) */
2066 2066 #define IEMOPFORM_RM_MEM        (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
2067      /** ModR/M: reg, r/m */
2067      /** ModR/M: reg, r/m, imm */
2068 2068 #define IEMOPFORM_RMI           1
2069      /** ModR/M: reg, r/m (register) */
2069      /** ModR/M: reg, r/m (register), imm */
2070 2070 #define IEMOPFORM_RMI_REG       (IEMOPFORM_RM | IEMOPFORM_MOD3)
2071      /** ModR/M: reg, r/m (memory) */
2071      /** ModR/M: reg, r/m (memory), imm */
2072 2072 #define IEMOPFORM_RMI_MEM       (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
2073 2073 /** ModR/M: r/m, reg */
… …
2077 2077 /** ModR/M: r/m (memory), reg */
2078 2078 #define IEMOPFORM_MR_MEM        (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
2079      /** ModR/M: r/m, reg */
2079      /** ModR/M: r/m, reg, imm */
2080 2080 #define IEMOPFORM_MRI           3
2081      /** ModR/M: r/m (register), reg */
2081      /** ModR/M: r/m (register), reg, imm */
2082 2082 #define IEMOPFORM_MRI_REG       (IEMOPFORM_MR | IEMOPFORM_MOD3)
2083      /** ModR/M: r/m (memory), reg */
2083      /** ModR/M: r/m (memory), reg, imm */
2084 2084 #define IEMOPFORM_MRI_MEM       (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
2085 2085 /** ModR/M: r/m only */
… …
2089 2089 /** ModR/M: r/m only (memory). */
2090 2090 #define IEMOPFORM_M_MEM         (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
2091      /** ModR/M: r/m, imm */
2092      #define IEMOPFORM_MI            5
2093      /** ModR/M: r/m (register), imm */
2094      #define IEMOPFORM_MI_REG        (IEMOPFORM_MI | IEMOPFORM_MOD3)
2095      /** ModR/M: r/m (memory), imm */
2096      #define IEMOPFORM_MI_MEM        (IEMOPFORM_MI | IEMOPFORM_NOT_MOD3)
2097      /** ModR/M: r/m, 1 (shift and rotate instructions) */
2098      #define IEMOPFORM_M1            6
2099      /** ModR/M: r/m (register), 1. */
2100      #define IEMOPFORM_M1_REG        (IEMOPFORM_M1 | IEMOPFORM_MOD3)
2101      /** ModR/M: r/m (memory), 1. */
2102      #define IEMOPFORM_M1_MEM        (IEMOPFORM_M1 | IEMOPFORM_NOT_MOD3)
2103      /** ModR/M: r/m, CL (shift and rotate instructions)
2104       * @todo This should just've been a generic fixed register. But the python
2105       *       code doesn't needs more convincing. */
2106      #define IEMOPFORM_M_CL          7
2107      /** ModR/M: r/m (register), CL. */
2108      #define IEMOPFORM_M_CL_REG      (IEMOPFORM_M_CL | IEMOPFORM_MOD3)
2109      /** ModR/M: r/m (memory), CL. */
2110      #define IEMOPFORM_M_CL_MEM      (IEMOPFORM_M_CL | IEMOPFORM_NOT_MOD3)
2091 2111 /** ModR/M: reg only */
2092      #define IEMOPFORM_R             5
2112      #define IEMOPFORM_R             8
2093 2113
2094 2114 /** VEX+ModR/M: reg, r/m */
2095      #define IEMOPFORM_VEX_RM        8
2115      #define IEMOPFORM_VEX_RM        16
2096 2116 /** VEX+ModR/M: reg, r/m (register) */
2097 2117 #define IEMOPFORM_VEX_RM_REG    (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
… …
2099 2119 #define IEMOPFORM_VEX_RM_MEM    (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
2100 2120 /** VEX+ModR/M: r/m, reg */
2101      #define IEMOPFORM_VEX_MR        9
2121      #define IEMOPFORM_VEX_MR        17
2102 2122 /** VEX+ModR/M: r/m (register), reg */
2103 2123 #define IEMOPFORM_VEX_MR_REG    (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
… …
2105 2125 #define IEMOPFORM_VEX_MR_MEM    (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
2106 2126 /** VEX+ModR/M: r/m only */
2107      #define IEMOPFORM_VEX_M         10
2127      #define IEMOPFORM_VEX_M         18
2108 2128 /** VEX+ModR/M: r/m only (register). */
2109 2129 #define IEMOPFORM_VEX_M_REG     (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
… …
2111 2131 #define IEMOPFORM_VEX_M_MEM     (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
2112 2132 /** VEX+ModR/M: reg only */
2113      #define IEMOPFORM_VEX_R         11
2133      #define IEMOPFORM_VEX_R         19
2114 2134 /** VEX+ModR/M: reg, vvvv, r/m */
2115      #define IEMOPFORM_VEX_RVM       12
2135      #define IEMOPFORM_VEX_RVM       20
2116 2136 /** VEX+ModR/M: reg, vvvv, r/m (register). */
2117 2137 #define IEMOPFORM_VEX_RVM_REG   (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
… …
2119 2139 #define IEMOPFORM_VEX_RVM_MEM   (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
2120 2140 /** VEX+ModR/M: reg, r/m, vvvv */
2121      #define IEMOPFORM_VEX_RMV       13
2141      #define IEMOPFORM_VEX_RMV       21
2122 2142 /** VEX+ModR/M: reg, r/m, vvvv (register). */
2123 2143 #define IEMOPFORM_VEX_RMV_REG   (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
… …
2125 2145 #define IEMOPFORM_VEX_RMV_MEM   (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
2126 2146 /** VEX+ModR/M: reg, r/m, imm8 */
2127      #define IEMOPFORM_VEX_RMI       14
2147      #define IEMOPFORM_VEX_RMI       22
2128 2148 /** VEX+ModR/M: reg, r/m, imm8 (register). */
2129 2149 #define IEMOPFORM_VEX_RMI_REG   (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
… …
2131 2151 #define IEMOPFORM_VEX_RMI_MEM   (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
2132 2152 /** VEX+ModR/M: r/m, vvvv, reg */
2133      #define IEMOPFORM_VEX_MVR       15
2153      #define IEMOPFORM_VEX_MVR       23
2134 2154 /** VEX+ModR/M: r/m, vvvv, reg (register) */
2135 2155 #define IEMOPFORM_VEX_MVR_REG   (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
… …
2137 2157 #define IEMOPFORM_VEX_MVR_MEM   (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
2138 2158 /** VEX+ModR/M+/n: vvvv, r/m */
2139      #define IEMOPFORM_VEX_VM        16
2159      #define IEMOPFORM_VEX_VM        24
2140 2160 /** VEX+ModR/M+/n: vvvv, r/m (register) */
2141 2161 #define IEMOPFORM_VEX_VM_REG    (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
… …
2143 2163 #define IEMOPFORM_VEX_VM_MEM    (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)
2144 2164 /** VEX+ModR/M+/n: vvvv, r/m, imm8 */
2145      #define IEMOPFORM_VEX_VMI       17
2165      #define IEMOPFORM_VEX_VMI       25
2146 2166 /** VEX+ModR/M+/n: vvvv, r/m, imm8 (register) */
2147 2167 #define IEMOPFORM_VEX_VMI_REG   (IEMOPFORM_VEX_VMI | IEMOPFORM_MOD3)
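The *_REG/*_MEM variants of the new MI/M1/M_CL forms above only OR in the MOD3 or NOT_MOD3 flag, i.e. they differ solely in whether the ModR/M mod field selects a register (11b) or a memory operand, while bits 5:3 carry the /0../7 opcode extension that picks the group-2 operation. A small stand-alone sketch of those encoding rules (hypothetical helpers for illustration, not the IEM decoder):

#include <stdint.h>
#include <stdbool.h>

/* mod field (top two bits) == 11b means the r/m operand is a register,
   anything else means a memory operand (the *_MEM forms). */
static bool RefModRmIsRegisterForm(uint8_t bRm)
{
    return (bRm >> 6) == 3;
}

/* Bits 5:3 of ModR/M: either the reg operand or the /n opcode extension
   used by group-2 (rol/ror/rcl/rcr/shl/shr/sar). */
static uint8_t RefModRmRegField(uint8_t bRm)
{
    return (bRm >> 3) & 7;
}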
Note: See TracChangeset for help on using the changeset viewer.