Changeset 104722 in vbox
- Timestamp: May 18, 2024 5:00:15 AM
- svn:sync-xref-src-repo-rev: 163283
- Location: trunk/src/VBox/VMM
- Files: 3 edited
Legend: except where a removal or unchanged context line is explicitly noted, all code quoted below was added in this changeset.
trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp
(r104516 → r104722) The following masked-move workers and instruction implementations are inserted at the end of the file, immediately ahead of the closing /** @} */ of the instruction-implementation group:

/**
 * Worker for 'VMASKMOVPS / VPMASKMOVD' 128-bit 32-bit-masked load.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
static VBOXSTRICTRC iemCImpl_maskmov_load_u128_32_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iXRegDst, uint8_t iXRegMsk, uint8_t iEffSeg, RTGCPTR GCPtrEffSrc)
{
    uint32_t fAccessed = 0;

    PRTUINT128U puDst = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegDst];
    PCRTUINT128U puMsk = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegMsk];
    PCRTUINT128U puSrc;

    for (uint32_t i = 0; i < RT_ELEMENTS(puMsk->au32); i++)
    {
        fAccessed |= puMsk->au32[i];
    }

    if (fAccessed & RT_BIT(31)) {
        /*
         * Access the source memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemSrc;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemSrc, &bUnmapInfo, sizeof(*puSrc),
                                          iEffSeg, GCPtrEffSrc, IEM_ACCESS_DATA_R, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puSrc = (PCRTUINT128U)pvMemSrc;

        for (uint32_t i = 0; i < RT_ELEMENTS(puSrc->au32); i++)
        {
            puDst->au32[i] = (puMsk->au32[i] & RT_BIT(31)) ? puSrc->au32[i] : 0;
        }
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[0] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[1] = 0;

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
    else
    {
        puDst->au64[0] = 0;
        puDst->au64[1] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[0] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[1] = 0;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPS / VPMASKMOVD' 256-bit 32-bit-masked load.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
static VBOXSTRICTRC iemCImpl_maskmov_load_u256_32_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iYRegDst, uint8_t iYRegMsk, uint8_t iEffSeg, RTGCPTR GCPtrEffSrc)
{
    uint32_t fAccessed = 0;

    PRTUINT128U puDstLo = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegDst];
    PRTUINT128U puDstHi = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDst];
    PCRTUINT128U puMskLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegMsk];
    PCRTUINT128U puMskHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegMsk];
    PCRTUINT256U puSrc;

    for (uint32_t i = 0; i < RT_ELEMENTS(puMskLo->au32); i++)
    {
        fAccessed |= puMskLo->au32[i] | puMskHi->au32[i];
    }

    if (fAccessed & RT_BIT(31)) {
        /*
         * Access the source memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemSrc;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemSrc, &bUnmapInfo, sizeof(*puSrc),
                                          iEffSeg, GCPtrEffSrc, IEM_ACCESS_DATA_R, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puSrc = (PCRTUINT256U)pvMemSrc;

        uint8_t const iHalf = RT_ELEMENTS(puSrc->au32) / 2;

        for (uint32_t i = 0; i < iHalf; i++)
        {
            puDstLo->au32[i] = (puMskLo->au32[i] & RT_BIT(31)) ? puSrc->au32[i] : 0;
        }
        for (uint32_t i = iHalf; i < RT_ELEMENTS(puSrc->au32); i++)
        {
            puDstHi->au32[i - iHalf] = (puMskHi->au32[i - iHalf] & RT_BIT(31)) ? puSrc->au32[i] : 0;
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
    else
    {
        puDstLo->au64[0] = 0;
        puDstLo->au64[1] = 0;
        puDstHi->au64[0] = 0;
        puDstHi->au64[1] = 0;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPS / VPMASKMOVD' 128-bit 32-bit-masked store.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
static VBOXSTRICTRC iemCImpl_maskmov_store_u128_32_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrEffDst, uint8_t iXRegMsk, uint8_t iXRegSrc)
{
    uint32_t fAccessed = 0;

    PRTUINT128U puDst;
    PCRTUINT128U puMsk = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegMsk];
    PCRTUINT128U puSrc = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegSrc];

    for (uint32_t i = 0; i < RT_ELEMENTS(puMsk->au32); i++)
    {
        fAccessed |= puMsk->au32[i];
    }

    if (fAccessed & RT_BIT(31)) {
        /*
         * Access the destination memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemDst;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemDst, &bUnmapInfo, sizeof(*puDst),
                                          iEffSeg, GCPtrEffDst, IEM_ACCESS_DATA_RW, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puDst = (PRTUINT128U)pvMemDst;

        for (uint32_t i = 0; i < RT_ELEMENTS(puDst->au32); i++)
        {
            if (puMsk->au32[i] & RT_BIT(31))
                puDst->au32[i] = puSrc->au32[i];
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPS / VPMASKMOVD' 256-bit 32-bit-masked store.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
static VBOXSTRICTRC iemCImpl_maskmov_store_u256_32_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrEffDst, uint8_t iYRegMsk, uint8_t iYRegSrc)
{
    uint32_t fAccessed = 0;

    PRTUINT256U puDst;
    PCRTUINT128U puMskLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegMsk];
    PCRTUINT128U puMskHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegMsk];
    PCRTUINT128U puSrcLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc];
    PCRTUINT128U puSrcHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc];

    for (uint32_t i = 0; i < RT_ELEMENTS(puMskLo->au32); i++)
    {
        fAccessed |= puMskLo->au32[i] | puMskHi->au32[i];
    }

    if (fAccessed & RT_BIT(31)) {
        /*
         * Access the destination memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemDst;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemDst, &bUnmapInfo, sizeof(*puDst),
                                          iEffSeg, GCPtrEffDst, IEM_ACCESS_DATA_RW, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puDst = (PRTUINT256U)pvMemDst;

        uint8_t const iHalf = RT_ELEMENTS(puDst->au32) / 2;

        for (uint32_t i = 0; i < iHalf; i++)
        {
            if (puMskLo->au32[i] & RT_BIT(31))
                puDst->au32[i] = puSrcLo->au32[i];
        }
        for (uint32_t i = iHalf; i < RT_ELEMENTS(puDst->au32); i++)
        {
            if (puMskHi->au32[i - iHalf] & RT_BIT(31))
                puDst->au32[i] = puSrcHi->au32[i - iHalf];
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPD / VPMASKMOVQ' 128-bit 64-bit-masked load.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
static VBOXSTRICTRC iemCImpl_maskmov_load_u128_64_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iXRegDst, uint8_t iXRegMsk, uint8_t iEffSeg, RTGCPTR GCPtrEffSrc)
{
    uint64_t fAccessed = 0;

    PRTUINT128U puDst = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegDst];
    PCRTUINT128U puMsk = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegMsk];
    PCRTUINT128U puSrc;

    for (uint32_t i = 0; i < RT_ELEMENTS(puMsk->au64); i++)
    {
        fAccessed |= puMsk->au64[i];
    }

    if (fAccessed & RT_BIT_64(63)) {
        /*
         * Access the source memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemSrc;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemSrc, &bUnmapInfo, sizeof(*puSrc),
                                          iEffSeg, GCPtrEffSrc, IEM_ACCESS_DATA_R, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puSrc = (PCRTUINT128U)pvMemSrc;

        for (uint32_t i = 0; i < RT_ELEMENTS(puSrc->au64); i++)
        {
            puDst->au64[i] = (puMsk->au64[i] & RT_BIT_64(63)) ? puSrc->au64[i] : 0;
        }
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[0] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[1] = 0;

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
    else
    {
        puDst->au64[0] = 0;
        puDst->au64[1] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[0] = 0;
        pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iXRegDst].au64[1] = 0;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPD / VPMASKMOVQ' 256-bit 64-bit-masked load.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
static VBOXSTRICTRC iemCImpl_maskmov_load_u256_64_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iYRegDst, uint8_t iYRegMsk, uint8_t iEffSeg, RTGCPTR GCPtrEffSrc)
{
    uint64_t fAccessed = 0;

    PRTUINT128U puDstLo = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegDst];
    PRTUINT128U puDstHi = (PRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDst];
    PCRTUINT128U puMskLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegMsk];
    PCRTUINT128U puMskHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegMsk];
    PCRTUINT256U puSrc;

    for (uint32_t i = 0; i < RT_ELEMENTS(puMskLo->au64); i++)
    {
        fAccessed |= puMskLo->au64[i] | puMskHi->au64[i];
    }

    if (fAccessed & RT_BIT_64(63)) {
        /*
         * Access the source memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemSrc;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemSrc, &bUnmapInfo, sizeof(*puSrc),
                                          iEffSeg, GCPtrEffSrc, IEM_ACCESS_DATA_R, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puSrc = (PCRTUINT256U)pvMemSrc;

        uint8_t const iHalf = RT_ELEMENTS(puSrc->au64) / 2;

        for (uint32_t i = 0; i < iHalf; i++)
        {
            puDstLo->au64[i] = (puMskLo->au64[i] & RT_BIT_64(63)) ? puSrc->au64[i] : 0;
        }
        for (uint32_t i = iHalf; i < RT_ELEMENTS(puSrc->au64); i++)
        {
            puDstHi->au64[i - iHalf] = (puMskHi->au64[i - iHalf] & RT_BIT_64(63)) ? puSrc->au64[i] : 0;
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
    else
    {
        puDstLo->au64[0] = 0;
        puDstLo->au64[1] = 0;
        puDstHi->au64[0] = 0;
        puDstHi->au64[1] = 0;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPD / VPMASKMOVQ' 128-bit 64-bit-masked store.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
static VBOXSTRICTRC iemCImpl_maskmov_store_u128_64_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrEffDst, uint8_t iXRegMsk, uint8_t iXRegSrc)
{
    uint64_t fAccessed = 0;

    PRTUINT128U puDst;
    PCRTUINT128U puMsk = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegMsk];
    PCRTUINT128U puSrc = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iXRegSrc];

    for (uint32_t i = 0; i < RT_ELEMENTS(puMsk->au64); i++)
    {
        fAccessed |= puMsk->au64[i];
    }

    if (fAccessed & RT_BIT_64(63)) {
        /*
         * Access the destination memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemDst;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemDst, &bUnmapInfo, sizeof(*puDst),
                                          iEffSeg, GCPtrEffDst, IEM_ACCESS_DATA_RW, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puDst = (PRTUINT128U)pvMemDst;

        for (uint32_t i = 0; i < RT_ELEMENTS(puDst->au64); i++)
        {
            if (puMsk->au64[i] & RT_BIT_64(63))
                puDst->au64[i] = puSrc->au64[i];
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Worker for 'VMASKMOVPD / VPMASKMOVQ' 256-bit 64-bit-masked store.
 *
 * @param pVCpu The cross context virtual CPU structure of the calling thread.
 * @param cbInstr The current instruction length.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
static VBOXSTRICTRC iemCImpl_maskmov_store_u256_64_worker(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrEffDst, uint8_t iYRegMsk, uint8_t iYRegSrc)
{
    uint64_t fAccessed = 0;

    PRTUINT256U puDst;
    PCRTUINT128U puMskLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegMsk];
    PCRTUINT128U puMskHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegMsk];
    PCRTUINT128U puSrcLo = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc];
    PCRTUINT128U puSrcHi = (PCRTUINT128U)&pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc];

    for (uint32_t i = 0; i < RT_ELEMENTS(puMskLo->au64); i++)
    {
        fAccessed |= puMskLo->au64[i] | puMskHi->au64[i];
    }

    if (fAccessed & RT_BIT_64(63)) {
        /*
         * Access the destination memory.
         */
        uint8_t bUnmapInfo;
        void *pvMemDst;
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMemDst, &bUnmapInfo, sizeof(*puDst),
                                          iEffSeg, GCPtrEffDst, IEM_ACCESS_DATA_RW, 0);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        puDst = (PRTUINT256U)pvMemDst;

        uint8_t const iHalf = RT_ELEMENTS(puDst->au64) / 2;

        for (uint32_t i = 0; i < iHalf; i++)
        {
            if (puMskLo->au64[i] & RT_BIT_64(63))
                puDst->au64[i] = puSrcLo->au64[i];
        }
        for (uint32_t i = iHalf; i < RT_ELEMENTS(puDst->au64); i++)
        {
            if (puMskHi->au64[i - iHalf] & RT_BIT_64(63))
                puDst->au64[i] = puSrcHi->au64[i - iHalf];
        }

        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }

    return iemRegAddToRipAndFinishingClearingRF(pVCpu, cbInstr);
}


/**
 * Implements 'VMASKMOVPS' 128-bit 32-bit-masked load.
 *
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u128_32_worker(pVCpu, cbInstr, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VMASKMOVPS' 256-bit 32-bit-masked load.
 *
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u256_32_worker(pVCpu, cbInstr, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VMASKMOVPS' 128-bit 32-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc)
{
    return iemCImpl_maskmov_store_u128_32_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
}


/**
 * Implements 'VMASKMOVPS' 256-bit 32-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc)
{
    return iemCImpl_maskmov_store_u256_32_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
}


/**
 * Implements 'VPMASKMOVD' 128-bit 32-bit-masked load.
 *
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u128_32_worker(pVCpu, cbInstr, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VPMASKMOVD' 256-bit 32-bit-masked load.
 *
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u256_32_worker(pVCpu, cbInstr, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VPMASKMOVD' 128-bit 32-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc)
{
    return iemCImpl_maskmov_store_u128_32_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
}


/**
 * Implements 'VPMASKMOVD' 256-bit 32-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc)
{
    return iemCImpl_maskmov_store_u256_32_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
}


/**
 * Implements 'VMASKMOVPD' 128-bit 64-bit-masked load.
 *
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u128_64_worker(pVCpu, cbInstr, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VMASKMOVPD' 256-bit 64-bit-masked load.
 *
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u256_64_worker(pVCpu, cbInstr, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VMASKMOVPD' 128-bit 64-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc)
{
    return iemCImpl_maskmov_store_u128_64_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
}


/**
 * Implements 'VMASKMOVPD' 256-bit 64-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc)
{
    return iemCImpl_maskmov_store_u256_64_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
}


/**
 * Implements 'VPMASKMOVQ' 128-bit 64-bit-masked load.
 *
 * @param iXRegDst The destination XMM register index.
 * @param iXRegMsk The mask XMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u128_64_worker(pVCpu, cbInstr, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VPMASKMOVQ' 256-bit 64-bit-masked load.
 *
 * @param iYRegDst The destination YMM register index.
 * @param iYRegMsk The mask YMM register index.
 * @param iEffSeg The effective segment.
 * @param GCPtrEffSrc The source memory address.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc)
{
    return iemCImpl_maskmov_load_u256_64_worker(pVCpu, cbInstr, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
}


/**
 * Implements 'VPMASKMOVQ' 128-bit 64-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iXRegMsk The mask XMM register index.
 * @param iXRegSrc The source XMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc)
{
    return iemCImpl_maskmov_store_u128_64_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
}


/**
 * Implements 'VPMASKMOVQ' 256-bit 64-bit-masked store.
 *
 * @param iEffSeg The effective segment.
 * @param GCPtrEffDst The destination memory address.
 * @param iYRegMsk The mask YMM register index.
 * @param iYRegSrc The source YMM register index.
 */
IEM_CIMPL_DEF_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc)
{
    return iemCImpl_maskmov_store_u256_64_worker(pVCpu, cbInstr, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
}
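The eight workers above all follow the same pattern: bit 31 (or 63) of each mask element selects that element, and memory is only touched when at least one element is selected. As a reading aid, here is a minimal stand-alone C model of the 128-bit, 32-bit-granular load case; the function name and the plain-array interface are illustrative only and are not part of the VirtualBox sources, and unlike this sketch the real worker maps the whole 16-byte source in one go once any mask bit is set:

#include <stdint.h>
#include <string.h>

/* Illustrative model of a VMASKMOVPS/VPMASKMOVD 128-bit masked load:
 * element i is read from memory when bit 31 of mask element i is set,
 * otherwise it is zeroed in the destination. */
static void model_maskmov_load_u128_32(uint32_t aDst[4], const uint32_t aMsk[4], const uint32_t *pSrc)
{
    unsigned fAnySelected = 0;
    for (unsigned i = 0; i < 4; i++)
        fAnySelected |= aMsk[i] >> 31;

    if (!fAnySelected)
    {
        /* Nothing selected: destination is zeroed and memory is not accessed,
         * so an inaccessible source address cannot fault. */
        memset(aDst, 0, 4 * sizeof(uint32_t));
        return;
    }

    for (unsigned i = 0; i < 4; i++)
        aDst[i] = (aMsk[i] >> 31) ? pSrc[i] : 0;
}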
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h
(r104272 → r104722) In the VEX.66.0F38 0x2c–0x2f range, the former stubs FNIEMOP_STUB(iemOp_vmaskmovps_Vx_Hx_Mx), FNIEMOP_STUB(iemOp_vmaskmovpd_Vx_Hx_Mx), FNIEMOP_STUB(iemOp_vmaskmovps_Mx_Hx_Vx) and FNIEMOP_STUB(iemOp_vmaskmovpd_Mx_Hx_Vx) are removed and replaced with full decoder implementations:

/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_DEF(iemOp_vmaskmovps_Vx_Hx_Mx)
{
//  IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Vx_Hx_Mx)
{
//  IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_DEF(iemOp_vmaskmovps_Mx_Hx_Vx)
{
//  IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Mx_Hx_Vx)
{
//  IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


Further down, next to the invalid VEX.66.0F38 0x8a/0x8b/0x8d/0x8f slots, the stubs FNIEMOP_STUB(iemOp_vpmaskmovd_q_Vx_Hx_Mx) and FNIEMOP_STUB(iemOp_vpmaskmovd_q_Mx_Vx_Hx) are likewise removed and replaced:

/** Opcode VEX.66.0F38 0x8c. */
FNIEMOP_DEF(iemOp_vpmaskmovd_q_Vx_Hx_Mx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
//          IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
            if (pVCpu->iem.s.uVexLength)
            {
                /*
                 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
                IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
                IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

                IEM_MC_END();
            }
            else
            {
                /*
                 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
                IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
                IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

                IEM_MC_END();
            }
        }
        else
        {
//          IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
            if (pVCpu->iem.s.uVexLength)
            {
                /*
                 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
                IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
                IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

                IEM_MC_END();
            }
            else
            {
                /*
                 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
                IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
                IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
                IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

                IEM_MC_END();
            }
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x8e. */
FNIEMOP_DEF(iemOp_vpmaskmovd_q_Mx_Vx_Hx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
//          IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
            if (pVCpu->iem.s.uVexLength)
            {
                /*
                 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

                IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
                IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

                IEM_MC_END();
            }
            else
            {
                /*
                 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

                IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
                IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

                IEM_MC_END();
            }
        }
        else
        {
//          IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
            if (pVCpu->iem.s.uVexLength)
            {
                /*
                 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

                IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
                IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

                IEM_MC_END();
            }
            else
            {
                /*
                 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
                 */
                IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

                IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
                IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
                IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
                IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

                IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
                IEM_MC_PREPARE_AVX_USAGE();

                IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

                IEM_MC_END();
            }
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
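For context, the instruction forms these decoders emulate can be exercised from guest code with the standard AVX/AVX2 compiler intrinsics. The small self-contained example below is ordinary application code and is not part of this changeset; build with a compiler flag such as -mavx2:

#include <immintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    float   src[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
    float   dst[8] = { 0 };
    int32_t idat[8] = { 10, 20, 30, 40, 50, 60, 70, 80 };

    /* The MSB of each 32-bit mask element selects that element; unselected
     * elements are zeroed on loads and left untouched on stores. */
    __m256i mask32 = _mm256_setr_epi32(-1, 0, -1, 0, -1, 0, -1, 0);

    __m256  v  = _mm256_maskload_ps(src, mask32);               /* VMASKMOVPS ymm, ymm, m256 */
    _mm256_maskstore_ps(dst, mask32, v);                        /* VMASKMOVPS m256, ymm, ymm */
    __m256i iv = _mm256_maskload_epi32(idat, mask32);           /* VPMASKMOVD ymm, ymm, m256 (AVX2) */
    (void)iv;

    for (int i = 0; i < 8; i++)
        printf("%g ", dst[i]);                                  /* prints: 1 0 3 0 5 0 7 0 */
    printf("\n");
    return 0;
}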
trunk/src/VBox/VMM/include/IEMInternal.h
(r104521 → r104722) The following prototypes are added directly after the existing iemCImpl_rdseed and iemCImpl_rdrand prototypes, ahead of the closing /** @} */:

IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);