Changeset 106409 in vbox for trunk/src/VBox/VMM
- Timestamp: Oct 17, 2024 12:22:46 AM
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
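The hunks below resolve the old @todo by turning cbMem, fAlignMaskAndCtl, enmOp and an "iSegReg == UINT8_MAX" indicator into template parameters (a_cbMem, a_fAlignMaskAndCtl, a_enmOp, a_fFlat) of iemNativeEmitMemFetchStoreDataCommon, so the IEM_MC_* macro call sites supply them as compile-time constants. A minimal, generic sketch of that pattern — with hypothetical names, not the actual VBox types — might look like this:

    #include <cstdint>
    #include <cstdio>

    /* Hypothetical stand-in for the real IEM operation enum; illustrative only. */
    enum class MemOp { Fetch, Store };

    /* Old style: size and operation arrive as runtime arguments, so every sanity
       check is a runtime assert and both branches are compiled into the function. */
    static uint32_t emitOldStyle(uint32_t off, uint8_t cbMem, MemOp enmOp)
    {
        return enmOp == MemOp::Store ? off + cbMem : off + cbMem * 2u; /* placeholder emission */
    }

    /* New style: the same values become non-type template parameters, so checks can
       be static_assert (AssertCompile in VBox) and dead branches drop out at compile
       time via if constexpr (RT_CONSTEXPR_IF in VBox). */
    template<uint8_t const a_cbMem, MemOp const a_enmOp>
    static uint32_t emitNewStyle(uint32_t off)
    {
        static_assert(a_cbMem == 1 || a_cbMem == 2 || a_cbMem == 4 || a_cbMem == 8, "unsupported size");
        if constexpr (a_enmOp == MemOp::Store)
            return off + a_cbMem;      /* placeholder store emission */
        else
            return off + a_cbMem * 2u; /* placeholder fetch emission */
    }

    int main()
    {
        uint32_t off = 0;
        off = emitOldStyle(off, sizeof(uint32_t), MemOp::Fetch);
        off = emitNewStyle<sizeof(uint32_t), MemOp::Fetch>(off);
        std::printf("off=%u\n", (unsigned)off);
        return 0;
    }

With the arguments fixed at compile time, the per-size Assert() calls in the old body become AssertCompile(), and the runtime "iSegReg == UINT8_MAX" tests become the a_fFlat template flag, as seen throughout the diff that follows.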
r106407 r106409 7188 7188 * and IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 and IEM_MC_STORE_MEM_FLAT_U8/16/32/64 7189 7189 * (with iSegReg = UINT8_MAX). */ 7190 /** @todo Pass enmOp, cbMem, fAlignMaskAndClt and a iSegReg == UINT8_MAX 7191 * indicator as template parameters. */ 7192 DECL_INLINE_THROW(uint32_t) 7193 iemNativeEmitMemFetchStoreDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarValue, uint8_t iSegReg, 7194 uint8_t idxVarGCPtrMem, uint8_t cbMem, uint32_t fAlignMaskAndCtl, IEMNATIVEMITMEMOP enmOp, 7195 uintptr_t pfnFunction, uint8_t idxInstr, uint8_t offDisp = 0) 7190 template<uint8_t const a_cbMem, uint32_t const a_fAlignMaskAndCtl, IEMNATIVEMITMEMOP const a_enmOp, bool a_fFlat = false> 7191 DECL_INLINE_THROW(uint32_t) 7192 iemNativeEmitMemFetchStoreDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarValue, uint8_t iSegReg, 7193 uint8_t idxVarGCPtrMem, uintptr_t pfnFunction, uint8_t idxInstr, uint8_t offDisp = 0) 7196 7194 { 7197 7195 /* … … 7200 7198 IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarValue); 7201 7199 PIEMNATIVEVAR const pVarValue = &pReNative->Core.aVars[IEMNATIVE_VAR_IDX_UNPACK(idxVarValue)]; 7202 Assert( enmOp != kIemNativeEmitMemOp_Store7200 Assert( a_enmOp != kIemNativeEmitMemOp_Store 7203 7201 || pVarValue->enmKind == kIemNativeVarKind_Immediate 7204 7202 || pVarValue->enmKind == kIemNativeVarKind_Stack); … … 7208 7206 || pVarGCPtrMem->enmKind == kIemNativeVarKind_Stack, 7209 7207 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_UNEXPECTED_KIND)); 7210 Assert( iSegReg < 6 ||iSegReg == UINT8_MAX);7208 Assert(!a_fFlat ? iSegReg < 6 : iSegReg == UINT8_MAX); 7211 7209 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7212 Assert ( cbMem == 1 || cbMem == 2 || cbMem == 4 ||cbMem == 87213 || cbMem == sizeof(RTUINT128U) ||cbMem == sizeof(RTUINT256U));7210 AssertCompile( a_cbMem == 1 || a_cbMem == 2 || a_cbMem == 4 || a_cbMem == 8 7211 || a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)); 7214 7212 #else 7215 Assert (cbMem == 1 || cbMem == 2 || cbMem == 4 ||cbMem == 8);7216 #endif 7217 Assert (!(fAlignMaskAndCtl & ~(UINT32_C(0xff) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE)));7213 AssertCompile(a_cbMem == 1 || a_cbMem == 2 || a_cbMem == 4 || a_cbMem == 8); 7214 #endif 7215 AssertCompile(!(a_fAlignMaskAndCtl & ~(UINT32_C(0xff) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE))); 7218 7216 AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4); 7219 7217 #ifdef VBOX_STRICT … … 7223 7221 || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_PROT_FLAT 7224 7222 || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_FLAT); 7225 switch ( cbMem)7223 switch (a_cbMem) 7226 7224 { 7227 7225 case 1: 7228 7226 Assert( pfnFunction 7229 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU87230 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU87231 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U16 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU87232 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU87233 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU87234 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U16 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U167235 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U327236 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U647227 == ( a_enmOp == kIemNativeEmitMemOp_Store ? 
(uintptr_t)iemNativeHlpMemFlatStoreDataU8 7228 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8 7229 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U16 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8 7230 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8 7231 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8 7232 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U16 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U16 7233 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U32 7234 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U64 7237 7235 : UINT64_C(0xc000b000a0009000) )); 7238 Assert(! fAlignMaskAndCtl);7236 Assert(!a_fAlignMaskAndCtl); 7239 7237 break; 7240 7238 case 2: 7241 7239 Assert( pfnFunction 7242 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU167243 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU167244 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU167245 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU167246 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U327247 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U647240 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU16 7241 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16 7242 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16 7243 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16 7244 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32 7245 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U64 7248 7246 : UINT64_C(0xc000b000a0009000) )); 7249 Assert( fAlignMaskAndCtl <= 1);7247 Assert(a_fAlignMaskAndCtl <= 1); 7250 7248 break; 7251 7249 case 4: 7252 7250 Assert( pfnFunction 7253 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU327254 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU327255 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU327256 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32_Sx_U647251 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU32 7252 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32 7253 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32 7254 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32_Sx_U64 7257 7255 : UINT64_C(0xc000b000a0009000) )); 7258 Assert( fAlignMaskAndCtl <= 3);7256 Assert(a_fAlignMaskAndCtl <= 3); 7259 7257 break; 7260 7258 case 8: 7261 7259 Assert( pfnFunction 7262 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU647263 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU647260 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemFlatStoreDataU64 7261 : a_enmOp == kIemNativeEmitMemOp_Fetch ? 
(uintptr_t)iemNativeHlpMemFlatFetchDataU64 7264 7262 : UINT64_C(0xc000b000a0009000) )); 7265 Assert( fAlignMaskAndCtl <= 7);7263 Assert(a_fAlignMaskAndCtl <= 7); 7266 7264 break; 7267 7265 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7268 7266 case sizeof(RTUINT128U): 7269 Assert( ( enmOp == kIemNativeEmitMemOp_Fetch7267 Assert( ( a_enmOp == kIemNativeEmitMemOp_Fetch 7270 7268 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU128 7271 7269 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse 7272 7270 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU128NoAc)) 7273 || ( enmOp == kIemNativeEmitMemOp_Store7271 || ( a_enmOp == kIemNativeEmitMemOp_Store 7274 7272 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU128AlignedSse 7275 7273 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU128NoAc))); 7276 7274 Assert( pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse 7277 7275 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU128AlignedSse 7278 ? ( fAlignMaskAndCtl & (IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE)) && (uint8_t)fAlignMaskAndCtl== 157279 : fAlignMaskAndCtl <= 15);7276 ? (a_fAlignMaskAndCtl & (IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE)) && (a_fAlignMaskAndCtl & 0xff) == 15 7277 : a_fAlignMaskAndCtl <= 15U); 7280 7278 break; 7281 7279 case sizeof(RTUINT256U): 7282 Assert( ( enmOp == kIemNativeEmitMemOp_Fetch7280 Assert( ( a_enmOp == kIemNativeEmitMemOp_Fetch 7283 7281 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc 7284 7282 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU256AlignedAvx)) 7285 || ( enmOp == kIemNativeEmitMemOp_Store7283 || ( a_enmOp == kIemNativeEmitMemOp_Store 7286 7284 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU256NoAc 7287 7285 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU256AlignedAvx))); 7288 7286 Assert( pfnFunction == (uintptr_t)iemNativeHlpMemFlatFetchDataU256AlignedAvx 7289 7287 || pfnFunction == (uintptr_t)iemNativeHlpMemFlatStoreDataU256AlignedAvx 7290 ? ( fAlignMaskAndCtl & IEM_MEMMAP_F_ALIGN_GP) && (uint8_t)fAlignMaskAndCtl== 317291 : fAlignMaskAndCtl <= 31);7288 ? (a_fAlignMaskAndCtl & IEM_MEMMAP_F_ALIGN_GP) && (a_fAlignMaskAndCtl & 0xff) == 31 7289 : a_fAlignMaskAndCtl <= 31); 7292 7290 break; 7293 7291 #endif … … 7297 7295 { 7298 7296 Assert(iSegReg < 6); 7299 switch ( cbMem)7297 switch (a_cbMem) 7300 7298 { 7301 7299 case 1: 7302 7300 Assert( pfnFunction 7303 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU87304 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU87305 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U16 ? (uintptr_t)iemNativeHlpMemFetchDataU87306 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU87307 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU87308 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U16 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U167309 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U327310 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U647301 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU8 7302 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU8 7303 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U16 ? (uintptr_t)iemNativeHlpMemFetchDataU8 7304 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? 
(uintptr_t)iemNativeHlpMemFetchDataU8 7305 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU8 7306 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U16 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U16 7307 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U32 7308 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U64 7311 7309 : UINT64_C(0xc000b000a0009000) )); 7312 Assert(! fAlignMaskAndCtl);7310 Assert(!a_fAlignMaskAndCtl); 7313 7311 break; 7314 7312 case 2: 7315 7313 Assert( pfnFunction 7316 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU167317 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU167318 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU167319 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU167320 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U327321 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U647314 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU16 7315 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU16 7316 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU16 7317 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU16 7318 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U32 ? (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32 7319 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U64 7322 7320 : UINT64_C(0xc000b000a0009000) )); 7323 Assert( fAlignMaskAndCtl <= 1);7321 Assert(a_fAlignMaskAndCtl <= 1); 7324 7322 break; 7325 7323 case 4: 7326 7324 Assert( pfnFunction 7327 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU327328 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU327329 : enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU327330 : enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU32_Sx_U647325 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU32 7326 : a_enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU32 7327 : a_enmOp == kIemNativeEmitMemOp_Fetch_Zx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU32 7328 : a_enmOp == kIemNativeEmitMemOp_Fetch_Sx_U64 ? (uintptr_t)iemNativeHlpMemFetchDataU32_Sx_U64 7331 7329 : UINT64_C(0xc000b000a0009000) )); 7332 Assert( fAlignMaskAndCtl <= 3);7330 Assert(a_fAlignMaskAndCtl <= 3); 7333 7331 break; 7334 7332 case 8: 7335 7333 Assert( pfnFunction 7336 == ( enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU647337 : enmOp == kIemNativeEmitMemOp_Fetch ? (uintptr_t)iemNativeHlpMemFetchDataU647334 == ( a_enmOp == kIemNativeEmitMemOp_Store ? (uintptr_t)iemNativeHlpMemStoreDataU64 7335 : a_enmOp == kIemNativeEmitMemOp_Fetch ? 
(uintptr_t)iemNativeHlpMemFetchDataU64 7338 7336 : UINT64_C(0xc000b000a0009000) )); 7339 Assert( fAlignMaskAndCtl <= 7);7337 Assert(a_fAlignMaskAndCtl <= 7); 7340 7338 break; 7341 7339 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7342 7340 case sizeof(RTUINT128U): 7343 Assert( ( enmOp == kIemNativeEmitMemOp_Fetch7341 Assert( ( a_enmOp == kIemNativeEmitMemOp_Fetch 7344 7342 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU128 7345 7343 || pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse 7346 7344 || pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU128NoAc)) 7347 || ( enmOp == kIemNativeEmitMemOp_Store7345 || ( a_enmOp == kIemNativeEmitMemOp_Store 7348 7346 && ( pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU128AlignedSse 7349 7347 || pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU128NoAc))); 7350 7348 Assert( pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse 7351 7349 || pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU128AlignedSse 7352 ? ( fAlignMaskAndCtl & (IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE)) && (uint8_t)fAlignMaskAndCtl== 157353 : fAlignMaskAndCtl <= 15);7350 ? (a_fAlignMaskAndCtl & (IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE)) && (a_fAlignMaskAndCtl & 0xff) == 15 7351 : a_fAlignMaskAndCtl <= 15); 7354 7352 break; 7355 7353 case sizeof(RTUINT256U): 7356 Assert( ( enmOp == kIemNativeEmitMemOp_Fetch7354 Assert( ( a_enmOp == kIemNativeEmitMemOp_Fetch 7357 7355 && ( pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU256NoAc 7358 7356 || pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU256AlignedAvx)) 7359 || ( enmOp == kIemNativeEmitMemOp_Store7357 || ( a_enmOp == kIemNativeEmitMemOp_Store 7360 7358 && ( pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU256NoAc 7361 7359 || pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU256AlignedAvx))); 7362 7360 Assert( pfnFunction == (uintptr_t)iemNativeHlpMemFetchDataU256AlignedAvx 7363 7361 || pfnFunction == (uintptr_t)iemNativeHlpMemStoreDataU256AlignedAvx 7364 ? ( fAlignMaskAndCtl & IEM_MEMMAP_F_ALIGN_GP) && (uint8_t)fAlignMaskAndCtl== 317365 : fAlignMaskAndCtl <= 31);7362 ? (a_fAlignMaskAndCtl & IEM_MEMMAP_F_ALIGN_GP) && (a_fAlignMaskAndCtl & 0xff) == 31 7363 : a_fAlignMaskAndCtl <= 31); 7366 7364 break; 7367 7365 #endif … … 7406 7404 uint16_t const uTlbSeqNo = pReNative->uTlbSeqNo++; 7407 7405 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7408 uint8_t idxRegValueFetch = UINT8_MAX; 7409 7410 if (cbMem == sizeof(RTUINT128U) || cbMem == sizeof(RTUINT256U)) 7411 idxRegValueFetch = enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX 7406 uint8_t idxRegValueFetch; 7407 if RT_CONSTEXPR_IF(a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)) 7408 idxRegValueFetch = a_enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX 7412 7409 : iemNativeVarSimdRegisterAcquire(pReNative, idxVarValue, &off); 7413 7410 else 7414 idxRegValueFetch = enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX7411 idxRegValueFetch = a_enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX 7415 7412 : !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG)) 7416 7413 ? iemNativeVarRegisterSetAndAcquire(pReNative, idxVarValue, IEMNATIVE_CALL_RET_GREG, &off) 7417 7414 : iemNativeVarRegisterAcquire(pReNative, idxVarValue, &off); 7418 7415 #else 7419 uint8_t const idxRegValueFetch = enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX7416 uint8_t const idxRegValueFetch = a_enmOp == kIemNativeEmitMemOp_Store ? UINT8_MAX 7420 7417 : !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG)) 7421 7418 ? 
iemNativeVarRegisterSetAndAcquire(pReNative, idxVarValue, IEMNATIVE_CALL_RET_GREG, &off) 7422 7419 : iemNativeVarRegisterAcquire(pReNative, idxVarValue, &off); 7423 7420 #endif 7424 IEMNATIVEEMITTLBSTATE const TlbState(pReNative, &off, idxVarGCPtrMem, iSegReg, cbMem, offDisp);7421 IEMNATIVEEMITTLBSTATE const TlbState(pReNative, &off, idxVarGCPtrMem, iSegReg, a_cbMem, offDisp); 7425 7422 7426 7423 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7427 7424 uint8_t idxRegValueStore = UINT8_MAX; 7428 7425 7429 if (cbMem == sizeof(RTUINT128U) ||cbMem == sizeof(RTUINT256U))7430 idxRegValueStore = !TlbState.fSkip7431 && enmOp == kIemNativeEmitMemOp_Store7432 7433 7434 7426 if RT_CONSTEXPR_IF(a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)) 7427 idxRegValueStore = a_enmOp == kIemNativeEmitMemOp_Store 7428 && !TlbState.fSkip 7429 && pVarValue->enmKind != kIemNativeVarKind_Immediate 7430 ? iemNativeVarSimdRegisterAcquire(pReNative, idxVarValue, &off, true /*fInitialized*/) 7431 : UINT8_MAX; 7435 7432 else 7436 idxRegValueStore = !TlbState.fSkip7437 && enmOp == kIemNativeEmitMemOp_Store7433 idxRegValueStore = a_enmOp == kIemNativeEmitMemOp_Store 7434 && !TlbState.fSkip 7438 7435 && pVarValue->enmKind != kIemNativeVarKind_Immediate 7439 7436 ? iemNativeVarRegisterAcquireInited(pReNative, idxVarValue, &off) … … 7441 7438 7442 7439 #else 7443 uint8_t const idxRegValueStore = !TlbState.fSkip7444 && enmOp == kIemNativeEmitMemOp_Store7440 uint8_t const idxRegValueStore = a_enmOp == kIemNativeEmitMemOp_Store 7441 && !TlbState.fSkip 7445 7442 && pVarValue->enmKind != kIemNativeVarKind_Immediate 7446 7443 ? iemNativeVarRegisterAcquireInited(pReNative, idxVarValue, &off) … … 7509 7506 uint32_t fVolGregMask = IEMNATIVE_CALL_VOLATILE_GREG_MASK; 7510 7507 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7511 if (cbMem == sizeof(RTUINT128U) ||cbMem == sizeof(RTUINT256U))7508 if RT_CONSTEXPR_IF(a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)) 7512 7509 { 7513 7510 /* … … 7521 7518 * as it will be overwritten anyway. 7522 7519 */ 7523 uint8_t const idxRegArgValue = iSegReg == UINT8_MAX? IEMNATIVE_CALL_ARG2_GREG : IEMNATIVE_CALL_ARG3_GREG;7520 uint8_t const idxRegArgValue = a_fFlat ? IEMNATIVE_CALL_ARG2_GREG : IEMNATIVE_CALL_ARG3_GREG; 7524 7521 off = iemNativeEmitLoadArgGregWithSimdVarAddrForMemAccess(pReNative, off, idxRegArgValue, idxVarValue, 7525 enmOp == kIemNativeEmitMemOp_Store /*fSyncRegWithStack*/);7522 a_enmOp == kIemNativeEmitMemOp_Store /*fSyncRegWithStack*/); 7526 7523 fVolGregMask &= ~RT_BIT_32(idxRegArgValue); 7527 7524 } 7528 7525 else 7529 7526 #endif 7530 if (enmOp == kIemNativeEmitMemOp_Store)7531 { 7532 uint8_t const idxRegArgValue = iSegReg == UINT8_MAX? IEMNATIVE_CALL_ARG2_GREG : IEMNATIVE_CALL_ARG3_GREG;7527 if RT_CONSTEXPR_IF(a_enmOp == kIemNativeEmitMemOp_Store) 7528 { 7529 uint8_t const idxRegArgValue = a_fFlat ? IEMNATIVE_CALL_ARG2_GREG : IEMNATIVE_CALL_ARG3_GREG; 7533 7530 off = iemNativeEmitLoadArgGregFromImmOrStackVar(pReNative, off, idxRegArgValue, idxVarValue, 0 /*cbAppend*/, 7534 7531 #ifdef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP … … 7548 7545 #endif 7549 7546 7550 if (iSegReg != UINT8_MAX)7547 if RT_CONSTEXPR_IF(!a_fFlat) 7551 7548 { 7552 7549 /* IEMNATIVE_CALL_ARG2_GREG = iSegReg */ … … 7557 7554 #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING 7558 7555 /* Do delayed EFLAGS calculations. 
*/ 7559 if (enmOp == kIemNativeEmitMemOp_Store || cbMem == sizeof(RTUINT128U) ||cbMem == sizeof(RTUINT256U))7560 { 7561 if (iSegReg == UINT8_MAX)7556 if RT_CONSTEXPR_IF(a_enmOp == kIemNativeEmitMemOp_Store || a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)) 7557 { 7558 if RT_CONSTEXPR_IF(a_fFlat) 7562 7559 off = iemNativeDoPostponedEFlagsAtTlbMiss< RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) 7563 7560 | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState, … … 7569 7566 fHstRegsNotToSave); 7570 7567 } 7571 else if (iSegReg == UINT8_MAX)7568 else if RT_CONSTEXPR_IF(a_fFlat) 7572 7569 off = iemNativeDoPostponedEFlagsAtTlbMiss< RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)>(pReNative, off, &TlbState, 7573 7570 fHstRegsNotToSave); … … 7587 7584 * Put the result in the right register if this is a fetch. 7588 7585 */ 7589 if (enmOp != kIemNativeEmitMemOp_Store)7586 if RT_CONSTEXPR_IF(a_enmOp != kIemNativeEmitMemOp_Store) 7590 7587 { 7591 7588 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 7592 if ( cbMem == sizeof(RTUINT128U) 7593 || cbMem == sizeof(RTUINT256U)) 7589 if RT_CONSTEXPR_IF(a_cbMem == sizeof(RTUINT128U) || a_cbMem == sizeof(RTUINT256U)) 7594 7590 { 7595 Assert( enmOp == kIemNativeEmitMemOp_Fetch);7591 Assert(a_enmOp == kIemNativeEmitMemOp_Fetch); 7596 7592 7597 7593 /* Sync the value on the stack with the host register assigned to the variable. */ … … 7643 7639 * TlbLookup: 7644 7640 */ 7645 off = iemNativeEmitTlbLookup<true>(pReNative, off, &TlbState, iSegReg, cbMem,fAlignMaskAndCtl,7646 enmOp == kIemNativeEmitMemOp_Store ? IEM_ACCESS_TYPE_WRITE : IEM_ACCESS_TYPE_READ,7641 off = iemNativeEmitTlbLookup<true>(pReNative, off, &TlbState, iSegReg, a_cbMem, a_fAlignMaskAndCtl, 7642 a_enmOp == kIemNativeEmitMemOp_Store ? IEM_ACCESS_TYPE_WRITE : IEM_ACCESS_TYPE_READ, 7647 7643 idxLabelTlbLookup, idxLabelTlbMiss, idxRegMemResult, offDisp); 7648 7644 … … 7653 7649 # ifdef IEM_WITH_TLB_STATISTICS 7654 7650 off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2, 7655 enmOp == kIemNativeEmitMemOp_Store7651 a_enmOp == kIemNativeEmitMemOp_Store 7656 7652 ? 
RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch) 7657 7653 : RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStore)); 7658 7654 # endif 7659 switch ( enmOp)7655 switch (a_enmOp) 7660 7656 { 7661 7657 case kIemNativeEmitMemOp_Store: 7662 7658 if (pVarValue->enmKind != kIemNativeVarKind_Immediate) 7663 7659 { 7664 switch ( cbMem)7660 switch (a_cbMem) 7665 7661 { 7666 7662 case 1: … … 7690 7686 else 7691 7687 { 7692 switch ( cbMem)7688 switch (a_cbMem) 7693 7689 { 7694 7690 case 1: … … 7718 7714 case kIemNativeEmitMemOp_Fetch_Zx_U32: 7719 7715 case kIemNativeEmitMemOp_Fetch_Zx_U64: 7720 switch ( cbMem)7716 switch (a_cbMem) 7721 7717 { 7722 7718 case 1: … … 7754 7750 7755 7751 case kIemNativeEmitMemOp_Fetch_Sx_U16: 7756 Assert( cbMem == 1);7752 Assert(a_cbMem == 1); 7757 7753 off = iemNativeEmitLoadGprByGprU16SignExtendedFromS8Ex(pCodeBuf, off, idxRegValueFetch, idxRegMemResult); 7758 7754 break; 7759 7755 7760 7756 case kIemNativeEmitMemOp_Fetch_Sx_U32: 7761 Assert( cbMem == 1 ||cbMem == 2);7762 if ( cbMem == 1)7757 Assert(a_cbMem == 1 || a_cbMem == 2); 7758 if (a_cbMem == 1) 7763 7759 off = iemNativeEmitLoadGprByGprU32SignExtendedFromS8Ex(pCodeBuf, off, idxRegValueFetch, idxRegMemResult); 7764 7760 else … … 7767 7763 7768 7764 case kIemNativeEmitMemOp_Fetch_Sx_U64: 7769 switch ( cbMem)7765 switch (a_cbMem) 7770 7766 { 7771 7767 case 1: … … 7802 7798 } 7803 7799 #else 7804 RT_NOREF( fAlignMaskAndCtl,idxLabelTlbMiss);7800 RT_NOREF(idxLabelTlbMiss); 7805 7801 #endif 7806 7802 … … 7818 7814 /* 8-bit segmented: */ 7819 7815 #define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \ 7820 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, a_iSeg, a_GCPtrMem, \ 7821 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch, \ 7822 (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7816 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch>( \ 7817 pReNative, off, a_u8Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7823 7818 7824 7819 #define IEM_MC_FETCH_MEM_U8_ZX_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \ 7825 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, \ 7826 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U16, \ 7827 (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7820 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U16>( \ 7821 pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7828 7822 7829 7823 #define IEM_MC_FETCH_MEM_U8_ZX_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \ 7830 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7831 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U32, \ 7832 (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7824 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U32>( \ 7825 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7833 7826 7834 7827 #define IEM_MC_FETCH_MEM_U8_ZX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7835 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7836 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 7837 (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7828 off = 
iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U64>( \ 7829 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr) 7838 7830 7839 7831 #define IEM_MC_FETCH_MEM_U8_SX_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \ 7840 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, \ 7841 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U16, \ 7842 (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U16, pCallEntry->idxInstr) 7832 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U16>(\ 7833 pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U16, pCallEntry->idxInstr) 7843 7834 7844 7835 #define IEM_MC_FETCH_MEM_U8_SX_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \ 7845 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7846 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 7847 (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U32, pCallEntry->idxInstr) 7836 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U32>(\ 7837 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U32, pCallEntry->idxInstr) 7848 7838 7849 7839 #define IEM_MC_FETCH_MEM_U8_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7850 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7851 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 7852 (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U64, pCallEntry->idxInstr) 7840 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U64>(\ 7841 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU8_Sx_U64, pCallEntry->idxInstr) 7853 7842 7854 7843 /* 16-bit segmented: */ 7855 7844 #define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \ 7856 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, \ 7857 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7858 (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7845 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7846 pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7859 7847 7860 7848 #define IEM_MC_FETCH_MEM_U16_DISP(a_u16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \ 7861 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, \ 7862 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7863 (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr, a_offDisp) 7849 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7850 pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr, a_offDisp) 7864 7851 7865 7852 #define IEM_MC_FETCH_MEM_U16_ZX_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \ 7866 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7867 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U32, \ 7868 (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7853 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), 
sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U32>(\ 7854 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7869 7855 7870 7856 #define IEM_MC_FETCH_MEM_U16_ZX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7871 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7872 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 7873 (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7857 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64>(\ 7858 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr) 7874 7859 7875 7860 #define IEM_MC_FETCH_MEM_U16_SX_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \ 7876 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7877 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 7878 (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7861 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32>(\ 7862 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7879 7863 7880 7864 #define IEM_MC_FETCH_MEM_U16_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7881 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7882 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 7883 (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U64, pCallEntry->idxInstr) 7865 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64>(\ 7866 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U64, pCallEntry->idxInstr) 7884 7867 7885 7868 7886 7869 /* 32-bit segmented: */ 7887 7870 #define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \ 7888 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7889 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7890 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7871 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7872 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7891 7873 7892 7874 #define IEM_MC_FETCH_MEM_U32_DISP(a_u32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \ 7893 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, \ 7894 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7895 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp) 7875 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7876 pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp) 7896 7877 7897 7878 #define IEM_MC_FETCH_MEM_U32_ZX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7898 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7899 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 7900 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7879 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64>(\ 7880 
pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7901 7881 7902 7882 #define IEM_MC_FETCH_MEM_U32_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7903 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7904 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 7905 (uintptr_t)iemNativeHlpMemFetchDataU32_Sx_U64, pCallEntry->idxInstr) 7883 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64>(\ 7884 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32_Sx_U64, pCallEntry->idxInstr) 7906 7885 7907 7886 #define IEM_MC_FETCH_MEM_I16(a_i16Dst, a_iSeg, a_GCPtrMem) \ 7908 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i16Dst, a_iSeg, a_GCPtrMem, \ 7909 sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 7910 (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7887 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32>(\ 7888 pReNative, off, a_i16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7911 7889 7912 7890 #define IEM_MC_FETCH_MEM_I16_DISP(a_i16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \ 7913 off = iemNativeEmitMemFetchStoreDataCommon (pReNative, off, a_i16Dst, a_iSeg, a_GCPtrMem,\7914 sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \7915 (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr,a_offDisp)7891 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32>(\ 7892 pReNative, off, a_i16Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU16_Sx_U32, pCallEntry->idxInstr, \ 7893 a_offDisp) 7916 7894 7917 7895 #define IEM_MC_FETCH_MEM_I32(a_i32Dst, a_iSeg, a_GCPtrMem) \ 7918 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i32Dst, a_iSeg, a_GCPtrMem, \ 7919 sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7920 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7896 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7897 pReNative, off, a_i32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7921 7898 7922 7899 #define IEM_MC_FETCH_MEM_I32_DISP(a_i32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \ 7923 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i32Dst, a_iSeg, a_GCPtrMem, \ 7924 sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7925 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp) 7900 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7901 pReNative, off, a_i32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp) 7926 7902 7927 7903 #define IEM_MC_FETCH_MEM_I64(a_i64Dst, a_iSeg, a_GCPtrMem) \ 7928 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i64Dst, a_iSeg, a_GCPtrMem, \ 7929 sizeof(int64_t), sizeof(int64_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7930 (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7904 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int64_t), sizeof(int64_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7905 pReNative, off, a_i64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7931 
7906 7932 7907 AssertCompileSize(RTFLOAT32U, sizeof(uint32_t)); 7933 7908 #define IEM_MC_FETCH_MEM_R32(a_r32Dst, a_iSeg, a_GCPtrMem) \ 7934 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_r32Dst, a_iSeg, a_GCPtrMem, \ 7935 sizeof(RTFLOAT32U), sizeof(RTFLOAT32U) - 1, kIemNativeEmitMemOp_Fetch, \ 7936 (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7909 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTFLOAT32U), sizeof(RTFLOAT32U) - 1, kIemNativeEmitMemOp_Fetch>(\ 7910 pReNative, off, a_r32Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr) 7937 7911 7938 7912 7939 7913 /* 64-bit segmented: */ 7940 7914 #define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \ 7941 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, \ 7942 sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7943 (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7915 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Fetch>(\ 7916 pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7944 7917 7945 7918 AssertCompileSize(RTFLOAT64U, sizeof(uint64_t)); 7946 7919 #define IEM_MC_FETCH_MEM_R64(a_r64Dst, a_iSeg, a_GCPtrMem) \ 7947 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_r64Dst, a_iSeg, a_GCPtrMem, \ 7948 sizeof(RTFLOAT64U), sizeof(RTFLOAT64U) - 1, kIemNativeEmitMemOp_Fetch, \ 7949 (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7920 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTFLOAT64U), sizeof(RTFLOAT64U) - 1, kIemNativeEmitMemOp_Fetch>(\ 7921 pReNative, off, a_r64Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr) 7950 7922 7951 7923 7952 7924 /* 8-bit flat: */ 7953 7925 #define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \ 7954 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, UINT8_MAX, a_GCPtrMem, \ 7955 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch, \ 7956 (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7926 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch, true>(\ 7927 pReNative, off, a_u8Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7957 7928 7958 7929 #define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U16(a_u16Dst, a_GCPtrMem) \ 7959 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, \ 7960 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U16, \ 7961 (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7930 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U16, true>(\ 7931 pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7962 7932 7963 7933 #define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U32(a_u32Dst, a_GCPtrMem) \ 7964 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 7965 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U32, \ 7966 (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7934 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U32, true>(\ 7935 pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, 
(uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7967 7936 7968 7937 #define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U64(a_u64Dst, a_GCPtrMem) \ 7969 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 7970 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 7971 (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7938 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Zx_U64, true>(\ 7939 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr) 7972 7940 7973 7941 #define IEM_MC_FETCH_MEM_FLAT_U8_SX_U16(a_u16Dst, a_GCPtrMem) \ 7974 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, \ 7975 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U16, \ 7976 (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U16, pCallEntry->idxInstr) 7942 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U16, true>(\ 7943 pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U16, pCallEntry->idxInstr) 7977 7944 7978 7945 #define IEM_MC_FETCH_MEM_FLAT_U8_SX_U32(a_u32Dst, a_GCPtrMem) \ 7979 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 7980 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 7981 (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U32, pCallEntry->idxInstr) 7946 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U32, true>(\ 7947 pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U32, pCallEntry->idxInstr) 7982 7948 7983 7949 #define IEM_MC_FETCH_MEM_FLAT_U8_SX_U64(a_u64Dst, a_GCPtrMem) \ 7984 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 7985 sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 7986 (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U64, pCallEntry->idxInstr) 7950 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Fetch_Sx_U64, true>(\ 7951 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU8_Sx_U64, pCallEntry->idxInstr) 7987 7952 7988 7953 7989 7954 /* 16-bit flat: */ 7990 7955 #define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \ 7991 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, \ 7992 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7993 (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 7956 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 7957 pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 7994 7958 7995 7959 #define IEM_MC_FETCH_MEM_FLAT_U16_DISP(a_u16Dst, a_GCPtrMem, a_offDisp) \ 7996 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, \ 7997 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, \ 7998 (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr, a_offDisp) 7960 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 7961 pReNative, off, a_u16Dst, 
UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr, a_offDisp) 7999 7962 8000 7963 #define IEM_MC_FETCH_MEM_FLAT_U16_ZX_U32(a_u32Dst, a_GCPtrMem) \ 8001 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 8002 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U32, \ 8003 (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 7964 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U32, true>(\ 7965 pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 8004 7966 8005 7967 #define IEM_MC_FETCH_MEM_FLAT_U16_ZX_U64(a_u64Dst, a_GCPtrMem) \ 8006 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 8007 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 8008 (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 7968 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, true>(\ 7969 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr) 8009 7970 8010 7971 #define IEM_MC_FETCH_MEM_FLAT_U16_SX_U32(a_u32Dst, a_GCPtrMem) \ 8011 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 8012 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 8013 (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7972 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, true>(\ 7973 pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr) 8014 7974 8015 7975 #define IEM_MC_FETCH_MEM_FLAT_U16_SX_U64(a_u64Dst, a_GCPtrMem) \ 8016 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 8017 sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 8018 (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U64, pCallEntry->idxInstr) 7976 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, true>(\ 7977 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U64, pCallEntry->idxInstr) 8019 7978 8020 7979 /* 32-bit flat: */ 8021 7980 #define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \ 8022 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 8023 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 8024 (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 7981 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 7982 pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 8025 7983 8026 7984 #define IEM_MC_FETCH_MEM_FLAT_U32_DISP(a_u32Dst, a_GCPtrMem, a_offDisp) \ 8027 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, \ 8028 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 8029 (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr, a_offDisp) 7985 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 7986 pReNative, 
off, a_u32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr, a_offDisp) 8030 7987 8031 7988 #define IEM_MC_FETCH_MEM_FLAT_U32_ZX_U64(a_u64Dst, a_GCPtrMem) \ 8032 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 8033 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, \ 8034 (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 7989 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Zx_U64, true>(\ 7990 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 8035 7991 8036 7992 #define IEM_MC_FETCH_MEM_FLAT_U32_SX_U64(a_u64Dst, a_GCPtrMem) \ 8037 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \ 8038 sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, \ 8039 (uintptr_t)iemNativeHlpMemFlatFetchDataU32_Sx_U64, pCallEntry->idxInstr) 7993 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U64, true>(\ 7994 pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32_Sx_U64, pCallEntry->idxInstr) 8040 7995 8041 7996 #define IEM_MC_FETCH_MEM_FLAT_I16(a_i16Dst, a_GCPtrMem) \ 8042 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i16Dst, UINT8_MAX, a_GCPtrMem, \ 8043 sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 8044 (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr) 7997 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, true>(\ 7998 pReNative, off, a_i16Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr) 8045 7999 8046 8000 #define IEM_MC_FETCH_MEM_FLAT_I16_DISP(a_i16Dst, a_GCPtrMem, a_offDisp) \ 8047 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i16Dst, UINT8_MAX, a_GCPtrMem, \ 8048 sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, \ 8049 (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr, a_offDisp) 8001 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int16_t), sizeof(int16_t) - 1, kIemNativeEmitMemOp_Fetch_Sx_U32, true>(\ 8002 pReNative, off, a_i16Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU16_Sx_U32, pCallEntry->idxInstr, a_offDisp) 8050 8003 8051 8004 #define IEM_MC_FETCH_MEM_FLAT_I32(a_i32Dst, a_GCPtrMem) \ 8052 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i32Dst, UINT8_MAX, a_GCPtrMem, \ 8053 sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 8054 (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 8005 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 8006 pReNative, off, a_i32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr) 8055 8007 8056 8008 #define IEM_MC_FETCH_MEM_FLAT_I32_DISP(a_i32Dst, a_GCPtrMem, a_offDisp) \ 8057 off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i32Dst, UINT8_MAX, a_GCPtrMem, \ 8058 sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, \ 8059 (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr, a_offDisp) 8009 off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int32_t), sizeof(int32_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\ 8010 
+        pReNative, off, a_i32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr, a_offDisp)

 #define IEM_MC_FETCH_MEM_FLAT_I64(a_i64Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_i64Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(int64_t), sizeof(int64_t) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(int64_t), sizeof(int64_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_i64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_R32(a_r32Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_r32Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTFLOAT32U), sizeof(RTFLOAT32U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTFLOAT32U), sizeof(RTFLOAT32U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_r32Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr)

 /* 64-bit flat: */
 #define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_R64(a_r64Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_r64Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTFLOAT64U), sizeof(RTFLOAT64U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTFLOAT64U), sizeof(RTFLOAT64U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_r64Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)

 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
 /* 128-bit segmented: */
 #define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU128, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU128, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_U128_ALIGN_SSE(a_u128Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, sizeof(RTUINT128U), \
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), \
                 (sizeof(RTUINT128U) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse, pCallEntry->idxInstr)
+                kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse, pCallEntry->idxInstr)

 AssertCompileSize(X86XMMREG, sizeof(RTUINT128U));
 #define IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(a_uXmmDst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_uXmmDst, a_iSeg, a_GCPtrMem, sizeof(X86XMMREG), \
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86XMMREG), \
                 (sizeof(X86XMMREG) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse, pCallEntry->idxInstr)
+                kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_uXmmDst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU128AlignedSse, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_U128_NO_AC(a_u128Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU128NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_XMM_NO_AC(a_u128Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(X86XMMREG), sizeof(X86XMMREG) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86XMMREG), sizeof(X86XMMREG) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u128Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU128NoAc, pCallEntry->idxInstr)

 /* 128-bit flat: */
 #define IEM_MC_FETCH_MEM_FLAT_U128(a_u128Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU128, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU128, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_U128_ALIGN_SSE(a_u128Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, sizeof(RTUINT128U), \
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), \
                 (sizeof(RTUINT128U) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse, pCallEntry->idxInstr)
+                kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE(a_uXmmDst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_uXmmDst, UINT8_MAX, a_GCPtrMem, sizeof(X86XMMREG), \
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86XMMREG), \
                 (sizeof(X86XMMREG) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse, pCallEntry->idxInstr)
+                kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_uXmmDst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU128AlignedSse, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_U128_NO_AC(a_u128Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u128Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU128NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_XMM_NO_AC(a_uXmmDst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_uXmmDst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(X86XMMREG), sizeof(X86XMMREG) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86XMMREG), sizeof(X86XMMREG) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_uXmmDst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU128NoAc, pCallEntry->idxInstr)

 /* 256-bit segmented: */
 #define IEM_MC_FETCH_MEM_U256(a_u256Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_U256_NO_AC(a_u256Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_U256_ALIGN_AVX(a_u256Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, sizeof(RTUINT256U), \
-                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU256AlignedAvx, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), \
+                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, \
+                kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU256AlignedAvx, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_YMM_NO_AC(a_u256Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, \
-                sizeof(X86YMMREG), sizeof(X86YMMREG) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86YMMREG), sizeof(X86YMMREG) - 1, kIemNativeEmitMemOp_Fetch>(\
+        pReNative, off, a_u256Dst, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFetchDataU256NoAc, pCallEntry->idxInstr)

 /* 256-bit flat: */
 #define IEM_MC_FETCH_MEM_FLAT_U256(a_u256Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_U256_NO_AC(a_u256Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_U256_ALIGN_AVX(a_u256Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, sizeof(RTUINT256U), \
-                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU256AlignedAvx, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), \
+                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, \
+                kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_u256Dst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU256AlignedAvx, pCallEntry->idxInstr)

 #define IEM_MC_FETCH_MEM_FLAT_YMM_NO_AC(a_uYmmDst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_uYmmDst, UINT8_MAX, a_GCPtrMem, \
-                sizeof(X86YMMREG), sizeof(X86YMMREG) - 1, kIemNativeEmitMemOp_Fetch, \
-                (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(X86YMMREG), sizeof(X86YMMREG) - 1, kIemNativeEmitMemOp_Fetch, true>(\
+        pReNative, off, a_uYmmDst, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatFetchDataU256NoAc, pCallEntry->idxInstr)

 #endif
…
 #define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, \
-                sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U16(a_iSeg, a_GCPtrMem, a_u16Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, \
-                sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U32(a_iSeg, a_GCPtrMem, a_u32Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, \
-                sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U64(a_iSeg, a_GCPtrMem, a_u64Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, \
-                sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U8(a_GCPtrMem, a_u8Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint8_t), 0 /*fAlignMaskAndCtl*/, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u8Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U16(a_GCPtrMem, a_u16Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint16_t), sizeof(uint16_t) - 1, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u16Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U32(a_GCPtrMem, a_u32Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint32_t), sizeof(uint32_t) - 1, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u32Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U64(a_GCPtrMem, a_u64Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(uint64_t), sizeof(uint64_t) - 1, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u64Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U8_CONST(a_iSeg, a_GCPtrMem, a_u8ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u8ConstValue, a_iSeg, a_GCPtrMem, sizeof(uint8_t), \
-                (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint8_t)>(\
+        pReNative, off, a_u8ConstValue, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U16_CONST(a_iSeg, a_GCPtrMem, a_u16ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u16ConstValue, a_iSeg, a_GCPtrMem, sizeof(uint16_t), \
-                (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint16_t)>(\
+        pReNative, off, a_u16ConstValue, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U32_CONST(a_iSeg, a_GCPtrMem, a_u32ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u32ConstValue, a_iSeg, a_GCPtrMem, sizeof(uint32_t), \
-                (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint32_t)>(\
+        pReNative, off, a_u32ConstValue, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_U64_CONST(a_iSeg, a_GCPtrMem, a_u64ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u64ConstValue, a_iSeg, a_GCPtrMem, sizeof(uint64_t), \
-                (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint64_t)>(\
+        pReNative, off, a_u64ConstValue, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U8_CONST(a_GCPtrMem, a_u8ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u8ConstValue, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t), \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint8_t), true>(\
+        pReNative, off, a_u8ConstValue, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U16_CONST(a_GCPtrMem, a_u16ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u16ConstValue, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t), \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint16_t), true>(\
+        pReNative, off, a_u16ConstValue, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U32_CONST(a_GCPtrMem, a_u32ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u32ConstValue, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t), \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint32_t), true>(\
+        pReNative, off, a_u32ConstValue, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)

 #define IEM_MC_STORE_MEM_FLAT_U64_CONST(a_GCPtrMem, a_u64ConstValue) \
-    off = iemNativeEmitMemStoreConstDataCommon(pReNative, off, a_u64ConstValue, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t), \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)
+    off = iemNativeEmitMemStoreConstDataCommon<sizeof(uint64_t), true>(\
+        pReNative, off, a_u64ConstValue, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)

 /** Emits code for IEM_MC_STORE_MEM_U8/16/32/64_CONST and
  *  IEM_MC_STORE_MEM_FLAT_U8/16/32/64_CONST (with iSegReg = UINT8_MAX). */
+template<uint8_t const a_cbMem, bool a_fFlat = false>
 DECL_INLINE_THROW(uint32_t)
 iemNativeEmitMemStoreConstDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint64_t uValueConst, uint8_t iSegReg,
-                                     uint8_t idxVarGCPtrMem, uint8_t cbMem, uintptr_t pfnFunction, uint8_t idxInstr)
+                                     uint8_t idxVarGCPtrMem, uintptr_t pfnFunction, uint8_t idxInstr)
 {
     /*
…
      * to do the grunt work.
      */
-    uint8_t const idxVarConstValue = iemNativeVarAllocConst(pReNative, cbMem, uValueConst);
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, idxVarConstValue, iSegReg, idxVarGCPtrMem,
-                cbMem, cbMem - 1, kIemNativeEmitMemOp_Store, pfnFunction, idxInstr);
+    uint8_t const idxVarConstValue = iemNativeVarAllocConst(pReNative, a_cbMem, uValueConst);
+    off = iemNativeEmitMemFetchStoreDataCommon<a_cbMem, a_cbMem - 1, kIemNativeEmitMemOp_Store, a_fFlat>(
+        pReNative, off, idxVarConstValue, iSegReg, idxVarGCPtrMem, pfnFunction, idxInstr);
     iemNativeVarFreeLocal(pReNative, idxVarConstValue);
     return off;
 }
…
 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
 # define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Value, a_iSeg, a_GCPtrMem, sizeof(RTUINT128U), \
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), \
                 (sizeof(RTUINT128U) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU128AlignedSse, pCallEntry->idxInstr)
+                kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u128Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU128AlignedSse, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_U128_NO_AC(a_iSeg, a_GCPtrMem, a_u128Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Value, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u128Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU128NoAc, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_U256_NO_AC(a_iSeg, a_GCPtrMem, a_u256Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Value, a_iSeg, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u256Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU256NoAc, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_U256_ALIGN_AVX(a_iSeg, a_GCPtrMem, a_u256Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Value, a_iSeg, a_GCPtrMem, sizeof(RTUINT256U), \
-                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemStoreDataU256AlignedAvx, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), \
+                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, \
+                kIemNativeEmitMemOp_Store>(\
+        pReNative, off, a_u256Value, a_iSeg, a_GCPtrMem, (uintptr_t)iemNativeHlpMemStoreDataU256AlignedAvx, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_FLAT_U128_ALIGN_SSE(a_GCPtrMem, a_u128Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Value, UINT8_MAX, a_GCPtrMem, sizeof(RTUINT128U), \
-                (sizeof(RTUINT128U) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
-                kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU128AlignedSse, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), \
+                (sizeof(RTUINT128U) - 1U) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE, \
+                kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u128Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU128AlignedSse, \
+        pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_FLAT_U128_NO_AC(a_GCPtrMem, a_u128Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u128Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU128NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT128U), sizeof(RTUINT128U) - 1, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u128Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU128NoAc, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_FLAT_U256_NO_AC(a_GCPtrMem, a_u256Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Value, UINT8_MAX, a_GCPtrMem, \
-                sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU256NoAc, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), sizeof(RTUINT256U) - 1, kIemNativeEmitMemOp_Store, true>(\
+        pReNative, off, a_u256Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU256NoAc, pCallEntry->idxInstr)

 # define IEM_MC_STORE_MEM_FLAT_U256_ALIGN_AVX(a_GCPtrMem, a_u256Value) \
-    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u256Value, UINT8_MAX, a_GCPtrMem, sizeof(RTUINT256U), \
-                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, kIemNativeEmitMemOp_Store, \
-                (uintptr_t)iemNativeHlpMemFlatStoreDataU256AlignedAvx, pCallEntry->idxInstr)
+    off = iemNativeEmitMemFetchStoreDataCommon<sizeof(RTUINT256U), \
+                (sizeof(RTUINT256U) - 1U) | IEM_MEMMAP_F_ALIGN_GP, kIemNativeEmitMemOp_Store, \
+                true>(\
+        pReNative, off, a_u256Value, UINT8_MAX, a_GCPtrMem, (uintptr_t)iemNativeHlpMemFlatStoreDataU256AlignedAvx, pCallEntry->idxInstr)
 #endif
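The change applied throughout these hunks is uniform: the access size, the alignment mask/control flags, the memory operation and the flat-address indicator are lifted from run-time arguments of iemNativeEmitMemFetchStoreDataCommon (and of the const-store wrapper iemNativeEmitMemStoreConstDataCommon) into non-type template parameters, so the run-time Assert checks can become AssertCompile and each IEM_MC_* macro expands into a call to an emitter instantiation specialised for its operand size. The following is a minimal, self-contained C++ sketch of that technique; every name in it (emitMemOpRuntime, emitMemOpTemplated, MemOp) is a hypothetical stand-in and not part of the VirtualBox sources.

#include <cstdint>
#include <cstdio>

/* Hypothetical stand-in for the emitter's memory-operation enum. */
enum MemOp { MemOp_Fetch, MemOp_Store };

/* Run-time-parameter style: cbMem/enmOp/fFlat are ordinary arguments, so the
   size check can only be a run-time assertion and the selects stay in the
   generated code for every caller. */
static uint32_t emitMemOpRuntime(uint32_t off, uint8_t cbMem, MemOp enmOp, bool fFlat)
{
    /* assert(cbMem == 1 || cbMem == 2 || cbMem == 4 || cbMem == 8); */
    uint32_t cbEmitted = (enmOp == MemOp_Store ? 4u : 6u) + (fFlat ? 0u : 2u) + cbMem;
    return off + cbEmitted;   /* pretend we appended that many instruction bytes */
}

/* Template-parameter style, mirroring the direction of this changeset: the
   same knobs become non-type template parameters, invalid sizes are rejected
   at compile time (static_assert playing the role of AssertCompile), and the
   selects below constant-fold in every instantiation. */
template<uint8_t a_cbMem, MemOp a_enmOp, bool a_fFlat = false>
static uint32_t emitMemOpTemplated(uint32_t off)
{
    static_assert(a_cbMem == 1 || a_cbMem == 2 || a_cbMem == 4 || a_cbMem == 8,
                  "unsupported access size");
    uint32_t cbEmitted = (a_enmOp == MemOp_Store ? 4u : 6u) + (a_fFlat ? 0u : 2u) + a_cbMem;
    return off + cbEmitted;
}

int main()
{
    uint32_t off = 0;
    off = emitMemOpRuntime(off, sizeof(uint64_t), MemOp_Store, true);
    off = emitMemOpTemplated<sizeof(uint64_t), MemOp_Store, true>(off);
    std::printf("off=%u\n", (unsigned)off);
    return 0;
}

The trade-off is the usual one for this kind of template parameterisation: one instantiation per (size, operation, flat) combination instead of a single shared function, in exchange for compile-time validation and dead-branch elimination at every call site, which is also why the wrapper above simply forwards its own a_cbMem and a_fFlat template arguments to the common emitter.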