Changeset 66909 in vbox

Timestamp:
    2017-05-16 01:29:44 PM (8 years ago)
Location:
    trunk/src/VBox
Files:
    6 edited
Legend:
    " " unmodified    "+" added    "-" removed
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
    (diff r66906 → r66909)

 
 /**
+ * Stores a data dqword.
+ *
+ * @returns Strict VBox status code.
+ * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
+ * @param   iSegReg             The index of the segment register to use for
+ *                              this access.  The base and limits are checked.
+ * @param   GCPtrMem            The address of the guest memory.
+ * @param   pu256Value          Pointer to the value to store.
+ */
+IEM_STATIC VBOXSTRICTRC iemMemStoreDataU256(PVMCPU pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value)
+{
+    /* The lazy approach for now... */
+    PRTUINT256U pu256Dst;
+    VBOXSTRICTRC rc = iemMemMap(pVCpu, (void **)&pu256Dst, sizeof(*pu256Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+    if (rc == VINF_SUCCESS)
+    {
+        pu256Dst->au64[0] = pu256Value->au64[0];
+        pu256Dst->au64[1] = pu256Value->au64[1];
+        pu256Dst->au64[2] = pu256Value->au64[2];
+        pu256Dst->au64[3] = pu256Value->au64[3];
+        rc = iemMemCommitAndUnmap(pVCpu, pu256Dst, IEM_ACCESS_DATA_W);
+    }
+    return rc;
+}
+
+
+#ifdef IEM_WITH_SETJMP
+/**
+ * Stores a data dqword, longjmp on error.
+ *
+ * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
+ * @param   iSegReg             The index of the segment register to use for
+ *                              this access.  The base and limits are checked.
+ * @param   GCPtrMem            The address of the guest memory.
+ * @param   pu256Value          Pointer to the value to store.
+ */
+IEM_STATIC void iemMemStoreDataU256Jmp(PVMCPU pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value)
+{
+    /* The lazy approach for now... */
+    PRTUINT256U pu256Dst = (PRTUINT256U)iemMemMapJmp(pVCpu, sizeof(*pu256Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+    pu256Dst->au64[0] = pu256Value->au64[0];
+    pu256Dst->au64[1] = pu256Value->au64[1];
+    pu256Dst->au64[2] = pu256Value->au64[2];
+    pu256Dst->au64[3] = pu256Value->au64[3];
+    iemMemCommitAndUnmapJmp(pVCpu, pu256Dst, IEM_ACCESS_DATA_W);
+}
+#endif
+
+
+/**
+ * Stores a data dqword, AVX aligned.
+ *
+ * @returns Strict VBox status code.
+ * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
+ * @param   iSegReg             The index of the segment register to use for
+ *                              this access.  The base and limits are checked.
+ * @param   GCPtrMem            The address of the guest memory.
+ * @param   pu256Value          Pointer to the value to store.
+ */
+IEM_STATIC VBOXSTRICTRC iemMemStoreDataU256AlignedAvx(PVMCPU pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value)
+{
+    /* The lazy approach for now... */
+    if (GCPtrMem & 31)
+        return iemRaiseGeneralProtectionFault0(pVCpu);
+
+    PRTUINT256U pu256Dst;
+    VBOXSTRICTRC rc = iemMemMap(pVCpu, (void **)&pu256Dst, sizeof(*pu256Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+    if (rc == VINF_SUCCESS)
+    {
+        pu256Dst->au64[0] = pu256Value->au64[0];
+        pu256Dst->au64[1] = pu256Value->au64[1];
+        pu256Dst->au64[2] = pu256Value->au64[2];
+        pu256Dst->au64[3] = pu256Value->au64[3];
+        rc = iemMemCommitAndUnmap(pVCpu, pu256Dst, IEM_ACCESS_DATA_W);
+    }
+    return rc;
+}
+
+
+#ifdef IEM_WITH_SETJMP
+/**
+ * Stores a data dqword, AVX aligned.
+ *
+ * @returns Strict VBox status code.
+ * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
+ * @param   iSegReg             The index of the segment register to use for
+ *                              this access.  The base and limits are checked.
+ * @param   GCPtrMem            The address of the guest memory.
+ * @param   pu256Value          Pointer to the value to store.
+ */
+DECL_NO_INLINE(IEM_STATIC, void)
+iemMemStoreDataU256AlignedAvxJmp(PVMCPU pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value)
+{
+    /* The lazy approach for now... */
+    if ((GCPtrMem & 31) == 0)
+    {
+        PRTUINT256U pu256Dst = (PRTUINT256U)iemMemMapJmp(pVCpu, sizeof(*pu256Dst), iSegReg, GCPtrMem, IEM_ACCESS_DATA_W);
+        pu256Dst->au64[0] = pu256Value->au64[0];
+        pu256Dst->au64[1] = pu256Value->au64[1];
+        pu256Dst->au64[2] = pu256Value->au64[2];
+        pu256Dst->au64[3] = pu256Value->au64[3];
+        iemMemCommitAndUnmapJmp(pVCpu, pu256Dst, IEM_ACCESS_DATA_W);
+        return;
+    }
+
+    VBOXSTRICTRC rcStrict = iemRaiseGeneralProtectionFault0(pVCpu);
+    longjmp(*pVCpu->iem.s.CTX_SUFF(pJmpBuf), VBOXSTRICTRC_VAL(rcStrict));
+}
+#endif
+
+
+/**
  * Stores a descriptor register (sgdt, sidt).
  *
...
 } while (0)
 
+#define IEM_MC_FETCH_YREG_U64(a_u64Dst, a_iYRegSrc) \
+    do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
+         uintptr_t const iYRegSrcTmp = (a_iYRegSrc); \
+         (a_u64Dst).au64[0] = pXStateTmp->x87.aXMM[iYRegSrcTmp].au64[0]; \
+    } while (0)
+#define IEM_MC_FETCH_YREG_U128(a_u128Dst, a_iYRegSrc) \
+    do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
+         uintptr_t const iYRegSrcTmp = (a_iYRegSrc); \
+         (a_u128Dst).au64[0] = pXStateTmp->x87.aXMM[iYRegSrcTmp].au64[0]; \
+         (a_u128Dst).au64[1] = pXStateTmp->x87.aXMM[iYRegSrcTmp].au64[1]; \
+    } while (0)
+#define IEM_MC_FETCH_YREG_U256(a_u256Dst, a_iYRegSrc) \
+    do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
+         uintptr_t const iYRegSrcTmp = (a_iYRegSrc); \
+         (a_u256Dst).au64[0] = pXStateTmp->x87.aXMM[iYRegSrcTmp].au64[0]; \
+         (a_u256Dst).au64[1] = pXStateTmp->x87.aXMM[iYRegSrcTmp].au64[1]; \
+         (a_u256Dst).au64[2] = pXStateTmp->u.YmmHi.aYmmHi[iYRegSrcTmp].au64[0]; \
+         (a_u256Dst).au64[3] = pXStateTmp->u.YmmHi.aYmmHi[iYRegSrcTmp].au64[1]; \
+    } while (0)
+
 #define IEM_MC_INT_CLEAR_ZMM_256_UP(a_pXState, a_iXRegDst) do { /* For AVX512 and AVX1024 support. */ } while (0)
 #define IEM_MC_STORE_YREG_U32_ZX_VLMAX(a_iYRegDst, a_u32Src) \
...
         IEM_MC_INT_CLEAR_ZMM_256_UP(pXStateTmp, a_iYRegDst); \
     } while (0)
+
 #define IEM_MC_COPY_YREG_U256_ZX_VLMAX(a_iYRegDst, a_iYRegSrc) \
     do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
...
         IEM_MC_INT_CLEAR_ZMM_256_UP(pXStateTmp, a_iYRegDst); \
     } while (0)
+
 #define IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(a_iYRegDst, a_iYRegSrc32, a_iYRegSrcHx) \
     do { PX86XSAVEAREA pXStateTmp = IEM_GET_CTX(pVCpu)->CTX_SUFF(pXState); \
...
 # define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Value) \
     iemMemStoreDataU128AlignedSseJmp(pVCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value))
+#endif
+
+#ifndef IEM_WITH_SETJMP
+# define IEM_MC_STORE_MEM_U256(a_iSeg, a_GCPtrMem, a_u256Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU256(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value)))
+# define IEM_MC_STORE_MEM_U256_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u256Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStoreDataU256AlignedSse(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value)))
+#else
+# define IEM_MC_STORE_MEM_U256(a_iSeg, a_GCPtrMem, a_u256Value) \
+    iemMemStoreDataU256Jmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
+# define IEM_MC_STORE_MEM_U256_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u256Value) \
+    iemMemStoreDataU256AlignedSseJmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
 #endif
 
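The two store flavours added above differ only in the 32-byte alignment check: iemMemStoreDataU256 accepts any address, while iemMemStoreDataU256AlignedAvx raises #GP(0) when (GCPtrMem & 31) != 0 before mapping guest memory. Below is a minimal caller-side sketch of the non-setjmp path; the function name iemExampleStoreYmm is made up for illustration and is not part of this changeset.

/* Illustration only: iemExampleStoreYmm is NOT part of r66909.  It shows how a
 * caller is expected to propagate the strict status code returned by the new
 * 256-bit store helpers in the non-setjmp (IEM_WITH_SETJMP undefined) build. */
IEM_STATIC VBOXSTRICTRC iemExampleStoreYmm(PVMCPU pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem,
                                           PCRTUINT256U puSrc, bool fAlignedOp)
{
    VBOXSTRICTRC rcStrict;
    if (fAlignedOp)
        /* Aligned variant: raises #GP(0) itself for addresses with (GCPtrMem & 31) != 0. */
        rcStrict = iemMemStoreDataU256AlignedAvx(pVCpu, iSegReg, GCPtrMem, puSrc);
    else
        /* Plain variant: any address is accepted; segment and paging checks still apply. */
        rcStrict = iemMemStoreDataU256(pVCpu, iSegReg, GCPtrMem, puSrc);
    return rcStrict; /* VINF_SUCCESS or an informational/error status to bubble up */
}

In the setjmp build the *Jmp variants replace this explicit propagation by longjmp-ing out on failure, which is why the IEM_MC_STORE_MEM_U256* macros in the same hunk expand differently depending on IEM_WITH_SETJMP.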
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
    (diff r66906 → r66909)

     'Ma':       ( 'IDX_UseModRM', 'rm', '%Ma', 'Ma', ),   ##< Only used by BOUND.
     'Mb_RO':    ( 'IDX_UseModRM', 'rm', '%Mb', 'Mb', ),
+    'Md':       ( 'IDX_UseModRM', 'rm', '%Md', 'Md', ),
     'Md_RO':    ( 'IDX_UseModRM', 'rm', '%Md', 'Md', ),
     'Md_WO':    ( 'IDX_UseModRM', 'rm', '%Md', 'Md', ),
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
    (diff r66906 → r66909)

  * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffea
  * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
- * @oponly
  */
 IEMOP_MNEMONIC3(VEX_RVM, VMOVSS, vmovss, Vss_WO, HdqCss, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
...
  * @optest      op1=1 op2=2 -> op1=2
  * @optest      op1=0 op2=-22 -> op1=-22
- * @oponly
  */
-        IEMOP_MNEMONIC2(VEX_XM, VMOVSS, vmovss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+        IEMOP_MNEMONIC2(VEX_XM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
         IEM_MC_BEGIN(0, 2);
         IEM_MC_LOCAL(uint32_t, uSrc);
...
  * @optest      op1=3 op2=-1 op3=0x77 ->
  *              op1=0xffffffffffffffff0000000000000077
- * @oponly
  */
 IEMOP_MNEMONIC3(VEX_RVM, VMOVSD, vmovsd, Vsd_WO, HdqCsd, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
...
  * @optest      op1=1 op2=2 -> op1=2
  * @optest      op1=0 op2=-22 -> op1=-22
- * @oponly
  */
 IEMOP_MNEMONIC2(VEX_XM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
...
 }
 
+
 
 /**
- * @opcode      0x11
- * @oppfx       none
- * @opcpuid     sse
- * @opgroup     og_sse_simdfp_datamove
- * @opxcpttype  4UA
- * @optest      op1=1 op2=2 -> op1=2
- * @optest      op1=0 op2=-42 -> op1=-42
+ * @opcode      0x11
+ * @oppfx       none
+ * @opcpuid     avx
+ * @opgroup     og_avx_simdfp_datamove
+ * @opxcpttype  4UA
+ * @optest      op1=1 op2=2 -> op1=2
+ * @optest      op1=0 op2=-22 -> op1=-22
+ * @oponly
  */
-FNIEMOP_STUB(iemOp_vmovups_Wps_Vps);
-//FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
-//{
-//    IEMOP_MNEMONIC2(MR, VMOVUPS, vmovups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_BEGIN(0, 0);
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
-//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Memory, register.
-//         */
-//        IEM_MC_BEGIN(0, 2);
-//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
-//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-//
-//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
+{
+    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
+    Assert(pVCpu->iem.s.uVexLength <= 1);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
+        if (pVCpu->iem.s.uVexLength == 0)
+            IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        else
+            IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else if (pVCpu->iem.s.uVexLength == 0)
+    {
+        /*
+         * 128-bit: Memory, register.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(RTUINT128U, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+        IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * 256-bit: Memory, register.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(RTUINT256U, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
+
+        IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
 
 
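The new iemOp_vmovups_Wps_Vps body pulls its register indices out of the ModRM byte with the X86_MODRM_* masks and widens them with the VEX.R/VEX.B derived uRexReg/uRexB bits. The standalone snippet below (plain C, not VBox code) only illustrates the underlying bit layout those expressions assume: bits 7:6 select the mod field, bits 5:3 the reg field and bits 2:0 the r/m field.

#include <stdint.h>
#include <stdio.h>

/* Standalone illustration of ModRM decoding; fRexR/fRexB stand in for the
 * VEX.R/VEX.B (or REX.R/REX.B) extension bits that widen reg and rm to 4 bits. */
static void DecodeModRM(uint8_t bRm, unsigned fRexR, unsigned fRexB)
{
    unsigned const iMod = bRm >> 6;                    /* 3 == register-direct operand form */
    unsigned const iReg = ((bRm >> 3) & 7) | (fRexR << 3);
    unsigned const iRm  = (bRm & 7)        | (fRexB << 3);
    printf("mod=%u reg=%u rm=%u\n", iMod, iReg, iRm);
}

int main(void)
{
    DecodeModRM(0xC1, 0, 0);   /* mod=3: register index 1 is the r/m operand, index 0 the reg operand */
    return 0;
}

With mod == 3 the r/m field names a register operand, which is why the decoder takes the register-register path for (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) and otherwise computes an effective address.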
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
    (diff r66906 → r66909)

 #define IEM_MC_COPY_XREG_U128(a_iXRegDst, a_iXRegSrc) do { (void)fSseWrite; } while (0)
 
+#define IEM_MC_FETCH_YREG_U256(a_u256Value, a_iYRegSrc) do { (a_u256Value).au64[0] = (a_u256Value).au64[1] = (a_u256Value).au64[2] = (a_u256Value).au64[3] = 0; CHK_TYPE(RTUINT256U, a_u256Value); (void)fAvxRead; } while (0)
+#define IEM_MC_FETCH_YREG_U128(a_u128Value, a_iYRegSrc) do { (a_u128Value).au64[0] = (a_u128Value).au64[1] = 0; CHK_TYPE(RTUINT128U, a_u128Value); (void)fAvxRead; } while (0)
+#define IEM_MC_FETCH_YREG_U64(a_u64Value, a_iYRegSrc) do { (a_u64Value) = UINT64_MAX; CHK_TYPE(uint64_t, a_u64Value); (void)fAvxRead; } while (0)
+#define IEM_MC_FETCH_YREG_U32(a_u32Value, a_iYRegSrc) do { (a_u32Value) = UINT32_MAX; CHK_TYPE(uint32_t, a_u32Value); (void)fAvxRead; } while (0)
 #define IEM_MC_STORE_YREG_U32_ZX_VLMAX(a_iYRegDst, a_u32Value) do { CHK_TYPE(uint32_t, a_u32Value); (void)fAvxWrite; } while (0)
 #define IEM_MC_STORE_YREG_U64_ZX_VLMAX(a_iYRegDst, a_u64Value) do { CHK_TYPE(uint64_t, a_u64Value); (void)fAvxWrite; } while (0)
...
 #define IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(a_pr64Dst) do { CHK_TYPE(PRTFLOAT64U, a_pr64Dst); } while (0)
 #define IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(a_pr80Dst) do { CHK_TYPE(PRTFLOAT80U, a_pr80Dst); } while (0)
-#define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Dst) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT128U, a_u128Dst); CHK_SEG_IDX(a_iSeg);} while (0)
-#define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Dst) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT128U, a_u128Dst); CHK_SEG_IDX(a_iSeg);} while (0)
+#define IEM_MC_STORE_MEM_U128(a_iSeg, a_GCPtrMem, a_u128Src) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT128U, a_u128Src); CHK_SEG_IDX(a_iSeg);} while (0)
+#define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Src) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT128U, a_u128Src); CHK_SEG_IDX(a_iSeg);} while (0)
+#define IEM_MC_STORE_MEM_U256(a_iSeg, a_GCPtrMem, a_u256Src) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT256U, a_u256Src); CHK_SEG_IDX(a_iSeg);} while (0)
+#define IEM_MC_STORE_MEM_U256_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u256Src) do { CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT256U, a_u256Src); CHK_SEG_IDX(a_iSeg);} while (0)
 
 #define IEM_MC_PUSH_U16(a_u16Value) do {} while (0)
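tstIEMCheckMc only has to keep the compiler happy so the IEM_MC_* usage in the instruction bodies can be type-checked without executing anything, which is why the new IEM_MC_FETCH_YREG_* and IEM_MC_STORE_MEM_U256* stubs reduce to CHK_TYPE/CHK_GCPTR/CHK_SEG_IDX invocations. The hunk does not show how CHK_TYPE is defined; the sketch below is a hypothetical illustration of that style of compile-time type assertion, not the actual VBox macro.

#include <stdint.h>

/* Hypothetical CHK_TYPE-style check (illustration only, not the tstIEMCheckMc
 * definition): binding the argument's address to a pointer of the expected type
 * makes the build break when an instruction body passes a value of the wrong
 * type to one of the IEM_MC_* stubs. */
#define MY_CHK_TYPE(a_ExpectedType, a_Param) \
    do { a_ExpectedType const *pCheckType = &(a_Param); (void)pCheckType; } while (0)

static void CheckYregFetchTyping(void)
{
    uint64_t u64Value = 0;
    MY_CHK_TYPE(uint64_t, u64Value);        /* accepted */
    /* MY_CHK_TYPE(uint32_t, u64Value);        rejected: incompatible pointer types */
}

int main(void)
{
    CheckYregFetchTyping();
    return 0;
}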
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1-template.c
    (diff r66906 → r66909)

 
 /**
+ * Encoder callback.
+ * @returns Next encoding.  If equal or less to @a iEncoding, no
+ *          further encodings are available for testing.
+ * @param   pThis       The state.
+ * @param   iEncoding   The encoding.
+ */
+typedef BS3_DECL_NEAR(unsigned) FNBS3CG1ENCODER(struct BS3CG1STATE *pThis, unsigned iEncoding);
+/** Pointer to a encoder callback. */
+typedef FNBS3CG1ENCODER *PFNBS3CG1ENCODER;
+
+
+/**
  * The state.
  */
...
     /** Opcode bytes. */
     uint8_t abOpcodes[4];
+    /** The instruction encoder. */
+    PFNBS3CG1ENCODER pfnEncoder;
 
     /** The length of the mnemonic. */
...
 
 
-static unsigned Bs3Cg1EncodeNext_VEX_MODRM_Vps_WO_Wps__OR__VEX_MODRM_Vpd_WO_Wpd(PBS3CG1STATE pThis, unsigned iEncoding)
+static unsigned BS3_NEAR_CODE Bs3Cg1EncodeNext_VEX_MODRM_Vps_WO_Wps__OR__VEX_MODRM_Vpd_WO_Wpd(PBS3CG1STATE pThis, unsigned iEncoding)
 {
     unsigned off;
...
 
 
-static unsigned Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_HdqCsomething_Usomething(PBS3CG1STATE pThis, unsigned iEncoding)
+static unsigned BS3_NEAR_CODE Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_HdqCsomething_Usomething(PBS3CG1STATE pThis, unsigned iEncoding)
 {
     unsigned off;
...
 
 
-static unsigned Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_Lig(PBS3CG1STATE pThis, unsigned iEncoding)
+static unsigned BS3_NEAR_CODE Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_Lig(PBS3CG1STATE pThis, unsigned iEncoding)
 {
     unsigned off;
...
 }
 
+static unsigned BS3_NEAR_CODE Bs3Cg1EncodeNext_VEX_MODRM_WsomethingWO_Vsomething_Wip(PBS3CG1STATE pThis, unsigned iEncoding)
+{
+    unsigned off;
+    switch (iEncoding)
+    {
+        /* 128-bit wide stuff goes first, then we'll update the operand widths afterwards. */
+        case 0:
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 0);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 0;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 1;
+            break;
+
+        case 1:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 5;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 4;
+            break;
+        case 2:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 1 /*W - ignored*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 5, 4);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 4;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 5;
+            break;
+        case 3:
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 2 /*iReg*/, 16, 0, BS3CG1OPLOC_MEM_WO);
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 2;
+            break;
+        case 4:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 16, 0, BS3CG1OPLOC_MEM_WO);
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 3;
+            break;
+        case 5:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 1 /*W - ignored */);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 16, 0, BS3CG1OPLOC_MEM_WO);
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 3;
+            break;
+        case 6:
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 16, 1 /*cbMissalign*/, BS3CG1OPLOC_MEM_WO);
+            if (!Bs3Cg1XcptTypeIsUnaligned(pThis->enmXcptType))
+                pThis->bAlignmentXcpt = X86_XCPT_GP;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 3;
+            break;
+        case 7:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 16, 1 /*cbMissalign*/, BS3CG1OPLOC_MEM_WO);
+            if (!Bs3Cg1XcptTypeIsUnaligned(pThis->enmXcptType))
+                pThis->bAlignmentXcpt = X86_XCPT_GP;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 3;
+            break;
+        /* 128-bit invalid encodings: */
+        case 8:
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xe /*~V*/, 0 /*L*/, 1 /*~R*/); /* Bad V value */
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 0);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 0;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 1;
+            pThis->aOperands[pThis->iRmOp ].enmLocation = BS3CG1OPLOC_CTX_ZX_VLMAX;
+            pThis->fInvalidEncoding = true;
+            break;
+        case 9:
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0 /*~V*/, 0 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 5;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 4;
+            pThis->fInvalidEncoding = true;
+            iEncoding = 20-1;
+            break;
+
+        case 10: case 11: case 12: case 13: case 14: case 15: case 16: case 17: case 18: case 19:
+            /* fall thru */
+
+        /* 256-bit encodings: */
+        case 20:
+            iEncoding = 20;
+            pThis->aOperands[pThis->iRmOp ].cbOp = 32;
+            pThis->aOperands[pThis->iRmOp ].idxFieldBase = BS3CG1DST_YMM0;
+            pThis->aOperands[pThis->iRmOp ].enmLocation = BS3CG1OPLOC_CTX_ZX_VLMAX;
+            pThis->aOperands[pThis->iRegOp].cbOp = 32;
+            pThis->aOperands[pThis->iRegOp].idxFieldBase = BS3CG1DST_YMM0;
+
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 0);
+            pThis->aOperands[pThis->iRmOp ].idxField = pThis->aOperands[pThis->iRmOp ].idxFieldBase + 0;
+            pThis->aOperands[pThis->iRegOp].idxField = pThis->aOperands[pThis->iRegOp].idxFieldBase + 1;
+            break;
+
+#if 0
+    else if (iEncoding == 10)
+    {
+        off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 0);
+        pThis->aOperands[pThis->iRmOp].cbOp = 32;
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM0;
+        pThis->aOperands[pThis->iRegOp].cbOp = 32;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM1;
+    }
+    else if (iEncoding == 11)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+    }
+    else if (iEncoding == 12)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 1 /*W - ignored*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 5, 4);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM4;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM5;
+    }
+    else if (iEncoding == 13)
+    {
+        off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 2 /*iReg*/, 32, 0, BS3CG1OPLOC_MEM);
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM2;
+    }
+    else if (iEncoding == 14)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 32, 0, BS3CG1OPLOC_MEM);
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM3;
+    }
+    else if (iEncoding == 15)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 1 /*W - ignored */);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 32, 0, BS3CG1OPLOC_MEM);
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM3;
+    }
+    else if (iEncoding == 16)
+    {
+        off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 32, 1 /*cbMissalign*/, BS3CG1OPLOC_MEM);
+        if (!Bs3Cg1XcptTypeIsUnaligned(pThis->enmXcptType))
+            pThis->bAlignmentXcpt = X86_XCPT_GP;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM3;
+    }
+    else if (iEncoding == 17)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        off = Bs3Cfg1EncodeMemMod0Disp(pThis, false, off, 3 /*iReg*/, 32, 1 /*cbMissalign*/, BS3CG1OPLOC_MEM);
+        if (!Bs3Cg1XcptTypeIsUnaligned(pThis->enmXcptType))
+            pThis->bAlignmentXcpt = X86_XCPT_GP;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM3;
+    }
+    /* 256-bit invalid encodings: */
+    else if (iEncoding == 18)
+    {
+        off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xe /*~V - invalid */, 1 /*L*/, 1 /*~R*/); /* Bad V value */
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 0);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM0;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM1;
+        pThis->fInvalidEncoding = true;
+    }
+    else if (iEncoding == 19)
+    {
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0 /*~V - invalid */, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+        pThis->fInvalidEncoding = true;
+    }
+    else if (iEncoding == 20)
+    {
+        pThis->abCurInstr[0] = P_RN;
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 1 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+        pThis->fInvalidEncoding = true;
+    }
+    else if (iEncoding == 21)
+    {
+        pThis->abCurInstr[0] = P_RZ;
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 1 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+        pThis->fInvalidEncoding = true;
+    }
+    else if (iEncoding == 22)
+    {
+        pThis->abCurInstr[0] = P_OZ;
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 1 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+        pThis->fInvalidEncoding = true;
+    }
+    else if (iEncoding == 23)
+    {
+        pThis->abCurInstr[0] = P_LK;
+        off = Bs3Cg1InsertVex3bPrefix(pThis, 1 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 1 /*~R*/, 1 /*~X*/, 1 /*~B*/, 0 /*W*/);
+        off = Bs3Cg1InsertOpcodes(pThis, off);
+        pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 4, 5);
+        pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM5;
+        pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM4;
+        pThis->fInvalidEncoding = true;
+    }
+#if ARCH_BITS == 64
+    /* 64-bit mode registers */
+    else if (BS3_MODE_IS_64BIT_CODE(pThis->bMode))
+    {
+        if (iEncoding == 24)
+        {
+            off = Bs3Cg1InsertVex2bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 0 /*~R*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 3, 4);
+            pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM4;
+            pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM11;
+        }
+        else if (iEncoding == 25)
+        {
+            off = Bs3Cg1InsertVex3bPrefix(pThis, 0 /*offDst*/, 0xf /*~V*/, 1 /*L*/, 0 /*~R*/, 1 /*~X*/, 0 /*~B*/, 0 /*W*/);
+            off = Bs3Cg1InsertOpcodes(pThis, off);
+            pThis->abCurInstr[off++] = X86_MODRM_MAKE(3, 1, 4);
+            pThis->aOperands[pThis->iRmOp ].idxField = BS3CG1DST_YMM12;
+            pThis->aOperands[pThis->iRegOp].idxField = BS3CG1DST_YMM9;
+        }
+        else
+            return 0;
+    }
+#endif
+    else
+        return 0;
+#endif
+
+        default:
+            return 0;
+    }
+
+    pThis->cbCurInstr = off;
+    return iEncoding + 1;
+}
+
+
 #endif /* BS3CG1_WITH_VEX */
 
...
 {
     pThis->bAlignmentXcpt = UINT8_MAX;
+    if (pThis->pfnEncoder)
+        return pThis->pfnEncoder(pThis, iEncoding);
 
     switch (pThis->enmEncoding)
...
          */
 #ifdef BS3CG1_WITH_VEX
-        case BS3CG1ENC_VEX_MODRM_Vps_WO_Wps:
-        case BS3CG1ENC_VEX_MODRM_Vpd_WO_Wpd:
-            return Bs3Cg1EncodeNext_VEX_MODRM_Vps_WO_Wps__OR__VEX_MODRM_Vpd_WO_Wpd(pThis, iEncoding);
-
-        case BS3CG1ENC_VEX_MODRM_Vss_WO_HdqCss_Uss:
-        case BS3CG1ENC_VEX_MODRM_Vsd_WO_HdqCsd_Usd:
-            return Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_HdqCsomething_Usomething(pThis, iEncoding);
-
-        case BS3CG1ENC_VEX_MODRM_VsdZx_WO_Mq:
-        case BS3CG1ENC_VEX_MODRM_VssZx_WO_Wss:
+        case BS3CG1ENC_VEX_MODRM_VssZx_WO_Md:
             return Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_Lig(pThis, iEncoding);
 
...
     pThis->fSameRingNotOkay = false;
     pThis->cbOperand = 0;
+    pThis->pfnEncoder = NULL;
 
     switch (pThis->enmEncoding)
...
         case BS3CG1ENC_VEX_MODRM_Vps_WO_Wps:
         case BS3CG1ENC_VEX_MODRM_Vpd_WO_Wpd:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_Vps_WO_Wps__OR__VEX_MODRM_Vpd_WO_Wpd;
             pThis->iRmOp = 1;
             pThis->iRegOp = 0;
...
             break;
 
-        case BS3CG1ENC_VEX_MODRM_VssZx_WO_Wss:
+        case BS3CG1ENC_VEX_MODRM_VssZx_WO_Md:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_Lig;
             pThis->iRmOp = 1;
             pThis->iRegOp = 0;
...
 
         case BS3CG1ENC_VEX_MODRM_Vss_WO_HdqCss_Uss:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_HdqCsomething_Usomething;
             pThis->iRegOp = 0;
             pThis->iRmOp = 2;
...
 
         case BS3CG1ENC_VEX_MODRM_VsdZx_WO_Mq:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_Msomething_Wip_Lig;
             pThis->iRmOp = 1;
             pThis->iRegOp = 0;
...
 
         case BS3CG1ENC_VEX_MODRM_Vsd_WO_HdqCsd_Usd:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_VsomethingWO_HdqCsomething_Usomething;
             pThis->iRegOp = 0;
             pThis->iRmOp = 2;
...
             pThis->aOperands[1].idxFieldBase = BS3CG1DST_XMM0;
             pThis->aOperands[2].idxFieldBase = BS3CG1DST_XMM0_LO;
+            break;
+
+        case BS3CG1ENC_VEX_MODRM_Wps_WO_Vps:
+            pThis->pfnEncoder = Bs3Cg1EncodeNext_VEX_MODRM_WsomethingWO_Vsomething_Wip;
+            pThis->iRmOp = 0;
+            pThis->iRegOp = 1;
+            pThis->aOperands[0].cbOp = 16;
+            pThis->aOperands[1].cbOp = 16;
+            pThis->aOperands[0].enmLocation = BS3CG1OPLOC_CTX_ZX_VLMAX;
+            pThis->aOperands[1].enmLocation = BS3CG1OPLOC_CTX;
+            pThis->aOperands[0].idxFieldBase = BS3CG1DST_XMM0;
+            pThis->aOperands[1].idxFieldBase = BS3CG1DST_XMM0;
             break;
 
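Structurally, the interesting part of this file's change is the new pfnEncoder member: the per-instruction setup switch now records which encoder to use, and Bs3Cg1EncodeNext() calls that pointer first, falling back to the old enmEncoding switch only when it is NULL. The sketch below shows that callback-first dispatch pattern in isolation; the type and function names (State, EncoderFn, EncodeNext) are invented for illustration and are not part of the BS3 kit.

/* Callback-first dispatch, sketched with made-up names. */
typedef struct State State;
typedef unsigned EncoderFn(State *pThis, unsigned iEncoding);

struct State
{
    EncoderFn *pfnEncoder;   /* filled in by the per-instruction setup, otherwise NULL */
    int        enmEncoding;  /* key for the legacy switch, still used as the fallback  */
};

unsigned EncodeNext(State *pThis, unsigned iEncoding)
{
    if (pThis->pfnEncoder)                      /* new path: instruction-specific encoder  */
        return pThis->pfnEncoder(pThis, iEncoding);
    switch (pThis->enmEncoding)                 /* old path: one big central switch        */
    {
        default:
            return 0;                           /* 0 means: no more encodings to test      */
    }
}

Returning a value less than or equal to the incoming iEncoding signals "no further encodings", matching the contract documented on the new FNBS3CG1ENCODER typedef.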
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-generated-1.h
    (diff r66906 → r66909)

     BS3CG1OP_Ma,
     BS3CG1OP_Mb_RO,
+    BS3CG1OP_Md,
     BS3CG1OP_Md_RO,
     BS3CG1OP_Md_WO,
...
     BS3CG1ENC_VEX_MODRM_Vpd_WO_Wpd,
     BS3CG1ENC_VEX_MODRM_Vss_WO_HdqCss_Uss,
-    BS3CG1ENC_VEX_MODRM_VssZx_WO_Wss,
     BS3CG1ENC_VEX_MODRM_Vsd_WO_HdqCsd_Usd,
+    BS3CG1ENC_VEX_MODRM_VssZx_WO_Md,
     BS3CG1ENC_VEX_MODRM_VsdZx_WO_Mq,
     BS3CG1ENC_VEX_MODRM_Md_WO,
+    BS3CG1ENC_VEX_MODRM_Wps_WO_Vps,
 
     BS3CG1ENC_FIXED,