- Timestamp: 2024-01-19 11:11:30 PM (13 months ago)
- svn:sync-xref-src-repo-rev: 161209
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r102904 → r102977

New native helpers are added that map guest data for atomic read-write access with segmentation, one per operand width, following the existing Rw/Wo/Ro helper pattern. The U8 variant is shown in full; the U16/U32/U64/U128 variants (iemNativeHlpMemMapDataU16Atomic, ...U32Atomic, ...U64Atomic, ...U128Atomic) differ only in the returned pointer type (uint16_t *, uint32_t *, uint64_t *, RTUINT128U *) and the iemMemMapDataU*At(Safe)Jmp worker they forward to:

    /**
     * Used by TB code to map unsigned 8-bit data for atomic read-write w/
     * segmentation.
     */
    IEM_DECL_NATIVE_HLP_DEF(uint8_t *, iemNativeHlpMemMapDataU8Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo,
                                                                       RTGCPTR GCPtrMem, uint8_t iSegReg))
    {
    #ifdef IEMNATIVE_WITH_TLB_LOOKUP_MAPPED
        return iemMemMapDataU8AtSafeJmp(pVCpu, pbUnmapInfo, iSegReg, GCPtrMem);
    #else
        return iemMemMapDataU8AtJmp(pVCpu, pbUnmapInfo, iSegReg, GCPtrMem);
    #endif
    }

Matching flat-address helpers are added as well (iemNativeHlpMemFlatMapDataU8Atomic, ...U16Atomic, ...U32Atomic, ...U64Atomic, ...U128Atomic). These take no segment register; with IEMNATIVE_WITH_TLB_LOOKUP_MAPPED they call the segmented safe worker with UINT8_MAX as the segment, otherwise the flat worker:

    /**
     * Used by TB code to map unsigned 8-bit data for atomic read-write w/ flat
     * address.
     */
    IEM_DECL_NATIVE_HLP_DEF(uint8_t *, iemNativeHlpMemFlatMapDataU8Atomic,(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, RTGCPTR GCPtrMem))
    {
    #ifdef IEMNATIVE_WITH_TLB_LOOKUP_MAPPED
        return iemMemMapDataU8AtSafeJmp(pVCpu, pbUnmapInfo, UINT8_MAX, GCPtrMem);
    #else
        return iemMemFlatMapDataU8AtJmp(pVCpu, pbUnmapInfo, GCPtrMem);
    #endif
    }
In the "Helpers: Commit, rollback & unmap" section, a helper for committing and unmapping atomic read-write mappings is added alongside the existing Rw/Wo/Ro ones:

    /**
     * Used by TB code to commit and unmap a read-write memory mapping.
     */
    IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemCommitAndUnmapAtomic,(PVMCPUCC pVCpu, uint8_t bUnmapInfo))
    {
        return iemMemCommitAndUnmapAtSafeJmp(pVCpu, bUnmapInfo);
    }
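For orientation, the calling pattern these helpers implement looks roughly like the sketch below. This is a conceptual illustration only, not code from the changeset: real translation blocks reach the helpers through emitted call stubs, and on faults the helpers longjmp out rather than return an error. The variable names (pVCpu, GCPtrMem, ...) are placeholders; the signatures match the definitions above.

    /* Conceptual sketch: atomically update a flat 32-bit guest operand. */
    uint8_t   bUnmapInfo = 0;
    uint32_t *pu32 = iemNativeHlpMemFlatMapDataU32Atomic(pVCpu, &bUnmapInfo, GCPtrMem); /* map for atomic RW */
    *pu32 += 1;                                              /* guest read-modify-write through the mapping   */
    iemNativeHlpMemCommitAndUnmapAtomic(pVCpu, bUnmapInfo);  /* commit the change and drop the mapping        */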
In the MC-block emitter macros for memory mapping, an *_ATOMIC variant is added for each unsigned width with segmented addressing. The new macros pass IEM_ACCESS_DATA_ATOMIC and the corresponding atomic helper to iemNativeEmitMemMapCommon:

    #define IEM_MC_MEM_MAP_U8_ATOMIC(a_pu8Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu8Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem, sizeof(uint8_t), \
                                        IEM_ACCESS_DATA_ATOMIC, 0 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemMapDataU8Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_MAP_U16_ATOMIC(a_pu16Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu16Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem, sizeof(uint16_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint16_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemMapDataU16Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_MAP_U32_ATOMIC(a_pu32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem, sizeof(uint32_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint32_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemMapDataU32Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_MAP_U64_ATOMIC(a_pu64Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu64Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem, sizeof(uint64_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint64_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemMapDataU64Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_MAP_U128_ATOMIC(a_pu128Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu128Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem, sizeof(RTUINT128U), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(RTUINT128U) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemMapDataU128Atomic, pCallEntry->idxInstr)

In the existing IEM_MC_MEM_MAP_U{8,16,32,64,128}_{RW,WO,RO}, IEM_MC_MEM_MAP_I{16,32,64}_WO, IEM_MC_MEM_MAP_R80_WO and IEM_MC_MEM_MAP_D80_WO macros only the access-mode argument changes, from the combined type flags to the data-access shorthands:

    IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE  ->  IEM_ACCESS_DATA_RW
    IEM_ACCESS_TYPE_WRITE                         ->  IEM_ACCESS_DATA_W
    IEM_ACCESS_TYPE_READ                          ->  IEM_ACCESS_DATA_R
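The IEM_ACCESS_DATA_* shorthands themselves are defined elsewhere and are not part of this changeset. Judging by the strict checks further down, which mask fAccess with IEM_ACCESS_TYPE_MASK | IEM_ACCESS_ATOMIC, they presumably combine the what/type/atomic bits roughly as sketched below; treat these definitions as assumptions for illustration rather than the actual header contents:

    /* Assumed composition of the shorthands (illustrative, not from this changeset): */
    #define IEM_ACCESS_DATA_R       (IEM_ACCESS_WHAT_DATA | IEM_ACCESS_TYPE_READ)
    #define IEM_ACCESS_DATA_W       (IEM_ACCESS_WHAT_DATA | IEM_ACCESS_TYPE_WRITE)
    #define IEM_ACCESS_DATA_RW      (IEM_ACCESS_WHAT_DATA | IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE)
    #define IEM_ACCESS_DATA_ATOMIC  (IEM_ACCESS_WHAT_DATA | IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_ATOMIC)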
The flat-address mapping macros get the same treatment: new IEM_MC_MEM_FLAT_MAP_U{8,16,32,64,128}_ATOMIC variants that pass IEM_ACCESS_DATA_ATOMIC and the iemNativeHlpMemFlatMapDataU*Atomic helpers, plus the same IEM_ACCESS_DATA_RW/_W/_R substitution in the existing _RW/_WO/_RO and _I16/_I32/_I64/_R80/_D80 _WO variants:

    #define IEM_MC_MEM_FLAT_MAP_U8_ATOMIC(a_pu8Mem, a_bUnmapInfo, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu8Mem, a_bUnmapInfo, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t), \
                                        IEM_ACCESS_DATA_ATOMIC, 0 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemFlatMapDataU8Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_FLAT_MAP_U16_ATOMIC(a_pu16Mem, a_bUnmapInfo, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu16Mem, a_bUnmapInfo, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint16_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemFlatMapDataU16Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_FLAT_MAP_U32_ATOMIC(a_pu32Mem, a_bUnmapInfo, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu32Mem, a_bUnmapInfo, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint32_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemFlatMapDataU32Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_FLAT_MAP_U64_ATOMIC(a_pu64Mem, a_bUnmapInfo, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu64Mem, a_bUnmapInfo, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(uint64_t) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemFlatMapDataU64Atomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_FLAT_MAP_U128_ATOMIC(a_pu128Mem, a_bUnmapInfo, a_GCPtrMem) \
        off = iemNativeEmitMemMapCommon(pReNative, off, a_pu128Mem, a_bUnmapInfo, UINT8_MAX, a_GCPtrMem, sizeof(RTUINT128U), \
                                        IEM_ACCESS_DATA_ATOMIC, sizeof(RTUINT128U) - 1 /*fAlignMask*/, \
                                        (uintptr_t)iemNativeHlpMemFlatMapDataU128Atomic, pCallEntry->idxInstr)

In iemNativeEmitMemMapCommon's VBOX_STRICT block, the helper-function check is split so that atomic accesses resolve to the new *Atomic helpers, while the still-disabled (#if 0) 256/512-bit cases keep the non-atomic selection via a new IEM_MAP_HLP_FN_NO_AT macro (both macros are #undef'ed again at the end of the block):

    #ifdef VBOX_STRICT
    # define IEM_MAP_HLP_FN_NO_AT(a_fAccess, a_fnBase) \
        (   ((a_fAccess) & (IEM_ACCESS_TYPE_MASK | IEM_ACCESS_ATOMIC)) == (IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_TYPE_READ) \
         ?  (uintptr_t)RT_CONCAT(a_fnBase,Rw) \
         :  ((a_fAccess) & (IEM_ACCESS_TYPE_MASK | IEM_ACCESS_ATOMIC)) == IEM_ACCESS_TYPE_READ \
         ?  (uintptr_t)RT_CONCAT(a_fnBase,Ro) : (uintptr_t)RT_CONCAT(a_fnBase,Wo) )
    # define IEM_MAP_HLP_FN(a_fAccess, a_fnBase) \
        (   ((a_fAccess) & (IEM_ACCESS_TYPE_MASK | IEM_ACCESS_ATOMIC)) == (IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_TYPE_READ | IEM_ACCESS_ATOMIC) \
         ?  (uintptr_t)RT_CONCAT(a_fnBase,Atomic) \
         :  IEM_MAP_HLP_FN_NO_AT(a_fAccess, a_fnBase) )

The #if 0'ed "case 32:" and "case 64:" branches in both the flat and the segmented switch now assert against IEM_MAP_HLP_FN_NO_AT(fAccess, iemNativeHlpMem[Flat]MapDataU256/U512) instead of IEM_MAP_HLP_FN.
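As an illustration (not part of the changeset), for an atomic 32-bit flat mapping the strict check now resolves to the atomic helper, while a plain read-write access still resolves to the Rw one; this mirrors the "case 4:" branch of the flat switch:

    /* Illustrative only; same selection the strict switch performs for 4-byte flat mappings. */
    Assert(IEM_MAP_HLP_FN(IEM_ACCESS_DATA_ATOMIC, iemNativeHlpMemFlatMapDataU32) == (uintptr_t)iemNativeHlpMemFlatMapDataU32Atomic);
    Assert(IEM_MAP_HLP_FN(IEM_ACCESS_DATA_RW,     iemNativeHlpMemFlatMapDataU32) == (uintptr_t)iemNativeHlpMemFlatMapDataU32Rw);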
In the commit & unmap emitter section, a new IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC macro is added next to the _RW/_WO/_RO ones (which likewise switch to the IEM_ACCESS_DATA_* shorthands), and the strict check in iemNativeEmitMemCommitAndUnmap learns about the atomic case:

    #define IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(a_bMapInfo) \
        off = iemNativeEmitMemCommitAndUnmap(pReNative, off, (a_bMapInfo), IEM_ACCESS_DATA_ATOMIC, \
                                             (uintptr_t)iemNativeHlpMemCommitAndUnmapAtomic, pCallEntry->idxInstr)

    #define IEM_MC_MEM_COMMIT_AND_UNMAP_RW(a_bMapInfo) \
        off = iemNativeEmitMemCommitAndUnmap(pReNative, off, (a_bMapInfo), IEM_ACCESS_DATA_RW, \
                                             (uintptr_t)iemNativeHlpMemCommitAndUnmapRw, pCallEntry->idxInstr)

    #define IEM_MC_MEM_COMMIT_AND_UNMAP_WO(a_bMapInfo) \
        off = iemNativeEmitMemCommitAndUnmap(pReNative, off, (a_bMapInfo), IEM_ACCESS_DATA_W, \
                                             (uintptr_t)iemNativeHlpMemCommitAndUnmapWo, pCallEntry->idxInstr)

    #define IEM_MC_MEM_COMMIT_AND_UNMAP_RO(a_bMapInfo) \
        off = iemNativeEmitMemCommitAndUnmap(pReNative, off, (a_bMapInfo), IEM_ACCESS_DATA_R, \
                                             (uintptr_t)iemNativeHlpMemCommitAndUnmapRo, pCallEntry->idxInstr)

    /* In iemNativeEmitMemCommitAndUnmap, VBOX_STRICT: */
    switch (fAccess & (IEM_ACCESS_TYPE_MASK | IEM_ACCESS_ATOMIC))
    {
        case IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_ATOMIC:
            Assert(pfnFunction == (uintptr_t)iemNativeHlpMemCommitAndUnmapAtomic); break;
        case IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE:
            Assert(pfnFunction == (uintptr_t)iemNativeHlpMemCommitAndUnmapRw); break;
        case IEM_ACCESS_TYPE_WRITE:
            Assert(pfnFunction == (uintptr_t)iemNativeHlpMemCommitAndUnmapWo); break;
        case IEM_ACCESS_TYPE_READ:
            Assert(pfnFunction == (uintptr_t)iemNativeHlpMemCommitAndUnmapRo); break;
        default: AssertFailed();
    }
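Taken together, at the MC-macro level an instruction body can now express a locked read-modify-write as a map/commit pair. Schematically (the surrounding MC-block plumbing is omitted and the variable names are placeholders, so this is a sketch rather than code from the tree):

    /* Inside an instruction's MC block (schematic): */
    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, iEffSeg, GCPtrEffDst);  /* map the operand for atomic RW */
    /* ... perform the read-modify-write through *pu32Dst ... */
    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo);                        /* commit and unmap              */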