mirror of https://github.com/PCSX2/pcsx2.git
x86emitter: removed implicit pointer dereferencing on 'indirect' operand types, and implicit uptr addressing on ptr[]; avoids some obscure pitfalls and might speed up release mode builds (LTCG).
git-svn-id: http://pcsx2.googlecode.com/svn/trunk@3158 96395faa-99c1-11dd-bbfe-3dabce05a288
This commit is contained in:
parent 492488a15d
commit a7bb875e18
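In practice the change means call sites must spell out the indirection the emitter used to infer. A minimal before/after sketch (xMOV, ptr, and ptr32 are the emitter's real forms, as seen in the diff below; someGlobal and someAddr are made-up names for illustration):

    // Before: raw pointers and bare uptr values were accepted as memory
    // operands and dereferenced implicitly.
    xMOV( eax, &someGlobal );        // silently treated as a load from someGlobal
    xMOV( eax, ptr32[someAddr] );    // a bare uptr was taken as an absolute address

    // After: the indirection is explicit, so a value can no longer be
    // mistaken for an address without a visible ptr[] and cast.
    xMOV( eax, ptr[&someGlobal] );
    xMOV( eax, ptr32[(u32*)someAddr] );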
@@ -646,16 +646,6 @@ template< typename T > void xWrite( T val );
 			// no reduction necessary :D
 		}

-		ModSibBase( const void* target )
-		{
-			Base = xEmptyReg;
-			Index = xEmptyReg;
-			Scale = 0;
-			Displacement= (s32)target;
-
-			// no reduction necessary :D
-		}
-
 		virtual uint GetOperandSize() const { pxFail( "Invalid operation on ModSibBase" ); return 0; }
 		bool IsByteSizeDisp() const { return is_s8( Displacement ); }

@@ -682,7 +672,6 @@ template< typename T > void xWrite( T val );
 	protected:
 		explicit ModSib32orLess( const xAddressInfo& src ) : _parent( src ) {}
 		explicit ModSib32orLess( s32 disp ) : _parent( disp ) {}
-		ModSib32orLess( const void* target ) : _parent( target ) {}
 		ModSib32orLess( xAddressReg base, xAddressReg index, int scale=0, s32 displacement=0 ) :
 			_parent( base, index, scale, displacement ) {}
 	};

@@ -701,7 +690,6 @@ template< typename T > void xWrite( T val );
 	public: \
 		explicit ModSib##bits( const xAddressInfo& src ) : _parent( src ) {} \
 		explicit ModSib##bits( s32 disp ) : _parent( disp ) {} \
-		ModSib##bits( const u##bits* target ) : _parent( target ) {} \
 		ModSib##bits( xAddressReg base, xAddressReg index, int scale=0, s32 displacement=0 ) : \
 			_parent( base, index, scale, displacement ) {} \
 \

@@ -758,11 +746,6 @@ template< typename T > void xWrite( T val );
 			return xModSibType( src );
 		}

-		xModSibType operator[]( uptr src ) const
-		{
-			return xModSibType( src );
-		}
-
 		xModSibType operator[]( const void* src ) const
 		{
 			return xModSibType( (uptr)src );
@@ -62,7 +62,7 @@ void x86capabilities::SIMD_EstablishMXCSRmask()
 	HostSys::MemProtectStatic( recSSE, Protect_ReadWrite, true );

 	xSetPtr( recSSE );
-	xFXSAVE( targetFXSAVE );
+	xFXSAVE( ptr[&targetFXSAVE] );
 	xRET();

 	HostSys::MemProtectStatic( recSSE, Protect_ReadOnly, true );
@@ -89,9 +89,9 @@ static ModSib8 _mhlp8( x86IntRegType to1, x86IntRegType to2 )
 #define DEFINE_LEGACY_HELPER( cod, bits ) \
 	emitterT void cod##bits##RtoR( x86IntRegType to, x86IntRegType from ) { x##cod( xRegister##bits(to), xRegister##bits(from) ); } \
 	emitterT void cod##bits##ItoR( x86IntRegType to, u##bits imm ) { x##cod( xRegister##bits(to), imm ); } \
-	emitterT void cod##bits##MtoR( x86IntRegType to, uptr from ) { x##cod( xRegister##bits(to), (void*)from ); } \
-	emitterT void cod##bits##RtoM( uptr to, x86IntRegType from ) { x##cod( (void*)to, xRegister##bits(from) ); } \
-	emitterT void cod##bits##ItoM( uptr to, u##bits imm ) { x##cod( ptr##bits[to], imm ); } \
+	emitterT void cod##bits##MtoR( x86IntRegType to, uptr from ) { x##cod( xRegister##bits(to), ptr[(void*)from] ); } \
+	emitterT void cod##bits##RtoM( uptr to, x86IntRegType from ) { x##cod( ptr[(void*)to], xRegister##bits(from) ); } \
+	emitterT void cod##bits##ItoM( uptr to, u##bits imm ) { x##cod( ptr##bits[(u##bits*)to], imm ); } \
 	emitterT void cod##bits##ItoRm( x86IntRegType to, u##bits imm, int offset ) { x##cod( _mhlp##bits(to) + offset, imm ); } \
 	emitterT void cod##bits##RmtoR( x86IntRegType to, x86IntRegType from, int offset ) { x##cod( xRegister##bits(to), _mhlp##bits(from) + offset ); } \
 	emitterT void cod##bits##RtoRm( x86IntRegType to, x86IntRegType from, int offset ) { x##cod( _mhlp##bits(to) + offset, xRegister##bits(from) ); } \

@@ -103,14 +103,14 @@ static ModSib8 _mhlp8( x86IntRegType to1, x86IntRegType to2 )
 #define DEFINE_LEGACY_SHIFT_HELPER( cod, bits ) \
 	emitterT void cod##bits##CLtoR( x86IntRegType to ) { x##cod( xRegister##bits(to), cl ); } \
 	emitterT void cod##bits##ItoR( x86IntRegType to, u8 imm ) { x##cod( xRegister##bits(to), imm ); } \
-	emitterT void cod##bits##CLtoM( uptr to ) { x##cod( ptr##bits[to], cl ); } \
-	emitterT void cod##bits##ItoM( uptr to, u8 imm ) { x##cod( ptr##bits[to], imm ); } \
+	emitterT void cod##bits##CLtoM( uptr to ) { x##cod( ptr##bits[(u##bits*)to], cl ); } \
+	emitterT void cod##bits##ItoM( uptr to, u8 imm ) { x##cod( ptr##bits[(u##bits*)to], imm ); } \
 	emitterT void cod##bits##ItoRm( x86IntRegType to, u8 imm, int offset ) { x##cod( _mhlp##bits(to) + offset, imm ); } \
 	emitterT void cod##bits##CLtoRm( x86IntRegType to, int offset ) { x##cod( _mhlp##bits(to) + offset, cl ); }

 #define DEFINE_LEGACY_ONEREG_HELPER( cod, bits ) \
 	emitterT void cod##bits##R( x86IntRegType to ) { x##cod( xRegister##bits(to) ); } \
-	emitterT void cod##bits##M( uptr to ) { x##cod( ptr##bits[to] ); } \
+	emitterT void cod##bits##M( uptr to ) { x##cod( ptr##bits[(u##bits*)to] ); } \
 	emitterT void cod##bits##Rm( x86IntRegType to, uptr offset ) { x##cod( _mhlp##bits(to) + offset ); }

 #define DEFINE_OPCODE_LEGACY( cod ) \

@@ -159,7 +159,7 @@ DEFINE_OPCODE_ONEREG_LEGACY( NEG )
 #define DEFINE_LEGACY_MOVEXTEND( form, destbits, srcbits ) \
 	emitterT void MOV##form##destbits##R##srcbits##toR( x86IntRegType to, x86IntRegType from ) { xMOV##form( xRegister##destbits( to ), xRegister##srcbits( from ) ); } \
 	emitterT void MOV##form##destbits##Rm##srcbits##toR( x86IntRegType to, x86IntRegType from, int offset ) { xMOV##form( xRegister##destbits( to ), ptr##srcbits[xAddressReg( from ) + offset] ); } \
-	emitterT void MOV##form##destbits##M##srcbits##toR( x86IntRegType to, u32 from ) { xMOV##form( xRegister##destbits( to ), ptr##srcbits[from] ); }
+	emitterT void MOV##form##destbits##M##srcbits##toR( x86IntRegType to, u32 from ) { xMOV##form( xRegister##destbits( to ), ptr##srcbits[(u##srcbits*)from] ); }

 DEFINE_LEGACY_MOVEXTEND( SX, 32, 16 )
 DEFINE_LEGACY_MOVEXTEND( ZX, 32, 16 )

@@ -170,17 +170,17 @@ DEFINE_LEGACY_MOVEXTEND( SX, 16, 8 )
 DEFINE_LEGACY_MOVEXTEND( ZX, 16, 8 )

 emitterT void TEST32ItoR( x86IntRegType to, u32 from ) { xTEST( xRegister32(to), from ); }
-emitterT void TEST32ItoM( uptr to, u32 from ) { xTEST( ptr32[to], from ); }
+emitterT void TEST32ItoM( uptr to, u32 from ) { xTEST( ptr32[(u32*)to], from ); }
 emitterT void TEST32RtoR( x86IntRegType to, x86IntRegType from ) { xTEST( xRegister32(to), xRegister32(from) ); }
 emitterT void TEST32ItoRm( x86IntRegType to, u32 from ) { xTEST( ptr32[xAddressReg(to)], from ); }

 emitterT void TEST16ItoR( x86IntRegType to, u16 from ) { xTEST( xRegister16(to), from ); }
-emitterT void TEST16ItoM( uptr to, u16 from ) { xTEST( ptr16[to], from ); }
+emitterT void TEST16ItoM( uptr to, u16 from ) { xTEST( ptr16[(u16*)to], from ); }
 emitterT void TEST16RtoR( x86IntRegType to, x86IntRegType from ) { xTEST( xRegister16(to), xRegister16(from) ); }
 emitterT void TEST16ItoRm( x86IntRegType to, u16 from ) { xTEST( ptr16[xAddressReg(to)], from ); }

 emitterT void TEST8ItoR( x86IntRegType to, u8 from ) { xTEST( xRegister8(to), from ); }
-emitterT void TEST8ItoM( uptr to, u8 from ) { xTEST( ptr8[to], from ); }
+emitterT void TEST8ItoM( uptr to, u8 from ) { xTEST( ptr8[(u8*)to], from ); }
 emitterT void TEST8RtoR( x86IntRegType to, x86IntRegType from ) { xTEST( xRegister8(to), xRegister8(from) ); }
 emitterT void TEST8ItoRm( x86IntRegType to, u8 from ) { xTEST( ptr8[xAddressReg(to)], from ); }

@@ -207,7 +207,7 @@ emitterT void AND32I8toR( x86IntRegType to, s8 from )

 emitterT void AND32I8toM( uptr to, s8 from )
 {
-	xAND( ptr8[to], from );
+	xAND( ptr8[(u8*)to], from );
 }

 /* cmove r32 to r32*/

@@ -233,9 +233,9 @@ emitterT void MUL32R( x86IntRegType from ) { xUMUL( xRegister32(from) ); }
 /* imul eax by r32 to edx:eax */
 emitterT void IMUL32R( x86IntRegType from ) { xMUL( xRegister32(from) ); }
 /* mul eax by m32 to edx:eax */
-emitterT void MUL32M( u32 from ) { xUMUL( ptr32[from] ); }
+emitterT void MUL32M( u32 from ) { xUMUL( ptr32[(u32*)from] ); }
 /* imul eax by m32 to edx:eax */
-emitterT void IMUL32M( u32 from ) { xMUL( ptr32[from] ); }
+emitterT void IMUL32M( u32 from ) { xMUL( ptr32[(u32*)from] ); }

 /* imul r32 by r32 to r32 */
 emitterT void IMUL32RtoR( x86IntRegType to, x86IntRegType from )

@@ -248,9 +248,9 @@ emitterT void DIV32R( x86IntRegType from ) { xUDIV( xRegister32(from) ); }
 /* idiv eax by r32 to edx:eax */
 emitterT void IDIV32R( x86IntRegType from ) { xDIV( xRegister32(from) ); }
 /* div eax by m32 to edx:eax */
-emitterT void DIV32M( u32 from ) { xUDIV( ptr32[from] ); }
+emitterT void DIV32M( u32 from ) { xUDIV( ptr32[(u32*)from] ); }
 /* idiv eax by m32 to edx:eax */
-emitterT void IDIV32M( u32 from ) { xDIV( ptr32[from] ); }
+emitterT void IDIV32M( u32 from ) { xDIV( ptr32[(u32*)from] ); }


 emitterT void LEA32RtoR(x86IntRegType to, x86IntRegType from, s32 offset)

@@ -310,7 +310,7 @@ emitterT void PUSH32R( x86IntRegType from ) { xPUSH( xRegister32( from ) ); }
 /* push m32 */
 emitterT void PUSH32M( u32 from )
 {
-	xPUSH( ptr[from] );
+	xPUSH( ptr[(void*)from] );
 }

 /* pop r32 */

@@ -458,7 +458,7 @@ emitterT void JMPR( x86IntRegType to )
 // jmp m32
 emitterT void JMP32M( uptr to )
 {
-	xJMP( ptr32[to] );
+	xJMP( ptr32[(u32*)to] );
 }

 /* jp rel8 */

@@ -719,7 +719,7 @@ emitterT void CALL32R( x86IntRegType to )
 /* call m32 */
 emitterT void CALL32M( u32 to )
 {
-	xCALL( ptr32[to] );
+	xCALL( ptr32[(u32*)to] );
 }

 emitterT void BSRRtoR(x86IntRegType to, x86IntRegType from)
@@ -23,14 +23,14 @@ using namespace x86Emitter;
 // MMX / SSE Mixed Bag
 // ------------------------------------------------------------------------

-emitterT void MOVQMtoR( x86MMXRegType to, uptr from ) { xMOVQ( xRegisterMMX(to), (void*)from ); }
-emitterT void MOVQRtoM( uptr to, x86MMXRegType from ) { xMOVQ( (void*)to, xRegisterMMX(from) ); }
+emitterT void MOVQMtoR( x86MMXRegType to, uptr from ) { xMOVQ( xRegisterMMX(to), ptr[(void*)from] ); }
+emitterT void MOVQRtoM( uptr to, x86MMXRegType from ) { xMOVQ( ptr[(void*)to], xRegisterMMX(from) ); }
 emitterT void MOVQRtoR( x86MMXRegType to, x86MMXRegType from ) { xMOVQ( xRegisterMMX(to), xRegisterMMX(from) ); }
 emitterT void MOVQRmtoR( x86MMXRegType to, x86IntRegType from, int offset ) { xMOVQ( xRegisterMMX(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void MOVQRtoRm( x86IntRegType to, x86MMXRegType from, int offset ) { xMOVQ( ptr[xAddressReg(to)+offset], xRegisterMMX(from) ); }

-emitterT void MOVDMtoMMX( x86MMXRegType to, uptr from ) { xMOVDZX( xRegisterMMX(to), (void*)from ); }
-emitterT void MOVDMMXtoM( uptr to, x86MMXRegType from ) { xMOVD( (void*)to, xRegisterMMX(from) ); }
+emitterT void MOVDMtoMMX( x86MMXRegType to, uptr from ) { xMOVDZX( xRegisterMMX(to), ptr[(void*)from] ); }
+emitterT void MOVDMMXtoM( uptr to, x86MMXRegType from ) { xMOVD( ptr[(void*)to], xRegisterMMX(from) ); }
 emitterT void MOVD32RtoMMX( x86MMXRegType to, x86IntRegType from ) { xMOVDZX( xRegisterMMX(to), xRegister32(from) ); }
 emitterT void MOVD32RmtoMMX( x86MMXRegType to, x86IntRegType from, int offset ) { xMOVDZX( xRegisterMMX(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void MOVD32MMXtoR( x86IntRegType to, x86MMXRegType from ) { xMOVD( xRegister32(to), xRegisterMMX(from) ); }

@@ -41,22 +41,22 @@ emitterT void MASKMOVQRtoR(x86MMXRegType to, x86MMXRegType from) { xMASKMOV(

 #define DEFINE_LEGACY_LOGIC_OPCODE( mod ) \
 	emitterT void P##mod##RtoR( x86MMXRegType to, x86MMXRegType from ) { xP##mod( xRegisterMMX(to), xRegisterMMX(from) ); } \
-	emitterT void P##mod##MtoR( x86MMXRegType to, uptr from ) { xP##mod( xRegisterMMX(to), (void*)from ); } \
+	emitterT void P##mod##MtoR( x86MMXRegType to, uptr from ) { xP##mod( xRegisterMMX(to), ptr[(void*)from]); } \
 	emitterT void SSE2_P##mod##_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xP##mod( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_P##mod##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod( xRegisterSSE(to), (void*)from ); }
+	emitterT void SSE2_P##mod##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod( xRegisterSSE(to), ptr[(void*)from]); }

 #define DEFINE_LEGACY_ARITHMETIC( mod, sub ) \
 	emitterT void P##mod##sub##RtoR( x86MMXRegType to, x86MMXRegType from ) { xP##mod.sub( xRegisterMMX(to), xRegisterMMX(from) ); } \
-	emitterT void P##mod##sub##MtoR( x86MMXRegType to, uptr from ) { xP##mod.sub( xRegisterMMX(to), (void*)from ); } \
+	emitterT void P##mod##sub##MtoR( x86MMXRegType to, uptr from ) { xP##mod.sub( xRegisterMMX(to), ptr[(void*)from] ); } \
 	emitterT void SSE2_P##mod##sub##_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xP##mod.sub( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_P##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod.sub( xRegisterSSE(to), (void*)from ); }
+	emitterT void SSE2_P##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod.sub( xRegisterSSE(to), ptr[(void*)from] ); }

 #define DEFINE_LEGACY_SHIFT_STUFF( mod, sub ) \
 	emitterT void P##mod##sub##RtoR( x86MMXRegType to, x86MMXRegType from ) { xP##mod.sub( xRegisterMMX(to), xRegisterMMX(from) ); } \
-	emitterT void P##mod##sub##MtoR( x86MMXRegType to, uptr from ) { xP##mod.sub( xRegisterMMX(to), (void*)from ); } \
+	emitterT void P##mod##sub##MtoR( x86MMXRegType to, uptr from ) { xP##mod.sub( xRegisterMMX(to), ptr[(void*)from] ); } \
 	emitterT void P##mod##sub##ItoR( x86MMXRegType to, u8 imm ) { xP##mod.sub( xRegisterMMX(to), imm ); } \
 	emitterT void SSE2_P##mod##sub##_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xP##mod.sub( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_P##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod.sub( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_P##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { xP##mod.sub( xRegisterSSE(to), ptr[(void*)from] ); } \
 	emitterT void SSE2_P##mod##sub##_I8_to_XMM( x86SSERegType to, u8 imm ) { xP##mod.sub( xRegisterSSE(to), imm ); }

 #define DEFINE_LEGACY_SHIFT_OPCODE( mod ) \

@@ -109,11 +109,11 @@ DEFINE_LEGACY_ARITHMETIC( UNPCK, LWD );
 DEFINE_LEGACY_ARITHMETIC( UNPCK, HWD );


-emitterT void PMULUDQMtoR( x86MMXRegType to, uptr from ) { xPMUL.UDQ( xRegisterMMX( to ), (void*)from ); }
+emitterT void PMULUDQMtoR( x86MMXRegType to, uptr from ) { xPMUL.UDQ( xRegisterMMX( to ), ptr[(void*)from]); }
 emitterT void PMULUDQRtoR( x86MMXRegType to, x86MMXRegType from ) { xPMUL.UDQ( xRegisterMMX( to ), xRegisterMMX( from ) ); }

 emitterT void PSHUFWRtoR(x86MMXRegType to, x86MMXRegType from, u8 imm8) { xPSHUF.W( xRegisterMMX(to), xRegisterMMX(from), imm8 ); }
-emitterT void PSHUFWMtoR(x86MMXRegType to, uptr from, u8 imm8) { xPSHUF.W( xRegisterMMX(to), (void*)from, imm8 ); }
+emitterT void PSHUFWMtoR(x86MMXRegType to, uptr from, u8 imm8) { xPSHUF.W( xRegisterMMX(to), ptr[(void*)from], imm8 ); }

 emitterT void PINSRWRtoMMX( x86MMXRegType to, x86SSERegType from, u8 imm8 ) { xPINSR.W( xRegisterMMX(to), xRegister32(from), imm8 ); }

@@ -124,8 +124,8 @@ emitterT void EMMS() { xEMMS(); }
 // ------------------------------------------------------------------------

 #define DEFINE_LEGACY_MOV_OPCODE( mod, sse ) \
-	emitterT void sse##_MOV##mod##_M128_to_XMM( x86SSERegType to, uptr from ) { xMOV##mod( xRegisterSSE(to), (void*)from ); } \
-	emitterT void sse##_MOV##mod##_XMM_to_M128( uptr to, x86SSERegType from ) { xMOV##mod( (void*)to, xRegisterSSE(from) ); } \
+	emitterT void sse##_MOV##mod##_M128_to_XMM( x86SSERegType to, uptr from ) { xMOV##mod( xRegisterSSE(to), ptr[(void*)from]); } \
+	emitterT void sse##_MOV##mod##_XMM_to_M128( uptr to, x86SSERegType from ) { xMOV##mod( ptr[(void*)to], xRegisterSSE(from) ); } \
 	emitterT void sse##_MOV##mod##RmtoR( x86SSERegType to, x86IntRegType from, int offset ) { xMOV##mod( xRegisterSSE(to), ptr[xAddressReg(from)+offset] ); } \
 	emitterT void sse##_MOV##mod##RtoRm( x86IntRegType to, x86SSERegType from, int offset ) { xMOV##mod( ptr[xAddressReg(to)+offset], xRegisterSSE(from) ); } \
 	emitterT void sse##_MOV##mod##RmStoR( x86SSERegType to, x86IntRegType from, x86IntRegType from2, int scale ) \

@@ -134,45 +134,45 @@ emitterT void EMMS() { xEMMS(); }
 		{ xMOV##mod( ptr[xAddressReg(to)+xAddressReg(from2)], xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_PSD_OPCODE( mod ) \
-	emitterT void SSE_##mod##PS_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_##mod##PS_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_##mod##PS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.PS( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_##mod##PD_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PD( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_##mod##PD_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PD( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE2_##mod##PD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.PD( xRegisterSSE(to), xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_SSSD_OPCODE( mod ) \
-	emitterT void SSE_##mod##SS_M32_to_XMM( x86SSERegType to, uptr from ) { x##mod.SS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_##mod##SS_M32_to_XMM( x86SSERegType to, uptr from ) { x##mod.SS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_##mod##SS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.SS( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_##mod##SD_M64_to_XMM( x86SSERegType to, uptr from ) { x##mod.SD( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_##mod##SD_M64_to_XMM( x86SSERegType to, uptr from ) { x##mod.SD( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE2_##mod##SD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.SD( xRegisterSSE(to), xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_CMP_OPCODE( comp ) \
-	emitterT void SSE_CMP##comp##PS_M128_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.PS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_CMP##comp##PS_M128_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.PS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_CMP##comp##PS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCMP##comp.PS( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_CMP##comp##PD_M128_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.PD( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_CMP##comp##PD_M128_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.PD( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE2_CMP##comp##PD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCMP##comp.PD( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE_CMP##comp##SS_M32_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.SS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_CMP##comp##SS_M32_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.SS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_CMP##comp##SS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCMP##comp.SS( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE2_CMP##comp##SD_M64_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.SD( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_CMP##comp##SD_M64_to_XMM( x86SSERegType to, uptr from ) { xCMP##comp.SD( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE2_CMP##comp##SD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCMP##comp.SD( xRegisterSSE(to), xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_RSQRT_OPCODE(mod) \
-	emitterT void SSE_##mod##PS_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_##mod##PS_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.PS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_##mod##PS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.PS( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE_##mod##SS_M32_to_XMM( x86SSERegType to, uptr from ) { x##mod.SS( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE_##mod##SS_M32_to_XMM( x86SSERegType to, uptr from ) { x##mod.SS( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE_##mod##SS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.SS( xRegisterSSE(to), xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_SQRT_OPCODE(mod) \
 	DEFINE_LEGACY_RSQRT_OPCODE(mod) \
-	emitterT void SSE2_##mod##SD_M64_to_XMM( x86SSERegType to, uptr from ) { x##mod.SD( xRegisterSSE(to), (void*)from ); } \
+	emitterT void SSE2_##mod##SD_M64_to_XMM( x86SSERegType to, uptr from ) { x##mod.SD( xRegisterSSE(to), ptr[(void*)from]); } \
 	emitterT void SSE2_##mod##SD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.SD( xRegisterSSE(to), xRegisterSSE(from) ); }

 #define DEFINE_LEGACY_OP128( ssenum, mod, sub ) \
 	emitterT void SSE##ssenum##_##mod##sub##_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod.sub( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE##ssenum##_##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.sub( xRegisterSSE(to), (void*)from ); }
+	emitterT void SSE##ssenum##_##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod.sub( xRegisterSSE(to), ptr[(void*)from]); }

 #define DEFINE_LEGACY_MOV128( ssenum, mod, sub ) \
 	emitterT void SSE##ssenum##_##mod##sub##_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { x##mod##sub( xRegisterSSE(to), xRegisterSSE(from) ); } \
-	emitterT void SSE##ssenum##_##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod##sub( xRegisterSSE(to), (void*)from ); }
+	emitterT void SSE##ssenum##_##mod##sub##_M128_to_XMM( x86SSERegType to, uptr from ) { x##mod##sub( xRegisterSSE(to), ptr[(void*)from]); }


 #define DEFINE_LEGACY_PSSD_OPCODE( mod ) \

@@ -242,46 +242,46 @@ DEFINE_LEGACY_OP128( 4, PMIN, UD )
 emitterT void SSE_MOVAPS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xMOVAPS( xRegisterSSE(to), xRegisterSSE(from) ); }
 emitterT void SSE2_MOVDQA_XMM_to_XMM( x86SSERegType to, x86SSERegType from) { xMOVDQA( xRegisterSSE(to), xRegisterSSE(from) ); }

-emitterT void SSE2_MOVD_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVDZX( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE2_MOVD_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVDZX( xRegisterSSE(to), ptr[(void*)from] ); }
 emitterT void SSE2_MOVD_R_to_XMM( x86SSERegType to, x86IntRegType from ) { xMOVDZX( xRegisterSSE(to), xRegister32(from) ); }
 emitterT void SSE2_MOVD_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset )
 {
 	xMOVDZX( xRegisterSSE(to), ptr[xAddressReg(from)+offset] );
 }

-emitterT void SSE2_MOVD_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVD( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE2_MOVD_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVD( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE2_MOVD_XMM_to_R( x86IntRegType to, x86SSERegType from ) { xMOVD( xRegister32(to), xRegisterSSE(from) ); }
 emitterT void SSE2_MOVD_XMM_to_Rm( x86IntRegType to, x86SSERegType from, int offset )
 {
 	xMOVD( ptr[xAddressReg(from)+offset], xRegisterSSE(from) );
 }

-emitterT void SSE2_MOVQ_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVQZX( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE2_MOVQ_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVQZX( xRegisterSSE(to), ptr[(void*)from] ); }
 emitterT void SSE2_MOVQ_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xMOVQZX( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE2_MOVQ_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVQ( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE2_MOVQ_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVQ( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE2_MOVDQ2Q_XMM_to_MM( x86MMXRegType to, x86SSERegType from) { xMOVQ( xRegisterMMX(to), xRegisterSSE(from) ); }
 emitterT void SSE2_MOVQ2DQ_MM_to_XMM( x86SSERegType to, x86MMXRegType from) { xMOVQ( xRegisterSSE(to), xRegisterMMX(from) ); }


-emitterT void SSE_MOVSS_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVSSZX( xRegisterSSE(to), (void*)from ); }
-emitterT void SSE_MOVSS_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVSS( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE_MOVSS_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVSSZX( xRegisterSSE(to), ptr[(void*)from] ); }
+emitterT void SSE_MOVSS_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVSS( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE_MOVSS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xMOVSS( xRegisterSSE(to), xRegisterSSE(from) ); }
 emitterT void SSE_MOVSS_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset ) { xMOVSSZX( xRegisterSSE(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void SSE_MOVSS_XMM_to_Rm( x86IntRegType to, x86SSERegType from, int offset ) { xMOVSS( ptr[xAddressReg(to)+offset], xRegisterSSE(from) ); }

-emitterT void SSE2_MOVSD_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVSDZX( xRegisterSSE(to), (void*)from ); }
-emitterT void SSE2_MOVSD_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVSD( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE2_MOVSD_M32_to_XMM( x86SSERegType to, uptr from ) { xMOVSDZX( xRegisterSSE(to), ptr[(void*)from] ); }
+emitterT void SSE2_MOVSD_XMM_to_M32( u32 to, x86SSERegType from ) { xMOVSD( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE2_MOVSD_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xMOVSD( xRegisterSSE(to), xRegisterSSE(from) ); }
 emitterT void SSE2_MOVSD_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset ) { xMOVSDZX( xRegisterSSE(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void SSE2_MOVSD_XMM_to_Rm( x86IntRegType to, x86SSERegType from, int offset ) { xMOVSD( ptr[xAddressReg(to)+offset], xRegisterSSE(from) ); }

-emitterT void SSE_MOVLPS_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVL.PS( xRegisterSSE(to), (void*)from ); }
-emitterT void SSE_MOVLPS_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVL.PS( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE_MOVLPS_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVL.PS( xRegisterSSE(to), ptr[(void*)from] ); }
+emitterT void SSE_MOVLPS_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVL.PS( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE_MOVLPS_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset ) { xMOVL.PS( xRegisterSSE(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void SSE_MOVLPS_XMM_to_Rm( x86IntRegType to, x86SSERegType from, int offset ) { xMOVL.PS( ptr[xAddressReg(to)+offset], xRegisterSSE(from) ); }

-emitterT void SSE_MOVHPS_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVH.PS( xRegisterSSE(to), (void*)from ); }
-emitterT void SSE_MOVHPS_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVH.PS( (void*)to, xRegisterSSE(from) ); }
+emitterT void SSE_MOVHPS_M64_to_XMM( x86SSERegType to, uptr from ) { xMOVH.PS( xRegisterSSE(to), ptr[(void*)from] ); }
+emitterT void SSE_MOVHPS_XMM_to_M64( u32 to, x86SSERegType from ) { xMOVH.PS( ptr[(void*)to], xRegisterSSE(from) ); }
 emitterT void SSE_MOVHPS_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset ) { xMOVH.PS( xRegisterSSE(to), ptr[xAddressReg(from)+offset] ); }
 emitterT void SSE_MOVHPS_XMM_to_Rm( x86IntRegType to, x86SSERegType from, int offset ) { xMOVH.PS( ptr[xAddressReg(to)+offset], xRegisterSSE(from) ); }

@@ -292,31 +292,31 @@ emitterT void SSE_MASKMOVDQU_XMM_to_XMM( x86SSERegType to, x86SSERegType from )
 emitterT void SSE2_PMOVMSKB_XMM_to_R32(x86IntRegType to, x86SSERegType from) { xPMOVMSKB( xRegister32(to), xRegisterSSE(from) ); }

 emitterT void SSE_SHUFPS_XMM_to_XMM( x86SSERegType to, x86SSERegType from, u8 imm8 ) { xSHUF.PS( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE_SHUFPS_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xSHUF.PS( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE_SHUFPS_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xSHUF.PS( xRegisterSSE(to), ptr[(void*)from], imm8 ); }
 emitterT void SSE_SHUFPS_Rm_to_XMM( x86SSERegType to, x86IntRegType from, int offset, u8 imm8 )
 {
 	xSHUF.PS( xRegisterSSE(to), ptr[xAddressReg(from)+offset], imm8 );
 }

 emitterT void SSE_SHUFPD_XMM_to_XMM( x86SSERegType to, x86SSERegType from, u8 imm8 ) { xSHUF.PD( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE_SHUFPD_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xSHUF.PD( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE_SHUFPD_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xSHUF.PD( xRegisterSSE(to), ptr[(void*)from], imm8 ); }

-emitterT void SSE_CVTPI2PS_M64_to_XMM( x86SSERegType to, uptr from ) { xCVTPI2PS( xRegisterSSE(to), (u64*)from ); }
+emitterT void SSE_CVTPI2PS_M64_to_XMM( x86SSERegType to, uptr from ) { xCVTPI2PS( xRegisterSSE(to), ptr64[(u64*)from] ); }
 emitterT void SSE_CVTPI2PS_MM_to_XMM( x86SSERegType to, x86MMXRegType from ) { xCVTPI2PS( xRegisterSSE(to), xRegisterMMX(from) ); }
-emitterT void SSE_CVTPS2PI_M64_to_MM( x86MMXRegType to, uptr from ) { xCVTPS2PI( xRegisterMMX(to), (u64*)from ); }
+emitterT void SSE_CVTPS2PI_M64_to_MM( x86MMXRegType to, uptr from ) { xCVTPS2PI( xRegisterMMX(to), ptr64[(u64*)from] ); }
 emitterT void SSE_CVTPS2PI_XMM_to_MM( x86MMXRegType to, x86SSERegType from ) { xCVTPS2PI( xRegisterMMX(to), xRegisterSSE(from) ); }
-emitterT void SSE_CVTTSS2SI_M32_to_R32(x86IntRegType to, uptr from) { xCVTTSS2SI( xRegister32(to), (u32*)from ); }
+emitterT void SSE_CVTTSS2SI_M32_to_R32(x86IntRegType to, uptr from) { xCVTTSS2SI( xRegister32(to), ptr32[(u32*)from] ); }
 emitterT void SSE_CVTTSS2SI_XMM_to_R32(x86IntRegType to, x86SSERegType from) { xCVTTSS2SI( xRegister32(to), xRegisterSSE(from) ); }
-emitterT void SSE_CVTSI2SS_M32_to_XMM(x86SSERegType to, uptr from) { xCVTSI2SS( xRegisterSSE(to), (u32*)from ); }
+emitterT void SSE_CVTSI2SS_M32_to_XMM(x86SSERegType to, uptr from) { xCVTSI2SS( xRegisterSSE(to), ptr32[(u32*)from] ); }
 emitterT void SSE_CVTSI2SS_R_to_XMM(x86SSERegType to, x86IntRegType from) { xCVTSI2SS( xRegisterSSE(to), xRegister32(from) ); }

-emitterT void SSE2_CVTSS2SD_M32_to_XMM( x86SSERegType to, uptr from) { xCVTSS2SD( xRegisterSSE(to), (u32*)from ); }
+emitterT void SSE2_CVTSS2SD_M32_to_XMM( x86SSERegType to, uptr from) { xCVTSS2SD( xRegisterSSE(to), ptr32[(u32*)from] ); }
 emitterT void SSE2_CVTSS2SD_XMM_to_XMM( x86SSERegType to, x86SSERegType from) { xCVTSS2SD( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE2_CVTSD2SS_M64_to_XMM( x86SSERegType to, uptr from) { xCVTSD2SS( xRegisterSSE(to), (u64*)from ); }
+emitterT void SSE2_CVTSD2SS_M64_to_XMM( x86SSERegType to, uptr from) { xCVTSD2SS( xRegisterSSE(to), ptr64[(u64*)from] ); }
 emitterT void SSE2_CVTSD2SS_XMM_to_XMM( x86SSERegType to, x86SSERegType from) { xCVTSD2SS( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE2_CVTDQ2PS_M128_to_XMM( x86SSERegType to, uptr from ) { xCVTDQ2PS( xRegisterSSE(to), (u128*)from ); }
+emitterT void SSE2_CVTDQ2PS_M128_to_XMM( x86SSERegType to, uptr from ) { xCVTDQ2PS( xRegisterSSE(to), ptr128[(u128*)from] ); }
 emitterT void SSE2_CVTDQ2PS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCVTDQ2PS( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE2_CVTPS2DQ_M128_to_XMM( x86SSERegType to, uptr from ) { xCVTPS2DQ( xRegisterSSE(to), (u128*)from ); }
+emitterT void SSE2_CVTPS2DQ_M128_to_XMM( x86SSERegType to, uptr from ) { xCVTPS2DQ( xRegisterSSE(to), ptr128[(u128*)from] ); }
 emitterT void SSE2_CVTPS2DQ_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCVTPS2DQ( xRegisterSSE(to), xRegisterSSE(from) ); }

 emitterT void SSE2_CVTTPS2DQ_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xCVTTPS2DQ( xRegisterSSE(to), xRegisterSSE(from) ); }

@@ -325,15 +325,15 @@ emitterT void SSE_PMAXSW_MM_to_MM( x86MMXRegType to, x86MMXRegType from ) { xPM
 emitterT void SSE_PMINSW_MM_to_MM( x86MMXRegType to, x86MMXRegType from ) { xPMAX.SW( xRegisterMMX(to), xRegisterMMX(from) ); }

 emitterT void SSE2_PSHUFD_XMM_to_XMM( x86SSERegType to, x86SSERegType from, u8 imm8 ) { xPSHUF.D( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE2_PSHUFD_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.D( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE2_PSHUFD_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.D( xRegisterSSE(to), ptr[(void*)from], imm8 ); }
 emitterT void SSE2_PSHUFLW_XMM_to_XMM( x86SSERegType to, x86SSERegType from, u8 imm8 ) { xPSHUF.LW( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE2_PSHUFLW_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.LW( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE2_PSHUFLW_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.LW( xRegisterSSE(to), ptr[(void*)from], imm8 ); }
 emitterT void SSE2_PSHUFHW_XMM_to_XMM( x86SSERegType to, x86SSERegType from, u8 imm8 ) { xPSHUF.HW( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE2_PSHUFHW_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.HW( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE2_PSHUFHW_M128_to_XMM( x86SSERegType to, uptr from, u8 imm8 ) { xPSHUF.HW( xRegisterSSE(to), ptr[(void*)from], imm8 ); }

-emitterT void SSE_UNPCKLPS_M128_to_XMM( x86SSERegType to, uptr from ) { xUNPCK.LPS( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE_UNPCKLPS_M128_to_XMM( x86SSERegType to, uptr from ) { xUNPCK.LPS( xRegisterSSE(to), ptr[(void*)from] ); }
 emitterT void SSE_UNPCKLPS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xUNPCK.LPS( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE_UNPCKHPS_M128_to_XMM( x86SSERegType to, uptr from ) { xUNPCK.HPS( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE_UNPCKHPS_M128_to_XMM( x86SSERegType to, uptr from ) { xUNPCK.HPS( xRegisterSSE(to), ptr[(void*)from] ); }
 emitterT void SSE_UNPCKHPS_XMM_to_XMM( x86SSERegType to, x86SSERegType from ) { xUNPCK.HPS( xRegisterSSE(to), xRegisterSSE(from) ); }

 emitterT void SSE_MOVMSKPS_XMM_to_R32(x86IntRegType to, x86SSERegType from) { xMOVMSKPS( xRegister32(to), xRegisterSSE(from) ); }

@@ -353,26 +353,26 @@ emitterT void SSE_PINSRW_R32_to_XMM(x86SSERegType to, x86IntRegType from, u8 imm
 emitterT void SSE2_PMADDWD_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xPMADD.WD( xRegisterSSE(to), xRegisterSSE(from) ); }

 emitterT void SSE3_HADDPS_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xHADD.PS( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE3_HADDPS_M128_to_XMM(x86SSERegType to, uptr from) { xHADD.PS( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE3_HADDPS_M128_to_XMM(x86SSERegType to, uptr from) { xHADD.PS( xRegisterSSE(to), ptr[(void*)from] ); }

 emitterT void SSE4_PINSRD_R32_to_XMM(x86SSERegType to, x86IntRegType from, u8 imm8) { xPINSR.D( xRegisterSSE(to), xRegister32(from), imm8 ); }

 emitterT void SSE4_INSERTPS_XMM_to_XMM(x86SSERegType to, x86SSERegType from, u8 imm8) { xINSERTPS( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
 emitterT void SSE4_EXTRACTPS_XMM_to_R32(x86IntRegType to, x86SSERegType from, u8 imm8) { xEXTRACTPS( xRegister32(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE4_EXTRACTPS_XMM_to_M32(uptr to, x86SSERegType from, u8 imm8) { xEXTRACTPS( (u32*)to, xRegisterSSE(from), imm8 ); }
+emitterT void SSE4_EXTRACTPS_XMM_to_M32(uptr to, x86SSERegType from, u8 imm8) { xEXTRACTPS( ptr32[(u32*)to], xRegisterSSE(from), imm8 ); }

 emitterT void SSE4_DPPS_XMM_to_XMM(x86SSERegType to, x86SSERegType from, u8 imm8) { xDP.PS( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
-emitterT void SSE4_DPPS_M128_to_XMM(x86SSERegType to, uptr from, u8 imm8) { xDP.PS( xRegisterSSE(to), (void*)from, imm8 ); }
+emitterT void SSE4_DPPS_M128_to_XMM(x86SSERegType to, uptr from, u8 imm8) { xDP.PS( xRegisterSSE(to), ptr[(void*)from], imm8 ); }

 emitterT void SSE4_BLENDPS_XMM_to_XMM(x86IntRegType to, x86SSERegType from, u8 imm8) { xBLEND.PS( xRegisterSSE(to), xRegisterSSE(from), imm8 ); }
 emitterT void SSE4_BLENDVPS_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xBLEND.VPS( xRegisterSSE(to), xRegisterSSE(from) ); }
-emitterT void SSE4_BLENDVPS_M128_to_XMM(x86SSERegType to, uptr from) { xBLEND.VPS( xRegisterSSE(to), (void*)from ); }
+emitterT void SSE4_BLENDVPS_M128_to_XMM(x86SSERegType to, uptr from) { xBLEND.VPS( xRegisterSSE(to), ptr[(void*)from] ); }

 emitterT void SSE4_PMOVSXDQ_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xPMOVSX.DQ( xRegisterSSE(to), xRegisterSSE(from) ); }
 emitterT void SSE4_PMULDQ_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xPMUL.DQ( xRegisterSSE(to), xRegisterSSE(from) ); }
 emitterT void SSE4_PTEST_XMM_to_XMM(x86SSERegType to, x86SSERegType from) { xPTEST(xRegisterSSE(to), xRegisterSSE(from)); }

-emitterT void SSE_LDMXCSR( uptr from ) { xLDMXCSR( (u32*)from ); }
+emitterT void SSE_LDMXCSR( uptr from ) { xLDMXCSR( ptr32[(u32*)from] ); }


 //////////////////////////////////////////////////////////////////////////////////////////
@@ -62,7 +62,7 @@ SSE_MXCSR& SSE_MXCSR::ApplyReserveMask()

 SSE_MXCSR::operator x86Emitter::ModSib32() const
 {
-	return &bitmask;
+	return x86Emitter::ptr32[&bitmask];
 }

 namespace x86Emitter {
@@ -882,12 +882,12 @@ __emitinline void xBSWAP( const xRegister32& to )

 __emitinline void xStoreReg( const xRegisterSSE& src )
 {
-	xMOVDQA( &XMMRegisters::data[src.Id*2], src );
+	xMOVDQA( ptr[&XMMRegisters::data[src.Id*2]], src );
 }

 __emitinline void xRestoreReg( const xRegisterSSE& dest )
 {
-	xMOVDQA( dest, &XMMRegisters::data[dest.Id*2] );
+	xMOVDQA( dest, ptr[&XMMRegisters::data[dest.Id*2]] );
 }

 }
@@ -114,14 +114,14 @@ void recDI()

 	//CALLFunc( (uptr)Interp::DI );

-	xMOV(eax, ptr32[&cpuRegs.CP0.n.Status]);
+	xMOV(eax, ptr[&cpuRegs.CP0.n.Status]);
 	xTEST(eax, 0x20006); // EXL | ERL | EDI
 	xForwardJNZ8 iHaveNoIdea;
 	xTEST(eax, 0x18); // KSU
 	xForwardJNZ8 inUserMode;
 	iHaveNoIdea.SetTarget();
 	xAND(eax, ~(u32)0x10000); // EIE
-	xMOV(ptr32[&cpuRegs.CP0.n.Status], eax);
+	xMOV(ptr[&cpuRegs.CP0.n.Status], eax);
 	inUserMode.SetTarget();
 }

@@ -171,12 +171,12 @@ void recMFC0( void )
 		case 1:
 			iFlushCall(FLUSH_NODESTROY);
 			xCALL( COP0_UpdatePCCR );
-			xMOV(eax, &cpuRegs.PERF.n.pcr0);
+			xMOV(eax, ptr[&cpuRegs.PERF.n.pcr0]);
 			break;
 		case 3:
 			iFlushCall(FLUSH_NODESTROY);
 			xCALL( COP0_UpdatePCCR );
-			xMOV(eax, &cpuRegs.PERF.n.pcr1);
+			xMOV(eax, ptr[&cpuRegs.PERF.n.pcr1]);
 			break;
 	}
 	_deleteEEreg(_Rt_, 0);
@@ -148,25 +148,25 @@ static void _DynGen_StackFrameCheck()

 	// --------- EBP Here -----------

-	xCMP( ebp, &s_store_ebp );
+	xCMP( ebp, ptr[&s_store_ebp] );
 	xForwardJE8 skipassert_ebp;

 	xMOV( ecx, 1 ); // 1 specifies EBP
 	xMOV( edx, ebp );
 	xCALL( StackFrameCheckFailed );
-	xMOV( ebp, &s_store_ebp ); // half-hearted frame recovery attempt!
+	xMOV( ebp, ptr[&s_store_ebp] ); // half-hearted frame recovery attempt!

 	skipassert_ebp.SetTarget();

 	// --------- ESP There -----------

-	xCMP( esp, &s_store_esp );
+	xCMP( esp, ptr[&s_store_esp] );
 	xForwardJE8 skipassert_esp;

 	xXOR( ecx, ecx ); // 0 specifies ESP
 	xMOV( edx, esp );
 	xCALL( StackFrameCheckFailed );
-	xMOV( esp, &s_store_esp ); // half-hearted frame recovery attempt!
+	xMOV( esp, ptr[&s_store_esp] ); // half-hearted frame recovery attempt!

 	skipassert_esp.SetTarget();
 }

@@ -180,10 +180,10 @@ static DynGenFunc* _DynGen_JITCompile()
 	u8* retval = xGetPtr();
 	_DynGen_StackFrameCheck();

-	xMOV( ecx, &psxRegs.pc );
+	xMOV( ecx, ptr[&psxRegs.pc] );
 	xCALL( iopRecRecompile );

-	xMOV( eax, &psxRegs.pc );
+	xMOV( eax, ptr[&psxRegs.pc] );
 	xMOV( ebx, eax );
 	xSHR( eax, 16 );
 	xMOV( ecx, ptr[psxRecLUT + (eax*4)] );

@@ -205,7 +205,7 @@ static DynGenFunc* _DynGen_DispatcherReg()
 	u8* retval = xGetPtr();
 	_DynGen_StackFrameCheck();

-	xMOV( eax, &psxRegs.pc );
+	xMOV( eax, ptr[&psxRegs.pc] );
 	xMOV( ebx, eax );
 	xSHR( eax, 16 );
 	xMOV( ecx, ptr[psxRecLUT + (eax*4)] );

@@ -312,8 +312,8 @@ static DynGenFunc* _DynGen_EnterRecompiledCode()

 	if( IsDevBuild )
 	{
-		xMOV( &s_store_esp, esp );
-		xMOV( &s_store_ebp, ebp );
+		xMOV( ptr[&s_store_esp], esp );
+		xMOV( ptr[&s_store_ebp], ebp );
 	}

 	xJMP( iopDispatcherReg );

@@ -704,7 +704,7 @@ static void rpsxSB()

 	MOV32MtoR(ECX, (uptr)&psxRegs.GPR.r[_Rs_]);
 	if (_Imm_) ADD32ItoR(ECX, _Imm_);
-	xMOV( edx, &psxRegs.GPR.r[_Rt_] );
+	xMOV( edx, ptr[&psxRegs.GPR.r[_Rt_]] );
 	xCALL( iopMemWrite8 );
 }

@@ -715,7 +715,7 @@ static void rpsxSH()

 	MOV32MtoR(ECX, (uptr)&psxRegs.GPR.r[_Rs_]);
 	if (_Imm_) ADD32ItoR(ECX, _Imm_);
-	xMOV( edx, &psxRegs.GPR.r[_Rt_] );
+	xMOV( edx, ptr[&psxRegs.GPR.r[_Rt_]] );
 	xCALL( iopMemWrite16 );
 }

@@ -726,7 +726,7 @@ static void rpsxSW()

 	MOV32MtoR(ECX, (uptr)&psxRegs.GPR.r[_Rs_]);
 	if (_Imm_) ADD32ItoR(ECX, _Imm_);
-	xMOV( edx, &psxRegs.GPR.r[_Rt_] );
+	xMOV( edx, ptr[&psxRegs.GPR.r[_Rt_]] );
 	xCALL( iopMemWrite32 );
 }
@@ -375,25 +375,25 @@ static void _DynGen_StackFrameCheck()

 	// --------- EBP Here -----------

-	xCMP( ebp, &s_store_ebp );
+	xCMP( ebp, ptr[&s_store_ebp] );
 	xForwardJE8 skipassert_ebp;

 	xMOV( ecx, 1 ); // 1 specifies EBP
 	xMOV( edx, ebp );
 	xCALL( StackFrameCheckFailed );
-	xMOV( ebp, &s_store_ebp ); // half-hearted frame recovery attempt!
+	xMOV( ebp, ptr[&s_store_ebp] ); // half-hearted frame recovery attempt!

 	skipassert_ebp.SetTarget();

 	// --------- ESP There -----------

-	xCMP( esp, &s_store_esp );
+	xCMP( esp, ptr[&s_store_esp] );
 	xForwardJE8 skipassert_esp;

 	xXOR( ecx, ecx ); // 0 specifies ESP
 	xMOV( edx, esp );
 	xCALL( StackFrameCheckFailed );
-	xMOV( esp, &s_store_esp ); // half-hearted frame recovery attempt!
+	xMOV( esp, ptr[&s_store_esp] ); // half-hearted frame recovery attempt!

 	skipassert_esp.SetTarget();
 }

@@ -407,10 +407,10 @@ static DynGenFunc* _DynGen_JITCompile()
 	u8* retval = xGetAlignedCallTarget();
 	_DynGen_StackFrameCheck();

-	xMOV( ecx, &cpuRegs.pc );
+	xMOV( ecx, ptr[&cpuRegs.pc] );
 	xCALL( recRecompile );

-	xMOV( eax, &cpuRegs.pc );
+	xMOV( eax, ptr[&cpuRegs.pc] );
 	xMOV( ebx, eax );
 	xSHR( eax, 16 );
 	xMOV( ecx, ptr[recLUT + (eax*4)] );

@@ -432,7 +432,7 @@ static DynGenFunc* _DynGen_DispatcherReg()
 	u8* retval = xGetPtr(); // fallthrough target, can't align it!
 	_DynGen_StackFrameCheck();

-	xMOV( eax, &cpuRegs.pc );
+	xMOV( eax, ptr[&cpuRegs.pc] );
 	xMOV( ebx, eax );
 	xSHR( eax, 16 );
 	xMOV( ecx, ptr[recLUT + (eax*4)] );

@@ -475,8 +475,8 @@ static DynGenFunc* _DynGen_EnterRecompiledCode()
 	xMOV( ptr32[esp+0x18], ebp );
 	xLEA( ebp, ptr32[esp+0x18] );

-	xMOV( &s_store_esp, esp );
-	xMOV( &s_store_ebp, ebp );
+	xMOV( ptr[&s_store_esp], esp );
+	xMOV( ptr[&s_store_ebp], ebp );

 	xJMP( ptr32[&DispatcherReg] );

@@ -1104,10 +1104,10 @@ static void iBranchTest(u32 newpc)
 	}
 	else
 	{
-		xMOV(eax, &cpuRegs.cycle);
+		xMOV(eax, ptr[&cpuRegs.cycle]);
 		xADD(eax, eeScaleBlockCycles());
-		xMOV(&cpuRegs.cycle, eax); // update cycles
-		xSUB(eax, &g_nextBranchCycle);
+		xMOV(ptr[&cpuRegs.cycle], eax); // update cycles
+		xSUB(eax, ptr[&g_nextBranchCycle]);

 		if (newpc == 0xffffffff)
 			xJS( DispatcherReg );
@@ -370,11 +370,11 @@ void vtlb_DynGenRead64_Const( u32 bits, u32 addr_const )
 	switch( bits )
 	{
 		case 64:
-			iMOV64_Smart( ptr[edx], ptr[ppf] );
+			iMOV64_Smart( ptr[edx], ptr[(void*)ppf] );
 		break;

 		case 128:
-			iMOV128_SSE( ptr[edx], ptr[ppf] );
+			iMOV128_SSE( ptr[edx], ptr[(void*)ppf] );
 		break;

 		jNO_DEFAULT

@@ -416,20 +416,20 @@ void vtlb_DynGenRead32_Const( u32 bits, bool sign, u32 addr_const )
 	{
 		case 8:
 			if( sign )
-				xMOVSX( eax, ptr8[ppf] );
+				xMOVSX( eax, ptr8[(u8*)ppf] );
 			else
-				xMOVZX( eax, ptr8[ppf] );
+				xMOVZX( eax, ptr8[(u8*)ppf] );
 		break;

 		case 16:
 			if( sign )
-				xMOVSX( eax, ptr16[ppf] );
+				xMOVSX( eax, ptr16[(u16*)ppf] );
 			else
-				xMOVZX( eax, ptr16[ppf] );
+				xMOVZX( eax, ptr16[(u16*)ppf] );
 		break;

 		case 32:
-			xMOV( eax, ptr[ppf] );
+			xMOV( eax, ptr[(void*)ppf] );
 		break;
 	}
 }

@@ -450,7 +450,7 @@ void vtlb_DynGenRead32_Const( u32 bits, bool sign, u32 addr_const )
 	// Shortcut for the INTC_STAT register, which many games like to spin on heavily.
 	if( (bits == 32) && !EmuConfig.Speedhacks.IntcStat && (paddr == INTC_STAT) )
 	{
-		xMOV( eax, &psHu32( INTC_STAT ) );
+		xMOV( eax, ptr[&psHu32( INTC_STAT )] );
 	}
 	else
 	{

@@ -505,23 +505,23 @@ void vtlb_DynGenWrite_Const( u32 bits, u32 addr_const )
 	{
 		//8 , 16, 32 : data on EDX
 		case 8:
-			xMOV( ptr[ppf], dl );
+			xMOV( ptr[(void*)ppf], dl );
 		break;

 		case 16:
-			xMOV( ptr[ppf], dx );
+			xMOV( ptr[(void*)ppf], dx );
 		break;

 		case 32:
-			xMOV( ptr[ppf], edx );
+			xMOV( ptr[(void*)ppf], edx );
 		break;

 		case 64:
-			iMOV64_Smart( ptr[ppf], ptr[edx] );
+			iMOV64_Smart( ptr[(void*)ppf], ptr[edx] );
 		break;

 		case 128:
-			iMOV128_SSE( ptr[ppf], ptr[edx] );
+			iMOV128_SSE( ptr[(void*)ppf], ptr[edx] );
 		break;
 	}
@@ -132,7 +132,7 @@ void normBranch(mV, microFlagCycles& mFC) {

 void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 	mVUsetupBranch(mVU, mFC);
-	xCMP(ptr16[&mVU->branch], 0);
+	xCMP(ptr16[(u16*)&mVU->branch], 0);
 	incPC(3);
 	if (mVUup.eBit) { // Conditional Branch With E-Bit Set
 		mVUendProgram(mVU, &mFC, 2);
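The pitfall class this closes off can be illustrated with a small self-contained C++ analogy (illustrative only, not PCSX2 code; MemOperand and emitLoad are made-up stand-ins):

    #include <cstdio>
    #include <cstdint>

    // Simplified stand-in for an 'indirect' operand type. With an implicit
    // constructor from an integer, any plain value quietly becomes an "address".
    struct MemOperand
    {
        uintptr_t addr;
        /*implicit*/ MemOperand( uintptr_t a ) : addr( a ) {}  // pre-commit behavior
        // explicit MemOperand( const void* p ) : addr( (uintptr_t)p ) {}  // post-commit style
    };

    static void emitLoad( const MemOperand& src )
    {
        std::printf( "load from [%p]\n", (void*)src.addr );
    }

    int main()
    {
        uint32_t value = 42;
        emitLoad( value );  // compiles, but 42 was a value, not an address --
                            // exactly the obscure kind of bug the commit removes
    }

Requiring an explicit ptr[] and a typed cast turns this category of mistake into a compile error; per the commit message, dropping the implicit conversions may also help LTCG release-build times.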