// SPDX-FileCopyrightText: 2002-2024 PCSX2 Dev Team
// SPDX-License-Identifier: GPL-3.0+
/*
 * ix86 core v0.9.1
 *
 * Original Authors (v0.6.2 and prior):
 *   linuzappz <linuzappz@pcsx.net>
 *   alexey silinov
 *   goldfinger
 *   zerofrog (@gmail.com)
 *
 * Authors of v0.9.1:
 *   Jake.Stine (@gmail.com)
 *   cottonvibes (@gmail.com)
 *   sudonim (1@gmail.com)
 */
#include "common/emitter/internal.h"
#include "common/emitter/implement/helpers.h"

namespace x86Emitter
{
	void _xMovRtoR(const xRegisterInt& to, const xRegisterInt& from)
	{
		pxAssert(to.GetOperandSize() == from.GetOperandSize());

		if (to == from)
			return; // ignore redundant MOVs.

		xOpWrite(from.GetPrefix16(), from.Is8BitOp() ? 0x88 : 0x89, from, to);
	}

	void xImpl_Mov::operator()(const xRegisterInt& to, const xRegisterInt& from) const
	{
		// FIXME WTF?
		_xMovRtoR(to, from);
	}

	void xImpl_Mov::operator()(const xIndirectVoid& dest, const xRegisterInt& from) const
	{
		// mov eax has a special form when writing directly to a DISP32 address
		// (sans any register index/base registers).

		xOpWrite(from.GetPrefix16(), from.Is8BitOp() ? 0x88 : 0x89, from, dest);
	}

	void xImpl_Mov::operator()(const xRegisterInt& to, const xIndirectVoid& src) const
	{
		// mov eax has a special form when reading directly from a DISP32 address
		// (sans any register index/base registers).

		xOpWrite(to.GetPrefix16(), to.Is8BitOp() ? 0x8a : 0x8b, to, src);
	}
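
	// Usage sketch (hypothetical; assumes the usual register globals and the ptr32 indexer are in scope):
	//   xMOV(ptr32[rax], ecx); // store ecx to the dword at [rax]
	//   xMOV(ecx, ptr32[rax]); // load the dword at [rax] into ecx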

	void xImpl_Mov::operator()(const xIndirect64orLess& dest, sptr imm) const
	{
		switch (dest.GetOperandSize())
		{
			case 1:
				pxAssertMsg(imm == (s8)imm || imm == (u8)imm, "Immediate won't fit!");
				break;
			case 2:
				pxAssertMsg(imm == (s16)imm || imm == (u16)imm, "Immediate won't fit!");
				break;
			case 4:
				pxAssertMsg(imm == (s32)imm || imm == (u32)imm, "Immediate won't fit!");
				break;
			case 8:
				pxAssertMsg(imm == (s32)imm, "Immediate won't fit in immediate slot, go through a register!");
				break;
			default:
				pxAssertMsg(0, "Bad indirect size!");
		}
		xOpWrite(dest.GetPrefix16(), dest.Is8BitOp() ? 0xc6 : 0xc7, 0, dest, dest.GetImmSize());
		dest.xWriteImm(imm);
	}
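
	// Usage sketch (hypothetical): xMOV(ptr32[rax], 5);
	// Note from the asserts above: a 64-bit destination only accepts a sign-extended 32-bit immediate.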

	// preserve_flags - set to true to disable optimizations which could alter the state of
	// the flags (namely replacing mov reg,0 with xor).
	void xImpl_Mov::operator()(const xRegisterInt& to, sptr imm, bool preserve_flags) const
	{
		switch (to.GetOperandSize())
		{
			case 1:
				pxAssertMsg(imm == (s8)imm || imm == (u8)imm, "Immediate won't fit!");
				break;
			case 2:
				pxAssertMsg(imm == (s16)imm || imm == (u16)imm, "Immediate won't fit!");
				break;
			case 4:
				pxAssertMsg(imm == (s32)imm || imm == (u32)imm, "Immediate won't fit!");
				break;
			case 8:
				pxAssertMsg(imm == (s32)imm || imm == (u32)imm, "Immediate won't fit in immediate slot, use mov64 or lea!");
				break;
			default:
				pxAssertMsg(0, "Bad indirect size!");
		}

		const xRegisterInt& to_ = to.GetNonWide();
		if (!preserve_flags && (imm == 0))
		{
			_g1_EmitOp(G1Type_XOR, to_, to_);
		}
		else if (imm == (sptr)(u32)imm || !to.IsWide())
		{
			// Note: MOV does not have (reg16/32,imm8) forms.
			u8 opcode = (to_.Is8BitOp() ? 0xb0 : 0xb8) | to_.Id;
			xOpAccWrite(to_.GetPrefix16(), opcode, 0, to_);
			to_.xWriteImm(imm);
		}
		else
		{
			xOpWrite(to.GetPrefix16(), 0xc7, 0, to);
			to.xWriteImm(imm);
		}
	}

	const xImpl_Mov xMOV;
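
	// Usage sketch (hypothetical; assumes the usual register globals such as eax are in scope):
	//   xMOV(eax, 0);       // may be emitted as XOR eax,eax, which clobbers EFLAGS
	//   xMOV(eax, 0, true); // preserve_flags forces a real MOV so EFLAGS are left untouched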

	void xImpl_MovImm64::operator()(const xRegister64& to, s64 imm, bool preserve_flags) const
	{
		if (imm == (u32)imm || imm == (s32)imm)
		{
			xMOV(to, imm, preserve_flags);
		}
		else
		{
			u8 opcode = 0xb8 | to.Id;
			xOpAccWrite(to.GetPrefix16(), opcode, 0, to);
			xWrite64(imm);
		}
	}

	const xImpl_MovImm64 xMOV64;
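
	// Usage sketch (hypothetical; assumes a 64-bit register global such as rax is in scope):
	//   xMOV64(rax, 0x123456789abcdefll); // too wide for a 32-bit immediate, emits the full 8-byte MOV
	//   xMOV64(rax, 42);                  // fits in 32 bits, falls through to the plain xMOV path above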

	// --------------------------------------------------------------------------------------
	//  CMOVcc
	// --------------------------------------------------------------------------------------

#define ccSane() pxAssertMsg(ccType >= 0 && ccType <= 0x0f, "Invalid comparison type specifier.")

// Macro useful for trapping unwanted use of EBP.
//#define EbpAssert() pxAssert( to != ebp )
#define EbpAssert()

	void xImpl_CMov::operator()(const xRegister16or32or64& to, const xRegister16or32or64& from) const
	{
		pxAssert(to->GetOperandSize() == from->GetOperandSize());
		ccSane();
		xOpWrite0F(to->GetPrefix16(), 0x40 | ccType, to, from);
	}

	void xImpl_CMov::operator()(const xRegister16or32or64& to, const xIndirectVoid& sibsrc) const
	{
		ccSane();
		xOpWrite0F(to->GetPrefix16(), 0x40 | ccType, to, sibsrc);
	}
	//void xImpl_CMov::operator()( const xDirectOrIndirect32& to, const xDirectOrIndirect32& from ) const { ccSane(); _DoI_helpermess( *this, to, from ); }
	//void xImpl_CMov::operator()( const xDirectOrIndirect16& to, const xDirectOrIndirect16& from ) const { ccSane(); _DoI_helpermess( *this, to, from ); }
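
	// Usage sketch (hypothetical; assumes the usual register globals and xCMP are in scope;
	// the condition-specific xCMOVcc instances are defined at the end of this file):
	//   xCMP(eax, ecx);
	//   xCMOVE(eax, edx); // eax = edx when the compare set ZF; CMOVcc has no 8-bit form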

	void xImpl_Set::operator()(const xRegister8& to) const
	{
		ccSane();
		xOpWrite0F(0x90 | ccType, 0, to);
	}

	void xImpl_Set::operator()(const xIndirect8& dest) const
	{
		ccSane();
		xOpWrite0F(0x90 | ccType, 0, dest);
	}
	//void xImpl_Set::operator()( const xDirectOrIndirect8& dest ) const { ccSane(); _DoI_helpermess( *this, dest ); }
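
	// Usage sketch (hypothetical; assumes 8-bit register globals such as al are in scope):
	//   xCMP(eax, 0);
	//   xSETE(al); // al = 1 if eax compared equal to zero, else al = 0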

	void xImpl_MovExtend::operator()(const xRegister16or32or64& to, const xRegister8& from) const
	{
		EbpAssert();
		xOpWrite0F(
			(to->GetOperandSize() == 2) ? 0x66 : 0,
			SignExtend ? 0xbe : 0xb6,
			to, from);
	}

	void xImpl_MovExtend::operator()(const xRegister16or32or64& to, const xIndirect8& sibsrc) const
	{
		EbpAssert();
		xOpWrite0F(
			(to->GetOperandSize() == 2) ? 0x66 : 0,
			SignExtend ? 0xbe : 0xb6,
			to, sibsrc);
	}

	void xImpl_MovExtend::operator()(const xRegister32or64& to, const xRegister16& from) const
	{
		EbpAssert();
		xOpWrite0F(SignExtend ? 0xbf : 0xb7, to, from);
	}

	void xImpl_MovExtend::operator()(const xRegister32or64& to, const xIndirect16& sibsrc) const
	{
		EbpAssert();
		xOpWrite0F(SignExtend ? 0xbf : 0xb7, to, sibsrc);
	}
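
	// Usage sketch (hypothetical; assumes the usual register globals and the ptr16 indexer are in scope):
	//   xMOVZX(eax, cl);         // zero-extend an 8-bit register into a 32-bit one
	//   xMOVSX(eax, ptr16[rax]); // sign-extend a 16-bit memory operand into a 32-bit register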

	void xImpl_MovExtend::operator()(const xRegister64& to, const xRegister32& from) const
	{
		EbpAssert();
		pxAssertMsg(SignExtend, "Use mov for 64-bit movzx");
		xOpWrite(0, 0x63, to, from);
	}

	void xImpl_MovExtend::operator()(const xRegister64& to, const xIndirect32& sibsrc) const
	{
		EbpAssert();
		pxAssertMsg(SignExtend, "Use mov for 64-bit movzx");
		xOpWrite(0, 0x63, to, sibsrc);
	}
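
	// Both 64 <- 32 overloads emit MOVSXD (opcode 0x63). A zero-extending variant is
	// unnecessary because writing a 32-bit register on x86-64 already clears the upper
	// 32 bits, hence the asserts above steering callers toward a plain mov instead.
	// Usage sketch (hypothetical): xMOVSX(rax, ecx); // rax = ecx, sign-extended to 64 bits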

	const xImpl_MovExtend xMOVSX = {true};
	const xImpl_MovExtend xMOVZX = {false};

	const xImpl_CMov xCMOVA = {Jcc_Above};
	const xImpl_CMov xCMOVAE = {Jcc_AboveOrEqual};
	const xImpl_CMov xCMOVB = {Jcc_Below};
	const xImpl_CMov xCMOVBE = {Jcc_BelowOrEqual};
	const xImpl_CMov xCMOVG = {Jcc_Greater};
	const xImpl_CMov xCMOVGE = {Jcc_GreaterOrEqual};
	const xImpl_CMov xCMOVL = {Jcc_Less};
	const xImpl_CMov xCMOVLE = {Jcc_LessOrEqual};
	const xImpl_CMov xCMOVZ = {Jcc_Zero};
	const xImpl_CMov xCMOVE = {Jcc_Equal};
	const xImpl_CMov xCMOVNZ = {Jcc_NotZero};
	const xImpl_CMov xCMOVNE = {Jcc_NotEqual};
	const xImpl_CMov xCMOVO = {Jcc_Overflow};
	const xImpl_CMov xCMOVNO = {Jcc_NotOverflow};
	const xImpl_CMov xCMOVC = {Jcc_Carry};
	const xImpl_CMov xCMOVNC = {Jcc_NotCarry};
	const xImpl_CMov xCMOVS = {Jcc_Signed};
	const xImpl_CMov xCMOVNS = {Jcc_Unsigned};
	const xImpl_CMov xCMOVPE = {Jcc_ParityEven};
	const xImpl_CMov xCMOVPO = {Jcc_ParityOdd};

	const xImpl_Set xSETA = {Jcc_Above};
	const xImpl_Set xSETAE = {Jcc_AboveOrEqual};
	const xImpl_Set xSETB = {Jcc_Below};
	const xImpl_Set xSETBE = {Jcc_BelowOrEqual};
	const xImpl_Set xSETG = {Jcc_Greater};
	const xImpl_Set xSETGE = {Jcc_GreaterOrEqual};
	const xImpl_Set xSETL = {Jcc_Less};
	const xImpl_Set xSETLE = {Jcc_LessOrEqual};
	const xImpl_Set xSETZ = {Jcc_Zero};
	const xImpl_Set xSETE = {Jcc_Equal};
	const xImpl_Set xSETNZ = {Jcc_NotZero};
	const xImpl_Set xSETNE = {Jcc_NotEqual};
	const xImpl_Set xSETO = {Jcc_Overflow};
	const xImpl_Set xSETNO = {Jcc_NotOverflow};
	const xImpl_Set xSETC = {Jcc_Carry};
	const xImpl_Set xSETNC = {Jcc_NotCarry};
	const xImpl_Set xSETS = {Jcc_Signed};
	const xImpl_Set xSETNS = {Jcc_Unsigned};
	const xImpl_Set xSETPE = {Jcc_ParityEven};
	const xImpl_Set xSETPO = {Jcc_ParityOdd};
} // end namespace x86Emitter