mirror of https://github.com/PCSX2/pcsx2.git

Clang Format VU files

commit bda80fc748
parent 553d8ccbb4

pcsx2/VU.h | 34
--- pcsx2/VU.h ---

@@ -38,17 +38,21 @@ enum VURegFlags
 //#define INT_VUSTALLHACK //some games work without those, big speedup
 //#define INT_VUDOUBLEHACK

-enum VUStatus {
+enum VUStatus
+{
 VU_Ready = 0,
 VU_Run = 1,
 VU_Stop = 2,
 };

-union VECTOR {
-struct {
+union VECTOR
+{
+struct
+{
 float x, y, z, w;
 } f;
-struct {
+struct
+{
 u32 x, y, z, w;
 } i;

@@ -66,8 +70,10 @@ union VECTOR {
 s8 SC[16];
 };

-struct REG_VI {
-union {
+struct REG_VI
+{
+union
+{
 float F;
 s32 SL;
 u32 UL;
@@ -83,7 +89,8 @@ struct REG_VI {
 //#define VUFLAG_BREAKONMFLAG 0x00000001
 #define VUFLAG_MFLAGSET 0x00000002
 #define VUFLAG_INTCINTERRUPT 0x00000004
-struct fdivPipe {
+struct fdivPipe
+{
 int enable;
 REG_VI reg;
 u32 sCycle;
@@ -91,14 +98,16 @@ struct fdivPipe {
 u32 statusflag;
 };

-struct efuPipe {
+struct efuPipe
+{
 int enable;
 REG_VI reg;
 u32 sCycle;
 u32 Cycle;
 };

-struct fmacPipe {
+struct fmacPipe
+{
 int enable;
 int reg;
 int xyzw;
@@ -109,14 +118,16 @@ struct fmacPipe {
 u32 clipflag;
 };

-struct ialuPipe {
+struct ialuPipe
+{
 int enable;
 int reg;
 u32 sCycle;
 u32 Cycle;
 };

-struct __aligned16 VURegs {
+struct __aligned16 VURegs
+{
 VECTOR VF[32]; // VF and VI need to be first in this struct for proper mapping
 REG_VI VI[32]; // needs to be 128bit x 32 (cottonvibes)

@@ -209,4 +220,3 @@ inline bool VURegs::IsVU1() const { return this == &vuRegs[1]; }
 inline bool VURegs::IsVU0() const { return this == &vuRegs[0]; }

 extern u32* GET_VU_MEM(VURegs* VU, u32 addr);
-
--- next file (name not preserved in this mirror) ---

@@ -19,7 +19,8 @@
 #include "MTVU.h"

 // Executes a Block based on EE delta time
-void BaseVUmicroCPU::ExecuteBlock(bool startUp) {
+void BaseVUmicroCPU::ExecuteBlock(bool startUp)
+{
 const u32& stat = VU0.VI[REG_VPU_STAT].UL;
 const int test = m_Idx ? 0x100 : 1;
 const int s = EmuConfig.Gamefixes.VUKickstartHack ? 16 : 0; // Kick Start Cycles (Jak needs at least 4 due to writing values after they're read
@@ -30,12 +31,15 @@ void BaseVUmicroCPU::ExecuteBlock(bool startUp) {
 return;
 }

-if (!(stat & test)) return;
+if (!(stat & test))
+return;

-if (startUp && s) { // Start Executing a microprogram
+if (startUp && s)
+{ // Start Executing a microprogram
 Execute(s); // Kick start VU
 }
-else { // Continue Executing
+else
+{ // Continue Executing
 u32 cycle = m_Idx ? VU1.cycle : VU0.cycle;
 s32 delta = (s32)(u32)(cpuRegs.cycle - cycle);
 s32 nextblockcycles = m_Idx ? VU1.nextBlockCycles : VU0.nextBlockCycles;
@@ -52,14 +56,17 @@ void BaseVUmicroCPU::ExecuteBlock(bool startUp) {
 // EE data to VU0's registers. We want to run VU0 Micro right after this
 // to ensure that the register is used at the correct time.
 // This fixes spinning/hanging in some games like Ratchet and Clank's Intro.
-void BaseVUmicroCPU::ExecuteBlockJIT(BaseVUmicroCPU* cpu) {
+void BaseVUmicroCPU::ExecuteBlockJIT(BaseVUmicroCPU* cpu)
+{
 const u32& stat = VU0.VI[REG_VPU_STAT].UL;
 const int test = 1;

-if (stat & test) { // VU is running
+if (stat & test)
+{ // VU is running
 s32 delta = (s32)(u32)(cpuRegs.cycle - VU0.cycle);

-if (delta > 0) { // Enough time has passed
+if (delta > 0)
+{ // Enough time has passed
 cpu->Execute(delta); // Execute the time since the last call
 }
 }
--- next file (name not preserved in this mirror) ---

@@ -26,13 +26,15 @@
 static u8 __pagealigned vu0_RecDispatchers[mVUdispCacheSize];
 static u8 __pagealigned vu1_RecDispatchers[mVUdispCacheSize];

-static __fi void mVUthrowHardwareDeficiency(const wxChar* extFail, int vuIndex) {
+static __fi void mVUthrowHardwareDeficiency(const wxChar* extFail, int vuIndex)
+{
 throw Exception::HardwareDeficiency()
 .SetDiagMsg(pxsFmt(L"microVU%d recompiler init failed: %s is not available.", vuIndex, extFail))
 .SetUserMsg(pxsFmt(_("%s Extensions not found. microVU requires a host CPU with SSE2 extensions."), extFail));
 }

-void mVUreserveCache(microVU& mVU) {
+void mVUreserveCache(microVU& mVU)
+{

 mVU.cache_reserve = new RecompiledCodeReserve(pxsFmt("Micro VU%u Recompiler Cache", mVU.index), _16mb);
 mVU.cache_reserve->SetProfilerName(pxsFmt("mVU%urec", mVU.index));
@@ -45,9 +47,11 @@ void mVUreserveCache(microVU& mVU) {
 }

 // Only run this once per VU! ;)
-void mVUinit(microVU& mVU, uint vuIndex) {
+void mVUinit(microVU& mVU, uint vuIndex)
+{

-if(!x86caps.hasStreamingSIMD4Extensions) mVUthrowHardwareDeficiency( L"SSE4", vuIndex );
+if (!x86caps.hasStreamingSIMD4Extensions)
+mVUthrowHardwareDeficiency(L"SSE4", vuIndex);

 memzero(mVU.prog);

@@ -65,14 +69,17 @@ void mVUinit(microVU& mVU, uint vuIndex) {

 mVUreserveCache(mVU);

-if (vuIndex) mVU.dispCache = vu1_RecDispatchers;
-else mVU.dispCache = vu0_RecDispatchers;
+if (vuIndex)
+mVU.dispCache = vu1_RecDispatchers;
+else
+mVU.dispCache = vu0_RecDispatchers;

 mVU.regAlloc.reset(new microRegAlloc(mVU.index));
 }

 // Resets Rec Data
-void mVUreset(microVU& mVU, bool resetReserve) {
+void mVUreset(microVU& mVU, bool resetReserve)
+{

 if (THREAD_VU1)
 {
@@ -85,7 +92,8 @@ void mVUreset(microVU& mVU, bool resetReserve) {
 VU0.VI[REG_VPU_STAT].UL &= ~0x100;
 }
 // Restore reserve to uncommitted state
-if (resetReserve) mVU.cache_reserve->Reset();
+if (resetReserve)
+mVU.cache_reserve->Reset();

 HostSys::MemProtect(mVU.dispCache, mVUdispCacheSize, PageAccess_ReadWrite());
 memset(mVU.dispCache, 0xcc, mVUdispCacheSize);
@@ -115,13 +123,16 @@ void mVUreset(microVU& mVU, bool resetReserve) {
 mVU.prog.x86end = z + ((mVU.cacheSize - mVUcacheSafeZone) * _1mb);
 //memset(mVU.prog.x86start, 0xcc, mVU.cacheSize*_1mb);

-for(u32 i = 0; i < (mVU.progSize / 2); i++) {
-if(!mVU.prog.prog[i]) {
+for (u32 i = 0; i < (mVU.progSize / 2); i++)
+{
+if (!mVU.prog.prog[i])
+{
 mVU.prog.prog[i] = new std::deque<microProgram*>();
 continue;
 }
 std::deque<microProgram*>::iterator it(mVU.prog.prog[i]->begin());
-for ( ; it != mVU.prog.prog[i]->end(); ++it) {
+for (; it != mVU.prog.prog[i]->end(); ++it)
+{
 mVUdeleteProg(mVU, it[0]);
 }
 mVU.prog.prog[i]->clear();
@@ -131,20 +142,26 @@ void mVUreset(microVU& mVU, bool resetReserve) {

 HostSys::MemProtect(mVU.dispCache, mVUdispCacheSize, PageAccess_ExecOnly());

-if (mVU.index) Perf::any.map((uptr)&mVU.dispCache, mVUdispCacheSize, "mVU1 Dispatcher");
-else Perf::any.map((uptr)&mVU.dispCache, mVUdispCacheSize, "mVU0 Dispatcher");
+if (mVU.index)
+Perf::any.map((uptr)&mVU.dispCache, mVUdispCacheSize, "mVU1 Dispatcher");
+else
+Perf::any.map((uptr)&mVU.dispCache, mVUdispCacheSize, "mVU0 Dispatcher");
 }

 // Free Allocated Resources
-void mVUclose(microVU& mVU) {
+void mVUclose(microVU& mVU)
+{

 safe_delete(mVU.cache_reserve);

 // Delete Programs and Block Managers
-for (u32 i = 0; i < (mVU.progSize / 2); i++) {
-if (!mVU.prog.prog[i]) continue;
+for (u32 i = 0; i < (mVU.progSize / 2); i++)
+{
+if (!mVU.prog.prog[i])
+continue;
 std::deque<microProgram*>::iterator it(mVU.prog.prog[i]->begin());
-for ( ; it != mVU.prog.prog[i]->end(); ++it) {
+for (; it != mVU.prog.prog[i]->end(); ++it)
+{
 mVUdeleteProg(mVU, it[0]);
 }
 safe_delete(mVU.prog.prog[i]);
@@ -152,11 +169,14 @@ void mVUclose(microVU& mVU) {
 }

 // Clears Block Data in specified range
-__fi void mVUclear(mV, u32 addr, u32 size) {
-if(!mVU.prog.cleared) {
+__fi void mVUclear(mV, u32 addr, u32 size)
+{
+if (!mVU.prog.cleared)
+{
 mVU.prog.cleared = 1; // Next execution searches/creates a new microprogram
 memzero(mVU.prog.lpState); // Clear pipeline state
-for(u32 i = 0; i < (mVU.progSize / 2); i++) {
+for (u32 i = 0; i < (mVU.progSize / 2); i++)
+{
 mVU.prog.quick[i].block = NULL; // Clear current quick-reference block
 mVU.prog.quick[i].prog = NULL; // Clear current quick-reference prog
 }
@@ -168,13 +188,16 @@ __fi void mVUclear(mV, u32 addr, u32 size) {
 //------------------------------------------------------------------

 // Finds and Ages/Kills Programs if they haven't been used in a while.
-__ri void mVUvsyncUpdate(mV) {
+__ri void mVUvsyncUpdate(mV)
+{
 //mVU.prog.curFrame++;
 }

 // Deletes a program
-__ri void mVUdeleteProg(microVU& mVU, microProgram*& prog) {
-for (u32 i = 0; i < (mVU.progSize / 2); i++) {
+__ri void mVUdeleteProg(microVU& mVU, microProgram*& prog)
+{
+for (u32 i = 0; i < (mVU.progSize / 2); i++)
+{
 safe_delete(prog->block[i]);
 }
 safe_delete(prog->ranges);
@@ -182,7 +205,8 @@ __ri void mVUdeleteProg(microVU& mVU, microProgram*& prog) {
 }

 // Creates a new Micro Program
-__ri microProgram* mVUcreateProg(microVU& mVU, int startPC) {
+__ri microProgram* mVUcreateProg(microVU& mVU, int startPC)
+{
 microProgram* prog = (microProgram*)_aligned_malloc(sizeof(microProgram), 64);
 memset(prog, 0, sizeof(microProgram));
 prog->idx = mVU.prog.total++;
@@ -199,23 +223,33 @@ __ri microProgram* mVUcreateProg(microVU& mVU, int startPC) {
 }

 // Caches Micro Program
-__ri void mVUcacheProg(microVU& mVU, microProgram& prog) {
-if (!mVU.index) memcpy(prog.data, mVU.regs().Micro, 0x1000);
-else memcpy(prog.data, mVU.regs().Micro, 0x4000);
+__ri void mVUcacheProg(microVU& mVU, microProgram& prog)
+{
+if (!mVU.index)
+memcpy(prog.data, mVU.regs().Micro, 0x1000);
+else
+memcpy(prog.data, mVU.regs().Micro, 0x4000);
 mVUdumpProg(mVU, prog);
 }

 // Generate Hash for partial program based on compiled ranges...
-u64 mVUrangesHash(microVU& mVU, microProgram& prog) {
-union {
+u64 mVUrangesHash(microVU& mVU, microProgram& prog)
+{
+union
+{
 u64 v64;
 u32 v32[2];
 } hash = {0};

 std::deque<microRange>::const_iterator it(prog.ranges->begin());
-for ( ; it != prog.ranges->end(); ++it) {
-if((it[0].start<0)||(it[0].end<0)) { DevCon.Error("microVU%d: Negative Range![%d][%d]", mVU.index, it[0].start, it[0].end); }
-for(int i = it[0].start/4; i < it[0].end/4; i++) {
+for (; it != prog.ranges->end(); ++it)
+{
+if ((it[0].start < 0) || (it[0].end < 0))
+{
+DevCon.Error("microVU%d: Negative Range![%d][%d]", mVU.index, it[0].start, it[0].end);
+}
+for (int i = it[0].start / 4; i < it[0].end / 4; i++)
+{
 hash.v32[0] -= prog.data[i];
 hash.v32[1] ^= prog.data[i];
 }
@@ -224,25 +258,31 @@ u64 mVUrangesHash(microVU& mVU, microProgram& prog) {
 }

 // Prints the ratio of unique programs to total programs
-void mVUprintUniqueRatio(microVU& mVU) {
+void mVUprintUniqueRatio(microVU& mVU)
+{
 std::vector<u64> v;
-for(u32 pc = 0; pc < mProgSize/2; pc++) {
+for (u32 pc = 0; pc < mProgSize / 2; pc++)
+{
 microProgramList* list = mVU.prog.prog[pc];
-if (!list) continue;
+if (!list)
+continue;
 std::deque<microProgram*>::iterator it(list->begin());
-for ( ; it != list->end(); ++it) {
+for (; it != list->end(); ++it)
+{
 v.push_back(mVUrangesHash(mVU, *it[0]));
 }
 }
 u32 total = v.size();
 sortVector(v);
 makeUnique(v);
-if (!total) return;
+if (!total)
+return;
 DevCon.WriteLn("%d / %d [%3.1f%%]", v.size(), total, 100. - (double)v.size() / (double)total * 100.);
 }

 // Compare Cached microProgram to mVU.regs().Micro
-__fi bool mVUcmpProg(microVU& mVU, microProgram& prog, const bool cmpWholeProg) {
+__fi bool mVUcmpProg(microVU& mVU, microProgram& prog, const bool cmpWholeProg)
+{
 if (cmpWholeProg)
 {
 if (memcmp_mmx((u8*)prog.data, mVU.regs().Micro, mVU.microMemSize))
@@ -250,10 +290,15 @@ __fi bool mVUcmpProg(microVU& mVU, microProgram& prog, const bool cmpWholeProg)
 }
 else
 {
-for (const auto& range : *prog.ranges) {
+for (const auto& range : *prog.ranges)
+{
 auto cmpOffset = [&](void* x) { return (u8*)x + range.start; };
-if ((range.start < 0) || (range.end < 0)) { DevCon.Error("microVU%d: Negative Range![%d][%d]", mVU.index, range.start, range.end); }
-if (memcmp_mmx(cmpOffset(prog.data), cmpOffset(mVU.regs().Micro), (range.end - range.start))) {
+if ((range.start < 0) || (range.end < 0))
+{
+DevCon.Error("microVU%d: Negative Range![%d][%d]", mVU.index, range.start, range.end);
+}
+if (memcmp_mmx(cmpOffset(prog.data), cmpOffset(mVU.regs().Micro), (range.end - range.start)))
+{
 return false;
 }
 }
@@ -265,17 +310,21 @@ __fi bool mVUcmpProg(microVU& mVU, microProgram& prog, const bool cmpWholeProg)
 }

 // Searches for Cached Micro Program and sets prog.cur to it (returns entry-point to program)
-_mVUt __fi void* mVUsearchProg(u32 startPC, uptr pState) {
+_mVUt __fi void* mVUsearchProg(u32 startPC, uptr pState)
+{
 microVU& mVU = mVUx;
 microProgramQuick& quick = mVU.prog.quick[mVU.regs().start_pc / 8];
 microProgramList* list = mVU.prog.prog[mVU.regs().start_pc / 8];

-if(!quick.prog) { // If null, we need to search for new program
+if (!quick.prog)
+{ // If null, we need to search for new program
 std::deque<microProgram*>::iterator it(list->begin());
-for ( ; it != list->end(); ++it) {
+for (; it != list->end(); ++it)
+{
 bool b = mVUcmpProg(mVU, *it[0], 0);

-if (b) {
+if (b)
+{
 quick.block = it[0]->block[startPC / 8];
 quick.prog = it[0];
 list->erase(it);
@@ -322,39 +371,57 @@ _mVUt __fi void* mVUsearchProg(u32 startPC, uptr pState) {
 //------------------------------------------------------------------
 // recMicroVU0 / recMicroVU1
 //------------------------------------------------------------------
-recMicroVU0::recMicroVU0() { m_Idx = 0; IsInterpreter = false; }
-recMicroVU1::recMicroVU1() { m_Idx = 1; IsInterpreter = false; }
+recMicroVU0::recMicroVU0()
+{
+m_Idx = 0;
+IsInterpreter = false;
+}
+recMicroVU1::recMicroVU1()
+{
+m_Idx = 1;
+IsInterpreter = false;
+}
 void recMicroVU0::Vsync() noexcept { mVUvsyncUpdate(microVU0); }
 void recMicroVU1::Vsync() noexcept { mVUvsyncUpdate(microVU1); }

-void recMicroVU0::Reserve() {
+void recMicroVU0::Reserve()
+{
 if (m_Reserved.exchange(1) == 0)
 mVUinit(microVU0, 0);
 }
-void recMicroVU1::Reserve() {
-if (m_Reserved.exchange(1) == 0) {
+void recMicroVU1::Reserve()
+{
+if (m_Reserved.exchange(1) == 0)
+{
 mVUinit(microVU1, 1);
 vu1Thread.Start();
 }
 }

-void recMicroVU0::Shutdown() noexcept {
+void recMicroVU0::Shutdown() noexcept
+{
 if (m_Reserved.exchange(0) == 1)
 mVUclose(microVU0);
 }
-void recMicroVU1::Shutdown() noexcept {
-if (m_Reserved.exchange(0) == 1) {
+void recMicroVU1::Shutdown() noexcept
+{
+if (m_Reserved.exchange(0) == 1)
+{
 vu1Thread.WaitVU();
 mVUclose(microVU1);
 }
 }

-void recMicroVU0::Reset() {
-if(!pxAssertDev(m_Reserved, "MicroVU0 CPU Provider has not been reserved prior to reset!")) return;
+void recMicroVU0::Reset()
+{
+if (!pxAssertDev(m_Reserved, "MicroVU0 CPU Provider has not been reserved prior to reset!"))
+return;
 mVUreset(microVU0, true);
 }
-void recMicroVU1::Reset() {
-if(!pxAssertDev(m_Reserved, "MicroVU1 CPU Provider has not been reserved prior to reset!")) return;
+void recMicroVU1::Reset()
+{
+if (!pxAssertDev(m_Reserved, "MicroVU1 CPU Provider has not been reserved prior to reset!"))
+return;
 vu1Thread.WaitVU();
 vu1Thread.Get_MTVUChanges();
 mVUreset(microVU1, true);
@@ -365,12 +432,14 @@ void recMicroVU0::SetStartPC(u32 startPC)
 VU0.start_pc = startPC;
 }

-void recMicroVU0::Execute(u32 cycles) {
+void recMicroVU0::Execute(u32 cycles)
+{
 pxAssert(m_Reserved); // please allocate me first! :|

 VU0.flags &= ~VUFLAG_MFLAGSET;

-if(!(VU0.VI[REG_VPU_STAT].UL & 1)) return;
+if (!(VU0.VI[REG_VPU_STAT].UL & 1))
+return;
 VU0.VI[REG_TPC].UL <<= 3;

 // Sometimes games spin on vu0, so be careful with this value
@@ -390,11 +459,14 @@ void recMicroVU1::SetStartPC(u32 startPC)
 VU1.start_pc = startPC;
 }

-void recMicroVU1::Execute(u32 cycles) {
+void recMicroVU1::Execute(u32 cycles)
+{
 pxAssert(m_Reserved); // please allocate me first! :|

-if (!THREAD_VU1) {
-if(!(VU0.VI[REG_VPU_STAT].UL & 0x100)) return;
+if (!THREAD_VU1)
+{
+if (!(VU0.VI[REG_VPU_STAT].UL & 0x100))
+return;
 }
 VU1.VI[REG_TPC].UL <<= 3;
 ((mVUrecCall)microVU1.startFunct)(VU1.VI[REG_TPC].UL, cycles);
@@ -406,38 +478,46 @@ void recMicroVU1::Execute(u32 cycles) {
 }
 }

-void recMicroVU0::Clear(u32 addr, u32 size) {
+void recMicroVU0::Clear(u32 addr, u32 size)
+{
 pxAssert(m_Reserved); // please allocate me first! :|
 mVUclear(microVU0, addr, size);
 }
-void recMicroVU1::Clear(u32 addr, u32 size) {
+void recMicroVU1::Clear(u32 addr, u32 size)
+{
 pxAssert(m_Reserved); // please allocate me first! :|
 mVUclear(microVU1, addr, size);
 }

-uint recMicroVU0::GetCacheReserve() const {
+uint recMicroVU0::GetCacheReserve() const
+{
 return microVU0.cacheSize;
 }
-uint recMicroVU1::GetCacheReserve() const {
+uint recMicroVU1::GetCacheReserve() const
+{
 return microVU1.cacheSize;
 }

-void recMicroVU0::SetCacheReserve(uint reserveInMegs) const {
+void recMicroVU0::SetCacheReserve(uint reserveInMegs) const
+{
 DevCon.WriteLn("microVU0: Changing cache size [%dmb]", reserveInMegs);
 microVU0.cacheSize = std::min(reserveInMegs, mVU0cacheReserve);
 safe_delete(microVU0.cache_reserve); // I assume this unmaps the memory
 mVUreserveCache(microVU0); // Need rec-reset after this
 }
-void recMicroVU1::SetCacheReserve(uint reserveInMegs) const {
+void recMicroVU1::SetCacheReserve(uint reserveInMegs) const
+{
 DevCon.WriteLn("microVU1: Changing cache size [%dmb]", reserveInMegs);
 microVU1.cacheSize = std::min(reserveInMegs, mVU1cacheReserve);
 safe_delete(microVU1.cache_reserve); // I assume this unmaps the memory
 mVUreserveCache(microVU1); // Need rec-reset after this
 }

-void recMicroVU1::ResumeXGkick() {
+void recMicroVU1::ResumeXGkick()
+{
 pxAssert(m_Reserved); // please allocate me first! :|

-if(!(VU0.VI[REG_VPU_STAT].UL & 0x100)) return;
+if (!(VU0.VI[REG_VPU_STAT].UL & 0x100))
+return;
 ((mVUrecCallXG)microVU1.startFunctXG)();
 }
--- next file (name not preserved in this mirror) ---

@@ -18,16 +18,28 @@
 extern void mVUincCycles(microVU& mVU, int x);
 extern void* mVUcompile(microVU& mVU, u32 startPC, uptr pState);
 extern void* mVUcompileSingleInstruction(microVU& mVU, u32 startPC, uptr pState, microFlagCycles& mFC);
-__fi int getLastFlagInst(microRegInfo& pState, int* xFlag, int flagType, int isEbit) {
-if (isEbit) return findFlagInst(xFlag, 0x7fffffff);
-if (pState.needExactMatch & (1<<flagType)) return 3;
+__fi int getLastFlagInst(microRegInfo& pState, int* xFlag, int flagType, int isEbit)
+{
+if (isEbit)
+return findFlagInst(xFlag, 0x7fffffff);
+if (pState.needExactMatch & (1 << flagType))
+return 3;
 return (((pState.flagInfo >> (2 * flagType + 2)) & 3) - 1) & 3;
 }

-void mVU0clearlpStateJIT() { if (!microVU0.prog.cleared) memzero(microVU0.prog.lpState); }
-void mVU1clearlpStateJIT() { if (!microVU1.prog.cleared) memzero(microVU1.prog.lpState); }
+void mVU0clearlpStateJIT()
+{
+if (!microVU0.prog.cleared)
+memzero(microVU0.prog.lpState);
+}
+void mVU1clearlpStateJIT()
+{
+if (!microVU1.prog.cleared)
+memzero(microVU1.prog.lpState);
+}

-void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {
+void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit)
+{

 int fStatus = getLastFlagInst(mVUpBlock->pState, mFC->xStatus, 0, isEbit);
 int fMac = getLastFlagInst(mVUpBlock->pState, mFC->xMac, 1, isEbit);
@@ -39,35 +51,47 @@ void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {

 mVU.regAlloc->TDwritebackAll(); //Writing back ok, invalidating early kills the rec, so don't do it :P

-if (isEbit) {
+if (isEbit)
+{
 /*memzero(mVUinfo);
 memzero(mVUregsTemp);*/
 mVUincCycles(mVU, 100); // Ensures Valid P/Q instances (And sets all cycle data to 0)
 mVUcycles -= 100;
 qInst = mVU.q;
 pInst = mVU.p;
-if (mVUinfo.doDivFlag) {
+if (mVUinfo.doDivFlag)
+{
 sFLAG.doFlag = true;
 sFLAG.write = fStatus;
 mVUdivSet(mVU);
 }
 //Run any pending XGKick, providing we've got to it.
-if (mVUinfo.doXGKICK && xPC >= mVUinfo.XGKICKPC) {
+if (mVUinfo.doXGKICK && xPC >= mVUinfo.XGKICKPC)
+{
 mVU_XGKICK_DELAY(mVU);
 }
-if (!isVU1) xFastCall((void*)mVU0clearlpStateJIT);
-else xFastCall((void*)mVU1clearlpStateJIT);
+if (!isVU1)
+xFastCall((void*)mVU0clearlpStateJIT);
+else
+xFastCall((void*)mVU1clearlpStateJIT);
 }

 // Save P/Q Regs
-if (qInst) { xPSHUF.D(xmmPQ, xmmPQ, 0xe1); }
+if (qInst)
+{
+xPSHUF.D(xmmPQ, xmmPQ, 0xe1);
+}
 xMOVSS(ptr32[&mVU.regs().VI[REG_Q].UL], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0xe1);
 xMOVSS(ptr32[&mVU.regs().pending_q], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0xe1);

-if (isVU1) {
-if (pInst) { xPSHUF.D(xmmPQ, xmmPQ, 0xb4); } // Swap Pending/Active P
+if (isVU1)
+{
+if (pInst)
+{
+xPSHUF.D(xmmPQ, xmmPQ, 0xb4);
+} // Swap Pending/Active P
 xPSHUF.D(xmmPQ, xmmPQ, 0xC6); // 3 0 1 2
 xMOVSS(ptr32[&mVU.regs().VI[REG_P].UL], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0x87); // 0 2 1 3
@@ -83,7 +107,8 @@ void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {
 xMOV(ptr32[&mVU.regs().VI[REG_MAC_FLAG].UL], gprT1);
 xMOV(ptr32[&mVU.regs().VI[REG_CLIP_FLAG].UL], gprT2);

-if (!isEbit) { // Backup flag instances
+if (!isEbit)
+{ // Backup flag instances
 xMOVAPS(xmmT1, ptr128[mVU.macFlag]);
 xMOVAPS(ptr128[&mVU.regs().micro_macflags], xmmT1);
 xMOVAPS(xmmT1, ptr128[mVU.clipFlag]);
@@ -93,7 +118,9 @@ void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {
 xMOV(ptr32[&mVU.regs().micro_statusflags[1]], gprF1);
 xMOV(ptr32[&mVU.regs().micro_statusflags[2]], gprF2);
 xMOV(ptr32[&mVU.regs().micro_statusflags[3]], gprF3);
-} else { // Flush flag instances
+}
+else
+{ // Flush flag instances
 xMOVDZX(xmmT1, ptr32[&mVU.regs().VI[REG_CLIP_FLAG].UL]);
 xSHUF.PS(xmmT1, xmmT1, 0);
 xMOVAPS(ptr128[&mVU.regs().micro_clipflags], xmmT1);
@@ -107,9 +134,11 @@ void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {
 xMOVAPS(ptr128[&mVU.regs().micro_statusflags], xmmT1);
 }

-if (isEbit) { // Clear 'is busy' Flags
+if (isEbit)
+{ // Clear 'is busy' Flags
 xMOV(ptr32[&mVU.regs().nextBlockCycles], 0);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xAND(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? ~0x100 : ~0x001)); // VBS0/VBS1 flag
 }
 else
@@ -118,14 +147,16 @@ void mVUDTendProgram(mV, microFlagCycles* mFC, int isEbit) {
 else
 xMOV(ptr32[&mVU.regs().nextBlockCycles], mVUcycles);

-if (isEbit != 2) { // Save PC, and Jump to Exit Point
+if (isEbit != 2)
+{ // Save PC, and Jump to Exit Point
 xMOV(ptr32[&mVU.regs().VI[REG_TPC].UL], xPC);
 xJMP(mVU.exitFunct);
 }
 memcpy(&mVUregs, &stateBackup, sizeof(mVUregs)); //Restore the state for the rest of the recompile
 }

-void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
+void mVUendProgram(mV, microFlagCycles* mFC, int isEbit)
+{

 int fStatus = getLastFlagInst(mVUpBlock->pState, mFC->xStatus, 0, isEbit && isEbit != 3);
 int fMac = getLastFlagInst(mVUpBlock->pState, mFC->xMac, 1, isEbit && isEbit != 3);
@@ -139,19 +170,22 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 else
 mVU.regAlloc->flushAll();

-if (isEbit && isEbit != 3) {
+if (isEbit && isEbit != 3)
+{
 memzero(mVUinfo);
 memzero(mVUregsTemp);
 mVUincCycles(mVU, 100); // Ensures Valid P/Q instances (And sets all cycle data to 0)
 mVUcycles -= 100;
 qInst = mVU.q;
 pInst = mVU.p;
-if (mVUinfo.doDivFlag) {
+if (mVUinfo.doDivFlag)
+{
 sFLAG.doFlag = true;
 sFLAG.write = fStatus;
 mVUdivSet(mVU);
 }
-if (mVUinfo.doXGKICK) {
+if (mVUinfo.doXGKICK)
+{
 mVU_XGKICK_DELAY(mVU);
 }
 if (!isVU1)
@@ -161,14 +195,21 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 }

 // Save P/Q Regs
-if (qInst) { xPSHUF.D(xmmPQ, xmmPQ, 0xe1); }
+if (qInst)
+{
+xPSHUF.D(xmmPQ, xmmPQ, 0xe1);
+}
 xMOVSS(ptr32[&mVU.regs().VI[REG_Q].UL], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0xe1);
 xMOVSS(ptr32[&mVU.regs().pending_q], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0xe1);

-if (isVU1) {
-if (pInst) { xPSHUF.D(xmmPQ, xmmPQ, 0xb4); } // Swap Pending/Active P
+if (isVU1)
+{
+if (pInst)
+{
+xPSHUF.D(xmmPQ, xmmPQ, 0xb4);
+} // Swap Pending/Active P
 xPSHUF.D(xmmPQ, xmmPQ, 0xC6); // 3 0 1 2
 xMOVSS(ptr32[&mVU.regs().VI[REG_P].UL], xmmPQ);
 xPSHUF.D(xmmPQ, xmmPQ, 0x87); // 0 2 1 3
@@ -184,7 +225,8 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 xMOV(ptr32[&mVU.regs().VI[REG_MAC_FLAG].UL], gprT1);
 xMOV(ptr32[&mVU.regs().VI[REG_CLIP_FLAG].UL], gprT2);

-if (!isEbit || isEbit == 3) { // Backup flag instances
+if (!isEbit || isEbit == 3)
+{ // Backup flag instances
 xMOVAPS(xmmT1, ptr128[mVU.macFlag]);
 xMOVAPS(ptr128[&mVU.regs().micro_macflags], xmmT1);
 xMOVAPS(xmmT1, ptr128[mVU.clipFlag]);
@@ -195,7 +237,8 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 xMOV(ptr32[&mVU.regs().micro_statusflags[2]], gprF2);
 xMOV(ptr32[&mVU.regs().micro_statusflags[3]], gprF3);
 }
-else { // Flush flag instances
+else
+{ // Flush flag instances
 xMOVDZX(xmmT1, ptr32[&mVU.regs().VI[REG_CLIP_FLAG].UL]);
 xSHUF.PS(xmmT1, xmmT1, 0);
 xMOVAPS(ptr128[&mVU.regs().micro_clipflags], xmmT1);
@@ -210,9 +253,11 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 }


-if ((isEbit && isEbit != 3)) { // Clear 'is busy' Flags
+if ((isEbit && isEbit != 3))
+{ // Clear 'is busy' Flags
 xMOV(ptr32[&mVU.regs().nextBlockCycles], 0);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xAND(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? ~0x100 : ~0x001)); // VBS0/VBS1 flag
 }
 else
@@ -221,7 +266,8 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 else
 xMOV(ptr32[&mVU.regs().nextBlockCycles], mVUcycles);

-if (isEbit != 2 && isEbit != 3) { // Save PC, and Jump to Exit Point
+if (isEbit != 2 && isEbit != 3)
+{ // Save PC, and Jump to Exit Point
 xMOV(ptr32[&mVU.regs().VI[REG_TPC].UL], xPC);
 xJMP(mVU.exitFunct);
 }
@@ -229,37 +275,54 @@ void mVUendProgram(mV, microFlagCycles* mFC, int isEbit) {
 }

 // Recompiles Code for Proper Flags and Q/P regs on Block Linkings
-void mVUsetupBranch(mV, microFlagCycles& mFC) {
+void mVUsetupBranch(mV, microFlagCycles& mFC)
+{

 mVU.regAlloc->flushAll(); // Flush Allocated Regs
 mVUsetupFlags(mVU, mFC); // Shuffle Flag Instances

 // Shuffle P/Q regs since every block starts at instance #0
-if (mVU.p || mVU.q) { xPSHUF.D(xmmPQ, xmmPQ, shufflePQ); }
+if (mVU.p || mVU.q)
+{
+xPSHUF.D(xmmPQ, xmmPQ, shufflePQ);
+}
 mVU.p = 0, mVU.q = 0;
 }

-void normBranchCompile(microVU& mVU, u32 branchPC) {
+void normBranchCompile(microVU& mVU, u32 branchPC)
+{
 microBlock* pBlock;
 blockCreate(branchPC / 8);
 pBlock = mVUblocks[branchPC / 8]->search((microRegInfo*)&mVUregs);
-if (pBlock) { xJMP(pBlock->x86ptrStart); }
-else { mVUcompile(mVU, branchPC, (uptr)&mVUregs); }
+if (pBlock)
+{
+xJMP(pBlock->x86ptrStart);
+}
+else
+{
+mVUcompile(mVU, branchPC, (uptr)&mVUregs);
+}
 }

-void normJumpCompile(mV, microFlagCycles& mFC, bool isEvilJump) {
+void normJumpCompile(mV, microFlagCycles& mFC, bool isEvilJump)
+{
 memcpy(&mVUpBlock->pStateEnd, &mVUregs, sizeof(microRegInfo));
 mVUsetupBranch(mVU, mFC);
 mVUbackupRegs(mVU);

-if(!mVUpBlock->jumpCache) { // Create the jump cache for this block
+if (!mVUpBlock->jumpCache)
+{ // Create the jump cache for this block
 mVUpBlock->jumpCache = new microJumpCache[mProgSize / 2];
 }

-if (isEvilJump) xMOV(arg1regd, ptr32[&mVU.evilBranch]);
-else xMOV(arg1regd, ptr32[&mVU.branch]);
-if (doJumpCaching) xLoadFarAddr(arg2reg, mVUpBlock);
-else xLoadFarAddr(arg2reg, &mVUpBlock->pStateEnd);
+if (isEvilJump)
+xMOV(arg1regd, ptr32[&mVU.evilBranch]);
+else
+xMOV(arg1regd, ptr32[&mVU.branch]);
+if (doJumpCaching)
+xLoadFarAddr(arg2reg, mVUpBlock);
+else
+xLoadFarAddr(arg2reg, &mVUpBlock->pStateEnd);

 if (mVUup.eBit && isEvilJump) // E-bit EvilJump
 {
@@ -270,14 +333,17 @@ void normJumpCompile(mV, microFlagCycles& mFC, bool isEvilJump) {
 xJMP(mVU.exitFunct);
 }

-if (!mVU.index) xFastCall((void*)(void(*)())mVUcompileJIT<0>, arg1reg, arg2reg); //(u32 startPC, uptr pState)
-else xFastCall((void*)(void(*)())mVUcompileJIT<1>, arg1reg, arg2reg);
+if (!mVU.index)
+xFastCall((void*)(void (*)())mVUcompileJIT<0>, arg1reg, arg2reg); //(u32 startPC, uptr pState)
+else
+xFastCall((void*)(void (*)())mVUcompileJIT<1>, arg1reg, arg2reg);

 mVUrestoreRegs(mVU);
 xJMP(gprT1q); // Jump to rec-code address
 }

-void normBranch(mV, microFlagCycles& mFC) {
+void normBranch(mV, microFlagCycles& mFC)
+{

 // E-bit or T-Bit or D-Bit Branch
 if (mVUup.dBit && doDBitHandling)
@@ -285,7 +351,8 @@ void normBranch(mV, microFlagCycles& mFC) {
 u32 tempPC = iPC;
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x400 : 0x4));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x200 : 0x2));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -299,7 +366,8 @@ void normBranch(mV, microFlagCycles& mFC) {
 u32 tempPC = iPC;
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x800 : 0x8));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x400 : 0x4));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -314,7 +382,8 @@ void normBranch(mV, microFlagCycles& mFC) {
 u32 tempPC = iPC;
 u32* cpS = (u32*)&mVUregs;
 u32* lpS = (u32*)&mVU.prog.lpState;
-for (size_t i = 0; i < (sizeof(microRegInfo) - 4) / 4; i++, lpS++, cpS++) {
+for (size_t i = 0; i < (sizeof(microRegInfo) - 4) / 4; i++, lpS++, cpS++)
+{
 xMOV(ptr32[lpS], cpS[0]);
 }
 mVUsetupBranch(mVU, mFC);
@@ -324,7 +393,8 @@ void normBranch(mV, microFlagCycles& mFC) {
 xJMP(mVU.exitFunct);
 iPC = tempPC;
 }
-if (mVUup.eBit) {
+if (mVUup.eBit)
+{
 if (mVUlow.badBranch)
 DevCon.Warning("End on evil Unconditional branch! - Not implemented! - If game broken report to PCSX2 Team");

@@ -343,7 +413,6 @@ void normBranch(mV, microFlagCycles& mFC) {
 xMOV(gprT3, badBranchAddr);
 xSHR(gprT3, 3);
 mVUallocVIb(mVU, gprT3, _It_);
-
 }
 incPC(-3);
 }
@@ -357,7 +426,8 @@ void normBranch(mV, microFlagCycles& mFC) {
 //This handles JALR/BAL in the delay slot of a conditional branch. We do this because the normal handling
 //Doesn't seem to work properly, even if the link is made to the correct address, so we do it manually instead.
 //Normally EvilBlock handles all this stuff, but something to do with conditionals and links don't quite work right :/
-void condJumpProcessingEvil(mV, microFlagCycles& mFC, int JMPcc) {
+void condJumpProcessingEvil(mV, microFlagCycles& mFC, int JMPcc)
+{

 u32 bPC = iPC - 1; // mVUcompile can modify iPC, mVUpBlock, and mVUregs so back them up
 u32 badBranchAddr;
@@ -391,9 +461,9 @@ void condJumpProcessingEvil(mV, microFlagCycles& mFC, int JMPcc) {
 mVUallocVIb(mVU, gprT3, _It_);

 normJumpCompile(mVU, mFC, true); //Compile evil branch, just in time!
-
 }
-void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
+void condBranch(mV, microFlagCycles& mFC, int JMPcc)
+{
 mVUsetupBranch(mVU, mFC);

 if (mVUup.tBit)
@@ -402,7 +472,8 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 u32 tempPC = iPC;
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x800 : 0x8));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x400 : 0x4));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -425,7 +496,8 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 u32 tempPC = iPC;
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x400 : 0x4));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x200 : 0x2));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -448,7 +520,8 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 u32 tempPC = iPC;
 u32* cpS = (u32*)&mVUregs;
 u32* lpS = (u32*)&mVU.prog.lpState;
-for (size_t i = 0; i < (sizeof(microRegInfo) - 4) / 4; i++, lpS++, cpS++) {
+for (size_t i = 0; i < (sizeof(microRegInfo) - 4) / 4; i++, lpS++, cpS++)
+{
 xMOV(ptr32[lpS], cpS[0]);
 }
 mVUendProgram(mVU, &mFC, 3);
@@ -464,7 +537,8 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 xJMP(mVU.exitFunct);
 iPC = tempPC;
 }
-if (mVUup.eBit) { // Conditional Branch With E-Bit Set
+if (mVUup.eBit)
+{ // Conditional Branch With E-Bit Set
 if (mVUlow.evilBranch)
 DevCon.Warning("End on evil branch! - Not implemented! - If game broken report to PCSX2 Team");

@@ -484,7 +558,8 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 xJMP(mVU.exitFunct);
 return;
 }
-else { // Normal Conditional Branch
+else
+{ // Normal Conditional Branch
 xCMP(ptr16[&mVU.branch], 0);

 incPC(3);
@@ -503,12 +578,14 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 blockCreate(iPC / 2);
 bBlock = mVUblocks[iPC / 2]->search((microRegInfo*)&mVUregs);
 incPC2(-1);
-if (bBlock) { // Branch non-taken has already been compiled
+if (bBlock)
+{ // Branch non-taken has already been compiled
 xJcc(xInvertCond((JccComparisonType)JMPcc), bBlock->x86ptrStart);
 incPC(-3); // Go back to branch opcode (to get branch imm addr)
 normBranchCompile(mVU, branchAddr(mVU));
 }
-else {
+else
+{
 s32* ajmp = xJcc32((JccComparisonType)JMPcc);
 u32 bPC = iPC; // mVUcompile can modify iPC, mVUpBlock, and mVUregs so back them up
 microBlock* pBlock = mVUpBlock;
@@ -525,13 +602,16 @@ void condBranch(mV, microFlagCycles& mFC, int JMPcc) {
 }
 }

-void normJump(mV, microFlagCycles& mFC) {
+void normJump(mV, microFlagCycles& mFC)
+{
 if (mVUup.mBit)
 {
 DevCon.Warning("M-Bit on Jump! Please report if broken");
 }
-if (mVUlow.constJump.isValid) { // Jump Address is Constant
-if (mVUup.eBit) { // E-bit Jump
+if (mVUlow.constJump.isValid)
+{ // Jump Address is Constant
+if (mVUup.eBit)
+{ // E-bit Jump
 iPC = (mVUlow.constJump.regValue * 2) & (mVU.progMemMask);
 mVUendProgram(mVU, &mFC, 1);
 return;
@@ -553,7 +633,6 @@ void normJump(mV, microFlagCycles& mFC) {
 xSHR(gprT1, 3);
 incPC(2);
 mVUallocVIb(mVU, gprT1, _It_);
-
 }
 incPC(-3);
 }
@@ -561,7 +640,8 @@ void normJump(mV, microFlagCycles& mFC) {
 {
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x400 : 0x4));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x200 : 0x2));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -575,7 +655,8 @@ void normJump(mV, microFlagCycles& mFC) {
 {
 xTEST(ptr32[&VU0.VI[REG_FBRST].UL], (isVU1 ? 0x800 : 0x8));
 xForwardJump32 eJMP(Jcc_Zero);
-if (!mVU.index || !THREAD_VU1) {
+if (!mVU.index || !THREAD_VU1)
+{
 xOR(ptr32[&VU0.VI[REG_VPU_STAT].UL], (isVU1 ? 0x400 : 0x4));
 xOR(ptr32[&mVU.regs().flags], VUFLAG_INTCINTERRUPT);
 }
@@ -585,13 +666,15 @@ void normJump(mV, microFlagCycles& mFC) {
 xJMP(mVU.exitFunct);
 eJMP.SetTarget();
 }
-if (mVUup.eBit) { // E-bit Jump
+if (mVUup.eBit)
+{ // E-bit Jump
 mVUendProgram(mVU, &mFC, 2);
 xMOV(gprT1, ptr32[&mVU.branch]);
 xMOV(ptr32[&mVU.regs().VI[REG_TPC].UL], gprT1);
 xJMP(mVU.exitFunct);
 }
-else {
+else
+{
 normJumpCompile(mVU, mFC, false);
 }
 }
@@ -29,7 +29,8 @@ using namespace R5900::Dynarec;
#define printCOP2(...) (void)0
//#define printCOP2 DevCon.Status

void setupMacroOp(int mode, const char* opName)
{
	printCOP2(opName);
	microVU0.cop2 = 1;
	microVU0.prog.IRinfo.curPC = 0;

@@ -37,14 +38,17 @@ void setupMacroOp(int mode, const char* opName) {
	memset(&microVU0.prog.IRinfo.info[0], 0, sizeof(microVU0.prog.IRinfo.info[0]));
	iFlushCall(FLUSH_EVERYTHING);
	microVU0.regAlloc->reset();
	if (mode & 0x01)
	{ // Q-Reg will be Read
		xMOVSSZX(xmmPQ, ptr32[&vu0Regs.VI[REG_Q].UL]);
	}
	if (mode & 0x08)
	{ // Clip Instruction
		microVU0.prog.IRinfo.info[0].cFlag.write = 0xff;
		microVU0.prog.IRinfo.info[0].cFlag.lastWrite = 0xff;
	}
	if (mode & 0x10)
	{ // Update Status/Mac Flags
		microVU0.prog.IRinfo.info[0].sFlag.doFlag = true;
		microVU0.prog.IRinfo.info[0].sFlag.doNonSticky = true;
		microVU0.prog.IRinfo.info[0].sFlag.write = 0;

@@ -58,18 +62,22 @@ void setupMacroOp(int mode, const char* opName) {
	}
}

void endMacroOp(int mode)
{
	if (mode & 0x02)
	{ // Q-Reg was Written To
		xMOVSS(ptr32[&vu0Regs.VI[REG_Q].UL], xmmPQ);
	}
	if (mode & 0x10)
	{ // Status/Mac Flags were Updated
		// Normalize
		mVUallocSFLAGc(eax, gprF0, 0);
		xMOV(ptr32[&vu0Regs.VI[REG_STATUS_FLAG].UL], eax);
	}
	microVU0.regAlloc->flushAll();

	if (mode & 0x10)
	{ // Update VU0 Status/Mac instances after flush to avoid corrupting anything
		mVUallocSFLAGd(&vu0Regs.VI[REG_STATUS_FLAG].UL);
		xMOVDZX(xmmT1, eax);
		xSHUF.PS(xmmT1, xmmT1, 0);

@@ -83,20 +91,27 @@ void endMacroOp(int mode) {
	}
}

#define REC_COP2_mVU0(f, opName, mode) \
	void recV##f() \
	{ \
		setupMacroOp(mode, opName); \
		if (mode & 4) \
		{ \
			mVU_##f(microVU0, 0); \
			if (!microVU0.prog.IRinfo.info[0].lOp.isNOP) \
			{ \
				mVU_##f(microVU0, 1); \
			} \
		} \
		else \
		{ \
			mVU_##f(microVU0, 1); \
		} \
		endMacroOp(mode); \
	}

#define INTERPRETATE_COP2_FUNC(f) \
	void recV##f() \
	{ \
		recCall(V##f); \
		_freeX86regs(); \
	}

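For illustration only: with a hypothetical invocation such as REC_COP2_mVU0(ABS, "VABS", 0) (the real invocations and their opName/mode values are defined elsewhere in the file and are not part of this hunk), the macro above would expand to roughly the following, where mode == 0 takes the else path:

// Hypothetical expansion of REC_COP2_mVU0(ABS, "VABS", 0) -- illustration only,
// not a copy of project code.
void recVABS()
{
	setupMacroOp(0, "VABS");
	if (0 & 4) // always false when mode == 0
	{
		mVU_ABS(microVU0, 0);
		if (!microVU0.prog.IRinfo.info[0].lOp.isNOP)
		{
			mVU_ABS(microVU0, 1);
		}
	}
	else
	{
		mVU_ABS(microVU0, 1);
	}
	endMacroOp(0);
}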
@@ -244,7 +259,8 @@ INTERPRETATE_COP2_FUNC(CALLMSR);
// Macro VU - Branches
//------------------------------------------------------------------

void _setupBranchTest(u32*(jmpType)(u32), bool isLikely)
{
	printCOP2("COP2 Branch");
	_eeFlushAllUnused();
	//xTEST(ptr32[&vif1Regs.stat._u32], 0x4);

@@ -261,9 +277,11 @@ void recBC2TL() { _setupBranchTest(JZ32, true); }
// Macro VU - COP2 Transfer Instructions
//------------------------------------------------------------------

void COP2_Interlock(bool mBitSync)
{

	if (cpuRegs.code & 1)
	{
		iFlushCall(FLUSH_EVERYTHING);
		xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
		xForwardJZ32 skipvuidle;

@@ -272,13 +290,16 @@ void COP2_Interlock(bool mBitSync) {
		xMOV(ptr[&cpuRegs.cycle], eax); // update cycles
		xLoadFarAddr(arg1reg, CpuVU0);
		xFastCall((void*)BaseVUmicroCPU::ExecuteBlockJIT, arg1reg);
		if (mBitSync)
			xFastCall((void*)_vu0WaitMicro);
		else
			xFastCall((void*)_vu0FinishMicro);
		skipvuidle.SetTarget();
	}
}

void TEST_FBRST_RESET(FnType_Void* resetFunct, int vuIndex)
{
	xTEST(eax, (vuIndex) ? 0x200 : 0x002);
	xForwardJZ8 skip;
	xFastCall((void*)resetFunct);

@@ -286,16 +307,19 @@ void TEST_FBRST_RESET(FnType_Void* resetFunct, int vuIndex) {
	skip.SetTarget();
}

static void recCFC2()
{

	printCOP2("CFC2");

	COP2_Interlock(false);
	if (!_Rt_)
		return;

	iFlushCall(FLUSH_EVERYTHING);

	if (!(cpuRegs.code & 1))
	{
		xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
		xForwardJZ32 skipvuidle;
		xMOV(eax, ptr32[&cpuRegs.cycle]);

@@ -311,12 +335,15 @@ static void recCFC2() {
		skipvuidle.SetTarget();
	}

	if (_Rd_ == REG_STATUS_FLAG)
	{ // Normalize Status Flag
		xMOV(eax, ptr32[&vu0Regs.VI[REG_STATUS_FLAG].UL]);
	}
	else
		xMOV(eax, ptr32[&vu0Regs.VI[_Rd_].UL]);

	if (_Rd_ == REG_TPC)
	{ // Divide TPC register value by 8 during copying
		// Ok, this deserves an explanation.
		// Accoring to the official PS2 VU0 coding manual there are 3 ways to execute a micro subroutine on VU0
		// one of which is using the VCALLMSR intruction.
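A hedged sketch of the unit conversion the comment above is describing, assuming (this is not stated in the hunk itself) that TPC holds a byte address into VU0 micro memory while the CMSAR-style start address consumed by VCALLMSR counts 64-bit instruction pairs of 8 bytes each; the helper name below is hypothetical:

// Sketch only: why the copied TPC value is divided by 8.
static u32 tpcToInstructionPairs(u32 tpcBytes)
{
	return tpcBytes >> 3; // bytes -> 8-byte VU instruction pairs, the "divide by 8" mentioned above
}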
@@ -356,25 +383,31 @@ static void recCFC2() {
	// FixMe: Should R-Reg have upper 9 bits 0?
	xMOV(ptr32[&cpuRegs.GPR.r[_Rt_].UL[0]], eax);

	if (_Rd_ >= 16)
	{
		xCDQ(); // Sign Extend
		xMOV(ptr32[&cpuRegs.GPR.r[_Rt_].UL[1]], edx);
	}
	else
		xMOV(ptr32[&cpuRegs.GPR.r[_Rt_].UL[1]], 0);

	// FixMe: I think this is needed, but not sure how it works
	_eeOnWriteReg(_Rt_, 1);
}

static void recCTC2()
{

	printCOP2("CTC2");
	COP2_Interlock(1);
	if (!_Rd_)
		return;

	iFlushCall(FLUSH_EVERYTHING);

	if (!(cpuRegs.code & 1))
	{
		;
		xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
		xForwardJZ32 skipvuidle;
		xMOV(eax, ptr32[&cpuRegs.cycle]);

@@ -390,9 +423,12 @@ static void recCTC2() {
		skipvuidle.SetTarget();
	}

	switch (_Rd_)
	{
		case REG_MAC_FLAG:
		case REG_TPC:
		case REG_VPU_STAT:
			break; // Read Only Regs
		case REG_R:
			xMOV(eax, ptr32[&cpuRegs.GPR.r[_Rt_].UL[0]]);
			xOR(eax, 0x3f800000);

@@ -400,14 +436,15 @@ static void recCTC2() {
			break;
		case REG_STATUS_FLAG:
		{
			if (_Rt_)
			{
				xMOV(eax, ptr32[&cpuRegs.GPR.r[_Rt_].UL[0]]);
				xAND(eax, 0xFC0);
				xAND(ptr32[&vu0Regs.VI[REG_STATUS_FLAG].UL], 0x3F);
				xOR(ptr32[&vu0Regs.VI[REG_STATUS_FLAG].UL], eax);
			}
			else
				xAND(ptr32[&vu0Regs.VI[REG_STATUS_FLAG].UL], 0x3F);

			//Need to update the sticky flags for microVU
			mVUallocSFLAGd(&vu0Regs.VI[REG_STATUS_FLAG].UL);

@@ -420,18 +457,22 @@ static void recCTC2() {
		case REG_CMSAR1: // Execute VU1 Micro SubRoutine
			xMOV(ecx, 1);
			xFastCall((void*)vu1Finish, ecx);
			if (_Rt_)
			{
				xMOV(ecx, ptr32[&cpuRegs.GPR.r[_Rt_].UL[0]]);
			}
			else
				xXOR(ecx, ecx);
			xFastCall((void*)vu1ExecMicro, ecx);
			break;
		case REG_FBRST:
			if (!_Rt_)
			{
				xMOV(ptr32[&vu0Regs.VI[REG_FBRST].UL], 0);
				return;
			}
			else
				xMOV(eax, ptr32[&cpuRegs.GPR.r[_Rt_].UL[0]]);

			TEST_FBRST_RESET(vu0ResetRegs, 0);
			TEST_FBRST_RESET(vu1ResetRegs, 1);

@@ -442,20 +483,24 @@ static void recCTC2() {
		default:
			// Executing vu0 block here fixes the intro of Ratchet and Clank
			// sVU's COP2 has a comment that "Donald Duck" needs this too...
			if (_Rd_)
				_eeMoveGPRtoM((uptr)&vu0Regs.VI[_Rd_].UL, _Rt_);
			break;
	}
}

static void recQMFC2()
{

	printCOP2("QMFC2");
	COP2_Interlock(false);
	if (!_Rt_)
		return;

	iFlushCall(FLUSH_EVERYTHING);

	if (!(cpuRegs.code & 1))
	{
		xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
		xForwardJZ32 skipvuidle;
		xMOV(eax, ptr32[&cpuRegs.cycle]);

@@ -478,15 +523,18 @@ static void recQMFC2() {
	xMOVAPS(ptr128[&cpuRegs.GPR.r[_Rt_]], xmmT1);
}

static void recQMTC2()
{

	printCOP2("QMTC2");
	COP2_Interlock(true);
	if (!_Rd_)
		return;

	iFlushCall(FLUSH_EVERYTHING);

	if (!(cpuRegs.code & 1))
	{
		xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
		xForwardJZ32 skipvuidle;
		xMOV(eax, ptr32[&cpuRegs.cycle]);

@@ -513,7 +561,8 @@ void recCOP2();
void recCOP2_BC2();
void recCOP2_SPEC1();
void recCOP2_SPEC2();
void rec_C2UNK()
{
	Console.Error("Cop2 bad opcode: %x", cpuRegs.code);
}

|
@ -522,54 +571,286 @@ void _vuRegsCOP22(VURegs* VU, _VURegsNum* VUregsn) {}
|
||||||
|
|
||||||
// Recompilation
|
// Recompilation
|
||||||
void (*recCOP2t[32])() = {
|
void (*recCOP2t[32])() = {
|
||||||
rec_C2UNK, recQMFC2, recCFC2, rec_C2UNK, rec_C2UNK, recQMTC2, recCTC2, rec_C2UNK,
|
rec_C2UNK,
|
||||||
recCOP2_BC2, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recQMFC2,
|
||||||
recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1,
|
recCFC2,
|
||||||
recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1, recCOP2_SPEC1,
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
recQMTC2,
|
||||||
|
recCTC2,
|
||||||
|
rec_C2UNK,
|
||||||
|
recCOP2_BC2,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
|
recCOP2_SPEC1,
|
||||||
};
|
};
|
||||||
|
|
||||||
void (*recCOP2_BC2t[32])() = {
|
void (*recCOP2_BC2t[32])() = {
|
||||||
recBC2F, recBC2T, recBC2FL, recBC2TL, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recBC2F,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recBC2T,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recBC2FL,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recBC2TL,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
};
|
};
|
||||||
|
|
||||||
void (*recCOP2SPECIAL1t[64])() = {
|
void (*recCOP2SPECIAL1t[64])() = {
|
||||||
recVADDx, recVADDy, recVADDz, recVADDw, recVSUBx, recVSUBy, recVSUBz, recVSUBw,
|
recVADDx,
|
||||||
recVMADDx, recVMADDy, recVMADDz, recVMADDw, recVMSUBx, recVMSUBy, recVMSUBz, recVMSUBw,
|
recVADDy,
|
||||||
recVMAXx, recVMAXy, recVMAXz, recVMAXw, recVMINIx, recVMINIy, recVMINIz, recVMINIw,
|
recVADDz,
|
||||||
recVMULx, recVMULy, recVMULz, recVMULw, recVMULq, recVMAXi, recVMULi, recVMINIi,
|
recVADDw,
|
||||||
recVADDq, recVMADDq, recVADDi, recVMADDi, recVSUBq, recVMSUBq, recVSUBi, recVMSUBi,
|
recVSUBx,
|
||||||
recVADD, recVMADD, recVMUL, recVMAX, recVSUB, recVMSUB, recVOPMSUB, recVMINI,
|
recVSUBy,
|
||||||
recVIADD, recVISUB, recVIADDI, rec_C2UNK, recVIAND, recVIOR, rec_C2UNK, rec_C2UNK,
|
recVSUBz,
|
||||||
recVCALLMS, recVCALLMSR,rec_C2UNK, rec_C2UNK, recCOP2_SPEC2, recCOP2_SPEC2, recCOP2_SPEC2, recCOP2_SPEC2,
|
recVSUBw,
|
||||||
|
recVMADDx,
|
||||||
|
recVMADDy,
|
||||||
|
recVMADDz,
|
||||||
|
recVMADDw,
|
||||||
|
recVMSUBx,
|
||||||
|
recVMSUBy,
|
||||||
|
recVMSUBz,
|
||||||
|
recVMSUBw,
|
||||||
|
recVMAXx,
|
||||||
|
recVMAXy,
|
||||||
|
recVMAXz,
|
||||||
|
recVMAXw,
|
||||||
|
recVMINIx,
|
||||||
|
recVMINIy,
|
||||||
|
recVMINIz,
|
||||||
|
recVMINIw,
|
||||||
|
recVMULx,
|
||||||
|
recVMULy,
|
||||||
|
recVMULz,
|
||||||
|
recVMULw,
|
||||||
|
recVMULq,
|
||||||
|
recVMAXi,
|
||||||
|
recVMULi,
|
||||||
|
recVMINIi,
|
||||||
|
recVADDq,
|
||||||
|
recVMADDq,
|
||||||
|
recVADDi,
|
||||||
|
recVMADDi,
|
||||||
|
recVSUBq,
|
||||||
|
recVMSUBq,
|
||||||
|
recVSUBi,
|
||||||
|
recVMSUBi,
|
||||||
|
recVADD,
|
||||||
|
recVMADD,
|
||||||
|
recVMUL,
|
||||||
|
recVMAX,
|
||||||
|
recVSUB,
|
||||||
|
recVMSUB,
|
||||||
|
recVOPMSUB,
|
||||||
|
recVMINI,
|
||||||
|
recVIADD,
|
||||||
|
recVISUB,
|
||||||
|
recVIADDI,
|
||||||
|
rec_C2UNK,
|
||||||
|
recVIAND,
|
||||||
|
recVIOR,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
recVCALLMS,
|
||||||
|
recVCALLMSR,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
recCOP2_SPEC2,
|
||||||
|
recCOP2_SPEC2,
|
||||||
|
recCOP2_SPEC2,
|
||||||
|
recCOP2_SPEC2,
|
||||||
};
|
};
|
||||||
|
|
||||||
void (*recCOP2SPECIAL2t[128])() = {
|
void (*recCOP2SPECIAL2t[128])() = {
|
||||||
recVADDAx, recVADDAy, recVADDAz, recVADDAw, recVSUBAx, recVSUBAy, recVSUBAz, recVSUBAw,
|
recVADDAx,
|
||||||
recVMADDAx,recVMADDAy, recVMADDAz, recVMADDAw, recVMSUBAx, recVMSUBAy, recVMSUBAz, recVMSUBAw,
|
recVADDAy,
|
||||||
recVITOF0, recVITOF4, recVITOF12, recVITOF15, recVFTOI0, recVFTOI4, recVFTOI12, recVFTOI15,
|
recVADDAz,
|
||||||
recVMULAx, recVMULAy, recVMULAz, recVMULAw, recVMULAq, recVABS, recVMULAi, recVCLIP,
|
recVADDAw,
|
||||||
recVADDAq, recVMADDAq, recVADDAi, recVMADDAi, recVSUBAq, recVMSUBAq, recVSUBAi, recVMSUBAi,
|
recVSUBAx,
|
||||||
recVADDA, recVMADDA, recVMULA, rec_C2UNK, recVSUBA, recVMSUBA, recVOPMULA, recVNOP,
|
recVSUBAy,
|
||||||
recVMOVE, recVMR32, rec_C2UNK, rec_C2UNK, recVLQI, recVSQI, recVLQD, recVSQD,
|
recVSUBAz,
|
||||||
recVDIV, recVSQRT, recVRSQRT, recVWAITQ, recVMTIR, recVMFIR, recVILWR, recVISWR,
|
recVSUBAw,
|
||||||
recVRNEXT, recVRGET, recVRINIT, recVRXOR, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMADDAx,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMADDAy,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMADDAz,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMADDAw,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMSUBAx,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMSUBAy,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMSUBAz,
|
||||||
rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK, rec_C2UNK,
|
recVMSUBAw,
|
||||||
|
recVITOF0,
|
||||||
|
recVITOF4,
|
||||||
|
recVITOF12,
|
||||||
|
recVITOF15,
|
||||||
|
recVFTOI0,
|
||||||
|
recVFTOI4,
|
||||||
|
recVFTOI12,
|
||||||
|
recVFTOI15,
|
||||||
|
recVMULAx,
|
||||||
|
recVMULAy,
|
||||||
|
recVMULAz,
|
||||||
|
recVMULAw,
|
||||||
|
recVMULAq,
|
||||||
|
recVABS,
|
||||||
|
recVMULAi,
|
||||||
|
recVCLIP,
|
||||||
|
recVADDAq,
|
||||||
|
recVMADDAq,
|
||||||
|
recVADDAi,
|
||||||
|
recVMADDAi,
|
||||||
|
recVSUBAq,
|
||||||
|
recVMSUBAq,
|
||||||
|
recVSUBAi,
|
||||||
|
recVMSUBAi,
|
||||||
|
recVADDA,
|
||||||
|
recVMADDA,
|
||||||
|
recVMULA,
|
||||||
|
rec_C2UNK,
|
||||||
|
recVSUBA,
|
||||||
|
recVMSUBA,
|
||||||
|
recVOPMULA,
|
||||||
|
recVNOP,
|
||||||
|
recVMOVE,
|
||||||
|
recVMR32,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
recVLQI,
|
||||||
|
recVSQI,
|
||||||
|
recVLQD,
|
||||||
|
recVSQD,
|
||||||
|
recVDIV,
|
||||||
|
recVSQRT,
|
||||||
|
recVRSQRT,
|
||||||
|
recVWAITQ,
|
||||||
|
recVMTIR,
|
||||||
|
recVMFIR,
|
||||||
|
recVILWR,
|
||||||
|
recVISWR,
|
||||||
|
recVRNEXT,
|
||||||
|
recVRGET,
|
||||||
|
recVRINIT,
|
||||||
|
recVRXOR,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
|
rec_C2UNK,
|
||||||
};
|
};
|
||||||
|
|
||||||
namespace R5900 {
|
namespace R5900
|
||||||
namespace Dynarec {
|
{
|
||||||
namespace OpcodeImpl { void recCOP2() { recCOP2t[_Rs_](); }}}}
|
namespace Dynarec
|
||||||
|
{
|
||||||
|
namespace OpcodeImpl
|
||||||
|
{
|
||||||
|
void recCOP2() { recCOP2t[_Rs_](); }
|
||||||
|
} // namespace OpcodeImpl
|
||||||
|
} // namespace Dynarec
|
||||||
|
} // namespace R5900
|
||||||
void recCOP2_BC2() { recCOP2_BC2t[_Rt_](); }
|
void recCOP2_BC2() { recCOP2_BC2t[_Rt_](); }
|
||||||
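For context, a hedged sketch of how these tables are indexed: recCOP2() above dispatches on the rs field of the COP2 opcode and recCOP2_BC2() on the rt field. Assuming the standard MIPS field layout (the real _Rs_/_Rt_ macros live in the emitter headers and are not shown in this diff), the selection amounts to:

// Sketch only -- assumed MIPS opcode field extraction, not project code.
static u32 copRs(u32 code) { return (code >> 21) & 0x1F; } // index into recCOP2t
static u32 copRt(u32 code) { return (code >> 16) & 0x1F; } // index into recCOP2_BC2t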
void recCOP2_SPEC1()
{
	iFlushCall(FLUSH_EVERYTHING);
	xTEST(ptr32[&VU0.VI[REG_VPU_STAT].UL], 0x1);
	xForwardJZ32 skipvuidle;