diff --git a/ARM.cpp b/ARM.cpp
index 2e51d86c..883e3302 100644
--- a/ARM.cpp
+++ b/ARM.cpp
@@ -4,6 +4,27 @@
 #include "ARMInterpreter.h"
 
 
+u32 ARM::ConditionTable[16] =
+{
+    0xF0F0, // EQ
+    0x0F0F, // NE
+    0xCCCC, // CS
+    0x3333, // CC
+    0xFF00, // MI
+    0x00FF, // PL
+    0xAAAA, // VS
+    0x5555, // VC
+    0x0C0C, // HI
+    0xF3F3, // LS
+    0xAA55, // GE
+    0x55AA, // LT
+    0x0A05, // GT
+    0xF5FA, // LE
+    0xFFFF, // AL
+    0x0000  // NV
+};
+
+
 ARM::ARM(u32 num)
 {
     // well uh
@@ -20,6 +41,8 @@ void ARM::Reset()
     for (int i = 0; i < 16; i++)
         R[i] = 0;
 
+    CPSR = 0x000000D3;
+
     ExceptionBase = Num ? 0x00000000 : 0xFFFF0000;
 
     // zorp
@@ -33,10 +56,16 @@ void ARM::JumpTo(u32 addr)
     // TODO: THUMB!!
     if (addr&1) printf("!!! THUMB JUMP\n");
 
+    addr &= ~3;
     NextInstr = Read32(addr);
     R[15] = addr+4;
 }
 
+void ARM::RestoreCPSR()
+{
+    printf("TODO: restore CPSR\n");
+}
+
 s32 ARM::Execute(s32 cycles)
 {
     while (cycles > 0)
@@ -49,9 +78,20 @@ s32 ARM::Execute(s32 cycles)
         R[15] += 4;
 
         // actually execute
-        if ((CurInstr & 0xF0000000) != 0xE0000000) printf("well shit\n");
-        u32 icode = ((CurInstr >> 4) & 0xF) | ((CurInstr >> 16) & 0xFF0);
-        cycles -= ARMInterpreter::ARMInstrTable[icode](this);
+        if (CheckCondition(CurInstr >> 28))
+        {
+            u32 icode = ((CurInstr >> 4) & 0xF) | ((CurInstr >> 16) & 0xFF0);
+            cycles -= ARMInterpreter::ARMInstrTable[icode](this);
+        }
+        else if ((CurInstr & 0xFE000000) == 0xFA000000)
+        {
+            cycles -= ARMInterpreter::A_BLX_IMM(this);
+        }
+        else
+        {
+            // not executing it. oh well
+            cycles -= 1; // 1S. todo: check
+        }
     }
 
     return cycles;
diff --git a/ARM.h b/ARM.h
index b1ce0bf0..cd4a417e 100644
--- a/ARM.h
+++ b/ARM.h
@@ -9,6 +9,9 @@
 // lame
 #define C_S(x) x
 #define C_N(x) x
+#define C_I(x) x
+
+#define ROR(x, n) (((x) >> (n)) | ((x) << (32-(n))))
 
 class ARM
 {
@@ -19,8 +22,39 @@ public:
     void Reset();
 
     void JumpTo(u32 addr);
+    void RestoreCPSR();
+
     s32 Execute(s32 cycles);
 
+    bool CheckCondition(u32 code)
+    {
+        if (code == 0xE) return true;
+        if (ConditionTable[code] & (1 << (CPSR>>28))) return true;
+        return false;
+    }
+
+    void SetC(bool c)
+    {
+        if (c) CPSR |= 0x20000000;
+        else CPSR &= ~0x20000000;
+    }
+
+    void SetNZ(bool n, bool z)
+    {
+        CPSR &= ~0xC0000000;
+        if (n) CPSR |= 0x80000000;
+        if (z) CPSR |= 0x40000000;
+    }
+
+    void SetNZCV(bool n, bool z, bool c, bool v)
+    {
+        CPSR &= ~0xF0000000;
+        if (n) CPSR |= 0x80000000;
+        if (z) CPSR |= 0x40000000;
+        if (c) CPSR |= 0x20000000;
+        if (v) CPSR |= 0x10000000;
+    }
+
     u32 Read32(u32 addr)
     {
         if (Num) return NDS::ARM7Read32(addr);
@@ -41,6 +75,8 @@ public:
     u32 NextInstr;
 
     u32 ExceptionBase;
+
+    static u32 ConditionTable[16];
 };
 
 #endif // ARM_H
diff --git a/ARMInterpreter.cpp b/ARMInterpreter.cpp
index 2acef27e..424bfaa2 100644
--- a/ARMInterpreter.cpp
+++ b/ARMInterpreter.cpp
@@ -1,6 +1,7 @@
 #include <stdio.h>
 #include "NDS.h"
 #include "ARMInterpreter.h"
+#include "ARMInterpreter_ALU.h"
 #include "ARMInterpreter_Branch.h"
 
 
@@ -11,6 +12,7 @@ namespace ARMInterpreter
 s32 A_UNK(ARM* cpu)
 {
     printf("undefined ARM instruction %08X @ %08X\n", cpu->CurInstr, cpu->R[15]-8);
+    for (int i = 0; i < 16; i++) printf("R%d: %08X\n", i, cpu->R[i]);
     NDS::Halt();
     return 0x7FFFFFFF;
 }
diff --git a/ARMInterpreter.h b/ARMInterpreter.h
index 5318b81c..08e4be13 100644
--- a/ARMInterpreter.h
+++ b/ARMInterpreter.h
@@ -11,6 +11,8 @@ namespace ARMInterpreter
 extern s32 (*ARMInstrTable[4096])(ARM* cpu);
 extern s32 (*THUMBInstrTable[1024])(ARM* cpu);
 
+s32 A_BLX_IMM(ARM* cpu); // I'm a special one look at me
+
 }
 
 #endif // ARMINTERPRETER_H
diff --git a/ARMInterpreter_ALU.cpp b/ARMInterpreter_ALU.cpp
new file mode
100644 index 00000000..8980ffb4 --- /dev/null +++ b/ARMInterpreter_ALU.cpp @@ -0,0 +1,428 @@ +#include "ARM.h" + + +#define CARRY_ADD(a, b) ((0xFFFFFFFF-a) < b) +#define CARRY_SUB(a, b) (a >= b) + +#define OVERFLOW_ADD(a, b, res) ((!(((a) ^ (b)) & 0x80000000)) && (((a) ^ (res)) & 0x80000000)) +#define OVERFLOW_SUB(a, b, res) ((((a) ^ (b)) & 0x80000000) && (((a) ^ (res)) & 0x80000000)) + + +namespace ARMInterpreter +{ + + +#define LSL_IMM(x, s) \ + x <<= s; + +#define LSR_IMM(x, s) \ + if (s == 0) s = 32; \ + x >>= s; + +#define ASR_IMM(x, s) \ + if (s == 0) s = 32; \ + x = ((s32)x) >> s; + +#define ROR_IMM(x, s) \ + if (s == 0) \ + { \ + x = (x >> 1) | ((cpu->CPSR & 0x20000000) << 2); \ + } \ + else \ + { \ + x = ROR(x, s); \ + } + +#define LSL_IMM_S(x, s) \ + if (s > 0) \ + { \ + cpu->SetC(x & (1<<(32-s))); \ + x <<= s; \ + } + +#define LSR_IMM_S(x, s) \ + if (s == 0) s = 32; \ + cpu->SetC(x & (1<<(s-1))); \ + x >>= s; + +#define ASR_IMM_S(x, s) \ + if (s == 0) s = 32; \ + cpu->SetC(x & (1<<(s-1))); \ + x = ((s32)x) >> s; + +#define ROR_IMM_S(x, s) \ + if (s == 0) \ + { \ + cpu->SetC(x & 1); \ + x = (x >> 1) | ((cpu->CPSR & 0x20000000) << 2); \ + } \ + else \ + { \ + cpu->SetC(x & (1<<(s-1))); \ + x = ROR(x, s); \ + } + +#define LSL_REG(x, s) \ + x <<= s; + +#define LSR_REG(x, s) \ + x >>= s; + +#define ASR_REG(x, s) \ + x = ((s32)x) >> s; + +#define ROR_REG(x, s) \ + x = ROR(x, s); + +#define LSL_REG_S(x, s) \ + if (s > 0) cpu->SetC(x & (1<<(32-s))); \ + x <<= s; + +#define LSR_REG_S(x, s) \ + if (s > 0) cpu->SetC(x & (1<<(s-1))); \ + x >>= s; + +#define ASR_REG_S(x, s) \ + if (s > 0) cpu->SetC(x & (1<<(s-1))); \ + x = ((s32)x) >> s; + +#define ROR_REG_S(x, s) \ + if (s > 0) cpu->SetC(x & (1<<(s-1))); \ + x = ROR(x, s); + + + +#define A_CALC_OP2_IMM \ + u32 b = ROR(cpu->CurInstr&0xFF, (cpu->CurInstr>>7)&0x1E); + +#define A_CALC_OP2_REG_SHIFT_IMM(shiftop) \ + u32 b = cpu->R[cpu->CurInstr&0xF]; \ + u32 s = (cpu->CurInstr>>7)&0x1F; \ + shiftop(b, s); + +#define A_CALC_OP2_REG_SHIFT_REG(shiftop) \ + u32 b = cpu->R[cpu->CurInstr&0xF]; \ + shiftop(b, cpu->R[(cpu->CurInstr>>8)&0xF]); + + +#define A_IMPLEMENT_ALU_OP(x) \ +\ +s32 A_##x##_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_IMM \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSL_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSL_IMM) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSR_IMM) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_ASR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ASR_IMM) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_ROR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ROR_IMM) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSL_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(LSL_REG) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_LSR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(LSR_REG) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_ASR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ASR_REG) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_ROR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ROR_REG) \ + A_##x(1) \ +} \ +s32 A_##x##_IMM_S(ARM* cpu) \ +{ \ + A_CALC_OP2_IMM \ + A_##x##_S(0) \ +} \ +s32 A_##x##_REG_LSL_IMM_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSL_IMM_S) \ + A_##x##_S(0) \ +} \ +s32 A_##x##_REG_LSR_IMM_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSR_IMM_S) \ + A_##x##_S(0) \ +} \ +s32 A_##x##_REG_ASR_IMM_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ASR_IMM_S) \ + A_##x##_S(0) \ +} \ +s32 A_##x##_REG_ROR_IMM_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ROR_IMM_S) \ + A_##x##_S(0) \ +} \ +s32 A_##x##_REG_LSL_REG_S(ARM* cpu) \ +{ 
\ + A_CALC_OP2_REG_SHIFT_REG(LSL_REG_S) \ + A_##x##_S(1) \ +} \ +s32 A_##x##_REG_LSR_REG_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(LSR_REG_S) \ + A_##x##_S(1) \ +} \ +s32 A_##x##_REG_ASR_REG_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ASR_REG_S) \ + A_##x##_S(1) \ +} \ +s32 A_##x##_REG_ROR_REG_S(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ROR_REG_S) \ + A_##x##_S(1) \ +} + +#define A_IMPLEMENT_ALU_TEST(x) \ +\ +s32 A_##x##_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_IMM \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSL_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSL_IMM_S) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(LSR_IMM_S) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_ASR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ASR_IMM_S) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_ROR_IMM(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_IMM(ROR_IMM_S) \ + A_##x(0) \ +} \ +s32 A_##x##_REG_LSL_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(LSL_REG_S) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_LSR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(LSR_REG_S) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_ASR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ASR_REG_S) \ + A_##x(1) \ +} \ +s32 A_##x##_REG_ROR_REG(ARM* cpu) \ +{ \ + A_CALC_OP2_REG_SHIFT_REG(ROR_REG_S) \ + A_##x(1) \ +} + + +#define A_TST(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a & b; \ + cpu->SetNZ(res & 0x80000000, \ + !res); \ + return C_S(1) + C_I(c); + +A_IMPLEMENT_ALU_TEST(TST) + + +#define A_TEQ(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a ^ b; \ + cpu->SetNZ(res & 0x80000000, \ + !res); \ + return C_S(1) + C_I(c); + +A_IMPLEMENT_ALU_TEST(TEQ) + + +#define A_CMP(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a - b; \ + cpu->SetNZCV(res & 0x80000000, \ + !res, \ + CARRY_SUB(a, b), \ + OVERFLOW_SUB(a, b, res)); \ + return C_S(1) + C_I(c); + +A_IMPLEMENT_ALU_TEST(CMP) + + +#define A_CMN(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a + b; \ + cpu->SetNZCV(res & 0x80000000, \ + !res, \ + CARRY_ADD(a, b), \ + OVERFLOW_ADD(a, b, res)); \ + return C_S(1) + C_I(c); + +A_IMPLEMENT_ALU_TEST(CMN) + + + +#define A_ORR(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a | b; \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(res); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = res; \ + return C_S(1) + C_I(c); \ + } + +#define A_ORR_S(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a | b; \ + cpu->SetNZ(res & 0x80000000, \ + !res); \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(res); \ + cpu->RestoreCPSR(); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = res; \ + return C_S(1) + C_I(c); \ + } + +A_IMPLEMENT_ALU_OP(ORR) + + + +#define A_MOV(c) \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(b); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = b; \ + return C_S(1) + C_I(c); \ + } + +#define A_MOV_S(c) \ + cpu->SetNZ(b & 0x80000000, \ + !b); \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(b); \ + cpu->RestoreCPSR(); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = b; \ + return C_S(1) + C_I(c); \ + } + +A_IMPLEMENT_ALU_OP(MOV) + + + +#define A_BIC(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a & ~b; \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + 
cpu->JumpTo(res); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = res; \ + return C_S(1) + C_I(c); \ + } + +#define A_BIC_S(c) \ + u32 a = cpu->R[(cpu->CurInstr>>16) & 0xF]; \ + u32 res = a & ~b; \ + cpu->SetNZ(res & 0x80000000, \ + !res); \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(res); \ + cpu->RestoreCPSR(); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = res; \ + return C_S(1) + C_I(c); \ + } + +A_IMPLEMENT_ALU_OP(BIC) + + + +#define A_MVN(c) \ + b = ~b; \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(b); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = b; \ + return C_S(1) + C_I(c); \ + } + +#define A_MVN_S(c) \ + b = ~b; \ + cpu->SetNZ(b & 0x80000000, \ + !b); \ + if (((cpu->CurInstr>>12) & 0xF) == 15) \ + { \ + cpu->JumpTo(b); \ + cpu->RestoreCPSR(); \ + return C_S(2) + C_I(c) + C_N(1); \ + } \ + else \ + { \ + cpu->R[(cpu->CurInstr>>12) & 0xF] = b; \ + return C_S(1) + C_I(c); \ + } + +A_IMPLEMENT_ALU_OP(MVN) + + +} diff --git a/ARMInterpreter_ALU.h b/ARMInterpreter_ALU.h new file mode 100644 index 00000000..0e4dd1ad --- /dev/null +++ b/ARMInterpreter_ALU.h @@ -0,0 +1,126 @@ + +#ifndef ARMINTERPRETER_ALU_H +#define ARMINTERPRETER_ALU_H + +namespace ARMInterpreter +{ + +s32 A_TST_IMM(ARM* cpu); +s32 A_TST_REG_LSL_IMM(ARM* cpu); +s32 A_TST_REG_LSR_IMM(ARM* cpu); +s32 A_TST_REG_ASR_IMM(ARM* cpu); +s32 A_TST_REG_ROR_IMM(ARM* cpu); +s32 A_TST_REG_LSL_REG(ARM* cpu); +s32 A_TST_REG_LSR_REG(ARM* cpu); +s32 A_TST_REG_ASR_REG(ARM* cpu); +s32 A_TST_REG_ROR_REG(ARM* cpu); + +s32 A_TEQ_IMM(ARM* cpu); +s32 A_TEQ_REG_LSL_IMM(ARM* cpu); +s32 A_TEQ_REG_LSR_IMM(ARM* cpu); +s32 A_TEQ_REG_ASR_IMM(ARM* cpu); +s32 A_TEQ_REG_ROR_IMM(ARM* cpu); +s32 A_TEQ_REG_LSL_REG(ARM* cpu); +s32 A_TEQ_REG_LSR_REG(ARM* cpu); +s32 A_TEQ_REG_ASR_REG(ARM* cpu); +s32 A_TEQ_REG_ROR_REG(ARM* cpu); + +s32 A_CMP_IMM(ARM* cpu); +s32 A_CMP_REG_LSL_IMM(ARM* cpu); +s32 A_CMP_REG_LSR_IMM(ARM* cpu); +s32 A_CMP_REG_ASR_IMM(ARM* cpu); +s32 A_CMP_REG_ROR_IMM(ARM* cpu); +s32 A_CMP_REG_LSL_REG(ARM* cpu); +s32 A_CMP_REG_LSR_REG(ARM* cpu); +s32 A_CMP_REG_ASR_REG(ARM* cpu); +s32 A_CMP_REG_ROR_REG(ARM* cpu); + +s32 A_CMN_IMM(ARM* cpu); +s32 A_CMN_REG_LSL_IMM(ARM* cpu); +s32 A_CMN_REG_LSR_IMM(ARM* cpu); +s32 A_CMN_REG_ASR_IMM(ARM* cpu); +s32 A_CMN_REG_ROR_IMM(ARM* cpu); +s32 A_CMN_REG_LSL_REG(ARM* cpu); +s32 A_CMN_REG_LSR_REG(ARM* cpu); +s32 A_CMN_REG_ASR_REG(ARM* cpu); +s32 A_CMN_REG_ROR_REG(ARM* cpu); + +s32 A_ORR_IMM(ARM* cpu); +s32 A_ORR_REG_LSL_IMM(ARM* cpu); +s32 A_ORR_REG_LSR_IMM(ARM* cpu); +s32 A_ORR_REG_ASR_IMM(ARM* cpu); +s32 A_ORR_REG_ROR_IMM(ARM* cpu); +s32 A_ORR_REG_LSL_REG(ARM* cpu); +s32 A_ORR_REG_LSR_REG(ARM* cpu); +s32 A_ORR_REG_ASR_REG(ARM* cpu); +s32 A_ORR_REG_ROR_REG(ARM* cpu); +s32 A_ORR_IMM_S(ARM* cpu); +s32 A_ORR_REG_LSL_IMM_S(ARM* cpu); +s32 A_ORR_REG_LSR_IMM_S(ARM* cpu); +s32 A_ORR_REG_ASR_IMM_S(ARM* cpu); +s32 A_ORR_REG_ROR_IMM_S(ARM* cpu); +s32 A_ORR_REG_LSL_REG_S(ARM* cpu); +s32 A_ORR_REG_LSR_REG_S(ARM* cpu); +s32 A_ORR_REG_ASR_REG_S(ARM* cpu); +s32 A_ORR_REG_ROR_REG_S(ARM* cpu); + +s32 A_MOV_IMM(ARM* cpu); +s32 A_MOV_REG_LSL_IMM(ARM* cpu); +s32 A_MOV_REG_LSR_IMM(ARM* cpu); +s32 A_MOV_REG_ASR_IMM(ARM* cpu); +s32 A_MOV_REG_ROR_IMM(ARM* cpu); +s32 A_MOV_REG_LSL_REG(ARM* cpu); +s32 A_MOV_REG_LSR_REG(ARM* cpu); +s32 A_MOV_REG_ASR_REG(ARM* cpu); +s32 A_MOV_REG_ROR_REG(ARM* cpu); +s32 A_MOV_IMM_S(ARM* cpu); +s32 A_MOV_REG_LSL_IMM_S(ARM* 
cpu); +s32 A_MOV_REG_LSR_IMM_S(ARM* cpu); +s32 A_MOV_REG_ASR_IMM_S(ARM* cpu); +s32 A_MOV_REG_ROR_IMM_S(ARM* cpu); +s32 A_MOV_REG_LSL_REG_S(ARM* cpu); +s32 A_MOV_REG_LSR_REG_S(ARM* cpu); +s32 A_MOV_REG_ASR_REG_S(ARM* cpu); +s32 A_MOV_REG_ROR_REG_S(ARM* cpu); + +s32 A_BIC_IMM(ARM* cpu); +s32 A_BIC_REG_LSL_IMM(ARM* cpu); +s32 A_BIC_REG_LSR_IMM(ARM* cpu); +s32 A_BIC_REG_ASR_IMM(ARM* cpu); +s32 A_BIC_REG_ROR_IMM(ARM* cpu); +s32 A_BIC_REG_LSL_REG(ARM* cpu); +s32 A_BIC_REG_LSR_REG(ARM* cpu); +s32 A_BIC_REG_ASR_REG(ARM* cpu); +s32 A_BIC_REG_ROR_REG(ARM* cpu); +s32 A_BIC_IMM_S(ARM* cpu); +s32 A_BIC_REG_LSL_IMM_S(ARM* cpu); +s32 A_BIC_REG_LSR_IMM_S(ARM* cpu); +s32 A_BIC_REG_ASR_IMM_S(ARM* cpu); +s32 A_BIC_REG_ROR_IMM_S(ARM* cpu); +s32 A_BIC_REG_LSL_REG_S(ARM* cpu); +s32 A_BIC_REG_LSR_REG_S(ARM* cpu); +s32 A_BIC_REG_ASR_REG_S(ARM* cpu); +s32 A_BIC_REG_ROR_REG_S(ARM* cpu); + +s32 A_MVN_IMM(ARM* cpu); +s32 A_MVN_REG_LSL_IMM(ARM* cpu); +s32 A_MVN_REG_LSR_IMM(ARM* cpu); +s32 A_MVN_REG_ASR_IMM(ARM* cpu); +s32 A_MVN_REG_ROR_IMM(ARM* cpu); +s32 A_MVN_REG_LSL_REG(ARM* cpu); +s32 A_MVN_REG_LSR_REG(ARM* cpu); +s32 A_MVN_REG_ASR_REG(ARM* cpu); +s32 A_MVN_REG_ROR_REG(ARM* cpu); +s32 A_MVN_IMM_S(ARM* cpu); +s32 A_MVN_REG_LSL_IMM_S(ARM* cpu); +s32 A_MVN_REG_LSR_IMM_S(ARM* cpu); +s32 A_MVN_REG_ASR_IMM_S(ARM* cpu); +s32 A_MVN_REG_ROR_IMM_S(ARM* cpu); +s32 A_MVN_REG_LSL_REG_S(ARM* cpu); +s32 A_MVN_REG_LSR_REG_S(ARM* cpu); +s32 A_MVN_REG_ASR_REG_S(ARM* cpu); +s32 A_MVN_REG_ROR_REG_S(ARM* cpu); + +} + +#endif diff --git a/ARMInterpreter_Branch.cpp b/ARMInterpreter_Branch.cpp index 4555235e..5b1689b9 100644 --- a/ARMInterpreter_Branch.cpp +++ b/ARMInterpreter_Branch.cpp @@ -22,6 +22,16 @@ s32 A_BL(ARM* cpu) return C_S(2) + C_N(1); } +s32 A_BLX_IMM(ARM* cpu) +{ + s32 offset = (s32)(cpu->CurInstr << 8) >> 6; + if (cpu->CurInstr & 0x01000000) offset += 2; + cpu->R[14] = cpu->R[15] - 4; + cpu->JumpTo(cpu->R[15] + offset + 1); + + return C_S(2) + C_N(1); +} + } diff --git a/ARM_InstrTable.h b/ARM_InstrTable.h index 812d040e..51454c1d 100644 --- a/ARM_InstrTable.h +++ b/ARM_InstrTable.h @@ -106,8 +106,8 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0001 0001 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_TST_REG_LSL_IMM, A_TST_REG_LSL_REG, A_TST_REG_LSR_IMM, A_TST_REG_LSR_REG, + A_TST_REG_ASR_IMM, A_TST_REG_ASR_REG, A_TST_REG_ROR_IMM, A_TST_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, @@ -118,8 +118,8 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0001 0011 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_TEQ_REG_LSL_IMM, A_TEQ_REG_LSL_REG, A_TEQ_REG_LSR_IMM, A_TEQ_REG_LSR_REG, + A_TEQ_REG_ASR_IMM, A_TEQ_REG_ASR_REG, A_TEQ_REG_ROR_IMM, A_TEQ_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, @@ -130,8 +130,8 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0001 0101 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_CMP_REG_LSL_IMM, A_CMP_REG_LSL_REG, A_CMP_REG_LSR_IMM, A_CMP_REG_LSR_REG, + A_CMP_REG_ASR_IMM, A_CMP_REG_ASR_REG, A_CMP_REG_ROR_IMM, A_CMP_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, @@ -142,56 +142,56 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0001 0111 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_CMN_REG_LSL_IMM, A_CMN_REG_LSL_REG, A_CMN_REG_LSR_IMM, A_CMN_REG_LSR_REG, + A_CMN_REG_ASR_IMM, A_CMN_REG_ASR_REG, A_CMN_REG_ROR_IMM, A_CMN_REG_ROR_REG, A_UNK, A_UNK, 
A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1000 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_ORR_REG_LSL_IMM, A_ORR_REG_LSL_REG, A_ORR_REG_LSR_IMM, A_ORR_REG_LSR_REG, + A_ORR_REG_ASR_IMM, A_ORR_REG_ASR_REG, A_ORR_REG_ROR_IMM, A_ORR_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1001 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_ORR_REG_LSL_IMM_S, A_ORR_REG_LSL_REG_S, A_ORR_REG_LSR_IMM_S, A_ORR_REG_LSR_REG_S, + A_ORR_REG_ASR_IMM_S, A_ORR_REG_ASR_REG_S, A_ORR_REG_ROR_IMM_S, A_ORR_REG_ROR_REG_S, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1010 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MOV_REG_LSL_IMM, A_MOV_REG_LSL_REG, A_MOV_REG_LSR_IMM, A_MOV_REG_LSR_REG, + A_MOV_REG_ASR_IMM, A_MOV_REG_ASR_REG, A_MOV_REG_ROR_IMM, A_MOV_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1011 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MOV_REG_LSL_IMM_S, A_MOV_REG_LSL_REG_S, A_MOV_REG_LSR_IMM_S, A_MOV_REG_LSR_REG_S, + A_MOV_REG_ASR_IMM_S, A_MOV_REG_ASR_REG_S, A_MOV_REG_ROR_IMM_S, A_MOV_REG_ROR_REG_S, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1100 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_BIC_REG_LSL_IMM, A_BIC_REG_LSL_REG, A_BIC_REG_LSR_IMM, A_BIC_REG_LSR_REG, + A_BIC_REG_ASR_IMM, A_BIC_REG_ASR_REG, A_BIC_REG_ROR_IMM, A_BIC_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1101 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_BIC_REG_LSL_IMM_S, A_BIC_REG_LSL_REG_S, A_BIC_REG_LSR_IMM_S, A_BIC_REG_LSR_REG_S, + A_BIC_REG_ASR_IMM_S, A_BIC_REG_ASR_REG_S, A_BIC_REG_ROR_IMM_S, A_BIC_REG_ROR_REG_S, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1110 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MVN_REG_LSL_IMM, A_MVN_REG_LSL_REG, A_MVN_REG_LSR_IMM, A_MVN_REG_LSR_REG, + A_MVN_REG_ASR_IMM, A_MVN_REG_ASR_REG, A_MVN_REG_ROR_IMM, A_MVN_REG_ROR_REG, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, // 0001 1111 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MVN_REG_LSL_IMM_S, A_MVN_REG_LSL_REG_S, A_MVN_REG_LSR_IMM_S, A_MVN_REG_LSR_REG_S, + A_MVN_REG_ASR_IMM_S, A_MVN_REG_ASR_REG_S, A_MVN_REG_ROR_IMM_S, A_MVN_REG_ROR_REG_S, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, A_UNK, @@ -302,10 +302,10 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0011 0001 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_TST_IMM, A_TST_IMM, A_TST_IMM, A_TST_IMM, + A_TST_IMM, A_TST_IMM, A_TST_IMM, A_TST_IMM, + A_TST_IMM, A_TST_IMM, A_TST_IMM, A_TST_IMM, + A_TST_IMM, A_TST_IMM, A_TST_IMM, A_TST_IMM, // 0011 0010 0000 A_UNK, A_UNK, A_UNK, A_UNK, @@ -314,10 +314,10 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0011 0011 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, + A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, + A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, + A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, A_TEQ_IMM, // 0011 0100 0000 A_UNK, A_UNK, A_UNK, A_UNK, @@ -326,10 +326,10 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0011 0101 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, 
A_UNK, A_UNK, + A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, + A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, + A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, + A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, A_CMP_IMM, // 0011 0110 0000 A_UNK, A_UNK, A_UNK, A_UNK, @@ -338,58 +338,58 @@ INSTRFUNC_PROTO(ARMInstrTable[4096]) = A_UNK, A_UNK, A_UNK, A_UNK, // 0011 0111 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, + A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, + A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, + A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, A_CMN_IMM, // 0011 1000 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, + A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, + A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, + A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, A_ORR_IMM, // 0011 1001 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, + A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, + A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, + A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, A_ORR_IMM_S, // 0011 1010 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, + A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, + A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, + A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, A_MOV_IMM, // 0011 1011 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, + A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, + A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, + A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, A_MOV_IMM_S, // 0011 1100 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, + A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, + A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, + A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, A_BIC_IMM, // 0011 1101 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, + A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, + A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, + A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, A_BIC_IMM_S, // 0011 1110 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, + A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, + A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, + A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, A_MVN_IMM, // 0011 1111 0000 - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, - A_UNK, A_UNK, A_UNK, A_UNK, + A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, + A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, + A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, + A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, A_MVN_IMM_S, diff --git a/melonDS.depend b/melonDS.depend index 988dd07d..be3cae2d 100644 --- a/melonDS.depend +++ b/melonDS.depend @@ -13,28 +13,37 @@ "NDS.h" "ARM.h" -1480007764 
source:c:\documents\sources\melonds\arm.cpp +1480027981 source:c:\documents\sources\melonds\arm.cpp "NDS.h" "ARM.h" "ARMInterpreter.h" -1480008165 c:\documents\sources\melonds\arm.h +1480027964 c:\documents\sources\melonds\arm.h "types.h" "NDS.h" -1480008597 c:\documents\sources\melonds\arm_instrtable.h +1480028755 c:\documents\sources\melonds\arm_instrtable.h -1480005496 c:\documents\sources\melonds\arminterpreter.h +1480018830 c:\documents\sources\melonds\arminterpreter.h "types.h" "ARM.h" -1480008388 source:c:\documents\sources\melonds\arminterpreter.cpp +1480015932 source:c:\documents\sources\melonds\arminterpreter.cpp "NDS.h" "ARMInterpreter.h" + "ARMInterpreter_ALU.h" "ARMInterpreter_Branch.h" "ARM_InstrTable.h" 1480008608 c:\documents\sources\melonds\arminterpreter_branch.h +1480018773 source:c:\documents\sources\melonds\arminterpreter_branch.cpp + "ARM.h" + +1480028448 c:\documents\sources\melonds\arminterpreter_alu.h + +1480028805 source:c:\documents\sources\melonds\arminterpreter_alu.cpp + "ARM.h" +
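
Notes on the techniques used in this patch, each with a small self-contained sketch; all example values are made up for illustration and nothing below is part of the patch itself.

The condition-table scheme in ARM.cpp packs, for each of the 16 condition codes, a 16-bit mask whose bit i says "the condition passes when the NZCV nibble equals i"; CheckCondition() is then one shift and one AND against the top nibble of CPSR. A standalone sketch of the same lookup:

#include <cstdint>
#include <cstdio>

// Same 16 masks as ARM::ConditionTable: bit i of entry [cond] is set when the
// condition holds for flag nibble i, where i = N<<3 | Z<<2 | C<<1 | V.
static const uint16_t CondTable[16] =
{
    0xF0F0, 0x0F0F, 0xCCCC, 0x3333, 0xFF00, 0x00FF, 0xAAAA, 0x5555,
    0x0C0C, 0xF3F3, 0xAA55, 0x55AA, 0x0A05, 0xF5FA, 0xFFFF, 0x0000
};

static bool CheckCond(uint32_t code, uint32_t cpsr)
{
    if (code == 0xE) return true;                  // AL short-circuits, as in CheckCondition()
    return (CondTable[code] >> (cpsr >> 28)) & 1;  // index by the NZCV nibble
}

int main()
{
    uint32_t cpsr = 0x40000000;                    // Z set; N, C, V clear
    printf("EQ %d  NE %d  GT %d  GE %d\n",
           CheckCond(0x0, cpsr), CheckCond(0x1, cpsr),
           CheckCond(0xC, cpsr), CheckCond(0xA, cpsr));   // prints 1 0 0 1
}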
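
Execute() dispatches through the 4096-entry ARMInstrTable, indexed by opcode bits 27-20 and 7-4. A worked example of the index computation, using 0xE1B00101 (which encodes MOVS r0, r1, LSL #2) as an assumed input:

#include <cstdint>
#include <cstdio>

// Same index computation as ARM::Execute(): opcode bits 27-20 form the upper
// eight bits of the index, bits 7-4 the lower four, giving 0..4095.
static uint32_t DecodeIndex(uint32_t instr)
{
    return ((instr >> 4) & 0xF) | ((instr >> 16) & 0xFF0);
}

int main()
{
    uint32_t instr = 0xE1B00101;            // MOVS r0, r1, LSL #2
    printf("icode = 0x%03X\n", DecodeIndex(instr));
    // prints 0x1B0: the "0001 1011 0000" row of ARM_InstrTable.h, slot 0,
    // which this patch fills with A_MOV_REG_LSL_IMM_S.
}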
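
The CARRY_*/OVERFLOW_* macros at the top of ARMInterpreter_ALU.cpp derive the C and V flags from 32-bit operands alone, with no 64-bit intermediate. The same formulas written as plain functions, with two boundary-value checks:

#include <cstdint>
#include <cstdio>

// Carry out of an add means the true sum exceeds 0xFFFFFFFF; carry (no borrow)
// out of a sub means a >= b. Signed overflow happens when the operands' signs
// agree (add) or differ (sub) but the result's sign does not match operand a.
static bool CarryAdd(uint32_t a, uint32_t b)                { return (0xFFFFFFFF - a) < b; }
static bool CarrySub(uint32_t a, uint32_t b)                { return a >= b; }
static bool OverflowAdd(uint32_t a, uint32_t b, uint32_t r) { return !((a ^ b) & 0x80000000) && ((a ^ r) & 0x80000000); }
static bool OverflowSub(uint32_t a, uint32_t b, uint32_t r) { return  ((a ^ b) & 0x80000000) && ((a ^ r) & 0x80000000); }

int main()
{
    uint32_t a = 0x7FFFFFFF, b = 1;
    printf("ADD C=%d V=%d\n", CarryAdd(a, b), OverflowAdd(a, b, a + b));   // C=0 V=1
    uint32_t c = 0x80000000, d = 1;
    printf("SUB C=%d V=%d\n", CarrySub(c, d), OverflowSub(c, d, c - d));   // C=1 V=1
}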
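
The *_IMM shift macros implement the ARM shifter special cases: an encoded immediate of 0 means LSR/ASR #32, and ROR #0 is RRX. Two caveats worth flagging: `x >>= 32` on a u32 is undefined behaviour in C++, and ROR_IMM_S calls SetC() before it reads CPSR back, so the bit it rotates in appears to be the new carry rather than the old one, which does not look intended. A sketch of LSR #32 and RRX written around both points (an alternative formulation under those assumptions, not what the patch does):

#include <cstdint>
#include <cstdio>

// LSR by an immediate: the encoded value 0 means "shift by 32", which gives
// result 0 with bit 31 of the input as the carry-out. Written without an
// actual shift-by-32, since x >> 32 on a uint32_t is undefined behaviour.
static uint32_t LsrImm(uint32_t x, uint32_t imm, bool& carry)
{
    if (imm == 0)
    {
        carry = (x >> 31) & 1;
        return 0;
    }
    carry = (x >> (imm - 1)) & 1;   // last bit shifted out becomes C
    return x >> imm;
}

// ROR #0 is RRX: rotate right by one, pulling the *old* carry into bit 31
// and sending bit 0 out into the carry.
static uint32_t Rrx(uint32_t x, bool& carry)
{
    uint32_t res = (x >> 1) | (carry ? 0x80000000u : 0u);
    carry = x & 1;
    return res;
}

int main()
{
    bool c = false;
    uint32_t r = LsrImm(0x80000001, 0, c);
    printf("LSR #32: res=%08X C=%d\n", r, c);   // res=00000000 C=1

    c = true;
    r = Rrx(0x00000002, c);
    printf("RRX:     res=%08X C=%d\n", r, c);   // res=80000001 C=0
}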
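
A_BLX_IMM decodes its branch offset with a sign-extension idiom: shifting the opcode left by 8 and arithmetically right by 6 both sign-extends the 24-bit field and scales it by 4, and bit 24 (the H bit) adds the extra halfword. Like the patch, this relies on arithmetic right shift of negative signed values, which is implementation-defined but universal in practice. A small check with made-up opcodes:

#include <cstdint>
#include <cstdio>

// Same offset decode as A_BLX_IMM: <<8 then arithmetic >>6 sign-extends the
// 24-bit immediate and multiplies it by 4; the H bit contributes 2 more bytes.
static int32_t BlxOffset(uint32_t instr)
{
    int32_t offset = (int32_t)(instr << 8) >> 6;
    if (instr & 0x01000000) offset += 2;
    return offset;
}

int main()
{
    // Hypothetical BLX opcodes, made up for illustration:
    printf("%d\n", BlxOffset(0xFA000010));   // +16 words, H clear -> 64
    printf("%d\n", BlxOffset(0xFBFFFFFE));   // -2 words, H set    -> -6
}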
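
The R[15]-8 in A_UNK's register dump and the R[15]-4 link value in A_BL/A_BLX_IMM both come from the prefetch convention set up by JumpTo() and Execute(): by the time an instruction executes, R[15] already points two instructions ahead. A trivial illustration with an assumed instruction address:

#include <cstdint>
#include <cstdio>

// JumpTo(addr) leaves R15 = addr+4 with the word at addr prefetched; Execute()
// then bumps R15 by 4 before running it. So the executing instruction lives at
// R15-8, and R15-4 is the address of its successor (the natural link value).
int main()
{
    uint32_t exec_addr = 0x02000000;     // hypothetical address of a BL
    uint32_t r15 = exec_addr + 8;        // value of R[15] while it executes
    uint32_t lr  = r15 - 4;              // what A_BL stores into R[14]
    printf("executing at %08X, R15=%08X, LR=%08X\n", exec_addr, r15, lr);
}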