
Masked PSP Memory support for the AArch64 Dynarec

m4xw committed Apr 15, 2019
1 parent 54e102c commit b9352354c9b83c63214ce6ccdf359c05a031784e
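
The hunks below thread a MASKED_PSP_MEMORY path through the AArch64 JIT: the dispatcher fast path in GenerateFixedCode, the FPU/VFPU and load/store compilers in the MIPSComp namespace, and Arm64RegCache::MapRegAsPointer. Wherever a guest address is formed, it is ANDed with 0x3FFFFFFF, either folded into a JIT-time constant or emitted as an ANDI2R, before Memory::base is added. A minimal sketch of the addressing model this implies (illustrative only, not emitter code; it assumes MASKED_PSP_MEMORY exists to confine guest addresses to a 1 GiB window when the full 32-bit space cannot be mapped):

    // Hypothetical helper, not part of the commit. Memory::base is the flat
    // host mapping referenced elsewhere in the diff; clearing the top two
    // bits also drops the PSP's uncached-mirror bit, so mirrored addresses
    // resolve to the same host bytes as their cached counterparts.
    static inline uint8_t *GuestPtr(uint32_t guestAddr) {
    #ifdef MASKED_PSP_MEMORY
    	guestAddr &= 0x3FFFFFFF;
    #endif
    	return Memory::base + guestAddr;
    }
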
@@ -247,6 +247,9 @@ void Arm64Jit::GenerateFixedCode(const JitOptions &jo) {
}

LDR(INDEX_UNSIGNED, SCRATCH1, CTXREG, offsetof(MIPSState, pc));
+#ifdef MASKED_PSP_MEMORY
+ANDI2R(SCRATCH1, SCRATCH1, 0x3FFFFFFF);
+#endif
LDR(SCRATCH1, MEMBASEREG, SCRATCH1_64);
LSR(SCRATCH2, SCRATCH1, 24); // or UBFX(SCRATCH2, SCRATCH1, 24, 8)
ANDI2R(SCRATCH1, SCRATCH1, 0x00FFFFFF);
@@ -43,7 +43,7 @@ using namespace MIPSAnalyst;
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non working ones should have DISABLE.

-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+//#define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
#define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
#define DISABLE { Comp_Generic(op); return; }

@@ -51,7 +51,7 @@
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non working ones should have DISABLE.

-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+// #define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
#define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
#define DISABLE { Comp_Generic(op); return; }

@@ -102,7 +102,11 @@ void Arm64Jit::Comp_FPULS(MIPSOpcode op)
fpr.SpillLock(ft);
fpr.MapReg(ft, MAP_NOINIT | MAP_DIRTY);
if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = offset + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1, addr);
} else {
gpr.MapReg(rs);
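
When the base register is a known constant (gpr.IsImm(rs)), the mask is folded into the address at compile time, so the compiled block pays nothing extra at runtime; only the dynamic case, handled in the SetScratch1ToEffectiveAddress hunk further down, emits an additional ANDI2R. A standalone sketch of the folded computation (hypothetical helper; offset is the signed 16-bit displacement from the MIPS encoding):

    // Fold the mask into an immediate effective address at JIT time.
    static inline uint32_t MaskedImmAddr(uint32_t base, int16_t offset) {
    	return (uint32_t)(base + offset) & 0x3FFFFFFF;
    }
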
@@ -129,7 +133,11 @@ void Arm64Jit::Comp_FPULS(MIPSOpcode op)

fpr.MapReg(ft);
if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = offset + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1, addr);
} else {
gpr.MapReg(rs);
@@ -41,7 +41,7 @@
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non working ones should have DISABLE.

-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+//#define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
#define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
#define DISABLE { Comp_Generic(op); return; }

@@ -56,6 +56,9 @@ namespace MIPSComp {
} else {
MOV(SCRATCH1, gpr.R(rs));
}
+#ifdef MASKED_PSP_MEMORY
+ANDI2R(SCRATCH1, SCRATCH1, 0x3FFFFFFF);
+#endif
}

std::vector<FixupBranch> Arm64Jit::SetScratch1ForSafeAddress(MIPSGPReg rs, s16 offset, ARM64Reg tempReg) {
@@ -135,12 +138,17 @@ namespace MIPSComp {
std::vector<FixupBranch> skips;

if (gpr.IsImm(rs) && Memory::IsValidAddress(iaddr)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = iaddr & 0x3FFFFFFF;
+#else
+u32 addr = iaddr;
+#endif
// Need to initialize since this only loads part of the register.
// But rs no longer matters (even if rs == rt) since we have the address.
gpr.MapReg(rt, load ? MAP_DIRTY : 0);
-gpr.SetRegImm(SCRATCH1, iaddr & ~3);
+gpr.SetRegImm(SCRATCH1, addr & ~3);

-u8 shift = (iaddr & 3) * 8;
+u8 shift = (addr & 3) * 8;

switch (o) {
case 34: // lwl
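
For lwl/lwr with a constant address, the masked addr is split into the aligned word to load and the byte position that selects the merge shift. A worked example with a hypothetical address:

    // Example values only: an uncached-mirror address masks down first.
    uint32_t addr  = 0x48801006 & 0x3FFFFFFF; // -> 0x08801006
    uint32_t word  = addr & ~3u;              // -> 0x08801004, the aligned load
    uint32_t shift = (addr & 3) * 8;          // -> 16, bits to merge into rt
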
@@ -347,7 +355,12 @@ namespace MIPSComp {
}

if (gpr.IsImm(rs) && Memory::IsValidAddress(iaddr)) {
-if (offset == 0) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = iaddr & 0x3FFFFFFF;
+#else
+u32 addr = iaddr;
+#endif
+if (addr == iaddr && offset == 0) {
// It was already safe. Let's shove it into a reg and use it directly.
if (targetReg == INVALID_REG) {
load ? gpr.MapDirtyIn(rt, rs) : gpr.MapInIn(rt, rs);
@@ -360,7 +373,7 @@ namespace MIPSComp {
gpr.MapReg(rt, load ? MAP_NOINIT : 0);
targetReg = gpr.R(rt);
}
-gpr.SetRegImm(SCRATCH1, iaddr);
+gpr.SetRegImm(SCRATCH1, addr);
addrReg = SCRATCH1;
}
} else {
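
The extra addr == iaddr guard matters under MASKED_PSP_MEMORY: if the immediate address carries mirror bits, masking changes its value, so the register copy of rs can no longer stand in for the address even when offset is 0. For example (values hypothetical), iaddr = 0x44100000 masks to 0x04100000, so rs, which still holds 0x44100000, must not be used directly; the masked constant is materialized into SCRATCH1 instead. When the mask is a no-op, the old shortcut is kept.
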
@@ -37,7 +37,7 @@
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non working ones should have DISABLE.

-// #define CONDITIONAL_DISABLE { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }
+// #define CONDITIONAL_DISABLE(flag) { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }
#define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
#define DISABLE { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }

@@ -222,7 +222,11 @@ namespace MIPSComp {
// CC might be set by slow path below, so load regs first.
fpr.MapRegV(vt, MAP_DIRTY | MAP_NOINIT);
if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = offset + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1, addr);
} else {
gpr.MapReg(rs);
@@ -251,7 +255,11 @@ namespace MIPSComp {
// CC might be set by slow path below, so load regs first.
fpr.MapRegV(vt);
if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = offset + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1, addr);
} else {
gpr.MapReg(rs);
@@ -293,7 +301,11 @@ namespace MIPSComp {
fpr.MapRegsAndSpillLockV(vregs, V_Quad, MAP_DIRTY | MAP_NOINIT);

if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = imm + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1_64, addr + (uintptr_t)Memory::base);
} else {
gpr.MapReg(rs);
@@ -326,7 +338,11 @@ namespace MIPSComp {
fpr.MapRegsAndSpillLockV(vregs, V_Quad, 0);

if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
u32 addr = imm + gpr.GetImm(rs);
+#endif
gpr.SetRegImm(SCRATCH1_64, addr + (uintptr_t)Memory::base);
} else {
gpr.MapReg(rs);
@@ -440,6 +440,9 @@ Arm64Gen::ARM64Reg Arm64RegCache::MapRegAsPointer(MIPSGPReg reg) {
if (!jo_->enablePointerify) {
// Convert to a pointer by adding the base and clearing off the top bits.
// If SP, we can probably avoid the top bit clear, let's play with that later.
+#ifdef MASKED_PSP_MEMORY
+emit_->ANDI2R(EncodeRegTo64(a), EncodeRegTo64(a), 0x3FFFFFFF);
+#endif
emit_->ADD(EncodeRegTo64(a), EncodeRegTo64(a), MEMBASEREG);
mr[reg].loc = ML_ARMREG_AS_PTR;
} else if (!ar[a].pointerified) {
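
In MapRegAsPointer the mask has to come before the ADD, so that mirror bits in the guest address can never push the resulting host pointer past the masked 1 GiB window above MEMBASEREG. A concrete illustration (addresses hypothetical):

    // e.g. the uncached VRAM mirror 0x44000000: without the AND the pointer
    // would be Memory::base + 0x44000000, past the masked window; with it,
    // Memory::base + 0x04000000, the same host bytes as cached VRAM.
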
