Permalink
Browse files

PowerPC: Make the PowerPCState's msr member variable a UReg_MSR instance

Gets rid of the need to construct UReg_MSR values around the actual
member in order to query information from it (without using shifts and
masks). This makes it more concise in some areas, while helping with
readability in some other places (such as copying the ILE bit to the LE
bit in the exception checking functions).
  • Loading branch information...
lioncash committed May 5, 2018
1 parent 58b96ee commit ffcf107dd2686f27a7e27e5285b13334ba9955fb
@@ -269,10 +269,9 @@ bool CBoot::Load_BS2(const std::string& boot_rom_filename)
PowerPC::ppcState.gpr[4] = 0x00002030;
PowerPC::ppcState.gpr[5] = 0x0000009c;
UReg_MSR& m_MSR = ((UReg_MSR&)PowerPC::ppcState.msr);
m_MSR.FP = 1;
m_MSR.DR = 1;
m_MSR.IR = 1;
MSR.FP = 1;
MSR.DR = 1;
MSR.IR = 1;
PowerPC::ppcState.spr[SPR_HID0] = 0x0011c464;
PowerPC::ppcState.spr[SPR_IBAT3U] = 0xfff0001f;
@@ -55,11 +55,10 @@ void CBoot::RunFunction(u32 address)
void CBoot::SetupMSR()
{
  // Enable floating point, address translation (data + instruction), and
  // external interrupts before handing control to the loaded executable.
  // ppcState.msr is a UReg_MSR, so the bits can be set directly without
  // reinterpret-casting a raw u32 member (the scraped diff had left both the
  // old cast-based lines and the new direct accesses in place).
  MSR.FP = 1;
  MSR.DR = 1;
  MSR.IR = 1;
  MSR.EE = 1;
}
void CBoot::SetupBAT(bool is_wii)
@@ -284,7 +284,7 @@ bool Kernel::BootstrapPPC(const std::string& boot_content_path)
// NAND titles start with address translation off at 0x3400 (via the PPC bootstub)
// The state of other CPU registers (like the BAT registers) doesn't matter much
// because the realmode code at 0x3400 initializes everything itself anyway.
MSR = 0;
MSR.Hex = 0;
PC = 0x3400;
return true;
@@ -65,7 +65,7 @@ bool Load()
const PowerPC::CoreMode core_mode = PowerPC::GetMode();
PowerPC::SetMode(PowerPC::CoreMode::Interpreter);
MSR = 0;
MSR.Hex = 0;
PC = 0x3400;
NOTICE_LOG(IOS, "Loaded MIOS and bootstrapped PPC.");
@@ -196,7 +196,7 @@ static void ApplyPatches(const std::vector<Patch>& patches)
// We require at least 2 stack frames, if the stack is shallower than that then it won't work.
static bool IsStackSane()
{
DEBUG_ASSERT(UReg_MSR(MSR).DR && UReg_MSR(MSR).IR);
DEBUG_ASSERT(MSR.DR && MSR.IR);
// Check the stack pointer
u32 SP = GPR(1);
@@ -220,13 +220,12 @@ bool ApplyFramePatches()
// callback hook we can end up catching the game in an exception vector.
// We deal with this by returning false so that SystemTimers will reschedule us in a few cycles
// where we can try again after the CPU hopefully returns back to the normal instruction flow.
UReg_MSR msr = MSR;
if (!msr.DR || !msr.IR || !IsStackSane())
if (!MSR.DR || !MSR.IR || !IsStackSane())
{
DEBUG_LOG(
ACTIONREPLAY,
"Need to retry later. CPU configuration is currently incorrect. PC = 0x%08X, MSR = 0x%08X",
PC, MSR);
PC, MSR.Hex);
return false;
}
@@ -150,8 +150,7 @@ static void WriteBrokenBlockNPC(UGeckoInstruction data)
static bool CheckFPU(u32 data)
{
UReg_MSR msr{MSR};
if (!msr.FP)
if (!MSR.FP)
{
PowerPC::ppcState.Exceptions |= EXCEPTION_FPU_UNAVAILABLE;
PowerPC::CheckExceptions();
@@ -446,7 +446,7 @@ static void gdb_read_register()
wbe32hex(reply, PC);
break;
case 65:
wbe32hex(reply, MSR);
wbe32hex(reply, MSR.Hex);
break;
case 66:
wbe32hex(reply, PowerPC::GetCR());
@@ -531,7 +531,7 @@ static void gdb_write_register()
PC = re32hex(bufptr);
break;
case 65:
MSR = re32hex(bufptr);
MSR.Hex = re32hex(bufptr);
break;
case 66:
PowerPC::SetCR(re32hex(bufptr));
@@ -96,8 +96,7 @@ static void Trace(UGeckoInstruction& inst)
"INTER PC: %08x SRR0: %08x SRR1: %08x CRval: %016lx FPSCR: %08x MSR: %08x LR: "
"%08x %s %08x %s",
PC, SRR0, SRR1, (unsigned long)PowerPC::ppcState.cr_val[0], PowerPC::ppcState.fpscr,
PowerPC::ppcState.msr, PowerPC::ppcState.spr[8], regs.c_str(), inst.hex,
ppc_inst.c_str());
MSR.Hex, PowerPC::ppcState.spr[8], regs.c_str(), inst.hex, ppc_inst.c_str());
}
int Interpreter::SingleStepInner()
@@ -153,8 +152,7 @@ int Interpreter::SingleStepInner()
if (m_prev_inst.hex != 0)
{
const UReg_MSR msr{MSR};
if (msr.FP) // If FPU is enabled, just execute
if (MSR.FP) // If FPU is enabled, just execute
{
m_op_table[m_prev_inst.OPCD](m_prev_inst);
if (PowerPC::ppcState.Exceptions & EXCEPTION_DSI)
@@ -119,9 +119,9 @@ void Interpreter::rfi(UGeckoInstruction inst)
// Restore saved bits from SRR1 to MSR.
// Gecko/Broadway can save more bits than explicitly defined in ppc spec
const int mask = 0x87C0FFFF;
MSR = (MSR & ~mask) | (SRR1 & mask);
MSR.Hex = (MSR.Hex & ~mask) | (SRR1 & mask);
// MSR[13] is set to 0.
MSR &= 0xFFFBFFFF;
MSR.Hex &= 0xFFFBFFFF;
// Here we should check if there are pending exceptions, and if their corresponding enable bits
// are set
// if above is true, we'd do:
@@ -278,7 +278,7 @@ void Interpreter::lmw(UGeckoInstruction inst)
{
u32 address = Helper_Get_EA(inst);
if ((address & 0b11) != 0 || UReg_MSR{MSR}.LE)
if ((address & 0b11) != 0 || MSR.LE)
{
GenerateAlignmentException(address);
return;
@@ -306,7 +306,7 @@ void Interpreter::stmw(UGeckoInstruction inst)
{
u32 address = Helper_Get_EA(inst);
if ((address & 0b11) != 0 || UReg_MSR{MSR}.LE)
if ((address & 0b11) != 0 || MSR.LE)
{
GenerateAlignmentException(address);
return;
@@ -685,7 +685,7 @@ void Interpreter::lswx(UGeckoInstruction inst)
{
u32 EA = Helper_Get_EA_X(inst);
if (UReg_MSR{MSR}.LE)
if (MSR.LE)
{
GenerateAlignmentException(EA);
return;
@@ -867,7 +867,7 @@ void Interpreter::lswi(UGeckoInstruction inst)
else
EA = rGPR[inst.RA];
if (UReg_MSR{MSR}.LE)
if (MSR.LE)
{
GenerateAlignmentException(EA);
return;
@@ -918,7 +918,7 @@ void Interpreter::stswi(UGeckoInstruction inst)
else
EA = rGPR[inst.RA];
if (UReg_MSR{MSR}.LE)
if (MSR.LE)
{
GenerateAlignmentException(EA);
return;
@@ -958,7 +958,7 @@ void Interpreter::stswx(UGeckoInstruction inst)
{
u32 EA = Helper_Get_EA_X(inst);
if (UReg_MSR{MSR}.LE)
if (MSR.LE)
{
GenerateAlignmentException(EA);
return;
@@ -136,7 +136,7 @@ void Interpreter::mtcrf(UGeckoInstruction inst)
void Interpreter::mfmsr(UGeckoInstruction inst)
{
  // mfmsr: move the full 32-bit MSR value into the destination GPR.
  // NOTE(review): this is a privileged instruction on real hardware, but no
  // privilege check is performed here (pre-existing "// Privileged?" TODO).
  // The duplicated old-style `= MSR;` line from the collapsed diff is removed;
  // MSR is a UReg_MSR, so the raw value is read through .Hex.
  rGPR[inst.RD] = MSR.Hex;
}
void Interpreter::mfsr(UGeckoInstruction inst)
@@ -153,7 +153,7 @@ void Interpreter::mfsrin(UGeckoInstruction inst)
void Interpreter::mtmsr(UGeckoInstruction inst)
{
  // mtmsr: replace the full MSR with the source GPR's value.
  // NOTE(review): privileged on real hardware; no privilege check here
  // (pre-existing "// Privileged?" TODO). The duplicated old-style
  // `MSR = ...` line from the collapsed diff is removed; the raw value is
  // written through .Hex since MSR is a UReg_MSR.
  MSR.Hex = rGPR[inst.RS];
  // Changing the MSR can unmask pending exceptions (e.g. enabling EE), so
  // re-check them and end the current block.
  PowerPC::CheckExceptions();
  m_end_block = true;
}
@@ -565,8 +565,8 @@ void Jit64::Trace()
#endif
DEBUG_LOG(DYNA_REC, "JIT64 PC: %08x SRR0: %08x SRR1: %08x FPSCR: %08x MSR: %08x LR: %08x %s %s",
PC, SRR0, SRR1, PowerPC::ppcState.fpscr, PowerPC::ppcState.msr,
PowerPC::ppcState.spr[8], regs.c_str(), fregs.c_str());
PC, SRR0, SRR1, PowerPC::ppcState.fpscr, MSR.Hex, PowerPC::ppcState.spr[8],
regs.c_str(), fregs.c_str());
}
void Jit64::Jit(u32 em_address)
@@ -342,7 +342,7 @@ void Jit64::dcbz(UGeckoInstruction inst)
ADD(32, R(RSCRATCH), gpr.R(a));
AND(32, R(RSCRATCH), Imm32(~31));
if (UReg_MSR(MSR).DR)
if (MSR.DR)
{
// Perform lookup to see if we can use fast path.
MOV(64, R(RSCRATCH2), ImmPtr(&PowerPC::dbat_table[0]));
@@ -367,7 +367,7 @@ void Jit64::dcbz(UGeckoInstruction inst)
ABI_CallFunctionR(PowerPC::ClearCacheLine, RSCRATCH);
ABI_PopRegistersAndAdjustStack(registersInUse, 0);
if (UReg_MSR(MSR).DR)
if (MSR.DR)
{
FixupBranch end = J(true);
SwitchToNearCode();
@@ -24,7 +24,7 @@ void Jit64::psq_stXX(UGeckoInstruction inst)
JITDISABLE(bJITLoadStorePairedOff);
// For performance, the AsmCommon routines assume address translation is on.
FALLBACK_IF(!UReg_MSR(MSR).DR);
FALLBACK_IF(!MSR.DR);
s32 offset = inst.SIMM_12;
bool indexed = inst.OPCD == 4;
@@ -114,7 +114,7 @@ void Jit64::psq_lXX(UGeckoInstruction inst)
JITDISABLE(bJITLoadStorePairedOff);
// For performance, the AsmCommon routines assume address translation is on.
FALLBACK_IF(!UReg_MSR(MSR).DR);
FALLBACK_IF(!MSR.DR);
s32 offset = inst.SIMM_12;
bool indexed = inst.OPCD == 4;
@@ -364,8 +364,8 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg& opAddress,
}
FixupBranch exit;
bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || UReg_MSR(MSR).DR;
bool fast_check_address = !slowmem && dr_set;
const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || MSR.DR;
const bool fast_check_address = !slowmem && dr_set;
if (fast_check_address)
{
FixupBranch slow = CheckIfSafeAddress(R(reg_value), reg_addr, registersInUse);
@@ -526,8 +526,8 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
}
FixupBranch exit;
bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || UReg_MSR(MSR).DR;
bool fast_check_address = !slowmem && dr_set;
const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || MSR.DR;
const bool fast_check_address = !slowmem && dr_set;
if (fast_check_address)
{
FixupBranch slow = CheckIfSafeAddress(reg_value, reg_addr, registersInUse);
@@ -23,7 +23,7 @@ void JitArm64::psq_l(UGeckoInstruction inst)
FALLBACK_IF(jo.memcheck || !jo.fastmem);
// The asm routines assume address translation is on.
FALLBACK_IF(!UReg_MSR(MSR).DR);
FALLBACK_IF(!MSR.DR);
// X30 is LR
// X0 contains the scale
@@ -106,7 +106,7 @@ void JitArm64::psq_st(UGeckoInstruction inst)
FALLBACK_IF(jo.memcheck || !jo.fastmem);
// The asm routines assume address translation is on.
FALLBACK_IF(!UReg_MSR(MSR).DR);
FALLBACK_IF(!MSR.DR);
// X30 is LR
// X0 contains the scale
@@ -51,6 +51,6 @@ bool JitBase::CanMergeNextInstructions(int count) const
void JitBase::UpdateMemoryOptions()
{
  // Fastmem is only usable when data address translation is on, unless there
  // are no active memory watchpoints to honor. The duplicated old-style
  // `UReg_MSR(MSR).DR` line from the collapsed diff is removed; MSR is a
  // UReg_MSR, so DR can be read directly.
  const bool any_watchpoints = PowerPC::memchecks.HasAny();
  jo.fastmem = SConfig::GetInstance().bFastmem && (MSR.DR || !any_watchpoints);
  // Software MMU emulation or any watchpoint forces the slow, checked path.
  jo.memcheck = SConfig::GetInstance().bMMU || any_watchpoints;
}
@@ -100,7 +100,7 @@ JitBlock* JitBaseBlockCache::AllocateBlock(u32 em_address)
JitBlock& b = block_map.emplace(physicalAddress, JitBlock())->second;
b.effectiveAddress = em_address;
b.physicalAddress = physicalAddress;
b.msrBits = MSR & JIT_CACHE_MSR_MASK;
b.msrBits = MSR.Hex & JIT_CACHE_MSR_MASK;
b.linkData.clear();
b.fast_block_map_index = 0;
return &b;
@@ -174,8 +174,8 @@ const u8* JitBaseBlockCache::Dispatch()
{
JitBlock* block = fast_block_map[FastLookupIndexForAddress(PC)];
if (!block || block->effectiveAddress != PC || block->msrBits != (MSR & JIT_CACHE_MSR_MASK))
block = MoveBlockIntoFastCache(PC, MSR & JIT_CACHE_MSR_MASK);
if (!block || block->effectiveAddress != PC || block->msrBits != (MSR.Hex & JIT_CACHE_MSR_MASK))
block = MoveBlockIntoFastCache(PC, MSR.Hex & JIT_CACHE_MSR_MASK);
if (!block)
return nullptr;
@@ -141,12 +141,12 @@ int GetHostCode(u32* address, const u8** code, u32* code_size)
return 1;
}
JitBlock* block = g_jit->GetBlockCache()->GetBlockFromStartAddress(*address, MSR);
JitBlock* block = g_jit->GetBlockCache()->GetBlockFromStartAddress(*address, MSR.Hex);
if (!block)
{
for (int i = 0; i < 500; i++)
{
block = g_jit->GetBlockCache()->GetBlockFromStartAddress(*address - 4 * i, MSR);
block = g_jit->GetBlockCache()->GetBlockFromStartAddress(*address - 4 * i, MSR.Hex);
if (block)
break;
}
Oops, something went wrong.

0 comments on commit ffcf107

Please sign in to comment.