GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/MIPS/LoongArch64/LoongArch64CompSystem.cpp
// Copyright (c) 2023- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include "Common/Profiler/Profiler.h"
#include "Core/Core.h"
#include "Core/HLE/HLE.h"
#include "Core/HLE/ReplaceTables.h"
#include "Core/MemMap.h"
#include "Core/MIPS/LoongArch64/LoongArch64Jit.h"
#include "Core/MIPS/LoongArch64/LoongArch64RegCache.h"

// This file contains compilation for basic PC/downcount accounting, syscalls, debug funcs, etc.
//
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non-working ones should have DISABLE. No flags because that's in IR already.

// #define CONDITIONAL_DISABLE { CompIR_Generic(inst); return; }
#define CONDITIONAL_DISABLE {}
#define DISABLE { CompIR_Generic(inst); return; }
#define INVALIDOP { _assert_msg_(false, "Invalid IR inst %d", (int)inst.op); CompIR_Generic(inst); return; }

namespace MIPSComp {

using namespace LoongArch64Gen;
using namespace LoongArch64JitConstants;

void LoongArch64JitBackend::CompIR_Basic(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::SetConst:
		// Sign extend all constants. We get 0xFFFFFFFF sometimes, and it's more work to truncate.
		// The register only holds 32 bits in the end anyway.
		regs_.SetGPRImm(inst.dest, (int32_t)inst.constant);
		break;

	case IROp::SetConstF:
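		// A zero constant can come straight from R_ZERO below; anything else is
		// materialized via QuickFLI, with SCRATCH1 available as a temporary.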
		regs_.Map(inst);
		if (inst.constant == 0)
			MOVGR2FR_W(regs_.F(inst.dest), R_ZERO);
		else
			QuickFLI(32, regs_.F(inst.dest), inst.constant, SCRATCH1);
		break;

	case IROp::Downcount:
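		// ADDI_D takes a signed 12-bit immediate (-2048..2047), so constants up
		// to 2048 still fit once negated; larger ones go through SCRATCH1.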
		if (inst.constant <= 2048) {
			ADDI_D(DOWNCOUNTREG, DOWNCOUNTREG, -(s32)inst.constant);
		} else {
			LI(SCRATCH1, inst.constant);
			SUB_D(DOWNCOUNTREG, DOWNCOUNTREG, SCRATCH1);
		}
		break;

	case IROp::SetPC:
		regs_.Map(inst);
		MovToPC(regs_.R(inst.src1));
		break;

	case IROp::SetPCConst:
		LI(SCRATCH1, inst.constant);
		MovToPC(SCRATCH1);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_Transfer(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::SetCtrlVFPU:
		regs_.SetGPRImm(IRREG_VFPU_CTRL_BASE + inst.dest, inst.constant);
		break;

	case IROp::SetCtrlVFPUReg:
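		// The second MarkGPRDirty argument tracks whether the value is known to be
		// held sign-extended ("normalized") in the 64-bit register, so later code
		// can skip re-normalizing it.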
		regs_.Map(inst);
		MOVE(regs_.R(IRREG_VFPU_CTRL_BASE + inst.dest), regs_.R(inst.src1));
		regs_.MarkGPRDirty(IRREG_VFPU_CTRL_BASE + inst.dest, regs_.IsNormalized32(inst.src1));
		break;

	case IROp::SetCtrlVFPUFReg:
		regs_.Map(inst);
		MOVFR2GR_S(regs_.R(IRREG_VFPU_CTRL_BASE + inst.dest), regs_.F(inst.src1));
		regs_.MarkGPRDirty(IRREG_VFPU_CTRL_BASE + inst.dest, true);
		break;

	case IROp::FpCondFromReg:
		regs_.MapWithExtra(inst, { { 'G', IRREG_FPCOND, 1, MIPSMap::NOINIT } });
		MOVE(regs_.R(IRREG_FPCOND), regs_.R(inst.src1));
		break;

	case IROp::FpCondToReg:
		regs_.MapWithExtra(inst, { { 'G', IRREG_FPCOND, 1, MIPSMap::INIT } });
		MOVE(regs_.R(inst.dest), regs_.R(IRREG_FPCOND));
		regs_.MarkGPRDirty(inst.dest, regs_.IsNormalized32(IRREG_FPCOND));
		break;

	case IROp::FpCtrlFromReg:
		regs_.MapWithExtra(inst, { { 'G', IRREG_FPCOND, 1, MIPSMap::NOINIT } });
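		// 0x0181FFFF masks fcr31 down to its writable bits: the low control/flag
		// fields plus the condition bit (bit 23, extracted below) and FS (bit 24).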
		LI(SCRATCH1, 0x0181FFFF);
		AND(SCRATCH1, regs_.R(inst.src1), SCRATCH1);
		// Extract the new fpcond value.
		SRLI_D(regs_.R(IRREG_FPCOND), SCRATCH1, 23);
		ANDI(regs_.R(IRREG_FPCOND), regs_.R(IRREG_FPCOND), 1);
		ST_W(SCRATCH1, CTXREG, IRREG_FCR31 * 4);
		regs_.MarkGPRDirty(IRREG_FPCOND, true);
		break;

	case IROp::FpCtrlToReg:
		regs_.MapWithExtra(inst, { { 'G', IRREG_FPCOND, 1, MIPSMap::INIT } });
		// Load fcr31 and clear the fpcond bit.
		LD_W(SCRATCH1, CTXREG, IRREG_FCR31 * 4);
		LI(SCRATCH2, ~(1 << 23));
		AND(SCRATCH1, SCRATCH1, SCRATCH2);

		// Now get the correct fpcond bit.
		ANDI(SCRATCH2, regs_.R(IRREG_FPCOND), 1);
		SLLI_D(SCRATCH2, SCRATCH2, 23);
		OR(regs_.R(inst.dest), SCRATCH1, SCRATCH2);

		// Also update mips->fcr31 while we're here.
		ST_W(regs_.R(inst.dest), CTXREG, IRREG_FCR31 * 4);
		regs_.MarkGPRDirty(inst.dest, true);
		break;

	case IROp::VfpuCtrlToReg:
		regs_.Map(inst);
		MOVE(regs_.R(inst.dest), regs_.R(IRREG_VFPU_CTRL_BASE + inst.src1));
		regs_.MarkGPRDirty(inst.dest, regs_.IsNormalized32(IRREG_VFPU_CTRL_BASE + inst.src1));
		break;

	case IROp::FMovFromGPR:
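		// A known-zero source is common enough to special-case: R_ZERO can be
		// moved into the FPR directly, without mapping the source GPR at all.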
		if (regs_.IsGPRImm(inst.src1) && regs_.GetGPRImm(inst.src1) == 0) {
			regs_.MapFPR(inst.dest, MIPSMap::NOINIT);
			MOVGR2FR_W(regs_.F(inst.dest), R_ZERO);
		} else {
			regs_.Map(inst);
			MOVGR2FR_W(regs_.F(inst.dest), regs_.R(inst.src1));
		}
		break;

	case IROp::FMovToGPR:
		regs_.Map(inst);
		MOVFR2GR_S(regs_.R(inst.dest), regs_.F(inst.src1));
		regs_.MarkGPRDirty(inst.dest, true);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_System(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Syscall:
		FlushAll();
		SaveStaticRegisters();
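		// Syscalls can read or write any part of the MIPS context, so everything
		// must be flushed to memory and static registers saved before the call.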

		WriteDebugProfilerStatus(IRProfilerStatus::SYSCALL);
#ifdef USE_PROFILER
		// When profiling, we can't skip CallSyscall, since it times syscalls.
		LI(R4, (int32_t)inst.constant);
		QuickCallFunction(&CallSyscall, SCRATCH2);
#else
		// Skip the CallSyscall where possible.
		{
			MIPSOpcode op(inst.constant);
			void *quickFunc = GetQuickSyscallFunc(op);
			if (quickFunc) {
				LI(R4, (uintptr_t)GetSyscallFuncPointer(op));
				QuickCallFunction((const u8 *)quickFunc, SCRATCH2);
			} else {
				LI(R4, (int32_t)inst.constant);
				QuickCallFunction(&CallSyscall, SCRATCH2);
			}
		}
#endif

		WriteDebugProfilerStatus(IRProfilerStatus::IN_JIT);
		LoadStaticRegisters();
		// This is always followed by an ExitToPC, where we check coreState.
		break;

	case IROp::CallReplacement:
		FlushAll();
		SaveStaticRegisters();
		WriteDebugProfilerStatus(IRProfilerStatus::REPLACEMENT);
		QuickCallFunction(GetReplacementFunc(inst.constant)->replaceFunc, SCRATCH2);
		WriteDebugProfilerStatus(IRProfilerStatus::IN_JIT);
		LoadStaticRegisters();

		// Careful not to clobber the result value in R4 before it's consumed.
		MOVE(SCRATCH1, R4);
		SRAI_W(SCRATCH2, R4, 31);
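		// SCRATCH2 is now 0 if the result was non-negative, -1 if it was negative.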
		// Absolute value trick: if neg, abs(x) == (x ^ -1) + 1.
		XOR(SCRATCH1, SCRATCH1, SCRATCH2);
		SUB_W(SCRATCH1, SCRATCH1, SCRATCH2);
		SUB_D(DOWNCOUNTREG, DOWNCOUNTREG, SCRATCH1);

		// R4 might be the mapped reg, but there's only one.
		// Set dest reg to the sign of the result.
		regs_.Map(inst);
		MOVE(regs_.R(inst.dest), SCRATCH2);
		break;

	case IROp::Break:
		FlushAll();
		// This doesn't naturally have restore/apply around it.
		RestoreRoundingMode(true);
		SaveStaticRegisters();
		MovFromPC(R4);
		QuickCallFunction(&Core_BreakException, SCRATCH2);
		LoadStaticRegisters();
		ApplyRoundingMode(true);
		MovFromPC(SCRATCH1);
		ADDI_D(SCRATCH1, SCRATCH1, 4);
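		// Resume at the instruction following the break.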
		QuickJ(R_RA, dispatcherPCInSCRATCH1_);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_Breakpoint(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Breakpoint:
	case IROp::MemoryCheck:
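		// No native implementation yet; fall back to the generic IR path.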
		CompIR_Generic(inst);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_ValidateAddress(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::ValidateAddress8:
	case IROp::ValidateAddress16:
	case IROp::ValidateAddress32:
	case IROp::ValidateAddress128:
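		// Address validation is likewise left to the generic IR fallback for now.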
		CompIR_Generic(inst);
		break;

	default:
		INVALIDOP;
		break;
	}
}

} // namespace MIPSComp