GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/MIPS/LoongArch64/LoongArch64CompLoadStore.cpp
// Copyright (c) 2023- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include "Core/MemMap.h"
#include "Core/MIPS/LoongArch64/LoongArch64Jit.h"
#include "Core/MIPS/LoongArch64/LoongArch64RegCache.h"

// This file contains compilation for load/store instructions.
//
// All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
// Currently known non-working ones should have DISABLE. No flags because that's in IR already.

// #define CONDITIONAL_DISABLE { CompIR_Generic(inst); return; }
#define CONDITIONAL_DISABLE {}
#define DISABLE { CompIR_Generic(inst); return; }
#define INVALIDOP { _assert_msg_(false, "Invalid IR inst %d", (int)inst.op); CompIR_Generic(inst); return; }

namespace MIPSComp {

using namespace LoongArch64Gen;
using namespace LoongArch64JitConstants;
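
// Loads the host address for the guest address in src1 into SCRATCH1: zero-extend
// (or mask, with MASKED_PSP_MEMORY) the 32-bit guest address, then add MEMBASEREG.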
void LoongArch64JitBackend::SetScratch1ToSrc1Address(IRReg src1) {
	regs_.MapGPR(src1);
#ifdef MASKED_PSP_MEMORY
	SLLI_W(SCRATCH1, regs_.R(src1), 2);
	SRLI_W(SCRATCH1, SCRATCH1, 2);
	ADD_D(SCRATCH1, SCRATCH1, MEMBASEREG);
#else
	// Clear the top bits to be safe.
	SLLI_D(SCRATCH1, regs_.R(src1), 32);
	SRLI_D(SCRATCH1, SCRATCH1, 32);
	ADD_D(SCRATCH1, SCRATCH1, MEMBASEREG);
#endif
}
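
// LoongArch64 loads/stores take a signed 12-bit displacement (-2048..2047). If the
// constant offset (plus the extra range needed) doesn't fit, fold it into SCRATCH1
// and return 0 so the caller can use a zero displacement.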
int32_t LoongArch64JitBackend::AdjustForAddressOffset(LoongArch64Gen::LoongArch64Reg *reg, int32_t constant, int32_t range) {
	if (constant < -2048 || constant + range > 2047) {
#ifdef MASKED_PSP_MEMORY
		if (constant > 0)
			constant &= Memory::MEMVIEW32_MASK;
#endif
		// It can't actually be this negative, so it must be an address constant with the top bit set.
		if ((constant & 0xC0000000) == 0x80000000) {
			// Load it as unsigned so it isn't sign-extended to 64 bits.
			LI(SCRATCH2, (uint32_t)constant);
			ADD_D(SCRATCH1, *reg, SCRATCH2);
		} else {
			LI(SCRATCH2, constant);
			ADD_D(SCRATCH1, *reg, SCRATCH2);
		}
		*reg = SCRATCH1;
		return 0;
	}
	return constant;
}

void LoongArch64JitBackend::CompIR_Load(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.dest, inst.src1);
	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}
	// With NOINIT, MapReg won't subtract MEMBASEREG even if dest == src1.
	regs_.MapGPR(inst.dest, MIPSMap::NOINIT);
	regs_.MarkGPRDirty(inst.dest, true);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Load8:
		LD_BU(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load8Ext:
		LD_B(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load16:
		LD_HU(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load16Ext:
		LD_H(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load32:
		LD_W(regs_.R(inst.dest), addrReg, imm);
		break;

	case IROp::Load32Linked:
		if (inst.dest != MIPS_REG_ZERO)
			LD_W(regs_.R(inst.dest), addrReg, imm);
		regs_.SetGPRImm(IRREG_LLBIT, 1);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_LoadShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Load32Left:
	case IROp::Load32Right:
		// Should not happen if the pass to split is active.
		DISABLE;
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_FLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::LoadFloat:
		regs_.MapFPR(inst.dest, MIPSMap::NOINIT);
		FLD_S(regs_.F(inst.dest), addrReg, imm);
		break;

	default:
		INVALIDOP;
		break;
	}
}
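
// Vec4 loads use a single 128-bit LSX load when available; otherwise they fall back
// to four scalar float loads.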

void LoongArch64JitBackend::CompIR_VecLoad(IRInst inst) {
	CONDITIONAL_DISABLE;

	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	// We need to be able to address the whole 16 bytes, so pass a range of 12.
	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant, 12);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::LoadVec4:
		if (cpu_info.LOONGARCH_LSX) {
			regs_.MapVec4(inst.dest, MIPSMap::NOINIT);
			VLD(regs_.V(inst.dest), addrReg, imm);
		} else {
			for (int i = 0; i < 4; ++i) {
				// Spilling is okay.
				regs_.MapFPR(inst.dest + i, MIPSMap::NOINIT);
				FLD_S(regs_.F(inst.dest + i), addrReg, imm + 4 * i);
			}
		}
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_Store(IRInst inst) {
	CONDITIONAL_DISABLE;

	regs_.SpillLockGPR(inst.src3, inst.src1);
	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if ((jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) && inst.src3 != inst.src1) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}
	LoongArch64Reg valueReg = regs_.TryMapTempImm(inst.src3);
	if (valueReg == INVALID_REG)
		valueReg = regs_.MapGPR(inst.src3);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::Store8:
		ST_B(valueReg, addrReg, imm);
		break;

	case IROp::Store16:
		ST_H(valueReg, addrReg, imm);
		break;

	case IROp::Store32:
		ST_W(valueReg, addrReg, imm);
		break;

	default:
		INVALIDOP;
		break;
	}
}
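
// Store32Conditional pairs with Load32Linked above: the store only happens if IRREG_LLBIT
// is still set, and dest (when not zero) receives 1 on success or 0 on failure.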

void LoongArch64JitBackend::CompIR_CondStore(IRInst inst) {
	CONDITIONAL_DISABLE;
	if (inst.op != IROp::Store32Conditional)
		INVALIDOP;

	regs_.SpillLockGPR(IRREG_LLBIT, inst.src3, inst.src1);
	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if ((jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) && inst.src3 != inst.src1) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}
	regs_.MapGPR(inst.src3, inst.dest == MIPS_REG_ZERO ? MIPSMap::INIT : MIPSMap::DIRTY);
	regs_.MapGPR(IRREG_LLBIT);

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	FixupBranch condFailed = BEQZ(regs_.R(IRREG_LLBIT));
	ST_W(regs_.R(inst.src3), addrReg, imm);

	if (inst.dest != MIPS_REG_ZERO) {
		LI(regs_.R(inst.dest), 1);
		FixupBranch finish = B();

		SetJumpTarget(condFailed);
		LI(regs_.R(inst.dest), 0);
		SetJumpTarget(finish);
	} else {
		SetJumpTarget(condFailed);
	}
}

void LoongArch64JitBackend::CompIR_StoreShift(IRInst inst) {
	CONDITIONAL_DISABLE;

	switch (inst.op) {
	case IROp::Store32Left:
	case IROp::Store32Right:
		// Should not happen if the pass to split is active.
		DISABLE;
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_FStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::StoreFloat:
		regs_.MapFPR(inst.src3);
		FST_S(regs_.F(inst.src3), addrReg, imm);
		break;

	default:
		INVALIDOP;
		break;
	}
}

void LoongArch64JitBackend::CompIR_VecStore(IRInst inst) {
	CONDITIONAL_DISABLE;

	LoongArch64Reg addrReg = INVALID_REG;
	if (inst.src1 == MIPS_REG_ZERO) {
		// This will get changed by AdjustForAddressOffset.
		addrReg = MEMBASEREG;
#ifdef MASKED_PSP_MEMORY
		inst.constant &= Memory::MEMVIEW32_MASK;
#endif
	} else if (jo.cachePointers || regs_.IsGPRMappedAsPointer(inst.src1)) {
		addrReg = regs_.MapGPRAsPointer(inst.src1);
	} else {
		SetScratch1ToSrc1Address(inst.src1);
		addrReg = SCRATCH1;
	}

	// We need to be able to address the whole 16 bytes, so pass a range of 12.
	s32 imm = AdjustForAddressOffset(&addrReg, inst.constant, 12);

	// TODO: Safe memory? Or enough to have crash handler + validate?

	switch (inst.op) {
	case IROp::StoreVec4:
		if (cpu_info.LOONGARCH_LSX) {
			regs_.MapVec4(inst.src3);
			VST(regs_.V(inst.src3), addrReg, imm);
		} else {
			for (int i = 0; i < 4; ++i) {
				// Spilling is okay, though not ideal.
				regs_.MapFPR(inst.src3 + i);
				FST_S(regs_.F(inst.src3 + i), addrReg, imm + 4 * i);
			}
		}
		break;

	default:
		INVALIDOP;
		break;
	}
}

} // namespace MIPSComp