GitHub Repository: PojavLauncherTeam/mobile
Path: blob/master/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp

/*
 * Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeBlob.hpp"
#include "code/vmreg.inline.hpp"
#include "gc/z/zBarrier.inline.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#endif // COMPILER1
#ifdef COMPILER2
#include "gc/z/c2/zBarrierSetC2.hpp"
#endif // COMPILER2

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#endif

#undef __
#define __ masm->

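// ZGC load barrier (fast path): every loaded reference is tested against the
// per-thread "bad mask", which encodes the colored-pointer metadata bits that
// are invalid for the current GC phase. (ref & bad_mask) == 0 means the
// reference is good; otherwise the slow path below calls into
// ZBarrierSetRuntime to heal it.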
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  assert_different_registers(rscratch1, rscratch2, src.base());
  assert_different_registers(rscratch1, rscratch2, dst);

  Label done;

  // Load bad mask into scratch register.
  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
  __ lea(rscratch2, src);
  __ ldr(dst, src);

  // Test reference against bad mask. If the test is non-zero, the reference
  // is bad and we need to fix it up.
  __ tst(dst, rscratch1);
  __ br(Assembler::EQ, done);

  __ enter();

  __ push_call_clobbered_registers_except(RegSet::of(dst));

  if (c_rarg0 != dst) {
    __ mov(c_rarg0, dst);
  }
  __ mov(c_rarg1, rscratch2);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  // Make sure dst has the return value.
  if (dst != r0) {
    __ mov(dst, r0);
  }

  __ pop_call_clobbered_registers_except(RegSet::of(dst));
  __ leave();

  __ bind(done);
}

#ifdef ASSERT

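// ZGC is a load-barrier-only collector, so stores need no barrier in product
// builds. This ASSERT-only override merely verifies that the value being
// stored is a good (already healed) reference before delegating the actual
// store to the base class.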
void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
                                    DecoratorSet decorators,
                                    BasicType type,
                                    Address dst,
                                    Register val,
                                    Register tmp1,
                                    Register tmp2) {
  // Verify value
  if (is_reference_type(type)) {
    // Note that val could be noreg, which means we
    // are storing null and can skip verification.
    if (val != noreg) {
      Label done;

      // tmp1 and tmp2 are often set to noreg.
      RegSet savedRegs = RegSet::of(rscratch1);
      __ push(savedRegs, sp);

      __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
      __ tst(val, rscratch1);
      __ br(Assembler::EQ, done);
      __ stop("Verify oop store failed");
      __ should_not_reach_here();
      __ bind(done);
      __ pop(savedRegs, sp);
    }
  }

  // Store value
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);
}

#endif // ASSERT

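// For an oop arraycopy, heal all source elements up front with a single
// runtime call so the copy loop itself needs no per-element barriers. The
// register shuffle below just routes src/count into the C calling-convention
// registers c_rarg0/c_rarg1 without clobbering either value.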
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              bool is_oop,
                                              Register src,
                                              Register dst,
                                              Register count,
                                              RegSet saved_regs) {
  if (!is_oop) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  assert_different_registers(src, count, rscratch1);

  __ push(saved_regs, sp);

  if (count == c_rarg0) {
    if (src == c_rarg1) {
      // src and count are exactly swapped; exchange them via rscratch1.
      __ mov(rscratch1, c_rarg0);
      __ mov(c_rarg0, c_rarg1);
      __ mov(c_rarg1, rscratch1);
    } else {
      __ mov(c_rarg1, count);
      __ mov(c_rarg0, src);
    }
  } else {
    __ mov(c_rarg0, src);
    __ mov(c_rarg1, count);
  }

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);

  __ pop(saved_regs, sp);

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
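// Fast-path resolution of a jobject from JNI native code: resolve the handle,
// then test it against the address bad mask. jni_env points into the
// JavaThread, so the mask is reachable at a fixed (negative) offset from it.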
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register robj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  assert_different_registers(jni_env, robj, tmp);

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);

  // The Address offset (-784) is too large for a direct load; the supported
  // immediate range is -128 to +127. Materialize it in tmp instead.
  __ mov(tmp, (int64_t)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -
                        in_bytes(JavaThread::jni_environment_offset())));

  // Load address bad mask
  __ add(tmp, jni_env, tmp);
  __ ldr(tmp, Address(tmp));

  // Check address bad mask
  __ tst(robj, tmp);
  __ br(Assembler::NE, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}

#ifdef COMPILER1

#undef __
#define __ ce->masm()->

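// C1 fast path: emit only the mask test; the conditional branch to the stub
// is emitted by the shared ZBarrierSetC1 code based on the flags set here.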
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  assert_different_registers(rscratch1, rthread, ref->as_register());

  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
  __ tst(ref->as_register(), rscratch1);
}
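// C1 slow-path stub: materialize the field address if needed, preserve r0
// when it is live, pass (ref, ref_addr) to the runtime stub, and move the
// healed reference from r0 back into ref.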
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save r0 unless it is the result or tmp register.
  // Set up SP to accommodate parameters and maybe r0.
  if (ref != r0 && tmp != r0) {
    __ sub(sp, sp, 32);
    __ str(r0, Address(sp, 16));
  } else {
    __ sub(sp, sp, 16);
  }

  // Setup arguments and call runtime stub
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);

  __ far_call(stub->runtime_stub());

  // Verify result
  __ verify_oop(r0, "Bad oop");

  // Move result into place
  if (ref != r0) {
    __ mov(ref, r0);
  }

  // Restore r0 unless it is the result or tmp register
  if (ref != r0 && tmp != r0) {
    __ ldr(r0, Address(sp, 16));
    __ add(sp, sp, 32);
  } else {
    __ add(sp, sp, 16);
  }

  // Stub exit
  __ b(*stub->continuation());
}

#undef __
#define __ sasm->

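// Runtime stub shared by the C1 load-barrier stubs for a given decorator set:
// saves call-clobbered registers (except r0, which carries the result),
// reloads the two stub parameters and calls the preloaded-field barrier.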
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  __ prologue("zgc_load_barrier stub", false);

  __ push_call_clobbered_registers_except(RegSet::of(r0));

  // Setup arguments
  __ load_parameter(0, c_rarg0);
  __ load_parameter(1, c_rarg1);

  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);

  __ pop_call_clobbered_registers_except(RegSet::of(r0));

  __ epilogue();
}
#endif // COMPILER1

#ifdef COMPILER2

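// A FloatRegister spans a pair of OptoReg slots; masking off the low bit
// appears intended to normalize any slot to the base of its register so each
// FloatRegister is saved/restored only once (see ZSaveLiveRegisters below).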
OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) {
  if (!OptoReg::is_reg(opto_reg)) {
    return OptoReg::Bad;
  }

  const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
  if (vm_reg->is_FloatRegister()) {
    return opto_reg & ~1;
  }

  return opto_reg;
}

#undef __
#define __ _masm->

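// RAII helper that saves the stub's live registers around the slow-path call
// and restores them in the destructor. Callee-saved (SOE) registers, the
// scratch registers r8/r9, and the ref register (which the barrier redefines)
// are dropped from the save set since they need no preservation here.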
class ZSaveLiveRegisters {
private:
  MacroAssembler* const _masm;
  RegSet                _gp_regs;
  FloatRegSet           _fp_regs;

public:
  void initialize(ZLoadBarrierStubC2* stub) {
    // Record registers that need to be saved/restored
    RegMaskIterator rmi(stub->live());
    while (rmi.has_next()) {
      const OptoReg::Name opto_reg = rmi.next();
      if (OptoReg::is_reg(opto_reg)) {
        const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);
        if (vm_reg->is_Register()) {
          _gp_regs += RegSet::of(vm_reg->as_Register());
        } else if (vm_reg->is_FloatRegister()) {
          _fp_regs += FloatRegSet::of(vm_reg->as_FloatRegister());
        } else {
          fatal("Unknown register type");
        }
      }
    }

    // Remove C-ABI SOE registers, scratch regs and _ref register that will be updated
    _gp_regs -= RegSet::range(r19, r30) + RegSet::of(r8, r9, stub->ref());
  }

  ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _gp_regs(),
      _fp_regs() {

    // Figure out what registers to save/restore
    initialize(stub);

    // Save registers
    __ push(_gp_regs, sp);
    __ push_fp(_fp_regs, sp);
  }

  ~ZSaveLiveRegisters() {
    // Restore registers
    __ pop_fp(_fp_regs, sp);

    // External runtime call may clobber ptrue reg
    __ reinitialize_ptrue();

    __ pop(_gp_regs, sp);
  }
};

#undef __
#define __ _masm->

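// RAII helper that places ref in c_rarg0 and the field address in c_rarg1,
// with case analysis covering every possible overlap between ref, ref_addr
// and the two argument registers so neither value is clobbered mid-swap. The
// destructor moves the healed oop from r0 back into ref.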
class ZSetupArguments {
private:
  MacroAssembler* const _masm;
  const Register        _ref;
  const Address         _ref_addr;

public:
  ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :
      _masm(masm),
      _ref(stub->ref()),
      _ref_addr(stub->ref_addr()) {

    // Setup arguments
    if (_ref_addr.base() == noreg) {
      // No self healing
      if (_ref != c_rarg0) {
        __ mov(c_rarg0, _ref);
      }
      __ mov(c_rarg1, 0);
    } else {
      // Self healing
      if (_ref == c_rarg0) {
        // _ref is already at correct place
        __ lea(c_rarg1, _ref_addr);
      } else if (_ref != c_rarg1) {
        // _ref is in wrong place, but not in c_rarg1, so fix it first
        __ lea(c_rarg1, _ref_addr);
        __ mov(c_rarg0, _ref);
      } else if (_ref_addr.base() != c_rarg0 && _ref_addr.index() != c_rarg0) {
        assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");
        __ mov(c_rarg0, _ref);
        __ lea(c_rarg1, _ref_addr);
      } else {
        assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");
        if (_ref_addr.base() == c_rarg0 || _ref_addr.index() == c_rarg0) {
          __ mov(rscratch2, c_rarg1);
          __ lea(c_rarg1, _ref_addr);
          __ mov(c_rarg0, rscratch2);
        } else {
          ShouldNotReachHere();
        }
      }
    }
  }

  ~ZSetupArguments() {
    // Transfer result
    if (_ref != r0) {
      __ mov(_ref, r0);
    }
  }
};

#undef __
#define __ masm->

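// C2 slow-path stub: the two RAII helpers are scoped so registers are saved
// and arguments set up before the indirect call, then torn down in reverse
// order (argument result transfer first, register restore second) before
// branching back to the fast path.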
void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
  BLOCK_COMMENT("ZLoadBarrierStubC2");

  // Stub entry
  __ bind(*stub->entry());

  {
    ZSaveLiveRegisters save_live_registers(masm, stub);
    ZSetupArguments setup_arguments(masm, stub);
    __ mov(rscratch1, stub->slow_path());
    __ blr(rscratch1);
  }
  // Stub exit
  __ b(*stub->continuation());
}

#undef __

#endif // COMPILER2