// Path: blob/master/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp
// (scraped page metadata: 41153 views)
/*1* Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.2* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.3*4* This code is free software; you can redistribute it and/or modify it5* under the terms of the GNU General Public License version 2 only, as6* published by the Free Software Foundation.7*8* This code is distributed in the hope that it will be useful, but WITHOUT9* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or10* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License11* version 2 for more details (a copy is included in the LICENSE file that12* accompanied this code).13*14* You should have received a copy of the GNU General Public License version15* 2 along with this work; if not, write to the Free Software Foundation,16* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.17*18* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA19* or visit www.oracle.com if you need additional information or have any20* questions.21*/2223#include "precompiled.hpp"24#include "asm/macroAssembler.inline.hpp"25#include "code/codeBlob.hpp"26#include "code/vmreg.inline.hpp"27#include "gc/z/zBarrier.inline.hpp"28#include "gc/z/zBarrierSet.hpp"29#include "gc/z/zBarrierSetAssembler.hpp"30#include "gc/z/zBarrierSetRuntime.hpp"31#include "gc/z/zThreadLocalData.hpp"32#include "memory/resourceArea.hpp"33#include "runtime/sharedRuntime.hpp"34#include "utilities/macros.hpp"35#ifdef COMPILER136#include "c1/c1_LIRAssembler.hpp"37#include "c1/c1_MacroAssembler.hpp"38#include "gc/z/c1/zBarrierSetC1.hpp"39#endif // COMPILER140#ifdef COMPILER241#include "gc/z/c2/zBarrierSetC2.hpp"42#endif // COMPILER24344#ifdef PRODUCT45#define BLOCK_COMMENT(str) /* nothing */46#else47#define BLOCK_COMMENT(str) __ block_comment(str)48#endif4950#undef __51#define __ masm->5253void ZBarrierSetAssembler::load_at(MacroAssembler* masm,54DecoratorSet decorators,55BasicType type,56Register dst,57Address 
src,58Register tmp1,59Register tmp_thread) {60if (!ZBarrierSet::barrier_needed(decorators, type)) {61// Barrier not needed62BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);63return;64}6566assert_different_registers(rscratch1, rscratch2, src.base());67assert_different_registers(rscratch1, rscratch2, dst);6869Label done;7071// Load bad mask into scratch register.72__ ldr(rscratch1, address_bad_mask_from_thread(rthread));73__ lea(rscratch2, src);74__ ldr(dst, src);7576// Test reference against bad mask. If mask bad, then we need to fix it up.77__ tst(dst, rscratch1);78__ br(Assembler::EQ, done);7980__ enter();8182__ push_call_clobbered_registers_except(RegSet::of(dst));8384if (c_rarg0 != dst) {85__ mov(c_rarg0, dst);86}87__ mov(c_rarg1, rscratch2);8889__ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);9091// Make sure dst has the return value.92if (dst != r0) {93__ mov(dst, r0);94}9596__ pop_call_clobbered_registers_except(RegSet::of(dst));97__ leave();9899__ bind(done);100}101102#ifdef ASSERT103104void ZBarrierSetAssembler::store_at(MacroAssembler* masm,105DecoratorSet decorators,106BasicType type,107Address dst,108Register val,109Register tmp1,110Register tmp2) {111// Verify value112if (is_reference_type(type)) {113// Note that src could be noreg, which means we114// are storing null and can skip verification.115if (val != noreg) {116Label done;117118// tmp1 and tmp2 are often set to noreg.119RegSet savedRegs = RegSet::of(rscratch1);120__ push(savedRegs, sp);121122__ ldr(rscratch1, address_bad_mask_from_thread(rthread));123__ tst(val, rscratch1);124__ br(Assembler::EQ, done);125__ stop("Verify oop store failed");126__ should_not_reach_here();127__ bind(done);128__ pop(savedRegs, sp);129}130}131132// Store value133BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2);134}135136#endif // ASSERT137138void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* 
masm,139DecoratorSet decorators,140bool is_oop,141Register src,142Register dst,143Register count,144RegSet saved_regs) {145if (!is_oop) {146// Barrier not needed147return;148}149150BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");151152assert_different_registers(src, count, rscratch1);153154__ push(saved_regs, sp);155156if (count == c_rarg0) {157if (src == c_rarg1) {158// exactly backwards!!159__ mov(rscratch1, c_rarg0);160__ mov(c_rarg0, c_rarg1);161__ mov(c_rarg1, rscratch1);162} else {163__ mov(c_rarg1, count);164__ mov(c_rarg0, src);165}166} else {167__ mov(c_rarg0, src);168__ mov(c_rarg1, count);169}170171__ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), 2);172173__ pop(saved_regs, sp);174175BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");176}177178void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,179Register jni_env,180Register robj,181Register tmp,182Label& slowpath) {183BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");184185assert_different_registers(jni_env, robj, tmp);186187// Resolve jobject188BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, robj, tmp, slowpath);189190// The Address offset is too large to direct load - -784. 
Our range is +127, -128.191__ mov(tmp, (int64_t)(in_bytes(ZThreadLocalData::address_bad_mask_offset()) -192in_bytes(JavaThread::jni_environment_offset())));193194// Load address bad mask195__ add(tmp, jni_env, tmp);196__ ldr(tmp, Address(tmp));197198// Check address bad mask199__ tst(robj, tmp);200__ br(Assembler::NE, slowpath);201202BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");203}204205#ifdef COMPILER1206207#undef __208#define __ ce->masm()->209210void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,211LIR_Opr ref) const {212assert_different_registers(rscratch1, rthread, ref->as_register());213214__ ldr(rscratch1, address_bad_mask_from_thread(rthread));215__ tst(ref->as_register(), rscratch1);216}217218void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,219ZLoadBarrierStubC1* stub) const {220// Stub entry221__ bind(*stub->entry());222223Register ref = stub->ref()->as_register();224Register ref_addr = noreg;225Register tmp = noreg;226227if (stub->tmp()->is_valid()) {228// Load address into tmp register229ce->leal(stub->ref_addr(), stub->tmp());230ref_addr = tmp = stub->tmp()->as_pointer_register();231} else {232// Address already in register233ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();234}235236assert_different_registers(ref, ref_addr, noreg);237238// Save r0 unless it is the result or tmp register239// Set up SP to accomodate parameters and maybe r0..240if (ref != r0 && tmp != r0) {241__ sub(sp, sp, 32);242__ str(r0, Address(sp, 16));243} else {244__ sub(sp, sp, 16);245}246247// Setup arguments and call runtime stub248ce->store_parameter(ref_addr, 1);249ce->store_parameter(ref, 0);250251__ far_call(stub->runtime_stub());252253// Verify result254__ verify_oop(r0, "Bad oop");255256// Move result into place257if (ref != r0) {258__ mov(ref, r0);259}260261// Restore r0 unless it is the result or tmp register262if (ref != r0 && tmp != r0) {263__ ldr(r0, Address(sp, 
16));264__ add(sp, sp, 32);265} else {266__ add(sp, sp, 16);267}268269// Stub exit270__ b(*stub->continuation());271}272273#undef __274#define __ sasm->275276void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,277DecoratorSet decorators) const {278__ prologue("zgc_load_barrier stub", false);279280__ push_call_clobbered_registers_except(RegSet::of(r0));281282// Setup arguments283__ load_parameter(0, c_rarg0);284__ load_parameter(1, c_rarg1);285286__ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), 2);287288__ pop_call_clobbered_registers_except(RegSet::of(r0));289290__ epilogue();291}292#endif // COMPILER1293294#ifdef COMPILER2295296OptoReg::Name ZBarrierSetAssembler::refine_register(const Node* node, OptoReg::Name opto_reg) {297if (!OptoReg::is_reg(opto_reg)) {298return OptoReg::Bad;299}300301const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);302if (vm_reg->is_FloatRegister()) {303return opto_reg & ~1;304}305306return opto_reg;307}308309#undef __310#define __ _masm->311312class ZSaveLiveRegisters {313private:314MacroAssembler* const _masm;315RegSet _gp_regs;316FloatRegSet _fp_regs;317318public:319void initialize(ZLoadBarrierStubC2* stub) {320// Record registers that needs to be saved/restored321RegMaskIterator rmi(stub->live());322while (rmi.has_next()) {323const OptoReg::Name opto_reg = rmi.next();324if (OptoReg::is_reg(opto_reg)) {325const VMReg vm_reg = OptoReg::as_VMReg(opto_reg);326if (vm_reg->is_Register()) {327_gp_regs += RegSet::of(vm_reg->as_Register());328} else if (vm_reg->is_FloatRegister()) {329_fp_regs += FloatRegSet::of(vm_reg->as_FloatRegister());330} else {331fatal("Unknown register type");332}333}334}335336// Remove C-ABI SOE registers, scratch regs and _ref register that will be updated337_gp_regs -= RegSet::range(r19, r30) + RegSet::of(r8, r9, stub->ref());338}339340ZSaveLiveRegisters(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :341_masm(masm),342_gp_regs(),343_fp_regs() 
{344345// Figure out what registers to save/restore346initialize(stub);347348// Save registers349__ push(_gp_regs, sp);350__ push_fp(_fp_regs, sp);351}352353~ZSaveLiveRegisters() {354// Restore registers355__ pop_fp(_fp_regs, sp);356357// External runtime call may clobber ptrue reg358__ reinitialize_ptrue();359360__ pop(_gp_regs, sp);361}362};363364#undef __365#define __ _masm->366367class ZSetupArguments {368private:369MacroAssembler* const _masm;370const Register _ref;371const Address _ref_addr;372373public:374ZSetupArguments(MacroAssembler* masm, ZLoadBarrierStubC2* stub) :375_masm(masm),376_ref(stub->ref()),377_ref_addr(stub->ref_addr()) {378379// Setup arguments380if (_ref_addr.base() == noreg) {381// No self healing382if (_ref != c_rarg0) {383__ mov(c_rarg0, _ref);384}385__ mov(c_rarg1, 0);386} else {387// Self healing388if (_ref == c_rarg0) {389// _ref is already at correct place390__ lea(c_rarg1, _ref_addr);391} else if (_ref != c_rarg1) {392// _ref is in wrong place, but not in c_rarg1, so fix it first393__ lea(c_rarg1, _ref_addr);394__ mov(c_rarg0, _ref);395} else if (_ref_addr.base() != c_rarg0 && _ref_addr.index() != c_rarg0) {396assert(_ref == c_rarg1, "Mov ref first, vacating c_rarg0");397__ mov(c_rarg0, _ref);398__ lea(c_rarg1, _ref_addr);399} else {400assert(_ref == c_rarg1, "Need to vacate c_rarg1 and _ref_addr is using c_rarg0");401if (_ref_addr.base() == c_rarg0 || _ref_addr.index() == c_rarg0) {402__ mov(rscratch2, c_rarg1);403__ lea(c_rarg1, _ref_addr);404__ mov(c_rarg0, rscratch2);405} else {406ShouldNotReachHere();407}408}409}410}411412~ZSetupArguments() {413// Transfer result414if (_ref != r0) {415__ mov(_ref, r0);416}417}418};419420#undef __421#define __ masm->422423void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {424BLOCK_COMMENT("ZLoadBarrierStubC2");425426// Stub entry427__ bind(*stub->entry());428429{430ZSaveLiveRegisters save_live_registers(masm, stub);431ZSetupArguments 
setup_arguments(masm, stub);432__ mov(rscratch1, stub->slow_path());433__ blr(rscratch1);434}435// Stub exit436__ b(*stub->continuation());437}438439#undef __440441#endif // COMPILER2442443444