Path: blob/master/src/hotspot/cpu/s390/gc/g1/g1BarrierSetAssembler_s390.cpp
41155 views
/*
 * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2018, 2019 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "registerSaver_s390.hpp"
#include "gc/g1/g1CardTable.hpp"
#include "gc/g1/g1BarrierSet.hpp"
#include "gc/g1/g1BarrierSetAssembler.hpp"
#include "gc/g1/g1BarrierSetRuntime.hpp"
#include "gc/g1/g1DirtyCardQueue.hpp"
#include "gc/g1/g1SATBMarkQueueSet.hpp"
#include "gc/g1/g1ThreadLocalData.hpp"
#include "gc/g1/heapRegion.hpp"
#include "interpreter/interp_masm.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/sharedRuntime.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/g1/c1/g1BarrierSetC1.hpp"
#endif

#define __ masm->

#define BLOCK_COMMENT(str) if (PrintAssembly) __ block_comment(str)

// G1 SATB pre-barrier for bulk oop-array writes: if concurrent marking is
// active, call into the runtime to record the previous values of the array
// slots about to be overwritten. Skipped entirely when the destination is
// statically known to be uninitialized (no previous values to record).
void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
                                                            Register addr, Register count) {
  bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;

  // With G1, don't generate the call if we statically know that the target is uninitialized.
  if (!dest_uninitialized) {
    // Is marking active?
    Label filtered;
    assert_different_registers(addr,  Z_R0_scratch);  // would be destroyed by push_frame()
    assert_different_registers(count, Z_R0_scratch);  // would be destroyed by push_frame()
    Register Rtmp1 = Z_R0_scratch;
    const int active_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
    // The width of the thread-local "marking active" flag is a build-time
    // property of the SATB queue; handle both supported widths.
    if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
      __ load_and_test_int(Rtmp1, Address(Z_thread, active_offset));
    } else {
      guarantee(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
      __ load_and_test_byte(Rtmp1, Address(Z_thread, active_offset));
    }
    __ z_bre(filtered); // Activity indicator is zero, so there is no marking going on currently.

    RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.

    // Two runtime entry points exist, depending on whether the array slots
    // hold narrow (compressed) or full-width oops.
    if (UseCompressedOops) {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_pre_narrow_oop_entry), addr, count);
    } else {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_pre_oop_entry), addr, count);
    }

    RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);

    __ bind(filtered);
  }
}

// G1 post-barrier for bulk oop-array writes: call the runtime to dirty the
// cards covering the written range. With do_return == true, the call is
// emitted as a tail call (no frame), returning directly to the stub caller.
void G1BarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
                                                             Register addr, Register count, bool do_return) {
  address entry_point = CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_post_entry);
  if (!do_return) {
    assert_different_registers(addr,  Z_R0_scratch);  // would be destroyed by push_frame()
    assert_different_registers(count, Z_R0_scratch);  // would be destroyed by push_frame()
    RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.
    __ call_VM_leaf(entry_point, addr, count);
    RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);
  } else {
    // Tail call: call c and return to stub caller.
    __ lgr_if_needed(Z_ARG1, addr);
    __ lgr_if_needed(Z_ARG2, count);
    __ load_const(Z_R1, entry_point);
    __ z_br(Z_R1); // Branch without linking, callee will return to stub caller.
  }
}

// Load an oop (or primitive) from src into dst via the ModRef barrier, and
// for Reference.referent-style loads (ON_WEAK/ON_PHANTOM_OOP_REF) emit the
// SATB pre-barrier on the loaded value so concurrent marking sees it.
void G1BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                    const Address& src, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) {
  bool on_oop = is_reference_type(type);
  bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
  bool on_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;
  bool on_reference = on_weak || on_phantom;
  Label done;
  // If the caller did not supply a null-handling label, route null results to
  // our local 'done' label so the pre-barrier below only sees non-null values.
  if (on_oop && on_reference && L_handle_null == NULL) { L_handle_null = &done; }
  ModRefBarrierSetAssembler::load_at(masm, decorators, type, src, dst, tmp1, tmp2, L_handle_null);
  if (on_oop && on_reference) {
    // Generate the G1 pre-barrier code to log the value of
    // the referent field in an SATB buffer.
    g1_write_barrier_pre(masm, decorators | IS_NOT_NULL,
                         NULL /* obj */,
                         dst  /* pre_val */,
                         noreg/* preserve */ ,
                         tmp1, tmp2 /* tmp */,
                         true /* pre_val_needed */);
  }
  __ bind(done);
}

// G1 SATB pre-barrier: record the previous value of a reference field in the
// thread-local SATB buffer (fast path) or via a runtime call (slow path).
// If obj != NULL, the previous value is loaded from *obj into Rpre_val first;
// otherwise Rpre_val must already contain it (preloaded). The barrier is a
// no-op when concurrent marking is not active or the previous value is NULL.
void G1BarrierSetAssembler::g1_write_barrier_pre(MacroAssembler* masm, DecoratorSet decorators,
                                                 const Address*  obj,
                                                 Register        Rpre_val,      // Ideally, this is a non-volatile register.
                                                 Register        Rval,          // Will be preserved.
                                                 Register        Rtmp1,         // If Rpre_val is volatile, either Rtmp1
                                                 Register        Rtmp2,         // or Rtmp2 has to be non-volatile.
                                                 bool            pre_val_needed // Save Rpre_val across runtime call, caller uses it.
                                                 ) {

  bool not_null  = (decorators & IS_NOT_NULL) != 0,
       preloaded = obj == NULL;

  const Register Robj = obj ? obj->base()  : noreg,
                 Roff = obj ? obj->index() : noreg;
  const int active_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
  const int buffer_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());
  const int index_offset  = in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset());
  assert_different_registers(Rtmp1, Rtmp2, Z_R0_scratch); // None of the Rtmp<i> must be Z_R0!!
  assert_different_registers(Robj, Z_R0_scratch);         // Used for addressing. Furthermore, push_frame destroys Z_R0!!
  assert_different_registers(Rval, Z_R0_scratch);         // push_frame destroys Z_R0!!

  Label callRuntime, filtered;

  BLOCK_COMMENT("g1_write_barrier_pre {");

  // Is marking active?
  // Note: value is loaded for test purposes only. No further use here.
  if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
    __ load_and_test_int(Rtmp1, Address(Z_thread, active_offset));
  } else {
    guarantee(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
    __ load_and_test_byte(Rtmp1, Address(Z_thread, active_offset));
  }
  __ z_bre(filtered); // Activity indicator is zero, so there is no marking going on currently.

  assert(Rpre_val != noreg, "must have a real register");


  // If an object is given, we need to load the previous value into Rpre_val.
  if (obj) {
    // Load the previous value...
    if (UseCompressedOops) {
      __ z_llgf(Rpre_val, *obj);
    } else {
      __ z_lg(Rpre_val, *obj);
    }
  }

  // Is the previous value NULL?
  // If so, we don't need to record it and we're done.
  // Note: pre_val is loaded, decompressed and stored (directly or via runtime call).
  //       Register contents is preserved across runtime call if caller requests to do so.
  if (preloaded && not_null) {
#ifdef ASSERT
    __ z_ltgr(Rpre_val, Rpre_val);
    __ asm_assert_ne("null oop not allowed (G1 pre)", 0x321); // Checked by caller.
#endif
  } else {
    __ z_ltgr(Rpre_val, Rpre_val);
    __ z_bre(filtered); // previous value is NULL, so we don't need to record it.
  }

  // Decode the oop now. We know it's not NULL.
  if (Robj != noreg && UseCompressedOops) {
    __ oop_decoder(Rpre_val, Rpre_val, /*maybeNULL=*/false);
  }

  // OK, it's not filtered, so we'll need to call enqueue.

  // We can store the original value in the thread's buffer
  // only if index > 0. Otherwise, we need runtime to handle.
  // (The index field is typed as size_t.)
  Register Rbuffer = Rtmp1, Rindex = Rtmp2;
  assert_different_registers(Rbuffer, Rindex, Rpre_val);

  __ z_lg(Rbuffer, buffer_offset, Z_thread);

  __ load_and_test_long(Rindex, Address(Z_thread, index_offset));
  __ z_bre(callRuntime); // If index == 0, goto runtime.

  __ add2reg(Rindex, -wordSize); // Decrement index.
  __ z_stg(Rindex, index_offset, Z_thread);

  // Record the previous value.
  __ z_stg(Rpre_val, 0, Rbuffer, Rindex);
  __ z_bru(filtered); // We are done.

  Rbuffer = noreg; // end of life
  Rindex  = noreg; // end of life

  __ bind(callRuntime);

  // Save some registers (inputs and result) over runtime call
  // by spilling them into the top frame.
  if (Robj != noreg && Robj->is_volatile()) {
    __ z_stg(Robj, Robj->encoding()*BytesPerWord, Z_SP);
  }
  if (Roff != noreg && Roff->is_volatile()) {
    __ z_stg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
  }
  if (Rval != noreg && Rval->is_volatile()) {
    __ z_stg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
  }

  // Save Rpre_val (result) over runtime call.
  // If Rpre_val is volatile (or Z_R0, which push_frame clobbers), park it in
  // whichever of Rtmp1/Rtmp2 is non-volatile for the duration of the call.
  Register Rpre_save = Rpre_val;
  if ((Rpre_val == Z_R0_scratch) || (pre_val_needed && Rpre_val->is_volatile())) {
    guarantee(!Rtmp1->is_volatile() || !Rtmp2->is_volatile(), "oops!");
    Rpre_save = !Rtmp1->is_volatile() ? Rtmp1 : Rtmp2;
  }
  __ lgr_if_needed(Rpre_save, Rpre_val);

  // Push frame to protect top frame with return pc and spilled register values.
  __ save_return_pc();
  __ push_frame_abi160(0); // Will use Z_R0 as tmp.

  // Rpre_val may be destroyed by push_frame().
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_field_pre_entry), Rpre_save, Z_thread);

  __ pop_frame();
  __ restore_return_pc();

  // Restore spilled values.
  if (Robj != noreg && Robj->is_volatile()) {
    __ z_lg(Robj, Robj->encoding()*BytesPerWord, Z_SP);
  }
  if (Roff != noreg && Roff->is_volatile()) {
    __ z_lg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
  }
  if (Rval != noreg && Rval->is_volatile()) {
    __ z_lg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
  }
  if (pre_val_needed && Rpre_val->is_volatile()) {
    __ lgr_if_needed(Rpre_val, Rpre_save);
  }

  __ bind(filtered);
  BLOCK_COMMENT("} g1_write_barrier_pre");
}

// G1 post-barrier: after storing Rnew_val at Rstore_addr, dirty the card for
// Rstore_addr and enqueue the card address in the thread-local dirty-card
// queue (runtime call if the queue is full). Filters out same-region stores,
// NULL stores, young cards, and already-dirty cards before doing any work.
void G1BarrierSetAssembler::g1_write_barrier_post(MacroAssembler* masm, DecoratorSet decorators, Register Rstore_addr, Register Rnew_val,
                                                  Register Rtmp1, Register Rtmp2, Register Rtmp3) {
  bool not_null = (decorators & IS_NOT_NULL) != 0;

  assert_different_registers(Rstore_addr, Rnew_val, Rtmp1, Rtmp2); // Most probably, Rnew_val == Rtmp3.

  Label callRuntime, filtered;

  CardTableBarrierSet* ct = barrier_set_cast<CardTableBarrierSet>(BarrierSet::barrier_set());

  BLOCK_COMMENT("g1_write_barrier_post {");

  // Does store cross heap regions?
  // It does if the two addresses specify different grain addresses.
  if (VM_Version::has_DistinctOpnds()) {
    __ z_xgrk(Rtmp1, Rstore_addr, Rnew_val);
  } else {
    __ z_lgr(Rtmp1, Rstore_addr);
    __ z_xgr(Rtmp1, Rnew_val);
  }
  __ z_srag(Rtmp1, Rtmp1, HeapRegion::LogOfHRGrainBytes);
  __ z_bre(filtered);

  // Crosses regions, storing NULL?
  if (not_null) {
#ifdef ASSERT
    __ z_ltgr(Rnew_val, Rnew_val);
    __ asm_assert_ne("null oop not allowed (G1 post)", 0x322); // Checked by caller.
#endif
  } else {
    __ z_ltgr(Rnew_val, Rnew_val);
    __ z_bre(filtered);
  }

  Rnew_val = noreg; // end of lifetime

  // Storing region crossing non-NULL, is card already dirty?
  assert_different_registers(Rtmp1, Rtmp2, Rtmp3);
  // Make sure not to use Z_R0 for any of these registers.
  Register Rcard_addr = (Rtmp1 != Z_R0_scratch) ? Rtmp1 : Rtmp3;
  Register Rbase      = (Rtmp2 != Z_R0_scratch) ? Rtmp2 : Rtmp3;

  // calculate address of card
  __ load_const_optimized(Rbase, (address)ct->card_table()->byte_map_base()); // Card table base.
  __ z_srlg(Rcard_addr, Rstore_addr, CardTable::card_shift);                  // Index into card table.
  __ z_algr(Rcard_addr, Rbase);                                              // Explicit calculation needed for cli.
  Rbase = noreg; // end of lifetime

  // Filter young.
  __ z_cli(0, Rcard_addr, G1CardTable::g1_young_card_val());
  __ z_bre(filtered);

  // Check the card value. If dirty, we're done.
  // This also avoids false sharing of the (already dirty) card.
  __ z_sync(); // Required to support concurrent cleaning.
  __ z_cli(0, Rcard_addr, G1CardTable::dirty_card_val()); // Reload after membar.
  __ z_bre(filtered);

  // Storing a region crossing, non-NULL oop, card is clean.
  // Dirty card and log.
  __ z_mvi(0, Rcard_addr, G1CardTable::dirty_card_val());

  Register Rcard_addr_x = Rcard_addr;
  Register Rqueue_index = (Rtmp2 != Z_R0_scratch) ? Rtmp2 : Rtmp1;
  Register Rqueue_buf   = (Rtmp3 != Z_R0_scratch) ? Rtmp3 : Rtmp1;
  const int qidx_off    = in_bytes(G1ThreadLocalData::dirty_card_queue_index_offset());
  const int qbuf_off    = in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset());
  if ((Rcard_addr == Rqueue_buf) || (Rcard_addr == Rqueue_index)) {
    Rcard_addr_x = Z_R0_scratch;  // Register shortage. We have to use Z_R0.
  }
  __ lgr_if_needed(Rcard_addr_x, Rcard_addr);

  __ load_and_test_long(Rqueue_index, Address(Z_thread, qidx_off));
  __ z_bre(callRuntime); // Index == 0 then jump to runtime.

  __ z_lg(Rqueue_buf, qbuf_off, Z_thread);

  __ add2reg(Rqueue_index, -wordSize); // Decrement index.
  __ z_stg(Rqueue_index, qidx_off, Z_thread);

  __ z_stg(Rcard_addr_x, 0, Rqueue_index, Rqueue_buf); // Store card.
  __ z_bru(filtered);

  __ bind(callRuntime);

  // TODO: do we need a frame? Introduced to be on the safe side.
  bool needs_frame = true;
  __ lgr_if_needed(Rcard_addr, Rcard_addr_x); // copy back asap. push_frame will destroy Z_R0_scratch!

  // VM call need frame to access(write) O register.
  if (needs_frame) {
    __ save_return_pc();
    __ push_frame_abi160(0); // Will use Z_R0 as tmp on old CPUs.
  }

  // Save the live input values.
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_field_post_entry), Rcard_addr, Z_thread);

  if (needs_frame) {
    __ pop_frame();
    __ restore_return_pc();
  }

  __ bind(filtered);

  BLOCK_COMMENT("} g1_write_barrier_post");
}

// Store an oop at dst with full G1 barriers: pre-barrier (records the old
// value), the actual store, then the post-barrier (dirties the card) unless
// a NULL is being stored. For precise (array/unknown) stores, the exact slot
// address is materialized in 'base' before the post-barrier.
void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                         const Address& dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
  bool is_array = (decorators & IS_ARRAY) != 0;
  bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
  bool precise = is_array || on_anonymous;
  // Load and record the previous value.
  g1_write_barrier_pre(masm, decorators, &dst, tmp3, val, tmp1, tmp2, false);

  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);

  // No need for post barrier if storing NULL
  if (val != noreg) {
    const Register base = dst.base(),
                   idx  = dst.index();
    const intptr_t disp = dst.disp();
    if (precise && (disp != 0 || idx != noreg)) {
      __ add2reg_with_index(base, disp, idx, base);
    }
    g1_write_barrier_post(masm, decorators, base, val, tmp1, tmp2, tmp3);
  }
}

// Resolve a jobject handle into the oop it refers to. NULL handles pass
// through unchanged. jweak-tagged handles additionally get the SATB
// pre-barrier on the resolved value, so concurrent marking keeps the
// weakly-referenced object alive while the caller uses it.
void G1BarrierSetAssembler::resolve_jobject(MacroAssembler* masm, Register value, Register tmp1, Register tmp2) {
  NearLabel Ldone, Lnot_weak;
  __ z_ltgr(tmp1, value);
  __ z_bre(Ldone);          // Use NULL result as-is.

  __ z_nill(value, ~JNIHandles::weak_tag_mask);
  __ z_lg(value, 0, value); // Resolve (untagged) jobject.

  __ z_tmll(tmp1, JNIHandles::weak_tag_mask); // Test for jweak tag.
  __ z_braz(Lnot_weak);
  __ verify_oop(value, FILE_AND_LINE);
  DecoratorSet decorators = IN_NATIVE | ON_PHANTOM_OOP_REF;
  g1_write_barrier_pre(masm, decorators, (const Address*)NULL, value, noreg, tmp1, tmp2, true);
  __ bind(Lnot_weak);
  __ verify_oop(value, FILE_AND_LINE);
  __ bind(Ldone);
}

#ifdef COMPILER1

#undef __
#define __ ce->masm()->

// C1 slow-path stub for the SATB pre-barrier: optionally (re)load the
// previous value, skip if it is NULL, otherwise pass it to the shared
// pre-barrier runtime code blob in Z_R1_scratch.
void G1BarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  // At this point we know that marking is in progress.
  // If do_load() is true then we have to emit the
  // load of the previous value; otherwise it has already
  // been loaded into _pre_val.
  __ bind(*stub->entry());
  ce->check_reserved_argument_area(16); // RT stub needs 2 spill slots.
  assert(stub->pre_val()->is_register(), "Precondition.");

  Register pre_val_reg = stub->pre_val()->as_register();

  if (stub->do_load()) {
    ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
  }

  __ z_ltgr(Z_R1_scratch, pre_val_reg); // Pass oop in Z_R1_scratch to Runtime1::g1_pre_barrier_slow_id.
  __ branch_optimized(Assembler::bcondZero, *stub->continuation());
  ce->emit_call_c(bs->pre_barrier_c1_runtime_code_blob()->code_begin());
  __ branch_optimized(Assembler::bcondAlways, *stub->continuation());
}

// C1 slow-path stub for the post-barrier: skip if the new value is NULL,
// otherwise pass the store address to the shared post-barrier runtime code
// blob in Z_R1_scratch.
void G1BarrierSetAssembler::gen_post_barrier_stub(LIR_Assembler* ce, G1PostBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  __ bind(*stub->entry());
  ce->check_reserved_argument_area(16); // RT stub needs 2 spill slots.
  assert(stub->addr()->is_register(), "Precondition.");
  assert(stub->new_val()->is_register(), "Precondition.");
  Register new_val_reg = stub->new_val()->as_register();
  __ z_ltgr(new_val_reg, new_val_reg);
  __ branch_optimized(Assembler::bcondZero, *stub->continuation());
  __ z_lgr(Z_R1_scratch, stub->addr()->as_pointer_register());
  ce->emit_call_c(bs->post_barrier_c1_runtime_code_blob()->code_begin());
  __ branch_optimized(Assembler::bcondAlways, *stub->continuation());
}

#undef __

#define __ sasm->

// Spill all volatile registers into a fresh frame, so a C runtime call made
// from a C1 stub cannot clobber caller state. Returns the generated OopMap.
static OopMap* save_volatile_registers(StubAssembler* sasm, Register return_pc = Z_R14) {
  __ block_comment("save_volatile_registers");
  RegisterSaver::RegisterSet reg_set = RegisterSaver::all_volatile_registers;
  int frame_size_in_slots = RegisterSaver::live_reg_frame_size(reg_set) / VMRegImpl::stack_slot_size;
  sasm->set_frame_size(frame_size_in_slots / VMRegImpl::slots_per_word);
  return RegisterSaver::save_live_registers(sasm, reg_set, return_pc);
}

// Counterpart to save_volatile_registers(): reload the spilled volatile
// registers and tear down the save frame.
static void restore_volatile_registers(StubAssembler* sasm) {
  __ block_comment("restore_volatile_registers");
  RegisterSaver::RegisterSet reg_set = RegisterSaver::all_volatile_registers;
  RegisterSaver::restore_live_registers(sasm, reg_set);
}

// Shared C1 runtime stub for the SATB pre-barrier slow path.
// Input: Z_R1_scratch holds the previous value of the memory slot.
// Enqueues the value in the thread-local SATB buffer; when the buffer is
// full (index == 0), calls into the VM to flush it and retries.
void G1BarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
  // Z_R1_scratch: previous value of memory

  BarrierSet* bs = BarrierSet::barrier_set();
  __ set_info("g1_pre_barrier_slow_id", false);

  Register pre_val = Z_R1_scratch;
  Register tmp  = Z_R6; // Must be non-volatile because it is used to save pre_val.
  Register tmp2 = Z_R7;

  Label refill, restart, marking_not_active;
  int satb_q_active_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
  int satb_q_index_byte_offset  = in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset());
  int satb_q_buf_byte_offset    = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());

  // Save tmp registers (see assertion in G1PreBarrierStub::emit_code()).
  __ z_stg(tmp,  0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_stg(tmp2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

  // Is marking still active?
  if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
    __ load_and_test_int(tmp, Address(Z_thread, satb_q_active_byte_offset));
  } else {
    assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
    __ load_and_test_byte(tmp, Address(Z_thread, satb_q_active_byte_offset));
  }
  __ z_bre(marking_not_active); // Activity indicator is zero, so there is no marking going on currently.

  __ bind(restart);
  // Load the index into the SATB buffer. SATBMarkQueue::_index is a
  // size_t so ld_ptr is appropriate.
  __ z_ltg(tmp, satb_q_index_byte_offset, Z_R0, Z_thread);

  // index == 0?
  __ z_brz(refill);

  __ z_lg(tmp2, satb_q_buf_byte_offset, Z_thread);
  __ add2reg(tmp, -oopSize);

  __ z_stg(pre_val, 0, tmp, tmp2); // [_buf + index] := <pre_val>
  __ z_stg(tmp, satb_q_index_byte_offset, Z_thread);

  __ bind(marking_not_active);
  // Restore tmp registers (see assertion in G1PreBarrierStub::emit_code()).
  __ z_lg(tmp,  0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_lg(tmp2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_br(Z_R14);

  __ bind(refill);
  save_volatile_registers(sasm);
  __ z_lgr(tmp, pre_val); // save pre_val
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1SATBMarkQueueSet::handle_zero_index_for_thread),
                  Z_thread);
  __ z_lgr(pre_val, tmp); // restore pre_val
  restore_volatile_registers(sasm);
  __ z_bru(restart);
}

// Shared C1 runtime stub for the post-barrier slow path.
// Input: Z_R1_scratch holds the address of the updated memory slot.
// Computes the card address, filters young/already-dirty cards, dirties the
// card, and enqueues its address in the thread-local dirty-card queue; when
// the queue is full (index == 0), calls into the VM to flush it and retries.
void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler* sasm) {
  // Z_R1_scratch: oop address, address of updated memory slot

  BarrierSet* bs = BarrierSet::barrier_set();
  __ set_info("g1_post_barrier_slow_id", false);

  Register addr_oop  = Z_R1_scratch;
  Register addr_card = Z_R1_scratch;
  Register r1        = Z_R6; // Must be saved/restored.
  Register r2        = Z_R7; // Must be saved/restored.
  Register cardtable = r1;   // Must be non-volatile, because it is used to save addr_card.
  CardTableBarrierSet* ctbs = barrier_set_cast<CardTableBarrierSet>(bs);
  CardTable* ct = ctbs->card_table();
  CardTable::CardValue* byte_map_base = ct->byte_map_base();

  // Save registers used below (see assertion in G1PreBarrierStub::emit_code()).
  __ z_stg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

  Label not_already_dirty, restart, refill, young_card;

  // Calculate address of card corresponding to the updated oop slot.
  AddressLiteral rs(byte_map_base);
  __ z_srlg(addr_card, addr_oop, CardTable::card_shift);
  addr_oop = noreg; // dead now
  __ load_const_optimized(cardtable, rs); // cardtable := <card table base>
  __ z_agr(addr_card, cardtable); // addr_card := addr_oop>>card_shift + cardtable

  __ z_cli(0, addr_card, (int)G1CardTable::g1_young_card_val());
  __ z_bre(young_card);

  __ z_sync(); // Required to support concurrent cleaning.

  __ z_cli(0, addr_card, (int)CardTable::dirty_card_val());
  __ z_brne(not_already_dirty);

  __ bind(young_card);
  // We didn't take the branch, so we're already dirty: restore
  // used registers and return.
  __ z_lg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_br(Z_R14);

  // Not dirty.
  __ bind(not_already_dirty);

  // First, dirty it: [addr_card] := 0
  __ z_mvi(0, addr_card, CardTable::dirty_card_val());

  Register idx = cardtable; // Must be non-volatile, because it is used to save addr_card.
  Register buf = r2;
  cardtable = noreg; // now dead

  // Save registers used below (see assertion in G1PreBarrierStub::emit_code()).
  __ z_stg(r2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);

  ByteSize dirty_card_q_index_byte_offset = G1ThreadLocalData::dirty_card_queue_index_offset();
  ByteSize dirty_card_q_buf_byte_offset   = G1ThreadLocalData::dirty_card_queue_buffer_offset();

  __ bind(restart);

  // Get the index into the update buffer. G1DirtyCardQueue::_index is
  // a size_t so z_ltg is appropriate here.
  __ z_ltg(idx, Address(Z_thread, dirty_card_q_index_byte_offset));

  // index == 0?
  __ z_brz(refill);

  __ z_lg(buf, Address(Z_thread, dirty_card_q_buf_byte_offset));
  __ add2reg(idx, -oopSize);

  __ z_stg(addr_card, 0, idx, buf); // [_buf + index] := <address_of_card>
  __ z_stg(idx, Address(Z_thread, dirty_card_q_index_byte_offset));
  // Restore killed registers and return.
  __ z_lg(r1, 0*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_lg(r2, 1*BytesPerWord + FrameMap::first_available_sp_in_frame, Z_SP);
  __ z_br(Z_R14);

  __ bind(refill);
  save_volatile_registers(sasm);
  __ z_lgr(idx, addr_card); // Save addr_card, tmp3 must be non-volatile.
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1DirtyCardQueueSet::handle_zero_index_for_thread),
                  Z_thread);
  __ z_lgr(addr_card, idx);
  restore_volatile_registers(sasm); // Restore addr_card.
  __ z_bru(restart);
}

#undef __

#endif // COMPILER1