Path: src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp
/*
 * Copyright (c) 2018, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderData.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/barrierSetAssembler.hpp"
#include "gc/shared/barrierSetNMethod.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/interp_masm.hpp"
#include "memory/universe.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/thread.hpp"

#define __ masm->

void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                  Register dst, Address src, Register tmp1, Register tmp_thread) {
  bool in_heap = (decorators & IN_HEAP) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;
  bool is_not_null = (decorators & IS_NOT_NULL) != 0;
  bool atomic = (decorators & MO_RELAXED) != 0;

  switch (type) {
  case T_OBJECT:
  case T_ARRAY: {
    if (in_heap) {
#ifdef _LP64
      if (UseCompressedOops) {
        __ movl(dst, src);
        if (is_not_null) {
          __ decode_heap_oop_not_null(dst);
        } else {
          __ decode_heap_oop(dst);
        }
      } else
#endif
      {
        __ movptr(dst, src);
      }
    } else {
      assert(in_native, "why else?");
      __ movptr(dst, src);
    }
    break;
  }
  case T_BOOLEAN: __ load_unsigned_byte(dst, src);  break;
  case T_BYTE:    __ load_signed_byte(dst, src);    break;
  case T_CHAR:    __ load_unsigned_short(dst, src); break;
  case T_SHORT:   __ load_signed_short(dst, src);   break;
  case T_INT:     __ movl  (dst, src);              break;
  case T_ADDRESS: __ movptr(dst, src);              break;
  case T_FLOAT:
    assert(dst == noreg, "only to ftos");
    __ load_float(src);
    break;
  case T_DOUBLE:
    assert(dst == noreg, "only to dtos");
    __ load_double(src);
    break;
  case T_LONG:
    assert(dst == noreg, "only to ltos");
#ifdef _LP64
    __ movq(rax, src);
#else
    if (atomic) {
      __ fild_d(src);               // Must load atomically
      __ subptr(rsp,2*wordSize);    // Make space for store
      __ fistp_d(Address(rsp,0));
      __ pop(rax);
      __ pop(rdx);
    } else {
      __ movl(rax, src);
      __ movl(rdx, src.plus_disp(wordSize));
    }
#endif
    break;
  default: Unimplemented();
  }
}
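// A minimal sketch of how platform code might drive load_at above (hedged
// illustration; `some_field_offset` is a hypothetical constant used only
// here). The decorator bits select the code shape: IN_HEAP picks the
// compressed-oop path on LP64 when UseCompressedOops is set, IS_NOT_NULL
// permits the cheaper decode_heap_oop_not_null, and MO_RELAXED drives the
// atomic 64-bit load on 32-bit x86.
//
//   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
//   // Load an oop field of the object in rdx into rax; rbx is a free temp:
//   bs->load_at(masm, IN_HEAP | MO_RELAXED, T_OBJECT,
//               rax, Address(rdx, some_field_offset), rbx, noreg);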
void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                   Address dst, Register val, Register tmp1, Register tmp2) {
  bool in_heap = (decorators & IN_HEAP) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;
  bool is_not_null = (decorators & IS_NOT_NULL) != 0;
  bool atomic = (decorators & MO_RELAXED) != 0;

  switch (type) {
  case T_OBJECT:
  case T_ARRAY: {
    if (in_heap) {
      if (val == noreg) {
        assert(!is_not_null, "inconsistent access");
#ifdef _LP64
        if (UseCompressedOops) {
          __ movl(dst, (int32_t)NULL_WORD);
        } else {
          __ movslq(dst, (int32_t)NULL_WORD);
        }
#else
        __ movl(dst, (int32_t)NULL_WORD);
#endif
      } else {
#ifdef _LP64
        if (UseCompressedOops) {
          assert(!dst.uses(val), "not enough registers");
          if (is_not_null) {
            __ encode_heap_oop_not_null(val);
          } else {
            __ encode_heap_oop(val);
          }
          __ movl(dst, val);
        } else
#endif
        {
          __ movptr(dst, val);
        }
      }
    } else {
      assert(in_native, "why else?");
      assert(val != noreg, "not supported");
      __ movptr(dst, val);
    }
    break;
  }
  case T_BOOLEAN:
    __ andl(val, 0x1);  // boolean is true if LSB is 1
    __ movb(dst, val);
    break;
  case T_BYTE:
    __ movb(dst, val);
    break;
  case T_SHORT:
    __ movw(dst, val);
    break;
  case T_CHAR:
    __ movw(dst, val);
    break;
  case T_INT:
    __ movl(dst, val);
    break;
  case T_LONG:
    assert(val == noreg, "only tos");
#ifdef _LP64
    __ movq(dst, rax);
#else
    if (atomic) {
      __ push(rdx);
      __ push(rax);                 // Must update atomically with FIST
      __ fild_d(Address(rsp,0));    // So load into FPU register
      __ fistp_d(dst);              // and put into memory atomically
      __ addptr(rsp, 2*wordSize);
    } else {
      __ movptr(dst, rax);
      __ movptr(dst.plus_disp(wordSize), rdx);
    }
#endif
    break;
  case T_FLOAT:
    assert(val == noreg, "only tos");
    __ store_float(dst);
    break;
  case T_DOUBLE:
    assert(val == noreg, "only tos");
    __ store_double(dst);
    break;
  case T_ADDRESS:
    __ movptr(dst, val);
    break;
  default: Unimplemented();
  }
}

void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
                                                        Register obj, Register tmp, Label& slowpath) {
  __ clear_jweak_tag(obj);
  __ movptr(obj, Address(obj, 0));
}

void BarrierSetAssembler::tlab_allocate(MacroAssembler* masm,
                                        Register thread, Register obj,
                                        Register var_size_in_bytes,
                                        int con_size_in_bytes,
                                        Register t1,
                                        Register t2,
                                        Label& slow_case) {
  assert_different_registers(obj, t1, t2);
  assert_different_registers(obj, var_size_in_bytes, t1);
  Register end = t2;
  if (!thread->is_valid()) {
#ifdef _LP64
    thread = r15_thread;
#else
    assert(t1->is_valid(), "need temp reg");
    thread = t1;
    __ get_thread(thread);
#endif
  }

  __ verify_tlab();

  __ movptr(obj, Address(thread, JavaThread::tlab_top_offset()));
  if (var_size_in_bytes == noreg) {
    __ lea(end, Address(obj, con_size_in_bytes));
  } else {
    __ lea(end, Address(obj, var_size_in_bytes, Address::times_1));
  }
  __ cmpptr(end, Address(thread, JavaThread::tlab_end_offset()));
  __ jcc(Assembler::above, slow_case);

  // update the tlab top pointer
  __ movptr(Address(thread, JavaThread::tlab_top_offset()), end);

  // recover var_size_in_bytes if necessary
  if (var_size_in_bytes == end) {
    __ subptr(var_size_in_bytes, obj);
  }
  __ verify_tlab();
}
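// The TLAB path above is a plain bump-pointer allocation: the top pointer is
// thread-local, so no atomics are needed. In C-like pseudocode (a hedged
// sketch with hypothetical field names, not HotSpot code):
//
//   char* obj = thread->tlab_top;
//   char* end = obj + size_in_bytes;
//   if (end > thread->tlab_end) goto slow_case;  // TLAB exhausted
//   thread->tlab_top = end;                      // plain store is enough
//
// The shared-eden path below races with other threads, so it re-reads the
// global top pointer in a retry loop and publishes the new value with a
// locked cmpxchg instead of a plain store.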
// Defines obj, preserves var_size_in_bytes
void BarrierSetAssembler::eden_allocate(MacroAssembler* masm,
                                        Register thread, Register obj,
                                        Register var_size_in_bytes,
                                        int con_size_in_bytes,
                                        Register t1,
                                        Label& slow_case) {
  assert(obj == rax, "obj must be in rax, for cmpxchg");
  assert_different_registers(obj, var_size_in_bytes, t1);
  if (!Universe::heap()->supports_inline_contig_alloc()) {
    __ jmp(slow_case);
  } else {
    Register end = t1;
    Label retry;
    __ bind(retry);
    ExternalAddress heap_top((address) Universe::heap()->top_addr());
    __ movptr(obj, heap_top);
    if (var_size_in_bytes == noreg) {
      __ lea(end, Address(obj, con_size_in_bytes));
    } else {
      __ lea(end, Address(obj, var_size_in_bytes, Address::times_1));
    }
    // if end < obj then we wrapped around => object too long => slow case
    __ cmpptr(end, obj);
    __ jcc(Assembler::below, slow_case);
    __ cmpptr(end, ExternalAddress((address) Universe::heap()->end_addr()));
    __ jcc(Assembler::above, slow_case);
    // Compare obj with the top addr, and if still equal, store the new top addr in
    // end at the address of the top addr pointer. Sets ZF if was equal, and clears
    // it otherwise. Use lock prefix for atomicity on MPs.
    __ locked_cmpxchgptr(end, heap_top);
    __ jcc(Assembler::notEqual, retry);
    incr_allocated_bytes(masm, thread, var_size_in_bytes, con_size_in_bytes, thread->is_valid() ? noreg : t1);
  }
}

void BarrierSetAssembler::incr_allocated_bytes(MacroAssembler* masm, Register thread,
                                               Register var_size_in_bytes,
                                               int con_size_in_bytes,
                                               Register t1) {
  if (!thread->is_valid()) {
#ifdef _LP64
    thread = r15_thread;
#else
    assert(t1->is_valid(), "need temp reg");
    thread = t1;
    __ get_thread(thread);
#endif
  }

#ifdef _LP64
  if (var_size_in_bytes->is_valid()) {
    __ addq(Address(thread, in_bytes(JavaThread::allocated_bytes_offset())), var_size_in_bytes);
  } else {
    __ addq(Address(thread, in_bytes(JavaThread::allocated_bytes_offset())), con_size_in_bytes);
  }
#else
  if (var_size_in_bytes->is_valid()) {
    __ addl(Address(thread, in_bytes(JavaThread::allocated_bytes_offset())), var_size_in_bytes);
  } else {
    __ addl(Address(thread, in_bytes(JavaThread::allocated_bytes_offset())), con_size_in_bytes);
  }
  __ adcl(Address(thread, in_bytes(JavaThread::allocated_bytes_offset())+4), 0);
#endif
}

#ifdef _LP64
void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm) {
  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs_nm == NULL) {
    return;
  }
  Label continuation;
  Register thread = r15_thread;
  Address disarmed_addr(thread, in_bytes(bs_nm->thread_disarmed_offset()));
  __ align(8);
  __ cmpl(disarmed_addr, 0);
  __ jcc(Assembler::equal, continuation);
  __ call(RuntimeAddress(StubRoutines::x86::method_entry_barrier()));
  __ bind(continuation);
}
#else
void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm) {
  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs_nm == NULL) {
    return;
  }

  Label continuation;

  Register tmp = rdi;
  __ push(tmp);
  __ movptr(tmp, (intptr_t)bs_nm->disarmed_value_address());
  Address disarmed_addr(tmp, 0);
  __ align(4);
  __ cmpl(disarmed_addr, 0);
  __ pop(tmp);
  __ jcc(Assembler::equal, continuation);
  __ call(RuntimeAddress(StubRoutines::x86::method_entry_barrier()));
  __ bind(continuation);
}
#endif
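// c2i_entry_barrier below guards c2i adapters against calling a method whose
// class loader is being unloaded concurrently: rbx holds the incoming
// Method*, which is chased to its holder class's ClassLoaderData. The call
// may proceed (method_live) if the CLD is strong, or weak but with its
// holder oop still reachable; otherwise control transfers to the
// handle_wrong_method stub.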
void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
  BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs == NULL) {
    return;
  }

  Label bad_call;
  __ cmpptr(rbx, 0); // rbx contains the incoming method for c2i adapters.
  __ jcc(Assembler::equal, bad_call);

#ifdef _LP64
  Register tmp1 = rscratch1;
  Register tmp2 = rscratch2;
#else
  Register tmp1 = rax;
  Register tmp2 = rcx;
  __ push(tmp1);
  __ push(tmp2);
#endif // _LP64

  // Pointer chase to the method holder to find out if the method is concurrently unloading.
  Label method_live;
  __ load_method_holder_cld(tmp1, rbx);

  // Is it a strong CLD?
  __ cmpl(Address(tmp1, ClassLoaderData::keep_alive_offset()), 0);
  __ jcc(Assembler::greater, method_live);

  // Is it a weak but alive CLD?
  __ movptr(tmp1, Address(tmp1, ClassLoaderData::holder_offset()));
  __ resolve_weak_handle(tmp1, tmp2);
  __ cmpptr(tmp1, 0);
  __ jcc(Assembler::notEqual, method_live);

#ifndef _LP64
  __ pop(tmp2);
  __ pop(tmp1);
#endif

  __ bind(bad_call);
  __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
  __ bind(method_live);

#ifndef _LP64
  __ pop(tmp2);
  __ pop(tmp1);
#endif
}
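// A hedged sketch of where these hooks are typically emitted (illustrative
// only; the actual call sites live in the platform code generators):
//
//   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
//   bs->nmethod_entry_barrier(masm);  // at nmethod entry, before execution continues
//   bs->c2i_entry_barrier(masm);      // at the start of a c2i adapter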