Path: src/hotspot/share/gc/parallel/psCompactionManager.cpp
/*
 * Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/parallel/objectStartArray.hpp"
#include "gc/parallel/parMarkBitMap.inline.hpp"
#include "gc/parallel/parallelScavengeHeap.hpp"
#include "gc/parallel/psCompactionManager.inline.hpp"
#include "gc/parallel/psOldGen.hpp"
#include "gc/parallel/psParallelCompact.inline.hpp"
#include "gc/shared/taskqueue.inline.hpp"
#include "logging/log.hpp"
#include "memory/iterator.inline.hpp"
#include "oops/access.inline.hpp"
#include "oops/compressedOops.inline.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceMirrorKlass.inline.hpp"
#include "oops/objArrayKlass.inline.hpp"
#include "oops/oop.inline.hpp"

PSOldGen*               ParCompactionManager::_old_gen = NULL;
ParCompactionManager**  ParCompactionManager::_manager_array = NULL;

ParCompactionManager::OopTaskQueueSet*      ParCompactionManager::_oop_task_queues = NULL;
ParCompactionManager::ObjArrayTaskQueueSet* ParCompactionManager::_objarray_task_queues = NULL;
ParCompactionManager::RegionTaskQueueSet*   ParCompactionManager::_region_task_queues = NULL;

ObjectStartArray*       ParCompactionManager::_start_array = NULL;
ParMarkBitMap*          ParCompactionManager::_mark_bitmap = NULL;
GrowableArray<size_t >* ParCompactionManager::_shadow_region_array = NULL;
Monitor*                ParCompactionManager::_shadow_region_monitor = NULL;

ParCompactionManager::ParCompactionManager() {

  ParallelScavengeHeap* heap = ParallelScavengeHeap::heap();

  _old_gen = heap->old_gen();
  _start_array = old_gen()->start_array();

  marking_stack()->initialize();
  _objarray_stack.initialize();
  _region_stack.initialize();

  reset_bitmap_query_cache();
}

void ParCompactionManager::initialize(ParMarkBitMap* mbm) {
  assert(ParallelScavengeHeap::heap() != NULL,
         "Needed for initialization");

  _mark_bitmap = mbm;

  uint parallel_gc_threads = ParallelScavengeHeap::heap()->workers().total_workers();

  assert(_manager_array == NULL, "Attempt to initialize twice");
  _manager_array = NEW_C_HEAP_ARRAY(ParCompactionManager*, parallel_gc_threads+1, mtGC);

  _oop_task_queues = new OopTaskQueueSet(parallel_gc_threads);
  _objarray_task_queues = new ObjArrayTaskQueueSet(parallel_gc_threads);
  _region_task_queues = new RegionTaskQueueSet(parallel_gc_threads);

  // Create and register the ParCompactionManager(s) for the worker threads.
  for(uint i=0; i<parallel_gc_threads; i++) {
    _manager_array[i] = new ParCompactionManager();
    oop_task_queues()->register_queue(i, _manager_array[i]->marking_stack());
    _objarray_task_queues->register_queue(i, &_manager_array[i]->_objarray_stack);
    region_task_queues()->register_queue(i, _manager_array[i]->region_stack());
  }

  // The VMThread gets its own ParCompactionManager, which is not available
  // for work stealing.
  _manager_array[parallel_gc_threads] = new ParCompactionManager();
  assert(ParallelScavengeHeap::heap()->workers().total_workers() != 0,
         "Not initialized?");

  _shadow_region_array = new (ResourceObj::C_HEAP, mtGC) GrowableArray<size_t >(10, mtGC);

  _shadow_region_monitor = new Monitor(Mutex::barrier, "CompactionManager monitor",
                                       Mutex::_allow_vm_block_flag, Monitor::_safepoint_check_never);
}

void ParCompactionManager::reset_all_bitmap_query_caches() {
  uint parallel_gc_threads = ParallelScavengeHeap::heap()->workers().total_workers();
  for (uint i=0; i<=parallel_gc_threads; i++) {
    _manager_array[i]->reset_bitmap_query_cache();
  }
}


ParCompactionManager*
ParCompactionManager::gc_thread_compaction_manager(uint index) {
  assert(index < ParallelGCThreads, "index out of range");
  assert(_manager_array != NULL, "Sanity");
  return _manager_array[index];
}

void ParCompactionManager::follow_marking_stacks() {
  do {
    // Drain the overflow stack first, to allow stealing from the marking stack.
    oop obj;
    while (marking_stack()->pop_overflow(obj)) {
      follow_contents(obj);
    }
    while (marking_stack()->pop_local(obj)) {
      follow_contents(obj);
    }

    // Process ObjArrays one at a time to avoid marking stack bloat.
    ObjArrayTask task;
    if (_objarray_stack.pop_overflow(task) || _objarray_stack.pop_local(task)) {
      follow_array((objArrayOop)task.obj(), task.index());
    }
  } while (!marking_stacks_empty());

  assert(marking_stacks_empty(), "Sanity");
}

void ParCompactionManager::drain_region_stacks() {
  do {
    // Drain overflow stack first so other threads can steal.
    size_t region_index;
    while (region_stack()->pop_overflow(region_index)) {
      PSParallelCompact::fill_and_update_region(this, region_index);
    }

    while (region_stack()->pop_local(region_index)) {
      PSParallelCompact::fill_and_update_region(this, region_index);
    }
  } while (!region_stack()->is_empty());
}

size_t ParCompactionManager::pop_shadow_region_mt_safe(PSParallelCompact::RegionData* region_ptr) {
  MonitorLocker ml(_shadow_region_monitor, Mutex::_no_safepoint_check_flag);
  while (true) {
    if (!_shadow_region_array->is_empty()) {
      return _shadow_region_array->pop();
    }
    // Check if the corresponding heap region is available now.
    // If so, we don't need to get a shadow region anymore, and
    // we return InvalidShadow to indicate such a case.
    if (region_ptr->claimed()) {
      return InvalidShadow;
    }
    ml.wait(1);
  }
}

void ParCompactionManager::push_shadow_region_mt_safe(size_t shadow_region) {
  MonitorLocker ml(_shadow_region_monitor, Mutex::_no_safepoint_check_flag);
  _shadow_region_array->push(shadow_region);
  ml.notify();
}

void ParCompactionManager::push_shadow_region(size_t shadow_region) {
  _shadow_region_array->push(shadow_region);
}

void ParCompactionManager::remove_all_shadow_regions() {
  _shadow_region_array->clear();
}

#ifdef ASSERT
void ParCompactionManager::verify_all_marking_stack_empty() {
  uint parallel_gc_threads = ParallelGCThreads;
  for (uint i = 0; i <= parallel_gc_threads; i++) {
    assert(_manager_array[i]->marking_stacks_empty(), "Marking stack should be empty");
  }
}

void ParCompactionManager::verify_all_region_stack_empty() {
  uint parallel_gc_threads = ParallelGCThreads;
  for (uint i = 0; i <= parallel_gc_threads; i++) {
    assert(_manager_array[i]->region_stack()->is_empty(), "Region stack should be empty");
  }
}
#endif