annotate src/hotspot/share/gc/shared/referenceProcessor.cpp @ 50590:4fa726f796f5

8202781: Fix typo in DiscoveredListIterator::complete_enqeue
Reviewed-by: kbarrett
author tschatzl
date Tue, 08 May 2018 16:49:20 +0200
parents 9d17c375dc30
children bf2f27b92064
rev   line source
duke@1 1 /*
eosterlund@49472 2 * Copyright (c) 2001, 2018, Oracle and/or its affiliates. All rights reserved.
duke@1 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@1 4 *
duke@1 5 * This code is free software; you can redistribute it and/or modify it
duke@1 6 * under the terms of the GNU General Public License version 2 only, as
duke@1 7 * published by the Free Software Foundation.
duke@1 8 *
duke@1 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@1 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@1 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@1 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@1 13 * accompanied this code).
duke@1 14 *
duke@1 15 * You should have received a copy of the GNU General Public License version
duke@1 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@1 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@1 18 *
trims@5547 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@5547 20 * or visit www.oracle.com if you need additional information or have any
trims@5547 21 * questions.
duke@1 22 *
duke@1 23 */
duke@1 24
stefank@7397 25 #include "precompiled.hpp"
goetz@35498 26 #include "classfile/javaClasses.inline.hpp"
stefank@7397 27 #include "classfile/systemDictionary.hpp"
pliden@30764 28 #include "gc/shared/collectedHeap.hpp"
pliden@30764 29 #include "gc/shared/collectedHeap.inline.hpp"
pliden@30764 30 #include "gc/shared/gcTimer.hpp"
brutisso@35061 31 #include "gc/shared/gcTraceTime.inline.hpp"
pliden@30764 32 #include "gc/shared/referencePolicy.hpp"
goetz@35862 33 #include "gc/shared/referenceProcessor.inline.hpp"
brutisso@35061 34 #include "logging/log.hpp"
stefank@49825 35 #include "memory/allocation.inline.hpp"
jprovino@37248 36 #include "memory/resourceArea.hpp"
eosterlund@48195 37 #include "oops/access.inline.hpp"
stefank@7397 38 #include "oops/oop.inline.hpp"
stefank@7397 39 #include "runtime/java.hpp"
duke@1 40
ysr@1606 41 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ysr@1606 42 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
johnc@10683 43 jlong ReferenceProcessor::_soft_ref_timestamp_clock = 0;
ysr@1606 44
duke@1 45 void referenceProcessor_init() {
duke@1 46 ReferenceProcessor::init_statics();
duke@1 47 }
duke@1 48
duke@1 49 void ReferenceProcessor::init_statics() {
jwilhelm@22551 50 // We need a monotonically non-decreasing time in ms but
johnc@11251 51 // os::javaTimeMillis() does not guarantee monotonicity.
johnc@11251 52 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
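// javaTimeNanos() is monotonic where the underlying platform provides
// such a time source, so milliseconds derived from it are monotonic too.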
johnc@10683 53
johnc@10683 54 // Initialize the soft ref timestamp clock.
johnc@10683 55 _soft_ref_timestamp_clock = now;
johnc@10683 56 // Also update the soft ref clock in j.l.r.SoftReference
johnc@10683 57 java_lang_ref_SoftReference::set_clock(_soft_ref_timestamp_clock);
duke@1 58
ysr@1606 59 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
jcm@43455 60 if (is_server_compilation_mode_vm()) {
jcm@43455 61 _default_soft_ref_policy = new LRUMaxHeapPolicy();
jcm@43455 62 } else {
jcm@43455 63 _default_soft_ref_policy = new LRUCurrentHeapPolicy();
jcm@43455 64 }
ysr@1606 65 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
ysr@1606 66 vm_exit_during_initialization("Could not allocate reference policy object");
ysr@1606 67 }
duke@1 68 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@1 69 RefDiscoveryPolicy == ReferentBasedDiscovery,
jwilhelm@22775 70 "Unrecognized RefDiscoveryPolicy");
duke@1 71 }
duke@1 72
kbarrett@28212 73 void ReferenceProcessor::enable_discovery(bool check_no_refs) {
johnc@10683 74 #ifdef ASSERT
johnc@10683 75 // Verify that we're not currently discovering refs
kbarrett@28212 76 assert(!_discovering_refs, "nested call?");
johnc@10683 77
johnc@10683 78 if (check_no_refs) {
johnc@10683 79 // Verify that the discovered lists are empty
johnc@10683 80 verify_no_references_recorded();
johnc@10683 81 }
johnc@10683 82 #endif // ASSERT
johnc@10683 83
johnc@10683 84 // Someone could have modified the value of the static
johnc@10683 85 // field in the j.l.r.SoftReference class that holds the
johnc@10683 86 // soft reference timestamp clock using reflection or
johnc@10683 87 // Unsafe between GCs. Unconditionally update the static
johnc@10683 88 // field in ReferenceProcessor here so that we use the new
johnc@10683 89 // value during reference discovery.
johnc@10683 90
johnc@10683 91 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
johnc@10683 92 _discovering_refs = true;
johnc@10683 93 }
johnc@10683 94
tschatzl@50489 95 ReferenceProcessor::ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
ysr@8688 96 bool mt_processing,
jmasa@11396 97 uint mt_processing_degree,
ysr@8688 98 bool mt_discovery,
jmasa@11396 99 uint mt_discovery_degree,
coleenp@360 100 bool atomic_discovery,
brutisso@24845 101 BoolObjectClosure* is_alive_non_header) :
tschatzl@50489 102 _is_subject_to_discovery(is_subject_to_discovery),
duke@1 103 _discovering_refs(false),
duke@1 104 _enqueuing_is_done(false),
ysr@8688 105 _is_alive_non_header(is_alive_non_header),
duke@1 106 _processing_is_mt(mt_processing),
duke@1 107 _next_id(0)
duke@1 108 {
tschatzl@50489 109 assert(is_subject_to_discovery != NULL, "must be set");
tschatzl@50489 110
duke@1 111 _discovery_is_atomic = atomic_discovery;
duke@1 112 _discovery_is_mt = mt_discovery;
tschatzl@50492 113 _num_queues = MAX2(1U, mt_processing_degree);
tschatzl@50492 114 _max_num_queues = MAX2(_num_queues, mt_discovery_degree);
johnc@10747 115 _discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
tschatzl@50492 116 _max_num_queues * number_of_subclasses_of_ref(), mtGC);
zgu@13195 117
johnc@10747 118 if (_discovered_refs == NULL) {
duke@1 119 vm_exit_during_initialization("Could not allocate RefProc Array");
duke@1 120 }
johnc@10747 121 _discoveredSoftRefs = &_discovered_refs[0];
tschatzl@50492 122 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_queues];
tschatzl@50492 123 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_queues];
tschatzl@50492 124 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_queues];
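// The backing array is laid out as four consecutive blocks of
// _max_num_queues lists each; e.g. with _max_num_queues == 2:
//
//   _discovered_refs: [ Soft0 | Soft1 | Weak0 | Weak1 |
//                       Final0 | Final1 | Phantom0 | Phantom1 ]
//
// so each of the pointers above is just an offset into that array.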
johnc@10747 125
johnc@10747 126 // Initialize all entries to NULL
tschatzl@50492 127 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
johnc@10747 128 _discovered_refs[i].set_head(NULL);
johnc@10747 129 _discovered_refs[i].set_length(0);
duke@1 130 }
johnc@10747 131
ysr@8688 132 setup_policy(false /* default soft ref policy */);
duke@1 133 }
duke@1 134
duke@1 135 #ifndef PRODUCT
duke@1 136 void ReferenceProcessor::verify_no_references_recorded() {
duke@1 137 guarantee(!_discovering_refs, "Discovering refs?");
tschatzl@50492 138 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
johnc@10747 139 guarantee(_discovered_refs[i].is_empty(),
jmasa@37165 140 "Found non-empty discovered list at %u", i);
duke@1 141 }
duke@1 142 }
duke@1 143 #endif
duke@1 144
duke@1 145 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
tschatzl@50492 146 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
coleenp@360 147 if (UseCompressedOops) {
johnc@10747 148 f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
coleenp@360 149 } else {
johnc@10747 150 f->do_oop((oop*)_discovered_refs[i].adr_head());
coleenp@360 151 }
duke@1 152 }
duke@1 153 }
duke@1 154
coleenp@360 155 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@1 156 // Update (advance) the soft ref master clock field. This must be done
duke@1 157 // after processing the soft ref list.
johnc@11251 158
jwilhelm@22551 159 // We need a monotonically non-decreasing time in ms but
johnc@11251 160 // os::javaTimeMillis() does not guarantee monotonicity.
johnc@11251 161 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
johnc@10683 162 jlong soft_ref_clock = java_lang_ref_SoftReference::clock();
johnc@10683 163 assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync");
johnc@10683 164
duke@1 165 NOT_PRODUCT(
johnc@10683 166 if (now < _soft_ref_timestamp_clock) {
brutisso@37073 167 log_warning(gc)("time warp: " JLONG_FORMAT " to " JLONG_FORMAT,
brutisso@37073 168 _soft_ref_timestamp_clock, now);
duke@1 169 }
duke@1 170 )
johnc@11251 171 // The values of now and _soft_ref_timestamp_clock are set using
johnc@11251 172 // javaTimeNanos(), which is guaranteed to be monotonically
johnc@11251 173 // non-decreasing provided the underlying platform provides such
johnc@11251 174 // a time source (and it is bug free).
jwilhelm@22551 175 // In product mode, however, protect ourselves from non-monotonicity.
johnc@10683 176 if (now > _soft_ref_timestamp_clock) {
johnc@10683 177 _soft_ref_timestamp_clock = now;
duke@1 178 java_lang_ref_SoftReference::set_clock(now);
duke@1 179 }
duke@1 180 // Else leave clock stalled at its old value until time progresses
duke@1 181 // past clock value.
duke@1 182 }
duke@1 183
sangheki@46795 184 size_t ReferenceProcessor::total_count(DiscoveredList lists[]) const {
sla@18025 185 size_t total = 0;
tschatzl@50492 186 for (uint i = 0; i < _max_num_queues; ++i) {
sla@18025 187 total += lists[i].length();
sla@18025 188 }
sla@18025 189 return total;
sla@18025 190 }
sla@18025 191
sla@18025 192 ReferenceProcessorStats ReferenceProcessor::process_discovered_references(
sangheki@46795 193 BoolObjectClosure* is_alive,
sangheki@46795 194 OopClosure* keep_alive,
sangheki@46795 195 VoidClosure* complete_gc,
sangheki@46795 196 AbstractRefProcTaskExecutor* task_executor,
sangheki@46795 197 ReferenceProcessorPhaseTimes* phase_times) {
sangheki@46795 198
sangheki@46795 199 double start_time = os::elapsedTime();
duke@1 200
duke@1 201 assert(!enqueuing_is_done(), "If here, enqueuing should not be complete");
duke@1 202 // Stop treating discovered references specially.
duke@1 203 disable_discovery();
duke@1 204
johnc@10683 205 // If discovery was concurrent, someone could have modified
johnc@10683 206 // the value of the static field in the j.l.r.SoftReference
johnc@10683 207 // class that holds the soft reference timestamp clock using
johnc@10683 208 // reflection or Unsafe between when discovery was enabled and
johnc@10683 209 // now. Unconditionally update the static field in ReferenceProcessor
johnc@10683 210 // here so that we use the new value during processing of the
johnc@10683 211 // discovered soft refs.
johnc@10683 212
johnc@10683 213 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
johnc@10683 214
sangheki@46795 215 ReferenceProcessorStats stats(total_count(_discoveredSoftRefs),
sangheki@46795 216 total_count(_discoveredWeakRefs),
sangheki@46795 217 total_count(_discoveredFinalRefs),
sangheki@46795 218 total_count(_discoveredPhantomRefs));
brutisso@33103 219
duke@1 220 // Soft references
duke@1 221 {
sangheki@46795 222 RefProcPhaseTimesTracker tt(REF_SOFT, phase_times, this);
brutisso@33103 223 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
sangheki@46795 224 is_alive, keep_alive, complete_gc, task_executor, phase_times);
duke@1 225 }
duke@1 226
duke@1 227 update_soft_ref_master_clock();
duke@1 228
duke@1 229 // Weak references
duke@1 230 {
sangheki@46795 231 RefProcPhaseTimesTracker tt(REF_WEAK, phase_times, this);
brutisso@33103 232 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
sangheki@46795 233 is_alive, keep_alive, complete_gc, task_executor, phase_times);
duke@1 234 }
duke@1 235
duke@1 236 // Final references
duke@1 237 {
sangheki@46795 238 RefProcPhaseTimesTracker tt(REF_FINAL, phase_times, this);
brutisso@33103 239 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
sangheki@46795 240 is_alive, keep_alive, complete_gc, task_executor, phase_times);
duke@1 241 }
duke@1 242
duke@1 243 // Phantom references
duke@1 244 {
sangheki@46795 245 RefProcPhaseTimesTracker tt(REF_PHANTOM, phase_times, this);
kbarrett@35225 246 process_discovered_reflist(_discoveredPhantomRefs, NULL, true,
sangheki@46795 247 is_alive, keep_alive, complete_gc, task_executor, phase_times);
duke@1 248 }
duke@1 249
stefank@47803 250 if (task_executor != NULL) {
stefank@47803 251 // Record the work done by the parallel workers.
stefank@47803 252 task_executor->set_single_threaded_mode();
duke@1 253 }
sla@18025 254
sangheki@46795 255 phase_times->set_total_time_ms((os::elapsedTime() - start_time) * 1000);
sangheki@46795 256
brutisso@33103 257 return stats;
duke@1 258 }
duke@1 259
johnc@10670 260 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
tschatzl@50492 261 _current_discovered_addr = java_lang_ref_Reference::discovered_addr_raw(_current_discovered);
tschatzl@50492 262 oop discovered = java_lang_ref_Reference::discovered(_current_discovered);
tschatzl@50492 263 assert(_current_discovered_addr && oopDesc::is_oop_or_null(discovered),
david@33105 264 "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
tschatzl@50492 265 _next_discovered = discovered;
tschatzl@50492 266
tschatzl@50492 267 _referent_addr = java_lang_ref_Reference::referent_addr_raw(_current_discovered);
tschatzl@50492 268 _referent = java_lang_ref_Reference::referent(_current_discovered);
duke@1 269 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@1 270 "Wrong oop found in java.lang.Reference object");
duke@1 271 assert(allow_null_referent ?
coleenp@46968 272 oopDesc::is_oop_or_null(_referent)
coleenp@46968 273 : oopDesc::is_oop(_referent),
david@33105 274 "Expected an oop%s for referent field at " PTR_FORMAT,
david@33105 275 (allow_null_referent ? " or NULL" : ""),
david@33105 276 p2i(_referent));
duke@1 277 }
duke@1 278
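// A sketch of the unlinking done by remove() below, with P =
// _prev_discovered, C = _current_discovered, N = _next_discovered:
//
//   mid-list:           P -> C -> N    becomes    P -> N
//   at the tail (the tail's discovered field self-loops):
//                       P -> C -> C    becomes    P -> P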
johnc@10670 279 void DiscoveredListIterator::remove() {
tschatzl@50492 280 assert(oopDesc::is_oop(_current_discovered), "Dropping a bad reference");
tschatzl@50492 281 RawAccess<>::oop_store(_current_discovered_addr, oop(NULL));
stefank@10524 282
coleenp@360 283 // First _prev_discovered_addr actually points into DiscoveredList (gross).
stefank@10524 284 oop new_next;
tschatzl@50492 285 if (_next_discovered == _current_discovered) {
stefank@10524 286 // At the end of the list, we should make _prev_discovered point to itself.
stefank@10524 287 // If _current_discovered is the first ref, then _prev_discovered_addr will be
stefank@10524 288 // in the DiscoveredList, and _prev_discovered will be NULL.
tschatzl@50492 289 new_next = _prev_discovered;
stefank@10524 290 } else {
tschatzl@50492 291 new_next = _next_discovered;
stefank@10524 292 }
pliden@22768 293 // Remove Reference object from discovered list. Note that G1 does not need a
pliden@22768 294 // pre-barrier here because we know the Reference has already been found/marked,
pliden@22768 295 // that's how it ended up in the discovered list in the first place.
tschatzl@50492 296 RawAccess<>::oop_store(_prev_discovered_addr, new_next);
duke@1 297 NOT_PRODUCT(_removed++);
ysr@1605 298 _refs_list.dec_length(1);
duke@1 299 }
duke@1 300
johnc@10670 301 void DiscoveredListIterator::clear_referent() {
eosterlund@48195 302 RawAccess<>::oop_store(_referent_addr, oop(NULL));
duke@1 303 }
duke@1 304
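// enqueue() below makes the current Reference inactive by self-looping
// its next field (a j.l.r.Reference whose next points to itself is not
// active), then stores the next discovered ref into its discovered
// field, chaining the entries together for the pending list.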
tschatzl@50585 305 void DiscoveredListIterator::enqueue() {
tschatzl@50585 306 // Self-loop next, so as to make Ref not active.
tschatzl@50585 307 java_lang_ref_Reference::set_next_raw(_current_discovered, _current_discovered);
tschatzl@50585 308
tschatzl@50585 309 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_current_discovered,
tschatzl@50585 310 java_lang_ref_Reference::discovered_offset,
tschatzl@50585 311 _next_discovered);
tschatzl@50585 312 }
tschatzl@50585 313
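// complete_enqueue() splices the whole chain built by enqueue() onto
// the global pending list in one step: the list head becomes the new
// pending list, and the old pending list is hung off the discovered
// field of the last element visited.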
tschatzl@50590 314 void DiscoveredListIterator::complete_enqueue() {
tschatzl@50585 315 if (_prev_discovered != NULL) {
tschatzl@50585 316 // This is the last object.
tschatzl@50585 317 // Swap refs_list into pending list and set obj's
tschatzl@50585 318 // discovered to what we read from the pending list.
tschatzl@50585 319 oop old = Universe::swap_reference_pending_list(_refs_list.head());
tschatzl@50585 320 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(_prev_discovered, java_lang_ref_Reference::discovered_offset, old);
tschatzl@50585 321 }
tschatzl@50585 322 }
tschatzl@50585 323
duke@1 324 // NOTE: process_phase*() are largely similar, and at a high level
duke@1 325 // merely iterate over the extant list applying a predicate to
duke@1 326 // each of its elements and possibly removing that element from the
duke@1 327 // list and applying some further closures to that element.
duke@1 328 // We should consider the possibility of replacing these
duke@1 329 // process_phase*() methods by abstracting them into
duke@1 330 // a single general iterator invocation that receives appropriate
duke@1 331 // closures that accomplish this work.
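// A sketch of that common pattern (illustrative only; the predicate and
// the closures applied vary per phase):
//
//   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
//   while (iter.has_next()) {
//     iter.load_ptrs(DEBUG_ONLY(allow_null_referent));
//     if (should_drop(iter)) {       // hypothetical per-phase predicate
//       iter.remove();               // unlink from the discovered list
//       iter.make_referent_alive();  // and/or other per-phase actions
//       iter.move_to_next();
//     } else {
//       iter.next();
//     }
//   }
//   complete_gc->do_void();          // close the newly reachable set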
duke@1 332
duke@1 333 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@1 334 // referents are not alive, but that should be kept alive for policy reasons.
duke@1 335 // Keep alive the transitive closure of all such referents.
duke@1 336 void
coleenp@360 337 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@1 338 ReferencePolicy* policy,
duke@1 339 BoolObjectClosure* is_alive,
duke@1 340 OopClosure* keep_alive,
duke@1 341 VoidClosure* complete_gc) {
duke@1 342 assert(policy != NULL, "Must have a non-NULL policy");
brutisso@24845 343 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@1 344 // Decide which softly reachable refs should be kept alive.
duke@1 345 while (iter.has_next()) {
duke@1 346 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@1 347 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
johnc@10683 348 if (referent_is_dead &&
johnc@10683 349 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
brutisso@35061 350 log_develop_trace(gc, ref)("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
brutisso@35061 351 p2i(iter.obj()), iter.obj()->klass()->internal_name());
ysr@1605 352 // Remove Reference object from list
ysr@1605 353 iter.remove();
duke@1 354 // keep the referent around
duke@1 355 iter.make_referent_alive();
ysr@1605 356 iter.move_to_next();
duke@1 357 } else {
duke@1 358 iter.next();
duke@1 359 }
duke@1 360 }
duke@1 361 // Close the reachable set
duke@1 362 complete_gc->do_void();
brutisso@35061 363 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT " discovered Refs by policy, from list " INTPTR_FORMAT,
jmasa@36198 364 iter.removed(), iter.processed(), p2i(&refs_list));
sangheki@46795 365 }
duke@1 366
tschatzl@50489 367 void ReferenceProcessor::process_phase2(DiscoveredList& refs_list,
tschatzl@50489 368 BoolObjectClosure* is_alive,
tschatzl@50489 369 OopClosure* keep_alive,
tschatzl@50489 370 VoidClosure* complete_gc) {
tschatzl@50489 371 if (discovery_is_atomic()) {
tschatzl@50489 372 // complete_gc is ignored in this case for this phase
tschatzl@50489 373 pp2_work(refs_list, is_alive, keep_alive);
tschatzl@50489 374 } else {
tschatzl@50489 375 assert(complete_gc != NULL, "Error");
tschatzl@50489 376 pp2_work_concurrent_discovery(refs_list, is_alive,
tschatzl@50489 377 keep_alive, complete_gc);
tschatzl@50489 378 }
tschatzl@50489 379 }
duke@1 380 // Traverse the list and remove any Refs that are not active, or
duke@1 381 // whose referents are either alive or NULL.
duke@1 382 void
coleenp@360 383 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@1 384 BoolObjectClosure* is_alive,
coleenp@360 385 OopClosure* keep_alive) {
duke@1 386 assert(discovery_is_atomic(), "Error");
brutisso@24845 387 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@1 388 while (iter.has_next()) {
duke@1 389 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@360 390 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@360 391 assert(next == NULL, "Should not discover inactive Reference");
duke@1 392 if (iter.is_referent_alive()) {
brutisso@35061 393 log_develop_trace(gc, ref)("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
brutisso@35061 394 p2i(iter.obj()), iter.obj()->klass()->internal_name());
duke@1 395 // The referent is reachable after all.
ysr@1605 396 // Remove Reference object from list.
ysr@1605 397 iter.remove();
duke@1 398 // Update the referent pointer as necessary: Note that this
duke@1 399 // should not entail any recursive marking because the
duke@1 400 // referent must already have been traversed.
duke@1 401 iter.make_referent_alive();
ysr@1605 402 iter.move_to_next();
duke@1 403 } else {
duke@1 404 iter.next();
duke@1 405 }
duke@1 406 }
duke@1 407 NOT_PRODUCT(
brutisso@35061 408 if (iter.processed() > 0) {
brutisso@35061 409 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
david@29800 410 " Refs in discovered list " INTPTR_FORMAT,
jmasa@36198 411 iter.removed(), iter.processed(), p2i(&refs_list));
duke@1 412 }
duke@1 413 )
duke@1 414 }
duke@1 415
duke@1 416 void
coleenp@360 417 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@360 418 BoolObjectClosure* is_alive,
coleenp@360 419 OopClosure* keep_alive,
coleenp@360 420 VoidClosure* complete_gc) {
duke@1 421 assert(!discovery_is_atomic(), "Error");
brutisso@24845 422 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@1 423 while (iter.has_next()) {
duke@1 424 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
eosterlund@49472 425 HeapWord* next_addr = java_lang_ref_Reference::next_addr_raw(iter.obj());
coleenp@360 426 oop next = java_lang_ref_Reference::next(iter.obj());
duke@1 427 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@360 428 next != NULL)) {
coleenp@46968 429 assert(oopDesc::is_oop_or_null(next), "Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next));
duke@1 430 // Remove Reference object from list
duke@1 431 iter.remove();
duke@1 432 // Trace the cohorts
duke@1 433 iter.make_referent_alive();
coleenp@360 434 if (UseCompressedOops) {
coleenp@360 435 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@360 436 } else {
coleenp@360 437 keep_alive->do_oop((oop*)next_addr);
coleenp@360 438 }
ysr@1605 439 iter.move_to_next();
duke@1 440 } else {
duke@1 441 iter.next();
duke@1 442 }
duke@1 443 }
duke@1 444 // Now close the newly reachable set
duke@1 445 complete_gc->do_void();
duke@1 446 NOT_PRODUCT(
brutisso@35061 447 if (iter.processed() > 0) {
brutisso@35061 448 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
david@29800 449 " Refs in discovered list " INTPTR_FORMAT,
jmasa@36198 450 iter.removed(), iter.processed(), p2i(&refs_list));
duke@1 451 }
duke@1 452 )
duke@1 453 }
duke@1 454
tschatzl@50492 455 void ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
tschatzl@50492 456 bool clear_referent,
tschatzl@50492 457 BoolObjectClosure* is_alive,
tschatzl@50492 458 OopClosure* keep_alive,
tschatzl@50492 459 VoidClosure* complete_gc) {
jmasa@6759 460 ResourceMark rm;
brutisso@24845 461 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@1 462 while (iter.has_next()) {
duke@1 463 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@1 464 if (clear_referent) {
duke@1 465 // NULL out referent pointer
duke@1 466 iter.clear_referent();
duke@1 467 } else {
duke@1 468 // keep the referent around
duke@1 469 iter.make_referent_alive();
duke@1 470 }
tschatzl@50585 471 iter.enqueue();
brutisso@35061 472 log_develop_trace(gc, ref)("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
brutisso@35061 473 clear_referent ? "cleared " : "", p2i(iter.obj()), iter.obj()->klass()->internal_name());
coleenp@46968 474 assert(oopDesc::is_oop(iter.obj(), UseConcMarkSweepGC), "Adding a bad reference");
duke@1 475 iter.next();
duke@1 476 }
tschatzl@50590 477 iter.complete_enqueue();
duke@1 478 // Close the reachable set
duke@1 479 complete_gc->do_void();
tschatzl@50585 480 // Clear the list.
tschatzl@50585 481 refs_list.set_head(NULL);
tschatzl@50585 482 refs_list.set_length(0);
duke@1 483 }
duke@1 484
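// Discovered lists are terminated by a self-loop (the last element's
// discovered field points to itself), which is why the walk below stops
// when next == obj rather than when next == NULL.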
duke@1 485 void
stefank@10524 486 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
stefank@10524 487 oop obj = NULL;
stefank@10524 488 oop next = refs_list.head();
stefank@10524 489 while (next != obj) {
stefank@10524 490 obj = next;
stefank@10524 491 next = java_lang_ref_Reference::discovered(obj);
stefank@10524 492 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
stefank@10524 493 }
stefank@10524 494 refs_list.set_head(NULL);
stefank@10524 495 refs_list.set_length(0);
stefank@10524 496 }
stefank@10524 497
ysr@1374 498 void ReferenceProcessor::abandon_partial_discovery() {
ysr@1374 499 // loop over the lists
tschatzl@50492 500 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
tschatzl@50492 501 if ((i % _max_num_queues) == 0) {
brutisso@35061 502 log_develop_trace(gc, ref)("Abandoning %s discovered list", list_name(i));
ysr@1374 503 }
kbarrett@30766 504 clear_discovered_references(_discovered_refs[i]);
duke@1 505 }
duke@1 506 }
duke@1 507
sangheki@46795 508 size_t ReferenceProcessor::total_reference_count(ReferenceType type) const {
sangheki@46795 509 DiscoveredList* list = NULL;
sangheki@46795 510
sangheki@46795 511 switch (type) {
sangheki@46795 512 case REF_SOFT:
sangheki@46795 513 list = _discoveredSoftRefs;
sangheki@46795 514 break;
sangheki@46795 515 case REF_WEAK:
sangheki@46795 516 list = _discoveredWeakRefs;
sangheki@46795 517 break;
sangheki@46795 518 case REF_FINAL:
sangheki@46795 519 list = _discoveredFinalRefs;
sangheki@46795 520 break;
sangheki@46795 521 case REF_PHANTOM:
sangheki@46795 522 list = _discoveredPhantomRefs;
sangheki@46795 523 break;
sangheki@46795 524 case REF_OTHER:
sangheki@46795 525 case REF_NONE:
sangheki@46795 526 default:
sangheki@46795 527 ShouldNotReachHere();
sangheki@46795 528 }
sangheki@46795 529 return total_count(list);
sangheki@46795 530 }
sangheki@46795 531
duke@1 532 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@1 533 public:
sangheki@46795 534 RefProcPhase1Task(ReferenceProcessor& ref_processor,
sangheki@46795 535 DiscoveredList refs_lists[],
sangheki@46795 536 ReferencePolicy* policy,
sangheki@46795 537 bool marks_oops_alive,
sangheki@46795 538 ReferenceProcessorPhaseTimes* phase_times)
sangheki@46795 539 : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
duke@1 540 _policy(policy)
duke@1 541 { }
duke@1 542 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@1 543 OopClosure& keep_alive,
duke@1 544 VoidClosure& complete_gc)
duke@1 545 {
sangheki@46795 546 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase1, _phase_times, i);
sangheki@46795 547
pliden@37067 548 _ref_processor.process_phase1(_refs_lists[i], _policy,
duke@1 549 &is_alive, &keep_alive, &complete_gc);
duke@1 550 }
duke@1 551 private:
duke@1 552 ReferencePolicy* _policy;
duke@1 553 };
duke@1 554
duke@1 555 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@1 556 public:
sangheki@46795 557 RefProcPhase2Task(ReferenceProcessor& ref_processor,
sangheki@46795 558 DiscoveredList refs_lists[],
sangheki@46795 559 bool marks_oops_alive,
sangheki@46795 560 ReferenceProcessorPhaseTimes* phase_times)
sangheki@46795 561 : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times)
duke@1 562 { }
duke@1 563 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@1 564 OopClosure& keep_alive,
duke@1 565 VoidClosure& complete_gc)
duke@1 566 {
sangheki@46795 567 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase2, _phase_times, i);
sangheki@46795 568
duke@1 569 _ref_processor.process_phase2(_refs_lists[i],
duke@1 570 &is_alive, &keep_alive, &complete_gc);
duke@1 571 }
duke@1 572 };
duke@1 573
duke@1 574 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@1 575 public:
sangheki@46795 576 RefProcPhase3Task(ReferenceProcessor& ref_processor,
sangheki@46795 577 DiscoveredList refs_lists[],
sangheki@46795 578 bool clear_referent,
sangheki@46795 579 bool marks_oops_alive,
sangheki@46795 580 ReferenceProcessorPhaseTimes* phase_times)
sangheki@46795 581 : ProcessTask(ref_processor, refs_lists, marks_oops_alive, phase_times),
duke@1 582 _clear_referent(clear_referent)
duke@1 583 { }
duke@1 584 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@1 585 OopClosure& keep_alive,
duke@1 586 VoidClosure& complete_gc)
duke@1 587 {
sangheki@46795 588 RefProcWorkerTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, _phase_times, i);
sangheki@46795 589
duke@1 590 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@1 591 &is_alive, &keep_alive, &complete_gc);
duke@1 592 }
duke@1 593 private:
duke@1 594 bool _clear_referent;
duke@1 595 };
duke@1 596
brutisso@35061 597 #ifndef PRODUCT
jmasa@37165 598 void ReferenceProcessor::log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_refs) {
brutisso@35061 599 if (!log_is_enabled(Trace, gc, ref)) {
brutisso@35061 600 return;
brutisso@35061 601 }
brutisso@35061 602
brutisso@35061 603 stringStream st;
jmasa@37165 604 for (uint i = 0; i < active_length; ++i) {
brutisso@35061 605 st.print(SIZE_FORMAT " ", ref_lists[i].length());
brutisso@35061 606 }
brutisso@35061 607 log_develop_trace(gc, ref)("%s= " SIZE_FORMAT, st.as_string(), total_refs);
jmasa@37165 608 #ifdef ASSERT
tschatzl@50492 609 for (uint i = active_length; i < _max_num_queues; i++) {
jmasa@37165 610 assert(ref_lists[i].length() == 0, SIZE_FORMAT " unexpected References in %u",
jmasa@37165 611 ref_lists[i].length(), i);
jmasa@37165 612 }
jmasa@37165 613 #endif
brutisso@35061 614 }
brutisso@35061 615 #endif
brutisso@35061 616
jmasa@37165 617 void ReferenceProcessor::set_active_mt_degree(uint v) {
tschatzl@50492 618 _num_queues = v;
jmasa@37165 619 _next_id = 0;
jmasa@37165 620 }
jmasa@37165 621
duke@1 622 // Balances reference queues.
jmasa@6759 623 // Move entries from all queues[0, 1, ..., _max_num_queues-1] to
jmasa@6759 624 // queues[0, 1, ..., _num_queues-1] because only the first _num_queues
jmasa@6759 625 // corresponding to the active workers will be processed.
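// For example (illustrative numbers only): with _max_num_queues == 4,
// _num_queues == 2 and list lengths {8, 0, 4, 2}, avg_refs is
// 14 / 2 + 1 = 8, and the entries of queues 2 and 3 are spliced onto
// the active queues so that none of them grows beyond roughly avg_refs.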
duke@1 626 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@1 627 {
duke@1 628 // calculate total length
duke@1 629 size_t total_refs = 0;
brutisso@35061 630 log_develop_trace(gc, ref)("Balance ref_lists ");
jmasa@6759 631
tschatzl@50492 632 for (uint i = 0; i < _max_num_queues; ++i) {
duke@1 633 total_refs += ref_lists[i].length();
jmasa@37165 634 }
tschatzl@50492 635 log_reflist_counts(ref_lists, _max_num_queues, total_refs);
tschatzl@50492 636 size_t avg_refs = total_refs / _num_queues + 1;
jmasa@11396 637 uint to_idx = 0;
tschatzl@50492 638 for (uint from_idx = 0; from_idx < _max_num_queues; from_idx++) {
jmasa@6759 639 bool move_all = false;
tschatzl@50492 640 if (from_idx >= _num_queues) {
jmasa@6759 641 move_all = ref_lists[from_idx].length() > 0;
jmasa@6759 642 }
jmasa@6759 643 while ((ref_lists[from_idx].length() > avg_refs) ||
jmasa@6759 644 move_all) {
tschatzl@50492 645 assert(to_idx < _num_queues, "Sanity Check!");
duke@1 646 if (ref_lists[to_idx].length() < avg_refs) {
duke@1 647 // move superfluous refs
jmasa@6759 648 size_t refs_to_move;
jmasa@6759 649 // Move all the Ref's if the from queue will not be processed.
jmasa@6759 650 if (move_all) {
jmasa@6759 651 refs_to_move = MIN2(ref_lists[from_idx].length(),
jmasa@6759 652 avg_refs - ref_lists[to_idx].length());
jmasa@6759 653 } else {
jmasa@6759 654 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
jmasa@6759 655 avg_refs - ref_lists[to_idx].length());
jmasa@6759 656 }
stefank@10524 657
stefank@10524 658 assert(refs_to_move > 0, "otherwise the code below will fail");
stefank@10524 659
duke@1 660 oop move_head = ref_lists[from_idx].head();
duke@1 661 oop move_tail = move_head;
duke@1 662 oop new_head = move_head;
duke@1 663 // find an element to split the list on
duke@1 664 for (size_t j = 0; j < refs_to_move; ++j) {
duke@1 665 move_tail = new_head;
coleenp@360 666 new_head = java_lang_ref_Reference::discovered(new_head);
duke@1 667 }
stefank@10524 668
stefank@10524 669 // Add the chain to the to list.
stefank@10524 670 if (ref_lists[to_idx].head() == NULL) {
stefank@10524 671 // to list is empty. Make a loop at the end.
brutisso@24845 672 java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
stefank@10524 673 } else {
brutisso@24845 674 java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
stefank@10524 675 }
duke@1 676 ref_lists[to_idx].set_head(move_head);
ysr@1605 677 ref_lists[to_idx].inc_length(refs_to_move);
stefank@10524 678
stefank@10524 679 // Remove the chain from the from list.
stefank@10524 680 if (move_tail == new_head) {
stefank@10524 681 // We found the end of the from list.
stefank@10524 682 ref_lists[from_idx].set_head(NULL);
stefank@10524 683 } else {
stefank@10524 684 ref_lists[from_idx].set_head(new_head);
stefank@10524 685 }
ysr@1605 686 ref_lists[from_idx].dec_length(refs_to_move);
jmasa@6759 687 if (ref_lists[from_idx].length() == 0) {
jmasa@6759 688 break;
jmasa@6759 689 }
duke@1 690 } else {
tschatzl@50492 691 to_idx = (to_idx + 1) % _num_queues;
duke@1 692 }
duke@1 693 }
duke@1 694 }
jmasa@6759 695 #ifdef ASSERT
jmasa@6759 696 size_t balanced_total_refs = 0;
tschatzl@50492 697 for (uint i = 0; i < _num_queues; ++i) {
jmasa@6759 698 balanced_total_refs += ref_lists[i].length();
jmasa@37165 699 }
tschatzl@50492 700 log_reflist_counts(ref_lists, _num_queues, balanced_total_refs);
jmasa@6759 701 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
jmasa@6759 702 #endif
jmasa@6759 703 }
jmasa@6759 704
brutisso@33103 705 void ReferenceProcessor::process_discovered_reflist(
sangheki@46795 706 DiscoveredList refs_lists[],
sangheki@46795 707 ReferencePolicy* policy,
sangheki@46795 708 bool clear_referent,
sangheki@46795 709 BoolObjectClosure* is_alive,
sangheki@46795 710 OopClosure* keep_alive,
sangheki@46795 711 VoidClosure* complete_gc,
sangheki@46795 712 AbstractRefProcTaskExecutor* task_executor,
sangheki@46795 713 ReferenceProcessorPhaseTimes* phase_times)
duke@1 714 {
jmasa@6759 715 bool mt_processing = task_executor != NULL && _processing_is_mt;
sangheki@46795 716
sangheki@46795 717 phase_times->set_processing_is_mt(mt_processing);
sangheki@46795 718
tschatzl@50303 719 if (mt_processing && ParallelRefProcBalancingEnabled) {
sangheki@46795 720 RefProcBalanceQueuesTimeTracker tt(phase_times);
duke@1 721 balance_queues(refs_lists);
duke@1 722 }
sla@18025 723
duke@1 724 // Phase 1 (soft refs only):
duke@1 725 // . Traverse the list and remove any SoftReferences whose
duke@1 726 // referents are not alive, but that should be kept alive for
duke@1 727 // policy reasons. Keep alive the transitive closure of all
duke@1 728 // such referents.
duke@1 729 if (policy != NULL) {
sangheki@46795 730 RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase1, phase_times);
sangheki@46795 731
jmasa@6759 732 if (mt_processing) {
sangheki@46795 733 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/, phase_times);
duke@1 734 task_executor->execute(phase1);
duke@1 735 } else {
tschatzl@50492 736 for (uint i = 0; i < _max_num_queues; i++) {
duke@1 737 process_phase1(refs_lists[i], policy,
duke@1 738 is_alive, keep_alive, complete_gc);
duke@1 739 }
duke@1 740 }
duke@1 741 } else { // policy == NULL
duke@1 742 assert(refs_lists != _discoveredSoftRefs,
duke@1 743 "Policy must be specified for soft references.");
duke@1 744 }
duke@1 745
duke@1 746 // Phase 2:
duke@1 747 // . Traverse the list and remove any refs whose referents are alive.
sangheki@46795 748 {
sangheki@46795 749 RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase2, phase_times);
sangheki@46795 750
sangheki@46795 751 if (mt_processing) {
sangheki@46795 752 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/, phase_times);
sangheki@46795 753 task_executor->execute(phase2);
sangheki@46795 754 } else {
tschatzl@50492 755 for (uint i = 0; i < _max_num_queues; i++) {
sangheki@46795 756 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
sangheki@46795 757 }
duke@1 758 }
duke@1 759 }
duke@1 760
duke@1 761 // Phase 3:
duke@1 762 // . Traverse the list and process referents as appropriate.
sangheki@46795 763 {
sangheki@46795 764 RefProcParPhaseTimeTracker tt(ReferenceProcessorPhaseTimes::RefPhase3, phase_times);
sangheki@46795 765
sangheki@46795 766 if (mt_processing) {
sangheki@46795 767 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/, phase_times);
sangheki@46795 768 task_executor->execute(phase3);
sangheki@46795 769 } else {
tschatzl@50492 770 for (uint i = 0; i < _max_num_queues; i++) {
sangheki@46795 771 process_phase3(refs_lists[i], clear_referent,
sangheki@46795 772 is_alive, keep_alive, complete_gc);
sangheki@46795 773 }
duke@1 774 }
duke@1 775 }
duke@1 776 }
duke@1 777
duke@1 778 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
jmasa@11396 779 uint id = 0;
duke@1 780 // Determine the queue index to use for this object.
duke@1 781 if (_discovery_is_mt) {
duke@1 782 // During a multi-threaded discovery phase,
duke@1 783 // each thread saves to its "own" list.
duke@1 784 Thread* thr = Thread::current();
johnc@7399 785 id = thr->as_Worker_thread()->id();
duke@1 786 } else {
duke@1 787 // single-threaded discovery, we save in round-robin
duke@1 788 // fashion to each of the lists.
duke@1 789 if (_processing_is_mt) {
duke@1 790 id = next_id();
duke@1 791 }
duke@1 792 }
tschatzl@50492 793 assert(id < _max_num_queues, "Id is out-of-bounds (id %u and max id %u)", id, _max_num_queues);
duke@1 794
duke@1 795 // Get the discovered queue to which we will add
duke@1 796 DiscoveredList* list = NULL;
duke@1 797 switch (rt) {
duke@1 798 case REF_OTHER:
duke@1 799 // Unknown reference type, no special treatment
duke@1 800 break;
duke@1 801 case REF_SOFT:
duke@1 802 list = &_discoveredSoftRefs[id];
duke@1 803 break;
duke@1 804 case REF_WEAK:
duke@1 805 list = &_discoveredWeakRefs[id];
duke@1 806 break;
duke@1 807 case REF_FINAL:
duke@1 808 list = &_discoveredFinalRefs[id];
duke@1 809 break;
duke@1 810 case REF_PHANTOM:
duke@1 811 list = &_discoveredPhantomRefs[id];
duke@1 812 break;
duke@1 813 case REF_NONE:
coleenp@13738 814 // we should not reach here if we are an InstanceRefKlass
duke@1 815 default:
kbarrett@32817 816 ShouldNotReachHere();
duke@1 817 }
brutisso@35061 818 log_develop_trace(gc, ref)("Thread %u gets list " INTPTR_FORMAT, id, p2i(list));
duke@1 819 return list;
duke@1 820 }
duke@1 821
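// A sketch of the lock-free push performed below: the CAS on obj's
// discovered field (NULL -> next_discovered) both claims obj for this
// thread and links it to the old list head in a single step, so losing
// the race simply means another thread discovered the Reference first.
//
//   head -> r1 -> r2 -> r2 (self-looped tail)
//   becomes
//   head -> obj -> r1 -> r2 -> r2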
coleenp@360 822 inline void
coleenp@360 823 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@360 824 oop obj,
coleenp@360 825 HeapWord* discovered_addr) {
duke@1 826 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@1 827 // First we must make sure this object is only enqueued once. CAS in a non null
duke@1 828 // discovered_addr.
ysr@1374 829 oop current_head = refs_list.head();
stefank@10524 830 // The last ref must have its discovered field pointing to itself.
stefank@10524 831 oop next_discovered = (current_head != NULL) ? current_head : obj;
ysr@1374 832
eosterlund@48195 833 oop retest = RawAccess<>::oop_atomic_cmpxchg(next_discovered, discovered_addr, oop(NULL));
eosterlund@48195 834
duke@1 835 if (retest == NULL) {
duke@1 836 // This thread just won the right to enqueue the object.
ysr@10526 837 // We have separate lists for enqueueing, so no synchronization
duke@1 838 // is necessary.
coleenp@360 839 refs_list.set_head(obj);
ysr@1605 840 refs_list.inc_length(1);
johnc@7399 841
brutisso@35061 842 log_develop_trace(gc, ref)("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
brutisso@35061 843 p2i(obj), obj->klass()->internal_name());
duke@1 844 } else {
duke@1 845 // If retest was non NULL, another thread beat us to it:
duke@1 846 // The reference has already been discovered...
brutisso@35061 847 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
brutisso@35061 848 p2i(obj), obj->klass()->internal_name());
duke@1 849 }
eosterlund@48195 850 }
duke@1 851
ysr@7420 852 #ifndef PRODUCT
ysr@7420 853 // Non-atomic (i.e. concurrent) discovery might allow us
ysr@7420 854 // to observe j.l.References with NULL referents, being those
ysr@7420 855 // cleared concurrently by mutators during (or after) discovery.
ysr@7420 856 void ReferenceProcessor::verify_referent(oop obj) {
ysr@7420 857 bool da = discovery_is_atomic();
ysr@7420 858 oop referent = java_lang_ref_Reference::referent(obj);
coleenp@46968 859 assert(da ? oopDesc::is_oop(referent) : oopDesc::is_oop_or_null(referent),
david@33105 860 "Bad referent " INTPTR_FORMAT " found in Reference "
david@33105 861 INTPTR_FORMAT " during %satomic discovery ",
david@33105 862 p2i(referent), p2i(obj), da ? "" : "non-");
ysr@7420 863 }
ysr@7420 864 #endif
ysr@7420 865
tschatzl@50489 866 bool ReferenceProcessor::is_subject_to_discovery(oop const obj) const {
tschatzl@50489 867 return _is_subject_to_discovery->do_object_b(obj);
tschatzl@50489 868 }
tschatzl@50489 869
duke@1 870 // We mention two of several possible choices here:
duke@1 871 // #0: if the reference object is not in the "originating generation"
duke@1 872 // (or part of the heap being collected, indicated by our "span")
duke@1 873 // we don't treat it specially (i.e. we scan it as we would
duke@1 874 // a normal oop, treating its references as strong references).
ysr@10526 875 // This means that references can't be discovered unless their
duke@1 876 // referent is also in the same span. This is the simplest,
duke@1 877 // most "local" and most conservative approach, albeit one
duke@1 878 // that may cause weak references to be enqueued least promptly.
duke@1 879 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@1 880 // #1: the reference object may be in any generation (span), but if
duke@1 881 // the referent is in the generation (span) being currently collected
duke@1 882 // then we can discover the reference object, provided
duke@1 883 // the object has not already been discovered by
duke@1 884 // a different concurrently running collector (as may be the
duke@1 885 // case, for instance, if the reference object is in CMS and
duke@1 886 // the referent in DefNewGeneration), and provided the processing
duke@1 887 // of this reference object by the current collector will
duke@1 888 // appear atomic to every other collector in the system.
duke@1 889 // (Thus, for instance, a concurrent collector may not
duke@1 890 // discover references in other generations even if the
duke@1 891 // referent is in its own generation). This policy may,
duke@1 892 // in certain cases, enqueue references somewhat sooner than
duke@1 893 // might Policy #0 above, but at marginally increased cost
duke@1 894 // and complexity in processing these references.
duke@1 895 // We call this choice the "ReferentBasedDiscovery" policy.
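// In summary, discover_reference() below returns false (no special
// treatment) when, among other checks:
//   - discovery is disabled or RegisterReferences is off;
//   - the Reference is no longer active (its next field is non-NULL);
//   - under ReferenceBasedDiscovery, obj is not subject to discovery;
//   - the referent is already known to be strongly reachable;
//   - it is a SoftReference that the current policy would not clear yet;
//   - under ReferentBasedDiscovery, the Reference was already discovered
//     by another collector, or neither it nor its referent is subject to
//     discovery here.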
duke@1 896 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
ysr@10526 897 // Make sure we are discovering refs (rather than processing discovered refs).
duke@1 898 if (!_discovering_refs || !RegisterReferences) {
duke@1 899 return false;
duke@1 900 }
ysr@10526 901 // We only discover active references.
coleenp@360 902 oop next = java_lang_ref_Reference::next(obj);
ysr@10526 903 if (next != NULL) { // Ref is no longer active
duke@1 904 return false;
duke@1 905 }
duke@1 906
duke@1 907 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
tschatzl@50489 908 !is_subject_to_discovery(obj)) {
duke@1 909 // Reference is not in the originating generation;
duke@1 910 // don't treat it specially (i.e. we want to scan it as a normal
duke@1 911 // object with strong references).
duke@1 912 return false;
duke@1 913 }
duke@1 914
ysr@10526 915 // We only discover references whose referents are not (yet)
ysr@10526 916 // known to be strongly reachable.
duke@1 917 if (is_alive_non_header() != NULL) {
ysr@7420 918 verify_referent(obj);
ysr@7420 919 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
duke@1 920 return false; // referent is reachable
duke@1 921 }
duke@1 922 }
ysr@1606 923 if (rt == REF_SOFT) {
ysr@1606 924 // For soft refs we can decide now if these are not
ysr@1606 925 // current candidates for clearing, in which case we
ysr@1606 926 // can mark through them now, rather than delaying that
ysr@1606 927 // to the reference-processing phase. Since all current
ysr@1606 928 // time-stamp policies advance the soft-ref clock only
jwilhelm@32623 929 // at a full collection cycle, this is currently always
ysr@1606 930 // accurate.
johnc@10683 931 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
ysr@1606 932 return false;
ysr@1606 933 }
ysr@1606 934 }
duke@1 935
johnc@10670 936 ResourceMark rm; // Needed for tracing.
johnc@10670 937
eosterlund@49472 938 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr_raw(obj);
ysr@1374 939 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@46968 940 assert(oopDesc::is_oop_or_null(discovered), "Expected an oop or NULL for discovered field at " PTR_FORMAT, p2i(discovered));
coleenp@360 941 if (discovered != NULL) {
duke@1 942 // The reference has already been discovered...
brutisso@35061 943 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)",
brutisso@35061 944 p2i(obj), obj->klass()->internal_name());
duke@1 945 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@1 946 // assumes that an object is not processed twice;
duke@1 947 // if it's been already discovered it must be on another
duke@1 948 // generation's discovered list; so we won't discover it.
duke@1 949 return false;
duke@1 950 } else {
duke@1 951 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@1 952 "Unrecognized policy");
duke@1 953 // Check assumption that an object is not potentially
duke@1 954 // discovered twice except by concurrent collectors that potentially
duke@1 955 // trace the same Reference object twice.
johnc@7399 956 assert(UseConcMarkSweepGC || UseG1GC,
johnc@7399 957 "Only possible with a concurrent marking collector");
duke@1 958 return true;
duke@1 959 }
duke@1 960 }
duke@1 961
duke@1 962 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
ysr@7420 963 verify_referent(obj);
ysr@10526 964 // Discover if and only if EITHER:
ysr@10526 965 // .. reference is in our span, OR
ysr@10526 966 // .. we are an atomic collector and referent is in our span
tschatzl@50489 967 if (is_subject_to_discovery(obj) ||
ysr@7420 968 (discovery_is_atomic() &&
tschatzl@50489 969 is_subject_to_discovery(java_lang_ref_Reference::referent(obj)))) {
duke@1 970 } else {
duke@1 971 return false;
duke@1 972 }
duke@1 973 } else {
duke@1 974 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
tschatzl@50489 975 is_subject_to_discovery(obj), "code inconsistency");
duke@1 976 }
duke@1 977
duke@1 978 // Get the right type of discovered queue head.
duke@1 979 DiscoveredList* list = get_discovered_list(rt);
duke@1 980 if (list == NULL) {
duke@1 981 return false; // nothing special needs to be done
duke@1 982 }
duke@1 983
duke@1 984 if (_discovery_is_mt) {
duke@1 985 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@1 986 } else {
brutisso@24845 987 // We do a raw store here: the field will be visited later when processing
brutisso@24845 988 // the discovered references.
ysr@1374 989 oop current_head = list->head();
stefank@10524 990 // The last ref must have its discovered field pointing to itself.
stefank@10524 991 oop next_discovered = (current_head != NULL) ? current_head : obj;
stefank@10524 992
ysr@1374 993 assert(discovered == NULL, "control point invariant");
eosterlund@48195 994 RawAccess<>::oop_store(discovered_addr, next_discovered);
duke@1 995 list->set_head(obj);
ysr@1605 996 list->inc_length(1);
duke@1 997
brutisso@35061 998 log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name());
duke@1 999 }
coleenp@46968 1000 assert(oopDesc::is_oop(obj), "Discovered a bad reference");
ysr@7420 1001 verify_referent(obj);
duke@1 1002 return true;
duke@1 1003 }
duke@1 1004
sjohanss@37494 1005 bool ReferenceProcessor::has_discovered_references() {
tschatzl@50492 1006 for (uint i = 0; i < _max_num_queues * number_of_subclasses_of_ref(); i++) {
sjohanss@37494 1007 if (!_discovered_refs[i].is_empty()) {
sjohanss@37494 1008 return true;
sjohanss@37494 1009 }
sjohanss@37494 1010 }
sjohanss@37494 1011 return false;
sjohanss@37494 1012 }
sjohanss@37494 1013
duke@1 1014 // Preclean the discovered references by removing those
duke@1 1015 // whose referents are alive, and by marking from those that
duke@1 1016 // are not active. These lists can be handled here
duke@1 1017 // in any order and, indeed, concurrently.
duke@1 1018 void ReferenceProcessor::preclean_discovered_references(
duke@1 1019 BoolObjectClosure* is_alive,
duke@1 1020 OopClosure* keep_alive,
duke@1 1021 VoidClosure* complete_gc,
sla@18025 1022 YieldClosure* yield,
brutisso@33107 1023 GCTimer* gc_timer) {
duke@1 1024
duke@1 1025 // Soft references
duke@1 1026 {
brutisso@35061 1027 GCTraceTime(Debug, gc, ref) tm("Preclean SoftReferences", gc_timer);
tschatzl@50492 1028 for (uint i = 0; i < _max_num_queues; i++) {
ysr@1605 1029 if (yield->should_return()) {
ysr@1605 1030 return;
ysr@1605 1031 }
duke@1 1032 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@1 1033 keep_alive, complete_gc, yield);
duke@1 1034 }
duke@1 1035 }
duke@1 1036
duke@1 1037 // Weak references
duke@1 1038 {
brutisso@35061 1039 GCTraceTime(Debug, gc, ref) tm("Preclean WeakReferences", gc_timer);
tschatzl@50492 1040 for (uint i = 0; i < _max_num_queues; i++) {
ysr@1605 1041 if (yield->should_return()) {
ysr@1605 1042 return;
ysr@1605 1043 }
duke@1 1044 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@1 1045 keep_alive, complete_gc, yield);
duke@1 1046 }
duke@1 1047 }
duke@1 1048
duke@1 1049 // Final references
duke@1 1050 {
brutisso@35061 1051 GCTraceTime(Debug, gc, ref) tm("Preclean FinalReferences", gc_timer);
tschatzl@50492 1052 for (uint i = 0; i < _max_num_queues; i++) {
ysr@1605 1053 if (yield->should_return()) {
ysr@1605 1054 return;
ysr@1605 1055 }
duke@1 1056 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@1 1057 keep_alive, complete_gc, yield);
duke@1 1058 }
duke@1 1059 }
duke@1 1060
duke@1 1061 // Phantom references
duke@1 1062 {
brutisso@35061 1063 GCTraceTime(Debug, gc, ref) tm("Preclean PhantomReferences", gc_timer);
tschatzl@50492 1064 for (uint i = 0; i < _max_num_queues; i++) {
ysr@1605 1065 if (yield->should_return()) {
ysr@1605 1066 return;
ysr@1605 1067 }
duke@1 1068 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@1 1069 keep_alive, complete_gc, yield);
duke@1 1070 }
duke@1 1071 }
duke@1 1072 }
duke@1 1073
duke@1 1074 // Walk the given discovered ref list, and remove all reference objects
duke@1 1075 // whose referents are still alive, whose referents are NULL or which
ysr@1605 1076 // are not active (have a non-NULL next field). NOTE: When we are
ysr@1605 1077 // thus precleaning the ref lists (which happens single-threaded today),
jwilhelm@22551 1078 // we do not disable refs discovery to honor the correct semantics of
ysr@1605 1079 // java.lang.Reference. As a result, we need to be careful below
ysr@1605 1080 // that ref removal steps interleave safely with ref discovery steps
ysr@1605 1081 // (in this thread).
coleenp@360 1082 void
coleenp@360 1083 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@360 1084 BoolObjectClosure* is_alive,
coleenp@360 1085 OopClosure* keep_alive,
coleenp@360 1086 VoidClosure* complete_gc,
coleenp@360 1087 YieldClosure* yield) {
brutisso@24845 1088 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@1 1089 while (iter.has_next()) {
duke@1 1090 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@360 1091 oop obj = iter.obj();
coleenp@360 1092 oop next = java_lang_ref_Reference::next(obj);
duke@1 1093 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@360 1094 next != NULL) {
duke@1 1095 // The referent has been cleared, or is alive, or the Reference is not
duke@1 1096 // active; we need to trace and mark its cohort.
brutisso@35061 1097 log_develop_trace(gc, ref)("Precleaning Reference (" INTPTR_FORMAT ": %s)",
brutisso@35061 1098 p2i(iter.obj()), iter.obj()->klass()->internal_name());
duke@1 1099 // Remove Reference object from list
duke@1 1100 iter.remove();
duke@1 1101 // Keep alive its cohort.
duke@1 1102 iter.make_referent_alive();
coleenp@360 1103 if (UseCompressedOops) {
eosterlund@49472 1104 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr_raw(obj);
coleenp@360 1105 keep_alive->do_oop(next_addr);
coleenp@360 1106 } else {
eosterlund@49472 1107 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr_raw(obj);
coleenp@360 1108 keep_alive->do_oop(next_addr);
coleenp@360 1109 }
ysr@1605 1110 iter.move_to_next();
duke@1 1111 } else {
duke@1 1112 iter.next();
duke@1 1113 }
duke@1 1114 }
duke@1 1115 // Close the reachable set
duke@1 1116 complete_gc->do_void();
duke@1 1117
duke@1 1118 NOT_PRODUCT(
brutisso@35061 1119 if (iter.processed() > 0) {
brutisso@35061 1120 log_develop_trace(gc, ref)(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT " Refs in discovered list " INTPTR_FORMAT,
jmasa@36198 1121 iter.removed(), iter.processed(), p2i(&refs_list));
duke@1 1122 }
duke@1 1123 )
duke@1 1124 }
duke@1 1125
jmasa@11396 1126 const char* ReferenceProcessor::list_name(uint i) {
tschatzl@50492 1127 assert(i < _max_num_queues * number_of_subclasses_of_ref(),
johnc@10670 1128 "Out of bounds index");
johnc@10670 1129
tschatzl@50492 1130 int j = i / _max_num_queues;
duke@1 1131 switch (j) {
duke@1 1132 case 0: return "SoftRef";
duke@1 1133 case 1: return "WeakRef";
duke@1 1134 case 2: return "FinalRef";
duke@1 1135 case 3: return "PhantomRef";
duke@1 1136 }
duke@1 1137 ShouldNotReachHere();
duke@1 1138 return NULL;
duke@1 1139 }