annotate src/share/vm/gc_implementation/g1/g1MarkSweep.cpp @ 470:ad8c8ca4ab0f

6785258: Update copyright year Summary: Update copyright for files that have been modified starting July 2008 to Dec 2008 Reviewed-by: katleman, ohair, tbell
author xdono
date Mon, 15 Dec 2008 16:55:11 -0800
parents 27a80744a83b
children 96b229c54d1e
rev   line source
ysr@342 1 /*
xdono@470 2 * Copyright 2001-2008 Sun Microsystems, Inc. All Rights Reserved.
ysr@342 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
ysr@342 4 *
ysr@342 5 * This code is free software; you can redistribute it and/or modify it
ysr@342 6 * under the terms of the GNU General Public License version 2 only, as
ysr@342 7 * published by the Free Software Foundation.
ysr@342 8 *
ysr@342 9 * This code is distributed in the hope that it will be useful, but WITHOUT
ysr@342 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
ysr@342 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
ysr@342 12 * version 2 for more details (a copy is included in the LICENSE file that
ysr@342 13 * accompanied this code).
ysr@342 14 *
ysr@342 15 * You should have received a copy of the GNU General Public License version
ysr@342 16 * 2 along with this work; if not, write to the Free Software Foundation,
ysr@342 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
ysr@342 18 *
ysr@342 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
ysr@342 20 * CA 95054 USA or visit www.sun.com if you need additional information or
ysr@342 21 * have any questions.
ysr@342 22 *
ysr@342 23 */
ysr@342 24
ysr@342 25 #include "incls/_precompiled.incl"
ysr@342 26 #include "incls/_g1MarkSweep.cpp.incl"
ysr@342 27
ysr@342 28 class HeapRegion;
ysr@342 29
// Perform a full, stop-the-world mark-sweep-compact collection of the
// entire heap (including the permanent generation) on behalf of G1.
// Drives the four classic mark-sweep phases and does the G1-specific
// bookkeeping around them.
//
// rp                 - reference processor to use for weak reference
//                      processing during this collection; must be non-NULL.
//                      Installed into GenMarkSweep for the duration of the
//                      call and cleared again before returning.
// clear_all_softrefs - policy flag handed to the reference processor via
//                      setup_policy().
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  SharedHeap* sh = SharedHeap::heap();
  sh->perm_gen()->stat_record()->invocations++;

  // NOTE(review): this flag is passed by reference to phase1 but is not
  // read afterwards here.
  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  // Phase 1: mark all live objects (also processes refs, unloads classes).
  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  if (G1VerifyConcMark) {
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    g1h->checkConcurrentMark();
  }

  // Phase 2: compute new addresses (forwarding pointers) for live objects.
  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  // Phase 3: adjust all pointers to refer to the new locations.
  mark_sweep_phase3();

  // Phase 4: actually move the objects.
  mark_sweep_phase4();

  // Undo the mark-word save/restore done before marking began.
  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  // Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  // Must mirror the gc_prologue() calls made above.
  Threads::gc_epilogue();
  CodeCache::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}
ysr@342 93
ysr@342 94
ysr@342 95 void G1MarkSweep::allocate_stacks() {
ysr@342 96 GenMarkSweep::_preserved_count_max = 0;
ysr@342 97 GenMarkSweep::_preserved_marks = NULL;
ysr@342 98 GenMarkSweep::_preserved_count = 0;
ysr@342 99 GenMarkSweep::_preserved_mark_stack = NULL;
ysr@342 100 GenMarkSweep::_preserved_oop_stack = NULL;
ysr@342 101
ysr@342 102 GenMarkSweep::_marking_stack =
ysr@342 103 new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
ysr@342 104
ysr@342 105 size_t size = SystemDictionary::number_of_classes() * 2;
ysr@342 106 GenMarkSweep::_revisit_klass_stack =
ysr@342 107 new (ResourceObj::C_HEAP) GrowableArray<Klass*>((int)size, true);
ysr@342 108 }
ysr@342 109
// Phase 1 of mark-sweep: recursively mark all objects reachable from the
// strong roots, then process discovered references, unload dead classes,
// clean the code cache, and prune the symbol/string tables.
// The ordering of the steps below is significant (see inline comments).
//
// marked_for_unloading - NOTE(review): passed by reference but never
//                        written in this method.
// clear_all_softrefs   - soft reference clearing policy for this cycle.
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                          &GenMarkSweep::keep_alive,
                          purged_class);
  // Drain any marking work generated by code cache unloading.
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&GenMarkSweep::is_alive);
  StringTable::unlink(&GenMarkSweep::is_alive);

  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");
}
ysr@342 156
ysr@342 157 class G1PrepareCompactClosure: public HeapRegionClosure {
ysr@342 158 ModRefBarrierSet* _mrbs;
ysr@342 159 CompactPoint _cp;
ysr@342 160 bool _popular_only;
ysr@342 161
ysr@342 162 void free_humongous_region(HeapRegion* hr) {
ysr@342 163 HeapWord* bot = hr->bottom();
ysr@342 164 HeapWord* end = hr->end();
ysr@342 165 assert(hr->startsHumongous(),
ysr@342 166 "Only the start of a humongous region should be freed.");
ysr@342 167 G1CollectedHeap::heap()->free_region(hr);
ysr@342 168 hr->prepare_for_compaction(&_cp);
ysr@342 169 // Also clear the part of the card table that will be unused after
ysr@342 170 // compaction.
ysr@342 171 _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
ysr@342 172 }
ysr@342 173
ysr@342 174 public:
ysr@342 175 G1PrepareCompactClosure(CompactibleSpace* cs, bool popular_only) :
ysr@342 176 _cp(NULL, cs, cs->initialize_threshold()),
ysr@342 177 _mrbs(G1CollectedHeap::heap()->mr_bs()),
ysr@342 178 _popular_only(popular_only)
ysr@342 179 {}
ysr@342 180 bool doHeapRegion(HeapRegion* hr) {
ysr@342 181 if (_popular_only && !hr->popular())
ysr@342 182 return true; // terminate early
ysr@342 183 else if (!_popular_only && hr->popular())
ysr@342 184 return false; // skip this one.
ysr@342 185
ysr@342 186 if (hr->isHumongous()) {
ysr@342 187 if (hr->startsHumongous()) {
ysr@342 188 oop obj = oop(hr->bottom());
ysr@342 189 if (obj->is_gc_marked()) {
ysr@342 190 obj->forward_to(obj);
ysr@342 191 } else {
ysr@342 192 free_humongous_region(hr);
ysr@342 193 }
ysr@342 194 } else {
ysr@342 195 assert(hr->continuesHumongous(), "Invalid humongous.");
ysr@342 196 }
ysr@342 197 } else {
ysr@342 198 hr->prepare_for_compaction(&_cp);
ysr@342 199 // Also clear the part of the card table that will be unused after
ysr@342 200 // compaction.
ysr@342 201 _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
ysr@342 202 }
ysr@342 203 return false;
ysr@342 204 }
ysr@342 205 };
ysr@342 206 // Stolen verbatim from g1CollectedHeap.cpp
ysr@342 207 class FindFirstRegionClosure: public HeapRegionClosure {
ysr@342 208 HeapRegion* _a_region;
ysr@342 209 bool _find_popular;
ysr@342 210 public:
ysr@342 211 FindFirstRegionClosure(bool find_popular) :
ysr@342 212 _a_region(NULL), _find_popular(find_popular) {}
ysr@342 213 bool doHeapRegion(HeapRegion* r) {
ysr@342 214 if (r->popular() == _find_popular) {
ysr@342 215 _a_region = r;
ysr@342 216 return true;
ysr@342 217 } else {
ysr@342 218 return false;
ysr@342 219 }
ysr@342 220 }
ysr@342 221 HeapRegion* result() { return _a_region; }
ysr@342 222 };
ysr@342 223
ysr@342 224 void G1MarkSweep::mark_sweep_phase2() {
ysr@342 225 // Now all live objects are marked, compute the new object addresses.
ysr@342 226
ysr@342 227 // It is imperative that we traverse perm_gen LAST. If dead space is
ysr@342 228 // allowed a range of dead object may get overwritten by a dead int
ysr@342 229 // array. If perm_gen is not traversed last a klassOop may get
ysr@342 230 // overwritten. This is fine since it is dead, but if the class has dead
ysr@342 231 // instances we have to skip them, and in order to find their size we
ysr@342 232 // need the klassOop!
ysr@342 233 //
ysr@342 234 // It is not required that we traverse spaces in the same order in
ysr@342 235 // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
ysr@342 236 // tracking expects us to do so. See comment under phase4.
ysr@342 237
ysr@342 238 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@342 239 Generation* pg = g1h->perm_gen();
ysr@342 240
ysr@342 241 EventMark m("2 compute new addresses");
ysr@342 242 TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
ysr@342 243 GenMarkSweep::trace("2");
ysr@342 244
ysr@342 245 // First we compact the popular regions.
ysr@342 246 if (G1NumPopularRegions > 0) {
ysr@342 247 CompactibleSpace* sp = g1h->first_compactible_space();
ysr@342 248 FindFirstRegionClosure cl(true /*find_popular*/);
ysr@342 249 g1h->heap_region_iterate(&cl);
ysr@342 250 HeapRegion *r = cl.result();
ysr@342 251 assert(r->popular(), "should have found a popular region.");
ysr@342 252 assert(r == sp, "first popular heap region should "
ysr@342 253 "== first compactible space");
ysr@342 254 G1PrepareCompactClosure blk(sp, true/*popular_only*/);
ysr@342 255 g1h->heap_region_iterate(&blk);
ysr@342 256 }
ysr@342 257
ysr@342 258 // Now we do the regular regions.
ysr@342 259 FindFirstRegionClosure cl(false /*find_popular*/);
ysr@342 260 g1h->heap_region_iterate(&cl);
ysr@342 261 HeapRegion *r = cl.result();
ysr@342 262 assert(!r->popular(), "should have founda non-popular region.");
ysr@342 263 CompactibleSpace* sp = r;
ysr@342 264 if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
ysr@342 265 sp = r->next_compaction_space();
ysr@342 266 }
ysr@342 267
ysr@342 268 G1PrepareCompactClosure blk(sp, false/*popular_only*/);
ysr@342 269 g1h->heap_region_iterate(&blk);
ysr@342 270
ysr@342 271 CompactPoint perm_cp(pg, NULL, NULL);
ysr@342 272 pg->prepare_for_compaction(&perm_cp);
ysr@342 273 }
ysr@342 274
ysr@342 275 class G1AdjustPointersClosure: public HeapRegionClosure {
ysr@342 276 public:
ysr@342 277 bool doHeapRegion(HeapRegion* r) {
ysr@342 278 if (r->isHumongous()) {
ysr@342 279 if (r->startsHumongous()) {
ysr@342 280 // We must adjust the pointers on the single H object.
ysr@342 281 oop obj = oop(r->bottom());
ysr@342 282 debug_only(GenMarkSweep::track_interior_pointers(obj));
ysr@342 283 // point all the oops to the new location
ysr@342 284 obj->adjust_pointers();
ysr@342 285 debug_only(GenMarkSweep::check_interior_pointers());
ysr@342 286 }
ysr@342 287 } else {
ysr@342 288 // This really ought to be "as_CompactibleSpace"...
ysr@342 289 r->adjust_pointers();
ysr@342 290 }
ysr@342 291 return false;
ysr@342 292 }
ysr@342 293 };
ysr@342 294
// Phase 3 of mark-sweep: adjust all pointers -- in strong roots, weak
// roots, every heap region, and the permanent generation -- to refer to
// the forwarding addresses computed in phase 2.
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           &GenMarkSweep::adjust_pointer_closure);

  // Adjust discovered-reference lists held by the reference processor.
  g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  // Finally adjust the pointers inside every region and in perm gen.
  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}
ysr@342 324
ysr@342 325 class G1SpaceCompactClosure: public HeapRegionClosure {
ysr@342 326 public:
ysr@342 327 G1SpaceCompactClosure() {}
ysr@342 328
ysr@342 329 bool doHeapRegion(HeapRegion* hr) {
ysr@342 330 if (hr->isHumongous()) {
ysr@342 331 if (hr->startsHumongous()) {
ysr@342 332 oop obj = oop(hr->bottom());
ysr@342 333 if (obj->is_gc_marked()) {
ysr@342 334 obj->init_mark();
ysr@342 335 } else {
ysr@342 336 assert(hr->is_empty(), "Should have been cleared in phase 2.");
ysr@342 337 }
ysr@342 338 hr->reset_during_compaction();
ysr@342 339 }
ysr@342 340 } else {
ysr@342 341 hr->compact();
ysr@342 342 }
ysr@342 343 return false;
ysr@342 344 }
ysr@342 345 };
ysr@342 346
// Phase 4 of mark-sweep: move every live object to its forwarding address.
// Perm gen is compacted FIRST (see the comment below for why).
void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through it's klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  // Perm gen first -- see the comment above.
  pg->compact();

  // Then slide the objects in every heap region.
  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);

}
ysr@342 372
ysr@342 373 // Local Variables: ***
ysr@342 374 // c-indentation-style: gnu ***
ysr@342 375 // End: ***