annotate src/share/vm/gc_implementation/g1/heapRegion.cpp @ 2165:0fa27f37d4d4

6977804: G1: remove the zero-filling thread Summary: This changeset removes the zero-filling thread from G1 and collapses the two free region lists we had before (the "free" and "unclean" lists) into one. The new free list uses the new heap region sets / lists abstractions that we'll ultimately use to keep track of all regions in the heap. A heap region set was also introduced for the humongous regions. Finally, this change increases the concurrency between the thread that completes freeing regions (after a cleanup pause) and the rest of the system (before, we'd have to wait for said thread to complete before allocating a new region). The changeset also includes a lot of refactoring and code simplification. Reviewed-by: jcoomes, johnc
author tonyp
date Wed, 19 Jan 2011 19:30:42 -0500
parents 2250ee17e258
children abdfc822206f
rev   line source
ysr@345 1 /*
tonyp@2146 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
ysr@345 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
ysr@345 4 *
ysr@345 5 * This code is free software; you can redistribute it and/or modify it
ysr@345 6 * under the terms of the GNU General Public License version 2 only, as
ysr@345 7 * published by the Free Software Foundation.
ysr@345 8 *
ysr@345 9 * This code is distributed in the hope that it will be useful, but WITHOUT
ysr@345 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
ysr@345 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
ysr@345 12 * version 2 for more details (a copy is included in the LICENSE file that
ysr@345 13 * accompanied this code).
ysr@345 14 *
ysr@345 15 * You should have received a copy of the GNU General Public License version
ysr@345 16 * 2 along with this work; if not, write to the Free Software Foundation,
ysr@345 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
ysr@345 18 *
trims@1563 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1563 20 * or visit www.oracle.com if you need additional information or have any
trims@1563 21 * questions.
ysr@345 22 *
ysr@345 23 */
ysr@345 24
stefank@1992 25 #include "precompiled.hpp"
stefank@1992 26 #include "gc_implementation/g1/g1BlockOffsetTable.inline.hpp"
stefank@1992 27 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
stefank@1992 28 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
stefank@1992 29 #include "gc_implementation/g1/heapRegion.inline.hpp"
stefank@1992 30 #include "gc_implementation/g1/heapRegionRemSet.hpp"
stefank@1992 31 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
stefank@1992 32 #include "memory/genOopClosures.inline.hpp"
stefank@1992 33 #include "memory/iterator.hpp"
stefank@1992 34 #include "oops/oop.inline.hpp"
ysr@345 35
// Static region-size globals. All start at 0 ("not yet initialized")
// and are set exactly once by setup_heap_region_size() (which
// guarantees they are still 0 when it runs).
int HeapRegion::LogOfHRGrainBytes = 0;
int HeapRegion::LogOfHRGrainWords = 0;
int HeapRegion::GrainBytes = 0;
int HeapRegion::GrainWords = 0;
int HeapRegion::CardsPerRegion = 0;
tonyp@996 41
// Dirty-card-to-oop closure for a single heap region. Delegates to
// ContiguousSpaceDCTOC with a NULL boundary and records the region,
// the filter kind, and the heap for use in walk_mem_region_with_cl().
HeapRegionDCTOC::HeapRegionDCTOC(G1CollectedHeap* g1,
                                 HeapRegion* hr, OopClosure* cl,
                                 CardTableModRefBS::PrecisionStyle precision,
                                 FilterKind fk) :
  ContiguousSpaceDCTOC(hr, cl, precision, NULL),
  _hr(hr), _fk(fk), _g1(g1)
{}
ysr@345 49
// Caches the [bottom, end) bounds of region r for the containment
// test; presumably do_oop (defined in the header) forwards to _oc only
// for fields whose referents lie outside those bounds — confirm there.
FilterOutOfRegionClosure::FilterOutOfRegionClosure(HeapRegion* r,
                                                   OopClosure* oc) :
  _r_bottom(r->bottom()), _r_end(r->end()),
  _oc(oc), _out_of_region(0)
{}
ysr@345 55
// Closure used during heap verification. For each reference field of a
// live object it checks that the referent is inside the heap and not
// dead; outside of a full collection it additionally checks that a
// cross-region reference into a non-young, non-humongous region is
// recorded in the destination region's remembered set (unless the
// relevant card is still dirty, i.e. not yet refined). Failures are
// logged and counted rather than asserted immediately.
class VerifyLiveClosure: public OopClosure {
private:
  G1CollectedHeap* _g1h;
  CardTableModRefBS* _bs;   // card table, consulted to excuse dirty cards
  oop _containing_obj;      // object whose fields are being scanned
  bool _failures;           // has any failure been seen so far?
  int _n_failures;          // total number of failures found
  bool _use_prev_marking;   // which marking information decides liveness
public:
  // use_prev_marking == true -> use "prev" marking information,
  // use_prev_marking == false -> use "next" marking information
  VerifyLiveClosure(G1CollectedHeap* g1h, bool use_prev_marking) :
    _g1h(g1h), _bs(NULL), _containing_obj(NULL),
    _failures(false), _n_failures(0), _use_prev_marking(use_prev_marking)
  {
    BarrierSet* bs = _g1h->barrier_set();
    // Only a card-table barrier set gives us per-card dirty state.
    if (bs->is_a(BarrierSet::CardTableModRef))
      _bs = (CardTableModRefBS*)bs;
  }

  // Must be called before iterating each object's fields.
  void set_containing_obj(oop obj) {
    _containing_obj = obj;
  }

  bool failures() { return _failures; }
  int n_failures() { return _n_failures; }

  virtual void do_oop(narrowOop* p) { do_oop_work(p); }
  virtual void do_oop( oop* p) { do_oop_work(p); }

  // Prints obj for a failure message. In product builds full oop
  // printing is not available, so only the class name is printed.
  void print_object(outputStream* out, oop obj) {
#ifdef PRODUCT
    klassOop k = obj->klass();
    const char* class_name = instanceKlass::cast(k)->external_name();
    out->print_cr("class name %s", class_name);
#else // PRODUCT
    obj->print_on(out);
#endif // PRODUCT
  }

  // Verifies a single reference field p of _containing_obj (see the
  // class comment for what is checked).
  template <class T> void do_oop_work(T* p) {
    assert(_containing_obj != NULL, "Precondition");
    assert(!_g1h->is_obj_dead_cond(_containing_obj, _use_prev_marking),
           "Precondition");
    T heap_oop = oopDesc::load_heap_oop(p);
    if (!oopDesc::is_null(heap_oop)) {
      oop obj = oopDesc::decode_heap_oop_not_null(heap_oop);
      bool failed = false;
      if (!_g1h->is_in_closed_subset(obj) ||
          _g1h->is_obj_dead_cond(obj, _use_prev_marking)) {
        if (!_failures) {
          // First failure: print a separator header.
          gclog_or_tty->print_cr("");
          gclog_or_tty->print_cr("----------");
        }
        if (!_g1h->is_in_closed_subset(obj)) {
          // Case 1: the referent is not in the heap at all.
          HeapRegion* from = _g1h->heap_region_containing((HeapWord*)p);
          gclog_or_tty->print_cr("Field "PTR_FORMAT
                                 " of live obj "PTR_FORMAT" in region "
                                 "["PTR_FORMAT", "PTR_FORMAT")",
                                 p, (void*) _containing_obj,
                                 from->bottom(), from->end());
          print_object(gclog_or_tty, _containing_obj);
          gclog_or_tty->print_cr("points to obj "PTR_FORMAT" not in the heap",
                                 (void*) obj);
        } else {
          // Case 2: the referent is in the heap but dead.
          HeapRegion* from = _g1h->heap_region_containing((HeapWord*)p);
          HeapRegion* to = _g1h->heap_region_containing((HeapWord*)obj);
          gclog_or_tty->print_cr("Field "PTR_FORMAT
                                 " of live obj "PTR_FORMAT" in region "
                                 "["PTR_FORMAT", "PTR_FORMAT")",
                                 p, (void*) _containing_obj,
                                 from->bottom(), from->end());
          print_object(gclog_or_tty, _containing_obj);
          gclog_or_tty->print_cr("points to dead obj "PTR_FORMAT" in region "
                                 "["PTR_FORMAT", "PTR_FORMAT")",
                                 (void*) obj, to->bottom(), to->end());
          print_object(gclog_or_tty, obj);
        }
        gclog_or_tty->print_cr("----------");
        _failures = true;
        failed = true;
        _n_failures++;
      }

      // Remembered-set check; skipped during a full collection.
      if (!_g1h->full_collection()) {
        HeapRegion* from = _g1h->heap_region_containing((HeapWord*)p);
        HeapRegion* to = _g1h->heap_region_containing(obj);
        if (from != NULL && to != NULL &&
            from != to &&
            !to->isHumongous()) {
          jbyte cv_obj = *_bs->byte_for_const(_containing_obj);
          jbyte cv_field = *_bs->byte_for_const(p);
          const jbyte dirty = CardTableModRefBS::dirty_card_val();

          // A missing entry is excused if the source region is young,
          // or if the card covering the field (for obj arrays) or the
          // object header (otherwise) may still be sitting dirty in an
          // unflushed log buffer.
          bool is_bad = !(from->is_young()
                          || to->rem_set()->contains_reference(p)
                          || !G1HRRSFlushLogBuffersOnVerify && // buffers were not flushed
                              (_containing_obj->is_objArray() ?
                                  cv_field == dirty
                               : cv_obj == dirty || cv_field == dirty));
          if (is_bad) {
            if (!_failures) {
              gclog_or_tty->print_cr("");
              gclog_or_tty->print_cr("----------");
            }
            gclog_or_tty->print_cr("Missing rem set entry:");
            gclog_or_tty->print_cr("Field "PTR_FORMAT
                                   " of obj "PTR_FORMAT
                                   ", in region %d ["PTR_FORMAT
                                   ", "PTR_FORMAT"),",
                                   p, (void*) _containing_obj,
                                   from->hrs_index(),
                                   from->bottom(),
                                   from->end());
            _containing_obj->print_on(gclog_or_tty);
            gclog_or_tty->print_cr("points to obj "PTR_FORMAT
                                   " in region %d ["PTR_FORMAT
                                   ", "PTR_FORMAT").",
                                   (void*) obj, to->hrs_index(),
                                   to->bottom(), to->end());
            obj->print_on(gclog_or_tty);
            gclog_or_tty->print_cr("Obj head CTE = %d, field CTE = %d.",
                                   cv_obj, cv_field);
            gclog_or_tty->print_cr("----------");
            _failures = true;
            // Count at most one failure per field.
            if (!failed) _n_failures++;
          }
        }
      }
    }
  }
};
ysr@345 188
ysr@345 189 template<class ClosureType>
ysr@345 190 HeapWord* walk_mem_region_loop(ClosureType* cl, G1CollectedHeap* g1h,
ysr@345 191 HeapRegion* hr,
ysr@345 192 HeapWord* cur, HeapWord* top) {
ysr@345 193 oop cur_oop = oop(cur);
ysr@345 194 int oop_size = cur_oop->size();
ysr@345 195 HeapWord* next_obj = cur + oop_size;
ysr@345 196 while (next_obj < top) {
ysr@345 197 // Keep filtering the remembered set.
ysr@345 198 if (!g1h->is_obj_dead(cur_oop, hr)) {
ysr@345 199 // Bottom lies entirely below top, so we can call the
ysr@345 200 // non-memRegion version of oop_iterate below.
ysr@345 201 cur_oop->oop_iterate(cl);
ysr@345 202 }
ysr@345 203 cur = next_obj;
ysr@345 204 cur_oop = oop(cur);
ysr@345 205 oop_size = cur_oop->size();
ysr@345 206 next_obj = cur + oop_size;
ysr@345 207 }
ysr@345 208 return cur;
ysr@345 209 }
ysr@345 210
// Applies cl — wrapped in the filter selected by _fk — to the oops of
// the live objects starting in [bottom, top). The first and last
// objects may extend beyond that range, so they are iterated only over
// mr; the fully-contained middle objects go through
// walk_mem_region_loop.
void HeapRegionDCTOC::walk_mem_region_with_cl(MemRegion mr,
                                              HeapWord* bottom,
                                              HeapWord* top,
                                              OopClosure* cl) {
  G1CollectedHeap* g1h = _g1;

  int oop_size;

  // Select the filtering wrapper for cl according to the filter kind.
  OopClosure* cl2 = cl;
  FilterIntoCSClosure intoCSFilt(this, g1h, cl);
  FilterOutOfRegionClosure outOfRegionFilt(_hr, cl);
  switch (_fk) {
  case IntoCSFilterKind:      cl2 = &intoCSFilt;      break;
  case OutOfRegionFilterKind: cl2 = &outOfRegionFilt; break;
  }

  // Start filtering what we add to the remembered set. If the object is
  // not considered dead, either because it is marked (in the mark bitmap)
  // or it was allocated after marking finished, then we add it. Otherwise
  // we can safely ignore the object.
  if (!g1h->is_obj_dead(oop(bottom), _hr)) {
    oop_size = oop(bottom)->oop_iterate(cl2, mr);
  } else {
    oop_size = oop(bottom)->size();
  }

  bottom += oop_size;

  if (bottom < top) {
    // We replicate the loop below for several kinds of possible filters.
    // (The concrete filter type is passed so the template can inline
    // its do_oop.)
    switch (_fk) {
    case NoFilterKind:
      bottom = walk_mem_region_loop(cl, g1h, _hr, bottom, top);
      break;
    case IntoCSFilterKind: {
      FilterIntoCSClosure filt(this, g1h, cl);
      bottom = walk_mem_region_loop(&filt, g1h, _hr, bottom, top);
      break;
    }
    case OutOfRegionFilterKind: {
      FilterOutOfRegionClosure filt(_hr, cl);
      bottom = walk_mem_region_loop(&filt, g1h, _hr, bottom, top);
      break;
    }
    default:
      ShouldNotReachHere();
    }

    // Last object. Need to do dead-obj filtering here too.
    if (!g1h->is_obj_dead(oop(bottom), _hr)) {
      oop(bottom)->oop_iterate(cl2, mr);
    }
  }
}
ysr@345 265
tonyp@996 266 // Minimum region size; we won't go lower than that.
tonyp@996 267 // We might want to decrease this in the future, to deal with small
tonyp@996 268 // heaps a bit more efficiently.
tonyp@996 269 #define MIN_REGION_SIZE ( 1024 * 1024 )
tonyp@996 270
tonyp@996 271 // Maximum region size; we don't go higher than that. There's a good
tonyp@996 272 // reason for having an upper bound. We don't want regions to get too
tonyp@996 273 // large, otherwise cleanup's effectiveness would decrease as there
tonyp@996 274 // will be fewer opportunities to find totally empty regions after
tonyp@996 275 // marking.
tonyp@996 276 #define MAX_REGION_SIZE ( 32 * 1024 * 1024 )
tonyp@996 277
tonyp@996 278 // The automatic region size calculation will try to have around this
tonyp@996 279 // many regions in the heap (based on the min heap size).
tonyp@996 280 #define TARGET_REGION_NUMBER 2048
tonyp@996 281
// Computes and installs the static region-size globals (GrainBytes,
// GrainWords, their logs, and CardsPerRegion). The size comes from
// G1HeapRegionSize when set on the command line; otherwise it is
// derived from the min heap size aiming at about TARGET_REGION_NUMBER
// regions. The result is rounded down to a power of 2 and clamped to
// [MIN_REGION_SIZE, MAX_REGION_SIZE]. Must be called exactly once
// (enforced by the guarantees below).
void HeapRegion::setup_heap_region_size(uintx min_heap_size) {
  // region_size in bytes
  uintx region_size = G1HeapRegionSize;
  if (FLAG_IS_DEFAULT(G1HeapRegionSize)) {
    // We base the automatic calculation on the min heap size. This
    // can be problematic if the spread between min and max is quite
    // wide, imagine -Xms128m -Xmx32g. But, if we decided it based on
    // the max size, the region size might be way too large for the
    // min size. Either way, some users might have to set the region
    // size manually for some -Xms / -Xmx combos.

    region_size = MAX2(min_heap_size / TARGET_REGION_NUMBER,
                       (uintx) MIN_REGION_SIZE);
  }

  int region_size_log = log2_long((jlong) region_size);
  // Recalculate the region size to make sure it's a power of
  // 2. This means that region_size is the largest power of 2 that's
  // <= what we've calculated so far.
  region_size = ((uintx)1 << region_size_log);

  // Now make sure that we don't go over or under our limits.
  if (region_size < MIN_REGION_SIZE) {
    region_size = MIN_REGION_SIZE;
  } else if (region_size > MAX_REGION_SIZE) {
    region_size = MAX_REGION_SIZE;
  }

  // And recalculate the log (clamping may have changed the size).
  region_size_log = log2_long((jlong) region_size);

  // Now, set up the globals.
  guarantee(LogOfHRGrainBytes == 0, "we should only set it once");
  LogOfHRGrainBytes = region_size_log;

  guarantee(LogOfHRGrainWords == 0, "we should only set it once");
  LogOfHRGrainWords = LogOfHRGrainBytes - LogHeapWordSize;

  guarantee(GrainBytes == 0, "we should only set it once");
  // The cast to int is safe, given that we've bounded region_size by
  // MIN_REGION_SIZE and MAX_REGION_SIZE.
  GrainBytes = (int) region_size;

  guarantee(GrainWords == 0, "we should only set it once");
  GrainWords = GrainBytes >> LogHeapWordSize;
  guarantee(1 << LogOfHRGrainWords == GrainWords, "sanity");

  guarantee(CardsPerRegion == 0, "we should only set it once");
  CardsPerRegion = GrainBytes >> CardTableModRefBS::card_shift;
}
tonyp@996 332
// Called after a full-GC compaction: resets the underlying space and
// discards all per-region marking information.
void HeapRegion::reset_after_compaction() {
  G1OffsetTableContigSpace::reset_after_compaction();
  // After a compaction the mark bitmap is invalid, so we must
  // treat all objects as being inside the unmarked area.
  zero_marked_bytes();
  init_top_at_mark_start();
}
ysr@345 340
// Factory for a region-local dirty-card-to-oop closure with the given
// precision and filter kind. Caller owns the returned closure.
DirtyCardToOopClosure*
HeapRegion::new_dcto_closure(OopClosure* cl,
                             CardTableModRefBS::PrecisionStyle precision,
                             HeapRegionDCTOC::FilterKind fk) {
  return new HeapRegionDCTOC(G1CollectedHeap::heap(),
                             this, cl, precision, fk);
}
ysr@345 348
// Resets the region's collection-related state: CSet / GC-alloc flags,
// young state, remembered set, claim value, and marking information.
// Humongous regions must have been reverted (set_notHumongous()) before
// this is called. If par is true, the remembered set and claim value
// are left for the (parallel) caller to clear later. If clear_space is
// true, the underlying space contents are cleared (with mangling) too.
void HeapRegion::hr_clear(bool par, bool clear_space) {
  assert(_humongous_type == NotHumongous,
         "we should have already filtered out humongous regions");
  assert(_humongous_start_region == NULL,
         "we should have already filtered out humongous regions");
  assert(_end == _orig_end,
         "we should have already filtered out humongous regions");

  _in_collection_set = false;
  _is_gc_alloc_region = false;

  set_young_index_in_cset(-1);
  uninstall_surv_rate_group();
  set_young_type(NotYoung);

  if (!par) {
    // If this is parallel, this will be done later.
    HeapRegionRemSet* hrrs = rem_set();
    if (hrrs != NULL) hrrs->clear();
    _claimed = InitialClaimValue;
  }
  zero_marked_bytes();
  set_sort_index(-1);

  _offsets.resize(HeapRegion::GrainWords);
  init_top_at_mark_start();
  if (clear_space) clear(SpaceDecorator::Mangle);
}
ysr@345 377
// <PREDICTION>
// GC efficiency for this region: bytes of garbage reclaimable per ms
// of predicted collection time (second argument to the predictor is
// false — presumably "not young"; confirm against its declaration).
void HeapRegion::calc_gc_efficiency() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  _gc_efficiency = (double) garbage_bytes() /
                            g1h->predict_region_elapsed_time_ms(this, false);
}
// </PREDICTION>
ysr@345 385
// Turns this empty, normal region into the first ("starts humongous")
// region of a humongous object. new_end is the region's new end (set
// past _orig_end when the object spills into following regions, per
// set_notHumongous()); new_top is passed to the offset table as the
// end of the humongous object.
void HeapRegion::set_startsHumongous(HeapWord* new_top, HeapWord* new_end) {
  assert(!isHumongous(), "sanity / pre-condition");
  assert(end() == _orig_end,
         "Should be normal before the humongous object allocation");
  assert(top() == bottom(), "should be empty");
  assert(bottom() <= new_top && new_top <= new_end, "pre-condition");

  _humongous_type = StartsHumongous;
  _humongous_start_region = this;

  set_end(new_end);
  _offsets.set_for_starts_humongous(new_top);
}
tonyp@1911 399
// Turns this empty, normal region into a "continues humongous" region:
// a follow-on region of the humongous object that starts in first_hr.
void HeapRegion::set_continuesHumongous(HeapRegion* first_hr) {
  assert(!isHumongous(), "sanity / pre-condition");
  assert(end() == _orig_end,
         "Should be normal before the humongous object allocation");
  assert(top() == bottom(), "should be empty");
  assert(first_hr->startsHumongous(), "pre-condition");

  _humongous_type = ContinuesHumongous;
  _humongous_start_region = first_hr;
}
ysr@345 410
// Reverts a humongous region back to a normal one. A "starts
// humongous" region gets its original end restored (capping top at
// that end when the object spilled into subsequent regions);
// "continues humongous" regions never had their end changed.
void HeapRegion::set_notHumongous() {
  assert(isHumongous(), "pre-condition");

  if (startsHumongous()) {
    assert(top() <= end(), "pre-condition");
    set_end(_orig_end);
    if (top() > end()) {
      // at least one "continues humongous" region after it
      set_top(end());
    }
  } else {
    // continues humongous
    assert(end() == _orig_end, "sanity");
  }

  assert(capacity() == (size_t) HeapRegion::GrainBytes, "pre-condition");
  _humongous_type = NotHumongous;
  _humongous_start_region = NULL;
}
tonyp@2165 430
ysr@345 431 bool HeapRegion::claimHeapRegion(jint claimValue) {
ysr@345 432 jint current = _claimed;
ysr@345 433 if (current != claimValue) {
ysr@345 434 jint res = Atomic::cmpxchg(claimValue, &_claimed, current);
ysr@345 435 if (res == current) {
ysr@345 436 return true;
ysr@345 437 }
ysr@345 438 }
ysr@345 439 return false;
ysr@345 440 }
ysr@345 441
// Binary-searches for the start of the first block at or after addr,
// using block_start_careful (presumably a BOT-read-only probe —
// confirm in the space implementation). Returns an address in
// [addr, end()].
HeapWord* HeapRegion::next_block_start_careful(HeapWord* addr) {
  HeapWord* low = addr;
  HeapWord* high = end();
  while (low < high) {
    size_t diff = pointer_delta(high, low);
    // Must add one below to bias toward the high amount. Otherwise, if
    // "high" were at the desired value, and "low" were one less, we
    // would not converge on "high". This is not symmetric, because
    // we set "high" to a block start, which might be the right one,
    // which we don't do for "low".
    HeapWord* middle = low + (diff+1)/2;
    if (middle == high) return high;
    HeapWord* mid_bs = block_start_careful(middle);
    if (mid_bs < addr) {
      low = middle;
    } else {
      high = mid_bs;
    }
  }
  assert(low == high && low >= addr, "Didn't work.");
  return low;
}
ysr@345 464
// (Re)initializes the region over mr. The space itself is told not to
// clear here (false); hr_clear() below performs the clearing (when
// clear_space is set) along with resetting all region state.
void HeapRegion::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
  G1OffsetTableContigSpace::initialize(mr, false, mangle_space);
  hr_clear(false/*par*/, clear_space);
}
ysr@345 469 #ifdef _MSC_VER // the use of 'this' below gets a warning, make it go away
ysr@345 470 #pragma warning( disable:4355 ) // 'this' : used in base member initializer list
ysr@345 471 #endif // _MSC_VER
ysr@345 472
ysr@345 473
// Constructs a HeapRegion covering mr. All fields start in their
// "cleared" state; initialize() (called in the body) resets the space
// and does a non-parallel hr_clear(). is_zeroed tells the offset-table
// space whether the underlying memory is already zero-filled.
HeapRegion::
HeapRegion(G1BlockOffsetSharedArray* sharedOffsetArray,
           MemRegion mr, bool is_zeroed)
  : G1OffsetTableContigSpace(sharedOffsetArray, mr, is_zeroed),
    _next_fk(HeapRegionDCTOC::NoFilterKind),
    _hrs_index(-1),
    _humongous_type(NotHumongous), _humongous_start_region(NULL),
    _in_collection_set(false), _is_gc_alloc_region(false),
    _next_in_special_set(NULL), _orig_end(NULL),
    _claimed(InitialClaimValue), _evacuation_failed(false),
    _prev_marked_bytes(0), _next_marked_bytes(0), _sort_index(-1),
    _young_type(NotYoung), _next_young_region(NULL),
    _next_dirty_cards_region(NULL), _next(NULL), _pending_removal(false),
#ifdef ASSERT
    _containing_set(NULL),
#endif // ASSERT
    _young_index_in_cset(-1), _surv_rate_group(NULL), _age_index(-1),
    _rem_set(NULL), _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
    _predicted_bytes_to_copy(0)
{
  _orig_end = mr.end();
  // Note that initialize() will set the start of the unmarked area of the
  // region.
  this->initialize(mr, !is_zeroed, SpaceDecorator::Mangle);
  set_top(bottom());
  set_saved_mark();

  _rem_set = new HeapRegionRemSet(sharedOffsetArray, this);

  assert(HeapRegionRemSet::num_par_rem_sets() > 0, "Invariant.");
  // In case the region is allocated during a pause, note the top.
  // We haven't done any counting on a brand new region.
  _top_at_conc_mark_count = bottom();
}
ysr@345 508
ysr@345 509 class NextCompactionHeapRegionClosure: public HeapRegionClosure {
ysr@345 510 const HeapRegion* _target;
ysr@345 511 bool _target_seen;
ysr@345 512 HeapRegion* _last;
ysr@345 513 CompactibleSpace* _res;
ysr@345 514 public:
ysr@345 515 NextCompactionHeapRegionClosure(const HeapRegion* target) :
ysr@345 516 _target(target), _target_seen(false), _res(NULL) {}
ysr@345 517 bool doHeapRegion(HeapRegion* cur) {
ysr@345 518 if (_target_seen) {
ysr@345 519 if (!cur->isHumongous()) {
ysr@345 520 _res = cur;
ysr@345 521 return true;
ysr@345 522 }
ysr@345 523 } else if (cur == _target) {
ysr@345 524 _target_seen = true;
ysr@345 525 }
ysr@345 526 return false;
ysr@345 527 }
ysr@345 528 CompactibleSpace* result() { return _res; }
ysr@345 529 };
ysr@345 530
// Returns the next space to compact into after this region: the first
// non-humongous region following this one in heap iteration order, or
// NULL if there is none.
CompactibleSpace* HeapRegion::next_compaction_space() const {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  // cast away const-ness
  HeapRegion* r = (HeapRegion*) this;
  NextCompactionHeapRegionClosure blk(r);
  g1h->heap_region_iterate_from(r, &blk);
  return blk.result();
}
ysr@345 539
// Records the current top as the saved mark (consumed by
// oop_since_save_marks_iterate* and used_region_at_save_marks()).
void HeapRegion::save_marks() {
  set_saved_mark();
}
ysr@345 543
ysr@345 544 void HeapRegion::oops_in_mr_iterate(MemRegion mr, OopClosure* cl) {
ysr@345 545 HeapWord* p = mr.start();
ysr@345 546 HeapWord* e = mr.end();
ysr@345 547 oop obj;
ysr@345 548 while (p < e) {
ysr@345 549 obj = oop(p);
ysr@345 550 p += obj->oop_iterate(cl);
ysr@345 551 }
ysr@345 552 assert(p == e, "bad memregion: doesn't end on obj boundary");
ysr@345 553 }
ysr@345 554
ysr@345 555 #define HeapRegion_OOP_SINCE_SAVE_MARKS_DEFN(OopClosureType, nv_suffix) \
ysr@345 556 void HeapRegion::oop_since_save_marks_iterate##nv_suffix(OopClosureType* cl) { \
ysr@345 557 ContiguousSpace::oop_since_save_marks_iterate##nv_suffix(cl); \
ysr@345 558 }
ysr@345 559 SPECIALIZED_SINCE_SAVE_MARKS_CLOSURES(HeapRegion_OOP_SINCE_SAVE_MARKS_DEFN)
ysr@345 560
ysr@345 561
// Applies cl to the oops of all objects allocated before the saved
// mark (i.e. in [bottom, saved_mark_word())).
void HeapRegion::oop_before_save_marks_iterate(OopClosure* cl) {
  oops_in_mr_iterate(MemRegion(bottom(), saved_mark_word()), cl);
}
ysr@345 565
// Applies cl to every live object intersecting mr, tolerating
// unparseable points (objects whose klass is not yet installed).
// Returns the address of the first unparseable object, the current
// object's address if the closure aborts, or NULL when the whole range
// was processed (or mr doesn't intersect the used part of the region).
HeapWord*
HeapRegion::object_iterate_mem_careful(MemRegion mr,
                                                 ObjectClosure* cl) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  // We used to use "block_start_careful" here. But we're actually happy
  // to update the BOT while we do this...
  HeapWord* cur = block_start(mr.start());
  mr = mr.intersection(used_region());
  if (mr.is_empty()) return NULL;
  // Otherwise, find the obj that extends onto mr.start().

  assert(cur <= mr.start()
         && (oop(cur)->klass_or_null() == NULL ||
             cur + oop(cur)->size() > mr.start()),
         "postcondition of block_start");
  oop obj;
  while (cur < mr.end()) {
    obj = oop(cur);
    if (obj->klass_or_null() == NULL) {
      // Ran into an unparseable point.
      return cur;
    } else if (!g1h->is_obj_dead(obj)) {
      cl->do_object(obj);
    }
    if (cl->abort()) return cur;
    // The check above must occur before the operation below, since an
    // abort might invalidate the "size" operation.
    cur += obj->size();
  }
  return NULL;
}
ysr@345 597
// Applies cl to the oops of all live objects intersecting mr (a card's
// worth of this region), tolerating unparseable points: returns the
// address of the first unparseable object (the caller can retry
// later), or NULL on full success. When filter_young is true, young
// regions are skipped entirely (returning NULL).
HeapWord*
HeapRegion::
oops_on_card_seq_iterate_careful(MemRegion mr,
                                 FilterOutOfRegionClosure* cl,
                                 bool filter_young) {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // If we're within a stop-world GC, then we might look at a card in a
  // GC alloc region that extends onto a GC LAB, which may not be
  // parseable. Stop such at the "saved_mark" of the region.
  if (G1CollectedHeap::heap()->is_gc_active()) {
    mr = mr.intersection(used_region_at_save_marks());
  } else {
    mr = mr.intersection(used_region());
  }
  if (mr.is_empty()) return NULL;
  // Otherwise, find the obj that extends onto mr.start().

  // The intersection of the incoming mr (for the card) and the
  // allocated part of the region is non-empty. This implies that
  // we have actually allocated into this region. The code in
  // G1CollectedHeap.cpp that allocates a new region sets the
  // is_young tag on the region before allocating. Thus we
  // safely know if this region is young.
  if (is_young() && filter_young) {
    return NULL;
  }

  assert(!is_young(), "check value of filter_young");

  // We used to use "block_start_careful" here. But we're actually happy
  // to update the BOT while we do this...
  HeapWord* cur = block_start(mr.start());
  assert(cur <= mr.start(), "Postcondition");

  // Skip forward to the object that extends onto mr.start().
  while (cur <= mr.start()) {
    if (oop(cur)->klass_or_null() == NULL) {
      // Ran into an unparseable point.
      return cur;
    }
    // Otherwise...
    int sz = oop(cur)->size();
    if (cur + sz > mr.start()) break;
    // Otherwise, go on.
    cur = cur + sz;
  }
  oop obj;
  obj = oop(cur);
  // If we finish this loop...
  assert(cur <= mr.start()
         && obj->klass_or_null() != NULL
         && cur + obj->size() > mr.start(),
         "Loop postcondition");
  // The first object may start before mr, so bound its iteration by mr.
  if (!g1h->is_obj_dead(obj)) {
    obj->oop_iterate(cl, mr);
  }

  HeapWord* next;
  while (cur < mr.end()) {
    obj = oop(cur);
    if (obj->klass_or_null() == NULL) {
      // Ran into an unparseable point.
      return cur;
    };
    // Otherwise:
    next = (cur + obj->size());
    if (!g1h->is_obj_dead(obj)) {
      if (next < mr.end()) {
        obj->oop_iterate(cl);
      } else {
        // this obj spans the boundary. If it's an array, stop at the
        // boundary.
        if (obj->is_objArray()) {
          obj->oop_iterate(cl, mr);
        } else {
          obj->oop_iterate(cl);
        }
      }
    }
    cur = next;
  }
  return NULL;
}
ysr@345 681
// Prints a one-line summary of the region to the GC log.
void HeapRegion::print() const { print_on(gclog_or_tty); }
// One-line region summary: humongous flag (HS = starts, HC =
// continues), CS / GC-alloc flag, young / survivor flag, free flag,
// GC time stamp, prev/next top-at-mark-start, then the underlying
// space's info.
void HeapRegion::print_on(outputStream* st) const {
  if (isHumongous()) {
    if (startsHumongous())
      st->print(" HS");
    else
      st->print(" HC");
  } else {
    st->print(" ");
  }
  if (in_collection_set())
    st->print(" CS");
  else if (is_gc_alloc_region())
    st->print(" A ");
  else
    st->print(" ");
  if (is_young())
    st->print(is_survivor() ? " SU" : " Y ");
  else
    st->print(" ");
  if (is_empty())
    st->print(" F");
  else
    st->print(" ");
  st->print(" %5d", _gc_time_stamp);
  st->print(" PTAMS "PTR_FORMAT" NTAMS "PTR_FORMAT,
            prev_top_at_mark_start(), next_top_at_mark_start());
  G1OffsetTableContigSpace::print_on(st);
}
ysr@345 711
// Convenience verification entry point: verifies against the "prev"
// marking information and discards the failure flag (failures are
// still logged by the verification closure).
void HeapRegion::verify(bool allow_dirty) const {
  bool dummy = false;
  verify(allow_dirty, /* use_prev_marking */ true, /* failures */ &dummy);
}
tonyp@860 716
ysr@345 717 // This really ought to be commoned up into OffsetTableContigSpace somehow.
ysr@345 718 // We would need a mechanism to make that code skip dead objects.
ysr@345 719
tonyp@1079 720 void HeapRegion::verify(bool allow_dirty,
tonyp@1079 721 bool use_prev_marking,
tonyp@1079 722 bool* failures) const {
ysr@345 723 G1CollectedHeap* g1 = G1CollectedHeap::heap();
tonyp@1079 724 *failures = false;
ysr@345 725 HeapWord* p = bottom();
ysr@345 726 HeapWord* prev_p = NULL;
tonyp@860 727 VerifyLiveClosure vl_cl(g1, use_prev_marking);
tonyp@1740 728 bool is_humongous = isHumongous();
tonyp@2146 729 bool do_bot_verify = !is_young();
tonyp@1740 730 size_t object_num = 0;
ysr@345 731 while (p < top()) {
tonyp@2146 732 oop obj = oop(p);
tonyp@2146 733 size_t obj_size = obj->size();
tonyp@2146 734 object_num += 1;
tonyp@2146 735
tonyp@2146 736 if (is_humongous != g1->isHumongous(obj_size)) {
tonyp@1740 737 gclog_or_tty->print_cr("obj "PTR_FORMAT" is of %shumongous size ("
tonyp@1740 738 SIZE_FORMAT" words) in a %shumongous region",
tonyp@2146 739 p, g1->isHumongous(obj_size) ? "" : "non-",
tonyp@2146 740 obj_size, is_humongous ? "" : "non-");
tonyp@1740 741 *failures = true;
tonyp@2146 742 return;
tonyp@1740 743 }
tonyp@2146 744
tonyp@2146 745 // If it returns false, verify_for_object() will output the
tonyp@2146 746 // appropriate messasge.
tonyp@2146 747 if (do_bot_verify && !_offsets.verify_for_object(p, obj_size)) {
tonyp@2146 748 *failures = true;
tonyp@2146 749 return;
tonyp@2146 750 }
tonyp@2146 751
tonyp@2146 752 if (!g1->is_obj_dead_cond(obj, this, use_prev_marking)) {
tonyp@2146 753 if (obj->is_oop()) {
tonyp@2146 754 klassOop klass = obj->klass();
tonyp@2146 755 if (!klass->is_perm()) {
tonyp@2146 756 gclog_or_tty->print_cr("klass "PTR_FORMAT" of object "PTR_FORMAT" "
tonyp@2146 757 "not in perm", klass, obj);
tonyp@2146 758 *failures = true;
tonyp@2146 759 return;
tonyp@2146 760 } else if (!klass->is_klass()) {
tonyp@2146 761 gclog_or_tty->print_cr("klass "PTR_FORMAT" of object "PTR_FORMAT" "
tonyp@2146 762 "not a klass", klass, obj);
tonyp@2146 763 *failures = true;
tonyp@2146 764 return;
tonyp@2146 765 } else {
tonyp@2146 766 vl_cl.set_containing_obj(obj);
tonyp@2146 767 obj->oop_iterate(&vl_cl);
tonyp@2146 768 if (vl_cl.failures()) {
tonyp@2146 769 *failures = true;
tonyp@2146 770 }
tonyp@2146 771 if (G1MaxVerifyFailures >= 0 &&
tonyp@2146 772 vl_cl.n_failures() >= G1MaxVerifyFailures) {
tonyp@2146 773 return;
tonyp@2146 774 }
tonyp@2146 775 }
tonyp@2146 776 } else {
tonyp@2146 777 gclog_or_tty->print_cr(PTR_FORMAT" no an oop", obj);
tonyp@1079 778 *failures = true;
tonyp@1079 779 return;
tonyp@1079 780 }
ysr@345 781 }
ysr@345 782 prev_p = p;
tonyp@2146 783 p += obj_size;
ysr@345 784 }
tonyp@2146 785
tonyp@2146 786 if (p != top()) {
tonyp@2146 787 gclog_or_tty->print_cr("end of last object "PTR_FORMAT" "
tonyp@2146 788 "does not match top "PTR_FORMAT, p, top());
tonyp@2146 789 *failures = true;
tonyp@2146 790 return;
tonyp@2146 791 }
tonyp@2146 792
tonyp@2146 793 HeapWord* the_end = end();
tonyp@2146 794 assert(p == top(), "it should still hold");
tonyp@2146 795 // Do some extra BOT consistency checking for addresses in the
tonyp@2146 796 // range [top, end). BOT look-ups in this range should yield
tonyp@2146 797 // top. No point in doing that if top == end (there's nothing there).
tonyp@2146 798 if (p < the_end) {
tonyp@2146 799 // Look up top
tonyp@2146 800 HeapWord* addr_1 = p;
tonyp@2146 801 HeapWord* b_start_1 = _offsets.block_start_const(addr_1);
tonyp@2146 802 if (b_start_1 != p) {
tonyp@2146 803 gclog_or_tty->print_cr("BOT look up for top: "PTR_FORMAT" "
tonyp@2146 804 " yielded "PTR_FORMAT", expecting "PTR_FORMAT,
tonyp@2146 805 addr_1, b_start_1, p);
tonyp@2146 806 *failures = true;
tonyp@2146 807 return;
tonyp@2146 808 }
tonyp@2146 809
tonyp@2146 810 // Look up top + 1
tonyp@2146 811 HeapWord* addr_2 = p + 1;
tonyp@2146 812 if (addr_2 < the_end) {
tonyp@2146 813 HeapWord* b_start_2 = _offsets.block_start_const(addr_2);
tonyp@2146 814 if (b_start_2 != p) {
tonyp@2146 815 gclog_or_tty->print_cr("BOT look up for top + 1: "PTR_FORMAT" "
tonyp@2146 816 " yielded "PTR_FORMAT", expecting "PTR_FORMAT,
tonyp@2146 817 addr_2, b_start_2, p);
tonyp@1079 818 *failures = true;
tonyp@1079 819 return;
tonyp@2146 820 }
tonyp@2146 821 }
tonyp@2146 822
tonyp@2146 823 // Look up an address between top and end
tonyp@2146 824 size_t diff = pointer_delta(the_end, p) / 2;
tonyp@2146 825 HeapWord* addr_3 = p + diff;
tonyp@2146 826 if (addr_3 < the_end) {
tonyp@2146 827 HeapWord* b_start_3 = _offsets.block_start_const(addr_3);
tonyp@2146 828 if (b_start_3 != p) {
tonyp@2146 829 gclog_or_tty->print_cr("BOT look up for top + diff: "PTR_FORMAT" "
tonyp@2146 830 " yielded "PTR_FORMAT", expecting "PTR_FORMAT,
tonyp@2146 831 addr_3, b_start_3, p);
tonyp@2146 832 *failures = true;
tonyp@2146 833 return;
tonyp@2146 834 }
tonyp@2146 835 }
tonyp@2146 836
tonyp@2146 837 // Loook up end - 1
tonyp@2146 838 HeapWord* addr_4 = the_end - 1;
tonyp@2146 839 HeapWord* b_start_4 = _offsets.block_start_const(addr_4);
tonyp@2146 840 if (b_start_4 != p) {
tonyp@2146 841 gclog_or_tty->print_cr("BOT look up for end - 1: "PTR_FORMAT" "
tonyp@2146 842 " yielded "PTR_FORMAT", expecting "PTR_FORMAT,
tonyp@2146 843 addr_4, b_start_4, p);
tonyp@2146 844 *failures = true;
tonyp@2146 845 return;
tonyp@1079 846 }
ysr@345 847 }
tonyp@1079 848
tonyp@1740 849 if (is_humongous && object_num > 1) {
tonyp@1740 850 gclog_or_tty->print_cr("region ["PTR_FORMAT","PTR_FORMAT"] is humongous "
tonyp@1740 851 "but has "SIZE_FORMAT", objects",
tonyp@1740 852 bottom(), end(), object_num);
tonyp@1740 853 *failures = true;
tonyp@1079 854 return;
ysr@345 855 }
ysr@345 856 }
ysr@345 857
ysr@345 858 // G1OffsetTableContigSpace code; copied from space.cpp. Hope this can go
ysr@345 859 // away eventually.
ysr@345 860
// Set up this space over "mr" and reset its block offset table so the
// first allocation sees a consistent BOT. Clearing (and optional
// mangling) is done here rather than by the superclass so the BOT is
// reset alongside top.
void G1OffsetTableContigSpace::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
  // false ==> we'll do the clearing if there's clearing to be done.
  ContiguousSpace::initialize(mr, false, mangle_space);
  _offsets.zero_bottom_entry();
  _offsets.initialize_threshold();
  if (clear_space) clear(mangle_space);
}
ysr@345 868
// Empty the space (resetting top, optionally mangling) and reset the
// block offset table to match the now-empty space.
void G1OffsetTableContigSpace::clear(bool mangle_space) {
  ContiguousSpace::clear(mangle_space);
  // Resetting top invalidates the BOT; rebuild its initial state.
  _offsets.zero_bottom_entry();
  _offsets.initialize_threshold();
}
ysr@345 874
// Keep the space's bottom and the BOT's notion of bottom in sync.
void G1OffsetTableContigSpace::set_bottom(HeapWord* new_bottom) {
  Space::set_bottom(new_bottom);
  _offsets.set_bottom(new_bottom);
}
ysr@345 879
// Keep the space's end and the BOT's covered size in sync.
void G1OffsetTableContigSpace::set_end(HeapWord* new_end) {
  Space::set_end(new_end);
  // Resize the BOT to cover the new word size, [bottom, new_end).
  _offsets.resize(new_end - bottom());
}
ysr@345 884
// Print the space's short description followed by its key addresses:
// [bottom, top, BOT threshold, end).
void G1OffsetTableContigSpace::print() const {
  print_short();
  gclog_or_tty->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", "
                INTPTR_FORMAT ", " INTPTR_FORMAT ")",
                bottom(), top(), _offsets.threshold(), end());
}
ysr@345 891
ysr@345 892 HeapWord* G1OffsetTableContigSpace::initialize_threshold() {
ysr@345 893 return _offsets.initialize_threshold();
ysr@345 894 }
ysr@345 895
ysr@345 896 HeapWord* G1OffsetTableContigSpace::cross_threshold(HeapWord* start,
ysr@345 897 HeapWord* end) {
ysr@345 898 _offsets.alloc_block(start, end);
ysr@345 899 return _offsets.threshold();
ysr@345 900 }
ysr@345 901
ysr@345 902 HeapWord* G1OffsetTableContigSpace::saved_mark_word() const {
ysr@345 903 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@345 904 assert( _gc_time_stamp <= g1h->get_gc_time_stamp(), "invariant" );
ysr@345 905 if (_gc_time_stamp < g1h->get_gc_time_stamp())
ysr@345 906 return top();
ysr@345 907 else
ysr@345 908 return ContiguousSpace::saved_mark_word();
ysr@345 909 }
ysr@345 910
// Record top() as this region's saved mark for the current GC and
// publish it by bumping the region's time stamp. The statement order
// and the storestore barrier are load-bearing: a concurrent reader in
// saved_mark_word() must never see the new time stamp before the new
// saved mark (see the comment below).
void G1OffsetTableContigSpace::set_saved_mark() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  unsigned curr_gc_time_stamp = g1h->get_gc_time_stamp();

  if (_gc_time_stamp < curr_gc_time_stamp) {
    // The order of these is important, as another thread might be
    // about to start scanning this region. If it does so after
    // set_saved_mark and before _gc_time_stamp = ..., then the latter
    // will be false, and it will pick up top() as the high water mark
    // of region. If it does so after _gc_time_stamp = ..., then it
    // will pick up the right saved_mark_word() as the high water mark
    // of the region. Either way, the behaviour will be correct.
    ContiguousSpace::set_saved_mark();
    OrderAccess::storestore();
    _gc_time_stamp = curr_gc_time_stamp;
    // The following fence is to force a flush of the writes above, but
    // is strictly not needed because when an allocating worker thread
    // calls set_saved_mark() it does so under the ParGCRareEvent_lock;
    // when the lock is released, the write will be flushed.
    // OrderAccess::fence();
  }
}
ysr@345 933
// Construct the space over "mr", wiring its private BOT view into the
// heap-wide shared offset array. "is_zeroed" indicates the underlying
// memory is already zero-filled, in which case initialize() can skip
// the clear.
G1OffsetTableContigSpace::
G1OffsetTableContigSpace(G1BlockOffsetSharedArray* sharedOffsetArray,
                         MemRegion mr, bool is_zeroed) :
  _offsets(sharedOffsetArray, mr),
  _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true),
  _gc_time_stamp(0)
{
  // The BOT must know its owning space before initialize() touches it.
  _offsets.set_space(this);
  // If the memory is not pre-zeroed, ask initialize() to clear it.
  initialize(mr, !is_zeroed, SpaceDecorator::Mangle);
}