annotate src/hotspot/share/gc/g1/g1Allocator.hpp @ 53642:c9325aa887da

8214118: HeapRegions marked as archive even if CDS mapping fails
Reviewed-by: tschatzl, jiangli
author sjohanss
date Fri, 23 Nov 2018 10:57:07 +0100
parents 8dbf1a13af49
children 9807daeb47c4 ee29b516a36a
rev   line source
sjohanss@26837 1 /*
sjohanss@50208 2 * Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved.
sjohanss@26837 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
sjohanss@26837 4 *
sjohanss@26837 5 * This code is free software; you can redistribute it and/or modify it
sjohanss@26837 6 * under the terms of the GNU General Public License version 2 only, as
sjohanss@26837 7 * published by the Free Software Foundation.
sjohanss@26837 8 *
sjohanss@26837 9 * This code is distributed in the hope that it will be useful, but WITHOUT
sjohanss@26837 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
sjohanss@26837 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
sjohanss@26837 12 * version 2 for more details (a copy is included in the LICENSE file that
sjohanss@26837 13 * accompanied this code).
sjohanss@26837 14 *
sjohanss@26837 15 * You should have received a copy of the GNU General Public License version
sjohanss@26837 16 * 2 along with this work; if not, write to the Free Software Foundation,
sjohanss@26837 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
sjohanss@26837 18 *
sjohanss@26837 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
sjohanss@26837 20 * or visit www.oracle.com if you need additional information or have any
sjohanss@26837 21 * questions.
sjohanss@26837 22 *
sjohanss@26837 23 */
sjohanss@26837 24
pliden@30764 25 #ifndef SHARE_VM_GC_G1_G1ALLOCATOR_HPP
pliden@30764 26 #define SHARE_VM_GC_G1_G1ALLOCATOR_HPP
sjohanss@26837 27
pliden@30764 28 #include "gc/g1/g1AllocRegion.hpp"
pliden@30764 29 #include "gc/g1/g1InCSetState.hpp"
pliden@30764 30 #include "gc/shared/collectedHeap.hpp"
pliden@30764 31 #include "gc/shared/plab.hpp"
stefank@30175 32
stefank@30175 33 class EvacuationInfo;
sjohanss@26837 34
tschatzl@32185 35 // Interface to keep track of which regions G1 is currently allocating into. Provides
tschatzl@32185 36 // accessors to these regions and operations on them (e.g. allocating into them, or getting their occupancy).
tschatzl@32185 37 // Also keeps track of retained regions across GCs.
sjohanss@26837 38 class G1Allocator : public CHeapObj<mtGC> {
sjohanss@26837 39 friend class VMStructs;
sjohanss@50208 40
sjohanss@50208 41 private:
sjohanss@26837 42 G1CollectedHeap* _g1h;
sjohanss@26837 43
sjohanss@50208 44 bool _survivor_is_full;
sjohanss@50208 45 bool _old_is_full;
tschatzl@32185 46
sjohanss@50208 47 // Alloc region used to satisfy mutator allocation requests.
sjohanss@50208 48 MutatorAllocRegion _mutator_alloc_region;
tschatzl@32377 49
sjohanss@50208 50 // Alloc region used to satisfy allocation requests by the GC for
sjohanss@50208 51 // survivor objects.
sjohanss@50208 52 SurvivorGCAllocRegion _survivor_gc_alloc_region;
sjohanss@50208 53
sjohanss@50208 54 // Alloc region used to satisfy allocation requests by the GC for
sjohanss@50208 55 // old objects.
sjohanss@50208 56 OldGCAllocRegion _old_gc_alloc_region;
sjohanss@50208 57
sjohanss@50208 58 HeapRegion* _retained_old_gc_alloc_region;
sjohanss@50208 59
sjohanss@50208 60 bool survivor_is_full() const;
sjohanss@50208 61 bool old_is_full() const;
sjohanss@50208 62
sjohanss@50208 63 void set_survivor_full();
sjohanss@50208 64 void set_old_full();
sjohanss@50208 65
sjohanss@50208 66 void reuse_retained_old_region(EvacuationInfo& evacuation_info,
sjohanss@50208 67 OldGCAllocRegion* old,
sjohanss@50208 68 HeapRegion** retained);
tschatzl@32377 69
tschatzl@32185 70 // Accessors to the allocation regions.
sjohanss@50208 71 inline MutatorAllocRegion* mutator_alloc_region();
sjohanss@50208 72 inline SurvivorGCAllocRegion* survivor_gc_alloc_region();
sjohanss@50208 73 inline OldGCAllocRegion* old_gc_alloc_region();
tschatzl@32185 74
tschatzl@32185 75 // Allocation attempt during GC for a survivor object / PLAB.
sjohanss@50208 76 HeapWord* survivor_attempt_allocation(size_t min_word_size,
tschatzl@32389 77 size_t desired_word_size,
sjohanss@49788 78 size_t* actual_word_size);
sjohanss@50208 79
tschatzl@32185 80 // Allocation attempt during GC for an old object / PLAB.
sjohanss@50208 81 HeapWord* old_attempt_allocation(size_t min_word_size,
tschatzl@32389 82 size_t desired_word_size,
sjohanss@49788 83 size_t* actual_word_size);
sjohanss@26837 84 public:
sjohanss@50208 85 G1Allocator(G1CollectedHeap* heap);
sjohanss@26837 86
tschatzl@32185 87 #ifdef ASSERT
tschatzl@32185 88 // Do we currently have an active mutator region to allocate into?
sjohanss@49788 89 bool has_mutator_alloc_region() { return mutator_alloc_region()->get() != NULL; }
tschatzl@32185 90 #endif
sjohanss@26837 91
sjohanss@50208 92 void init_mutator_alloc_region();
sjohanss@50208 93 void release_mutator_alloc_region();
sjohanss@26837 94
sjohanss@50208 95 void init_gc_alloc_regions(EvacuationInfo& evacuation_info);
sjohanss@50208 96 void release_gc_alloc_regions(EvacuationInfo& evacuation_info);
sjohanss@50208 97 void abandon_gc_alloc_regions();
sjohanss@50208 98 bool is_retained_old_region(HeapRegion* hr);
sjohanss@26837 99
tschatzl@32185 100 // Allocate blocks of memory during mutator time.
tschatzl@32185 101
sjohanss@50470 102 inline HeapWord* attempt_allocation(size_t min_word_size,
sjohanss@50470 103 size_t desired_word_size,
sjohanss@50470 104 size_t* actual_word_size);
sjohanss@49788 105 inline HeapWord* attempt_allocation_locked(size_t word_size);
sjohanss@49788 106 inline HeapWord* attempt_allocation_force(size_t word_size);
tschatzl@32185 107
sjohanss@49788 108 size_t unsafe_max_tlab_alloc();
sjohanss@50208 109 size_t used_in_alloc_regions();
tschatzl@32185 110
tschatzl@32185 111 // Allocate blocks of memory during garbage collection. Will ensure an
tschatzl@32185 112 // allocation region is available, either by picking an existing one or by
tschatzl@32185 113 // expanding the heap, and then allocate a block of the given size. The block
tschatzl@32185 114 // may not be humongous - it must fit into a single heap region.
tschatzl@32185 115 HeapWord* par_allocate_during_gc(InCSetState dest,
sjohanss@49788 116 size_t word_size);
tschatzl@32185 117
tschatzl@32389 118 HeapWord* par_allocate_during_gc(InCSetState dest,
tschatzl@32389 119 size_t min_word_size,
tschatzl@32389 120 size_t desired_word_size,
sjohanss@49788 121 size_t* actual_word_size);
sjohanss@26837 122 };
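// Illustrative sketch (not part of this revision): one way a GC worker could use the
// min/desired/actual variant of par_allocate_during_gc() declared above. The helper
// name and the word-size constants are hypothetical; only the G1Allocator interface
// shown in this header is assumed.
static HeapWord* example_gc_buffer_alloc(G1Allocator* allocator, InCSetState dest) {
  size_t actual_word_size = 0;
  // Ask for up to 1024 words, but accept as few as 128 if the current GC alloc
  // region cannot satisfy the desired size.
  HeapWord* buf = allocator->par_allocate_during_gc(dest,
                                                    128  /* min_word_size */,
                                                    1024 /* desired_word_size */,
                                                    &actual_word_size);
  if (buf == NULL) {
    // Neither the current GC alloc region nor a heap expansion could satisfy the request.
    return NULL;
  }
  // actual_word_size now reports the size actually handed out, between the minimum
  // and desired sizes requested above.
  return buf;
}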
sjohanss@26837 123
tschatzl@32185 124 // Manages the PLABs used during garbage collection. Interface for allocation from PLABs.
tschatzl@32185 125 // Needs to handle multiple contexts, extra alignment in any "survivor" area and some
tschatzl@32185 126 // statistics.
tschatzl@32185 127 class G1PLABAllocator : public CHeapObj<mtGC> {
sjohanss@26837 128 friend class G1ParScanThreadState;
sjohanss@50208 129 private:
sjohanss@26837 130 G1CollectedHeap* _g1h;
tschatzl@32185 131 G1Allocator* _allocator;
sjohanss@26837 132
sjohanss@50208 133 PLAB _surviving_alloc_buffer;
sjohanss@50208 134 PLAB _tenured_alloc_buffer;
sjohanss@50208 135 PLAB* _alloc_buffers[InCSetState::Num];
sjohanss@50208 136
tschatzl@28213 137 // The survivor alignment in effect in bytes.
tschatzl@28213 138 // == 0 : don't align survivors
tschatzl@28213 139 // != 0 : align survivors to that alignment
tschatzl@28213 140 // These values were chosen to favor the non-alignment case since some
tschatzl@28213 141 // architectures have special compare-against-zero instructions.
tschatzl@28213 142 const uint _survivor_alignment_bytes;
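// (Illustrative note, not part of this revision: a value of 64 here would mean
// survivor allocations are padded so that returned addresses are 64-byte aligned,
// while the common value 0 lets callers skip the padding path with the single
// compare against zero mentioned above. The value is presumably derived from the
// SurvivorAlignmentInBytes flag by calc_survivor_alignment_bytes() below.)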
tschatzl@28213 143
tschatzl@32379 144 // Number of words allocated directly (not counting PLAB allocation).
tschatzl@32379 145 size_t _direct_allocated[InCSetState::Num];
tschatzl@32379 146
sjohanss@50208 147 void flush_and_retire_stats();
sjohanss@50208 148 inline PLAB* alloc_buffer(InCSetState dest);
sjohanss@26837 149
tschatzl@28213 150 // Calculate the survivor space object alignment in bytes. Returns that value, or
tschatzl@28213 151 // 0 if there are no restrictions on survivor alignment.
sjohanss@50208 152 static uint calc_survivor_alignment_bytes();
tschatzl@32377 153
tschatzl@32383 154 bool may_throw_away_buffer(size_t const allocation_word_sz, size_t const buffer_size) const;
sjohanss@26837 155 public:
tschatzl@32185 156 G1PLABAllocator(G1Allocator* allocator);
sjohanss@26837 157
sjohanss@50208 158 void waste(size_t& wasted, size_t& undo_wasted);
sjohanss@26837 159
tschatzl@28213 160 // Allocate word_sz words in dest, either directly into the regions or by
tschatzl@28213 161 // allocating a new PLAB. Returns the address of the allocated memory, NULL if
tschatzl@32377 162 // not successful. plab_refill_failed indicates whether an attempt to refill the
tschatzl@32377 163 // PLAB failed or not.
tschatzl@28213 164 HeapWord* allocate_direct_or_new_plab(InCSetState dest,
tschatzl@28213 165 size_t word_sz,
tschatzl@32377 166 bool* plab_refill_failed);
tschatzl@28213 167
tschatzl@28213 168 // Allocate word_sz words in the PLAB of dest. Returns the address of the
tschatzl@28213 169 // allocated memory, NULL if not successful.
tschatzl@32378 170 inline HeapWord* plab_allocate(InCSetState dest,
sjohanss@49788 171 size_t word_sz);
tschatzl@28213 172
sjohanss@50208 173 inline HeapWord* allocate(InCSetState dest,
sjohanss@50208 174 size_t word_sz,
sjohanss@50208 175 bool* refill_failed);
sjohanss@26837 176
sjohanss@49788 177 void undo_allocation(InCSetState dest, HeapWord* obj, size_t word_sz);
sjohanss@26837 178 };
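// Illustrative sketch (not part of this revision): the path a caller such as
// G1ParScanThreadState could take when copying an object of word_sz words to
// destination 'dest'. The helper name is hypothetical and the fallback behaviour is
// inferred from the comments above; only methods declared in this header are used.
static HeapWord* example_plab_copy_alloc(G1PLABAllocator* plab_allocator,
                                         InCSetState dest,
                                         size_t word_sz) {
  bool refill_failed = false;
  // allocate() first tries the current PLAB for 'dest'; otherwise it falls back to
  // allocate_direct_or_new_plab(), which may retire the PLAB and start a new one.
  HeapWord* obj = plab_allocator->allocate(dest, word_sz, &refill_failed);
  if (obj == NULL) {
    // refill_failed now tells the caller whether a PLAB refill was attempted and
    // failed; callers typically record this before treating the copy as an
    // evacuation failure.
    return NULL;
  }
  return obj;
}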
sjohanss@26837 179
sjohanss@46285 180 // G1ArchiveRegionMap is a boolean array used to mark G1 regions as
sjohanss@46285 181 // archive regions. This allows a quick check for whether an object
sjohanss@46285 182 // should not be marked because it is in an archive region.
sjohanss@46285 183 class G1ArchiveRegionMap : public G1BiasedMappedArray<bool> {
sjohanss@46285 184 protected:
sjohanss@46285 185 bool default_value() const { return false; }
sjohanss@46285 186 };
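// Illustrative sketch (not part of this revision): how such a map can be consulted
// for a heap address. The helper name is hypothetical, and get_by_address() is an
// assumption based on the G1BiasedMappedArray interface in g1BiasedArray.hpp, not
// something declared in this header.
static bool example_is_marked_archive(G1ArchiveRegionMap* map, HeapWord* addr) {
  // The biased array maps any address inside a region to the bool recorded for that
  // region; regions that were never marked keep default_value(), i.e. false.
  return map->get_by_address(addr);
}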
sjohanss@46285 187
jiangli@31346 188 // G1ArchiveAllocator is used to allocate memory in archive
jiangli@46810 189 // regions. Such regions are neither scavenged nor compacted by the GC.
jiangli@46810 190 // There are two types of archive regions, which
jiangli@46810 191 // differ in the kind of references allowed for the contained objects:
jiangli@46810 192 //
jiangli@46810 193 // - A 'closed' archive region contains no references outside of other
jiangli@46810 194 // closed archive regions. The region is immutable to the GC, which does
jiangli@46810 195 // not mark object headers in 'closed' archive regions.
jiangli@46810 196 // - An 'open' archive region allows references to any other regions,
jiangli@46810 197 // including closed archive, open archive and other Java heap regions.
jiangli@46810 198 // The GC can adjust pointers and mark object headers in 'open' archive regions.
jiangli@31346 199 class G1ArchiveAllocator : public CHeapObj<mtGC> {
jiangli@31346 200 protected:
jiangli@46810 201 bool _open; // Indicates whether the regions allocated are 'open' archive regions.
jiangli@31346 202 G1CollectedHeap* _g1h;
jiangli@31346 203
jiangli@31346 204 // The current allocation region
jiangli@31346 205 HeapRegion* _allocation_region;
jiangli@31346 206
jiangli@31346 207 // Regions allocated for the current archive range.
jiangli@31346 208 GrowableArray<HeapRegion*> _allocated_regions;
jiangli@31346 209
jiangli@31346 210 // The number of bytes used in the current range.
jiangli@31346 211 size_t _summary_bytes_used;
jiangli@31346 212
jiangli@31346 213 // Current allocation window within the current region.
jiangli@31346 214 HeapWord* _bottom;
jiangli@31346 215 HeapWord* _top;
jiangli@31346 216 HeapWord* _max;
jiangli@31346 217
jiangli@31346 218 // Allocate a new region for this archive allocator.
jiangli@31346 219 // Allocation is from the top of the reserved heap downward.
jiangli@31346 220 bool alloc_new_region();
jiangli@31346 221
jiangli@31346 222 public:
jiangli@46810 223 G1ArchiveAllocator(G1CollectedHeap* g1h, bool open) :
tschatzl@52033 224 _open(open),
jiangli@31346 225 _g1h(g1h),
jiangli@31346 226 _allocation_region(NULL),
jiangli@31346 227 _allocated_regions((ResourceObj::set_allocation_type((address) &_allocated_regions,
jiangli@31346 228 ResourceObj::C_HEAP),
jiangli@31346 229 2), true /* C_Heap */),
jiangli@31346 230 _summary_bytes_used(0),
jiangli@31346 231 _bottom(NULL),
jiangli@31346 232 _top(NULL),
tschatzl@52033 233 _max(NULL) { }
jiangli@31346 234
jiangli@31346 235 virtual ~G1ArchiveAllocator() {
jiangli@31346 236 assert(_allocation_region == NULL, "_allocation_region not NULL");
jiangli@31346 237 }
jiangli@31346 238
jiangli@46810 239 static G1ArchiveAllocator* create_allocator(G1CollectedHeap* g1h, bool open);
jiangli@31346 240
jiangli@31346 241 // Allocate memory for an individual object.
jiangli@31346 242 HeapWord* archive_mem_allocate(size_t word_size);
jiangli@31346 243
jiangli@31346 244 // Return the memory ranges used in the current archive, after
jiangli@31346 245 // aligning to the requested alignment.
jiangli@31346 246 void complete_archive(GrowableArray<MemRegion>* ranges,
jiangli@31346 247 size_t end_alignment_in_bytes);
jiangli@31346 248
jiangli@31346 249 // The number of bytes allocated by this allocator.
jiangli@31346 250 size_t used() {
jiangli@31346 251 return _summary_bytes_used;
jiangli@31346 252 }
jiangli@31346 253
jiangli@31346 254 // Clear the count of bytes allocated in prior G1 regions. This
jiangli@31346 255 // must be done when recalculate_used() is used to reset the counter
jiangli@31346 256 // for the generic allocator, since it counts bytes in all G1
jiangli@31346 257 // regions, including those still associated with this allocator.
jiangli@31346 258 void clear_used() {
jiangli@31346 259 _summary_bytes_used = 0;
jiangli@31346 260 }
jiangli@31346 261
sjohanss@46285 262 // Create the archive region maps which are used to identify archive objects.
sjohanss@46285 263 static inline void enable_archive_object_check();
sjohanss@46285 264
sjohanss@53642 265 // Mark regions containing the specified address range as archive/non-archive.
jiangli@46810 266 static inline void set_range_archive(MemRegion range, bool open);
sjohanss@53642 267 static inline void clear_range_archive(MemRegion range, bool open);
sjohanss@46285 268
jiangli@46810 269 // Check if the object is in a closed archive region.
jiangli@46810 270 static inline bool is_closed_archive_object(oop object);
jiangli@46810 271 // Check if the object is in an open archive region.
jiangli@46810 272 static inline bool is_open_archive_object(oop object);
jiangli@46810 273 // Check if the object is in either a closed or an open archive region.
jiangli@52991 274 static inline bool is_archived_object(oop object);
sjohanss@46285 275
sjohanss@46285 276 private:
sjohanss@46285 277 static bool _archive_check_enabled;
jiangli@46810 278 static G1ArchiveRegionMap _closed_archive_region_map;
jiangli@46810 279 static G1ArchiveRegionMap _open_archive_region_map;
sjohanss@46285 280
jiangli@46810 281 // Check if an object is in a closed archive region using the _closed_archive_region_map.
jiangli@46810 282 static inline bool in_closed_archive_range(oop object);
jiangli@46810 283 // Check if an object is in an open archive region using the _open_archive_region_map.
jiangli@46810 284 static inline bool in_open_archive_range(oop object);
sjohanss@46285 285
jiangli@46810 286 // Check if archive object checking is enabled, to avoid calling in_open/closed_archive_range
sjohanss@46285 287 // unnecessarily.
sjohanss@46285 288 static inline bool archive_check_enabled();
jiangli@31346 289 };
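// Illustrative sketch (not part of this revision): the expected life cycle of a
// G1ArchiveAllocator while dumping an archive. The helper name, the loop, the word
// size and the alignment choice are hypothetical; only the methods declared above
// and HeapRegion::GrainBytes are assumed.
static void example_archive_dump(G1CollectedHeap* g1h, GrowableArray<MemRegion>* ranges) {
  // 'open == false' requests a closed archive allocator, 'true' an open one.
  G1ArchiveAllocator* allocator = G1ArchiveAllocator::create_allocator(g1h, false /* open */);
  for (int i = 0; i < 100; i++) {
    // Allocation proceeds from the top of the reserved heap downward, region by region.
    HeapWord* mem = allocator->archive_mem_allocate(8 /* word_size */);
    if (mem == NULL) {
      break; // No more space could be obtained for the archive.
    }
    // ... initialize the archived object at 'mem' ...
  }
  // Record the memory ranges actually used, aligned as requested by the caller.
  allocator->complete_archive(ranges, HeapRegion::GrainBytes /* end_alignment_in_bytes */);
  delete allocator;
}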
jiangli@31346 290
pliden@30764 291 #endif // SHARE_VM_GC_G1_G1ALLOCATOR_HPP