annotate src/share/vm/runtime/sweeper.cpp @ 3602:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents d2a62e0f25eb
children 0cfa93c2fcc4
rev   line source
duke@0 1 /*
coleenp@3602 2 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
duke@0 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@0 4 *
duke@0 5 * This code is free software; you can redistribute it and/or modify it
duke@0 6 * under the terms of the GNU General Public License version 2 only, as
duke@0 7 * published by the Free Software Foundation.
duke@0 8 *
duke@0 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@0 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@0 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@0 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@0 13 * accompanied this code).
duke@0 14 *
duke@0 15 * You should have received a copy of the GNU General Public License version
duke@0 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@0 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@0 18 *
trims@1472 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1472 20 * or visit www.oracle.com if you need additional information or have any
trims@1472 21 * questions.
duke@0 22 *
duke@0 23 */
duke@0 24
stefank@1879 25 #include "precompiled.hpp"
stefank@1879 26 #include "code/codeCache.hpp"
coleenp@3602 27 #include "code/compiledIC.hpp"
coleenp@3602 28 #include "code/icBuffer.hpp"
stefank@1879 29 #include "code/nmethod.hpp"
stefank@1879 30 #include "compiler/compileBroker.hpp"
stefank@1879 31 #include "memory/resourceArea.hpp"
coleenp@3602 32 #include "oops/method.hpp"
stefank@1879 33 #include "runtime/atomic.hpp"
stefank@1879 34 #include "runtime/compilationPolicy.hpp"
stefank@1879 35 #include "runtime/mutexLocker.hpp"
stefank@1879 36 #include "runtime/os.hpp"
stefank@1879 37 #include "runtime/sweeper.hpp"
stefank@1879 38 #include "runtime/vm_operations.hpp"
stefank@1879 39 #include "utilities/events.hpp"
stefank@1879 40 #include "utilities/xmlstream.hpp"
duke@0 41
never@2481 42 #ifdef ASSERT
never@2481 43
never@2481 44 #define SWEEP(nm) record_sweep(nm, __LINE__)
never@2481 45 // Sweeper logging code
never@2481 46 class SweeperRecord {
never@2481 47 public:
never@2481 48 int traversal;
never@2481 49 int invocation;
never@2481 50 int compile_id;
never@2481 51 long traversal_mark;
never@2481 52 int state;
never@2481 53 const char* kind;
never@2481 54 address vep;
never@2481 55 address uep;
never@2481 56 int line;
never@2481 57
never@2481 58 void print() {
never@2481 59 tty->print_cr("traversal = %d invocation = %d compile_id = %d %s uep = " PTR_FORMAT " vep = "
never@2481 60 PTR_FORMAT " state = %d traversal_mark %ld line = %d",
never@2481 61 traversal,
never@2481 62 invocation,
never@2481 63 compile_id,
never@2481 64 kind == NULL ? "" : kind,
never@2481 65 uep,
never@2481 66 vep,
never@2481 67 state,
never@2481 68 traversal_mark,
never@2481 69 line);
never@2481 70 }
never@2481 71 };
never@2481 72
never@2481 73 static int _sweep_index = 0;
never@2481 74 static SweeperRecord* _records = NULL;
never@2481 75
never@2481 76 void NMethodSweeper::report_events(int id, address entry) {
never@2481 77 if (_records != NULL) {
never@2481 78 for (int i = _sweep_index; i < SweeperLogEntries; i++) {
never@2481 79 if (_records[i].uep == entry ||
never@2481 80 _records[i].vep == entry ||
never@2481 81 _records[i].compile_id == id) {
never@2481 82 _records[i].print();
never@2481 83 }
never@2481 84 }
never@2481 85 for (int i = 0; i < _sweep_index; i++) {
never@2481 86 if (_records[i].uep == entry ||
never@2481 87 _records[i].vep == entry ||
never@2481 88 _records[i].compile_id == id) {
never@2481 89 _records[i].print();
never@2481 90 }
never@2481 91 }
never@2481 92 }
never@2481 93 }
never@2481 94
never@2481 95 void NMethodSweeper::report_events() {
never@2481 96 if (_records != NULL) {
never@2481 97 for (int i = _sweep_index; i < SweeperLogEntries; i++) {
never@2481 98 // skip empty records
never@2481 99 if (_records[i].vep == NULL) continue;
never@2481 100 _records[i].print();
never@2481 101 }
never@2481 102 for (int i = 0; i < _sweep_index; i++) {
never@2481 103 // skip empty records
never@2481 104 if (_records[i].vep == NULL) continue;
never@2481 105 _records[i].print();
never@2481 106 }
never@2481 107 }
never@2481 108 }
never@2481 109
never@2481 110 void NMethodSweeper::record_sweep(nmethod* nm, int line) {
never@2481 111 if (_records != NULL) {
never@2481 112 _records[_sweep_index].traversal = _traversals;
never@2481 113 _records[_sweep_index].traversal_mark = nm->_stack_traversal_mark;
never@2481 114 _records[_sweep_index].invocation = _invocations;
never@2481 115 _records[_sweep_index].compile_id = nm->compile_id();
never@2481 116 _records[_sweep_index].kind = nm->compile_kind();
never@2481 117 _records[_sweep_index].state = nm->_state;
never@2481 118 _records[_sweep_index].vep = nm->verified_entry_point();
never@2481 119 _records[_sweep_index].uep = nm->entry_point();
never@2481 120 _records[_sweep_index].line = line;
never@2481 121
never@2481 122 _sweep_index = (_sweep_index + 1) % SweeperLogEntries;
never@2481 123 }
never@2481 124 }
never@2481 125 #else
never@2481 126 #define SWEEP(nm)
never@2481 127 #endif
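
// A worked example of the ring-buffer logging above, assuming SweeperLogEntries
// were 4 (an example value only): record_sweep() stores one SweeperRecord per call
// and wraps via modular indexing, so successive records land in slots 0, 1, 2, 3,
// 0, 1, ...  report_events() then prints from _sweep_index to the end of the buffer
// and from 0 back up to _sweep_index, i.e. oldest entries first once it has wrapped.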
never@2481 128
never@2481 129
duke@0 130 long NMethodSweeper::_traversals = 0; // No. of stack traversals performed
never@1535 131 nmethod* NMethodSweeper::_current = NULL; // Current nmethod
never@1564 132 int NMethodSweeper::_seen = 0; // No. of nmethods we have processed so far in the current pass of the CodeCache
never@1564 133
never@1564 134 volatile int NMethodSweeper::_invocations = 0; // No. of invocations left until we are completed with this pass
never@1564 135 volatile int NMethodSweeper::_sweep_started = 0; // Whether a sweep is in progress.
duke@0 136
duke@0 137 jint NMethodSweeper::_locked_seen = 0;
duke@0 138 jint NMethodSweeper::_not_entrant_seen_on_stack = 0;
duke@0 139 bool NMethodSweeper::_rescan = false;
never@1458 140 bool NMethodSweeper::_do_sweep = false;
kvn@1202 141 bool NMethodSweeper::_was_full = false;
kvn@1202 142 jint NMethodSweeper::_advise_to_sweep = 0;
kvn@1202 143 jlong NMethodSweeper::_last_was_full = 0;
kvn@1202 144 uint NMethodSweeper::_highest_marked = 0;
kvn@1202 145 long NMethodSweeper::_was_full_traversal = 0;
duke@0 146
jrose@989 147 class MarkActivationClosure: public CodeBlobClosure {
jrose@989 148 public:
jrose@989 149 virtual void do_code_blob(CodeBlob* cb) {
jrose@989 150 // If we see an activation belonging to a non_entrant nmethod, we mark it.
jrose@989 151 if (cb->is_nmethod() && ((nmethod*)cb)->is_not_entrant()) {
jrose@989 152 ((nmethod*)cb)->mark_as_seen_on_stack();
jrose@989 153 }
jrose@989 154 }
jrose@989 155 };
jrose@989 156 static MarkActivationClosure mark_activation_closure;
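
// A sketch of how the closure above is applied, based on the call in scan_stacks()
// below: Threads::nmethods_do() walks the frames of every Java thread and hands each
// code blob with an activation to do_code_blob().  For a not-entrant nmethod this
// calls mark_as_seen_on_stack(), which is assumed to record the current stack
// traversal mark so that process_nmethod() will not turn it into a zombie during
// this pass.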
jrose@989 157
never@1458 158 void NMethodSweeper::scan_stacks() {
duke@0 159 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
duke@0 160 if (!MethodFlushing) return;
never@1458 161 _do_sweep = true;
duke@0 162
duke@0 163 // No need to synchronize access, since this is always executed at a
duke@0 164 // safepoint. If we aren't in the middle of a scan and a rescan
never@1458 165 // hasn't been requested then just return. If UseCodeCacheFlushing is on and
never@1458 166 // code cache flushing is in progress, don't skip sweeping to help make progress
never@1458 167 // clearing space in the code cache.
never@1458 168 if ((_current == NULL && !_rescan) && !(UseCodeCacheFlushing && !CompileBroker::should_compile_new_jobs())) {
never@1458 169 _do_sweep = false;
never@1458 170 return;
never@1458 171 }
duke@0 172
duke@0 173 // Make sure the CompiledIC_lock is unlocked, since we might update some
duke@0 174 // inline caches. If it is locked, we just bail out and try later.
duke@0 175 if (CompiledIC_lock->is_locked() || Patching_lock->is_locked()) return;
duke@0 176
duke@0 177 // Check for restart
duke@0 178 assert(CodeCache::find_blob_unsafe(_current) == _current, "Sweeper nmethod cached state invalid");
duke@0 179 if (_current == NULL) {
duke@0 180 _seen = 0;
duke@0 181 _invocations = NmethodSweepFraction;
never@1458 182 _current = CodeCache::first_nmethod();
duke@0 183 _traversals += 1;
duke@0 184 if (PrintMethodFlushing) {
duke@0 185 tty->print_cr("### Sweep: stack traversal %ld", _traversals);
duke@0 186 }
jrose@989 187 Threads::nmethods_do(&mark_activation_closure);
duke@0 188
duke@0 189 // reset the flags since we started a scan from the beginning.
duke@0 190 _rescan = false;
duke@0 191 _locked_seen = 0;
duke@0 192 _not_entrant_seen_on_stack = 0;
duke@0 193 }
duke@0 194
kvn@1202 195 if (UseCodeCacheFlushing) {
kvn@1202 196 if (!CodeCache::needs_flushing()) {
never@1458 197 // scan_stacks() runs during a safepoint, no race with setters
kvn@1202 198 _advise_to_sweep = 0;
kvn@1202 199 }
kvn@1202 200
kvn@1202 201 if (was_full()) {
kvn@1202 202 // There was some progress so attempt to restart the compiler
kvn@1202 203 jlong now = os::javaTimeMillis();
kvn@1202 204 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
kvn@1202 205 jlong curr_interval = now - _last_was_full;
kvn@1202 206 if ((!CodeCache::needs_flushing()) && (curr_interval > max_interval)) {
kvn@1202 207 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
kvn@1202 208 set_was_full(false);
kvn@1202 209
kvn@1202 210 // Update the _last_was_full time so we can tell how fast the
kvn@1202 211 // code cache is filling up
kvn@1202 212 _last_was_full = os::javaTimeMillis();
kvn@1202 213
never@1564 214 log_sweep("restart_compiler");
kvn@1202 215 }
kvn@1202 216 }
kvn@1202 217 }
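// Worked example of the restart check above (example numbers only): with
// MinCodeCacheFlushingInterval taken to be 30 seconds, max_interval is
// 30 * 1000 = 30000 ms.  If the last "was full" event happened 45000 ms ago and
// the code cache no longer needs flushing, curr_interval (45000) exceeds
// max_interval, so compilation is re-enabled and _last_was_full is reset.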
duke@0 218 }
duke@0 219
never@1458 220 void NMethodSweeper::possibly_sweep() {
never@1564 221 assert(JavaThread::current()->thread_state() == _thread_in_vm, "must run in vm mode");
never@1458 222 if ((!MethodFlushing) || (!_do_sweep)) return;
never@1458 223
never@1458 224 if (_invocations > 0) {
never@1458 225 // Only one thread at a time will sweep
never@1458 226 jint old = Atomic::cmpxchg( 1, &_sweep_started, 0 );
never@1458 227 if (old != 0) {
never@1458 228 return;
never@1458 229 }
never@2481 230 #ifdef ASSERT
never@2481 231 if (LogSweeper && _records == NULL) {
never@2481 232 // Create the ring buffer for the logging code
zgu@3465 233 _records = NEW_C_HEAP_ARRAY(SweeperRecord, SweeperLogEntries, mtGC);
never@2481 234 memset(_records, 0, sizeof(SweeperRecord) * SweeperLogEntries);
never@2481 235 }
never@2481 236 #endif
never@1564 237 if (_invocations > 0) {
never@1564 238 sweep_code_cache();
never@1564 239 _invocations--;
never@1564 240 }
never@1564 241 _sweep_started = 0;
never@1458 242 }
never@1458 243 }
never@1458 244
never@1458 245 void NMethodSweeper::sweep_code_cache() {
never@1458 246 #ifdef ASSERT
never@1458 247 jlong sweep_start;
never@1564 248 if (PrintMethodFlushing) {
never@1458 249 sweep_start = os::javaTimeMillis();
never@1458 250 }
never@1458 251 #endif
never@1458 252 if (PrintMethodFlushing && Verbose) {
never@1564 253 tty->print_cr("### Sweep at %d out of %d. Invocations left: %d", _seen, CodeCache::nof_nmethods(), _invocations);
never@1458 254 }
never@1458 255
never@1564 256 // We want to visit all nmethods after NmethodSweepFraction
never@1564 257 // invocations so divide the remaining number of nmethods by the
never@1564 258 // remaining number of invocations. This is only an estimate since
never@1564 259 // the number of nmethods changes during the sweep so the final
never@1564 260 // stage must iterate until there are no more nmethods.
never@1564 261 int todo = (CodeCache::nof_nmethods() - _seen) / _invocations;
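// Example of the arithmetic above (example numbers only): with 3200 nmethods in
// the code cache, _seen == 0 and NmethodSweepFraction == 4, the first invocation
// gets todo = 3200 / 4 = 800; the next, roughly (3200 - 800) / 3 = 800, and so on.
// These are only estimates, which is why the loop below also keeps going whenever
// _invocations == 1 until _current reaches NULL.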
never@1458 262
never@1458 263 assert(!SafepointSynchronize::is_at_safepoint(), "should not be in safepoint when we get here");
never@1458 264 assert(!CodeCache_lock->owned_by_self(), "just checking");
never@1458 265
never@1458 266 {
never@1458 267 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
never@1458 268
never@1564 269 // The last invocation iterates until there are no more nmethods
never@1564 270 for (int i = 0; (i < todo || _invocations == 1) && _current != NULL; i++) {
iveresov@3137 271 if (SafepointSynchronize::is_synchronizing()) { // Safepoint request
iveresov@3137 272 if (PrintMethodFlushing && Verbose) {
iveresov@3137 273 tty->print_cr("### Sweep at %d out of %d, invocation: %d, yielding to safepoint", _seen, CodeCache::nof_nmethods(), _invocations);
iveresov@3137 274 }
iveresov@3137 275 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
never@1458 276
iveresov@3137 277 assert(Thread::current()->is_Java_thread(), "should be java thread");
iveresov@3137 278 JavaThread* thread = (JavaThread*)Thread::current();
iveresov@3137 279 ThreadBlockInVM tbivm(thread);
iveresov@3137 280 thread->java_suspend_self();
iveresov@3137 281 }
never@1564 282 // Since we will give up the CodeCache_lock, always skip ahead
never@1564 283 // to the next nmethod. Other blobs can be deleted by other
never@1564 284 // threads but nmethods are only reclaimed by the sweeper.
never@1535 285 nmethod* next = CodeCache::next_nmethod(_current);
never@1458 286
never@1458 287 // Now ready to process nmethod and give up CodeCache_lock
never@1458 288 {
never@1458 289 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
never@1535 290 process_nmethod(_current);
never@1458 291 }
never@1458 292 _seen++;
never@1458 293 _current = next;
never@1458 294 }
never@1458 295 }
never@1458 296
never@1564 297 assert(_invocations > 1 || _current == NULL, "must have scanned the whole cache");
never@1564 298
never@1458 299 if (_current == NULL && !_rescan && (_locked_seen || _not_entrant_seen_on_stack)) {
never@1458 300 // We've completed a scan without making progress, but there were
never@1458 301 // nmethods we were unable to process either because they were
never@1458 302 // locked or were still on stack. We don't have to aggressively
never@1458 303 // clean them up so just stop scanning. We could scan once more
never@1458 304 // but that complicates the control logic and it's unlikely to
never@1458 305 // matter much.
never@1458 306 if (PrintMethodFlushing) {
never@1458 307 tty->print_cr("### Couldn't make progress on some nmethods so stopping sweep");
never@1458 308 }
never@1458 309 }
never@1458 310
never@1458 311 #ifdef ASSERT
never@1458 312 if (PrintMethodFlushing) {
never@1458 313 jlong sweep_end = os::javaTimeMillis();
never@1458 314 tty->print_cr("### sweeper: sweep time(%d): " INT64_FORMAT, _invocations, sweep_end - sweep_start);
never@1458 315 }
never@1458 316 #endif
never@1564 317
never@1564 318 if (_invocations == 1) {
never@1564 319 log_sweep("finished");
never@1564 320 }
never@1458 321 }
never@1458 322
never@2481 323 class NMethodMarker: public StackObj {
never@2481 324 private:
never@2481 325 CompilerThread* _thread;
never@2481 326 public:
never@2481 327 NMethodMarker(nmethod* nm) {
never@2481 328 _thread = CompilerThread::current();
coleenp@3602 329 if (!nm->is_zombie() && !nm->is_unloaded()) {
coleenp@3602 330 // Only expose live nmethods for scanning
never@2481 331 _thread->set_scanned_nmethod(nm);
never@2481 332 }
coleenp@3602 333 }
never@2481 334 ~NMethodMarker() {
never@2481 335 _thread->set_scanned_nmethod(NULL);
never@2481 336 }
never@2481 337 };
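
// NMethodMarker is a stack-allocated (RAII) guard: registering the nmethod with
// the current CompilerThread is assumed to keep it visible to any safepoint or GC
// that happens while process_nmethod() below holds locks, and the destructor
// clears the registration once the sweeper is done with it.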
never@2481 338
coleenp@3602 339 void NMethodSweeper::release_nmethod(nmethod *nm) {
coleenp@3602 340 // Clean up any CompiledICHolders
coleenp@3602 341 {
coleenp@3602 342 ResourceMark rm;
coleenp@3602 343 MutexLocker ml_patch(CompiledIC_lock);
coleenp@3602 344 RelocIterator iter(nm);
coleenp@3602 345 while (iter.next()) {
coleenp@3602 346 if (iter.type() == relocInfo::virtual_call_type) {
coleenp@3602 347 CompiledIC::cleanup_call_site(iter.virtual_call_reloc());
coleenp@3602 348 }
coleenp@3602 349 }
coleenp@3602 350 }
coleenp@3602 351
coleenp@3602 352 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
coleenp@3602 353 nm->flush();
coleenp@3602 354 }
duke@0 355
duke@0 356 void NMethodSweeper::process_nmethod(nmethod *nm) {
never@1458 357 assert(!CodeCache_lock->owned_by_self(), "just checking");
never@1458 358
never@2481 359 // Make sure this nmethod doesn't get unloaded during the scan,
never@2481 360 // since the locks acquired below might safepoint.
never@2481 361 NMethodMarker nmm(nm);
never@2481 362
never@2481 363 SWEEP(nm);
never@2481 364
duke@0 365 // Skip methods that are currently referenced by the VM
duke@0 366 if (nm->is_locked_by_vm()) {
duke@0 367 // But still remember to clean up inline caches for alive nmethods
duke@0 368 if (nm->is_alive()) {
duke@0 369 // Clean up all inline caches that point to zombie/non-entrant methods
never@1458 370 MutexLocker cl(CompiledIC_lock);
duke@0 371 nm->cleanup_inline_caches();
never@2481 372 SWEEP(nm);
duke@0 373 } else {
duke@0 374 _locked_seen++;
never@2481 375 SWEEP(nm);
duke@0 376 }
duke@0 377 return;
duke@0 378 }
duke@0 379
duke@0 380 if (nm->is_zombie()) {
duke@0 381 // If this is the first time we see this nmethod, we mark it. Otherwise,
duke@0 382 // we reclaim it. Once we have seen a zombie method twice, we know that
never@1564 383 // there are no inline caches that refer to it.
duke@0 384 if (nm->is_marked_for_reclamation()) {
duke@0 385 assert(!nm->is_locked_by_vm(), "must not flush locked nmethods");
ysr@941 386 if (PrintMethodFlushing && Verbose) {
kvn@1202 387 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (marked for reclamation) being flushed", nm->compile_id(), nm);
ysr@941 388 }
coleenp@3602 389 release_nmethod(nm);
duke@0 390 } else {
ysr@941 391 if (PrintMethodFlushing && Verbose) {
kvn@1202 392 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (zombie) being marked for reclamation", nm->compile_id(), nm);
ysr@941 393 }
duke@0 394 nm->mark_for_reclamation();
duke@0 395 _rescan = true;
never@2481 396 SWEEP(nm);
duke@0 397 }
duke@0 398 } else if (nm->is_not_entrant()) {
duke@0 399 // If there are no current activations of this method on the
duke@0 400 // stack, we can safely convert it to a zombie method
duke@0 401 if (nm->can_not_entrant_be_converted()) {
ysr@941 402 if (PrintMethodFlushing && Verbose) {
kvn@1202 403 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
ysr@941 404 }
duke@0 405 nm->make_zombie();
duke@0 406 _rescan = true;
never@2481 407 SWEEP(nm);
duke@0 408 } else {
duke@0 409 // Still alive, clean up its inline caches
never@1458 410 MutexLocker cl(CompiledIC_lock);
duke@0 411 nm->cleanup_inline_caches();
duke@0 412 // We couldn't transition this nmethod, so don't immediately
duke@0 413 // request a rescan. If this method stays on the stack for a
never@1458 414 // long time we don't want to keep rescanning the code cache.
duke@0 415 _not_entrant_seen_on_stack++;
never@2481 416 SWEEP(nm);
duke@0 417 }
duke@0 418 } else if (nm->is_unloaded()) {
duke@0 419 // Unloaded code, just make it a zombie
ysr@941 420 if (PrintMethodFlushing && Verbose)
kvn@1202 421 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (unloaded) being made zombie", nm->compile_id(), nm);
ysr@941 422 if (nm->is_osr_method()) {
coleenp@3602 423 SWEEP(nm);
duke@0 424 // No inline caches will ever point to osr methods, so we can just remove it
coleenp@3602 425 release_nmethod(nm);
duke@0 426 } else {
duke@0 427 nm->make_zombie();
duke@0 428 _rescan = true;
never@2481 429 SWEEP(nm);
duke@0 430 }
duke@0 431 } else {
duke@0 432 assert(nm->is_alive(), "should be alive");
kvn@1202 433
kvn@1202 434 if (UseCodeCacheFlushing) {
kvn@1202 435 if ((nm->method()->code() != nm) && !(nm->is_locked_by_vm()) && !(nm->is_osr_method()) &&
kvn@1202 436 (_traversals > _was_full_traversal+2) && (((uint)nm->compile_id()) < _highest_marked) &&
kvn@1202 437 CodeCache::needs_flushing()) {
kvn@1202 438 // This method has not been called since the forced cleanup happened
kvn@1202 439 nm->make_not_entrant();
kvn@1202 440 }
kvn@1202 441 }
kvn@1202 442
duke@0 443 // Clean up all inline caches that point to zombie/non-entrant methods
never@1458 444 MutexLocker cl(CompiledIC_lock);
duke@0 445 nm->cleanup_inline_caches();
never@2481 446 SWEEP(nm);
duke@0 447 }
duke@0 448 }
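
// Summary of the transitions handled above (descriptive only): an alive nmethod
// has its inline caches cleaned; a not-entrant nmethod with no activations is made
// a zombie (otherwise the sweeper waits for a later pass); an unloaded nmethod is
// made a zombie, or released immediately if it is an OSR method; a zombie is first
// marked for reclamation and then flushed on a subsequent pass, once no inline
// caches can refer to it any more.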
kvn@1202 449
kvn@1202 450 // Code cache unloading: when compilers notice the code cache is getting full,
kvn@1202 451 // they will call a vm op that comes here. This code attempts to speculatively
kvn@1202 452 // unload the oldest half of the nmethods (based on the compile job id) by
kvn@1202 453 // saving the old code in a list in the CodeCache. Then
never@1458 454 // execution resumes. If a method so marked is not called by the second sweeper
never@1458 455 // stack traversal after the current one, the nmethod will be marked non-entrant and
coleenp@3602 456 // removed by normal sweeping. If the method is called, the Method*'s
coleenp@3602 457 // _code field is restored and the Method*/nmethod
kvn@1202 458 // go back to their normal state.
kvn@1202 459 void NMethodSweeper::handle_full_code_cache(bool is_full) {
kvn@1202 460 // Only the first one to notice can advise us to start early cleaning
kvn@1202 461 if (!is_full){
kvn@1202 462 jint old = Atomic::cmpxchg( 1, &_advise_to_sweep, 0 );
kvn@1202 463 if (old != 0) {
kvn@1202 464 return;
kvn@1202 465 }
kvn@1202 466 }
kvn@1202 467
kvn@1202 468 if (is_full) {
kvn@1202 469 // Since the code cache is full, immediately stop new compiles
kvn@1202 470 bool did_set = CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
kvn@1202 471 if (!did_set) {
kvn@1202 472 // only the first to notice can start the cleaning,
kvn@1202 473 // others will go back and block
kvn@1202 474 return;
kvn@1202 475 }
kvn@1202 476 set_was_full(true);
kvn@1202 477
kvn@1202 478 // If we run out within MinCodeCacheFlushingInterval of the last unload time, give up
kvn@1202 479 jlong now = os::javaTimeMillis();
kvn@1202 480 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
kvn@1202 481 jlong curr_interval = now - _last_was_full;
kvn@1202 482 if (curr_interval < max_interval) {
kvn@1202 483 _rescan = true;
never@1564 484 log_sweep("disable_compiler", "flushing_interval='" UINT64_FORMAT "'",
never@1564 485 curr_interval/1000);
kvn@1202 486 return;
kvn@1202 487 }
kvn@1202 488 }
kvn@1202 489
kvn@1202 490 VM_HandleFullCodeCache op(is_full);
kvn@1202 491 VMThread::execute(&op);
kvn@1202 492
kvn@1202 493 // rescan again as soon as possible
kvn@1202 494 _rescan = true;
kvn@1202 495 }
kvn@1202 496
kvn@1202 497 void NMethodSweeper::speculative_disconnect_nmethods(bool is_full) {
kvn@1202 498 // If there was a race in detecting full code cache, only run
kvn@1202 499 // one vm op for it or keep the compiler shut off
kvn@1202 500
kvn@1202 501 debug_only(jlong start = os::javaTimeMillis();)
kvn@1202 502
kvn@1202 503 if ((!was_full()) && (is_full)) {
kvn@1202 504 if (!CodeCache::needs_flushing()) {
never@1564 505 log_sweep("restart_compiler");
kvn@1202 506 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
kvn@1202 507 return;
kvn@1202 508 }
kvn@1202 509 }
kvn@1202 510
kvn@1202 511 // Traverse the code cache trying to dump the oldest nmethods
kvn@1202 512 uint curr_max_comp_id = CompileBroker::get_compilation_id();
kvn@1202 513 uint flush_target = ((curr_max_comp_id - _highest_marked) >> 1) + _highest_marked;
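// Example of the flush_target computation (example numbers only): with
// curr_max_comp_id == 1000 and _highest_marked == 200, flush_target is
// ((1000 - 200) >> 1) + 200 == 600, so roughly the older half of the compiled
// methods (compile ids below 600) become candidates for disconnection below.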
never@1564 514 log_sweep("start_cleaning");
kvn@1202 515
kvn@1202 516 nmethod* nm = CodeCache::alive_nmethod(CodeCache::first());
kvn@1202 517 jint disconnected = 0;
kvn@1202 518 jint made_not_entrant = 0;
kvn@1202 519 while ((nm != NULL)){
kvn@1202 520 uint curr_comp_id = nm->compile_id();
kvn@1202 521
kvn@1202 522 // OSR methods cannot be flushed like this. Also, don't flush native methods
kvn@1202 523 // since they are part of the JDK in most cases
kvn@1202 524 if (nm->is_in_use() && (!nm->is_osr_method()) && (!nm->is_locked_by_vm()) &&
kvn@1202 525 (!nm->is_native_method()) && ((curr_comp_id < flush_target))) {
kvn@1202 526
kvn@1202 527 if ((nm->method()->code() == nm)) {
kvn@1202 528 // This method has not been previously considered for
kvn@1202 529 // unloading or it was restored already
kvn@1202 530 CodeCache::speculatively_disconnect(nm);
kvn@1202 531 disconnected++;
kvn@1202 532 } else if (nm->is_speculatively_disconnected()) {
kvn@1202 533 // This method was previously considered for preemptive unloading and was not called since then
iveresov@1703 534 CompilationPolicy::policy()->delay_compilation(nm->method());
kvn@1202 535 nm->make_not_entrant();
kvn@1202 536 made_not_entrant++;
kvn@1202 537 }
kvn@1202 538
kvn@1202 539 if (curr_comp_id > _highest_marked) {
kvn@1202 540 _highest_marked = curr_comp_id;
kvn@1202 541 }
kvn@1202 542 }
kvn@1202 543 nm = CodeCache::alive_nmethod(CodeCache::next(nm));
kvn@1202 544 }
kvn@1202 545
never@1564 546 log_sweep("stop_cleaning",
never@1564 547 "disconnected='" UINT32_FORMAT "' made_not_entrant='" UINT32_FORMAT "'",
never@1564 548 disconnected, made_not_entrant);
kvn@1202 549
never@1458 550 // Shut off compiler. Sweeper will start over with a new stack scan and
never@1458 551 // traversal cycle and turn it back on if it clears enough space.
kvn@1202 552 if (was_full()) {
kvn@1202 553 _last_was_full = os::javaTimeMillis();
kvn@1202 554 CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
kvn@1202 555 }
kvn@1202 556
kvn@1202 557 // After two more traversals the sweeper will get rid of unrestored nmethods
kvn@1202 558 _was_full_traversal = _traversals;
kvn@1202 559 #ifdef ASSERT
kvn@1202 560 jlong end = os::javaTimeMillis();
kvn@1202 561 if (PrintMethodFlushing && Verbose) {
kvn@1202 562 tty->print_cr("### sweeper: unload time: " INT64_FORMAT, end-start);
kvn@1202 563 }
kvn@1202 564 #endif
kvn@1202 565 }
never@1564 566
never@1564 567
never@1564 568 // Print out some state information about the current sweep and the
never@1564 569 // state of the code cache, if requested.
never@1564 570 void NMethodSweeper::log_sweep(const char* msg, const char* format, ...) {
never@1564 571 if (PrintMethodFlushing) {
iveresov@2329 572 stringStream s;
iveresov@2329 573 // Dump code cache state into a buffer before locking the tty,
iveresov@2329 574 // because log_state() will use locks causing lock conflicts.
iveresov@2329 575 CodeCache::log_state(&s);
iveresov@2329 576
never@1564 577 ttyLocker ttyl;
never@1564 578 tty->print("### sweeper: %s ", msg);
never@1564 579 if (format != NULL) {
never@1564 580 va_list ap;
never@1564 581 va_start(ap, format);
never@1564 582 tty->vprint(format, ap);
never@1564 583 va_end(ap);
never@1564 584 }
iveresov@2329 585 tty->print_cr(s.as_string());
never@1564 586 }
never@1564 587
never@1564 588 if (LogCompilation && (xtty != NULL)) {
iveresov@2329 589 stringStream s;
iveresov@2329 590 // Dump code cache state into a buffer before locking the tty,
iveresov@2329 591 // because log_state() will use locks causing lock conflicts.
iveresov@2329 592 CodeCache::log_state(&s);
iveresov@2329 593
never@1564 594 ttyLocker ttyl;
never@1566 595 xtty->begin_elem("sweeper state='%s' traversals='" INTX_FORMAT "' ", msg, (intx)traversal_count());
never@1564 596 if (format != NULL) {
never@1564 597 va_list ap;
never@1564 598 va_start(ap, format);
never@1564 599 xtty->vprint(format, ap);
never@1564 600 va_end(ap);
never@1564 601 }
iveresov@2329 602 xtty->print(s.as_string());
never@1564 603 xtty->stamp();
never@1564 604 xtty->end_elem();
never@1564 605 }
never@1564 606 }
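
// For reference, the output produced above is roughly as follows: with
// PrintMethodFlushing, a tty line of the form "### sweeper: finished <code cache state>";
// with LogCompilation, an XML element along the lines of
//   <sweeper state='finished' traversals='42' ... stamp='...'/>
// where the attribute values shown here are placeholders.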