annotate src/share/vm/runtime/sweeper.cpp @ 1458:bfe29ec02863

6950075: nmethod sweeper should operate concurrently
Reviewed-by: never, kvn
Contributed-by: eric.caspole@amd.com
author never
date Mon, 17 May 2010 16:50:07 -0700
parents 5f24d0319e54
children c18cbe5936b8
rev   line source
duke@0 1 /*
xdono@948 2 * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
duke@0 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@0 4 *
duke@0 5 * This code is free software; you can redistribute it and/or modify it
duke@0 6 * under the terms of the GNU General Public License version 2 only, as
duke@0 7 * published by the Free Software Foundation.
duke@0 8 *
duke@0 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@0 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@0 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@0 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@0 13 * accompanied this code).
duke@0 14 *
duke@0 15 * You should have received a copy of the GNU General Public License version
duke@0 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@0 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@0 18 *
duke@0 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
duke@0 20 * CA 95054 USA or visit www.sun.com if you need additional information or
duke@0 21 * have any questions.
duke@0 22 *
duke@0 23 */
duke@0 24
duke@0 25 # include "incls/_precompiled.incl"
duke@0 26 # include "incls/_sweeper.cpp.incl"
duke@0 27
duke@0 28 long NMethodSweeper::_traversals = 0; // No. of stack traversals performed
duke@0 29 CodeBlob* NMethodSweeper::_current = NULL; // Current nmethod
duke@0 30 int NMethodSweeper::_seen = 0; // No. of blobs we have processed so far in the current pass of the CodeCache
duke@0 31 int NMethodSweeper::_invocations = 0; // No. of invocations left until this pass is complete
duke@0 32
duke@0 33 jint NMethodSweeper::_locked_seen = 0; // No. of locked nmethods encountered during the scan
duke@0 34 jint NMethodSweeper::_not_entrant_seen_on_stack = 0; // No. of not-entrant nmethods still seen on the stack
duke@0 35 bool NMethodSweeper::_rescan = false; // Set when an nmethod state change means another sweep pass is needed
never@1458 36 bool NMethodSweeper::_do_sweep = false; // Whether a sweep may run during the current scan cycle
never@1458 37 jint NMethodSweeper::_sweep_started = 0; // Claimed via cmpxchg so only one thread sweeps at a time
kvn@1202 38 bool NMethodSweeper::_was_full = false; // Whether the code cache filled up recently
kvn@1202 39 jint NMethodSweeper::_advise_to_sweep = 0; // Set when a compiler advises early cleaning because the code cache is filling up
kvn@1202 40 jlong NMethodSweeper::_last_was_full = 0; // Time (ms) at which the code cache was last full
kvn@1202 41 uint NMethodSweeper::_highest_marked = 0; // Highest compile id considered by the last speculative disconnect
kvn@1202 42 long NMethodSweeper::_was_full_traversal = 0; // Value of _traversals at the last speculative disconnect
duke@0 43
jrose@989 44 class MarkActivationClosure: public CodeBlobClosure {
jrose@989 45 public:
jrose@989 46 virtual void do_code_blob(CodeBlob* cb) {
jrose@989 47 // If we see an activation belonging to a non_entrant nmethod, we mark it.
jrose@989 48 if (cb->is_nmethod() && ((nmethod*)cb)->is_not_entrant()) {
jrose@989 49 ((nmethod*)cb)->mark_as_seen_on_stack();
jrose@989 50 }
jrose@989 51 }
jrose@989 52 };
jrose@989 53 static MarkActivationClosure mark_activation_closure;
jrose@989 54
never@1458 55 void NMethodSweeper::scan_stacks() {
duke@0 56 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
duke@0 57 if (!MethodFlushing) return;
never@1458 58 _do_sweep = true;
duke@0 59
duke@0 60 // No need to synchronize access, since this is always executed at a
duke@0 61 // safepoint. If we aren't in the middle of a scan and a rescan
never@1458 62 // hasn't been requested then just return. However, if UseCodeCacheFlushing is
never@1458 63 // on and code cache flushing is in progress, don't skip the sweep; it helps
never@1458 64 // make progress clearing space in the code cache.
never@1458 65 if ((_current == NULL && !_rescan) && !(UseCodeCacheFlushing && !CompileBroker::should_compile_new_jobs())) {
never@1458 66 _do_sweep = false;
never@1458 67 return;
never@1458 68 }
duke@0 69
duke@0 70 // Make sure the CompiledIC_lock is unlocked, since we might update some
duke@0 71 // inline caches. If it is locked, we just bail out and try again later.
duke@0 72 if (CompiledIC_lock->is_locked() || Patching_lock->is_locked()) return;
duke@0 73
duke@0 74 // Check for restart
duke@0 75 assert(CodeCache::find_blob_unsafe(_current) == _current, "Sweeper nmethod cached state invalid");
duke@0 76 if (_current == NULL) {
duke@0 77 _seen = 0;
duke@0 78 _invocations = NmethodSweepFraction;
never@1458 79 _current = CodeCache::first_nmethod();
duke@0 80 _traversals += 1;
duke@0 81 if (PrintMethodFlushing) {
duke@0 82 tty->print_cr("### Sweep: stack traversal %d", _traversals);
duke@0 83 }
jrose@989 84 Threads::nmethods_do(&mark_activation_closure);
duke@0 85
duke@0 86 // reset the flags since we started a scan from the beginning.
duke@0 87 _rescan = false;
duke@0 88 _locked_seen = 0;
duke@0 89 _not_entrant_seen_on_stack = 0;
duke@0 90 }
duke@0 91
kvn@1202 92 if (UseCodeCacheFlushing) {
kvn@1202 93 if (!CodeCache::needs_flushing()) {
never@1458 94 // scan_stacks() runs during a safepoint, no race with setters
kvn@1202 95 _advise_to_sweep = 0;
kvn@1202 96 }
kvn@1202 97
kvn@1202 98 if (was_full()) {
kvn@1202 99 // There was some progress so attempt to restart the compiler
kvn@1202 100 jlong now = os::javaTimeMillis();
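              // MinCodeCacheFlushingInterval is in seconds; convert it to milliseconds to compare with the javaTimeMillis() delta below.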
kvn@1202 101 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
kvn@1202 102 jlong curr_interval = now - _last_was_full;
kvn@1202 103 if ((!CodeCache::needs_flushing()) && (curr_interval > max_interval)) {
kvn@1202 104 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
kvn@1202 105 set_was_full(false);
kvn@1202 106
kvn@1202 107 // Update the _last_was_full time so we can tell how fast the
kvn@1202 108 // code cache is filling up
kvn@1202 109 _last_was_full = os::javaTimeMillis();
kvn@1202 110
kvn@1202 111 if (PrintMethodFlushing) {
kvn@1202 112 tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
kvn@1202 113 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 114 }
kvn@1202 115 if (LogCompilation && (xtty != NULL)) {
kvn@1202 116 ttyLocker ttyl;
kvn@1202 117 xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1202 118 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 119 xtty->stamp();
kvn@1202 120 xtty->end_elem();
kvn@1202 121 }
kvn@1202 122 }
kvn@1202 123 }
kvn@1202 124 }
duke@0 125 }
duke@0 126
never@1458 127 void NMethodSweeper::possibly_sweep() {
never@1458 128 if ((!MethodFlushing) || (!_do_sweep)) return;
never@1458 129
never@1458 130 if (_invocations > 0) {
never@1458 131 // Only one thread at a time will sweep
never@1458 132 jint old = Atomic::cmpxchg( 1, &_sweep_started, 0 );
never@1458 133 if (old != 0) {
never@1458 134 return;
never@1458 135 }
never@1458 136 sweep_code_cache();
never@1458 137 }
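              // Clear the claim so a sweep can be attempted again on the next invocation.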
never@1458 138 _sweep_started = 0;
never@1458 139 }
never@1458 140
never@1458 141 void NMethodSweeper::sweep_code_cache() {
never@1458 142 #ifdef ASSERT
never@1458 143 jlong sweep_start;
never@1458 144 if(PrintMethodFlushing) {
never@1458 145 sweep_start = os::javaTimeMillis();
never@1458 146 }
never@1458 147 #endif
never@1458 148 if (PrintMethodFlushing && Verbose) {
never@1458 149 tty->print_cr("### Sweep at %d out of %d. Invocations left: %d", _seen, CodeCache::nof_blobs(), _invocations);
never@1458 150 }
never@1458 151
never@1458 152 // We want to visit all nmethods after NmethodSweepFraction invocations.
never@1458 153 // If _invocations is 1, we do all the remaining blobs.
never@1458 154 int todo = CodeCache::nof_blobs();
never@1458 155 if (_invocations > 1) {
never@1458 156 todo = (CodeCache::nof_blobs() - _seen) / _invocations;
never@1458 157 }
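              // E.g. with NmethodSweepFraction = 4, each invocation covers roughly a quarter of the blobs and the last invocation sweeps whatever remains.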
never@1458 158
never@1458 159 // Compilers may check whether to sweep more often than stack scans happen;
never@1458 160 // don't keep trying once everything has been scanned.
never@1458 161 _invocations--;
never@1458 162
never@1458 163 assert(!SafepointSynchronize::is_at_safepoint(), "should not be in safepoint when we get here");
never@1458 164 assert(!CodeCache_lock->owned_by_self(), "just checking");
never@1458 165
never@1458 166 {
never@1458 167 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
never@1458 168
never@1458 169 for(int i = 0; i < todo && _current != NULL; i++) {
never@1458 170
never@1458 171 // Since we will give up the CodeCache_lock, always skip ahead to an nmethod.
never@1458 172 // Other blobs can be deleted by other threads.
never@1458 173 // Read the next nmethod before we potentially delete the current one.
never@1458 174 CodeBlob* next = CodeCache::next_nmethod(_current);
never@1458 175
never@1458 176 // Now ready to process nmethod and give up CodeCache_lock
never@1458 177 {
never@1458 178 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
never@1458 179 process_nmethod((nmethod *)_current);
never@1458 180 }
never@1458 181 _seen++;
never@1458 182 _current = next;
never@1458 183 }
never@1458 184
never@1458 185 // Skip forward to the next nmethod (if any). Code blobs other than nmethods
never@1458 186 // can be freed asynchronously to us and make _current invalid while we sleep.
never@1458 187 _current = CodeCache::next_nmethod(_current);
never@1458 188 }
never@1458 189
never@1458 190 if (_current == NULL && !_rescan && (_locked_seen || _not_entrant_seen_on_stack)) {
never@1458 191 // We've completed a scan without making progress, but there were
never@1458 192 // nmethods we were unable to process either because they were
never@1458 193 // locked or were still on the stack. We don't have to aggressively
never@1458 194 // clean them up, so just stop scanning. We could scan once more,
never@1458 195 // but that complicates the control logic and it's unlikely to
never@1458 196 // matter much.
never@1458 197 if (PrintMethodFlushing) {
never@1458 198 tty->print_cr("### Couldn't make progress on some nmethods so stopping sweep");
never@1458 199 }
never@1458 200 }
never@1458 201
never@1458 202 #ifdef ASSERT
never@1458 203 if(PrintMethodFlushing) {
never@1458 204 jlong sweep_end = os::javaTimeMillis();
never@1458 205 tty->print_cr("### sweeper: sweep time(%d): " INT64_FORMAT, _invocations, sweep_end - sweep_start);
never@1458 206 }
never@1458 207 #endif
never@1458 208 }
never@1458 209
duke@0 210
duke@0 211 void NMethodSweeper::process_nmethod(nmethod *nm) {
never@1458 212 assert(!CodeCache_lock->owned_by_self(), "just checking");
never@1458 213
duke@0 214 // Skip methods that are currently referenced by the VM
duke@0 215 if (nm->is_locked_by_vm()) {
duke@0 216 // But still remember to clean up inline caches for alive nmethods
duke@0 217 if (nm->is_alive()) {
duke@0 218 // Clean up all inline caches that point to zombie/not-entrant nmethods
never@1458 219 MutexLocker cl(CompiledIC_lock);
duke@0 220 nm->cleanup_inline_caches();
duke@0 221 } else {
duke@0 222 _locked_seen++;
duke@0 223 }
duke@0 224 return;
duke@0 225 }
duke@0 226
duke@0 227 if (nm->is_zombie()) {
duke@0 228 // The first time we see an nmethod we mark it. Otherwise,
duke@0 229 // we reclaim it. Once we have seen a zombie method twice, we know that
duke@0 230 // there are no inline caches that refer to it.
duke@0 231 if (nm->is_marked_for_reclamation()) {
duke@0 232 assert(!nm->is_locked_by_vm(), "must not flush locked nmethods");
ysr@941 233 if (PrintMethodFlushing && Verbose) {
kvn@1202 234 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (marked for reclamation) being flushed", nm->compile_id(), nm);
ysr@941 235 }
never@1458 236 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
duke@0 237 nm->flush();
duke@0 238 } else {
ysr@941 239 if (PrintMethodFlushing && Verbose) {
kvn@1202 240 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (zombie) being marked for reclamation", nm->compile_id(), nm);
ysr@941 241 }
duke@0 242 nm->mark_for_reclamation();
duke@0 243 _rescan = true;
duke@0 244 }
duke@0 245 } else if (nm->is_not_entrant()) {
duke@0 246 // If there are no current activations of this method on the
duke@0 247 // stack, we can safely convert it to a zombie method
duke@0 248 if (nm->can_not_entrant_be_converted()) {
ysr@941 249 if (PrintMethodFlushing && Verbose) {
kvn@1202 250 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
ysr@941 251 }
duke@0 252 nm->make_zombie();
duke@0 253 _rescan = true;
duke@0 254 } else {
duke@0 255 // Still alive, clean up its inline caches
never@1458 256 MutexLocker cl(CompiledIC_lock);
duke@0 257 nm->cleanup_inline_caches();
duke@0 258 // We couldn't transition this nmethod, so don't immediately
duke@0 259 // request a rescan. If this method stays on the stack for a
never@1458 260 // long time we don't want to keep rescanning the code cache.
duke@0 261 _not_entrant_seen_on_stack++;
duke@0 262 }
duke@0 263 } else if (nm->is_unloaded()) {
duke@0 264 // Unloaded code, just make it a zombie
ysr@941 265 if (PrintMethodFlushing && Verbose)
kvn@1202 266 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (unloaded) being made zombie", nm->compile_id(), nm);
ysr@941 267 if (nm->is_osr_method()) {
duke@0 268 // No inline caches will ever point to osr methods, so we can just remove it
never@1458 269 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
duke@0 270 nm->flush();
duke@0 271 } else {
duke@0 272 nm->make_zombie();
duke@0 273 _rescan = true;
duke@0 274 }
duke@0 275 } else {
duke@0 276 assert(nm->is_alive(), "should be alive");
kvn@1202 277
kvn@1202 278 if (UseCodeCacheFlushing) {
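              // If this nmethod was in the range targeted by the last speculative disconnect,
              // is still detached from its methodOop a couple of traversals later, and the
              // code cache still needs flushing, make it not entrant so normal sweeping reclaims it.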
kvn@1202 279 if ((nm->method()->code() != nm) && !(nm->is_locked_by_vm()) && !(nm->is_osr_method()) &&
kvn@1202 280 (_traversals > _was_full_traversal+2) && (((uint)nm->compile_id()) < _highest_marked) &&
kvn@1202 281 CodeCache::needs_flushing()) {
kvn@1202 282 // This method has not been called since the forced cleanup happened
kvn@1202 283 nm->make_not_entrant();
kvn@1202 284 }
kvn@1202 285 }
kvn@1202 286
duke@0 287 // Clean up all inline caches that point to zombie/not-entrant nmethods
never@1458 288 MutexLocker cl(CompiledIC_lock);
duke@0 289 nm->cleanup_inline_caches();
duke@0 290 }
duke@0 291 }
kvn@1202 292
kvn@1202 293 // Code cache unloading: when compilers notice the code cache is getting full,
kvn@1202 294 // they will call a VM op that comes here. This code attempts to speculatively
kvn@1202 295 // unload the oldest half of the nmethods (based on the compile job id) by
kvn@1202 296 // saving the old code in a list in the CodeCache. Then
never@1458 297 // execution resumes. If a method so marked has not been called by the second
never@1458 298 // sweeper stack traversal after the current one, the nmethod is made not entrant
kvn@1202 299 // and removed by normal sweeping. If the method is called again, the methodOop's
kvn@1202 300 // _code field is restored and the methodOop/nmethod
kvn@1202 301 // go back to their normal state.
kvn@1202 302 void NMethodSweeper::handle_full_code_cache(bool is_full) {
kvn@1202 303 // Only the first one to notice can advise us to start early cleaning
kvn@1202 304 if (!is_full){
kvn@1202 305 jint old = Atomic::cmpxchg( 1, &_advise_to_sweep, 0 );
kvn@1202 306 if (old != 0) {
kvn@1202 307 return;
kvn@1202 308 }
kvn@1202 309 }
kvn@1202 310
kvn@1202 311 if (is_full) {
kvn@1202 312 // Since the code cache is full, immediately stop new compiles
kvn@1202 313 bool did_set = CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
kvn@1202 314 if (!did_set) {
kvn@1202 315 // only the first to notice can start the cleaning,
kvn@1202 316 // others will go back and block
kvn@1202 317 return;
kvn@1202 318 }
kvn@1202 319 set_was_full(true);
kvn@1202 320
kvn@1202 321 // If we ran out within MinCodeCacheFlushingInterval seconds of the last unload time, give up
kvn@1202 322 jlong now = os::javaTimeMillis();
kvn@1202 323 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
kvn@1202 324 jlong curr_interval = now - _last_was_full;
kvn@1202 325 if (curr_interval < max_interval) {
kvn@1202 326 _rescan = true;
kvn@1202 327 if (PrintMethodFlushing) {
kvn@1202 328 tty->print_cr("### handle full too often, turning off compiler");
kvn@1202 329 }
kvn@1202 330 if (LogCompilation && (xtty != NULL)) {
kvn@1202 331 ttyLocker ttyl;
kvn@1202 332 xtty->begin_elem("disable_compiler flushing_interval='" UINT64_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1202 333 curr_interval/1000, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 334 xtty->stamp();
kvn@1202 335 xtty->end_elem();
kvn@1202 336 }
kvn@1202 337 return;
kvn@1202 338 }
kvn@1202 339 }
kvn@1202 340
kvn@1202 341 VM_HandleFullCodeCache op(is_full);
kvn@1202 342 VMThread::execute(&op);
kvn@1202 343
kvn@1202 344 // rescan again as soon as possible
kvn@1202 345 _rescan = true;
kvn@1202 346 }
kvn@1202 347
kvn@1202 348 void NMethodSweeper::speculative_disconnect_nmethods(bool is_full) {
kvn@1202 349 // If there was a race in detecting the full code cache, only run
kvn@1202 350 // one VM op for it or keep the compiler shut off
kvn@1202 351
kvn@1202 352 debug_only(jlong start = os::javaTimeMillis();)
kvn@1202 353
kvn@1202 354 if ((!was_full()) && (is_full)) {
kvn@1202 355 if (!CodeCache::needs_flushing()) {
kvn@1202 356 if (PrintMethodFlushing) {
kvn@1202 357 tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
kvn@1202 358 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 359 }
kvn@1202 360 if (LogCompilation && (xtty != NULL)) {
kvn@1202 361 ttyLocker ttyl;
kvn@1202 362 xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1202 363 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 364 xtty->stamp();
kvn@1202 365 xtty->end_elem();
kvn@1202 366 }
kvn@1202 367 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
kvn@1202 368 return;
kvn@1202 369 }
kvn@1202 370 }
kvn@1202 371
kvn@1202 372 // Traverse the code cache trying to dump the oldest nmethods
kvn@1202 373 uint curr_max_comp_id = CompileBroker::get_compilation_id();
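              // Aim at the midpoint between the previous mark point and the newest compile id:
              // nmethods whose compile id falls below flush_target (roughly the older half) are candidates.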
kvn@1202 374 uint flush_target = ((curr_max_comp_id - _highest_marked) >> 1) + _highest_marked;
kvn@1202 375 if (PrintMethodFlushing && Verbose) {
kvn@1202 376 tty->print_cr("### Cleaning code cache: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes",
kvn@1202 377 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 378 }
kvn@1202 379 if (LogCompilation && (xtty != NULL)) {
kvn@1202 380 ttyLocker ttyl;
kvn@1202 381 xtty->begin_elem("start_cleaning_code_cache live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1202 382 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 383 xtty->stamp();
kvn@1202 384 xtty->end_elem();
kvn@1202 385 }
kvn@1202 386
kvn@1202 387 nmethod* nm = CodeCache::alive_nmethod(CodeCache::first());
kvn@1202 388 jint disconnected = 0;
kvn@1202 389 jint made_not_entrant = 0;
kvn@1202 390 while ((nm != NULL)){
kvn@1202 391 uint curr_comp_id = nm->compile_id();
kvn@1202 392
kvn@1202 393 // OSR methods cannot be flushed like this. Also, don't flush native methods
kvn@1202 394 // since they are part of the JDK in most cases
kvn@1202 395 if (nm->is_in_use() && (!nm->is_osr_method()) && (!nm->is_locked_by_vm()) &&
kvn@1202 396 (!nm->is_native_method()) && ((curr_comp_id < flush_target))) {
kvn@1202 397
kvn@1202 398 if ((nm->method()->code() == nm)) {
kvn@1202 399 // This method has not previously been considered for
kvn@1202 400 // unloading, or it has already been restored
kvn@1202 401 CodeCache::speculatively_disconnect(nm);
kvn@1202 402 disconnected++;
kvn@1202 403 } else if (nm->is_speculatively_disconnected()) {
kvn@1202 404 // This method was previously considered for preemptive unloading and has not been called since then
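              // Decay the counters so the method has to warm up again before it can trigger recompilation.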
kvn@1202 405 nm->method()->invocation_counter()->decay();
kvn@1202 406 nm->method()->backedge_counter()->decay();
kvn@1202 407 nm->make_not_entrant();
kvn@1202 408 made_not_entrant++;
kvn@1202 409 }
kvn@1202 410
kvn@1202 411 if (curr_comp_id > _highest_marked) {
kvn@1202 412 _highest_marked = curr_comp_id;
kvn@1202 413 }
kvn@1202 414 }
kvn@1202 415 nm = CodeCache::alive_nmethod(CodeCache::next(nm));
kvn@1202 416 }
kvn@1202 417
kvn@1202 418 if (LogCompilation && (xtty != NULL)) {
kvn@1202 419 ttyLocker ttyl;
kvn@1202 420 xtty->begin_elem("stop_cleaning_code_cache disconnected='" UINT32_FORMAT "' made_not_entrant='" UINT32_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1202 421 disconnected, made_not_entrant, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1202 422 xtty->stamp();
kvn@1202 423 xtty->end_elem();
kvn@1202 424 }
kvn@1202 425
never@1458 426 // Shut off the compiler. The sweeper will start over with a new stack scan and
never@1458 427 // traversal cycle and turn the compiler back on if it clears enough space.
kvn@1202 428 if (was_full()) {
kvn@1202 429 _last_was_full = os::javaTimeMillis();
kvn@1202 430 CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
kvn@1202 431 }
kvn@1202 432
kvn@1202 433 // After two more traversals the sweeper will get rid of unrestored nmethods
kvn@1202 434 _was_full_traversal = _traversals;
kvn@1202 435 #ifdef ASSERT
kvn@1202 436 jlong end = os::javaTimeMillis();
kvn@1202 437 if(PrintMethodFlushing && Verbose) {
kvn@1202 438 tty->print_cr("### sweeper: unload time: " INT64_FORMAT, end-start);
kvn@1202 439 }
kvn@1202 440 #endif
kvn@1202 441 }