src/share/vm/runtime/sweeper.cpp

Thu, 03 Jun 2010 13:21:47 -0400

author
acorn
date
Thu, 03 Jun 2010 13:21:47 -0400
changeset 1942
b96a3e44582f
parent 1893
bfe29ec02863
child 1907
c18cbe5936b8
permissions
-rw-r--r--

6852873: Reduce safepoint cleanup time
Summary: New optional flags to reduce inflated monitor cleanup times
Reviewed-by: chrisphi, dice

duke@435 1 /*
xdono@1383 2 * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
duke@435 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
duke@435 20 * CA 95054 USA or visit www.sun.com if you need additional information or
duke@435 21 * have any questions.
duke@435 22 *
duke@435 23 */
duke@435 24
duke@435 25 # include "incls/_precompiled.incl"
duke@435 26 # include "incls/_sweeper.cpp.incl"
duke@435 27
long NMethodSweeper::_traversals = 0; // No. of stack traversals performed
CodeBlob* NMethodSweeper::_current = NULL; // Current nmethod
int NMethodSweeper::_seen = 0 ; // No. of blobs we have currently processed in current pass of CodeCache
int NMethodSweeper::_invocations = 0; // No. of invocations left until we are completed with this pass

jint NMethodSweeper::_locked_seen = 0; // No. of VM-locked, non-alive nmethods seen during the current pass
jint NMethodSweeper::_not_entrant_seen_on_stack = 0; // No. of not-entrant nmethods still on stack during the current pass
bool NMethodSweeper::_rescan = false; // Set when a transition was made, requesting another scan from the start
bool NMethodSweeper::_do_sweep = false; // Set at the safepoint (scan_stacks) when sweeping should proceed
jint NMethodSweeper::_sweep_started = 0; // cmpxchg guard: 1 while some thread is inside sweep_code_cache()
bool NMethodSweeper::_was_full = false; // True after the code cache was observed full (see handle_full_code_cache)
jint NMethodSweeper::_advise_to_sweep = 0; // cmpxchg flag: first notifier of a near-full cache advises early cleaning
jlong NMethodSweeper::_last_was_full = 0; // Timestamp (ms, os::javaTimeMillis) of the last full-cache event
uint NMethodSweeper::_highest_marked = 0; // Highest compile id considered for speculative disconnect so far
long NMethodSweeper::_was_full_traversal = 0; // Value of _traversals when the cache last went full
duke@435 43
jrose@1424 44 class MarkActivationClosure: public CodeBlobClosure {
jrose@1424 45 public:
jrose@1424 46 virtual void do_code_blob(CodeBlob* cb) {
jrose@1424 47 // If we see an activation belonging to a non_entrant nmethod, we mark it.
jrose@1424 48 if (cb->is_nmethod() && ((nmethod*)cb)->is_not_entrant()) {
jrose@1424 49 ((nmethod*)cb)->mark_as_seen_on_stack();
jrose@1424 50 }
jrose@1424 51 }
jrose@1424 52 };
jrose@1424 53 static MarkActivationClosure mark_activation_closure;
jrose@1424 54
never@1893 55 void NMethodSweeper::scan_stacks() {
duke@435 56 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
duke@435 57 if (!MethodFlushing) return;
never@1893 58 _do_sweep = true;
duke@435 59
duke@435 60 // No need to synchronize access, since this is always executed at a
duke@435 61 // safepoint. If we aren't in the middle of scan and a rescan
never@1893 62 // hasn't been requested then just return. If UseCodeCacheFlushing is on and
never@1893 63 // code cache flushing is in progress, don't skip sweeping to help make progress
never@1893 64 // clearing space in the code cache.
never@1893 65 if ((_current == NULL && !_rescan) && !(UseCodeCacheFlushing && !CompileBroker::should_compile_new_jobs())) {
never@1893 66 _do_sweep = false;
never@1893 67 return;
never@1893 68 }
duke@435 69
duke@435 70 // Make sure CompiledIC_lock in unlocked, since we might update some
duke@435 71 // inline caches. If it is, we just bail-out and try later.
duke@435 72 if (CompiledIC_lock->is_locked() || Patching_lock->is_locked()) return;
duke@435 73
duke@435 74 // Check for restart
duke@435 75 assert(CodeCache::find_blob_unsafe(_current) == _current, "Sweeper nmethod cached state invalid");
duke@435 76 if (_current == NULL) {
duke@435 77 _seen = 0;
duke@435 78 _invocations = NmethodSweepFraction;
never@1893 79 _current = CodeCache::first_nmethod();
duke@435 80 _traversals += 1;
duke@435 81 if (PrintMethodFlushing) {
duke@435 82 tty->print_cr("### Sweep: stack traversal %d", _traversals);
duke@435 83 }
jrose@1424 84 Threads::nmethods_do(&mark_activation_closure);
duke@435 85
duke@435 86 // reset the flags since we started a scan from the beginning.
duke@435 87 _rescan = false;
duke@435 88 _locked_seen = 0;
duke@435 89 _not_entrant_seen_on_stack = 0;
duke@435 90 }
duke@435 91
kvn@1637 92 if (UseCodeCacheFlushing) {
kvn@1637 93 if (!CodeCache::needs_flushing()) {
never@1893 94 // scan_stacks() runs during a safepoint, no race with setters
kvn@1637 95 _advise_to_sweep = 0;
kvn@1637 96 }
kvn@1637 97
kvn@1637 98 if (was_full()) {
kvn@1637 99 // There was some progress so attempt to restart the compiler
kvn@1637 100 jlong now = os::javaTimeMillis();
kvn@1637 101 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
kvn@1637 102 jlong curr_interval = now - _last_was_full;
kvn@1637 103 if ((!CodeCache::needs_flushing()) && (curr_interval > max_interval)) {
kvn@1637 104 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
kvn@1637 105 set_was_full(false);
kvn@1637 106
kvn@1637 107 // Update the _last_was_full time so we can tell how fast the
kvn@1637 108 // code cache is filling up
kvn@1637 109 _last_was_full = os::javaTimeMillis();
kvn@1637 110
kvn@1637 111 if (PrintMethodFlushing) {
kvn@1637 112 tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
kvn@1637 113 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1637 114 }
kvn@1637 115 if (LogCompilation && (xtty != NULL)) {
kvn@1637 116 ttyLocker ttyl;
kvn@1637 117 xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
kvn@1637 118 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
kvn@1637 119 xtty->stamp();
kvn@1637 120 xtty->end_elem();
kvn@1637 121 }
kvn@1637 122 }
kvn@1637 123 }
kvn@1637 124 }
duke@435 125 }
duke@435 126
never@1893 127 void NMethodSweeper::possibly_sweep() {
never@1893 128 if ((!MethodFlushing) || (!_do_sweep)) return;
never@1893 129
never@1893 130 if (_invocations > 0) {
never@1893 131 // Only one thread at a time will sweep
never@1893 132 jint old = Atomic::cmpxchg( 1, &_sweep_started, 0 );
never@1893 133 if (old != 0) {
never@1893 134 return;
never@1893 135 }
never@1893 136 sweep_code_cache();
never@1893 137 }
never@1893 138 _sweep_started = 0;
never@1893 139 }
never@1893 140
// Sweep roughly 1/NmethodSweepFraction of the code cache's blobs per call,
// resuming at _current/_seen from the previous partial sweep. Runs outside a
// safepoint; the CodeCache_lock is released around each process_nmethod()
// call so other threads can allocate and free blobs in the meantime.
void NMethodSweeper::sweep_code_cache() {
#ifdef ASSERT
  jlong sweep_start;
  // Only read at the end under the same PrintMethodFlushing guard, so the
  // conditional initialization is intentional.
  if(PrintMethodFlushing) {
    sweep_start = os::javaTimeMillis();
  }
#endif
  if (PrintMethodFlushing && Verbose) {
    tty->print_cr("### Sweep at %d out of %d. Invocations left: %d", _seen, CodeCache::nof_blobs(), _invocations);
  }

  // We want to visit all nmethods after NmethodSweepFraction invocations.
  // If invocation is 1 we do the rest
  int todo = CodeCache::nof_blobs();
  if (_invocations > 1) {
    todo = (CodeCache::nof_blobs() - _seen) / _invocations;
  }

  // Compilers may check to sweep more often than stack scans happen,
  // don't keep trying once it is all scanned
  _invocations--;

  assert(!SafepointSynchronize::is_at_safepoint(), "should not be in safepoint when we get here");
  assert(!CodeCache_lock->owned_by_self(), "just checking");

  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);

    for(int i = 0; i < todo && _current != NULL; i++) {

      // Since we will give up the CodeCache_lock, always skip ahead to an nmethod.
      // Other blobs can be deleted by other threads
      // Read next before we potentially delete current
      CodeBlob* next = CodeCache::next_nmethod(_current);

      // Now ready to process nmethod and give up CodeCache_lock
      {
        MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
        process_nmethod((nmethod *)_current);
      }
      _seen++;
      _current = next;
    }

    // Skip forward to the next nmethod (if any). Code blobs other than nmethods
    // can be freed async to us and make _current invalid while we sleep.
    _current = CodeCache::next_nmethod(_current);
  }

  if (_current == NULL && !_rescan && (_locked_seen || _not_entrant_seen_on_stack)) {
    // we've completed a scan without making progress but there were
    // nmethods we were unable to process either because they were
    // locked or were still on stack. We don't have to aggresively
    // clean them up so just stop scanning. We could scan once more
    // but that complicates the control logic and it's unlikely to
    // matter much.
    if (PrintMethodFlushing) {
      tty->print_cr("### Couldn't make progress on some nmethods so stopping sweep");
    }
  }

#ifdef ASSERT
  if(PrintMethodFlushing) {
    jlong sweep_end = os::javaTimeMillis();
    tty->print_cr("### sweeper: sweep time(%d): " INT64_FORMAT, _invocations, sweep_end - sweep_start);
  }
#endif
}
never@1893 209
duke@435 210
// Advance a single nmethod through its life cycle:
//   not_entrant -> zombie -> marked-for-reclamation -> flushed,
// cleaning inline caches along the way. Sets _rescan whenever a transition
// was made so the cycle runs again, and bumps _locked_seen /
// _not_entrant_seen_on_stack when a method could not be transitioned.
// Called with CodeCache_lock NOT held; acquires CompiledIC_lock or
// CodeCache_lock internally as needed.
void NMethodSweeper::process_nmethod(nmethod *nm) {
  assert(!CodeCache_lock->owned_by_self(), "just checking");

  // Skip methods that are currently referenced by the VM
  if (nm->is_locked_by_vm()) {
    // But still remember to clean-up inline caches for alive nmethods
    if (nm->is_alive()) {
      // Clean-up all inline caches that points to zombie/non-reentrant methods
      MutexLocker cl(CompiledIC_lock);
      nm->cleanup_inline_caches();
    } else {
      // Not alive but VM-locked: count it so the sweep can decide to stop
      // when no progress is possible (see sweep_code_cache).
      _locked_seen++;
    }
    return;
  }

  if (nm->is_zombie()) {
    // If it is first time, we see nmethod then we mark it. Otherwise,
    // we reclame it. When we have seen a zombie method twice, we know that
    // there are no inline caches that referes to it.
    if (nm->is_marked_for_reclamation()) {
      assert(!nm->is_locked_by_vm(), "must not flush locked nmethods");
      if (PrintMethodFlushing && Verbose) {
        tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (marked for reclamation) being flushed", nm->compile_id(), nm);
      }
      // flush() frees the nmethod's storage; CodeCache_lock guards the cache.
      MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      nm->flush();
    } else {
      if (PrintMethodFlushing && Verbose) {
        tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (zombie) being marked for reclamation", nm->compile_id(), nm);
      }
      nm->mark_for_reclamation();
      // A transition happened; request another pass to finish the job.
      _rescan = true;
    }
  } else if (nm->is_not_entrant()) {
    // If there is no current activations of this method on the
    // stack we can safely convert it to a zombie method
    if (nm->can_not_entrant_be_converted()) {
      if (PrintMethodFlushing && Verbose) {
        tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
      }
      nm->make_zombie();
      _rescan = true;
    } else {
      // Still alive, clean up its inline caches
      MutexLocker cl(CompiledIC_lock);
      nm->cleanup_inline_caches();
      // we coudn't transition this nmethod so don't immediately
      // request a rescan. If this method stays on the stack for a
      // long time we don't want to keep rescanning the code cache.
      _not_entrant_seen_on_stack++;
    }
  } else if (nm->is_unloaded()) {
    // Unloaded code, just make it a zombie
    if (PrintMethodFlushing && Verbose)
      tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (unloaded) being made zombie", nm->compile_id(), nm);
    if (nm->is_osr_method()) {
      // No inline caches will ever point to osr methods, so we can just remove it
      MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      nm->flush();
    } else {
      nm->make_zombie();
      _rescan = true;
    }
  } else {
    assert(nm->is_alive(), "should be alive");

    if (UseCodeCacheFlushing) {
      // Candidate for eviction: the methodOop no longer points at this
      // nmethod, it was marked before the last two traversals
      // (compile id below _highest_marked) and the cache still needs space.
      if ((nm->method()->code() != nm) && !(nm->is_locked_by_vm()) && !(nm->is_osr_method()) &&
          (_traversals > _was_full_traversal+2) && (((uint)nm->compile_id()) < _highest_marked) &&
          CodeCache::needs_flushing()) {
        // This method has not been called since the forced cleanup happened
        nm->make_not_entrant();
      }
    }

    // Clean-up all inline caches that points to zombie/non-reentrant methods
    MutexLocker cl(CompiledIC_lock);
    nm->cleanup_inline_caches();
  }
}
kvn@1637 292
// Code cache unloading: when compilers notice the code cache is getting full,
// they will call a vm op that comes here. This code attempts to speculatively
// unload the oldest half of the nmethods (based on the compile job id) by
// saving the old code in a list in the CodeCache. Then
// execution resumes. If a method so marked is not called by the second sweeper
// stack traversal after the current one, the nmethod will be marked non-entrant and
// got rid of by normal sweeping. If the method is called, the methodOop's
// _code field is restored and the methodOop/nmethod
// go back to their normal state.
//
// is_full == true  : the cache is actually full — stop compilation and, unless
//                    we flushed too recently, run the VM op to disconnect.
// is_full == false : advisory early-cleaning path — only the first caller to
//                    win the _advise_to_sweep cmpxchg proceeds.
void NMethodSweeper::handle_full_code_cache(bool is_full) {
  // Only the first one to notice can advise us to start early cleaning
  if (!is_full){
    jint old = Atomic::cmpxchg( 1, &_advise_to_sweep, 0 );
    if (old != 0) {
      return;
    }
  }

  if (is_full) {
    // Since code cache is full, immediately stop new compiles
    bool did_set = CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
    if (!did_set) {
      // only the first to notice can start the cleaning,
      // others will go back and block
      return;
    }
    set_was_full(true);

    // If we run out within MinCodeCacheFlushingInterval of the last unload time, give up
    jlong now = os::javaTimeMillis();
    jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
    jlong curr_interval = now - _last_was_full;
    if (curr_interval < max_interval) {
      // Flushing again so soon would thrash; leave the compiler off and let
      // normal sweeping reclaim space instead.
      _rescan = true;
      if (PrintMethodFlushing) {
        tty->print_cr("### handle full too often, turning off compiler");
      }
      if (LogCompilation && (xtty != NULL)) {
        ttyLocker ttyl;
        xtty->begin_elem("disable_compiler flushing_interval='" UINT64_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
                         curr_interval/1000, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
        xtty->stamp();
        xtty->end_elem();
      }
      return;
    }
  }

  // Run the speculative disconnect inside a VM operation (at a safepoint).
  VM_HandleFullCodeCache op(is_full);
  VMThread::execute(&op);

  // rescan again as soon as possible
  _rescan = true;
}
kvn@1637 347
// Body of VM_HandleFullCodeCache (see handle_full_code_cache). Walks the
// alive nmethods and speculatively disconnects the older half (compile id
// below the midpoint between _highest_marked and the current max compile id).
// A previously-disconnected nmethod that was never called again is made
// not-entrant here so normal sweeping can reclaim it.
void NMethodSweeper::speculative_disconnect_nmethods(bool is_full) {
  // If there was a race in detecting full code cache, only run
  // one vm op for it or keep the compiler shut off

  debug_only(jlong start = os::javaTimeMillis();)

  if ((!was_full()) && (is_full)) {
    if (!CodeCache::needs_flushing()) {
      // Enough space appeared in the meantime: restart compilation and skip
      // the disconnect walk entirely.
      if (PrintMethodFlushing) {
        tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
          CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
      }
      if (LogCompilation && (xtty != NULL)) {
        ttyLocker ttyl;
        xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
                         CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
        xtty->stamp();
        xtty->end_elem();
      }
      CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
      return;
    }
  }

  // Traverse the code cache trying to dump the oldest nmethods
  uint curr_max_comp_id = CompileBroker::get_compilation_id();
  // Target the midpoint: ids below flush_target are the "older half".
  uint flush_target = ((curr_max_comp_id - _highest_marked) >> 1) + _highest_marked;
  if (PrintMethodFlushing && Verbose) {
    tty->print_cr("### Cleaning code cache: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes",
        CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
  }
  if (LogCompilation && (xtty != NULL)) {
    ttyLocker ttyl;
    xtty->begin_elem("start_cleaning_code_cache live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
                      CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
    xtty->stamp();
    xtty->end_elem();
  }

  nmethod* nm = CodeCache::alive_nmethod(CodeCache::first());
  jint disconnected = 0;
  jint made_not_entrant = 0;
  while ((nm != NULL)){
    uint curr_comp_id = nm->compile_id();

    // OSR methods cannot be flushed like this. Also, don't flush native methods
    // since they are part of the JDK in most cases
    if (nm->is_in_use() && (!nm->is_osr_method()) && (!nm->is_locked_by_vm()) &&
        (!nm->is_native_method()) && ((curr_comp_id < flush_target))) {

      if ((nm->method()->code() == nm)) {
        // This method has not been previously considered for
        // unloading or it was restored already
        CodeCache::speculatively_disconnect(nm);
        disconnected++;
      } else if (nm->is_speculatively_disconnected()) {
        // This method was previously considered for preemptive unloading and was not called since then
        // Decay its counters so it doesn't look hot if recompiled later.
        nm->method()->invocation_counter()->decay();
        nm->method()->backedge_counter()->decay();
        nm->make_not_entrant();
        made_not_entrant++;
      }

      // Track the highest compile id we have touched so the next round
      // targets only newer methods.
      if (curr_comp_id > _highest_marked) {
        _highest_marked = curr_comp_id;
      }
    }
    nm = CodeCache::alive_nmethod(CodeCache::next(nm));
  }

  if (LogCompilation && (xtty != NULL)) {
    ttyLocker ttyl;
    xtty->begin_elem("stop_cleaning_code_cache disconnected='" UINT32_FORMAT "' made_not_entrant='" UINT32_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
                      disconnected, made_not_entrant, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
    xtty->stamp();
    xtty->end_elem();
  }

  // Shut off compiler. Sweeper will start over with a new stack scan and
  // traversal cycle and turn it back on if it clears enough space.
  if (was_full()) {
    _last_was_full = os::javaTimeMillis();
    CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
  }

  // After two more traversals the sweeper will get rid of unrestored nmethods
  _was_full_traversal = _traversals;
#ifdef ASSERT
  jlong end = os::javaTimeMillis();
  if(PrintMethodFlushing && Verbose) {
    tty->print_cr("### sweeper: unload time: " INT64_FORMAT, end-start);
  }
#endif
}

mercurial