--- a/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	Sat Jan 16 23:51:50 2010 -0800
+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	Thu Jan 21 11:33:32 2010 -0800
@@ -3655,9 +3655,7 @@
   verify_work_stacks_empty();
   verify_overflow_empty();
   assert(_revisitStack.isEmpty(), "tabula rasa");
-
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
-
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   bool result = false;
   if (CMSConcurrentMTEnabled && ParallelCMSThreads > 0) {
     result = do_marking_mt(asynch);
@@ -4124,7 +4122,6 @@
 void CMSConcMarkingTask::coordinator_yield() {
   assert(ConcurrentMarkSweepThread::cms_thread_has_cms_token(),
          "CMS thread should hold CMS token");
-
   DEBUG_ONLY(RememberKlassesChecker mux(false);)
   // First give up the locks, then yield, then re-lock
   // We should probably use a constructor/destructor idiom to
@@ -4201,9 +4198,7 @@
   // Mutate the Refs discovery so it is MT during the
   // multi-threaded marking phase.
   ReferenceProcessorMTMutator mt(ref_processor(), num_workers > 1);
-
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
-
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   conc_workers()->start_task(&tsk);
   while (tsk.yielded()) {
     tsk.coordinator_yield();
@@ -4472,7 +4467,7 @@
   // for cleaner interfaces.
   rp->preclean_discovered_references(
     rp->is_alive_non_header(), &keep_alive, &complete_trace,
-    &yield_cl);
+    &yield_cl, should_unload_classes());
 }

 if (clean_survivor) { // preclean the active survivor space(s)
@@ -4494,7 +4489,7 @@
   SurvivorSpacePrecleanClosure
     sss_cl(this, _span, &_markBitMap, &_markStack,
            &pam_cl, before_count, CMSYield);
-  DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
   dng->from()->object_iterate_careful(&sss_cl);
   dng->to()->object_iterate_careful(&sss_cl);
 }
@@ -4665,7 +4660,7 @@
   verify_work_stacks_empty();
   verify_overflow_empty();
   sample_eden();
-  DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
   stop_point =
     gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
 }
@@ -4753,7 +4748,7 @@
   sample_eden();
   verify_work_stacks_empty();
   verify_overflow_empty();
-  DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
   HeapWord* stop_point =
     gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
   if (stop_point != NULL) {
@@ -4853,7 +4848,7 @@
   assert(haveFreelistLocks(), "must have free list locks");
   assert_lock_strong(bitMapLock());

-  DEBUG_ONLY(RememberKlassesChecker fmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker fmx(should_unload_classes());)
  if (!init_mark_was_synchronous) {
     // We might assume that we need not fill TLAB's when
     // CMSScavengeBeforeRemark is set, because we may have just done