src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp

changeset 1625:4788266644c1
parent    1580:e018e6884bd8
child     1719:5f1f51edaff6

--- a/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	(1605:c4d722788ed6)
+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	(1625:4788266644c1)
@@ -3653,13 +3653,11 @@
   // obsolete contents from a short-circuited previous CMS cycle.
   _revisitStack.reset();
   verify_work_stacks_empty();
   verify_overflow_empty();
   assert(_revisitStack.isEmpty(), "tabula rasa");
-
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
-
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   bool result = false;
   if (CMSConcurrentMTEnabled && ParallelCMSThreads > 0) {
     result = do_marking_mt(asynch);
   } else {
     result = do_marking_st(asynch);
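Read as a whole, the changeset replaces every debug-build use of the global CMSClassUnloadingEnabled flag with the collector's per-cycle should_unload_classes() decision when arming RememberKlassesChecker. Below is a minimal, self-contained sketch of the scoped-checker idiom these hunks rely on; the DEBUG_ONLY expansion, the global flag, and the class body are illustrative assumptions, not the HotSpot definitions.

#include <cassert>

// Illustrative stand-ins only: in HotSpot, DEBUG_ONLY compiles its
// argument into debug builds, and RememberKlassesChecker is defined
// in the CMS sources. The bodies below are assumptions for the sketch.
#ifndef NDEBUG
#define DEBUG_ONLY(code) code
#else
#define DEBUG_ONLY(code)
#endif

static bool g_remember_klasses = false;   // assumed debug-mode flag

class RememberKlassesChecker {
  bool _saved;                            // previous setting
public:
  explicit RememberKlassesChecker(bool armed) : _saved(g_remember_klasses) {
    g_remember_klasses = armed;           // arm (or disarm) for this scope
  }
  ~RememberKlassesChecker() {
    g_remember_klasses = _saved;          // restore on scope exit
  }
};

static bool should_unload_classes() { return true; }  // per-cycle decision, stubbed

int main() {
  // Mirrors the hunk above: the collector's per-cycle decision, not a
  // command-line flag, now drives the debug-only checker.
  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
  assert(g_remember_klasses == should_unload_classes() && "checker armed");
  return 0;
}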
@@ -4122,11 +4120,10 @@
 
 // This is run by the CMS (coordinator) thread.
 void CMSConcMarkingTask::coordinator_yield() {
   assert(ConcurrentMarkSweepThread::cms_thread_has_cms_token(),
          "CMS thread should hold CMS token");
-
   DEBUG_ONLY(RememberKlassesChecker mux(false);)
   // First give up the locks, then yield, then re-lock
   // We should probably use a constructor/destructor idiom to
   // do this unlock/lock or modify the MutexUnlocker class to
   // serve our purpose. XXX
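The XXX comment above asks for exactly the constructor/destructor (RAII) idiom: drop a held lock on scope entry, reacquire it on scope exit. A hedged sketch follows; HotSpot has its own Mutex/MutexUnlocker types, so std::mutex and the ScopedUnlocker name here are assumptions chosen only to keep the example self-contained.

#include <mutex>
#include <thread>

// Release the lock in the constructor, reacquire it in the destructor.
class ScopedUnlocker {
  std::mutex& _m;
public:
  explicit ScopedUnlocker(std::mutex& m) : _m(m) { _m.unlock(); }
  ~ScopedUnlocker() { _m.lock(); }
  ScopedUnlocker(const ScopedUnlocker&) = delete;
  ScopedUnlocker& operator=(const ScopedUnlocker&) = delete;
};

std::mutex bit_map_lock;  // assumed stand-in for bitMapLock()

void coordinator_yield_sketch() {
  // Precondition: the caller holds bit_map_lock.
  {
    ScopedUnlocker ul(bit_map_lock);  // first give up the lock...
    std::this_thread::yield();        // ...then yield...
  }                                   // ...then re-lock on scope exit
}

int main() {
  bit_map_lock.lock();
  coordinator_yield_sketch();
  bit_map_lock.unlock();
  return 0;
}

The payoff of the idiom is that every early return or exception path reacquires the lock automatically, which is precisely the hazard a manual unlock/yield/lock sequence leaves open.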
@@ -4199,13 +4196,11 @@
   // Refs discovery is already non-atomic.
   assert(!ref_processor()->discovery_is_atomic(), "Should be non-atomic");
   // Mutate the Refs discovery so it is MT during the
   // multi-threaded marking phase.
   ReferenceProcessorMTMutator mt(ref_processor(), num_workers > 1);
-
-  DEBUG_ONLY(RememberKlassesChecker cmx(CMSClassUnloadingEnabled);)
-
+  DEBUG_ONLY(RememberKlassesChecker cmx(should_unload_classes());)
   conc_workers()->start_task(&tsk);
   while (tsk.yielded()) {
     tsk.coordinator_yield();
     conc_workers()->continue_task(&tsk);
   }
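The loop at the end of this hunk is the coordinator side of CMS's cooperative yield protocol: start the worker task, and whenever the task reports that it yielded, yield on the coordinator's behalf and then resume the workers. A minimal single-threaded sketch of that control flow is below; Task, WorkGang, and their members are assumed stand-ins that only mirror the loop's shape, not the HotSpot work-gang API.

#include <cstdio>

struct Task {
  int steps_left = 3;        // pretend the marking takes three slices
  bool _yielded = false;
  bool yielded() const { return _yielded; }
  void coordinator_yield() {
    // Coordinator-side yield point (in the real code, locks are
    // dropped and the checker is disarmed via RememberKlassesChecker
    // mux(false) before yielding).
    std::printf("coordinator yields\n");
    _yielded = false;
  }
};

struct WorkGang {
  void run(Task& t) {
    // Do one slice of work, then hand control back to the coordinator.
    if (t.steps_left-- > 0) t._yielded = true;
  }
  void start_task(Task& t)    { run(t); }
  void continue_task(Task& t) { run(t); }
};

int main() {
  Task tsk;
  WorkGang workers;
  workers.start_task(tsk);
  while (tsk.yielded()) {     // same loop shape as the hunk above
    tsk.coordinator_yield();
    workers.continue_task(tsk);
  }
  return 0;
}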
@@ -4470,11 +4465,11 @@
     // The code in this method may need further
     // tweaking for better performance and some restructuring
     // for cleaner interfaces.
     rp->preclean_discovered_references(
       rp->is_alive_non_header(), &keep_alive, &complete_trace,
-      &yield_cl);
+      &yield_cl, should_unload_classes());
   }
 
   if (clean_survivor) { // preclean the active survivor space(s)
     assert(_young_gen->kind() == Generation::DefNew ||
            _young_gen->kind() == Generation::ParNew ||
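This hunk widens preclean_discovered_references() with a trailing argument carrying the same per-cycle decision, presumably so the reference processor can arm the klass-remembering checker around its own tracing. A hedged sketch of the parameter threading; every name except should_unload_classes is an assumption here.

#include <cstdio>

// The callee receives the caller's per-cycle decision instead of
// re-reading a global flag.
static void preclean_discovered_references_sketch(bool should_unload) {
  // In the real code this is where a scoped RememberKlassesChecker
  // (as sketched earlier) would bracket the reference-tracing loop.
  std::printf("precleaning with class unloading %s\n",
              should_unload ? "on" : "off");
}

int main() {
  bool should_unload_classes = true;  // stands in for the CMS query
  preclean_discovered_references_sketch(should_unload_classes);
  return 0;
}

Passing the decision down keeps a single source of truth for the cycle: should_unload_classes() can differ from the CMSClassUnloadingEnabled flag, so a callee that re-read the flag could disagree with the collector's actual choice.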
@@ -4492,11 +4487,11 @@
     unsigned int before_count =
       GenCollectedHeap::heap()->total_collections();
     SurvivorSpacePrecleanClosure
       sss_cl(this, _span, &_markBitMap, &_markStack,
              &pam_cl, before_count, CMSYield);
-    DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+    DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
     dng->from()->object_iterate_careful(&sss_cl);
     dng->to()->object_iterate_careful(&sss_cl);
   }
   MarkRefsIntoAndScanClosure
     mrias_cl(_span, ref_processor(), &_markBitMap, &_modUnionTable,
@@ -4663,11 +4658,11 @@
     startTimer();
     {
       verify_work_stacks_empty();
       verify_overflow_empty();
       sample_eden();
-      DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+      DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
       stop_point =
         gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     }
     if (stop_point != NULL) {
       // The careful iteration stopped early either because it found an
@@ -4751,11 +4746,11 @@
     CMSTokenSyncWithLocks ts(true, gen->freelistLock(), bitMapLock());
     startTimer();
     sample_eden();
     verify_work_stacks_empty();
     verify_overflow_empty();
-    DEBUG_ONLY(RememberKlassesChecker mx(CMSClassUnloadingEnabled);)
+    DEBUG_ONLY(RememberKlassesChecker mx(should_unload_classes());)
     HeapWord* stop_point =
       gen->cmsSpace()->object_iterate_careful_m(dirtyRegion, cl);
     if (stop_point != NULL) {
       // The careful iteration stopped early because it found an
       // uninitialized object. Redirty the bits corresponding to the
@@ -4851,11 +4846,11 @@
     CodeCache::gc_prologue();
   }
   assert(haveFreelistLocks(), "must have free list locks");
   assert_lock_strong(bitMapLock());
 
-  DEBUG_ONLY(RememberKlassesChecker fmx(CMSClassUnloadingEnabled);)
+  DEBUG_ONLY(RememberKlassesChecker fmx(should_unload_classes());)
   if (!init_mark_was_synchronous) {
     // We might assume that we need not fill TLAB's when
     // CMSScavengeBeforeRemark is set, because we may have just done
     // a scavenge which would have filled all TLAB's -- and besides
     // Eden would be empty. This however may not always be the case --
