src/share/vm/memory/genCollectedHeap.cpp

changeset 7975:79b13c9a93e8
parent    7073:4d3a43351904
child     7990:1f646daf0d67
diff -r 8f8f1d578796 -r 79b13c9a93e8 src/share/vm/memory/genCollectedHeap.cpp
--- a/src/share/vm/memory/genCollectedHeap.cpp
+++ b/src/share/vm/memory/genCollectedHeap.cpp
@@ -24,10 +24,11 @@
 
 #include "precompiled.hpp"
 #include "classfile/symbolTable.hpp"
 #include "classfile/systemDictionary.hpp"
 #include "classfile/vmSymbols.hpp"
+#include "code/codeCache.hpp"
 #include "code/icBuffer.hpp"
 #include "gc_implementation/shared/collectorCounters.hpp"
 #include "gc_implementation/shared/gcTrace.hpp"
 #include "gc_implementation/shared/gcTraceTime.hpp"
 #include "gc_implementation/shared/vmGCOperations.hpp"
@@ -47,10 +48,11 @@
 #include "runtime/fprofiler.hpp"
 #include "runtime/handles.hpp"
 #include "runtime/handles.inline.hpp"
 #include "runtime/java.hpp"
 #include "runtime/vmThread.hpp"
+#include "services/management.hpp"
 #include "services/memoryService.hpp"
 #include "utilities/vmError.hpp"
 #include "utilities/workgroup.hpp"
 #include "utilities/macros.hpp"
 #if INCLUDE_ALL_GCS
@@ -61,26 +63,30 @@
 GenCollectedHeap* GenCollectedHeap::_gch;
 NOT_PRODUCT(size_t GenCollectedHeap::_skip_header_HeapWords = 0;)
 
 // The set of potentially parallel tasks in root scanning.
 enum GCH_strong_roots_tasks {
-  // We probably want to parallelize both of these internally, but for now...
+  GCH_PS_Universe_oops_do,
+  GCH_PS_JNIHandles_oops_do,
+  GCH_PS_ObjectSynchronizer_oops_do,
+  GCH_PS_FlatProfiler_oops_do,
+  GCH_PS_Management_oops_do,
+  GCH_PS_SystemDictionary_oops_do,
+  GCH_PS_ClassLoaderDataGraph_oops_do,
+  GCH_PS_jvmti_oops_do,
+  GCH_PS_CodeCache_oops_do,
   GCH_PS_younger_gens,
   // Leave this one last.
   GCH_PS_NumElements
 };
 
 GenCollectedHeap::GenCollectedHeap(GenCollectorPolicy *policy) :
   SharedHeap(policy),
   _gen_policy(policy),
-  _gen_process_roots_tasks(new SubTasksDone(GCH_PS_NumElements)),
+  _process_strong_tasks(new SubTasksDone(GCH_PS_NumElements)),
   _full_collections_completed(0)
 {
-  if (_gen_process_roots_tasks == NULL ||
-      !_gen_process_roots_tasks->valid()) {
-    vm_exit_during_initialization("Failed necessary allocation.");
-  }
   assert(policy != NULL, "Sanity check");
 }
 
 jint GenCollectedHeap::initialize() {
   CollectedHeap::pre_initialize();
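
The widened GCH_strong_roots_tasks enum above gives each strong-root group its own claim token in _process_strong_tasks, so when several GC workers execute the same root-processing code, exactly one worker ends up scanning each group. The sketch below is a rough, self-contained model of that claim-once idea in plain C++; RootTask, TaskClaims, try_claim and the worker loop are illustrative names, not HotSpot's SubTasksDone implementation.

    // Simplified standalone model of the claim-once pattern behind
    // _process_strong_tasks. Illustrative names only.
    #include <atomic>
    #include <cstdio>
    #include <thread>
    #include <vector>

    enum RootTask { UniverseRoots, JNIHandleRoots, CodeCacheRoots, NumTasks };

    class TaskClaims {
      std::atomic<bool> _claimed[NumTasks];
    public:
      TaskClaims() { for (auto& c : _claimed) c.store(false); }
      // Returns true for exactly one caller per task; later callers skip the work.
      bool try_claim(RootTask t) { return !_claimed[t].exchange(true); }
    };

    static void worker(TaskClaims& claims, int id) {
      // Every worker runs the same root-processing code; the claims decide
      // which worker actually scans each root group.
      if (claims.try_claim(UniverseRoots))  std::printf("worker %d scans Universe roots\n", id);
      if (claims.try_claim(JNIHandleRoots)) std::printf("worker %d scans JNI handles\n", id);
      if (claims.try_claim(CodeCacheRoots)) std::printf("worker %d scans the code cache\n", id);
    }

    int main() {
      TaskClaims claims;
      std::vector<std::thread> pool;
      for (int i = 0; i < 4; i++) pool.emplace_back(worker, std::ref(claims), i);
      for (std::thread& t : pool) t.join();
      return 0;
    }

In the hunks below, all_tasks_completed() plays the role of resetting the claims once every worker has finished, so the same set of claim tokens can be reused by the next collection.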
@@ -588,33 +594,141 @@
   return collector_policy()->satisfy_failed_allocation(size, is_tlab);
 }
 
 void GenCollectedHeap::set_par_threads(uint t) {
   SharedHeap::set_par_threads(t);
-  _gen_process_roots_tasks->set_n_threads(t);
+  set_n_termination(t);
 }
 
-void GenCollectedHeap::
-gen_process_roots(int level,
-                  bool younger_gens_as_roots,
-                  bool activate_scope,
-                  SharedHeap::ScanningOption so,
-                  OopsInGenClosure* not_older_gens,
-                  OopsInGenClosure* weak_roots,
-                  OopsInGenClosure* older_gens,
-                  CLDClosure* cld_closure,
-                  CLDClosure* weak_cld_closure,
-                  CodeBlobClosure* code_closure) {
+void GenCollectedHeap::set_n_termination(uint t) {
+  _process_strong_tasks->set_n_threads(t);
+}
+
+#ifdef ASSERT
+class AssertNonScavengableClosure: public OopClosure {
+public:
+  virtual void do_oop(oop* p) {
+    assert(!Universe::heap()->is_in_partial_collection(*p),
+      "Referent should not be scavengable."); }
+  virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+};
+static AssertNonScavengableClosure assert_is_non_scavengable_closure;
+#endif
+
+void GenCollectedHeap::process_roots(bool activate_scope,
+                                     ScanningOption so,
+                                     OopClosure* strong_roots,
+                                     OopClosure* weak_roots,
+                                     CLDClosure* strong_cld_closure,
+                                     CLDClosure* weak_cld_closure,
+                                     CodeBlobClosure* code_roots) {
+  StrongRootsScope srs(this, activate_scope);
 
   // General roots.
-  SharedHeap::process_roots(activate_scope, so,
-                            not_older_gens, weak_roots,
-                            cld_closure, weak_cld_closure,
-                            code_closure);
+  assert(_strong_roots_parity != 0, "must have called prologue code");
+  assert(code_roots != NULL, "code root closure should always be set");
+  // _n_termination for _process_strong_tasks should be set up stream
+  // in a method not running in a GC worker.  Otherwise the GC worker
+  // could be trying to change the termination condition while the task
+  // is executing in another GC worker.
+
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_ClassLoaderDataGraph_oops_do)) {
+    ClassLoaderDataGraph::roots_cld_do(strong_cld_closure, weak_cld_closure);
+  }
+
+  // Some CLDs contained in the thread frames should be considered strong.
+  // Don't process them if they will be processed during the ClassLoaderDataGraph phase.
+  CLDClosure* roots_from_clds_p = (strong_cld_closure != weak_cld_closure) ? strong_cld_closure : NULL;
+  // Only process code roots from thread stacks if we aren't visiting the entire CodeCache anyway
+  CodeBlobClosure* roots_from_code_p = (so & SO_AllCodeCache) ? NULL : code_roots;
+
+  Threads::possibly_parallel_oops_do(strong_roots, roots_from_clds_p, roots_from_code_p);
+
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_Universe_oops_do)) {
+    Universe::oops_do(strong_roots);
+  }
+  // Global (strong) JNI handles
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_JNIHandles_oops_do)) {
+    JNIHandles::oops_do(strong_roots);
+  }
+
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_ObjectSynchronizer_oops_do)) {
+    ObjectSynchronizer::oops_do(strong_roots);
+  }
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_FlatProfiler_oops_do)) {
+    FlatProfiler::oops_do(strong_roots);
+  }
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_Management_oops_do)) {
+    Management::oops_do(strong_roots);
+  }
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_jvmti_oops_do)) {
+    JvmtiExport::oops_do(strong_roots);
+  }
+
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_SystemDictionary_oops_do)) {
+    SystemDictionary::roots_oops_do(strong_roots, weak_roots);
+  }
+
+  // All threads execute the following. A specific chunk of buckets
+  // from the StringTable are the individual tasks.
+  if (weak_roots != NULL) {
+    if (CollectedHeap::use_parallel_gc_threads()) {
+      StringTable::possibly_parallel_oops_do(weak_roots);
+    } else {
+      StringTable::oops_do(weak_roots);
+    }
+  }
+
+  if (!_process_strong_tasks->is_task_claimed(GCH_PS_CodeCache_oops_do)) {
+    if (so & SO_ScavengeCodeCache) {
+      assert(code_roots != NULL, "must supply closure for code cache");
+
+      // We only visit parts of the CodeCache when scavenging.
+      CodeCache::scavenge_root_nmethods_do(code_roots);
+    }
+    if (so & SO_AllCodeCache) {
+      assert(code_roots != NULL, "must supply closure for code cache");
+
+      // CMSCollector uses this to do intermediate-strength collections.
+      // We scan the entire code cache, since CodeCache::do_unloading is not called.
+      CodeCache::blobs_do(code_roots);
+    }
+    // Verify that the code cache contents are not subject to
+    // movement by a scavenging collection.
+    DEBUG_ONLY(CodeBlobToOopClosure assert_code_is_non_scavengable(&assert_is_non_scavengable_closure, !CodeBlobToOopClosure::FixRelocations));
+    DEBUG_ONLY(CodeCache::asserted_non_scavengable_nmethods_do(&assert_code_is_non_scavengable));
+  }
+
+}
+
+void GenCollectedHeap::gen_process_roots(int level,
+                                         bool younger_gens_as_roots,
+                                         bool activate_scope,
+                                         ScanningOption so,
+                                         bool only_strong_roots,
+                                         OopsInGenClosure* not_older_gens,
+                                         OopsInGenClosure* older_gens,
+                                         CLDClosure* cld_closure) {
+  const bool is_adjust_phase = !only_strong_roots && !younger_gens_as_roots;
+
+  bool is_moving_collection = false;
+  if (level == 0 || is_adjust_phase) {
+    // young collections are always moving
+    is_moving_collection = true;
+  }
+
+  MarkingCodeBlobClosure mark_code_closure(not_older_gens, is_moving_collection);
+  OopsInGenClosure* weak_roots = only_strong_roots ? NULL : not_older_gens;
+  CLDClosure* weak_cld_closure = only_strong_roots ? NULL : cld_closure;
+
+  process_roots(activate_scope, so,
+                not_older_gens, weak_roots,
+                cld_closure, weak_cld_closure,
+                &mark_code_closure);
 
   if (younger_gens_as_roots) {
-    if (!_gen_process_roots_tasks->is_task_claimed(GCH_PS_younger_gens)) {
+    if (!_process_strong_tasks->is_task_claimed(GCH_PS_younger_gens)) {
       for (int i = 0; i < level; i++) {
         not_older_gens->set_generation(_gens[i]);
         _gens[i]->oop_iterate(not_older_gens);
       }
       not_older_gens->reset_generation();
@@ -626,47 +740,22 @@
     older_gens->set_generation(_gens[i]);
     rem_set()->younger_refs_iterate(_gens[i], older_gens);
     older_gens->reset_generation();
   }
 
-  _gen_process_roots_tasks->all_tasks_completed();
+  _process_strong_tasks->all_tasks_completed();
 }
 
-void GenCollectedHeap::
-gen_process_roots(int level,
-                  bool younger_gens_as_roots,
-                  bool activate_scope,
-                  SharedHeap::ScanningOption so,
-                  bool only_strong_roots,
-                  OopsInGenClosure* not_older_gens,
-                  OopsInGenClosure* older_gens,
-                  CLDClosure* cld_closure) {
-
-  const bool is_adjust_phase = !only_strong_roots && !younger_gens_as_roots;
-
-  bool is_moving_collection = false;
-  if (level == 0 || is_adjust_phase) {
-    // young collections are always moving
-    is_moving_collection = true;
-  }
-
-  MarkingCodeBlobClosure mark_code_closure(not_older_gens, is_moving_collection);
-  CodeBlobClosure* code_closure = &mark_code_closure;
-
-  gen_process_roots(level,
-                    younger_gens_as_roots,
-                    activate_scope, so,
-                    not_older_gens, only_strong_roots ? NULL : not_older_gens,
-                    older_gens,
-                    cld_closure, only_strong_roots ? NULL : cld_closure,
-                    code_closure);
-
-}
+
+class AlwaysTrueClosure: public BoolObjectClosure {
+public:
+  bool do_object_b(oop p) { return true; }
+};
+static AlwaysTrueClosure always_true;
 
 void GenCollectedHeap::gen_process_weak_roots(OopClosure* root_closure) {
-  SharedHeap::process_weak_roots(root_closure);
-  // "Local" "weak" refs
+  JNIHandles::weak_oops_do(&always_true, root_closure);
   for (int i = 0; i < _n_gens; i++) {
     _gens[i]->ref_processor()->weak_oops_do(root_closure);
   }
 }
 
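
In the last hunk, gen_process_weak_roots no longer routes through SharedHeap::process_weak_roots; it walks the weak JNI handles itself and passes an is-alive closure that always answers true, so every handle is kept and its referent is simply visited rather than cleared, which is what a pointer-adjusting full-GC pass needs. The sketch below is a minimal standalone model of such a walk; Object and this weak_oops_do signature are hypothetical names modelled loosely on JNIHandles::weak_oops_do(&always_true, root_closure), not the real implementation.

    // Standalone model of walking weak handles with a liveness predicate
    // plus a visit closure. Illustrative names only.
    #include <cstdio>
    #include <functional>
    #include <vector>

    struct Object { int payload; };

    // Dead referents are cleared; live ones are visited (for example so a
    // moving collector can update the pointer stored in the handle).
    void weak_oops_do(std::vector<Object*>& handles,
                      const std::function<bool(Object*)>& is_alive,
                      const std::function<void(Object**)>& visit) {
      for (Object*& h : handles) {
        if (h == nullptr) continue;
        if (is_alive(h)) {
          visit(&h);
        } else {
          h = nullptr;   // clear a dead weak handle
        }
      }
    }

    int main() {
      Object a{1}, b{2};
      std::vector<Object*> handles{&a, &b};
      // An always-true predicate keeps every handle and just visits it.
      weak_oops_do(handles,
                   [](Object*) { return true; },
                   [](Object** p) { std::printf("visited %d\n", (*p)->payload); });
      return 0;
    }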
