207 GenCollectedHeap* gch = GenCollectedHeap::heap(); |
207 GenCollectedHeap* gch = GenCollectedHeap::heap(); |
208 GCCauseSetter gccs(gch, _gc_cause); |
208 GCCauseSetter gccs(gch, _gc_cause); |
209 gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_level); |
209 gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_level); |
210 } |
210 } |
211 |
211 |
|
212 // Returns true iff concurrent GCs unload metadata. |
212 bool VM_CollectForMetadataAllocation::initiate_concurrent_GC() { |
213 bool VM_CollectForMetadataAllocation::initiate_concurrent_GC() { |
213 #if INCLUDE_ALL_GCS |
214 #if INCLUDE_ALL_GCS |
214 if (UseConcMarkSweepGC && CMSClassUnloadingEnabled) { |
215 if (UseConcMarkSweepGC && CMSClassUnloadingEnabled) { |
215 MetaspaceGC::set_should_concurrent_collect(true); |
216 MetaspaceGC::set_should_concurrent_collect(true); |
216 return true; |
217 return true; |
217 } |
218 } |
218 |
219 |
219 if (UseG1GC) { |
220 if (UseG1GC && ClassUnloadingWithConcurrentMark) { |
220 G1CollectedHeap* g1h = G1CollectedHeap::heap(); |
221 G1CollectedHeap* g1h = G1CollectedHeap::heap(); |
221 g1h->g1_policy()->set_initiate_conc_mark_if_possible(); |
222 g1h->g1_policy()->set_initiate_conc_mark_if_possible(); |
222 |
223 |
223 GCCauseSetter x(g1h, _gc_cause); |
224 GCCauseSetter x(g1h, _gc_cause); |
224 |
225 |