--- a/src/share/vm/memory/referenceProcessor.cpp	Fri Oct 14 11:12:24 2011 -0400
+++ b/src/share/vm/memory/referenceProcessor.cpp	Mon Oct 17 09:57:41 2011 -0700
@@ -105,19 +105,22 @@
   _discovery_is_mt = mt_discovery;
   _num_q = MAX2(1, mt_processing_degree);
   _max_num_q = MAX2(_num_q, mt_discovery_degree);
-  _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList,
+  _discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
                                          _max_num_q * number_of_subclasses_of_ref());
-  if (_discoveredSoftRefs == NULL) {
+  if (_discovered_refs == NULL) {
     vm_exit_during_initialization("Could not allocated RefProc Array");
   }
+  _discoveredSoftRefs = &_discovered_refs[0];
   _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
   _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
-  // Initialized all entries to NULL
+
+  // Initialize all entries to NULL
   for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
-    _discoveredSoftRefs[i].set_head(NULL);
-    _discoveredSoftRefs[i].set_length(0);
+    _discovered_refs[i].set_head(NULL);
+    _discovered_refs[i].set_length(0);
   }
+
   // If we do barriers, cache a copy of the barrier set.
   if (discovered_list_needs_barrier) {
     _bs = Universe::heap()->barrier_set();
@@ -129,7 +132,7 @@
 void ReferenceProcessor::verify_no_references_recorded() {
   guarantee(!_discovering_refs, "Discovering refs?");
   for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
-    guarantee(_discoveredSoftRefs[i].is_empty(),
+    guarantee(_discovered_refs[i].is_empty(),
               "Found non-empty discovered list");
   }
 }
@@ -138,9 +141,9 @@
 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
   for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
     if (UseCompressedOops) {
-      f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
+      f->do_oop((narrowOop*)_discovered_refs[i].adr_head());
     } else {
-      f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
+      f->do_oop((oop*)_discovered_refs[i].adr_head());
     }
   }
 }
@@ -423,15 +426,15 @@
                                           AbstractRefProcTaskExecutor* task_executor) {
   if (_processing_is_mt && task_executor != NULL) {
     // Parallel code
-    RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
+    RefProcEnqueueTask tsk(*this, _discovered_refs,
                            pending_list_addr, _max_num_q);
     task_executor->execute(tsk);
   } else {
     // Serial code: call the parent class's implementation
     for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
-      enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
-      _discoveredSoftRefs[i].set_head(NULL);
-      _discoveredSoftRefs[i].set_length(0);
+      enqueue_discovered_reflist(_discovered_refs[i], pending_list_addr);
+      _discovered_refs[i].set_head(NULL);
+      _discovered_refs[i].set_length(0);
     }
   }
 }
@@ -691,7 +694,7 @@
     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
       gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
     }
-    abandon_partial_discovered_list(_discoveredSoftRefs[i]);
+    abandon_partial_discovered_list(_discovered_refs[i]);
   }
 }
 
@@ -952,7 +955,7 @@
         "\nScrubbing %s discovered list of Null referents",
         list_name(i));
     }
-    clean_up_discovered_reflist(_discoveredSoftRefs[i]);
+    clean_up_discovered_reflist(_discovered_refs[i]);
   }
 }
 
@@ -1402,7 +1405,7 @@
 void ReferenceProcessor::clear_discovered_references() {
   guarantee(!_discovering_refs, "Discovering refs?");
   for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
-    clear_discovered_references(_discoveredSoftRefs[i]);
+    clear_discovered_references(_discovered_refs[i]);
   }
 }
 