// Card table stuff
//
87 |
87 |
88 MemRegion cmr((HeapWord*)virtual_space()->low(), |
88 MemRegion cmr((HeapWord*)virtual_space()->low(), |
89 (HeapWord*)virtual_space()->high()); |
89 (HeapWord*)virtual_space()->high()); |
|
90 if (ZapUnusedHeapArea) { |
|
91 // Mangle newly committed space immediately rather than |
|
92 // waiting for the initialization of the space even though |
|
93 // mangling is related to spaces. Doing it here eliminates |
|
94 // the need to carry along information that a complete mangling |
|
95 // (bottom to end) needs to be done. |
|
96 SpaceMangler::mangle_region(cmr); |
|
97 } |
|
98 |
90 Universe::heap()->barrier_set()->resize_covered_region(cmr); |
99 Universe::heap()->barrier_set()->resize_covered_region(cmr); |
91 |
100 |
92 CardTableModRefBS* _ct = (CardTableModRefBS*)Universe::heap()->barrier_set(); |
101 CardTableModRefBS* _ct = (CardTableModRefBS*)Universe::heap()->barrier_set(); |
93 assert (_ct->kind() == BarrierSet::CardTableModRef, "Sanity"); |
102 assert (_ct->kind() == BarrierSet::CardTableModRef, "Sanity"); |
94 |
103 |
110 _object_space = new MutableSpace(); |
119 _object_space = new MutableSpace(); |
111 |
120 |
112 if (_object_space == NULL) |
121 if (_object_space == NULL) |
113 vm_exit_during_initialization("Could not allocate an old gen space"); |
122 vm_exit_during_initialization("Could not allocate an old gen space"); |
114 |
123 |
115 object_space()->initialize(cmr, true); |
124 object_space()->initialize(cmr, |
|
125 SpaceDecorator::Clear, |
|
126 SpaceDecorator::Mangle); |
116 |
127 |
117 _object_mark_sweep = new PSMarkSweepDecorator(_object_space, start_array(), MarkSweepDeadRatio); |
128 _object_mark_sweep = new PSMarkSweepDecorator(_object_space, start_array(), MarkSweepDeadRatio); |
118 |
129 |
119 if (_object_mark_sweep == NULL) |
130 if (_object_mark_sweep == NULL) |
120 vm_exit_during_initialization("Could not complete allocation of old generation"); |
131 vm_exit_during_initialization("Could not complete allocation of old generation"); |
230 bool PSOldGen::expand_by(size_t bytes) { |
241 bool PSOldGen::expand_by(size_t bytes) { |
231 assert_lock_strong(ExpandHeap_lock); |
242 assert_lock_strong(ExpandHeap_lock); |
232 assert_locked_or_safepoint(Heap_lock); |
243 assert_locked_or_safepoint(Heap_lock); |
233 bool result = virtual_space()->expand_by(bytes); |
244 bool result = virtual_space()->expand_by(bytes); |
234 if (result) { |
245 if (result) { |
|
246 if (ZapUnusedHeapArea) { |
|
247 // We need to mangle the newly expanded area. The memregion spans |
|
248 // end -> new_end, we assume that top -> end is already mangled. |
|
249 // Do the mangling before post_resize() is called because |
|
250 // the space is available for allocation after post_resize(); |
|
251 HeapWord* const virtual_space_high = (HeapWord*) virtual_space()->high(); |
|
252 assert(object_space()->end() < virtual_space_high, |
|
253 "Should be true before post_resize()"); |
|
254 MemRegion mangle_region(object_space()->end(), virtual_space_high); |
|
255 // Note that the object space has not yet been updated to |
|
256 // coincede with the new underlying virtual space. |
|
257 SpaceMangler::mangle_region(mangle_region); |
|
258 } |
235 post_resize(); |
259 post_resize(); |
236 if (UsePerfData) { |
260 if (UsePerfData) { |
237 _space_counters->update_capacity(); |
261 _space_counters->update_capacity(); |
238 _gen_counters->update_all(); |
262 _gen_counters->update_all(); |
239 } |
263 } |
346 size_t new_word_size = new_memregion.word_size(); |
370 size_t new_word_size = new_memregion.word_size(); |
347 |
371 |
348 start_array()->set_covered_region(new_memregion); |
372 start_array()->set_covered_region(new_memregion); |
349 Universe::heap()->barrier_set()->resize_covered_region(new_memregion); |
373 Universe::heap()->barrier_set()->resize_covered_region(new_memregion); |
350 |
374 |
351 // Did we expand? |
|
352 HeapWord* const virtual_space_high = (HeapWord*) virtual_space()->high(); |
375 HeapWord* const virtual_space_high = (HeapWord*) virtual_space()->high(); |
353 if (object_space()->end() < virtual_space_high) { |
|
354 // We need to mangle the newly expanded area. The memregion spans |
|
355 // end -> new_end, we assume that top -> end is already mangled. |
|
356 // This cannot be safely tested for, as allocation may be taking |
|
357 // place. |
|
358 MemRegion mangle_region(object_space()->end(), virtual_space_high); |
|
359 object_space()->mangle_region(mangle_region); |
|
360 } |
|
361 |
376 |
362 // ALWAYS do this last!! |
377 // ALWAYS do this last!! |
363 object_space()->set_end(virtual_space_high); |
378 object_space()->set_end(virtual_space_high); |
364 |
379 |
365 assert(new_word_size == heap_word_size(object_space()->capacity_in_bytes()), |
380 assert(new_word_size == heap_word_size(object_space()->capacity_in_bytes()), |
460 |
475 |
461 void PSOldGen::verify_object_start_array() { |
476 void PSOldGen::verify_object_start_array() { |
462 VerifyObjectStartArrayClosure check( this, &_start_array ); |
477 VerifyObjectStartArrayClosure check( this, &_start_array ); |
463 object_iterate(&check); |
478 object_iterate(&check); |
464 } |
479 } |
|
480 |
|
481 #ifndef PRODUCT |
|
482 void PSOldGen::record_spaces_top() { |
|
483 assert(ZapUnusedHeapArea, "Not mangling unused space"); |
|
484 object_space()->set_top_for_allocations(); |
|
485 } |
|
486 #endif |