Thu, 27 May 2010 18:01:56 -0700
6916623: Align object to 16 bytes to use Compressed Oops with java heap up to 64Gb
Summary: Added new product ObjectAlignmentInBytes flag to control object alignment.
Reviewed-by: twisti, ysr, iveresov
1 /*
2 * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
25 # include "incls/_precompiled.incl"
26 # include "incls/_sweeper.cpp.incl"
28 long NMethodSweeper::_traversals = 0; // No. of stack traversals performed
29 CodeBlob* NMethodSweeper::_current = NULL; // Current nmethod
30 int NMethodSweeper::_seen = 0 ; // No. of blobs we have currently processed in current pass of CodeCache
31 int NMethodSweeper::_invocations = 0; // No. of invocations left until we are completed with this pass
33 jint NMethodSweeper::_locked_seen = 0; // No. of VM-locked, non-alive nmethods skipped in the current pass
34 jint NMethodSweeper::_not_entrant_seen_on_stack = 0; // No. of not-entrant nmethods that could not yet be made zombie
35 bool NMethodSweeper::_rescan = false; // Request a fresh pass from the start (set when an nmethod changes state)
36 bool NMethodSweeper::_do_sweep = false; // Set by scan_stacks(); gates possibly_sweep()
37 jint NMethodSweeper::_sweep_started = 0; // cmpxchg guard: non-zero while one thread runs sweep_code_cache()
38 bool NMethodSweeper::_was_full = false; // Code cache was observed completely full (see was_full()/set_was_full())
39 jint NMethodSweeper::_advise_to_sweep = 0; // cmpxchg flag: a compiler advised an early cleaning pass
40 jlong NMethodSweeper::_last_was_full = 0; // javaTimeMillis() timestamp of the last full-cache episode
41 uint NMethodSweeper::_highest_marked = 0; // Highest compile id considered by speculative_disconnect_nmethods()
42 long NMethodSweeper::_was_full_traversal = 0; // Value of _traversals when the last speculative disconnect ran
44 class MarkActivationClosure: public CodeBlobClosure {
45 public:
46 virtual void do_code_blob(CodeBlob* cb) {
47 // If we see an activation belonging to a non_entrant nmethod, we mark it.
48 if (cb->is_nmethod() && ((nmethod*)cb)->is_not_entrant()) {
49 ((nmethod*)cb)->mark_as_seen_on_stack();
50 }
51 }
52 };
53 static MarkActivationClosure mark_activation_closure;
// Executed at every safepoint. Decides whether sweeping should proceed,
// starts a new pass over the code cache when the previous one finished
// (marking not-entrant nmethods still live on thread stacks), and, with
// UseCodeCacheFlushing, restarts the compiler once the code cache has
// drained for long enough after a full-cache episode.
55 void NMethodSweeper::scan_stacks() {
56 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
57 if (!MethodFlushing) return;
58 _do_sweep = true;
60 // No need to synchronize access, since this is always executed at a
61 // safepoint. If we aren't in the middle of scan and a rescan
62 // hasn't been requested then just return. If UseCodeCacheFlushing is on and
63 // code cache flushing is in progress, don't skip sweeping to help make progress
64 // clearing space in the code cache.
65 if ((_current == NULL && !_rescan) && !(UseCodeCacheFlushing && !CompileBroker::should_compile_new_jobs())) {
66 _do_sweep = false;
67 return;
68 }
70 // Make sure CompiledIC_lock is unlocked, since we might update some
71 // inline caches. If it is locked, we just bail out and try later.
72 if (CompiledIC_lock->is_locked() || Patching_lock->is_locked()) return;
74 // Check for restart
75 assert(CodeCache::find_blob_unsafe(_current) == _current, "Sweeper nmethod cached state invalid");
76 if (_current == NULL) {
77 _seen = 0;
78 _invocations = NmethodSweepFraction;
79 _current = CodeCache::first_nmethod();
80 _traversals += 1;
81 if (PrintMethodFlushing) {
// NOTE(review): _traversals is a 'long' printed with %d — harmless on
// ILP32 but truncated on LP64; confirm the intended format.
82 tty->print_cr("### Sweep: stack traversal %d", _traversals);
83 }
// Walk all thread stacks so still-active not-entrant nmethods get
// marked as seen on stack (see MarkActivationClosure).
84 Threads::nmethods_do(&mark_activation_closure);
86 // reset the flags since we started a scan from the beginning.
87 _rescan = false;
88 _locked_seen = 0;
89 _not_entrant_seen_on_stack = 0;
90 }
92 if (UseCodeCacheFlushing) {
93 if (!CodeCache::needs_flushing()) {
94 // scan_stacks() runs during a safepoint, no race with setters
95 _advise_to_sweep = 0;
96 }
98 if (was_full()) {
99 // There was some progress so attempt to restart the compiler
100 jlong now = os::javaTimeMillis();
101 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
102 jlong curr_interval = now - _last_was_full;
// Only restart compilation when the cache has room again AND enough
// time has passed since the last full-cache episode (rate limiting).
103 if ((!CodeCache::needs_flushing()) && (curr_interval > max_interval)) {
104 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
105 set_was_full(false);
107 // Update the _last_was_full time so we can tell how fast the
108 // code cache is filling up
109 _last_was_full = os::javaTimeMillis();
111 if (PrintMethodFlushing) {
112 tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
113 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
114 }
115 if (LogCompilation && (xtty != NULL)) {
116 ttyLocker ttyl;
117 xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
118 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
119 xtty->stamp();
120 xtty->end_elem();
121 }
122 }
123 }
124 }
125 }
127 void NMethodSweeper::possibly_sweep() {
128 if ((!MethodFlushing) || (!_do_sweep)) return;
130 if (_invocations > 0) {
131 // Only one thread at a time will sweep
132 jint old = Atomic::cmpxchg( 1, &_sweep_started, 0 );
133 if (old != 0) {
134 return;
135 }
136 sweep_code_cache();
137 }
138 _sweep_started = 0;
139 }
// Sweeps one slice of the code cache (roughly nof_blobs/NmethodSweepFraction
// nmethods per invocation), calling process_nmethod() on each. Runs outside
// a safepoint; the CodeCache_lock is dropped around each process_nmethod()
// call so other threads can allocate/free code blobs concurrently.
141 void NMethodSweeper::sweep_code_cache() {
142 #ifdef ASSERT
// sweep_start is only read below under the same PrintMethodFlushing guard.
143 jlong sweep_start;
144 if(PrintMethodFlushing) {
145 sweep_start = os::javaTimeMillis();
146 }
147 #endif
148 if (PrintMethodFlushing && Verbose) {
149 tty->print_cr("### Sweep at %d out of %d. Invocations left: %d", _seen, CodeCache::nof_blobs(), _invocations);
150 }
152 // We want to visit all nmethods after NmethodSweepFraction invocations.
153 // If invocation is 1 we do the rest
154 int todo = CodeCache::nof_blobs();
155 if (_invocations > 1) {
156 todo = (CodeCache::nof_blobs() - _seen) / _invocations;
157 }
159 // Compilers may check to sweep more often than stack scans happen,
160 // don't keep trying once it is all scanned
161 _invocations--;
163 assert(!SafepointSynchronize::is_at_safepoint(), "should not be in safepoint when we get here");
164 assert(!CodeCache_lock->owned_by_self(), "just checking");
166 {
167 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
169 for(int i = 0; i < todo && _current != NULL; i++) {
171 // Since we will give up the CodeCache_lock, always skip ahead to an nmethod.
172 // Other blobs can be deleted by other threads
173 // Read next before we potentially delete current
174 CodeBlob* next = CodeCache::next_nmethod(_current);
176 // Now ready to process nmethod and give up CodeCache_lock
177 {
178 MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
179 process_nmethod((nmethod *)_current);
180 }
181 _seen++;
182 _current = next;
183 }
185 // Skip forward to the next nmethod (if any). Code blobs other than nmethods
186 // can be freed async to us and make _current invalid while we sleep.
// NOTE(review): if _current already points at a live nmethod here, this
// advance appears to skip it before the next slice — confirm
// CodeCache::next_nmethod()'s contract, and that it tolerates a NULL
// argument when the loop drained the cache.
187 _current = CodeCache::next_nmethod(_current);
188 }
190 if (_current == NULL && !_rescan && (_locked_seen || _not_entrant_seen_on_stack)) {
191 // we've completed a scan without making progress but there were
192 // nmethods we were unable to process either because they were
193 // locked or were still on stack. We don't have to aggressively
194 // clean them up so just stop scanning. We could scan once more
195 // but that complicates the control logic and it's unlikely to
196 // matter much.
197 if (PrintMethodFlushing) {
198 tty->print_cr("### Couldn't make progress on some nmethods so stopping sweep");
199 }
200 }
202 #ifdef ASSERT
203 if(PrintMethodFlushing) {
204 jlong sweep_end = os::javaTimeMillis();
205 tty->print_cr("### sweeper: sweep time(%d): " INT64_FORMAT, _invocations, sweep_end - sweep_start);
206 }
207 #endif
208 }
// Advances one nmethod through the flushing state machine:
//   alive -> not_entrant -> zombie -> marked_for_reclamation -> flushed.
// Called from sweep_code_cache() WITHOUT the CodeCache_lock held; takes
// CompiledIC_lock or CodeCache_lock internally as needed. VM-locked
// nmethods are skipped (their inline caches are still cleaned if alive).
211 void NMethodSweeper::process_nmethod(nmethod *nm) {
212 assert(!CodeCache_lock->owned_by_self(), "just checking");
214 // Skip methods that are currently referenced by the VM
215 if (nm->is_locked_by_vm()) {
216 // But still remember to clean-up inline caches for alive nmethods
217 if (nm->is_alive()) {
218 // Clean up all inline caches that point to zombie/not-entrant methods
219 MutexLocker cl(CompiledIC_lock);
220 nm->cleanup_inline_caches();
221 } else {
222 _locked_seen++;
223 }
224 return;
225 }
227 if (nm->is_zombie()) {
228 // If it is the first time we see this nmethod, we mark it. Otherwise,
229 // we reclaim it. When we have seen a zombie method twice, we know that
230 // there are no inline caches that refer to it.
231 if (nm->is_marked_for_reclamation()) {
232 assert(!nm->is_locked_by_vm(), "must not flush locked nmethods");
233 if (PrintMethodFlushing && Verbose) {
234 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (marked for reclamation) being flushed", nm->compile_id(), nm);
235 }
// flush() frees the nmethod's storage; requires the CodeCache_lock.
236 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
237 nm->flush();
238 } else {
239 if (PrintMethodFlushing && Verbose) {
240 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (zombie) being marked for reclamation", nm->compile_id(), nm);
241 }
242 nm->mark_for_reclamation();
// State changed: request another pass so the reclamation completes.
243 _rescan = true;
244 }
245 } else if (nm->is_not_entrant()) {
246 // If there is no current activations of this method on the
247 // stack we can safely convert it to a zombie method
248 if (nm->can_not_entrant_be_converted()) {
249 if (PrintMethodFlushing && Verbose) {
250 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
251 }
252 nm->make_zombie();
253 _rescan = true;
254 } else {
255 // Still alive, clean up its inline caches
256 MutexLocker cl(CompiledIC_lock);
257 nm->cleanup_inline_caches();
258 // we couldn't transition this nmethod so don't immediately
259 // request a rescan. If this method stays on the stack for a
260 // long time we don't want to keep rescanning the code cache.
261 _not_entrant_seen_on_stack++;
262 }
263 } else if (nm->is_unloaded()) {
264 // Unloaded code, just make it a zombie
265 if (PrintMethodFlushing && Verbose)
266 tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (unloaded) being made zombie", nm->compile_id(), nm);
267 if (nm->is_osr_method()) {
268 // No inline caches will ever point to osr methods, so we can just remove it
269 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
270 nm->flush();
271 } else {
272 nm->make_zombie();
273 _rescan = true;
274 }
275 } else {
276 assert(nm->is_alive(), "should be alive");
278 if (UseCodeCacheFlushing) {
// Candidate for eviction: speculatively disconnected earlier (method's
// _code no longer points here), not referenced by the VM, not an OSR
// method, old enough (two traversals since the forced cleanup), and the
// cache still needs flushing.
279 if ((nm->method()->code() != nm) && !(nm->is_locked_by_vm()) && !(nm->is_osr_method()) &&
280 (_traversals > _was_full_traversal+2) && (((uint)nm->compile_id()) < _highest_marked) &&
281 CodeCache::needs_flushing()) {
282 // This method has not been called since the forced cleanup happened
283 nm->make_not_entrant();
284 }
285 }
287 // Clean up all inline caches that point to zombie/not-entrant methods
288 MutexLocker cl(CompiledIC_lock);
289 nm->cleanup_inline_caches();
290 }
291 }
293 // Code cache unloading: when compilers notice the code cache is getting full,
294 // they will call a vm op that comes here. This code attempts to speculatively
295 // unload the oldest half of the nmethods (based on the compile job id) by
296 // saving the old code in a list in the CodeCache. Then
297 // execution resumes. If a method so marked is not called by the second sweeper
298 // stack traversal after the current one, the nmethod will be marked non-entrant and
299 // gotten rid of by normal sweeping. If the method is called, the methodOop's
300 // _code field is restored and the methodOop/nmethod
301 // go back to their normal state.
//
// is_full == true : the cache actually filled up; stop new compiles and
//                   (unless we filled up again too soon) run the cleanup op.
// is_full == false: advisory early cleaning; only the first caller to set
//                   _advise_to_sweep proceeds.
302 void NMethodSweeper::handle_full_code_cache(bool is_full) {
303 // Only the first one to notice can advise us to start early cleaning
304 if (!is_full){
305 jint old = Atomic::cmpxchg( 1, &_advise_to_sweep, 0 );
306 if (old != 0) {
307 return;
308 }
309 }
311 if (is_full) {
312 // Since code cache is full, immediately stop new compiles
313 bool did_set = CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
314 if (!did_set) {
315 // only the first to notice can start the cleaning,
316 // others will go back and block
317 return;
318 }
319 set_was_full(true);
321 // If we run out within MinCodeCacheFlushingInterval of the last unload time, give up
322 jlong now = os::javaTimeMillis();
323 jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
324 jlong curr_interval = now - _last_was_full;
325 if (curr_interval < max_interval) {
326 _rescan = true;
327 if (PrintMethodFlushing) {
328 tty->print_cr("### handle full too often, turning off compiler");
329 }
330 if (LogCompilation && (xtty != NULL)) {
331 ttyLocker ttyl;
// NOTE(review): curr_interval is a signed jlong printed with
// UINT64_FORMAT — fine while non-negative; confirm intent.
332 xtty->begin_elem("disable_compiler flushing_interval='" UINT64_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
333 curr_interval/1000, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
334 xtty->stamp();
335 xtty->end_elem();
336 }
337 return;
338 }
339 }
// Run the cleanup as a VM operation on the VMThread.
341 VM_HandleFullCodeCache op(is_full);
342 VMThread::execute(&op);
344 // rescan again as soon as possible
345 _rescan = true;
346 }
// Walks the code cache and speculatively disconnects in-use nmethods whose
// compile id is below flush_target (the midpoint between _highest_marked
// and the current max compile id), so they can be reclaimed by normal
// sweeping if not called again. Presumably invoked by VM_HandleFullCodeCache
// (see handle_full_code_cache) — confirm against vm_operations.
// is_full indicates the cache actually filled up (vs. advisory cleaning).
348 void NMethodSweeper::speculative_disconnect_nmethods(bool is_full) {
349 // If there was a race in detecting full code cache, only run
350 // one vm op for it or keep the compiler shut off
352 debug_only(jlong start = os::javaTimeMillis();)
// If someone already freed enough space since the full condition was
// detected, just restart the compiler and skip the traversal.
354 if ((!was_full()) && (is_full)) {
355 if (!CodeCache::needs_flushing()) {
356 if (PrintMethodFlushing) {
357 tty->print_cr("### sweeper: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes, restarting compiler",
358 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
359 }
360 if (LogCompilation && (xtty != NULL)) {
361 ttyLocker ttyl;
362 xtty->begin_elem("restart_compiler live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
363 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
364 xtty->stamp();
365 xtty->end_elem();
366 }
367 CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
368 return;
369 }
370 }
372 // Traverse the code cache trying to dump the oldest nmethods
373 uint curr_max_comp_id = CompileBroker::get_compilation_id();
// NOTE(review): unsigned midpoint — assumes _highest_marked never
// exceeds curr_max_comp_id, else the subtraction wraps; confirm.
374 uint flush_target = ((curr_max_comp_id - _highest_marked) >> 1) + _highest_marked;
375 if (PrintMethodFlushing && Verbose) {
376 tty->print_cr("### Cleaning code cache: Live blobs:" UINT32_FORMAT "/Free code cache:" SIZE_FORMAT " bytes",
377 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
378 }
379 if (LogCompilation && (xtty != NULL)) {
380 ttyLocker ttyl;
381 xtty->begin_elem("start_cleaning_code_cache live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
382 CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
383 xtty->stamp();
384 xtty->end_elem();
385 }
387 nmethod* nm = CodeCache::alive_nmethod(CodeCache::first());
388 jint disconnected = 0;
389 jint made_not_entrant = 0;
390 while ((nm != NULL)){
391 uint curr_comp_id = nm->compile_id();
393 // OSR methods cannot be flushed like this. Also, don't flush native methods
394 // since they are part of the JDK in most cases
395 if (nm->is_in_use() && (!nm->is_osr_method()) && (!nm->is_locked_by_vm()) &&
396 (!nm->is_native_method()) && ((curr_comp_id < flush_target))) {
398 if ((nm->method()->code() == nm)) {
399 // This method has not been previously considered for
400 // unloading or it was restored already
401 CodeCache::speculatively_disconnect(nm);
402 disconnected++;
403 } else if (nm->is_speculatively_disconnected()) {
404 // This method was previously considered for preemptive unloading and was not called since then
// Decay its counters so it must re-prove hotness before recompilation.
405 nm->method()->invocation_counter()->decay();
406 nm->method()->backedge_counter()->decay();
407 nm->make_not_entrant();
408 made_not_entrant++;
409 }
// Track the highest compile id we have considered, so the next cleaning
// round targets the half of the cache above it.
411 if (curr_comp_id > _highest_marked) {
412 _highest_marked = curr_comp_id;
413 }
414 }
415 nm = CodeCache::alive_nmethod(CodeCache::next(nm));
416 }
418 if (LogCompilation && (xtty != NULL)) {
419 ttyLocker ttyl;
420 xtty->begin_elem("stop_cleaning_code_cache disconnected='" UINT32_FORMAT "' made_not_entrant='" UINT32_FORMAT "' live_blobs='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
421 disconnected, made_not_entrant, CodeCache::nof_blobs(), CodeCache::unallocated_capacity());
422 xtty->stamp();
423 xtty->end_elem();
424 }
426 // Shut off compiler. Sweeper will start over with a new stack scan and
427 // traversal cycle and turn it back on if it clears enough space.
428 if (was_full()) {
429 _last_was_full = os::javaTimeMillis();
430 CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
431 }
433 // After two more traversals the sweeper will get rid of unrestored nmethods
434 _was_full_traversal = _traversals;
435 #ifdef ASSERT
// 'start' exists only in debug builds (debug_only above) — consistent
// with this ASSERT-guarded use.
436 jlong end = os::javaTimeMillis();
437 if(PrintMethodFlushing && Verbose) {
438 tty->print_cr("### sweeper: unload time: " INT64_FORMAT, end-start);
439 }
440 #endif
441 }