1.1 --- a/src/share/vm/runtime/compilationPolicy.cpp Thu Jan 26 16:49:22 2012 +0100
1.2 +++ b/src/share/vm/runtime/compilationPolicy.cpp Thu Jan 26 12:15:24 2012 -0800
1.3 @@ -306,29 +306,27 @@
1.4 return (current >= initial + target);
1.5 }
1.6 
1.7 -nmethod* NonTieredCompPolicy::event(methodHandle method, methodHandle inlinee, int branch_bci, int bci, CompLevel comp_level, nmethod* nm, TRAPS) {
1.8 +nmethod* NonTieredCompPolicy::event(methodHandle method, methodHandle inlinee, int branch_bci,
1.9 + int bci, CompLevel comp_level, nmethod* nm, JavaThread* thread) {
1.10 assert(comp_level == CompLevel_none, "This should be only called from the interpreter");
1.11 NOT_PRODUCT(trace_frequency_counter_overflow(method, branch_bci, bci));
1.12 - if (JvmtiExport::can_post_interpreter_events()) {
1.13 - assert(THREAD->is_Java_thread(), "Wrong type of thread");
1.14 - if (((JavaThread*)THREAD)->is_interp_only_mode()) {
1.15 - // If certain JVMTI events (e.g. frame pop event) are requested then the
1.16 - // thread is forced to remain in interpreted code. This is
1.17 - // implemented partly by a check in the run_compiled_code
1.18 - // section of the interpreter whether we should skip running
1.19 - // compiled code, and partly by skipping OSR compiles for
1.20 - // interpreted-only threads.
1.21 - if (bci != InvocationEntryBci) {
1.22 - reset_counter_for_back_branch_event(method);
1.23 - return NULL;
1.24 - }
1.25 + if (JvmtiExport::can_post_interpreter_events() && thread->is_interp_only_mode()) {
1.26 + // If certain JVMTI events (e.g. frame pop event) are requested then the
1.27 + // thread is forced to remain in interpreted code. This is
1.28 + // implemented partly by a check in the run_compiled_code
1.29 + // section of the interpreter whether we should skip running
1.30 + // compiled code, and partly by skipping OSR compiles for
1.31 + // interpreted-only threads.
1.32 + if (bci != InvocationEntryBci) {
1.33 + reset_counter_for_back_branch_event(method);
1.34 + return NULL;
1.35 }
1.36 }
1.37 if (bci == InvocationEntryBci) {
1.38 // when code cache is full, compilation gets switched off, UseCompiler
1.39 // is set to false
1.40 if (!method->has_compiled_code() && UseCompiler) {
1.41 - method_invocation_event(method, CHECK_NULL);
1.42 + method_invocation_event(method, thread);
1.43 } else {
1.44 // Force counter overflow on method entry, even if no compilation
1.45 // happened. (The method_invocation_event call does this also.)
1.46 @@ -344,7 +342,7 @@
1.47 NOT_PRODUCT(trace_osr_request(method, osr_nm, bci));
1.48 // when code cache is full, we should not compile any more...
1.49 if (osr_nm == NULL && UseCompiler) {
1.50 - method_back_branch_event(method, bci, CHECK_NULL);
1.51 + method_back_branch_event(method, bci, thread);
1.52 osr_nm = method->lookup_osr_nmethod_for(bci, CompLevel_highest_tier, true);
1.53 }
1.54 if (osr_nm == NULL) {
1.55 @@ -395,7 +393,7 @@
1.56 
1.57 // SimpleCompPolicy - compile current method
1.58 
1.59 -void SimpleCompPolicy::method_invocation_event( methodHandle m, TRAPS) {
1.60 +void SimpleCompPolicy::method_invocation_event(methodHandle m, JavaThread* thread) {
1.61 int hot_count = m->invocation_count();
1.62 reset_counter_for_invocation_event(m);
1.63 const char* comment = "count";
1.64 @@ -405,18 +403,18 @@
1.65 if (nm == NULL ) {
1.66 const char* comment = "count";
1.67 CompileBroker::compile_method(m, InvocationEntryBci, CompLevel_highest_tier,
1.68 - m, hot_count, comment, CHECK);
1.69 + m, hot_count, comment, thread);
1.70 }
1.71 }
1.72 }
1.73 
1.74 -void SimpleCompPolicy::method_back_branch_event(methodHandle m, int bci, TRAPS) {
1.75 +void SimpleCompPolicy::method_back_branch_event(methodHandle m, int bci, JavaThread* thread) {
1.76 int hot_count = m->backedge_count();
1.77 const char* comment = "backedge_count";
1.78 
1.79 if (is_compilation_enabled() && !m->is_not_osr_compilable() && can_be_compiled(m)) {
1.80 CompileBroker::compile_method(m, bci, CompLevel_highest_tier,
1.81 - m, hot_count, comment, CHECK);
1.82 + m, hot_count, comment, thread);
1.83 NOT_PRODUCT(trace_osr_completion(m->lookup_osr_nmethod_for(bci, CompLevel_highest_tier, true));)
1.84 }
1.85 }
1.86 @@ -427,14 +425,13 @@
1.87 
1.88 
1.89 // Consider m for compilation
1.90 -void StackWalkCompPolicy::method_invocation_event(methodHandle m, TRAPS) {
1.91 +void StackWalkCompPolicy::method_invocation_event(methodHandle m, JavaThread* thread) {
1.92 int hot_count = m->invocation_count();
1.93 reset_counter_for_invocation_event(m);
1.94 const char* comment = "count";
1.95 
1.96 if (is_compilation_enabled() && m->code() == NULL && can_be_compiled(m)) {
1.97 - ResourceMark rm(THREAD);
1.98 - JavaThread *thread = (JavaThread*)THREAD;
1.99 + ResourceMark rm(thread);
1.100 frame fr = thread->last_frame();
1.101 assert(fr.is_interpreted_frame(), "must be interpreted");
1.102 assert(fr.interpreter_frame_method() == m(), "bad method");
1.103 @@ -461,17 +458,17 @@
1.104 assert(top != NULL, "findTopInlinableFrame returned null");
1.105 if (TraceCompilationPolicy) top->print();
1.106 CompileBroker::compile_method(top->top_method(), InvocationEntryBci, CompLevel_highest_tier,
1.107 - m, hot_count, comment, CHECK);
1.108 + m, hot_count, comment, thread);
1.109 }
1.110 }
1.111 }
1.112 
1.113 -void StackWalkCompPolicy::method_back_branch_event(methodHandle m, int bci, TRAPS) {
1.114 +void StackWalkCompPolicy::method_back_branch_event(methodHandle m, int bci, JavaThread* thread) {
1.115 int hot_count = m->backedge_count();
1.116 const char* comment = "backedge_count";
1.117 
1.118 if (is_compilation_enabled() && !m->is_not_osr_compilable() && can_be_compiled(m)) {
1.119 - CompileBroker::compile_method(m, bci, CompLevel_highest_tier, m, hot_count, comment, CHECK);
1.120 + CompileBroker::compile_method(m, bci, CompLevel_highest_tier, m, hot_count, comment, thread);
1.121 
1.122 NOT_PRODUCT(trace_osr_completion(m->lookup_osr_nmethod_for(bci, CompLevel_highest_tier, true));)
1.123 }