Thu, 07 Apr 2011 09:53:20 -0700
7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes
1 /*
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #ifndef SHARE_VM_OOPS_METHODOOP_HPP
26 #define SHARE_VM_OOPS_METHODOOP_HPP
28 #include "classfile/vmSymbols.hpp"
29 #include "code/compressedStream.hpp"
30 #include "compiler/oopMap.hpp"
31 #include "interpreter/invocationCounter.hpp"
32 #include "oops/constMethodOop.hpp"
33 #include "oops/constantPoolOop.hpp"
34 #include "oops/instanceKlass.hpp"
35 #include "oops/oop.hpp"
36 #include "oops/typeArrayOop.hpp"
37 #include "utilities/accessFlags.hpp"
38 #include "utilities/growableArray.hpp"
40 // A methodOop represents a Java method.
41 //
42 // Memory layout (each line represents a word). Note that most applications load thousands of methods,
43 // so keeping the size of this structure small has a big impact on footprint.
44 //
45 // We put all oops and method_size first for better gc cache locality.
46 //
47 // The actual bytecodes are inlined after the end of the methodOopDesc struct.
48 //
49 // There are bits in the access_flags telling whether inlined tables are present.
50 // Note that accessing the line number and local variable tables is not performance critical at all.
51 // Accessing the checked exceptions table is used by reflection, so we put that last to make access
52 // to it fast.
53 //
54 // The line number table is compressed and inlined following the byte codes. It is found as the first
55 // byte following the byte codes. The checked exceptions table and the local variable table are inlined
56 // after the line number table, and indexed from the end of the method. We do not compress the checked
57 // exceptions table since the average length is less than 2, and do not bother to compress the local
58 // variable table either since it is mostly absent.
59 //
60 // Note that native_function and signature_handler have to be at fixed offsets (required by the interpreter)
61 //
62 // |------------------------------------------------------|
63 // | header |
64 // | klass |
65 // |------------------------------------------------------|
66 // | constMethodOop (oop) |
67 // | constants (oop) |
68 // |------------------------------------------------------|
69 // | methodData (oop) |
70 // | interp_invocation_count |
71 // |------------------------------------------------------|
72 // | access_flags |
73 // | vtable_index |
74 // |------------------------------------------------------|
75 // | result_index (C++ interpreter only) |
76 // |------------------------------------------------------|
77 // | method_size | max_stack |
78 // | max_locals | size_of_parameters |
79 // |------------------------------------------------------|
80 // | intrinsic_id, (unused) | throwout_count |
81 // |------------------------------------------------------|
82 // | num_breakpoints | (unused) |
83 // |------------------------------------------------------|
84 // | invocation_counter |
85 // | backedge_counter |
86 // |------------------------------------------------------|
87 // | prev_time (tiered only, 64 bit wide) |
88 // | |
89 // |------------------------------------------------------|
90 // | rate (tiered) |
91 // |------------------------------------------------------|
92 // | code (pointer) |
93 // | i2i (pointer) |
94 // | adapter (pointer) |
95 // | from_compiled_entry (pointer) |
96 // | from_interpreted_entry (pointer) |
97 // |------------------------------------------------------|
98 // | native_function (present only if native) |
99 // | signature_handler (present only if native) |
100 // |------------------------------------------------------|
// Forward declarations for types only used here by pointer/reference.
103 class CheckedExceptionElement;
104 class LocalVariableTableElement;
105 class AdapterHandlerEntry;
106 class methodDataOopDesc;
108 class methodOopDesc : public oopDesc {
109  friend class methodKlass;
110  friend class VMStructs;
111  private:
112  constMethodOop _constMethod; // Method read-only data.
113  constantPoolOop _constants; // Constant pool
114  methodDataOop _method_data;
115  int _interpreter_invocation_count; // Count of times invoked (reused as prev_event_count in tiered)
116  AccessFlags _access_flags; // Access flags
117  int _vtable_index; // vtable index of this method (see VtableIndexFlag)
118  // note: can have vtables with >2**16 elements (because of inheritance)
119 #ifdef CC_INTERP
120  int _result_index; // C++ interpreter needs for converting results to/from stack
121 #endif
122  u2 _method_size; // size of this object
123  u2 _max_stack; // Maximum number of entries on the expression stack
124  u2 _max_locals; // Number of local variables used by this method
125  u2 _size_of_parameters; // size of the parameter block (receiver + arguments) in words
126  u1 _intrinsic_id; // vmSymbols::intrinsic_id (0 == _none)
127  u2 _interpreter_throwout_count; // Count of times method was exited via exception while interpreting
128  u2 _number_of_breakpoints; // fullspeed debugging support
129  InvocationCounter _invocation_counter; // Incremented before each activation of the method - used to trigger frequency-based optimizations
130  InvocationCounter _backedge_counter; // Incremented before each backedge taken - used to trigger frequency-based optimizations
132 #ifdef TIERED
133  jlong _prev_time; // Previous time the rate was acquired
134  float _rate; // Events (invocation and backedge counter increments) per millisecond
135 #endif
137 #ifndef PRODUCT
138  int _compiled_invocation_count; // Number of nmethod invocations so far (for perf. debugging)
139 #endif
140  // Entry point for calling both from and to the interpreter.
141  address _i2i_entry; // All-args-on-stack calling convention
142  // Adapter blob (i2c/c2i) for this methodOop. Set once when method is linked.
143  AdapterHandlerEntry* _adapter;
144  // Entry point for calling from compiled code, to compiled code if it exists
145  // or else the interpreter.
146  volatile address _from_compiled_entry; // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
147  // The entry point for calling both from and to compiled code is
148  // "_code->entry_point()". Because of tiered compilation and de-opt, this
149  // field can come and go. It can transition from NULL to not-null at any
150  // time (whenever a compile completes). It can transition from not-null to
151  // NULL only at safepoints (because of a de-opt).
152  nmethod* volatile _code; // Points to the corresponding piece of native code
153  volatile address _from_interpreted_entry; // Cache of _code ? _adapter->i2c_entry() : _i2i_entry
155 public:
157  // accessors for instance variables
158  constMethodOop constMethod() const { return _constMethod; }
159  void set_constMethod(constMethodOop xconst) { oop_store_without_check((oop*)&_constMethod, (oop)xconst); }
162  static address make_adapters(methodHandle mh, TRAPS);
163  volatile address from_compiled_entry() const { return (address)OrderAccess::load_ptr_acquire(&_from_compiled_entry); }
164  volatile address from_interpreted_entry() const{ return (address)OrderAccess::load_ptr_acquire(&_from_interpreted_entry); }
166  // access flags
167  AccessFlags access_flags() const { return _access_flags; }
168  void set_access_flags(AccessFlags flags) { _access_flags = flags; }
170  // name
171  Symbol* name() const { return _constants->symbol_at(name_index()); }
172  int name_index() const { return constMethod()->name_index(); }
173  void set_name_index(int index) { constMethod()->set_name_index(index); }
175  // signature
176  Symbol* signature() const { return _constants->symbol_at(signature_index()); }
177  int signature_index() const { return constMethod()->signature_index(); }
178  void set_signature_index(int index) { constMethod()->set_signature_index(index); }
180  // generics support
181  Symbol* generic_signature() const { int idx = generic_signature_index(); return ((idx != 0) ? _constants->symbol_at(idx) : (Symbol*)NULL); }
182  int generic_signature_index() const { return constMethod()->generic_signature_index(); }
183  void set_generic_signature_index(int index) { constMethod()->set_generic_signature_index(index); }
185  // annotations support
186  typeArrayOop annotations() const { return instanceKlass::cast(method_holder())->get_method_annotations_of(method_idnum()); }
187  typeArrayOop parameter_annotations() const { return instanceKlass::cast(method_holder())->get_method_parameter_annotations_of(method_idnum()); }
188  typeArrayOop annotation_default() const { return instanceKlass::cast(method_holder())->get_method_default_annotations_of(method_idnum()); }
190 #ifdef CC_INTERP
191  void set_result_index(BasicType type);
192  int result_index() { return _result_index; }
193 #endif
195  // Helper routine: get klass name + "." + method name + signature as
196  // C string, for the purpose of providing more useful NoSuchMethodErrors
197  // and fatal error handling. The string is allocated in resource
198  // area if a buffer is not provided by the caller.
199  char* name_and_sig_as_C_string();
200  char* name_and_sig_as_C_string(char* buf, int size);
202  // Static routines for the situations where we don't have a methodOop
203  static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature);
204  static char* name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size);
206  Bytecodes::Code java_code_at(int bci) const {
207  return Bytecodes::java_code_at(this, bcp_from(bci));
208  }
209  Bytecodes::Code code_at(int bci) const {
210  return Bytecodes::code_at(this, bcp_from(bci));
211  }
213  // JVMTI breakpoints
214  Bytecodes::Code orig_bytecode_at(int bci) const;
215  void set_orig_bytecode_at(int bci, Bytecodes::Code code);
216  void set_breakpoint(int bci);
217  void clear_breakpoint(int bci);
218  void clear_all_breakpoints();
219  // Tracking number of breakpoints, for fullspeed debugging.
220  // Only mutated by VM thread.
221  u2 number_of_breakpoints() const { return _number_of_breakpoints; }
222  void incr_number_of_breakpoints() { ++_number_of_breakpoints; }
223  void decr_number_of_breakpoints() { --_number_of_breakpoints; }
224  // Initialization only
225  void clear_number_of_breakpoints() { _number_of_breakpoints = 0; }
227  // index into instanceKlass methods() array
228  u2 method_idnum() const { return constMethod()->method_idnum(); }
229  void set_method_idnum(u2 idnum) { constMethod()->set_method_idnum(idnum); }
231  // code size
232  int code_size() const { return constMethod()->code_size(); }
234  // method size
235  int method_size() const { return _method_size; }
236  void set_method_size(int size) {
237  assert(0 <= size && size < (1 << 16), "invalid method size");
238  _method_size = size;
239  }
241  // constant pool for klassOop holding this method
242  constantPoolOop constants() const { return _constants; }
243  void set_constants(constantPoolOop c) { oop_store_without_check((oop*)&_constants, c); }
245  // max stack
246  int max_stack() const { return _max_stack; }
247  void set_max_stack(int size) { _max_stack = size; }
249  // max locals
250  int max_locals() const { return _max_locals; }
251  void set_max_locals(int size) { _max_locals = size; }
253  int highest_comp_level() const;
254  void set_highest_comp_level(int level);
255  int highest_osr_comp_level() const;
256  void set_highest_osr_comp_level(int level);
258  // Count of times method was exited via exception while interpreting
259  void interpreter_throwout_increment() {
260  if (_interpreter_throwout_count < 65534) { // saturate just below u2 max so the counter never wraps
261  _interpreter_throwout_count++;
262  }
263  }
265  int interpreter_throwout_count() const { return _interpreter_throwout_count; }
266  void set_interpreter_throwout_count(int count) { _interpreter_throwout_count = count; }
268  // size of parameters
269  int size_of_parameters() const { return _size_of_parameters; }
271  bool has_stackmap_table() const {
272  return constMethod()->has_stackmap_table();
273  }
275  typeArrayOop stackmap_data() const {
276  return constMethod()->stackmap_data();
277  }
279  void set_stackmap_data(typeArrayOop sd) {
280  constMethod()->set_stackmap_data(sd);
281  }
283  // exception handler table
284  typeArrayOop exception_table() const
285  { return constMethod()->exception_table(); }
286  void set_exception_table(typeArrayOop e)
287  { constMethod()->set_exception_table(e); }
288  bool has_exception_handler() const
289  { return constMethod()->has_exception_handler(); }
291  // Finds the first entry point bci of an exception handler for an
292  // exception of klass ex_klass thrown at throw_bci. A value of NULL
293  // for ex_klass indicates that the exception klass is not known; in
294  // this case it matches any constraint class. Returns -1 if the
295  // exception cannot be handled in this method. The handler
296  // constraint classes are loaded if necessary. Note that this may
297  // throw an exception if loading of the constraint classes causes
298  // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
299  // If an exception is thrown, returns the bci of the
300  // exception handler which caused the exception to be thrown, which
301  // is needed for proper retries. See, for example,
302  // InterpreterRuntime::exception_handler_for_exception.
303  int fast_exception_handler_bci_for(KlassHandle ex_klass, int throw_bci, TRAPS);
305  // method data access
306  methodDataOop method_data() const {
307  return _method_data;
308  }
309  void set_method_data(methodDataOop data) {
310  oop_store_without_check((oop*)&_method_data, (oop)data);
311  }
313  // invocation counter
314  InvocationCounter* invocation_counter() { return &_invocation_counter; }
315  InvocationCounter* backedge_counter() { return &_backedge_counter; }
317 #ifdef TIERED
318  // We are reusing interpreter_invocation_count as a holder for the previous event count!
319  // We can do that since interpreter_invocation_count is not used in tiered.
320  int prev_event_count() const { return _interpreter_invocation_count; }
321  void set_prev_event_count(int count) { _interpreter_invocation_count = count; }
322  jlong prev_time() const { return _prev_time; }
323  void set_prev_time(jlong time) { _prev_time = time; }
324  float rate() const { return _rate; }
325  void set_rate(float rate) { _rate = rate; }
326 #endif
328  int invocation_count();
329  int backedge_count();
331  bool was_executed_more_than(int n);
332  bool was_never_executed() { return !was_executed_more_than(0); }
334  static void build_interpreter_method_data(methodHandle method, TRAPS);
336  int interpreter_invocation_count() {
337  if (TieredCompilation) return invocation_count();
338  else return _interpreter_invocation_count;
339  }
340  void set_interpreter_invocation_count(int count) { _interpreter_invocation_count = count; }
341  int increment_interpreter_invocation_count() {
342  if (TieredCompilation) ShouldNotReachHere(); // in tiered, this field is repurposed as prev_event_count (see above)
343  return ++_interpreter_invocation_count;
344  }
346 #ifndef PRODUCT
347  int compiled_invocation_count() const { return _compiled_invocation_count; }
348  void set_compiled_invocation_count(int count) { _compiled_invocation_count = count; }
349 #endif // not PRODUCT
351  // Clear (non-shared space) pointers which could not be relevant
352  // if this (shared) method were mapped into another JVM.
353  void remove_unshareable_info();
355  // nmethod/verified compiler entry
356  address verified_code_entry();
357  bool check_code() const; // Not inline to avoid circular ref
358  nmethod* volatile code() const { assert( check_code(), "" ); return (nmethod *)OrderAccess::load_ptr_acquire(&_code); }
359  void clear_code(); // Clear out any compiled code
360  static void set_code(methodHandle mh, nmethod* code);
361  void set_adapter_entry(AdapterHandlerEntry* adapter) { _adapter = adapter; }
362  address get_i2c_entry();
363  address get_c2i_entry();
364  address get_c2i_unverified_entry();
365  AdapterHandlerEntry* adapter() { return _adapter; }
366  // setup entry points
367  void link_method(methodHandle method, TRAPS);
368  // clear entry points. Used by sharing code
369  void unlink_method();
371  // vtable index
372  enum VtableIndexFlag {
373  // Valid vtable indexes are non-negative (>= 0).
374  // These few negative values are used as sentinels.
375  highest_unused_vtable_index_value = -5,
376  invalid_vtable_index = -4, // distinct from any valid vtable index
377  garbage_vtable_index = -3, // not yet linked; no vtable layout yet
378  nonvirtual_vtable_index = -2 // there is no need for vtable dispatch
379  // 6330203 Note: Do not use -1, which was overloaded with many meanings.
380  };
381  DEBUG_ONLY(bool valid_vtable_index() const { return _vtable_index >= nonvirtual_vtable_index; })
382  int vtable_index() const { assert(valid_vtable_index(), "");
383  return _vtable_index; }
384  void set_vtable_index(int index) { _vtable_index = index; }
386  // interpreter entry
387  address interpreter_entry() const { return _i2i_entry; }
388  // Only used at first initialization so we can set _i2i_entry and _from_interpreted_entry
389  void set_interpreter_entry(address entry) { _i2i_entry = entry; _from_interpreted_entry = entry; }
390  int interpreter_kind(void) {
391  return constMethod()->interpreter_kind();
392  }
393  void set_interpreter_kind();
394  void set_interpreter_kind(int kind) {
395  constMethod()->set_interpreter_kind(kind);
396  }
398  // native function (used for native methods only)
399  enum {
400  native_bind_event_is_interesting = true
401  };
402  address native_function() const { return *(native_function_addr()); }
403  // Must specify a real function (not NULL).
404  // Use clear_native_function() to unregister.
405  void set_native_function(address function, bool post_event_flag);
406  bool has_native_function() const;
407  void clear_native_function();
409  // signature handler (used for native methods only)
410  address signature_handler() const { return *(signature_handler_addr()); }
411  void set_signature_handler(address handler);
413  // Interpreter oopmap support
414  void mask_for(int bci, InterpreterOopMap* mask);
416 #ifndef PRODUCT
417  // operations on invocation counter
418  void print_invocation_count();
419 #endif
421  // byte codes
422  void set_code(address code) { return constMethod()->set_code(code); }
423  address code_base() const { return constMethod()->code_base(); }
424  bool contains(address bcp) const { return constMethod()->contains(bcp); }
426  // prints byte codes
427  void print_codes() const { print_codes_on(tty); }
428  void print_codes_on(outputStream* st) const PRODUCT_RETURN;
429  void print_codes_on(int from, int to, outputStream* st) const PRODUCT_RETURN;
431  // checked exceptions
432  int checked_exceptions_length() const
433  { return constMethod()->checked_exceptions_length(); }
434  CheckedExceptionElement* checked_exceptions_start() const
435  { return constMethod()->checked_exceptions_start(); }
437  // localvariable table
438  bool has_localvariable_table() const
439  { return constMethod()->has_localvariable_table(); }
440  int localvariable_table_length() const
441  { return constMethod()->localvariable_table_length(); }
442  LocalVariableTableElement* localvariable_table_start() const
443  { return constMethod()->localvariable_table_start(); }
445  bool has_linenumber_table() const
446  { return constMethod()->has_linenumber_table(); }
447  u_char* compressed_linenumber_table() const
448  { return constMethod()->compressed_linenumber_table(); }
450  // method holder (the klassOop holding this method)
451  klassOop method_holder() const { return _constants->pool_holder(); }
453  void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
454  Symbol* klass_name() const; // returns the name of the method holder
455  BasicType result_type() const; // type of the method result
456  int result_type_index() const; // type index of the method result
457  bool is_returning_oop() const { BasicType r = result_type(); return (r == T_OBJECT || r == T_ARRAY); }
458  bool is_returning_fp() const { BasicType r = result_type(); return (r == T_FLOAT || r == T_DOUBLE); }
460  // Checked exceptions thrown by this method (resolved to mirrors)
461  objArrayHandle resolved_checked_exceptions(TRAPS) { return resolved_checked_exceptions_impl(this, THREAD); }
463  // Access flags
464  bool is_public() const { return access_flags().is_public(); }
465  bool is_private() const { return access_flags().is_private(); }
466  bool is_protected() const { return access_flags().is_protected(); }
467  bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
468  bool is_static() const { return access_flags().is_static(); }
469  bool is_final() const { return access_flags().is_final(); }
470  bool is_synchronized() const { return access_flags().is_synchronized();}
471  bool is_native() const { return access_flags().is_native(); }
472  bool is_abstract() const { return access_flags().is_abstract(); }
473  bool is_strict() const { return access_flags().is_strict(); }
474  bool is_synthetic() const { return access_flags().is_synthetic(); }
476  // returns true if the method contains only a return operation
477  bool is_empty_method() const;
479  // returns true if this is a vanilla constructor
480  bool is_vanilla_constructor() const;
482  // checks method and its method holder
483  bool is_final_method() const;
484  bool is_strict_method() const;
486  // true if method needs no dynamic dispatch (final and/or no vtable entry)
487  bool can_be_statically_bound() const;
489  // returns true if the method has any backward branches.
490  bool has_loops() {
491  return access_flags().loops_flag_init() ? access_flags().has_loops() : compute_has_loops_flag();
492  };
494  bool compute_has_loops_flag();
496  bool has_jsrs() {
497  return access_flags().has_jsrs();
498  };
499  void set_has_jsrs() {
500  _access_flags.set_has_jsrs();
501  }
503  // returns true if the method has any monitors.
504  bool has_monitors() const { return is_synchronized() || access_flags().has_monitor_bytecodes(); }
505  bool has_monitor_bytecodes() const { return access_flags().has_monitor_bytecodes(); }
507  void set_has_monitor_bytecodes() { _access_flags.set_has_monitor_bytecodes(); }
509  // monitor matching. This returns a conservative estimate of whether the monitorenter/monitorexit bytecodes
510  // properly nest in the method. It might return false, even though they actually nest properly, since the info
511  // has not been computed yet.
512  bool guaranteed_monitor_matching() const { return access_flags().is_monitor_matching(); }
513  void set_guaranteed_monitor_matching() { _access_flags.set_monitor_matching(); }
515  // returns true if the method is an accessor function (setter/getter).
516  bool is_accessor() const;
518  // returns true if the method is an initializer (<init> or <clinit>).
519  bool is_initializer() const;
521  // returns true if the method is static OR if the classfile version < 51
522  bool has_valid_initializer_flags() const;
524  // returns true if the method name is <clinit> and the method has
525  // valid static initializer flags.
526  bool is_static_initializer() const;
528  // compiled code support
529  // NOTE: code() is inherently racy as deopt can be clearing code
530  // simultaneously. Use with caution.
531  bool has_compiled_code() const { return code() != NULL; }
533  // sizing
534  static int object_size(bool is_native);
535  static int header_size() { return sizeof(methodOopDesc)/HeapWordSize; }
536  int object_size() const { return method_size(); }
538  bool object_is_parsable() const { return method_size() > 0; }
540  // interpreter support
541  static ByteSize const_offset() { return byte_offset_of(methodOopDesc, _constMethod ); }
542  static ByteSize constants_offset() { return byte_offset_of(methodOopDesc, _constants ); }
543  static ByteSize access_flags_offset() { return byte_offset_of(methodOopDesc, _access_flags ); }
544 #ifdef CC_INTERP
545  static ByteSize result_index_offset() { return byte_offset_of(methodOopDesc, _result_index ); }
546 #endif /* CC_INTERP */
547  static ByteSize size_of_locals_offset() { return byte_offset_of(methodOopDesc, _max_locals ); }
548  static ByteSize size_of_parameters_offset() { return byte_offset_of(methodOopDesc, _size_of_parameters); }
549  static ByteSize from_compiled_offset() { return byte_offset_of(methodOopDesc, _from_compiled_entry); }
550  static ByteSize code_offset() { return byte_offset_of(methodOopDesc, _code); }
551  static ByteSize invocation_counter_offset() { return byte_offset_of(methodOopDesc, _invocation_counter); }
552  static ByteSize backedge_counter_offset() { return byte_offset_of(methodOopDesc, _backedge_counter); }
553  static ByteSize method_data_offset() {
554  return byte_offset_of(methodOopDesc, _method_data);
555  }
556  static ByteSize interpreter_invocation_counter_offset() { return byte_offset_of(methodOopDesc, _interpreter_invocation_count); }
557 #ifndef PRODUCT
558  static ByteSize compiled_invocation_counter_offset() { return byte_offset_of(methodOopDesc, _compiled_invocation_count); }
559 #endif // not PRODUCT
560  static ByteSize native_function_offset() { return in_ByteSize(sizeof(methodOopDesc)); }
561  static ByteSize from_interpreted_offset() { return byte_offset_of(methodOopDesc, _from_interpreted_entry ); }
562  static ByteSize interpreter_entry_offset() { return byte_offset_of(methodOopDesc, _i2i_entry ); }
563  static ByteSize signature_handler_offset() { return in_ByteSize(sizeof(methodOopDesc) + wordSize); }
564  static ByteSize max_stack_offset() { return byte_offset_of(methodOopDesc, _max_stack ); }
566  // for code generation
567  static int method_data_offset_in_bytes() { return offset_of(methodOopDesc, _method_data); }
568  static int interpreter_invocation_counter_offset_in_bytes()
569  { return offset_of(methodOopDesc, _interpreter_invocation_count); }
570  static int intrinsic_id_offset_in_bytes() { return offset_of(methodOopDesc, _intrinsic_id); }
571  static int intrinsic_id_size_in_bytes() { return sizeof(u1); }
573  // Static methods that are used to implement member methods where an exposed this pointer
574  // is needed due to possible GCs
575  static objArrayHandle resolved_checked_exceptions_impl(methodOop this_oop, TRAPS);
577  // Returns the byte code index from the byte code pointer
578  int bci_from(address bcp) const;
579  address bcp_from(int bci) const;
580  int validate_bci_from_bcx(intptr_t bcx) const;
582  // Returns the line number for a bci if debugging information for the method is provided,
583  // -1 is returned otherwise.
584  int line_number_from_bci(int bci) const;
586  // Reflection support
587  bool is_overridden_in(klassOop k) const;
589  // JSR 292 support
590  bool is_method_handle_invoke() const { return access_flags().is_method_handle_invoke(); }
591  static bool is_method_handle_invoke_name(vmSymbols::SID name_sid);
592  static bool is_method_handle_invoke_name(Symbol* name) {
593  return is_method_handle_invoke_name(vmSymbols::find_sid(name));
594  }
595  // Tests if this method is an internal adapter frame from the
596  // MethodHandleCompiler.
597  bool is_method_handle_adapter() const;
598  static methodHandle make_invoke_method(KlassHandle holder,
599  Symbol* name, //invokeExact or invokeGeneric
600  Symbol* signature, //anything at all
601  Handle method_type,
602  TRAPS);
603  // these operate only on invoke methods:
604  oop method_handle_type() const;
605  static jint* method_type_offsets_chain(); // series of pointer-offsets, terminated by -1
606  // presize interpreter frames for extra interpreter stack entries, if needed
607  // method handles want to be able to push a few extra values (e.g., a bound receiver), and
608  // invokedynamic sometimes needs to push a bootstrap method, call site, and arglist,
609  // all without checking for a stack overflow
610  static int extra_stack_entries() { return (EnableMethodHandles ? (int)MethodHandlePushLimit : 0) + (EnableInvokeDynamic ? 3 : 0); }
611  static int extra_stack_words(); // = extra_stack_entries() * Interpreter::stackElementSize()
613  // RedefineClasses() support:
614  bool is_old() const { return access_flags().is_old(); }
615  void set_is_old() { _access_flags.set_is_old(); }
616  bool is_obsolete() const { return access_flags().is_obsolete(); }
617  void set_is_obsolete() { _access_flags.set_is_obsolete(); }
618  // see the definition in methodOop.cpp for the gory details
619  bool should_not_be_cached() const;
621  // JVMTI Native method prefixing support:
622  bool is_prefixed_native() const { return access_flags().is_prefixed_native(); }
623  void set_is_prefixed_native() { _access_flags.set_is_prefixed_native(); }
625  // Rewriting support
626  static methodHandle clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
627  u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS);
629  // Get this method's jmethodID -- allocate if it doesn't exist
630  jmethodID jmethod_id() { methodHandle this_h(this);
631  return instanceKlass::get_jmethod_id(method_holder(), this_h); }
633  // Lookup the jmethodID for this method. Return NULL if not found.
634  // NOTE that this function can be called from a signal handler
635  // (see AsyncGetCallTrace support for Forte Analyzer) and this
636  // needs to be async-safe. No allocation should be done and
637  // so handles are not used to avoid deadlock.
638  jmethodID find_jmethod_id_or_null() { return instanceKlass::cast(method_holder())->jmethod_id_or_null(this); }
640  // JNI static invoke cached itable index accessors
641  int cached_itable_index() { return instanceKlass::cast(method_holder())->cached_itable_index(method_idnum()); }
642  void set_cached_itable_index(int index) { instanceKlass::cast(method_holder())->set_cached_itable_index(method_idnum(), index); }
644  // Support for inlining of intrinsic methods
645  vmIntrinsics::ID intrinsic_id() const { return (vmIntrinsics::ID) _intrinsic_id; }
646  void set_intrinsic_id(vmIntrinsics::ID id) { _intrinsic_id = (u1) id; }
648  // Helper routines for intrinsic_id() and vmIntrinsics::method().
649  void init_intrinsic_id(); // updates from _none if a match
650  static vmSymbols::SID klass_id_for_intrinsics(klassOop holder);
652  // On-stack replacement support
653  bool has_osr_nmethod(int level, bool match_level) {
654  return instanceKlass::cast(method_holder())->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
655  }
657  nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
658  return instanceKlass::cast(method_holder())->lookup_osr_nmethod(this, bci, level, match_level);
659  }
661  // Inline cache support
662  void cleanup_inline_caches();
664  // Find if klass for method is loaded
665  bool is_klass_loaded_by_klass_index(int klass_index) const;
666  bool is_klass_loaded(int refinfo_index, bool must_be_resolved = false) const;
668  // Indicates whether compilation failed earlier for this method, or
669  // whether it is not compilable for another reason like having a
670  // breakpoint set in it.
671  bool is_not_compilable(int comp_level = CompLevel_any) const;
672  void set_not_compilable(int comp_level = CompLevel_all, bool report = true);
673  void set_not_compilable_quietly(int comp_level = CompLevel_all) {
674  set_not_compilable(comp_level, false);
675  }
676  bool is_not_osr_compilable(int comp_level = CompLevel_any) const {
677  return is_not_compilable(comp_level) || access_flags().is_not_osr_compilable();
678  }
679  void set_not_osr_compilable() { _access_flags.set_not_osr_compilable(); }
680  bool is_not_c1_compilable() const { return access_flags().is_not_c1_compilable(); }
681  void set_not_c1_compilable() { _access_flags.set_not_c1_compilable(); }
682  bool is_not_c2_compilable() const { return access_flags().is_not_c2_compilable(); }
683  void set_not_c2_compilable() { _access_flags.set_not_c2_compilable(); }
685  // Background compilation support
686  bool queued_for_compilation() const { return access_flags().queued_for_compilation(); }
687  void set_queued_for_compilation() { _access_flags.set_queued_for_compilation(); }
688  void clear_queued_for_compilation() { _access_flags.clear_queued_for_compilation(); }
690  // Resolve all classes in signature, return 'true' if successful
691  static bool load_signature_classes(methodHandle m, TRAPS);
693 // Return if true if not all classes references in signature, including return type, has been loaded
694 static bool has_unloaded_classes_in_signature(methodHandle m, TRAPS);
696 // Printing
697 void print_short_name(outputStream* st) /*PRODUCT_RETURN*/; // prints as klassname::methodname; Exposed so field engineers can debug VM
698 void print_name(outputStream* st) PRODUCT_RETURN; // prints as "virtual void foo(int)"
700 // Helper routine used for method sorting
701 static void sort_methods(objArrayOop methods,
702 objArrayOop methods_annotations,
703 objArrayOop methods_parameter_annotations,
704 objArrayOop methods_default_annotations,
705 bool idempotent = false);
707 // size of parameters
708 void set_size_of_parameters(int size) { _size_of_parameters = size; }
709 private:
711 // Inlined elements
712 address* native_function_addr() const { assert(is_native(), "must be native"); return (address*) (this+1); }
713 address* signature_handler_addr() const { return native_function_addr() + 1; }
715 // Garbage collection support
716 oop* adr_constMethod() const { return (oop*)&_constMethod; }
717 oop* adr_constants() const { return (oop*)&_constants; }
718 oop* adr_method_data() const { return (oop*)&_method_data; }
719 };
// Utility class for compressing line number tables.
// Pairs are encoded as deltas against the previously written pair; a pair
// whose deltas fit in (5-bit bci, 3-bit line) compresses to a single byte,
// anything else goes through the multi-byte write_pair_regular() escape.

class CompressedLineNumberWriteStream: public CompressedWriteStream {
 private:
  int _bci;     // bci of the most recently written pair (delta base)
  int _line;    // line number of the most recently written pair (delta base)
 public:
  // Constructor
  CompressedLineNumberWriteStream(int initial_size) : CompressedWriteStream(initial_size), _bci(0), _line(0) {}
  CompressedLineNumberWriteStream(u_char* buffer, int initial_size) : CompressedWriteStream(buffer, initial_size), _bci(0), _line(0) {}

  // Write (bci, line number) pair to stream
  void write_pair_regular(int bci_delta, int line_delta);

  inline void write_pair_inline(int bci, int line) {
    int bci_delta = bci - _bci;
    int line_delta = line - _line;
    // Update the delta base before any early return so the next pair is
    // always encoded relative to this one.
    _bci = bci;
    _line = line;
    // Skip (0,0) deltas - they do not add information and conflict with terminator.
    if (bci_delta == 0 && line_delta == 0) return;
    // Check if bci is 5-bit and line number 3-bit unsigned.
    // (Negative deltas fail this test and take the escape path below.)
    if (((bci_delta & ~0x1F) == 0) && ((line_delta & ~0x7) == 0)) {
      // Compress into single byte.
      jubyte value = ((jubyte) bci_delta << 3) | (jubyte) line_delta;
      // Check that value doesn't match escape character.
      if (value != 0xFF) {
        write_byte(value);
        return;
      }
    }
    write_pair_regular(bci_delta, line_delta);
  }

  // Windows AMD64 + Apr 2005 PSDK with /O2 generates bad code for write_pair.
  // Disabling optimization doesn't work for methods in header files
  // so we force it to call through the non-optimized version in the .cpp.
  // It's gross, but it's the only way we can ensure that all callers are
  // fixed. _MSC_VER is defined by the windows compiler
#if defined(_M_AMD64) && _MSC_VER >= 1400
  void write_pair(int bci, int line);
#else
  void write_pair(int bci, int line) { write_pair_inline(bci, line); }
#endif

  // Write end-of-stream marker (a zero byte — this is why (0,0) deltas
  // must never be emitted above).
  void write_terminator() { write_byte(0); }
};
// Utility class for decompressing line number tables written by
// CompressedLineNumberWriteStream.

class CompressedLineNumberReadStream: public CompressedReadStream {
 private:
  int _bci;     // bci reconstructed from the most recent read_pair()
  int _line;    // line number reconstructed from the most recent read_pair()
 public:
  // Constructor
  CompressedLineNumberReadStream(u_char* buffer);
  // Read (bci, line number) pair from stream. Returns false at end-of-stream.
  bool read_pair();
  // Accessing bci and line number (after calling read_pair)
  int bci() const   { return _bci; }
  int line() const  { return _line; }
};
/// Fast Breakpoints.

// If this structure gets more complicated (because bpts get numerous),
// move it into its own header.

// There is presently no provision for concurrent access
// to breakpoint lists, which is only OK for JVMTI because
// breakpoints are written only at safepoints, and are read
// concurrently only outside of safepoints.

class BreakpointInfo : public CHeapObj {
  friend class VMStructs;
 private:
  Bytecodes::Code  _orig_bytecode;    // original bytecode at _bci; saved so it can be restored — see set()/clear() in the .cpp
  int              _bci;              // bytecode index the breakpoint applies to
  u2               _name_index;       // of method
  u2               _signature_index;  // of method
  BreakpointInfo*  _next;             // simple storage allocation

 public:
  BreakpointInfo(methodOop m, int bci);

  // accessors
  Bytecodes::Code orig_bytecode()               { return _orig_bytecode; }
  void set_orig_bytecode(Bytecodes::Code code)  { _orig_bytecode = code; }
  int bci()                                     { return _bci; }

  BreakpointInfo* next() const                  { return _next; }
  void set_next(BreakpointInfo* n)              { _next = n; }

  // helps for searchers
  bool match(const methodOopDesc* m, int bci) {
    return bci == _bci && match(m);
  }

  // Matches on the method's (name, signature) constant pool indices rather
  // than on methodOop identity — presumably so a breakpoint still matches a
  // rewritten copy of the method; confirm against callers.
  bool match(const methodOopDesc* m) {
    return _name_index == m->name_index() &&
      _signature_index == m->signature_index();
  }

  // Install / remove this breakpoint in the given method (defined out-of-line).
  void set(methodOop method);
  void clear(methodOop method);
};
833 #endif // SHARE_VM_OOPS_METHODOOP_HPP