src/share/vm/oops/methodOop.cpp

author:      johnc
date:        Thu, 07 Apr 2011 09:53:20 -0700
changeset:   2781:e1162778c1c8
parent:      2639:8033953d67ff
child:       2742:ed69575596ac
permissions: -rw-r--r--

7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes
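A minimal, self-contained sketch of the read barrier described in the summary (for illustration only; the names Object, Reference, marking_active, satb_buffer, and get_referent are placeholders, not HotSpot's actual G1 barrier-set or oop APIs):

    #include <vector>

    struct Object;                               // stand-in for an ordinary heap object
    struct Reference { Object* referent; };      // stand-in for java.lang.ref.Reference

    static bool marking_active = false;          // assumed: set while concurrent marking runs
    static std::vector<Object*> satb_buffer;     // assumed: per-thread SATB logging buffer

    // Read barrier for Reference::referent: if concurrent marking is in progress and the
    // referent is non-null, log it so the marker treats it as live even if it was only
    // weakly reachable when marking started.
    Object* get_referent(Reference* ref) {
      Object* referent = ref->referent;
      if (referent != nullptr && marking_active) {
        satb_buffer.push_back(referent);
      }
      return referent;
    }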

duke@435 1 /*
never@2462 2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/systemDictionary.hpp"
stefank@2314 27 #include "code/debugInfoRec.hpp"
stefank@2314 28 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 29 #include "interpreter/bytecodeStream.hpp"
stefank@2314 30 #include "interpreter/bytecodeTracer.hpp"
stefank@2314 31 #include "interpreter/bytecodes.hpp"
stefank@2314 32 #include "interpreter/interpreter.hpp"
stefank@2314 33 #include "interpreter/oopMapCache.hpp"
stefank@2314 34 #include "memory/gcLocker.hpp"
stefank@2314 35 #include "memory/generation.hpp"
stefank@2314 36 #include "memory/oopFactory.hpp"
stefank@2314 37 #include "oops/klassOop.hpp"
stefank@2314 38 #include "oops/methodDataOop.hpp"
stefank@2314 39 #include "oops/methodOop.hpp"
stefank@2314 40 #include "oops/oop.inline.hpp"
coleenp@2497 41 #include "oops/symbol.hpp"
stefank@2314 42 #include "prims/jvmtiExport.hpp"
stefank@2314 43 #include "prims/methodHandleWalk.hpp"
stefank@2314 44 #include "prims/nativeLookup.hpp"
stefank@2314 45 #include "runtime/arguments.hpp"
stefank@2314 46 #include "runtime/compilationPolicy.hpp"
stefank@2314 47 #include "runtime/frame.inline.hpp"
stefank@2314 48 #include "runtime/handles.inline.hpp"
stefank@2314 49 #include "runtime/relocator.hpp"
stefank@2314 50 #include "runtime/sharedRuntime.hpp"
stefank@2314 51 #include "runtime/signature.hpp"
stefank@2314 52 #include "utilities/xmlstream.hpp"
duke@435 53
duke@435 54
duke@435 55 // Implementation of methodOopDesc
duke@435 56
duke@435 57 address methodOopDesc::get_i2c_entry() {
duke@435 58 assert(_adapter != NULL, "must have");
duke@435 59 return _adapter->get_i2c_entry();
duke@435 60 }
duke@435 61
duke@435 62 address methodOopDesc::get_c2i_entry() {
duke@435 63 assert(_adapter != NULL, "must have");
duke@435 64 return _adapter->get_c2i_entry();
duke@435 65 }
duke@435 66
duke@435 67 address methodOopDesc::get_c2i_unverified_entry() {
duke@435 68 assert(_adapter != NULL, "must have");
duke@435 69 return _adapter->get_c2i_unverified_entry();
duke@435 70 }
duke@435 71
duke@435 72 char* methodOopDesc::name_and_sig_as_C_string() {
duke@435 73 return name_and_sig_as_C_string(Klass::cast(constants()->pool_holder()), name(), signature());
duke@435 74 }
duke@435 75
duke@435 76 char* methodOopDesc::name_and_sig_as_C_string(char* buf, int size) {
duke@435 77 return name_and_sig_as_C_string(Klass::cast(constants()->pool_holder()), name(), signature(), buf, size);
duke@435 78 }
duke@435 79
coleenp@2497 80 char* methodOopDesc::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
duke@435 81 const char* klass_name = klass->external_name();
duke@435 82 int klass_name_len = (int)strlen(klass_name);
duke@435 83 int method_name_len = method_name->utf8_length();
duke@435 84 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
duke@435 85 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
duke@435 86 strcpy(dest, klass_name);
duke@435 87 dest[klass_name_len] = '.';
duke@435 88 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
duke@435 89 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
duke@435 90 dest[len] = 0;
duke@435 91 return dest;
duke@435 92 }
duke@435 93
coleenp@2497 94 char* methodOopDesc::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
coleenp@2497 95 Symbol* klass_name = klass->name();
duke@435 96 klass_name->as_klass_external_name(buf, size);
duke@435 97 int len = (int)strlen(buf);
duke@435 98
duke@435 99 if (len < size - 1) {
duke@435 100 buf[len++] = '.';
duke@435 101
duke@435 102 method_name->as_C_string(&(buf[len]), size - len);
duke@435 103 len = (int)strlen(buf);
duke@435 104
duke@435 105 signature->as_C_string(&(buf[len]), size - len);
duke@435 106 }
duke@435 107
duke@435 108 return buf;
duke@435 109 }
duke@435 110
duke@435 111 int methodOopDesc::fast_exception_handler_bci_for(KlassHandle ex_klass, int throw_bci, TRAPS) {
duke@435 112 // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
duke@435 113 const int beg_bci_offset = 0;
duke@435 114 const int end_bci_offset = 1;
duke@435 115 const int handler_bci_offset = 2;
duke@435 116 const int klass_index_offset = 3;
duke@435 117 const int entry_size = 4;
duke@435 118 // access exception table
duke@435 119 typeArrayHandle table (THREAD, constMethod()->exception_table());
duke@435 120 int length = table->length();
duke@435 121 assert(length % entry_size == 0, "exception table format has changed");
duke@435 122 // iterate through all entries sequentially
duke@435 123 constantPoolHandle pool(THREAD, constants());
duke@435 124 for (int i = 0; i < length; i += entry_size) {
duke@435 125 int beg_bci = table->int_at(i + beg_bci_offset);
duke@435 126 int end_bci = table->int_at(i + end_bci_offset);
duke@435 127 assert(beg_bci <= end_bci, "inconsistent exception table");
duke@435 128 if (beg_bci <= throw_bci && throw_bci < end_bci) {
duke@435 129 // exception handler bci range covers throw_bci => investigate further
duke@435 130 int handler_bci = table->int_at(i + handler_bci_offset);
duke@435 131 int klass_index = table->int_at(i + klass_index_offset);
duke@435 132 if (klass_index == 0) {
duke@435 133 return handler_bci;
duke@435 134 } else if (ex_klass.is_null()) {
duke@435 135 return handler_bci;
duke@435 136 } else {
duke@435 137 // we know the exception class => get the constraint class
duke@435 138 // this may require loading of the constraint class; if verification
duke@435 139 // fails or some other exception occurs, return handler_bci
duke@435 140 klassOop k = pool->klass_at(klass_index, CHECK_(handler_bci));
duke@435 141 KlassHandle klass = KlassHandle(THREAD, k);
duke@435 142 assert(klass.not_null(), "klass not loaded");
duke@435 143 if (ex_klass->is_subtype_of(klass())) {
duke@435 144 return handler_bci;
duke@435 145 }
duke@435 146 }
duke@435 147 }
duke@435 148 }
duke@435 149
duke@435 150 return -1;
duke@435 151 }
duke@435 152
duke@435 153 void methodOopDesc::mask_for(int bci, InterpreterOopMap* mask) {
duke@435 154
duke@435 155 Thread* myThread = Thread::current();
duke@435 156 methodHandle h_this(myThread, this);
duke@435 157 #ifdef ASSERT
duke@435 158 bool has_capability = myThread->is_VM_thread() ||
duke@435 159 myThread->is_ConcurrentGC_thread() ||
duke@435 160 myThread->is_GC_task_thread();
duke@435 161
duke@435 162 if (!has_capability) {
duke@435 163 if (!VerifyStack && !VerifyLastFrame) {
duke@435 164 // verify stack calls this outside VM thread
duke@435 165 warning("oopmap should only be accessed by the "
duke@435 166 "VM, GC task or CMS threads (or during debugging)");
duke@435 167 InterpreterOopMap local_mask;
duke@435 168 instanceKlass::cast(method_holder())->mask_for(h_this, bci, &local_mask);
duke@435 169 local_mask.print();
duke@435 170 }
duke@435 171 }
duke@435 172 #endif
duke@435 173 instanceKlass::cast(method_holder())->mask_for(h_this, bci, mask);
duke@435 174 return;
duke@435 175 }
duke@435 176
duke@435 177
duke@435 178 int methodOopDesc::bci_from(address bcp) const {
jrose@1161 179 assert(is_native() && bcp == code_base() || contains(bcp) || is_error_reported(), "bcp doesn't belong to this method");
duke@435 180 return bcp - code_base();
duke@435 181 }
duke@435 182
duke@435 183
duke@435 184 // Return (int)bcx if it appears to be a valid BCI.
duke@435 185 // Return bci_from((address)bcx) if it appears to be a valid BCP.
duke@435 186 // Return -1 otherwise.
duke@435 187 // Used by profiling code, when invalid data is a possibility.
duke@435 188 // The caller is responsible for validating the methodOop itself.
duke@435 189 int methodOopDesc::validate_bci_from_bcx(intptr_t bcx) const {
duke@435 190 // keep bci as -1 if not a valid bci
duke@435 191 int bci = -1;
duke@435 192 if (bcx == 0 || (address)bcx == code_base()) {
duke@435 193 // code_size() may return 0 and we allow 0 here
duke@435 194 // the method may be native
duke@435 195 bci = 0;
duke@435 196 } else if (frame::is_bci(bcx)) {
duke@435 197 if (bcx < code_size()) {
duke@435 198 bci = (int)bcx;
duke@435 199 }
duke@435 200 } else if (contains((address)bcx)) {
duke@435 201 bci = (address)bcx - code_base();
duke@435 202 }
duke@435 203 // Assert that if we have dodged any asserts, bci is negative.
duke@435 204 assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
duke@435 205 return bci;
duke@435 206 }
duke@435 207
duke@435 208 address methodOopDesc::bcp_from(int bci) const {
duke@435 209 assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), "illegal bci");
duke@435 210 address bcp = code_base() + bci;
duke@435 211 assert(is_native() && bcp == code_base() || contains(bcp), "bcp doesn't belong to this method");
duke@435 212 return bcp;
duke@435 213 }
duke@435 214
duke@435 215
duke@435 216 int methodOopDesc::object_size(bool is_native) {
duke@435 217 // If native, then include pointers for native_function and signature_handler
duke@435 218 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
duke@435 219 int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;
duke@435 220 return align_object_size(header_size() + extra_words);
duke@435 221 }
duke@435 222
duke@435 223
coleenp@2497 224 Symbol* methodOopDesc::klass_name() const {
duke@435 225 klassOop k = method_holder();
duke@435 226 assert(k->is_klass(), "must be klass");
duke@435 227 instanceKlass* ik = (instanceKlass*) k->klass_part();
duke@435 228 return ik->name();
duke@435 229 }
duke@435 230
duke@435 231
duke@435 232 void methodOopDesc::set_interpreter_kind() {
duke@435 233 int kind = Interpreter::method_kind(methodOop(this));
duke@435 234 assert(kind != Interpreter::invalid,
duke@435 235 "interpreter entry must be valid");
duke@435 236 set_interpreter_kind(kind);
duke@435 237 }
duke@435 238
duke@435 239
duke@435 240 // Attempt to return method oop to original state. Clear any pointers
duke@435 241 // (to objects outside the shared spaces). We won't be able to predict
duke@435 242 // where they should point in a new JVM. Further initialize some
duke@435 243 // entries now in order to allow them to be write protected later.
duke@435 244
duke@435 245 void methodOopDesc::remove_unshareable_info() {
duke@435 246 unlink_method();
duke@435 247 set_interpreter_kind();
duke@435 248 }
duke@435 249
duke@435 250
iveresov@2138 251 bool methodOopDesc::was_executed_more_than(int n) {
duke@435 252 // Invocation counter is reset when the methodOop is compiled.
duke@435 253 // If the method has compiled code we therefore assume it has
duke@435 254 // been executed more than n times.
duke@435 255 if (is_accessor() || is_empty_method() || (code() != NULL)) {
duke@435 256 // interpreter doesn't bump invocation counter of trivial methods
duke@435 257 // compiler does not bump invocation counter of compiled methods
duke@435 258 return true;
iveresov@2138 259 }
iveresov@2138 260 else if (_invocation_counter.carry() || (method_data() != NULL && method_data()->invocation_counter()->carry())) {
duke@435 261 // The carry bit is set when the counter overflows and causes
duke@435 262 // a compilation to occur. We don't know how many times
duke@435 263 // the counter has been reset, so we simply assume it has
duke@435 264 // been executed more than n times.
duke@435 265 return true;
duke@435 266 } else {
duke@435 267 return invocation_count() > n;
duke@435 268 }
duke@435 269 }
duke@435 270
duke@435 271 #ifndef PRODUCT
iveresov@2138 272 void methodOopDesc::print_invocation_count() {
duke@435 273 if (is_static()) tty->print("static ");
duke@435 274 if (is_final()) tty->print("final ");
duke@435 275 if (is_synchronized()) tty->print("synchronized ");
duke@435 276 if (is_native()) tty->print("native ");
duke@435 277 method_holder()->klass_part()->name()->print_symbol_on(tty);
duke@435 278 tty->print(".");
duke@435 279 name()->print_symbol_on(tty);
duke@435 280 signature()->print_symbol_on(tty);
duke@435 281
duke@435 282 if (WizardMode) {
duke@435 283 // dump the size of the byte codes
duke@435 284 tty->print(" {%d}", code_size());
duke@435 285 }
duke@435 286 tty->cr();
duke@435 287
duke@435 288 tty->print_cr (" interpreter_invocation_count: %8d ", interpreter_invocation_count());
duke@435 289 tty->print_cr (" invocation_counter: %8d ", invocation_count());
duke@435 290 tty->print_cr (" backedge_counter: %8d ", backedge_count());
duke@435 291 if (CountCompiledCalls) {
duke@435 292 tty->print_cr (" compiled_invocation_count: %8d ", compiled_invocation_count());
duke@435 293 }
duke@435 294
duke@435 295 }
duke@435 296 #endif
duke@435 297
duke@435 298 // Build a methodDataOop object to hold information about this method
duke@435 299 // collected in the interpreter.
duke@435 300 void methodOopDesc::build_interpreter_method_data(methodHandle method, TRAPS) {
coleenp@2363 301 // Do not profile method if current thread holds the pending list lock,
coleenp@2363 302 // which avoids deadlock when acquiring the MethodData_lock.
coleenp@2363 303 if (instanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {
coleenp@2363 304 return;
coleenp@2363 305 }
coleenp@2363 306
duke@435 307 // Grab a lock here to prevent multiple
duke@435 308 // methodDataOops from being created.
duke@435 309 MutexLocker ml(MethodData_lock, THREAD);
duke@435 310 if (method->method_data() == NULL) {
duke@435 311 methodDataOop method_data = oopFactory::new_methodData(method, CHECK);
duke@435 312 method->set_method_data(method_data);
duke@435 313 if (PrintMethodData && (Verbose || WizardMode)) {
duke@435 314 ResourceMark rm(THREAD);
duke@435 315 tty->print("build_interpreter_method_data for ");
duke@435 316 method->print_name(tty);
duke@435 317 tty->cr();
duke@435 318 // At the end of the run, the MDO, full of data, will be dumped.
duke@435 319 }
duke@435 320 }
duke@435 321 }
duke@435 322
duke@435 323 void methodOopDesc::cleanup_inline_caches() {
duke@435 324 // The current system doesn't use inline caches in the interpreter
duke@435 325 // => nothing to do (keep this method around for future use)
duke@435 326 }
duke@435 327
duke@435 328
jrose@1145 329 int methodOopDesc::extra_stack_words() {
jrose@1145 330 // not an inline function, to avoid a header dependency on Interpreter
twisti@1861 331 return extra_stack_entries() * Interpreter::stackElementSize;
jrose@1145 332 }
jrose@1145 333
jrose@1145 334
duke@435 335 void methodOopDesc::compute_size_of_parameters(Thread *thread) {
coleenp@2497 336 ArgumentSizeComputer asc(signature());
duke@435 337 set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
duke@435 338 }
duke@435 339
duke@435 340 #ifdef CC_INTERP
duke@435 341 void methodOopDesc::set_result_index(BasicType type) {
duke@435 342 _result_index = Interpreter::BasicType_as_index(type);
duke@435 343 }
duke@435 344 #endif
duke@435 345
duke@435 346 BasicType methodOopDesc::result_type() const {
duke@435 347 ResultTypeFinder rtf(signature());
duke@435 348 return rtf.type();
duke@435 349 }
duke@435 350
duke@435 351
duke@435 352 bool methodOopDesc::is_empty_method() const {
duke@435 353 return code_size() == 1
duke@435 354 && *code_base() == Bytecodes::_return;
duke@435 355 }
duke@435 356
duke@435 357
duke@435 358 bool methodOopDesc::is_vanilla_constructor() const {
duke@435 359 // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
duke@435 360 // which only calls the superclass vanilla constructor and possibly does stores of
duke@435 361 // zero constants to local fields:
duke@435 362 //
duke@435 363 // aload_0
duke@435 364 // invokespecial
duke@435 365 // indexbyte1
duke@435 366 // indexbyte2
duke@435 367 //
duke@435 368 // followed by an (optional) sequence of:
duke@435 369 //
duke@435 370 // aload_0
duke@435 371 // aconst_null / iconst_0 / fconst_0 / dconst_0
duke@435 372 // putfield
duke@435 373 // indexbyte1
duke@435 374 // indexbyte2
duke@435 375 //
duke@435 376 // followed by:
duke@435 377 //
duke@435 378 // return
duke@435 379
duke@435 380 assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");
duke@435 381 assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
duke@435 382 int size = code_size();
duke@435 383 // Check if the size matches
duke@435 384 if (size == 0 || size % 5 != 0) return false;
duke@435 385 address cb = code_base();
duke@435 386 int last = size - 1;
duke@435 387 if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
duke@435 388 // Does not call superclass default constructor
duke@435 389 return false;
duke@435 390 }
duke@435 391 // Check optional sequence
duke@435 392 for (int i = 4; i < last; i += 5) {
duke@435 393 if (cb[i] != Bytecodes::_aload_0) return false;
duke@435 394 if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
duke@435 395 if (cb[i+2] != Bytecodes::_putfield) return false;
duke@435 396 }
duke@435 397 return true;
duke@435 398 }
duke@435 399
duke@435 400
duke@435 401 bool methodOopDesc::compute_has_loops_flag() {
duke@435 402 BytecodeStream bcs(methodOop(this));
duke@435 403 Bytecodes::Code bc;
duke@435 404
duke@435 405 while ((bc = bcs.next()) >= 0) {
duke@435 406 switch( bc ) {
duke@435 407 case Bytecodes::_ifeq:
duke@435 408 case Bytecodes::_ifnull:
duke@435 409 case Bytecodes::_iflt:
duke@435 410 case Bytecodes::_ifle:
duke@435 411 case Bytecodes::_ifne:
duke@435 412 case Bytecodes::_ifnonnull:
duke@435 413 case Bytecodes::_ifgt:
duke@435 414 case Bytecodes::_ifge:
duke@435 415 case Bytecodes::_if_icmpeq:
duke@435 416 case Bytecodes::_if_icmpne:
duke@435 417 case Bytecodes::_if_icmplt:
duke@435 418 case Bytecodes::_if_icmpgt:
duke@435 419 case Bytecodes::_if_icmple:
duke@435 420 case Bytecodes::_if_icmpge:
duke@435 421 case Bytecodes::_if_acmpeq:
duke@435 422 case Bytecodes::_if_acmpne:
duke@435 423 case Bytecodes::_goto:
duke@435 424 case Bytecodes::_jsr:
duke@435 425 if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();
duke@435 426 break;
duke@435 427
duke@435 428 case Bytecodes::_goto_w:
duke@435 429 case Bytecodes::_jsr_w:
duke@435 430 if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();
duke@435 431 break;
duke@435 432 }
duke@435 433 }
duke@435 434 _access_flags.set_loops_flag_init();
duke@435 435 return _access_flags.has_loops();
duke@435 436 }
duke@435 437
duke@435 438
duke@435 439 bool methodOopDesc::is_final_method() const {
duke@435 440 // %%% Should return true for private methods also,
duke@435 441 // since there is no way to override them.
duke@435 442 return is_final() || Klass::cast(method_holder())->is_final();
duke@435 443 }
duke@435 444
duke@435 445
duke@435 446 bool methodOopDesc::is_strict_method() const {
duke@435 447 return is_strict();
duke@435 448 }
duke@435 449
duke@435 450
duke@435 451 bool methodOopDesc::can_be_statically_bound() const {
duke@435 452 if (is_final_method()) return true;
duke@435 453 return vtable_index() == nonvirtual_vtable_index;
duke@435 454 }
duke@435 455
duke@435 456
duke@435 457 bool methodOopDesc::is_accessor() const {
duke@435 458 if (code_size() != 5) return false;
duke@435 459 if (size_of_parameters() != 1) return false;
never@2462 460 if (java_code_at(0) != Bytecodes::_aload_0 ) return false;
never@2462 461 if (java_code_at(1) != Bytecodes::_getfield) return false;
never@2462 462 if (java_code_at(4) != Bytecodes::_areturn &&
never@2462 463 java_code_at(4) != Bytecodes::_ireturn ) return false;
duke@435 464 return true;
duke@435 465 }
duke@435 466
duke@435 467
duke@435 468 bool methodOopDesc::is_initializer() const {
kamg@2616 469 return name() == vmSymbols::object_initializer_name() || is_static_initializer();
kamg@2616 470 }
kamg@2616 471
kamg@2616 472 bool methodOopDesc::has_valid_initializer_flags() const {
kamg@2616 473 return (is_static() ||
kamg@2616 474 instanceKlass::cast(method_holder())->major_version() < 51);
kamg@2616 475 }
kamg@2616 476
kamg@2616 477 bool methodOopDesc::is_static_initializer() const {
kamg@2616 478 // For classfiles version 51 or greater, ensure that the clinit method is
kamg@2616 479 // static. Non-static methods with the name "<clinit>" are not static
kamg@2616 480 // initializers. (older classfiles exempted for backward compatibility)
kamg@2616 481 return name() == vmSymbols::class_initializer_name() &&
kamg@2616 482 has_valid_initializer_flags();
duke@435 483 }
duke@435 484
duke@435 485
duke@435 486 objArrayHandle methodOopDesc::resolved_checked_exceptions_impl(methodOop this_oop, TRAPS) {
duke@435 487 int length = this_oop->checked_exceptions_length();
duke@435 488 if (length == 0) { // common case
duke@435 489 return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
duke@435 490 } else {
duke@435 491 methodHandle h_this(THREAD, this_oop);
never@1577 492 objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
duke@435 493 objArrayHandle mirrors (THREAD, m_oop);
duke@435 494 for (int i = 0; i < length; i++) {
duke@435 495 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
duke@435 496 klassOop k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
never@1577 497 assert(Klass::cast(k)->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
duke@435 498 mirrors->obj_at_put(i, Klass::cast(k)->java_mirror());
duke@435 499 }
duke@435 500 return mirrors;
duke@435 501 }
duke@435 502 };
duke@435 503
duke@435 504
duke@435 505 int methodOopDesc::line_number_from_bci(int bci) const {
duke@435 506 if (bci == SynchronizationEntryBCI) bci = 0;
duke@435 507 assert(bci == 0 || 0 <= bci && bci < code_size(), "illegal bci");
duke@435 508 int best_bci = 0;
duke@435 509 int best_line = -1;
duke@435 510
duke@435 511 if (has_linenumber_table()) {
duke@435 512 // The line numbers are a short array of 2-tuples [start_pc, line_number].
duke@435 513 // Not necessarily sorted and not necessarily one-to-one.
duke@435 514 CompressedLineNumberReadStream stream(compressed_linenumber_table());
duke@435 515 while (stream.read_pair()) {
duke@435 516 if (stream.bci() == bci) {
duke@435 517 // perfect match
duke@435 518 return stream.line();
duke@435 519 } else {
duke@435 520 // update best_bci/line
duke@435 521 if (stream.bci() < bci && stream.bci() >= best_bci) {
duke@435 522 best_bci = stream.bci();
duke@435 523 best_line = stream.line();
duke@435 524 }
duke@435 525 }
duke@435 526 }
duke@435 527 }
duke@435 528 return best_line;
duke@435 529 }
duke@435 530
duke@435 531
duke@435 532 bool methodOopDesc::is_klass_loaded_by_klass_index(int klass_index) const {
duke@435 533 if( _constants->tag_at(klass_index).is_unresolved_klass() ) {
duke@435 534 Thread *thread = Thread::current();
coleenp@2497 535 Symbol* klass_name = _constants->klass_name_at(klass_index);
duke@435 536 Handle loader(thread, instanceKlass::cast(method_holder())->class_loader());
duke@435 537 Handle prot (thread, Klass::cast(method_holder())->protection_domain());
duke@435 538 return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
duke@435 539 } else {
duke@435 540 return true;
duke@435 541 }
duke@435 542 }
duke@435 543
duke@435 544
duke@435 545 bool methodOopDesc::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
duke@435 546 int klass_index = _constants->klass_ref_index_at(refinfo_index);
duke@435 547 if (must_be_resolved) {
duke@435 548 // Make sure klass is resolved in constantpool.
duke@435 549 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
duke@435 550 }
duke@435 551 return is_klass_loaded_by_klass_index(klass_index);
duke@435 552 }
duke@435 553
duke@435 554
duke@435 555 void methodOopDesc::set_native_function(address function, bool post_event_flag) {
duke@435 556 assert(function != NULL, "use clear_native_function to unregister natives");
duke@435 557 address* native_function = native_function_addr();
duke@435 558
duke@435 559 // We can see racers trying to place the same native function into place. Once
duke@435 560 // is plenty.
duke@435 561 address current = *native_function;
duke@435 562 if (current == function) return;
duke@435 563 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
duke@435 564 function != NULL) {
duke@435 565 // native_method_throw_unsatisfied_link_error_entry() should only
duke@435 566 // be passed when post_event_flag is false.
duke@435 567 assert(function !=
duke@435 568 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 569 "post_event_flag mis-match");
duke@435 570
duke@435 571 // post the bind event, and possibly change the bind function
duke@435 572 JvmtiExport::post_native_method_bind(this, &function);
duke@435 573 }
duke@435 574 *native_function = function;
duke@435 575 // This function can be called more than once. We must make sure that we always
duke@435 576 // use the latest registered method -> check if a stub already has been generated.
duke@435 577 // If so, we have to make it not_entrant.
duke@435 578 nmethod* nm = code(); // Put it into local variable to guard against concurrent updates
duke@435 579 if (nm != NULL) {
duke@435 580 nm->make_not_entrant();
duke@435 581 }
duke@435 582 }
duke@435 583
duke@435 584
duke@435 585 bool methodOopDesc::has_native_function() const {
duke@435 586 address func = native_function();
duke@435 587 return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
duke@435 588 }
duke@435 589
duke@435 590
duke@435 591 void methodOopDesc::clear_native_function() {
duke@435 592 set_native_function(
duke@435 593 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 594 !native_bind_event_is_interesting);
duke@435 595 clear_code();
duke@435 596 }
duke@435 597
duke@435 598
duke@435 599 void methodOopDesc::set_signature_handler(address handler) {
duke@435 600 address* signature_handler = signature_handler_addr();
duke@435 601 *signature_handler = handler;
duke@435 602 }
duke@435 603
duke@435 604
duke@435 605 bool methodOopDesc::is_not_compilable(int comp_level) const {
jrose@1145 606 if (is_method_handle_invoke()) {
jrose@1145 607 // compilers must recognize this method specially, or not at all
jrose@1145 608 return true;
jrose@1145 609 }
iveresov@2138 610 if (number_of_breakpoints() > 0) {
iveresov@2138 611 return true;
duke@435 612 }
iveresov@2138 613 if (comp_level == CompLevel_any) {
iveresov@2138 614 return is_not_c1_compilable() || is_not_c2_compilable();
iveresov@2138 615 }
iveresov@2138 616 if (is_c1_compile(comp_level)) {
iveresov@2138 617 return is_not_c1_compilable();
iveresov@2138 618 }
iveresov@2138 619 if (is_c2_compile(comp_level)) {
iveresov@2138 620 return is_not_c2_compilable();
iveresov@2138 621 }
iveresov@2138 622 return false;
duke@435 623 }
duke@435 624
duke@435 625 // call this when the compiler finds that this method is not compilable
kvn@1643 626 void methodOopDesc::set_not_compilable(int comp_level, bool report) {
kvn@1643 627 if (PrintCompilation && report) {
kvn@1641 628 ttyLocker ttyl;
kvn@1641 629 tty->print("made not compilable ");
kvn@1641 630 this->print_short_name(tty);
kvn@1641 631 int size = this->code_size();
kvn@1641 632 if (size > 0)
kvn@1641 633 tty->print(" (%d bytes)", size);
kvn@1641 634 tty->cr();
kvn@1641 635 }
duke@435 636 if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
duke@435 637 ttyLocker ttyl;
duke@435 638 xtty->begin_elem("make_not_compilable thread='%d'", (int) os::current_thread_id());
duke@435 639 xtty->method(methodOop(this));
duke@435 640 xtty->stamp();
duke@435 641 xtty->end_elem();
duke@435 642 }
iveresov@2138 643 if (comp_level == CompLevel_all) {
iveresov@2138 644 set_not_c1_compilable();
iveresov@2138 645 set_not_c2_compilable();
iveresov@2138 646 } else {
iveresov@2138 647 if (is_c1_compile(comp_level)) {
iveresov@2138 648 set_not_c1_compilable();
iveresov@2138 649 } else
iveresov@2138 650 if (is_c2_compile(comp_level)) {
iveresov@2138 651 set_not_c2_compilable();
iveresov@2138 652 }
duke@435 653 }
iveresov@2138 654 CompilationPolicy::policy()->disable_compilation(this);
duke@435 655 }
duke@435 656
duke@435 657 // Revert to using the interpreter and clear out the nmethod
duke@435 658 void methodOopDesc::clear_code() {
duke@435 659
duke@435 660 // This may be NULL if c2i adapters have not been made yet.
duke@435 661 // Should only happen at allocation time.
duke@435 662 if (_adapter == NULL) {
duke@435 663 _from_compiled_entry = NULL;
duke@435 664 } else {
duke@435 665 _from_compiled_entry = _adapter->get_c2i_entry();
duke@435 666 }
duke@435 667 OrderAccess::storestore();
duke@435 668 _from_interpreted_entry = _i2i_entry;
duke@435 669 OrderAccess::storestore();
duke@435 670 _code = NULL;
duke@435 671 }
duke@435 672
duke@435 673 // Called by class data sharing to remove any entry points (which are not shared)
duke@435 674 void methodOopDesc::unlink_method() {
duke@435 675 _code = NULL;
duke@435 676 _i2i_entry = NULL;
duke@435 677 _from_interpreted_entry = NULL;
duke@435 678 if (is_native()) {
duke@435 679 *native_function_addr() = NULL;
duke@435 680 set_signature_handler(NULL);
duke@435 681 }
duke@435 682 NOT_PRODUCT(set_compiled_invocation_count(0);)
duke@435 683 invocation_counter()->reset();
duke@435 684 backedge_counter()->reset();
duke@435 685 _adapter = NULL;
duke@435 686 _from_compiled_entry = NULL;
duke@435 687 assert(_method_data == NULL, "unexpected method data?");
duke@435 688 set_method_data(NULL);
duke@435 689 set_interpreter_throwout_count(0);
duke@435 690 set_interpreter_invocation_count(0);
duke@435 691 }
duke@435 692
duke@435 693 // Called when the method_holder is getting linked. Set up entry points so the method
duke@435 694 // is ready to be called from interpreter, compiler, and vtables.
duke@435 695 void methodOopDesc::link_method(methodHandle h_method, TRAPS) {
duke@435 696 assert(_i2i_entry == NULL, "should only be called once");
duke@435 697 assert(_adapter == NULL, "init'd to NULL" );
duke@435 698 assert( _code == NULL, "nothing compiled yet" );
duke@435 699
duke@435 700 // Setup interpreter entrypoint
duke@435 701 assert(this == h_method(), "wrong h_method()" );
duke@435 702 address entry = Interpreter::entry_for_method(h_method);
duke@435 703 assert(entry != NULL, "interpreter entry must be non-null");
duke@435 704 // Sets both _i2i_entry and _from_interpreted_entry
duke@435 705 set_interpreter_entry(entry);
jrose@1145 706 if (is_native() && !is_method_handle_invoke()) {
duke@435 707 set_native_function(
duke@435 708 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 709 !native_bind_event_is_interesting);
duke@435 710 }
duke@435 711
duke@435 712 // Setup compiler entrypoint. This is made eagerly, so we do not need
duke@435 713 // special handling of vtables. An alternative is to make adapters more
duke@435 714 // lazily by calling make_adapter() from from_compiled_entry() for the
duke@435 715 // normal calls. For vtable calls life gets more complicated. When a
duke@435 716 // call-site goes mega-morphic we need adapters in all methods which can be
duke@435 717 // called from the vtable. We need adapters on such methods that get loaded
duke@435 718 // later. Ditto for mega-morphic itable calls. If this proves to be a
duke@435 719 // problem we'll make these lazily later.
duke@435 720 (void) make_adapters(h_method, CHECK);
duke@435 721
duke@435 722 // ONLY USE the h_method now as make_adapter may have blocked
duke@435 723
duke@435 724 }
duke@435 725
duke@435 726 address methodOopDesc::make_adapters(methodHandle mh, TRAPS) {
duke@435 727 // Adapters for compiled code are made eagerly here. They are fairly
duke@435 728 // small (generally < 100 bytes) and quick to make (and cached and shared)
duke@435 729 // so making them eagerly shouldn't be too expensive.
duke@435 730 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
duke@435 731 if (adapter == NULL ) {
never@1622 732 THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
duke@435 733 }
duke@435 734
duke@435 735 mh->set_adapter_entry(adapter);
duke@435 736 mh->_from_compiled_entry = adapter->get_c2i_entry();
duke@435 737 return adapter->get_c2i_entry();
duke@435 738 }
duke@435 739
duke@435 740 // The verified_code_entry() must be called when an invoke is resolved
duke@435 741 // on this method.
duke@435 742
duke@435 743 // It returns the compiled code entry point, after asserting not null.
duke@435 744 // This function is called after potential safepoints so that the nmethod
duke@435 745 // or adapter that it points to is still live and valid.
duke@435 746 // This function must not hit a safepoint!
duke@435 747 address methodOopDesc::verified_code_entry() {
duke@435 748 debug_only(No_Safepoint_Verifier nsv;)
kvn@1637 749 nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
kvn@1637 750 if (code == NULL && UseCodeCacheFlushing) {
kvn@1637 751 nmethod *saved_code = CodeCache::find_and_remove_saved_code(this);
kvn@1637 752 if (saved_code != NULL) {
kvn@1637 753 methodHandle method(this);
kvn@1637 754 assert( ! saved_code->is_osr_method(), "should not get here for osr" );
kvn@1637 755 set_code( method, saved_code );
kvn@1637 756 }
kvn@1637 757 }
kvn@1637 758
duke@435 759 assert(_from_compiled_entry != NULL, "must be set");
duke@435 760 return _from_compiled_entry;
duke@435 761 }
duke@435 762
duke@435 763 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
duke@435 764 // (could be racing a deopt).
duke@435 765 // Not inline to avoid circular ref.
duke@435 766 bool methodOopDesc::check_code() const {
duke@435 767 // cached in a register or local. There's a race on the value of the field.
duke@435 768 nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
duke@435 769 return code == NULL || (code->method() == NULL) || (code->method() == (methodOop)this && !code->is_osr_method());
duke@435 770 }
duke@435 771
duke@435 772 // Install compiled code. Instantly it can execute.
duke@435 773 void methodOopDesc::set_code(methodHandle mh, nmethod *code) {
duke@435 774 assert( code, "use clear_code to remove code" );
duke@435 775 assert( mh->check_code(), "" );
duke@435 776
duke@435 777 guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
duke@435 778
duke@435 779 // These writes must happen in this order, because the interpreter will
duke@435 780 // directly jump to from_interpreted_entry which jumps to an i2c adapter
duke@435 781 // which jumps to _from_compiled_entry.
duke@435 782 mh->_code = code; // Assign before allowing compiled code to exec
duke@435 783
duke@435 784 int comp_level = code->comp_level();
duke@435 785 // In theory there could be a race here. In practice it is unlikely
duke@435 786 // and not worth worrying about.
iveresov@2138 787 if (comp_level > mh->highest_comp_level()) {
iveresov@2138 788 mh->set_highest_comp_level(comp_level);
duke@435 789 }
duke@435 790
duke@435 791 OrderAccess::storestore();
twisti@2047 792 #ifdef SHARK
twisti@2200 793 mh->_from_interpreted_entry = code->insts_begin();
twisti@2047 794 #else
duke@435 795 mh->_from_compiled_entry = code->verified_entry_point();
duke@435 796 OrderAccess::storestore();
duke@435 797 // Instantly compiled code can execute.
duke@435 798 mh->_from_interpreted_entry = mh->get_i2c_entry();
twisti@2047 799 #endif // SHARK
duke@435 800
duke@435 801 }
duke@435 802
duke@435 803
duke@435 804 bool methodOopDesc::is_overridden_in(klassOop k) const {
duke@435 805 instanceKlass* ik = instanceKlass::cast(k);
duke@435 806
duke@435 807 if (ik->is_interface()) return false;
duke@435 808
duke@435 809 // If the method's holder is an interface, we skip it - except if it
duke@435 810 // is a miranda method
duke@435 811 if (instanceKlass::cast(method_holder())->is_interface()) {
duke@435 812 // Check that method is not a miranda method
duke@435 813 if (ik->lookup_method(name(), signature()) == NULL) {
duke@435 814 // No implementation exists - so it is a miranda method
duke@435 815 return false;
duke@435 816 }
duke@435 817 return true;
duke@435 818 }
duke@435 819
duke@435 820 assert(ik->is_subclass_of(method_holder()), "should be subklass");
duke@435 821 assert(ik->vtable() != NULL, "vtable should exist");
duke@435 822 if (vtable_index() == nonvirtual_vtable_index) {
duke@435 823 return false;
duke@435 824 } else {
duke@435 825 methodOop vt_m = ik->method_at_vtable(vtable_index());
duke@435 826 return vt_m != methodOop(this);
duke@435 827 }
duke@435 828 }
duke@435 829
duke@435 830
dcubed@483 831 // give advice about whether this methodOop should be cached or not
dcubed@483 832 bool methodOopDesc::should_not_be_cached() const {
dcubed@483 833 if (is_old()) {
dcubed@483 834 // This method has been redefined. It is either EMCP or obsolete
dcubed@483 835 // and we don't want to cache it because that would pin the method
dcubed@483 836 // down and prevent it from being collectible if and when it
dcubed@483 837 // finishes executing.
dcubed@483 838 return true;
dcubed@483 839 }
dcubed@483 840
dcubed@483 841 if (mark()->should_not_be_cached()) {
dcubed@483 842 // It is either not safe or not a good idea to cache this
dcubed@483 843 // method at this time because of the state of the embedded
dcubed@483 844 // markOop. See markOop.cpp for the gory details.
dcubed@483 845 return true;
dcubed@483 846 }
dcubed@483 847
dcubed@483 848 // caching this method should be just fine
dcubed@483 849 return false;
dcubed@483 850 }
dcubed@483 851
jrose@1862 852 bool methodOopDesc::is_method_handle_invoke_name(vmSymbols::SID name_sid) {
jrose@1862 853 switch (name_sid) {
jrose@1862 854 case vmSymbols::VM_SYMBOL_ENUM_NAME(invokeExact_name):
jrose@1862 855 case vmSymbols::VM_SYMBOL_ENUM_NAME(invokeGeneric_name):
jrose@1862 856 return true;
jrose@1862 857 }
jrose@2638 858 if ((AllowTransitionalJSR292 || AllowInvokeForInvokeGeneric)
jrose@2148 859 && name_sid == vmSymbols::VM_SYMBOL_ENUM_NAME(invoke_name))
jrose@2148 860 return true;
jrose@1862 861 return false;
jrose@1862 862 }
jrose@1862 863
jrose@1145 864 // Constant pool structure for invoke methods:
jrose@1145 865 enum {
jrose@1862 866 _imcp_invoke_name = 1, // utf8: 'invokeExact' or 'invokeGeneric'
coleenp@2497 867 _imcp_invoke_signature, // utf8: (variable Symbol*)
jrose@2639 868 _imcp_method_type_value, // string: (variable java/lang/invoke/MethodType, sic)
jrose@1145 869 _imcp_limit
jrose@1145 870 };
jrose@1145 871
jrose@1145 872 oop methodOopDesc::method_handle_type() const {
jrose@1145 873 if (!is_method_handle_invoke()) { assert(false, "caller resp."); return NULL; }
jrose@1145 874 oop mt = constants()->resolved_string_at(_imcp_method_type_value);
jrose@1145 875 assert(mt->klass() == SystemDictionary::MethodType_klass(), "");
jrose@1145 876 return mt;
jrose@1145 877 }
jrose@1145 878
jrose@1145 879 jint* methodOopDesc::method_type_offsets_chain() {
jrose@1145 880 static jint pchase[] = { -1, -1, -1 };
jrose@1145 881 if (pchase[0] == -1) {
jrose@1145 882 jint step0 = in_bytes(constants_offset());
jrose@1145 883 jint step1 = (constantPoolOopDesc::header_size() + _imcp_method_type_value) * HeapWordSize;
jrose@1145 884 // do this in reverse to avoid races:
jrose@1145 885 OrderAccess::release_store(&pchase[1], step1);
jrose@1145 886 OrderAccess::release_store(&pchase[0], step0);
jrose@1145 887 }
jrose@1145 888 return pchase;
jrose@1145 889 }
jrose@1145 890
twisti@1587 891 //------------------------------------------------------------------------------
twisti@1587 892 // methodOopDesc::is_method_handle_adapter
twisti@1587 893 //
twisti@1587 894 // Tests if this method is an internal adapter frame from the
twisti@1587 895 // MethodHandleCompiler.
jrose@1862 896 // Must be consistent with MethodHandleCompiler::get_method_oop().
twisti@1587 897 bool methodOopDesc::is_method_handle_adapter() const {
jrose@2017 898 if (is_synthetic() &&
jrose@2017 899 !is_native() && // has code from MethodHandleCompiler
jrose@2017 900 is_method_handle_invoke_name(name()) &&
jrose@2017 901 MethodHandleCompiler::klass_is_method_handle_adapter_holder(method_holder())) {
jrose@2017 902 assert(!is_method_handle_invoke(), "disjoint");
jrose@2017 903 return true;
jrose@2017 904 } else {
jrose@2017 905 return false;
jrose@2017 906 }
twisti@1587 907 }
twisti@1587 908
jrose@1145 909 methodHandle methodOopDesc::make_invoke_method(KlassHandle holder,
coleenp@2497 910 Symbol* name,
coleenp@2497 911 Symbol* signature,
jrose@1145 912 Handle method_type, TRAPS) {
jrose@1145 913 methodHandle empty;
jrose@1145 914
jrose@1145 915 assert(holder() == SystemDictionary::MethodHandle_klass(),
jrose@1145 916 "must be a JSR 292 magic type");
jrose@1145 917
jrose@1145 918 if (TraceMethodHandles) {
jrose@1145 919 tty->print("Creating invoke method for ");
jrose@1145 920 signature->print_value();
jrose@1145 921 tty->cr();
jrose@1145 922 }
jrose@1145 923
jrose@1145 924 constantPoolHandle cp;
jrose@1145 925 {
jrose@1145 926 constantPoolOop cp_oop = oopFactory::new_constantPool(_imcp_limit, IsSafeConc, CHECK_(empty));
jrose@1145 927 cp = constantPoolHandle(THREAD, cp_oop);
jrose@1145 928 }
coleenp@2497 929 cp->symbol_at_put(_imcp_invoke_name, name);
coleenp@2497 930 cp->symbol_at_put(_imcp_invoke_signature, signature);
coleenp@2497 931 cp->string_at_put(_imcp_method_type_value, Universe::the_null_string());
jrose@1145 932 cp->set_pool_holder(holder());
jrose@1145 933
jrose@1145 934 // set up the fancy stuff:
jrose@1145 935 cp->pseudo_string_at_put(_imcp_method_type_value, method_type());
jrose@1145 936 methodHandle m;
jrose@1145 937 {
jrose@1145 938 int flags_bits = (JVM_MH_INVOKE_BITS | JVM_ACC_PUBLIC | JVM_ACC_FINAL);
jrose@1145 939 methodOop m_oop = oopFactory::new_method(0, accessFlags_from(flags_bits),
jrose@1145 940 0, 0, 0, IsSafeConc, CHECK_(empty));
jrose@1145 941 m = methodHandle(THREAD, m_oop);
jrose@1145 942 }
jrose@1145 943 m->set_constants(cp());
jrose@1145 944 m->set_name_index(_imcp_invoke_name);
jrose@1145 945 m->set_signature_index(_imcp_invoke_signature);
jrose@1862 946 assert(is_method_handle_invoke_name(m->name()), "");
coleenp@2497 947 assert(m->signature() == signature, "");
jrose@2148 948 assert(m->is_method_handle_invoke(), "");
jrose@1145 949 #ifdef CC_INTERP
twisti@2563 950 ResultTypeFinder rtf(signature);
jrose@1145 951 m->set_result_index(rtf.type());
jrose@1145 952 #endif
jrose@1145 953 m->compute_size_of_parameters(THREAD);
jrose@1145 954 m->set_exception_table(Universe::the_empty_int_array());
jrose@2148 955 m->init_intrinsic_id();
jrose@2148 956 assert(m->intrinsic_id() == vmIntrinsics::_invokeExact ||
jrose@2148 957 m->intrinsic_id() == vmIntrinsics::_invokeGeneric, "must be an invoker");
jrose@1145 958
jrose@1145 959 // Finally, set up its entry points.
jrose@1145 960 assert(m->method_handle_type() == method_type(), "");
jrose@1145 961 assert(m->can_be_statically_bound(), "");
jrose@1145 962 m->set_vtable_index(methodOopDesc::nonvirtual_vtable_index);
jrose@1145 963 m->link_method(m, CHECK_(empty));
jrose@1145 964
jrose@1145 965 #ifdef ASSERT
jrose@1145 966 // Make sure the pointer chase works.
jrose@1145 967 address p = (address) m();
jrose@1145 968 for (jint* pchase = method_type_offsets_chain(); (*pchase) != -1; pchase++) {
jrose@1145 969 p = *(address*)(p + (*pchase));
jrose@1145 970 }
jrose@1145 971 assert((oop)p == method_type(), "pointer chase is correct");
jrose@1145 972 #endif
jrose@1145 973
jrose@1474 974 if (TraceMethodHandles && (Verbose || WizardMode))
jrose@1145 975 m->print_on(tty);
jrose@1145 976
jrose@1145 977 return m;
jrose@1145 978 }
jrose@1145 979
jrose@1145 980
dcubed@483 981
duke@435 982 methodHandle methodOopDesc:: clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
duke@435 983 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
duke@435 984 // Code below does not work for native methods - they should never get rewritten anyway
duke@435 985 assert(!m->is_native(), "cannot rewrite native methods");
duke@435 986 // Allocate new methodOop
duke@435 987 AccessFlags flags = m->access_flags();
duke@435 988 int checked_exceptions_len = m->checked_exceptions_length();
duke@435 989 int localvariable_len = m->localvariable_table_length();
jmasa@953 990 // Allocate newm_oop with the is_conc_safe parameter set
jmasa@953 991 // to IsUnsafeConc to indicate that newm_oop is not yet
jmasa@953 992 // safe for concurrent processing by a GC.
jmasa@953 993 methodOop newm_oop = oopFactory::new_method(new_code_length,
jmasa@953 994 flags,
jmasa@953 995 new_compressed_linenumber_size,
jmasa@953 996 localvariable_len,
jmasa@953 997 checked_exceptions_len,
jmasa@953 998 IsUnsafeConc,
jmasa@953 999 CHECK_(methodHandle()));
duke@435 1000 methodHandle newm (THREAD, newm_oop);
ysr@2533 1001 NOT_PRODUCT(int nmsz = newm->is_parsable() ? newm->size() : -1;)
duke@435 1002 int new_method_size = newm->method_size();
duke@435 1003 // Create a shallow copy of methodOopDesc part, but be careful to preserve the new constMethodOop
duke@435 1004 constMethodOop newcm = newm->constMethod();
ysr@2533 1005 NOT_PRODUCT(int ncmsz = newcm->is_parsable() ? newcm->size() : -1;)
duke@435 1006 int new_const_method_size = newm->constMethod()->object_size();
jmasa@953 1007
duke@435 1008 memcpy(newm(), m(), sizeof(methodOopDesc));
duke@435 1009 // Create shallow copy of constMethodOopDesc, but be careful to preserve the methodOop
jmasa@953 1010 // is_conc_safe is set to false because that is the value of
jmasa@953 1011 // is_conc_safe initialized into newcm and the copy should
jmasa@953 1012 // not overwrite that value. During the window in which it is
jmasa@953 1013 // tagged as unsafe, some extra work could be needed during precleaning
jmasa@953 1014 // or concurrent marking but those phases will be correct. Setting and
jmasa@953 1015 // resetting is done in preference to a careful copying into newcm to
jmasa@953 1016 // avoid having to know the precise layout of a constMethodOop.
ysr@2533 1017 m->constMethod()->set_is_conc_safe(oopDesc::IsUnsafeConc);
ysr@2533 1018 assert(m->constMethod()->is_parsable(), "Should remain parsable");
ysr@2533 1019
ysr@2533 1020 // NOTE: this is a reachable object that transiently signals "conc_unsafe"
ysr@2533 1021 // However, no allocations are done during the window in
ysr@2533 1022 // which it is tagged conc_unsafe, so we are assured that any concurrent
ysr@2533 1023 // thread will not wait forever for the object to revert to "conc_safe".
ysr@2533 1024 // Further, any such conc_unsafe object will indicate a stable size
ysr@2533 1025 // through the transition.
duke@435 1026 memcpy(newcm, m->constMethod(), sizeof(constMethodOopDesc));
ysr@2533 1027 m->constMethod()->set_is_conc_safe(oopDesc::IsSafeConc);
ysr@2533 1028 assert(m->constMethod()->is_parsable(), "Should remain parsable");
ysr@2533 1029
duke@435 1030 // Reset correct method/const method, method size, and parameter info
duke@435 1031 newcm->set_method(newm());
duke@435 1032 newm->set_constMethod(newcm);
duke@435 1033 assert(newcm->method() == newm(), "check");
duke@435 1034 newm->constMethod()->set_code_size(new_code_length);
duke@435 1035 newm->constMethod()->set_constMethod_size(new_const_method_size);
duke@435 1036 newm->set_method_size(new_method_size);
duke@435 1037 assert(newm->code_size() == new_code_length, "check");
duke@435 1038 assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
duke@435 1039 assert(newm->localvariable_table_length() == localvariable_len, "check");
duke@435 1040 // Copy new byte codes
duke@435 1041 memcpy(newm->code_base(), new_code, new_code_length);
duke@435 1042 // Copy line number table
duke@435 1043 if (new_compressed_linenumber_size > 0) {
duke@435 1044 memcpy(newm->compressed_linenumber_table(),
duke@435 1045 new_compressed_linenumber_table,
duke@435 1046 new_compressed_linenumber_size);
duke@435 1047 }
duke@435 1048 // Copy checked_exceptions
duke@435 1049 if (checked_exceptions_len > 0) {
duke@435 1050 memcpy(newm->checked_exceptions_start(),
duke@435 1051 m->checked_exceptions_start(),
duke@435 1052 checked_exceptions_len * sizeof(CheckedExceptionElement));
duke@435 1053 }
duke@435 1054 // Copy local variable number table
duke@435 1055 if (localvariable_len > 0) {
duke@435 1056 memcpy(newm->localvariable_table_start(),
duke@435 1057 m->localvariable_table_start(),
duke@435 1058 localvariable_len * sizeof(LocalVariableTableElement));
duke@435 1059 }
jmasa@953 1060
jmasa@953 1061 // Only set is_conc_safe to true when changes to newcm are
jmasa@953 1062 // complete.
ysr@2533 1063 assert(!newm->is_parsable() || nmsz < 0 || newm->size() == nmsz, "newm->size() inconsistency");
ysr@2533 1064 assert(!newcm->is_parsable() || ncmsz < 0 || newcm->size() == ncmsz, "newcm->size() inconsistency");
jmasa@953 1065 newcm->set_is_conc_safe(true);
duke@435 1066 return newm;
duke@435 1067 }
duke@435 1068
jrose@1291 1069 vmSymbols::SID methodOopDesc::klass_id_for_intrinsics(klassOop holder) {
duke@435 1070 // if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
duke@435 1071 // because we are not loading from core libraries
jrose@1291 1072 if (instanceKlass::cast(holder)->class_loader() != NULL)
jrose@1291 1073 return vmSymbols::NO_SID; // regardless of name, no intrinsics here
duke@435 1074
duke@435 1075 // see if the klass name is well-known:
coleenp@2497 1076 Symbol* klass_name = instanceKlass::cast(holder)->name();
jrose@1291 1077 return vmSymbols::find_sid(klass_name);
jrose@1291 1078 }
jrose@1291 1079
jrose@1291 1080 void methodOopDesc::init_intrinsic_id() {
jrose@1291 1081 assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
jrose@1291 1082 const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
jrose@1291 1083 assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
jrose@2148 1084 assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
jrose@1291 1085
jrose@1291 1086 // the klass name is well-known:
jrose@1291 1087 vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
jrose@1291 1088 assert(klass_id != vmSymbols::NO_SID, "caller responsibility");
duke@435 1089
duke@435 1090 // ditto for method and signature:
duke@435 1091 vmSymbols::SID name_id = vmSymbols::find_sid(name());
jrose@2148 1092 if (name_id == vmSymbols::NO_SID) return;
duke@435 1093 vmSymbols::SID sig_id = vmSymbols::find_sid(signature());
jrose@2639 1094 if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
jrose@2639 1095 && !(klass_id == vmSymbols::VM_SYMBOL_ENUM_NAME(java_dyn_MethodHandle) && AllowTransitionalJSR292)
jrose@2148 1096 && sig_id == vmSymbols::NO_SID) return;
duke@435 1097 jshort flags = access_flags().as_short();
duke@435 1098
jrose@1291 1099 vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
jrose@1291 1100 if (id != vmIntrinsics::_none) {
jrose@1291 1101 set_intrinsic_id(id);
jrose@1291 1102 return;
jrose@1291 1103 }
jrose@1291 1104
duke@435 1105 // A few slightly irregular cases:
duke@435 1106 switch (klass_id) {
duke@435 1107 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
duke@435 1108 // Second chance: check in regular Math.
duke@435 1109 switch (name_id) {
duke@435 1110 case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
duke@435 1111 case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
duke@435 1112 case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
duke@435 1113 // pretend it is the corresponding method in the non-strict class:
duke@435 1114 klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
jrose@1291 1115 id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
duke@435 1116 break;
duke@435 1117 }
jrose@1862 1118 break;
jrose@1862 1119
jrose@1862 1120 // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
jrose@2639 1121 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_dyn_MethodHandle): // AllowTransitionalJSR292 ONLY
jrose@2639 1122 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
jrose@1862 1123 if (is_static() || !is_native()) break;
jrose@1862 1124 switch (name_id) {
jrose@1862 1125 case vmSymbols::VM_SYMBOL_ENUM_NAME(invokeGeneric_name):
jrose@2148 1126 id = vmIntrinsics::_invokeGeneric;
jrose@2148 1127 break;
jrose@2148 1128 case vmSymbols::VM_SYMBOL_ENUM_NAME(invokeExact_name):
jrose@2148 1129 id = vmIntrinsics::_invokeExact;
jrose@2148 1130 break;
jrose@2148 1131 case vmSymbols::VM_SYMBOL_ENUM_NAME(invoke_name):
jrose@2638 1132 if (AllowInvokeForInvokeGeneric) id = vmIntrinsics::_invokeGeneric;
jrose@2638 1133 else if (AllowTransitionalJSR292) id = vmIntrinsics::_invokeExact;
jrose@1862 1134 break;
jrose@1862 1135 }
jrose@1862 1136 break;
jrose@2639 1137 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_InvokeDynamic):
jrose@1862 1138 if (!is_static() || !is_native()) break;
jrose@1862 1139 id = vmIntrinsics::_invokeDynamic;
jrose@1862 1140 break;
duke@435 1141 }
duke@435 1142
jrose@1291 1143 if (id != vmIntrinsics::_none) {
jrose@1291 1144 // Set up its iid. It is an alias method.
jrose@1291 1145 set_intrinsic_id(id);
jrose@1291 1146 return;
jrose@1291 1147 }
duke@435 1148 }
duke@435 1149
duke@435 1150 // These two methods are static since a GC may move the methodOopDesc
duke@435 1151 bool methodOopDesc::load_signature_classes(methodHandle m, TRAPS) {
duke@435 1152 bool sig_is_loaded = true;
duke@435 1153 Handle class_loader(THREAD, instanceKlass::cast(m->method_holder())->class_loader());
duke@435 1154 Handle protection_domain(THREAD, Klass::cast(m->method_holder())->protection_domain());
coleenp@2497 1155 ResourceMark rm(THREAD);
coleenp@2497 1156 Symbol* signature = m->signature();
duke@435 1157 for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
duke@435 1158 if (ss.is_object()) {
coleenp@2497 1159 Symbol* sym = ss.as_symbol(CHECK_(false));
coleenp@2497 1160 Symbol* name = sym;
duke@435 1161 klassOop klass = SystemDictionary::resolve_or_null(name, class_loader,
duke@435 1162 protection_domain, THREAD);
rasbold@539 1163 // We are loading classes eagerly. If a ClassNotFoundException or
rasbold@539 1164 // a LinkageError was generated, be sure to ignore it.
duke@435 1165 if (HAS_PENDING_EXCEPTION) {
never@1577 1166 if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
never@1577 1167 PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
duke@435 1168 CLEAR_PENDING_EXCEPTION;
duke@435 1169 } else {
duke@435 1170 return false;
duke@435 1171 }
duke@435 1172 }
duke@435 1173 if( klass == NULL) { sig_is_loaded = false; }
duke@435 1174 }
duke@435 1175 }
duke@435 1176 return sig_is_loaded;
duke@435 1177 }
duke@435 1178
duke@435 1179 bool methodOopDesc::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
duke@435 1180 Handle class_loader(THREAD, instanceKlass::cast(m->method_holder())->class_loader());
duke@435 1181 Handle protection_domain(THREAD, Klass::cast(m->method_holder())->protection_domain());
coleenp@2497 1182 ResourceMark rm(THREAD);
coleenp@2497 1183 Symbol* signature = m->signature();
duke@435 1184 for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
duke@435 1185 if (ss.type() == T_OBJECT) {
coleenp@2497 1186 Symbol* name = ss.as_symbol_or_null();
coleenp@2497 1187 if (name == NULL) return true;
duke@435 1188 klassOop klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
duke@435 1189 if (klass == NULL) return true;
duke@435 1190 }
duke@435 1191 }
duke@435 1192 return false;
duke@435 1193 }
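// Note: unlike load_signature_classes() above, this query uses SystemDictionary::find(),
// which only looks up classes that are already loaded and never triggers class loading itself.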
duke@435 1194
duke@435 1195 // Exposed so field engineers can debug the VM.
duke@435 1196 void methodOopDesc::print_short_name(outputStream* st) {
duke@435 1197 ResourceMark rm;
duke@435 1198 #ifdef PRODUCT
duke@435 1199 st->print(" %s::", method_holder()->klass_part()->external_name());
duke@435 1200 #else
duke@435 1201 st->print(" %s::", method_holder()->klass_part()->internal_name());
duke@435 1202 #endif
duke@435 1203 name()->print_symbol_on(st);
duke@435 1204 if (WizardMode) signature()->print_symbol_on(st);
duke@435 1205 }
duke@435 1206
duke@435 1207
duke@435 1208 extern "C" {
duke@435 1209 static int method_compare(methodOop* a, methodOop* b) {
duke@435 1210 return (*a)->name()->fast_compare((*b)->name());
duke@435 1211 }
duke@435 1212
duke@435 1213 // Prevent qsort from reordering a previous valid sort by
duke@435 1214 // considering the address of the methodOops if two methods
duke@435 1215 // would otherwise compare as equal. Required to preserve
duke@435 1216 // optimal access order in the shared archive. Slower than
duke@435 1217 // method_compare, only used for shared archive creation.
duke@435 1218 static int method_compare_idempotent(methodOop* a, methodOop* b) {
duke@435 1219 int i = method_compare(a, b);
duke@435 1220 if (i != 0) return i;
duke@435 1221 return ( a < b ? -1 : (a == b ? 0 : 1));
duke@435 1222 }
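// Note: overloads of the same name share a single interned name Symbol, so
// method_compare() returns 0 for them; the address tie-break above then gives a
// total order consistent with the current array positions, so re-sorting an
// already-ordered methods array leaves it unchanged and the archive's access
// order is preserved.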
duke@435 1223
coleenp@1853 1224 // We implement special compare versions for narrow oops to avoid
coleenp@1853 1225 // testing for UseCompressedOops on every comparison.
coleenp@1853 1226 static int method_compare_narrow(narrowOop* a, narrowOop* b) {
coleenp@1853 1227 methodOop m = (methodOop)oopDesc::load_decode_heap_oop(a);
coleenp@1853 1228 methodOop n = (methodOop)oopDesc::load_decode_heap_oop(b);
coleenp@1853 1229 return m->name()->fast_compare(n->name());
coleenp@1853 1230 }
coleenp@1853 1231
coleenp@1853 1232 static int method_compare_narrow_idempotent(narrowOop* a, narrowOop* b) {
coleenp@1853 1233 int i = method_compare_narrow(a, b);
coleenp@1853 1234 if (i != 0) return i;
coleenp@1853 1235 return ( a < b ? -1 : (a == b ? 0 : 1));
coleenp@1853 1236 }
coleenp@1853 1237
duke@435 1238 typedef int (*compareFn)(const void*, const void*);
duke@435 1239 }
duke@435 1240
duke@435 1241
duke@435 1242 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
duke@435 1243 static void reorder_based_on_method_index(objArrayOop methods,
duke@435 1244 objArrayOop annotations,
coleenp@548 1245 GrowableArray<oop>* temp_array) {
duke@435 1246 if (annotations == NULL) {
duke@435 1247 return;
duke@435 1248 }
duke@435 1249
duke@435 1250 int length = methods->length();
duke@435 1251 int i;
duke@435 1252 // Copy to temp array
coleenp@548 1253 temp_array->clear();
coleenp@548 1254 for (i = 0; i < length; i++) {
coleenp@548 1255 temp_array->append(annotations->obj_at(i));
coleenp@548 1256 }
duke@435 1257
duke@435 1258 // Copy back using old method indices
duke@435 1259 for (i = 0; i < length; i++) {
duke@435 1260 methodOop m = (methodOop) methods->obj_at(i);
coleenp@548 1261 annotations->obj_at_put(i, temp_array->at(m->method_idnum()));
duke@435 1262 }
duke@435 1263 }
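// Illustrative example: if, after sorting, methods->obj_at(0) is a method whose
// method_idnum() is 3, its annotations are taken from temp_array->at(3), i.e. from
// the slot that method's annotations occupied before the sort.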
duke@435 1264
duke@435 1265
duke@435 1266 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
duke@435 1267 void methodOopDesc::sort_methods(objArrayOop methods,
duke@435 1268 objArrayOop methods_annotations,
duke@435 1269 objArrayOop methods_parameter_annotations,
duke@435 1270 objArrayOop methods_default_annotations,
duke@435 1271 bool idempotent) {
duke@435 1272 int length = methods->length();
duke@435 1273 if (length > 1) {
duke@435 1274 bool do_annotations = false;
duke@435 1275 if (methods_annotations != NULL ||
duke@435 1276 methods_parameter_annotations != NULL ||
duke@435 1277 methods_default_annotations != NULL) {
duke@435 1278 do_annotations = true;
duke@435 1279 }
duke@435 1280 if (do_annotations) {
duke@435 1281 // Remember current method ordering so we can reorder annotations
duke@435 1282 for (int i = 0; i < length; i++) {
duke@435 1283 methodOop m = (methodOop) methods->obj_at(i);
duke@435 1284 m->set_method_idnum(i);
duke@435 1285 }
duke@435 1286 }
duke@435 1287
duke@435 1288 // Use a simple bubble sort for a small number of methods since
duke@435 1289 // qsort requires a function pointer call for each comparison.
coleenp@1853 1290 if (length < 8) {
duke@435 1291 bool sorted = true;
duke@435 1292 for (int i=length-1; i>0; i--) {
duke@435 1293 for (int j=0; j<i; j++) {
duke@435 1294 methodOop m1 = (methodOop)methods->obj_at(j);
duke@435 1295 methodOop m2 = (methodOop)methods->obj_at(j+1);
duke@435 1296 if ((uintptr_t)m1->name() > (uintptr_t)m2->name()) {
duke@435 1297 methods->obj_at_put(j, m2);
duke@435 1298 methods->obj_at_put(j+1, m1);
duke@435 1299 sorted = false;
duke@435 1300 }
duke@435 1301 }
duke@435 1302 if (sorted) break;
coleenp@548 1303 sorted = true;
duke@435 1304 }
duke@435 1305 } else {
coleenp@1853 1306 compareFn compare =
coleenp@1853 1307 (UseCompressedOops ?
coleenp@1853 1308 (compareFn) (idempotent ? method_compare_narrow_idempotent : method_compare_narrow):
coleenp@1853 1309 (compareFn) (idempotent ? method_compare_idempotent : method_compare));
coleenp@548 1310 qsort(methods->base(), length, heapOopSize, compare);
duke@435 1311 }
duke@435 1312
duke@435 1313 // Sort annotations if necessary
duke@435 1314 assert(methods_annotations == NULL || methods_annotations->length() == methods->length(), "");
duke@435 1315 assert(methods_parameter_annotations == NULL || methods_parameter_annotations->length() == methods->length(), "");
duke@435 1316 assert(methods_default_annotations == NULL || methods_default_annotations->length() == methods->length(), "");
duke@435 1317 if (do_annotations) {
coleenp@548 1318 ResourceMark rm;
duke@435 1319 // Allocate temporary storage
coleenp@548 1320 GrowableArray<oop>* temp_array = new GrowableArray<oop>(length);
duke@435 1321 reorder_based_on_method_index(methods, methods_annotations, temp_array);
duke@435 1322 reorder_based_on_method_index(methods, methods_parameter_annotations, temp_array);
duke@435 1323 reorder_based_on_method_index(methods, methods_default_annotations, temp_array);
duke@435 1324 }
duke@435 1325
duke@435 1326 // Reset method ordering
duke@435 1327 for (int i = 0; i < length; i++) {
duke@435 1328 methodOop m = (methodOop) methods->obj_at(i);
duke@435 1329 m->set_method_idnum(i);
duke@435 1330 }
duke@435 1331 }
duke@435 1332 }
duke@435 1333
duke@435 1334
duke@435 1335 //-----------------------------------------------------------------------------------
duke@435 1336 // Non-product code
duke@435 1337
duke@435 1338 #ifndef PRODUCT
duke@435 1339 class SignatureTypePrinter : public SignatureTypeNames {
duke@435 1340 private:
duke@435 1341 outputStream* _st;
duke@435 1342 bool _use_separator;
duke@435 1343
duke@435 1344 void type_name(const char* name) {
duke@435 1345 if (_use_separator) _st->print(", ");
duke@435 1346 _st->print(name);
duke@435 1347 _use_separator = true;
duke@435 1348 }
duke@435 1349
duke@435 1350 public:
coleenp@2497 1351 SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
duke@435 1352 _st = st;
duke@435 1353 _use_separator = false;
duke@435 1354 }
duke@435 1355
duke@435 1356 void print_parameters() { _use_separator = false; iterate_parameters(); }
duke@435 1357 void print_returntype() { _use_separator = false; iterate_returntype(); }
duke@435 1358 };
duke@435 1359
duke@435 1360
duke@435 1361 void methodOopDesc::print_name(outputStream* st) {
duke@435 1362 Thread *thread = Thread::current();
duke@435 1363 ResourceMark rm(thread);
duke@435 1364 SignatureTypePrinter sig(signature(), st);
duke@435 1365 st->print("%s ", is_static() ? "static" : "virtual");
duke@435 1366 sig.print_returntype();
duke@435 1367 st->print(" %s.", method_holder()->klass_part()->internal_name());
duke@435 1368 name()->print_symbol_on(st);
duke@435 1369 st->print("(");
duke@435 1370 sig.print_parameters();
duke@435 1371 st->print(")");
duke@435 1372 }
duke@435 1373
duke@435 1374
duke@435 1375 void methodOopDesc::print_codes_on(outputStream* st) const {
duke@435 1376 print_codes_on(0, code_size(), st);
duke@435 1377 }
duke@435 1378
duke@435 1379 void methodOopDesc::print_codes_on(int from, int to, outputStream* st) const {
duke@435 1380 Thread *thread = Thread::current();
duke@435 1381 ResourceMark rm(thread);
duke@435 1382 methodHandle mh (thread, (methodOop)this);
duke@435 1383 BytecodeStream s(mh);
duke@435 1384 s.set_interval(from, to);
duke@435 1385 BytecodeTracer::set_closure(BytecodeTracer::std_closure());
duke@435 1386 while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
duke@435 1387 }
duke@435 1388 #endif // not PRODUCT
duke@435 1389
duke@435 1390
duke@435 1391 // Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
duke@435 1392 // between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
duke@435 1393 // we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
duke@435 1394 // as the end-of-stream terminator.
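// Illustrative example: the pair (bci_delta = 6, line_delta = 2) fits in 5 and 3 bits
// and packs into the single byte (6 << 3) | 2 == 0x32, whereas a pair such as
// (40, 1) does not fit and is written as the 0xFF escape byte followed by both
// deltas in the regular compressed encoding (see write_pair_regular below and
// read_pair for the matching decode).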
duke@435 1395
duke@435 1396 void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
duke@435 1397 // bci and line number does not compress into single byte.
duke@435 1398 // Write out escape character and use regular compression for bci and line number.
duke@435 1399 write_byte((jubyte)0xFF);
duke@435 1400 write_signed_int(bci_delta);
duke@435 1401 write_signed_int(line_delta);
duke@435 1402 }
duke@435 1403
duke@435 1404 // See comment in methodOop.hpp which explains why this exists.
sla@2540 1405 #if defined(_M_AMD64) && _MSC_VER >= 1400
duke@435 1406 #pragma optimize("", off)
duke@435 1407 void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
duke@435 1408 write_pair_inline(bci, line);
duke@435 1409 }
duke@435 1410 #pragma optimize("", on)
duke@435 1411 #endif
duke@435 1412
duke@435 1413 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
duke@435 1414 _bci = 0;
duke@435 1415 _line = 0;
duke@435 1416 }
duke@435 1417
duke@435 1418
duke@435 1419 bool CompressedLineNumberReadStream::read_pair() {
duke@435 1420 jubyte next = read_byte();
duke@435 1421 // Check for terminator
duke@435 1422 if (next == 0) return false;
duke@435 1423 if (next == 0xFF) {
duke@435 1424 // Escape character, regular compression used
duke@435 1425 _bci += read_signed_int();
duke@435 1426 _line += read_signed_int();
duke@435 1427 } else {
duke@435 1428 // Single byte compression used
duke@435 1429 _bci += next >> 3;
duke@435 1430 _line += next & 0x7;
duke@435 1431 }
duke@435 1432 return true;
duke@435 1433 }
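// A minimal usage sketch (assumes the has_linenumber_table(), compressed_linenumber_table(),
// bci() and line() accessors declared in methodOop.hpp):
//
//   if (method->has_linenumber_table()) {
//     CompressedLineNumberReadStream stream(method->compressed_linenumber_table());
//     while (stream.read_pair()) {
//       tty->print_cr("bci %d -> line %d", stream.bci(), stream.line());
//     }
//   }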
duke@435 1434
duke@435 1435
never@2462 1436 Bytecodes::Code methodOopDesc::orig_bytecode_at(int bci) const {
duke@435 1437 BreakpointInfo* bp = instanceKlass::cast(method_holder())->breakpoints();
duke@435 1438 for (; bp != NULL; bp = bp->next()) {
duke@435 1439 if (bp->match(this, bci)) {
duke@435 1440 return bp->orig_bytecode();
duke@435 1441 }
duke@435 1442 }
duke@435 1443 ShouldNotReachHere();
duke@435 1444 return Bytecodes::_shouldnotreachhere;
duke@435 1445 }
duke@435 1446
duke@435 1447 void methodOopDesc::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
duke@435 1448 assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
duke@435 1449 BreakpointInfo* bp = instanceKlass::cast(method_holder())->breakpoints();
duke@435 1450 for (; bp != NULL; bp = bp->next()) {
duke@435 1451 if (bp->match(this, bci)) {
duke@435 1452 bp->set_orig_bytecode(code);
duke@435 1453 // and continue, in case there is more than one
duke@435 1454 }
duke@435 1455 }
duke@435 1456 }
duke@435 1457
duke@435 1458 void methodOopDesc::set_breakpoint(int bci) {
duke@435 1459 instanceKlass* ik = instanceKlass::cast(method_holder());
duke@435 1460 BreakpointInfo *bp = new BreakpointInfo(this, bci);
duke@435 1461 bp->set_next(ik->breakpoints());
duke@435 1462 ik->set_breakpoints(bp);
duke@435 1463 // do this last:
duke@435 1464 bp->set(this);
duke@435 1465 }
duke@435 1466
duke@435 1467 static void clear_matches(methodOop m, int bci) {
duke@435 1468 instanceKlass* ik = instanceKlass::cast(m->method_holder());
duke@435 1469 BreakpointInfo* prev_bp = NULL;
duke@435 1470 BreakpointInfo* next_bp;
duke@435 1471 for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
duke@435 1472 next_bp = bp->next();
duke@435 1473 // A bci value of -1 is used to delete all breakpoints in method m (e.g., clear_all_breakpoints).
duke@435 1474 if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
duke@435 1475 // do this first:
duke@435 1476 bp->clear(m);
duke@435 1477 // unhook it
duke@435 1478 if (prev_bp != NULL)
duke@435 1479 prev_bp->set_next(next_bp);
duke@435 1480 else
duke@435 1481 ik->set_breakpoints(next_bp);
duke@435 1482 delete bp;
duke@435 1483 // When a class is redefined, JVMTI sets a breakpoint in all versions of EMCP methods
duke@435 1484 // at the same location, so we can have multiple matching (method_index and bci)
duke@435 1485 // BreakpointInfo nodes in the BreakpointInfo list. We should delete just one
duke@435 1486 // breakpoint for a clear_breakpoint request and keep the BreakpointInfo of all
duke@435 1487 // other method versions for future clear_breakpoint requests.
duke@435 1488 // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
duke@435 1489 // which is called when the class is unloaded. In that case we delete the Breakpoint
duke@435 1490 // information for all versions of the method. We may not correctly restore the
duke@435 1491 // original bytecode in all method versions, but that is OK because the class is
duke@435 1492 // being unloaded, so these methods won't be used anymore.
duke@435 1493 if (bci >= 0) {
duke@435 1494 break;
duke@435 1495 }
duke@435 1496 } else {
duke@435 1497 // This one is a keeper.
duke@435 1498 prev_bp = bp;
duke@435 1499 }
duke@435 1500 }
duke@435 1501 }
duke@435 1502
duke@435 1503 void methodOopDesc::clear_breakpoint(int bci) {
duke@435 1504 assert(bci >= 0, "");
duke@435 1505 clear_matches(this, bci);
duke@435 1506 }
duke@435 1507
duke@435 1508 void methodOopDesc::clear_all_breakpoints() {
duke@435 1509 clear_matches(this, -1);
duke@435 1510 }
duke@435 1511
duke@435 1512
iveresov@2138 1513 int methodOopDesc::invocation_count() {
iveresov@2138 1514 if (TieredCompilation) {
iveresov@2138 1515 const methodDataOop mdo = method_data();
iveresov@2138 1516 if (invocation_counter()->carry() || ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
iveresov@2138 1517 return InvocationCounter::count_limit;
iveresov@2138 1518 } else {
iveresov@2138 1519 return invocation_counter()->count() + ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
iveresov@2138 1520 }
iveresov@2138 1521 } else {
iveresov@2138 1522 return invocation_counter()->count();
iveresov@2138 1523 }
iveresov@2138 1524 }
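// Worked example: with TieredCompilation, a method whose own counter reads 600 and
// whose methodDataOop counter reads 1400 reports invocation_count() == 2000; once
// either counter's carry bit is set the result saturates at
// InvocationCounter::count_limit. backedge_count() below combines its two counters
// in the same way.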
iveresov@2138 1525
iveresov@2138 1526 int methodOopDesc::backedge_count() {
iveresov@2138 1527 if (TieredCompilation) {
iveresov@2138 1528 const methodDataOop mdo = method_data();
iveresov@2138 1529 if (backedge_counter()->carry() || ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
iveresov@2138 1530 return InvocationCounter::count_limit;
iveresov@2138 1531 } else {
iveresov@2138 1532 return backedge_counter()->count() + ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
iveresov@2138 1533 }
iveresov@2138 1534 } else {
iveresov@2138 1535 return backedge_counter()->count();
iveresov@2138 1536 }
iveresov@2138 1537 }
iveresov@2138 1538
iveresov@2138 1539 int methodOopDesc::highest_comp_level() const {
iveresov@2138 1540 methodDataOop mdo = method_data();
iveresov@2138 1541 if (mdo != NULL) {
iveresov@2138 1542 return mdo->highest_comp_level();
iveresov@2138 1543 } else {
iveresov@2138 1544 return CompLevel_none;
iveresov@2138 1545 }
iveresov@2138 1546 }
iveresov@2138 1547
iveresov@2138 1548 int methodOopDesc::highest_osr_comp_level() const {
iveresov@2138 1549 methodDataOop mdo = method_data();
iveresov@2138 1550 if (mdo != NULL) {
iveresov@2138 1551 return mdo->highest_osr_comp_level();
iveresov@2138 1552 } else {
iveresov@2138 1553 return CompLevel_none;
iveresov@2138 1554 }
iveresov@2138 1555 }
iveresov@2138 1556
iveresov@2138 1557 void methodOopDesc::set_highest_comp_level(int level) {
iveresov@2138 1558 methodDataOop mdo = method_data();
iveresov@2138 1559 if (mdo != NULL) {
iveresov@2138 1560 mdo->set_highest_comp_level(level);
iveresov@2138 1561 }
iveresov@2138 1562 }
iveresov@2138 1563
iveresov@2138 1564 void methodOopDesc::set_highest_osr_comp_level(int level) {
iveresov@2138 1565 methodDataOop mdo = method_data();
iveresov@2138 1566 if (mdo != NULL) {
iveresov@2138 1567 mdo->set_highest_osr_comp_level(level);
iveresov@2138 1568 }
iveresov@2138 1569 }
iveresov@2138 1570
duke@435 1571 BreakpointInfo::BreakpointInfo(methodOop m, int bci) {
duke@435 1572 _bci = bci;
duke@435 1573 _name_index = m->name_index();
duke@435 1574 _signature_index = m->signature_index();
duke@435 1575 _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
duke@435 1576 if (_orig_bytecode == Bytecodes::_breakpoint)
duke@435 1577 _orig_bytecode = m->orig_bytecode_at(_bci);
duke@435 1578 _next = NULL;
duke@435 1579 }
duke@435 1580
duke@435 1581 void BreakpointInfo::set(methodOop method) {
duke@435 1582 #ifdef ASSERT
duke@435 1583 {
duke@435 1584 Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
duke@435 1585 if (code == Bytecodes::_breakpoint)
duke@435 1586 code = method->orig_bytecode_at(_bci);
duke@435 1587 assert(orig_bytecode() == code, "original bytecode must be the same");
duke@435 1588 }
duke@435 1589 #endif
duke@435 1590 *method->bcp_from(_bci) = Bytecodes::_breakpoint;
duke@435 1591 method->incr_number_of_breakpoints();
duke@435 1592 SystemDictionary::notice_modification();
duke@435 1593 {
duke@435 1594 // Deoptimize all dependents on this method
duke@435 1595 Thread *thread = Thread::current();
duke@435 1596 HandleMark hm(thread);
duke@435 1597 methodHandle mh(thread, method);
duke@435 1598 Universe::flush_dependents_on_method(mh);
duke@435 1599 }
duke@435 1600 }
duke@435 1601
duke@435 1602 void BreakpointInfo::clear(methodOop method) {
duke@435 1603 *method->bcp_from(_bci) = orig_bytecode();
duke@435 1604 assert(method->number_of_breakpoints() > 0, "must not go negative");
duke@435 1605 method->decr_number_of_breakpoints();
duke@435 1606 }
