src/share/vm/oops/method.cpp

author       zmajo
date         Mon, 12 Dec 2016 12:53:38 +0100
changeset    8664:00cbb581da94
parent       8583:d18eb5b5a3d6
child        8717:77d9c9da7188
permissions  -rw-r--r--

8157181: Compilers accept modification of final fields outside initializer methods
Summary: Track initialized final field updates; disable constant folding if an update is detected.
Reviewed-by: vlivanov, dnsimon, forax, never, kvn, coleenp
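For illustration only (not part of this changeset; the class, field, and method names below are made up): the pattern the summary refers to is a final instance field written after its initializer has completed, for example through core reflection. A compiler that constant-folds loads of initialized final fields could otherwise keep using the stale value; per the summary, such updates are now tracked and folding is disabled when one is detected.

// FinalFieldUpdate.java -- hypothetical sketch of the scenario in the bug title
import java.lang.reflect.Field;

class Holder {
    final int value;
    Holder(int v) { value = v; }   // the only write inside an initializer
}

public class FinalFieldUpdate {
    public static void main(String[] args) throws Exception {
        Holder h = new Holder(1);
        Field f = Holder.class.getDeclaredField("value");
        f.setAccessible(true);
        f.setInt(h, 42);           // final field modified outside <init>
        // A compiler that had folded h.value to its initialized value could
        // miss this update; tracking the update disables that folding.
        System.out.println(h.value);
    }
}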

duke@435 1 /*
kevinw@8368 2 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
coleenp@4490 26 #include "classfile/metadataOnStackMark.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "code/debugInfoRec.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 30 #include "interpreter/bytecodeStream.hpp"
stefank@2314 31 #include "interpreter/bytecodeTracer.hpp"
stefank@2314 32 #include "interpreter/bytecodes.hpp"
stefank@2314 33 #include "interpreter/interpreter.hpp"
stefank@2314 34 #include "interpreter/oopMapCache.hpp"
stefank@2314 35 #include "memory/gcLocker.hpp"
stefank@2314 36 #include "memory/generation.hpp"
acorn@4497 37 #include "memory/heapInspection.hpp"
coleenp@4037 38 #include "memory/metadataFactory.hpp"
jiangli@8509 39 #include "memory/metaspaceShared.hpp"
stefank@2314 40 #include "memory/oopFactory.hpp"
kamg@4245 41 #include "oops/constMethod.hpp"
coleenp@4037 42 #include "oops/methodData.hpp"
coleenp@4037 43 #include "oops/method.hpp"
stefank@2314 44 #include "oops/oop.inline.hpp"
coleenp@2497 45 #include "oops/symbol.hpp"
stefank@2314 46 #include "prims/jvmtiExport.hpp"
twisti@3969 47 #include "prims/methodHandles.hpp"
stefank@2314 48 #include "prims/nativeLookup.hpp"
stefank@2314 49 #include "runtime/arguments.hpp"
stefank@2314 50 #include "runtime/compilationPolicy.hpp"
stefank@2314 51 #include "runtime/frame.inline.hpp"
stefank@2314 52 #include "runtime/handles.inline.hpp"
goetz@6911 53 #include "runtime/orderAccess.inline.hpp"
stefank@2314 54 #include "runtime/relocator.hpp"
stefank@2314 55 #include "runtime/sharedRuntime.hpp"
stefank@2314 56 #include "runtime/signature.hpp"
brutisso@2976 57 #include "utilities/quickSort.hpp"
stefank@2314 58 #include "utilities/xmlstream.hpp"
duke@435 59
drchase@6680 60 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
duke@435 61
coleenp@4037 62 // Implementation of Method
duke@435 63
coleenp@4037 64 Method* Method::allocate(ClassLoaderData* loader_data,
kamg@4245 65 int byte_code_size,
kamg@4245 66 AccessFlags access_flags,
coleenp@4572 67 InlineTableSizes* sizes,
kamg@4245 68 ConstMethod::MethodType method_type,
kamg@4245 69 TRAPS) {
coleenp@4037 70 assert(!access_flags.is_native() || byte_code_size == 0,
coleenp@4037 71 "native methods should not contain byte codes");
coleenp@4037 72 ConstMethod* cm = ConstMethod::allocate(loader_data,
kamg@4245 73 byte_code_size,
coleenp@4572 74 sizes,
kamg@4245 75 method_type,
kamg@4245 76 CHECK_NULL);
coleenp@4037 77
coleenp@4037 78 int size = Method::size(access_flags.is_native());
coleenp@4037 79
iklam@5208 80 return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);
coleenp@4037 81 }
coleenp@4037 82
coleenp@4712 83 Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {
coleenp@4037 84 No_Safepoint_Verifier no_safepoint;
coleenp@4037 85 set_constMethod(xconst);
coleenp@4037 86 set_access_flags(access_flags);
coleenp@4037 87 set_method_size(size);
coleenp@4037 88 set_intrinsic_id(vmIntrinsics::_none);
coleenp@4037 89 set_jfr_towrite(false);
coleenp@4715 90 set_force_inline(false);
coleenp@4715 91 set_hidden(false);
coleenp@4715 92 set_dont_inline(false);
vlivanov@7890 93 set_has_injected_profile(false);
coleenp@4037 94 set_method_data(NULL);
iveresov@7203 95 clear_method_counters();
coleenp@4037 96 set_vtable_index(Method::garbage_vtable_index);
coleenp@4037 97
coleenp@4037 98 // Fix and bury in Method*
coleenp@4037 99 set_interpreter_entry(NULL); // sets i2i entry and from_int
coleenp@4037 100 set_adapter_entry(NULL);
coleenp@4037 101 clear_code(); // from_c/from_i get set to c2i/i2i
coleenp@4037 102
coleenp@4037 103 if (access_flags.is_native()) {
coleenp@4037 104 clear_native_function();
coleenp@4037 105 set_signature_handler(NULL);
coleenp@4037 106 }
coleenp@4037 107
coleenp@4037 108 NOT_PRODUCT(set_compiled_invocation_count(0);)
coleenp@4037 109 }
coleenp@4037 110
coleenp@4037 111 // Release Method*. The nmethod will be gone when we get here because
coleenp@4037 112 // we've walked the code cache.
coleenp@4037 113 void Method::deallocate_contents(ClassLoaderData* loader_data) {
shshahma@8583 114 clear_jmethod_id(loader_data);
coleenp@4037 115 MetadataFactory::free_metadata(loader_data, constMethod());
coleenp@4037 116 set_constMethod(NULL);
coleenp@4037 117 MetadataFactory::free_metadata(loader_data, method_data());
coleenp@4037 118 set_method_data(NULL);
jiangli@4936 119 MetadataFactory::free_metadata(loader_data, method_counters());
iveresov@7203 120 clear_method_counters();
coleenp@4037 121 // The nmethod will be gone when we get here.
coleenp@4037 122 if (code() != NULL) _code = NULL;
coleenp@4037 123 }
coleenp@4037 124
coleenp@4037 125 address Method::get_i2c_entry() {
duke@435 126 assert(_adapter != NULL, "must have");
duke@435 127 return _adapter->get_i2c_entry();
duke@435 128 }
duke@435 129
coleenp@4037 130 address Method::get_c2i_entry() {
duke@435 131 assert(_adapter != NULL, "must have");
duke@435 132 return _adapter->get_c2i_entry();
duke@435 133 }
duke@435 134
coleenp@4037 135 address Method::get_c2i_unverified_entry() {
duke@435 136 assert(_adapter != NULL, "must have");
duke@435 137 return _adapter->get_c2i_unverified_entry();
duke@435 138 }
duke@435 139
coleenp@4037 140 char* Method::name_and_sig_as_C_string() const {
hseigel@4278 141 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
duke@435 142 }
duke@435 143
coleenp@4037 144 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
hseigel@4278 145 return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
duke@435 146 }
duke@435 147
coleenp@4037 148 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
duke@435 149 const char* klass_name = klass->external_name();
duke@435 150 int klass_name_len = (int)strlen(klass_name);
duke@435 151 int method_name_len = method_name->utf8_length();
duke@435 152 int len = klass_name_len + 1 + method_name_len + signature->utf8_length();
duke@435 153 char* dest = NEW_RESOURCE_ARRAY(char, len + 1);
duke@435 154 strcpy(dest, klass_name);
duke@435 155 dest[klass_name_len] = '.';
duke@435 156 strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
duke@435 157 strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
duke@435 158 dest[len] = 0;
duke@435 159 return dest;
duke@435 160 }
duke@435 161
coleenp@4037 162 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
coleenp@2497 163 Symbol* klass_name = klass->name();
duke@435 164 klass_name->as_klass_external_name(buf, size);
duke@435 165 int len = (int)strlen(buf);
duke@435 166
duke@435 167 if (len < size - 1) {
duke@435 168 buf[len++] = '.';
duke@435 169
duke@435 170 method_name->as_C_string(&(buf[len]), size - len);
duke@435 171 len = (int)strlen(buf);
duke@435 172
duke@435 173 signature->as_C_string(&(buf[len]), size - len);
duke@435 174 }
duke@435 175
duke@435 176 return buf;
duke@435 177 }
duke@435 178
jiangli@4405 179 int Method::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {
duke@435 180 // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
duke@435 181 // access exception table
jiangli@4405 182 ExceptionTable table(mh());
jiangli@3917 183 int length = table.length();
duke@435 184 // iterate through all entries sequentially
jiangli@4405 185 constantPoolHandle pool(THREAD, mh->constants());
jiangli@3917 186 for (int i = 0; i < length; i ++) {
jiangli@3917 187 //reacquire the table in case a GC happened
jiangli@4405 188 ExceptionTable table(mh());
jiangli@3917 189 int beg_bci = table.start_pc(i);
jiangli@3917 190 int end_bci = table.end_pc(i);
duke@435 191 assert(beg_bci <= end_bci, "inconsistent exception table");
duke@435 192 if (beg_bci <= throw_bci && throw_bci < end_bci) {
duke@435 193 // exception handler bci range covers throw_bci => investigate further
jiangli@3917 194 int handler_bci = table.handler_pc(i);
jiangli@3917 195 int klass_index = table.catch_type_index(i);
duke@435 196 if (klass_index == 0) {
duke@435 197 return handler_bci;
duke@435 198 } else if (ex_klass.is_null()) {
duke@435 199 return handler_bci;
duke@435 200 } else {
duke@435 201 // we know the exception class => get the constraint class
duke@435 202 // this may require loading of the constraint class; if verification
duke@435 203 // fails or some other exception occurs, return handler_bci
coleenp@4037 204 Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));
duke@435 205 KlassHandle klass = KlassHandle(THREAD, k);
duke@435 206 assert(klass.not_null(), "klass not loaded");
duke@435 207 if (ex_klass->is_subtype_of(klass())) {
duke@435 208 return handler_bci;
duke@435 209 }
duke@435 210 }
duke@435 211 }
duke@435 212 }
duke@435 213
duke@435 214 return -1;
duke@435 215 }
duke@435 216
coleenp@4037 217 void Method::mask_for(int bci, InterpreterOopMap* mask) {
duke@435 218
duke@435 219 Thread* myThread = Thread::current();
duke@435 220 methodHandle h_this(myThread, this);
duke@435 221 #ifdef ASSERT
duke@435 222 bool has_capability = myThread->is_VM_thread() ||
duke@435 223 myThread->is_ConcurrentGC_thread() ||
duke@435 224 myThread->is_GC_task_thread();
duke@435 225
duke@435 226 if (!has_capability) {
duke@435 227 if (!VerifyStack && !VerifyLastFrame) {
duke@435 228 // verify stack calls this outside VM thread
duke@435 229 warning("oopmap should only be accessed by the "
duke@435 230 "VM, GC task or CMS threads (or during debugging)");
duke@435 231 InterpreterOopMap local_mask;
coleenp@4251 232 method_holder()->mask_for(h_this, bci, &local_mask);
duke@435 233 local_mask.print();
duke@435 234 }
duke@435 235 }
duke@435 236 #endif
coleenp@4251 237 method_holder()->mask_for(h_this, bci, mask);
duke@435 238 return;
duke@435 239 }
duke@435 240
duke@435 241
coleenp@4037 242 int Method::bci_from(address bcp) const {
kvn@4102 243 #ifdef ASSERT
kvn@4102 244 { ResourceMark rm;
twisti@3848 245 assert(is_native() && bcp == code_base() || contains(bcp) || is_error_reported(),
twisti@3848 246 err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));
kvn@4102 247 }
kvn@4102 248 #endif
duke@435 249 return bcp - code_base();
duke@435 250 }
duke@435 251
duke@435 252
duke@435 253 // Return (int)bcx if it appears to be a valid BCI.
duke@435 254 // Return bci_from((address)bcx) if it appears to be a valid BCP.
duke@435 255 // Return -1 otherwise.
duke@435 256 // Used by profiling code, when invalid data is a possibility.
coleenp@4037 257 // The caller is responsible for validating the Method* itself.
coleenp@4037 258 int Method::validate_bci_from_bcx(intptr_t bcx) const {
duke@435 259 // keep bci as -1 if not a valid bci
duke@435 260 int bci = -1;
duke@435 261 if (bcx == 0 || (address)bcx == code_base()) {
duke@435 262 // code_size() may return 0 and we allow 0 here
duke@435 263 // the method may be native
duke@435 264 bci = 0;
duke@435 265 } else if (frame::is_bci(bcx)) {
duke@435 266 if (bcx < code_size()) {
duke@435 267 bci = (int)bcx;
duke@435 268 }
duke@435 269 } else if (contains((address)bcx)) {
duke@435 270 bci = (address)bcx - code_base();
duke@435 271 }
duke@435 272 // Assert that if we have dodged any asserts, bci is negative.
duke@435 273 assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
duke@435 274 return bci;
duke@435 275 }
duke@435 276
coleenp@4037 277 address Method::bcp_from(int bci) const {
kvn@6429 278 assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), err_msg("illegal bci: %d", bci));
duke@435 279 address bcp = code_base() + bci;
duke@435 280 assert(is_native() && bcp == code_base() || contains(bcp), "bcp doesn't belong to this method");
duke@435 281 return bcp;
duke@435 282 }
duke@435 283
duke@435 284
coleenp@4037 285 int Method::size(bool is_native) {
duke@435 286 // If native, then include pointers for native_function and signature_handler
duke@435 287 int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
duke@435 288 int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;
duke@435 289 return align_object_size(header_size() + extra_words);
duke@435 290 }
duke@435 291
duke@435 292
coleenp@4037 293 Symbol* Method::klass_name() const {
coleenp@4037 294 Klass* k = method_holder();
duke@435 295 assert(k->is_klass(), "must be klass");
coleenp@4037 296 InstanceKlass* ik = (InstanceKlass*) k;
duke@435 297 return ik->name();
duke@435 298 }
duke@435 299
duke@435 300
duke@435 301 // Attempt to return method oop to original state. Clear any pointers
duke@435 302 // (to objects outside the shared spaces). We won't be able to predict
duke@435 303 // where they should point in a new JVM. Further initialize some
duke@435 304 // entries now in order to allow them to be write protected later.
duke@435 305
coleenp@4037 306 void Method::remove_unshareable_info() {
duke@435 307 unlink_method();
duke@435 308 }
duke@435 309
jiangli@8509 310 void Method::set_vtable_index(int index) {
jiangli@8509 311 if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
jiangli@8509 312 // At runtime initialize_vtable is rerun as part of link_class_impl()
jiangli@8509 313 // for a shared class loaded by the non-boot loader to obtain the loader
jiangli@8509 314 // constraints based on the runtime classloaders' context.
jiangli@8509 315 return; // don't write into the shared class
jiangli@8509 316 } else {
jiangli@8509 317 _vtable_index = index;
jiangli@8509 318 }
jiangli@8509 319 }
jiangli@8509 320
jiangli@8509 321 void Method::set_itable_index(int index) {
jiangli@8509 322 if (is_shared() && !MetaspaceShared::remapped_readwrite()) {
jiangli@8509 323 // At runtime initialize_itable is rerun as part of link_class_impl()
jiangli@8509 324 // for a shared class loaded by the non-boot loader to obtain the loader
jiangli@8509 325 // constraints based on the runtime classloaders' context. The dumptime
jiangli@8509 326 // itable index should be the same as the runtime index.
jiangli@8509 327 assert(_vtable_index == itable_index_max - index,
jiangli@8509 328 "archived itable index is different from runtime index");
jiangli@8509 329 return; // don't write into the shared class
jiangli@8509 330 } else {
jiangli@8509 331 _vtable_index = itable_index_max - index;
jiangli@8509 332 }
jiangli@8509 333 assert(valid_itable_index(), "");
jiangli@8509 334 }
jiangli@8509 335
jiangli@8509 336
duke@435 337
coleenp@4037 338 bool Method::was_executed_more_than(int n) {
coleenp@4037 339 // Invocation counter is reset when the Method* is compiled.
duke@435 340 // If the method has compiled code we therefore assume it has
duke@435 341 // been executed more than n times.
duke@435 342 if (is_accessor() || is_empty_method() || (code() != NULL)) {
duke@435 343 // interpreter doesn't bump invocation counter of trivial methods
duke@435 344 // compiler does not bump invocation counter of compiled methods
duke@435 345 return true;
iveresov@2138 346 }
jiangli@4936 347 else if ((method_counters() != NULL &&
jiangli@4936 348 method_counters()->invocation_counter()->carry()) ||
jiangli@4936 349 (method_data() != NULL &&
jiangli@4936 350 method_data()->invocation_counter()->carry())) {
duke@435 351 // The carry bit is set when the counter overflows and causes
duke@435 352 // a compilation to occur. We don't know how many times
duke@435 353 // the counter has been reset, so we simply assume it has
duke@435 354 // been executed more than n times.
duke@435 355 return true;
duke@435 356 } else {
duke@435 357 return invocation_count() > n;
duke@435 358 }
duke@435 359 }
duke@435 360
duke@435 361 #ifndef PRODUCT
coleenp@4037 362 void Method::print_invocation_count() {
duke@435 363 if (is_static()) tty->print("static ");
duke@435 364 if (is_final()) tty->print("final ");
duke@435 365 if (is_synchronized()) tty->print("synchronized ");
duke@435 366 if (is_native()) tty->print("native ");
coleenp@4037 367 method_holder()->name()->print_symbol_on(tty);
duke@435 368 tty->print(".");
duke@435 369 name()->print_symbol_on(tty);
duke@435 370 signature()->print_symbol_on(tty);
duke@435 371
duke@435 372 if (WizardMode) {
duke@435 373 // dump the size of the byte codes
duke@435 374 tty->print(" {%d}", code_size());
duke@435 375 }
duke@435 376 tty->cr();
duke@435 377
duke@435 378 tty->print_cr (" interpreter_invocation_count: %8d ", interpreter_invocation_count());
duke@435 379 tty->print_cr (" invocation_counter: %8d ", invocation_count());
duke@435 380 tty->print_cr (" backedge_counter: %8d ", backedge_count());
duke@435 381 if (CountCompiledCalls) {
duke@435 382 tty->print_cr (" compiled_invocation_count: %8d ", compiled_invocation_count());
duke@435 383 }
duke@435 384
duke@435 385 }
duke@435 386 #endif
duke@435 387
coleenp@4037 388 // Build a MethodData* object to hold information about this method
duke@435 389 // collected in the interpreter.
coleenp@4037 390 void Method::build_interpreter_method_data(methodHandle method, TRAPS) {
coleenp@2363 391 // Do not profile method if current thread holds the pending list lock,
coleenp@2363 392 // which avoids deadlock for acquiring the MethodData_lock.
coleenp@4047 393 if (InstanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {
coleenp@2363 394 return;
coleenp@2363 395 }
coleenp@2363 396
duke@435 397 // Grab a lock here to prevent multiple
coleenp@4037 398 // MethodData*s from being created.
duke@435 399 MutexLocker ml(MethodData_lock, THREAD);
duke@435 400 if (method->method_data() == NULL) {
coleenp@4037 401 ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
coleenp@4037 402 MethodData* method_data = MethodData::allocate(loader_data, method, CHECK);
duke@435 403 method->set_method_data(method_data);
duke@435 404 if (PrintMethodData && (Verbose || WizardMode)) {
duke@435 405 ResourceMark rm(THREAD);
duke@435 406 tty->print("build_interpreter_method_data for ");
duke@435 407 method->print_name(tty);
duke@435 408 tty->cr();
duke@435 409 // At the end of the run, the MDO, full of data, will be dumped.
duke@435 410 }
duke@435 411 }
duke@435 412 }
duke@435 413
jiangli@4936 414 MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
jiangli@4936 415 methodHandle mh(m);
jiangli@4936 416 ClassLoaderData* loader_data = mh->method_holder()->class_loader_data();
jiangli@4936 417 MethodCounters* counters = MethodCounters::allocate(loader_data, CHECK_NULL);
iveresov@7203 418 if (!mh->init_method_counters(counters)) {
jiangli@4936 419 MetadataFactory::free_metadata(loader_data, counters);
jiangli@4936 420 }
jiangli@4936 421 return mh->method_counters();
jiangli@4936 422 }
jiangli@4936 423
coleenp@4037 424 void Method::cleanup_inline_caches() {
duke@435 425 // The current system doesn't use inline caches in the interpreter
duke@435 426 // => nothing to do (keep this method around for future use)
duke@435 427 }
duke@435 428
duke@435 429
coleenp@4037 430 int Method::extra_stack_words() {
jrose@1145 431 // not an inline function, to avoid a header dependency on Interpreter
twisti@1861 432 return extra_stack_entries() * Interpreter::stackElementSize;
jrose@1145 433 }
jrose@1145 434
jrose@1145 435
coleenp@4037 436 void Method::compute_size_of_parameters(Thread *thread) {
coleenp@2497 437 ArgumentSizeComputer asc(signature());
duke@435 438 set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
duke@435 439 }
duke@435 440
coleenp@4037 441 BasicType Method::result_type() const {
duke@435 442 ResultTypeFinder rtf(signature());
duke@435 443 return rtf.type();
duke@435 444 }
duke@435 445
duke@435 446
coleenp@4037 447 bool Method::is_empty_method() const {
duke@435 448 return code_size() == 1
duke@435 449 && *code_base() == Bytecodes::_return;
duke@435 450 }
duke@435 451
duke@435 452
coleenp@4037 453 bool Method::is_vanilla_constructor() const {
duke@435 454 // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
duke@435 455 // which only calls the superclass vanilla constructor and possibly does stores of
duke@435 456 // zero constants to local fields:
duke@435 457 //
duke@435 458 // aload_0
duke@435 459 // invokespecial
duke@435 460 // indexbyte1
duke@435 461 // indexbyte2
duke@435 462 //
duke@435 463 // followed by an (optional) sequence of:
duke@435 464 //
duke@435 465 // aload_0
duke@435 466 // aconst_null / iconst_0 / fconst_0 / dconst_0
duke@435 467 // putfield
duke@435 468 // indexbyte1
duke@435 469 // indexbyte2
duke@435 470 //
duke@435 471 // followed by:
duke@435 472 //
duke@435 473 // return
duke@435 474
duke@435 475 assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");
duke@435 476 assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
duke@435 477 int size = code_size();
duke@435 478 // Check if the size matches
duke@435 479 if (size == 0 || size % 5 != 0) return false;
duke@435 480 address cb = code_base();
duke@435 481 int last = size - 1;
duke@435 482 if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
duke@435 483 // Does not call superclass default constructor
duke@435 484 return false;
duke@435 485 }
duke@435 486 // Check optional sequence
duke@435 487 for (int i = 4; i < last; i += 5) {
duke@435 488 if (cb[i] != Bytecodes::_aload_0) return false;
duke@435 489 if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
duke@435 490 if (cb[i+2] != Bytecodes::_putfield) return false;
duke@435 491 }
duke@435 492 return true;
duke@435 493 }
duke@435 494
duke@435 495
coleenp@4037 496 bool Method::compute_has_loops_flag() {
coleenp@4037 497 BytecodeStream bcs(this);
duke@435 498 Bytecodes::Code bc;
duke@435 499
duke@435 500 while ((bc = bcs.next()) >= 0) {
duke@435 501 switch( bc ) {
duke@435 502 case Bytecodes::_ifeq:
duke@435 503 case Bytecodes::_ifnull:
duke@435 504 case Bytecodes::_iflt:
duke@435 505 case Bytecodes::_ifle:
duke@435 506 case Bytecodes::_ifne:
duke@435 507 case Bytecodes::_ifnonnull:
duke@435 508 case Bytecodes::_ifgt:
duke@435 509 case Bytecodes::_ifge:
duke@435 510 case Bytecodes::_if_icmpeq:
duke@435 511 case Bytecodes::_if_icmpne:
duke@435 512 case Bytecodes::_if_icmplt:
duke@435 513 case Bytecodes::_if_icmpgt:
duke@435 514 case Bytecodes::_if_icmple:
duke@435 515 case Bytecodes::_if_icmpge:
duke@435 516 case Bytecodes::_if_acmpeq:
duke@435 517 case Bytecodes::_if_acmpne:
duke@435 518 case Bytecodes::_goto:
duke@435 519 case Bytecodes::_jsr:
duke@435 520 if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();
duke@435 521 break;
duke@435 522
duke@435 523 case Bytecodes::_goto_w:
duke@435 524 case Bytecodes::_jsr_w:
duke@435 525 if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();
duke@435 526 break;
duke@435 527 }
duke@435 528 }
duke@435 529 _access_flags.set_loops_flag_init();
duke@435 530 return _access_flags.has_loops();
duke@435 531 }
duke@435 532
drchase@5732 533 bool Method::is_final_method(AccessFlags class_access_flags) const {
drchase@5732 534 // or "does_not_require_vtable_entry"
acorn@5848 535 // a default method or an overpass can occur; neither is final (each reuses a vtable entry)
drchase@5732 536 // private methods get vtable entries for backward class compatibility.
acorn@5848 537 if (is_overpass() || is_default_method()) return false;
drchase@5732 538 return is_final() || class_access_flags.is_final();
drchase@5732 539 }
duke@435 540
coleenp@4037 541 bool Method::is_final_method() const {
drchase@5732 542 return is_final_method(method_holder()->access_flags());
duke@435 543 }
duke@435 544
acorn@5848 545 bool Method::is_default_method() const {
acorn@5848 546 if (method_holder() != NULL &&
acorn@5848 547 method_holder()->is_interface() &&
acorn@5848 548 !is_abstract()) {
acorn@5848 549 return true;
acorn@5848 550 } else {
acorn@5848 551 return false;
acorn@5848 552 }
acorn@5848 553 }
acorn@5848 554
drchase@5732 555 bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
drchase@5732 556 if (is_final_method(class_access_flags)) return true;
drchase@5732 557 #ifdef ASSERT
acorn@5848 558 ResourceMark rm;
drchase@5732 559 bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
acorn@5848 560 if (class_access_flags.is_interface()) {
acorn@5848 561 assert(is_nonv == is_static(), err_msg("is_nonv=%s", name_and_sig_as_C_string()));
acorn@5848 562 }
drchase@5732 563 #endif
drchase@5732 564 assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
duke@435 565 return vtable_index() == nonvirtual_vtable_index;
duke@435 566 }
duke@435 567
drchase@5732 568 bool Method::can_be_statically_bound() const {
drchase@5732 569 return can_be_statically_bound(method_holder()->access_flags());
drchase@5732 570 }
duke@435 571
coleenp@4037 572 bool Method::is_accessor() const {
duke@435 573 if (code_size() != 5) return false;
duke@435 574 if (size_of_parameters() != 1) return false;
never@2462 575 if (java_code_at(0) != Bytecodes::_aload_0 ) return false;
never@2462 576 if (java_code_at(1) != Bytecodes::_getfield) return false;
never@2462 577 if (java_code_at(4) != Bytecodes::_areturn &&
never@2462 578 java_code_at(4) != Bytecodes::_ireturn ) return false;
duke@435 579 return true;
duke@435 580 }
duke@435 581
thartmann@7365 582 bool Method::is_constant_getter() const {
thartmann@7365 583 int last_index = code_size() - 1;
thartmann@7365 584 // Check if the first 1-3 bytecodes are a constant push
thartmann@7365 585 // and the last bytecode is a return.
thartmann@7365 586 return (2 <= code_size() && code_size() <= 4 &&
thartmann@7365 587 Bytecodes::is_const(java_code_at(0)) &&
thartmann@7365 588 Bytecodes::length_for(java_code_at(0)) == last_index &&
thartmann@7365 589 Bytecodes::is_return(java_code_at(last_index)));
thartmann@7365 590 }
duke@435 591
coleenp@4037 592 bool Method::is_initializer() const {
zmajo@8664 593 return is_object_initializer() || is_static_initializer();
kamg@2616 594 }
kamg@2616 595
coleenp@4037 596 bool Method::has_valid_initializer_flags() const {
kamg@2616 597 return (is_static() ||
coleenp@4251 598 method_holder()->major_version() < 51);
kamg@2616 599 }
kamg@2616 600
coleenp@4037 601 bool Method::is_static_initializer() const {
kamg@2616 602 // For classfiles version 51 or greater, ensure that the clinit method is
kamg@2616 603 // static. Non-static methods with the name "<clinit>" are not static
kamg@2616 604 // initializers. (older classfiles exempted for backward compatibility)
kamg@2616 605 return name() == vmSymbols::class_initializer_name() &&
kamg@2616 606 has_valid_initializer_flags();
duke@435 607 }
duke@435 608
zmajo@8664 609 bool Method::is_object_initializer() const {
zmajo@8664 610 return name() == vmSymbols::object_initializer_name();
zmajo@8664 611 }
duke@435 612
coleenp@4037 613 objArrayHandle Method::resolved_checked_exceptions_impl(Method* this_oop, TRAPS) {
duke@435 614 int length = this_oop->checked_exceptions_length();
duke@435 615 if (length == 0) { // common case
duke@435 616 return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
duke@435 617 } else {
duke@435 618 methodHandle h_this(THREAD, this_oop);
never@1577 619 objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
duke@435 620 objArrayHandle mirrors (THREAD, m_oop);
duke@435 621 for (int i = 0; i < length; i++) {
duke@435 622 CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
coleenp@4037 623 Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
hseigel@4278 624 assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
hseigel@4278 625 mirrors->obj_at_put(i, k->java_mirror());
duke@435 626 }
duke@435 627 return mirrors;
duke@435 628 }
duke@435 629 };
duke@435 630
duke@435 631
coleenp@4037 632 int Method::line_number_from_bci(int bci) const {
duke@435 633 if (bci == SynchronizationEntryBCI) bci = 0;
duke@435 634 assert(bci == 0 || 0 <= bci && bci < code_size(), "illegal bci");
duke@435 635 int best_bci = 0;
duke@435 636 int best_line = -1;
duke@435 637
duke@435 638 if (has_linenumber_table()) {
duke@435 639 // The line numbers are a short array of 2-tuples [start_pc, line_number].
duke@435 640 // Not necessarily sorted and not necessarily one-to-one.
duke@435 641 CompressedLineNumberReadStream stream(compressed_linenumber_table());
duke@435 642 while (stream.read_pair()) {
duke@435 643 if (stream.bci() == bci) {
duke@435 644 // perfect match
duke@435 645 return stream.line();
duke@435 646 } else {
duke@435 647 // update best_bci/line
duke@435 648 if (stream.bci() < bci && stream.bci() >= best_bci) {
duke@435 649 best_bci = stream.bci();
duke@435 650 best_line = stream.line();
duke@435 651 }
duke@435 652 }
duke@435 653 }
duke@435 654 }
duke@435 655 return best_line;
duke@435 656 }
duke@435 657
duke@435 658
coleenp@4037 659 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
jiangli@3826 660 if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
duke@435 661 Thread *thread = Thread::current();
jiangli@3826 662 Symbol* klass_name = constants()->klass_name_at(klass_index);
coleenp@4251 663 Handle loader(thread, method_holder()->class_loader());
hseigel@4278 664 Handle prot (thread, method_holder()->protection_domain());
duke@435 665 return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
duke@435 666 } else {
duke@435 667 return true;
duke@435 668 }
duke@435 669 }
duke@435 670
duke@435 671
coleenp@4037 672 bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
jiangli@3826 673 int klass_index = constants()->klass_ref_index_at(refinfo_index);
duke@435 674 if (must_be_resolved) {
duke@435 675 // Make sure klass is resolved in constantpool.
duke@435 676 if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
duke@435 677 }
duke@435 678 return is_klass_loaded_by_klass_index(klass_index);
duke@435 679 }
duke@435 680
duke@435 681
coleenp@4037 682 void Method::set_native_function(address function, bool post_event_flag) {
duke@435 683 assert(function != NULL, "use clear_native_function to unregister natives");
twisti@3969 684 assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
duke@435 685 address* native_function = native_function_addr();
duke@435 686
duke@435 687 // We can see racers trying to place the same native function into place. Once
duke@435 688 // is plenty.
duke@435 689 address current = *native_function;
duke@435 690 if (current == function) return;
duke@435 691 if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
duke@435 692 function != NULL) {
duke@435 693 // native_method_throw_unsatisfied_link_error_entry() should only
duke@435 694 // be passed when post_event_flag is false.
duke@435 695 assert(function !=
duke@435 696 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 697 "post_event_flag mis-match");
duke@435 698
duke@435 699 // post the bind event, and possibly change the bind function
duke@435 700 JvmtiExport::post_native_method_bind(this, &function);
duke@435 701 }
duke@435 702 *native_function = function;
duke@435 703 // This function can be called more than once. We must make sure that we always
duke@435 704 // use the latest registered method -> check if a stub already has been generated.
duke@435 705 // If so, we have to make it not_entrant.
duke@435 706 nmethod* nm = code(); // Put it into local variable to guard against concurrent updates
duke@435 707 if (nm != NULL) {
duke@435 708 nm->make_not_entrant();
duke@435 709 }
duke@435 710 }
duke@435 711
duke@435 712
coleenp@4037 713 bool Method::has_native_function() const {
twisti@3970 714 if (is_method_handle_intrinsic())
twisti@3970 715 return false; // special-cased in SharedRuntime::generate_native_wrapper
duke@435 716 address func = native_function();
duke@435 717 return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
duke@435 718 }
duke@435 719
duke@435 720
coleenp@4037 721 void Method::clear_native_function() {
twisti@3969 722 // Note: is_method_handle_intrinsic() is allowed here.
duke@435 723 set_native_function(
duke@435 724 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 725 !native_bind_event_is_interesting);
duke@435 726 clear_code();
duke@435 727 }
duke@435 728
coleenp@4037 729 address Method::critical_native_function() {
never@3500 730 methodHandle mh(this);
never@3500 731 return NativeLookup::lookup_critical_entry(mh);
never@3500 732 }
never@3500 733
duke@435 734
coleenp@4037 735 void Method::set_signature_handler(address handler) {
duke@435 736 address* signature_handler = signature_handler_addr();
duke@435 737 *signature_handler = handler;
duke@435 738 }
duke@435 739
duke@435 740
vlivanov@4539 741 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
kvn@1643 742 if (PrintCompilation && report) {
kvn@1641 743 ttyLocker ttyl;
twisti@4111 744 tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
twisti@4111 745 if (comp_level == CompLevel_all) {
twisti@4111 746 tty->print("all levels ");
twisti@4111 747 } else {
twisti@4111 748 tty->print("levels ");
twisti@4111 749 for (int i = (int)CompLevel_none; i <= comp_level; i++) {
twisti@4111 750 tty->print("%d ", i);
twisti@4111 751 }
twisti@4111 752 }
kvn@1641 753 this->print_short_name(tty);
kvn@1641 754 int size = this->code_size();
vlivanov@4539 755 if (size > 0) {
kvn@1641 756 tty->print(" (%d bytes)", size);
vlivanov@4539 757 }
vlivanov@4539 758 if (reason != NULL) {
vlivanov@4539 759 tty->print(" %s", reason);
vlivanov@4539 760 }
kvn@1641 761 tty->cr();
kvn@1641 762 }
duke@435 763 if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
duke@435 764 ttyLocker ttyl;
vlivanov@7184 765 xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
vlivanov@7184 766 os::current_thread_id(), is_osr, comp_level);
vlivanov@4539 767 if (reason != NULL) {
vlivanov@4539 768 xtty->print(" reason=\'%s\'", reason);
vlivanov@4539 769 }
coleenp@4037 770 xtty->method(this);
duke@435 771 xtty->stamp();
duke@435 772 xtty->end_elem();
duke@435 773 }
twisti@4111 774 }
twisti@4111 775
vlivanov@5638 776 bool Method::is_always_compilable() const {
vlivanov@5638 777 // Generated adapters must be compiled
vlivanov@5638 778 if (is_method_handle_intrinsic() && is_synthetic()) {
vlivanov@5638 779 assert(!is_not_c1_compilable(), "sanity check");
vlivanov@5638 780 assert(!is_not_c2_compilable(), "sanity check");
vlivanov@5638 781 return true;
vlivanov@5638 782 }
vlivanov@5638 783
vlivanov@5638 784 return false;
vlivanov@5638 785 }
vlivanov@5638 786
twisti@4111 787 bool Method::is_not_compilable(int comp_level) const {
twisti@4111 788 if (number_of_breakpoints() > 0)
twisti@4111 789 return true;
vlivanov@5638 790 if (is_always_compilable())
vlivanov@5638 791 return false;
twisti@4111 792 if (comp_level == CompLevel_any)
twisti@4111 793 return is_not_c1_compilable() || is_not_c2_compilable();
twisti@4111 794 if (is_c1_compile(comp_level))
twisti@4111 795 return is_not_c1_compilable();
twisti@4111 796 if (is_c2_compile(comp_level))
twisti@4111 797 return is_not_c2_compilable();
twisti@4111 798 return false;
twisti@4111 799 }
twisti@4111 800
twisti@4111 801 // call this when compiler finds that this method is not compilable
vlivanov@4539 802 void Method::set_not_compilable(int comp_level, bool report, const char* reason) {
vlivanov@5638 803 if (is_always_compilable()) {
vlivanov@5638 804 // Don't mark a method which should always be compilable
vlivanov@5638 805 return;
vlivanov@5638 806 }
vlivanov@4539 807 print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
iveresov@2138 808 if (comp_level == CompLevel_all) {
iveresov@2138 809 set_not_c1_compilable();
iveresov@2138 810 set_not_c2_compilable();
iveresov@2138 811 } else {
twisti@4111 812 if (is_c1_compile(comp_level))
iveresov@2138 813 set_not_c1_compilable();
twisti@4111 814 if (is_c2_compile(comp_level))
twisti@4111 815 set_not_c2_compilable();
twisti@4111 816 }
twisti@4111 817 CompilationPolicy::policy()->disable_compilation(this);
iignatyev@5541 818 assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
twisti@4111 819 }
twisti@4111 820
twisti@4111 821 bool Method::is_not_osr_compilable(int comp_level) const {
twisti@4111 822 if (is_not_compilable(comp_level))
twisti@4111 823 return true;
twisti@4111 824 if (comp_level == CompLevel_any)
twisti@4111 825 return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
twisti@4111 826 if (is_c1_compile(comp_level))
twisti@4111 827 return is_not_c1_osr_compilable();
twisti@4111 828 if (is_c2_compile(comp_level))
twisti@4111 829 return is_not_c2_osr_compilable();
twisti@4111 830 return false;
twisti@4111 831 }
twisti@4111 832
vlivanov@4539 833 void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
vlivanov@4539 834 print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
twisti@4111 835 if (comp_level == CompLevel_all) {
twisti@4111 836 set_not_c1_osr_compilable();
twisti@4111 837 set_not_c2_osr_compilable();
twisti@4111 838 } else {
twisti@4111 839 if (is_c1_compile(comp_level))
twisti@4111 840 set_not_c1_osr_compilable();
twisti@4111 841 if (is_c2_compile(comp_level))
twisti@4111 842 set_not_c2_osr_compilable();
duke@435 843 }
iveresov@2138 844 CompilationPolicy::policy()->disable_compilation(this);
iignatyev@5541 845 assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
duke@435 846 }
duke@435 847
duke@435 848 // Revert to using the interpreter and clear out the nmethod
coleenp@4037 849 void Method::clear_code() {
duke@435 850
duke@435 851 // this may be NULL if c2i adapters have not been made yet
duke@435 852 // Should only happen at allocate time.
duke@435 853 if (_adapter == NULL) {
duke@435 854 _from_compiled_entry = NULL;
duke@435 855 } else {
duke@435 856 _from_compiled_entry = _adapter->get_c2i_entry();
duke@435 857 }
duke@435 858 OrderAccess::storestore();
duke@435 859 _from_interpreted_entry = _i2i_entry;
duke@435 860 OrderAccess::storestore();
duke@435 861 _code = NULL;
duke@435 862 }
duke@435 863
duke@435 864 // Called by class data sharing to remove any entry points (which are not shared)
coleenp@4037 865 void Method::unlink_method() {
duke@435 866 _code = NULL;
duke@435 867 _i2i_entry = NULL;
duke@435 868 _from_interpreted_entry = NULL;
duke@435 869 if (is_native()) {
duke@435 870 *native_function_addr() = NULL;
duke@435 871 set_signature_handler(NULL);
duke@435 872 }
duke@435 873 NOT_PRODUCT(set_compiled_invocation_count(0);)
duke@435 874 _adapter = NULL;
duke@435 875 _from_compiled_entry = NULL;
coleenp@4751 876
coleenp@4751 877 // In case of DumpSharedSpaces, _method_data should always be NULL.
coleenp@4751 878 //
coleenp@4751 879 // During runtime (!DumpSharedSpaces), when we are cleaning a
coleenp@4751 880 // shared class that failed to load, this->link_method() may
coleenp@4751 881 // have already been called (before an exception happened), so
coleenp@4751 882 // this->_method_data may not be NULL.
coleenp@4751 883 assert(!DumpSharedSpaces || _method_data == NULL, "unexpected method data?");
coleenp@4751 884
duke@435 885 set_method_data(NULL);
iveresov@7203 886 clear_method_counters();
duke@435 887 }
duke@435 888
duke@435 889 // Called when the method_holder is getting linked. Setup entrypoints so the method
duke@435 890 // is ready to be called from interpreter, compiler, and vtables.
coleenp@4037 891 void Method::link_method(methodHandle h_method, TRAPS) {
coleenp@2945 892 // If the code cache is full, we may reenter this function for the
coleenp@2945 893 // leftover methods that weren't linked.
coleenp@2945 894 if (_i2i_entry != NULL) return;
coleenp@2945 895
duke@435 896 assert(_adapter == NULL, "init'd to NULL" );
duke@435 897 assert( _code == NULL, "nothing compiled yet" );
duke@435 898
duke@435 899 // Setup interpreter entrypoint
duke@435 900 assert(this == h_method(), "wrong h_method()" );
duke@435 901 address entry = Interpreter::entry_for_method(h_method);
duke@435 902 assert(entry != NULL, "interpreter entry must be non-null");
duke@435 903 // Sets both _i2i_entry and _from_interpreted_entry
duke@435 904 set_interpreter_entry(entry);
twisti@5108 905
twisti@5108 906 // Don't overwrite already registered native entries.
twisti@5108 907 if (is_native() && !has_native_function()) {
duke@435 908 set_native_function(
duke@435 909 SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
duke@435 910 !native_bind_event_is_interesting);
duke@435 911 }
duke@435 912
duke@435 913 // Setup compiler entrypoint. This is made eagerly, so we do not need
duke@435 914 // special handling of vtables. An alternative is to make adapters more
duke@435 915 // lazily by calling make_adapter() from from_compiled_entry() for the
duke@435 916 // normal calls. For vtable calls life gets more complicated. When a
duke@435 917 // call-site goes mega-morphic we need adapters in all methods which can be
duke@435 918 // called from the vtable. We need adapters on such methods that get loaded
duke@435 919 // later. Ditto for mega-morphic itable calls. If this proves to be a
duke@435 920 // problem we'll make these lazily later.
coleenp@2946 921 (void) make_adapters(h_method, CHECK);
duke@435 922
duke@435 923 // ONLY USE the h_method now as make_adapter may have blocked
duke@435 924
duke@435 925 }
duke@435 926
coleenp@4037 927 address Method::make_adapters(methodHandle mh, TRAPS) {
duke@435 928 // Adapters for compiled code are made eagerly here. They are fairly
duke@435 929 // small (generally < 100 bytes) and quick to make (and cached and shared)
duke@435 930 // so making them eagerly shouldn't be too expensive.
duke@435 931 AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
duke@435 932 if (adapter == NULL ) {
never@1622 933 THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
duke@435 934 }
duke@435 935
duke@435 936 mh->set_adapter_entry(adapter);
duke@435 937 mh->_from_compiled_entry = adapter->get_c2i_entry();
duke@435 938 return adapter->get_c2i_entry();
duke@435 939 }
duke@435 940
coleenp@6626 941 void Method::restore_unshareable_info(TRAPS) {
coleenp@6626 942 // Since restore_unshareable_info can be called more than once for a method, don't
coleenp@6626 943 // redo any work. If this field is restored, there is nothing to do.
coleenp@6626 944 if (_from_compiled_entry == NULL) {
coleenp@6626 945 // restore method's vtable by calling a virtual function
coleenp@6626 946 restore_vtable();
coleenp@6626 947
coleenp@6626 948 methodHandle mh(THREAD, this);
coleenp@6626 949 link_method(mh, CHECK);
coleenp@6626 950 }
coleenp@6626 951 }
coleenp@6626 952
coleenp@6626 953
duke@435 954 // The verified_code_entry() must be called when an invoke is resolved
duke@435 955 // on this method.
duke@435 956
duke@435 957 // It returns the compiled code entry point, after asserting not null.
duke@435 958 // This function is called after potential safepoints so that nmethod
duke@435 959 // or adapter that it points to is still live and valid.
duke@435 960 // This function must not hit a safepoint!
coleenp@4037 961 address Method::verified_code_entry() {
duke@435 962 debug_only(No_Safepoint_Verifier nsv;)
duke@435 963 assert(_from_compiled_entry != NULL, "must be set");
duke@435 964 return _from_compiled_entry;
duke@435 965 }
duke@435 966
duke@435 967 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
duke@435 968 // (could be racing a deopt).
duke@435 969 // Not inline to avoid circular ref.
coleenp@4037 970 bool Method::check_code() const {
duke@435 971 // cached in a register or local. There's a race on the value of the field.
duke@435 972 nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
coleenp@4037 973 return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
duke@435 974 }
duke@435 975
duke@435 976 // Install compiled code. Instantly it can execute.
coleenp@4037 977 void Method::set_code(methodHandle mh, nmethod *code) {
duke@435 978 assert( code, "use clear_code to remove code" );
duke@435 979 assert( mh->check_code(), "" );
duke@435 980
duke@435 981 guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
duke@435 982
duke@435 983 // These writes must happen in this order, because the interpreter will
duke@435 984 // directly jump to from_interpreted_entry which jumps to an i2c adapter
duke@435 985 // which jumps to _from_compiled_entry.
duke@435 986 mh->_code = code; // Assign before allowing compiled code to exec
duke@435 987
duke@435 988 int comp_level = code->comp_level();
duke@435 989 // In theory there could be a race here. In practice it is unlikely
duke@435 990 // and not worth worrying about.
iveresov@2138 991 if (comp_level > mh->highest_comp_level()) {
iveresov@2138 992 mh->set_highest_comp_level(comp_level);
duke@435 993 }
duke@435 994
duke@435 995 OrderAccess::storestore();
twisti@2047 996 #ifdef SHARK
twisti@2200 997 mh->_from_interpreted_entry = code->insts_begin();
twisti@3969 998 #else //!SHARK
duke@435 999 mh->_from_compiled_entry = code->verified_entry_point();
duke@435 1000 OrderAccess::storestore();
duke@435 1001 // Instantly compiled code can execute.
twisti@3969 1002 if (!mh->is_method_handle_intrinsic())
twisti@3969 1003 mh->_from_interpreted_entry = mh->get_i2c_entry();
twisti@3969 1004 #endif //!SHARK
duke@435 1005 }
duke@435 1006
duke@435 1007
coleenp@4037 1008 bool Method::is_overridden_in(Klass* k) const {
coleenp@4037 1009 InstanceKlass* ik = InstanceKlass::cast(k);
duke@435 1010
duke@435 1011 if (ik->is_interface()) return false;
duke@435 1012
duke@435 1013 // If the method's holder is an interface, we skip it - except if it
duke@435 1014 // is a miranda method
coleenp@4251 1015 if (method_holder()->is_interface()) {
duke@435 1016 // Check that method is not a miranda method
duke@435 1017 if (ik->lookup_method(name(), signature()) == NULL) {
duke@435 1018 // No implementation exist - so miranda method
duke@435 1019 return false;
duke@435 1020 }
duke@435 1021 return true;
duke@435 1022 }
duke@435 1023
duke@435 1024 assert(ik->is_subclass_of(method_holder()), "should be subklass");
duke@435 1025 assert(ik->vtable() != NULL, "vtable should exist");
drchase@5732 1026 if (!has_vtable_index()) {
duke@435 1027 return false;
duke@435 1028 } else {
coleenp@4037 1029 Method* vt_m = ik->method_at_vtable(vtable_index());
coleenp@4037 1030 return vt_m != this;
duke@435 1031 }
duke@435 1032 }
duke@435 1033
duke@435 1034
coleenp@4037 1035 // give advice about whether this Method* should be cached or not
coleenp@4037 1036 bool Method::should_not_be_cached() const {
dcubed@483 1037 if (is_old()) {
dcubed@483 1038 // This method has been redefined. It is either EMCP or obsolete
dcubed@483 1039 // and we don't want to cache it because that would pin the method
dcubed@483 1040 // down and prevent it from being collectible if and when it
dcubed@483 1041 // finishes executing.
dcubed@483 1042 return true;
dcubed@483 1043 }
dcubed@483 1044
dcubed@483 1045 // caching this method should be just fine
dcubed@483 1046 return false;
dcubed@483 1047 }
dcubed@483 1048
twisti@4866 1049
twisti@4866 1050 /**
twisti@4866 1051 * Returns true if this is one of the specially treated methods for
twisti@4866 1052 * security related stack walks (like Reflection.getCallerClass).
twisti@4866 1053 */
twisti@4866 1054 bool Method::is_ignored_by_security_stack_walk() const {
twisti@4866 1055 const bool use_new_reflection = JDK_Version::is_gte_jdk14x_version() && UseNewReflection;
twisti@4866 1056
twisti@4866 1057 if (intrinsic_id() == vmIntrinsics::_invoke) {
twisti@4866 1058 // This is Method.invoke() -- ignore it
twisti@4866 1059 return true;
twisti@4866 1060 }
twisti@4866 1061 if (use_new_reflection &&
twisti@4866 1062 method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
twisti@4866 1063 // This is an auxiliary frame -- ignore it
twisti@4866 1064 return true;
twisti@4866 1065 }
twisti@4866 1066 if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
twisti@4866 1067 // This is an internal adapter frame for method handles -- ignore it
twisti@4866 1068 return true;
twisti@4866 1069 }
twisti@4866 1070 return false;
twisti@4866 1071 }
twisti@4866 1072
twisti@4866 1073
jrose@1145 1074 // Constant pool structure for invoke methods:
jrose@1145 1075 enum {
twisti@3969 1076 _imcp_invoke_name = 1, // utf8: 'invokeExact', etc.
coleenp@2497 1077 _imcp_invoke_signature, // utf8: (variable Symbol*)
jrose@1145 1078 _imcp_limit
jrose@1145 1079 };
jrose@1145 1080
twisti@3969 1081 // Test if this method is an MH adapter frame generated by Java code.
twisti@3969 1082 // Cf. java/lang/invoke/InvokerBytecodeGenerator
coleenp@4037 1083 bool Method::is_compiled_lambda_form() const {
twisti@3969 1084 return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
jrose@1145 1085 }
jrose@1145 1086
twisti@3969 1087 // Test if this method is an internal MH primitive method.
coleenp@4037 1088 bool Method::is_method_handle_intrinsic() const {
twisti@3969 1089 vmIntrinsics::ID iid = intrinsic_id();
twisti@3969 1090 return (MethodHandles::is_signature_polymorphic(iid) &&
twisti@3969 1091 MethodHandles::is_signature_polymorphic_intrinsic(iid));
jrose@1145 1092 }
jrose@1145 1093
coleenp@4037 1094 bool Method::has_member_arg() const {
twisti@3969 1095 vmIntrinsics::ID iid = intrinsic_id();
twisti@3969 1096 return (MethodHandles::is_signature_polymorphic(iid) &&
twisti@3969 1097 MethodHandles::has_member_arg(iid));
twisti@1587 1098 }
twisti@1587 1099
twisti@3969 1100 // Make an instance of a signature-polymorphic internal MH primitive.
coleenp@4037 1101 methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
twisti@3969 1102 Symbol* signature,
twisti@3969 1103 TRAPS) {
never@3091 1104 ResourceMark rm;
jrose@1145 1105 methodHandle empty;
jrose@1145 1106
twisti@3969 1107 KlassHandle holder = SystemDictionary::MethodHandle_klass();
twisti@3969 1108 Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
twisti@3969 1109 assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
jrose@1145 1110 if (TraceMethodHandles) {
twisti@3969 1111 tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
jrose@1145 1112 }
jrose@1145 1113
jrose@2760 1114 // invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
jrose@2760 1115 name->increment_refcount();
jrose@2760 1116 signature->increment_refcount();
jrose@2760 1117
twisti@3969 1118 int cp_length = _imcp_limit;
coleenp@4037 1119 ClassLoaderData* loader_data = holder->class_loader_data();
jrose@1145 1120 constantPoolHandle cp;
jrose@1145 1121 {
coleenp@4037 1122 ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
jrose@1145 1123 cp = constantPoolHandle(THREAD, cp_oop);
jrose@1145 1124 }
coleenp@4251 1125 cp->set_pool_holder(InstanceKlass::cast(holder()));
coleenp@2497 1126 cp->symbol_at_put(_imcp_invoke_name, name);
coleenp@2497 1127 cp->symbol_at_put(_imcp_invoke_signature, signature);
coleenp@4490 1128 cp->set_has_preresolution();
jrose@1145 1129
twisti@3969 1130 // decide on access bits: public or not?
twisti@3969 1131 int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
twisti@3969 1132 bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
twisti@3969 1133 if (must_be_static) flags_bits |= JVM_ACC_STATIC;
twisti@3969 1134 assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
twisti@3969 1135
jrose@1145 1136 methodHandle m;
jrose@1145 1137 {
coleenp@4572 1138 InlineTableSizes sizes;
coleenp@4398 1139 Method* m_oop = Method::allocate(loader_data, 0,
coleenp@4572 1140 accessFlags_from(flags_bits), &sizes,
coleenp@4398 1141 ConstMethod::NORMAL, CHECK_(empty));
jrose@1145 1142 m = methodHandle(THREAD, m_oop);
jrose@1145 1143 }
jrose@1145 1144 m->set_constants(cp());
jrose@1145 1145 m->set_name_index(_imcp_invoke_name);
jrose@1145 1146 m->set_signature_index(_imcp_invoke_signature);
twisti@3969 1147 assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
coleenp@2497 1148 assert(m->signature() == signature, "");
twisti@2563 1149 ResultTypeFinder rtf(signature);
kevinw@8368 1150 m->constMethod()->set_result_type(rtf.type());
jrose@1145 1151 m->compute_size_of_parameters(THREAD);
jrose@2148 1152 m->init_intrinsic_id();
twisti@3969 1153 assert(m->is_method_handle_intrinsic(), "");
twisti@3969 1154 #ifdef ASSERT
twisti@3969 1155 if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id())) m->print();
twisti@3969 1156 assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
twisti@3969 1157 assert(m->intrinsic_id() == iid, "correctly predicted iid");
twisti@3969 1158 #endif //ASSERT
jrose@1145 1159
jrose@1145 1160 // Finally, set up its entry points.
jrose@1145 1161 assert(m->can_be_statically_bound(), "");
coleenp@4037 1162 m->set_vtable_index(Method::nonvirtual_vtable_index);
jrose@1145 1163 m->link_method(m, CHECK_(empty));
jrose@1145 1164
jrose@1474 1165 if (TraceMethodHandles && (Verbose || WizardMode))
jrose@1145 1166 m->print_on(tty);
jrose@1145 1167
jrose@1145 1168 return m;
jrose@1145 1169 }
jrose@1145 1170
coleenp@4037 1171 Klass* Method::check_non_bcp_klass(Klass* klass) {
hseigel@4278 1172 if (klass != NULL && klass->class_loader() != NULL) {
hseigel@4278 1173 if (klass->oop_is_objArray())
coleenp@4142 1174 klass = ObjArrayKlass::cast(klass)->bottom_klass();
jrose@2982 1175 return klass;
jrose@2982 1176 }
jrose@2982 1177 return NULL;
jrose@2982 1178 }
jrose@1145 1179
dcubed@483 1180
coleenp@4037 1181 methodHandle Method::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
duke@435 1182 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
duke@435 1183 // Code below does not work for native methods - they should never get rewritten anyway
duke@435 1184 assert(!m->is_native(), "cannot rewrite native methods");
coleenp@4037 1185 // Allocate new Method*
duke@435 1186 AccessFlags flags = m->access_flags();
coleenp@4572 1187
coleenp@4572 1188 ConstMethod* cm = m->constMethod();
coleenp@4572 1189 int checked_exceptions_len = cm->checked_exceptions_length();
coleenp@4572 1190 int localvariable_len = cm->localvariable_table_length();
coleenp@4572 1191 int exception_table_len = cm->exception_table_length();
coleenp@4572 1192 int method_parameters_len = cm->method_parameters_length();
coleenp@4572 1193 int method_annotations_len = cm->method_annotations_length();
coleenp@4572 1194 int parameter_annotations_len = cm->parameter_annotations_length();
coleenp@4572 1195 int type_annotations_len = cm->type_annotations_length();
coleenp@4572 1196 int default_annotations_len = cm->default_annotations_length();
coleenp@4572 1197
coleenp@4572 1198 InlineTableSizes sizes(
coleenp@4572 1199 localvariable_len,
coleenp@4572 1200 new_compressed_linenumber_size,
coleenp@4572 1201 exception_table_len,
coleenp@4572 1202 checked_exceptions_len,
coleenp@4572 1203 method_parameters_len,
coleenp@4572 1204 cm->generic_signature_index(),
coleenp@4572 1205 method_annotations_len,
coleenp@4572 1206 parameter_annotations_len,
coleenp@4572 1207 type_annotations_len,
coleenp@4572 1208 default_annotations_len,
coleenp@4572 1209 0);
coleenp@4037 1210
kamg@4245 1211 ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
coleenp@4037 1212 Method* newm_oop = Method::allocate(loader_data,
kamg@4245 1213 new_code_length,
kamg@4245 1214 flags,
coleenp@4572 1215 &sizes,
kamg@4245 1216 m->method_type(),
kamg@4245 1217 CHECK_(methodHandle()));
duke@435 1218 methodHandle newm (THREAD, newm_oop);
duke@435 1219 int new_method_size = newm->method_size();
jmasa@953 1220
coleenp@4037 1221 // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*
coleenp@4037 1222 ConstMethod* newcm = newm->constMethod();
coleenp@4037 1223 int new_const_method_size = newm->constMethod()->size();
ysr@2533 1224
coleenp@4037 1225 memcpy(newm(), m(), sizeof(Method));
coleenp@4037 1226
coleenp@4037 1227 // Create shallow copy of ConstMethod.
coleenp@4037 1228 memcpy(newcm, m->constMethod(), sizeof(ConstMethod));
ysr@2533 1229
duke@435 1230 // Reset correct method/const method, method size, and parameter info
duke@435 1231 newm->set_constMethod(newcm);
duke@435 1232 newm->constMethod()->set_code_size(new_code_length);
duke@435 1233 newm->constMethod()->set_constMethod_size(new_const_method_size);
duke@435 1234 newm->set_method_size(new_method_size);
duke@435 1235 assert(newm->code_size() == new_code_length, "check");
emc@5432 1236 assert(newm->method_parameters_length() == method_parameters_len, "check");
duke@435 1237 assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
jiangli@3920 1238 assert(newm->exception_table_length() == exception_table_len, "check");
duke@435 1239 assert(newm->localvariable_table_length() == localvariable_len, "check");
duke@435 1240 // Copy new byte codes
duke@435 1241 memcpy(newm->code_base(), new_code, new_code_length);
duke@435 1242 // Copy line number table
duke@435 1243 if (new_compressed_linenumber_size > 0) {
duke@435 1244 memcpy(newm->compressed_linenumber_table(),
duke@435 1245 new_compressed_linenumber_table,
duke@435 1246 new_compressed_linenumber_size);
duke@435 1247 }
emc@5432 1248 // Copy method_parameters
emc@5432 1249 if (method_parameters_len > 0) {
emc@5432 1250 memcpy(newm->method_parameters_start(),
emc@5432 1251 m->method_parameters_start(),
emc@5432 1252 method_parameters_len * sizeof(MethodParametersElement));
emc@5432 1253 }
duke@435 1254 // Copy checked_exceptions
duke@435 1255 if (checked_exceptions_len > 0) {
duke@435 1256 memcpy(newm->checked_exceptions_start(),
duke@435 1257 m->checked_exceptions_start(),
duke@435 1258 checked_exceptions_len * sizeof(CheckedExceptionElement));
duke@435 1259 }
jiangli@3920 1260 // Copy exception table
jiangli@3920 1261 if (exception_table_len > 0) {
jiangli@3920 1262 memcpy(newm->exception_table_start(),
jiangli@3920 1263 m->exception_table_start(),
jiangli@3920 1264 exception_table_len * sizeof(ExceptionTableElement));
jiangli@3920 1265 }
duke@435 1266 // Copy local variable number table
duke@435 1267 if (localvariable_len > 0) {
duke@435 1268 memcpy(newm->localvariable_table_start(),
duke@435 1269 m->localvariable_table_start(),
duke@435 1270 localvariable_len * sizeof(LocalVariableTableElement));
duke@435 1271 }
coleenp@4037 1272 // Copy stackmap table
coleenp@4037 1273 if (m->has_stackmap_table()) {
coleenp@4037 1274 int code_attribute_length = m->stackmap_data()->length();
coleenp@4037 1275 Array<u1>* stackmap_data =
coleenp@4037 1276 MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
coleenp@4037 1277 memcpy((void*)stackmap_data->adr_at(0),
coleenp@4037 1278 (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
coleenp@4037 1279 newm->set_stackmap_data(stackmap_data);
coleenp@4037 1280 }
jmasa@953 1281
coleenp@4837 1282 // copy annotations over to new method
coleenp@4837 1283 newcm->copy_annotations_from(cm);
duke@435 1284 return newm;
duke@435 1285 }
duke@435 1286
coleenp@4037 1287 vmSymbols::SID Method::klass_id_for_intrinsics(Klass* holder) {
duke@435 1288 // if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
duke@435 1289 // because we are not loading from core libraries
kvn@4205 1290 // exception: the AES intrinsics come from lib/ext/sunjce_provider.jar,
kvn@4205 1291 // which is not loaded by the default class loader, so we also accept the extension class loader here
twisti@4866 1292 InstanceKlass* ik = InstanceKlass::cast(holder);
twisti@4866 1293 if ((ik->class_loader() != NULL) && !SystemDictionary::is_ext_class_loader(ik->class_loader())) {
jrose@1291 1294 return vmSymbols::NO_SID; // regardless of name, no intrinsics here
kvn@4205 1295 }
duke@435 1296
duke@435 1297 // see if the klass name is well-known:
twisti@4866 1298 Symbol* klass_name = ik->name();
jrose@1291 1299 return vmSymbols::find_sid(klass_name);
jrose@1291 1300 }
jrose@1291 1301
coleenp@4037 1302 void Method::init_intrinsic_id() {
jrose@1291 1303 assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
jrose@1291 1304 const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
jrose@1291 1305 assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
jrose@2148 1306 assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
jrose@1291 1307
jrose@1291 1308 // the klass name is well-known:
jrose@1291 1309 vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
jrose@1291 1310 assert(klass_id != vmSymbols::NO_SID, "caller responsibility");
duke@435 1311
duke@435 1312 // ditto for method and signature:
duke@435 1313 vmSymbols::SID name_id = vmSymbols::find_sid(name());
twisti@3969 1314 if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
twisti@3969 1315 && name_id == vmSymbols::NO_SID)
twisti@3969 1316 return;
duke@435 1317 vmSymbols::SID sig_id = vmSymbols::find_sid(signature());
jrose@2639 1318 if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
jrose@2148 1319 && sig_id == vmSymbols::NO_SID) return;
duke@435 1320 jshort flags = access_flags().as_short();
duke@435 1321
jrose@1291 1322 vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
jrose@1291 1323 if (id != vmIntrinsics::_none) {
jrose@1291 1324 set_intrinsic_id(id);
jrose@1291 1325 return;
jrose@1291 1326 }
jrose@1291 1327
duke@435 1328 // A few slightly irregular cases:
duke@435 1329 switch (klass_id) {
duke@435 1330 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
duke@435 1331 // Second chance: check in regular Math.
duke@435 1332 switch (name_id) {
duke@435 1333 case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
duke@435 1334 case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
duke@435 1335 case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
duke@435 1336 // pretend it is the corresponding method in the non-strict class:
duke@435 1337 klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
jrose@1291 1338 id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
duke@435 1339 break;
duke@435 1340 }
jrose@1862 1341 break;
jrose@1862 1342
jrose@1862 1343 // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
jrose@2639 1344 case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
twisti@3969 1345 if (!is_native()) break;
twisti@3969 1346 id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
twisti@3969 1347 if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
twisti@3969 1348 id = vmIntrinsics::_none;
jrose@1862 1349 break;
duke@435 1350 }
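  // Illustrative example: StrictMath.sqrt(D)D has no intrinsic entry of its own,
  // so the second-chance lookup above re-runs the query against java_lang_Math
  // and yields the same intrinsic id as Math.sqrt.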
duke@435 1351
jrose@1291 1352 if (id != vmIntrinsics::_none) {
jrose@1291 1353 // Set up its iid. It is an alias method.
jrose@1291 1354 set_intrinsic_id(id);
jrose@1291 1355 return;
jrose@1291 1356 }
duke@435 1357 }
duke@435 1358
coleenp@4037 1359 // These two methods are static since a GC may move the Method
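// load_signature_classes eagerly resolves every class named in m's signature and
// returns whether all of them could be loaded; has_unloaded_classes_in_signature
// only probes the system dictionary and reports whether any named class is missing.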
coleenp@4037 1360 bool Method::load_signature_classes(methodHandle m, TRAPS) {
twisti@3969 1361 if (THREAD->is_Compiler_thread()) {
twisti@3969 1362 // There is nothing useful this routine can do from within a compiler thread.
twisti@3969 1363 // Hopefully, the signature contains only well-known classes.
twisti@3969 1364 // We could scan for this and return true/false, but the caller won't care.
twisti@3969 1365 return false;
twisti@3969 1366 }
duke@435 1367 bool sig_is_loaded = true;
coleenp@4251 1368 Handle class_loader(THREAD, m->method_holder()->class_loader());
coleenp@4251 1369 Handle protection_domain(THREAD, m->method_holder()->protection_domain());
coleenp@2497 1370 ResourceMark rm(THREAD);
coleenp@2497 1371 Symbol* signature = m->signature();
duke@435 1372 for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
duke@435 1373 if (ss.is_object()) {
coleenp@2497 1374 Symbol* sym = ss.as_symbol(CHECK_(false));
coleenp@2497 1375 Symbol* name = sym;
coleenp@4037 1376 Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,
duke@435 1377 protection_domain, THREAD);
rasbold@539 1378 // We are loading classes eagerly. If a ClassNotFoundException or
rasbold@539 1379 // a LinkageError was generated, be sure to ignore it.
duke@435 1380 if (HAS_PENDING_EXCEPTION) {
never@1577 1381 if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
never@1577 1382 PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
duke@435 1383 CLEAR_PENDING_EXCEPTION;
duke@435 1384 } else {
duke@435 1385 return false;
duke@435 1386 }
duke@435 1387 }
duke@435 1388 if( klass == NULL) { sig_is_loaded = false; }
duke@435 1389 }
duke@435 1390 }
duke@435 1391 return sig_is_loaded;
duke@435 1392 }
duke@435 1393
coleenp@4037 1394 bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
coleenp@4251 1395 Handle class_loader(THREAD, m->method_holder()->class_loader());
coleenp@4251 1396 Handle protection_domain(THREAD, m->method_holder()->protection_domain());
coleenp@2497 1397 ResourceMark rm(THREAD);
coleenp@2497 1398 Symbol* signature = m->signature();
duke@435 1399 for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
duke@435 1400 if (ss.type() == T_OBJECT) {
coleenp@2497 1401 Symbol* name = ss.as_symbol_or_null();
coleenp@2497 1402 if (name == NULL) return true;
coleenp@4037 1403 Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
duke@435 1404 if (klass == NULL) return true;
duke@435 1405 }
duke@435 1406 }
duke@435 1407 return false;
duke@435 1408 }
duke@435 1409
duke@435 1410 // Exposed so field engineers can debug VM
coleenp@4037 1411 void Method::print_short_name(outputStream* st) {
duke@435 1412 ResourceMark rm;
duke@435 1413 #ifdef PRODUCT
coleenp@4037 1414 st->print(" %s::", method_holder()->external_name());
duke@435 1415 #else
coleenp@4037 1416 st->print(" %s::", method_holder()->internal_name());
duke@435 1417 #endif
duke@435 1418 name()->print_symbol_on(st);
duke@435 1419 if (WizardMode) signature()->print_symbol_on(st);
twisti@3969 1420 else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
twisti@3969 1421 MethodHandles::print_as_basic_type_signature_on(st, signature(), true);
duke@435 1422 }
duke@435 1423
brutisso@2976 1424 // Comparator for sorting an object array containing
coleenp@4037 1425 // Method*s.
coleenp@4037 1426 static int method_comparator(Method* a, Method* b) {
coleenp@4037 1427 return a->name()->fast_compare(b->name());
kvn@3128 1428 }
duke@435 1429
duke@435 1430 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
acorn@5848 1431 // default_methods also uses this sorting (without setting idnums) to enable fast find_method lookups
acorn@5848 1432 void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {
duke@435 1433 int length = methods->length();
duke@435 1434 if (length > 1) {
brutisso@2976 1435 {
brutisso@2976 1436 No_Safepoint_Verifier nsv;
coleenp@4037 1437 QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);
duke@435 1438 }
duke@435 1439 // Reset method ordering
acorn@5848 1440 if (set_idnums) {
acorn@5848 1441 for (int i = 0; i < length; i++) {
acorn@5848 1442 Method* m = methods->at(i);
acorn@5848 1443 m->set_method_idnum(i);
sspitsyn@7636 1444 m->set_orig_method_idnum(i);
acorn@5848 1445 }
duke@435 1446 }
duke@435 1447 }
duke@435 1448 }
duke@435 1449
duke@435 1450 //-----------------------------------------------------------------------------------
dcubed@4562 1451 // Non-product code unless JVM/TI needs it
duke@435 1452
dcubed@4562 1453 #if !defined(PRODUCT) || INCLUDE_JVMTI
duke@435 1454 class SignatureTypePrinter : public SignatureTypeNames {
duke@435 1455 private:
duke@435 1456 outputStream* _st;
duke@435 1457 bool _use_separator;
duke@435 1458
duke@435 1459 void type_name(const char* name) {
duke@435 1460 if (_use_separator) _st->print(", ");
drchase@6680 1461 _st->print("%s", name);
duke@435 1462 _use_separator = true;
duke@435 1463 }
duke@435 1464
duke@435 1465 public:
coleenp@2497 1466 SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
duke@435 1467 _st = st;
duke@435 1468 _use_separator = false;
duke@435 1469 }
duke@435 1470
duke@435 1471 void print_parameters() { _use_separator = false; iterate_parameters(); }
duke@435 1472 void print_returntype() { _use_separator = false; iterate_returntype(); }
duke@435 1473 };
duke@435 1474
duke@435 1475
coleenp@4037 1476 void Method::print_name(outputStream* st) {
duke@435 1477 Thread *thread = Thread::current();
duke@435 1478 ResourceMark rm(thread);
duke@435 1479 SignatureTypePrinter sig(signature(), st);
duke@435 1480 st->print("%s ", is_static() ? "static" : "virtual");
duke@435 1481 sig.print_returntype();
coleenp@4037 1482 st->print(" %s.", method_holder()->internal_name());
duke@435 1483 name()->print_symbol_on(st);
duke@435 1484 st->print("(");
duke@435 1485 sig.print_parameters();
duke@435 1486 st->print(")");
duke@435 1487 }
dcubed@4562 1488 #endif // !PRODUCT || INCLUDE_JVMTI
duke@435 1489
duke@435 1490
dcubed@4562 1491 //-----------------------------------------------------------------------------------
dcubed@4562 1492 // Non-product code
dcubed@4562 1493
dcubed@4562 1494 #ifndef PRODUCT
coleenp@4037 1495 void Method::print_codes_on(outputStream* st) const {
duke@435 1496 print_codes_on(0, code_size(), st);
duke@435 1497 }
duke@435 1498
coleenp@4037 1499 void Method::print_codes_on(int from, int to, outputStream* st) const {
duke@435 1500 Thread *thread = Thread::current();
duke@435 1501 ResourceMark rm(thread);
coleenp@4037 1502 methodHandle mh (thread, (Method*)this);
duke@435 1503 BytecodeStream s(mh);
duke@435 1504 s.set_interval(from, to);
duke@435 1505 BytecodeTracer::set_closure(BytecodeTracer::std_closure());
duke@435 1506 while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
duke@435 1507 }
duke@435 1508 #endif // not PRODUCT
duke@435 1509
duke@435 1510
duke@435 1511 // Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
duke@435 1512 // between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
duke@435 1513 // we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
duke@435 1514 // as end-of-stream terminator.
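// Worked example (illustrative): the delta pair (bci_delta = 6, line_delta = 2)
// fits the (5-bit, 3-bit) layout and is written as the single byte
// (6 << 3) | 2 == 0x32, while a pair such as (40, 2) does not fit in 5 bits and
// is therefore written as the 0xFF escape byte followed by both deltas as
// regular compressed signed ints.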
duke@435 1515
duke@435 1516 void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
duke@435 1517 // The bci and line number deltas do not compress into a single byte.
duke@435 1518 // Write out escape character and use regular compression for bci and line number.
duke@435 1519 write_byte((jubyte)0xFF);
duke@435 1520 write_signed_int(bci_delta);
duke@435 1521 write_signed_int(line_delta);
duke@435 1522 }
duke@435 1523
coleenp@4037 1524 // See comment in method.hpp which explains why this exists.
sla@2540 1525 #if defined(_M_AMD64) && _MSC_VER >= 1400
duke@435 1526 #pragma optimize("", off)
duke@435 1527 void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
duke@435 1528 write_pair_inline(bci, line);
duke@435 1529 }
duke@435 1530 #pragma optimize("", on)
duke@435 1531 #endif
duke@435 1532
duke@435 1533 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
duke@435 1534 _bci = 0;
duke@435 1535 _line = 0;
duke@435 1536 };
duke@435 1537
duke@435 1538
duke@435 1539 bool CompressedLineNumberReadStream::read_pair() {
duke@435 1540 jubyte next = read_byte();
duke@435 1541 // Check for terminator
duke@435 1542 if (next == 0) return false;
duke@435 1543 if (next == 0xFF) {
duke@435 1544 // Escape character, regular compression used
duke@435 1545 _bci += read_signed_int();
duke@435 1546 _line += read_signed_int();
duke@435 1547 } else {
duke@435 1548 // Single byte compression used
duke@435 1549 _bci += next >> 3;
duke@435 1550 _line += next & 0x7;
duke@435 1551 }
duke@435 1552 return true;
duke@435 1553 }
duke@435 1554
duke@435 1555
coleenp@4037 1556 Bytecodes::Code Method::orig_bytecode_at(int bci) const {
coleenp@4251 1557 BreakpointInfo* bp = method_holder()->breakpoints();
duke@435 1558 for (; bp != NULL; bp = bp->next()) {
duke@435 1559 if (bp->match(this, bci)) {
duke@435 1560 return bp->orig_bytecode();
duke@435 1561 }
duke@435 1562 }
twisti@6039 1563 {
twisti@6039 1564 ResourceMark rm;
twisti@6039 1565 fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));
twisti@6039 1566 }
duke@435 1567 return Bytecodes::_shouldnotreachhere;
duke@435 1568 }
duke@435 1569
coleenp@4037 1570 void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
duke@435 1571 assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
coleenp@4251 1572 BreakpointInfo* bp = method_holder()->breakpoints();
duke@435 1573 for (; bp != NULL; bp = bp->next()) {
duke@435 1574 if (bp->match(this, bci)) {
duke@435 1575 bp->set_orig_bytecode(code);
duke@435 1576 // and continue, in case there is more than one
duke@435 1577 }
duke@435 1578 }
duke@435 1579 }
duke@435 1580
coleenp@4037 1581 void Method::set_breakpoint(int bci) {
coleenp@4251 1582 InstanceKlass* ik = method_holder();
duke@435 1583 BreakpointInfo *bp = new BreakpointInfo(this, bci);
duke@435 1584 bp->set_next(ik->breakpoints());
duke@435 1585 ik->set_breakpoints(bp);
duke@435 1586 // do this last:
duke@435 1587 bp->set(this);
duke@435 1588 }
duke@435 1589
coleenp@4037 1590 static void clear_matches(Method* m, int bci) {
coleenp@4251 1591 InstanceKlass* ik = m->method_holder();
duke@435 1592 BreakpointInfo* prev_bp = NULL;
duke@435 1593 BreakpointInfo* next_bp;
duke@435 1594 for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
duke@435 1595 next_bp = bp->next();
duke@435 1596 // a bci value of -1 is used to delete all breakpoints in method m (e.g., clear_all_breakpoints).
duke@435 1597 if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
duke@435 1598 // do this first:
duke@435 1599 bp->clear(m);
duke@435 1600 // unhook it
duke@435 1601 if (prev_bp != NULL)
duke@435 1602 prev_bp->set_next(next_bp);
duke@435 1603 else
duke@435 1604 ik->set_breakpoints(next_bp);
duke@435 1605 delete bp;
duke@435 1606 // When a class is redefined, JVMTI sets a breakpoint in all versions of its EMCP
duke@435 1607 // methods at the same location, so there can be multiple matching (method_index, bci)
duke@435 1608 // BreakpointInfo nodes in the BreakpointInfo list. For a clear_breakpoint request
duke@435 1609 // we delete just one of them and keep the BreakpointInfo nodes of the other
duke@435 1610 // method versions for future clear_breakpoint requests.
duke@435 1611 // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
duke@435 1612 // which is called when the class is unloaded. In that case we delete the breakpoint
duke@435 1613 // information for all versions of the method. We may not correctly restore the
duke@435 1614 // original bytecode in every method version, but that is ok because the class is
duke@435 1615 // being unloaded and these methods won't be used anymore.
duke@435 1616 if (bci >= 0) {
duke@435 1617 break;
duke@435 1618 }
duke@435 1619 } else {
duke@435 1620 // This one is a keeper.
duke@435 1621 prev_bp = bp;
duke@435 1622 }
duke@435 1623 }
duke@435 1624 }
duke@435 1625
coleenp@4037 1626 void Method::clear_breakpoint(int bci) {
duke@435 1627 assert(bci >= 0, "");
duke@435 1628 clear_matches(this, bci);
duke@435 1629 }
duke@435 1630
coleenp@4037 1631 void Method::clear_all_breakpoints() {
duke@435 1632 clear_matches(this, -1);
duke@435 1633 }
duke@435 1634
duke@435 1635
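// Under TieredCompilation the two accessors below report the sum of the
// MethodCounters and MethodData counters, saturating at
// InvocationCounter::count_limit once either carry bit is set; without tiered
// compilation they simply return the MethodCounters value (or 0 if none exist).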
coleenp@4037 1636 int Method::invocation_count() {
jiangli@4936 1637 MethodCounters *mcs = method_counters();
iveresov@2138 1638 if (TieredCompilation) {
coleenp@4037 1639 MethodData* const mdo = method_data();
jiangli@4936 1640 if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
jiangli@4936 1641 ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
iveresov@2138 1642 return InvocationCounter::count_limit;
iveresov@2138 1643 } else {
jiangli@4936 1644 return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
jiangli@4936 1645 ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
iveresov@2138 1646 }
iveresov@2138 1647 } else {
jiangli@4936 1648 return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
iveresov@2138 1649 }
iveresov@2138 1650 }
iveresov@2138 1651
coleenp@4037 1652 int Method::backedge_count() {
jiangli@4936 1653 MethodCounters *mcs = method_counters();
iveresov@2138 1654 if (TieredCompilation) {
coleenp@4037 1655 MethodData* const mdo = method_data();
jiangli@4936 1656 if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
jiangli@4936 1657 ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
iveresov@2138 1658 return InvocationCounter::count_limit;
iveresov@2138 1659 } else {
jiangli@4936 1660 return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
jiangli@4936 1661 ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
iveresov@2138 1662 }
iveresov@2138 1663 } else {
jiangli@4936 1664 return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
iveresov@2138 1665 }
iveresov@2138 1666 }
iveresov@2138 1667
coleenp@4037 1668 int Method::highest_comp_level() const {
iveresov@7171 1669 const MethodCounters* mcs = method_counters();
iveresov@7171 1670 if (mcs != NULL) {
iveresov@7171 1671 return mcs->highest_comp_level();
iveresov@2138 1672 } else {
iveresov@2138 1673 return CompLevel_none;
iveresov@2138 1674 }
iveresov@2138 1675 }
iveresov@2138 1676
coleenp@4037 1677 int Method::highest_osr_comp_level() const {
iveresov@7171 1678 const MethodCounters* mcs = method_counters();
iveresov@7171 1679 if (mcs != NULL) {
iveresov@7171 1680 return mcs->highest_osr_comp_level();
iveresov@2138 1681 } else {
iveresov@2138 1682 return CompLevel_none;
iveresov@2138 1683 }
iveresov@2138 1684 }
iveresov@2138 1685
coleenp@4037 1686 void Method::set_highest_comp_level(int level) {
iveresov@7171 1687 MethodCounters* mcs = method_counters();
iveresov@7171 1688 if (mcs != NULL) {
iveresov@7171 1689 mcs->set_highest_comp_level(level);
iveresov@2138 1690 }
iveresov@2138 1691 }
iveresov@2138 1692
coleenp@4037 1693 void Method::set_highest_osr_comp_level(int level) {
iveresov@7171 1694 MethodCounters* mcs = method_counters();
iveresov@7171 1695 if (mcs != NULL) {
iveresov@7171 1696 mcs->set_highest_osr_comp_level(level);
iveresov@2138 1697 }
iveresov@2138 1698 }
iveresov@2138 1699
coleenp@4037 1700 BreakpointInfo::BreakpointInfo(Method* m, int bci) {
duke@435 1701 _bci = bci;
duke@435 1702 _name_index = m->name_index();
duke@435 1703 _signature_index = m->signature_index();
duke@435 1704 _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
duke@435 1705 if (_orig_bytecode == Bytecodes::_breakpoint)
duke@435 1706 _orig_bytecode = m->orig_bytecode_at(_bci);
duke@435 1707 _next = NULL;
duke@435 1708 }
duke@435 1709
coleenp@4037 1710 void BreakpointInfo::set(Method* method) {
duke@435 1711 #ifdef ASSERT
duke@435 1712 {
duke@435 1713 Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
duke@435 1714 if (code == Bytecodes::_breakpoint)
duke@435 1715 code = method->orig_bytecode_at(_bci);
duke@435 1716 assert(orig_bytecode() == code, "original bytecode must be the same");
duke@435 1717 }
duke@435 1718 #endif
jiangli@4936 1719 Thread *thread = Thread::current();
duke@435 1720 *method->bcp_from(_bci) = Bytecodes::_breakpoint;
jiangli@4936 1721 method->incr_number_of_breakpoints(thread);
duke@435 1722 SystemDictionary::notice_modification();
duke@435 1723 {
duke@435 1724 // Deoptimize all dependents on this method
duke@435 1725 HandleMark hm(thread);
duke@435 1726 methodHandle mh(thread, method);
duke@435 1727 Universe::flush_dependents_on_method(mh);
duke@435 1728 }
duke@435 1729 }
duke@435 1730
coleenp@4037 1731 void BreakpointInfo::clear(Method* method) {
duke@435 1732 *method->bcp_from(_bci) = orig_bytecode();
duke@435 1733 assert(method->number_of_breakpoints() > 0, "must not go negative");
jiangli@4936 1734 method->decr_number_of_breakpoints(Thread::current());
duke@435 1735 }
coleenp@4037 1736
coleenp@4037 1737 // jmethodID handling
coleenp@4037 1738
coleenp@4037 1739 // This is a block-allocating object, sort of like JNIHandleBlock, only a
coleenp@4037 1740 // lot simpler. There aren't many of these, they aren't long, and they are rarely
coleenp@4037 1741 // deleted, so we can do some suboptimal things.
coleenp@4037 1742 // It's allocated on the CHeap because once we allocate a jmethodID, we can
coleenp@4037 1743 // never get rid of it.
coleenp@4037 1744 // It would be nice to be able to parameterize the number of methods for
coleenp@4037 1745 // the null_class_loader but then we'd have to turn this and ClassLoaderData
coleenp@4037 1746 // into templates.
coleenp@4037 1747
coleenp@4037 1748 // I feel like this brain dead class should exist somewhere in the STL
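// Sketch of the indirection (illustrative): a jmethodID handed out by add_method()
// is really the address of a Method* slot inside one of these blocks, so a native
// caller's id stays valid across method redefinition; the VM simply stores the new
// Method* into the same slot (see change_method_associated_with_jmethod_id below).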
coleenp@4037 1749
coleenp@4037 1750 class JNIMethodBlock : public CHeapObj<mtClass> {
coleenp@4037 1751 enum { number_of_methods = 8 };
coleenp@4037 1752
coleenp@4037 1753 Method* _methods[number_of_methods];
coleenp@4037 1754 int _top;
coleenp@4037 1755 JNIMethodBlock* _next;
coleenp@4037 1756 public:
coleenp@4037 1757 static Method* const _free_method;
coleenp@4037 1758
coleenp@4037 1759 JNIMethodBlock() : _next(NULL), _top(0) {
coleenp@4037 1760 for (int i = 0; i< number_of_methods; i++) _methods[i] = _free_method;
coleenp@4037 1761 }
coleenp@4037 1762
coleenp@4037 1763 Method** add_method(Method* m) {
coleenp@4037 1764 if (_top < number_of_methods) {
coleenp@4037 1765 // top points to the next free entry.
coleenp@4037 1766 int i = _top;
coleenp@4037 1767 _methods[i] = m;
coleenp@4037 1768 _top++;
coleenp@4037 1769 return &_methods[i];
coleenp@4037 1770 } else if (_top == number_of_methods) {
coleenp@4037 1771 // if the next free entry ran off the block, see if there's a free entry
coleenp@4037 1772 for (int i = 0; i< number_of_methods; i++) {
coleenp@4037 1773 if (_methods[i] == _free_method) {
coleenp@4037 1774 _methods[i] = m;
coleenp@4037 1775 return &_methods[i];
coleenp@4037 1776 }
coleenp@4037 1777 }
coleenp@4037 1778 // Only check each block once for frees. They're very unlikely.
coleenp@4037 1779 // Increment top past the end of the block.
coleenp@4037 1780 _top++;
coleenp@4037 1781 }
coleenp@4037 1782 // need to allocate a next block.
coleenp@4037 1783 if (_next == NULL) {
coleenp@4037 1784 _next = new JNIMethodBlock();
coleenp@4037 1785 }
coleenp@4037 1786 return _next->add_method(m);
coleenp@4037 1787 }
coleenp@4037 1788
coleenp@4037 1789 bool contains(Method** m) {
coleenp@4037 1790 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
coleenp@4037 1791 for (int i = 0; i< number_of_methods; i++) {
coleenp@4037 1792 if (&(b->_methods[i]) == m) {
coleenp@4037 1793 return true;
coleenp@4037 1794 }
coleenp@4037 1795 }
coleenp@4037 1796 }
coleenp@4037 1797 return false; // not found
coleenp@4037 1798 }
coleenp@4037 1799
coleenp@4037 1800 // Doesn't really destroy it, just marks it as free so it can be reused.
coleenp@4037 1801 void destroy_method(Method** m) {
coleenp@4037 1802 #ifdef ASSERT
coleenp@4037 1803 assert(contains(m), "should be a methodID");
coleenp@4037 1804 #endif // ASSERT
coleenp@4037 1805 *m = _free_method;
coleenp@4037 1806 }
shshahma@8583 1807 void clear_method(Method* m) {
shshahma@8583 1808 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
shshahma@8583 1809 for (int i = 0; i < number_of_methods; i++) {
shshahma@8583 1810 if (b->_methods[i] == m) {
shshahma@8583 1811 b->_methods[i] = NULL;
shshahma@8583 1812 return;
shshahma@8583 1813 }
shshahma@8583 1814 }
shshahma@8583 1815 }
shshahma@8583 1816 // not found
shshahma@8583 1817 }
coleenp@4037 1818
coleenp@4037 1819 // During class unloading the methods are cleared, which is different
coleenp@4037 1820 // from being freed.
coleenp@4037 1821 void clear_all_methods() {
coleenp@4037 1822 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
coleenp@4037 1823 for (int i = 0; i< number_of_methods; i++) {
shshahma@8440 1824 b->_methods[i] = NULL;
coleenp@4037 1825 }
coleenp@4037 1826 }
coleenp@4037 1827 }
coleenp@4037 1828 #ifndef PRODUCT
coleenp@4037 1829 int count_methods() {
coleenp@4037 1830 // count all allocated methods
coleenp@4037 1831 int count = 0;
coleenp@4037 1832 for (JNIMethodBlock* b = this; b != NULL; b = b->_next) {
coleenp@4037 1833 for (int i = 0; i< number_of_methods; i++) {
shshahma@8440 1834 if (b->_methods[i] != _free_method) count++;
coleenp@4037 1835 }
coleenp@4037 1836 }
coleenp@4037 1837 return count;
coleenp@4037 1838 }
coleenp@4037 1839 #endif // PRODUCT
coleenp@4037 1840 };
coleenp@4037 1841
coleenp@4037 1842 // Something that can't be mistaken for an address or a markOop
coleenp@4037 1843 Method* const JNIMethodBlock::_free_method = (Method*)55;
coleenp@4037 1844
coleenp@4037 1845 // Add a method id to the jmethod_ids
coleenp@4037 1846 jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
coleenp@4037 1847 ClassLoaderData* cld = loader_data;
coleenp@4037 1848
coleenp@4037 1849 if (!SafepointSynchronize::is_at_safepoint()) {
coleenp@4037 1850 // Have to add jmethod_ids() to class loader data thread-safely.
coleenp@4037 1851 // Also have to add the method to the list safely, which the cld lock
coleenp@4037 1852 // protects as well.
coleenp@4037 1853 MutexLockerEx ml(cld->metaspace_lock(), Mutex::_no_safepoint_check_flag);
coleenp@4037 1854 if (cld->jmethod_ids() == NULL) {
coleenp@4037 1855 cld->set_jmethod_ids(new JNIMethodBlock());
coleenp@4037 1856 }
coleenp@4037 1857 // jmethodID is a pointer to Method*
coleenp@4037 1858 return (jmethodID)cld->jmethod_ids()->add_method(m);
coleenp@4037 1859 } else {
coleenp@4037 1860 // At safepoint, we are single threaded and can set this.
coleenp@4037 1861 if (cld->jmethod_ids() == NULL) {
coleenp@4037 1862 cld->set_jmethod_ids(new JNIMethodBlock());
coleenp@4037 1863 }
coleenp@4037 1864 // jmethodID is a pointer to Method*
coleenp@4037 1865 return (jmethodID)cld->jmethod_ids()->add_method(m);
coleenp@4037 1866 }
coleenp@4037 1867 }
coleenp@4037 1868
coleenp@4037 1869 // Mark a jmethodID as free. This is called when there is a data race in
coleenp@4037 1870 // InstanceKlass while creating the jmethodID cache.
coleenp@4037 1871 void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
coleenp@4037 1872 ClassLoaderData* cld = loader_data;
coleenp@4037 1873 Method** ptr = (Method**)m;
coleenp@4037 1874 assert(cld->jmethod_ids() != NULL, "should have method handles");
coleenp@4037 1875 cld->jmethod_ids()->destroy_method(ptr);
coleenp@4037 1876 }
coleenp@4037 1877
coleenp@4037 1878 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
coleenp@4037 1879 // Can't assert the method_holder is the same because the new method has the
coleenp@4037 1880 // scratch method holder.
coleenp@4037 1881 assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
coleenp@4037 1882 == new_method->method_holder()->class_loader(),
coleenp@4037 1883 "changing to a different class loader");
coleenp@4037 1884 // Just change the method in place, jmethodID pointer doesn't change.
coleenp@4037 1885 *((Method**)jmid) = new_method;
coleenp@4037 1886 }
coleenp@4037 1887
coleenp@4037 1888 bool Method::is_method_id(jmethodID mid) {
coleenp@4037 1889 Method* m = resolve_jmethod_id(mid);
shshahma@8583 1890 if (m == NULL) {
shshahma@8583 1891 return false;
shshahma@8583 1892 }
coleenp@4251 1893 InstanceKlass* ik = m->method_holder();
shshahma@8440 1894 if (ik == NULL) {
shshahma@8440 1895 return false;
shshahma@8440 1896 }
coleenp@4037 1897 ClassLoaderData* cld = ik->class_loader_data();
coleenp@4037 1898 if (cld->jmethod_ids() == NULL) return false;
coleenp@4037 1899 return (cld->jmethod_ids()->contains((Method**)mid));
coleenp@4037 1900 }
coleenp@4037 1901
coleenp@4037 1902 Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
coleenp@4037 1903 if (mid == NULL) return NULL;
shshahma@8440 1904 if (!Method::is_method_id(mid)) {
shshahma@8440 1905 return NULL;
shshahma@8440 1906 }
coleenp@4037 1907 Method* o = resolve_jmethod_id(mid);
coleenp@4037 1908 if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
coleenp@4037 1909 return NULL;
coleenp@4037 1910 }
coleenp@4037 1911 return o;
coleenp@4037 1912 };
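// Illustrative use: a caller holding a raw jmethodID can validate it with
//   Method* m = Method::checked_resolve_jmethod_id(mid);
// where a NULL result means the id was never issued by this VM, its slot has been
// freed or cleared, or it no longer resolves to a live Method.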
coleenp@4037 1913
coleenp@4037 1914 void Method::set_on_stack(const bool value) {
coleenp@4037 1915 // Set both the method itself and its constant pool. The constant pool being
coleenp@4037 1916 // on the stack means some method referring to it is also on the stack.
coleenp@4037 1917 constants()->set_on_stack(value);
stefank@7333 1918
stefank@7333 1919 bool succeeded = _access_flags.set_on_stack(value);
stefank@7333 1920 if (value && succeeded) {
stefank@7333 1921 MetadataOnStackMark::record(this, Thread::current());
stefank@7333 1922 }
coleenp@4037 1923 }
coleenp@4037 1924
shshahma@8583 1925 void Method::clear_jmethod_id(ClassLoaderData* loader_data) {
shshahma@8583 1926 loader_data->jmethod_ids()->clear_method(this);
shshahma@8583 1927 }
shshahma@8583 1928
coleenp@4037 1929 // Called when the class loader is unloaded to make all methods weak.
coleenp@4037 1930 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
coleenp@4037 1931 loader_data->jmethod_ids()->clear_all_methods();
coleenp@4037 1932 }
coleenp@4037 1933
coleenp@6678 1934 bool Method::has_method_vptr(const void* ptr) {
coleenp@6678 1935 Method m;
coleenp@6678 1936 // This assumes that the vtbl pointer is the first word of a C++ object.
coleenp@6678 1937 // This assumption is also in universe.cpp patch_klass_vtble
coleenp@6678 1938 void* vtbl2 = dereference_vptr((const void*)&m);
coleenp@6678 1939 void* this_vtbl = dereference_vptr(ptr);
coleenp@6678 1940 return vtbl2 == this_vtbl;
coleenp@6678 1941 }
coleenp@4295 1942
coleenp@4295 1943 // Check that this pointer is valid by checking that the vtbl pointer matches
coleenp@4295 1944 bool Method::is_valid_method() const {
coleenp@4295 1945 if (this == NULL) {
coleenp@4295 1946 return false;
coleenp@4295 1947 } else if (!is_metaspace_object()) {
coleenp@4295 1948 return false;
coleenp@4295 1949 } else {
coleenp@6678 1950 return has_method_vptr((const void*)this);
coleenp@4295 1951 }
coleenp@4295 1952 }
coleenp@4295 1953
coleenp@4037 1954 #ifndef PRODUCT
coleenp@4037 1955 void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
coleenp@4037 1956 out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
coleenp@4037 1957 }
coleenp@4037 1958 #endif // PRODUCT
coleenp@4037 1959
coleenp@4037 1960
coleenp@4037 1961 // Printing
coleenp@4037 1962
coleenp@4037 1963 #ifndef PRODUCT
coleenp@4037 1964
coleenp@4037 1965 void Method::print_on(outputStream* st) const {
coleenp@4037 1966 ResourceMark rm;
coleenp@4037 1967 assert(is_method(), "must be method");
drchase@6680 1968 st->print_cr("%s", internal_name());
coleenp@4037 1969 // get the effect of PrintOopAddress, always, for methods:
coleenp@4037 1970 st->print_cr(" - this oop: "INTPTR_FORMAT, (intptr_t)this);
coleenp@4037 1971 st->print (" - method holder: "); method_holder()->print_value_on(st); st->cr();
coleenp@4037 1972 st->print (" - constants: "INTPTR_FORMAT" ", (address)constants());
coleenp@4037 1973 constants()->print_value_on(st); st->cr();
coleenp@4037 1974 st->print (" - access: 0x%x ", access_flags().as_int()); access_flags().print_on(st); st->cr();
coleenp@4037 1975 st->print (" - name: "); name()->print_value_on(st); st->cr();
coleenp@4037 1976 st->print (" - signature: "); signature()->print_value_on(st); st->cr();
coleenp@4037 1977 st->print_cr(" - max stack: %d", max_stack());
coleenp@4037 1978 st->print_cr(" - max locals: %d", max_locals());
coleenp@4037 1979 st->print_cr(" - size of params: %d", size_of_parameters());
coleenp@4037 1980 st->print_cr(" - method size: %d", method_size());
coleenp@4037 1981 if (intrinsic_id() != vmIntrinsics::_none)
coleenp@4037 1982 st->print_cr(" - intrinsic id: %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
coleenp@4037 1983 if (highest_comp_level() != CompLevel_none)
coleenp@4037 1984 st->print_cr(" - highest level: %d", highest_comp_level());
coleenp@4037 1985 st->print_cr(" - vtable index: %d", _vtable_index);
coleenp@4037 1986 st->print_cr(" - i2i entry: " INTPTR_FORMAT, interpreter_entry());
coleenp@4037 1987 st->print( " - adapters: ");
coleenp@4037 1988 AdapterHandlerEntry* a = ((Method*)this)->adapter();
coleenp@4037 1989 if (a == NULL)
coleenp@4037 1990 st->print_cr(INTPTR_FORMAT, a);
coleenp@4037 1991 else
coleenp@4037 1992 a->print_adapter_on(st);
coleenp@4037 1993 st->print_cr(" - compiled entry " INTPTR_FORMAT, from_compiled_entry());
coleenp@4037 1994 st->print_cr(" - code size: %d", code_size());
coleenp@4037 1995 if (code_size() != 0) {
coleenp@4037 1996 st->print_cr(" - code start: " INTPTR_FORMAT, code_base());
coleenp@4037 1997 st->print_cr(" - code end (excl): " INTPTR_FORMAT, code_base() + code_size());
coleenp@4037 1998 }
coleenp@4037 1999 if (method_data() != NULL) {
coleenp@4037 2000 st->print_cr(" - method data: " INTPTR_FORMAT, (address)method_data());
coleenp@4037 2001 }
coleenp@4037 2002 st->print_cr(" - checked ex length: %d", checked_exceptions_length());
coleenp@4037 2003 if (checked_exceptions_length() > 0) {
coleenp@4037 2004 CheckedExceptionElement* table = checked_exceptions_start();
coleenp@4037 2005 st->print_cr(" - checked ex start: " INTPTR_FORMAT, table);
coleenp@4037 2006 if (Verbose) {
coleenp@4037 2007 for (int i = 0; i < checked_exceptions_length(); i++) {
coleenp@4037 2008 st->print_cr(" - throws %s", constants()->printable_name_at(table[i].class_cp_index));
coleenp@4037 2009 }
coleenp@4037 2010 }
coleenp@4037 2011 }
coleenp@4037 2012 if (has_linenumber_table()) {
coleenp@4037 2013 u_char* table = compressed_linenumber_table();
coleenp@4037 2014 st->print_cr(" - linenumber start: " INTPTR_FORMAT, table);
coleenp@4037 2015 if (Verbose) {
coleenp@4037 2016 CompressedLineNumberReadStream stream(table);
coleenp@4037 2017 while (stream.read_pair()) {
coleenp@4037 2018 st->print_cr(" - line %d: %d", stream.line(), stream.bci());
coleenp@4037 2019 }
coleenp@4037 2020 }
coleenp@4037 2021 }
coleenp@4037 2022 st->print_cr(" - localvar length: %d", localvariable_table_length());
coleenp@4037 2023 if (localvariable_table_length() > 0) {
coleenp@4037 2024 LocalVariableTableElement* table = localvariable_table_start();
coleenp@4037 2025 st->print_cr(" - localvar start: " INTPTR_FORMAT, table);
coleenp@4037 2026 if (Verbose) {
coleenp@4037 2027 for (int i = 0; i < localvariable_table_length(); i++) {
coleenp@4037 2028 int bci = table[i].start_bci;
coleenp@4037 2029 int len = table[i].length;
coleenp@4037 2030 const char* name = constants()->printable_name_at(table[i].name_cp_index);
coleenp@4037 2031 const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
coleenp@4037 2032 int slot = table[i].slot;
coleenp@4037 2033 st->print_cr(" - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
coleenp@4037 2034 }
coleenp@4037 2035 }
coleenp@4037 2036 }
coleenp@4037 2037 if (code() != NULL) {
coleenp@4037 2038 st->print (" - compiled code: ");
coleenp@4037 2039 code()->print_value_on(st);
coleenp@4037 2040 }
coleenp@4037 2041 if (is_native()) {
coleenp@4037 2042 st->print_cr(" - native function: " INTPTR_FORMAT, native_function());
coleenp@4037 2043 st->print_cr(" - signature handler: " INTPTR_FORMAT, signature_handler());
coleenp@4037 2044 }
coleenp@4037 2045 }
coleenp@4037 2046
coleenp@4037 2047 #endif //PRODUCT
coleenp@4037 2048
coleenp@4037 2049 void Method::print_value_on(outputStream* st) const {
coleenp@4037 2050 assert(is_method(), "must be method");
drchase@6680 2051 st->print("%s", internal_name());
coleenp@4037 2052 print_address_on(st);
coleenp@4037 2053 st->print(" ");
coleenp@4037 2054 name()->print_value_on(st);
coleenp@4037 2055 st->print(" ");
coleenp@4037 2056 signature()->print_value_on(st);
coleenp@4037 2057 st->print(" in ");
coleenp@4037 2058 method_holder()->print_value_on(st);
drchase@5732 2059 if (WizardMode) st->print("#%d", _vtable_index);
coleenp@4037 2060 if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
coleenp@4037 2061 if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
coleenp@4037 2062 }
coleenp@4037 2063
acorn@4497 2064 #if INCLUDE_SERVICES
acorn@4497 2065 // Size Statistics
acorn@4497 2066 void Method::collect_statistics(KlassSizeStats *sz) const {
acorn@4497 2067 int mysize = sz->count(this);
acorn@4497 2068 sz->_method_bytes += mysize;
acorn@4497 2069 sz->_method_all_bytes += mysize;
acorn@4497 2070 sz->_rw_bytes += mysize;
acorn@4497 2071
acorn@4497 2072 if (constMethod()) {
acorn@4497 2073 constMethod()->collect_statistics(sz);
acorn@4497 2074 }
acorn@4497 2075 if (method_data()) {
acorn@4497 2076 method_data()->collect_statistics(sz);
acorn@4497 2077 }
acorn@4497 2078 }
acorn@4497 2079 #endif // INCLUDE_SERVICES
coleenp@4037 2080
coleenp@4037 2081 // Verification
coleenp@4037 2082
coleenp@4037 2083 void Method::verify_on(outputStream* st) {
coleenp@4037 2084 guarantee(is_method(), "object must be method");
coleenp@4037 2085 guarantee(constants()->is_constantPool(), "should be constant pool");
coleenp@4037 2086 guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
coleenp@4037 2087 MethodData* md = method_data();
coleenp@4037 2088 guarantee(md == NULL ||
coleenp@4037 2089 md->is_methodData(), "should be method data");
coleenp@4037 2090 }
