src/share/vm/oops/instanceKlass.cpp

Wed, 05 Dec 2007 09:00:00 -0800

author
dcubed
date
Wed, 05 Dec 2007 09:00:00 -0800
changeset 451
f8236e79048a
parent 435
a61af66fc99e
child 479
52fed2ec0afb
child 482
2c106685d6d0
permissions
-rw-r--r--

6664627: Merge changes made only in hotspot 11 forward to jdk 7
Reviewed-by: jcoomes

     1 /*
     2  * Copyright 1997-2007 Sun Microsystems, Inc.  All Rights Reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
    20  * CA 95054 USA or visit www.sun.com if you need additional information or
    21  * have any questions.
    22  *
    23  */
    25 # include "incls/_precompiled.incl"
    26 # include "incls/_instanceKlass.cpp.incl"
    28 bool instanceKlass::should_be_initialized() const {
    29   return !is_initialized();
    30 }
    32 klassVtable* instanceKlass::vtable() const {
    33   return new klassVtable(as_klassOop(), start_of_vtable(), vtable_length() / vtableEntry::size());
    34 }
    36 klassItable* instanceKlass::itable() const {
    37   return new klassItable(as_klassOop());
    38 }
    40 void instanceKlass::eager_initialize(Thread *thread) {
    41   if (!EagerInitialization) return;
    43   if (this->is_not_initialized()) {
    44     // abort if the the class has a class initializer
    45     if (this->class_initializer() != NULL) return;
    47     // abort if it is java.lang.Object (initialization is handled in genesis)
    48     klassOop super = this->super();
    49     if (super == NULL) return;
    51     // abort if the super class should be initialized
    52     if (!instanceKlass::cast(super)->is_initialized()) return;
    54     // call body to expose the this pointer
    55     instanceKlassHandle this_oop(thread, this->as_klassOop());
    56     eager_initialize_impl(this_oop);
    57   }
    58 }
// Static body for eager_initialize(); takes a handle so the receiver stays
// valid across safepoints.  Links the class and, on success, marks it
// fully_initialized directly (legal only because the caller verified there
// is no <clinit> to run).  Any link failure is swallowed and the previous
// init state restored.
void instanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  EXCEPTION_MARK;
  ObjectLocker ol(this_oop, THREAD);

  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_oop->_init_state;
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_oop->_init_state )
      this_oop->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_oop->set_init_state (fully_initialized);
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_oop->external_name());
    }
  }
}
    91 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
    92 // process. The step comments refers to the procedure described in that section.
    93 // Note: implementation moved to static method to expose the this pointer.
    94 void instanceKlass::initialize(TRAPS) {
    95   if (this->should_be_initialized()) {
    96     HandleMark hm(THREAD);
    97     instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    98     initialize_impl(this_oop, CHECK);
    99     // Note: at this point the class may be initialized
   100     //       OR it may be in the state of being initialized
   101     //       in case of recursive initialization!
   102   } else {
   103     assert(is_initialized(), "sanity check");
   104   }
   105 }
   108 bool instanceKlass::verify_code(
   109     instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
   110   // 1) Verify the bytecodes
   111   Verifier::Mode mode =
   112     throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
   113   return Verifier::verify(this_oop, mode, CHECK_false);
   114 }
   117 // Used exclusively by the shared spaces dump mechanism to prevent
   118 // classes mapped into the shared regions in new VMs from appearing linked.
// Reverts a linked class back to the 'loaded' state so it will be re-linked
// on next use.  (Per the comment above: only the shared-spaces dump
// mechanism calls this.)
void instanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}
   125 void instanceKlass::link_class(TRAPS) {
   126   assert(is_loaded(), "must be loaded");
   127   if (!is_linked()) {
   128     instanceKlassHandle this_oop(THREAD, this->as_klassOop());
   129     link_class_impl(this_oop, true, CHECK);
   130   }
   131 }
   133 // Called to verify that a class can link during initialization, without
   134 // throwing a VerifyError.
   135 bool instanceKlass::link_class_or_fail(TRAPS) {
   136   assert(is_loaded(), "must be loaded");
   137   if (!is_linked()) {
   138     instanceKlassHandle this_oop(THREAD, this->as_klassOop());
   139     link_class_impl(this_oop, false, CHECK_false);
   140   }
   141   return is_linked();
   142 }
// The full linking procedure: recursively links the superclass and local
// interfaces first, then (under the class's object lock) verifies,
// rewrites, and builds vtable/itable for this class.  Returns true once
// linked, false if verification fails with throw_verifyerror off; may
// throw for error states and link failures of supertypes.
bool instanceKlass::link_class_impl(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_oop->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_oop->external_name(), false);
  }
  // return if already verified
  if (this_oop->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;
  PerfTraceTimedEvent vmtimer(ClassLoader::perf_class_link_time(),
                        ClassLoader::perf_classes_linked(),
                        jt->get_thread_stat()->class_link_recursion_count_addr());

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_oop->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbolHandles::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_oop->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  objArrayHandle interfaces (THREAD, this_oop->local_interfaces());
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    // HandleMark bounds the handles created per interface iteration.
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, klassOop(interfaces->obj_at(index)));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_oop->is_linked()) {
    return true;
  }

  // verification & rewriting
  {
    // The object lock serializes concurrent link attempts on this class.
    ObjectLocker ol(this_oop, THREAD);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    if (!this_oop->is_linked()) {
      if (!this_oop->is_rewritten()) {
        {
          assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
          JavaThread* jt = (JavaThread*)THREAD;
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfTraceTime timer(ClassLoader::perf_class_verify_time(),
                            jt->get_thread_stat()->class_verify_recursion_count_addr());
          bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_oop->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_oop->rewrite_class(CHECK_false);
      }

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new methodOops.
      // also does loader constraint checking
      if (!this_oop()->is_shared()) {
        ResourceMark rm(THREAD);
        this_oop->vtable()->initialize_vtable(true, CHECK_false);
        this_oop->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        // Shared classes come pre-built; just sanity-check in debug builds.
        ResourceMark rm(THREAD);
        this_oop->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_oop->itable()->verify(tty, true);
      }
#endif
      this_oop->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
      }
    }
  }
  return true;
}
   258 // Rewrite the byte codes of all of the methods of a class.
   259 // Three cases:
   260 //    During the link of a newly loaded class.
   261 //    During the preloading of classes to be written to the shared spaces.
   262 //      - Rewrite the methods and update the method entry points.
   263 //
   264 //    During the link of a class in the shared spaces.
   265 //      - The methods were already rewritten, update the method entry points.
   266 //
   267 // The rewriter must be called exactly once. Rewriting must happen after
   268 // verification but before the first method of the class is executed.
// Rewrites all methods of this class exactly once (see the cases described
// in the comment above).  An already-rewritten class must be a shared one,
// in which case this is a no-op.
void instanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
  if (this_oop->is_rewritten()) {
    assert(this_oop()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_oop, CHECK); // No exception can happen here
  this_oop->set_rewritten();
}
// The JVMS class initialization procedure (the numbered steps below follow
// the specification's description; "the JVM book page 47" per the original
// comment).  Handles concurrent initializers, reentrancy, erroneous
// classes, superclass initialization, running <clinit>, and converting a
// thrown non-Error into ExceptionInInitializerError.
void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  // refer to the JVM book page 47 for description of steps
  // Step 1
  { ObjectLocker ol(this_oop, THREAD);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3: reentrant initialization by the same thread — done.
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self))
      return;

    // Step 4: another thread finished it while we waited.
    if (this_oop->is_initialized())
      return;

    // Step 5: a previous attempt failed; report NoClassDefFoundError.
    if (this_oop->is_in_error_state()) {
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      // NOTE(review): 'message' is C-heap allocated and not freed before the
      // throw — presumably an accepted small leak on this error path; verify.
      char* message = NEW_C_HEAP_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6: claim initialization for this thread, then drop the lock.
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }

  // Step 7: initialize the superclass first (interfaces skip this).
  klassOop super_klass = this_oop->super();
  if (super_klass != NULL && !this_oop->is_interface() && Klass::cast(super_klass)->should_be_initialized()) {
    Klass::cast(super_klass)->initialize(THREAD);

    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD); // Locks object, set state, and notify all waiting threads
        CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, superclass initialization error is thrown below
      }
      THROW_OOP(e());
    }
  }

  // Step 8: run this class's <clinit>.
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfTraceTimedEvent timer(ClassLoader::perf_class_init_time(),
                              ClassLoader::perf_classes_inited(),
                              jt->get_thread_stat()->class_init_recursion_count_addr());
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9: <clinit> completed normally — publish fully_initialized.
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11: mark erroneous; rethrow Errors as-is, wrap anything
    // else in ExceptionInInitializerError.
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
    }
    if (e->is_a(SystemDictionary::error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbolHandles::java_lang_ExceptionInInitializerError(),
                vmSymbolHandles::throwable_void_signature(),
                &args);
    }
  }
}
   387 // Note: implementation moved to static method to expose the this pointer.
// Sets the init state under the class's object lock and wakes all threads
// waiting on it (see initialize_impl Step 2).
void instanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  instanceKlassHandle kh(THREAD, this->as_klassOop());
  set_initialization_state_and_notify_impl(kh, state, CHECK);
}
// Static body: lock, update state, notify all waiters.
void instanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_oop, ClassState state, TRAPS) {
  ObjectLocker ol(this_oop, THREAD);
  this_oop->set_init_state(state);
  ol.notify_all(CHECK);
}
// Records k as an implementor of this interface (and, recursively, of all
// its local interfaces) for CHA.  At most implementors_limit implementors
// are tracked; on the first overflow the list is cleared, leaving only the
// _nof_implementors count meaningful.
void instanceKlass::add_implementor(klassOop k) {
  assert(Compile_lock->owned_by_self(), "");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (instanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  klassOop sk = instanceKlass::cast(k)->super();
  if (sk != NULL && instanceKlass::cast(sk)->implements_interface(as_klassOop()))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  // Update number of implementors
  int i = _nof_implementors++;

  // Record this implementor, if there are not too many already
  if (i < implementors_limit) {
    assert(_implementors[i] == NULL, "should be exactly one implementor");
    oop_store_without_check((oop*)&_implementors[i], k);
  } else if (i == implementors_limit) {
    // clear out the list on first overflow
    for (int i2 = 0; i2 < implementors_limit; i2++)
      oop_store_without_check((oop*)&_implementors[i2], NULL);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    instanceKlass::cast(klassOop(local_interfaces()->obj_at(index)))->add_implementor(k);
  }
}
   434 void instanceKlass::init_implementor() {
   435   for (int i = 0; i < implementors_limit; i++)
   436     oop_store_without_check((oop*)&_implementors[i], NULL);
   437   _nof_implementors = 0;
   438 }
   441 void instanceKlass::process_interfaces(Thread *thread) {
   442   // link this class into the implementors list of every interface it implements
   443   KlassHandle this_as_oop (thread, this->as_klassOop());
   444   for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
   445     assert(local_interfaces()->obj_at(i)->is_klass(), "must be a klass");
   446     instanceKlass* interf = instanceKlass::cast(klassOop(local_interfaces()->obj_at(i)));
   447     assert(interf->is_interface(), "expected interface");
   448     interf->add_implementor(this_as_oop());
   449   }
   450 }
   452 bool instanceKlass::can_be_primary_super_slow() const {
   453   if (is_interface())
   454     return false;
   455   else
   456     return Klass::can_be_primary_super_slow();
   457 }
// Builds the secondary-supers array: the transitive interfaces, optionally
// preceded by num_extra_slots empty slots the caller fills in later.
objArrayOop instanceKlass::compute_secondary_supers(int num_extra_slots, TRAPS) {
  // The secondaries are the implemented interfaces.
  instanceKlass* ik = instanceKlass::cast(as_klassOop());
  objArrayHandle interfaces (THREAD, ik->transitive_interfaces());
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Nothing to store: share the canonical empty array.
    return Universe::the_empty_system_obj_array();
  } else if (num_extra_slots == 0) {
    // No extra slots requested: the transitive interface array itself works.
    return interfaces();
  } else {
    // a mix of both
    objArrayOop secondaries = oopFactory::new_system_objArray(num_secondaries, CHECK_NULL);
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->obj_at_put(num_extra_slots+i, interfaces->obj_at(i));
    }
    return secondaries;
  }
}
   478 bool instanceKlass::compute_is_subtype_of(klassOop k) {
   479   if (Klass::cast(k)->is_interface()) {
   480     return implements_interface(k);
   481   } else {
   482     return Klass::compute_is_subtype_of(k);
   483   }
   484 }
   486 bool instanceKlass::implements_interface(klassOop k) const {
   487   if (as_klassOop() == k) return true;
   488   assert(Klass::cast(k)->is_interface(), "should be an interface class");
   489   for (int i = 0; i < transitive_interfaces()->length(); i++) {
   490     if (transitive_interfaces()->obj_at(i) == k) {
   491       return true;
   492     }
   493   }
   494   return false;
   495 }
// Allocates an n-dimensional object array of this klass with the given
// length.  Throws NegativeArraySizeException for length < 0 and a
// preallocated OOME for lengths beyond the VM maximum.
objArrayOop instanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  // Resolve (or lazily create) the n-dimensional array klass first.
  klassOop ak = array_klass(n, CHECK_NULL);
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}
// Registers instance i with the finalizer machinery by calling the Java
// Finalizer.register method.  Returns the (possibly moved) instance via
// the handle; returns NULL if the Java call throws.
instanceOop instanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}
// Allocates a new instance of this klass on the Java heap, registering it
// as finalizable immediately unless -XX:+RegisterFinalizersAtInit defers
// that to the Object.<init> constructor.
instanceOop instanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, as_klassOop());

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}
// Allocates an instance of this klass in the permanent generation.
instanceOop instanceKlass::allocate_permanent_instance(TRAPS) {
  // Finalizer registration occurs in the Object.<init> constructor
  // and constructors normally aren't run when allocating perm
  // instances so simply disallow finalizable perm objects.  This can
  // be relaxed if a need for it is found.
  assert(!has_finalizer(), "perm objects not allowed to have finalizers");
  int size = size_helper();  // Query before forming handle.
  KlassHandle h_k(THREAD, as_klassOop());
  instanceOop i = (instanceOop)
    CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL);
  return i;
}
// Throws if this klass cannot be instantiated via 'new': interfaces and
// abstract classes get InstantiationError/Exception, java.lang.Class gets
// IllegalAccessError/Exception.  throwError selects the Error vs the
// checked-Exception flavor.
void instanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (as_klassOop() == SystemDictionary::class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}
// Member-function shim: forms a handle for the receiver and forwards to
// the static overload.  Passes THREAD (not CHECK) so any pending exception
// simply propagates with the NULL result.
klassOop instanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  instanceKlassHandle this_oop(THREAD, as_klassOop());
  return array_klass_impl(this_oop, or_null, n, THREAD);
}
   571 klassOop instanceKlass::array_klass_impl(instanceKlassHandle this_oop, bool or_null, int n, TRAPS) {
   572   if (this_oop->array_klasses() == NULL) {
   573     if (or_null) return NULL;
   575     ResourceMark rm;
   576     JavaThread *jt = (JavaThread *)THREAD;
   577     {
   578       // Atomic creation of array_klasses
   579       MutexLocker mc(Compile_lock, THREAD);   // for vtables
   580       MutexLocker ma(MultiArray_lock, THREAD);
   582       // Check if update has already taken place
   583       if (this_oop->array_klasses() == NULL) {
   584         objArrayKlassKlass* oakk =
   585           (objArrayKlassKlass*)Universe::objArrayKlassKlassObj()->klass_part();
   587         klassOop  k = oakk->allocate_objArray_klass(1, this_oop, CHECK_NULL);
   588         this_oop->set_array_klasses(k);
   589       }
   590     }
   591   }
   592   // _this will always be set at this point
   593   objArrayKlass* oak = (objArrayKlass*)this_oop->array_klasses()->klass_part();
   594   if (or_null) {
   595     return oak->array_klass_or_null(n);
   596   }
   597   return oak->array_klass(n, CHECK_NULL);
   598 }
// Convenience overload for the 1-dimensional array klass.
klassOop instanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}
// Runs this class's <clinit>, if any.  Passes THREAD (not CHECK) so the
// caller (initialize_impl, Step 9/10) can examine the pending exception.
void instanceKlass::call_class_initializer(TRAPS) {
  instanceKlassHandle ik (THREAD, as_klassOop());
  call_class_initializer_impl(ik, THREAD);
}
// Counts class initializations for the TraceClassInitialization output.
static int call_class_initializer_impl_counter = 0;   // for debugging

// Returns this class's <clinit> method, or NULL if it declares none.
methodOop instanceKlass::class_initializer() {
  return find_method(vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
}
// Static body: looks up <clinit> and invokes it via JavaCalls.  A class
// with no <clinit> is a no-op (aside from tracing).
void instanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) {
  methodHandle h_method(THREAD, this_oop->class_initializer());
  assert(!this_oop->is_initialized(), "we cannot initialize twice");
  if (TraceClassInitialization) {
    tty->print("%d Initializing ", call_class_initializer_impl_counter++);
    this_oop->name()->print_value();
    tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_oop());
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}
// Fills *entry_for with the interpreter oop map for 'method' at 'bci',
// lazily allocating this klass's C-heap OopMapCache on first use.
void instanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Dirty read, then double-check under a lock.
  if (_oop_map_cache == NULL) {
    // Otherwise, allocate a new one.
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap
    if (_oop_map_cache == NULL) {
      _oop_map_cache = new OopMapCache();
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}
   647 bool instanceKlass::find_local_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
   648   const int n = fields()->length();
   649   for (int i = 0; i < n; i += next_offset ) {
   650     int name_index = fields()->ushort_at(i + name_index_offset);
   651     int sig_index  = fields()->ushort_at(i + signature_index_offset);
   652     symbolOop f_name = constants()->symbol_at(name_index);
   653     symbolOop f_sig  = constants()->symbol_at(sig_index);
   654     if (f_name == name && f_sig == sig) {
   655       fd->initialize(as_klassOop(), i);
   656       return true;
   657     }
   658   }
   659   return false;
   660 }
   663 void instanceKlass::field_names_and_sigs_iterate(OopClosure* closure) {
   664   const int n = fields()->length();
   665   for (int i = 0; i < n; i += next_offset ) {
   666     int name_index = fields()->ushort_at(i + name_index_offset);
   667     symbolOop name = constants()->symbol_at(name_index);
   668     closure->do_oop((oop*)&name);
   670     int sig_index  = fields()->ushort_at(i + signature_index_offset);
   671     symbolOop sig = constants()->symbol_at(sig_index);
   672     closure->do_oop((oop*)&sig);
   673   }
   674 }
   677 klassOop instanceKlass::find_interface_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
   678   const int n = local_interfaces()->length();
   679   for (int i = 0; i < n; i++) {
   680     klassOop intf1 = klassOop(local_interfaces()->obj_at(i));
   681     assert(Klass::cast(intf1)->is_interface(), "just checking type");
   682     // search for field in current interface
   683     if (instanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
   684       assert(fd->is_static(), "interface field must be static");
   685       return intf1;
   686     }
   687     // search for field in direct superinterfaces
   688     klassOop intf2 = instanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
   689     if (intf2 != NULL) return intf2;
   690   }
   691   // otherwise field lookup fails
   692   return NULL;
   693 }
   696 klassOop instanceKlass::find_field(symbolOop name, symbolOop sig, fieldDescriptor* fd) const {
   697   // search order according to newest JVM spec (5.4.3.2, p.167).
   698   // 1) search for field in current klass
   699   if (find_local_field(name, sig, fd)) {
   700     return as_klassOop();
   701   }
   702   // 2) search for field recursively in direct superinterfaces
   703   { klassOop intf = find_interface_field(name, sig, fd);
   704     if (intf != NULL) return intf;
   705   }
   706   // 3) apply field lookup recursively if superclass exists
   707   { klassOop supr = super();
   708     if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, fd);
   709   }
   710   // 4) otherwise field lookup fails
   711   return NULL;
   712 }
   715 klassOop instanceKlass::find_field(symbolOop name, symbolOop sig, bool is_static, fieldDescriptor* fd) const {
   716   // search order according to newest JVM spec (5.4.3.2, p.167).
   717   // 1) search for field in current klass
   718   if (find_local_field(name, sig, fd)) {
   719     if (fd->is_static() == is_static) return as_klassOop();
   720   }
   721   // 2) search for field recursively in direct superinterfaces
   722   if (is_static) {
   723     klassOop intf = find_interface_field(name, sig, fd);
   724     if (intf != NULL) return intf;
   725   }
   726   // 3) apply field lookup recursively if superclass exists
   727   { klassOop supr = super();
   728     if (supr != NULL) return instanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
   729   }
   730   // 4) otherwise field lookup fails
   731   return NULL;
   732 }
// Finds a field declared on this klass located at the given offset with the
// requested static-ness.  Note: the loop deliberately keeps scanning after
// an offset match whose static-ness differs — presumably a static and an
// instance field can carry the same offset value; verify before changing.
bool instanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  int length = fields()->length();
  for (int i = 0; i < length; i += next_offset) {
    if (offset_from_fields( i ) == offset) {
      fd->initialize(as_klassOop(), i);
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}
   747 bool instanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
   748   klassOop klass = as_klassOop();
   749   while (klass != NULL) {
   750     if (instanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
   751       return true;
   752     }
   753     klass = Klass::cast(klass)->super();
   754   }
   755   return false;
   756 }
   759 void instanceKlass::methods_do(void f(methodOop method)) {
   760   int len = methods()->length();
   761   for (int index = 0; index < len; index++) {
   762     methodOop m = methodOop(methods()->obj_at(index));
   763     assert(m->is_method(), "must be method");
   764     f(m);
   765   }
   766 }
   768 void instanceKlass::do_local_static_fields(FieldClosure* cl) {
   769   fieldDescriptor fd;
   770   int length = fields()->length();
   771   for (int i = 0; i < length; i += next_offset) {
   772     fd.initialize(as_klassOop(), i);
   773     if (fd.is_static()) cl->do_field(&fd);
   774   }
   775 }
   778 void instanceKlass::do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS) {
   779   instanceKlassHandle h_this(THREAD, as_klassOop());
   780   do_local_static_fields_impl(h_this, f, CHECK);
   781 }
   784 void instanceKlass::do_local_static_fields_impl(instanceKlassHandle this_oop, void f(fieldDescriptor* fd, TRAPS), TRAPS) {
   785   fieldDescriptor fd;
   786   int length = this_oop->fields()->length();
   787   for (int i = 0; i < length; i += next_offset) {
   788     fd.initialize(this_oop(), i);
   789     if (fd.is_static()) { f(&fd, CHECK); } // Do NOT remove {}! (CHECK macro expands into several statements)
   790   }
   791 }
   794 void instanceKlass::do_nonstatic_fields(FieldClosure* cl) {
   795   fieldDescriptor fd;
   796   instanceKlass* super = superklass();
   797   if (super != NULL) {
   798     super->do_nonstatic_fields(cl);
   799   }
   800   int length = fields()->length();
   801   for (int i = 0; i < length; i += next_offset) {
   802     fd.initialize(as_klassOop(), i);
   803     if (!(fd.is_static())) cl->do_field(&fd);
   804   }
   805 }
   808 void instanceKlass::array_klasses_do(void f(klassOop k)) {
   809   if (array_klasses() != NULL)
   810     arrayKlass::cast(array_klasses())->array_klasses_do(f);
   811 }
   814 void instanceKlass::with_array_klasses_do(void f(klassOop k)) {
   815   f(as_klassOop());
   816   array_klasses_do(f);
   817 }
   819 #ifdef ASSERT
   820 static int linear_search(objArrayOop methods, symbolOop name, symbolOop signature) {
   821   int len = methods->length();
   822   for (int index = 0; index < len; index++) {
   823     methodOop m = (methodOop)(methods->obj_at(index));
   824     assert(m->is_method(), "must be method");
   825     if (m->signature() == signature && m->name() == name) {
   826        return index;
   827     }
   828   }
   829   return -1;
   830 }
   831 #endif
// Convenience overload: look up (name, signature) in this klass's own
// method array (does not search superclasses or interfaces).
methodOop instanceKlass::find_method(symbolOop name, symbolOop signature) const {
  return instanceKlass::find_method(methods(), name, signature);
}
// Binary search for (name, signature) in a method array sorted by name
// (ordering defined by symbolOop::fast_compare).  Overloads share a name,
// so once a name match is found we scan linearly in both directions for
// the matching signature.  Returns NULL if no method matches.
methodOop instanceKlass::find_method(objArrayOop methods, symbolOop name, symbolOop signature) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    methodOop m = (methodOop)methods->obj_at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      // found matching name; do linear search to find matching signature
      // first, quick check for common case
      if (m->signature() == signature) return m;
      // search downwards through overloaded methods
      int i;
      for (i = mid - 1; i >= l; i--) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;   // left the run of same-named methods
        if (m->signature() == signature) return m;
      }
      // search upwards
      for (i = mid + 1; i <= h; i++) {
        methodOop m = (methodOop)methods->obj_at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;   // left the run of same-named methods
        if (m->signature() == signature) return m;
      }
      // not found
#ifdef ASSERT
      // Cross-check the miss against the O(n) reference implementation.
      int index = linear_search(methods, name, signature);
      if (index != -1) fatal1("binary search bug: should have found entry %d", index);
#endif
      return NULL;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
#ifdef ASSERT
  // Cross-check the miss against the O(n) reference implementation.
  int index = linear_search(methods, name, signature);
  if (index != -1) fatal1("binary search bug: should have found entry %d", index);
#endif
  return NULL;
}
   885 methodOop instanceKlass::uncached_lookup_method(symbolOop name, symbolOop signature) const {
   886   klassOop klass = as_klassOop();
   887   while (klass != NULL) {
   888     methodOop method = instanceKlass::cast(klass)->find_method(name, signature);
   889     if (method != NULL) return method;
   890     klass = instanceKlass::cast(klass)->super();
   891   }
   892   return NULL;
   893 }
   895 // lookup a method in all the interfaces that this class implements
   896 methodOop instanceKlass::lookup_method_in_all_interfaces(symbolOop name,
   897                                                          symbolOop signature) const {
   898   objArrayOop all_ifs = instanceKlass::cast(as_klassOop())->transitive_interfaces();
   899   int num_ifs = all_ifs->length();
   900   instanceKlass *ik = NULL;
   901   for (int i = 0; i < num_ifs; i++) {
   902     ik = instanceKlass::cast(klassOop(all_ifs->obj_at(i)));
   903     methodOop m = ik->lookup_method(name, signature);
   904     if (m != NULL) {
   905       return m;
   906     }
   907   }
   908   return NULL;
   909 }
   911 /* jni_id_for_impl for jfieldIds only */
   912 JNIid* instanceKlass::jni_id_for_impl(instanceKlassHandle this_oop, int offset) {
   913   MutexLocker ml(JfieldIdCreation_lock);
   914   // Retry lookup after we got the lock
   915   JNIid* probe = this_oop->jni_ids() == NULL ? NULL : this_oop->jni_ids()->find(offset);
   916   if (probe == NULL) {
   917     // Slow case, allocate new static field identifier
   918     probe = new JNIid(this_oop->as_klassOop(), offset, this_oop->jni_ids());
   919     this_oop->set_jni_ids(probe);
   920   }
   921   return probe;
   922 }
   925 /* jni_id_for for jfieldIds only */
   926 JNIid* instanceKlass::jni_id_for(int offset) {
   927   JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
   928   if (probe == NULL) {
   929     probe = jni_id_for_impl(this->as_klassOop(), offset);
   930   }
   931   return probe;
   932 }
   935 // Lookup or create a jmethodID.
   936 // This code can be called by the VM thread.  For this reason it is critical that
   937 // there are no blocking operations (safepoints) while the lock is held -- or a
   938 // deadlock can occur.
   939 jmethodID instanceKlass::jmethod_id_for_impl(instanceKlassHandle ik_h, methodHandle method_h) {
   940   size_t idnum = (size_t)method_h->method_idnum();
   941   jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
   942   size_t length = 0;
   943   jmethodID id = NULL;
   944   // array length stored in first element, other elements offset by one
   945   if (jmeths == NULL ||                         // If there is no jmethodID array,
   946       (length = (size_t)jmeths[0]) <= idnum ||  // or if it is too short,
   947       (id = jmeths[idnum+1]) == NULL) {         // or if this jmethodID isn't allocated
   949     // Do all the safepointing things (allocations) before grabbing the lock.
   950     // These allocations will have to be freed if they are unused.
   952     // Allocate a new array of methods.
   953     jmethodID* to_dealloc_jmeths = NULL;
   954     jmethodID* new_jmeths = NULL;
   955     if (length <= idnum) {
   956       // A new array will be needed (unless some other thread beats us to it)
   957       size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
   958       new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1);
   959       memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
   960       new_jmeths[0] =(jmethodID)size;  // array size held in the first element
   961     }
   963     // Allocate a new method ID.
   964     jmethodID to_dealloc_id = NULL;
   965     jmethodID new_id = NULL;
   966     if (method_h->is_old() && !method_h->is_obsolete()) {
   967       // The method passed in is old (but not obsolete), we need to use the current version
   968       methodOop current_method = ik_h->method_with_idnum((int)idnum);
   969       assert(current_method != NULL, "old and but not obsolete, so should exist");
   970       methodHandle current_method_h(current_method == NULL? method_h() : current_method);
   971       new_id = JNIHandles::make_jmethod_id(current_method_h);
   972     } else {
   973       // It is the current version of the method or an obsolete method,
   974       // use the version passed in
   975       new_id = JNIHandles::make_jmethod_id(method_h);
   976     }
   978     {
   979       MutexLocker ml(JmethodIdCreation_lock);
   981       // We must not go to a safepoint while holding this lock.
   982       debug_only(No_Safepoint_Verifier nosafepoints;)
   984       // Retry lookup after we got the lock
   985       jmeths = ik_h->methods_jmethod_ids_acquire();
   986       if (jmeths == NULL || (length = (size_t)jmeths[0]) <= idnum) {
   987         if (jmeths != NULL) {
   988           // We have grown the array: copy the existing entries, and delete the old array
   989           for (size_t index = 0; index < length; index++) {
   990             new_jmeths[index+1] = jmeths[index+1];
   991           }
   992           to_dealloc_jmeths = jmeths; // using the new jmeths, deallocate the old one
   993         }
   994         ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
   995       } else {
   996         id = jmeths[idnum+1];
   997         to_dealloc_jmeths = new_jmeths; // using the old jmeths, deallocate the new one
   998       }
   999       if (id == NULL) {
  1000         id = new_id;
  1001         jmeths[idnum+1] = id;  // install the new method ID
  1002       } else {
  1003         to_dealloc_id = new_id; // the new id wasn't used, mark it for deallocation
  1007     // Free up unneeded or no longer needed resources
  1008     FreeHeap(to_dealloc_jmeths);
  1009     if (to_dealloc_id != NULL) {
  1010       JNIHandles::destroy_jmethod_id(to_dealloc_id);
  1013   return id;
  1017 // Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
  1018 jmethodID instanceKlass::jmethod_id_or_null(methodOop method) {
  1019   size_t idnum = (size_t)method->method_idnum();
  1020   jmethodID* jmeths = methods_jmethod_ids_acquire();
  1021   size_t length;                                // length assigned as debugging crumb
  1022   jmethodID id = NULL;
  1023   if (jmeths != NULL &&                         // If there is a jmethodID array,
  1024       (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
  1025     id = jmeths[idnum+1];                       // Look up the id (may be NULL)
  1027   return id;
  1031 // Cache an itable index
  1032 void instanceKlass::set_cached_itable_index(size_t idnum, int index) {
  1033   int* indices = methods_cached_itable_indices_acquire();
  1034   if (indices == NULL ||                         // If there is no index array,
  1035       ((size_t)indices[0]) <= idnum) {           // or if it is too short
  1036     // Lock before we allocate the array so we don't leak
  1037     MutexLocker ml(JNICachedItableIndex_lock);
  1038     // Retry lookup after we got the lock
  1039     indices = methods_cached_itable_indices_acquire();
  1040     size_t length = 0;
  1041     // array length stored in first element, other elements offset by one
  1042     if (indices == NULL || (length = (size_t)indices[0]) <= idnum) {
  1043       size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
  1044       int* new_indices = NEW_C_HEAP_ARRAY(int, size+1);
  1045       // Copy the existing entries, if any
  1046       size_t i;
  1047       for (i = 0; i < length; i++) {
  1048         new_indices[i+1] = indices[i+1];
  1050       // Set all the rest to -1
  1051       for (i = length; i < size; i++) {
  1052         new_indices[i+1] = -1;
  1054       if (indices != NULL) {
  1055         FreeHeap(indices);  // delete any old indices
  1057       release_set_methods_cached_itable_indices(indices = new_indices);
  1059   } else {
  1060     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  1062   // This is a cache, if there is a race to set it, it doesn't matter
  1063   indices[idnum+1] = index;
  1067 // Retrieve a cached itable index
  1068 int instanceKlass::cached_itable_index(size_t idnum) {
  1069   int* indices = methods_cached_itable_indices_acquire();
  1070   if (indices != NULL && ((size_t)indices[0]) > idnum) {
  1071      // indices exist and are long enough, retrieve possible cached
  1072     return indices[idnum+1];
  1074   return -1;
  1078 //
  1079 // nmethodBucket is used to record dependent nmethods for
  1080 // deoptimization.  nmethod dependencies are actually <klass, method>
  1081 // pairs but we really only care about the klass part for purposes of
  1082 // finding nmethods which might need to be deoptimized.  Instead of
  1083 // recording the method, a count of how many times a particular nmethod
  1084 // was recorded is kept.  This ensures that any recording errors are
   1085 // noticed since an nmethod should be removed as many times as it's
  1086 // added.
  1087 //
  1088 class nmethodBucket {
  1089  private:
  1090   nmethod*       _nmethod;
  1091   int            _count;
  1092   nmethodBucket* _next;
  1094  public:
  1095   nmethodBucket(nmethod* nmethod, nmethodBucket* next) {
  1096     _nmethod = nmethod;
  1097     _next = next;
  1098     _count = 1;
  1100   int count()                             { return _count; }
  1101   int increment()                         { _count += 1; return _count; }
  1102   int decrement()                         { _count -= 1; assert(_count >= 0, "don't underflow"); return _count; }
  1103   nmethodBucket* next()                   { return _next; }
  1104   void set_next(nmethodBucket* b)         { _next = b; }
  1105   nmethod* get_nmethod()                  { return _nmethod; }
  1106 };
  1109 //
  1110 // Walk the list of dependent nmethods searching for nmethods which
  1111 // are dependent on the klassOop that was passed in and mark them for
  1112 // deoptimization.  Returns the number of nmethods found.
  1113 //
  1114 int instanceKlass::mark_dependent_nmethods(DepChange& changes) {
  1115   assert_locked_or_safepoint(CodeCache_lock);
  1116   int found = 0;
  1117   nmethodBucket* b = _dependencies;
  1118   while (b != NULL) {
  1119     nmethod* nm = b->get_nmethod();
  1120     // since dependencies aren't removed until an nmethod becomes a zombie,
  1121     // the dependency list may contain nmethods which aren't alive.
  1122     if (nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) {
  1123       if (TraceDependencies) {
  1124         ResourceMark rm;
  1125         tty->print_cr("Marked for deoptimization");
  1126         tty->print_cr("  context = %s", this->external_name());
  1127         changes.print();
  1128         nm->print();
  1129         nm->print_dependencies();
  1131       nm->mark_for_deoptimization();
  1132       found++;
  1134     b = b->next();
  1136   return found;
  1140 //
  1141 // Add an nmethodBucket to the list of dependencies for this nmethod.
  1142 // It's possible that an nmethod has multiple dependencies on this klass
  1143 // so a count is kept for each bucket to guarantee that creation and
  1144 // deletion of dependencies is consistent.
  1145 //
  1146 void instanceKlass::add_dependent_nmethod(nmethod* nm) {
  1147   assert_locked_or_safepoint(CodeCache_lock);
  1148   nmethodBucket* b = _dependencies;
  1149   nmethodBucket* last = NULL;
  1150   while (b != NULL) {
  1151     if (nm == b->get_nmethod()) {
  1152       b->increment();
  1153       return;
  1155     b = b->next();
  1157   _dependencies = new nmethodBucket(nm, _dependencies);
  1161 //
  1162 // Decrement count of the nmethod in the dependency list and remove
  1163 // the bucket competely when the count goes to 0.  This method must
  1164 // find a corresponding bucket otherwise there's a bug in the
  1165 // recording of dependecies.
  1166 //
  1167 void instanceKlass::remove_dependent_nmethod(nmethod* nm) {
  1168   assert_locked_or_safepoint(CodeCache_lock);
  1169   nmethodBucket* b = _dependencies;
  1170   nmethodBucket* last = NULL;
  1171   while (b != NULL) {
  1172     if (nm == b->get_nmethod()) {
  1173       if (b->decrement() == 0) {
  1174         if (last == NULL) {
  1175           _dependencies = b->next();
  1176         } else {
  1177           last->set_next(b->next());
  1179         delete b;
  1181       return;
  1183     last = b;
  1184     b = b->next();
  1186 #ifdef ASSERT
  1187   tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
  1188   nm->print();
  1189 #endif // ASSERT
  1190   ShouldNotReachHere();
  1194 #ifndef PRODUCT
  1195 void instanceKlass::print_dependent_nmethods(bool verbose) {
  1196   nmethodBucket* b = _dependencies;
  1197   int idx = 0;
  1198   while (b != NULL) {
  1199     nmethod* nm = b->get_nmethod();
  1200     tty->print("[%d] count=%d { ", idx++, b->count());
  1201     if (!verbose) {
  1202       nm->print_on(tty, "nmethod");
  1203       tty->print_cr(" } ");
  1204     } else {
  1205       nm->print();
  1206       nm->print_dependencies();
  1207       tty->print_cr("--- } ");
  1209     b = b->next();
  1214 bool instanceKlass::is_dependent_nmethod(nmethod* nm) {
  1215   nmethodBucket* b = _dependencies;
  1216   while (b != NULL) {
  1217     if (nm == b->get_nmethod()) {
  1218       return true;
  1220     b = b->next();
  1222   return false;
  1224 #endif //PRODUCT
  1227 void instanceKlass::follow_static_fields() {
  1228   oop* start = start_of_static_fields();
  1229   oop* end   = start + static_oop_field_size();
  1230   while (start < end) {
  1231     if (*start != NULL) {
  1232       assert(Universe::heap()->is_in_closed_subset(*start),
  1233              "should be in heap");
  1234       MarkSweep::mark_and_push(start);
  1236     start++;
  1240 #ifndef SERIALGC
  1241 void instanceKlass::follow_static_fields(ParCompactionManager* cm) {
  1242   oop* start = start_of_static_fields();
  1243   oop* end   = start + static_oop_field_size();
  1244   while (start < end) {
  1245     if (*start != NULL) {
  1246       assert(Universe::heap()->is_in(*start), "should be in heap");
  1247       PSParallelCompact::mark_and_push(cm, start);
  1249     start++;
  1252 #endif // SERIALGC
  1255 void instanceKlass::adjust_static_fields() {
  1256   oop* start = start_of_static_fields();
  1257   oop* end   = start + static_oop_field_size();
  1258   while (start < end) {
  1259     MarkSweep::adjust_pointer(start);
  1260     start++;
  1264 #ifndef SERIALGC
  1265 void instanceKlass::update_static_fields() {
  1266   oop* const start = start_of_static_fields();
  1267   oop* const beg_oop = start;
  1268   oop* const end_oop = start + static_oop_field_size();
  1269   for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
  1270     PSParallelCompact::adjust_pointer(cur_oop);
  1274 void
  1275 instanceKlass::update_static_fields(HeapWord* beg_addr, HeapWord* end_addr) {
  1276   oop* const start = start_of_static_fields();
  1277   oop* const beg_oop = MAX2((oop*)beg_addr, start);
  1278   oop* const end_oop = MIN2((oop*)end_addr, start + static_oop_field_size());
  1279   for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
  1280     PSParallelCompact::adjust_pointer(cur_oop);
  1283 #endif // SERIALGC
  1285 void instanceKlass::oop_follow_contents(oop obj) {
  1286   assert (obj!=NULL, "can't follow the content of NULL object");
  1287   obj->follow_header();
  1288   OopMapBlock* map     = start_of_nonstatic_oop_maps();
  1289   OopMapBlock* end_map = map + nonstatic_oop_map_size();
  1290   while (map < end_map) {
  1291     oop* start = obj->obj_field_addr(map->offset());
  1292     oop* end   = start + map->length();
  1293     while (start < end) {
  1294       if (*start != NULL) {
  1295         assert(Universe::heap()->is_in_closed_subset(*start),
  1296                "should be in heap");
  1297         MarkSweep::mark_and_push(start);
  1299       start++;
  1301     map++;
  1305 #ifndef SERIALGC
  1306 void instanceKlass::oop_follow_contents(ParCompactionManager* cm,
  1307                                         oop obj) {
  1308   assert (obj!=NULL, "can't follow the content of NULL object");
  1309   obj->follow_header(cm);
  1310   OopMapBlock* map     = start_of_nonstatic_oop_maps();
  1311   OopMapBlock* end_map = map + nonstatic_oop_map_size();
  1312   while (map < end_map) {
  1313     oop* start = obj->obj_field_addr(map->offset());
  1314     oop* end   = start + map->length();
  1315     while (start < end) {
  1316       if (*start != NULL) {
  1317         assert(Universe::heap()->is_in(*start), "should be in heap");
  1318         PSParallelCompact::mark_and_push(cm, start);
  1320       start++;
  1322     map++;
  1325 #endif // SERIALGC
  1327 #define invoke_closure_on(start, closure, nv_suffix) {                          \
  1328   oop obj = *(start);                                                           \
  1329   if (obj != NULL) {                                                            \
  1330     assert(Universe::heap()->is_in_closed_subset(obj), "should be in heap");    \
  1331     (closure)->do_oop##nv_suffix(start);                                        \
  1332   }                                                                             \
   1335 // closure's do_header() method dictates whether the given closure should be
  1336 // applied to the klass ptr in the object header.
  1338 #define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)           \
  1340 int instanceKlass::oop_oop_iterate##nv_suffix(oop obj,                          \
  1341                                               OopClosureType* closure) {        \
  1342   SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
  1343   /* header */                                                                  \
  1344   if (closure->do_header()) {                                                   \
  1345     obj->oop_iterate_header(closure);                                           \
  1346   }                                                                             \
  1347   /* instance variables */                                                      \
  1348   OopMapBlock* map     = start_of_nonstatic_oop_maps();                         \
  1349   OopMapBlock* const end_map = map + nonstatic_oop_map_size();                  \
  1350   const intx field_offset    = PrefetchFieldsAhead;                             \
  1351   if (field_offset > 0) {                                                       \
  1352     while (map < end_map) {                                                     \
  1353       oop* start = obj->obj_field_addr(map->offset());                          \
  1354       oop* const end   = start + map->length();                                 \
  1355       while (start < end) {                                                     \
  1356         prefetch_beyond(start, (oop*)end, field_offset,                         \
  1357                         closure->prefetch_style());                             \
  1358         SpecializationStats::                                                   \
  1359           record_do_oop_call##nv_suffix(SpecializationStats::ik);               \
  1360         invoke_closure_on(start, closure, nv_suffix);                           \
  1361         start++;                                                                \
  1362       }                                                                         \
  1363       map++;                                                                    \
  1364     }                                                                           \
  1365   } else {                                                                      \
  1366     while (map < end_map) {                                                     \
  1367       oop* start = obj->obj_field_addr(map->offset());                          \
  1368       oop* const end   = start + map->length();                                 \
  1369       while (start < end) {                                                     \
  1370         SpecializationStats::                                                   \
  1371           record_do_oop_call##nv_suffix(SpecializationStats::ik);               \
  1372         invoke_closure_on(start, closure, nv_suffix);                           \
  1373         start++;                                                                \
  1374       }                                                                         \
  1375       map++;                                                                    \
  1376     }                                                                           \
  1377   }                                                                             \
  1378   return size_helper();                                                         \
  1381 #define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)         \
  1383 int instanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,                      \
  1384                                                   OopClosureType* closure,      \
  1385                                                   MemRegion mr) {               \
  1386   SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
  1387   /* header */                                                                  \
  1388   if (closure->do_header()) {                                                   \
  1389     obj->oop_iterate_header(closure, mr);                                       \
  1390   }                                                                             \
  1391   /* instance variables */                                                      \
  1392   OopMapBlock* map     = start_of_nonstatic_oop_maps();                         \
  1393   OopMapBlock* const end_map = map + nonstatic_oop_map_size();                  \
  1394   HeapWord* bot = mr.start();                                                   \
  1395   HeapWord* top = mr.end();                                                     \
  1396   oop* start = obj->obj_field_addr(map->offset());                              \
  1397   HeapWord* end = MIN2((HeapWord*)(start + map->length()), top);                \
  1398   /* Find the first map entry that extends onto mr. */                          \
  1399   while (map < end_map && end <= bot) {                                         \
  1400     map++;                                                                      \
  1401     start = obj->obj_field_addr(map->offset());                                 \
  1402     end = MIN2((HeapWord*)(start + map->length()), top);                        \
  1403   }                                                                             \
  1404   if (map != end_map) {                                                         \
  1405     /* The current map's end is past the start of "mr".  Skip up to the first   \
  1406        entry on "mr". */                                                        \
  1407     while ((HeapWord*)start < bot) {                                            \
  1408       start++;                                                                  \
  1409     }                                                                           \
  1410     const intx field_offset = PrefetchFieldsAhead;                              \
  1411     for (;;) {                                                                  \
  1412       if (field_offset > 0) {                                                   \
  1413         while ((HeapWord*)start < end) {                                        \
  1414           prefetch_beyond(start, (oop*)end, field_offset,                       \
  1415                           closure->prefetch_style());                           \
  1416           invoke_closure_on(start, closure, nv_suffix);                         \
  1417           start++;                                                              \
  1418         }                                                                       \
  1419       } else {                                                                  \
  1420         while ((HeapWord*)start < end) {                                        \
  1421           invoke_closure_on(start, closure, nv_suffix);                         \
  1422           start++;                                                              \
  1423         }                                                                       \
  1424       }                                                                         \
  1425       /* Go to the next map. */                                                 \
  1426       map++;                                                                    \
  1427       if (map == end_map) {                                                     \
  1428         break;                                                                  \
  1429       }                                                                         \
  1430       /* Otherwise,  */                                                         \
  1431       start = obj->obj_field_addr(map->offset());                               \
  1432       if ((HeapWord*)start >= top) {                                            \
  1433         break;                                                                  \
  1434       }                                                                         \
  1435       end = MIN2((HeapWord*)(start + map->length()), top);                      \
  1436     }                                                                           \
  1437   }                                                                             \
  1438   return size_helper();                                                         \
// Instantiate oop_oop_iterate{,_m} for every closure type supplied by the
// ALL_OOP_OOP_ITERATE_CLOSURES_1/_3 macro families.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_3(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_3(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
  1447 void instanceKlass::iterate_static_fields(OopClosure* closure) {
  1448   oop* start = start_of_static_fields();
  1449   oop* end   = start + static_oop_field_size();
  1450   while (start < end) {
  1451     assert(Universe::heap()->is_in_reserved_or_null(*start), "should be in heap");
  1452     closure->do_oop(start);
  1453     start++;
  // As iterate_static_fields(OopClosure*), but the walk is clipped to the
  // oop-aligned MemRegion 'mr': the [start, end) range of static oops is
  // intersected with [mr.start(), mr.end()) before iterating.
  1457 void instanceKlass::iterate_static_fields(OopClosure* closure,
  1458                                           MemRegion mr) {
  1459   oop* start = start_of_static_fields();
  1460   oop* end   = start + static_oop_field_size();
  1461   // I gather that the static fields of reference types come first,
  1462   // hence the name of "oop_field_size", and that is what makes this safe.
  1463   assert((intptr_t)mr.start() ==
  1464          align_size_up((intptr_t)mr.start(), sizeof(oop)) &&
  1465          (intptr_t)mr.end() == align_size_up((intptr_t)mr.end(), sizeof(oop)),
  1466          "Memregion must be oop-aligned.")
  1467   if ((HeapWord*)start < mr.start()) start = (oop*)mr.start();
  1468   if ((HeapWord*)end   > mr.end())   end   = (oop*)mr.end();
  1469   while (start < end) {
  1470     invoke_closure_on(start, closure,_v);
  1471     start++;
  // MarkSweep compaction support: adjust every oop field of 'obj' (per the
  // nonstatic oop maps) plus its header, and return the object's size in words.
  1476 int instanceKlass::oop_adjust_pointers(oop obj) {
  1477   int size = size_helper();
  1479   // Compute oopmap block range. The common case is nonstatic_oop_map_size == 1.
  1480   OopMapBlock* map     = start_of_nonstatic_oop_maps();
  1481   OopMapBlock* const end_map = map + nonstatic_oop_map_size();
  1482   // Iterate over oopmap blocks
  1483   while (map < end_map) {
  1484     // Compute oop range for this block
  1485     oop* start = obj->obj_field_addr(map->offset());
  1486     oop* end   = start + map->length();
  1487     // Iterate over oops
  1488     while (start < end) {
  1489       assert(Universe::heap()->is_in_or_null(*start), "should be in heap");
  1490       MarkSweep::adjust_pointer(start);
  1491       start++;
  1493     map++;
  1496   obj->adjust_header();
  1497   return size;
  1500 #ifndef SERIALGC
  // ParallelScavenge (breadth-first) support: walk the nonstatic oop maps in
  // reverse and claim/forward each field oop that should be scavenged.
  // Only valid when the promotion manager is in breadth-first mode.
  1501 void instanceKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
  1502   assert(!pm->depth_first(), "invariant");
  1503   // Compute oopmap block range. The common case is nonstatic_oop_map_size == 1.
  1504   OopMapBlock* start_map = start_of_nonstatic_oop_maps();
  1505   OopMapBlock* map       = start_map + nonstatic_oop_map_size();
  1507   // Iterate over oopmap blocks (backwards: 'map' walks down toward 'start_map')
  1508   while (start_map < map) {
  1509     --map;
  1510     // Compute oop range for this block
  1511     oop* start = obj->obj_field_addr(map->offset());
  1512     oop* curr  = start + map->length();
  1513     // Iterate over oops (backwards within the block)
  1514     while (start < curr) {
  1515       --curr;
  1516       if (PSScavenge::should_scavenge(*curr)) {
  1517         assert(Universe::heap()->is_in(*curr), "should be in heap");
  1518         pm->claim_or_forward_breadth(curr);
  // ParallelScavenge (depth-first) counterpart of oop_copy_contents: identical
  // reverse walk over the oop maps, but claims via claim_or_forward_depth.
  // Only valid when the promotion manager is in depth-first mode.
  1524 void instanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  1525   assert(pm->depth_first(), "invariant");
  1526   // Compute oopmap block range. The common case is nonstatic_oop_map_size == 1.
  1527   OopMapBlock* start_map = start_of_nonstatic_oop_maps();
  1528   OopMapBlock* map       = start_map + nonstatic_oop_map_size();
  1530   // Iterate over oopmap blocks (backwards)
  1531   while (start_map < map) {
  1532     --map;
  1533     // Compute oop range for this block
  1534     oop* start = obj->obj_field_addr(map->offset());
  1535     oop* curr  = start + map->length();
  1536     // Iterate over oops (backwards within the block)
  1537     while (start < curr) {
  1538       --curr;
  1539       if (PSScavenge::should_scavenge(*curr)) {
  1540         assert(Universe::heap()->is_in(*curr), "should be in heap");
  1541         pm->claim_or_forward_depth(curr);
  // Parallel compaction support: adjust every nonstatic oop field of 'obj'
  // via PSParallelCompact::adjust_pointer; returns the object size in words.
  1547 int instanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  1548   // Compute oopmap block range.  The common case is nonstatic_oop_map_size==1.
  1549   OopMapBlock* map           = start_of_nonstatic_oop_maps();
  1550   OopMapBlock* const end_map = map + nonstatic_oop_map_size();
  1551   // Iterate over oopmap blocks
  1552   while (map < end_map) {
  1553     // Compute oop range for this oopmap block.
  1554     oop* const map_start = obj->obj_field_addr(map->offset());
  1555     oop* const beg_oop = map_start;
  1556     oop* const end_oop = map_start + map->length();
  1557     for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
  1558       PSParallelCompact::adjust_pointer(cur_oop);
  1560     ++map;
  1563   return size_helper();
  // Bounded variant of oop_update_pointers: each oop-map range is clamped to
  // [beg_addr, end_addr) via MAX2/MIN2 before adjusting, so only fields inside
  // that address window are updated. Returns the object size in words.
  1566 int instanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
  1567                                        HeapWord* beg_addr, HeapWord* end_addr) {
  1568   // Compute oopmap block range.  The common case is nonstatic_oop_map_size==1.
  1569   OopMapBlock* map           = start_of_nonstatic_oop_maps();
  1570   OopMapBlock* const end_map = map + nonstatic_oop_map_size();
  1571   // Iterate over oopmap blocks
  1572   while (map < end_map) {
  1573     // Compute oop range for this oopmap block, clipped to [beg_addr, end_addr).
  1574     oop* const map_start = obj->obj_field_addr(map->offset());
  1575     oop* const beg_oop = MAX2((oop*)beg_addr, map_start);
  1576     oop* const end_oop = MIN2((oop*)end_addr, map_start + map->length());
  1577     for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
  1578       PSParallelCompact::adjust_pointer(cur_oop);
  1580     ++map;
  1583   return size_helper();
  // Breadth-first scavenge of this klass's static oop fields: claim/forward
  // each static oop that should be scavenged.
  1586 void instanceKlass::copy_static_fields(PSPromotionManager* pm) {
  1587   assert(!pm->depth_first(), "invariant");
  1588   // Compute oop range
  1589   oop* start = start_of_static_fields();
  1590   oop* end   = start + static_oop_field_size();
  1591   // Iterate over oops
  1592   while (start < end) {
  1593     if (PSScavenge::should_scavenge(*start)) {
  1594       assert(Universe::heap()->is_in(*start), "should be in heap");
  1595       pm->claim_or_forward_breadth(start);
  1597     start++;
  // Depth-first counterpart of copy_static_fields(PSPromotionManager*):
  // same walk over the static oops, claiming via claim_or_forward_depth.
  1601 void instanceKlass::push_static_fields(PSPromotionManager* pm) {
  1602   assert(pm->depth_first(), "invariant");
  1603   // Compute oop range
  1604   oop* start = start_of_static_fields();
  1605   oop* end   = start + static_oop_field_size();
  1606   // Iterate over oops
  1607   while (start < end) {
  1608     if (PSScavenge::should_scavenge(*start)) {
  1609       assert(Universe::heap()->is_in(*start), "should be in heap");
  1610       pm->claim_or_forward_depth(start);
  1612     start++;
  // Parallel-compaction update of the static oop fields: adjust each non-NULL
  // static oop to its post-compaction location.
  1616 void instanceKlass::copy_static_fields(ParCompactionManager* cm) {
  1617   // Compute oop range
  1618   oop* start = start_of_static_fields();
  1619   oop* end   = start + static_oop_field_size();
  1620   // Iterate over oops
  1621   while (start < end) {
  1622     if (*start != NULL) {
  1623       assert(Universe::heap()->is_in(*start), "should be in heap");
  1624       // *start = (oop) cm->summary_data()->calc_new_pointer(*start);
  1625       PSParallelCompact::adjust_pointer(start);
  1627     start++;
  1630 #endif // SERIALGC
  1632 // This klass is alive but the implementor link is not followed/updated.
  1633 // Subklass and sibling links are handled by Klass::follow_weak_klass_links
  // With ClassUnloading enabled, dead entries are compacted out of the fixed-size
  // _implementors array (tail element overwrites the dead slot, then the loop
  // index is rerun); otherwise every slot is simply kept alive.
  1635 void instanceKlass::follow_weak_klass_links(
  1636   BoolObjectClosure* is_alive, OopClosure* keep_alive) {
  1637   assert(is_alive->do_object_b(as_klassOop()), "this oop should be live");
  1638   if (ClassUnloading) {
  1639     for (int i = 0; i < implementors_limit; i++) {
  1640       klassOop impl = _implementors[i];
  1641       if (impl == NULL)  break;  // no more in the list
  1642       if (!is_alive->do_object_b(impl)) {
  1643         // remove this guy from the list by overwriting him with the tail
  1644         int lasti = --_nof_implementors;
  1645         assert(lasti >= i && lasti < implementors_limit, "just checking");
  1646         _implementors[i] = _implementors[lasti];
  1647         _implementors[lasti] = NULL;
  1648         --i; // rerun the loop at this index
  1651   } else {
  1652     for (int i = 0; i < implementors_limit; i++) {
  1653       keep_alive->do_oop(&adr_implementors()[i]);
  1656   Klass::follow_weak_klass_links(is_alive, keep_alive);
  // CDS support: strip state that cannot be shared across VM invocations.
  // Delegates to Klass, then resets the implementor list.
  1660 void instanceKlass::remove_unshareable_info() {
  1661   Klass::remove_unshareable_info();
  1662   init_implementor();
  // File-local helper used as a methods_do() callback: clears all JVMTI
  // breakpoints set in method 'm'.
  1666 static void clear_all_breakpoints(methodOop m) {
  1667   m->clear_all_breakpoints();
  // Release all C-heap allocated side structures owned by this klass:
  // the oop-map cache, JNI field ids, cached jmethodIDs, cached itable
  // indices, nmethod dependency buckets, JVMTI breakpoint records, the
  // RedefineClasses previous-version list, and the cached class file bytes.
  // Each freed pointer is NULLed so the state is consistent afterwards.
  1671 void instanceKlass::release_C_heap_structures() {
  1672   // Deallocate oop map cache
  1673   if (_oop_map_cache != NULL) {
  1674     delete _oop_map_cache;
  1675     _oop_map_cache = NULL;
  1678   // Deallocate JNI identifiers for jfieldIDs
  1679   JNIid::deallocate(jni_ids());
  1680   set_jni_ids(NULL);
  1682   jmethodID* jmeths = methods_jmethod_ids_acquire();
  1683   if (jmeths != (jmethodID*)NULL) {
  1684     release_set_methods_jmethod_ids(NULL);
  1685     FreeHeap(jmeths);
  1688   int* indices = methods_cached_itable_indices_acquire();
  1689   if (indices != (int*)NULL) {
  1690     release_set_methods_cached_itable_indices(NULL);
  1691     FreeHeap(indices);
  1694   // release dependencies
  1695   nmethodBucket* b = _dependencies;
  1696   _dependencies = NULL;
  1697   while (b != NULL) {
  1698     nmethodBucket* next = b->next();
  1699     delete b;
  1700     b = next;
  1703   // Deallocate breakpoint records
  1704   if (breakpoints() != 0x0) {
  1705     methods_do(clear_all_breakpoints);
  1706     assert(breakpoints() == 0x0, "should have cleared breakpoints");
  1709   // deallocate information about previous versions
  1710   if (_previous_versions != NULL) {
  1711     for (int i = _previous_versions->length() - 1; i >= 0; i--) {
  1712       PreviousVersionNode * pv_node = _previous_versions->at(i);
  1713       delete pv_node;
  1715     delete _previous_versions;
  1716     _previous_versions = NULL;
  1719   // deallocate the cached class file
  1720   if (_cached_class_file_bytes != NULL) {
  1721     os::free(_cached_class_file_bytes);
  1722     _cached_class_file_bytes = NULL;
  1723     _cached_class_file_len = 0;
  // Return this klass's JVM field-descriptor form, "L<internal-name>;", in a
  // resource-allocated buffer (src_length + 3 bytes: 'L' + name + ';' + '\0').
  1727 char* instanceKlass::signature_name() const {
  1728   const char* src = (const char*) (name()->as_C_string());
  1729   const int src_length = (int)strlen(src);
  1730   char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
  1731   int src_index = 0;
  1732   int dest_index = 0;
  1733   dest[dest_index++] = 'L';
  1734   while (src_index < src_length) {
  1735     dest[dest_index++] = src[src_index++];
  1737   dest[dest_index++] = ';';
  1738   dest[dest_index] = '\0';
  1739   return dest;
  1742 // different verisons of is_same_class_package
  // Compare this klass's package against 'class2'. Object arrays are reduced
  // to their bottom element klass; type arrays get a NULL class loader.
  // Delegates to the (loader, name, loader, name) overload.
  1743 bool instanceKlass::is_same_class_package(klassOop class2) {
  1744   klassOop class1 = as_klassOop();
  1745   oop classloader1 = instanceKlass::cast(class1)->class_loader();
  1746   symbolOop classname1 = Klass::cast(class1)->name();
  1748   if (Klass::cast(class2)->oop_is_objArray()) {
  1749     class2 = objArrayKlass::cast(class2)->bottom_klass();
  1751   oop classloader2;
  1752   if (Klass::cast(class2)->oop_is_instance()) {
  1753     classloader2 = instanceKlass::cast(class2)->class_loader();
  1754   } else {
  1755     assert(Klass::cast(class2)->oop_is_typeArray(), "should be type array");
  1756     classloader2 = NULL;
  1758   symbolOop classname2 = Klass::cast(class2)->name();
  1760   return instanceKlass::is_same_class_package(classloader1, classname1,
  1761                                               classloader2, classname2);
  // Convenience overload: compare this klass's package against an explicit
  // (class loader, class name) pair via the 4-argument overload.
  1764 bool instanceKlass::is_same_class_package(oop classloader2, symbolOop classname2) {
  1765   klassOop class1 = as_klassOop();
  1766   oop classloader1 = instanceKlass::cast(class1)->class_loader();
  1767   symbolOop classname1 = Klass::cast(class1)->name();
  1769   return instanceKlass::is_same_class_package(classloader1, classname1,
  1770                                               classloader2, classname2);
  1773 // return true if two classes are in the same package, classloader
  1774 // and classname information is enough to determine a class's package
  // Two classes share a package iff their loaders are identical and the
  // text before the last '/' of each (UTF8) name matches. Array names are
  // first stripped of leading '['s and must then start with 'L'.
  1775 bool instanceKlass::is_same_class_package(oop class_loader1, symbolOop class_name1,
  1776                                           oop class_loader2, symbolOop class_name2) {
  1777   if (class_loader1 != class_loader2) {
  1778     return false;
  1779   } else {
  1780     ResourceMark rm;
  1782     // The symbolOop's are in UTF8 encoding. Since we only need to check explicitly
  1783     // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
  1784     // Otherwise, we just compare jbyte values between the strings.
  1785     jbyte *name1 = class_name1->base();
  1786     jbyte *name2 = class_name2->base();
  1788     jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
  1789     jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');
  1791     if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
  1792       // One of the two doesn't have a package.  Only return true
  1793       // if the other one also doesn't have a package.
  1794       return last_slash1 == last_slash2;
  1795     } else {
  1796       // Skip over '['s
  1797       if (*name1 == '[') {
  1798         do {
  1799           name1++;
  1800         } while (*name1 == '[');
  1801         if (*name1 != 'L') {
  1802           // Something is terribly wrong.  Shouldn't be here.
  1803           return false;
  1806       if (*name2 == '[') {
  1807         do {
  1808           name2++;
  1809         } while (*name2 == '[');
  1810         if (*name2 != 'L') {
  1811           // Something is terribly wrong.  Shouldn't be here.
  1812           return false;
  1816       // Check that package part is identical
  1817       int length1 = last_slash1 - name1;
  1818       int length2 = last_slash2 - name2;
  1820       return UTF8::equal(name1, length1, name2, length2);
  // Compute the JVM modifier flags to report for this class. If the class
  // appears as an inner class in its own InnerClasses attribute, the flags
  // recorded there are used instead of the class-level access flags.
  // ACC_SUPER is always stripped and the result is masked to the flags
  // writable per the JVM spec (JVM_ACC_WRITTEN_FLAGS).
  1826 jint instanceKlass::compute_modifier_flags(TRAPS) const {
  1827   klassOop k = as_klassOop();
  1828   jint access = access_flags().as_int();
  1830   // But check if it happens to be member class.
  1831   typeArrayOop inner_class_list = inner_classes();
  1832   int length = (inner_class_list == NULL) ? 0 : inner_class_list->length();
  1833   assert (length % instanceKlass::inner_class_next_offset == 0, "just checking");
  1834   if (length > 0) {
  1835     typeArrayHandle inner_class_list_h(THREAD, inner_class_list);
  1836     instanceKlassHandle ik(THREAD, k);
  1837     for (int i = 0; i < length; i += instanceKlass::inner_class_next_offset) {
  1838       int ioff = inner_class_list_h->ushort_at(
  1839                       i + instanceKlass::inner_class_inner_class_info_offset);
  1841       // Inner class attribute can be zero, skip it.
  1842       // Strange but true:  JVM spec. allows null inner class refs.
  1843       if (ioff == 0) continue;
  1845       // only look at classes that are already loaded
  1846       // since we are looking for the flags for our self.
  1847       symbolOop inner_name = ik->constants()->klass_name_at(ioff);
  1848       if ((ik->name() == inner_name)) {
  1849         // This is really a member class.
  1850         access = inner_class_list_h->ushort_at(i + instanceKlass::inner_class_access_flags_offset);
  1851         break;
  1855   // Remember to strip ACC_SUPER bit
  1856   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
  // Report this class's state as a JVMTI_CLASS_STATUS_* bit mask:
  // linked => VERIFIED|PREPARED, initialized => INITIALIZED (implies linked),
  // error state => ERROR.
  1859 jint instanceKlass::jvmti_class_status() const {
  1860   jint result = 0;
  1862   if (is_linked()) {
  1863     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
  1866   if (is_initialized()) {
  1867     assert(is_linked(), "Class status is not consistent");
  1868     result |= JVMTI_CLASS_STATUS_INITIALIZED;
  1870   if (is_in_error_state()) {
  1871     result |= JVMTI_CLASS_STATUS_ERROR;
  1873   return result;
  // Resolve the method at slot 'index' of the itable section belonging to
  // interface 'holder'. Throws IncompatibleClassChangeError if this class
  // does not implement 'holder', and AbstractMethodError if the slot is
  // empty (method not implemented).
  1876 methodOop instanceKlass::method_at_itable(klassOop holder, int index, TRAPS) {
  1877   itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  1878   int method_table_offset_in_words = ioe->offset()/wordSize;
  1879   int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
  1880                        / itableOffsetEntry::size();
  1882   for (int cnt = 0 ; ; cnt ++, ioe ++) {
  1883     // If the interface isn't implemented by the reciever class,
  1884     // the VM should throw IncompatibleClassChangeError.
  1885     if (cnt >= nof_interfaces) {
  1886       THROW_OOP_0(vmSymbols::java_lang_IncompatibleClassChangeError());
  1889     klassOop ik = ioe->interface_klass();
  1890     if (ik == holder) break;
  1893   itableMethodEntry* ime = ioe->first_method_entry(as_klassOop());
  1894   methodOop m = ime[index].method();
  1895   if (m == NULL) {
  1896     THROW_OOP_0(vmSymbols::java_lang_AbstractMethodError());
  1898   return m;
  1901 // On-stack replacement stuff
  // Prepend an OSR nmethod to this klass's OSR list. The list is guarded by
  // OsrList_lock, taken without a safepoint check since the critical region
  // is short and non-blocking.
  1902 void instanceKlass::add_osr_nmethod(nmethod* n) {
  1903   // only one compilation can be active
  1904   NEEDS_CLEANUP
  1905   // This is a short non-blocking critical region, so the no safepoint check is ok.
  1906   OsrList_lock->lock_without_safepoint_check();
  1907   assert(n->is_osr_method(), "wrong kind of nmethod");
  1908   n->set_link(osr_nmethods_head());
  1909   set_osr_nmethods_head(n);
  1910   // Remember to unlock again
  1911   OsrList_lock->unlock();
  // Unlink 'n' from this klass's singly-linked OSR nmethod list (no-op if it
  // is not present) and clear its link field. Guarded by OsrList_lock.
  1915 void instanceKlass::remove_osr_nmethod(nmethod* n) {
  1916   // This is a short non-blocking critical region, so the no safepoint check is ok.
  1917   OsrList_lock->lock_without_safepoint_check();
  1918   assert(n->is_osr_method(), "wrong kind of nmethod");
  1919   nmethod* last = NULL;
  1920   nmethod* cur  = osr_nmethods_head();
  1921   // Search for match
  1922   while(cur != NULL && cur != n) {
  1923     last = cur;
  1924     cur = cur->link();
  1926   if (cur == n) {
  1927     if (last == NULL) {
  1928       // Remove first element
  1929       set_osr_nmethods_head(osr_nmethods_head()->link());
  1930     } else {
  1931       last->set_link(cur->link());
  1934   n->set_link(NULL);
  1935   // Remember to unlock again
  1936   OsrList_lock->unlock();
  // Find an OSR nmethod for method 'm' at bytecode index 'bci'; a bci of
  // InvocationEntryBci matches any entry bci. Returns NULL if none exists.
  // Guarded by OsrList_lock (released on both exit paths).
  1939 nmethod* instanceKlass::lookup_osr_nmethod(const methodOop m, int bci) const {
  1940   // This is a short non-blocking critical region, so the no safepoint check is ok.
  1941   OsrList_lock->lock_without_safepoint_check();
  1942   nmethod* osr = osr_nmethods_head();
  1943   while (osr != NULL) {
  1944     assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
  1945     if (osr->method() == m &&
  1946         (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
  1947       // Found a match - return it.
  1948       OsrList_lock->unlock();
  1949       return osr;
  1951     osr = osr->link();
  1953   OsrList_lock->unlock();
  1954   return NULL;
  1957 // -----------------------------------------------------------------------------------------------------
  1958 #ifndef PRODUCT
  1960 // Printing
  // Print one field descriptor. When the field's static-ness matches the
  // closure's configuration (_obj == NULL means statics), print the
  // descriptor itself; otherwise print the field's value in _obj.
  1962 void FieldPrinter::do_field(fieldDescriptor* fd) {
  1963    if (fd->is_static() == (_obj == NULL)) {
  1964      _st->print("   - ");
  1965      fd->print_on(_st);
  1966      _st->cr();
  1967    } else {
  1968      fd->print_on_for(_st, _obj);
  1969      _st->cr();
  // Debug printing of an instance: after the generic Klass printing, Strings
  // get their text printed (with bounds-checked value/offset/length, stopping
  // there unless WizardMode); then all nonstatic fields are dumped; and
  // java.lang.Class mirrors additionally print their fake entries (mirrored
  // klass, resolved constructor, array klass).
  1974 void instanceKlass::oop_print_on(oop obj, outputStream* st) {
  1975   Klass::oop_print_on(obj, st);
  1977   if (as_klassOop() == SystemDictionary::string_klass()) {
  1978     typeArrayOop value  = java_lang_String::value(obj);
  1979     juint        offset = java_lang_String::offset(obj);
  1980     juint        length = java_lang_String::length(obj);
  1981     if (value != NULL &&
  1982         value->is_typeArray() &&
  1983         offset          <= (juint) value->length() &&
  1984         offset + length <= (juint) value->length()) {
  1985       st->print("string: ");
  1986       Handle h_obj(obj);
  1987       java_lang_String::print(h_obj, st);
  1988       st->cr();
  1989       if (!WizardMode)  return;  // that is enough
  1993   st->print_cr("fields:");
  1994   FieldPrinter print_nonstatic_field(st, obj);
  1995   do_nonstatic_fields(&print_nonstatic_field);
  1997   if (as_klassOop() == SystemDictionary::class_klass()) {
  1998     klassOop mirrored_klass = java_lang_Class::as_klassOop(obj);
  1999     st->print("   - fake entry for mirror: ");
  2000     mirrored_klass->print_value_on(st);
  2001     st->cr();
  2002     st->print("   - fake entry resolved_constructor: ");
  2003     methodOop ctor = java_lang_Class::resolved_constructor(obj);
  2004     ctor->print_value_on(st);
  2005     klassOop array_klass = java_lang_Class::array_klass(obj);
  2006     st->print("   - fake entry for array: ");
  2007     array_klass->print_value_on(st);
  2008     st->cr();
  2009     st->cr();
  // Short-form printing: "a <class name><address>".
  2013 void instanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  2014   st->print("a ");
  2015   name()->print_value_on(st);
  2016   obj->print_address_on(st);
  2019 #endif
  // For instance klasses the internal name is the same as the external one.
  2021 const char* instanceKlass::internal_name() const {
  2022   return external_name();
  2027 // Verification
  // Verification closure: checks that each field address is inside the heap
  // and that the value it holds is an oop or NULL; dumps state and aborts
  // via guarantee(false) on failure.
  2029 class VerifyFieldClosure: public OopClosure {
  2030  public:
  2031   void do_oop(oop* p) {
  2032     guarantee(Universe::heap()->is_in_closed_subset(p), "should be in heap");
  2033     if (!(*p)->is_oop_or_null()) {
  2034       tty->print_cr("Failed: %p -> %p",p,(address)*p);
  2035       Universe::print();
  2036       guarantee(false, "boom");
  2039 };
  // Verify an instance: generic Klass verification plus a VerifyFieldClosure
  // pass over every oop field of the object.
  2042 void instanceKlass::oop_verify_on(oop obj, outputStream* st) {
  2043   Klass::oop_verify_on(obj, st);
  2044   VerifyFieldClosure blk;
  2045   oop_oop_iterate(obj, &blk);
  2048 #ifndef PRODUCT
  // Intentionally disabled verification of java.lang.Class's fake oop-field
  // maps: the unconditional 'return' below makes everything after it dead
  // code, kept for reference (see the in-body comments for why).
  2050 void instanceKlass::verify_class_klass_nonstatic_oop_maps(klassOop k) {
  2051   // This verification code is disabled.  JDK_Version::is_gte_jdk14x_version()
  2052   // cannot be called since this function is called before the VM is
  2053   // able to determine what JDK version is running with.
  2054   // The check below always is false since 1.4.
  2055   return;
  2057   // This verification code temporarily disabled for the 1.4
  2058   // reflection implementation since java.lang.Class now has
  2059   // Java-level instance fields. Should rewrite this to handle this
  2060   // case.
  2061   if (!(JDK_Version::is_gte_jdk14x_version() && UseNewReflection)) {
  2062     // Verify that java.lang.Class instances have a fake oop field added.
  2063     instanceKlass* ik = instanceKlass::cast(k);
  2065     // Check that we have the right class
  2066     static bool first_time = true;
  2067     guarantee(k == SystemDictionary::class_klass() && first_time, "Invalid verify of maps");
  2068     first_time = false;
  2069     const int extra = java_lang_Class::number_of_fake_oop_fields;
  2070     guarantee(ik->nonstatic_field_size() == extra, "just checking");
  2071     guarantee(ik->nonstatic_oop_map_size() == 1, "just checking");
  2072     guarantee(ik->size_helper() == align_object_size(instanceOopDesc::header_size() + extra), "just checking");
  2074     // Check that the map is (2,extra)
  2075     int offset = java_lang_Class::klass_offset;
  2077     OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
  2078     guarantee(map->offset() == offset && map->length() == extra, "just checking");
  2082 #endif
  2085 /* JNIid class for jfieldIDs only */
  // Construct a JNIid node for the field at 'offset' in 'holder', linked in
  // front of 'next'. The static-field flag is debug-only bookkeeping.
  2086  JNIid::JNIid(klassOop holder, int offset, JNIid* next) {
  2087    _holder = holder;
  2088    _offset = offset;
  2089    _next = next;
  2090    debug_only(_is_static_field_id = false;)
  // Linear search of the JNIid chain (starting at 'this') for the node whose
  // field offset equals 'offset'; NULL if absent.
  2094  JNIid* JNIid::find(int offset) {
  2095    JNIid* current = this;
  2096    while (current != NULL) {
  2097      if (current->offset() == offset) return current;
  2098      current = current->next();
  2100    return NULL;
  // GC support: apply 'f' to the holder-klass slot of every node in the chain.
  2103 void JNIid::oops_do(OopClosure* f) {
  2104   for (JNIid* cur = this; cur != NULL; cur = cur->next()) {
  2105     f->do_oop(cur->holder_addr());
  // Delete the whole JNIid chain starting at 'current' (safe on NULL).
  2109 void JNIid::deallocate(JNIid* current) {
  2110    while (current != NULL) {
  2111      JNIid* next = current->next();
  2112      delete current;
  2113      current = next;
  // Verify every node in the chain belongs to 'holder'; in debug builds also
  // check that static-field ids fall inside the holder's static field area
  // [offset_of_static_fields, + static_field_size * wordSize).
  2118  void JNIid::verify(klassOop holder) {
  2119    int first_field_offset  = instanceKlass::cast(holder)->offset_of_static_fields();
  2120    int end_field_offset;
  2121    end_field_offset = first_field_offset + (instanceKlass::cast(holder)->static_field_size() * wordSize);
  2123    JNIid* current = this;
  2124    while (current != NULL) {
  2125      guarantee(current->holder() == holder, "Invalid klass in JNIid");
  2126  #ifdef ASSERT
  2127      int o = current->offset();
  2128      if (current->is_static_field_id()) {
  2129        guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
  2131  #endif
  2132      current = current->next();
  2137 #ifdef ASSERT
  // Debug-only checked setter for the class initialization state: transitions
  // must move monotonically forward (shared classes may stay in place), with
  // 'allocated' allowed as a reset target.
  2138   void instanceKlass::set_init_state(ClassState state) {
  2139     bool good_state = as_klassOop()->is_shared() ? (_init_state <= state)
  2140                                                  : (_init_state < state);
  2141     assert(good_state || state == allocated, "illegal state transition");
  2142     _init_state = state;
  2144 #endif
  2147 // RedefineClasses() support for previous versions:
  2149 // Add an information node that contains weak references to the
  2150 // interesting parts of the previous version of the_class.
  2151 void instanceKlass::add_previous_version(instanceKlassHandle ikh,
  2152        BitMap * emcp_methods, int emcp_method_count) {
  2153   assert(Thread::current()->is_VM_thread(),
  2154     "only VMThread can add previous versions");
  2156   if (_previous_versions == NULL) {
  2157     // This is the first previous version so make some space.
  2158     // Start with 2 elements under the assumption that the class
  2159     // won't be redefined much.
  2160     _previous_versions =  new (ResourceObj::C_HEAP)
  2161                             GrowableArray<PreviousVersionNode *>(2, true);
  2164   // RC_TRACE macro has an embedded ResourceMark
  2165   RC_TRACE(0x00000100, ("adding previous version ref for %s @%d, EMCP_cnt=%d",
  2166     ikh->external_name(), _previous_versions->length(), emcp_method_count));
  2167   constantPoolHandle cp_h(ikh->constants());
  2168   jweak cp_ref = JNIHandles::make_weak_global(cp_h);
  2169   PreviousVersionNode * pv_node = NULL;
  2170   objArrayOop old_methods = ikh->methods();
  2172   if (emcp_method_count == 0) {
  2173     pv_node = new PreviousVersionNode(cp_ref, NULL);
  2174     RC_TRACE(0x00000400,
  2175       ("add: all methods are obsolete; flushing any EMCP weak refs"));
  2176   } else {
  2177     int local_count = 0;
  2178     GrowableArray<jweak>* method_refs = new (ResourceObj::C_HEAP)
  2179       GrowableArray<jweak>(emcp_method_count, true);
  2180     for (int i = 0; i < old_methods->length(); i++) {
  2181       if (emcp_methods->at(i)) {
  2182         // this old method is EMCP so save a weak ref
  2183         methodOop old_method = (methodOop) old_methods->obj_at(i);
  2184         methodHandle old_method_h(old_method);
  2185         jweak method_ref = JNIHandles::make_weak_global(old_method_h);
  2186         method_refs->append(method_ref);
  2187         if (++local_count >= emcp_method_count) {
  2188           // no more EMCP methods so bail out now
  2189           break;
  2193     pv_node = new PreviousVersionNode(cp_ref, method_refs);
  2196   _previous_versions->append(pv_node);
  2198   // Using weak references allows the interesting parts of previous
  2199   // classes to be GC'ed when they are no longer needed. Since the
  2200   // caller is the VMThread and we are at a safepoint, this is a good
  2201   // time to clear out unused weak references.
  2203   RC_TRACE(0x00000400, ("add: previous version length=%d",
  2204     _previous_versions->length()));
  2206   // skip the last entry since we just added it
  2207   for (int i = _previous_versions->length() - 2; i >= 0; i--) {
  2208     // check the previous versions array for a GC'ed weak refs
  2209     pv_node = _previous_versions->at(i);
  2210     cp_ref = pv_node->prev_constant_pool();
  2211     assert(cp_ref != NULL, "weak cp ref was unexpectedly cleared");
  2212     if (cp_ref == NULL) {
  2213       delete pv_node;
  2214       _previous_versions->remove_at(i);
  2215       // Since we are traversing the array backwards, we don't have to
  2216       // do anything special with the index.
  2217       continue;  // robustness
  2220     constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  2221     if (cp == NULL) {
  2222       // this entry has been GC'ed so remove it
  2223       delete pv_node;
  2224       _previous_versions->remove_at(i);
  2225       // Since we are traversing the array backwards, we don't have to
  2226       // do anything special with the index.
  2227       continue;
  2228     } else {
  2229       RC_TRACE(0x00000400, ("add: previous version @%d is alive", i));
  2232     GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
  2233     if (method_refs != NULL) {
  2234       RC_TRACE(0x00000400, ("add: previous methods length=%d",
  2235         method_refs->length()));
  2236       for (int j = method_refs->length() - 1; j >= 0; j--) {
  2237         jweak method_ref = method_refs->at(j);
  2238         assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
  2239         if (method_ref == NULL) {
  2240           method_refs->remove_at(j);
  2241           // Since we are traversing the array backwards, we don't have to
  2242           // do anything special with the index.
  2243           continue;  // robustness
  2246         methodOop method = (methodOop)JNIHandles::resolve(method_ref);
  2247         if (method == NULL || emcp_method_count == 0) {
  2248           // This method entry has been GC'ed or the current
  2249           // RedefineClasses() call has made all methods obsolete
  2250           // so remove it.
  2251           JNIHandles::destroy_weak_global(method_ref);
  2252           method_refs->remove_at(j);
  2253         } else {
  2254           // RC_TRACE macro has an embedded ResourceMark
  2255           RC_TRACE(0x00000400,
  2256             ("add: %s(%s): previous method @%d in version @%d is alive",
  2257             method->name()->as_C_string(), method->signature()->as_C_string(),
  2258             j, i));
  2264   int obsolete_method_count = old_methods->length() - emcp_method_count;
  2266   if (emcp_method_count != 0 && obsolete_method_count != 0 &&
  2267       _previous_versions->length() > 1) {
  2268     // We have a mix of obsolete and EMCP methods. If there is more
  2269     // than the previous version that we just added, then we have to
  2270     // clear out any matching EMCP method entries the hard way.
  2271     int local_count = 0;
  2272     for (int i = 0; i < old_methods->length(); i++) {
  2273       if (!emcp_methods->at(i)) {
  2274         // only obsolete methods are interesting
  2275         methodOop old_method = (methodOop) old_methods->obj_at(i);
  2276         symbolOop m_name = old_method->name();
  2277         symbolOop m_signature = old_method->signature();
  2279         // skip the last entry since we just added it
  2280         for (int j = _previous_versions->length() - 2; j >= 0; j--) {
  2281           // check the previous versions array for a GC'ed weak refs
  2282           pv_node = _previous_versions->at(j);
  2283           cp_ref = pv_node->prev_constant_pool();
  2284           assert(cp_ref != NULL, "weak cp ref was unexpectedly cleared");
  2285           if (cp_ref == NULL) {
  2286             delete pv_node;
  2287             _previous_versions->remove_at(j);
  2288             // Since we are traversing the array backwards, we don't have to
  2289             // do anything special with the index.
  2290             continue;  // robustness
  2293           constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  2294           if (cp == NULL) {
  2295             // this entry has been GC'ed so remove it
  2296             delete pv_node;
  2297             _previous_versions->remove_at(j);
  2298             // Since we are traversing the array backwards, we don't have to
  2299             // do anything special with the index.
  2300             continue;
  2303           GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
  2304           if (method_refs == NULL) {
  2305             // We have run into a PreviousVersion generation where
  2306             // all methods were made obsolete during that generation's
  2307             // RedefineClasses() operation. At the time of that
  2308             // operation, all EMCP methods were flushed so we don't
  2309             // have to go back any further.
  2310             //
  2311             // A NULL method_refs is different than an empty method_refs.
  2312             // We cannot infer any optimizations about older generations
  2313             // from an empty method_refs for the current generation.
  2314             break;
  2317           for (int k = method_refs->length() - 1; k >= 0; k--) {
  2318             jweak method_ref = method_refs->at(k);
  2319             assert(method_ref != NULL,
  2320               "weak method ref was unexpectedly cleared");
  2321             if (method_ref == NULL) {
  2322               method_refs->remove_at(k);
  2323               // Since we are traversing the array backwards, we don't
  2324               // have to do anything special with the index.
  2325               continue;  // robustness
  2328             methodOop method = (methodOop)JNIHandles::resolve(method_ref);
  2329             if (method == NULL) {
  2330               // this method entry has been GC'ed so skip it
  2331               JNIHandles::destroy_weak_global(method_ref);
  2332               method_refs->remove_at(k);
  2333               continue;
  2336             if (method->name() == m_name &&
  2337                 method->signature() == m_signature) {
  2338               // The current RedefineClasses() call has made all EMCP
  2339               // versions of this method obsolete so mark it as obsolete
  2340               // and remove the weak ref.
  2341               RC_TRACE(0x00000400,
  2342                 ("add: %s(%s): flush obsolete method @%d in version @%d",
  2343                 m_name->as_C_string(), m_signature->as_C_string(), k, j));
  2345               method->set_is_obsolete();
  2346               JNIHandles::destroy_weak_global(method_ref);
  2347               method_refs->remove_at(k);
  2348               break;
  2352           // The previous loop may not find a matching EMCP method, but
  2353           // that doesn't mean that we can optimize and not go any
  2354           // further back in the PreviousVersion generations. The EMCP
  2355           // method for this generation could have already been GC'ed,
  2356           // but there still may be an older EMCP method that has not
  2357           // been GC'ed.
  2360         if (++local_count >= obsolete_method_count) {
  2361           // no more obsolete methods so bail out now
  2362           break;
  2367 } // end add_previous_version()
  2370 // Determine if instanceKlass has a previous version.
  2371 bool instanceKlass::has_previous_version() const {
  2372   if (_previous_versions == NULL) {
  2373     // no previous versions array so answer is easy
  2374     return false;
  2377   for (int i = _previous_versions->length() - 1; i >= 0; i--) {
  2378     // Check the previous versions array for an info node that hasn't
  2379     // been GC'ed
  2380     PreviousVersionNode * pv_node = _previous_versions->at(i);
  2382     jweak cp_ref = pv_node->prev_constant_pool();
  2383     assert(cp_ref != NULL, "weak reference was unexpectedly cleared");
  2384     if (cp_ref == NULL) {
  2385       continue;  // robustness
  2388     constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  2389     if (cp != NULL) {
  2390       // we have at least one previous version
  2391       return true;
  2394     // We don't have to check the method refs. If the constant pool has
  2395     // been GC'ed then so have the methods.
  2398   // all of the underlying nodes' info has been GC'ed
  2399   return false;
  2400 } // end has_previous_version()
  2402 methodOop instanceKlass::method_with_idnum(int idnum) {
  2403   methodOop m = NULL;
  2404   if (idnum < methods()->length()) {
  2405     m = (methodOop) methods()->obj_at(idnum);
  2407   if (m == NULL || m->method_idnum() != idnum) {
  2408     for (int index = 0; index < methods()->length(); ++index) {
  2409       m = (methodOop) methods()->obj_at(index);
  2410       if (m->method_idnum() == idnum) {
  2411         return m;
  2415   return m;
  2419 // Set the annotation at 'idnum' to 'anno'.
  2420 // We don't want to create or extend the array if 'anno' is NULL, since that is the
  2421 // default value.  However, if the array exists and is long enough, we must set NULL values.
  2422 void instanceKlass::set_methods_annotations_of(int idnum, typeArrayOop anno, objArrayOop* md_p) {
  2423   objArrayOop md = *md_p;
  2424   if (md != NULL && md->length() > idnum) {
  2425     md->obj_at_put(idnum, anno);
  2426   } else if (anno != NULL) {
  2427     // create the array
  2428     int length = MAX2(idnum+1, (int)_idnum_allocated_count);
  2429     md = oopFactory::new_system_objArray(length, Thread::current());
  2430     if (*md_p != NULL) {
  2431       // copy the existing entries
  2432       for (int index = 0; index < (*md_p)->length(); index++) {
  2433         md->obj_at_put(index, (*md_p)->obj_at(index));
  2436     set_annotations(md, md_p);
  2437     md->obj_at_put(idnum, anno);
  2438   } // if no array and idnum isn't included there is nothing to do
  2441 // Construct a PreviousVersionNode entry for the array hung off
  2442 // the instanceKlass.
  2443 PreviousVersionNode::PreviousVersionNode(jweak prev_constant_pool,
  2444   GrowableArray<jweak>* prev_EMCP_methods) {
  2446   _prev_constant_pool = prev_constant_pool;
  2447   _prev_EMCP_methods = prev_EMCP_methods;
  2451 // Destroy a PreviousVersionNode
  2452 PreviousVersionNode::~PreviousVersionNode() {
  2453   if (_prev_constant_pool != NULL) {
  2454     JNIHandles::destroy_weak_global(_prev_constant_pool);
  2457   if (_prev_EMCP_methods != NULL) {
  2458     for (int i = _prev_EMCP_methods->length() - 1; i >= 0; i--) {
  2459       jweak method_ref = _prev_EMCP_methods->at(i);
  2460       if (method_ref != NULL) {
  2461         JNIHandles::destroy_weak_global(method_ref);
  2464     delete _prev_EMCP_methods;
  2469 // Construct a PreviousVersionInfo entry
  2470 PreviousVersionInfo::PreviousVersionInfo(PreviousVersionNode *pv_node) {
  2471   _prev_constant_pool_handle = constantPoolHandle();  // NULL handle
  2472   _prev_EMCP_method_handles = NULL;
  2474   jweak cp_ref = pv_node->prev_constant_pool();
  2475   assert(cp_ref != NULL, "weak constant pool ref was unexpectedly cleared");
  2476   if (cp_ref == NULL) {
  2477     return;  // robustness
  2480   constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
  2481   if (cp == NULL) {
  2482     // Weak reference has been GC'ed. Since the constant pool has been
  2483     // GC'ed, the methods have also been GC'ed.
  2484     return;
  2487   // make the constantPoolOop safe to return
  2488   _prev_constant_pool_handle = constantPoolHandle(cp);
  2490   GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
  2491   if (method_refs == NULL) {
  2492     // the instanceKlass did not have any EMCP methods
  2493     return;
  2496   _prev_EMCP_method_handles = new GrowableArray<methodHandle>(10);
  2498   int n_methods = method_refs->length();
  2499   for (int i = 0; i < n_methods; i++) {
  2500     jweak method_ref = method_refs->at(i);
  2501     assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
  2502     if (method_ref == NULL) {
  2503       continue;  // robustness
  2506     methodOop method = (methodOop)JNIHandles::resolve(method_ref);
  2507     if (method == NULL) {
  2508       // this entry has been GC'ed so skip it
  2509       continue;
  2512     // make the methodOop safe to return
  2513     _prev_EMCP_method_handles->append(methodHandle(method));
// Destroy a PreviousVersionInfo.
// The body is intentionally empty:
PreviousVersionInfo::~PreviousVersionInfo() {
  // Since _prev_EMCP_method_handles is not C-heap allocated, we
  // don't have to delete it. The Handles it contains are cleaned up
  // elsewhere (see ~PreviousVersionWalker).
  2525 // Construct a helper for walking the previous versions array
  2526 PreviousVersionWalker::PreviousVersionWalker(instanceKlass *ik) {
  2527   _previous_versions = ik->previous_versions();
  2528   _current_index = 0;
  2529   // _hm needs no initialization
  2530   _current_p = NULL;
// Destroy a PreviousVersionWalker
PreviousVersionWalker::~PreviousVersionWalker() {
  // Delete the current info just in case the caller didn't walk to
  // the end of the previous versions list. No harm if _current_p is
  // already NULL (deleting a NULL pointer is a no-op).
  delete _current_p;

  // When _hm is destroyed, all the Handles returned in
  // PreviousVersionInfo objects will be destroyed.
  // Also, after this destructor is finished it will be
  // safe to delete the GrowableArray allocated in the
  // PreviousVersionInfo objects.
  2549 // Return the interesting information for the next previous version
  2550 // of the klass. Returns NULL if there are no more previous versions.
  2551 PreviousVersionInfo* PreviousVersionWalker::next_previous_version() {
  2552   if (_previous_versions == NULL) {
  2553     // no previous versions so nothing to return
  2554     return NULL;
  2557   delete _current_p;  // cleanup the previous info for the caller
  2558   _current_p = NULL;  // reset to NULL so we don't delete same object twice
  2560   int length = _previous_versions->length();
  2562   while (_current_index < length) {
  2563     PreviousVersionNode * pv_node = _previous_versions->at(_current_index++);
  2564     PreviousVersionInfo * pv_info = new (ResourceObj::C_HEAP)
  2565                                           PreviousVersionInfo(pv_node);
  2567     constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
  2568     if (cp_h.is_null()) {
  2569       delete pv_info;
  2571       // The underlying node's info has been GC'ed so try the next one.
  2572       // We don't have to check the methods. If the constant pool has
  2573       // GC'ed then so have the methods.
  2574       continue;
  2577     // Found a node with non GC'ed info so return it. The caller will
  2578     // need to delete pv_info when they are done with it.
  2579     _current_p = pv_info;
  2580     return pv_info;
  2583   // all of the underlying nodes' info has been GC'ed
  2584   return NULL;
  2585 } // end next_previous_version()

mercurial