src/share/vm/runtime/jniHandles.cpp

author:      sbohne
date:        Tue, 04 Mar 2008 09:44:24 -0500
changeset:   493:7ee622712fcf
parent:      435:a61af66fc99e
child:       548:ba764ed4b6f2
permissions: -rw-r--r--

6666698: EnableBiasedLocking with BiasedLockingStartupDelay can block Watcher thread
Summary: Enqueue VM_EnableBiasedLocking operation asynchronously
Reviewed-by: never, xlu, kbr, acorn
/*
 * Copyright 1998-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_jniHandles.cpp.incl"


JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}
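

// Illustrative usage sketch (not part of the original sources): a JNI entry
// point that returns an object wraps the raw oop in a local handle before
// handing it back to native code, since raw oops are not safe to hold across
// a GC, e.g.
//
//   oop result = ...;                           // raw heap pointer
//   return JNIHandles::make_local(env, result);
//
// The returned jobject stays valid until the enclosing native frame (or a
// frame pushed with PushLocalFrame) is popped.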


jobject JNIHandles::make_global(Handle obj) {
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
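
// Note on lifetime: a global handle keeps its referent alive until
// destroy_global() is called, because _global_handles is scanned as a strong
// root in JNIHandles::oops_do below. A weak global handle does not: it is
// only visited by weak_oops_do, which clears the handle to NULL once the
// referent is no longer otherwise reachable.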


jmethodID JNIHandles::make_jmethod_id(methodHandle mh) {
  return (jmethodID) make_weak_global(mh);
}


void JNIHandles::change_method_associated_with_jmethod_id(jmethodID jmid, methodHandle mh) {
  MutexLocker ml(JNIGlobalHandle_lock); // Is this necessary?
  Handle obj = (Handle)mh;
  oop* jobj = (oop*)jmid;
  *jobj = obj();
}


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_jmethod_id(jmethodID mid) {
  destroy_weak_global((jobject)mid);
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  klassOop k      = SystemDictionary::object_klass();
  _deleted_handle = instanceKlass::cast(k)->allocate_permanent_instance(CATCH);
}


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top-level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}
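
// The address range test above relies on the stack growing downwards (HotSpot
// assumes downward-growing stacks): handles created in native frames above
// the last Java frame have addresses below stack_base() and at or above
// last_Java_sp(), so checking the whole interval is a conservative but simple
// approximation.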


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}


long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}


long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};


class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  void do_oop(oop* unused) {
    _count++;
  }
  int count() { return _count; }
};


// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}


class VerifyHandleClosure: public OopClosure {
public:
  void do_oop(oop* root) {
    (*root)->verify();
  }
};


void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}


void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}


JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
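
// To summarize the lookup order above: a block is taken from (1) the calling
// thread's free_handle_block list, which requires no locking, otherwise from
// (2) the global _block_free_list under JNIHandleBlockFreeList_lock (acquired
// without a safepoint check to avoid the deadlock described above), and only
// then (3) freshly allocated with new JNIHandleBlock().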


void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
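
// Note: blocks released on behalf of a live thread are parked on that
// thread's free_handle_block list for cheap reuse; when thread == NULL
// (e.g. during JavaThread::exit) they are returned to the global
// _block_free_list under the lock instead.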


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }
}


jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last one
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
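
// To summarize the strategy above, a handle is allocated by trying, in order:
// bump-allocation in the last block of the chain, a slot from the free list
// of deleted handles, an already-linked but unused block, and finally either
// a free list rebuild or a newly appended block (with obj preserved in a
// Handle across the potentially blocking block allocation).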


void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle ==  JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
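  // Worked example (assuming block_size_in_oops == 32): with blocks == 2 and
  // free == 20, total == 64 and extra == 64 - 2*20 == 24, so
  // _allocate_before_rebuild becomes (24 + 31) / 32 == 1 and one new block is
  // appended before the next rebuild attempt.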
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock
// on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif
