src/share/vm/runtime/jniHandles.cpp

author:      jcoomes
date:        Fri, 08 Oct 2010 09:29:09 -0700
changeset:   2198 (0715f0cf171d)
parent:      1971 (38e8278318ca)
child:       2314 (f95d63e2154a)
permissions: -rw-r--r--
description: Merge
/*
 * Copyright (c) 1998, 2008, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_jniHandles.cpp.incl"

JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}

// Optimized versions of make_local for callers that already have the current
// thread, or that can derive it from the JNIEnv, avoiding a Thread::current()
// lookup.

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}

jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
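
// Editorial note (illustrative sketch, not part of the original file): a
// typical caller wraps an oop in a Handle, asks for a global handle, and
// later destroys it, e.g.
//
//   Handle h(THREAD, some_oop);                 // hypothetical oop/THREAD
//   jobject ref = JNIHandles::make_global(h);
//   ...
//   JNIHandles::destroy_global(ref);
//
// The JNI entry points NewGlobalRef/DeleteGlobalRef (and their weak
// counterparts) are the usual front ends for these routines.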

jmethodID JNIHandles::make_jmethod_id(methodHandle mh) {
  return (jmethodID) make_weak_global(mh);
}


void JNIHandles::change_method_associated_with_jmethod_id(jmethodID jmid, methodHandle mh) {
  MutexLocker ml(JNIGlobalHandle_lock); // Is this necessary?
  Handle obj = (Handle)mh;
  oop* jobj = (oop*)jmid;
  *jobj = obj();
}


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_jmethod_id(jmethodID mid) {
  destroy_weak_global((jobject)mid);
}

void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  klassOop k      = SystemDictionary::Object_klass();
  _deleted_handle = instanceKlass::cast(k)->allocate_permanent_instance(CATCH);
}
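
// Editorial note (summarizes behavior implemented elsewhere in this file):
// _deleted_handle is a sentinel object.  destroy_global() and
// destroy_weak_global() overwrite a handle's slot with it, and
// JNIHandleBlock::rebuild_free_list() later reclaims slots that still hold
// the sentinel.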

bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate the particular stack frame the handle might
// come from, so we check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top-level code (such as
  // the java command executable), in which case this type of handle is
  // not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}

bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}

class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* unused) {
    _count++;
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}


void jni_handles_init() {
  JNIHandles::initialize();
}

int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}

void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}

void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}

void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }
}
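
// Editorial note (summary of the function below, not part of the original
// file): allocate_handle() takes a slot from, in order, (1) the last block in
// the chain if it still has room, (2) the free list of slots reclaimed from
// deleted handles, (3) an already-linked but unused block following _last, or
// (4) space obtained by rebuilding the free list or appending a freshly
// allocated block, after which the allocation is retried.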

jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation, or the initial block got zapped when
    // entering a native function. If there are any following blocks, they
    // are not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}

void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild the free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
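  //
  // Worked example (editorial, assuming block_size_in_oops == 32 purely for
  // illustration): with blocks == 4 there are total == 128 handle slots.
  // If free == 70 (more than half), extra == 128 - 140 == -12, so
  // _allocate_before_rebuild stays 0 and the next exhaustion rebuilds again.
  // If free == 20, extra == 128 - 40 == 88 and
  // _allocate_before_rebuild == ceil(88 / 32) == 3, so three new blocks are
  // appended before another rebuild is attempted.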
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}

bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock
// on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}

#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif
