src/share/vm/runtime/jniHandles.cpp

author:      hseigel
date:        Tue, 26 Mar 2013 09:06:16 -0400
changeset:   4819:36376b540a98
parent:      4299:f34d701e952e
child:       5159:001ec9515f84
permissions: -rw-r--r--

8009595: The UseSplitVerifier option needs to be deprecated.
Summary: Put UseSplitVerifier option on the deprecated list.
Reviewed-by: dcubed, kmo, acorn

/*
 * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/thread.inline.hpp"


JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}
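
// The three make_local overloads above differ only in how the current thread
// is obtained: Thread::current(), an explicit Thread*, or a lookup from the
// JNIEnv*.  Illustrative call site (a sketch, not taken from this file):
//
//   oop result = ...;                            // some oop produced by the VM
//   return JNIHandles::make_local(env, result);  // wrap before handing to native code
//
// The Thread*/JNIEnv* variants simply avoid the Thread::current() lookup when
// the caller already has the thread or environment at hand.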


jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
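
// Lifecycle sketch (illustrative; assumes VM-internal code holding a valid
// Handle h on a live JavaThread -- not taken from this file):
//
//   jobject g = JNIHandles::make_global(h);   // strong root, survives GC
//   ...                                       // access via JNIHandles::resolve(g)
//   JNIHandles::destroy_global(g);            // slot becomes reusable by allocate_handle
//
// Weak globals follow the same pattern, except weak_oops_do() may clear them
// once the referent is no longer strongly reachable.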


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k        = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}
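
// Background on the code above: _deleted_handle is a plain java.lang.Object
// used as a sentinel.  destroy_global()/destroy_weak_global() overwrite freed
// slots with it, which lets rebuild_free_list() recognize reusable slots,
// while oops_do() keeps the single sentinel object itself alive.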


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate the particular stack frame the handle might
// come from, so we check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top-level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* unused) {
    _count++;
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}


void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
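
// Allocation fast path, summarizing the code above: the per-thread list of
// released blocks (thread->free_handle_block()) is consulted first, so the
// common case hands out a block without taking JNIHandleBlockFreeList_lock.
// Only the shared free list / "new JNIHandleBlock()" path needs the lock,
// and it is taken without a safepoint check for the lock-ordering reason
// noted in the comment above.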


void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}


void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
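
// Note on the filter above: once rebuild_free_list() has linked a slot into
// the free list, the slot holds a pointer into C-heap block storage rather
// than a Java heap address, so the is_in_reserved() test skips it.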


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}


jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
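
// Allocation order implemented above, for reference: (1) bump-allocate in the
// current last block, (2) pop a slot from the free list of deleted handles,
// (3) advance _last into an already-appended but unused block, and only then
// (4) either rebuild the free list or append a fresh block before retrying.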


void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}
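
// Worked example of the heuristic above (illustrative numbers; assumes the
// usual block_size_in_oops of 32 declared in jniHandles.hpp): with 4 full
// blocks (total = 128) of which free = 40 slots were reclaimed,
// extra = 128 - 2*40 = 48 > 0, so _allocate_before_rebuild = (48 + 31) / 32 = 2
// and the next two overflows append new blocks instead of rescanning.  With
// free = 70, extra is negative, _allocate_before_rebuild stays 0, and the next
// overflow rebuilds the free list again.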


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock
// on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif
