src/share/vm/runtime/jniHandles.cpp

author:      mchung
date:        Wed, 19 Nov 2014 14:21:09 -0800
changeset:   7368:fa6adc194d48
parent:      6680:78bbf4d43a14
child:       6876:710a3c8b516e
child:       7627:d68158e12cea
permissions: -rw-r--r--

8064667: Add -XX:+CheckEndorsedAndExtDirs flag to JDK 8
Reviewed-by: coleenp, ccheung

/*
 * Copyright (c) 1998, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/thread.inline.hpp"

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;

jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}
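
// The make_local() variants above are the VM-internal entry points behind JNI
// local references. A minimal, purely illustrative sketch of the user-level
// view (C-style JNI; the class and method names are hypothetical, not part of
// this file):
//
//   JNIEXPORT jobject JNICALL
//   Java_Example_newObject(JNIEnv* env, jobject self) {
//     jclass cls  = (*env)->FindClass(env, "java/lang/Object");  // local ref
//     jobject obj = (*env)->AllocObject(env, cls);               // local ref
//     return obj;  // local refs go away when the native frame is exited
//   }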

jobject JNIHandles::make_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}
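
// make_global/make_weak_global and destroy_global/destroy_weak_global back
// the JNI global-reference functions. A minimal, illustrative sketch of the
// corresponding user-level calls (assuming an existing local reference
// `local`):
//
//   jobject g  = (*env)->NewGlobalRef(env, local);       // -> make_global
//   jweak   wg = (*env)->NewWeakGlobalRef(env, local);   // -> make_weak_global
//   (*env)->DeleteGlobalRef(env, g);                     // -> destroy_global
//   (*env)->DeleteWeakGlobalRef(env, wg);                // -> destroy_weak_global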

void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k      = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}
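
// Note: _deleted_handle is a dummy java.lang.Object instance used as a
// sentinel. destroy_global()/destroy_weak_global() overwrite a handle slot
// with it, and rebuild_free_list() later recognizes such slots and threads
// them onto the block's free list for reuse.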

bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case this type of handle
  // is not permitted.
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}

bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* unused) {
    _count++;
  }
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}

class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}


void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated     = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list      = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list           = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top  = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread)  {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
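
// In short, a block is obtained from one of three places, in order of
// preference: the calling thread's free_handle_block list (no locking
// required), the global _block_free_list (under JNIHandleBlockFreeList_lock,
// taken without a safepoint check), or a freshly allocated JNIHandleBlock
// when the global free list is empty.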

void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}

void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}

void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}

jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
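
// Allocation order used above: bump-allocate in the last block, then reuse a
// slot from the free list of deleted handles, then advance into an already
// linked but unused block, and only then either rebuild the free list or
// append a new block, as governed by the _allocate_before_rebuild counter.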

void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}
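
// Worked example of the heuristic, assuming for illustration that
// block_size_in_oops is 32: with one full block and free == 10 reclaimed
// slots, total == 32 and extra == 32 - 2*10 == 12 > 0, so
// _allocate_before_rebuild == (12 + 31) / 32 == 1 and one new block is
// appended before the next rebuild. With free == 20, extra == -8 and the
// free list is simply rebuilt again the next time space runs out.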

bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a lock on the
// structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}

#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}

#endif
