src/share/vm/runtime/jniHandles.cpp

author       sbohne
date         Thu, 10 Apr 2008 15:49:16 -0400
changeset    528:c6ff24ceec1c
parent       435:a61af66fc99e
child        548:ba764ed4b6f2
permissions  -rw-r--r--

6686407: Fix for 6666698 broke -XX:BiasedLockingStartupDelay=0
Summary: Stack allocated VM_EnableBiasedLocking op must be marked as such
Reviewed-by: xlu, acorn, never, dholmes

/*
 * Copyright 1998-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_jniHandles.cpp.incl"


JNIHandleBlock* JNIHandles::_global_handles      = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles = NULL;
oop             JNIHandles::_deleted_handle      = NULL;


jobject JNIHandles::make_local(oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    Thread* thread = Thread::current();
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


// optimized versions

jobject JNIHandles::make_local(Thread* thread, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}


jobject JNIHandles::make_local(JNIEnv* env, oop obj) {
  if (obj == NULL) {
    return NULL;                // ignore null handles
  } else {
    JavaThread* thread = JavaThread::thread_from_jni_environment(env);
    assert(Universe::heap()->is_in_reserved(obj), "sanity check");
    return thread->active_handles()->allocate_handle(obj);
  }
}

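// Note: the three make_local() variants above differ only in how the current
// thread is obtained (looked up via Thread::current(), passed explicitly, or
// derived from the JNIEnv); each one allocates the handle from that thread's
// active JNIHandleBlock.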

jobject JNIHandles::make_global(Handle obj) {
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}


jobject JNIHandles::make_weak_global(Handle obj) {
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}

jmethodID JNIHandles::make_jmethod_id(methodHandle mh) {
  return (jmethodID) make_weak_global(mh);
}


void JNIHandles::change_method_associated_with_jmethod_id(jmethodID jmid, methodHandle mh) {
  MutexLocker ml(JNIGlobalHandle_lock); // Is this necessary?
  Handle obj = (Handle)mh;
  oop* jobj = (oop*)jmid;
  *jobj = obj();
}
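
// Note: in this version of the VM a jmethodID is simply a weak global handle
// to the method (see make_jmethod_id() above), so re-pointing it at a new
// method is just a store of the new method oop through the handle's slot.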


void JNIHandles::destroy_global(jobject handle) {
  if (handle != NULL) {
    assert(is_global_handle(handle), "Invalid delete of global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}
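
// Note: destroyed handles are not unlinked from their block; the slot is
// overwritten with the sentinel oop returned by deleted_handle(), and
// JNIHandleBlock::rebuild_free_list() later threads such slots onto a free
// list so allocate_handle() can reuse them.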


void JNIHandles::destroy_weak_global(jobject handle) {
  if (handle != NULL) {
    assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle");
    *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it
  }
}

void JNIHandles::destroy_jmethod_id(jmethodID mid) {
  destroy_weak_global((jobject)mid);
}


void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}


void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}


void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  klassOop k = SystemDictionary::object_klass();
  _deleted_handle = instanceKlass::cast(k)->allocate_permanent_instance(CATCH);
}
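
// Note: _deleted_handle is a dedicated java.lang.Object instance allocated in
// permanent space during initialization; it is never handed out to user code
// and serves only as the sentinel stored by destroy_global()/
// destroy_weak_global() and tested in JNIHandleBlock::rebuild_free_list().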


bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}


// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no Java frame, then this must be top-level code, such as the
  // java command executable, in which case this type of handle is not permitted.
  return (thr->has_last_Java_frame() &&
          (void*)obj < (void*)thr->stack_base() &&
          (void*)obj >= (void*)thr->last_Java_sp());
}
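
// Note: the check above is a conservative address-range test; any pointer
// that lies between the last Java SP and the base of the thread's stack is
// treated as a handle living in a stack-allocated handle area.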


bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}


bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}

long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}

long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}


class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};

class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  void do_oop(oop* unused) {
    _count++;
  }
  int count() { return _count; }
};

// We assume this is called at a safepoint: no lock is needed.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}
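
// Note: AlwaysAliveClosure answers "alive" for every object, so the total
// printed above counts the weak global handles as well as the strong ones,
// regardless of whether their referents are still reachable.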

class VerifyHandleClosure: public OopClosure {
public:
  void do_oop(oop* root) {
    (*root)->verify();
  }
};

void JNIHandles::verify() {
  VerifyHandleClosure verify_handle;
  AlwaysAliveClosure always_alive;

  oops_do(&verify_handle);
  weak_oops_do(&always_alive, &verify_handle);
}



void jni_handles_init() {
  JNIHandles::initialize();
}


int             JNIHandleBlock::_blocks_allocated = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list  = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list       = NULL;
#endif


void JNIHandleBlock::zap() {
  // Zap block values
  _top = 0;
  for (int index = 0; index < block_size_in_oops; index++) {
    _handles[index] = badJNIHandle;
  }
}

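// Allocation strategy for blocks: first try the requesting thread's local
// cache of released blocks (no locking needed), otherwise fall back to the
// global _block_free_list, or allocate a fresh block, under
// JNIHandleBlockFreeList_lock with safepoint checks disabled (see the
// deadlock note below).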
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
#ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
#endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}

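// Release a chain of blocks. If a thread is supplied, the blocks are parked
// on that thread's local cache for quick reuse; otherwise they go back onto
// the global free list. Blocks still reachable through pop_frame_link are
// released as well, as a safety net for unbalanced PushLocalFrame calls.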
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if (freelist != NULL) {
      while (block->_next != NULL) block = block->_next;
      block->_next = freelist;
    }
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}

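// Strong-root iteration: walks this block chain and, via pop_frame_link, any
// chains saved by PushLocalFrame. The is_in_reserved() filter below keeps the
// traversal away from slots that hold free-list links (which point at other
// slots, not into the heap) and NULL entries.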
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
             "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}


void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
           "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }
}

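// Hand out one handle slot for obj. The fast path bump-allocates in the _last
// block; failing that we pop a slot from the free list, step into an already
// linked but unused block, and only then either rebuild the free list or
// append a brand-new block before retrying.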
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
             "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if an unused block follows the last block
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild the free list or expand
  if (_allocate_before_rebuild == 0) {
    rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across the call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}


void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free   = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle == JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild the free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
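  // Illustration (hypothetical numbers, using a block size of 32 for
  // concreteness): with two full blocks, total = 64. If only 10 slots were
  // reclaimed, extra = 64 - 20 = 44 and we schedule ceil(44/32) = 2 new
  // blocks before the next rebuild; if 40 slots were reclaimed, extra is
  // negative, _allocate_before_rebuild stays 0, and the next overflow
  // triggers another rebuild.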
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
                  this, blocks, total-free, free, _allocate_before_rebuild);
  }
}


bool JNIHandleBlock::contains(jobject handle) const {
  return ((jobject)&_handles[0] <= handle && handle < (jobject)&_handles[_top]);
}


bool JNIHandleBlock::chain_contains(jobject handle) const {
  for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}


int JNIHandleBlock::length() const {
  int result = 1;
  for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) {
    result++;
  }
  return result;
}

// This method is not thread-safe, i.e., it must be called while holding a
// lock on the structure.
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
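
// Note: the estimate above charges sizeof(JNIHandleBlock) per block in the
// chain; since the handle slots are stored in an array embedded in the block
// itself, this covers the handles as well as the bookkeeping fields.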


#ifndef PRODUCT

bool JNIHandleBlock::any_contains(jobject handle) {
  for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) {
    if (current->contains(handle)) {
      return true;
    }
  }
  return false;
}

void JNIHandleBlock::print_statistics() {
  int used_blocks  = 0;
  int free_blocks  = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use: %d", used_blocks);
  tty->print_cr("- blocks free: %d", free_blocks);
  tty->print_cr("- handles in use: %d", used_handles);
  tty->print_cr("- handles free: %d", free_handles);
}

#endif
