/*
 * Copyright 2001-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

// Inline allocation implementations.
duke@435: duke@435: void CollectedHeap::post_allocation_setup_common(KlassHandle klass, duke@435: HeapWord* obj, duke@435: size_t size) { duke@435: post_allocation_setup_no_klass_install(klass, obj, size); duke@435: post_allocation_install_obj_klass(klass, oop(obj), (int) size); duke@435: } duke@435: duke@435: void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass, duke@435: HeapWord* objPtr, duke@435: size_t size) { duke@435: duke@435: oop obj = (oop)objPtr; duke@435: duke@435: assert(obj != NULL, "NULL object pointer"); duke@435: if (UseBiasedLocking && (klass() != NULL)) { duke@435: obj->set_mark(klass->prototype_header()); duke@435: } else { duke@435: // May be bootstrapping duke@435: obj->set_mark(markOopDesc::prototype()); duke@435: } duke@435: duke@435: // support low memory notifications (no-op if not enabled) duke@435: LowMemoryDetector::detect_low_memory_for_collected_pools(); duke@435: } duke@435: duke@435: void CollectedHeap::post_allocation_install_obj_klass(KlassHandle klass, duke@435: oop obj, duke@435: int size) { duke@435: // These asserts are kind of complicated because of klassKlass duke@435: // and the beginning of the world. 
duke@435: assert(klass() != NULL || !Universe::is_fully_initialized(), "NULL klass"); duke@435: assert(klass() == NULL || klass()->is_klass(), "not a klass"); duke@435: assert(klass() == NULL || klass()->klass_part() != NULL, "not a klass"); duke@435: assert(obj != NULL, "NULL object pointer"); duke@435: obj->set_klass(klass()); duke@435: assert(!Universe::is_fully_initialized() || obj->blueprint() != NULL, duke@435: "missing blueprint"); coleenp@548: } duke@435: coleenp@548: // Support for jvmti and dtrace coleenp@548: inline void post_allocation_notify(KlassHandle klass, oop obj) { duke@435: // support for JVMTI VMObjectAlloc event (no-op if not enabled) duke@435: JvmtiExport::vm_object_alloc_event_collector(obj); duke@435: duke@435: if (DTraceAllocProbes) { duke@435: // support for Dtrace object alloc event (no-op most of the time) duke@435: if (klass() != NULL && klass()->klass_part()->name() != NULL) { duke@435: SharedRuntime::dtrace_object_alloc(obj); duke@435: } duke@435: } duke@435: } duke@435: duke@435: void CollectedHeap::post_allocation_setup_obj(KlassHandle klass, duke@435: HeapWord* obj, duke@435: size_t size) { duke@435: post_allocation_setup_common(klass, obj, size); duke@435: assert(Universe::is_bootstrapping() || duke@435: !((oop)obj)->blueprint()->oop_is_array(), "must not be an array"); coleenp@548: // notify jvmti and dtrace coleenp@548: post_allocation_notify(klass, (oop)obj); duke@435: } duke@435: duke@435: void CollectedHeap::post_allocation_setup_array(KlassHandle klass, duke@435: HeapWord* obj, duke@435: size_t size, duke@435: int length) { duke@435: assert(length >= 0, "length should be non-negative"); coleenp@548: post_allocation_setup_common(klass, obj, size); coleenp@548: // Must set length after installing klass as set_klass zeros the length coleenp@548: // field in UseCompressedOops duke@435: ((arrayOop)obj)->set_length(length); duke@435: assert(((oop)obj)->blueprint()->oop_is_array(), "must be an array"); coleenp@548: // notify jvmti 
and dtrace (must be after length is set for dtrace) coleenp@548: post_allocation_notify(klass, (oop)obj); duke@435: } duke@435: duke@435: HeapWord* CollectedHeap::common_mem_allocate_noinit(size_t size, bool is_noref, TRAPS) { duke@435: duke@435: // Clear unhandled oops for memory allocation. Memory allocation might duke@435: // not take out a lock if from tlab, so clear here. duke@435: CHECK_UNHANDLED_OOPS_ONLY(THREAD->clear_unhandled_oops();) duke@435: duke@435: if (HAS_PENDING_EXCEPTION) { duke@435: NOT_PRODUCT(guarantee(false, "Should not allocate with exception pending")); duke@435: return NULL; // caller does a CHECK_0 too duke@435: } duke@435: duke@435: // We may want to update this, is_noref objects might not be allocated in TLABs. duke@435: HeapWord* result = NULL; duke@435: if (UseTLAB) { duke@435: result = CollectedHeap::allocate_from_tlab(THREAD, size); duke@435: if (result != NULL) { duke@435: assert(!HAS_PENDING_EXCEPTION, duke@435: "Unexpected exception, will result in uninitialized storage"); duke@435: return result; duke@435: } duke@435: } ysr@777: bool gc_overhead_limit_was_exceeded = false; duke@435: result = Universe::heap()->mem_allocate(size, duke@435: is_noref, duke@435: false, duke@435: &gc_overhead_limit_was_exceeded); duke@435: if (result != NULL) { duke@435: NOT_PRODUCT(Universe::heap()-> duke@435: check_for_non_bad_heap_word_value(result, size)); duke@435: assert(!HAS_PENDING_EXCEPTION, duke@435: "Unexpected exception, will result in uninitialized storage"); duke@435: return result; duke@435: } duke@435: duke@435: duke@435: if (!gc_overhead_limit_was_exceeded) { duke@435: // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support duke@435: report_java_out_of_memory("Java heap space"); duke@435: duke@435: if (JvmtiExport::should_post_resource_exhausted()) { duke@435: JvmtiExport::post_resource_exhausted( duke@435: JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP, duke@435: "Java heap space"); duke@435: } 
duke@435: duke@435: THROW_OOP_0(Universe::out_of_memory_error_java_heap()); duke@435: } else { duke@435: // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support duke@435: report_java_out_of_memory("GC overhead limit exceeded"); duke@435: duke@435: if (JvmtiExport::should_post_resource_exhausted()) { duke@435: JvmtiExport::post_resource_exhausted( duke@435: JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP, duke@435: "GC overhead limit exceeded"); duke@435: } duke@435: duke@435: THROW_OOP_0(Universe::out_of_memory_error_gc_overhead_limit()); duke@435: } duke@435: } duke@435: duke@435: HeapWord* CollectedHeap::common_mem_allocate_init(size_t size, bool is_noref, TRAPS) { duke@435: HeapWord* obj = common_mem_allocate_noinit(size, is_noref, CHECK_NULL); duke@435: init_obj(obj, size); duke@435: return obj; duke@435: } duke@435: duke@435: // Need to investigate, do we really want to throw OOM exception here? duke@435: HeapWord* CollectedHeap::common_permanent_mem_allocate_noinit(size_t size, TRAPS) { duke@435: if (HAS_PENDING_EXCEPTION) { duke@435: NOT_PRODUCT(guarantee(false, "Should not allocate with exception pending")); duke@435: return NULL; // caller does a CHECK_NULL too duke@435: } duke@435: duke@435: #ifdef ASSERT duke@435: if (CIFireOOMAt > 0 && THREAD->is_Compiler_thread() && duke@435: ++_fire_out_of_memory_count >= CIFireOOMAt) { duke@435: // For testing of OOM handling in the CI throw an OOM and see how duke@435: // it does. Historically improper handling of these has resulted duke@435: // in crashes which we really don't want to have in the CI. 
duke@435: THROW_OOP_0(Universe::out_of_memory_error_perm_gen()); duke@435: } duke@435: #endif duke@435: duke@435: HeapWord* result = Universe::heap()->permanent_mem_allocate(size); duke@435: if (result != NULL) { duke@435: NOT_PRODUCT(Universe::heap()-> duke@435: check_for_non_bad_heap_word_value(result, size)); duke@435: assert(!HAS_PENDING_EXCEPTION, duke@435: "Unexpected exception, will result in uninitialized storage"); duke@435: return result; duke@435: } duke@435: // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support duke@435: report_java_out_of_memory("PermGen space"); duke@435: duke@435: if (JvmtiExport::should_post_resource_exhausted()) { duke@435: JvmtiExport::post_resource_exhausted( duke@435: JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR, duke@435: "PermGen space"); duke@435: } duke@435: duke@435: THROW_OOP_0(Universe::out_of_memory_error_perm_gen()); duke@435: } duke@435: duke@435: HeapWord* CollectedHeap::common_permanent_mem_allocate_init(size_t size, TRAPS) { duke@435: HeapWord* obj = common_permanent_mem_allocate_noinit(size, CHECK_NULL); duke@435: init_obj(obj, size); duke@435: return obj; duke@435: } duke@435: duke@435: HeapWord* CollectedHeap::allocate_from_tlab(Thread* thread, size_t size) { duke@435: assert(UseTLAB, "should use UseTLAB"); duke@435: duke@435: HeapWord* obj = thread->tlab().allocate(size); duke@435: if (obj != NULL) { duke@435: return obj; duke@435: } duke@435: // Otherwise... 
duke@435: return allocate_from_tlab_slow(thread, size); duke@435: } duke@435: duke@435: void CollectedHeap::init_obj(HeapWord* obj, size_t size) { duke@435: assert(obj != NULL, "cannot initialize NULL object"); duke@435: const size_t hs = oopDesc::header_size(); duke@435: assert(size >= hs, "unexpected object size"); duke@435: Copy::fill_to_aligned_words(obj + hs, size - hs); duke@435: } duke@435: duke@435: oop CollectedHeap::obj_allocate(KlassHandle klass, int size, TRAPS) { duke@435: debug_only(check_for_valid_allocation_state()); duke@435: assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); duke@435: assert(size >= 0, "int won't convert to size_t"); duke@435: HeapWord* obj = common_mem_allocate_init(size, false, CHECK_NULL); duke@435: post_allocation_setup_obj(klass, obj, size); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); duke@435: return (oop)obj; duke@435: } duke@435: duke@435: oop CollectedHeap::array_allocate(KlassHandle klass, duke@435: int size, duke@435: int length, duke@435: TRAPS) { duke@435: debug_only(check_for_valid_allocation_state()); duke@435: assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); duke@435: assert(size >= 0, "int won't convert to size_t"); duke@435: HeapWord* obj = common_mem_allocate_init(size, false, CHECK_NULL); duke@435: post_allocation_setup_array(klass, obj, size, length); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); duke@435: return (oop)obj; duke@435: } duke@435: duke@435: oop CollectedHeap::large_typearray_allocate(KlassHandle klass, duke@435: int size, duke@435: int length, duke@435: TRAPS) { duke@435: debug_only(check_for_valid_allocation_state()); duke@435: assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); duke@435: assert(size >= 0, "int won't convert to size_t"); duke@435: HeapWord* obj = common_mem_allocate_init(size, true, CHECK_NULL); duke@435: 
post_allocation_setup_array(klass, obj, size, length); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); duke@435: return (oop)obj; duke@435: } duke@435: duke@435: oop CollectedHeap::permanent_obj_allocate(KlassHandle klass, int size, TRAPS) { duke@435: oop obj = permanent_obj_allocate_no_klass_install(klass, size, CHECK_NULL); duke@435: post_allocation_install_obj_klass(klass, obj, size); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value((HeapWord*) obj, duke@435: size)); duke@435: return obj; duke@435: } duke@435: duke@435: oop CollectedHeap::permanent_obj_allocate_no_klass_install(KlassHandle klass, duke@435: int size, duke@435: TRAPS) { duke@435: debug_only(check_for_valid_allocation_state()); duke@435: assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); duke@435: assert(size >= 0, "int won't convert to size_t"); duke@435: HeapWord* obj = common_permanent_mem_allocate_init(size, CHECK_NULL); duke@435: post_allocation_setup_no_klass_install(klass, obj, size); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); duke@435: return (oop)obj; duke@435: } duke@435: duke@435: oop CollectedHeap::permanent_array_allocate(KlassHandle klass, duke@435: int size, duke@435: int length, duke@435: TRAPS) { duke@435: debug_only(check_for_valid_allocation_state()); duke@435: assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed"); duke@435: assert(size >= 0, "int won't convert to size_t"); duke@435: HeapWord* obj = common_permanent_mem_allocate_init(size, CHECK_NULL); duke@435: post_allocation_setup_array(klass, obj, size, length); duke@435: NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size)); duke@435: return (oop)obj; duke@435: } duke@435: duke@435: // Returns "TRUE" if "p" is a method oop in the duke@435: // current heap with high probability. 
NOTE: The main duke@435: // current consumers of this interface are Forte:: duke@435: // and ThreadProfiler::. In these cases, the duke@435: // interpreter frame from which "p" came, may be duke@435: // under construction when sampled asynchronously, so duke@435: // the clients want to check that it represents a duke@435: // valid method before using it. Nonetheless since duke@435: // the clients do not typically lock out GC, the duke@435: // predicate is_valid_method() is not stable, so duke@435: // it is possible that by the time "p" is used, it duke@435: // is no longer valid. duke@435: inline bool CollectedHeap::is_valid_method(oop p) const { duke@435: return duke@435: p != NULL && duke@435: duke@435: // Check whether it is aligned at a HeapWord boundary. duke@435: Space::is_aligned(p) && duke@435: duke@435: // Check whether "method" is in the allocated part of the duke@435: // permanent generation -- this needs to be checked before duke@435: // p->klass() below to avoid a SEGV (but see below duke@435: // for a potential window of vulnerability). duke@435: is_permanent((void*)p) && duke@435: duke@435: // See if GC is active; however, there is still an duke@435: // apparently unavoidable window after this call duke@435: // and before the client of this interface uses "p". duke@435: // If the client chooses not to lock out GC, then duke@435: // it's a risk the client must accept. duke@435: !is_gc_active() && duke@435: duke@435: // Check that p is a methodOop. duke@435: p->klass() == Universe::methodKlassObj(); duke@435: } duke@435: duke@435: duke@435: #ifndef PRODUCT duke@435: duke@435: inline bool duke@435: CollectedHeap::promotion_should_fail(volatile size_t* count) { duke@435: // Access to count is not atomic; the value does not have to be exact. 
duke@435: if (PromotionFailureALot) { duke@435: const size_t gc_num = total_collections(); duke@435: const size_t elapsed_gcs = gc_num - _promotion_failure_alot_gc_number; duke@435: if (elapsed_gcs >= PromotionFailureALotInterval) { duke@435: // Test for unsigned arithmetic wrap-around. duke@435: if (++*count >= PromotionFailureALotCount) { duke@435: *count = 0; duke@435: return true; duke@435: } duke@435: } duke@435: } duke@435: return false; duke@435: } duke@435: duke@435: inline bool CollectedHeap::promotion_should_fail() { duke@435: return promotion_should_fail(&_promotion_failure_alot_count); duke@435: } duke@435: duke@435: inline void CollectedHeap::reset_promotion_should_fail(volatile size_t* count) { duke@435: if (PromotionFailureALot) { duke@435: _promotion_failure_alot_gc_number = total_collections(); duke@435: *count = 0; duke@435: } duke@435: } duke@435: duke@435: inline void CollectedHeap::reset_promotion_should_fail() { duke@435: reset_promotion_should_fail(&_promotion_failure_alot_count); duke@435: } duke@435: #endif // #ifndef PRODUCT