/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*. This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // Enhanced Class Redefinition support
  void classes_do(void f(Klass*)) {
    f(_klass);
  }

  // CDS support.  Replace the klass in this with the archive version
  // could use this for Enhanced Class Redefinition also.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
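
// Illustrative sketch (not code from this file) of the lookup such a cache
// performs: because only the Klass* and the method idnum are stored, resolving
// the Method* again after a RedefineClasses operation yields the latest version.
// The actual implementation of get_method() lives in universe.cpp; conceptually:
//
//   Method* LatestMethodCache::get_method() {
//     if (klass() == NULL) return NULL;
//     InstanceKlass* ik = InstanceKlass::cast(klass());
//     return ik->method_with_idnum(method_idnum());   // current version of the method
//   }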

// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};

enum VerifyOption {
      VerifyOption_Default = 0,

      // G1
      VerifyOption_G1UsePrevMarking = VerifyOption_Default,
      VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
      VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class Arguments;
  friend class SystemDictionary;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;             // Reference to the main thread group object
  static oop          _system_thread_group;           // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array;   // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;               // A cache of "null" as a Java string
  static oop          _the_min_jint_string;           // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes

  static Method* _throw_illegal_access_error;

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;
  static oop          _out_of_memory_error_realloc_objects;

  static Array<int>*       _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*    _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>*   _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*    _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;    // preallocated exception object
  static oop          _arithmetic_exception_instance;  // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  static oop          _allocation_context_notification_obj;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;                      // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;                         // true during genesis
  static bool _fully_initialized;                     // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors()     { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible using an error with preallocated backtrace;
  // otherwise return the given default error.
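  //
  // Rough sketch of the fallback strategy implemented in universe.cpp (shown here
  // for illustration only; the real code also transfers the detail message and
  // fills in the stack trace of the chosen error):
  //
  //   jint next = Atomic::add(-1, &_preallocated_out_of_memory_error_avail_count);
  //   if (next < 0) {
  //     return default_err;   // preallocated errors exhausted, no backtrace available
  //   }
  //   oop err = preallocated_out_of_memory_errors()->obj_at(next);
  //   ... copy the detail message from default_err into err ...
  //   return err;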
  static oop        gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void     set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base    = base;
  }
  static void     set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base  = base;
  }
  static void     set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks   = use;
  }

  // Debugging
  static int _verify_count;                           // number of verifies done
  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;

  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, err_msg("range check for type: %s", type2name(t)));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror); }
  static oop float_mirror()                 { return check_mirror(_float_mirror); }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror); }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror); }
  static oop char_mirror()                  { return check_mirror(_char_mirror); }
  static oop long_mirror()                  { return check_mirror(_long_mirror); }
  static oop short_mirror()                 { return check_mirror(_short_mirror); }
  static oop void_mirror()                  { return check_mirror(_void_mirror); }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()                 { return _main_thread_group; }
  static void set_main_thread_group(oop group)        { _main_thread_group = group;}

  static oop      system_thread_group()               { return _system_thread_group; }
  static void set_system_thread_group(oop group)      { _system_thread_group = group;}

  static objArrayOop  the_empty_class_klass_array ()  { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()  { return _the_array_interfaces_array; }
  static oop          the_null_string()               { return _the_null_string; }
  static oop          the_min_jint_string()           { return _the_min_jint_string; }

  static Method*      finalizer_register_method()     { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()        { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }

  static oop          null_ptr_exception_instance()   { return _null_ptr_exception_instance; }
  static oop          arithmetic_exception_instance() { return _arithmetic_exception_instance; }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                  { return _vm_exception; }

  static inline oop   allocation_context_notification_obj();
  static inline void  set_allocation_context_notification_obj(oop obj);

  static Method*      throw_illegal_access_error()    { return _throw_illegal_access_error; }

  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array; }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array; }
  static Array<Method*>*   the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*    the_empty_klass_array()  { return _the_empty_klass_array; }

  // OutOfMemoryError support. Returns an error with the required message. The returned error
  // may or may not have a backtrace. If the error has a backtrace then the stack trace is
  // already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace);  }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace); }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
  static oop out_of_memory_error_realloc_objects()    { return gen_out_of_memory_error(_out_of_memory_error_realloc_objects); }
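
  // Illustrative (hypothetical) call site, not code from this file: a VM component
  // that fails a Java heap allocation can throw the preallocated error via the
  // exception macros from utilities/exceptions.hpp, e.g.
  //
  //   THROW_OOP(Universe::out_of_memory_error_java_heap());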

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  // 0 - Use 32-bits oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4Gb
  // 1 - Use zero based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32Gb
  // 2 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    HeapBasedNarrowOop = 2
  };
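
  // Illustrative sketch of the encoding/decoding arithmetic implied by the modes
  // above (not the code the VM uses; see oop.inline.hpp for that). In
  // UnscaledNarrowOop mode base == NULL and shift == 0, in ZeroBasedNarrowOop mode
  // only the shift is applied, and in HeapBasedNarrowOop mode both are used:
  //
  //   narrowOop encode(oop p) {
  //     return (narrowOop)(((uintptr_t)p - (uintptr_t)narrow_oop_base()) >> narrow_oop_shift());
  //   }
  //   oop decode(narrowOop v) {
  //     return (oop)(narrow_oop_base() + ((uintptr_t)v << narrow_oop_shift()));
  //   }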
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                  { return  _narrow_oop._base; }
  static bool  is_narrow_oop_base(void* addr)        { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                 { return  _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks() { return  _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address  narrow_klass_base()                { return  _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)      { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()               { return  _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()            { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)    { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                 { return _narrow_ptrs_base; }

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void     set_narrow_oop_shift(int shift)    {
    _narrow_oop._shift   = shift;
  }

  static void     set_narrow_klass_shift(int shift)  {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()         { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()             { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()              { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                      { return _bootstrapping; }
  static bool is_fully_initialized()                  { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);
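
  // Illustrative sketch (hypothetical closure, not part of this header) of how a
  // caller can visit the static oop roots held by Universe:
  //
  //   class CountUniverseRoots : public OopClosure {
  //    public:
  //     int _count;
  //     CountUniverseRoots() : _count(0) {}
  //     virtual void do_oop(oop* p)       { if (*p != NULL) _count++; }
  //     virtual void do_oop(narrowOop* p) { _count++; }
  //   };
  //
  //   CountUniverseRoots closure;
  //   Universe::oops_do(&closure);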

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix, bool silent = VerifySilently);
  static void verify(const char* prefix, bool silent = VerifySilently) {
    verify(VerifyOption_Default, prefix, silent);
  }
  static void verify(bool silent = VerifySilently) {
    verify("", silent);
  }

  static int  verify_count()       { return _verify_count; }
  // The default behavior is to call print_on() on gclog_or_tty.
  static void print();
  // The extended parameter determines which method on the heap will
  // be called: print_on() (extended == false) or print_extended_on()
  // (extended == true).
  static void print_on(outputStream* st, bool extended = false);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc() { print_heap_before_gc(gclog_or_tty); }
  static void print_heap_after_gc()  { print_heap_after_gc(gclog_or_tty); }
  static void print_heap_before_gc(outputStream* st, bool ignore_extended = false);
  static void print_heap_after_gc(outputStream* st, bool ignore_extended = false);

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Flushing and deoptimization
  static void flush_dependents_on(instanceKlassHandle dependee);
  static void flush_dependents_on(Handle call_site, Handle method_handle);
#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  static void flush_evol_dependents_on(instanceKlassHandle dependee);
#endif // HOTSWAP
  // Support for fullspeed debugging
  static void flush_dependents_on_method(methodHandle dependee);

  // Compiler support
  static int base_vtable_size()               { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP