/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// Common parts of a Method* cache. This cache safely interacts with
// the RedefineClasses API.
//
class CommonMethodOopCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass* _klass;
  int    _method_idnum;

 public:
  CommonMethodOopCache()  { _klass = NULL; _method_idnum = -1; }
  ~CommonMethodOopCache() { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m, TRAPS);
  Klass* klass() const        { return _klass; }
  int    method_idnum() const { return _method_idnum; }

  // Enhanced Class Redefinition support
  void classes_do(void f(Klass*)) {
    f(_klass);
  }

  // CDS support.  Replace the klass in this with the archive version;
  // could use this for Enhanced Class Redefinition also.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
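
// Illustrative sketch (not part of this header; the wrapper name is
// hypothetical): a cache of the kind above can recover the currently
// installed Method* from the saved Klass* and method idnum, since the idnum
// stays stable across RedefineClasses while the Method* itself may be
// replaced:
//
//   Method* current_method(Klass* k, int idnum) {
//     InstanceKlass* ik = InstanceKlass::cast(k);
//     return ik->method_with_idnum(idnum);  // latest version for this idnum
//   }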

// A helper class for caching a Method* when the user of the cache
// cares about all versions of the Method*.
//
class ActiveMethodOopsCache : public CommonMethodOopCache {
  // This subclass adds weak references to older versions of the
  // Method* and a query method for a Method*.

 private:
  // If the cached Method* has not been redefined, then
  // _prev_methods will be NULL.  If all of the previous
  // versions of the method have been collected, then
  // _prev_methods can have a length of zero.
  GrowableArray<Method*>* _prev_methods;

 public:
  ActiveMethodOopsCache()  { _prev_methods = NULL; }
  ~ActiveMethodOopsCache();

  void add_previous_version(Method* const method);
  bool is_same_method(Method* const method) const;
};


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*.
//
class LatestMethodOopCache : public CommonMethodOopCache {
  // This subclass adds a getter method for the latest Method*.

 public:
  Method* get_Method();
};

// For UseCompressedOops and UseCompressedKlassPointers.
struct NarrowPtrStruct {
  // Base address for oop/klass-within-java-object materialization.
  // NULL if using wide oops/klasses or zero based narrow oops/klasses.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};
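
// Illustrative sketch (the real encode/decode lives elsewhere in the VM, e.g.
// oop.inline.hpp, not here): given the base and shift above, a non-NULL
// narrow oop decodes roughly as
//
//   oop decode(narrowOop v) {
//     return (oop)(void*)((uintptr_t)Universe::narrow_oop_base() +
//                         ((uintptr_t)v << Universe::narrow_oop_shift()));
//   }
//
// and encoding is the inverse: subtract the base and shift right. NULL is
// handled specially so that a narrow value of 0 always means NULL.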

enum VerifyOption {
      VerifyOption_Default = 0,

      // G1
      VerifyOption_G1UsePrevMarking = VerifyOption_Default,
      VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
      VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class Arguments;
  friend class SystemDictionary;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;           // Reference to the main thread group object
  static oop          _system_thread_group;         // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array; // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;             // A cache of "null" as a Java string
  static oop          _the_min_jint_string;         // A cache of "-2147483648" as a Java string
  static LatestMethodOopCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodOopCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static ActiveMethodOopsCache* _reflect_invoke_cache;    // method for security checks
  static oop          _out_of_memory_error_java_heap;  // preallocated error object (no backtrace)
  static oop          _out_of_memory_error_perm_gen;   // preallocated error object (no backtrace)
  static oop          _out_of_memory_error_array_size; // preallocated error object (no backtrace)
  static oop          _out_of_memory_error_gc_overhead_limit; // preallocated error object (no backtrace)

  static Array<int>*     _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*      _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*  _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>* _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*  _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;
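
  // Illustrative sketch of how the two members above are used (the real code
  // is gen_out_of_memory_error() in universe.cpp, which also copies the
  // message and decides whether the stack trace should be filled in):
  //
  //   oop Universe::gen_out_of_memory_error(oop default_err) {
  //     int next = (int)Atomic::add(-1, &_preallocated_out_of_memory_error_avail_count);
  //     if (next < 0) return default_err;  // preallocated errors exhausted
  //     return preallocated_out_of_memory_errors()->obj_at(next);
  //   }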
  static oop          _null_ptr_exception_instance;    // preallocated exception object
  static oop          _arithmetic_exception_instance;  // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedKlassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // Aligned size of the metaspace.
  static size_t _class_metaspace_size;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;     // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;        // true during genesis
  static bool _fully_initialized;    // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible using an error with preallocated backtrace;
  // otherwise return the given default error.
  static oop gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();   // compute vtable size of class Object

  static void genesis(TRAPS);               // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  // Narrow Oop encoding mode:
  // 0 - Use 32-bits oops without encoding when
  //     NarrowOopHeapBaseMin + heap_size < 4Gb
  // 1 - Use zero based compressed oops with encoding when
  //     NarrowOopHeapBaseMin + heap_size < 32Gb
  // 2 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    HeapBasedNarrowOop = 2
  };
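
  // Illustrative sketch of the mode selection described above (the actual
  // policy lives in the heap reservation code, e.g. preferred_heap_base()
  // in universe.cpp; the helper below is hypothetical):
  //
  //   NARROW_OOP_MODE mode_for(size_t heap_size) {
  //     if (NarrowOopHeapBaseMin + heap_size < 4*G)  return UnscaledNarrowOop;
  //     if (NarrowOopHeapBaseMin + heap_size < 32*G) return ZeroBasedNarrowOop;
  //     return HeapBasedNarrowOop;
  //   }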
  static char* preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode);
  static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static void  set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base = base;
  }
  static void  set_narrow_klass_base(address base) {
    assert(UseCompressedKlassPointers, "no compressed klass ptrs?");
    _narrow_klass._base = base;
  }
  static void  set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }
  static bool  reserve_metaspace_helper(bool with_base = false);
  static ReservedHeapSpace reserve_heap_metaspace(size_t heap_size, size_t alignment, bool& contiguous);

  static size_t class_metaspace_size() {
    return _class_metaspace_size;
  }
  static void set_class_metaspace_size(size_t metaspace_size) {
    _class_metaspace_size = metaspace_size;
  }

  // Debugging
  static int  _verify_count;       // number of verifies done
  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;

  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()   { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()   { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()   { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()    { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()  { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()   { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj() { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj() { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, err_msg("range check for type: %s", type2name(t)));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()    { return check_mirror(_int_mirror);    }
  static oop float_mirror()  { return check_mirror(_float_mirror);  }
  static oop double_mirror() { return check_mirror(_double_mirror); }
  static oop byte_mirror()   { return check_mirror(_byte_mirror);   }
  static oop bool_mirror()   { return check_mirror(_bool_mirror);   }
  static oop char_mirror()   { return check_mirror(_char_mirror);   }
  static oop long_mirror()   { return check_mirror(_long_mirror);   }
  static oop short_mirror()  { return check_mirror(_short_mirror);  }
  static oop void_mirror()   { return check_mirror(_void_mirror);   }
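
  // Illustrative use (a sketch): the accessors above and the table below both
  // yield the java.lang.Class mirror for a primitive type, e.g.
  //
  //   oop m1 = Universe::int_mirror();        // mirror for 'int'
  //   oop m2 = Universe::java_mirror(T_INT);  // same mirror, looked up by BasicType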
  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()              { return _main_thread_group; }
  static void set_main_thread_group(oop group)     { _main_thread_group = group;}

  static oop      system_thread_group()            { return _system_thread_group; }
  static void set_system_thread_group(oop group)   { _system_thread_group = group;}

  static objArrayOop    the_empty_class_klass_array() { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()  { return _the_array_interfaces_array;  }
  static oop            the_null_string()             { return _the_null_string;             }
  static oop            the_min_jint_string()         { return _the_min_jint_string;         }
  static Method*        finalizer_register_method()   { return _finalizer_register_cache->get_Method(); }
  static Method*        loader_addClass_method()      { return _loader_addClass_cache->get_Method(); }
  static ActiveMethodOopsCache* reflect_invoke_cache() { return _reflect_invoke_cache; }
  static oop            null_ptr_exception_instance()    { return _null_ptr_exception_instance;    }
  static oop            arithmetic_exception_instance()  { return _arithmetic_exception_instance;  }
  static oop            virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop            vm_exception()                   { return _vm_exception; }

  static Array<int>*     the_empty_int_array()    { return _the_empty_int_array;    }
  static Array<u2>*      the_empty_short_array()  { return _the_empty_short_array;  }
  static Array<Method*>* the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*  the_empty_klass_array()  { return _the_empty_klass_array;  }

  // OutOfMemoryError support. Returns an error with the required message. The returned error
  // may or may not have a backtrace. If error has a backtrace then the stack trace is already
  // filled in.
  static oop out_of_memory_error_java_heap()         { return gen_out_of_memory_error(_out_of_memory_error_java_heap);  }
  static oop out_of_memory_error_perm_gen()          { return gen_out_of_memory_error(_out_of_memory_error_perm_gen);   }
  static oop out_of_memory_error_array_size()        { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit() { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }
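
  // Illustrative use (a sketch; the real throw sites are in the runtime and
  // GC code): an allocation failure path can raise one of the errors above
  // without allocating, e.g.
  //
  //   THROW_OOP_0(Universe::out_of_memory_error_java_heap());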
  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()   { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()   { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()   { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()    { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()  { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()   { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  static address narrow_oop_base()                     { return  _narrow_oop._base; }
  static bool    is_narrow_oop_base(void* addr)        { return (narrow_oop_base() == (address)addr); }
  static int     narrow_oop_shift()                    { return  _narrow_oop._shift; }
  static bool    narrow_oop_use_implicit_null_checks() { return  _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedKlassPointers
  static address narrow_klass_base()                     { return  _narrow_klass._base; }
  static bool    is_narrow_klass_base(void* addr)        { return (narrow_klass_base() == (address)addr); }
  static int     narrow_klass_shift()                    { return  _narrow_klass._shift; }
  static bool    narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()         { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()              { return _narrow_ptrs_base; }

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void set_narrow_oop_shift(int shift) {
    _narrow_oop._shift = shift;
  }

  static void set_narrow_klass_shift(int shift) {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()     { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()     { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()     { return _bootstrapping; }
  static bool is_fully_initialized() { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(bool silent, VerifyOption option);
  static void verify(bool silent) {
    verify(silent, VerifyOption_Default /* option */);
  }
  static void verify() {
    verify(false /* silent */);
  }

  static int  verify_count() { return _verify_count; }
  // The default behavior is to call print_on() on gclog_or_tty.
  static void print();
  // The extended parameter determines which method on the heap will
  // be called: print_on() (extended == false) or print_extended_on()
  // (extended == true).
  static void print_on(outputStream* st, bool extended = false);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc() { print_heap_before_gc(gclog_or_tty); }
  static void print_heap_after_gc()  { print_heap_after_gc(gclog_or_tty); }
  static void print_heap_before_gc(outputStream* st, bool ignore_extended = false);
  static void print_heap_after_gc(outputStream* st, bool ignore_extended = false);
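
  // Illustrative use (a sketch): debugging code can call, e.g.,
  //
  //   Universe::verify();                            // default option, not silent
  //   Universe::print_on(tty, true /* extended */);  // extended heap printout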
  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void* non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()  PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()  PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits() PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask() PRODUCT_RETURN0;

  // Flushing and deoptimization
  static void flush_dependents_on(instanceKlassHandle dependee);
  static void flush_dependents_on(Handle call_site, Handle method_handle);
#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  static void flush_evol_dependents_on(instanceKlassHandle dependee);
#endif // HOTSWAP
  // Support for fullspeed debugging
  static void flush_dependents_on_method(methodHandle dependee);

  // Compiler support
  static int base_vtable_size() { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP