src/share/vm/memory/universe.hpp

author:      hseigel
date:        Thu, 15 Aug 2013 20:04:10 -0400
changeset:   5528:740e263c80c6
parent:      5508:85147f28faba
child:       5578:4c84d351cca9
permissions: -rw-r--r--

8003424: Enable Class Data Sharing for CompressedOops
8016729: ObjectAlignmentInBytes=16 now forces the use of heap based compressed oops
8005933: The -Xshare:auto option is ignored for -server
Summary: Move klass metaspace above the heap and support CDS with compressed klass ptrs.
Reviewed-by: coleenp, kvn, mgerdin, tschatzl, stefank

/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.
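//
// A minimal usage sketch (illustrative only), built from accessors declared
// later in this file:
//
//   CollectedHeap* heap       = Universe::heap();             // the chosen collected heap
//   Klass*         int_array  = Universe::intArrayKlassObj(); // a well-known array klass
//   oop            int_mirror = Universe::java_mirror(T_INT); // a primitive type mirror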

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*. This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*                _klass;
  int                   _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const           { return _klass; }
  int    method_idnum() const    { return _method_idnum; }

  Method* get_method();

  // Enhanced Class Redefinition support
  void classes_do(void f(Klass*)) {
    f(_klass);
  }

  // CDS support. Replace the klass in this cache with the archived version;
  // this could also be used for Enhanced Class Redefinition.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
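
// A minimal sketch (not part of this header) of how such a cache typically
// resolves the current Method*: only the Klass* and the method idnum are
// stored, so the lookup always yields the latest version even after
// RedefineClasses has replaced the Method*. The method_with_idnum() accessor
// used below is an assumption about the InstanceKlass interface.
//
//   Method* LatestMethodCache::get_method() {
//     if (klass() == NULL) return NULL;
//     InstanceKlass* ik = InstanceKlass::cast(klass());
//     Method* m = ik->method_with_idnum(method_idnum());
//     assert(m != NULL, "sanity check");
//     return m;
//   }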


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};
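
// A minimal sketch (illustrative only) of the encoding these fields support:
// a narrow (32-bit) pointer is decoded by shifting it left by _shift and
// adding _base; encoding does the inverse. The real VM code additionally
// handles NULL and alignment checks.
//
//   address decode(juint narrow_ptr, const NarrowPtrStruct& s) {
//     return s._base + ((uintptr_t)narrow_ptr << s._shift);
//   }
//
//   juint encode(address p, const NarrowPtrStruct& s) {
//     return (juint)((uintptr_t)(p - s._base) >> s._shift);
//   }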

enum VerifyOption {
  VerifyOption_Default = 0,

  // G1
  VerifyOption_G1UsePrevMarking = VerifyOption_Default,
  VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
  VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh. Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class Arguments;
  friend class SystemDictionary;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;           // Reference to the main thread group object
  static oop          _system_thread_group;         // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array; // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;             // A cache of "null" as a Java string
  static oop          _the_min_jint_string;         // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes

  // preallocated error objects (no backtrace)
  static oop          _out_of_memory_error_java_heap;
  static oop          _out_of_memory_error_metaspace;
  static oop          _out_of_memory_error_class_metaspace;
  static oop          _out_of_memory_error_array_size;
  static oop          _out_of_memory_error_gc_overhead_limit;

  static Array<int>*       _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*        _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*    _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>*   _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*    _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop          _null_ptr_exception_instance;    // preallocated exception object
  static oop          _arithmetic_exception_instance;  // preallocated exception object
  static oop          _virtual_machine_error_instance; // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop          _vm_exception;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedKlassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;     // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;        // true during genesis
  static bool _fully_initialized;    // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop  preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible, use an error with a
  // preallocated backtrace; otherwise return the given default error.
  static oop gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base = base;
  }
  static void set_narrow_klass_base(address base) {
    assert(UseCompressedKlassPointers, "no compressed klass ptrs?");
    _narrow_klass._base = base;
  }
  static void set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }

  // Debugging
  static int _verify_count;                           // number of verifies done
  // True during call to verify(). Should only be set/cleared in verify().
  static bool _verify_in_progress;

  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()                 { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()                  { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()                { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()                 { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj()               { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj()               { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, err_msg("range check for type: %s", type2name(t)));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()                   { return check_mirror(_int_mirror);    }
  static oop float_mirror()                 { return check_mirror(_float_mirror);  }
  static oop double_mirror()                { return check_mirror(_double_mirror); }
  static oop byte_mirror()                  { return check_mirror(_byte_mirror);   }
  static oop bool_mirror()                  { return check_mirror(_bool_mirror);   }
  static oop char_mirror()                  { return check_mirror(_char_mirror);   }
  static oop long_mirror()                  { return check_mirror(_long_mirror);   }
  static oop short_mirror()                 { return check_mirror(_short_mirror);  }
  static oop void_mirror()                  { return check_mirror(_void_mirror);   }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop      main_thread_group()                 { return _main_thread_group; }
  static void set_main_thread_group(oop group)        { _main_thread_group = group;}

  static oop      system_thread_group()               { return _system_thread_group; }
  static void set_system_thread_group(oop group)      { _system_thread_group = group;}

  static objArrayOop    the_empty_class_klass_array() { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()  { return _the_array_interfaces_array;  }
  static oop            the_null_string()             { return _the_null_string;             }
  static oop            the_min_jint_string()         { return _the_min_jint_string;         }

  static Method*      finalizer_register_method()     { return _finalizer_register_cache->get_method(); }
  static Method*      loader_addClass_method()        { return _loader_addClass_cache->get_method(); }

  static Method*      protection_domain_implies_method() { return _pd_implies_cache->get_method(); }

  static oop          null_ptr_exception_instance()    { return _null_ptr_exception_instance;    }
  static oop          arithmetic_exception_instance()  { return _arithmetic_exception_instance;  }
  static oop          virtual_machine_error_instance() { return _virtual_machine_error_instance; }
  static oop          vm_exception()                   { return _vm_exception;                   }

  static Array<int>*       the_empty_int_array()    { return _the_empty_int_array;    }
  static Array<u2>*        the_empty_short_array()  { return _the_empty_short_array;  }
  static Array<Method*>*   the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*    the_empty_klass_array()  { return _the_empty_klass_array;  }

  // OutOfMemoryError support. Returns an error with the required message. The
  // returned error may or may not have a backtrace. If the error has a
  // backtrace, the stack trace is already filled in.
  static oop out_of_memory_error_java_heap()          { return gen_out_of_memory_error(_out_of_memory_error_java_heap);          }
  static oop out_of_memory_error_metaspace()          { return gen_out_of_memory_error(_out_of_memory_error_metaspace);          }
  static oop out_of_memory_error_class_metaspace()    { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace);    }
  static oop out_of_memory_error_array_size()         { return gen_out_of_memory_error(_out_of_memory_error_array_size);         }
  static oop out_of_memory_error_gc_overhead_limit()  { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit);  }
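
  // A minimal sketch (illustrative only) of how these accessors are meant to
  // be used from VM code reporting an allocation failure; the THROW_OOP macro
  // from utilities/exceptions.hpp is assumed to be available at the call site:
  //
  //   if (result == NULL) {
  //     THROW_OOP(Universe::out_of_memory_error_java_heap());
  //   }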

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()           { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()           { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()           { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()            { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()          { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()           { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr()         { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr()         { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr()         { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  //   0 - Use 32-bit oops without encoding when
  //       NarrowOopHeapBaseMin + heap_size < 4GB
  //   1 - Use zero-based compressed oops with encoding when
  //       NarrowOopHeapBaseMin + heap_size < 32GB
  //   2 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    HeapBasedNarrowOop = 2
  };
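
  // A minimal sketch (illustrative only) of how the mode follows from the
  // thresholds described above; NarrowOopHeapBaseMin and the actual heap
  // placement logic live elsewhere in the VM:
  //
  //   NARROW_OOP_MODE mode_for(size_t heap_size) {
  //     if (NarrowOopHeapBaseMin + heap_size < 4*G)  return UnscaledNarrowOop;
  //     if (NarrowOopHeapBaseMin + heap_size < 32*G) return ZeroBasedNarrowOop;
  //     return HeapBasedNarrowOop;
  //   }
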
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char*    preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode);
  static char*    preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address  narrow_oop_base()                       { return  _narrow_oop._base; }
  static bool  is_narrow_oop_base(void* addr)             { return (narrow_oop_base() == (address)addr); }
  static int      narrow_oop_shift()                      { return  _narrow_oop._shift; }
  static bool     narrow_oop_use_implicit_null_checks()   { return  _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedKlassPointers
  static address  narrow_klass_base()                     { return  _narrow_klass._base; }
  static bool  is_narrow_klass_base(void* addr)           { return (narrow_klass_base() == (address)addr); }
  static int      narrow_klass_shift()                    { return  _narrow_klass._shift; }
  static bool     narrow_klass_use_implicit_null_checks() { return  _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()                 { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)         { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                      { return _narrow_ptrs_base; }

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void     set_narrow_oop_shift(int shift) {
    _narrow_oop._shift   = shift;
  }

  static void     set_narrow_klass_shift(int shift) {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc()         { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()             { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()             { return _heap_used_at_last_gc; }
  static void update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()                       { return _bootstrapping; }
  static bool is_fully_initialized()                   { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);
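
  // A minimal sketch (illustrative only) of a closure that could be passed to
  // oops_do(); the two do_oop overloads follow the usual OopClosure shape,
  // which is assumed here:
  //
  //   class CountRootsClosure : public OopClosure {
  //     int _count;
  //    public:
  //     CountRootsClosure() : _count(0) {}
  //     void do_oop(oop* p)       { if (*p != NULL) _count++; }
  //     void do_oop(narrowOop* p) { if (*p != 0)    _count++; }
  //     int  count() const        { return _count; }
  //   };
  //
  //   // CountRootsClosure cl; Universe::oops_do(&cl);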

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix, bool silent = VerifySilently);
  static void verify(const char* prefix, bool silent = VerifySilently) {
    verify(VerifyOption_Default, prefix, silent);
  }
  static void verify(bool silent = VerifySilently) {
    verify("", silent);
  }

  static int  verify_count()       { return _verify_count; }
  // The default behavior is to call print_on() on gclog_or_tty.
  static void print();
  // The extended parameter determines which method on the heap will
  // be called: print_on() (extended == false) or print_extended_on()
  // (extended == true).
  static void print_on(outputStream* st, bool extended = false);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc() { print_heap_before_gc(gclog_or_tty); }
  static void print_heap_after_gc()  { print_heap_after_gc(gclog_or_tty); }
  static void print_heap_before_gc(outputStream* st, bool ignore_extended = false);
  static void print_heap_after_gc(outputStream* st, bool ignore_extended = false);

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void*   non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()          PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()          PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits()         PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask()         PRODUCT_RETURN0;

  // Flushing and deoptimization
  static void flush_dependents_on(instanceKlassHandle dependee);
  static void flush_dependents_on(Handle call_site, Handle method_handle);
#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  static void flush_evol_dependents_on(instanceKlassHandle dependee);
#endif // HOTSWAP
  // Support for fullspeed debugging
  static void flush_dependents_on_method(methodHandle dependee);

  // Compiler support
  static int base_vtable_size() { return _base_vtable_size; }
};

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
 private:
  oop    _oop;
  size_t _bytesize;
  jint   _arena_id;

 public:
  DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
    _oop      = o;
    _bytesize = s;
    _arena_id = id;
  }

  ~DeferredObjAllocEvent() {
  }

  jint   arena_id() { return _arena_id; }
  size_t bytesize() { return _bytesize; }
  oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP
