src/share/vm/memory/universe.hpp

changeset:   6982:4c1b88a53c74
parent:      6134:9d15b81d5d1b
child:       6876:710a3c8b516e
child:       7164:fa6c442c59ee
author:      stefank
date:        Tue, 24 Jun 2014 16:20:15 +0200
permissions: -rw-r--r--

8046670: Make CMS metadata aware closures applicable for other collectors
Reviewed-by: ehelin, mgerdin

/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_UNIVERSE_HPP
#define SHARE_VM_MEMORY_UNIVERSE_HPP

#include "runtime/handles.hpp"
#include "utilities/array.hpp"
#include "utilities/growableArray.hpp"

// Universe is a name space holding known system classes and objects in the VM.
//
// Loaded classes are accessible through the SystemDictionary.
//
// The object heap is allocated and accessed through Universe, and various allocation
// support is provided. Allocation by the interpreter and compiled code is done inline
// and bails out to Scavenge::invoke_and_allocate.

class CollectedHeap;
class DeferredObjAllocEvent;


// A helper class for caching a Method* when the user of the cache
// only cares about the latest version of the Method*. This cache safely
// interacts with the RedefineClasses API.

class LatestMethodCache : public CHeapObj<mtClass> {
  // We save the Klass* and the idnum of Method* in order to get
  // the current cached Method*.
 private:
  Klass*  _klass;
  int     _method_idnum;

 public:
  LatestMethodCache()   { _klass = NULL; _method_idnum = -1; }
  ~LatestMethodCache()  { _klass = NULL; _method_idnum = -1; }

  void   init(Klass* k, Method* m);
  Klass* klass() const        { return _klass; }
  int    method_idnum() const { return _method_idnum; }

  Method* get_method();

  // Enhanced Class Redefinition support
  void classes_do(void f(Klass*)) {
    f(_klass);
  }

  // CDS support. Replace the klass in this with the archive version;
  // could use this for Enhanced Class Redefinition also.
  void serialize(SerializeClosure* f) {
    f->do_ptr((void**)&_klass);
  }
};
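
// Usage sketch (illustrative only; `ik` and `m` are placeholder names, not part
// of this header). A cache remembers the (Klass*, method idnum) pair at init
// time and maps it back to the current Method* on lookup, so the cached value
// stays valid across RedefineClasses:
//
//   LatestMethodCache* cache = new LatestMethodCache();
//   cache->init(ik, m);                     // ik: a Klass*, m: a Method* declared in ik
//   Method* current = cache->get_method();  // latest version of m, even post-redefinition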


// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
  // LogMinObjAlignmentInBytes/LogKlassAlignmentInBytes otherwise.
  int     _shift;
  // Generate code with implicit null checks for narrow ptrs.
  bool    _use_implicit_null_checks;
};
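
// Encode/decode relation implied by the fields above (illustrative sketch, not
// part of the original header):
//
//   narrow  = (uint32_t)((address - _base) >> _shift);
//   address = _base + ((uintptr_t)narrow << _shift);
//
// With _base == NULL and _shift == 0 (the unscaled case) the narrow value is
// the pointer itself, which must then fit in 32 bits.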

enum VerifyOption {
  VerifyOption_Default = 0,

  // G1
  VerifyOption_G1UsePrevMarking = VerifyOption_Default,
  VerifyOption_G1UseNextMarking = VerifyOption_G1UsePrevMarking + 1,
  VerifyOption_G1UseMarkWord    = VerifyOption_G1UseNextMarking + 1
};

class Universe: AllStatic {
  // Ugh.  Universe is much too friendly.
  friend class MarkSweep;
  friend class oopDesc;
  friend class ClassLoader;
  friend class Arguments;
  friend class SystemDictionary;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();
  friend bool  universe_post_init();

 private:
  // Known classes in the VM
  static Klass* _boolArrayKlassObj;
  static Klass* _byteArrayKlassObj;
  static Klass* _charArrayKlassObj;
  static Klass* _intArrayKlassObj;
  static Klass* _shortArrayKlassObj;
  static Klass* _longArrayKlassObj;
  static Klass* _singleArrayKlassObj;
  static Klass* _doubleArrayKlassObj;
  static Klass* _typeArrayKlassObjs[T_VOID+1];

  static Klass* _objectArrayKlassObj;

  // Known objects in the VM

  // Primitive objects
  static oop _int_mirror;
  static oop _float_mirror;
  static oop _double_mirror;
  static oop _byte_mirror;
  static oop _bool_mirror;
  static oop _char_mirror;
  static oop _long_mirror;
  static oop _short_mirror;
  static oop _void_mirror;

  static oop          _main_thread_group;           // Reference to the main thread group object
  static oop          _system_thread_group;         // Reference to the system thread group object

  static objArrayOop  _the_empty_class_klass_array; // Canonicalized obj array of type java.lang.Class
  static oop          _the_null_string;             // A cache of "null" as a Java string
  static oop          _the_min_jint_string;         // A cache of "-2147483648" as a Java string
  static LatestMethodCache* _finalizer_register_cache; // static method for registering finalizable objects
  static LatestMethodCache* _loader_addClass_cache;    // method for registering loaded classes in class loader vector
  static LatestMethodCache* _pd_implies_cache;         // method for checking protection domain attributes

  static Method* _throw_illegal_access_error;

  // preallocated error objects (no backtrace)
  static oop _out_of_memory_error_java_heap;
  static oop _out_of_memory_error_metaspace;
  static oop _out_of_memory_error_class_metaspace;
  static oop _out_of_memory_error_array_size;
  static oop _out_of_memory_error_gc_overhead_limit;

  static Array<int>*     _the_empty_int_array;    // Canonicalized int array
  static Array<u2>*      _the_empty_short_array;  // Canonicalized short array
  static Array<Klass*>*  _the_empty_klass_array;  // Canonicalized klass obj array
  static Array<Method*>* _the_empty_method_array; // Canonicalized method obj array

  static Array<Klass*>*  _the_array_interfaces_array;

  // array of preallocated error objects with backtrace
  static objArrayOop   _preallocated_out_of_memory_error_array;

  // number of preallocated error objects available for use
  static volatile jint _preallocated_out_of_memory_error_avail_count;

  static oop _null_ptr_exception_instance;     // preallocated exception object
  static oop _arithmetic_exception_instance;   // preallocated exception object
  static oop _virtual_machine_error_instance;  // preallocated exception object
  // The object used as an exception dummy when exceptions are thrown for
  // the vm thread.
  static oop _vm_exception;

  // The particular choice of collected heap.
  static CollectedHeap* _collectedHeap;

  static intptr_t _non_oop_bits;

  // For UseCompressedOops.
  static struct NarrowPtrStruct _narrow_oop;
  // For UseCompressedClassPointers.
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear
  debug_only(static int         _fullgc_alot_dummy_next;)

  // Compiler/dispatch support
  static int  _base_vtable_size;     // Java vtbl size of klass Object (in words)

  // Initialization
  static bool _bootstrapping;        // true during genesis
  static bool _fully_initialized;    // true after universe_init and initialize_vtables called

  // the array of preallocated errors with backtraces
  static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; }

  // generate an out of memory error; if possible using an error with preallocated backtrace;
  // otherwise return the given default error.
  static oop gen_out_of_memory_error(oop default_err);

  // Historic gc information
  static size_t _heap_capacity_at_last_gc;
  static size_t _heap_used_at_last_gc;

  static jint initialize_heap();
  static void initialize_basic_type_mirrors(TRAPS);
  static void fixup_mirrors(TRAPS);

  static void reinitialize_vtable_of(KlassHandle h_k, TRAPS);
  static void reinitialize_itables(TRAPS);
  static void compute_base_vtable_size();             // compute vtable size of class Object

  static void genesis(TRAPS);                         // Create the initial world

  // Mirrors for primitive classes (created eagerly)
  static oop check_mirror(oop m) {
    assert(m != NULL, "mirror not initialized");
    return m;
  }

  static void set_narrow_oop_base(address base) {
    assert(UseCompressedOops, "no compressed oops?");
    _narrow_oop._base = base;
  }
  static void set_narrow_klass_base(address base) {
    assert(UseCompressedClassPointers, "no compressed klass ptrs?");
    _narrow_klass._base = base;
  }
  static void set_narrow_oop_use_implicit_null_checks(bool use) {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }

  // Debugging
  static int _verify_count;          // number of verifies done
  // True during call to verify().  Should only be set/cleared in verify().
  static bool _verify_in_progress;

  static void compute_verify_oop_data();

 public:
  // Known classes in the VM
  static Klass* boolArrayKlassObj()   { return _boolArrayKlassObj;   }
  static Klass* byteArrayKlassObj()   { return _byteArrayKlassObj;   }
  static Klass* charArrayKlassObj()   { return _charArrayKlassObj;   }
  static Klass* intArrayKlassObj()    { return _intArrayKlassObj;    }
  static Klass* shortArrayKlassObj()  { return _shortArrayKlassObj;  }
  static Klass* longArrayKlassObj()   { return _longArrayKlassObj;   }
  static Klass* singleArrayKlassObj() { return _singleArrayKlassObj; }
  static Klass* doubleArrayKlassObj() { return _doubleArrayKlassObj; }

  static Klass* objectArrayKlassObj() {
    return _objectArrayKlassObj;
  }

  static Klass* typeArrayKlassObj(BasicType t) {
    assert((uint)t < T_VOID+1, err_msg("range check for type: %s", type2name(t)));
    assert(_typeArrayKlassObjs[t] != NULL, "domain check");
    return _typeArrayKlassObjs[t];
  }

  // Known objects in the VM
  static oop int_mirror()    { return check_mirror(_int_mirror);    }
  static oop float_mirror()  { return check_mirror(_float_mirror);  }
  static oop double_mirror() { return check_mirror(_double_mirror); }
  static oop byte_mirror()   { return check_mirror(_byte_mirror);   }
  static oop bool_mirror()   { return check_mirror(_bool_mirror);   }
  static oop char_mirror()   { return check_mirror(_char_mirror);   }
  static oop long_mirror()   { return check_mirror(_long_mirror);   }
  static oop short_mirror()  { return check_mirror(_short_mirror);  }
  static oop void_mirror()   { return check_mirror(_void_mirror);   }

  // table of same
  static oop _mirrors[T_VOID+1];

  static oop java_mirror(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    return check_mirror(_mirrors[t]);
  }
  static oop  main_thread_group()              { return _main_thread_group; }
  static void set_main_thread_group(oop group) { _main_thread_group = group; }

  static oop  system_thread_group()              { return _system_thread_group; }
  static void set_system_thread_group(oop group) { _system_thread_group = group; }

  static objArrayOop    the_empty_class_klass_array() { return _the_empty_class_klass_array; }
  static Array<Klass*>* the_array_interfaces_array()  { return _the_array_interfaces_array; }
  static oop            the_null_string()             { return _the_null_string; }
  static oop            the_min_jint_string()         { return _the_min_jint_string; }

  static Method* finalizer_register_method() { return _finalizer_register_cache->get_method(); }
  static Method* loader_addClass_method()    { return _loader_addClass_cache->get_method(); }

  static Method* protection_domain_implies_method() { return _pd_implies_cache->get_method(); }

  static oop null_ptr_exception_instance()      { return _null_ptr_exception_instance; }
  static oop arithmetic_exception_instance()    { return _arithmetic_exception_instance; }
  static oop virtual_machine_error_instance()   { return _virtual_machine_error_instance; }
  static oop vm_exception()                     { return _vm_exception; }
  static Method* throw_illegal_access_error()   { return _throw_illegal_access_error; }

  static Array<int>*     the_empty_int_array()    { return _the_empty_int_array; }
  static Array<u2>*      the_empty_short_array()  { return _the_empty_short_array; }
  static Array<Method*>* the_empty_method_array() { return _the_empty_method_array; }
  static Array<Klass*>*  the_empty_klass_array()  { return _the_empty_klass_array; }

  // OutOfMemoryError support. Returns an error with the required message. The returned error
  // may or may not have a backtrace. If error has a backtrace then the stack trace is already
  // filled in.
  static oop out_of_memory_error_java_heap()         { return gen_out_of_memory_error(_out_of_memory_error_java_heap); }
  static oop out_of_memory_error_metaspace()         { return gen_out_of_memory_error(_out_of_memory_error_metaspace); }
  static oop out_of_memory_error_class_metaspace()   { return gen_out_of_memory_error(_out_of_memory_error_class_metaspace); }
  static oop out_of_memory_error_array_size()        { return gen_out_of_memory_error(_out_of_memory_error_array_size); }
  static oop out_of_memory_error_gc_overhead_limit() { return gen_out_of_memory_error(_out_of_memory_error_gc_overhead_limit); }

  // Accessors needed for fast allocation
  static Klass** boolArrayKlassObj_addr()   { return &_boolArrayKlassObj;   }
  static Klass** byteArrayKlassObj_addr()   { return &_byteArrayKlassObj;   }
  static Klass** charArrayKlassObj_addr()   { return &_charArrayKlassObj;   }
  static Klass** intArrayKlassObj_addr()    { return &_intArrayKlassObj;    }
  static Klass** shortArrayKlassObj_addr()  { return &_shortArrayKlassObj;  }
  static Klass** longArrayKlassObj_addr()   { return &_longArrayKlassObj;   }
  static Klass** singleArrayKlassObj_addr() { return &_singleArrayKlassObj; }
  static Klass** doubleArrayKlassObj_addr() { return &_doubleArrayKlassObj; }
  static Klass** objectArrayKlassObj_addr() { return &_objectArrayKlassObj; }

  // The particular choice of collected heap.
  static CollectedHeap* heap() { return _collectedHeap; }

  // For UseCompressedOops
  // Narrow Oop encoding mode:
  //   0 - Use 32-bit oops without encoding when
  //       NarrowOopHeapBaseMin + heap_size < 4Gb
  //   1 - Use zero based compressed oops with encoding when
  //       NarrowOopHeapBaseMin + heap_size < 32Gb
  //   2 - Use compressed oops with heap base + encoding.
  enum NARROW_OOP_MODE {
    UnscaledNarrowOop  = 0,
    ZeroBasedNarrowOop = 1,
    HeapBasedNarrowOop = 2
  };
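  // Worked example (illustrative, not from the original header): with 8-byte
  // object alignment the oop shift is 3, so in ZeroBasedNarrowOop mode
  // (narrow_oop_base() == NULL) an oop at 0x0000000100000008 is stored as the
  // 32-bit value 0x20000001 and decoded as (address)((uintptr_t)0x20000001 << 3).
  // HeapBasedNarrowOop additionally adds narrow_oop_base() after the shift;
  // UnscaledNarrowOop stores the pointer bits directly.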
  static NARROW_OOP_MODE narrow_oop_mode();
  static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
  static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
  static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
  static address narrow_oop_base()                       { return _narrow_oop._base; }
  static bool    is_narrow_oop_base(void* addr)          { return (narrow_oop_base() == (address)addr); }
  static int     narrow_oop_shift()                      { return _narrow_oop._shift; }
  static bool    narrow_oop_use_implicit_null_checks()   { return _narrow_oop._use_implicit_null_checks; }

  // For UseCompressedClassPointers
  static address narrow_klass_base()                     { return _narrow_klass._base; }
  static bool    is_narrow_klass_base(void* addr)        { return (narrow_klass_base() == (address)addr); }
  static int     narrow_klass_shift()                    { return _narrow_klass._shift; }
  static bool    narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }

  static address* narrow_ptrs_base_addr()                { return &_narrow_ptrs_base; }
  static void     set_narrow_ptrs_base(address a)        { _narrow_ptrs_base = a; }
  static address  narrow_ptrs_base()                     { return _narrow_ptrs_base; }

  // this is set in vm_version on sparc (and then reset in universe afaict)
  static void set_narrow_oop_shift(int shift) {
    _narrow_oop._shift = shift;
  }

  static void set_narrow_klass_shift(int shift) {
    assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs");
    _narrow_klass._shift = shift;
  }

  // Reserve Java heap and determine CompressedOops mode
  static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);

  // Historic gc information
  static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; }
  static size_t get_heap_free_at_last_gc()     { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
  static size_t get_heap_used_at_last_gc()     { return _heap_used_at_last_gc; }
  static void   update_heap_info_at_gc();

  // Testers
  static bool is_bootstrapping()      { return _bootstrapping; }
  static bool is_fully_initialized()  { return _fully_initialized; }

  static inline bool element_type_should_be_aligned(BasicType type);
  static inline bool field_type_should_be_aligned(BasicType type);
  static bool        on_page_boundary(void* addr);
  static bool        should_fill_in_stack_trace(Handle throwable);
  static void        check_alignment(uintx size, uintx alignment, const char* name);

  // Finalizer support.
  static void run_finalizers_on_exit();

  // Iteration

  // Apply "f" to the addresses of all the direct heap pointers maintained
  // as static fields of "Universe".
  static void oops_do(OopClosure* f, bool do_all = false);

  // CDS support
  static void serialize(SerializeClosure* f, bool do_all = false);

  // Apply "f" to all klasses for basic types (classes not present in
  // SystemDictionary).
  static void basic_type_classes_do(void f(Klass*));

  // For sharing -- fill in a list of known vtable pointers.
  static void init_self_patching_vtbl_list(void** list, int count);

  // Debugging
  static bool verify_in_progress() { return _verify_in_progress; }
  static void verify(VerifyOption option, const char* prefix, bool silent = VerifySilently);
  static void verify(const char* prefix, bool silent = VerifySilently) {
    verify(VerifyOption_Default, prefix, silent);
  }
  static void verify(bool silent = VerifySilently) {
    verify("", silent);
  }

  static int  verify_count() { return _verify_count; }
  // The default behavior is to call print_on() on gclog_or_tty.
  static void print();
  // The extended parameter determines which method on the heap will
  // be called: print_on() (extended == false) or print_extended_on()
  // (extended == true).
  static void print_on(outputStream* st, bool extended = false);
  static void print_heap_at_SIGBREAK();
  static void print_heap_before_gc() { print_heap_before_gc(gclog_or_tty); }
  static void print_heap_after_gc()  { print_heap_after_gc(gclog_or_tty); }
  static void print_heap_before_gc(outputStream* st, bool ignore_extended = false);
  static void print_heap_after_gc(outputStream* st, bool ignore_extended = false);

  // Change the number of dummy objects kept reachable by the full gc dummy
  // array; this should trigger relocation in a sliding compaction collector.
  debug_only(static bool release_fullgc_alot_dummy();)
  // The non-oop pattern (see compiledIC.hpp, etc)
  static void* non_oop_word();

  // Oop verification (see MacroAssembler::verify_oop)
  static uintptr_t verify_oop_mask()  PRODUCT_RETURN0;
  static uintptr_t verify_oop_bits()  PRODUCT_RETURN0;
  static uintptr_t verify_mark_bits() PRODUCT_RETURN0;
  static uintptr_t verify_mark_mask() PRODUCT_RETURN0;

  // Flushing and deoptimization
  static void flush_dependents_on(instanceKlassHandle dependee);
  static void flush_dependents_on(Handle call_site, Handle method_handle);
#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  static void flush_evol_dependents_on(instanceKlassHandle dependee);
#endif // HOTSWAP
  // Support for fullspeed debugging
  static void flush_dependents_on_method(methodHandle dependee);

  // Compiler support
  static int base_vtable_size() { return _base_vtable_size; }
};
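
// Illustrative call sites (sketch only, not part of the original header). The
// static accessors above are how runtime code typically reaches the heap and
// the well-known metadata, e.g.:
//
//   CollectedHeap* heap        = Universe::heap();
//   Klass*         int_array_k = Universe::intArrayKlassObj();
//   oop            int_mirror  = Universe::java_mirror(T_INT);
//   if (Universe::is_fully_initialized()) Universe::verify("post-init ");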

class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
  private:
    oop    _oop;
    size_t _bytesize;
    jint   _arena_id;

  public:
    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
      _oop      = o;
      _bytesize = s;
      _arena_id = id;
    }

    ~DeferredObjAllocEvent() {
    }

    jint   arena_id() { return _arena_id; }
    size_t bytesize() { return _bytesize; }
    oop    get_oop()  { return _oop; }
};

#endif // SHARE_VM_MEMORY_UNIVERSE_HPP
