// NOTE(review): this chunk is side-by-side diff residue — each source line
// appears twice (old-revision copy first, new-revision copy second), with the
// original file's line number fused onto the text and a trailing '|' marker.
// Text below is kept verbatim.
//
// Default stub for heaps without TLAB support: unconditionally fails the
// guarantee() and returns 0 (never reached at runtime). Identical in both
// revisions of the diff.
413 // collection or expansion activity. |
413 // collection or expansion activity. |
414 virtual size_t unsafe_max_tlab_alloc(Thread *thr) const { |
414 virtual size_t unsafe_max_tlab_alloc(Thread *thr) const { |
415 guarantee(false, "thread-local allocation buffers not supported"); |
415 guarantee(false, "thread-local allocation buffers not supported"); |
416 return 0; |
416 return 0; |
417 } |
417 } |
// NOTE(review): diff residue — old/new line pairs; a lone blank line is an
// empty left column, i.e. text present only in the new revision. The new
// revision extends this comment to spell out the compiler's obligation: if
// store-barrier elision is granted, the compiler promises to call
// defer_store_barrier() on slow-path allocations whose initializing barriers
// were elided. The pure-virtual declaration itself is unchanged.

418 |
418 // Can a compiler initialize a new object without store barriers? |
419 // Can a compiler initialize a new object without store barriers? |
419 // This permission only extends from the creation of a new object |
420 // This permission only extends from the creation of a new object |
420 // via a TLAB up to the first subsequent safepoint. |
421 // via a TLAB up to the first subsequent safepoint. If such permission |

422 // is granted for this heap type, the compiler promises to call |

423 // defer_store_barrier() below on any slow path allocation of |

424 // a new object for which such initializing store barriers will |

425 // have been elided. |
421 virtual bool can_elide_tlab_store_barriers() const = 0; |
426 virtual bool can_elide_tlab_store_barriers() const = 0; |
// NOTE(review): diff residue — old/new line pairs. The substantive change in
// this hunk is the declaration on the final pair: the old revision declared
// `virtual oop new_store_barrier(oop new_obj)`; the new revision renames it to
// `defer_store_barrier` and adds a `JavaThread* thread` parameter. Callers of
// the old name would need updating — presumably handled elsewhere in the full
// patch; not visible in this chunk.
422 |
427 |
423 // If a compiler is eliding store barriers for TLAB-allocated objects, |
428 // If a compiler is eliding store barriers for TLAB-allocated objects, |
424 // there is probably a corresponding slow path which can produce |
429 // there is probably a corresponding slow path which can produce |
425 // an object allocated anywhere. The compiler's runtime support |
430 // an object allocated anywhere. The compiler's runtime support |
426 // promises to call this function on such a slow-path-allocated |
431 // promises to call this function on such a slow-path-allocated |
427 // object before performing initializations that have elided |
432 // object before performing initializations that have elided |
428 // store barriers. Returns new_obj, or maybe a safer copy thereof. |
433 // store barriers. Returns new_obj, or maybe a safer copy thereof. |
429 virtual oop new_store_barrier(oop new_obj); |
434 virtual oop defer_store_barrier(JavaThread* thread, oop new_obj); |
// NOTE(review): diff residue — every left column here is blank, so this whole
// hunk is new-revision-only text. It adds two declarations: a pure-virtual
// query `can_elide_initializing_store_barrier(oop)` (true => no initializing
// store barrier needed for an object at that address), and
// `flush_deferred_store_barrier(JavaThread*)`, which pushes a barrier
// previously deferred via defer_store_barrier() out to the remembered set.

435 |

436 // Answers whether an initializing store to a new object currently |

437 // allocated at the given address doesn't need a (deferred) store |

438 // barrier. Returns "true" if it doesn't need an initializing |

439 // store barrier; answers "false" if it does. |

440 virtual bool can_elide_initializing_store_barrier(oop new_obj) = 0; |

441 |

442 // If the CollectedHeap was asked to defer a store barrier above, |

443 // this informs it to flush such a deferred store barrier to the |

444 // remembered set. |

445 virtual void flush_deferred_store_barrier(JavaThread* thread); |
// NOTE(review): diff residue — old/new line pairs, textually identical apart
// from the shifted new-revision line numbers (no code change in this hunk).
// Pure-virtual query: may the compiler skip the store barrier when the value
// being stored is a permanent oop (x->is_perm() is true)?
430 |
446 |
431 // Can a compiler elide a store barrier when it writes |
447 // Can a compiler elide a store barrier when it writes |
432 // a permanent oop into the heap? Applies when the compiler |
448 // a permanent oop into the heap? Applies when the compiler |
433 // is storing x to the heap, where x->is_perm() is true. |
449 // is storing x to the heap, where x->is_perm() is true. |
434 virtual bool can_elide_permanent_oop_store_barriers() const = 0; |
450 virtual bool can_elide_permanent_oop_store_barriers() const = 0; |