src/share/vm/gc_implementation/g1/g1CollectedHeap.inline.hpp

Thu, 07 Apr 2011 09:53:20 -0700

author
johnc
date
Thu, 07 Apr 2011 09:53:20 -0700
changeset 2781
e1162778c1c8
parent 2715
abdfc822206f
child 2963
c3f1170908be
permissions
-rw-r--r--

7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes

ysr@777 1 /*
tonyp@2454 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
ysr@777 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
ysr@777 4 *
ysr@777 5 * This code is free software; you can redistribute it and/or modify it
ysr@777 6 * under the terms of the GNU General Public License version 2 only, as
ysr@777 7 * published by the Free Software Foundation.
ysr@777 8 *
ysr@777 9 * This code is distributed in the hope that it will be useful, but WITHOUT
ysr@777 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
ysr@777 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
ysr@777 12 * version 2 for more details (a copy is included in the LICENSE file that
ysr@777 13 * accompanied this code).
ysr@777 14 *
ysr@777 15 * You should have received a copy of the GNU General Public License version
ysr@777 16 * 2 along with this work; if not, write to the Free Software Foundation,
ysr@777 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
ysr@777 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
ysr@777 22 *
ysr@777 23 */
ysr@777 24
stefank@2314 25 #ifndef SHARE_VM_GC_IMPLEMENTATION_G1_G1COLLECTEDHEAP_INLINE_HPP
stefank@2314 26 #define SHARE_VM_GC_IMPLEMENTATION_G1_G1COLLECTEDHEAP_INLINE_HPP
stefank@2314 27
stefank@2314 28 #include "gc_implementation/g1/concurrentMark.hpp"
stefank@2314 29 #include "gc_implementation/g1/g1CollectedHeap.hpp"
tonyp@2715 30 #include "gc_implementation/g1/g1AllocRegion.inline.hpp"
tonyp@2315 31 #include "gc_implementation/g1/g1CollectorPolicy.hpp"
tonyp@2469 32 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
stefank@2314 33 #include "utilities/taskqueue.hpp"
stefank@2314 34
ysr@777 35 // Inline functions for G1CollectedHeap
ysr@777 36
ysr@777 37 inline HeapRegion*
ysr@777 38 G1CollectedHeap::heap_region_containing(const void* addr) const {
ysr@777 39 HeapRegion* hr = _hrs->addr_to_region(addr);
ysr@777 40 // hr can be null if addr in perm_gen
ysr@777 41 if (hr != NULL && hr->continuesHumongous()) {
ysr@777 42 hr = hr->humongous_start_region();
ysr@777 43 }
ysr@777 44 return hr;
ysr@777 45 }
ysr@777 46
ysr@777 47 inline HeapRegion*
ysr@777 48 G1CollectedHeap::heap_region_containing_raw(const void* addr) const {
tonyp@961 49 assert(_g1_reserved.contains(addr), "invariant");
johnc@1187 50 size_t index = pointer_delta(addr, _g1_reserved.start(), 1)
johnc@1187 51 >> HeapRegion::LogOfHRGrainBytes;
johnc@1187 52
tonyp@961 53 HeapRegion* res = _hrs->at(index);
tonyp@961 54 assert(res == _hrs->addr_to_region(addr), "sanity");
ysr@777 55 return res;
ysr@777 56 }
ysr@777 57
ysr@777 58 inline bool G1CollectedHeap::obj_in_cs(oop obj) {
ysr@777 59 HeapRegion* r = _hrs->addr_to_region(obj);
ysr@777 60 return r != NULL && r->in_collection_set();
ysr@777 61 }
ysr@777 62
// Fast-path mutator allocation of a non-humongous block of word_size
// words. First tries a lock-free allocation in the current mutator
// alloc region; if that fails, falls back to attempt_allocation_slow(),
// which may take locks, retire/replace the alloc region, or trigger a
// GC (recording the pre-GC count in *gc_count_before_ret for the
// caller). On success the block is pre-dirtied via dirty_young_block().
inline HeapWord*
G1CollectedHeap::attempt_allocation(size_t word_size,
                                    unsigned int* gc_count_before_ret) {
  assert_heap_not_locked_and_not_at_safepoint();
  assert(!isHumongous(word_size), "attempt_allocation() should not "
         "be called for humongous allocation requests");

  HeapWord* result = _mutator_alloc_region.attempt_allocation(word_size,
                                                      false /* bot_updates */);
  if (result == NULL) {
    // Fast path failed; take the slow path.
    result = attempt_allocation_slow(word_size, gc_count_before_ret);
  }
  // Whatever path we took, the Heap_lock must have been released by now.
  assert_heap_not_locked();
  if (result != NULL) {
    // The block was allocated in a young region; dirty its cards so the
    // post-write barrier never enqueues updates for objects in it.
    dirty_young_block(result, word_size);
  }
  return result;
}
tonyp@2454 81
// It dirties the cards that cover the block so that the post
// write barrier never queues anything when updating objects on this
// block. It is assumed (and in fact we assert) that the block
// belongs to a young region.
inline void
G1CollectedHeap::dirty_young_block(HeapWord* start, size_t word_size) {
  assert_heap_not_locked();

  // Assign the containing region to containing_hr so that we don't
  // have to keep calling heap_region_containing_raw() in the
  // asserts below.
  DEBUG_ONLY(HeapRegion* containing_hr = heap_region_containing_raw(start);)
  assert(containing_hr != NULL && start != NULL && word_size > 0,
         "pre-condition");
  assert(containing_hr->is_in(start), "it should contain start");
  assert(containing_hr->is_young(), "it should be young");
  assert(!containing_hr->isHumongous(), "it should not be humongous");

  HeapWord* end = start + word_size;
  assert(containing_hr->is_in(end - 1), "it should also contain end - 1");

  // Dirty every card spanned by [start, end) in a single call.
  MemRegion mr(start, end);
  ((CardTableModRefBS*)_g1h->barrier_set())->dirty(mr);
}
ysr@777 106
jcoomes@2064 107 inline RefToScanQueue* G1CollectedHeap::task_queue(int i) const {
ysr@777 108 return _task_queues->queue(i);
ysr@777 109 }
ysr@777 110
ysr@777 111 inline bool G1CollectedHeap::isMarkedPrev(oop obj) const {
ysr@777 112 return _cm->prevMarkBitMap()->isMarked((HeapWord *)obj);
ysr@777 113 }
ysr@777 114
ysr@777 115 inline bool G1CollectedHeap::isMarkedNext(oop obj) const {
ysr@777 116 return _cm->nextMarkBitMap()->isMarked((HeapWord *)obj);
ysr@777 117 }
stefank@2314 118
stefank@2314 119 #endif // SHARE_VM_GC_IMPLEMENTATION_G1_G1COLLECTEDHEAP_INLINE_HPP

mercurial