Thu, 22 Jul 2010 10:27:41 -0400
6962589: remove breadth first scanning code from parallel gc
Summary: Remove the breadth-first copying order from ParallelScavenge and use depth-first by default.
Reviewed-by: jcoomes, ysr, johnc
1 /*
2 * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 # include "incls/_precompiled.incl"
26 # include "incls/_constMethodKlass.cpp.incl"
29 klassOop constMethodKlass::create_klass(TRAPS) {
30 constMethodKlass o;
31 KlassHandle h_this_klass(THREAD, Universe::klassKlassObj());
32 KlassHandle k = base_create_klass(h_this_klass, header_size(),
33 o.vtbl_value(), CHECK_NULL);
34 // Make sure size calculation is right
35 assert(k()->size() == align_object_size(header_size()),
36 "wrong size for object");
37 //java_lang_Class::create_mirror(k, CHECK_NULL); // Allocate mirror
38 return k();
39 }
42 int constMethodKlass::oop_size(oop obj) const {
43 assert(obj->is_constMethod(), "must be constMethod oop");
44 return constMethodOop(obj)->object_size();
45 }
47 bool constMethodKlass::oop_is_parsable(oop obj) const {
48 assert(obj->is_constMethod(), "must be constMethod oop");
49 return constMethodOop(obj)->object_is_parsable();
50 }
52 bool constMethodKlass::oop_is_conc_safe(oop obj) const {
53 assert(obj->is_constMethod(), "must be constMethod oop");
54 return constMethodOop(obj)->is_conc_safe();
55 }
// Allocate a constMethodOop in the permanent generation, sized to hold the
// bytecodes plus the inlined compressed-line-number, local-variable and
// checked-exceptions tables.  The object is fully initialized without an
// intervening safepoint so GC never observes a half-built oop.
constMethodOop constMethodKlass::allocate(int byte_code_size,
                                          int compressed_line_number_size,
                                          int localvariable_table_length,
                                          int checked_exceptions_length,
                                          bool is_conc_safe,
                                          TRAPS) {

  // Total object size in words, including all the inlined tables.
  int size = constMethodOopDesc::object_size(byte_code_size,
                                             compressed_line_number_size,
                                             localvariable_table_length,
                                             checked_exceptions_length);
  KlassHandle h_k(THREAD, as_klassOop());
  constMethodOop cm = (constMethodOop)
    CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL);
  assert(!cm->is_parsable(), "Not yet safely parsable");
  // From here to the end of the function no safepoint may occur: the
  // verifier asserts this, guaranteeing GC never sees the partly-set fields.
  No_Safepoint_Verifier no_safepoint;
  cm->set_interpreter_kind(Interpreter::invalid);
  cm->init_fingerprint();
  cm->set_method(NULL);
  cm->set_stackmap_data(NULL);
  cm->set_exception_table(NULL);
  cm->set_code_size(byte_code_size);
  cm->set_constMethod_size(size);
  cm->set_inlined_tables_length(checked_exceptions_length,
                                compressed_line_number_size,
                                localvariable_table_length);
  assert(cm->size() == size, "wrong size for object");
  cm->set_is_conc_safe(is_conc_safe);
  // Flag the oop as partially loaded until class loading fills in the real
  // method/exception_table fields (see oop_set_partially_loaded).
  cm->set_partially_loaded();
  assert(cm->is_parsable(), "Is safely parsable by gc");
  return cm;
}
90 void constMethodKlass::oop_follow_contents(oop obj) {
91 assert (obj->is_constMethod(), "object must be constMethod");
92 constMethodOop cm = constMethodOop(obj);
93 MarkSweep::mark_and_push(cm->adr_method());
94 MarkSweep::mark_and_push(cm->adr_stackmap_data());
95 MarkSweep::mark_and_push(cm->adr_exception_table());
96 // Performance tweak: We skip iterating over the klass pointer since we
97 // know that Universe::constMethodKlassObj never moves.
98 }
100 #ifndef SERIALGC
101 void constMethodKlass::oop_follow_contents(ParCompactionManager* cm,
102 oop obj) {
103 assert (obj->is_constMethod(), "object must be constMethod");
104 constMethodOop cm_oop = constMethodOop(obj);
105 PSParallelCompact::mark_and_push(cm, cm_oop->adr_method());
106 PSParallelCompact::mark_and_push(cm, cm_oop->adr_stackmap_data());
107 PSParallelCompact::mark_and_push(cm, cm_oop->adr_exception_table());
108 // Performance tweak: We skip iterating over the klass pointer since we
109 // know that Universe::constMethodKlassObj never moves.
110 }
111 #endif // SERIALGC
113 int constMethodKlass::oop_oop_iterate(oop obj, OopClosure* blk) {
114 assert (obj->is_constMethod(), "object must be constMethod");
115 constMethodOop cm = constMethodOop(obj);
116 blk->do_oop(cm->adr_method());
117 blk->do_oop(cm->adr_stackmap_data());
118 blk->do_oop(cm->adr_exception_table());
119 // Get size before changing pointers.
120 // Don't call size() or oop_size() since that is a virtual call.
121 int size = cm->object_size();
122 return size;
123 }
126 int constMethodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) {
127 assert (obj->is_constMethod(), "object must be constMethod");
128 constMethodOop cm = constMethodOop(obj);
129 oop* adr;
130 adr = cm->adr_method();
131 if (mr.contains(adr)) blk->do_oop(adr);
132 adr = cm->adr_stackmap_data();
133 if (mr.contains(adr)) blk->do_oop(adr);
134 adr = cm->adr_exception_table();
135 if (mr.contains(adr)) blk->do_oop(adr);
136 // Get size before changing pointers.
137 // Don't call size() or oop_size() since that is a virtual call.
138 int size = cm->object_size();
139 // Performance tweak: We skip iterating over the klass pointer since we
140 // know that Universe::constMethodKlassObj never moves.
141 return size;
142 }
145 int constMethodKlass::oop_adjust_pointers(oop obj) {
146 assert(obj->is_constMethod(), "should be constMethod");
147 constMethodOop cm = constMethodOop(obj);
148 MarkSweep::adjust_pointer(cm->adr_method());
149 MarkSweep::adjust_pointer(cm->adr_stackmap_data());
150 MarkSweep::adjust_pointer(cm->adr_exception_table());
151 // Get size before changing pointers.
152 // Don't call size() or oop_size() since that is a virtual call.
153 int size = cm->object_size();
154 // Performance tweak: We skip iterating over the klass pointer since we
155 // know that Universe::constMethodKlassObj never moves.
156 return size;
157 }
159 #ifndef SERIALGC
// Intentionally empty apart from the sanity check: a young-gen scavenge
// pushes nothing from a constMethod.  NOTE(review): presumably because
// constMethod oops and the oops they reference live in the perm gen and
// are not scavenged -- confirm against the collector's perm-gen handling.
void constMethodKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  assert(obj->is_constMethod(), "should be constMethod");
}
164 int constMethodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
165 assert(obj->is_constMethod(), "should be constMethod");
166 constMethodOop cm_oop = constMethodOop(obj);
167 #if 0
168 PSParallelCompact::adjust_pointer(cm_oop->adr_method());
169 PSParallelCompact::adjust_pointer(cm_oop->adr_exception_table());
170 PSParallelCompact::adjust_pointer(cm_oop->adr_stackmap_data());
171 #endif
172 oop* const beg_oop = cm_oop->oop_block_beg();
173 oop* const end_oop = cm_oop->oop_block_end();
174 for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
175 PSParallelCompact::adjust_pointer(cur_oop);
176 }
177 return cm_oop->object_size();
178 }
180 int constMethodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
181 HeapWord* beg_addr,
182 HeapWord* end_addr) {
183 assert(obj->is_constMethod(), "should be constMethod");
184 constMethodOop cm_oop = constMethodOop(obj);
186 oop* const beg_oop = MAX2((oop*)beg_addr, cm_oop->oop_block_beg());
187 oop* const end_oop = MIN2((oop*)end_addr, cm_oop->oop_block_end());
188 for (oop* cur_oop = beg_oop; cur_oop < end_oop; ++cur_oop) {
189 PSParallelCompact::adjust_pointer(cur_oop);
190 }
192 return cm_oop->object_size();
193 }
194 #endif // SERIALGC
196 #ifndef PRODUCT
198 // Printing
200 void constMethodKlass::oop_print_on(oop obj, outputStream* st) {
201 ResourceMark rm;
202 assert(obj->is_constMethod(), "must be constMethod");
203 Klass::oop_print_on(obj, st);
204 constMethodOop m = constMethodOop(obj);
205 st->print(" - method: " INTPTR_FORMAT " ", (address)m->method());
206 m->method()->print_value_on(st); st->cr();
207 st->print(" - exceptions: " INTPTR_FORMAT "\n", (address)m->exception_table());
208 if (m->has_stackmap_table()) {
209 st->print(" - stackmap data: ");
210 m->stackmap_data()->print_value_on(st);
211 st->cr();
212 }
213 }
215 #endif //PRODUCT
217 // Short version of printing constMethodOop - just print the name of the
218 // method it belongs to.
219 void constMethodKlass::oop_print_value_on(oop obj, outputStream* st) {
220 assert(obj->is_constMethod(), "must be constMethod");
221 constMethodOop m = constMethodOop(obj);
222 st->print(" const part of method " );
223 m->method()->print_value_on(st);
224 }
226 const char* constMethodKlass::internal_name() const {
227 return "{constMethod}";
228 }
231 // Verification
// Deep verification of a constMethod oop: checks perm-gen placement of the
// object and its oop fields, then validates the layout of the variable-sized
// tables (compressed line numbers, checked exceptions, local variables)
// packed after the bytecodes inside the object.
void constMethodKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  guarantee(obj->is_constMethod(), "object must be constMethod");
  constMethodOop m = constMethodOop(obj);
  guarantee(m->is_perm(), "should be in permspace");

  // Verification can occur during oop construction before the method or
  // other fields have been initialized.
  if (!obj->partially_loaded()) {
    guarantee(m->method()->is_perm(), "should be in permspace");
    guarantee(m->method()->is_method(), "should be method");
    // stackmap_data is optional and may legitimately be NULL.
    typeArrayOop stackmap_data = m->stackmap_data();
    guarantee(stackmap_data == NULL ||
              stackmap_data->is_perm(), "should be in permspace");
    guarantee(m->exception_table()->is_perm(), "should be in permspace");
    guarantee(m->exception_table()->is_typeArray(), "should be type array");

    // One-past-the-end of the object; every embedded table must fit below it.
    address m_end = (address)((oop*) m + m->size());
    address compressed_table_start = m->code_end();
    guarantee(compressed_table_start <= m_end, "invalid method layout");
    address compressed_table_end = compressed_table_start;
    // Verify line number table
    if (m->has_linenumber_table()) {
      CompressedLineNumberReadStream stream(m->compressed_linenumber_table());
      // Decode every (bci, line) pair; each bci must lie within the bytecodes.
      while (stream.read_pair()) {
        guarantee(stream.bci() >= 0 && stream.bci() <= m->code_size(), "invalid bci in line number table");
      }
      // stream.position() is the number of bytes consumed by the table.
      compressed_table_end += stream.position();
    }
    guarantee(compressed_table_end <= m_end, "invalid method layout");
    // Verify checked exceptions and local variable tables
    if (m->has_checked_exceptions()) {
      u2* addr = m->checked_exceptions_length_addr();
      guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
    }
    if (m->has_localvariable_table()) {
      u2* addr = m->localvariable_table_length_addr();
      guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
    }
    // Check compressed_table_end relative to uncompressed_table_start:
    // the local-variable table (if present) starts lowest, then the
    // checked-exceptions table, else the uncompressed region is empty.
    u2* uncompressed_table_start;
    if (m->has_localvariable_table()) {
      uncompressed_table_start = (u2*) m->localvariable_table_start();
    } else {
      if (m->has_checked_exceptions()) {
        uncompressed_table_start = (u2*) m->checked_exceptions_start();
      } else {
        uncompressed_table_start = (u2*) m_end;
      }
    }
    // Any gap between compressed and uncompressed tables can only be the
    // padding needed to align the object size; anything larger is corrupt.
    int gap = (intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end;
    int max_gap = align_object_size(1)*BytesPerWord;
    guarantee(gap >= 0 && gap < max_gap, "invalid method layout");
  }
}
289 bool constMethodKlass::oop_partially_loaded(oop obj) const {
290 assert(obj->is_constMethod(), "object must be klass");
291 constMethodOop m = constMethodOop(obj);
292 // check whether exception_table points to self (flag for partially loaded)
293 return m->exception_table() == (typeArrayOop)obj;
294 }
297 // The exception_table is the last field set when loading an object.
298 void constMethodKlass::oop_set_partially_loaded(oop obj) {
299 assert(obj->is_constMethod(), "object must be klass");
300 constMethodOop m = constMethodOop(obj);
301 // Temporarily set exception_table to point to self
302 m->set_exception_table((typeArrayOop)obj);
303 }