src/share/vm/oops/constMethodKlass.cpp

changeset   3917:8150fa46d2ed
parent      3826:2fe087c3e814
comparison  3880:06320b1578cb vs. 3917:8150fa46d2ed

--- a/src/share/vm/oops/constMethodKlass.cpp
+++ b/src/share/vm/oops/constMethodKlass.cpp
@@ -63,33 +63,35 @@
 }
 
 constMethodOop constMethodKlass::allocate(int byte_code_size,
                                           int compressed_line_number_size,
                                           int localvariable_table_length,
+                                          int exception_table_length,
                                           int checked_exceptions_length,
                                           bool is_conc_safe,
                                           TRAPS) {
 
   int size = constMethodOopDesc::object_size(byte_code_size,
                                              compressed_line_number_size,
                                              localvariable_table_length,
+                                             exception_table_length,
                                              checked_exceptions_length);
   KlassHandle h_k(THREAD, as_klassOop());
   constMethodOop cm = (constMethodOop)
     CollectedHeap::permanent_obj_allocate(h_k, size, CHECK_NULL);
   assert(!cm->is_parsable(), "Not yet safely parsable");
   No_Safepoint_Verifier no_safepoint;
   cm->set_interpreter_kind(Interpreter::invalid);
   cm->init_fingerprint();
   cm->set_constants(NULL);
   cm->set_stackmap_data(NULL);
-  cm->set_exception_table(NULL);
   cm->set_code_size(byte_code_size);
   cm->set_constMethod_size(size);
   cm->set_inlined_tables_length(checked_exceptions_length,
                                 compressed_line_number_size,
-                                localvariable_table_length);
+                                localvariable_table_length,
+                                exception_table_length);
   assert(cm->size() == size, "wrong size for object");
   cm->set_is_conc_safe(is_conc_safe);
   cm->set_partially_loaded();
   assert(cm->is_parsable(), "Is safely parsable by gc");
   return cm;
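
The hunk above threads the new exception_table_length through allocation so the exception table is laid out inline in the constMethodOop, alongside the other inlined tables, instead of being held as a separate typeArrayOop. As a rough, self-contained sketch of how such a size computation can account for the extra table (an approximation, not the real constMethodOopDesc::object_size; the u2 element counts, word-size constant and helper names below are stand-ins):

#include <cstddef>
#include <cstdio>

// Stand-ins for VM constants/helpers (assumptions, not HotSpot's definitions).
typedef unsigned short u2_t;
static const int kBytesPerWord = sizeof(void*);

static int bytes_to_words_rounded_up(int bytes) {
  return (bytes + kBytesPerWord - 1) / kBytesPerWord;
}

// Approximate size, in words, of the variable-length tail of a constMethod-like
// object.  Each exception table entry is modelled as four u2 values (start_pc,
// end_pc, handler_pc, catch_type_index) preceded by a u2 length word, mirroring
// how the checked-exceptions and local-variable tables are already counted.
int tail_size_in_words(int byte_code_size,
                       int compressed_line_number_size,
                       int localvariable_table_length,
                       int exception_table_length,
                       int checked_exceptions_length) {
  int extra_bytes = byte_code_size + compressed_line_number_size;
  if (checked_exceptions_length > 0) {
    extra_bytes += (int) sizeof(u2_t) * (1 + checked_exceptions_length);      // length + 1 u2 per entry
  }
  if (localvariable_table_length > 0) {
    extra_bytes += (int) sizeof(u2_t) * (1 + 6 * localvariable_table_length); // length + 6 u2 per entry
  }
  if (exception_table_length > 0) {
    extra_bytes += (int) sizeof(u2_t) * (1 + 4 * exception_table_length);     // length + 4 u2 per entry
  }
  return bytes_to_words_rounded_up(extra_bytes);
}

int main() {
  // e.g. 10 bytecodes, no line numbers, no locals, 2 exception handlers, 1 checked exception
  std::printf("%d words\n", tail_size_in_words(10, 0, 0, 2, 1));
  return 0;
}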
@@ -98,11 +100,10 @@
 void constMethodKlass::oop_follow_contents(oop obj) {
   assert (obj->is_constMethod(), "object must be constMethod");
   constMethodOop cm = constMethodOop(obj);
   MarkSweep::mark_and_push(cm->adr_constants());
   MarkSweep::mark_and_push(cm->adr_stackmap_data());
-  MarkSweep::mark_and_push(cm->adr_exception_table());
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::constMethodKlassObj never moves.
 }
 
 #ifndef SERIALGC
@@ -110,22 +111,20 @@
                                            oop obj) {
   assert (obj->is_constMethod(), "object must be constMethod");
   constMethodOop cm_oop = constMethodOop(obj);
   PSParallelCompact::mark_and_push(cm, cm_oop->adr_constants());
   PSParallelCompact::mark_and_push(cm, cm_oop->adr_stackmap_data());
-  PSParallelCompact::mark_and_push(cm, cm_oop->adr_exception_table());
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::constMethodKlassObj never moves.
 }
 #endif // SERIALGC
 
 int constMethodKlass::oop_oop_iterate(oop obj, OopClosure* blk) {
   assert (obj->is_constMethod(), "object must be constMethod");
   constMethodOop cm = constMethodOop(obj);
   blk->do_oop(cm->adr_constants());
   blk->do_oop(cm->adr_stackmap_data());
-  blk->do_oop(cm->adr_exception_table());
   // Get size before changing pointers.
   // Don't call size() or oop_size() since that is a virtual call.
   int size = cm->object_size();
   return size;
 }
@@ -137,12 +136,10 @@
   oop* adr;
   adr = cm->adr_constants();
   if (mr.contains(adr)) blk->do_oop(adr);
   adr = cm->adr_stackmap_data();
   if (mr.contains(adr)) blk->do_oop(adr);
-  adr = cm->adr_exception_table();
-  if (mr.contains(adr)) blk->do_oop(adr);
   // Get size before changing pointers.
   // Don't call size() or oop_size() since that is a virtual call.
   int size = cm->object_size();
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::constMethodKlassObj never moves.
@@ -153,11 +150,10 @@
 int constMethodKlass::oop_adjust_pointers(oop obj) {
   assert(obj->is_constMethod(), "should be constMethod");
   constMethodOop cm = constMethodOop(obj);
   MarkSweep::adjust_pointer(cm->adr_constants());
   MarkSweep::adjust_pointer(cm->adr_stackmap_data());
-  MarkSweep::adjust_pointer(cm->adr_exception_table());
   // Get size before changing pointers.
   // Don't call size() or oop_size() since that is a virtual call.
   int size = cm->object_size();
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::constMethodKlassObj never moves.
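
With the exception table stored inline, it is no longer a separate heap object, so each of the GC paths above (mark-sweep marking, parallel-compact marking, the oop iterators and pointer adjustment) drops its adr_exception_table() visit, leaving the constMethod with two embedded oops: constants and stackmap_data. A minimal stand-alone model of that iteration pattern (hypothetical Closure and ConstMethodModel types, not HotSpot's OopClosure or constMethodOopDesc):

#include <cstdio>

struct Closure {                       // stand-in for OopClosure
  int visited;
  Closure() : visited(0) {}
  void do_ptr(void** p) { (void) p; ++visited; }
};

struct ConstMethodModel {              // hypothetical model of the embedded pointer fields
  void* constants;
  void* stackmap_data;
  // The exception table is now plain inlined data after the bytecodes,
  // so it contributes no pointer field for the closure to visit.

  void iterate(Closure* blk) {
    blk->do_ptr(&constants);
    blk->do_ptr(&stackmap_data);
  }
};

int main() {
  ConstMethodModel m = { 0, 0 };
  Closure c;
  m.iterate(&c);
  std::printf("embedded pointer fields visited: %d\n", c.visited);  // prints 2
  return 0;
}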
@@ -188,11 +184,10 @@
   assert(obj->is_constMethod(), "must be constMethod");
   Klass::oop_print_on(obj, st);
   constMethodOop m = constMethodOop(obj);
   st->print(" - constants: " INTPTR_FORMAT " ", (address)m->constants());
   m->constants()->print_value_on(st); st->cr();
-  st->print(" - exceptions: " INTPTR_FORMAT "\n", (address)m->exception_table());
   if (m->has_stackmap_table()) {
     st->print(" - stackmap data: ");
     m->stackmap_data()->print_value_on(st);
     st->cr();
   }
@@ -226,12 +221,10 @@
     guarantee(m->constants()->is_perm(), "should be in permspace");
     guarantee(m->constants()->is_constantPool(), "should be constant pool");
     typeArrayOop stackmap_data = m->stackmap_data();
     guarantee(stackmap_data == NULL ||
               stackmap_data->is_perm(), "should be in permspace");
-    guarantee(m->exception_table()->is_perm(), "should be in permspace");
-    guarantee(m->exception_table()->is_typeArray(), "should be type array");
 
     address m_end = (address)((oop*) m + m->size());
     address compressed_table_start = m->code_end();
     guarantee(compressed_table_start <= m_end, "invalid method layout");
     address compressed_table_end = compressed_table_start;
@@ -242,46 +235,50 @@
         guarantee(stream.bci() >= 0 && stream.bci() <= m->code_size(), "invalid bci in line number table");
       }
       compressed_table_end += stream.position();
     }
     guarantee(compressed_table_end <= m_end, "invalid method layout");
-    // Verify checked exceptions and local variable tables
+    // Verify checked exceptions, exception table and local variable tables
     if (m->has_checked_exceptions()) {
       u2* addr = m->checked_exceptions_length_addr();
+      guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
+    }
+    if (m->has_exception_handler()) {
+      u2* addr = m->exception_table_length_addr();
       guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
     }
     if (m->has_localvariable_table()) {
       u2* addr = m->localvariable_table_length_addr();
       guarantee(*addr > 0 && (address) addr >= compressed_table_end && (address) addr < m_end, "invalid method layout");
     }
     // Check compressed_table_end relative to uncompressed_table_start
     u2* uncompressed_table_start;
     if (m->has_localvariable_table()) {
       uncompressed_table_start = (u2*) m->localvariable_table_start();
+    } else if (m->has_exception_handler()) {
+      uncompressed_table_start = (u2*) m->exception_table_start();
+    } else if (m->has_checked_exceptions()) {
+      uncompressed_table_start = (u2*) m->checked_exceptions_start();
     } else {
-      if (m->has_checked_exceptions()) {
-        uncompressed_table_start = (u2*) m->checked_exceptions_start();
-      } else {
         uncompressed_table_start = (u2*) m_end;
-      }
     }
     int gap = (intptr_t) uncompressed_table_start - (intptr_t) compressed_table_end;
     int max_gap = align_object_size(1)*BytesPerWord;
     guarantee(gap >= 0 && gap < max_gap, "invalid method layout");
   }
 }
 
 bool constMethodKlass::oop_partially_loaded(oop obj) const {
   assert(obj->is_constMethod(), "object must be klass");
   constMethodOop m = constMethodOop(obj);
-  // check whether exception_table points to self (flag for partially loaded)
-  return m->exception_table() == (typeArrayOop)obj;
+  // check whether stackmap_data points to self (flag for partially loaded)
+  return m->stackmap_data() == (typeArrayOop)obj;
 }
 
 
 // The exception_table is the last field set when loading an object.
 void constMethodKlass::oop_set_partially_loaded(oop obj) {
   assert(obj->is_constMethod(), "object must be klass");
   constMethodOop m = constMethodOop(obj);
-  // Temporarily set exception_table to point to self
-  m->set_exception_table((typeArrayOop)obj);
+  // Temporarily set stackmap_data to point to self
+  m->set_stackmap_data((typeArrayOop)obj);
 }
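
The partially-loaded flag uses the usual self-pointer trick: a field is temporarily made to point at the object itself and later overwritten with its real value. Since the separate exception_table field is gone, the sentinel now lives in stackmap_data. A small stand-alone sketch of the pattern (hypothetical ConstMethodModel, not the real constMethodOopDesc):

#include <cassert>

struct ConstMethodModel {                  // hypothetical stand-in
  void* stackmap_data;
  ConstMethodModel() : stackmap_data(0) {}

  void set_partially_loaded()      { stackmap_data = this; }          // self-pointer flag
  bool is_partially_loaded() const { return stackmap_data == this; }
  void set_stackmap_data(void* p)  { stackmap_data = p; }             // clears the flag
};

int main() {
  ConstMethodModel m;
  m.set_partially_loaded();
  assert(m.is_partially_loaded());
  m.set_stackmap_data(0);                  // real value installed at the end of loading
  assert(!m.is_partially_loaded());
  return 0;
}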
