Tue, 24 Sep 2013 15:56:25 +0200
7009641: Don't fail VM when CodeCache is full
Summary: Allocation in the code cache returns NULL instead of failing the entire VM
Reviewed-by: kvn, iveresov
1.1 --- a/src/cpu/sparc/vm/vtableStubs_sparc.cpp Fri Sep 20 11:17:04 2013 -0700 1.2 +++ b/src/cpu/sparc/vm/vtableStubs_sparc.cpp Tue Sep 24 15:56:25 2013 +0200 1.3 @@ -52,6 +52,11 @@ 1.4 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) { 1.5 const int sparc_code_length = VtableStub::pd_code_size_limit(true); 1.6 VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index); 1.7 + // Can be NULL if there is no free space in the code cache. 1.8 + if (s == NULL) { 1.9 + return NULL; 1.10 + } 1.11 + 1.12 ResourceMark rm; 1.13 CodeBuffer cb(s->entry_point(), sparc_code_length); 1.14 MacroAssembler* masm = new MacroAssembler(&cb); 1.15 @@ -125,6 +130,11 @@ 1.16 VtableStub* VtableStubs::create_itable_stub(int itable_index) { 1.17 const int sparc_code_length = VtableStub::pd_code_size_limit(false); 1.18 VtableStub* s = new(sparc_code_length) VtableStub(false, itable_index); 1.19 + // Can be NULL if there is no free space in the code cache. 1.20 + if (s == NULL) { 1.21 + return NULL; 1.22 + } 1.23 + 1.24 ResourceMark rm; 1.25 CodeBuffer cb(s->entry_point(), sparc_code_length); 1.26 MacroAssembler* masm = new MacroAssembler(&cb);
2.1 --- a/src/cpu/x86/vm/vtableStubs_x86_32.cpp Fri Sep 20 11:17:04 2013 -0700 2.2 +++ b/src/cpu/x86/vm/vtableStubs_x86_32.cpp Tue Sep 24 15:56:25 2013 +0200 2.3 @@ -58,6 +58,11 @@ 2.4 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) { 2.5 const int i486_code_length = VtableStub::pd_code_size_limit(true); 2.6 VtableStub* s = new(i486_code_length) VtableStub(true, vtable_index); 2.7 + // Can be NULL if there is no free space in the code cache. 2.8 + if (s == NULL) { 2.9 + return NULL; 2.10 + } 2.11 + 2.12 ResourceMark rm; 2.13 CodeBuffer cb(s->entry_point(), i486_code_length); 2.14 MacroAssembler* masm = new MacroAssembler(&cb); 2.15 @@ -132,6 +137,11 @@ 2.16 // add code here, bump the code stub size returned by pd_code_size_limit! 2.17 const int i486_code_length = VtableStub::pd_code_size_limit(false); 2.18 VtableStub* s = new(i486_code_length) VtableStub(false, itable_index); 2.19 + // Can be NULL if there is no free space in the code cache. 2.20 + if (s == NULL) { 2.21 + return NULL; 2.22 + } 2.23 + 2.24 ResourceMark rm; 2.25 CodeBuffer cb(s->entry_point(), i486_code_length); 2.26 MacroAssembler* masm = new MacroAssembler(&cb);
3.1 --- a/src/cpu/x86/vm/vtableStubs_x86_64.cpp Fri Sep 20 11:17:04 2013 -0700 3.2 +++ b/src/cpu/x86/vm/vtableStubs_x86_64.cpp Tue Sep 24 15:56:25 2013 +0200 3.3 @@ -49,6 +49,11 @@ 3.4 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) { 3.5 const int amd64_code_length = VtableStub::pd_code_size_limit(true); 3.6 VtableStub* s = new(amd64_code_length) VtableStub(true, vtable_index); 3.7 + // Can be NULL if there is no free space in the code cache. 3.8 + if (s == NULL) { 3.9 + return NULL; 3.10 + } 3.11 + 3.12 ResourceMark rm; 3.13 CodeBuffer cb(s->entry_point(), amd64_code_length); 3.14 MacroAssembler* masm = new MacroAssembler(&cb); 3.15 @@ -126,6 +131,11 @@ 3.16 // returned by pd_code_size_limit! 3.17 const int amd64_code_length = VtableStub::pd_code_size_limit(false); 3.18 VtableStub* s = new(amd64_code_length) VtableStub(false, itable_index); 3.19 + // Can be NULL if there is no free space in the code cache. 3.20 + if (s == NULL) { 3.21 + return NULL; 3.22 + } 3.23 + 3.24 ResourceMark rm; 3.25 CodeBuffer cb(s->entry_point(), amd64_code_length); 3.26 MacroAssembler* masm = new MacroAssembler(&cb);
4.1 --- a/src/share/vm/code/compiledIC.cpp Fri Sep 20 11:17:04 2013 -0700 4.2 +++ b/src/share/vm/code/compiledIC.cpp Tue Sep 24 15:56:25 2013 +0200 4.3 @@ -160,7 +160,7 @@ 4.4 // High-level access to an inline cache. Guaranteed to be MT-safe. 4.5 4.6 4.7 -void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) { 4.8 +bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) { 4.9 assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 4.10 assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic"); 4.11 assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?"); 4.12 @@ -170,8 +170,10 @@ 4.13 assert(bytecode == Bytecodes::_invokeinterface, ""); 4.14 int itable_index = call_info->itable_index(); 4.15 entry = VtableStubs::find_itable_stub(itable_index); 4.16 + if (entry == NULL) { 4.17 + return false; 4.18 + } 4.19 #ifdef ASSERT 4.20 - assert(entry != NULL, "entry not computed"); 4.21 int index = call_info->resolved_method()->itable_index(); 4.22 assert(index == itable_index, "CallInfo pre-computes this"); 4.23 #endif //ASSERT 4.24 @@ -184,6 +186,9 @@ 4.25 int vtable_index = call_info->vtable_index(); 4.26 assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check"); 4.27 entry = VtableStubs::find_vtable_stub(vtable_index); 4.28 + if (entry == NULL) { 4.29 + return false; 4.30 + } 4.31 InlineCacheBuffer::create_transition_stub(this, NULL, entry); 4.32 } 4.33 4.34 @@ -200,6 +205,7 @@ 4.35 // race because the IC entry was complete when we safepointed so 4.36 // cleaning it immediately is harmless. 4.37 // assert(is_megamorphic(), "sanity check"); 4.38 + return true; 4.39 } 4.40 4.41
5.1 --- a/src/share/vm/code/compiledIC.hpp Fri Sep 20 11:17:04 2013 -0700 5.2 +++ b/src/share/vm/code/compiledIC.hpp Tue Sep 24 15:56:25 2013 +0200 5.3 @@ -226,7 +226,10 @@ 5.4 // 5.5 void set_to_clean(); // Can only be called during a safepoint operation 5.6 void set_to_monomorphic(CompiledICInfo& info); 5.7 - void set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS); 5.8 + 5.9 + // Returns true if successful and false otherwise. The call can fail if memory 5.10 + // allocation in the code cache fails. 5.11 + bool set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS); 5.12 5.13 static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass, 5.14 bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS);
6.1 --- a/src/share/vm/code/vtableStubs.cpp Fri Sep 20 11:17:04 2013 -0700 6.2 +++ b/src/share/vm/code/vtableStubs.cpp Tue Sep 24 15:56:25 2013 +0200 6.3 @@ -46,12 +46,9 @@ 6.4 address VtableStub::_chunk_end = NULL; 6.5 VMReg VtableStub::_receiver_location = VMRegImpl::Bad(); 6.6 6.7 -static int num_vtable_chunks = 0; 6.8 - 6.9 6.10 void* VtableStub::operator new(size_t size, int code_size) throw() { 6.11 assert(size == sizeof(VtableStub), "mismatched size"); 6.12 - num_vtable_chunks++; 6.13 // compute real VtableStub size (rounded to nearest word) 6.14 const int real_size = round_to(code_size + sizeof(VtableStub), wordSize); 6.15 // malloc them in chunks to minimize header overhead 6.16 @@ -60,7 +57,7 @@ 6.17 const int bytes = chunk_factor * real_size + pd_code_alignment(); 6.18 BufferBlob* blob = BufferBlob::create("vtable chunks", bytes); 6.19 if (blob == NULL) { 6.20 - vm_exit_out_of_memory(bytes, OOM_MALLOC_ERROR, "CodeCache: no room for vtable chunks"); 6.21 + return NULL; 6.22 } 6.23 _chunk = blob->content_begin(); 6.24 _chunk_end = _chunk + bytes; 6.25 @@ -121,6 +118,12 @@ 6.26 } else { 6.27 s = create_itable_stub(vtable_index); 6.28 } 6.29 + 6.30 + // Creation of vtable or itable can fail if there is not enough free space in the code cache. 6.31 + if (s == NULL) { 6.32 + return NULL; 6.33 + } 6.34 + 6.35 enter(is_vtable_stub, vtable_index, s); 6.36 if (PrintAdapterHandlers) { 6.37 tty->print_cr("Decoding VtableStub %s[%d]@%d",
7.1 --- a/src/share/vm/runtime/sharedRuntime.cpp Fri Sep 20 11:17:04 2013 -0700 7.2 +++ b/src/share/vm/runtime/sharedRuntime.cpp Tue Sep 24 15:56:25 2013 +0200 7.3 @@ -1506,8 +1506,11 @@ 7.4 info, CHECK_(methodHandle())); 7.5 inline_cache->set_to_monomorphic(info); 7.6 } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) { 7.7 - // Change to megamorphic 7.8 - inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle())); 7.9 + // Potential change to megamorphic 7.10 + bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle())); 7.11 + if (!successful) { 7.12 + inline_cache->set_to_clean(); 7.13 + } 7.14 } else { 7.15 // Either clean or megamorphic 7.16 }