/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
duke@435: * duke@435: */ duke@435: stefank@2314: #include "precompiled.hpp" stefank@2314: #include "code/codeCache.hpp" stefank@2314: #include "code/debugInfoRec.hpp" stefank@2314: #include "code/nmethod.hpp" stefank@2314: #include "code/pcDesc.hpp" stefank@2314: #include "code/scopeDesc.hpp" stefank@2314: #include "interpreter/interpreter.hpp" stefank@2314: #include "interpreter/oopMapCache.hpp" stefank@2314: #include "oops/instanceKlass.hpp" stefank@2314: #include "oops/oop.inline.hpp" stefank@2314: #include "runtime/basicLock.hpp" stefank@2314: #include "runtime/handles.inline.hpp" stefank@2314: #include "runtime/monitorChunk.hpp" stefank@2314: #include "runtime/signature.hpp" stefank@2314: #include "runtime/stubRoutines.hpp" stefank@2314: #include "runtime/vframeArray.hpp" stefank@2314: #include "runtime/vframe_hp.hpp" stefank@2314: #ifdef COMPILER2 stefank@2314: #include "opto/matcher.hpp" stefank@2314: #endif duke@435: duke@435: duke@435: // ------------- compiledVFrame -------------- duke@435: duke@435: StackValueCollection* compiledVFrame::locals() const { duke@435: // Natives has no scope duke@435: if (scope() == NULL) return new StackValueCollection(0); duke@435: GrowableArray* scv_list = scope()->locals(); duke@435: if (scv_list == NULL) return new StackValueCollection(0); duke@435: duke@435: // scv_list is the list of ScopeValues describing the JVM stack state. duke@435: // There is one scv_list entry for every JVM stack state in use. 
duke@435: int length = scv_list->length(); duke@435: StackValueCollection* result = new StackValueCollection(length); duke@435: // In rare instances set_locals may have occurred in which case duke@435: // there are local values that are not described by the ScopeValue anymore duke@435: GrowableArray* deferred = NULL; duke@435: GrowableArray* list = thread()->deferred_locals(); duke@435: if (list != NULL ) { duke@435: // In real life this never happens or is typically a single element search duke@435: for (int i = 0; i < list->length(); i++) { duke@435: if (list->at(i)->matches((vframe*)this)) { duke@435: deferred = list->at(i)->locals(); duke@435: break; duke@435: } duke@435: } duke@435: } duke@435: duke@435: for( int i = 0; i < length; i++ ) { duke@435: result->add( create_stack_value(scv_list->at(i)) ); duke@435: } duke@435: duke@435: // Replace specified locals with any deferred writes that are present duke@435: if (deferred != NULL) { duke@435: for ( int l = 0; l < deferred->length() ; l ++) { duke@435: jvmtiDeferredLocalVariable* val = deferred->at(l); duke@435: switch (val->type()) { duke@435: case T_BOOLEAN: duke@435: result->set_int_at(val->index(), val->value().z); duke@435: break; duke@435: case T_CHAR: duke@435: result->set_int_at(val->index(), val->value().c); duke@435: break; duke@435: case T_FLOAT: duke@435: result->set_float_at(val->index(), val->value().f); duke@435: break; duke@435: case T_DOUBLE: duke@435: result->set_double_at(val->index(), val->value().d); duke@435: break; duke@435: case T_BYTE: duke@435: result->set_int_at(val->index(), val->value().b); duke@435: break; duke@435: case T_SHORT: duke@435: result->set_int_at(val->index(), val->value().s); duke@435: break; duke@435: case T_INT: duke@435: result->set_int_at(val->index(), val->value().i); duke@435: break; duke@435: case T_LONG: duke@435: result->set_long_at(val->index(), val->value().j); duke@435: break; duke@435: case T_OBJECT: duke@435: { duke@435: Handle obj((oop)val->value().l); 
duke@435: result->set_obj_at(val->index(), obj); duke@435: } duke@435: break; duke@435: default: duke@435: ShouldNotReachHere(); duke@435: } duke@435: } duke@435: } duke@435: duke@435: return result; duke@435: } duke@435: duke@435: duke@435: void compiledVFrame::set_locals(StackValueCollection* values) const { duke@435: duke@435: fatal("Should use update_local for each local update"); duke@435: } duke@435: duke@435: void compiledVFrame::update_local(BasicType type, int index, jvalue value) { duke@435: duke@435: #ifdef ASSERT duke@435: duke@435: assert(fr().is_deoptimized_frame(), "frame must be scheduled for deoptimization"); duke@435: #endif /* ASSERT */ duke@435: GrowableArray* deferred = thread()->deferred_locals(); duke@435: if (deferred != NULL ) { duke@435: // See if this vframe has already had locals with deferred writes duke@435: int f; duke@435: for ( f = 0 ; f < deferred->length() ; f++ ) { duke@435: if (deferred->at(f)->matches(this)) { duke@435: // Matching, vframe now see if the local already had deferred write duke@435: GrowableArray* locals = deferred->at(f)->locals(); duke@435: int l; duke@435: for (l = 0 ; l < locals->length() ; l++ ) { duke@435: if (locals->at(l)->index() == index) { duke@435: locals->at(l)->set_value(value); duke@435: return; duke@435: } duke@435: } duke@435: // No matching local already present. Push a new value onto the deferred collection duke@435: locals->push(new jvmtiDeferredLocalVariable(index, type, value)); duke@435: return; duke@435: } duke@435: } duke@435: // No matching vframe must push a new vframe duke@435: } else { duke@435: // No deferred updates pending for this thread. duke@435: // allocate in C heap zgu@3900: deferred = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray (1, true); duke@435: thread()->set_deferred_locals(deferred); duke@435: } duke@435: deferred->push(new jvmtiDeferredLocalVariableSet(method(), bci(), fr().id())); duke@435: assert(deferred->top()->id() == fr().id(), "Huh? 
Must match"); duke@435: deferred->top()->set_local_at(index, type, value); duke@435: } duke@435: duke@435: StackValueCollection* compiledVFrame::expressions() const { duke@435: // Natives has no scope duke@435: if (scope() == NULL) return new StackValueCollection(0); duke@435: GrowableArray* scv_list = scope()->expressions(); duke@435: if (scv_list == NULL) return new StackValueCollection(0); duke@435: duke@435: // scv_list is the list of ScopeValues describing the JVM stack state. duke@435: // There is one scv_list entry for every JVM stack state in use. duke@435: int length = scv_list->length(); duke@435: StackValueCollection* result = new StackValueCollection(length); duke@435: for( int i = 0; i < length; i++ ) duke@435: result->add( create_stack_value(scv_list->at(i)) ); duke@435: duke@435: return result; duke@435: } duke@435: duke@435: duke@435: // The implementation of the following two methods was factorized into the duke@435: // class StackValue because it is also used from within deoptimization.cpp for duke@435: // rematerialization and relocking of non-escaping objects. 
duke@435: duke@435: StackValue *compiledVFrame::create_stack_value(ScopeValue *sv) const { duke@435: return StackValue::create_stack_value(&_fr, register_map(), sv); duke@435: } duke@435: duke@435: BasicLock* compiledVFrame::resolve_monitor_lock(Location location) const { duke@435: return StackValue::resolve_monitor_lock(&_fr, location); duke@435: } duke@435: duke@435: duke@435: GrowableArray* compiledVFrame::monitors() const { duke@435: // Natives has no scope duke@435: if (scope() == NULL) { duke@435: nmethod* nm = code(); coleenp@4037: Method* method = nm->method(); duke@435: assert(method->is_native(), ""); duke@435: if (!method->is_synchronized()) { duke@435: return new GrowableArray(0); duke@435: } duke@435: // This monitor is really only needed for UseBiasedLocking, but duke@435: // return it in all cases for now as it might be useful for stack duke@435: // traces and tools as well duke@435: GrowableArray *monitors = new GrowableArray(1); duke@435: // Casting away const duke@435: frame& fr = (frame&) _fr; kamg@2361: MonitorInfo* info = new MonitorInfo( kamg@2361: fr.get_native_receiver(), fr.get_native_monitor(), false, false); duke@435: monitors->push(info); duke@435: return monitors; duke@435: } duke@435: GrowableArray* monitors = scope()->monitors(); duke@435: if (monitors == NULL) { duke@435: return new GrowableArray(0); duke@435: } duke@435: GrowableArray* result = new GrowableArray(monitors->length()); duke@435: for (int index = 0; index < monitors->length(); index++) { duke@435: MonitorValue* mv = monitors->at(index); kvn@1253: ScopeValue* ov = mv->owner(); kvn@1253: StackValue *owner_sv = create_stack_value(ov); // it is an oop kvn@1253: if (ov->is_object() && owner_sv->obj_is_scalar_replaced()) { // The owner object was scalar replaced kvn@1253: assert(mv->eliminated(), "monitor should be eliminated for scalar replaced object"); kvn@1253: // Put klass for scalar replaced object. 
kvn@1253: ScopeValue* kv = ((ObjectValue *)ov)->klass(); kvn@1253: assert(kv->is_constant_oop(), "klass should be oop constant for scalar replaced object"); coleenp@4037: Handle k(((ConstantOopReadValue*)kv)->value()()); coleenp@4037: assert(java_lang_Class::is_instance(k()), "must be"); coleenp@4037: result->push(new MonitorInfo(k(), resolve_monitor_lock(mv->basic_lock()), kvn@1253: mv->eliminated(), true)); kvn@1253: } else { kvn@1253: result->push(new MonitorInfo(owner_sv->get_obj()(), resolve_monitor_lock(mv->basic_lock()), kvn@1253: mv->eliminated(), false)); kvn@1253: } duke@435: } duke@435: return result; duke@435: } duke@435: duke@435: duke@435: compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, nmethod* nm) duke@435: : javaVFrame(fr, reg_map, thread) { duke@435: _scope = NULL; duke@435: // Compiled method (native stub or Java code) duke@435: // native wrappers have no scope data, it is implied duke@435: if (!nm->is_native_method()) { duke@435: _scope = nm->scope_desc_at(_fr.pc()); duke@435: } duke@435: } duke@435: duke@435: compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, ScopeDesc* scope) duke@435: : javaVFrame(fr, reg_map, thread) { duke@435: _scope = scope; duke@435: guarantee(_scope != NULL, "scope must be present"); duke@435: } duke@435: duke@435: duke@435: bool compiledVFrame::is_top() const { duke@435: // FIX IT: Remove this when new native stubs are in place duke@435: if (scope() == NULL) return true; duke@435: return scope()->is_top(); duke@435: } duke@435: duke@435: duke@435: nmethod* compiledVFrame::code() const { duke@435: return CodeCache::find_nmethod(_fr.pc()); duke@435: } duke@435: duke@435: coleenp@4037: Method* compiledVFrame::method() const { duke@435: if (scope() == NULL) { duke@435: // native nmethods have no scope the method is implied duke@435: nmethod* nm = code(); duke@435: assert(nm->is_native_method(), "must be native"); duke@435: return 
nm->method(); duke@435: } coleenp@4037: return scope()->method(); duke@435: } duke@435: duke@435: duke@435: int compiledVFrame::bci() const { duke@435: int raw = raw_bci(); duke@435: return raw == SynchronizationEntryBCI ? 0 : raw; duke@435: } duke@435: duke@435: duke@435: int compiledVFrame::raw_bci() const { duke@435: if (scope() == NULL) { duke@435: // native nmethods have no scope the method/bci is implied duke@435: nmethod* nm = code(); duke@435: assert(nm->is_native_method(), "must be native"); duke@435: return 0; duke@435: } duke@435: return scope()->bci(); duke@435: } duke@435: cfang@1335: bool compiledVFrame::should_reexecute() const { cfang@1335: if (scope() == NULL) { cfang@1335: // native nmethods have no scope the method/bci is implied cfang@1335: nmethod* nm = code(); cfang@1335: assert(nm->is_native_method(), "must be native"); cfang@1335: return false; cfang@1335: } cfang@1335: return scope()->should_reexecute(); cfang@1335: } duke@435: duke@435: vframe* compiledVFrame::sender() const { duke@435: const frame f = fr(); duke@435: if (scope() == NULL) { duke@435: // native nmethods have no scope the method/bci is implied duke@435: nmethod* nm = code(); duke@435: assert(nm->is_native_method(), "must be native"); duke@435: return vframe::sender(); duke@435: } else { duke@435: return scope()->is_top() duke@435: ? 
vframe::sender() duke@435: : new compiledVFrame(&f, register_map(), thread(), scope()->sender()); duke@435: } duke@435: } duke@435: coleenp@4037: jvmtiDeferredLocalVariableSet::jvmtiDeferredLocalVariableSet(Method* method, int bci, intptr_t* id) { duke@435: _method = method; duke@435: _bci = bci; duke@435: _id = id; duke@435: // Alway will need at least one, must be on C heap zgu@3900: _locals = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray (1, true); duke@435: } duke@435: duke@435: jvmtiDeferredLocalVariableSet::~jvmtiDeferredLocalVariableSet() { duke@435: for (int i = 0; i < _locals->length() ; i++ ) { duke@435: delete _locals->at(i); duke@435: } duke@435: // Free growableArray and c heap for elements duke@435: delete _locals; duke@435: } duke@435: duke@435: bool jvmtiDeferredLocalVariableSet::matches(vframe* vf) { duke@435: if (!vf->is_compiled_frame()) return false; duke@435: compiledVFrame* cvf = (compiledVFrame*)vf; duke@435: return cvf->fr().id() == id() && cvf->method() == method() && cvf->bci() == bci(); duke@435: } duke@435: duke@435: void jvmtiDeferredLocalVariableSet::set_local_at(int idx, BasicType type, jvalue val) { duke@435: int i; duke@435: for ( i = 0 ; i < locals()->length() ; i++ ) { duke@435: if ( locals()->at(i)->index() == idx) { duke@435: assert(locals()->at(i)->type() == type, "Wrong type"); duke@435: locals()->at(i)->set_value(val); duke@435: return; duke@435: } duke@435: } duke@435: locals()->push(new jvmtiDeferredLocalVariable(idx, type, val)); duke@435: } duke@435: duke@435: void jvmtiDeferredLocalVariableSet::oops_do(OopClosure* f) { coleenp@4037: // The Method* is on the stack so a live activation keeps it alive coleenp@4037: // either by mirror in interpreter or code in compiled code. 
duke@435: for ( int i = 0; i < locals()->length(); i++ ) { duke@435: if ( locals()->at(i)->type() == T_OBJECT) { duke@435: f->do_oop(locals()->at(i)->oop_addr()); duke@435: } duke@435: } duke@435: } duke@435: duke@435: jvmtiDeferredLocalVariable::jvmtiDeferredLocalVariable(int index, BasicType type, jvalue value) { duke@435: _index = index; duke@435: _type = type; duke@435: _value = value; duke@435: } duke@435: duke@435: duke@435: #ifndef PRODUCT duke@435: void compiledVFrame::verify() const { duke@435: Unimplemented(); duke@435: } duke@435: #endif // PRODUCT