kamg@4245: /* coleenp@4572: * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. kamg@4245: * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. kamg@4245: * kamg@4245: * This code is free software; you can redistribute it and/or modify it kamg@4245: * under the terms of the GNU General Public License version 2 only, as kamg@4245: * published by the Free Software Foundation. kamg@4245: * kamg@4245: * This code is distributed in the hope that it will be useful, but WITHOUT kamg@4245: * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or kamg@4245: * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License kamg@4245: * version 2 for more details (a copy is included in the LICENSE file that kamg@4245: * accompanied this code). kamg@4245: * kamg@4245: * You should have received a copy of the GNU General Public License version kamg@4245: * 2 along with this work; if not, write to the Free Software Foundation, kamg@4245: * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. kamg@4245: * kamg@4245: * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA kamg@4245: * or visit www.oracle.com if you need additional information or have any kamg@4245: * questions. kamg@4245: * kamg@4245: */ kamg@4245: kamg@4245: #include "precompiled.hpp" kamg@4245: #include "classfile/bytecodeAssembler.hpp" kamg@4245: #include "classfile/defaultMethods.hpp" kamg@4245: #include "classfile/genericSignatures.hpp" kamg@4245: #include "classfile/symbolTable.hpp" kamg@4245: #include "memory/allocation.hpp" kamg@4245: #include "memory/metadataFactory.hpp" kamg@4245: #include "memory/resourceArea.hpp" kamg@4245: #include "runtime/signature.hpp" kamg@4245: #include "runtime/thread.hpp" kamg@4245: #include "oops/instanceKlass.hpp" kamg@4245: #include "oops/klass.hpp" kamg@4245: #include "oops/method.hpp" kamg@4245: #include "utilities/accessFlags.hpp" kamg@4245: #include "utilities/exceptions.hpp" kamg@4245: #include "utilities/ostream.hpp" kamg@4245: #include "utilities/pair.hpp" kamg@4245: #include "utilities/resourceHash.hpp" kamg@4245: kamg@4245: typedef enum { QUALIFIED, DISQUALIFIED } QualifiedState; kamg@4245: kamg@4245: // Because we use an iterative algorithm when iterating over the type kamg@4245: // hierarchy, we can't use traditional scoped objects which automatically do kamg@4245: // cleanup in the destructor when the scope is exited. PseudoScope (and kamg@4245: // PseudoScopeMark) provides a similar functionality, but for when you want a kamg@4245: // scoped object in non-stack memory (such as in resource memory, as we do kamg@4245: // here). You've just got to remember to call 'destroy()' on the scope when kamg@4245: // leaving it (and marks have to be explicitly added). 
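//
// An illustrative usage sketch (not used by the code in this file; it only
// shows the intended pattern with the types declared below, where 'ctx' is
// assumed to be a generic::Context*):
//
//   PseudoScope* scope = new PseudoScope();          // lives in resource memory
//   scope->add_mark(new ContextMark(ctx->mark()));   // queue cleanup work
//   ...                                              // do iterative work
//   scope->destroy();                                // runs destroy() on every mark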
kamg@4245: class PseudoScopeMark : public ResourceObj {
kamg@4245:  public:
kamg@4245:   virtual void destroy() = 0;
kamg@4245: };
kamg@4245:
kamg@4245: class PseudoScope : public ResourceObj {
kamg@4245:  private:
kamg@4245:   GrowableArray<PseudoScopeMark*> _marks;
kamg@4245:  public:
kamg@4245:
kamg@4245:   static PseudoScope* cast(void* data) {
kamg@4245:     return static_cast<PseudoScope*>(data);
kamg@4245:   }
kamg@4245:
kamg@4245:   void add_mark(PseudoScopeMark* psm) {
kamg@4245:     _marks.append(psm);
kamg@4245:   }
kamg@4245:
kamg@4245:   void destroy() {
kamg@4245:     for (int i = 0; i < _marks.length(); ++i) {
kamg@4245:       _marks.at(i)->destroy();
kamg@4245:     }
kamg@4245:   }
kamg@4245: };
kamg@4245:
kamg@4245: class ContextMark : public PseudoScopeMark {
kamg@4245:  private:
kamg@4245:   generic::Context::Mark _mark;
kamg@4245:  public:
kamg@4245:   ContextMark(const generic::Context::Mark& cm) : _mark(cm) {}
kamg@4245:   virtual void destroy() { _mark.destroy(); }
kamg@4245: };
kamg@4245:
kamg@4245: #ifndef PRODUCT
kamg@4245: static void print_slot(outputStream* str, Symbol* name, Symbol* signature) {
kamg@4245:   ResourceMark rm;
kamg@4245:   str->print("%s%s", name->as_C_string(), signature->as_C_string());
kamg@4245: }
kamg@4245:
kamg@4245: static void print_method(outputStream* str, Method* mo, bool with_class=true) {
kamg@4245:   ResourceMark rm;
kamg@4245:   if (with_class) {
kamg@4245:     str->print("%s.", mo->klass_name()->as_C_string());
kamg@4245:   }
kamg@4245:   print_slot(str, mo->name(), mo->signature());
kamg@4245: }
kamg@4245: #endif // ndef PRODUCT
kamg@4245:
kamg@4245: /**
kamg@4245:  * Perform a depth-first iteration over the class hierarchy, applying
kamg@4245:  * algorithmic logic as it goes.
kamg@4245:  *
kamg@4245:  * This class is one half of the inheritance hierarchy analysis mechanism.
kamg@4245:  * It is meant to be used in conjunction with another class, the algorithm,
kamg@4245:  * which is indicated by the ALGO template parameter.  This class can be
kamg@4245:  * paired with any algorithm class that provides the required methods.
kamg@4245:  *
kamg@4245:  * This class contains all the mechanics for iterating over the class hierarchy
kamg@4245:  * starting at a particular root, without recursing (thus limiting stack growth
kamg@4245:  * from this point).  It visits each superclass (if present) and superinterface
kamg@4245:  * in a depth-first manner, with a callback to the ALGO class as each class is
kamg@4245:  * encountered (visit()).  The algorithm can cut off further exploration of a
kamg@4245:  * particular branch by returning 'false' from a visit() call.
kamg@4245:  *
kamg@4245:  * The ALGO class must provide a visit() method, which will be called once for
kamg@4245:  * each node in the inheritance tree during the iteration.  In addition, it can
kamg@4245:  * provide a memory block via new_node_data(InstanceKlass*), which it can use
kamg@4245:  * for node-specific storage (and access via the current_data() and
kamg@4245:  * data_at_depth(int) methods).
kamg@4245: * kamg@4245: * Bare minimum needed to be an ALGO class: kamg@4245: * class Algo : public HierarchyVisitor { kamg@4245: * void* new_node_data(InstanceKlass* cls) { return NULL; } kamg@4245: * void free_node_data(void* data) { return; } kamg@4245: * bool visit() { return true; } kamg@4245: * }; kamg@4245: */ kamg@4245: template kamg@4245: class HierarchyVisitor : StackObj { kamg@4245: private: kamg@4245: kamg@4245: class Node : public ResourceObj { kamg@4245: public: kamg@4245: InstanceKlass* _class; kamg@4245: bool _super_was_visited; kamg@4245: int _interface_index; kamg@4245: void* _algorithm_data; kamg@4245: kamg@4245: Node(InstanceKlass* cls, void* data, bool visit_super) kamg@4245: : _class(cls), _super_was_visited(!visit_super), kamg@4245: _interface_index(0), _algorithm_data(data) {} kamg@4245: kamg@4245: int number_of_interfaces() { return _class->local_interfaces()->length(); } kamg@4245: int interface_index() { return _interface_index; } kamg@4245: void set_super_visited() { _super_was_visited = true; } kamg@4245: void increment_visited_interface() { ++_interface_index; } kamg@4245: void set_all_interfaces_visited() { kamg@4245: _interface_index = number_of_interfaces(); kamg@4245: } kamg@4245: bool has_visited_super() { return _super_was_visited; } kamg@4245: bool has_visited_all_interfaces() { kamg@4245: return interface_index() >= number_of_interfaces(); kamg@4245: } kamg@4245: InstanceKlass* interface_at(int index) { kamg@4245: return InstanceKlass::cast(_class->local_interfaces()->at(index)); kamg@4245: } kamg@4245: InstanceKlass* next_super() { return _class->java_super(); } kamg@4245: InstanceKlass* next_interface() { kamg@4245: return interface_at(interface_index()); kamg@4245: } kamg@4245: }; kamg@4245: kamg@4245: bool _cancelled; kamg@4245: GrowableArray _path; kamg@4245: kamg@4245: Node* current_top() const { return _path.top(); } kamg@4245: bool has_more_nodes() const { return !_path.is_empty(); } kamg@4245: void push(InstanceKlass* cls, void* data) { kamg@4245: assert(cls != NULL, "Requires a valid instance class"); kamg@4245: Node* node = new Node(cls, data, has_super(cls)); kamg@4245: _path.push(node); kamg@4245: } kamg@4245: void pop() { _path.pop(); } kamg@4245: kamg@4245: void reset_iteration() { kamg@4245: _cancelled = false; kamg@4245: _path.clear(); kamg@4245: } kamg@4245: bool is_cancelled() const { return _cancelled; } kamg@4245: kamg@4245: static bool has_super(InstanceKlass* cls) { kamg@4245: return cls->super() != NULL && !cls->is_interface(); kamg@4245: } kamg@4245: kamg@4245: Node* node_at_depth(int i) const { kamg@4245: return (i >= _path.length()) ? NULL : _path.at(_path.length() - i - 1); kamg@4245: } kamg@4245: kamg@4245: protected: kamg@4245: kamg@4245: // Accessors available to the algorithm kamg@4245: int current_depth() const { return _path.length() - 1; } kamg@4245: kamg@4245: InstanceKlass* class_at_depth(int i) { kamg@4245: Node* n = node_at_depth(i); kamg@4245: return n == NULL ? NULL : n->_class; kamg@4245: } kamg@4245: InstanceKlass* current_class() { return class_at_depth(0); } kamg@4245: kamg@4245: void* data_at_depth(int i) { kamg@4245: Node* n = node_at_depth(i); kamg@4245: return n == NULL ? 
NULL : n->_algorithm_data; kamg@4245: } kamg@4245: void* current_data() { return data_at_depth(0); } kamg@4245: kamg@4245: void cancel_iteration() { _cancelled = true; } kamg@4245: kamg@4245: public: kamg@4245: kamg@4245: void run(InstanceKlass* root) { kamg@4245: ALGO* algo = static_cast(this); kamg@4245: kamg@4245: reset_iteration(); kamg@4245: kamg@4245: void* algo_data = algo->new_node_data(root); kamg@4245: push(root, algo_data); kamg@4245: bool top_needs_visit = true; kamg@4245: kamg@4245: do { kamg@4245: Node* top = current_top(); kamg@4245: if (top_needs_visit) { kamg@4245: if (algo->visit() == false) { kamg@4245: // algorithm does not want to continue along this path. Arrange kamg@4245: // it so that this state is immediately popped off the stack kamg@4245: top->set_super_visited(); kamg@4245: top->set_all_interfaces_visited(); kamg@4245: } kamg@4245: top_needs_visit = false; kamg@4245: } kamg@4245: kamg@4245: if (top->has_visited_super() && top->has_visited_all_interfaces()) { kamg@4245: algo->free_node_data(top->_algorithm_data); kamg@4245: pop(); kamg@4245: } else { kamg@4245: InstanceKlass* next = NULL; kamg@4245: if (top->has_visited_super() == false) { kamg@4245: next = top->next_super(); kamg@4245: top->set_super_visited(); kamg@4245: } else { kamg@4245: next = top->next_interface(); kamg@4245: top->increment_visited_interface(); kamg@4245: } kamg@4245: assert(next != NULL, "Otherwise we shouldn't be here"); kamg@4245: algo_data = algo->new_node_data(next); kamg@4245: push(next, algo_data); kamg@4245: top_needs_visit = true; kamg@4245: } kamg@4245: } while (!is_cancelled() && has_more_nodes()); kamg@4245: } kamg@4245: }; kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: class PrintHierarchy : public HierarchyVisitor { kamg@4245: public: kamg@4245: kamg@4245: bool visit() { kamg@4245: InstanceKlass* cls = current_class(); kamg@4245: streamIndentor si(tty, current_depth() * 2); kamg@4245: tty->indent().print_cr("%s", cls->name()->as_C_string()); kamg@4245: return true; kamg@4245: } kamg@4245: kamg@4245: void* new_node_data(InstanceKlass* cls) { return NULL; } kamg@4245: void free_node_data(void* data) { return; } kamg@4245: }; kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: // Used to register InstanceKlass objects and all related metadata structures kamg@4245: // (Methods, ConstantPools) as "in-use" by the current thread so that they can't kamg@4245: // be deallocated by class redefinition while we're using them. The classes are kamg@4245: // de-registered when this goes out of scope. kamg@4245: // kamg@4245: // Once a class is registered, we need not bother with methodHandles or kamg@4245: // constantPoolHandles for it's associated metadata. kamg@4245: class KeepAliveRegistrar : public StackObj { kamg@4245: private: kamg@4245: Thread* _thread; kamg@4245: GrowableArray _keep_alive; kamg@4245: kamg@4245: public: kamg@4245: KeepAliveRegistrar(Thread* thread) : _thread(thread), _keep_alive(20) { kamg@4245: assert(thread == Thread::current(), "Must be current thread"); kamg@4245: } kamg@4245: kamg@4245: ~KeepAliveRegistrar() { kamg@4245: for (int i = _keep_alive.length() - 1; i >= 0; --i) { kamg@4245: ConstantPool* cp = _keep_alive.at(i); kamg@4245: int idx = _thread->metadata_handles()->find_from_end(cp); kamg@4245: assert(idx > 0, "Must be in the list"); kamg@4245: _thread->metadata_handles()->remove_at(idx); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: // Register a class as 'in-use' by the thread. 
It's fine to register a class kamg@4245: // multiple times (though perhaps inefficient) kamg@4245: void register_class(InstanceKlass* ik) { kamg@4245: ConstantPool* cp = ik->constants(); kamg@4245: _keep_alive.push(cp); kamg@4245: _thread->metadata_handles()->push(cp); kamg@4245: } kamg@4245: }; kamg@4245: kamg@4245: class KeepAliveVisitor : public HierarchyVisitor { kamg@4245: private: kamg@4245: KeepAliveRegistrar* _registrar; kamg@4245: kamg@4245: public: kamg@4245: KeepAliveVisitor(KeepAliveRegistrar* registrar) : _registrar(registrar) {} kamg@4245: kamg@4245: void* new_node_data(InstanceKlass* cls) { return NULL; } kamg@4245: void free_node_data(void* data) { return; } kamg@4245: kamg@4245: bool visit() { kamg@4245: _registrar->register_class(current_class()); kamg@4245: return true; kamg@4245: } kamg@4245: }; kamg@4245: kamg@4245: // A method family contains a set of all methods that implement a single kamg@4245: // language-level method. Because of erasure, these methods may have different kamg@4245: // signatures. As members of the set are collected while walking over the kamg@4245: // hierarchy, they are tagged with a qualification state. The qualification kamg@4245: // state for an erased method is set to disqualified if there exists a path kamg@4245: // from the root of hierarchy to the method that contains an interleaving kamg@4245: // language-equivalent method defined in an interface. kamg@4245: class MethodFamily : public ResourceObj { kamg@4245: private: kamg@4245: kamg@4245: generic::MethodDescriptor* _descriptor; // language-level description kamg@4245: GrowableArray > _members; kamg@4245: ResourceHashtable _member_index; kamg@4245: kamg@4245: Method* _selected_target; // Filled in later, if a unique target exists kamg@4245: Symbol* _exception_message; // If no unique target is found kamg@4245: kamg@4245: bool contains_method(Method* method) { kamg@4245: int* lookup = _member_index.get(method); kamg@4245: return lookup != NULL; kamg@4245: } kamg@4245: kamg@4245: void add_method(Method* method, QualifiedState state) { kamg@4245: Pair entry(method, state); kamg@4245: _member_index.put(method, _members.length()); kamg@4245: _members.append(entry); kamg@4245: } kamg@4245: kamg@4245: void disqualify_method(Method* method) { kamg@4245: int* index = _member_index.get(method); morris@4778: guarantee(index != NULL && *index >= 0 && *index < _members.length(), "bad index"); kamg@4245: _members.at(*index).second = DISQUALIFIED; kamg@4245: } kamg@4245: kamg@4245: Symbol* generate_no_defaults_message(TRAPS) const; kamg@4245: Symbol* generate_abstract_method_message(Method* method, TRAPS) const; kamg@4245: Symbol* generate_conflicts_message(GrowableArray* methods, TRAPS) const; kamg@4245: kamg@4245: public: kamg@4245: kamg@4245: MethodFamily(generic::MethodDescriptor* canonical_desc) kamg@4245: : _descriptor(canonical_desc), _selected_target(NULL), kamg@4245: _exception_message(NULL) {} kamg@4245: kamg@4245: generic::MethodDescriptor* descriptor() const { return _descriptor; } kamg@4245: kamg@4245: bool descriptor_matches(generic::MethodDescriptor* md, generic::Context* ctx) { kamg@4245: return descriptor()->covariant_match(md, ctx); kamg@4245: } kamg@4245: kamg@4245: void set_target_if_empty(Method* m) { kamg@4245: if (_selected_target == NULL && !m->is_overpass()) { kamg@4245: _selected_target = m; kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: void record_qualified_method(Method* m) { kamg@4245: // If the method already exists in the set as qualified, this operation is kamg@4245: 
// redundant. If it already exists as disqualified, then we leave it as kamg@4245: // disqualfied. Thus we only add to the set if it's not already in the kamg@4245: // set. kamg@4245: if (!contains_method(m)) { kamg@4245: add_method(m, QUALIFIED); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: void record_disqualified_method(Method* m) { kamg@4245: // If not in the set, add it as disqualified. If it's already in the set, kamg@4245: // then set the state to disqualified no matter what the previous state was. kamg@4245: if (!contains_method(m)) { kamg@4245: add_method(m, DISQUALIFIED); kamg@4245: } else { kamg@4245: disqualify_method(m); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: bool has_target() const { return _selected_target != NULL; } kamg@4245: bool throws_exception() { return _exception_message != NULL; } kamg@4245: kamg@4245: Method* get_selected_target() { return _selected_target; } kamg@4245: Symbol* get_exception_message() { return _exception_message; } kamg@4245: kamg@4245: // Either sets the target or the exception error message kamg@4245: void determine_target(InstanceKlass* root, TRAPS) { kamg@4245: if (has_target() || throws_exception()) { kamg@4245: return; kamg@4245: } kamg@4245: kamg@4245: GrowableArray qualified_methods; kamg@4245: for (int i = 0; i < _members.length(); ++i) { kamg@4245: Pair entry = _members.at(i); kamg@4245: if (entry.second == QUALIFIED) { kamg@4245: qualified_methods.append(entry.first); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: if (qualified_methods.length() == 0) { kamg@4245: _exception_message = generate_no_defaults_message(CHECK); kamg@4245: } else if (qualified_methods.length() == 1) { kamg@4245: Method* method = qualified_methods.at(0); kamg@4245: if (method->is_abstract()) { kamg@4245: _exception_message = generate_abstract_method_message(method, CHECK); kamg@4245: } else { kamg@4245: _selected_target = qualified_methods.at(0); kamg@4245: } kamg@4245: } else { kamg@4245: _exception_message = generate_conflicts_message(&qualified_methods,CHECK); kamg@4245: } kamg@4245: kamg@4245: assert((has_target() ^ throws_exception()) == 1, kamg@4245: "One and only one must be true"); kamg@4245: } kamg@4245: kamg@4245: bool contains_signature(Symbol* query) { kamg@4245: for (int i = 0; i < _members.length(); ++i) { kamg@4245: if (query == _members.at(i).first->signature()) { kamg@4245: return true; kamg@4245: } kamg@4245: } kamg@4245: return false; kamg@4245: } kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: void print_on(outputStream* str) const { kamg@4245: print_on(str, 0); kamg@4245: } kamg@4245: kamg@4245: void print_on(outputStream* str, int indent) const { kamg@4245: streamIndentor si(str, indent * 2); kamg@4245: kamg@4245: generic::Context ctx(NULL); // empty, as _descriptor already canonicalized kamg@4245: TempNewSymbol family = descriptor()->reify_signature(&ctx, Thread::current()); kamg@4245: str->indent().print_cr("Logical Method %s:", family->as_C_string()); kamg@4245: kamg@4245: streamIndentor si2(str); kamg@4245: for (int i = 0; i < _members.length(); ++i) { kamg@4245: str->indent(); kamg@4245: print_method(str, _members.at(i).first); kamg@4245: if (_members.at(i).second == DISQUALIFIED) { kamg@4245: str->print(" (disqualified)"); kamg@4245: } kamg@4245: str->print_cr(""); kamg@4245: } kamg@4245: kamg@4245: if (_selected_target != NULL) { kamg@4245: print_selected(str, 1); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: void print_selected(outputStream* str, int indent) const { kamg@4245: assert(has_target(), "Should be called 
otherwise"); kamg@4245: streamIndentor si(str, indent * 2); kamg@4245: str->indent().print("Selected method: "); kamg@4245: print_method(str, _selected_target); kamg@4245: str->print_cr(""); kamg@4245: } kamg@4245: kamg@4245: void print_exception(outputStream* str, int indent) { kamg@4245: assert(throws_exception(), "Should be called otherwise"); kamg@4245: streamIndentor si(str, indent * 2); kamg@4245: str->indent().print_cr("%s", _exception_message->as_C_string()); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: }; kamg@4245: kamg@4245: Symbol* MethodFamily::generate_no_defaults_message(TRAPS) const { kamg@4245: return SymbolTable::new_symbol("No qualifying defaults found", CHECK_NULL); kamg@4245: } kamg@4245: kamg@4245: Symbol* MethodFamily::generate_abstract_method_message(Method* method, TRAPS) const { kamg@4245: Symbol* klass = method->klass_name(); kamg@4245: Symbol* name = method->name(); kamg@4245: Symbol* sig = method->signature(); kamg@4245: stringStream ss; kamg@4245: ss.print("Method "); kamg@4245: ss.write((const char*)klass->bytes(), klass->utf8_length()); kamg@4245: ss.print("."); kamg@4245: ss.write((const char*)name->bytes(), name->utf8_length()); kamg@4245: ss.write((const char*)sig->bytes(), sig->utf8_length()); kamg@4245: ss.print(" is abstract"); kamg@4245: return SymbolTable::new_symbol(ss.base(), (int)ss.size(), CHECK_NULL); kamg@4245: } kamg@4245: kamg@4245: Symbol* MethodFamily::generate_conflicts_message(GrowableArray* methods, TRAPS) const { kamg@4245: stringStream ss; kamg@4245: ss.print("Conflicting default methods:"); kamg@4245: for (int i = 0; i < methods->length(); ++i) { kamg@4245: Method* method = methods->at(i); kamg@4245: Symbol* klass = method->klass_name(); kamg@4245: Symbol* name = method->name(); kamg@4245: ss.print(" "); kamg@4245: ss.write((const char*)klass->bytes(), klass->utf8_length()); kamg@4245: ss.print("."); kamg@4245: ss.write((const char*)name->bytes(), name->utf8_length()); kamg@4245: } kamg@4245: return SymbolTable::new_symbol(ss.base(), (int)ss.size(), CHECK_NULL); kamg@4245: } kamg@4245: kamg@4245: class StateRestorer; kamg@4245: kamg@4245: // StatefulMethodFamily is a wrapper around MethodFamily that maintains the kamg@4245: // qualification state during hierarchy visitation, and applies that state kamg@4245: // when adding members to the MethodFamily. 
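//
// Illustrative sketch of how a hierarchy walk is expected to use this wrapper
// (it mirrors FindMethodsByName::visit() later in this file; 'families', 'md',
// 'ctx', 'm' and 'scope' are assumed locals of that visit):
//
//   StatefulMethodFamily* family = families.find_matching_or_create(md, ctx);
//   StateRestorer* restorer = family->record_method_and_dq_further(m);
//   scope->add_mark(restorer);   // the previous qualification state comes back
//                                // when this branch of the walk is popped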
kamg@4245: class StatefulMethodFamily : public ResourceObj {
kamg@4245:   friend class StateRestorer;
kamg@4245:  private:
kamg@4245:   MethodFamily* _method;
kamg@4245:   QualifiedState _qualification_state;
kamg@4245:
kamg@4245:   void set_qualification_state(QualifiedState state) {
kamg@4245:     _qualification_state = state;
kamg@4245:   }
kamg@4245:
kamg@4245:  public:
kamg@4245:   StatefulMethodFamily(generic::MethodDescriptor* md, generic::Context* ctx) {
kamg@4245:     _method = new MethodFamily(md->canonicalize(ctx));
kamg@4245:     _qualification_state = QUALIFIED;
kamg@4245:   }
kamg@4245:
kamg@4245:   void set_target_if_empty(Method* m) { _method->set_target_if_empty(m); }
kamg@4245:
kamg@4245:   MethodFamily* get_method_family() { return _method; }
kamg@4245:
kamg@4245:   bool descriptor_matches(generic::MethodDescriptor* md, generic::Context* ctx) {
kamg@4245:     return _method->descriptor_matches(md, ctx);
kamg@4245:   }
kamg@4245:
kamg@4245:   StateRestorer* record_method_and_dq_further(Method* mo);
kamg@4245: };
kamg@4245:
kamg@4245: class StateRestorer : public PseudoScopeMark {
kamg@4245:  private:
kamg@4245:   StatefulMethodFamily* _method;
kamg@4245:   QualifiedState _state_to_restore;
kamg@4245:  public:
kamg@4245:   StateRestorer(StatefulMethodFamily* dm, QualifiedState state)
kamg@4245:       : _method(dm), _state_to_restore(state) {}
kamg@4245:   ~StateRestorer() { destroy(); }
kamg@4245:   void restore_state() { _method->set_qualification_state(_state_to_restore); }
kamg@4245:   virtual void destroy() { restore_state(); }
kamg@4245: };
kamg@4245:
kamg@4245: StateRestorer* StatefulMethodFamily::record_method_and_dq_further(Method* mo) {
kamg@4245:   StateRestorer* mark = new StateRestorer(this, _qualification_state);
kamg@4245:   if (_qualification_state == QUALIFIED) {
kamg@4245:     _method->record_qualified_method(mo);
kamg@4245:   } else {
kamg@4245:     _method->record_disqualified_method(mo);
kamg@4245:   }
kamg@4245:   // Everything found "above" this method in the hierarchy walk is set to
kamg@4245:   // disqualified
kamg@4245:   set_qualification_state(DISQUALIFIED);
kamg@4245:   return mark;
kamg@4245: }
kamg@4245:
kamg@4245: class StatefulMethodFamilies : public ResourceObj {
kamg@4245:  private:
kamg@4245:   GrowableArray<StatefulMethodFamily*> _methods;
kamg@4245:
kamg@4245:  public:
kamg@4245:   StatefulMethodFamily* find_matching(
kamg@4245:       generic::MethodDescriptor* md, generic::Context* ctx) {
kamg@4245:     for (int i = 0; i < _methods.length(); ++i) {
kamg@4245:       StatefulMethodFamily* existing = _methods.at(i);
kamg@4245:       if (existing->descriptor_matches(md, ctx)) {
kamg@4245:         return existing;
kamg@4245:       }
kamg@4245:     }
kamg@4245:     return NULL;
kamg@4245:   }
kamg@4245:
kamg@4245:   StatefulMethodFamily* find_matching_or_create(
kamg@4245:       generic::MethodDescriptor* md, generic::Context* ctx) {
kamg@4245:     StatefulMethodFamily* method = find_matching(md, ctx);
kamg@4245:     if (method == NULL) {
kamg@4245:       method = new StatefulMethodFamily(md, ctx);
kamg@4245:       _methods.append(method);
kamg@4245:     }
kamg@4245:     return method;
kamg@4245:   }
kamg@4245:
kamg@4245:   void extract_families_into(GrowableArray<MethodFamily*>* array) {
kamg@4245:     for (int i = 0; i < _methods.length(); ++i) {
kamg@4245:       array->append(_methods.at(i)->get_method_family());
kamg@4245:     }
kamg@4245:   }
kamg@4245: };
kamg@4245:
kamg@4245: // Represents a location corresponding to a vtable slot for which neither the
kamg@4245: // class nor any of its ancestors provides an implementation.
kamg@4245: // Default methods may be present to fill this slot.
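//
// Sketch of a slot's life cycle (illustrative; this mirrors how
// find_empty_vtable_slots() and create_overpasses() below use the class):
//
//   EmptyVtableSlot* slot = new EmptyVtableSlot(miranda_method);
//   ...                                  // hierarchy analysis runs
//   slot->bind_family(family);           // a matching MethodFamily was found
//   if (slot->is_bound()) {
//     // generate an overpass from slot->get_binding()
//   }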
kamg@4245: class EmptyVtableSlot : public ResourceObj { kamg@4245: private: kamg@4245: Symbol* _name; kamg@4245: Symbol* _signature; kamg@4245: int _size_of_parameters; kamg@4245: MethodFamily* _binding; kamg@4245: kamg@4245: public: kamg@4245: EmptyVtableSlot(Method* method) kamg@4245: : _name(method->name()), _signature(method->signature()), kamg@4245: _size_of_parameters(method->size_of_parameters()), _binding(NULL) {} kamg@4245: kamg@4245: Symbol* name() const { return _name; } kamg@4245: Symbol* signature() const { return _signature; } kamg@4245: int size_of_parameters() const { return _size_of_parameters; } kamg@4245: kamg@4245: void bind_family(MethodFamily* lm) { _binding = lm; } kamg@4245: bool is_bound() { return _binding != NULL; } kamg@4245: MethodFamily* get_binding() { return _binding; } kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: void print_on(outputStream* str) const { kamg@4245: print_slot(str, name(), signature()); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: }; kamg@4245: kamg@4245: static GrowableArray* find_empty_vtable_slots( kamg@4245: InstanceKlass* klass, GrowableArray* mirandas, TRAPS) { kamg@4245: kamg@4245: assert(klass != NULL, "Must be valid class"); kamg@4245: kamg@4245: GrowableArray* slots = new GrowableArray(); kamg@4245: kamg@4245: // All miranda methods are obvious candidates kamg@4245: for (int i = 0; i < mirandas->length(); ++i) { kamg@4245: EmptyVtableSlot* slot = new EmptyVtableSlot(mirandas->at(i)); kamg@4245: slots->append(slot); kamg@4245: } kamg@4245: kamg@4245: // Also any overpasses in our superclasses, that we haven't implemented. kamg@4245: // (can't use the vtable because it is not guaranteed to be initialized yet) kamg@4245: InstanceKlass* super = klass->java_super(); kamg@4245: while (super != NULL) { kamg@4245: for (int i = 0; i < super->methods()->length(); ++i) { kamg@4245: Method* m = super->methods()->at(i); kamg@4245: if (m->is_overpass()) { kamg@4245: // m is a method that would have been a miranda if not for the kamg@4245: // default method processing that occurred on behalf of our superclass, kamg@4245: // so it's a method we want to re-examine in this new context. That is, kamg@4245: // unless we have a real implementation of it in the current class. kamg@4245: Method* impl = klass->lookup_method(m->name(), m->signature()); kamg@4245: if (impl == NULL || impl->is_overpass()) { kamg@4245: slots->append(new EmptyVtableSlot(m)); kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: super = super->java_super(); kamg@4245: } kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr("Slots that need filling:"); kamg@4245: streamIndentor si(tty); kamg@4245: for (int i = 0; i < slots->length(); ++i) { kamg@4245: tty->indent(); kamg@4245: slots->at(i)->print_on(tty); kamg@4245: tty->print_cr(""); kamg@4245: } kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: return slots; kamg@4245: } kamg@4245: kamg@4245: // Iterates over the type hierarchy looking for all methods with a specific kamg@4245: // method name. The result of this is a set of method families each of kamg@4245: // which is populated with a set of methods that implement the same kamg@4245: // language-level signature. 
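//
// Typical use, mirroring generate_default_methods() and find_super_default()
// below ('cache', 'name' and 'klass' are assumed to be in scope):
//
//   generic::Context ctx(&cache);
//   FindMethodsByName visitor(&cache, name, &ctx, THREAD);
//   visitor.run(klass);                          // walk the hierarchy
//   GrowableArray<MethodFamily*> families;
//   visitor.get_discovered_families(&families);  // collect the results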
kamg@4245: class FindMethodsByName : public HierarchyVisitor { kamg@4245: private: kamg@4245: // Context data kamg@4245: Thread* THREAD; kamg@4245: generic::DescriptorCache* _cache; kamg@4245: Symbol* _method_name; kamg@4245: generic::Context* _ctx; kamg@4245: StatefulMethodFamilies _families; kamg@4245: kamg@4245: public: kamg@4245: kamg@4245: FindMethodsByName(generic::DescriptorCache* cache, Symbol* name, kamg@4245: generic::Context* ctx, Thread* thread) : kamg@4245: _cache(cache), _method_name(name), _ctx(ctx), THREAD(thread) {} kamg@4245: kamg@4245: void get_discovered_families(GrowableArray* methods) { kamg@4245: _families.extract_families_into(methods); kamg@4245: } kamg@4245: kamg@4245: void* new_node_data(InstanceKlass* cls) { return new PseudoScope(); } kamg@4245: void free_node_data(void* node_data) { kamg@4245: PseudoScope::cast(node_data)->destroy(); kamg@4245: } kamg@4245: kamg@4245: bool visit() { kamg@4245: PseudoScope* scope = PseudoScope::cast(current_data()); kamg@4245: InstanceKlass* klass = current_class(); kamg@4245: InstanceKlass* sub = current_depth() > 0 ? class_at_depth(1) : NULL; kamg@4245: kamg@4245: ContextMark* cm = new ContextMark(_ctx->mark()); kamg@4245: scope->add_mark(cm); // will restore context when scope is freed kamg@4245: kamg@4245: _ctx->apply_type_arguments(sub, klass, THREAD); kamg@4245: kamg@4245: int start, end = 0; kamg@4245: start = klass->find_method_by_name(_method_name, &end); kamg@4245: if (start != -1) { kamg@4245: for (int i = start; i < end; ++i) { kamg@4245: Method* m = klass->methods()->at(i); kamg@4245: // This gets the method's parameter list with its generic type kamg@4245: // parameters resolved kamg@4245: generic::MethodDescriptor* md = _cache->descriptor_for(m, THREAD); kamg@4245: kamg@4245: // Find all methods on this hierarchy that match this method kamg@4245: // (name, signature). This class collects other families of this kamg@4245: // method name. kamg@4245: StatefulMethodFamily* family = kamg@4245: _families.find_matching_or_create(md, _ctx); kamg@4245: kamg@4245: if (klass->is_interface()) { kamg@4245: // ??? kamg@4245: StateRestorer* restorer = family->record_method_and_dq_further(m); kamg@4245: scope->add_mark(restorer); kamg@4245: } else { kamg@4245: // This is the rule that methods in classes "win" (bad word) over kamg@4245: // methods in interfaces. This works because of single inheritance kamg@4245: family->set_target_if_empty(m); kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: return true; kamg@4245: } kamg@4245: }; kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: static void print_families( kamg@4245: GrowableArray* methods, Symbol* match) { kamg@4245: streamIndentor si(tty, 4); kamg@4245: if (methods->length() == 0) { kamg@4245: tty->indent(); kamg@4245: tty->print_cr("No Logical Method found"); kamg@4245: } kamg@4245: for (int i = 0; i < methods->length(); ++i) { kamg@4245: tty->indent(); kamg@4245: MethodFamily* lm = methods->at(i); kamg@4245: if (lm->contains_signature(match)) { kamg@4245: tty->print_cr(""); kamg@4245: } else { kamg@4245: tty->print_cr(""); kamg@4245: } kamg@4245: lm->print_on(tty, 1); kamg@4245: } kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: static void merge_in_new_methods(InstanceKlass* klass, kamg@4245: GrowableArray* new_methods, TRAPS); kamg@4245: static void create_overpasses( kamg@4245: GrowableArray* slots, InstanceKlass* klass, TRAPS); kamg@4245: kamg@4245: // This is the guts of the default methods implementation. 
// This is called just
kamg@4245: // after the classfile has been parsed if some ancestor has default methods.
kamg@4245: //
kamg@4245: // First it finds any name/signature slots that need an implementation (either
kamg@4245: // because they are miranda or because a superclass's implementation is an
kamg@4245: // overpass itself).  For each slot, iterate over the hierarchy, using generic
kamg@4245: // signature information to partition any methods that match the name into
kamg@4245: // method families where each family contains methods whose signatures are
kamg@4245: // equivalent at the language level (i.e., their reified parameters match and
kamg@4245: // return values are covariant).  Check those sets to see if they contain a
kamg@4245: // signature that matches the slot we're looking at (if we're lucky, there
kamg@4245: // might be other empty slots that we can fill using the same analysis).
kamg@4245: //
kamg@4245: // For each slot filled, we generate an overpass method that either calls the
kamg@4245: // unique default method candidate using invokespecial, or throws an exception
kamg@4245: // (in the case of no default method candidates, or more than one valid
kamg@4245: // candidate).  These methods are then added to the class's method list.  If
kamg@4245: // the method set we're using contains methods (qualified or not) with a
kamg@4245: // different runtime signature than the method we're creating, then we have to
kamg@4245: // create bridges with those signatures too.
kamg@4245: void DefaultMethods::generate_default_methods(
kamg@4245:     InstanceKlass* klass, GrowableArray<Method*>* mirandas, TRAPS) {
kamg@4245:
kamg@4245:   // This resource mark is the bound for all memory allocation that takes
kamg@4245:   // place during default method processing.  After this goes out of scope,
kamg@4245:   // all (Resource) objects' memory will be reclaimed.  Be careful if adding an
kamg@4245:   // embedded resource mark under here as that memory can't be used outside
kamg@4245:   // whatever scope it's in.
kamg@4245:   ResourceMark rm(THREAD);
kamg@4245:
kamg@4245:   generic::DescriptorCache cache;
kamg@4245:
kamg@4245:   // Keep entire hierarchy alive for the duration of the computation
kamg@4245:   KeepAliveRegistrar keepAlive(THREAD);
kamg@4245:   KeepAliveVisitor loadKeepAlive(&keepAlive);
kamg@4245:   loadKeepAlive.run(klass);
kamg@4245:
kamg@4245: #ifndef PRODUCT
kamg@4245:   if (TraceDefaultMethods) {
kamg@4245:     ResourceMark rm;  // be careful with these!
kamg@4245: tty->print_cr("Class %s requires default method processing", kamg@4245: klass->name()->as_klass_external_name()); kamg@4245: PrintHierarchy printer; kamg@4245: printer.run(klass); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: GrowableArray* empty_slots = kamg@4245: find_empty_vtable_slots(klass, mirandas, CHECK); kamg@4245: kamg@4245: for (int i = 0; i < empty_slots->length(); ++i) { kamg@4245: EmptyVtableSlot* slot = empty_slots->at(i); kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: streamIndentor si(tty, 2); kamg@4245: tty->indent().print("Looking for default methods for slot "); kamg@4245: slot->print_on(tty); kamg@4245: tty->print_cr(""); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: if (slot->is_bound()) { kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: streamIndentor si(tty, 4); kamg@4245: tty->indent().print_cr("Already bound to logical method:"); kamg@4245: slot->get_binding()->print_on(tty, 1); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: continue; // covered by previous processing kamg@4245: } kamg@4245: kamg@4245: generic::Context ctx(&cache); kamg@4245: FindMethodsByName visitor(&cache, slot->name(), &ctx, CHECK); kamg@4245: visitor.run(klass); kamg@4245: kamg@4245: GrowableArray discovered_families; kamg@4245: visitor.get_discovered_families(&discovered_families); kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: print_families(&discovered_families, slot->signature()); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: // Find and populate any other slots that match the discovered families kamg@4245: for (int j = i; j < empty_slots->length(); ++j) { kamg@4245: EmptyVtableSlot* open_slot = empty_slots->at(j); kamg@4245: kamg@4245: if (slot->name() == open_slot->name()) { kamg@4245: for (int k = 0; k < discovered_families.length(); ++k) { kamg@4245: MethodFamily* lm = discovered_families.at(k); kamg@4245: kamg@4245: if (lm->contains_signature(open_slot->signature())) { kamg@4245: lm->determine_target(klass, CHECK); kamg@4245: open_slot->bind_family(lm); kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr("Creating overpasses..."); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: create_overpasses(empty_slots, klass, CHECK); kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr("Default method processing complete"); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: } kamg@4245: kamg@4245: kamg@4245: /** kamg@4245: * Generic analysis was used upon interface '_target' and found a unique kamg@4245: * default method candidate with generic signature '_method_desc'. This kamg@4245: * method is only viable if it would also be in the set of default method kamg@4245: * candidates if we ran a full analysis on the current class. kamg@4245: * kamg@4245: * The only reason that the method would not be in the set of candidates for kamg@4245: * the current class is if that there's another covariantly matching method kamg@4245: * which is "more specific" than the found method -- i.e., one could find a kamg@4245: * path in the interface hierarchy in which the matching method appears kamg@4245: * before we get to '_target'. kamg@4245: * kamg@4245: * In order to determine this, we examine all of the implemented kamg@4245: * interfaces. 
If we find path that leads to the '_target' interface, then kamg@4245: * we examine that path to see if there are any methods that would shadow kamg@4245: * the selected method along that path. kamg@4245: */ kamg@4245: class ShadowChecker : public HierarchyVisitor { kamg@4245: private: kamg@4245: generic::DescriptorCache* _cache; kamg@4245: Thread* THREAD; kamg@4245: kamg@4245: InstanceKlass* _target; kamg@4245: kamg@4245: Symbol* _method_name; kamg@4245: InstanceKlass* _method_holder; kamg@4245: generic::MethodDescriptor* _method_desc; kamg@4245: bool _found_shadow; kamg@4245: kamg@4245: bool path_has_shadow() { kamg@4245: generic::Context ctx(_cache); kamg@4245: kamg@4245: for (int i = current_depth() - 1; i > 0; --i) { kamg@4245: InstanceKlass* ik = class_at_depth(i); kamg@4245: InstanceKlass* sub = class_at_depth(i + 1); kamg@4245: ctx.apply_type_arguments(sub, ik, THREAD); kamg@4245: kamg@4245: if (ik->is_interface()) { kamg@4245: int end; kamg@4245: int start = ik->find_method_by_name(_method_name, &end); kamg@4245: if (start != -1) { kamg@4245: for (int j = start; j < end; ++j) { kamg@4245: Method* mo = ik->methods()->at(j); kamg@4245: generic::MethodDescriptor* md = _cache->descriptor_for(mo, THREAD); kamg@4245: if (_method_desc->covariant_match(md, &ctx)) { kamg@4245: return true; kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: return false; kamg@4245: } kamg@4245: kamg@4245: public: kamg@4245: kamg@4245: ShadowChecker(generic::DescriptorCache* cache, Thread* thread, kamg@4245: Symbol* name, InstanceKlass* holder, generic::MethodDescriptor* desc, kamg@4245: InstanceKlass* target) kamg@4245: : _cache(cache), THREAD(thread), _method_name(name), _method_holder(holder), kamg@4245: _method_desc(desc), _target(target), _found_shadow(false) {} kamg@4245: kamg@4245: void* new_node_data(InstanceKlass* cls) { return NULL; } kamg@4245: void free_node_data(void* data) { return; } kamg@4245: kamg@4245: bool visit() { kamg@4245: InstanceKlass* ik = current_class(); kamg@4245: if (ik == _target && current_depth() == 1) { kamg@4245: return false; // This was the specified super -- no need to search it kamg@4245: } kamg@4245: if (ik == _method_holder || ik == _target) { kamg@4245: // We found a path that should be examined to see if it shadows _method kamg@4245: if (path_has_shadow()) { kamg@4245: _found_shadow = true; kamg@4245: cancel_iteration(); kamg@4245: } kamg@4245: return false; // no need to continue up hierarchy kamg@4245: } kamg@4245: return true; kamg@4245: } kamg@4245: kamg@4245: bool found_shadow() { return _found_shadow; } kamg@4245: }; kamg@4245: kamg@4245: // This is called during linktime when we find an invokespecial call that kamg@4245: // refers to a direct superinterface. It indicates that we should find the kamg@4245: // default method in the hierarchy of that superinterface, and if that method kamg@4245: // would have been a candidate from the point of view of 'this' class, then we kamg@4245: // return that method. 
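//
// For illustration only, a hypothetical Java source shape that produces such a
// call (javac compiles the A.super.m() expression to an invokespecial whose
// target is the direct superinterface A):
//
//   interface A { default void m() { } }
//   class C implements A {
//     public void m() { A.super.m(); }
//   }
//
// Resolving that invokespecial arrives here with cls == C and super == A.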
kamg@4245: Method* DefaultMethods::find_super_default( kamg@4245: Klass* cls, Klass* super, Symbol* method_name, Symbol* sig, TRAPS) { kamg@4245: kamg@4245: ResourceMark rm(THREAD); kamg@4245: kamg@4245: assert(cls != NULL && super != NULL, "Need real classes"); kamg@4245: kamg@4245: InstanceKlass* current_class = InstanceKlass::cast(cls); kamg@4245: InstanceKlass* direction = InstanceKlass::cast(super); kamg@4245: kamg@4245: // Keep entire hierarchy alive for the duration of the computation kamg@4245: KeepAliveRegistrar keepAlive(THREAD); kamg@4245: KeepAliveVisitor loadKeepAlive(&keepAlive); kamg@4245: loadKeepAlive.run(current_class); kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr("Finding super default method %s.%s%s from %s", kamg@4245: direction->name()->as_C_string(), kamg@4245: method_name->as_C_string(), sig->as_C_string(), kamg@4245: current_class->name()->as_C_string()); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: if (!direction->is_interface()) { kamg@4245: // We should not be here kamg@4245: return NULL; kamg@4245: } kamg@4245: kamg@4245: generic::DescriptorCache cache; kamg@4245: generic::Context ctx(&cache); kamg@4245: kamg@4245: // Prime the initial generic context for current -> direction kamg@4245: ctx.apply_type_arguments(current_class, direction, CHECK_NULL); kamg@4245: kamg@4245: FindMethodsByName visitor(&cache, method_name, &ctx, CHECK_NULL); kamg@4245: visitor.run(direction); kamg@4245: kamg@4245: GrowableArray families; kamg@4245: visitor.get_discovered_families(&families); kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: print_families(&families, sig); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: MethodFamily* selected_family = NULL; kamg@4245: kamg@4245: for (int i = 0; i < families.length(); ++i) { kamg@4245: MethodFamily* lm = families.at(i); kamg@4245: if (lm->contains_signature(sig)) { kamg@4245: lm->determine_target(current_class, CHECK_NULL); kamg@4245: selected_family = lm; kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: if (selected_family->has_target()) { kamg@4245: Method* target = selected_family->get_selected_target(); kamg@4245: InstanceKlass* holder = InstanceKlass::cast(target->method_holder()); kamg@4245: kamg@4245: // Verify that the identified method is valid from the context of kamg@4245: // the current class kamg@4245: ShadowChecker checker(&cache, THREAD, target->name(), kamg@4245: holder, selected_family->descriptor(), direction); kamg@4245: checker.run(current_class); kamg@4245: kamg@4245: if (checker.found_shadow()) { kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr(" Only candidate found was shadowed."); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: THROW_MSG_(vmSymbols::java_lang_AbstractMethodError(), kamg@4245: "Accessible default method not found", NULL); kamg@4245: } else { kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print(" Returning "); kamg@4245: print_method(tty, target, true); kamg@4245: tty->print_cr(""); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: return target; kamg@4245: } kamg@4245: } else { kamg@4245: assert(selected_family->throws_exception(), "must have target or throw"); kamg@4245: THROW_MSG_(vmSymbols::java_lang_AbstractMethodError(), kamg@4245: selected_family->get_exception_message()->as_C_string(), NULL); kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: kamg@4245: static int 
assemble_redirect( kamg@4245: BytecodeConstantPool* cp, BytecodeBuffer* buffer, kamg@4245: Symbol* incoming, Method* target, TRAPS) { kamg@4245: kamg@4245: BytecodeAssembler assem(buffer, cp); kamg@4245: kamg@4245: SignatureStream in(incoming, true); kamg@4245: SignatureStream out(target->signature(), true); kamg@4245: u2 parameter_count = 0; kamg@4245: kamg@4245: assem.aload(parameter_count++); // load 'this' kamg@4245: kamg@4245: while (!in.at_return_type()) { kamg@4245: assert(!out.at_return_type(), "Parameter counts do not match"); kamg@4245: BasicType bt = in.type(); kamg@4245: assert(out.type() == bt, "Parameter types are not compatible"); kamg@4245: assem.load(bt, parameter_count); kamg@4245: if (in.is_object() && in.as_symbol(THREAD) != out.as_symbol(THREAD)) { kamg@4245: assem.checkcast(out.as_symbol(THREAD)); kamg@4245: } else if (bt == T_LONG || bt == T_DOUBLE) { kamg@4245: ++parameter_count; // longs and doubles use two slots kamg@4245: } kamg@4245: ++parameter_count; kamg@4245: in.next(); kamg@4245: out.next(); kamg@4245: } kamg@4245: assert(out.at_return_type(), "Parameter counts do not match"); kamg@4245: assert(in.type() == out.type(), "Return types are not compatible"); kamg@4245: kamg@4245: if (parameter_count == 1 && (in.type() == T_LONG || in.type() == T_DOUBLE)) { kamg@4245: ++parameter_count; // need room for return value kamg@4245: } kamg@4245: if (target->method_holder()->is_interface()) { kamg@4245: assem.invokespecial(target); kamg@4245: } else { kamg@4245: assem.invokevirtual(target); kamg@4245: } kamg@4245: kamg@4245: if (in.is_object() && in.as_symbol(THREAD) != out.as_symbol(THREAD)) { kamg@4245: assem.checkcast(in.as_symbol(THREAD)); kamg@4245: } kamg@4245: assem._return(in.type()); kamg@4245: return parameter_count; kamg@4245: } kamg@4245: kamg@4245: static int assemble_abstract_method_error( kamg@4245: BytecodeConstantPool* cp, BytecodeBuffer* buffer, Symbol* message, TRAPS) { kamg@4245: kamg@4245: Symbol* errorName = vmSymbols::java_lang_AbstractMethodError(); kamg@4245: Symbol* init = vmSymbols::object_initializer_name(); kamg@4245: Symbol* sig = vmSymbols::string_void_signature(); kamg@4245: kamg@4245: BytecodeAssembler assem(buffer, cp); kamg@4245: kamg@4245: assem._new(errorName); kamg@4245: assem.dup(); kamg@4245: assem.load_string(message); kamg@4245: assem.invokespecial(errorName, init, sig); kamg@4245: assem.athrow(); kamg@4245: kamg@4245: return 3; // max stack size: [ exception, exception, string ] kamg@4245: } kamg@4245: kamg@4245: static Method* new_method( kamg@4245: BytecodeConstantPool* cp, BytecodeBuffer* bytecodes, Symbol* name, kamg@4245: Symbol* sig, AccessFlags flags, int max_stack, int params, kamg@4245: ConstMethod::MethodType mt, TRAPS) { kamg@4245: kamg@4245: address code_start = static_cast
<address>(bytecodes->adr_at(0));
kamg@4245:   int code_length = bytecodes->length();
coleenp@4572:   InlineTableSizes sizes;
kamg@4245:
kamg@4245:   Method* m = Method::allocate(cp->pool_holder()->class_loader_data(),
coleenp@4572:                                code_length, flags, &sizes,
coleenp@4398:                                mt, CHECK_NULL);
kamg@4245:
kamg@4245:   m->set_constants(NULL); // This will get filled in later
kamg@4245:   m->set_name_index(cp->utf8(name));
kamg@4245:   m->set_signature_index(cp->utf8(sig));
kamg@4245: #ifdef CC_INTERP
kamg@4245:   ResultTypeFinder rtf(sig);
kamg@4245:   m->set_result_index(rtf.type());
kamg@4245: #endif
kamg@4245:   m->set_size_of_parameters(params);
kamg@4245:   m->set_max_stack(max_stack);
kamg@4245:   m->set_max_locals(params);
kamg@4245:   m->constMethod()->set_stackmap_data(NULL);
kamg@4245:   m->set_code(code_start);
kamg@4245:   m->set_force_inline(true);
kamg@4245:
kamg@4245:   return m;
kamg@4245: }
kamg@4245:
kamg@4245: static void switchover_constant_pool(BytecodeConstantPool* bpool,
kamg@4245:     InstanceKlass* klass, GrowableArray<Method*>* new_methods, TRAPS) {
kamg@4245:
kamg@4245:   if (new_methods->length() > 0) {
kamg@4245:     ConstantPool* cp = bpool->create_constant_pool(CHECK);
kamg@4245:     if (cp != klass->constants()) {
kamg@4245:       klass->class_loader_data()->add_to_deallocate_list(klass->constants());
kamg@4245:       klass->set_constants(cp);
kamg@4245:       cp->set_pool_holder(klass);
kamg@4245:
kamg@4245:       for (int i = 0; i < new_methods->length(); ++i) {
kamg@4245:         new_methods->at(i)->set_constants(cp);
kamg@4245:       }
kamg@4245:       for (int i = 0; i < klass->methods()->length(); ++i) {
kamg@4245:         Method* mo = klass->methods()->at(i);
kamg@4245:         mo->set_constants(cp);
kamg@4245:       }
kamg@4245:     }
kamg@4245:   }
kamg@4245: }
kamg@4245:
kamg@4245: // A "bridge" is a method created by javac to bridge the gap between
kamg@4245: // an implementation and a generically-compatible, but different, signature.
kamg@4245: // Bridges have actual bytecode implementations in classfiles.
kamg@4245: // An "overpass", on the other hand, performs the same function as a bridge
kamg@4245: // but does not occur in a classfile; the VM creates the overpass itself,
kamg@4245: // when it needs a path to get from a call site to a default method and no
kamg@4245: // bridge exists.
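//
// For illustration only (hypothetical Java source): given
//
//   interface Box<T> { T get(); }
//   class StringBox implements Box<String> { public String get() { return ""; } }
//
// javac emits a bridge "Object get()" in StringBox that forwards to
// "String get()".  An overpass plays a similar forwarding role, but is
// synthesized by the VM (below) as either an invokespecial of the selected
// default method or a throw of AbstractMethodError.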
kamg@4245: static void create_overpasses( kamg@4245: GrowableArray* slots, kamg@4245: InstanceKlass* klass, TRAPS) { kamg@4245: kamg@4245: GrowableArray overpasses; kamg@4245: BytecodeConstantPool bpool(klass->constants()); kamg@4245: kamg@4245: for (int i = 0; i < slots->length(); ++i) { kamg@4245: EmptyVtableSlot* slot = slots->at(i); kamg@4245: kamg@4245: if (slot->is_bound()) { kamg@4245: MethodFamily* method = slot->get_binding(); kamg@4245: int max_stack = 0; kamg@4245: BytecodeBuffer buffer; kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print("for slot: "); kamg@4245: slot->print_on(tty); kamg@4245: tty->print_cr(""); kamg@4245: if (method->has_target()) { kamg@4245: method->print_selected(tty, 1); kamg@4245: } else { kamg@4245: method->print_exception(tty, 1); kamg@4245: } kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: if (method->has_target()) { kamg@4245: Method* selected = method->get_selected_target(); kamg@4245: max_stack = assemble_redirect( kamg@4245: &bpool, &buffer, slot->signature(), selected, CHECK); kamg@4245: } else if (method->throws_exception()) { kamg@4245: max_stack = assemble_abstract_method_error( kamg@4245: &bpool, &buffer, method->get_exception_message(), CHECK); kamg@4245: } kamg@4245: AccessFlags flags = accessFlags_from( kamg@4245: JVM_ACC_PUBLIC | JVM_ACC_SYNTHETIC | JVM_ACC_BRIDGE); kamg@4245: Method* m = new_method(&bpool, &buffer, slot->name(), slot->signature(), kamg@4245: flags, max_stack, slot->size_of_parameters(), kamg@4245: ConstMethod::OVERPASS, CHECK); kamg@4245: if (m != NULL) { kamg@4245: overpasses.push(m); kamg@4245: } kamg@4245: } kamg@4245: } kamg@4245: kamg@4245: #ifndef PRODUCT kamg@4245: if (TraceDefaultMethods) { kamg@4245: tty->print_cr("Created %d overpass methods", overpasses.length()); kamg@4245: } kamg@4245: #endif // ndef PRODUCT kamg@4245: kamg@4245: switchover_constant_pool(&bpool, klass, &overpasses, CHECK); kamg@4245: merge_in_new_methods(klass, &overpasses, CHECK); kamg@4245: } kamg@4245: kamg@4245: static void sort_methods(GrowableArray* methods) { kamg@4245: // Note that this must sort using the same key as is used for sorting kamg@4245: // methods in InstanceKlass. 
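  // (The key in the loop below is the address of each method's name Symbol*,
  // compared as a uintptr_t; this is assumed to match the ordering applied to
  // the class's own methods when they were sorted during class file parsing.)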
kamg@4245: bool sorted = true; kamg@4245: for (int i = methods->length() - 1; i > 0; --i) { kamg@4245: for (int j = 0; j < i; ++j) { kamg@4245: Method* m1 = methods->at(j); kamg@4245: Method* m2 = methods->at(j + 1); kamg@4245: if ((uintptr_t)m1->name() > (uintptr_t)m2->name()) { kamg@4245: methods->at_put(j, m2); kamg@4245: methods->at_put(j + 1, m1); kamg@4245: sorted = false; kamg@4245: } kamg@4245: } kamg@4245: if (sorted) break; kamg@4245: sorted = true; kamg@4245: } kamg@4245: #ifdef ASSERT kamg@4245: uintptr_t prev = 0; kamg@4245: for (int i = 0; i < methods->length(); ++i) { kamg@4245: Method* mh = methods->at(i); kamg@4245: uintptr_t nv = (uintptr_t)mh->name(); kamg@4245: assert(nv >= prev, "Incorrect overpass method ordering"); kamg@4245: prev = nv; kamg@4245: } kamg@4245: #endif kamg@4245: } kamg@4245: kamg@4245: static void merge_in_new_methods(InstanceKlass* klass, kamg@4245: GrowableArray* new_methods, TRAPS) { kamg@4245: kamg@4245: enum { ANNOTATIONS, PARAMETERS, DEFAULTS, NUM_ARRAYS }; kamg@4245: kamg@4245: Array* original_methods = klass->methods(); kamg@4245: Array* original_ordering = klass->method_ordering(); kamg@4245: Array* merged_ordering = Universe::the_empty_int_array(); kamg@4245: kamg@4245: int new_size = klass->methods()->length() + new_methods->length(); kamg@4245: kamg@4245: Array* merged_methods = MetadataFactory::new_array( kamg@4245: klass->class_loader_data(), new_size, NULL, CHECK); coleenp@4572: kamg@4245: if (original_ordering != NULL && original_ordering->length() > 0) { kamg@4245: merged_ordering = MetadataFactory::new_array( kamg@4245: klass->class_loader_data(), new_size, CHECK); kamg@4245: } kamg@4245: int method_order_index = klass->methods()->length(); kamg@4245: kamg@4245: sort_methods(new_methods); kamg@4245: kamg@4245: // Perform grand merge of existing methods and new methods kamg@4245: int orig_idx = 0; kamg@4245: int new_idx = 0; kamg@4245: kamg@4245: for (int i = 0; i < new_size; ++i) { kamg@4245: Method* orig_method = NULL; kamg@4245: Method* new_method = NULL; kamg@4245: if (orig_idx < original_methods->length()) { kamg@4245: orig_method = original_methods->at(orig_idx); kamg@4245: } kamg@4245: if (new_idx < new_methods->length()) { kamg@4245: new_method = new_methods->at(new_idx); kamg@4245: } kamg@4245: kamg@4245: if (orig_method != NULL && kamg@4245: (new_method == NULL || orig_method->name() < new_method->name())) { kamg@4245: merged_methods->at_put(i, orig_method); kamg@4245: original_methods->at_put(orig_idx, NULL); kamg@4245: if (merged_ordering->length() > 0) { kamg@4245: merged_ordering->at_put(i, original_ordering->at(orig_idx)); kamg@4245: } kamg@4245: ++orig_idx; kamg@4245: } else { kamg@4245: merged_methods->at_put(i, new_method); kamg@4245: if (merged_ordering->length() > 0) { kamg@4245: merged_ordering->at_put(i, method_order_index++); kamg@4245: } kamg@4245: ++new_idx; kamg@4245: } kamg@4245: // update idnum for new location kamg@4245: merged_methods->at(i)->set_method_idnum(i); kamg@4245: } kamg@4245: kamg@4245: // Verify correct order kamg@4245: #ifdef ASSERT kamg@4245: uintptr_t prev = 0; kamg@4245: for (int i = 0; i < merged_methods->length(); ++i) { kamg@4245: Method* mo = merged_methods->at(i); kamg@4245: uintptr_t nv = (uintptr_t)mo->name(); kamg@4245: assert(nv >= prev, "Incorrect method ordering"); kamg@4245: prev = nv; kamg@4245: } kamg@4245: #endif kamg@4245: kamg@4245: // Replace klass methods with new merged lists kamg@4245: klass->set_methods(merged_methods); kamg@4245: kamg@4245: ClassLoaderData* cld = 
klass->class_loader_data(); kamg@4245: MetadataFactory::free_array(cld, original_methods); kamg@4245: if (original_ordering->length() > 0) { kamg@4245: klass->set_method_ordering(merged_ordering); kamg@4245: MetadataFactory::free_array(cld, original_ordering); kamg@4245: } kamg@4245: } kamg@4245: