--- a/src/share/vm/opto/graphKit.hpp Mon Nov 21 05:29:59 2016 +0000
+++ b/src/share/vm/opto/graphKit.hpp Wed Nov 23 23:01:34 2016 -0800
@@ -517,23 +517,28 @@
   // of volatile fields.
   Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt,
                   MemNode::MemOrd mo, LoadNode::ControlDependency control_dependency = LoadNode::DependsOnlyOnTest,
-                  bool require_atomic_access = false) {
+                  bool require_atomic_access = false, bool unaligned = false,
+                  bool mismatched = false) {
     // This version computes alias_index from bottom_type
     return make_load(ctl, adr, t, bt, adr->bottom_type()->is_ptr(),
-                     mo, control_dependency, require_atomic_access);
+                     mo, control_dependency, require_atomic_access,
+                     unaligned, mismatched);
   }
   Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, const TypePtr* adr_type,
                   MemNode::MemOrd mo, LoadNode::ControlDependency control_dependency = LoadNode::DependsOnlyOnTest,
-                  bool require_atomic_access = false) {
+                  bool require_atomic_access = false, bool unaligned = false,
+                  bool mismatched = false) {
     // This version computes alias_index from an address type
     assert(adr_type != NULL, "use other make_load factory");
     return make_load(ctl, adr, t, bt, C->get_alias_index(adr_type),
-                     mo, control_dependency, require_atomic_access);
+                     mo, control_dependency, require_atomic_access,
+                     unaligned, mismatched);
   }
   // This is the base version which is given an alias index.
   Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, int adr_idx,
                   MemNode::MemOrd mo, LoadNode::ControlDependency control_dependency = LoadNode::DependsOnlyOnTest,
-                  bool require_atomic_access = false);
+                  bool require_atomic_access = false, bool unaligned = false,
+                  bool mismatched = false);
 
   // Create & transform a StoreNode and store the effect into the
   // parser's memory state.
@@ -546,19 +551,24 @@
   Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                         const TypePtr* adr_type,
                         MemNode::MemOrd mo,
-                        bool require_atomic_access = false) {
+                        bool require_atomic_access = false,
+                        bool unaligned = false,
+                        bool mismatched = false) {
     // This version computes alias_index from an address type
     assert(adr_type != NULL, "use other store_to_memory factory");
     return store_to_memory(ctl, adr, val, bt,
                            C->get_alias_index(adr_type),
-                           mo, require_atomic_access);
+                           mo, require_atomic_access,
+                           unaligned, mismatched);
   }
   // This is the base version which is given alias index
   // Return the new StoreXNode
   Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                         int adr_idx,
                         MemNode::MemOrd,
-                        bool require_atomic_access = false);
+                        bool require_atomic_access = false,
+                        bool unaligned = false,
+                        bool mismatched = false);
 
 
   // All in one pre-barrier, store, post_barrier
@@ -581,7 +591,8 @@
                   const TypeOopPtr* val_type,
                   BasicType bt,
                   bool use_precise,
-                  MemNode::MemOrd mo);
+                  MemNode::MemOrd mo,
+                  bool mismatched = false);
 
   Node* store_oop_to_object(Node* ctl,
                             Node* obj,        // containing obj
@@ -612,7 +623,8 @@
                                  const TypePtr* adr_type,
                                  Node* val,
                                  BasicType bt,
-                                 MemNode::MemOrd mo);
+                                 MemNode::MemOrd mo,
+                                 bool mismatched = false);
 
   // For the few case where the barriers need special help
   void pre_barrier(bool do_load, Node* ctl,