src/share/vm/opto/library_call.cpp

changeset 2784
92add02409c9
parent 2687
3d58a4983660
parent 2781
e1162778c1c8
child 2786
59766fd005ff
     1.1 --- a/src/share/vm/opto/library_call.cpp	Wed Apr 06 16:02:53 2011 -0700
     1.2 +++ b/src/share/vm/opto/library_call.cpp	Fri Apr 08 14:19:50 2011 -0700
     1.3 @@ -166,6 +166,10 @@
     1.4    // This returns Type::AnyPtr, RawPtr, or OopPtr.
     1.5    int classify_unsafe_addr(Node* &base, Node* &offset);
     1.6    Node* make_unsafe_address(Node* base, Node* offset);
     1.7 +  // Helper for inline_unsafe_access.
     1.8 +  // Generates the guards that check whether the result of
     1.9 +  // Unsafe.getObject should be recorded in an SATB log buffer.
    1.10 +  void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
    1.11    bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
    1.12    bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
    1.13    bool inline_unsafe_allocate();
    1.14 @@ -240,6 +244,8 @@
    1.15    bool inline_numberOfTrailingZeros(vmIntrinsics::ID id);
    1.16    bool inline_bitCount(vmIntrinsics::ID id);
    1.17    bool inline_reverseBytes(vmIntrinsics::ID id);
    1.18 +
    1.19 +  bool inline_reference_get();
    1.20  };
    1.21  
    1.22  
    1.23 @@ -336,6 +342,14 @@
    1.24      if (!UsePopCountInstruction)  return NULL;
    1.25      break;
    1.26  
    1.27 +  case vmIntrinsics::_Reference_get:
    1.28 +    // It is only when G1 is enabled that we absolutely
    1.29 +    // need to use the intrinsic version of Reference.get()
    1.30 +    // so that the value in the referent field, if necessary,
    1.31 +    // can be registered by the pre-barrier code.
    1.32 +    if (!UseG1GC) return NULL;
    1.33 +    break;
    1.34 +
    1.35   default:
    1.36      assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
    1.37      assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
    1.38 @@ -387,6 +401,7 @@
    1.39      tty->print_cr("Intrinsic %s", str);
    1.40    }
    1.41  #endif
    1.42 +
    1.43    if (kit.try_to_inline()) {
    1.44      if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
    1.45        CompileTask::print_inlining(kit.callee(), jvms->depth() - 1, kit.bci(), is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
    1.46 @@ -402,11 +417,19 @@
    1.47    }
    1.48  
    1.49    if (PrintIntrinsics) {
    1.50 -    tty->print("Did not inline intrinsic %s%s at bci:%d in",
    1.51 +    if (jvms->has_method()) {
    1.52 +      // Not a root compile.
    1.53 +      tty->print("Did not inline intrinsic %s%s at bci:%d in",
    1.54 +                 vmIntrinsics::name_at(intrinsic_id()),
    1.55 +                 (is_virtual() ? " (virtual)" : ""), kit.bci());
    1.56 +      kit.caller()->print_short_name(tty);
    1.57 +      tty->print_cr(" (%d bytes)", kit.caller()->code_size());
    1.58 +    } else {
    1.59 +      // Root compile
    1.60 +      tty->print("Did not generate intrinsic %s%s at bci:%d in",
    1.61                 vmIntrinsics::name_at(intrinsic_id()),
    1.62                 (is_virtual() ? " (virtual)" : ""), kit.bci());
    1.63 -    kit.caller()->print_short_name(tty);
    1.64 -    tty->print_cr(" (%d bytes)", kit.caller()->code_size());
    1.65 +    }
    1.66    }
    1.67    C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
    1.68    return NULL;
    1.69 @@ -418,6 +441,14 @@
    1.70    const bool is_native_ptr  = true;
    1.71    const bool is_static      = true;
    1.72  
    1.73 +  if (!jvms()->has_method()) {
    1.74 +    // Root JVMState has a null method.
    1.75 +    assert(map()->memory()->Opcode() == Op_Parm, "");
    1.76 +    // Insert the memory aliasing node
    1.77 +    set_all_memory(reset_memory());
    1.78 +  }
    1.79 +  assert(merged_memory(), "");
    1.80 +
    1.81    switch (intrinsic_id()) {
    1.82    case vmIntrinsics::_hashCode:
    1.83      return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
    1.84 @@ -658,6 +689,9 @@
    1.85    case vmIntrinsics::_getCallerClass:
    1.86      return inline_native_Reflection_getCallerClass();
    1.87  
    1.88 +  case vmIntrinsics::_Reference_get:
    1.89 +    return inline_reference_get();
    1.90 +
    1.91    default:
    1.92      // If you get here, it may be that someone has added a new intrinsic
    1.93      // to the list in vmSymbols.hpp without implementing it here.
    1.94 @@ -2076,6 +2110,110 @@
    1.95  
    1.96  const static BasicType T_ADDRESS_HOLDER = T_LONG;
    1.97  
    1.98 +// Helper that guards and inserts a G1 pre-barrier.
    1.99 +void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
   1.100 +  assert(UseG1GC, "should not call this otherwise");
   1.101 +
   1.102 +  // We could be accessing the referent field of a reference object. If so, when G1
   1.103 +  // is enabled, we need to log the value in the referent field in an SATB buffer.
   1.104 +  // This routine performs some compile time filters and generates suitable
   1.105 +  // runtime filters that guard the pre-barrier code.
   1.106 +
   1.107 +  // Some compile time checks.
   1.108 +
    1.109 +  // If offset is a constant, is it java_lang_ref_Reference::referent_offset?
   1.110 +  const TypeX* otype = offset->find_intptr_t_type();
   1.111 +  if (otype != NULL && otype->is_con() &&
   1.112 +      otype->get_con() != java_lang_ref_Reference::referent_offset) {
    1.113 +    // Constant offset but not the referent_offset so just return
   1.114 +    return;
   1.115 +  }
   1.116 +
   1.117 +  // We only need to generate the runtime guards for instances.
   1.118 +  const TypeOopPtr* btype = base_oop->bottom_type()->isa_oopptr();
   1.119 +  if (btype != NULL) {
   1.120 +    if (btype->isa_aryptr()) {
   1.121 +      // Array type so nothing to do
   1.122 +      return;
   1.123 +    }
   1.124 +
   1.125 +    const TypeInstPtr* itype = btype->isa_instptr();
   1.126 +    if (itype != NULL) {
   1.127 +      // Can the klass of base_oop be statically determined
   1.128 +      // to be _not_ a sub-class of Reference?
   1.129 +      ciKlass* klass = itype->klass();
   1.130 +      if (klass->is_subtype_of(env()->Reference_klass()) &&
   1.131 +          !env()->Reference_klass()->is_subtype_of(klass)) {
   1.132 +        return;
   1.133 +      }
   1.134 +    }
   1.135 +  }
   1.136 +
   1.137 +  // The compile time filters did not reject base_oop/offset so
   1.138 +  // we need to generate the following runtime filters
   1.139 +  //
    1.140 +  // if (offset == java_lang_ref_Reference::referent_offset) {
   1.141 +  //   if (base != null) {
   1.142 +  //     if (klass(base)->reference_type() != REF_NONE)) {
   1.143 +  //       pre_barrier(_, pre_val, ...);
   1.144 +  //     }
   1.145 +  //   }
   1.146 +  // }
   1.147 +
   1.148 +  float likely  = PROB_LIKELY(0.999);
   1.149 +  float unlikely  = PROB_UNLIKELY(0.999);
   1.150 +
   1.151 +  IdealKit ideal(gvn(), control(),  merged_memory());
   1.152 +#define __ ideal.
   1.153 +
   1.154 +  const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
   1.155 +                                        sizeof(oopDesc);
   1.156 +
   1.157 +  Node* referent_off = __ ConI(java_lang_ref_Reference::referent_offset);
   1.158 +
   1.159 +  __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
   1.160 +    __ if_then(base_oop, BoolTest::ne, null(), likely); {
   1.161 +
   1.162 +      // Update graphKit memory and control from IdealKit.
   1.163 +      set_all_memory(__ merged_memory());
   1.164 +      set_control(__ ctrl());
   1.165 +
   1.166 +      Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
   1.167 +      Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
   1.168 +
   1.169 +      // Update IdealKit memory and control from graphKit.
   1.170 +      __ set_all_memory(merged_memory());
   1.171 +      __ set_ctrl(control());
   1.172 +
   1.173 +      Node* one = __ ConI(1);
   1.174 +
   1.175 +      __ if_then(is_instof, BoolTest::eq, one, unlikely); {
   1.176 +
    1.177 +        // Update graphKit from IdealKit.
   1.178 +        set_all_memory(__ merged_memory());
   1.179 +        set_control(__ ctrl());
   1.180 +
   1.181 +        // Use the pre-barrier to record the value in the referent field
   1.182 +        pre_barrier(false /* do_load */,
   1.183 +                    __ ctrl(),
   1.184 +                    NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
   1.185 +                    pre_val /* pre_val */,
   1.186 +                    T_OBJECT);
   1.187 +
   1.188 +        // Update IdealKit from graphKit.
   1.189 +        __ set_all_memory(merged_memory());
   1.190 +        __ set_ctrl(control());
   1.191 +
   1.192 +      } __ end_if(); // _ref_type != ref_none
   1.193 +    } __ end_if(); // base  != NULL
   1.194 +  } __ end_if(); // offset == referent_offset
   1.195 +
   1.196 +  // Final sync IdealKit and GraphKit.
   1.197 +  sync_kit(ideal);
   1.198 +#undef __
   1.199 +}
   1.200 +
   1.201 +
   1.202  // Interpret Unsafe.fieldOffset cookies correctly:
   1.203  extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);
   1.204  
   1.205 @@ -2152,9 +2290,11 @@
   1.206    // Build address expression.  See the code in inline_unsafe_prefetch.
   1.207    Node *adr;
   1.208    Node *heap_base_oop = top();
   1.209 +  Node* offset = top();
   1.210 +
   1.211    if (!is_native_ptr) {
   1.212      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
   1.213 -    Node* offset = pop_pair();
   1.214 +    offset = pop_pair();
   1.215      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
   1.216      Node* base   = pop();
   1.217      // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
   1.218 @@ -2195,6 +2335,13 @@
   1.219    // or Compile::must_alias will throw a diagnostic assert.)
   1.220    bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
   1.221  
   1.222 +  // If we are reading the value of the referent field of a Reference
   1.223 +  // object (either by using Unsafe directly or through reflection)
   1.224 +  // then, if G1 is enabled, we need to record the referent in an
   1.225 +  // SATB log buffer using the pre-barrier mechanism.
   1.226 +  bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
   1.227 +                           offset != top() && heap_base_oop != top();
   1.228 +
   1.229    if (!is_store && type == T_OBJECT) {
   1.230      // Attempt to infer a sharper value type from the offset and base type.
   1.231      ciKlass* sharpened_klass = NULL;
   1.232 @@ -2278,8 +2425,13 @@
   1.233      case T_SHORT:
   1.234      case T_INT:
   1.235      case T_FLOAT:
   1.236 +      push(p);
   1.237 +      break;
   1.238      case T_OBJECT:
   1.239 -      push( p );
   1.240 +      if (need_read_barrier) {
   1.241 +        insert_g1_pre_barrier(heap_base_oop, offset, p);
   1.242 +      }
   1.243 +      push(p);
   1.244        break;
   1.245      case T_ADDRESS:
   1.246        // Cast to an int type.
   1.247 @@ -2536,7 +2688,10 @@
   1.248    case T_OBJECT:
   1.249       // reference stores need a store barrier.
   1.250      // (They don't if CAS fails, but it isn't worth checking.)
   1.251 -    pre_barrier(control(), base, adr, alias_idx, newval, value_type->make_oopptr(), T_OBJECT);
   1.252 +    pre_barrier(true /* do_load*/,
   1.253 +                control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
   1.254 +                NULL /* pre_val*/,
   1.255 +                T_OBJECT);
   1.256  #ifdef _LP64
   1.257      if (adr->bottom_type()->is_ptr_to_narrowoop()) {
   1.258        Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
   1.259 @@ -5312,3 +5467,44 @@
   1.260                      copyfunc_addr, copyfunc_name, adr_type,
   1.261                      src_start, dest_start, copy_length XTOP);
   1.262  }
   1.263 +
   1.264 +//----------------------------inline_reference_get----------------------------
   1.265 +
   1.266 +bool LibraryCallKit::inline_reference_get() {
   1.267 +  const int nargs = 1; // self
   1.268 +
   1.269 +  guarantee(java_lang_ref_Reference::referent_offset > 0,
   1.270 +            "should have already been set");
   1.271 +
   1.272 +  int referent_offset = java_lang_ref_Reference::referent_offset;
   1.273 +
   1.274 +  // Restore the stack and pop off the argument
   1.275 +  _sp += nargs;
   1.276 +  Node *reference_obj = pop();
   1.277 +
   1.278 +  // Null check on self without removing any arguments.
   1.279 +  _sp += nargs;
   1.280 +  reference_obj = do_null_check(reference_obj, T_OBJECT);
   1.281 +  _sp -= nargs;;
   1.282 +
   1.283 +  if (stopped()) return true;
   1.284 +
   1.285 +  Node *adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
   1.286 +
   1.287 +  ciInstanceKlass* klass = env()->Object_klass();
   1.288 +  const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
   1.289 +
   1.290 +  Node* no_ctrl = NULL;
   1.291 +  Node *result = make_load(no_ctrl, adr, object_type, T_OBJECT);
   1.292 +
   1.293 +  // Use the pre-barrier to record the value in the referent field
   1.294 +  pre_barrier(false /* do_load */,
   1.295 +              control(),
   1.296 +              NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
   1.297 +              result /* pre_val */,
   1.298 +              T_OBJECT);
   1.299 +
   1.300 +  push(result);
   1.301 +  return true;
   1.302 +}
   1.303 +

mercurial