src/share/vm/opto/library_call.cpp

changeset 2781:e1162778c1c8
parent    2667:0e3ed5a14f73
child     2784:92add02409c9
comparing 2780:e6beb62de02d with 2781:e1162778c1c8
@@ -163,10 +163,14 @@
   bool inline_min_max(vmIntrinsics::ID id);
   Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
   // This returns Type::AnyPtr, RawPtr, or OopPtr.
   int classify_unsafe_addr(Node* &base, Node* &offset);
   Node* make_unsafe_address(Node* base, Node* offset);
+  // Helper for inline_unsafe_access.
+  // Generates the guards that check whether the result of
+  // Unsafe.getObject should be recorded in an SATB log buffer.
+  void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
   bool inline_unsafe_allocate();
   bool inline_unsafe_copyMemory();
   bool inline_native_currentThread();
@@ -237,10 +241,12 @@
   bool inline_fp_conversions(vmIntrinsics::ID id);
   bool inline_numberOfLeadingZeros(vmIntrinsics::ID id);
   bool inline_numberOfTrailingZeros(vmIntrinsics::ID id);
   bool inline_bitCount(vmIntrinsics::ID id);
   bool inline_reverseBytes(vmIntrinsics::ID id);
+
+  bool inline_reference_get();
 };
 
 
 //---------------------------make_vm_intrinsic----------------------------
 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
@@ -333,10 +339,18 @@
   case vmIntrinsics::_bitCount_i:
   case vmIntrinsics::_bitCount_l:
     if (!UsePopCountInstruction) return NULL;
     break;
 
+  case vmIntrinsics::_Reference_get:
+    // It is only when G1 is enabled that we absolutely
+    // need to use the intrinsic version of Reference.get()
+    // so that the value in the referent field, if necessary,
+    // can be registered by the pre-barrier code.
+    if (!UseG1GC) return NULL;
+    break;
+
   default:
     assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
     assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
     break;
   }
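
Why the new case returns NULL unless UseG1GC: G1's concurrent marking is snapshot-at-the-beginning (SATB), so every reference reachable when a marking cycle started must be reported to the marker. A compiled Reference.get() that hands the referent straight to the mutator could hide a weakly reachable object from that snapshot, letting it be freed while still in use; the intrinsic version funnels the loaded value through a pre-barrier instead. A minimal conceptual sketch of such a pre-barrier follows (all type and function names are illustrative stand-ins, not HotSpot's):

    #include <cstddef>
    #include <vector>

    typedef void* oop;  // stand-in for a heap reference

    struct SATBLogBuffer {
      std::vector<oop> entries;
      void enqueue(oop o) { entries.push_back(o); }
    };

    static bool concurrent_marking_active = false;  // toggled by the collector

    // Conceptual SATB pre-barrier: log a reference value before it can escape
    // the marking snapshot, so concurrent marking still treats it as live.
    static void satb_pre_barrier(oop pre_val, SATBLogBuffer& buf) {
      if (concurrent_marking_active && pre_val != NULL) {
        buf.enqueue(pre_val);
      }
    }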
@@ -384,17 +398,26 @@
     char buf[1000];
     const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
     tty->print_cr("Intrinsic %s", str);
   }
 #endif
+
   if (kit.try_to_inline()) {
     if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
-      tty->print("Inlining intrinsic %s%s at bci:%d in",
-                 vmIntrinsics::name_at(intrinsic_id()),
-                 (is_virtual() ? " (virtual)" : ""), kit.bci());
-      kit.caller()->print_short_name(tty);
-      tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+      if (jvms->has_method()) {
+        // Not a root compile.
+        tty->print("Inlining intrinsic %s%s at bci:%d in",
+                   vmIntrinsics::name_at(intrinsic_id()),
+                   (is_virtual() ? " (virtual)" : ""), kit.bci());
+        kit.caller()->print_short_name(tty);
+        tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+      } else {
+        // Root compile
+        tty->print_cr("Generating intrinsic %s%s at bci:%d",
+                      vmIntrinsics::name_at(intrinsic_id()),
+                      (is_virtual() ? " (virtual)" : ""), kit.bci());
+      }
     }
     C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
     if (C->log()) {
       C->log()->elem("intrinsic id='%s'%s nodes='%d'",
                      vmIntrinsics::name_at(intrinsic_id()),
@@ -403,25 +426,41 @@
     }
     return kit.transfer_exceptions_into_jvms();
   }
 
   if (PrintIntrinsics) {
-    tty->print("Did not inline intrinsic %s%s at bci:%d in",
-               vmIntrinsics::name_at(intrinsic_id()),
-               (is_virtual() ? " (virtual)" : ""), kit.bci());
-    kit.caller()->print_short_name(tty);
-    tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+    if (jvms->has_method()) {
+      // Not a root compile.
+      tty->print("Did not inline intrinsic %s%s at bci:%d in",
+                 vmIntrinsics::name_at(intrinsic_id()),
+                 (is_virtual() ? " (virtual)" : ""), kit.bci());
+      kit.caller()->print_short_name(tty);
+      tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+    } else {
+      // Root compile
+      tty->print_cr("Did not generate intrinsic %s%s at bci:%d",
+                    vmIntrinsics::name_at(intrinsic_id()),
+                    (is_virtual() ? " (virtual)" : ""), kit.bci());
+    }
   }
   C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
   return NULL;
 }
 
 bool LibraryCallKit::try_to_inline() {
   // Handle symbolic names for otherwise undistinguished boolean switches:
   const bool is_store       = true;
   const bool is_native_ptr  = true;
   const bool is_static      = true;
+
+  if (!jvms()->has_method()) {
+    // Root JVMState has a null method.
+    assert(map()->memory()->Opcode() == Op_Parm, "");
+    // Insert the memory aliasing node
+    set_all_memory(reset_memory());
+  }
+  assert(merged_memory(), "");
 
   switch (intrinsic_id()) {
   case vmIntrinsics::_hashCode:
     return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
   case vmIntrinsics::_identityHashCode:
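
The jvms->has_method() branches above, and the memory setup added at the top of try_to_inline(), both serve the same new case: an intrinsic generated for a root compile, where the method being compiled is the intrinsic method itself (here, Reference.get()) and the topmost JVMState therefore has no caller to report. A tiny illustration of that distinction, using hypothetical miniature types:

    #include <cstdio>

    // Hypothetical miniature of a JVMState chain: a root compile has no caller method.
    struct JVMState {
      const char* caller;                                   // nullptr for a root compile
      bool has_method() const { return caller != nullptr; }
    };

    static void report(const JVMState& jvms, const char* id) {
      if (jvms.has_method()) {
        std::printf("Inlining intrinsic %s in %s\n", id, jvms.caller);
      } else {
        std::printf("Generating intrinsic %s\n", id);       // root compile
      }
    }

    int main() {
      report(JVMState{"Foo::run"}, "_Reference_get");
      report(JVMState{nullptr}, "_Reference_get");
      return 0;
    }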
@@ -658,10 +697,13 @@
   case vmIntrinsics::_attemptUpdate:
     return inline_native_AtomicLong_attemptUpdate();
 
   case vmIntrinsics::_getCallerClass:
     return inline_native_Reflection_getCallerClass();
+
+  case vmIntrinsics::_Reference_get:
+    return inline_reference_get();
 
   default:
     // If you get here, it may be that someone has added a new intrinsic
     // to the list in vmSymbols.hpp without implementing it here.
 #ifndef PRODUCT
@@ -2077,10 +2119,114 @@
 
 //----------------------------inline_unsafe_access----------------------------
 
 const static BasicType T_ADDRESS_HOLDER = T_LONG;
 
+// Helper that guards and inserts a G1 pre-barrier.
+void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
+  assert(UseG1GC, "should not call this otherwise");
+
+  // We could be accessing the referent field of a reference object. If so, when G1
+  // is enabled, we need to log the value in the referent field in an SATB buffer.
+  // This routine performs some compile time filters and generates suitable
+  // runtime filters that guard the pre-barrier code.
+
+  // Some compile time checks.
+
+  // If offset is a constant, is it java_lang_ref_Reference::_reference_offset?
+  const TypeX* otype = offset->find_intptr_t_type();
+  if (otype != NULL && otype->is_con() &&
+      otype->get_con() != java_lang_ref_Reference::referent_offset) {
+    // Constant offset but not the reference_offset so just return
+    return;
+  }
+
+  // We only need to generate the runtime guards for instances.
+  const TypeOopPtr* btype = base_oop->bottom_type()->isa_oopptr();
+  if (btype != NULL) {
+    if (btype->isa_aryptr()) {
+      // Array type so nothing to do
+      return;
+    }
+
+    const TypeInstPtr* itype = btype->isa_instptr();
+    if (itype != NULL) {
+      // Can the klass of base_oop be statically determined
+      // to be _not_ a sub-class of Reference?
+      ciKlass* klass = itype->klass();
+      if (klass->is_subtype_of(env()->Reference_klass()) &&
+          !env()->Reference_klass()->is_subtype_of(klass)) {
+        return;
+      }
+    }
+  }
+
+  // The compile time filters did not reject base_oop/offset so
+  // we need to generate the following runtime filters
+  //
+  // if (offset == java_lang_ref_Reference::_reference_offset) {
+  //   if (base != null) {
+  //     if (klass(base)->reference_type() != REF_NONE) {
+  //       pre_barrier(_, pre_val, ...);
+  //     }
+  //   }
+  // }
+
+  float likely   = PROB_LIKELY(0.999);
+  float unlikely = PROB_UNLIKELY(0.999);
+
+  IdealKit ideal(gvn(), control(), merged_memory());
+#define __ ideal.
+
+  const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
+                                    sizeof(oopDesc);
+
+  Node* referent_off = __ ConI(java_lang_ref_Reference::referent_offset);
+
+  __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
+      __ if_then(base_oop, BoolTest::ne, null(), likely); {
+
+        // Update graphKit memory and control from IdealKit.
+        set_all_memory(__ merged_memory());
+        set_control(__ ctrl());
+
+        Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
+        Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
+
+        // Update IdealKit memory and control from graphKit.
+        __ set_all_memory(merged_memory());
+        __ set_ctrl(control());
+
+        Node* one = __ ConI(1);
+
+        __ if_then(is_instof, BoolTest::eq, one, unlikely); {
+
+          // Update graphKit from IdealKit.
+          set_all_memory(__ merged_memory());
+          set_control(__ ctrl());
+
+          // Use the pre-barrier to record the value in the referent field
+          pre_barrier(false /* do_load */,
+                      __ ctrl(),
+                      NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+                      pre_val /* pre_val */,
+                      T_OBJECT);
+
+        } __ end_if(); // is_instof == 1
+      } __ end_if();   // base != NULL
+  } __ end_if();       // offset == referent_offset
+
+        // Update IdealKit from graphKit.
+        __ set_all_memory(merged_memory());
+        __ set_ctrl(control());
+
+  // Final sync IdealKit and GraphKit.
+  sync_kit(ideal);
+#undef __
+}
+
+
 // Interpret Unsafe.fieldOffset cookies correctly:
 extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);
 
 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
   if (callee()->is_static()) return false;  // caller must have the capability!
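
The IdealKit construction in insert_g1_pre_barrier() emits exactly the nested guards shown in its block comment. Written as ordinary control flow, the runtime filtering would look roughly like this (a sketch; the helper names and their stub bodies are invented for illustration, since the real routine emits ideal-graph nodes rather than calls):

    #include <cstddef>

    typedef void* oop;

    // Illustrative stubs standing in for VM-internal queries.
    static long referent_field_offset()   { return 16; }   // java_lang_ref_Reference::referent_offset
    static bool is_reference_instance(oop) { return true; } // klass(base)->reference_type() != REF_NONE
    static void satb_pre_barrier(oop) { /* enqueue pre_val in the SATB log buffer */ }

    // Only a non-null Reference (or subclass) read at exactly the referent
    // offset needs its loaded value logged for concurrent marking.
    static void checked_referent_pre_barrier(oop base, long offset, oop pre_val) {
      if (offset == referent_field_offset()) {   // unlikely
        if (base != NULL) {                      // likely
          if (is_reference_instance(base)) {     // unlikely
            satb_pre_barrier(pre_val);
          }
        }
      }
    }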
@@ -2153,13 +2299,15 @@
   }
 
   // Build address expression. See the code in inline_unsafe_prefetch.
   Node *adr;
   Node *heap_base_oop = top();
+  Node* offset = top();
+
   if (!is_native_ptr) {
     // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
-    Node* offset = pop_pair();
+    offset = pop_pair();
     // The base is either a Java object or a value produced by Unsafe.staticFieldBase
     Node* base = pop();
     // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
     // to be plain byte offsets, which are also the same as those accepted
     // by oopDesc::field_base.
@@ -2196,10 +2344,17 @@
   // the barriers get omitted and the unsafe reference begins to "pollute"
   // the alias analysis of the rest of the graph, either Compile::can_alias
   // or Compile::must_alias will throw a diagnostic assert.)
   bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
 
+  // If we are reading the value of the referent field of a Reference
+  // object (either by using Unsafe directly or through reflection)
+  // then, if G1 is enabled, we need to record the referent in an
+  // SATB log buffer using the pre-barrier mechanism.
+  bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
+                           offset != top() && heap_base_oop != top();
+
   if (!is_store && type == T_OBJECT) {
     // Attempt to infer a sharper value type from the offset and base type.
     ciKlass* sharpened_klass = NULL;
 
     // See if it is an instance field, with an object type.
@@ -2279,12 +2434,17 @@
   case T_CHAR:
   case T_BYTE:
   case T_SHORT:
   case T_INT:
   case T_FLOAT:
+    push(p);
+    break;
   case T_OBJECT:
-    push( p );
+    if (need_read_barrier) {
+      insert_g1_pre_barrier(heap_base_oop, offset, p);
+    }
+    push(p);
     break;
   case T_ADDRESS:
     // Cast to an int type.
     p = _gvn.transform( new (C, 2) CastP2XNode(NULL,p) );
     p = ConvX2L(p);
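
Note the restructuring of the switch: the primitive cases used to fall through into the shared push(p) under T_OBJECT, and now break on their own, so the new read barrier runs only on the object path. A trivial self-contained demonstration of why that split matters (illustrative only):

    #include <cstdio>

    enum BasicType { T_INT, T_OBJECT };

    static void load_value(BasicType type, bool need_read_barrier) {
      switch (type) {
      case T_INT:
        std::printf("push int\n");
        break;  // without this break, the barrier below would also run for ints
      case T_OBJECT:
        if (need_read_barrier) {
          std::printf("insert pre-barrier\n");
        }
        std::printf("push object\n");
        break;
      }
    }

    int main() {
      load_value(T_INT, true);     // prints only "push int"
      load_value(T_OBJECT, true);  // barrier, then push
      return 0;
    }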
@@ -2537,11 +2697,14 @@
     cas = _gvn.transform(new (C, 5) CompareAndSwapLNode(control(), mem, adr, newval, oldval));
     break;
   case T_OBJECT:
     // reference stores need a store barrier.
     // (They don't if CAS fails, but it isn't worth checking.)
-    pre_barrier(control(), base, adr, alias_idx, newval, value_type->make_oopptr(), T_OBJECT);
+    pre_barrier(true /* do_load */,
+                control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
+                NULL /* pre_val */,
+                T_OBJECT);
 #ifdef _LP64
     if (adr->bottom_type()->is_ptr_to_narrowoop()) {
       Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
       Node *oldval_enc = _gvn.transform(new (C, 2) EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
       cas = _gvn.transform(new (C, 5) CompareAndSwapNNode(control(), mem, adr,
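
pre_barrier() has grown a do_load flag here: the CAS path still asks the barrier to load the old value from base/adr itself (do_load = true, pre_val = NULL), while the new Reference.get() and Unsafe paths hand over a value they have already loaded (do_load = false, pre_val supplied). A rough sketch of that contract with simplified types (illustrative only, not HotSpot's actual signature):

    #include <cstddef>

    typedef void* oop;

    static void enqueue_in_satb_buffer(oop) { /* log for concurrent marking */ }
    static oop  load_field(oop obj, long offset) {
      return *(oop*)((char*)obj + offset);
    }

    // do_load == true:  the barrier loads the old value from obj/offset itself.
    // do_load == false: the caller already has the value and passes pre_val.
    static void pre_barrier(bool do_load, oop obj, long offset, oop pre_val) {
      oop old_val = do_load ? load_field(obj, offset) : pre_val;
      if (old_val != NULL) {
        enqueue_in_satb_buffer(old_val);
      }
    }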
@@ -5313,5 +5476,46 @@
   make_runtime_call(RC_LEAF|RC_NO_FP,
                     OptoRuntime::fast_arraycopy_Type(),
                     copyfunc_addr, copyfunc_name, adr_type,
                     src_start, dest_start, copy_length XTOP);
 }
+
+//----------------------------inline_reference_get----------------------------
+
+bool LibraryCallKit::inline_reference_get() {
+  const int nargs = 1; // self
+
+  guarantee(java_lang_ref_Reference::referent_offset > 0,
+            "should have already been set");
+
+  int referent_offset = java_lang_ref_Reference::referent_offset;
+
+  // Restore the stack and pop off the argument
+  _sp += nargs;
+  Node *reference_obj = pop();
+
+  // Null check on self without removing any arguments.
+  _sp += nargs;
+  reference_obj = do_null_check(reference_obj, T_OBJECT);
+  _sp -= nargs;
+
+  if (stopped()) return true;
+
+  Node *adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
+
+  ciInstanceKlass* klass = env()->Object_klass();
+  const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
+
+  Node* no_ctrl = NULL;
+  Node *result = make_load(no_ctrl, adr, object_type, T_OBJECT);
+
+  // Use the pre-barrier to record the value in the referent field
+  pre_barrier(false /* do_load */,
+              control(),
+              NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+              result /* pre_val */,
+              T_OBJECT);
+
+  push(result);
+  return true;
+}
+
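
Taken together, the intrinsic compiles Reference.get() into roughly the following straight-line shape. The klass and offset guards of insert_g1_pre_barrier() are not needed here because the loaded field is statically known to be the referent, so the barrier is applied unconditionally. (A sketch with assumed helpers and an assumed offset value, not the emitted ideal graph:)

    #include <cstddef>

    typedef void* oop;

    static const long referent_offset = 16;  // stand-in for java_lang_ref_Reference::referent_offset
    static void satb_pre_barrier(oop) { /* log in the SATB buffer, as sketched earlier */ }
    static void throw_null_pointer_exception() { /* deoptimize or throw */ }

    static oop Reference_get_intrinsic(oop self) {
      if (self == NULL) {
        throw_null_pointer_exception();  // control does not continue past here in the VM
        return NULL;
      }
      oop referent = *(oop*)((char*)self + referent_offset);
      satb_pre_barrier(referent);  // record the escaping referent
      return referent;
    }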
