src/share/vm/opto/library_call.cpp

changeset 9858:b985cbb00e68
parent    9720:67dddb025b7b
child     9896:1b8c45b8216a

--- a/src/share/vm/opto/library_call.cpp
+++ b/src/share/vm/opto/library_call.cpp
@@ -25,10 +25,11 @@
 #include "precompiled.hpp"
 #include "classfile/systemDictionary.hpp"
 #include "classfile/vmSymbols.hpp"
 #include "compiler/compileBroker.hpp"
 #include "compiler/compileLog.hpp"
+#include "jfr/support/jfrIntrinsics.hpp"
 #include "oops/objArrayKlass.hpp"
 #include "opto/addnode.hpp"
 #include "opto/callGenerator.hpp"
 #include "opto/cfgnode.hpp"
 #include "opto/connode.hpp"
@@ -38,11 +39,11 @@
 #include "opto/parse.hpp"
 #include "opto/runtime.hpp"
 #include "opto/subnode.hpp"
 #include "prims/nativeLookup.hpp"
 #include "runtime/sharedRuntime.hpp"
-#include "trace/traceMacros.hpp"
+#include "utilities/macros.hpp"
 
 class LibraryIntrinsic : public InlineCallGenerator {
   // Extend the set of intrinsics known to the runtime:
  public:
  private:
@@ -234,13 +235,13 @@
   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
   static bool klass_needs_init_guard(Node* kls);
   bool inline_unsafe_allocate();
   bool inline_unsafe_copyMemory();
   bool inline_native_currentThread();
-#ifdef TRACE_HAVE_INTRINSICS
+#ifdef JFR_HAVE_INTRINSICS
   bool inline_native_classID();
-  bool inline_native_threadID();
+  bool inline_native_getEventWriter();
 #endif
   bool inline_native_time_funcs(address method, const char* funcName);
   bool inline_native_isInterrupted();
   bool inline_native_Class_query(vmIntrinsics::ID id);
   bool inline_native_subtype_check();
@@ -877,14 +878,14 @@
   case vmIntrinsics::_fullFence:            return inline_unsafe_fence(intrinsic_id());
 
   case vmIntrinsics::_currentThread:        return inline_native_currentThread();
   case vmIntrinsics::_isInterrupted:        return inline_native_isInterrupted();
 
-#ifdef TRACE_HAVE_INTRINSICS
-  case vmIntrinsics::_classID:              return inline_native_classID();
-  case vmIntrinsics::_threadID:             return inline_native_threadID();
-  case vmIntrinsics::_counterTime:          return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
+#ifdef JFR_HAVE_INTRINSICS
+  case vmIntrinsics::_counterTime:          return inline_native_time_funcs(CAST_FROM_FN_PTR(address, JFR_TIME_FUNCTION), "counterTime");
+  case vmIntrinsics::_getClassId:           return inline_native_classID();
+  case vmIntrinsics::_getEventWriter:       return inline_native_getEventWriter();
 #endif
   case vmIntrinsics::_currentTimeMillis:    return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
   case vmIntrinsics::_nanoTime:             return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
   case vmIntrinsics::_allocateInstance:     return inline_unsafe_allocate();
   case vmIntrinsics::_copyMemory:           return inline_unsafe_copyMemory();
@@ -3241,29 +3242,37 @@
   Node* obj = new_instance(kls, test);
   set_result(obj);
   return true;
 }
 
-#ifdef TRACE_HAVE_INTRINSICS
+#ifdef JFR_HAVE_INTRINSICS
 /*
  * oop -> myklass
  * myklass->trace_id |= USED
  * return myklass->trace_id & ~0x3
  */
 bool LibraryCallKit::inline_native_classID() {
-  null_check_receiver();  // null-check, then ignore
-  Node* cls = null_check(argument(1), T_OBJECT);
+  Node* cls = null_check(argument(0), T_OBJECT);
   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
   kls = null_check(kls, T_OBJECT);
-  ByteSize offset = TRACE_ID_OFFSET;
+
+  ByteSize offset = KLASS_TRACE_ID_OFFSET;
   Node* insp = basic_plus_adr(kls, in_bytes(offset));
   Node* tvalue = make_load(NULL, insp, TypeLong::LONG, T_LONG, MemNode::unordered);
-  Node* bits = longcon(~0x03l); // ignore bit 0 & 1
-  Node* andl = _gvn.transform(new (C) AndLNode(tvalue, bits));
+
   Node* clsused = longcon(0x01l); // set the class bit
   Node* orl = _gvn.transform(new (C) OrLNode(tvalue, clsused));
-
   const TypePtr *adr_type = _gvn.type(insp)->isa_ptr();
   store_to_memory(control(), insp, orl, T_LONG, adr_type, MemNode::unordered);
-  set_result(andl);
+
+#ifdef TRACE_ID_META_BITS
+  Node* mbits = longcon(~TRACE_ID_META_BITS);
+  tvalue = _gvn.transform(new (C) AndLNode(tvalue, mbits));
+#endif
+#ifdef TRACE_ID_SHIFT
+  Node* cbits = intcon(TRACE_ID_SHIFT);
+  tvalue = _gvn.transform(new (C) URShiftLNode(tvalue, cbits));
+#endif
+
+  set_result(tvalue);
   return true;
 }
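
Aside (editor's illustration, not part of this changeset): read as straight-line pseudo-C++, the rewritten inline_native_classID() sets the "class used" bit and then normalizes the raw trace id before returning it, instead of masking off the low two bits as the old TRACE version did. read_klass_trace_id()/write_klass_trace_id() below are hypothetical helpers standing in for the make_load/store_to_memory at KLASS_TRACE_ID_OFFSET.

  // Illustration only: sketch of the logic the ideal graph above encodes.
  // read_klass_trace_id()/write_klass_trace_id() are hypothetical, not HotSpot API.
  static jlong classID_sketch(Klass* kls) {
    jlong tvalue = read_klass_trace_id(kls);     // 64-bit trace id word of the klass
    write_klass_trace_id(kls, tvalue | 0x01);    // set the "class used" bit
  #ifdef TRACE_ID_META_BITS
    tvalue &= ~TRACE_ID_META_BITS;               // strip metadata bits from the returned id
  #endif
  #ifdef TRACE_ID_SHIFT
    tvalue = (julong)tvalue >> TRACE_ID_SHIFT;   // unsigned shift, mirrors the URShiftLNode
  #endif
    return tvalue;
  }
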
@@ -3270,26 +3279,43 @@
 
-bool LibraryCallKit::inline_native_threadID() {
-  Node* tls_ptr = NULL;
-  Node* cur_thr = generate_current_thread(tls_ptr);
-  Node* p = basic_plus_adr(top()/*!oop*/, tls_ptr, in_bytes(JavaThread::osthread_offset()));
-  Node* osthread = make_load(NULL, p, TypeRawPtr::NOTNULL, T_ADDRESS, MemNode::unordered);
-  p = basic_plus_adr(top()/*!oop*/, osthread, in_bytes(OSThread::thread_id_offset()));
-
-  Node* threadid = NULL;
-  size_t thread_id_size = OSThread::thread_id_size();
-  if (thread_id_size == (size_t) BytesPerLong) {
-    threadid = ConvL2I(make_load(control(), p, TypeLong::LONG, T_LONG, MemNode::unordered));
-  } else if (thread_id_size == (size_t) BytesPerInt) {
-    threadid = make_load(control(), p, TypeInt::INT, T_INT, MemNode::unordered);
-  } else {
-    ShouldNotReachHere();
-  }
-  set_result(threadid);
+bool LibraryCallKit::inline_native_getEventWriter() {
+  Node* tls_ptr = _gvn.transform(new (C) ThreadLocalNode());
+
+  Node* jobj_ptr = basic_plus_adr(top(), tls_ptr,
+                                  in_bytes(THREAD_LOCAL_WRITER_OFFSET_JFR)
+                                  );
+
+  Node* jobj = make_load(control(), jobj_ptr, TypeRawPtr::BOTTOM, T_ADDRESS, MemNode::unordered);
+
+  Node* jobj_cmp_null = _gvn.transform( new (C) CmpPNode(jobj, null()) );
+  Node* test_jobj_eq_null = _gvn.transform( new (C) BoolNode(jobj_cmp_null, BoolTest::eq) );
+
+  IfNode* iff_jobj_null =
+    create_and_map_if(control(), test_jobj_eq_null, PROB_MIN, COUNT_UNKNOWN);
+
+  enum { _normal_path = 1,
+         _null_path = 2,
+         PATH_LIMIT };
+
+  RegionNode* result_rgn = new (C) RegionNode(PATH_LIMIT);
+  PhiNode*    result_val = new (C) PhiNode(result_rgn, TypePtr::BOTTOM);
+
+  Node* jobj_is_null = _gvn.transform(new (C) IfTrueNode(iff_jobj_null));
+  result_rgn->init_req(_null_path, jobj_is_null);
+  result_val->init_req(_null_path, null());
+
+  Node* jobj_is_not_null = _gvn.transform(new (C) IfFalseNode(iff_jobj_null));
+  result_rgn->init_req(_normal_path, jobj_is_not_null);
+
+  Node* res = make_load(jobj_is_not_null, jobj, TypeInstPtr::NOTNULL, T_OBJECT, MemNode::unordered);
+  result_val->init_req(_normal_path, res);
+
+  set_result(result_rgn, result_val);
+
   return true;
 }
-#endif
+#endif // JFR_HAVE_INTRINSICS
 
 //------------------------inline_native_time_funcs--------------
 // inline code for System.currentTimeMillis() and System.nanoTime()
 // these have the same type and signature
 bool LibraryCallKit::inline_native_time_funcs(address funcAddr, const char* funcName) {
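
Aside (editor's illustration, not part of this changeset): the Region/Phi construction in inline_native_getEventWriter() amounts to a null-guarded dereference of the thread-local event-writer handle. jfr_writer_handle_of() below is a hypothetical stand-in for the raw T_ADDRESS load at THREAD_LOCAL_WRITER_OFFSET_JFR.

  // Illustration only: control flow equivalent to the Region/Phi built above.
  // jfr_writer_handle_of() is a hypothetical helper, not HotSpot API.
  static oop getEventWriter_sketch(JavaThread* thread) {
    oop* handle = jfr_writer_handle_of(thread);  // handle stored in the thread-local JFR area
    if (handle == NULL) {
      return NULL;                               // _null_path: no event writer installed yet
    }
    return *handle;                              // _normal_path: resolve the handle to the writer oop
  }
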
