src/cpu/sparc/vm/c1_LIRAssembler_sparc.cpp

changeset 1813
9f5b60a14736
parent 1804
0a43776437b6
child 1907
c18cbe5936b8
child 1919
61b2245abf36
equal deleted inserted replaced
1812:ef74d6d1ac1e 1813:9f5b60a14736
386 386
387 return offset; 387 return offset;
388 } 388 }
389 389
390 390
391 // Emit the code to remove the frame from the stack in the exception
392 // unwind path. Returns the code-buffer offset at which the handler starts.
393 int LIR_Assembler::emit_unwind_handler() {
394 #ifndef PRODUCT
395 if (CommentedAssembly) {
396 _masm->block_comment("Unwind handler");
397 }
398 #endif
399
400 int offset = code_offset();  // handler start offset, returned to the caller
401
402 // Fetch the exception from TLS and clear out exception related thread state
403 __ ld_ptr(G2_thread, in_bytes(JavaThread::exception_oop_offset()), O0);
404 __ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_oop_offset()));
405 __ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_pc_offset()));
406
407 __ bind(_unwind_handler_entry);  // target of the branch emitted by unwind_op()
408 __ verify_not_null_oop(O0);
409 if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
410 __ mov(O0, I0); // Preserve the exception: O0 is reused as an argument register below
411 }
412
413 // Perform needed unlocking
414 MonitorExitStub* stub = NULL;
415 if (method()->is_synchronized()) {
416 monitor_address(0, FrameMap::I1_opr);
417 stub = new MonitorExitStub(FrameMap::I1_opr, true, 0);
418 __ unlock_object(I3, I2, I1, *stub->entry());
419 __ bind(*stub->continuation());
420 }
421
422 if (compilation()->env()->dtrace_method_probes()) {
423 jobject2reg(method()->constant_encoding(), O0);  // clobbers O0 with the method constant
424 __ call(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), relocInfo::runtime_call_type);
425 __ delayed()->nop();
426 }
427
428 if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
429 __ mov(I0, O0); // Restore the exception
430 }
431
432 // dispatch to the unwind logic
433 __ call(Runtime1::entry_for(Runtime1::unwind_exception_id), relocInfo::runtime_call_type);
434 __ delayed()->nop();  // branch delay slot
435
436 // Emit the slow path assembly
437 if (stub != NULL) {
438 stub->emit_code(this);
439 }
440
441 return offset;
442 }
443
444
391 int LIR_Assembler::emit_deopt_handler() { 445 int LIR_Assembler::emit_deopt_handler() {
392 // if the last instruction is a call (typically to do a throw which 446 // if the last instruction is a call (typically to do a throw which
393 // is coming at the end after block reordering) the return address 447 // is coming at the end after block reordering) the return address
394 // must still point into the code area in order to avoid assertion 448 // must still point into the code area in order to avoid assertion
395 // failures when searching for the corresponding bci => add a nop 449 // failures when searching for the corresponding bci => add a nop
2048 ShouldNotReachHere(); 2102 ShouldNotReachHere();
2049 return -1; 2103 return -1;
2050 } 2104 }
2051 2105
2052 2106
2053 void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info, bool unwind) { 2107 void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) {
2054 assert(exceptionOop->as_register() == Oexception, "should match"); 2108 assert(exceptionOop->as_register() == Oexception, "should match");
2055 assert(unwind || exceptionPC->as_register() == Oissuing_pc, "should match"); 2109 assert(exceptionPC->as_register() == Oissuing_pc, "should match");
2056 2110
2057 info->add_register_oop(exceptionOop); 2111 info->add_register_oop(exceptionOop);
2058 2112
2059 if (unwind) { 2113 // reuse the debug info from the safepoint poll for the throw op itself
2060 __ call(Runtime1::entry_for(Runtime1::unwind_exception_id), relocInfo::runtime_call_type); 2114 address pc_for_athrow = __ pc();
2061 __ delayed()->nop(); 2115 int pc_for_athrow_offset = __ offset();
2062 } else { 2116 RelocationHolder rspec = internal_word_Relocation::spec(pc_for_athrow);
2063 // reuse the debug info from the safepoint poll for the throw op itself 2117 __ set(pc_for_athrow, Oissuing_pc, rspec);
2064 address pc_for_athrow = __ pc(); 2118 add_call_info(pc_for_athrow_offset, info); // for exception handler
2065 int pc_for_athrow_offset = __ offset(); 2119
2066 RelocationHolder rspec = internal_word_Relocation::spec(pc_for_athrow); 2120 __ call(Runtime1::entry_for(Runtime1::handle_exception_id), relocInfo::runtime_call_type);
2067 __ set(pc_for_athrow, Oissuing_pc, rspec); 2121 __ delayed()->nop();
2068 add_call_info(pc_for_athrow_offset, info); // for exception handler 2122 }
2069 2123
2070 __ call(Runtime1::entry_for(Runtime1::handle_exception_id), relocInfo::runtime_call_type); 2124
2071 __ delayed()->nop(); 2125 void LIR_Assembler::unwind_op(LIR_Opr exceptionOop) {
2072 } 2126 assert(exceptionOop->as_register() == Oexception, "should match");
2127
2128 __ br(Assembler::always, false, Assembler::pt, _unwind_handler_entry);
2129 __ delayed()->nop();
2073 } 2130 }
2074 2131
2075 2132
2076 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) { 2133 void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
2077 Register src = op->src()->as_register(); 2134 Register src = op->src()->as_register();
2356 op->tmp4()->as_register() == O1 && 2413 op->tmp4()->as_register() == O1 &&
2357 op->klass()->as_register() == G5, "must be"); 2414 op->klass()->as_register() == G5, "must be");
2358 if (UseSlowPath || 2415 if (UseSlowPath ||
2359 (!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) || 2416 (!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) ||
2360 (!UseFastNewTypeArray && (op->type() != T_OBJECT && op->type() != T_ARRAY))) { 2417 (!UseFastNewTypeArray && (op->type() != T_OBJECT && op->type() != T_ARRAY))) {
2361 __ br(Assembler::always, false, Assembler::pn, *op->stub()->entry()); 2418 __ br(Assembler::always, false, Assembler::pt, *op->stub()->entry());
2362 __ delayed()->nop(); 2419 __ delayed()->nop();
2363 } else { 2420 } else {
2364 __ allocate_array(op->obj()->as_register(), 2421 __ allocate_array(op->obj()->as_register(),
2365 op->len()->as_register(), 2422 op->len()->as_register(),
2366 op->tmp1()->as_register(), 2423 op->tmp1()->as_register(),

mercurial