src/cpu/mips/vm/c1_MacroAssembler_mips.cpp

changeset 8865:ffcdff41a92f
parent    6880:52ea28d233d2
child     9144:cecfc245b19a
@@ -138,12 +138,10 @@
 // load object
 ld_ptr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
 biased_locking_exit(obj, hdr, done);
 }
 
-
-
 // load displaced header
 ld_ptr(hdr, disp_hdr, 0);
 // if the loaded hdr is NULL we had recursive locking
 // if we had recursive locking, we are done
 beq(hdr, R0, done);
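
For context, the unlock path above follows HotSpot's lightweight-locking protocol: the on-stack lock record holds the displaced mark word, a NULL displaced header denotes a recursive (nested) lock, and a non-NULL header has to be swapped back into the object before the lock is released. A minimal standalone sketch of that logic, using stand-in types rather than HotSpot's real classes (the CAS step falls outside this excerpt):

// Minimal sketch of the lightweight-unlock fast path; ObjectStub and
// BasicLockStub are illustrative stand-ins, not HotSpot declarations.
#include <atomic>
#include <cstdint>

struct ObjectStub    { std::atomic<intptr_t> mark; };  // object header (mark word)
struct BasicLockStub { intptr_t displaced_header; };   // on-stack lock record

// Returns true if the fast path released the lock, false if the runtime
// slow path (slow_case above) would be needed.
bool unlock_fast_path(BasicLockStub* lock, ObjectStub* obj) {
  intptr_t hdr = lock->displaced_header;   // ld_ptr(hdr, disp_hdr, 0)
  if (hdr == 0) {                          // beq(hdr, R0, done)
    return true;                           // recursive lock: nothing to restore
  }
  // Swap the displaced mark word back into the object header; if the header
  // no longer points at this lock record, defer to the runtime.
  intptr_t expected = reinterpret_cast<intptr_t>(lock);
  return obj->mark.compare_exchange_strong(expected, hdr);
}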
@@ -176,36 +174,36 @@
 eden_allocate(obj, var_size_in_bytes, con_size_in_bytes, t1, t2, slow_case);
 }
 }
 
 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {
-assert_different_registers(obj, klass, len, AT);
+assert_different_registers(obj, klass, len, T9);
 
 if (UseBiasedLocking && !len->is_valid()) {
 assert_different_registers(obj, klass, len, t1, t2);
 ld_ptr(t1, klass, in_bytes(Klass::prototype_header_offset()));
 st_ptr(t1, obj, oopDesc::mark_offset_in_bytes());
 } else {
-li(AT, (intptr_t)markOopDesc::prototype());
-st_ptr(AT, obj, oopDesc::mark_offset_in_bytes());
+li(T9, (intptr_t)markOopDesc::prototype());
+st_ptr(T9, obj, oopDesc::mark_offset_in_bytes());
 }
 //st_ptr(klass, obj, oopDesc::klass_offset_in_bytes());
 #ifdef _LP64
-if (UseCompressedOops) {
-move(AT, klass);
-store_klass(obj, AT);
+if (UseCompressedClassPointers) {
+move(T9, klass);
+store_klass(obj, T9);
 } else
 #endif
 {
 st_ptr(klass, obj, oopDesc::klass_offset_in_bytes());
 }
 
 if (len->is_valid()) {
 sw(len, obj, arrayOopDesc::length_offset_in_bytes());
 }
 #ifdef _LP64
-else if (UseCompressedOops) {
+else if (UseCompressedClassPointers) {
 store_klass_gap(obj, R0);
 }
 #endif
 }
 
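
initialize_header() lays down the standard HotSpot object header: the mark word first (taken from the klass prototype header when biased locking applies, otherwise markOopDesc::prototype()), then the klass reference (a narrow, compressed value when UseCompressedClassPointers is on), then either the array length or a zeroed klass gap, since on 64-bit builds with compressed class pointers the array length occupies the 4-byte slot that is otherwise the gap. A standalone layout sketch with illustrative field names:

// Layout sketch of the header initialize_header() fills in; field names are
// illustrative, not HotSpot's real declarations.
#include <cstdint>

struct ObjectHeaderSketch {
  uintptr_t mark;            // mark word: prototype header, stored first
#ifdef _LP64
  uint32_t  narrow_klass;    // compressed klass pointer (store_klass)
  uint32_t  gap_or_length;   // arrays: length; plain objects: zeroed klass gap
#else
  uintptr_t klass;           // full-width klass pointer on 32-bit builds
  // arrays additionally carry a length field after the klass word
#endif
};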
@@ -424,18 +422,18 @@
 // Note: RECEIVER must still contain the receiver!
 Label L;
 #ifdef _LP64
 //ld_ptr(AT, receiver, oopDesc::klass_offset_in_bytes());
 //add for compressedoops
-load_klass(AT, receiver);
+load_klass(T9, receiver);
 #else
-lw(AT, receiver, oopDesc::klass_offset_in_bytes());
+lw(T9, receiver, oopDesc::klass_offset_in_bytes());
 #endif
-beq(AT, iCache, L);
+beq(T9, iCache, L);
 delayed()->nop();
 // jmp(Runtime1::entry_for(Runtime1::handle_ic_miss_id), relocInfo::runtime_call_type);
 jmp(SharedRuntime::get_ic_miss_stub(), relocInfo::runtime_call_type);
 delayed()->nop();
 bind(L);
 // assert(UseCompressedOops, "check alignment in emit_method_entry");
 }
 /*
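
The inline-cache check above loads the receiver's klass into the scratch register, compares it with the klass cached in the iCache register, and jumps to SharedRuntime's IC-miss stub on a mismatch; only a matching receiver falls through to the verified entry. In plain control-flow terms (a sketch with illustrative names, not the emitted code):

// Control-flow sketch of inline_cache_check(); parameter names are illustrative.
// receiver_klass plays the role of T9, expected_klass the role of iCache.
inline void inline_cache_check_sketch(const void* receiver_klass,
                                      const void* expected_klass,
                                      void (*ic_miss_stub)()) {
  if (receiver_klass != expected_klass) {  // beq(T9, iCache, L) not taken
    ic_miss_stub();                        // jmp(SharedRuntime::get_ic_miss_stub(), ...)
    return;                                // the real stub never falls back here
  }
  // bind(L): verified method entry continues from this point
}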
@@ -452,22 +450,27 @@
 // Make sure there is enough stack space for this method's activation.
 // Note that we do this before doing an enter(). This matches the
 // ordering of C2's stack overflow check / esp decrement and allows
 // the SharedRuntime stack overflow handling to be consistent
 // between the two compilers.
-generate_stack_overflow_check(frame_size_in_bytes);
+generate_stack_overflow_check(bang_size_in_bytes);
 
 enter();
 //FIXME
 #ifdef TIERED
 // c2 leaves fpu stack dirty. Clean it on entry
 // if (UseSSE < 2 ) {
 empty_FPU_stack();
 // }
 #endif // TIERED
 
 decrement(SP, frame_size_in_bytes); // does not emit code for frame_size == 0
+}
+
+void C1_MacroAssembler::remove_frame(int frame_size_in_bytes) {
+increment(SP, frame_size_in_bytes); // Does not emit code for frame_size == 0
+pop(FP);
 }
 
 void C1_MacroAssembler::unverified_entry(Register receiver, Register ic_klass) {
 if (C1Breakpoint) int3();
 inline_cache_check(receiver, ic_klass);
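
build_frame() and the newly added remove_frame() bracket a compiled method's activation: the prologue bangs the stack pages, establishes the frame pointer via enter(), and drops SP by the frame size; the epilogue releases the frame and restores FP. A runnable stand-in sketch of the pairing (MacroAsmSketch is illustrative, not C1_MacroAssembler):

// Stand-in sketch of how build_frame()/remove_frame() pair up in a method.
#include <cstdio>

struct MacroAsmSketch {
  void bang_stack(int bang_size)    { std::printf("bang %d bytes below SP\n", bang_size); }
  void enter()                      { std::printf("save caller FP, set up new FP\n"); }
  void grow_frame(int frame_size)   { if (frame_size) std::printf("SP -= %d\n", frame_size); }
  void shrink_frame(int frame_size) { if (frame_size) std::printf("SP += %d\n", frame_size); }
  void pop_fp()                     { std::printf("pop FP\n"); }
};

void emit_method_sketch(MacroAsmSketch& masm, int frame_size, int bang_size) {
  // Prologue, mirroring build_frame(frame_size_in_bytes, bang_size_in_bytes):
  masm.bang_stack(bang_size);    // generate_stack_overflow_check(bang_size_in_bytes)
  masm.enter();                  // enter()
  masm.grow_frame(frame_size);   // decrement(SP, frame_size_in_bytes)

  // ... compiled method body would be emitted here ...

  // Epilogue, mirroring remove_frame(frame_size_in_bytes):
  masm.shrink_frame(frame_size); // increment(SP, frame_size_in_bytes)
  masm.pop_fp();                 // pop(FP)
}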
