src/cpu/sparc/vm/assembler_sparc.cpp

changeset 855:a1980da045cc
parent    791:1ee8caae33af
child    1057:56aae7be60d4
--- a/src/cpu/sparc/vm/assembler_sparc.cpp	Thu Nov 06 20:00:03 2008 -0800
+++ b/src/cpu/sparc/vm/assembler_sparc.cpp	Fri Nov 07 09:29:38 2008 -0800
@@ -2615,7 +2615,8 @@
   }
 }
 
-void MacroAssembler::biased_locking_enter(Register obj_reg, Register mark_reg, Register temp_reg,
+void MacroAssembler::biased_locking_enter(Register obj_reg, Register mark_reg,
+                                          Register temp_reg,
                                           Label& done, Label* slow_case,
                                           BiasedLockingCounters* counters) {
   assert(UseBiasedLocking, "why call this otherwise?");
@@ -2691,8 +2692,7 @@
                   markOopDesc::biased_lock_mask_in_place | markOopDesc::age_mask_in_place | markOopDesc::epoch_mask_in_place,
                   mark_reg);
   or3(G2_thread, mark_reg, temp_reg);
-  casx_under_lock(mark_addr.base(), mark_reg, temp_reg,
-                  (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
+  casn(mark_addr.base(), mark_reg, temp_reg);
   // If the biasing toward our thread failed, this means that
   // another thread succeeded in biasing it toward itself and we
   // need to revoke that bias. The revocation will occur in the
@@ -2721,8 +2721,7 @@
   load_klass(obj_reg, temp_reg);
   ld_ptr(Address(temp_reg, 0, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()), temp_reg);
   or3(G2_thread, temp_reg, temp_reg);
-  casx_under_lock(mark_addr.base(), mark_reg, temp_reg,
-                  (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
+  casn(mark_addr.base(), mark_reg, temp_reg);
   // If the biasing toward our thread failed, this means that
   // another thread succeeded in biasing it toward itself and we
   // need to revoke that bias. The revocation will occur in the
@@ -2752,8 +2751,7 @@
   // bits in this situation. Should attempt to preserve them.
   load_klass(obj_reg, temp_reg);
   ld_ptr(Address(temp_reg, 0, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()), temp_reg);
-  casx_under_lock(mark_addr.base(), mark_reg, temp_reg,
-                  (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
+  casn(mark_addr.base(), mark_reg, temp_reg);
   // Fall through to the normal CAS-based lock, because no matter what
   // the result of the above CAS, some thread must have succeeded in
   // removing the bias bit from the object's header.
@@ -2815,8 +2813,10 @@
 // effect).
 
 
-void MacroAssembler::compiler_lock_object(Register Roop, Register Rmark, Register Rbox, Register Rscratch,
-                                          BiasedLockingCounters* counters) {
+void MacroAssembler::compiler_lock_object(Register Roop, Register Rmark,
+                                          Register Rbox, Register Rscratch,
+                                          BiasedLockingCounters* counters,
+                                          bool try_bias) {
    Address mark_addr(Roop, 0, oopDesc::mark_offset_in_bytes());
 
    verify_oop(Roop);
@@ -2838,7 +2838,7 @@
      // Fetch object's markword
      ld_ptr(mark_addr, Rmark);
 
-     if (UseBiasedLocking) {
+     if (try_bias) {
        biased_locking_enter(Roop, Rmark, Rscratch, done, NULL, counters);
      }
 
@@ -2881,7 +2881,7 @@
 
       ld_ptr (mark_addr, Rmark);           // fetch obj->mark
       // Triage: biased, stack-locked, neutral, inflated
-      if (UseBiasedLocking) {
+      if (try_bias) {
         biased_locking_enter(Roop, Rmark, Rscratch, done, NULL, counters);
         // Invariant: if control reaches this point in the emitted stream
         // then Rmark has not been modified.
@@ -2945,7 +2945,7 @@
       ld_ptr (mark_addr, Rmark);           // fetch obj->mark
       // Triage: biased, stack-locked, neutral, inflated
 
-      if (UseBiasedLocking) {
+      if (try_bias) {
         biased_locking_enter(Roop, Rmark, Rscratch, done, NULL, counters);
         // Invariant: if control reaches this point in the emitted stream
         // then Rmark has not been modified.
@@ -3039,7 +3039,9 @@
    bind   (done) ;
 }
 
-void MacroAssembler::compiler_unlock_object(Register Roop, Register Rmark, Register Rbox, Register Rscratch) {
+void MacroAssembler::compiler_unlock_object(Register Roop, Register Rmark,
+                                            Register Rbox, Register Rscratch,
+                                            bool try_bias) {
    Address mark_addr(Roop, 0, oopDesc::mark_offset_in_bytes());
 
    Label done ;
@@ -3050,7 +3052,7 @@
    }
 
    if (EmitSync & 8) {
-     if (UseBiasedLocking) {
+     if (try_bias) {
        biased_locking_exit(mark_addr, Rscratch, done);
      }
 
@@ -3077,7 +3079,7 @@
    // I$ effects.
    Label LStacked ;
 
-   if (UseBiasedLocking) {
+   if (try_bias) {
       // TODO: eliminate redundant LDs of obj->mark
      biased_locking_exit(mark_addr, Rscratch, done);
    }
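
The changeset does two things: it replaces the explicit casx_under_lock(..., StubRoutines::Sparc::atomic_memory_operation_lock_addr()) calls in biased_locking_enter with the casn helper, and it threads an explicit try_bias flag through compiler_lock_object/compiler_unlock_object so the caller decides whether biased locking is attempted instead of the macro assembler consulting UseBiasedLocking directly. As a rough illustration only (not the actual definition, which lives in the SPARC assembler headers and may differ), a pointer-width-dispatching CAS helper of this shape would let the call sites above stay identical on 32-bit and 64-bit VMs:

// Illustrative sketch, assuming casn simply picks the CAS form matching the
// VM's pointer width; the real HotSpot body may differ in detail.
inline void MacroAssembler::casn(Register addr_reg, Register cmp_reg, Register set_reg) {
#ifdef _LP64
  // 64-bit VM: use the native 64-bit compare-and-swap on the mark word.
  casx(addr_reg, cmp_reg, set_reg);
#else
  // 32-bit VM: fall back to the lock-protected variant so hardware without a
  // usable CAS instruction still behaves atomically.
  casx_under_lock(addr_reg, cmp_reg, set_reg,
                  (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
#endif
}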
