--- a/src/cpu/x86/vm/c1_Runtime1_x86.cpp Tue Dec 20 00:55:02 2011 -0800
+++ b/src/cpu/x86/vm/c1_Runtime1_x86.cpp Wed Dec 07 11:35:03 2011 +0100
@@ -1011,7 +1011,7 @@
 
 if (id == fast_new_instance_init_check_id) {
 // make sure the klass is initialized
- __ cmpl(Address(klass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
+ __ cmpl(Address(klass, instanceKlass::init_state_offset()), instanceKlass::fully_initialized);
 __ jcc(Assembler::notEqual, slow_path);
 }
 
@@ -1019,7 +1019,7 @@
 // assert object can be fast path allocated
 {
 Label ok, not_ok;
- __ movl(obj_size, Address(klass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc)));
+ __ movl(obj_size, Address(klass, Klass::layout_helper_offset()));
 __ cmpl(obj_size, 0); // make sure it's an instance (LH > 0)
 __ jcc(Assembler::lessEqual, not_ok);
 __ testl(obj_size, Klass::_lh_instance_slow_path_bit);
@@ -1040,7 +1040,7 @@
 __ bind(retry_tlab);
 
 // get the instance size (size is postive so movl is fine for 64bit)
- __ movl(obj_size, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes()));
+ __ movl(obj_size, Address(klass, Klass::layout_helper_offset()));
 
 __ tlab_allocate(obj, obj_size, 0, t1, t2, slow_path);
 
@@ -1052,7 +1052,7 @@
 
 __ bind(try_eden);
 // get the instance size (size is postive so movl is fine for 64bit)
- __ movl(obj_size, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes()));
+ __ movl(obj_size, Address(klass, Klass::layout_helper_offset()));
 
 __ eden_allocate(obj, obj_size, 0, t1, slow_path);
 __ incr_allocated_bytes(thread, obj_size, 0);
@@ -1119,7 +1119,7 @@
 {
 Label ok;
 Register t0 = obj;
- __ movl(t0, Address(klass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc)));
+ __ movl(t0, Address(klass, Klass::layout_helper_offset()));
 __ sarl(t0, Klass::_lh_array_tag_shift);
 int tag = ((id == new_type_array_id)
 ? Klass::_lh_array_tag_type_value
@@ -1153,7 +1153,7 @@
 
 // get the allocation size: round_up(hdr + length << (layout_helper & 0x1F))
 // since size is positive movl does right thing on 64bit
- __ movl(t1, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes()));
+ __ movl(t1, Address(klass, Klass::layout_helper_offset()));
 // since size is postive movl does right thing on 64bit
 __ movl(arr_size, length);
 assert(t1 == rcx, "fixed register usage");
@@ -1167,7 +1167,7 @@
 __ tlab_allocate(obj, arr_size, 0, t1, t2, slow_path); // preserves arr_size
 
 __ initialize_header(obj, klass, length, t1, t2);
- __ movb(t1, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes() + (Klass::_lh_header_size_shift / BitsPerByte)));
+ __ movb(t1, Address(klass, in_bytes(Klass::layout_helper_offset()) + (Klass::_lh_header_size_shift / BitsPerByte)));
 assert(Klass::_lh_header_size_shift % BitsPerByte == 0, "bytewise");
 assert(Klass::_lh_header_size_mask <= 0xFF, "bytewise");
 __ andptr(t1, Klass::_lh_header_size_mask);
@@ -1180,7 +1180,7 @@
 
 __ bind(try_eden);
 // get the allocation size: round_up(hdr + length << (layout_helper & 0x1F))
 // since size is positive movl does right thing on 64bit
- __ movl(t1, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes()));
+ __ movl(t1, Address(klass, Klass::layout_helper_offset()));
 // since size is postive movl does right thing on 64bit
 __ movl(arr_size, length);
 assert(t1 == rcx, "fixed register usage");
@@ -1195,7 +1195,7 @@
 __ incr_allocated_bytes(thread, arr_size, 0);
 
 __ initialize_header(obj, klass, length, t1, t2);
- __ movb(t1, Address(klass, klassOopDesc::header_size() * HeapWordSize + Klass::layout_helper_offset_in_bytes() + (Klass::_lh_header_size_shift / BitsPerByte)));
+ __ movb(t1, Address(klass, in_bytes(Klass::layout_helper_offset()) + (Klass::_lh_header_size_shift / BitsPerByte)));
 assert(Klass::_lh_header_size_shift % BitsPerByte == 0, "bytewise");
 assert(Klass::_lh_header_size_mask <= 0xFF, "bytewise");
 __ andptr(t1, Klass::_lh_header_size_mask);
@@ -1267,7 +1267,7 @@
 Label register_finalizer;
 Register t = rsi;
 __ load_klass(t, rax);
- __ movl(t, Address(t, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)));
+ __ movl(t, Address(t, Klass::access_flags_offset()));
 __ testl(t, JVM_ACC_HAS_FINALIZER);
 __ jcc(Assembler::notZero, register_finalizer);
 __ ret(0);