#endif


//=============================================================================
482 int MachConstantNode::constant_offset() { |
482 int MachConstantNode::constant_offset() { |
483 int offset = _constant.offset(); |
|
484 // Bind the offset lazily. |
483 // Bind the offset lazily. |
485 if (offset == -1) { |
484 if (_constant.offset() == -1) { |
486 Compile::ConstantTable& constant_table = Compile::current()->constant_table(); |
485 Compile::ConstantTable& constant_table = Compile::current()->constant_table(); |
487 // If called from Compile::scratch_emit_size assume the worst-case |
486 int offset = constant_table.find_offset(_constant); |
488 // for load offsets: half the constant table size. |
487 // If called from Compile::scratch_emit_size return the |
489 // NOTE: Don't return or calculate the actual offset (which might |
488 // pre-calculated offset. |
490 // be zero) because that leads to problems with e.g. jumpXtnd on |
489 // NOTE: If the AD file does some table base offset optimizations |
491 // some architectures (cf. add-optimization in SPARC jumpXtnd). |
490 // later the AD file needs to take care of this fact. |
492 if (Compile::current()->in_scratch_emit_size()) |
491 if (Compile::current()->in_scratch_emit_size()) { |
493 return constant_table.size() / 2; |
492 return constant_table.calculate_table_base_offset() + offset; |
494 offset = constant_table.table_base_offset() + constant_table.find_offset(_constant); |
493 } |
495 _constant.set_offset(offset); |
494 _constant.set_offset(constant_table.table_base_offset() + offset); |
496 } |
495 } |
497 return offset; |
496 return _constant.offset(); |
498 } |
497 } |


//=============================================================================
#ifndef PRODUCT