src/share/vm/oops/cpCache.hpp

changeset 6493:3205e78d8193
parent    6081:41cb10cbfb3c
child     6720:0b9500028980
     1.1 --- a/src/share/vm/oops/cpCache.hpp	Thu Dec 05 15:13:12 2013 -0800
     1.2 +++ b/src/share/vm/oops/cpCache.hpp	Mon Dec 02 10:26:14 2013 +0100
     1.3 @@ -138,7 +138,7 @@
     1.4  
     1.5    void set_bytecode_1(Bytecodes::Code code);
     1.6    void set_bytecode_2(Bytecodes::Code code);
     1.7 -  void set_f1(Metadata* f1)                            {
     1.8 +  void set_f1(Metadata* f1) {
     1.9      Metadata* existing_f1 = (Metadata*)_f1; // read once
    1.10      assert(existing_f1 == NULL || existing_f1 == f1, "illegal field change");
    1.11      _f1 = f1;
    1.12 @@ -325,14 +325,21 @@
    1.13  
    1.14    // Accessors
    1.15    int indices() const                            { return _indices; }
    1.16 +  int indices_ord() const                        { return (intx)OrderAccess::load_ptr_acquire(&_indices); }
    1.17    int constant_pool_index() const                { return (indices() & cp_index_mask); }
    1.18 -  Bytecodes::Code bytecode_1() const             { return Bytecodes::cast((indices() >> bytecode_1_shift) & bytecode_1_mask); }
    1.19 -  Bytecodes::Code bytecode_2() const             { return Bytecodes::cast((indices() >> bytecode_2_shift) & bytecode_2_mask); }
    1.20 -  Method* f1_as_method() const                   { Metadata* f1 = (Metadata*)_f1; assert(f1 == NULL || f1->is_method(), ""); return (Method*)f1; }
    1.21 -  Klass*    f1_as_klass() const                  { Metadata* f1 = (Metadata*)_f1; assert(f1 == NULL || f1->is_klass(), ""); return (Klass*)f1; }
    1.22 -  bool      is_f1_null() const                   { Metadata* f1 = (Metadata*)_f1; return f1 == NULL; }  // classifies a CPC entry as unbound
    1.23 +  Bytecodes::Code bytecode_1() const             { return Bytecodes::cast((indices_ord() >> bytecode_1_shift) & bytecode_1_mask); }
    1.24 +  Bytecodes::Code bytecode_2() const             { return Bytecodes::cast((indices_ord() >> bytecode_2_shift) & bytecode_2_mask); }
    1.25 +  Metadata* f1_ord() const                       { return (Metadata *)OrderAccess::load_ptr_acquire(&_f1); }
    1.26 +  Method*   f1_as_method() const                 { Metadata* f1 = f1_ord(); assert(f1 == NULL || f1->is_method(), ""); return (Method*)f1; }
    1.27 +  Klass*    f1_as_klass() const                  { Metadata* f1 = f1_ord(); assert(f1 == NULL || f1->is_klass(), ""); return (Klass*)f1; }
    1.28 +  // Use the accessor f1() to acquire _f1's value. This is needed for
    1.29 +  // example in BytecodeInterpreter::run(), where is_f1_null() is
    1.30 +  // called to check if an invokedynamic call is resolved. This load
    1.31 +  // of _f1 must be ordered with the loads performed by
    1.32 +  // cache->main_entry_index().
    1.33 +  bool      is_f1_null() const                   { Metadata* f1 = f1_ord(); return f1 == NULL; }  // classifies a CPC entry as unbound
    1.34    int       f2_as_index() const                  { assert(!is_vfinal(), ""); return (int) _f2; }
    1.35 -  Method* f2_as_vfinal_method() const            { assert(is_vfinal(), ""); return (Method*)_f2; }
    1.36 +  Method*   f2_as_vfinal_method() const          { assert(is_vfinal(), ""); return (Method*)_f2; }
    1.37    int  field_index() const                       { assert(is_field_entry(),  ""); return (_flags & field_index_mask); }
    1.38    int  parameter_size() const                    { assert(is_method_entry(), ""); return (_flags & parameter_size_mask); }
    1.39    bool is_volatile() const                       { return (_flags & (1 << is_volatile_shift))       != 0; }
