8206075: On x86, assert on unbound assembler Labels used as branch targets

author        phh
date          Thu, 20 Dec 2018 20:11:38 +0000
changeset     9604:da2e98c027fd
parent        9603:6ce4101edc7a
child         9605:b484b18b9f14

8206075: On x86, assert on unbound assembler Labels used as branch targets
8208480: Test failure: assert(is_bound() || is_unused()) after JDK-8206075 in C1
Summary: Combine unbound Label assertion checking backports
Reviewed-by: andrew, coffeys

src/cpu/x86/vm/interp_masm_x86_32.cpp
src/cpu/x86/vm/interp_masm_x86_64.cpp
src/cpu/x86/vm/templateTable_x86_32.cpp
src/cpu/x86/vm/templateTable_x86_64.cpp
src/share/vm/asm/assembler.hpp
src/share/vm/c1/c1_LIRAssembler.cpp
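The backport combines two related fixes. JDK-8206075 adds an assert to the Label destructor so that a label referenced by a branch but never bound to a code position fails fast, and teaches increment_mask_and_jump() to accept a NULL jump target so the interpreter can skip the overflow branch entirely when -XX:-UseOnStackReplacement means backedge_counter_overflow will never be bound. The standalone sketch below illustrates that invariant; MiniLabel and emit_counter_check are invented stand-ins, not the HotSpot Label/InterpreterMacroAssembler API.

// Minimal standalone sketch of the invariant enforced by this changeset: a
// label that has been used as a branch target must be bound before it is
// destroyed, or never used at all.  MiniLabel and emit_counter_check are
// illustrative names only.
#include <cassert>
#include <cstddef>

class MiniLabel {
  int _loc;    // bound code offset, or -1 while unbound
  int _uses;   // number of branches that reference this label
 public:
  MiniLabel() : _loc(-1), _uses(0) {}
  ~MiniLabel() {
    // Mirrors the assert added to Label::~Label() in assembler.hpp.
    assert((is_bound() || is_unused()) &&
           "Label was never bound to a location, but it was used as a jmp target");
  }
  bool is_bound()  const { return _loc >= 0; }
  bool is_unused() const { return _uses == 0; }
  void bind(int offset)  { _loc = offset; }   // like MacroAssembler::bind(Label&)
  void add_use()         { _uses++; }         // like emitting jcc/jmp to the label
};

// Callee side of the fix: a NULL target means "increment the counter but emit
// no overflow branch", matching the new guard in increment_mask_and_jump().
static void emit_counter_check(MiniLabel* overflow) {
  // ... increment and mask the counter ...
  if (overflow != NULL) {
    overflow->add_use();   // stands in for jcc(cond, *overflow)
  }
}

int main() {
  bool use_osr = false;    // models running with -XX:-UseOnStackReplacement

  // Caller side of the fix: only hand out the label when the code that binds
  // it (the overflow stub) will actually be generated.
  MiniLabel backedge_counter_overflow;
  emit_counter_check(use_osr ? &backedge_counter_overflow : NULL);
  if (use_osr) {
    backedge_counter_overflow.bind(0x40);
  }
  return 0;  // the label stays unused, so its destructor assert passes
}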
     1.1 --- a/src/cpu/x86/vm/interp_masm_x86_32.cpp	Thu Dec 06 13:26:54 2018 -0500
     1.2 +++ b/src/cpu/x86/vm/interp_masm_x86_32.cpp	Thu Dec 20 20:11:38 2018 +0000
     1.3 @@ -1,5 +1,5 @@
     1.4  /*
     1.5 - * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
     1.6 + * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
     1.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     1.8   *
     1.9   * This code is free software; you can redistribute it and/or modify it
    1.10 @@ -1445,5 +1445,7 @@
    1.11    incrementl(scratch, increment);
    1.12    movl(counter_addr, scratch);
    1.13    andl(scratch, mask);
    1.14 -  jcc(cond, *where);
    1.15 +  if (where != NULL) {
    1.16 +    jcc(cond, *where);
    1.17 +  }
    1.18  }
     2.1 --- a/src/cpu/x86/vm/interp_masm_x86_64.cpp	Thu Dec 06 13:26:54 2018 -0500
     2.2 +++ b/src/cpu/x86/vm/interp_masm_x86_64.cpp	Thu Dec 20 20:11:38 2018 +0000
     2.3 @@ -1,5 +1,5 @@
     2.4  /*
     2.5 - * Copyright (c) 2003, 2016, Oracle and/or its affiliates. All rights reserved.
     2.6 + * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
     2.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     2.8   *
     2.9   * This code is free software; you can redistribute it and/or modify it
    2.10 @@ -1505,5 +1505,7 @@
    2.11    incrementl(scratch, increment);
    2.12    movl(counter_addr, scratch);
    2.13    andl(scratch, mask);
    2.14 -  jcc(cond, *where);
    2.15 +  if (where != NULL) {
    2.16 +    jcc(cond, *where);
    2.17 +  }
    2.18  }
     3.1 --- a/src/cpu/x86/vm/templateTable_x86_32.cpp	Thu Dec 06 13:26:54 2018 -0500
     3.2 +++ b/src/cpu/x86/vm/templateTable_x86_32.cpp	Thu Dec 20 20:11:38 2018 +0000
     3.3 @@ -1,5 +1,5 @@
     3.4  /*
     3.5 - * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
     3.6 + * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
     3.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     3.8   *
     3.9   * This code is free software; you can redistribute it and/or modify it
    3.10 @@ -1640,15 +1640,16 @@
    3.11          // Increment the MDO backedge counter
    3.12          const Address mdo_backedge_counter(rbx, in_bytes(MethodData::backedge_counter_offset()) +
    3.13                                                  in_bytes(InvocationCounter::counter_offset()));
    3.14 -        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
    3.15 -                                   rax, false, Assembler::zero, &backedge_counter_overflow);
    3.16 +        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, rax, false, Assembler::zero,
    3.17 +                                   UseOnStackReplacement ? &backedge_counter_overflow : NULL);
    3.18          __ jmp(dispatch);
    3.19        }
    3.20        __ bind(no_mdo);
    3.21        // Increment backedge counter in MethodCounters*
    3.22        __ movptr(rcx, Address(rcx, Method::method_counters_offset()));
    3.23        __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
    3.24 -                                 rax, false, Assembler::zero, &backedge_counter_overflow);
    3.25 +                                 rax, false, Assembler::zero,
    3.26 +                                 UseOnStackReplacement ? &backedge_counter_overflow : NULL);
    3.27      } else {
    3.28        // increment counter
    3.29        __ movptr(rcx, Address(rcx, Method::method_counters_offset()));
     4.1 --- a/src/cpu/x86/vm/templateTable_x86_64.cpp	Thu Dec 06 13:26:54 2018 -0500
     4.2 +++ b/src/cpu/x86/vm/templateTable_x86_64.cpp	Thu Dec 20 20:11:38 2018 +0000
     4.3 @@ -1,5 +1,5 @@
     4.4  /*
     4.5 - * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
     4.6 + * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
     4.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4.8   *
     4.9   * This code is free software; you can redistribute it and/or modify it
    4.10 @@ -1665,15 +1665,16 @@
    4.11          // Increment the MDO backedge counter
    4.12          const Address mdo_backedge_counter(rbx, in_bytes(MethodData::backedge_counter_offset()) +
    4.13                                             in_bytes(InvocationCounter::counter_offset()));
    4.14 -        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
    4.15 -                                   rax, false, Assembler::zero, &backedge_counter_overflow);
    4.16 +        __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, rax, false, Assembler::zero,
    4.17 +                                   UseOnStackReplacement ? &backedge_counter_overflow : NULL);
    4.18          __ jmp(dispatch);
    4.19        }
    4.20        __ bind(no_mdo);
    4.21        // Increment backedge counter in MethodCounters*
    4.22        __ movptr(rcx, Address(rcx, Method::method_counters_offset()));
    4.23        __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
    4.24 -                                 rax, false, Assembler::zero, &backedge_counter_overflow);
    4.25 +                                 rax, false, Assembler::zero,
    4.26 +                                 UseOnStackReplacement ? &backedge_counter_overflow : NULL);
    4.27      } else {
    4.28        // increment counter
    4.29        __ movptr(rcx, Address(rcx, Method::method_counters_offset()));
     5.1 --- a/src/share/vm/asm/assembler.hpp	Thu Dec 06 13:26:54 2018 -0500
     5.2 +++ b/src/share/vm/asm/assembler.hpp	Thu Dec 20 20:11:38 2018 +0000
     5.3 @@ -1,5 +1,5 @@
     5.4  /*
     5.5 - * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
     5.6 + * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
     5.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     5.8   *
     5.9   * This code is free software; you can redistribute it and/or modify it
    5.10 @@ -169,6 +169,14 @@
    5.11    Label() {
    5.12      init();
    5.13    }
    5.14 +
    5.15 +  ~Label() {
    5.16 +    assert(is_bound() || is_unused(), "Label was never bound to a location, but it was used as a jmp target");
    5.17 +  }
    5.18 +
    5.19 +  void reset() {
    5.20 +    init(); //leave _patch_overflow because it points to CodeBuffer.
    5.21 +  }
    5.22  };
    5.23  
    5.24  // A union type for code which has to assemble both constant and
     6.1 --- a/src/share/vm/c1/c1_LIRAssembler.cpp	Thu Dec 06 13:26:54 2018 -0500
     6.2 +++ b/src/share/vm/c1/c1_LIRAssembler.cpp	Thu Dec 20 20:11:38 2018 +0000
     6.3 @@ -1,5 +1,5 @@
     6.4  /*
     6.5 - * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
     6.6 + * Copyright (c) 2000, 2018, Oracle and/or its affiliates. All rights reserved.
     6.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     6.8   *
     6.9   * This code is free software; you can redistribute it and/or modify it
    6.10 @@ -128,6 +128,9 @@
    6.11  
    6.12  
    6.13  LIR_Assembler::~LIR_Assembler() {
    6.14 +  // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
    6.15 +  // Reset it here to avoid an assertion.
    6.16 +  _unwind_handler_entry.reset();
    6.17  }
    6.18  
    6.19  
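JDK-8208480 is the follow-up: with the destructor assert in place, a C1 compilation that bails out destroys a LIR_Assembler whose _unwind_handler_entry label may have been referenced but never bound, so the destructor now clears it with the new Label::reset(). The sketch below shows that scenario with invented names (SketchLabel, SketchAssembler, compile_may_bailout); only the reset-before-destruction idea comes from the changeset.

// Standalone sketch of the bail-out scenario behind JDK-8208480.  SketchLabel,
// SketchAssembler and compile_may_bailout are illustrative only; the real code
// is Label and LIR_Assembler in the diff above.
#include <cassert>

struct SketchLabel {
  bool bound = false;
  int  uses  = 0;
  ~SketchLabel() {
    assert((bound || uses == 0) &&
           "Label was never bound to a location, but it was used as a jmp target");
  }
  void reset() { bound = false; uses = 0; }   // analogous to the new Label::reset()
};

struct SketchAssembler {
  SketchLabel unwind_handler_entry;           // analogous to _unwind_handler_entry

  ~SketchAssembler() {
    // Same move as the new LIR_Assembler destructor: after a bail-out the
    // label may be used-but-unbound, so clear it before its destructor runs.
    unwind_handler_entry.reset();
  }

  void compile_may_bailout(bool bailout) {
    unwind_handler_entry.uses++;              // some emitted code branches to it
    if (bailout) {
      return;                                 // compilation abandoned: label never bound
    }
    unwind_handler_entry.bound = true;        // normal path: unwind handler emitted
  }
};

int main() {
  SketchAssembler a;
  a.compile_may_bailout(/* bailout = */ true);
  return 0;  // without the reset(), the SketchLabel destructor assert would fire
}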
