src/os_cpu/bsd_x86/vm/atomic_bsd_x86.inline.hpp

author       sla
date         Fri, 08 Feb 2013 12:48:24 +0100
changeset    4564:758935f7c23f
parent       3156:f08d439fab8c
child        4675:63e54c37ac64
permissions  -rw-r--r--

8006423: SA: NullPointerException in sun.jvm.hotspot.debugger.bsd.BsdThread.getContext(BsdThread.java:67)
Summary: Do not rely on mach thread port names to identify threads from SA
Reviewed-by: dholmes, minqi, rbackman

/*
 * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP
#define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP

#include "orderAccess_bsd_x86.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "vm_version_x86.hpp"

// Implementation of class atomic

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

// Adding a lock prefix to an instruction on MP machine
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
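// LOCK_IF_MP tests the mp operand (holding os::is_MP()) at run time: when it
// is zero the je jumps past the lock prefix to the local label 1:, so a
// uniprocessor skips the bus lock; on an MP system the lock prefix is
// executed and the following read-modify-write instruction becomes atomic
// across CPUs.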

inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  jint addend = add_value;
  int mp = os::is_MP();
  __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return addend + add_value;
}

inline void Atomic::inc    (volatile jint*     dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}

inline void Atomic::inc_ptr(volatile void*     dest) {
  inc_ptr((volatile intptr_t*)dest);
}

inline void Atomic::dec    (volatile jint*     dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile void*     dest) {
  dec_ptr((volatile intptr_t*)dest);
}

inline jint     Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
  __asm__ volatile (  "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}
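
// In the cmpxchg below, cmpxchgl implicitly uses EAX both for the expected
// value and for the value actually found at dest, which is why compare_value
// is bound to the "a" constraint and the observed value comes back via "=a".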
inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}

#ifdef AMD64
inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return addend + add_value;
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
}

inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}

inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return exchange_value;
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
}

inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  inc((volatile jint*)dest);
}

inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  dec((volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

extern "C" {
  // defined in bsd_x86.s
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
  void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
}
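
// These two helpers are kept in assembly because, on 32-bit x86, a 64-bit
// compare-and-swap needs the cmpxchg8b instruction and an atomic jlong
// load/store must be done as a single 64-bit move, neither of which plain
// 32-bit C++ code can express directly.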
inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}

inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

#endif // OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP
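
For orientation, the cmpxchg overloads above return the value that was actually found at dest; the swap took effect only if that value equals compare_value, so callers normally retry in a loop. The fragment below is a minimal sketch of that idiom, not part of this header: it assumes the HotSpot build environment (runtime/atomic.hpp available), and the helper name atomic_store_max is hypothetical.

#include "runtime/atomic.hpp"

// Illustrative only: atomically raise *dest so it is at least new_value.
inline void atomic_store_max(jint new_value, volatile jint* dest) {
  jint old_value = *dest;
  while (old_value < new_value) {
    // cmpxchg returns the value it observed at dest: if that still equals
    // old_value the store succeeded, otherwise retry with the fresher value.
    jint observed = Atomic::cmpxchg(new_value, dest, old_value);
    if (observed == old_value) {
      break;
    }
    old_value = observed;
  }
}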
