src/os_cpu/linux_x86/vm/atomic_linux_x86.inline.hpp

Wed, 25 Sep 2013 13:58:13 +0200

author
dsimms
date
Wed, 25 Sep 2013 13:58:13 +0200
changeset 5781
899ecf76b570
parent 4675
63e54c37ac64
child 6876
710a3c8b516e
permissions
-rw-r--r--

8023956: Provide a work-around to broken Linux 32 bit "Exec Shield" using CS for NX emulation (crashing with SI_KERNEL)
Summary: Execute some code at a high virtual address value, and keep it mapped
Reviewed-by: coleenp, zgu

     1 /*
     2  * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP
    26 #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP
    28 #include "runtime/atomic.hpp"
    29 #include "runtime/os.hpp"
    30 #include "vm_version_x86.hpp"
    32 // Implementation of class atomic
// Plain stores for byte/short/int and pointer-sized values.  These are
// simple assignments -- no locked instruction and no barrier -- relying on
// the natural atomicity of aligned native-width stores on x86.  Overloads
// cover both plain and volatile-qualified destinations.
     34 inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
     35 inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
     36 inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
     37 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
     38 inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }
     40 inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
     41 inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
     42 inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
     43 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
     44 inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
     47 // Adding a lock prefix to an instruction on MP machine
// Expanded inside asm templates: tests the 'mp' operand at runtime; on a
// uniprocessor (mp == 0) it jumps over the lock prefix to the local asm
// label "1:", on MP it falls through to "lock" so the following
// read-modify-write instruction is atomic across processors.
     48 #define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
// Atomically add add_value to *dest; returns the updated value.
// xaddl exchanges %0 with (%2) while adding, so on exit 'addend' holds the
// value *dest had before the add; previous value + add_value == new value.
// LOCK_IF_MP(%3) applies the lock prefix only when os::is_MP() is nonzero.
// The "memory" clobber is a compiler barrier; "cc" marks flags as clobbered.
     50 inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
     51   jint addend = add_value;
     52   int mp = os::is_MP();
     53   __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
     54                     : "=r" (addend)
     55                     : "0" (addend), "r" (dest), "r" (mp)
     56                     : "cc", "memory");
     57   return addend + add_value;
     58 }
// Atomically increment *dest by one (addl $1); no value is returned.
// Lock prefix applied only on MP systems; the "memory" clobber keeps the
// compiler from reordering memory accesses around the increment.
     60 inline void Atomic::inc    (volatile jint*     dest) {
     61   int mp = os::is_MP();
     62   __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" :
     63                     : "r" (dest), "r" (mp) : "cc", "memory");
     64 }
    66 inline void Atomic::inc_ptr(volatile void*     dest) {
    67   inc_ptr((volatile intptr_t*)dest);
    68 }
// Atomically decrement *dest by one (subl $1); no value is returned.
// Mirror image of Atomic::inc above.
     70 inline void Atomic::dec    (volatile jint*     dest) {
     71   int mp = os::is_MP();
     72   __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" :
     73                     : "r" (dest), "r" (mp) : "cc", "memory");
     74 }
    76 inline void Atomic::dec_ptr(volatile void*     dest) {
    77   dec_ptr((volatile intptr_t*)dest);
    78 }
// Atomically swap exchange_value with *dest; returns the previous *dest.
// No LOCK_IF_MP here: xchg with a memory operand asserts the processor's
// lock signal implicitly, so it is atomic even without an explicit prefix
// (see the Intel SDM description of XCHG).
     80 inline jint     Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
     81   __asm__ volatile (  "xchgl (%2),%0"
     82                     : "=r" (exchange_value)
     83                     : "0" (exchange_value), "r" (dest)
     84                     : "memory");
     85   return exchange_value;
     86 }
    88 inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
    89   return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
    90 }
// Atomic compare-and-swap: if *dest == compare_value, store exchange_value
// into *dest.  compare_value is pinned to EAX ("a") as cmpxchg requires;
// the instruction leaves the value read from *dest in EAX, which becomes
// the return value -- equal to compare_value exactly when the swap
// succeeded.
     93 inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value) {
     94   int mp = os::is_MP();
     95   __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
     96                     : "=a" (exchange_value)
     97                     : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
     98                     : "cc", "memory");
     99   return exchange_value;
    100 }
   102 #ifdef AMD64
// On AMD64 an aligned 64-bit store is a single instruction, so jlong stores
// need no special handling (contrast the 32-bit path below, which must go
// through the _Atomic_move_long assembly stub).
    103 inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
    104 inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
// 64-bit analogue of Atomic::add: xaddq leaves the previous *dest in
// 'addend', so previous value + add_value yields the updated value that is
// returned.  Lock prefix only on MP systems.
    106 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
    107   intptr_t addend = add_value;
    108   bool mp = os::is_MP();
    109   __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
    110                         : "=r" (addend)
    111                         : "0" (addend), "r" (dest), "r" (mp)
    112                         : "cc", "memory");
    113   return addend + add_value;
    114 }
   116 inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
   117   return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
   118 }
// Atomically increment a pointer-sized (64-bit) value: addq $1, with the
// lock prefix applied only on MP systems.
    120 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
    121   bool mp = os::is_MP();
    122   __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
    123                         :
    124                         : "r" (dest), "r" (mp)
    125                         : "cc", "memory");
    126 }
// Atomically decrement a pointer-sized (64-bit) value: subq $1.  Mirror
// image of the 64-bit inc_ptr above.
    128 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
    129   bool mp = os::is_MP();
    130   __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)"
    131                         :
    132                         : "r" (dest), "r" (mp)
    133                         : "cc", "memory");
    134 }
// 64-bit atomic swap; returns the previous *dest.  As with the 32-bit xchg,
// no explicit lock prefix is needed: xchgq with a memory operand locks
// implicitly.
    136 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
    137   __asm__ __volatile__ ("xchgq (%2),%0"
    138                         : "=r" (exchange_value)
    139                         : "0" (exchange_value), "r" (dest)
    140                         : "memory");
    141   return exchange_value;
    142 }
// 64-bit compare-and-swap (cmpxchgq); same register discipline as the jint
// version: compare_value goes in RAX, and RAX afterwards holds the value
// read from *dest, which is returned (== compare_value iff the swap
// succeeded).
    144 inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
    145   bool mp = os::is_MP();
    146   __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
    147                         : "=a" (exchange_value)
    148                         : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
    149                         : "cc", "memory");
    150   return exchange_value;
    151 }
   153 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
   154   return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
   155 }
   157 inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
   158   return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
   159 }
   161 inline jlong Atomic::load(volatile jlong* src) { return *src; }
   163 #else // !AMD64
   165 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
   166   return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
   167 }
   169 inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
   170   return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
   171 }
   174 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
   175   inc((volatile jint*)dest);
   176 }
   178 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
   179   dec((volatile jint*)dest);
   180 }
   182 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
   183   return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
   184 }
// Assembly stubs for 64-bit atomics on the 32-bit VM.
// _Atomic_cmpxchg_long: 64-bit CAS; the trailing bool tells the stub
//   whether it is running on an MP system.
// _Atomic_move_long: must perform a single atomic 64-bit copy from *src to
//   *dst -- a compiler-generated pair of 32-bit moves could tear.
    186 extern "C" {
    187   // defined in linux_x86.s
    188   jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
    189   void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
    190 }
   192 inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value) {
   193   return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
   194 }
   196 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
   197   return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
   198 }
   200 inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value) {
   201   return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
   202 }
   204 inline jlong Atomic::load(volatile jlong* src) {
   205   volatile jlong dest;
   206   _Atomic_move_long(src, &dest);
   207   return dest;
   208 }
   210 inline void Atomic::store(jlong store_value, jlong* dest) {
   211   _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
   212 }
   214 inline void Atomic::store(jlong store_value, volatile jlong* dest) {
   215   _Atomic_move_long((volatile jlong*)&store_value, dest);
   216 }
   218 #endif // AMD64
   220 #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP

mercurial