Wed, 25 Sep 2013 13:58:13 +0200
8023956: Provide a work-around to broken Linux 32 bit "Exec Shield" using CS for NX emulation (crashing with SI_KERNEL)
Summary: Execute some code at a high virtual address value, and keep it mapped
Reviewed-by: coleenp, zgu
duke@435 | 1 | /* |
simonis@4675 | 2 | * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. |
duke@435 | 3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
duke@435 | 4 | * |
duke@435 | 5 | * This code is free software; you can redistribute it and/or modify it |
duke@435 | 6 | * under the terms of the GNU General Public License version 2 only, as |
duke@435 | 7 | * published by the Free Software Foundation. |
duke@435 | 8 | * |
duke@435 | 9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
duke@435 | 10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
duke@435 | 11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
duke@435 | 12 | * version 2 for more details (a copy is included in the LICENSE file that |
duke@435 | 13 | * accompanied this code). |
duke@435 | 14 | * |
duke@435 | 15 | * You should have received a copy of the GNU General Public License version |
duke@435 | 16 | * 2 along with this work; if not, write to the Free Software Foundation, |
duke@435 | 17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
duke@435 | 18 | * |
trims@1907 | 19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
trims@1907 | 20 | * or visit www.oracle.com if you need additional information or have any |
trims@1907 | 21 | * questions. |
duke@435 | 22 | * |
duke@435 | 23 | */ |
duke@435 | 24 | |
stefank@2314 | 25 | #ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP |
stefank@2314 | 26 | #define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP |
stefank@2314 | 27 | |
stefank@2314 | 28 | #include "runtime/atomic.hpp" |
stefank@2314 | 29 | #include "runtime/os.hpp" |
stefank@2314 | 30 | #include "vm_version_x86.hpp" |
stefank@2314 | 31 | |
duke@435 | 32 | // Implementation of class atomic |
duke@435 | 33 | |
duke@435 | 34 | inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; } |
duke@435 | 35 | inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; } |
duke@435 | 36 | inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; } |
duke@435 | 37 | inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; } |
duke@435 | 38 | inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; } |
duke@435 | 39 | |
duke@435 | 40 | inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; } |
duke@435 | 41 | inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; } |
duke@435 | 42 | inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; } |
duke@435 | 43 | inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; } |
duke@435 | 44 | inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; } |
duke@435 | 45 | |
duke@435 | 46 | |
duke@435 | 47 | // Adding a lock prefix to an instruction on MP machine |
duke@435 | 48 | #define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: " |
duke@435 | 49 | |
duke@435 | 50 | inline jint Atomic::add (jint add_value, volatile jint* dest) { |
duke@435 | 51 | jint addend = add_value; |
duke@435 | 52 | int mp = os::is_MP(); |
duke@435 | 53 | __asm__ volatile ( LOCK_IF_MP(%3) "xaddl %0,(%2)" |
duke@435 | 54 | : "=r" (addend) |
duke@435 | 55 | : "0" (addend), "r" (dest), "r" (mp) |
duke@435 | 56 | : "cc", "memory"); |
duke@435 | 57 | return addend + add_value; |
duke@435 | 58 | } |
duke@435 | 59 | |
duke@435 | 60 | inline void Atomic::inc (volatile jint* dest) { |
duke@435 | 61 | int mp = os::is_MP(); |
duke@435 | 62 | __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" : |
duke@435 | 63 | : "r" (dest), "r" (mp) : "cc", "memory"); |
duke@435 | 64 | } |
duke@435 | 65 | |
duke@435 | 66 | inline void Atomic::inc_ptr(volatile void* dest) { |
duke@435 | 67 | inc_ptr((volatile intptr_t*)dest); |
duke@435 | 68 | } |
duke@435 | 69 | |
duke@435 | 70 | inline void Atomic::dec (volatile jint* dest) { |
duke@435 | 71 | int mp = os::is_MP(); |
duke@435 | 72 | __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" : |
duke@435 | 73 | : "r" (dest), "r" (mp) : "cc", "memory"); |
duke@435 | 74 | } |
duke@435 | 75 | |
duke@435 | 76 | inline void Atomic::dec_ptr(volatile void* dest) { |
duke@435 | 77 | dec_ptr((volatile intptr_t*)dest); |
duke@435 | 78 | } |
duke@435 | 79 | |
duke@435 | 80 | inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) { |
duke@435 | 81 | __asm__ volatile ( "xchgl (%2),%0" |
duke@435 | 82 | : "=r" (exchange_value) |
duke@435 | 83 | : "0" (exchange_value), "r" (dest) |
duke@435 | 84 | : "memory"); |
duke@435 | 85 | return exchange_value; |
duke@435 | 86 | } |
duke@435 | 87 | |
duke@435 | 88 | inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) { |
duke@435 | 89 | return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest); |
duke@435 | 90 | } |
duke@435 | 91 | |
duke@435 | 92 | |
duke@435 | 93 | inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) { |
duke@435 | 94 | int mp = os::is_MP(); |
duke@435 | 95 | __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)" |
duke@435 | 96 | : "=a" (exchange_value) |
duke@435 | 97 | : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp) |
duke@435 | 98 | : "cc", "memory"); |
duke@435 | 99 | return exchange_value; |
duke@435 | 100 | } |
duke@435 | 101 | |
duke@435 | 102 | #ifdef AMD64 |
duke@435 | 103 | inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; } |
duke@435 | 104 | inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; } |
duke@435 | 105 | |
duke@435 | 106 | inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { |
duke@435 | 107 | intptr_t addend = add_value; |
duke@435 | 108 | bool mp = os::is_MP(); |
duke@435 | 109 | __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)" |
duke@435 | 110 | : "=r" (addend) |
duke@435 | 111 | : "0" (addend), "r" (dest), "r" (mp) |
duke@435 | 112 | : "cc", "memory"); |
duke@435 | 113 | return addend + add_value; |
duke@435 | 114 | } |
duke@435 | 115 | |
duke@435 | 116 | inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { |
duke@435 | 117 | return (void*)add_ptr(add_value, (volatile intptr_t*)dest); |
duke@435 | 118 | } |
duke@435 | 119 | |
duke@435 | 120 | inline void Atomic::inc_ptr(volatile intptr_t* dest) { |
duke@435 | 121 | bool mp = os::is_MP(); |
duke@435 | 122 | __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)" |
duke@435 | 123 | : |
duke@435 | 124 | : "r" (dest), "r" (mp) |
duke@435 | 125 | : "cc", "memory"); |
duke@435 | 126 | } |
duke@435 | 127 | |
duke@435 | 128 | inline void Atomic::dec_ptr(volatile intptr_t* dest) { |
duke@435 | 129 | bool mp = os::is_MP(); |
duke@435 | 130 | __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)" |
duke@435 | 131 | : |
duke@435 | 132 | : "r" (dest), "r" (mp) |
duke@435 | 133 | : "cc", "memory"); |
duke@435 | 134 | } |
duke@435 | 135 | |
duke@435 | 136 | inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { |
duke@435 | 137 | __asm__ __volatile__ ("xchgq (%2),%0" |
duke@435 | 138 | : "=r" (exchange_value) |
duke@435 | 139 | : "0" (exchange_value), "r" (dest) |
duke@435 | 140 | : "memory"); |
duke@435 | 141 | return exchange_value; |
duke@435 | 142 | } |
duke@435 | 143 | |
duke@435 | 144 | inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) { |
duke@435 | 145 | bool mp = os::is_MP(); |
duke@435 | 146 | __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)" |
duke@435 | 147 | : "=a" (exchange_value) |
duke@435 | 148 | : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp) |
duke@435 | 149 | : "cc", "memory"); |
duke@435 | 150 | return exchange_value; |
duke@435 | 151 | } |
duke@435 | 152 | |
duke@435 | 153 | inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) { |
duke@435 | 154 | return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value); |
duke@435 | 155 | } |
duke@435 | 156 | |
duke@435 | 157 | inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) { |
duke@435 | 158 | return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value); |
duke@435 | 159 | } |
duke@435 | 160 | |
kvn@2434 | 161 | inline jlong Atomic::load(volatile jlong* src) { return *src; } |
kvn@2434 | 162 | |
kvn@2434 | 163 | #else // !AMD64 |
duke@435 | 164 | |
duke@435 | 165 | inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) { |
duke@435 | 166 | return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest); |
duke@435 | 167 | } |
duke@435 | 168 | |
duke@435 | 169 | inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) { |
duke@435 | 170 | return (void*)Atomic::add((jint)add_value, (volatile jint*)dest); |
duke@435 | 171 | } |
duke@435 | 172 | |
duke@435 | 173 | |
duke@435 | 174 | inline void Atomic::inc_ptr(volatile intptr_t* dest) { |
duke@435 | 175 | inc((volatile jint*)dest); |
duke@435 | 176 | } |
duke@435 | 177 | |
duke@435 | 178 | inline void Atomic::dec_ptr(volatile intptr_t* dest) { |
duke@435 | 179 | dec((volatile jint*)dest); |
duke@435 | 180 | } |
duke@435 | 181 | |
duke@435 | 182 | inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) { |
duke@435 | 183 | return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest); |
duke@435 | 184 | } |
duke@435 | 185 | |
kvn@2434 | 186 | extern "C" { |
kvn@2434 | 187 | // defined in linux_x86.s |
kvn@2434 | 188 | jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool); |
kvn@2434 | 189 | void _Atomic_move_long(volatile jlong* src, volatile jlong* dst); |
kvn@2434 | 190 | } |
kvn@2434 | 191 | |
duke@435 | 192 | inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) { |
duke@435 | 193 | return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP()); |
duke@435 | 194 | } |
duke@435 | 195 | |
duke@435 | 196 | inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) { |
duke@435 | 197 | return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value); |
duke@435 | 198 | } |
duke@435 | 199 | |
duke@435 | 200 | inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) { |
duke@435 | 201 | return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value); |
duke@435 | 202 | } |
kvn@2434 | 203 | |
kvn@2434 | 204 | inline jlong Atomic::load(volatile jlong* src) { |
kvn@2434 | 205 | volatile jlong dest; |
kvn@2434 | 206 | _Atomic_move_long(src, &dest); |
kvn@2434 | 207 | return dest; |
kvn@2434 | 208 | } |
kvn@2434 | 209 | |
kvn@2434 | 210 | inline void Atomic::store(jlong store_value, jlong* dest) { |
kvn@2434 | 211 | _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest); |
kvn@2434 | 212 | } |
kvn@2434 | 213 | |
kvn@2434 | 214 | inline void Atomic::store(jlong store_value, volatile jlong* dest) { |
kvn@2434 | 215 | _Atomic_move_long((volatile jlong*)&store_value, dest); |
kvn@2434 | 216 | } |
kvn@2434 | 217 | |
duke@435 | 218 | #endif // AMD64 |
stefank@2314 | 219 | |
stefank@2314 | 220 | #endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP |