src/share/vm/memory/allocation.inline.hpp

author:      poonam
date:        Mon, 23 Mar 2020 17:57:13 +0000
changeset:   9965:c39172598323
parent:      7806:ed0067c67bd7
child:       7994:04ff2f6cd0eb
permissions: -rw-r--r--

8231779: crash HeapWord*ParallelScavengeHeap::failed_mem_allocate
Reviewed-by: dlong, tschatzl, pliden

/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP

#include "runtime/atomic.inline.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"
// Explicit C-heap memory management

void trace_heap_malloc(size_t size, const char* name, void* p);
void trace_heap_free(void* p);

#ifndef PRODUCT
// Increments an unsigned long statistics counter (not atomic on MP).
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // SPARC and X86 have atomic jlong (8-byte) load/store instructions.
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // Possible word-tearing during load/store.
  *dest += add_value;
#endif
}
#endif
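
// A minimal usage sketch (the counter name below is hypothetical, not a
// variable defined in this file): callers keep a volatile julong statistic
// and bump it without locking, accepting the benign race noted above.
//
//   static volatile julong _num_heap_allocs = 0;
//   ...
//   inc_stat_counter(&_num_heap_allocs, 1);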

// Allocate on the C heap using os::malloc; by default the VM exits on
// allocation failure, unless alloc_failmode requests a NULL return.
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
                          const NativeCallStack& stack,
                          AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::malloc(size, flags, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
#endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
  }
  return p;
}

// Convenience overload: CURRENT_PC records the native call stack at this
// point, and always_inline (on gcc) keeps that stack attributed to the real
// caller rather than to this wrapper.
#ifdef __GNUC__
__attribute__((always_inline))
#endif
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
                          AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
}

#ifdef __GNUC__
__attribute__((always_inline))
#endif
inline char* ReallocateHeap(char* old, size_t size, MEMFLAGS flag,
                            AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
#endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
  }
  return p;
}

inline void FreeHeap(void* p, MEMFLAGS memflags = mtInternal) {
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_free(p);
#endif
  os::free(p, memflags);
}
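
// A minimal usage sketch (buffer name and sizes are illustrative only):
// allocate, grow, and release a C-heap block, opting for a NULL return on
// failure instead of the default VM exit.
//
//   char* buf = AllocateHeap(64, mtInternal, AllocFailStrategy::RETURN_NULL);
//   if (buf != NULL) {
//     char* bigger = ReallocateHeap(buf, 128, mtInternal,
//                                   AllocFailStrategy::RETURN_NULL);
//     if (bigger != NULL) buf = bigger;  // on failure the old block survives
//     FreeHeap(buf, mtInternal);
//   }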

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
    const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
    const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack, AllocFailStrategy::RETURN_NULL);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
    const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new[](size_t size,
    const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new[](size_t size) throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new[](size_t size,
    const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new[](size_t size,
    const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p) {
  FreeHeap(p, F);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete[](void* p) {
  FreeHeap(p, F);
}
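
// A minimal usage sketch ("ExampleEntry" is a hypothetical type, not part of
// the VM): deriving from CHeapObj<F> routes new/delete through AllocateHeap
// and FreeHeap, tagging the memory for NMT with the given MEMFLAGS.
//
//   class ExampleEntry : public CHeapObj<mtInternal> {
//    public:
//     int _value;
//   };
//   ...
//   ExampleEntry* e  = new ExampleEntry();                  // exits VM on OOM
//   ExampleEntry* e2 = new (std::nothrow) ExampleEntry();   // NULL on OOM
//   delete e;
//   delete e2;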

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate(size_t length) {
  assert(_addr == NULL, "Already in use");

  _size = sizeof(E) * length;
  _use_malloc = _size < ArrayAllocatorMallocLimit;

  if (_use_malloc) {
    // Request RETURN_NULL so that a failed malloc of a large enough size can
    // fall through to the mmap path below; with the default EXIT_OOM the VM
    // would exit inside AllocateHeap and the fall-back would be dead code.
    _addr = AllocateHeap(_size, F, AllocFailStrategy::RETURN_NULL);
    if (_addr == NULL && _size >= (size_t)os::vm_allocation_granularity()) {
      // malloc failed; let's try mmap instead
      _use_malloc = false;
    } else {
      if (_addr == NULL) {
        // Too small for the mmap fall-back; out of memory.
        vm_exit_out_of_memory(_size, OOM_MALLOC_ERROR, "Allocator (malloc)");
      }
      return (E*)_addr;
    }
  }

  int alignment = os::vm_allocation_granularity();
  _size = align_size_up(_size, alignment);

  _addr = os::reserve_memory(_size, NULL, alignment, F);
  if (_addr == NULL) {
    vm_exit_out_of_memory(_size, OOM_MMAP_ERROR, "Allocator (reserve)");
  }

  os::commit_memory_or_exit(_addr, _size, !ExecMem, "Allocator (commit)");

  return (E*)_addr;
}

template <class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free() {
  if (_addr != NULL) {
    if (_use_malloc) {
      FreeHeap(_addr, F);
    } else {
      os::release_memory(_addr, _size);
    }
    _addr = NULL;
  }
}
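
// A minimal usage sketch (element type, tag, and length are illustrative):
// small arrays come from malloc, while arrays at or above
// ArrayAllocatorMallocLimit use reserved and committed virtual memory.
//
//   ArrayAllocator<jint, mtGC> array;
//   jint* elems = array.allocate(length);  // malloc or mmap, exits VM on OOM
//   ...
//   array.free();                          // FreeHeap or os::release_memory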

#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
