src/share/vm/jfr/recorder/storage/jfrMemorySpace.inline.hpp

author:      apetushkov
date:        Wed, 17 Jun 2020 11:43:05 +0300
changeset:   9928:d2c2cd90513e
parent:      9858:b985cbb00e68
permissions: -rw-r--r--

8220293: Deadlock in JFR string pool
Reviewed-by: rehn, egahlin

/*
 * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_JFR_RECORDER_STORAGE_JFRMEMORYSPACE_INLINE_HPP
#define SHARE_VM_JFR_RECORDER_STORAGE_JFRMEMORYSPACE_INLINE_HPP

#include "jfr/recorder/storage/jfrMemorySpace.hpp"

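// A JfrMemorySpace manages two intrusive lists of buffers of type T:
// _free holds pre-allocated (cached) buffers available for reuse, and _full
// holds buffers that have been handed out and filled. Element sizes are
// multiples of the VM page size (see initialize()).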
template <typename T, template <typename> class RetrievalType, typename Callback>
JfrMemorySpace<T, RetrievalType, Callback>::
JfrMemorySpace(size_t min_elem_size, size_t limit_size, size_t cache_count, Callback* callback) :
  _free(),
  _full(),
  _min_elem_size(min_elem_size),
  _limit_size(limit_size),
  _cache_count(cache_count),
  _callback(callback) {}

template <typename T, template <typename> class RetrievalType, typename Callback>
JfrMemorySpace<T, RetrievalType, Callback>::~JfrMemorySpace() {
  Iterator full_iter(_full);
  while (full_iter.has_next()) {
    Type* t = full_iter.next();
    _full.remove(t);
    deallocate(t);
  }
  Iterator free_iter(_free);
  while (free_iter.has_next()) {
    Type* t = free_iter.next();
    _free.remove(t);
    deallocate(t);
  }
}

template <typename T, template <typename> class RetrievalType, typename Callback>
bool JfrMemorySpace<T, RetrievalType, Callback>::initialize() {
  assert(_min_elem_size % os::vm_page_size() == 0, "invariant");
  assert(_limit_size % os::vm_page_size() == 0, "invariant");
  // pre-allocate cache elements
  for (size_t i = 0; i < _cache_count; ++i) {
    Type* const t = allocate(_min_elem_size);
    if (t == NULL) {
      return false;
    }
    insert_free_head(t);
  }
  assert(_free.count() == _cache_count, "invariant");
  return true;
}

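// Releasing a buffer back to the memory space (caller must hold the lock):
// transient buffers are deallocated immediately; other buffers are kept on
// the free list while the cache is still being populated, otherwise they are
// deallocated as well.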
template <typename T, template <typename> class RetrievalType, typename Callback>
inline void JfrMemorySpace<T, RetrievalType, Callback>::release_full(T* t) {
  assert(is_locked(), "invariant");
  assert(t != NULL, "invariant");
  assert(_full.in_list(t), "invariant");
  remove_full(t);
  assert(!_full.in_list(t), "invariant");
  if (t->transient()) {
    deallocate(t);
    return;
  }
  assert(t->empty(), "invariant");
  assert(!t->retired(), "invariant");
  assert(t->identity() == NULL, "invariant");
  if (should_populate_cache()) {
    assert(!_free.in_list(t), "invariant");
    insert_free_head(t);
  } else {
    deallocate(t);
  }
}

template <typename T, template <typename> class RetrievalType, typename Callback>
inline void JfrMemorySpace<T, RetrievalType, Callback>::release_free(T* t) {
  assert(is_locked(), "invariant");
  assert(t != NULL, "invariant");
  assert(_free.in_list(t), "invariant");
  if (t->transient()) {
    remove_free(t);
    assert(!_free.in_list(t), "invariant");
    deallocate(t);
    return;
  }
  assert(t->empty(), "invariant");
  assert(!t->retired(), "invariant");
  assert(t->identity() == NULL, "invariant");
  if (!should_populate_cache()) {
    remove_free(t);
    assert(!_free.in_list(t), "invariant");
    deallocate(t);
  }
}

template <typename T, template <typename> class RetrievalType, typename Callback>
template <typename IteratorCallback, typename IteratorType>
inline void JfrMemorySpace<T, RetrievalType, Callback>
::iterate(IteratorCallback& callback, bool full, jfr_iter_direction direction) {
  IteratorType iterator(full ? _full : _free, direction);
  while (iterator.has_next()) {
    callback.process(iterator.next());
  }
}

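// Free-standing convenience helpers. The mspace_allocate_* variants differ
// only in how the new buffer is tagged (acquired, transient, lease) and into
// which list (_full or _free) it is inserted.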
template <typename Mspace>
inline size_t size_adjustment(size_t size, Mspace* mspace) {
  assert(mspace != NULL, "invariant");
  static const size_t min_elem_size = mspace->min_elem_size();
  if (size < min_elem_size) {
    size = min_elem_size;
  }
  return size;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate(size_t size, Mspace* mspace) {
  return mspace->allocate(size_adjustment(size, mspace));
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_acquired(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate(size, mspace);
  if (t == NULL) return NULL;
  t->acquire(thread);
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_transient(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate_acquired(size, mspace, thread);
  if (t == NULL) return NULL;
  assert(t->acquired_by_self(), "invariant");
  t->set_transient();
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_transient_lease(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate_transient(size, mspace, thread);
  if (t == NULL) return NULL;
  assert(t->acquired_by_self(), "invariant");
  assert(t->transient(), "invariant");
  t->set_lease();
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_to_full(size_t size, Mspace* mspace, Thread* thread) {
  assert(mspace->is_locked(), "invariant");
  typename Mspace::Type* const t = mspace_allocate_acquired(size, mspace, thread);
  if (t == NULL) return NULL;
  mspace->insert_full_head(t);
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_transient_to_full(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate_transient(size, mspace, thread);
  if (t == NULL) return NULL;
  MspaceLock<Mspace> lock(mspace);
  mspace->insert_full_head(t);
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_transient_lease_to_full(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate_transient_lease(size, mspace, thread);
  if (t == NULL) return NULL;
  assert(t->acquired_by_self(), "invariant");
  assert(t->transient(), "invariant");
  assert(t->lease(), "invariant");
  MspaceLock<Mspace> lock(mspace);
  mspace->insert_full_head(t);
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_allocate_transient_lease_to_free(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* const t = mspace_allocate_transient_lease(size, mspace, thread);
  if (t == NULL) return NULL;
  assert(t->acquired_by_self(), "invariant");
  assert(t->transient(), "invariant");
  assert(t->lease(), "invariant");
  MspaceLock<Mspace> lock(mspace);
  mspace->insert_free_head(t);
  return t;
}

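// The mspace_get_* variants retrieve an existing buffer through the memory
// space's retrieval policy (mspace->get()), optionally retrying, detaching
// the buffer from the free list, or moving it onto the full list;
// mspace_get_to_full falls back to a fresh allocation when retrieval fails.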
template <typename Mspace>
inline typename Mspace::Type* mspace_get_free(size_t size, Mspace* mspace, Thread* thread) {
  return mspace->get(size, thread);
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_free_with_retry(size_t size, Mspace* mspace, size_t retry_count, Thread* thread) {
  assert(size <= mspace->min_elem_size(), "invariant");
  for (size_t i = 0; i < retry_count; ++i) {
    typename Mspace::Type* const t = mspace_get_free(size, mspace, thread);
    if (t != NULL) {
      return t;
    }
  }
  return NULL;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_free_with_detach(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* t = mspace_get_free(size, mspace, thread);
  if (t != NULL) {
    mspace->remove_free(t);
  }
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_free_to_full(size_t size, Mspace* mspace, Thread* thread) {
  assert(size <= mspace->min_elem_size(), "invariant");
  assert(mspace->is_locked(), "invariant");
  typename Mspace::Type* t = mspace_get_free(size, mspace, thread);
  if (t == NULL) {
    return NULL;
  }
  assert(t->acquired_by_self(), "invariant");
  move_to_head(t, mspace->free(), mspace->full());
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_to_full(size_t size, Mspace* mspace, Thread* thread) {
  size = size_adjustment(size, mspace);
  MspaceLock<Mspace> lock(mspace);
  if (size <= mspace->min_elem_size()) {
    typename Mspace::Type* const t = mspace_get_free_to_full(size, mspace, thread);
    if (t != NULL) {
      return t;
    }
  }
  return mspace_allocate_to_full(size, mspace, thread);
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_free_lease_with_retry(size_t size, Mspace* mspace, size_t retry_count, Thread* thread) {
  typename Mspace::Type* t = mspace_get_free_with_retry(size, mspace, retry_count, thread);
  if (t != NULL) {
    t->set_lease();
  }
  return t;
}

template <typename Mspace>
inline typename Mspace::Type* mspace_get_lease(size_t size, Mspace* mspace, Thread* thread) {
  typename Mspace::Type* t;
  t = mspace_get_free_lease(size, mspace, thread);
  if (t != NULL) {
    assert(t->acquired_by_self(), "invariant");
    assert(t->lease(), "invariant");
    return t;
  }
  t = mspace_allocate_transient_to_full(size, mspace, thread);
  if (t != NULL) {
    t->set_lease();
  }
  return t;
}

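// Release helpers: the *_critical variants acquire the memory space lock
// before delegating; the non-critical variants assert that the lock is
// already held by the caller.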
template <typename Mspace>
inline void mspace_release_full(typename Mspace::Type* t, Mspace* mspace) {
  assert(t != NULL, "invariant");
  assert(t->unflushed_size() == 0, "invariant");
  assert(mspace != NULL, "invariant");
  assert(mspace->is_locked(), "invariant");
  mspace->release_full(t);
}

template <typename Mspace>
inline void mspace_release_free(typename Mspace::Type* t, Mspace* mspace) {
  assert(t != NULL, "invariant");
  assert(t->unflushed_size() == 0, "invariant");
  assert(mspace != NULL, "invariant");
  assert(mspace->is_locked(), "invariant");
  mspace->release_free(t);
}

template <typename Mspace>
inline void mspace_release_full_critical(typename Mspace::Type* t, Mspace* mspace) {
  MspaceLock<Mspace> lock(mspace);
  mspace_release_full(t, mspace);
}

template <typename Mspace>
inline void mspace_release_free_critical(typename Mspace::Type* t, Mspace* mspace) {
  MspaceLock<Mspace> lock(mspace);
  mspace_release_free(t, mspace);
}

template <typename List>
inline void move_to_head(typename List::Node* t, List& from, List& to) {
  assert(from.in_list(t), "invariant");
  to.prepend(from.remove(t));
}

template <typename Processor, typename Mspace, typename Iterator>
inline void process_free_list_iterator_control(Processor& processor, Mspace* mspace, jfr_iter_direction direction = forward) {
  mspace->template iterate<Processor, Iterator>(processor, false, direction);
}

template <typename Processor, typename Mspace, typename Iterator>
inline void process_full_list_iterator_control(Processor& processor, Mspace* mspace, jfr_iter_direction direction = forward) {
  mspace->template iterate<Processor, Iterator>(processor, true, direction);
}

template <typename Processor, typename Mspace>
inline void process_full_list(Processor& processor, Mspace* mspace, jfr_iter_direction direction = forward) {
  assert(mspace != NULL, "invariant");
  if (mspace->is_full_empty()) return;
  process_full_list_iterator_control<Processor, Mspace, typename Mspace::Iterator>(processor, mspace, direction);
}

template <typename Processor, typename Mspace>
inline void process_free_list(Processor& processor, Mspace* mspace, jfr_iter_direction direction = forward) {
  assert(mspace != NULL, "invariant");
  assert(mspace->has_free(), "invariant");
  process_free_list_iterator_control<Processor, Mspace, typename Mspace::Iterator>(processor, mspace, direction);
}

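// Applied to each buffer during release processing: transient buffers are
// returned to the memory space under the lock (to the full or free list,
// depending on _release_full); non-transient buffers are reinitialized and
// then released (published) for reuse.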
template <typename Mspace>
inline bool ReleaseOp<Mspace>::process(typename Mspace::Type* t) {
  assert(t != NULL, "invariant");
  // assumes some means of exclusive access to t
  if (t->transient()) {
    if (_release_full) {
      mspace_release_full_critical(t, _mspace);
    } else {
      mspace_release_free_critical(t, _mspace);
    }
    return true;
  }
  t->reinitialize();
  assert(t->empty(), "invariant");
  assert(!t->retired(), "invariant");
  t->release(); // publish
  return true;
}

#ifdef ASSERT
template <typename T>
inline void assert_migration_state(const T* old, const T* new_buffer, size_t used, size_t requested) {
  assert(old != NULL, "invariant");
  assert(new_buffer != NULL, "invariant");
  assert(old->pos() >= old->start(), "invariant");
  assert(old->pos() + used <= old->end(), "invariant");
  assert(new_buffer->free_size() >= (used + requested), "invariant");
}
#endif // ASSERT

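// Copies the 'used' bytes of outstanding (not yet committed) writes from the
// old buffer to the new buffer, so an in-flight write can continue there.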
template <typename T>
inline void migrate_outstanding_writes(const T* old, T* new_buffer, size_t used, size_t requested) {
  DEBUG_ONLY(assert_migration_state(old, new_buffer, used, requested);)
  if (used > 0) {
    memcpy(new_buffer->pos(), old->pos(), used);
  }
}

#endif // SHARE_VM_JFR_RECORDER_STORAGE_JFRMEMORYSPACE_INLINE_HPP
