src/share/vm/memory/allocation.cpp

changeset 4183:7b5885dadbdc
parent    4037:da91efe96a93
child     4193:716c64bda5ba
     1.1 --- a/src/share/vm/memory/allocation.cpp	Tue Aug 28 15:15:29 2012 +0200
     1.2 +++ b/src/share/vm/memory/allocation.cpp	Wed Oct 17 17:36:48 2012 +0200
     1.3 @@ -92,6 +92,26 @@
     1.4    return res;
     1.5  }
     1.6  
     1.7 +void* ResourceObj::operator new(size_t size, const std::nothrow_t&  nothrow_constant,
     1.8 +    allocation_type type, MEMFLAGS flags) {
     1.9 +  //should only call this with std::nothrow, use other operator new() otherwise
    1.10 +  address res;
    1.11 +  switch (type) {
    1.12 +   case C_HEAP:
    1.13 +    res = (address)AllocateHeap(size, flags, CALLER_PC, AllocFailStrategy::RETURN_NULL);
    1.14 +    DEBUG_ONLY(if (res!= NULL) set_allocation_type(res, C_HEAP);)
    1.15 +    break;
    1.16 +   case RESOURCE_AREA:
    1.17 +    // new(size) sets allocation type RESOURCE_AREA.
    1.18 +    res = (address)operator new(size, std::nothrow);
    1.19 +    break;
    1.20 +   default:
    1.21 +    ShouldNotReachHere();
    1.22 +  }
    1.23 +  return res;
    1.24 +}
    1.25 +
    1.26 +
    1.27  void ResourceObj::operator delete(void* p) {
    1.28    assert(((ResourceObj *)p)->allocated_on_C_heap(),
    1.29           "delete only allowed for C_HEAP objects");
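
The hunk above adds a std::nothrow variant of ResourceObj::operator new: for C_HEAP it calls AllocateHeap with AllocFailStrategy::RETURN_NULL, so exhaustion surfaces as a NULL pointer instead of a VM abort. A minimal caller sketch follows; the HashRow class and the mtInternal memory flag are illustrative assumptions, not part of this changeset.

    // Sketch only: a ResourceObj subclass placed on the C heap without
    // exiting the VM when the underlying allocation fails.
    class HashRow : public ResourceObj {
     public:
      int _key;
      HashRow(int key) : _key(key) {}
    };

    HashRow* row = new (std::nothrow, ResourceObj::C_HEAP, mtInternal) HashRow(17);
    if (row == NULL) {
      // AllocateHeap returned NULL under RETURN_NULL; degrade gracefully here,
      // e.g. skip the optional entry, instead of dying with an OOM error.
    }
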
    1.30 @@ -506,7 +526,7 @@
    1.31  }
    1.32  
    1.33  // Grow a new Chunk
    1.34 -void* Arena::grow( size_t x ) {
    1.35 +void* Arena::grow(size_t x, AllocFailType alloc_failmode) {
    1.36    // Get minimal required size.  Either real big, or even bigger for giant objs
    1.37    size_t len = MAX2(x, (size_t) Chunk::size);
    1.38  
    1.39 @@ -514,7 +534,10 @@
    1.40    _chunk = new (len) Chunk(len);
    1.41  
    1.42    if (_chunk == NULL) {
    1.43 -    signal_out_of_memory(len * Chunk::aligned_overhead_size(), "Arena::grow");
    1.44 +    if (alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    1.45 +      signal_out_of_memory(len * Chunk::aligned_overhead_size(), "Arena::grow");
    1.46 +    }
    1.47 +    return NULL;
    1.48    }
    1.49    if (k) k->set_next(_chunk);   // Append new chunk to end of linked list
    1.50    else _first = _chunk;
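
With the extra AllocFailType parameter, Arena::grow() only calls signal_out_of_memory() under the default EXIT_OOM policy and otherwise hands NULL back to its caller. A minimal sketch of how an arena entry point such as Amalloc (declared inline in allocation.hpp; the simplified body below is an assumption, not the code from this changeset) is expected to forward that policy:

    // Sketch only: forward the failure policy from Amalloc to grow(), so a
    // RETURN_NULL request propagates NULL instead of aborting the VM.
    void* Arena::Amalloc(size_t x, AllocFailType alloc_failmode) {
      x = ARENA_ALIGN(x);               // round the request up to arena alignment
      if (_hwm + x > _max) {
        return grow(x, alloc_failmode); // may return NULL under RETURN_NULL
      } else {
        char* old = _hwm;               // bump-pointer allocation in the current chunk
        _hwm += x;
        return old;
      }
    }
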
    1.51 @@ -529,13 +552,16 @@
    1.52  
    1.53  
    1.54  // Reallocate storage in Arena.
    1.55 -void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size) {
    1.56 +void *Arena::Arealloc(void* old_ptr, size_t old_size, size_t new_size, AllocFailType alloc_failmode) {
    1.57    assert(new_size >= 0, "bad size");
    1.58    if (new_size == 0) return NULL;
    1.59  #ifdef ASSERT
    1.60    if (UseMallocOnly) {
    1.61      // always allocate a new object  (otherwise we'll free this one twice)
    1.62 -    char* copy = (char*)Amalloc(new_size);
    1.63 +    char* copy = (char*)Amalloc(new_size, alloc_failmode);
    1.64 +    if (copy == NULL) {
    1.65 +      return NULL;
    1.66 +    }
    1.67      size_t n = MIN2(old_size, new_size);
    1.68      if (n > 0) memcpy(copy, old_ptr, n);
    1.69      Afree(old_ptr,old_size);    // Mostly done to keep stats accurate
    1.70 @@ -561,7 +587,10 @@
    1.71    }
    1.72  
    1.73    // Oops, got to relocate guts
    1.74 -  void *new_ptr = Amalloc(new_size);
    1.75 +  void *new_ptr = Amalloc(new_size, alloc_failmode);
    1.76 +  if (new_ptr == NULL) {
    1.77 +    return NULL;
    1.78 +  }
    1.79    memcpy( new_ptr, c_old, old_size );
    1.80    Afree(c_old,old_size);        // Mostly done to keep stats accurate
    1.81    return new_ptr;
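
Both early returns above bail out before Afree() or memcpy() runs, so on failure the caller still owns valid data at old_ptr. A hypothetical caller sketch (buffer names and sizes are illustrative only):

    // Sketch only: growing an arena-backed buffer without exiting the VM.
    char* buf = (char*) arena->Amalloc(64);
    // ... fill the first 64 bytes ...
    char* bigger = (char*) arena->Arealloc(buf, 64, 256, AllocFailStrategy::RETURN_NULL);
    if (bigger == NULL) {
      // Reallocation failed; buf is still intact, so fall back to the small
      // buffer or report the failure upward instead of triggering a VM exit.
    } else {
      buf = bigger;                     // adopt the larger block
    }
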
