@@ -25,6 +25,7 @@
 #include <grpc/support/alloc.h>
 #include <grpc/support/atm.h>
 #include <grpc/support/log.h>
+#include <grpc/support/sync.h>
 
 #include "src/core/lib/gpr/alloc.h"
 
@@ -36,8 +37,6 @@
 
 #ifdef SIMPLE_ARENA_FOR_DEBUGGING
 
-#include <grpc/support/sync.h>
-
 struct gpr_arena {
   gpr_mu mu;
   void** ptrs;
@@ -78,14 +77,17 @@ void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
 // would allow us to use the alignment actually needed by the caller.
 
 typedef struct zone {
-  size_t size_begin;
-  size_t size_end;
-  gpr_atm next_atm;
+  size_t size_begin;  // All the space we have set aside for allocations up
+                      // until this zone.
+  size_t size_end;  // size_end = size_begin plus all the space we set aside
+                    // for allocations in this zone itself.
+  zone* next;
 } zone;
 
 struct gpr_arena {
   gpr_atm size_so_far;
   zone initial_zone;
+  gpr_mu arena_growth_mutex;
 };
 
 static void* zalloc_aligned(size_t size) {
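
Side note on the zone bookkeeping above: each zone owns the half-open range
[size_begin, size_end) of the arena's cumulative allocation counter, so
locating the zone that contains a given end offset is a walk down the next
list. A minimal sketch of that lookup, assuming a zone large enough is known
to exist (hypothetical helper, not part of this change):

    // Walk the singly linked zone list until the allocation's end offset
    // fits inside the current zone's [size_begin, size_end) range.
    static zone* find_zone_for(zone* z, size_t allocation_end_offset) {
      while (allocation_end_offset > z->size_end) z = z->next;
      return z;
    }

This mirrors the list walk that gpr_arena_alloc performs below.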
@@ -99,15 +101,17 @@ gpr_arena* gpr_arena_create(size_t initial_size) {
   gpr_arena* a = static_cast<gpr_arena*>(zalloc_aligned(
       GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size));
   a->initial_zone.size_end = initial_size;
+  gpr_mu_init(&a->arena_growth_mutex);
   return a;
 }
 
 size_t gpr_arena_destroy(gpr_arena* arena) {
+  gpr_mu_destroy(&arena->arena_growth_mutex);
   gpr_atm size = gpr_atm_no_barrier_load(&arena->size_so_far);
-  zone* z = (zone*)gpr_atm_no_barrier_load(&arena->initial_zone.next_atm);
+  zone* z = arena->initial_zone.next;
   gpr_free_aligned(arena);
   while (z) {
-    zone* next_z = (zone*)gpr_atm_no_barrier_load(&z->next_atm);
+    zone* next_z = z->next;
     gpr_free_aligned(z);
     z = next_z;
   }
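
For reference, a minimal usage sketch of the lifecycle around the new
init/destroy pairing (sizes are assumptions for illustration):

    gpr_arena* arena = gpr_arena_create(1024);   // growth mutex initialized
    void* small = gpr_arena_alloc(arena, 64);    // served by the initial zone
    void* large = gpr_arena_alloc(arena, 4096);  // forces a new zone
    GPR_ASSERT(small != nullptr && large != nullptr);
    size_t total = gpr_arena_destroy(arena);     // mutex destroyed, zones freed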
@@ -116,37 +120,55 @@ size_t gpr_arena_destroy(gpr_arena* arena) {
 
 void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
   size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
-  size_t start = static_cast<size_t>(
+  size_t previous_size_of_arena_allocations = static_cast<size_t>(
       gpr_atm_no_barrier_fetch_add(&arena->size_so_far, size));
+  size_t updated_size_of_arena_allocations =
+      previous_size_of_arena_allocations + size;
   zone* z = &arena->initial_zone;
-  while (start > z->size_end) {
-    zone* next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
-    if (next_z == nullptr) {
-      size_t next_z_size =
-          static_cast<size_t>(gpr_atm_no_barrier_load(&arena->size_so_far));
-      next_z = static_cast<zone*>(zalloc_aligned(
-          GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) + next_z_size));
-      next_z->size_begin = z->size_end;
-      next_z->size_end = z->size_end + next_z_size;
-      if (!gpr_atm_rel_cas(&z->next_atm, static_cast<gpr_atm>(NULL),
-                           (gpr_atm)next_z)) {
-        gpr_free_aligned(next_z);
-        next_z = (zone*)gpr_atm_acq_load(&z->next_atm);
+  // Check whether the allocation cannot end within the initial zone. This
+  // is true only in the uncommon case because of our arena sizing hysteresis
+  // (that is, most calls should have a large enough initial zone and will
+  // not need to grow the arena).
+  if (updated_size_of_arena_allocations > z->size_end) {
+    // Find a zone to fit this allocation.
+    gpr_mu_lock(&arena->arena_growth_mutex);
+    while (updated_size_of_arena_allocations > z->size_end) {
+      if (z->next == nullptr) {
+        // Note that we do an extra increment of size_so_far to prevent
+        // multiple simultaneous callers from stepping on each other. However,
+        // this extra increment means some space in the arena is wasted.
+        // So whenever we need to allocate x bytes and there are x - n (where
+        // n > 0) bytes remaining in the current zone, we will waste x bytes
+        // (x - n in the current zone and n in the new zone).
+        previous_size_of_arena_allocations = static_cast<size_t>(
+            gpr_atm_no_barrier_fetch_add(&arena->size_so_far, size));
+        updated_size_of_arena_allocations =
+            previous_size_of_arena_allocations + size;
+        size_t next_z_size = updated_size_of_arena_allocations;
+        z->next = static_cast<zone*>(zalloc_aligned(
+            GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) + next_z_size));
+        z->next->size_begin = z->size_end;
+        z->next->size_end = z->size_end + next_z_size;
       }
+      z = z->next;
     }
-    z = next_z;
-  }
-  if (start + size > z->size_end) {
-    return gpr_arena_alloc(arena, size);
+    gpr_mu_unlock(&arena->arena_growth_mutex);
   }
-  GPR_ASSERT(start >= z->size_begin);
-  GPR_ASSERT(start + size <= z->size_end);
-  char* ptr = (z == &arena->initial_zone)
-                  ? reinterpret_cast<char*>(arena) +
-                        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena))
-                  : reinterpret_cast<char*>(z) +
-                        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone));
-  return ptr + start - z->size_begin;
+  GPR_ASSERT(previous_size_of_arena_allocations >= z->size_begin);
+  GPR_ASSERT(updated_size_of_arena_allocations <= z->size_end);
+  // Skip the first part of the zone, which just contains tracking
+  // information. For the initial zone, this is the gpr_arena struct, and
+  // for any other zone it is the zone struct.
+  char* start_of_allocation_space =
+      (z == &arena->initial_zone)
+          ? reinterpret_cast<char*>(arena) +
+                GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena))
+          : reinterpret_cast<char*>(z) +
+                GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone));
+  // previous_size_of_arena_allocations - size_begin is how many bytes have
+  // been allocated into the current zone.
+  return start_of_allocation_space + previous_size_of_arena_allocations -
+         z->size_begin;
 }
 
 #endif  // SIMPLE_ARENA_FOR_DEBUGGING
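
To make the wasted-space comment in gpr_arena_alloc concrete, here is a worked
example under assumed sizes (illustration only, not code from this change).
With size_end = 100 and 90 bytes already reserved, a 40-byte request first
reserves [90, 130), which spills past the zone; the retry under the mutex then
reserves [130, 170) in the new zone, leaving [90, 130) unused:

    size_t size_so_far = 90;                      // already reserved
    const size_t zone_end = 100, x = 40;          // zone size, request size
    size_t prev = size_so_far; size_so_far += x;  // first fetch_add: [90, 130)
    GPR_ASSERT(prev + x > zone_end);              // spills: zone must grow
    size_t prev2 = size_so_far; size_so_far += x; // second fetch_add: [130, 170)
    size_t offset_in_new_zone = prev2 - zone_end; // 130 - 100 == 30
    // waste: 10 bytes left in the old zone + 30 skipped in the new == x
    GPR_ASSERT((zone_end - prev) + offset_in_new_zone == x);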