commit 85ee11e317058d44d5c6c29bb4c53acc6c0e22c9
Author: Florian Weimer <fweimer@redhat.com>
Date:   Wed Oct 28 19:32:46 2015 +0100

    malloc: Prevent arena free_list from turning cyclic [BZ #19048]

        [BZ #19048]
        * malloc/malloc.c (struct malloc_state): Update comment.  Add
        attached_threads member.
        (main_arena): Initialize attached_threads.
        * malloc/arena.c (list_lock): Update comment.
        (ptmalloc_lock_all, ptmalloc_unlock_all): Likewise.
        (ptmalloc_unlock_all2): Reinitialize arena reference counts.
        (detach_arena): New function.
        (_int_new_arena): Initialize arena reference count and detach
        replaced arena.
        (get_free_list, reused_arena): Update reference count and detach
        replaced arena.
        (arena_thread_freeres): Update arena reference count and only put
        unreferenced arenas on the free list.

(Backport note: this version reintroduces tsd_getspecific and
tsd_setspecific, since glibc 2.21 still keeps the per-thread arena
pointer in the arena_key TSD slot.)

diff -up glibc-2.21/malloc/arena.c.bz1276112 glibc-2.21/malloc/arena.c
--- glibc-2.21/malloc/arena.c.bz1276112	2015-10-28 20:04:37.043402183 +0100
+++ glibc-2.21/malloc/arena.c	2015-10-28 20:19:32.570584876 +0100
@@ -67,6 +67,12 @@ extern int sanity_check_heap_info_alignm
 /* Thread specific data */
 
 static tsd_key_t arena_key;
+
+/* Arena free list.  list_lock protects the free_list variable below,
+   and the next_free and attached_threads members of the mstate
+   objects.  No other (malloc) locks must be taken while list_lock is
+   active, otherwise deadlocks may occur.  */
+
 static mutex_t list_lock = MUTEX_INITIALIZER;
 static size_t narenas = 1;
 static mstate free_list;
@@ -233,7 +239,10 @@ ptmalloc_lock_all (void)
   save_free_hook = __free_hook;
   __malloc_hook = malloc_atfork;
   __free_hook = free_atfork;
-  /* Only the current thread may perform malloc/free calls now. */
+  /* Only the current thread may perform malloc/free calls now.
+     save_arena will be reattached to the current thread, in
+     ptmalloc_unlock_all, so save_arena->attached_threads is not
+     updated.  */
   tsd_getspecific (arena_key, save_arena);
   tsd_setspecific (arena_key, ATFORK_ARENA_PTR);
 out:
@@ -251,6 +260,9 @@ ptmalloc_unlock_all (void)
   if (--atfork_recursive_cntr != 0)
     return;
 
+  /* Replace ATFORK_ARENA_PTR with save_arena.
+     save_arena->attached_threads was not changed in ptmalloc_lock_all
+     and is still correct.  */
   tsd_setspecific (arena_key, save_arena);
   __malloc_hook = save_malloc_hook;
   __free_hook = save_free_hook;
@@ -282,12 +294,19 @@ ptmalloc_unlock_all2 (void)
   tsd_setspecific (arena_key, save_arena);
   __malloc_hook = save_malloc_hook;
   __free_hook = save_free_hook;
+
+  /* Push all arenas to the free list, except save_arena, which is
+     attached to the current thread.  */
+  if (save_arena != NULL)
+    ((mstate) save_arena)->attached_threads = 1;
   free_list = NULL;
   for (ar_ptr = &main_arena;; )
     {
       mutex_init (&ar_ptr->mutex);
       if (ar_ptr != save_arena)
         {
+	  /* This arena is no longer attached to any thread.  */
+	  ar_ptr->attached_threads = 0;
           ar_ptr->next_free = free_list;
           free_list = ar_ptr;
         }
@@ -714,6 +733,22 @@ heap_trim (heap_info *heap, size_t pad)
 
 /* Create a new arena with initial size "size".  */
 
+/* If REPLACED_ARENA is not NULL, detach it from this thread.  Must be
+   called while list_lock is held.  */
+static void
+detach_arena (mstate replaced_arena)
+{
+  if (replaced_arena != NULL)
+    {
+      assert (replaced_arena->attached_threads > 0);
+      /* The current implementation only detaches from main_arena in
+	 case of allocation failure.  This means that it is likely not
+	 beneficial to put the arena on free_list even if the
+	 reference count reaches zero.  */
+      --replaced_arena->attached_threads;
+    }
+}
+
 static mstate
 _int_new_arena (size_t size)
 {
@@ -735,6 +770,7 @@ _int_new_arena (size_t size)
     }
   a = h->ar_ptr = (mstate) (h + 1);
   malloc_init_state (a);
+  a->attached_threads = 1;
   /*a->next = NULL;*/
   a->system_mem = a->max_system_mem = h->size;
   arena_mem += h->size;
@@ -748,12 +784,19 @@ _int_new_arena (size_t size)
   set_head (top (a), (((char *) h + h->size) - ptr) | PREV_INUSE);
 
   LIBC_PROBE (memory_arena_new, 2, a, size);
+  mstate replaced_arena;
+  {
+    void *vptr = NULL;
+    replaced_arena = tsd_getspecific (arena_key, vptr);
+  }
   tsd_setspecific (arena_key, (void *) a);
   mutex_init (&a->mutex);
   (void) mutex_lock (&a->mutex);
 
   (void) mutex_lock (&list_lock);
 
+  detach_arena (replaced_arena);
+
   /* Add the new arena to the global list.  */
   a->next = main_arena.next;
   atomic_write_barrier ();
@@ -768,13 +811,24 @@ _int_new_arena (size_t size)
 static mstate
 get_free_list (void)
 {
+  void *vptr = NULL;
+  mstate replaced_arena = tsd_getspecific (arena_key, vptr);
   mstate result = free_list;
   if (result != NULL)
     {
       (void) mutex_lock (&list_lock);
       result = free_list;
       if (result != NULL)
-        free_list = result->next_free;
+	{
+	  free_list = result->next_free;
+
+	  /* Arenas on the free list are not attached to any thread.  */
+	  assert (result->attached_threads == 0);
+	  /* But the arena will now be attached to this thread.  */
+	  result->attached_threads = 1;
+
+	  detach_arena (replaced_arena);
+	}
       (void) mutex_unlock (&list_lock);
 
       if (result != NULL)
@@ -819,6 +873,15 @@ reused_arena (mstate avoid_arena)
   (void) mutex_lock (&result->mutex);
 
 out:
+  {
+    void *vptr = NULL;
+    mstate replaced_arena = tsd_getspecific (arena_key, vptr);
+    (void) mutex_lock (&list_lock);
+    detach_arena (replaced_arena);
+    ++result->attached_threads;
+    (void) mutex_unlock (&list_lock);
+  }
+
   LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
   tsd_setspecific (arena_key, (void *) result);
   next_to_use = result->next;
@@ -912,8 +975,14 @@ arena_thread_freeres (void)
   if (a != NULL)
     {
       (void) mutex_lock (&list_lock);
-      a->next_free = free_list;
-      free_list = a;
+      /* If this was the last attached thread for this arena, put the
+	 arena on the free list.  */
+      assert (a->attached_threads > 0);
+      if (--a->attached_threads == 0)
+	{
+	  a->next_free = free_list;
+	  free_list = a;
+	}
       (void) mutex_unlock (&list_lock);
     }
 }
diff -up glibc-2.21/malloc/malloc.c.bz1276112 glibc-2.21/malloc/malloc.c
--- glibc-2.21/malloc/malloc.c.bz1276112	2015-10-28 20:04:45.171449221 +0100
+++ glibc-2.21/malloc/malloc.c	2015-10-28 20:05:39.124761465 +0100
@@ -1700,9 +1700,15 @@ struct malloc_state
   /* Linked list */
   struct malloc_state *next;
 
-  /* Linked list for free arenas.  */
+  /* Linked list for free arenas.  Access to this field is serialized
+     by list_lock in arena.c.  */
   struct malloc_state *next_free;
 
+  /* Number of threads attached to this arena.  0 if the arena is on
+     the free list.  Access to this field is serialized by list_lock
+     in arena.c.  */
+  INTERNAL_SIZE_T attached_threads;
+
   /* Memory allocated from the system in this arena.  */
   INTERNAL_SIZE_T system_mem;
   INTERNAL_SIZE_T max_system_mem;
@@ -1746,7 +1752,8 @@ struct malloc_par
 static struct malloc_state main_arena =
 {
   .mutex = MUTEX_INITIALIZER,
-  .next = &main_arena
+  .next = &main_arena,
+  .attached_threads = 1
 };
 
 /* There is only one instance of the malloc parameters.  */