LCOV - code coverage report
Current view: top level - vppinfra - mem_dlmalloc.c (source / functions) Hit Total Coverage
Test: coverage-filtered.info Lines: 160 350 45.7 %
Date: 2023-07-05 22:20:52 Functions: 25 45 55.6 %

          Line data    Source code
       1             : /*
       2             :  * Copyright (c) 2015 Cisco and/or its affiliates.
       3             :  * Licensed under the Apache License, Version 2.0 (the "License");
       4             :  * you may not use this file except in compliance with the License.
       5             :  * You may obtain a copy of the License at:
       6             :  *
       7             :  *     http://www.apache.org/licenses/LICENSE-2.0
       8             :  *
       9             :  * Unless required by applicable law or agreed to in writing, software
      10             :  * distributed under the License is distributed on an "AS IS" BASIS,
      11             :  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      12             :  * See the License for the specific language governing permissions and
      13             :  * limitations under the License.
      14             :  */
      15             : 
      16             : #include <vppinfra/format.h>
      17             : #include <vppinfra/dlmalloc.h>
      18             : #include <vppinfra/os.h>
      19             : #include <vppinfra/lock.h>
      20             : #include <vppinfra/hash.h>
      21             : #include <vppinfra/elf_clib.h>
      22             : 
/* One unique allocation backtrace plus running totals for all live
   allocations sharing it.  Entries live in mheap_trace_main.traces and
   are recycled through trace_free_list when n_allocations drops to 0. */
typedef struct
{
  /* Address of callers: outer first, inner last.  Zero-filled before
     capture so the whole array can serve as the trace_by_callers hash
     key even when fewer than 12 frames were recorded. */
  uword callers[12];

  /* Count of allocations with this traceback. */
  u32 n_allocations;

  /* Count of bytes allocated with this traceback. */
  u32 n_bytes;

  /* Offset of this item -- one sample allocation kept for autopsy;
     overwritten on each new allocation with this traceback. */
  uword offset;
} mheap_trace_t;
      37             : 
/* Global state for the allocation tracer.  A single instance
   (mheap_trace_main below) serves all heaps; at most one heap is traced
   at a time (see mheap_trace). */
typedef struct
{
  /* Serializes access to every field below. */
  clib_spinlock_t lock;

  /* Vector of traces; slots are reused via trace_free_list. */
  mheap_trace_t *traces;

  /* Indices of free traces. */
  u32 *trace_free_list;

  /* Hash table mapping callers to trace index.  Keys are pointers into
     the traces vector, so they are rewritten whenever the vector moves. */
  uword *trace_by_callers;

  /* Hash table mapping mheap offset to trace index. */
  uword *trace_index_by_offset;

  /* So we can easily shut off current segment trace, if any */
  const clib_mem_heap_t *current_traced_mheap;

} mheap_trace_main_t;
      57             : 
/* The single, process-wide tracer instance. */
mheap_trace_main_t mheap_trace_main;

/* Per-thread re-entrancy guard: set while tracing code runs so that its
   own allocations are not traced in turn. */
static __thread int mheap_trace_thread_disable;
      61             : 
      62             : static void
      63           0 : mheap_get_trace_internal (const clib_mem_heap_t *heap, uword offset,
      64             :                           uword size)
      65             : {
      66           0 :   mheap_trace_main_t *tm = &mheap_trace_main;
      67             :   mheap_trace_t *t;
      68             :   uword i, n_callers, trace_index, *p;
      69             :   mheap_trace_t trace;
      70             : 
      71           0 :   if (heap != tm->current_traced_mheap || mheap_trace_thread_disable)
      72           0 :     return;
      73             : 
      74             :   /* Spurious Coverity warnings be gone. */
      75           0 :   clib_memset (&trace, 0, sizeof (trace));
      76             : 
      77           0 :   clib_spinlock_lock (&tm->lock);
      78             : 
      79             :   /* heap could have changed while we were waiting on the lock */
      80           0 :   if (heap != tm->current_traced_mheap)
      81           0 :     goto out;
      82             : 
      83             :   /* Turn off tracing for this thread to avoid embarrassment... */
      84           0 :   mheap_trace_thread_disable = 1;
      85             : 
      86             :   /* Skip our frame and mspace_get_aligned's frame */
      87           0 :   n_callers = clib_backtrace (trace.callers, ARRAY_LEN (trace.callers), 2);
      88           0 :   if (n_callers == 0)
      89           0 :     goto out;
      90             : 
      91           0 :   if (!tm->trace_by_callers)
      92           0 :     tm->trace_by_callers =
      93           0 :       hash_create_shmem (0, sizeof (trace.callers), sizeof (uword));
      94             : 
      95           0 :   p = hash_get_mem (tm->trace_by_callers, &trace.callers);
      96           0 :   if (p)
      97             :     {
      98           0 :       trace_index = p[0];
      99           0 :       t = tm->traces + trace_index;
     100             :     }
     101             :   else
     102             :     {
     103           0 :       i = vec_len (tm->trace_free_list);
     104           0 :       if (i > 0)
     105             :         {
     106           0 :           trace_index = tm->trace_free_list[i - 1];
     107           0 :           vec_set_len (tm->trace_free_list, i - 1);
     108             :         }
     109             :       else
     110             :         {
     111           0 :           mheap_trace_t *old_start = tm->traces;
     112           0 :           mheap_trace_t *old_end = vec_end (tm->traces);
     113             : 
     114           0 :           vec_add2 (tm->traces, t, 1);
     115             : 
     116           0 :           if (tm->traces != old_start)
     117             :             {
     118             :               hash_pair_t *p;
     119             :               mheap_trace_t *q;
     120             :             /* *INDENT-OFF* */
     121           0 :             hash_foreach_pair (p, tm->trace_by_callers,
     122             :             ({
     123             :               q = uword_to_pointer (p->key, mheap_trace_t *);
     124             :               ASSERT (q >= old_start && q < old_end);
     125             :               p->key = pointer_to_uword (tm->traces + (q - old_start));
     126             :             }));
     127             :             /* *INDENT-ON* */
     128             :             }
     129           0 :           trace_index = t - tm->traces;
     130             :         }
     131             : 
     132           0 :       t = tm->traces + trace_index;
     133           0 :       t[0] = trace;
     134           0 :       t->n_allocations = 0;
     135           0 :       t->n_bytes = 0;
     136           0 :       hash_set_mem (tm->trace_by_callers, t->callers, trace_index);
     137             :     }
     138             : 
     139           0 :   t->n_allocations += 1;
     140           0 :   t->n_bytes += size;
     141           0 :   t->offset = offset;                /* keep a sample to autopsy */
     142           0 :   hash_set (tm->trace_index_by_offset, offset, t - tm->traces);
     143             : 
     144           0 : out:
     145           0 :   mheap_trace_thread_disable = 0;
     146           0 :   clib_spinlock_unlock (&tm->lock);
     147             : }
     148             : 
     149             : static void
     150           0 : mheap_put_trace_internal (const clib_mem_heap_t *heap, uword offset,
     151             :                           uword size)
     152             : {
     153             :   mheap_trace_t *t;
     154             :   uword trace_index, *p;
     155           0 :   mheap_trace_main_t *tm = &mheap_trace_main;
     156             : 
     157           0 :   if (heap != tm->current_traced_mheap || mheap_trace_thread_disable)
     158           0 :     return;
     159             : 
     160           0 :   clib_spinlock_lock (&tm->lock);
     161             : 
     162             :   /* heap could have changed while we were waiting on the lock */
     163           0 :   if (heap != tm->current_traced_mheap)
     164           0 :     goto out;
     165             : 
     166             :   /* Turn off tracing for this thread for a moment */
     167           0 :   mheap_trace_thread_disable = 1;
     168             : 
     169           0 :   p = hash_get (tm->trace_index_by_offset, offset);
     170           0 :   if (!p)
     171           0 :     goto out;
     172             : 
     173           0 :   trace_index = p[0];
     174           0 :   hash_unset (tm->trace_index_by_offset, offset);
     175           0 :   ASSERT (trace_index < vec_len (tm->traces));
     176             : 
     177           0 :   t = tm->traces + trace_index;
     178           0 :   ASSERT (t->n_allocations > 0);
     179           0 :   ASSERT (t->n_bytes >= size);
     180           0 :   t->n_allocations -= 1;
     181           0 :   t->n_bytes -= size;
     182           0 :   if (t->n_allocations == 0)
     183             :     {
     184           0 :       hash_unset_mem (tm->trace_by_callers, t->callers);
     185           0 :       vec_add1 (tm->trace_free_list, trace_index);
     186           0 :       clib_memset (t, 0, sizeof (t[0]));
     187             :     }
     188             : 
     189           0 : out:
     190           0 :   mheap_trace_thread_disable = 0;
     191           0 :   clib_spinlock_unlock (&tm->lock);
     192             : }
     193             : 
     194             : void
     195           0 : mheap_get_trace (uword offset, uword size)
     196             : {
     197           0 :   mheap_get_trace_internal (clib_mem_get_heap (), offset, size);
     198           0 : }
     199             : 
     200             : void
     201           0 : mheap_put_trace (uword offset, uword size)
     202             : {
     203           0 :   mheap_put_trace_internal (clib_mem_get_heap (), offset, size);
     204           0 : }
     205             : 
     206             : always_inline void
     207           0 : mheap_trace_main_free (mheap_trace_main_t * tm)
     208             : {
     209           0 :   CLIB_SPINLOCK_ASSERT_LOCKED (&tm->lock);
     210           0 :   tm->current_traced_mheap = 0;
     211           0 :   vec_free (tm->traces);
     212           0 :   vec_free (tm->trace_free_list);
     213           0 :   hash_free (tm->trace_by_callers);
     214           0 :   hash_free (tm->trace_index_by_offset);
     215           0 :   mheap_trace_thread_disable = 0;
     216           0 : }
     217             : 
     218             : static clib_mem_heap_t *
     219        4774 : clib_mem_create_heap_internal (void *base, uword size,
     220             :                                clib_mem_page_sz_t log2_page_sz, int is_locked,
     221             :                                char *name)
     222             : {
     223             :   clib_mem_heap_t *h;
     224        4774 :   u8 flags = 0;
     225        4774 :   int sz = sizeof (clib_mem_heap_t);
     226             : 
     227        4774 :   if (base == 0)
     228             :     {
     229        2007 :       log2_page_sz = clib_mem_log2_page_size_validate (log2_page_sz);
     230        2007 :       size = round_pow2 (size, clib_mem_page_bytes (log2_page_sz));
     231        2007 :       base = clib_mem_vm_map_internal (0, log2_page_sz, size, -1, 0,
     232             :                                        "main heap");
     233             : 
     234        2007 :       if (base == CLIB_MEM_VM_MAP_FAILED)
     235           0 :         return 0;
     236             : 
     237        2007 :       flags = CLIB_MEM_HEAP_F_UNMAP_ON_DESTROY;
     238             :     }
     239             :   else
     240        2767 :     log2_page_sz = CLIB_MEM_PAGE_SZ_UNKNOWN;
     241             : 
     242        4774 :   if (is_locked)
     243        4774 :     flags |= CLIB_MEM_HEAP_F_LOCKED;
     244             : 
     245        4774 :   h = base;
     246        4774 :   h->base = base;
     247        4774 :   h->size = size;
     248        4774 :   h->log2_page_sz = log2_page_sz;
     249        4774 :   h->flags = flags;
     250        4774 :   sz = strlen (name);
     251        4774 :   strcpy (h->name, name);
     252        4774 :   sz = round_pow2 (sz + sizeof (clib_mem_heap_t), 16);
     253        4774 :   h->mspace = create_mspace_with_base (base + sz, size - sz, is_locked);
     254             : 
     255        4774 :   mspace_disable_expand (h->mspace);
     256             : 
     257        4774 :   clib_mem_poison (mspace_least_addr (h->mspace),
     258             :                    mspace_footprint (h->mspace));
     259             : 
     260        4774 :   return h;
     261             : }
     262             : 
/* Initialize CLIB heap based on memory/size given by user.
   Set memory to 0 and CLIB will try to allocate its own heap. */
static void *
clib_mem_init_internal (void *base, uword size,
                        clib_mem_page_sz_t log2_page_sz)
{
  clib_mem_heap_t *h;

  clib_mem_main_init ();

  /* Main heap is always created locked (thread safe). */
  h = clib_mem_create_heap_internal (base, size, log2_page_sz,
                                     1 /*is_locked */ , "main heap");

  /* Make the new heap the calling thread's current heap. */
  clib_mem_set_heap (h);

  if (mheap_trace_main.lock == 0)
    {
      /* clib_spinlock_init() dynamically allocates the spinlock in the current
       * per-cpu heap, but it is used for all traces across all heaps and
       * hence we can't really allocate it in the current per-cpu heap as it
       * could be destroyed later */
      static struct clib_spinlock_s mheap_trace_main_lock = {};
      mheap_trace_main.lock = &mheap_trace_main_lock;
    }

  return h;
}
     290             : 
     291             : __clib_export void *
     292         577 : clib_mem_init (void *memory, uword memory_size)
     293             : {
     294         577 :   return clib_mem_init_internal (memory, memory_size,
     295             :                                  CLIB_MEM_PAGE_SZ_DEFAULT);
     296             : }
     297             : 
     298             : __clib_export void *
     299         559 : clib_mem_init_with_page_size (uword memory_size,
     300             :                               clib_mem_page_sz_t log2_page_sz)
     301             : {
     302         559 :   return clib_mem_init_internal (0, memory_size, log2_page_sz);
     303             : }
     304             : 
     305             : __clib_export void *
     306          53 : clib_mem_init_thread_safe (void *memory, uword memory_size)
     307             : {
     308          53 :   return clib_mem_init_internal (memory, memory_size,
     309             :                                  CLIB_MEM_PAGE_SZ_DEFAULT);
     310             : }
     311             : 
     312             : __clib_export void
     313         559 : clib_mem_destroy (void)
     314             : {
     315         559 :   mheap_trace_main_t *tm = &mheap_trace_main;
     316         559 :   clib_mem_heap_t *heap = clib_mem_get_heap ();
     317             : 
     318         559 :   if (heap->mspace == tm->current_traced_mheap)
     319           0 :     mheap_trace (heap, 0);
     320             : 
     321         559 :   destroy_mspace (heap->mspace);
     322         559 :   clib_mem_vm_unmap (heap);
     323         559 : }
     324             : 
     325             : __clib_export u8 *
     326           0 : format_clib_mem_usage (u8 *s, va_list *va)
     327             : {
     328           0 :   int verbose = va_arg (*va, int);
     329           0 :   return format (s, "$$$$ heap at %llx verbose %d", clib_mem_get_heap (),
     330             :                  verbose);
     331             : }
     332             : 
     333             : /*
     334             :  * Magic decoder ring for mallinfo stats (ala dlmalloc):
     335             :  *
     336             :  * size_t arena;     / * Non-mmapped space allocated (bytes) * /
     337             :  * size_t ordblks;   / * Number of free chunks * /
     338             :  * size_t smblks;    / * Number of free fastbin blocks * /
     339             :  * size_t hblks;     / * Number of mmapped regions * /
     340             :  * size_t hblkhd;    / * Space allocated in mmapped regions (bytes) * /
     341             :  * size_t usmblks;   / * Maximum total allocated space (bytes) * /
     342             :  * size_t fsmblks;   / * Space in freed fastbin blocks (bytes) * /
     343             :  * size_t uordblks;  / * Total allocated space (bytes) * /
     344             :  * size_t fordblks;  / * Total free space (bytes) * /
     345             :  * size_t keepcost;  / * Top-most, releasable space (bytes) * /
     346             :  *
     347             :  */
     348             : 
     349             : u8 *
     350        3364 : format_msize (u8 * s, va_list * va)
     351             : {
     352        3364 :   uword a = va_arg (*va, uword);
     353             : 
     354        3364 :   if (a >= 1ULL << 30)
     355           0 :     s = format (s, "%.2fG", (((f64) a) / ((f64) (1ULL << 30))));
     356        3364 :   else if (a >= 1ULL << 20)
     357        2484 :     s = format (s, "%.2fM", (((f64) a) / ((f64) (1ULL << 20))));
     358         880 :   else if (a >= 1ULL << 10)
     359         880 :     s = format (s, "%.2fK", (((f64) a) / ((f64) (1ULL << 10))));
     360             :   else
     361           0 :     s = format (s, "%lld", a);
     362        3364 :   return s;
     363             : }
     364             : 
     365             : static int
     366           0 : mheap_trace_sort (const void *_t1, const void *_t2)
     367             : {
     368           0 :   const mheap_trace_t *t1 = _t1;
     369           0 :   const mheap_trace_t *t2 = _t2;
     370             :   word cmp;
     371             : 
     372           0 :   cmp = (word) t2->n_bytes - (word) t1->n_bytes;
     373           0 :   if (!cmp)
     374           0 :     cmp = (word) t2->n_allocations - (word) t1->n_allocations;
     375           0 :   return cmp;
     376             : }
     377             : 
     378             : u8 *
     379           0 : format_mheap_trace (u8 * s, va_list * va)
     380             : {
     381           0 :   mheap_trace_main_t *tm = va_arg (*va, mheap_trace_main_t *);
     382           0 :   int verbose = va_arg (*va, int);
     383           0 :   int have_traces = 0;
     384             :   int i;
     385           0 :   int n = 0;
     386             : 
     387           0 :   clib_spinlock_lock (&tm->lock);
     388           0 :   if (vec_len (tm->traces) > 0 &&
     389           0 :       clib_mem_get_heap () == tm->current_traced_mheap)
     390             :     {
     391           0 :       have_traces = 1;
     392             : 
     393             :       /* Make a copy of traces since we'll be sorting them. */
     394             :       mheap_trace_t *t, *traces_copy;
     395             :       u32 indent, total_objects_traced;
     396             : 
     397           0 :       traces_copy = vec_dup (tm->traces);
     398             : 
     399           0 :       qsort (traces_copy, vec_len (traces_copy), sizeof (traces_copy[0]),
     400             :              mheap_trace_sort);
     401             : 
     402           0 :       total_objects_traced = 0;
     403           0 :       s = format (s, "\n");
     404           0 :       vec_foreach (t, traces_copy)
     405             :       {
     406             :         /* Skip over free elements. */
     407           0 :         if (t->n_allocations == 0)
     408           0 :           continue;
     409             : 
     410           0 :         total_objects_traced += t->n_allocations;
     411             : 
     412             :         /* When not verbose only report the 50 biggest allocations */
     413           0 :         if (!verbose && n >= 50)
     414           0 :           continue;
     415           0 :         n++;
     416             : 
     417           0 :         if (t == traces_copy)
     418           0 :           s = format (s, "%=9s%=9s %=10s Traceback\n", "Bytes", "Count",
     419             :                       "Sample");
     420           0 :         s = format (s, "%9d%9d %p", t->n_bytes, t->n_allocations, t->offset);
     421           0 :         indent = format_get_indent (s);
     422           0 :         for (i = 0; i < ARRAY_LEN (t->callers) && t->callers[i]; i++)
     423             :           {
     424           0 :             if (i > 0)
     425           0 :               s = format (s, "%U", format_white_space, indent);
     426             : #if defined(CLIB_UNIX) && !defined(__APPLE__)
     427             :             /* $$$$ does this actually work? */
     428             :             s =
     429           0 :               format (s, " %U\n", format_clib_elf_symbol_with_address,
     430             :                       t->callers[i]);
     431             : #else
     432             :             s = format (s, " %p\n", t->callers[i]);
     433             : #endif
     434             :           }
     435             :       }
     436             : 
     437           0 :       s = format (s, "%d total traced objects\n", total_objects_traced);
     438             : 
     439           0 :       vec_free (traces_copy);
     440             :     }
     441           0 :   clib_spinlock_unlock (&tm->lock);
     442           0 :   if (have_traces == 0)
     443           0 :     s = format (s, "no traced allocations\n");
     444             : 
     445           0 :   return s;
     446             : }
     447             : 
     448             : __clib_export u8 *
     449         841 : format_clib_mem_heap (u8 * s, va_list * va)
     450             : {
     451         841 :   clib_mem_heap_t *heap = va_arg (*va, clib_mem_heap_t *);
     452         841 :   int verbose = va_arg (*va, int);
     453             :   struct dlmallinfo mi;
     454         841 :   mheap_trace_main_t *tm = &mheap_trace_main;
     455         841 :   u32 indent = format_get_indent (s) + 2;
     456             : 
     457         841 :   if (heap == 0)
     458           0 :     heap = clib_mem_get_heap ();
     459             : 
     460         841 :   mi = mspace_mallinfo (heap->mspace);
     461             : 
     462         841 :   s = format (s, "base %p, size %U",
     463             :               heap->base, format_memory_size, heap->size);
     464             : 
     465             : #define _(i,v,str) \
     466             :   if (heap->flags & CLIB_MEM_HEAP_F_##v) s = format (s, ", %s", str);
     467         841 :   foreach_clib_mem_heap_flag;
     468             : #undef _
     469             : 
     470         841 :   s = format (s, ", name '%s'", heap->name);
     471             : 
     472         841 :   if (heap->log2_page_sz != CLIB_MEM_PAGE_SZ_UNKNOWN)
     473             :     {
     474             :       clib_mem_page_stats_t stats;
     475         841 :       clib_mem_get_page_stats (heap->base, heap->log2_page_sz,
     476         841 :                                heap->size >> heap->log2_page_sz, &stats);
     477         841 :       s = format (s, "\n%U%U", format_white_space, indent,
     478             :                   format_clib_mem_page_stats, &stats);
     479             :     }
     480             : 
     481         841 :   s = format (s, "\n%Utotal: %U, used: %U, free: %U, trimmable: %U",
     482             :               format_white_space, indent,
     483             :               format_msize, mi.arena,
     484             :               format_msize, mi.uordblks,
     485             :               format_msize, mi.fordblks, format_msize, mi.keepcost);
     486         841 :   if (verbose > 0)
     487             :     {
     488           0 :       s = format (s, "\n%Ufree chunks %llu free fastbin blks %llu",
     489             :                   format_white_space, indent + 2, mi.ordblks, mi.smblks);
     490           0 :       s = format (s, "\n%Umax total allocated %U",
     491             :                   format_white_space, indent + 2, format_msize, mi.usmblks);
     492             :     }
     493             : 
     494         841 :   if (heap->flags & CLIB_MEM_HEAP_F_TRACED)
     495           0 :     s = format (s, "\n%U", format_mheap_trace, tm, verbose);
     496         841 :   return s;
     497             : }
     498             : 
     499             : __clib_export __clib_flatten void
     500        2406 : clib_mem_get_heap_usage (clib_mem_heap_t *heap, clib_mem_usage_t *usage)
     501             : {
     502        2406 :   struct dlmallinfo mi = mspace_mallinfo (heap->mspace);
     503             : 
     504        2406 :   usage->bytes_total = mi.arena; /* non-mmapped space allocated from system */
     505        2406 :   usage->bytes_used = mi.uordblks;       /* total allocated space */
     506        2406 :   usage->bytes_free = mi.fordblks;       /* total free space */
     507        2406 :   usage->bytes_used_mmap = mi.hblkhd;            /* space in mmapped regions */
     508        2406 :   usage->bytes_max = mi.usmblks;         /* maximum total allocated space */
     509        2406 :   usage->bytes_free_reclaimed = mi.ordblks; /* number of free chunks */
     510        2406 :   usage->bytes_overhead = mi.keepcost; /* releasable (via malloc_trim) space */
     511             : 
     512             :   /* Not supported */
     513        2406 :   usage->bytes_used_sbrk = 0;
     514        2406 :   usage->object_count = 0;
     515        2406 : }
     516             : 
     517             : /* Call serial number for debugger breakpoints. */
     518             : uword clib_mem_validate_serial = 0;
     519             : 
     520             : __clib_export void
     521           0 : mheap_trace (clib_mem_heap_t * h, int enable)
     522             : {
     523           0 :   mheap_trace_main_t *tm = &mheap_trace_main;
     524             : 
     525           0 :   clib_spinlock_lock (&tm->lock);
     526             : 
     527           0 :   if (tm->current_traced_mheap != 0 && tm->current_traced_mheap != h)
     528             :     {
     529           0 :       clib_warning ("tracing already enabled for another heap, ignoring");
     530           0 :       goto out;
     531             :     }
     532             : 
     533           0 :   if (enable)
     534             :     {
     535           0 :       h->flags |= CLIB_MEM_HEAP_F_TRACED;
     536           0 :       tm->current_traced_mheap = h;
     537             :     }
     538             :   else
     539             :     {
     540           0 :       h->flags &= ~CLIB_MEM_HEAP_F_TRACED;
     541           0 :       mheap_trace_main_free (&mheap_trace_main);
     542             :     }
     543             : 
     544           0 : out:
     545           0 :   clib_spinlock_unlock (&tm->lock);
     546           0 : }
     547             : 
     548             : __clib_export void
     549           0 : clib_mem_trace (int enable)
     550             : {
     551           0 :   void *current_heap = clib_mem_get_heap ();
     552           0 :   mheap_trace (current_heap, enable);
     553           0 : }
     554             : 
     555             : int
     556           0 : clib_mem_is_traced (void)
     557             : {
     558           0 :   clib_mem_heap_t *h = clib_mem_get_heap ();
     559           0 :   return (h->flags &= CLIB_MEM_HEAP_F_TRACED) != 0;
     560             : }
     561             : 
     562             : __clib_export uword
     563           0 : clib_mem_trace_enable_disable (uword enable)
     564             : {
     565           0 :   uword rv = !mheap_trace_thread_disable;
     566           0 :   mheap_trace_thread_disable = !enable;
     567           0 :   return rv;
     568             : }
     569             : 
     570             : __clib_export clib_mem_heap_t *
     571        3585 : clib_mem_create_heap (void *base, uword size, int is_locked, char *fmt, ...)
     572             : {
     573        3585 :   clib_mem_page_sz_t log2_page_sz = clib_mem_get_log2_page_size ();
     574             :   clib_mem_heap_t *h;
     575             :   char *name;
     576        3585 :   u8 *s = 0;
     577             : 
     578        3585 :   if (fmt == 0)
     579             :     {
     580           0 :       name = "";
     581             :     }
     582        3585 :   else if (strchr (fmt, '%'))
     583             :     {
     584             :       va_list va;
     585           0 :       va_start (va, fmt);
     586           0 :       s = va_format (0, fmt, &va);
     587           0 :       vec_add1 (s, 0);
     588           0 :       va_end (va);
     589           0 :       name = (char *) s;
     590             :     }
     591             :   else
     592        3585 :     name = fmt;
     593             : 
     594        3585 :   h = clib_mem_create_heap_internal (base, size, log2_page_sz, is_locked,
     595             :                                      name);
     596        3585 :   vec_free (s);
     597        3585 :   return h;
     598             : }
     599             : 
     600             : __clib_export void
     601         911 : clib_mem_destroy_heap (clib_mem_heap_t * h)
     602             : {
     603         911 :   mheap_trace_main_t *tm = &mheap_trace_main;
     604             : 
     605         911 :   if (h->mspace == tm->current_traced_mheap)
     606           0 :     mheap_trace (h, 0);
     607             : 
     608         911 :   destroy_mspace (h->mspace);
     609         911 :   if (h->flags & CLIB_MEM_HEAP_F_UNMAP_ON_DESTROY)
     610         809 :     clib_mem_vm_unmap (h->base);
     611         911 : }
     612             : 
     613             : __clib_export __clib_flatten uword
     614         211 : clib_mem_get_heap_free_space (clib_mem_heap_t *h)
     615             : {
     616         211 :   struct dlmallinfo dlminfo = mspace_mallinfo (h->mspace);
     617         211 :   return dlminfo.fordblks;
     618             : }
     619             : 
     620             : __clib_export __clib_flatten void *
     621      252830 : clib_mem_get_heap_base (clib_mem_heap_t *h)
     622             : {
     623      252830 :   return h->base;
     624             : }
     625             : 
     626             : __clib_export __clib_flatten uword
     627          24 : clib_mem_get_heap_size (clib_mem_heap_t *heap)
     628             : {
     629          24 :   return heap->size;
     630             : }
     631             : 
/* Memory allocator which may call os_out_of_memory() if it fails */
static inline void *
clib_mem_heap_alloc_inline (void *heap, uword size, uword align,
                            int os_out_of_memory_on_failure)
{
  /* HEAP == 0 means "use the calling thread's per-cpu heap". */
  clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
  void *p;

  /* Never align below the platform minimum. */
  align = clib_max (CLIB_MEM_MIN_ALIGN, align);

  p = mspace_memalign (h->mspace, align, size);

  if (PREDICT_FALSE (0 == p))
    {
      if (os_out_of_memory_on_failure)
        os_out_of_memory ();
      return 0;
    }

  /* Record traced allocations using the usable size reported by
     clib_mem_size rather than the requested SIZE. */
  if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
    mheap_get_trace_internal (h, pointer_to_uword (p), clib_mem_size (p));

  clib_mem_unpoison (p, size);
  return p;
}
     657             : 
     658             : /* Memory allocator which calls os_out_of_memory() when it fails */
     659             : __clib_export __clib_flatten void *
     660    45876900 : clib_mem_alloc (uword size)
     661             : {
     662    45876900 :   return clib_mem_heap_alloc_inline (0, size, CLIB_MEM_MIN_ALIGN,
     663             :                                      /* os_out_of_memory */ 1);
     664             : }
     665             : 
     666             : __clib_export __clib_flatten void *
     667      889762 : clib_mem_alloc_aligned (uword size, uword align)
     668             : {
     669      889762 :   return clib_mem_heap_alloc_inline (0, size, align,
     670             :                                      /* os_out_of_memory */ 1);
     671             : }
     672             : 
     673             : /* Memory allocator which calls os_out_of_memory() when it fails */
     674             : __clib_export __clib_flatten void *
     675           0 : clib_mem_alloc_or_null (uword size)
     676             : {
     677           0 :   return clib_mem_heap_alloc_inline (0, size, CLIB_MEM_MIN_ALIGN,
     678             :                                      /* os_out_of_memory */ 0);
     679             : }
     680             : 
     681             : __clib_export __clib_flatten void *
     682           0 : clib_mem_alloc_aligned_or_null (uword size, uword align)
     683             : {
     684           0 :   return clib_mem_heap_alloc_inline (0, size, align,
     685             :                                      /* os_out_of_memory */ 0);
     686             : }
     687             : 
     688             : __clib_export __clib_flatten void *
     689           0 : clib_mem_heap_alloc (void *heap, uword size)
     690             : {
     691           0 :   return clib_mem_heap_alloc_inline (heap, size, CLIB_MEM_MIN_ALIGN,
     692             :                                      /* os_out_of_memory */ 1);
     693             : }
     694             : 
     695             : __clib_export __clib_flatten void *
     696    49023500 : clib_mem_heap_alloc_aligned (void *heap, uword size, uword align)
     697             : {
     698    49023500 :   return clib_mem_heap_alloc_inline (heap, size, align,
     699             :                                      /* os_out_of_memory */ 1);
     700             : }
     701             : 
     702             : __clib_export __clib_flatten void *
     703           0 : clib_mem_heap_alloc_or_null (void *heap, uword size)
     704             : {
     705           0 :   return clib_mem_heap_alloc_inline (heap, size, CLIB_MEM_MIN_ALIGN,
     706             :                                      /* os_out_of_memory */ 0);
     707             : }
     708             : 
     709             : __clib_export __clib_flatten void *
     710           0 : clib_mem_heap_alloc_aligned_or_null (void *heap, uword size, uword align)
     711             : {
     712           0 :   return clib_mem_heap_alloc_inline (heap, size, align,
     713             :                                      /* os_out_of_memory */ 0);
     714             : }
     715             : 
     716             : __clib_export __clib_flatten void *
     717    23318400 : clib_mem_heap_realloc_aligned (void *heap, void *p, uword new_size,
     718             :                                uword align)
     719             : {
     720             :   uword old_alloc_size;
     721    23318400 :   clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
     722             :   void *new;
     723             : 
     724    23318300 :   ASSERT (count_set_bits (align) == 1);
     725             : 
     726    23318300 :   old_alloc_size = p ? mspace_usable_size (p) : 0;
     727             : 
     728    23318300 :   if (new_size == old_alloc_size)
     729           0 :     return p;
     730             : 
     731    69956500 :   if (p && pointer_is_aligned (p, align) &&
     732    23318400 :       mspace_realloc_in_place (h->mspace, p, new_size))
     733             :     {
     734     8639170 :       clib_mem_unpoison (p, new_size);
     735     8639160 :       if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
     736             :         {
     737           0 :           mheap_put_trace_internal (h, pointer_to_uword (p), old_alloc_size);
     738           0 :           mheap_get_trace_internal (h, pointer_to_uword (p),
     739             :                                     clib_mem_size (p));
     740             :         }
     741             :     }
     742             :   else
     743             :     {
     744    14680600 :       new = clib_mem_heap_alloc_inline (h, new_size, align, 1);
     745             : 
     746    14680700 :       clib_mem_unpoison (new, new_size);
     747    14680700 :       if (old_alloc_size)
     748             :         {
     749    14680700 :           clib_mem_unpoison (p, old_alloc_size);
     750    14680700 :           clib_memcpy_fast (new, p, clib_min (new_size, old_alloc_size));
     751    14680500 :           clib_mem_heap_free (h, p);
     752             :         }
     753    14680700 :       p = new;
     754             :     }
     755             : 
     756    23319900 :   return p;
     757             : }
     758             : 
     759             : __clib_export __clib_flatten void *
     760           0 : clib_mem_heap_realloc (void *heap, void *p, uword new_size)
     761             : {
     762           0 :   return clib_mem_heap_realloc_aligned (heap, p, new_size, CLIB_MEM_MIN_ALIGN);
     763             : }
     764             : 
     765             : __clib_export __clib_flatten void *
     766           0 : clib_mem_realloc_aligned (void *p, uword new_size, uword align)
     767             : {
     768           0 :   return clib_mem_heap_realloc_aligned (0, p, new_size, align);
     769             : }
     770             : 
     771             : __clib_export __clib_flatten void *
     772     6674670 : clib_mem_realloc (void *p, uword new_size)
     773             : {
     774     6674670 :   return clib_mem_heap_realloc_aligned (0, p, new_size, CLIB_MEM_MIN_ALIGN);
     775             : }
     776             : 
     777             : __clib_export __clib_flatten uword
     778    64261400 : clib_mem_heap_is_heap_object (void *heap, void *p)
     779             : {
     780    64261400 :   clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
     781    64261400 :   return mspace_is_heap_object (h->mspace, p);
     782             : }
     783             : 
     784             : __clib_export __clib_flatten uword
     785           0 : clib_mem_is_heap_object (void *p)
     786             : {
     787           0 :   return clib_mem_heap_is_heap_object (0, p);
     788             : }
     789             : 
     790             : __clib_export __clib_flatten void
     791    64099700 : clib_mem_heap_free (void *heap, void *p)
     792             : {
     793    64099700 :   clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
     794    64099700 :   uword size = clib_mem_size (p);
     795             : 
     796             :   /* Make sure object is in the correct heap. */
     797    64099600 :   ASSERT (clib_mem_heap_is_heap_object (h, p));
     798             : 
     799    64099600 :   if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
     800           0 :     mheap_put_trace_internal (h, pointer_to_uword (p), size);
     801    64099600 :   clib_mem_poison (p, clib_mem_size (p));
     802             : 
     803    64099500 :   mspace_free (h->mspace, p);
     804    64100100 : }
     805             : 
     806             : __clib_export __clib_flatten void
     807    23692400 : clib_mem_free (void *p)
     808             : {
     809    23692400 :   clib_mem_heap_free (0, p);
     810    23692400 : }
     811             : 
     812             : __clib_export __clib_flatten uword
     813  1578530000 : clib_mem_size (void *p)
     814             : {
     815  1578530000 :   return mspace_usable_size (p);
     816             : }
     817             : 
     818             : __clib_export void
     819      262669 : clib_mem_free_s (void *p)
     820             : {
     821      262669 :   uword size = clib_mem_size (p);
     822      262669 :   clib_mem_unpoison (p, size);
     823      262669 :   memset_s_inline (p, size, 0, size);
     824      262669 :   clib_mem_free (p);
     825      262669 : }

Generated by: LCOV version 1.14