vppinfra: vector allocator rework

- support of in-place growth of vectors (if there is available space next
  to the existing allocation) - drops the need for alloc_aligned_at_offset
  from the memory allocator, which allows easier swap to a different memory
  allocator and reduces malloc overhead
- rework of pool and vec macros to inline functions to improve debuggability
- fix alignment - in many cases macros were not using the native alignment
  of the particular datatype. Explicitly setting alignment with the
  XXX_aligned() versions of the macros is not needed anymore in > 99% of
  cases
- fix ASAN usage
- avoid use of vectors of void - this was the root cause of several bugs
  found in vec_* and pool_* functions where sizeof() was used on void
  instead of the real vector data type
- introduce a minimal alignment, currently 8 bytes; vectors will always be
  aligned to at least that value (the underlying allocator actually always
  provides 16-byte aligned allocations)

Type: improvement
Change-Id: I20f4b081bb13bbf7bc0ace85cc4e301787f12fdf
Signed-off-by: Damjan Marion <damarion@cisco.com>
Committed by: Florin Coras
Parent: 9539647b89
Commit: 299571aca3
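The message above describes behaviour rather than API, so a short illustration may help. The sketch below is not part of the patch; the helper names are hypothetical, and only APIs that appear later in this diff (vec_max_len(), clib_mem_realloc_aligned(), CLIB_MEM_MIN_ALIGN) are assumed.

#include <vppinfra/mem.h>
#include <vppinfra/vec.h>

/* Hypothetical helpers, for illustration only. */
static u32 *
append_indices (u32 *v, u32 n)
{
  /* vec_add1() now uses the element type's native alignment, so the
   * vec_add1_aligned() variant is rarely needed after this change. */
  for (u32 i = 0; i < n; i++)
    vec_add1 (v, i);

  /* vec_max_len() reports how many elements fit in the current
   * allocation; growing within that bound does not move the vector. */
  ASSERT (vec_len (v) <= vec_max_len (v));
  return v;
}

static void *
grow_buffer (void *p, uword new_size)
{
  /* The reworked realloc first tries to extend the allocation in place
   * and only falls back to alloc + copy + free when that fails.
   * CLIB_MEM_MIN_ALIGN (8 bytes) is the new minimal alignment. */
  return clib_mem_realloc_aligned (p, new_size, CLIB_MEM_MIN_ALIGN);
}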
@@ -42,27 +42,6 @@ get_stats_epoch ()
return sm->shared_header->epoch;
}

/*
 * Return the maximum element count of the vector based on its allocated
 * memory.
 */
static int
get_vec_mem_size (void *v, uword data_size)
{
vlib_stats_segment_t *sm = vlib_stats_get_segment ();

if (v == 0)
return 0;

uword aligned_header_bytes = vec_header_bytes (0);
void *p = v - aligned_header_bytes;
void *oldheap = clib_mem_set_heap (sm->heap);
int mem_size = (clib_mem_size (p) - aligned_header_bytes) / data_size;
clib_mem_set_heap (oldheap);

return mem_size;
}

/* number of times to repeat the counter expand tests */
#define EXPAND_TEST_ROUNDS 3

@@ -90,8 +69,7 @@ test_simple_counter_expand (vlib_main_t *vm)
// Check how many elements fit into the counter vector without expanding
// that. The next validate calls should not increase the stats segment
// epoch.
int mem_size = get_vec_mem_size (counter.counters[0],
sizeof ((counter.counters[0])[0]));
int mem_size = vec_max_len (counter.counters[0]);
for (index = 1; index <= mem_size - 1; index++)
{
vlib_validate_simple_counter (&counter, index);

@@ -138,8 +116,7 @@ test_combined_counter_expand (vlib_main_t *vm)
// Check how many elements fit into the counter vector without expanding
// that. The next validate calls should not increase the stats segment
// epoch.
int mem_size = get_vec_mem_size (counter.counters[0],
sizeof ((counter.counters[0])[0]));
int mem_size = vec_max_len (counter.counters[0]);
for (index = 1; index <= mem_size - 1; index++)
{
vlib_validate_combined_counter (&counter, index);
@@ -43,23 +43,6 @@ svm_mem_alloc (svm_region_t * rp, uword size)
return (rv);
}

static inline void *
svm_mem_alloc_aligned_at_offset (svm_region_t * rp,
uword size, uword align, uword offset)
{
clib_mem_heap_t *oldheap;
ASSERT (rp->flags & SVM_FLAGS_MHEAP);
u8 *rv;

pthread_mutex_lock (&rp->mutex);
oldheap = clib_mem_set_heap (rp->data_heap);
rv = clib_mem_alloc_aligned_at_offset (size, align, offset,
1 /* yes, call os_out_of_memory */ );
clib_mem_set_heap (oldheap);
pthread_mutex_unlock (&rp->mutex);
return (rv);
}

static inline void
svm_mem_free (svm_region_t * rp, void *ptr)
{
@@ -274,7 +274,7 @@ typedef struct vlib_node_t
u32 runtime_index;

/* Runtime data for this node. */
void *runtime_data;
u8 *runtime_data;

/* Node flags. */
u16 flags;
@@ -856,11 +856,9 @@ vlib_process_signal_event_helper (vlib_node_main_t * nm,

l = vec_len (data_vec);

data_vec = _vec_resize (data_vec,
/* length_increment */ n_data_elts,
/* total size after increment */
(l + n_data_elts) * n_data_elt_bytes,
/* header_bytes */ 0, /* data_align */ 0);
data_vec =
_vec_realloc (data_vec, l + n_data_elts, n_data_elt_bytes,
/* header_bytes */ 0, /* data_align */ 0, /* heap */ 0);

p->pending_event_data_by_type_index[t] = data_vec;
data_to_be_written_by_caller = data_vec + l * n_data_elt_bytes;
@@ -69,7 +69,7 @@ realloc(void *p, size_t size)
if (!check_vpp_heap ())
return __libc_realloc (p, size);

return clib_mem_realloc (p, size, clib_mem_size (p));
return clib_mem_realloc (p, size);
}

int
@@ -3420,7 +3420,7 @@ void* dlcalloc(size_t n_elements, size_t elem_size) {
/* ------------ Internal support for realloc, memalign, etc -------------- */

/* Try to realloc; only in-place unless can_move true */
static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb,
static CLIB_NOSANITIZE_ADDR mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb,
int can_move) {
mchunkptr newp = 0;
size_t oldsize = chunksize(p);

@@ -4118,7 +4118,7 @@ void mspace_get_address_and_size (mspace msp, char **addrp, size_t *sizep)
*sizep = this_seg->size;
}

CLIB_NOSANITIZE_ADDR __clib_export
CLIB_NOSANITIZE_ADDR
int mspace_is_heap_object (mspace msp, void *p)
{
msegment *this_seg;

@@ -4185,7 +4185,7 @@ int mspace_is_traced (mspace msp)
return 0;
}

CLIB_NOSANITIZE_ADDR __clib_export
CLIB_NOSANITIZE_ADDR
void* mspace_get_aligned (mspace msp,
unsigned long n_user_data_bytes,
unsigned long align,

@@ -4265,7 +4265,7 @@ void* mspace_get_aligned (mspace msp,
return (void *) searchp;
}

CLIB_NOSANITIZE_ADDR __clib_export
CLIB_NOSANITIZE_ADDR
void mspace_put (mspace msp, void *p_arg)
{
char *object_header;

@@ -4315,7 +4315,7 @@ void mspace_put_no_offset (mspace msp, void *p_arg)
mspace_free (msp, p_arg);
}

CLIB_NOSANITIZE_ADDR __clib_export
CLIB_NOSANITIZE_ADDR
size_t mspace_usable_size_with_delta (const void *p)
{
size_t usable_size;

@@ -4623,6 +4623,7 @@ void* mspace_realloc(mspace msp, void* oldmem, size_t bytes) {
return mem;
}

CLIB_NOSANITIZE_ADDR
void* mspace_realloc_in_place(mspace msp, void* oldmem, size_t bytes) {
void* mem = 0;
if (oldmem != 0) {

@@ -4655,6 +4656,7 @@ void* mspace_realloc_in_place(mspace msp, void* oldmem, size_t bytes) {
return mem;
}

CLIB_NOSANITIZE_ADDR
void* mspace_memalign(mspace msp, size_t alignment, size_t bytes) {
mstate ms = (mstate)msp;
if (!ok_magic(ms)) {
@@ -1447,6 +1447,8 @@ DLMALLOC_EXPORT int mspace_trim(mspace msp, size_t pad);
*/
DLMALLOC_EXPORT int mspace_mallopt(int, int);

DLMALLOC_EXPORT void* mspace_realloc_in_place (mspace msp, void *oldmem, size_t bytes);

DLMALLOC_EXPORT void* mspace_get_aligned (mspace msp,
unsigned long n_user_data_bytes,
unsigned long align,
@@ -967,11 +967,9 @@ elf_get_section_contents (elf_main_t * em,
if (vec_len (s->contents) > 0)
{
/* Make vector copy of contents with given element size. */
result = _vec_resize (result,
vec_len (s->contents) / elt_size,
vec_len (s->contents),
/* header_bytes */ 0,
/* align */ 0);
result =
_vec_realloc (result, vec_len (s->contents) / elt_size, elt_size,
/* header_bytes */ 0, /* align */ 0, 0);
clib_memcpy (result, s->contents, vec_len (s->contents));
}
@@ -285,9 +285,7 @@ set_indirect (void *v, hash_pair_indirect_t * pi, uword key,
new_len = len + 1;
if (new_len * hash_pair_bytes (h) > (1ULL << log2_bytes))
{
pi->pairs = clib_mem_realloc (pi->pairs,
1ULL << (log2_bytes + 1),
1ULL << log2_bytes);
pi->pairs = clib_mem_realloc (pi->pairs, 1ULL << (log2_bytes + 1));
log2_bytes++;
}

@@ -560,13 +558,8 @@ _hash_create (uword elts, hash_t * h_user)
if (h_user)
log2_pair_size = h_user->log2_pair_size;

v = _vec_resize ((void *) 0,
/* vec len: */ elts,
/* data bytes: */
(elts << log2_pair_size) * sizeof (hash_pair_t),
/* header bytes: */
sizeof (h[0]),
/* alignment */ sizeof (hash_pair_t));
v = _vec_realloc (0, elts, (1 << log2_pair_size) * sizeof (hash_pair_t),
sizeof (h[0]), sizeof (hash_pair_t), 0);
h = hash_header (v);

if (h_user)
@@ -422,10 +422,8 @@ _heap_alloc (void *v,

h = heap_header (v);
if (!v || !(h->flags & HEAP_IS_STATIC))
v = _vec_resize (v,
align_size,
(offset + align_size) * elt_bytes,
sizeof (h[0]), HEAP_DATA_ALIGN);
v = _vec_realloc (v, offset + align_size, elt_bytes, sizeof (h[0]),
HEAP_DATA_ALIGN, 0);
else
_vec_len (v) += align_size;
@@ -163,12 +163,6 @@ heap_header (void *v)
return vec_header (v);
}

always_inline uword
heap_header_bytes ()
{
return vec_header_bytes (sizeof (heap_header_t));
}

always_inline void
heap_dup_header (heap_header_t * old, heap_header_t * new)
{

@@ -198,10 +192,8 @@ _heap_dup (void *v_old, uword v_bytes)
if (!v_old)
return v_old;

v_new = 0;
v_new =
_vec_resize (v_new, _vec_len (v_old), v_bytes, sizeof (heap_header_t),
HEAP_DATA_ALIGN);
v_new = _vec_realloc (0, _vec_len (v_old), 1, sizeof (heap_header_t),
HEAP_DATA_ALIGN, 0);
h_new = heap_header (v_new);
heap_dup_header (h_old, h_new);
clib_memcpy_fast (v_new, v_old, v_bytes);

@@ -220,9 +212,8 @@ uword heap_bytes (void *v);
always_inline void *
_heap_new (u32 len, u32 n_elt_bytes)
{
void *v = _vec_resize ((void *) 0, len, (uword) len * n_elt_bytes,
sizeof (heap_header_t),
HEAP_DATA_ALIGN);
void *v = _vec_realloc ((void *) 0, len, n_elt_bytes, sizeof (heap_header_t),
HEAP_DATA_ALIGN, 0);
heap_header (v)->elt_bytes = n_elt_bytes;
return v;
}

@@ -249,27 +240,6 @@ heap_get_max_len (void *v)
return v ? heap_header (v)->max_len : 0;
}

/* Create fixed size heap with given block of memory. */
always_inline void *
heap_create_from_memory (void *memory, uword max_len, uword elt_bytes)
{
heap_header_t *h;
void *v;

if (max_len * elt_bytes < sizeof (h[0]))
return 0;

h = memory;
clib_memset (h, 0, sizeof (h[0]));
h->max_len = max_len;
h->elt_bytes = elt_bytes;
h->flags = HEAP_IS_STATIC;

v = (void *) (memory + heap_header_bytes ());
_vec_len (v) = 0;
return v;
}

/* Execute BODY for each allocated heap element. */
#define heap_foreach(var,len,heap,body) \
do { \
@@ -53,6 +53,8 @@
#define CLIB_MAX_NUMAS 16
#define CLIB_MEM_VM_MAP_FAILED ((void *) ~0)
#define CLIB_MEM_ERROR (-1)
#define CLIB_MEM_LOG2_MIN_ALIGN (3)
#define CLIB_MEM_MIN_ALIGN (1 << CLIB_MEM_LOG2_MIN_ALIGN)

typedef enum
{

@@ -93,9 +95,10 @@ typedef struct _clib_mem_vm_map_hdr
struct _clib_mem_vm_map_hdr *prev, *next;
} clib_mem_vm_map_hdr_t;

#define foreach_clib_mem_heap_flag \
_(0, LOCKED, "locked") \
_(1, UNMAP_ON_DESTROY, "unmap-on-destroy")
#define foreach_clib_mem_heap_flag \
_ (0, LOCKED, "locked") \
_ (1, UNMAP_ON_DESTROY, "unmap-on-destroy") \
_ (2, TRACED, "traced")

typedef enum
{
@@ -213,77 +216,13 @@ clib_mem_set_thread_index (void)
ASSERT (__os_thread_index > 0);
}

always_inline uword
clib_mem_size_nocheck (void *p)
{
size_t mspace_usable_size_with_delta (const void *p);
return mspace_usable_size_with_delta (p);
}

/* Memory allocator which may call os_out_of_memory() if it fails */
always_inline void *
clib_mem_alloc_aligned_at_offset (uword size, uword align, uword align_offset,
int os_out_of_memory_on_failure)
{
void *mspace_get_aligned (void *msp, unsigned long n_user_data_bytes,
unsigned long align, unsigned long align_offset);
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
void *p;

if (align_offset > align)
{
if (align > 0)
align_offset %= align;
else
align_offset = align;
}

p = mspace_get_aligned (h->mspace, size, align, align_offset);

if (PREDICT_FALSE (0 == p))
{
if (os_out_of_memory_on_failure)
os_out_of_memory ();
return 0;
}

CLIB_MEM_UNPOISON (p, size);
return p;
}

/* Memory allocator which calls os_out_of_memory() when it fails */
always_inline void *
clib_mem_alloc (uword size)
{
return clib_mem_alloc_aligned_at_offset (size, /* align */ 1,
/* align_offset */ 0,
/* os_out_of_memory */ 1);
}

always_inline void *
clib_mem_alloc_aligned (uword size, uword align)
{
return clib_mem_alloc_aligned_at_offset (size, align, /* align_offset */ 0,
/* os_out_of_memory */ 1);
}

/* Memory allocator which calls os_out_of_memory() when it fails */
always_inline void *
clib_mem_alloc_or_null (uword size)
{
return clib_mem_alloc_aligned_at_offset (size, /* align */ 1,
/* align_offset */ 0,
/* os_out_of_memory */ 0);
}

always_inline void *
clib_mem_alloc_aligned_or_null (uword size, uword align)
{
return clib_mem_alloc_aligned_at_offset (size, align, /* align_offset */ 0,
/* os_out_of_memory */ 0);
}

void *clib_mem_alloc (uword size);
void *clib_mem_alloc_aligned (uword size, uword align);
void *clib_mem_alloc_or_null (uword size);
void *clib_mem_alloc_aligned_or_null (uword size, uword align);
void *clib_mem_realloc (void *p, uword new_size);
void *clib_mem_realloc_aligned (void *p, uword new_size, uword align);

/* Memory allocator which panics when it fails.
Use macro so that clib_panic macro can expand __FUNCTION__ and __LINE__. */

@@ -302,61 +241,10 @@ clib_mem_alloc_aligned_or_null (uword size, uword align)
/* Alias to stack allocator for naming consistency. */
#define clib_mem_alloc_stack(bytes) __builtin_alloca(bytes)

always_inline uword
clib_mem_is_heap_object (void *p)
{
int mspace_is_heap_object (void *msp, void *p);
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
return mspace_is_heap_object (h->mspace, p);
}

always_inline void
clib_mem_free (void *p)
{
void mspace_put (void *msp, void *p_arg);
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();

/* Make sure object is in the correct heap. */
ASSERT (clib_mem_is_heap_object (p));

CLIB_MEM_POISON (p, clib_mem_size_nocheck (p));

mspace_put (h->mspace, p);
}

always_inline void *
clib_mem_realloc (void *p, uword new_size, uword old_size)
{
/* By default use alloc, copy and free to emulate realloc. */
void *q = clib_mem_alloc (new_size);
if (q)
{
uword copy_size;
if (old_size < new_size)
copy_size = old_size;
else
copy_size = new_size;
clib_memcpy_fast (q, p, copy_size);
clib_mem_free (p);
}
return q;
}

always_inline uword
clib_mem_size (void *p)
{
ASSERT (clib_mem_is_heap_object (p));
return clib_mem_size_nocheck (p);
}

always_inline void
clib_mem_free_s (void *p)
{
uword size = clib_mem_size (p);
CLIB_MEM_UNPOISON (p, size);
memset_s_inline (p, size, 0, size);
clib_mem_free (p);
}
uword clib_mem_is_heap_object (void *p);
void clib_mem_free (void *p);
uword clib_mem_size (void *p);
void clib_mem_free_s (void *p);

always_inline clib_mem_heap_t *
clib_mem_get_heap (void)
@@ -464,7 +464,7 @@ format_clib_mem_heap (u8 * s, va_list * va)
format_white_space, indent + 2, format_msize, mi.usmblks);
}

if (mspace_is_traced (heap->mspace))
if (heap->flags & CLIB_MEM_HEAP_F_TRACED)
s = format (s, "\n%U", format_mheap_trace, tm, verbose);
return s;
}

@@ -493,7 +493,10 @@ uword clib_mem_validate_serial = 0;
__clib_export void
mheap_trace (clib_mem_heap_t * h, int enable)
{
(void) mspace_enable_disable_trace (h->mspace, enable);
if (enable)
h->flags |= CLIB_MEM_HEAP_F_TRACED;
else
h->flags &= ~CLIB_MEM_HEAP_F_TRACED;

if (enable == 0)
mheap_trace_main_free (&mheap_trace_main);

@@ -518,7 +521,7 @@ int
clib_mem_is_traced (void)
{
clib_mem_heap_t *h = clib_mem_get_heap ();
return mspace_is_traced (h->mspace);
return (h->flags &= CLIB_MEM_HEAP_F_TRACED) != 0;
}

__clib_export uword
@@ -594,10 +597,139 @@ clib_mem_get_heap_size (clib_mem_heap_t * heap)
return heap->size;
}

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */
/* Memory allocator which may call os_out_of_memory() if it fails */
static void *
clib_mem_alloc_inline (uword size, uword align,
int os_out_of_memory_on_failure)
{
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
void *p;

align = clib_max (CLIB_MEM_MIN_ALIGN, align);

p = mspace_memalign (h->mspace, align, size);

if (PREDICT_FALSE (0 == p))
{
if (os_out_of_memory_on_failure)
os_out_of_memory ();
return 0;
}

if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
mheap_get_trace (pointer_to_uword (p), clib_mem_size (p));

CLIB_MEM_UNPOISON (p, size);
return p;
}

/* Memory allocator which calls os_out_of_memory() when it fails */
__clib_export void *
clib_mem_alloc (uword size)
{
return clib_mem_alloc_inline (size, CLIB_MEM_MIN_ALIGN,
/* os_out_of_memory */ 1);
}

__clib_export void *
clib_mem_alloc_aligned (uword size, uword align)
{
return clib_mem_alloc_inline (size, align,
/* os_out_of_memory */ 1);
}

/* Memory allocator which calls os_out_of_memory() when it fails */
__clib_export void *
clib_mem_alloc_or_null (uword size)
{
return clib_mem_alloc_inline (size, CLIB_MEM_MIN_ALIGN,
/* os_out_of_memory */ 0);
}

__clib_export void *
clib_mem_alloc_aligned_or_null (uword size, uword align)
{
return clib_mem_alloc_inline (size, align,
/* os_out_of_memory */ 0);
}

__clib_export void *
clib_mem_realloc_aligned (void *p, uword new_size, uword align)
{
uword old_alloc_size;
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
void *new;

ASSERT (count_set_bits (align) == 1);

old_alloc_size = p ? mspace_usable_size (p) : 0;

if (new_size == old_alloc_size)
return p;

if (p && pointer_is_aligned (p, align) &&
mspace_realloc_in_place (h->mspace, p, new_size))
{
CLIB_MEM_UNPOISON (p, new_size);
}
else
{
new = clib_mem_alloc_inline (new_size, align, 1);

CLIB_MEM_UNPOISON (new, new_size);
if (old_alloc_size)
{
CLIB_MEM_UNPOISON (p, old_alloc_size);
clib_memcpy_fast (new, p, clib_min (new_size, old_alloc_size));
clib_mem_free (p);
}
p = new;
}

return p;
}

__clib_export void *
clib_mem_realloc (void *p, uword new_size)
{
return clib_mem_realloc_aligned (p, new_size, CLIB_MEM_MIN_ALIGN);
}

__clib_export uword
clib_mem_is_heap_object (void *p)
{
int mspace_is_heap_object (void *msp, void *p);
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
return mspace_is_heap_object (h->mspace, p);
}

__clib_export void
clib_mem_free (void *p)
{
clib_mem_heap_t *h = clib_mem_get_per_cpu_heap ();
uword size = clib_mem_size (p);

/* Make sure object is in the correct heap. */
ASSERT (clib_mem_is_heap_object (p));

if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
mheap_put_trace (pointer_to_uword (p), size);
CLIB_MEM_POISON (p, clib_mem_size (p));

mspace_free (h->mspace, p);
}

__clib_export uword
clib_mem_size (void *p)
{
return mspace_usable_size (p);
}

__clib_export void
clib_mem_free_s (void *p)
{
uword size = clib_mem_size (p);
CLIB_MEM_UNPOISON (p, size);
memset_s_inline (p, size, 0, size);
clib_mem_free (p);
}
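Usage note (my example, not from the patch): callers of the reworked realloc no longer pass the old allocation size, because the heap recovers it itself via mspace_usable_size().

#include <vppinfra/mem.h>

static void *
double_buffer (void *p, uword cur_size)
{
  /* before this patch: clib_mem_realloc (p, 2 * cur_size, cur_size);
   * after this patch the old-size argument is gone: */
  return clib_mem_realloc (p, 2 * cur_size);
}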
@@ -38,7 +38,7 @@
#include <vppinfra/pool.h>

__clib_export void
_pool_init_fixed (void **pool_ptr, u32 elt_size, u32 max_elts)
_pool_init_fixed (void **pool_ptr, uword elt_size, uword max_elts, uword align)
{
uword *b;
pool_header_t *ph;

@@ -48,9 +48,7 @@ _pool_init_fixed (void **pool_ptr, u32 elt_size, u32 max_elts)
ASSERT (elt_size);
ASSERT (max_elts);

v =
vec_resize_allocate_memory (0, max_elts, elt_size * max_elts,
sizeof (pool_header_t), CLIB_CACHE_LINE_BYTES);
v = _vec_realloc (0, max_elts, elt_size, sizeof (pool_header_t), align, 0);

ph = pool_header (v);
ph->max_elts = max_elts;
(File diff suppressed because it is too large)
@@ -38,11 +38,11 @@ clib_ring_new_inline (void **p, u32 elt_bytes, u32 size, u32 align)
void *v;
clib_ring_header_t *h;

v = _vec_resize ((void *) 0,
/* length increment */ size,
/* data bytes */ elt_bytes * size,
/* header bytes */ sizeof (h[0]),
/* data align */ align);
v = _vec_realloc (0,
/* length increment */ size,
/* data bytes */ elt_bytes,
/* header bytes */ sizeof (h[0]),
/* data align */ align, 0);

h = clib_ring_header (v);
h->next = 0;
@@ -313,8 +313,8 @@ unserialize_vector_ha (serialize_main_t * m,
if (l > max_length)
serialize_error (&m->header,
clib_error_create ("bad vector length %d", l));
p = v = _vec_resize ((void *) 0, l, (uword) l * elt_bytes, header_bytes,
/* align */ align);
p = v = _vec_realloc ((void *) 0, l, elt_bytes, header_bytes,
/* align */ align, 0);

while (l != 0)
{

@@ -444,8 +444,7 @@ unserialize_pool_helper (serialize_main_t * m,
return 0;
}

v = _vec_resize ((void *) 0, l, (uword) l * elt_bytes, sizeof (p[0]),
align);
v = _vec_realloc ((void *) 0, l, elt_bytes, sizeof (p[0]), align, 0);
p = pool_header (v);

vec_unserialize (m, &p->free_indices, unserialize_vec_32);
@@ -76,11 +76,9 @@ sparse_vec_new (uword elt_bytes, uword sparse_index_bits)

ASSERT (sparse_index_bits <= 16);

v = _vec_resize ((void *) 0,
/* length increment */ 8,
/* data bytes */ 8 * elt_bytes,
/* header bytes */ sizeof (h[0]),
/* data align */ 0);
v = _vec_realloc (0, /* data bytes */ 8, elt_bytes,
/* header bytes */ sizeof (h[0]), /* data align */ 0,
/* heap */ 0);

/* Make space for invalid entry (entry 0). */
_vec_len (v) = 1;
@@ -85,6 +85,25 @@ clib_memcpy_fast (void *restrict dst, const void *restrict src, size_t n)
#endif
}

static_always_inline void *
clib_memmove (void *dst, const void *src, size_t n)
{
u8 *d = (u8 *) dst;
u8 *s = (u8 *) src;

if (s == d)
return d;

if (d > s)
for (uword i = n - 1; (i + 1) > 0; i--)
d[i] = s[i];
else
for (uword i = 0; i < n; i++)
d[i] = s[i];

return d;
}

#include <vppinfra/memcpy.h>

/* c-11 string manipulation variants */
@@ -61,14 +61,13 @@ main (int argc, char *argv[])
uword *objects = 0;
uword *handles = 0;
uword objects_used;
uword align, fixed_size;
uword align;

clib_mem_init (0, 10 << 20);

n = 10;
seed = (u32) getpid ();
check_mask = 0;
fixed_size = 0;

if (argc > 1)
{

@@ -100,13 +99,6 @@ main (int argc, char *argv[])

objects_used = 0;

if (fixed_size)
{
uword max_len = 1024 * 1024;
void *memory = clib_mem_alloc (max_len * sizeof (h[0]));
h = heap_create_from_memory (memory, max_len, sizeof (h[0]));
}

for (i = 0; i < n; i++)
{
while (1)

@@ -188,9 +180,6 @@ main (int argc, char *argv[])
vec_free (objects);
vec_free (handles);

if (fixed_size)
vec_free (h);

if (verbose)
fformat (stderr, "%U\n", format_clib_mem_usage, /* verbose */ 0);
@@ -221,8 +221,7 @@ dump_call_stats (uword * stats)
if (_v (l) == ~0) \
_v (l) = bounded_random_u32 (&(seed), 0, MAX_VEC_LEN); \
\
_v (v) = \
_vec_resize (NULL, _v (l), _v (l) * sizeof (elt_type), _v (h), 0); \
_v (v) = _vec_realloc (NULL, _v (l), sizeof (elt_type), _v (h), 0, 0); \
fill_with_random_data (_v (v), vec_bytes (_v (v)), (seed)); \
\
/* Fill header with random data as well. */ \
@@ -135,6 +135,14 @@ pointer_to_uword (const void *p)
return (uword) (clib_address_t) p;
}

static inline __attribute__ ((always_inline)) uword
pointer_is_aligned (void *p, uword align)
{
if ((pointer_to_uword (p) & (align - 1)) == 0)
return 1;
return 0;
}

#define uword_to_pointer(u,type) ((type) (clib_address_t) (u))

/* Any type: can be either word or pointer. */
@@ -1,39 +1,6 @@
/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2022 Cisco Systems, Inc.
 */
/*
  Copyright (c) 2001, 2002, 2003 Eliot Dresselhaus

  Permission is hereby granted, free of charge, to any person obtaining
  a copy of this software and associated documentation files (the
  "Software"), to deal in the Software without restriction, including
  without limitation the rights to use, copy, modify, merge, publish,
  distribute, sublicense, and/or sell copies of the Software, and to
  permit persons to whom the Software is furnished to do so, subject to
  the following conditions:

  The above copyright notice and this permission notice shall be
  included in all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
  LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
  OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include <vppinfra/vec.h>
#include <vppinfra/mem.h>
@@ -42,94 +9,74 @@
#define CLIB_VECTOR_GROW_BY_ONE 0
#endif

/* Vector resize operator. Called as needed by various macros such as
vec_add1() when we need to allocate memory. */
__clib_export void *
vec_resize_allocate_memory (void *v, word length_increment, uword data_bytes,
uword header_bytes, uword data_align)
__clib_export uword
vec_mem_size (void *v)
{
vec_header_t *vh = _vec_find (v);
uword old_alloc_bytes, new_alloc_bytes;
void *old, *new;
return v ? clib_mem_size (v - vec_get_header_size (v)) : 0;
}

header_bytes = vec_header_bytes (header_bytes);
data_align = data_align == 0 ? 1 : data_align;

data_bytes += header_bytes;
__clib_export void *
_vec_realloc (void *v, uword n_elts, uword elt_sz, uword hdr_sz, uword align,
void *heap)
{
uword n_data_bytes, data_offset, new_data_size, alloc_size;
void *p;

/* alignment must be power of 2 */
ASSERT (count_set_bits (data_align) == 1);
align = clib_max (align, VEC_MIN_ALIGN);
ASSERT (count_set_bits (align) == 1);

if (!v)
/* number of bytes needed to store both vector header and optional user
 * header */
data_offset = round_pow2 (hdr_sz + sizeof (vec_header_t), align);

/* number of bytes needed to store vector data */
n_data_bytes = n_elts * elt_sz;

/* minimal allocation needed to store data and headers */
new_data_size = data_offset + n_data_bytes;

if (v)
{
new = clib_mem_alloc_aligned_at_offset (data_bytes, data_align, header_bytes, 1 /* yes, call os_out_of_memory */
);
new_alloc_bytes = clib_mem_size (new);
CLIB_MEM_UNPOISON (new + data_bytes, new_alloc_bytes - data_bytes);
clib_memset (new, 0, new_alloc_bytes);
CLIB_MEM_POISON (new + data_bytes, new_alloc_bytes - data_bytes);
v = new + header_bytes;
_vec_len (v) = length_increment;
ASSERT (header_bytes / VEC_HEADER_ROUND <= 255);
_vec_find (v)->hdr_size = header_bytes / VEC_HEADER_ROUND;
_vec_find (v)->log2_align = min_log2 (data_align);
return v;
uword old_data_size = data_offset + _vec_len (v) * elt_sz;
p = vec_header (v);
alloc_size = clib_mem_size (p);

/* check that we are still dealing with the same vector type */
ASSERT (_vec_find (v)->hdr_size * VEC_MIN_ALIGN == data_offset);
ASSERT (_vec_find (v)->log2_align == min_log2 (align));

/* realloc if new size cannot fit into existing allocation */
if (alloc_size < new_data_size)
{
if (CLIB_VECTOR_GROW_BY_ONE)
alloc_size = n_data_bytes + data_offset;
else
alloc_size = (n_data_bytes * 3) / 2 + data_offset;

p = clib_mem_realloc_aligned (p, alloc_size, align);
alloc_size = clib_mem_size (p);
v = p + data_offset;
}

CLIB_MEM_UNPOISON (p, alloc_size);
clib_memset_u8 (p + old_data_size, 0, alloc_size - old_data_size);
}
else
{
/* new allocation */
p = clib_mem_alloc_aligned (new_data_size, align);
alloc_size = clib_mem_size (p);
CLIB_MEM_UNPOISON (p, alloc_size);
clib_memset_u8 (p, 0, alloc_size);
v = p + data_offset;
_vec_find (v)->hdr_size = data_offset / VEC_MIN_ALIGN;
_vec_find (v)->log2_align = min_log2 (align);
}

ASSERT (_vec_find (v)->hdr_size * VEC_HEADER_ROUND == header_bytes);
header_bytes = _vec_find (v)->hdr_size * VEC_HEADER_ROUND;

ASSERT (data_align == (1 << _vec_find (v)->log2_align));
data_align = 1 << _vec_find (v)->log2_align;

vh->len += length_increment;
old = v - header_bytes;

/* Vector header must start heap object. */
ASSERT (clib_mem_is_heap_object (old));

old_alloc_bytes = clib_mem_size (old);

/* Need to resize? */
if (data_bytes <= old_alloc_bytes)
{
CLIB_MEM_UNPOISON (v, data_bytes);
return v;
}

#if CLIB_VECTOR_GROW_BY_ONE > 0
new_alloc_bytes = data_bytes;
#else
new_alloc_bytes = (old_alloc_bytes * 3) / 2;
if (new_alloc_bytes < data_bytes)
new_alloc_bytes = data_bytes;
#endif

new =
clib_mem_alloc_aligned_at_offset (new_alloc_bytes, data_align,
header_bytes,
1 /* yes, call os_out_of_memory */ );

/* FIXME fail gracefully. */
if (!new)
clib_panic
("vec_resize fails, length increment %d, data bytes %d, alignment %d",
length_increment, data_bytes, data_align);

CLIB_MEM_UNPOISON (old, old_alloc_bytes);
clib_memcpy_fast (new, old, old_alloc_bytes);
clib_mem_free (old);

/* Allocator may give a bit of extra room. */
new_alloc_bytes = clib_mem_size (new);
v = new;

/* Zero new memory. */
CLIB_MEM_UNPOISON (new + data_bytes, new_alloc_bytes - data_bytes);
memset (v + old_alloc_bytes, 0, new_alloc_bytes - old_alloc_bytes);
CLIB_MEM_POISON (new + data_bytes, new_alloc_bytes - data_bytes);

return v + header_bytes;
CLIB_MEM_POISON (p + new_data_size, alloc_size - new_data_size);
_vec_len (v) = n_elts;
return v;
}

__clib_export u32
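A worked size example for the _vec_realloc() path above, with illustrative numbers of my own (a u64 vector, no user header):

/*
 * hdr_sz = 0, sizeof (vec_header_t) = 8, align = max (8, VEC_MIN_ALIGN) = 8
 * data_offset   = round_pow2 (0 + 8, 8) = 8
 * n_elts = 10  -> n_data_bytes = 10 * 8 = 80, new_data_size = 8 + 80 = 88
 * On growth the allocation is sized to (n_data_bytes * 3) / 2 + data_offset
 * = 128, and clib_mem_size () may round that up further, so the vector can
 * keep growing in place until that space is exhausted.
 */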
@@ -143,62 +90,3 @@ vec_free_not_inline (void *v)
{
vec_free (v);
}

/** \cond */

#ifdef TEST

#include <stdio.h>

void
main (int argc, char *argv[])
{
word n = atoi (argv[1]);
word i, *x = 0;

typedef struct
{
word x, y, z;
} FOO;

FOO *foos = vec_init (FOO, 10), *f;

vec_validate (foos, 100);
foos[100].x = 99;

_vec_len (foos) = 0;
for (i = 0; i < n; i++)
{
vec_add1 (x, i);
vec_add2 (foos, f, 1);
f->x = 2 * i;
f->y = 3 * i;
f->z = 4 * i;
}

{
word n = 2;
word m = 42;
vec_delete (foos, n, m);
}

{
word n = 2;
word m = 42;
vec_insert (foos, n, m);
}

vec_free (x);
vec_free (foos);
exit (0);
}
#endif
/** \endcond */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */
(File diff suppressed because it is too large)
@@ -55,13 +55,13 @@
typedef struct
{
u32 len; /**< Number of elements in vector (NOT its allocated length). */
u8 hdr_size; /**< header size divided by VEC_HEADER_ROUND */
u8 hdr_size; /**< header size divided by VEC_MIN_ALIGN */
u8 log2_align; /**< data alignment */
u8 vpad[2]; /**< pad to 8 bytes */
u8 vector_data[0]; /**< Vector data . */
} vec_header_t;

#define VEC_HEADER_ROUND 8
#define VEC_MIN_ALIGN 8

/** \brief Find the vector header
@@ -73,19 +73,20 @@ typedef struct
*/
#define _vec_find(v) ((vec_header_t *) (v) - 1)

always_inline uword __vec_align (uword data_align, uword configuered_align);
always_inline uword __vec_elt_sz (uword elt_sz, int is_void);

#define _vec_round_size(s) \
(((s) + sizeof (uword) - 1) &~ (sizeof (uword) - 1))
#define _vec_is_void(P) \
__builtin_types_compatible_p (__typeof__ ((P)[0]), void)
#define _vec_elt_sz(V) __vec_elt_sz (sizeof ((V)[0]), _vec_is_void (V))
#define _vec_align(V, A) __vec_align (__alignof__((V)[0]), A)

always_inline uword
vec_header_bytes (uword header_bytes)
{
return round_pow2 (header_bytes + sizeof (vec_header_t), VEC_HEADER_ROUND);
}

always_inline uword
always_inline CLIB_NOSANITIZE_ADDR uword
vec_get_header_size (void *v)
{
uword header_size = _vec_find (v)->hdr_size * VEC_HEADER_ROUND;
uword header_size = _vec_find (v)->hdr_size * VEC_MIN_ALIGN;
return header_size;
}
@@ -141,11 +142,7 @@ u32 vec_len_not_inline (void *v);
 * @return memory size allocated for the vector
 */

always_inline uword
vec_mem_size (void *v)
{
return v ? clib_mem_size (v - vec_get_header_size (v)) : 0;
}
uword vec_mem_size (void *v);

/**
 * Number of elements that can fit into generic vector
@@ -156,24 +153,35 @@ vec_mem_size (void *v)
 */

always_inline uword
_vec_max_len (void *v, uword elt_size)
vec_max_bytes (void *v)
{
return v ? vec_mem_size (v) / elt_size : 0;
return v ? vec_mem_size (v) - vec_get_header_size (v) : 0;
}

#define vec_max_len(v) _vec_max_len (v, sizeof ((v)[0]))
always_inline uword
_vec_max_len (void *v, uword elt_sz)
{
return vec_max_bytes (v) / elt_sz;
}

#define vec_max_len(v) _vec_max_len (v, _vec_elt_sz (v))

always_inline void
_vec_set_len (void *v, uword len, uword elt_size)
_vec_set_len (void *v, uword len, uword elt_sz)
{
ASSERT (v);
ASSERT (len <= vec_max_len (v));
ASSERT (len <= _vec_max_len (v, elt_sz));
uword old_len = _vec_len (v);

if (len > old_len)
CLIB_MEM_UNPOISON (v + old_len * elt_sz, (len - old_len) * elt_sz);
else if (len < old_len)
CLIB_MEM_POISON (v + len * elt_sz, (old_len - len) * elt_sz);

CLIB_MEM_POISON_LEN (v, _vec_len (v) * elt_size, len * elt_size);
_vec_len (v) = len;
}

#define vec_set_len(v, l) _vec_set_len ((void *) v, l, sizeof ((v)[0]))
#define vec_set_len(v, l) _vec_set_len ((void *) v, l, _vec_elt_sz (v))

/** \brief Reset vector length to zero
NULL-pointer tolerant