vlib: allocating buffers on thread-x and freeing them on thread-y causes a crash on debug image (VPP-1151)

In the debug image there is extra code to validate a buffer when it is
freed: the buffer index is looked up in a hash table, under a spinlock
to prevent contention. However, each worker thread has its own copy of
the spinlock and of the hash table, so allocating a buffer on thread-x
and freeing the same buffer on thread-y causes the validation to fail
on thread-y. The fix is to keep a single spinlock and hash table,
stored in vlib_global_main. A minimal sketch of this single-lock
pattern follows the commit metadata below.

Change-Id: Ic383846cefe84a3e262255afcf82276742f0f62e
Signed-off-by: Steven <sluong@cisco.com>
Author: Steven <sluong@cisco.com>
Date: 2018-01-29 20:09:09 -08:00
parent 92b15bcea4
commit a7effa1b07
4 changed files with 17 additions and 25 deletions
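
For context, the sketch below (not VPP code) illustrates the single-lock pattern the fix adopts: one process-wide record of buffer states guarded by one spinlock, so a buffer allocated on thread-x still validates correctly when freed on thread-y. The table, lock, and helper names are illustrative assumptions, not the vlib API.

/* Minimal sketch, not VPP code: one process-wide "known state" table
   guarded by a single spinlock, analogous to keeping buffer_known_hash
   and its lock in vlib_global_main.  All names here are hypothetical. */
#include <pthread.h>
#include <stdint.h>

#define BUFFER_KNOWN_FREE      0
#define BUFFER_KNOWN_ALLOCATED 1
#define MAX_BUFFERS            1024

static uint8_t buffer_known_table[MAX_BUFFERS]; /* shared by all threads */
static pthread_spinlock_t known_lock;           /* the single lock */

static void
known_state_init (void)
{
  pthread_spin_init (&known_lock, PTHREAD_PROCESS_PRIVATE);
}

/* Called by whichever thread allocates or frees the buffer. */
static void
buffer_set_known_state (uint32_t buffer_index, uint8_t state)
{
  pthread_spin_lock (&known_lock);
  buffer_known_table[buffer_index] = state;
  pthread_spin_unlock (&known_lock);
}

/* Validation on free: works regardless of which thread allocated. */
static uint8_t
buffer_is_known (uint32_t buffer_index)
{
  pthread_spin_lock (&known_lock);
  uint8_t state = buffer_known_table[buffer_index];
  pthread_spin_unlock (&known_lock);
  return state;
}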

View File

@@ -1462,14 +1462,10 @@ ixge_rx_queue_no_wrap (ixge_main_t * xm,
 	  to_add -= 2;
 #if 0
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, bi0));
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, bi1));
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, fi0));
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, fi1));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (bi0));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (bi1));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (fi0));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (fi1));
 #endif
 	  b0 = vlib_get_buffer (vm, bi0);
@@ -1680,10 +1676,8 @@ ixge_rx_queue_no_wrap (ixge_main_t * xm,
 	  to_add -= 1;
 #if 0
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, bi0));
-	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED ==
-		  vlib_buffer_is_known (vm, fi0));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (bi0));
+	  ASSERT (VLIB_BUFFER_KNOWN_ALLOCATED == vlib_buffer_is_known (fi0));
 #endif
 	  b0 = vlib_get_buffer (vm, bi0);

View File

@@ -185,7 +185,7 @@ vlib_validate_buffer_helper (vlib_main_t * vm,
       vlib_buffer_known_state_t k;
       u8 *msg, *result;
-      k = vlib_buffer_is_known (vm, b->next_buffer);
+      k = vlib_buffer_is_known (b->next_buffer);
       if (k != VLIB_BUFFER_KNOWN_ALLOCATED)
	return format (0, "next 0x%x: %U",
		       b->next_buffer, format_vlib_buffer_known_state, k);
@@ -243,7 +243,7 @@ vlib_validate_buffers (vlib_main_t * vm,
	  goto done;
	}
-      k = vlib_buffer_is_known (vm, bi);
+      k = vlib_buffer_is_known (bi);
       if (k != known_state)
	{
	  msg = format (0, "is %U; expected %U",
@@ -317,7 +317,7 @@ vlib_buffer_validate_alloc_free (vlib_main_t * vm,
       bi = b[0];
       b += 1;
-      known = vlib_buffer_is_known (vm, bi);
+      known = vlib_buffer_is_known (bi);
       if (known != expected_state)
	{
	  ASSERT (0);
@@ -328,8 +328,7 @@ vlib_buffer_validate_alloc_free (vlib_main_t * vm,
	}
       vlib_buffer_set_known_state
-	(vm, bi,
-	 is_free ? VLIB_BUFFER_KNOWN_FREE : VLIB_BUFFER_KNOWN_ALLOCATED);
+	(bi, is_free ? VLIB_BUFFER_KNOWN_FREE : VLIB_BUFFER_KNOWN_ALLOCATED);
     }
 }
@@ -580,7 +579,7 @@ vlib_buffer_fill_free_list_internal (vlib_main_t * vm,
       bi[i] = vlib_get_buffer_index (vm, b);
       if (CLIB_DEBUG > 0)
-	vlib_buffer_set_known_state (vm, bi[i], VLIB_BUFFER_KNOWN_FREE);
+	vlib_buffer_set_known_state (bi[i], VLIB_BUFFER_KNOWN_FREE);
       b = vlib_buffer_next_contiguous (b, fl->n_data_bytes);
     }

View File

@@ -222,9 +222,9 @@ void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
				      expected_state);
 always_inline vlib_buffer_known_state_t
-vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
+vlib_buffer_is_known (u32 buffer_index)
 {
-  vlib_buffer_main_t *bm = vm->buffer_main;
+  vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
   clib_spinlock_lock (&bm->buffer_known_hash_lockp);
   uword *p = hash_get (bm->buffer_known_hash, buffer_index);
@@ -233,11 +233,11 @@ vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
 }
 always_inline void
-vlib_buffer_set_known_state (vlib_main_t * vm,
-			     u32 buffer_index,
+vlib_buffer_set_known_state (u32 buffer_index,
			     vlib_buffer_known_state_t state)
 {
-  vlib_buffer_main_t *bm = vm->buffer_main;
+  vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
   clib_spinlock_lock (&bm->buffer_known_hash_lockp);
   hash_set (bm->buffer_known_hash, buffer_index, state);
   clib_spinlock_unlock (&bm->buffer_known_hash_lockp);

View File

@@ -220,8 +220,7 @@ replication_recycle_callback (vlib_main_t * vm, vlib_buffer_free_list_t * fl)
 #if (CLIB_DEBUG > 0)
       if (vm->buffer_main->callbacks_registered == 0)
-	vlib_buffer_set_known_state (vm, bi0,
-				     VLIB_BUFFER_KNOWN_ALLOCATED);
+	vlib_buffer_set_known_state (bi0, VLIB_BUFFER_KNOWN_ALLOCATED);
 #endif
       /* If buffer is traced, mark frame as traced */