vlib: enqueue_to_next_with_aux implementation

Change-Id: I0e1bb39d765ec3efa7b28ca02fb7beeb23607e51
Type: improvement
Signed-off-by: Mohammed Hawari <mohammed@hawari.fr>
This commit is contained in:
Mohammed Hawari
2022-05-31 18:11:05 +02:00
committed by Damjan Marion
parent b03eec969f
commit cd758e6af5
4 changed files with 232 additions and 36 deletions
+114 -21
View File
@@ -10,26 +10,38 @@
/* Extract all buffers whose next index equals next_index and enqueue them to
 * that next node's frame(s). Marks the extracted slots in used_elt_bmp and
 * returns the number of buffers still left to dispatch.
 *
 * When maybe_aux is set and the destination frame carries aux storage
 * (f->aux_offset != 0), per-buffer aux words are copied alongside the buffer
 * indices. tmp/tmp_aux are caller-provided scratch arrays of VLIB_FRAME_SIZE
 * entries used when the extracted set may not fit the current frame. */
static_always_inline u32
enqueue_one (vlib_main_t *vm, vlib_node_runtime_t *node,
	     vlib_frame_bitmap_t used_elt_bmp, u16 next_index, u32 *buffers,
	     u16 *nexts, u32 n_buffers, u32 n_left, u32 *tmp, u8 maybe_aux,
	     u32 *aux_data, u32 *tmp_aux)
{
  vlib_frame_bitmap_t match_bmp;
  vlib_frame_t *f;
  u32 n_extracted, n_free;
  u32 *to, *to_aux;

  f = vlib_get_next_frame_internal (vm, node, next_index, 0);

  /* aux copy only makes sense if the target frame actually has aux space */
  maybe_aux = maybe_aux && f->aux_offset;

  n_free = VLIB_FRAME_SIZE - f->n_vectors;

  /* if frame contains enough space for worst case scenario, we can avoid
   * use of tmp */
  if (n_free >= n_left)
    {
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      if (maybe_aux)
	to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
    }
  else
    {
      to = tmp;
      if (maybe_aux)
	to_aux = tmp_aux;
    }

  clib_mask_compare_u16 (next_index, nexts, match_bmp, n_buffers);
  n_extracted = clib_compress_u32 (to, buffers, match_bmp, n_buffers);
  if (maybe_aux)
    clib_compress_u32 (to_aux, aux_data, match_bmp, n_buffers);
  vlib_frame_bitmap_or (used_elt_bmp, match_bmp);

  if (to != tmp)
    {
      /* indices already written directly into the frame, just close it */
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else if (n_free >= n_extracted)
    {
      /* enough space in the existing frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_extracted);
      if (maybe_aux)
	{
	  to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
	  vlib_buffer_copy_indices (to_aux, tmp_aux, n_extracted);
	}
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else
    {
      /* full frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_free);
      if (maybe_aux)
	{
	  to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
	  vlib_buffer_copy_indices (to_aux, tmp_aux, n_free);
	}
      vlib_put_next_frame (vm, node, next_index, 0);

      /* second frame for the overflow */
      u32 n_2nd_frame = n_extracted - n_free;
      f = vlib_get_next_frame_internal (vm, node, next_index, 1);
      to = vlib_frame_vector_args (f);
      vlib_buffer_copy_indices (to, tmp + n_free, n_2nd_frame);
      if (maybe_aux)
	{
	  to_aux = vlib_frame_aux_args (f);
	  vlib_buffer_copy_indices (to_aux, tmp_aux + n_free, n_2nd_frame);
	}
      vlib_put_next_frame (vm, node, next_index,
			   VLIB_FRAME_SIZE - n_2nd_frame);
    }

  return n_left - n_extracted;
}
void __clib_section (".vlib_buffer_enqueue_to_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts,
uword count)
static_always_inline void
vlib_buffer_enqueue_to_next_fn_inline (vlib_main_t *vm,
vlib_node_runtime_t *node, u32 *buffers,
u32 *aux_data, u16 *nexts, uword count,
u8 maybe_aux)
{
u32 tmp[VLIB_FRAME_SIZE];
u32 tmp_aux[VLIB_FRAME_SIZE];
u32 n_left;
u16 next_index;
@@ -80,7 +109,8 @@ CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
next_index = nexts[0];
n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
VLIB_FRAME_SIZE, n_left, tmp);
VLIB_FRAME_SIZE, n_left, tmp, maybe_aux, aux_data,
tmp_aux);
while (n_left)
{
@@ -93,10 +123,13 @@ CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
next_index =
nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers,
nexts, VLIB_FRAME_SIZE, n_left, tmp);
nexts, VLIB_FRAME_SIZE, n_left, tmp, maybe_aux,
aux_data, tmp_aux);
}
buffers += VLIB_FRAME_SIZE;
if (maybe_aux)
aux_data += VLIB_FRAME_SIZE;
nexts += VLIB_FRAME_SIZE;
count -= VLIB_FRAME_SIZE;
}
@@ -109,7 +142,7 @@ CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
u32 off = 0;
n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
count, n_left, tmp);
count, n_left, tmp, maybe_aux, aux_data, tmp_aux);
while (n_left)
{
@@ -121,26 +154,55 @@ CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
next_index =
nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers,
nexts, count, n_left, tmp);
n_left =
enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
count, n_left, tmp, maybe_aux, aux_data, tmp_aux);
}
}
}
/* Arch-specific entry point: enqueue buffers to their per-packet next nodes
 * without auxiliary data (aux_data passed as NULL, maybe_aux disabled). */
void __clib_section (".vlib_buffer_enqueue_to_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts,
uword count)
{
vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, NULL, nexts, count,
0 /* maybe_aux */);
}
/* Register this per-CPU-arch variant for runtime dispatch selection. */
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_fn);
/* Arch-specific entry point: enqueue buffers to their per-packet next nodes
 * together with one u32 of auxiliary data per buffer. */
void __clib_section (".vlib_buffer_enqueue_to_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
 u16 *nexts, uword count)
{
  vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, aux_data, nexts,
					 count, 1 /* maybe_aux */);
}

/* Register this per-CPU-arch variant for runtime dispatch selection. */
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_with_aux_fn);
/* Common implementation behind the single-next enqueue entry points.
 * Enqueues count buffers to one next node, spilling into additional frames
 * when the current frame cannot hold them all. with_aux selects whether a
 * parallel array of u32 aux words is copied into the frames' aux storage. */
static_always_inline void
vlib_buffer_enqueue_to_single_next_fn_inline (vlib_main_t *vm,
					      vlib_node_runtime_t *node,
					      u32 *buffers, u32 *aux_data,
					      u16 next_index, u32 count,
					      u8 with_aux)
{
  u32 *to_next, *to_next_aux, n_left_to_next, n_enq;

  if (with_aux)
    vlib_get_next_frame_with_aux (vm, node, next_index, to_next, to_next_aux,
				  n_left_to_next);
  else
    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

  /* fast path: everything fits into the current frame */
  if (PREDICT_TRUE (n_left_to_next >= count))
    {
      vlib_buffer_copy_indices (to_next, buffers, count);
      if (with_aux)
	vlib_buffer_copy_indices (to_next_aux, aux_data, count);
      n_left_to_next -= count;
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      return;
    }

  /* slow path: fill the current frame, then keep allocating new ones */
  n_enq = n_left_to_next;
next:
  vlib_buffer_copy_indices (to_next, buffers, n_enq);
  if (with_aux)
    vlib_buffer_copy_indices (to_next_aux, aux_data, n_enq);
  n_left_to_next -= n_enq;

  if (PREDICT_FALSE (count > n_enq))
    {
      count -= n_enq;
      buffers += n_enq;
      if (with_aux)
	aux_data += n_enq;

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      if (with_aux)
	vlib_get_next_frame_with_aux (vm, node, next_index, to_next,
				      to_next_aux, n_left_to_next);
      else
	vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
      n_enq = clib_min (n_left_to_next, count);
      goto next;
    }
  vlib_put_next_frame (vm, node, next_index, n_left_to_next);
}
/* Arch-specific entry point: enqueue all buffers to one next node without
 * auxiliary data (aux_data passed as NULL, with_aux disabled). */
void __clib_section (".vlib_buffer_enqueue_to_single_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
u32 count)
{
vlib_buffer_enqueue_to_single_next_fn_inline (
vm, node, buffers, NULL, next_index, count, 0 /* with_aux */);
}
/* Register this per-CPU-arch variant for runtime dispatch selection. */
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_fn);
/* Arch-specific entry point: enqueue all buffers to one next node together
 * with one u32 of auxiliary data per buffer. */
void __clib_section (".vlib_buffer_enqueue_to_single_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
u16 next_index, u32 count)
{
vlib_buffer_enqueue_to_single_next_fn_inline (
vm, node, buffers, aux_data, next_index, count, 1 /* with_aux */);
}
/* Register this per-CPU-arch variant for runtime dispatch selection. */
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_with_aux_fn);
static inline vlib_frame_queue_elt_t *
vlib_get_frame_queue_elt (vlib_frame_queue_main_t *fqm, u32 index,
int dont_wait)
@@ -482,8 +571,12 @@ vlib_buffer_funcs_init (vlib_main_t *vm)
vlib_buffer_func_main_t *bfm = &vlib_buffer_func_main;
bfm->buffer_enqueue_to_next_fn =
CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_fn);
bfm->buffer_enqueue_to_next_with_aux_fn =
CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_with_aux_fn);
bfm->buffer_enqueue_to_single_next_fn =
CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_fn);
bfm->buffer_enqueue_to_single_next_with_aux_fn =
CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_with_aux_fn);
bfm->buffer_enqueue_to_thread_fn =
CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_thread_fn);
bfm->buffer_enqueue_to_thread_with_aux_fn =
+11
View File
@@ -56,10 +56,17 @@ typedef void (vlib_buffer_enqueue_to_next_fn_t) (vlib_main_t *vm,
vlib_node_runtime_t *node,
u32 *buffers, u16 *nexts,
uword count);
/* Function-pointer types for the arch-dispatched enqueue variants; the
 * selected implementations are stored in vlib_buffer_func_main. */
typedef void (vlib_buffer_enqueue_to_next_with_aux_fn_t) (
  vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
  u16 *nexts, uword count);
typedef void (vlib_buffer_enqueue_to_single_next_fn_t) (
  vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
  u32 count);
typedef void (vlib_buffer_enqueue_to_single_next_with_aux_fn_t) (
  vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
  u16 next_index, u32 count);
typedef u32 (vlib_buffer_enqueue_to_thread_fn_t) (
vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
@@ -73,7 +80,11 @@ typedef u32 (vlib_buffer_enqueue_to_thread_with_aux_fn_t) (
typedef struct
{
vlib_buffer_enqueue_to_next_fn_t *buffer_enqueue_to_next_fn;
vlib_buffer_enqueue_to_next_with_aux_fn_t
*buffer_enqueue_to_next_with_aux_fn;
vlib_buffer_enqueue_to_single_next_fn_t *buffer_enqueue_to_single_next_fn;
vlib_buffer_enqueue_to_single_next_with_aux_fn_t
*buffer_enqueue_to_single_next_with_aux_fn;
vlib_buffer_enqueue_to_thread_fn_t *buffer_enqueue_to_thread_fn;
vlib_buffer_enqueue_to_thread_with_aux_fn_t
*buffer_enqueue_to_thread_with_aux_fn;
+21
View File
@@ -355,6 +355,16 @@ vlib_buffer_enqueue_to_next (vlib_main_t * vm, vlib_node_runtime_t * node,
(fn) (vm, node, buffers, nexts, count);
}
/* Enqueue buffers (with one u32 aux word each) to their per-packet next
 * nodes, via the arch-selected implementation. */
static_always_inline void
vlib_buffer_enqueue_to_next_with_aux (vlib_main_t *vm,
				      vlib_node_runtime_t *node, u32 *buffers,
				      u32 *aux_data, u16 *nexts, uword count)
{
  vlib_buffer_enqueue_to_next_with_aux_fn_t *enqueue_fn =
    vlib_buffer_func_main.buffer_enqueue_to_next_with_aux_fn;
  enqueue_fn (vm, node, buffers, aux_data, nexts, count);
}
static_always_inline void
vlib_buffer_enqueue_to_next_vec (vlib_main_t *vm, vlib_node_runtime_t *node,
u32 **buffers, u16 **nexts, uword count)
@@ -379,6 +389,17 @@ vlib_buffer_enqueue_to_single_next (vlib_main_t * vm,
(fn) (vm, node, buffers, next_index, count);
}
/* Enqueue all buffers (with one u32 aux word each) to a single next node,
 * via the arch-selected implementation. */
static_always_inline void
vlib_buffer_enqueue_to_single_next_with_aux (vlib_main_t *vm,
					     vlib_node_runtime_t *node,
					     u32 *buffers, u32 *aux_data,
					     u16 next_index, u32 count)
{
  vlib_buffer_enqueue_to_single_next_with_aux_fn_t *enqueue_fn =
    vlib_buffer_func_main.buffer_enqueue_to_single_next_with_aux_fn;
  enqueue_fn (vm, node, buffers, aux_data, next_index, count);
}
static_always_inline u32
vlib_buffer_enqueue_to_thread (vlib_main_t *vm, vlib_node_runtime_t *node,
u32 frame_queue_index, u32 *buffer_indices,
+86 -15
View File
@@ -372,16 +372,34 @@ vlib_frame_t *vlib_get_next_frame_internal (vlib_main_t * vm,
u32 next_index,
u32 alloc_new_frame);
/* Fetch (or allocate) the next frame for next_index and expose its free
 * vector slots. MACRO with side effects: writes vectors and n_vectors_left. */
#define vlib_get_next_frame_macro(vm, node, next_index, vectors,              \
				   n_vectors_left, alloc_new_frame)           \
  do                                                                          \
    {                                                                         \
      vlib_frame_t *_f = vlib_get_next_frame_internal (                       \
	(vm), (node), (next_index), (alloc_new_frame));                       \
      u32 _n = _f->n_vectors;                                                 \
      (vectors) = vlib_frame_vector_args (_f) + _n * sizeof ((vectors)[0]);   \
      (n_vectors_left) = VLIB_FRAME_SIZE - _n;                                \
    }                                                                         \
  while (0)

/* Same as vlib_get_next_frame_macro, additionally exposing the frame's aux
 * data slots. With maybe_no_aux set, a frame without aux storage
 * (aux_offset == 0) yields aux_data == NULL instead of invalid pointers. */
#define vlib_get_next_frame_macro_with_aux(vm, node, next_index, vectors,     \
					   n_vectors_left, alloc_new_frame,   \
					   aux_data, maybe_no_aux)            \
  do                                                                          \
    {                                                                         \
      vlib_frame_t *_f = vlib_get_next_frame_internal (                       \
	(vm), (node), (next_index), (alloc_new_frame));                       \
      u32 _n = _f->n_vectors;                                                 \
      (vectors) = vlib_frame_vector_args (_f) + _n * sizeof ((vectors)[0]);   \
      if ((maybe_no_aux) && (_f)->aux_offset == 0)                            \
	(aux_data) = NULL;                                                    \
      else                                                                    \
	(aux_data) = vlib_frame_aux_args (_f) + _n * sizeof ((aux_data)[0]);  \
      (n_vectors_left) = VLIB_FRAME_SIZE - _n;                                \
    }                                                                         \
  while (0)
/** \brief Get pointer to next frame vector data by
(@c vlib_node_runtime_t, @c next_index).
@@ -395,16 +413,69 @@ do { \
@return @c vectors -- pointer to next available vector slot
@return @c n_vectors_left -- number of vector slots available
*/
/* Get the existing next frame for next_index (alloc_new_frame = 0). */
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)    \
  vlib_get_next_frame_macro (vm, node, next_index, vectors, n_vectors_left,   \
			     /* alloc new frame */ 0)

/* Always allocate a fresh next frame for next_index (alloc_new_frame = 1). */
#define vlib_get_new_next_frame(vm, node, next_index, vectors,                \
				n_vectors_left)                               \
  vlib_get_next_frame_macro (vm, node, next_index, vectors, n_vectors_left,   \
			     /* alloc new frame */ 1)
/** \brief Get pointer to next frame vector data and next frame aux data by
(@c vlib_node_runtime_t, @c next_index).
Standard single/dual loop boilerplate element.
@attention This is a MACRO, with SIDE EFFECTS.
@attention This MACRO is unsafe in case the next node does not support
aux_data
@param vm vlib_main_t pointer, varies by thread
@param node current node vlib_node_runtime_t pointer
@param next_index requested graph arc index
@return @c vectors -- pointer to next available vector slot
@return @c aux_data -- pointer to next available aux data slot
@return @c n_vectors_left -- number of vector slots available
*/
#define vlib_get_next_frame_with_aux(vm, node, next_index, vectors, aux_data, \
n_vectors_left) \
vlib_get_next_frame_macro_with_aux ( \
vm, node, next_index, vectors, n_vectors_left, /* alloc new frame */ 0, \
aux_data, /* maybe_no_aux */ 0)
#define vlib_get_new_next_frame_with_aux(vm, node, next_index, vectors, \
aux_data, n_vectors_left) \
vlib_get_next_frame_macro_with_aux ( \
vm, node, next_index, vectors, n_vectors_left, /* alloc new frame */ 1, \
aux_data, /* maybe_no_aux */ 0)
/** \brief Get pointer to next frame vector data and next frame aux data by
(@c vlib_node_runtime_t, @c next_index).
Standard single/dual loop boilerplate element.
@attention This is a MACRO, with SIDE EFFECTS.
@attention This MACRO is safe in case the next node does not support aux_data.
In that case aux_data is set to NULL.
@param vm vlib_main_t pointer, varies by thread
@param node current node vlib_node_runtime_t pointer
@param next_index requested graph arc index
@return @c vectors -- pointer to next available vector slot
@return @c aux_data -- pointer to next available aux data slot
@return @c n_vectors_left -- number of vector slots available
*/
#define vlib_get_next_frame_with_aux_safe(vm, node, next_index, vectors, \
aux_data, n_vectors_left) \
vlib_get_next_frame_macro_with_aux ( \
vm, node, next_index, vectors, n_vectors_left, /* alloc new frame */ 0, \
aux_data, /* maybe_no_aux */ 1)
#define vlib_get_new_next_frame_with_aux_safe(vm, node, next_index, vectors, \
aux_data, n_vectors_left) \
vlib_get_next_frame_macro_with_aux ( \
vm, node, next_index, vectors, n_vectors_left, /* alloc new frame */ 1, \
aux_data, /* maybe_no_aux */ 1)
/** \brief Release pointer to next frame vector data.
Standard single/dual loop boilerplate element.
@param vm vlib_main_t pointer, varies by thread