vppinfra: bitops cleanup

Type: refactor
Change-Id: I7fa113e924640f9d798c1eb6ae64b9c0a9e2104c
Signed-off-by: Damjan Marion <damarion@cisco.com>
This commit is contained in:
Damjan Marion
2022-01-13 00:28:14 +01:00
committed by Florin Coras
parent 5233d4619c
commit 7b90f669d8
11 changed files with 57 additions and 145 deletions
+1
View File
@@ -19,6 +19,7 @@ ForEachMacros:
- 'vec_foreach_index_backwards'
- 'vlib_foreach_rx_tx'
- 'foreach_vlib_main'
- 'foreach_set_bit_index'
- 'RTE_ETH_FOREACH_DEV'
StatementMacros:
+1 -1
View File
@@ -54,7 +54,7 @@ format_vnet_hw_if_caps (u8 *s, va_list *va)
else
s = format (s, "unknown-%u", bit);
caps = reset_lowest_set_bit (caps);
caps = clear_lowest_set_bit (caps);
if (caps)
vec_add1 (s, ' ');
}
+1 -2
View File
@@ -40,8 +40,7 @@
#ifndef included_srp_packet_h
#define included_srp_packet_h
#include <vppinfra/byte_order.h>
#include <vppinfra/bitops.h>
#include <vppinfra/clib.h>
#include <vnet/ethernet/packet.h>
/* SRP version 2. */
-1
View File
@@ -45,7 +45,6 @@
#include <vppinfra/vec.h>
#include <vppinfra/random.h>
#include <vppinfra/error.h>
#include <vppinfra/bitops.h> /* for count_set_bits */
typedef uword clib_bitmap_t;
+40 -43
View File
@@ -38,18 +38,38 @@
#ifndef included_clib_bitops_h
#define included_clib_bitops_h
#include <vppinfra/clib.h>
/* Return x with its lowest set bit cleared (BLSR on BMI-capable x86).
   Well-defined for x == 0 (returns 0). */
static_always_inline uword
clear_lowest_set_bit (uword x)
{
#ifdef __BMI__
#if uword_bits > 32
  return _blsr_u64 (x);
#else
  return _blsr_u32 (x);
#endif
#else
  return x & (x - 1);
#endif
}
/* Isolate the lowest set bit of x (BLSI on BMI-capable x86).
   Returns 0 when x == 0. */
static_always_inline uword
get_lowest_set_bit (uword x)
{
#ifdef __BMI__
#if uword_bits > 32
  return _blsi_u64 (x);
#else
  return _blsi_u32 (x);
#endif
#else
  return x & -x;
#endif
}
/* 0-based index of the lowest set bit of x.
   NOTE(review): __builtin_ctz* is undefined for a zero argument — callers
   must pass x != 0. */
static_always_inline u8
get_lowest_set_bit_index (uword x)
{
#if uword_bits > 32
  return __builtin_ctzll (x);
#else
  return __builtin_ctz (x);
#endif
}
/* Population count from Hacker's Delight. */
always_inline uword
count_set_bits (uword x)
{
#ifdef __POPCNT__
#if uword_bits == 64
return __builtin_popcountll (x);
#else
return __builtin_popcount (x);
#endif
return uword_bits > 32 ? __builtin_popcountll (x) : __builtin_popcount (x);
#else
#if uword_bits == 64
const uword c1 = 0x5555555555555555;
@@ -81,6 +101,15 @@ count_set_bits (uword x)
#endif
}
#if uword_bits == 64
#define count_leading_zeros(x) __builtin_clzll (x)
#else
/* 32-bit uword: use the width-matched builtin; __builtin_clzll here would
   return a result biased by 32. */
#define count_leading_zeros(x) __builtin_clzl (x)
#endif

/* Both aliases below are undefined for x == 0 (builtin ctz semantics). */
#define count_trailing_zeros(x) get_lowest_set_bit_index (x)
#define log2_first_set(x) get_lowest_set_bit_index (x)
/* Based on "Hacker's Delight" code from GLS. */
typedef struct
{
@@ -163,45 +192,13 @@ next_with_same_number_of_set_bits (uword x)
return ripple | ones;
}
/* Evaluate `body` once for each set bit in `mask`, lowest bit first, with
   `var` set to the bit's index.  `mask` is copied once up front, so the
   macro argument is evaluated a single time. */
#define foreach_set_bit(var,mask,body) \
do { \
uword _foreach_set_bit_m_##var = (mask); \
uword _foreach_set_bit_f_##var; \
while (_foreach_set_bit_m_##var != 0) \
{ \
_foreach_set_bit_f_##var = first_set (_foreach_set_bit_m_##var); \
_foreach_set_bit_m_##var ^= _foreach_set_bit_f_##var; \
(var) = min_log2 (_foreach_set_bit_f_##var); \
do { body; } while (0); \
} \
} while (0)
/* Iterate `i` over the indices of the set bits in `v`, lowest first.
   Works on a private copy (_tmp), so `v` itself is not modified; the
   `0 * (uword) (i = ...)` term seeds `i` for the first iteration without
   affecting the loop condition. */
#define foreach_set_bit_index(i, v) \
for (uword _tmp = (v) + 0 * (uword) (i = get_lowest_set_bit_index (v)); \
_tmp; \
i = get_lowest_set_bit_index (_tmp = clear_lowest_set_bit (_tmp)))
/* Return x with its lowest set bit cleared; fixed 64-bit variant of
   clear_lowest_set_bit ().  Well-defined for x == 0. */
static_always_inline u64
reset_lowest_set_bit (u64 x)
{
#ifndef __BMI__
  return x & (x - 1);
#else
  return _blsr_u64 (x);
#endif
}
/* Isolate the lowest set bit of x (BLSI when BMI is available); fixed
   64-bit variant.  Returns 0 for x == 0. */
static_always_inline u64
get_lowest_set_bit (u64 x)
{
#ifndef __BMI__
  return x & -x;
#else
  return _blsi_u64 (x);
#endif
}
/* 0-based index of the lowest set bit of x; fixed 64-bit variant.
   NOTE(review): __builtin_ctzll is undefined for x == 0 — callers must
   guarantee a non-zero argument. */
static_always_inline u64
get_lowest_set_bit_index (u64 x)
{
return __builtin_ctzll (x);
}
#endif /* included_clib_bitops_h */
/*
+1 -70
View File
@@ -164,25 +164,7 @@
decl __attribute ((destructor)); \
decl
/* Use __builtin_clz if available.  The 64-bit branch uses the `ll`
   builtins; the other branch uses the `l` builtins, which match uword
   width on 32-bit targets (assumes ILP32 there — TODO confirm). */
#if uword_bits == 64
#define count_leading_zeros(x) __builtin_clzll (x)
#define count_trailing_zeros(x) __builtin_ctzll (x)
#else
#define count_leading_zeros(x) __builtin_clzl (x)
#define count_trailing_zeros(x) __builtin_ctzl (x)
#endif
#if defined (count_leading_zeros)
/* Return x with its lowest set bit cleared.  Well-defined for x == 0. */
always_inline uword
clear_lowest_set_bit (uword x)
{
#ifdef __BMI__
  /* BLSR is a BMI1 instruction, so the guard is __BMI__, not __BMI2__;
     dispatch on uword width so 32-bit builds use the 32-bit form. */
  return uword_bits > 32 ? _blsr_u64 (x) : _blsr_u32 (x);
#else
  /* x & (x - 1) clears the lowest set bit and, unlike shifting by
     count_trailing_zeros (x), is defined when x == 0. */
  return x & (x - 1);
#endif
}
#include <vppinfra/bitops.h>
always_inline uword
min_log2 (uword x)
@@ -191,45 +173,6 @@ min_log2 (uword x)
n = count_leading_zeros (x);
return BITS (uword) - n - 1;
}
#else
/* Portable fallback for floor(log2(x)) when count_leading_zeros is not
   available.  Binary-searches the position of the highest set bit:
   `a` holds the remaining value, `b` the current probe width, `r` the
   accumulated bit index. */
always_inline uword
min_log2 (uword x)
{
uword a = x, b = BITS (uword) / 2, c = 0, r = 0;
/* Reduce x to 4 bit result. */
/* Each expansion of `_` halves the probe width: if the top half of `a`
   is non-zero, keep it and credit `b` bits to the result. */
#define _ \
{ \
c = a >> b; \
if (c) a = c; \
if (c) r += b; \
b /= 2; \
}
/* One extra reduction step is needed when uword is 64 bits wide. */
if (BITS (uword) > 32)
_;
_;
_;
_;
#undef _
/* Do table lookup on 4 bit partial. */
/* Each table nibble maps the 4-bit residue `a` to its log2; a nibble
   value >= 4 marks an invalid residue (x == 0), producing ~0. */
if (BITS (uword) > 32)
{
const u64 table = 0x3333333322221104LL;
uword t = (table >> (4 * a)) & 0xf;
r = t < 4 ? r + t : ~0;
}
else
{
const u32 table = 0x22221104;
/* 32-bit table only covers residues 0-7; a >= 8 always has log2 3. */
uword t = (a & 8) ? 3 : ((table >> (4 * a)) & 0xf);
r = t < 4 ? r + t : ~0;
}
return r;
}
#endif
always_inline uword
max_log2 (uword x)
@@ -308,18 +251,6 @@ first_set (uword x)
return x & -x;
}
/* Index of the lowest set bit of x.  Prefers the builtin-backed
   count_trailing_zeros when that macro is defined, otherwise falls back
   to isolating the bit and taking its log2. */
always_inline uword
log2_first_set (uword x)
{
#ifdef count_trailing_zeros
  return count_trailing_zeros (x);
#else
  return min_log2 (first_set (x));
#endif
}
always_inline f64
flt_round_down (f64 x)
{
-1
View File
@@ -15,7 +15,6 @@
*/
#include <vppinfra/clib.h>
#include <vppinfra/bitops.h> /* for count_set_bits */
#include <vppinfra/vec.h>
#include <vppinfra/interrupt.h>
#include <vppinfra/format.h>
-1
View File
@@ -17,7 +17,6 @@
#define included_clib_interrupt_h
#include <vppinfra/clib.h>
#include <vppinfra/bitops.h> /* for count_set_bits */
#include <vppinfra/vec.h>
typedef struct
+1 -1
View File
@@ -38,8 +38,8 @@
#ifndef included_sparse_vec_h
#define included_sparse_vec_h
#include <vppinfra/clib.h>
#include <vppinfra/vec.h>
#include <vppinfra/bitops.h>
/* Sparsely indexed vectors. Basic idea taken from Hacker's delight.
Eliot added ranges. */
-1
View File
@@ -91,7 +91,6 @@
# include <netinet/if_ether.h>
#endif /* __KERNEL__ */
#include <vppinfra/bitops.h> /* foreach_set_bit */
#include <vppinfra/format.h>
#include <vppinfra/error.h>
+12 -24
View File
@@ -27,12 +27,9 @@ clib_compress_u64_x64 (u64 *dst, u64 *src, u64 mask)
mask >>= 4;
}
#else
while (mask)
{
u16 bit = count_trailing_zeros (mask);
mask = clear_lowest_set_bit (mask);
dst++[0] = src[bit];
}
u32 i;
foreach_set_bit_index (i, mask)
dst++[0] = src[i];
#endif
return dst;
}
@@ -93,12 +90,9 @@ clib_compress_u32_x64 (u32 *dst, u32 *src, u64 mask)
mask >>= 8;
}
#else
while (mask)
{
u16 bit = count_trailing_zeros (mask);
mask = clear_lowest_set_bit (mask);
dst++[0] = src[bit];
}
u32 i;
foreach_set_bit_index (i, mask)
dst++[0] = src[i];
#endif
return dst;
}
@@ -150,12 +144,9 @@ clib_compress_u16_x64 (u16 *dst, u16 *src, u64 mask)
mask >>= 32;
}
#else
while (mask)
{
u16 bit = count_trailing_zeros (mask);
mask = clear_lowest_set_bit (mask);
dst++[0] = src[bit];
}
u32 i;
foreach_set_bit_index (i, mask)
dst++[0] = src[i];
#endif
return dst;
}
@@ -203,12 +194,9 @@ clib_compress_u8_x64 (u8 *dst, u8 *src, u64 mask)
u8x64_compress_store (sv[0], mask, dst);
dst += _popcnt64 (mask);
#else
while (mask)
{
u16 bit = count_trailing_zeros (mask);
mask = clear_lowest_set_bit (mask);
dst++[0] = src[bit];
}
u32 i;
foreach_set_bit_index (i, mask)
dst++[0] = src[i];
#endif
return dst;
}