Compare commits
v24.10-rc1 ... v24.06-rc2
12 Commits

SHA1
55457075d9
4c872e9f75
6e0f70a035
d8d3ad99af
2f93a43118
29ad53b932
2f06443855
13780d4383
1b955f9ee6
2ab34c8cb0
1121f0067c
b3304b2b76
@@ -27,7 +27,6 @@ ForEachMacros:
- 'foreach_vlib_frame_bitmap_set_bit_index'
- 'FOREACH_ARRAY_ELT'
- 'RTE_ETH_FOREACH_DEV'
- 'foreach_clib_stack_frame'
- 'foreach_vnet_dev_rx_queue_runtime'
- 'foreach_vnet_dev_counter'
- 'foreach_vnet_dev_port_rx_queue'
.gitignore (vendored, 11 lines changed)
@@ -24,13 +24,6 @@
/build/external/vpp-*
/build/external/dpdk_mlx_default.sh
/build/external/downloads/
/build/external/deb/_build/
/build/external/deb/debian/changelog
/build/external/deb/debian/debhelper-build-stamp
/build/external/deb/debian/tmp/
/build/external/deb/debian/files/
/build/external/deb/debian/.debhelper/
/build/external/deb/debian/vpp-ext-deps*
/path_setup
/tools/
# start autotools ignore

@@ -130,8 +123,6 @@ compile_commands.json
/extras/hs-test/hs-test
/extras/hs-test/http_server
/extras/hs-test/.build.ok
/extras/hs-test/.build.cov.ok
/extras/hs-test/.last_hst_ppid
/extras/hs-test/summary/

# ./configure

@@ -146,5 +137,3 @@ compile_commands.json
/cmake_install.cmake
/startup.conf
/startup.vpp
# log file archives
/archives
@@ -2,4 +2,4 @@
host=gerrit.fd.io
port=29418
project=vpp
defaultbranch=stable/2410
defaultbranch=stable/2406
MAINTAINERS (16 lines changed)
@@ -460,6 +460,11 @@ M: Dave Barach <vpp@barachs.net>
M: Florin Coras <fcoras@cisco.com>
F: src/plugins/http_static/

Plugin - builtinurl
I: builtinurl
M: Dave Barach <vpp@barachs.net>
F: src/plugins/builtinurl/

Plugin - GTPU
I: gtpu
M: Hongjun Ni <hongjun.ni@intel.com>

@@ -500,10 +505,10 @@ I: memif
M: Damjan Marion <damarion@cisco.com>
F: src/plugins/memif/

Plugin - Marvell Armada device driver
I: armada
Plugin - Marvell MUSDK device driver
I: marvell
M: Damjan Marion <damarion@cisco.com>
F: src/plugins/dev_armada/
F: src/plugins/marvell/

Plugin - performance counter
I: perfmon

@@ -847,11 +852,6 @@ I: tracenode
M: Maxime Peim <mpeim@cisco.com>
F: src/plugins/tracenode

Plugin - Packet Vector Tunnel Interface
I: pvti
M: Andrew Yourtchenko <ayourtch@gmail.com>
F: src/plugins/pvti

cJSON
I: cjson
M: Ole Troan <ot@cisco.com>
Makefile (75 lines changed)
@@ -72,7 +72,7 @@ DEB_DEPENDS += debhelper dkms git libtool libapr1-dev dh-python
DEB_DEPENDS += libconfuse-dev git-review exuberant-ctags cscope pkg-config
DEB_DEPENDS += gcovr lcov chrpath autoconf libnuma-dev
DEB_DEPENDS += python3-all python3-setuptools check
DEB_DEPENDS += libffi-dev python3-ply libunwind-dev
DEB_DEPENDS += libffi-dev python3-ply
DEB_DEPENDS += cmake ninja-build python3-jsonschema python3-yaml
DEB_DEPENDS += python3-venv # ensurepip
DEB_DEPENDS += python3-dev python3-pip

@@ -89,21 +89,11 @@ DEB_DEPENDS += tshark
DEB_DEPENDS += jq # for extracting test summary from .json report (hs-test)

LIBFFI=libffi6 # works on all but 20.04 and debian-testing
ifeq ($(OS_VERSION_ID),24.04)
DEB_DEPENDS += libssl-dev
DEB_DEPENDS += llvm clang clang-format-15
# overwrite clang-format version to run `make checkstyle` successfully
# TODO: remove once ubuntu 20.04 is deprecated and extras/scripts/checkstyle.sh is upgraded to 15
export CLANG_FORMAT_VER=15
LIBFFI=libffi8
DEB_DEPENDS += enchant-2 # for docs
else ifeq ($(OS_VERSION_ID),22.04)

ifeq ($(OS_VERSION_ID),22.04)
DEB_DEPENDS += python3-virtualenv
DEB_DEPENDS += libssl-dev
DEB_DEPENDS += clang clang-format-15
# overwrite clang-format version to run `make checkstyle` successfully
# TODO: remove once ubuntu 20.04 is deprecated and extras/scripts/checkstyle.sh is upgraded to 15
export CLANG_FORMAT_VER=15
DEB_DEPENDS += clang clang-format-11
LIBFFI=libffi7
DEB_DEPENDS += enchant-2 # for docs
else ifeq ($(OS_VERSION_ID),20.04)

@@ -112,6 +102,9 @@ else ifeq ($(OS_VERSION_ID),20.04)
DEB_DEPENDS += clang clang-format-11
LIBFFI=libffi7
DEB_DEPENDS += enchant-2 # for docs
else ifeq ($(OS_VERSION_ID),20.10)
DEB_DEPENDS += clang clang-format-11
LIBFFI=libffi8ubuntu1
else ifeq ($(OS_ID)-$(OS_VERSION_ID),debian-10)
DEB_DEPENDS += virtualenv
else ifeq ($(OS_ID)-$(OS_VERSION_ID),debian-11)

@@ -120,10 +113,9 @@ else ifeq ($(OS_ID)-$(OS_VERSION_ID),debian-11)
LIBFFI=libffi7
else ifeq ($(OS_ID)-$(OS_VERSION_ID),debian-12)
DEB_DEPENDS += virtualenv
DEB_DEPENDS += clang-14 clang-format-15
DEB_DEPENDS += clang-14 clang-format-14
# for extras/scripts/checkstyle.sh
# TODO: remove once ubuntu 20.04 is deprecated and extras/scripts/checkstyle.sh is upgraded to -15
export CLANG_FORMAT_VER=15
export CLANG_FORMAT_VER=14
LIBFFI=libffi8
else
DEB_DEPENDS += clang-11 clang-format-11

@@ -253,7 +245,6 @@ help:
@echo " build - build debug binaries"
@echo " build-release - build release binaries"
@echo " build-coverity - build coverity artifacts"
@echo " build-vpp-gcov - build gcov vpp only"
@echo " rebuild - wipe and build debug binaries"
@echo " rebuild-release - wipe and build release binaries"
@echo " run - run debug binary"

@@ -261,8 +252,6 @@ help:
@echo " debug - run debug binary with debugger"
@echo " debug-release - run release binary with debugger"
@echo " test - build and run tests"
@echo " test-cov-hs - build and run host stack tests with coverage"
@echo " test-cov-both - build and run python and host stack tests, merge coverage data"
@echo " test-help - show help on test framework"
@echo " run-vat - run vpp-api-test tool"
@echo " pkg-deb - build DEB packages"

@@ -279,10 +268,8 @@ help:
@echo " checkstyle-commit - check commit message format"
@echo " checkstyle-python - check python coding style using 'black' formatter"
@echo " checkstyle-api - check api for incompatible changes"
@echo " checkstyle-go - check style of .go source files"
@echo " fixstyle - fix coding style"
@echo " fixstyle-python - fix python coding style using 'black' formatter"
@echo " fixstyle-go - format .go source files"
@echo " doxygen - DEPRECATED - use 'make docs'"
@echo " bootstrap-doxygen - DEPRECATED"
@echo " wipe-doxygen - DEPRECATED"

@@ -291,7 +278,6 @@ help:
@echo " json-api-files - (re)-generate json api files"
@echo " json-api-files-debug - (re)-generate json api files for debug target"
@echo " go-api-files - (re)-generate golang api files"
@echo " cleanup-hst - stops and removes all docker contaiers and namespaces"
@echo " docs - Build the Sphinx documentation"
@echo " docs-venv - Build the virtual environment for the Sphinx docs"
@echo " docs-clean - Remove the generated files from the Sphinx docs"

@@ -454,10 +440,6 @@ rebuild: wipe build
build-release: $(BR)/.deps.ok
$(call make,$(PLATFORM),$(addsuffix -install,$(TARGETS)))

.PHONY: build-vpp-gcov
build-vpp-gcov:
$(call test,vpp_gcov)

.PHONY: wipe-release
wipe-release: test-wipe $(BR)/.deps.ok
$(call make,$(PLATFORM),$(addsuffix -wipe,$(TARGETS)))

@@ -475,7 +457,6 @@ define test
VPP_BIN=$(BR)/install-$(1)-native/vpp/bin/vpp \
VPP_INSTALL_PATH=$(BR)/install-$(1)-native/ \
EXTENDED_TESTS=$(EXTENDED_TESTS) \
DECODE_PCAPS=$(DECODE_PCAPS) \
TEST_GCOV=$(TEST_GCOV) \
PYTHON=$(PYTHON) \
OS_ID=$(OS_ID) \

@@ -505,20 +486,6 @@ test-cov:
$(eval TEST_GCOV=1)
$(call test,vpp_gcov,cov)

.PHONY: test-cov-hs
test-cov-hs:
@$(MAKE) -C extras/hs-test build-cov
@$(MAKE) -C extras/hs-test test-cov

.PHONY: test-cov-both
test-cov-both:
@echo "Running Python, Golang tests and merging coverage reports."
find $(BR) -name '*.gcda' -delete
@$(MAKE) test-cov
find $(BR) -name '*.gcda' -delete
@$(MAKE) test-cov-hs
@$(MAKE) cov-merge

.PHONY: test-cov-build
test-cov-build:
$(eval CC=gcc)

@@ -535,14 +502,6 @@ test-cov-post:
$(eval CC=gcc)
$(call test,vpp_gcov,cov-post)

.PHONY: cov-merge
cov-merge:
@lcov --add-tracefile $(BR)/test-coverage-merged/coverage-filtered.info \
-a $(BR)/test-coverage-merged/coverage-filtered1.info -o $(BR)/test-coverage-merged/coverage-merged.info
@genhtml $(BR)/test-coverage-merged/coverage-merged.info \
--output-directory $(BR)/test-coverage-merged/html
@echo "Code coverage report is in $(BR)/test-coverage-merged/html/index.html"

.PHONY: test-all
test-all:
$(eval EXTENDED_TESTS=1)

@@ -592,7 +551,7 @@ test-shell-cov:

.PHONY: test-dep
test-dep:
@$(MAKE) -C test test-dep
@make -C test test-dep

.PHONY: test-doc
test-doc:

@@ -752,10 +711,6 @@ json-api-files-debug:
go-api-files: json-api-files
$(WS_ROOT)/src/tools/vppapigen/generate_go.py $(ARGS)

.PHONY: cleanup-hst
cleanup-hst:
$(MAKE) -C extras/hs-test cleanup-hst

.PHONY: ctags
ctags: ctags.files
@ctags --totals --tag-relative=yes -L $<

@@ -793,16 +748,8 @@ checkstyle-test:
checkstyle-python:
@$(MAKE) -C test checkstyle-python-all

.PHONY: checkstyle-go
checkstyle-go:
@$(MAKE) -C extras/hs-test checkstyle-go

.PHONY: fixstyle-go
fixstyle-go:
@$(MAKE) -C extras/hs-test fixstyle-go

.PHONY: checkstyle-all
checkstyle-all: checkstyle-commit checkstyle checkstyle-python docs-spell checkstyle-go
checkstyle-all: checkstyle-commit checkstyle checkstyle-python docs-spell

.PHONY: fixstyle
fixstyle:

@@ -997,7 +997,7 @@ $(PLATFORM_IMAGE_DIR)/ro.img ro-image: $(patsubst %,%-find-source,$(ROOT_PACKAGE
| xargs sign $($(PLATFORM)_public_key) \
$($(PLATFORM)_private_key_passphrase) ; \
fi ; \
: $(MAKE) read-only file system ; \
: make read-only file system ; \
mksquashfs \
$${tmp_dir} $${ro_image} \
-no-exports -no-progress -no-recovery ; \
build/external/Makefile (vendored, 12 lines changed)
@@ -21,7 +21,7 @@ INSTALL_DIR ?= $(CURDIR)/_install
PKG_VERSION ?= $(shell git describe --abbrev=0 --match 'v[0-9]*' | cut -d- -f1 | cut -dv -f2 | cut -d. -f1,2)
PKG_SUFFIX ?= $(shell git log --oneline v$(PKG_VERSION)-rc0.. . | wc -l)
SOURCE_DATE_EPOCH ?= $(shell git log -1 --pretty=%ct .)
ifeq ($(shell uname), FreeBSD)
ifeq ($shell(uname), FreeBSD)
JOBS := $(shell nproc)
else
JOBS := $(if $(shell [ -f /proc/cpuinfo ] && head /proc/cpuinfo),\

@@ -30,12 +30,6 @@ endif # FreeBSD

B := $(BUILD_DIR)
I := $(INSTALL_DIR)
ifeq ($(WORKSPACE),)
L := $(B)
else
L := $(WORKSPACE)/archives/vpp-ext-deps
$(shell rm -rf $(L) && mkdir -p $(L))
endif

ifneq ($(shell which cmake3),)
CMAKE?=cmake3

@@ -100,7 +94,7 @@ build-deb: $(DEV_DEB)

install-deb:
ifneq ($(INSTALLED_VER),$(DEB_VER)-$(PKG_SUFFIX))
@$(MAKE) $(DEV_DEB)
@make $(DEV_DEB)
@sudo dpkg -i $(DEV_DEB)
else
@echo "=========================================================="

@@ -181,7 +175,7 @@ ifeq ($(INSTALLED_VER)$(INSTALLED_RPM_VER),)
$(MAKE) config
else
ifneq ($(INSTALLED_VER),)
$(MAKE) check-deb
make check-deb
endif
ifneq ($(INSTALLED_RPM_VER),)
$(MAKE) check-rpm
build/external/mlx_rdma_dpdk_matrix.txt (vendored, 1 line changed)
@@ -1,3 +1,2 @@
rdma=49.0 dpdk=23.11
rdma=51.0 dpdk=24.03
rdma=52.0 dpdk=24.07
build/external/packages.mk (vendored, 6 lines changed)
@@ -21,9 +21,9 @@ $1_src_dir ?= $(B)/src-$1
$1_patch_dir ?= $(CURDIR)/patches/$1_$($1_version)
$1_build_dir ?= $(B)/build-$1
$1_install_dir ?= $(I)
$1_config_log ?= $(L)/$1.config.log
$1_build_log ?= $(L)/$1.build.log
$1_install_log ?= $(L)/$1.install.log
$1_config_log ?= $(B)/$1.config.log
$1_build_log ?= $(B)/$1.build.log
$1_install_log ?= $(B)/$1.install.log

##############################################################################
# Download
build/external/packages/dpdk.mk (vendored, 5 lines changed)
@@ -19,10 +19,9 @@ DPDK_FAILSAFE_PMD ?= n
DPDK_MACHINE ?= default
DPDK_MLX_IBV_LINK ?= static

dpdk_version ?= 24.07
dpdk_version ?= 24.03
dpdk_base_url ?= http://fast.dpdk.org/rel
dpdk_tarball := dpdk-$(dpdk_version).tar.xz
dpdk_tarball_md5sum_24.07 := 48151b1bd545cd95447979fa033199bb
dpdk_tarball_md5sum_24.03 := a98da848d6ba09808ef00f9a26aaa49a
dpdk_tarball_md5sum_23.11 := 896c09f5b45b452bd77287994650b916
dpdk_tarball_md5sum_23.07 := 2b6d57f077585cb15b885482362fd47f

@@ -139,7 +138,7 @@ DPDK_DRIVERS_DISABLED := $(shell echo $(DPDK_DRIVERS_DISABLED) | tr -d '\\\t ')
DPDK_LIBS_DISABLED := $(shell echo $(DPDK_LIBS_DISABLED) | tr -d '\\\t ')

SED=sed
ifeq ($(shell uname), FreeBSD)
ifeq ($shell(uname), FreeBSD)
SED=gsed
endif
build/external/packages/ipsec-mb.mk (vendored, 2 lines changed)
@@ -32,7 +32,7 @@ define ipsec-mb_config_cmds
endef

define ipsec-mb_build_cmds
@$(MAKE) -C $(ipsec-mb_src_dir)/lib -j \
@make -C $(ipsec-mb_src_dir)/lib -j \
SHARED=n \
SAFE_PARAM=n \
SAFE_LOOKUP=n \
build/external/packages/rdma-core.mk (vendored, 3 lines changed)
@@ -23,11 +23,10 @@ RDMA_CORE_DEBUG?=n
# 2. Verify that the file build/external/dpdk_mlx_default.sh was generated
# and contains 'DPDK_MLX_DEFAULT=y'
#
rdma-core_version := 52.0
rdma-core_version := 51.0
rdma-core_tarball := rdma-core-$(rdma-core_version).tar.gz
rdma-core_tarball_md5sum_49.0 := 9fe3909f19c7e0276c9e546411bbb49c
rdma-core_tarball_md5sum_51.0 := ed95d79f782ea00bd7233d453abd60b3
rdma-core_tarball_md5sum_52.0 := c78dba484aac72eb8586d88f7b399b0f
rdma-core_tarball_md5sum := $(rdma-core_tarball_md5sum_$(rdma-core_version))
rdma-core_tarball_strip_dirs := 1
rdma-core_url := http://github.com/linux-rdma/rdma-core/releases/download/v$(rdma-core_version)/$(rdma-core_tarball)
build/external/packages/xdp-tools.mk (vendored, 6 lines changed)
@@ -24,15 +24,15 @@ define xdp-tools_config_cmds
endef

define xdp-tools_build_cmds
@cd ${xdp-tools_src_dir} && $(MAKE) V=1 BUILD_STATIC_ONLY=y > $(xdp-tools_build_log)
@cd ${xdp-tools_src_dir} && make V=1 BUILD_STATIC_ONLY=y > $(xdp-tools_build_log)
endef

define xdp-tools_install_cmds
@rm -f $(xdp-tools_install_log)
@cd ${xdp-tools_src_dir} && \
$(MAKE) -C lib/libbpf/src install V=1 BUILD_STATIC_ONLY=y PREFIX='' DESTDIR='$(xdp-tools_install_dir)' >> $(xdp-tools_install_log)
make -C lib/libbpf/src install V=1 BUILD_STATIC_ONLY=y PREFIX='' DESTDIR='$(xdp-tools_install_dir)' >> $(xdp-tools_install_log)
@cd ${xdp-tools_src_dir} && \
$(MAKE) libxdp_install V=1 BUILD_STATIC_ONLY=y PREFIX='' DESTDIR='$(xdp-tools_install_dir)' >> $(xdp-tools_install_log)
make libxdp_install V=1 BUILD_STATIC_ONLY=y PREFIX='' DESTDIR='$(xdp-tools_install_dir)' >> $(xdp-tools_install_log)
endef

$(eval $(call package,xdp-tools))
@@ -70,7 +70,7 @@ venv:
spell: venv
@( \
. ${VENV_DIR}/bin/activate; \
$(MAKE) -C ${SCRIPTS_DIR} generate && \
make -C ${SCRIPTS_DIR} generate && \
$(SPHINXBUILD) -b spelling $(SPHINXOPTS) $(BUILDDIR_SRC) $(BUILDDIR_OUT); \
)

@@ -81,7 +81,7 @@ rebuild-spell: clean spell
docs: venv
@( \
. ${VENV_DIR}/bin/activate; \
$(MAKE) -C ${SCRIPTS_DIR} generate && \
make -C ${SCRIPTS_DIR} generate && \
$(SPHINXBUILD) $(SPHINXOPTS) -b html $(BUILDDIR_SRC) $(BUILDDIR_OUT); \
)

@@ -90,7 +90,7 @@ rebuild: clean docs

.PHONY: clean
clean:
@$(MAKE) -C ${SCRIPTS_DIR} clean
@make -C ${SCRIPTS_DIR} clean

.PHONY: build
build: docs
@@ -6,8 +6,9 @@ Release notes
.. toctree::
   :maxdepth: 2

   v24.06
   v24.02
   v23.10
   v23.06
   v23.02
   v22.10.1
   past
@@ -6,8 +6,6 @@ Past releases
.. toctree::
   :maxdepth: 1

   v23.02
   v22.10.1
   v22.10
   v22.06.1
   v22.06
File diff suppressed because it is too large
@@ -497,9 +497,6 @@ The buffers Section
buffers-per-numa 128000
default data-size 2048
page-size default-hugepage
numa 1 {
buffers 64000
}
}

buffers-per-numa number

@@ -535,33 +532,6 @@ Set the page size for buffer allocation
page-size default
page-size default-hugepage

numa <numa index> { .. }
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Settings specific to a single NUMA domain.

.. code-block:: console

   buffers {
      numa 0 {
         buffers 32768
      }
   }

buffers <n>
^^^^^^^^^^^^^^^

The number of buffers allocated for this specific NUMA domain.
If this is set to zero, no buffers are allocated for this domain.

By default, the value configured in **buffers-per-numa** is used.

.. code-block:: console

   numa 0 {
      buffers 32768
   }


The dpdk Section
----------------
@@ -1 +0,0 @@
../../../src/plugins/dev_armada/README.rst
@@ -1 +0,0 @@
../../../src/plugins/http/http_plugin.rst
@@ -19,9 +19,9 @@ For more on plugins please refer to :ref:`add_plugin`.

   quic
   cnat
   dev_armada
   lcp
   srv6/index
   marvell
   lldp
   nat64
   nat44_ei_ha

@@ -42,4 +42,3 @@ For more on plugins please refer to :ref:`add_plugin`.
   bufmon_doc
   ip_session_redirect_doc
   bpf_trace_filter
   http
docs/developer/plugins/marvell.rst (symbolic link, 1 line)
@@ -0,0 +1 @@
../../../src/plugins/marvell/README.rst
@@ -805,7 +805,6 @@ operationalize
Optimisations
optimised
os
osi
outacl
packagecloud
papi

@@ -903,9 +902,6 @@ pthreads
pton
pushingapatch
putatively
pvti
PVTI
Pvti
pwait
py
pypi

@@ -1007,7 +1003,6 @@ screenshot
scrollbar
scrollbars
sctp
sdl
sealert
seatbelt
seg
@@ -19,7 +19,8 @@ why returning data in .json format tends to work out pretty well.
::

   static int
   handle_get_status (hss_url_handler_args_t *args)
   handle_get_status (http_builtin_method_type_t reqtype,
                      u8 * request, http_session_t * hs)
   {
     my_main_t *mm = &my_main;
     u8 *s = 0;

@@ -33,11 +34,11 @@ why returning data in .json format tends to work out pretty well.
     s = format (s, "}}");

     /* And tell the static server plugin how to send the results */
     args->data = s;
     args->data_len = vec_len (s);
     args->ct = HTTP_CONTENT_APP_JSON;
     args->free_vec_data = 1; /* free s when done with it, in the framework */
     return HSS_URL_HANDLER_OK;
     hs->data = s;
     hs->data_offset = 0;
     hs->cache_pool_index = ~0;
     hs->free_data = 1; /* free s when done with it, in the framework */
     return 0;
   }

Words to the Wise: Chrome has a very nice set of debugging tools. Select

@@ -52,7 +53,7 @@ considerable amount of time debugging .json bugs.
Step 2: Register URL handlers with the server
---------------------------------------------

Call ``hss_register_url_handler`` as shown. It’s likely
Call http_static_server_register_builtin_handler() as shown. It’s likely
but not guaranteed that the static server plugin will be available.

::

@@ -64,7 +65,7 @@ but not guaranteed that the static server plugin will be available.

   /* Look up the builtin URL registration handler */
   fp = vlib_get_plugin_symbol ("http_static_plugin.so",
                                "hss_register_url_handler");
                                "http_static_server_register_builtin_handler");

   if (fp == 0)
     {

@@ -258,7 +259,7 @@ above:

::

   http static server url-handlers www-root /myhugosite/public uri tcp://0.0.0.0/2345 cache-size 5m fifo-size 8192
   http static server www-root /myhugosite/public uri tcp://0.0.0.0/2345 cache-size 5m fifo-size 8192

The www-root must be specified, and must correctly name the compiled
hugo site root. If your Hugo site is located at /myhugosite, specify
@@ -3,4 +3,5 @@ create tap host-if-name lstack host-ip4-addr 192.168.10.2/24
set int ip address tap0 192.168.10.1/24
set int state tap0 up

http static server url-handlers www-root <path> uri tcp://0.0.0.0/1234 cache-size 10m fifo-size 2048
http static server www-root <path> uri tcp://0.0.0.0/1234 cache-size 10m fifo-size 2048
builtinurl enable
@@ -1,10 +1,5 @@
export HS_ROOT=$(CURDIR)

# sets WS_ROOT if called from extras/hs-test
ifeq ($(WS_ROOT),)
export WS_ROOT=$(HS_ROOT)/../..
endif

ifeq ($(VERBOSE),)
VERBOSE=false
endif

@@ -21,10 +16,6 @@ ifeq ($(TEST),)
TEST=all
endif

ifeq ($(TEST-HS),)
TEST-HS=all
endif

ifeq ($(DEBUG),)
DEBUG=false
endif

@@ -41,10 +32,6 @@ ifeq ($(REPEAT),)
REPEAT=0
endif

ifeq ($(CPU0),)
CPU0=false
endif

ifeq ($(VPPSRC),)
VPPSRC=$(shell pwd)/../..
endif

@@ -57,92 +44,73 @@ ifeq ($(ARCH),)
ARCH=$(shell dpkg --print-architecture)
endif

list_tests = @go run github.com/onsi/ginkgo/v2/ginkgo --dry-run -v --no-color --seed=2 | head -n -1 | grep 'Test' | \
sed 's/^/* /; s/\(Suite\) /\1\//g'

.PHONY: help
help:
@echo "Make targets:"
@echo " test - run tests"
@echo " test-debug - run tests (vpp debug image)"
@echo " test-leak - run memory leak tests (vpp debug image)"
@echo " build - build test infra"
@echo " build-cov - coverage build of VPP and Docker images"
@echo " build-debug - build test infra (vpp debug image)"
@echo " build-go - just build golang files"
@echo " checkstyle-go - check style of .go source files"
@echo " fixstyle-go - format .go source files"
@echo " cleanup-hst - stops and removes all docker contaiers and namespaces"
@echo " list-tests - list all tests"
@echo " test - run tests"
@echo " test-debug - run tests (vpp debug image)"
@echo " build - build test infra"
@echo " build-debug - build test infra (vpp debug image)"
@echo " build-go - just build golang files"
@echo " fixstyle - format .go source files"
@echo " list-tests - list all tests"
@echo
@echo "'make build' arguments:"
@echo "make build arguments:"
@echo " UBUNTU_VERSION - ubuntu version for docker image"
@echo " HST_EXTENDED_TESTS - build extended tests"
@echo " HST_EXTENDED_TESTS - build extended tests"
@echo
@echo "'make test' arguments:"
@echo "make test arguments:"
@echo " PERSIST=[true|false] - whether clean up topology and dockers after test"
@echo " VERBOSE=[true|false] - verbose output"
@echo " UNCONFIGURE=[true|false] - unconfigure selected test"
@echo " DEBUG=[true|false] - attach VPP to GDB"
@echo " TEST=[test-name] - specific test to run"
@echo " CPUS=[n-cpus] - number of cpus to allocate to VPP and containers"
@echo " CPUS=[n-cpus] - number of cpus to run with vpp"
@echo " VPPSRC=[path-to-vpp-src] - path to vpp source files (for gdb)"
@echo " PARALLEL=[n-cpus] - number of test processes to spawn to run in parallel"
@echo " REPEAT=[n] - repeat tests up to N times or until a failure occurs"
@echo " CPU0=[true|false] - use cpu0"
@echo
@echo "List of all tests:"
@$(MAKE) list-tests
$(call list_tests)

.PHONY: list-tests
list-tests:
@go run github.com/onsi/ginkgo/v2/ginkgo --dry-run -v --no-color --seed=2 | head -n -1 | grep 'Test' | \
sed 's/^/* /; s/\(Suite\) /\1\//g'
$(call list_tests)

.PHONY: build-vpp-release
build-vpp-release:
@$(MAKE) -C ../.. build-release
@make -C ../.. build-release

.PHONY: build-vpp-debug
build-vpp-debug:
@$(MAKE) -C ../.. build

.PHONY: build-vpp-gcov
build-vpp-gcov:
@$(MAKE) -C ../.. build-vpp-gcov
@make -C ../.. build

.build.ok: build
@touch .build.ok

.build.cov.ok: build-vpp-gcov
@touch .build.cov.ok

.build_debug.ok: build-debug
@touch .build.ok

.PHONY: test
test: .deps.ok .build.ok
@bash ./hs_test.sh --persist=$(PERSIST) --verbose=$(VERBOSE) \
# '-' ignores the exit status, it is set in compress.sh
# necessary so gmake won't skip executing the bash script
-bash ./test --persist=$(PERSIST) --verbose=$(VERBOSE) \
--unconfigure=$(UNCONFIGURE) --debug=$(DEBUG) --test=$(TEST) --cpus=$(CPUS) \
--vppsrc=$(VPPSRC) --parallel=$(PARALLEL) --repeat=$(REPEAT) --cpu0=$(CPU0); \
./script/compress.sh $$?

--vppsrc=$(VPPSRC) --parallel=$(PARALLEL) --repeat=$(REPEAT)
@bash ./script/compress.sh

.PHONY: test-debug
test-debug: .deps.ok .build_debug.ok
@bash ./hs_test.sh --persist=$(PERSIST) --verbose=$(VERBOSE) \
# '-' ignores the exit status, it is set in compress.sh
# necessary so gmake won't skip executing the bash script
-bash ./test --persist=$(PERSIST) --verbose=$(VERBOSE) \
--unconfigure=$(UNCONFIGURE) --debug=$(DEBUG) --test=$(TEST) --cpus=$(CPUS) \
--vppsrc=$(VPPSRC) --parallel=$(PARALLEL) --repeat=$(REPEAT) --debug_build=true \
--cpu0=$(CPU0); \
./script/compress.sh $$?

.PHONY: test-cov
test-cov: .deps.ok .build.cov.ok
@bash ./hs_test.sh --persist=$(PERSIST) --verbose=$(VERBOSE) \
--unconfigure=$(UNCONFIGURE) --debug=$(DEBUG) --test=$(TEST-HS) --cpus=$(CPUS) \
--vppsrc=$(VPPSRC) --cpu0=$(CPU0); \
./script/compress.sh $$?

.PHONY: test-leak
test-leak: .deps.ok .build_debug.ok
@bash ./hs_test.sh --test=$(TEST) --debug_build=true --leak_check=true --vppsrc=$(VPPSRC)
--vppsrc=$(VPPSRC) --parallel=$(PARALLEL) --repeat=$(REPEAT)
@bash ./script/compress.sh

.PHONY: build-go
build-go:

@@ -154,12 +122,6 @@ build: .deps.ok build-vpp-release build-go
bash ./script/build_hst.sh release
@touch .build.ok

.PHONY: build-cov
build-cov: .deps.ok build-vpp-gcov build-go
@rm -f .build.cov.ok
bash ./script/build_hst.sh gcov
@touch .build.cov.ok

.PHONY: build-debug
build-debug: .deps.ok build-vpp-debug build-go
@rm -f .build.ok

@@ -167,66 +129,23 @@ build-debug: .deps.ok build-vpp-debug build-go
@touch .build.ok

.deps.ok:
@sudo $(MAKE) install-deps
@sudo make install-deps

.PHONY: install-deps
install-deps:
@rm -f .deps.ok
@apt-get update \
&& apt-get install -y apt-transport-https ca-certificates curl software-properties-common \
apache2-utils wrk bridge-utils gpg
apache2-utils wrk bridge-utils
@if [ ! -f /usr/share/keyrings/docker-archive-keyring.gpg ] ; then \
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg; \
echo "deb [arch=$(ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(UBUNTU_CODENAME) stable" \
| sudo tee /etc/apt/sources.list.d/docker.list > /dev/null ; \
apt-get update; \
fi
@apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
@touch .deps.ok

.PHONY: checkstyle-go
checkstyle-go:
@output=$$(gofmt -d $${WS_ROOT}); \
if [ -z "$$output" ]; then \
echo "*******************************************************************"; \
echo "Checkstyle OK."; \
echo "*******************************************************************"; \
else \
echo "$$output"; \
echo "*******************************************************************"; \
echo "Checkstyle failed. Use 'make fixstyle-go' or fix errors manually."; \
echo "*******************************************************************"; \
exit 1; \
fi

.PHONY: fixstyle-go
fixstyle-go:
@echo "Modified files:"
@gofmt -w -l $(WS_ROOT)
.PHONY: fixstyle
fixstyle:
@gofmt -w .
@go mod tidy
@echo "*******************************************************************"
@echo "Fixstyle done."
@echo "*******************************************************************"

.PHONY: cleanup-hst
cleanup-hst:
@if [ ! -f ".last_hst_ppid" ]; then \
echo "'.last_hst_ppid' file does not exist."; \
exit 1; \
fi
@echo "****************************"
@echo "Removing docker containers:"
@# "-" ignores errors
@-sudo docker rm $$(sudo docker stop $$(sudo docker ps -a -q --filter "name=$$(cat .last_hst_ppid)") -t 0)
@echo "****************************"
@echo "Removing IP address files:"
@find . -type f -regextype egrep -regex '.*[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' -exec sudo rm -v {} \;
@echo "****************************"
@echo "Removing network namespaces:"
@for ns in $$(ip netns list | grep $$(cat .last_hst_ppid) | awk '{print $$1}'); do \
echo $$ns; \
sudo ip netns delete $$ns; \
done
@echo "****************************"
@echo "Done."
@echo "****************************"
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
package hst
package main

import (
	"errors"

@@ -84,7 +84,7 @@ func (a *Ip4AddressAllocator) createIpAddress(networkNumber int, numberOfAddress
	return address, nil
}

func (a *Ip4AddressAllocator) DeleteIpAddresses() {
func (a *Ip4AddressAllocator) deleteIpAddresses() {
	for ip := range a.assignedIps {
		os.Remove(a.assignedIps[ip])
	}
extras/hs-test/container.go (new file, 373 lines)
File diff suppressed because it is too large
extras/hs-test/cpu.go (new file, 90 lines)
@@ -0,0 +1,90 @@
package main

import (
	"bufio"
	"errors"
	"fmt"
	"os"
	"os/exec"
	"strings"
)

var CgroupPath = "/sys/fs/cgroup/"

type CpuContext struct {
	cpuAllocator *CpuAllocatorT
	cpus         []int
}

func (c *CpuContext) Release() {
	c.cpuAllocator.cpus = append(c.cpuAllocator.cpus, c.cpus...)
	c.cpus = c.cpus[:0] // empty the list
}

type CpuAllocatorT struct {
	cpus []int
}

var cpuAllocator *CpuAllocatorT = nil

func (c *CpuAllocatorT) Allocate(nCpus int) (*CpuContext, error) {
	var cpuCtx CpuContext

	if len(c.cpus) < nCpus {
		return nil, fmt.Errorf("could not allocate %d CPUs; available: %d", nCpus, len(c.cpus))
	}
	cpuCtx.cpus = c.cpus[0:nCpus]
	cpuCtx.cpuAllocator = c
	c.cpus = c.cpus[nCpus:]
	return &cpuCtx, nil
}

func (c *CpuAllocatorT) readCpus() error {
	var first, last int

	// Path depends on cgroup version. We need to check which version is in use.
	// For that following command can be used: 'stat -fc %T /sys/fs/cgroup/'
	// In case the output states 'cgroup2fs' then cgroups v2 is used, 'tmpfs' in case cgroups v1.
	cmd := exec.Command("stat", "-fc", "%T", "/sys/fs/cgroup/")
	byteOutput, err := cmd.CombinedOutput()
	if err != nil {
		return err
	}
	CpuPath := CgroupPath
	if strings.Contains(string(byteOutput), "tmpfs") {
		CpuPath += "cpuset/cpuset.effective_cpus"
	} else if strings.Contains(string(byteOutput), "cgroup2fs") {
		CpuPath += "cpuset.cpus.effective"
	} else {
		return errors.New("cgroup unknown fs: " + string(byteOutput))
	}

	file, err := os.Open(CpuPath)
	if err != nil {
		return err
	}
	defer file.Close()

	sc := bufio.NewScanner(file)
	sc.Scan()
	line := sc.Text()
	_, err = fmt.Sscanf(line, "%d-%d", &first, &last)
	if err != nil {
		return err
	}
	for i := first; i <= last; i++ {
		c.cpus = append(c.cpus, i)
	}
	return nil
}

func CpuAllocator() (*CpuAllocatorT, error) {
	if cpuAllocator == nil {
		cpuAllocator = new(CpuAllocatorT)
		err := cpuAllocator.readCpus()
		if err != nil {
			return nil, err
		}
	}
	return cpuAllocator, nil
}
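For context, a minimal usage sketch of the allocator introduced in cpu.go above: obtain the singleton with CpuAllocator(), reserve CPUs for a test run with Allocate(), and hand them back with Release() when the test is done. The helper below is hypothetical (it is not part of this diff) and assumes it lives in the same package as cpu.go so it can reach the unexported cpus field.

func reserveCpusExample(nCpus int) error {
	// Lazily builds the allocator; readCpus() parses the effective cpuset
	// from cgroups v1 or v2 on first use.
	alloc, err := CpuAllocator()
	if err != nil {
		return err
	}
	// Take nCpus off the free list; fails if fewer are available.
	ctx, err := alloc.Allocate(nCpus)
	if err != nil {
		return err
	}
	// Return the CPUs to the allocator once the caller is finished.
	defer ctx.Release()

	fmt.Printf("running with CPUs %v\n", ctx.cpus)
	return nil
}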
Some files were not shown because too many files have changed in this diff.