Fix[bmqimp]: Close channel on open, reopen, and close timeouts #5549

Workflow file for this run

name: Tests

on:
  push:
    branches:
      - main
      - 'integration/**'
  pull_request:
    branches:
      - main
      - 'integration/**'
  workflow_dispatch:

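# Keep at most one active run per workflow and branch: a newer push cancels the
# in-progress run for the same group (github.run_id keeps other event types unique).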
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

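# Job graph: get_dependencies -> {build_ubuntu, unit_tests_cxx, build_and_test_bmqprometheus_plugin};
# build_ubuntu -> {unit_tests_python, integration_tests_ubuntu, fuzz_tests_ubuntu, integration_tests_storagetool_ubuntu};
# integration_tests_ubuntu -> sanitize_ubuntu; the remaining jobs have no dependencies.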
jobs:
  get_dependencies:
    name: "Dependencies"
    uses: ./.github/workflows/dependencies.yaml

  build_ubuntu:
    name: "Build [ubuntu]"
    needs: get_dependencies
    uses: ./.github/workflows/build-ubuntu.yaml
    with:
      ref: ${{ github.sha }}
      target: "bmqbrkr bmqtool bmqstoragetool all.it"
      save_build_as_artifacts: true
      run_unit_tests: false

  unit_tests_cxx:
    name: "UT [c++]"
    needs: get_dependencies
    uses: ./.github/workflows/build-ubuntu.yaml
    with:
      ref: ${{ github.sha }}
      target: "all.t"
      save_build_as_artifacts: false
      run_unit_tests: true

  unit_tests_python:
    name: UT [python]
    runs-on: ubuntu-latest
    needs: build_ubuntu
    steps:
      - uses: actions/checkout@v4
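      # schemagen and tweakgen regenerate Python sources used by the test suite
      # (the "xsdata[cli]" package installed below is used for schema generation)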
      - name: Run Python Unit Tests
        env:
          PYTHONPATH: ${{ github.workspace }}/src/python
        run: |
          pip install -r ${{ github.workspace }}/src/python/requirements.txt
          pip install "xsdata[cli]"
          cd ${{ github.workspace }}
          src/python/bin/schemagen
          src/python/bin/tweakgen
          pytest src/python

  integration_tests_ubuntu:
    name: IT [${{ matrix.cluster }}/${{ matrix.mode }}/${{ matrix.consistency }}]
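    # 2 modes x 2 cluster flavors x 2 consistency levels = 8 matrix combinations,
    # each run as an independent job (fail-fast is disabled)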
    strategy:
      matrix:
        mode: ["legacy_mode", "fsm_mode"]
        cluster: ["single", "multi"]
        consistency: ["eventual_consistency", "strong_consistency"]
      fail-fast: false
    runs-on: ubuntu-latest
    needs: build_ubuntu
    env:
      RUN_UID: ${{ matrix.cluster }}_${{ matrix.mode }}_${{ matrix.consistency }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: ${{ needs.build_ubuntu.outputs.artifact_key }}
      # Solaris: make sure all IT scripts are compatible with python3.8
      - uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      # Set execution permissions for all .tsk files,
      # which were lost when the artifact was created
      - name: Chmod all .tsk
        run: find ./build -type f -name "*.tsk" -exec chmod +x {} +
      - name: Setup core_pattern
        run: |
          # Default Ubuntu core_pattern: '|/usr/lib/systemd/systemd-coredump %P %u %g %s %t 9223372036854775808 %h'
          # To enable copying core dumps to the failure-logs folder (implemented in src/python/blazingmq/dev/it/cluster.py),
          # the core_pattern must be an absolute path and must contain %p (i.e. the process id)
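          # The pattern set below expands to /cores/<executable>.<pid>.<signal>.<time-of-dump>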
          sudo mkdir /cores
          sudo chmod 777 /cores
          echo "/cores/%e.%p.%s.%t" | sudo tee /proc/sys/kernel/core_pattern
      - name: Run Integration Tests
        run: |
          # Allow core dumps
          ulimit -c unlimited
          go install github.com/kevwan/tproxy@latest
          export GOPATH=$HOME/go
          export PATH=$PATH:/usr/local/go/bin:$GOPATH/bin
          pip install -r ${{ github.workspace }}/src/python/requirements.txt
          ${{ github.workspace }}/src/integration-tests/run-tests \
            "${{ matrix.mode }} and ${{ matrix.cluster }} and ${{ matrix.consistency }}" \
            --log-level ERROR \
            --log-file-level=info \
            --bmq-tolerate-dirty-shutdown \
            --bmq-log-dir=failure-logs \
            --bmq-log-level=INFO \
            --junitxml=integration-tests.xml \
            --tb long \
            --reruns=3 \
            --durations=0 \
            -n logical -v
      - name: Print core information
        if: failure()
        run: |
          sudo apt-get update && sudo apt-get install -qy gdb
          python3 ${{ github.workspace }}/.github/workflows/ext/print_cores.py \
            --cores-dir=${{ github.workspace }}/src/integration-tests/failure-logs \
            --bin-path=${{ github.workspace }}/build/blazingmq/src/applications/bmqbrkr/bmqbrkr.tsk
      - name: Upload failure-logs as artifacts
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: ${{ github.run_id }}_failure_logs_${{ env.RUN_UID }}
          path: ${{ github.workspace }}/src/integration-tests/failure-logs
          retention-days: 5
      - name: Upload broker executable as artifact to debug the core
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: ${{ github.run_id }}_bmqbrkr
          overwrite: true # the broker binary is the same across all matrix runs
          path: ${{ github.workspace }}/build/blazingmq/src/applications/bmqbrkr/bmqbrkr.tsk
          retention-days: 5

  fuzz_tests_ubuntu:
    name: Fuzz test [${{ matrix.request }}]
    strategy:
      matrix:
        request: ["identity", "open_queue", "configure_queue_stream", "put", "confirm", "close_queue", "disconnect"]
      fail-fast: false
    runs-on: ubuntu-latest
    needs: build_ubuntu
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: ${{ needs.build_ubuntu.outputs.artifact_key }}
      # Set execution permissions for all .tsk files,
      # which were lost when the artifact was created
      - name: Chmod all .tsk
        run: |
          find ./build -type f -name "*.tsk" -exec chmod +x {} +
          chmod +x ./build/blazingmq/src/applications/bmqbrkr/run
      - name: Setup core_pattern
        run: |
          sudo mkdir /cores
          sudo chmod 777 /cores
          echo "/cores/%e.%p.%s.%t" | sudo tee /proc/sys/kernel/core_pattern
      - name: Run Fuzz Test
        run: |
          pip install -r ${{ github.workspace }}/src/python/requirements.txt
          cd src/python
          python3 -m blazingmq.dev.fuzztest --broker-dir ${{ github.workspace }}/build/blazingmq/src/applications/bmqbrkr --request ${{ matrix.request }} > ${{ matrix.request }}_fuzz.log
      # Do not print everything: GitHub Actions cannot handle 100MB+ of logs.
      # Typically the error found by the fuzzer relates to the latest requests and the broker state.
      # If this is not enough, consider running the test on your local machine to get the full logs.
      - name: Print Fuzz Log tail
        if: failure()
        run: |
          cd src/python && tail -n 2000 ${{ matrix.request }}_fuzz.log
      - name: Print core information
        if: failure()
        run: |
          sudo apt-get update && sudo apt-get install -qy gdb
          python3 ${{ github.workspace }}/.github/workflows/ext/print_cores.py \
            --cores-dir=/cores \
            --bin-path=${{ github.workspace }}/build/blazingmq/src/applications/bmqbrkr/bmqbrkr.tsk

  sanitize_ubuntu:
    name: Sanitize
    uses: ./.github/workflows/sanitize.yaml
    needs: integration_tests_ubuntu

  integration_tests_storagetool_ubuntu:
    name: IT [Storage tool]
    runs-on: ubuntu-latest
    needs: build_ubuntu
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: ${{ needs.build_ubuntu.outputs.artifact_key }}
      # Set execution permissions for all .tsk files,
      # which were lost when the artifact was created
      - name: Chmod all .tsk
        run: find ./build -type f -name "*.tsk" -exec chmod +x {} +
      - name: Run Storage Tool integration tests
        run: |
          pip install -r ${{ github.workspace }}/src/python/requirements.txt
          cd ${{ github.workspace }}/src/applications/bmqstoragetool/integration-tests
          ./run-tests
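
  # macOS (arm64) is build-only in this workflow: dependencies come from Homebrew
  # and no tests are run on this platform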
  build_macosx:
    name: Build [macosx_${{ matrix.arch }}]
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        include:
          - os: macos-14
            arch: arm64
    steps:
      - uses: actions/checkout@v4
      - name: Set up dependencies
        run: |
          brew install \
            bison \
            flex \
            google-benchmark \
            googletest \
            [email protected] \
            zlib
      - name: Build BlazingMQ
        run: |
          bin/build-darwin.sh

  build_and_test_bmqprometheus_plugin:
    name: "Build Prometheus plugin [ubuntu]"
    runs-on: ubuntu-latest
    needs: get_dependencies
    steps:
      - uses: actions/checkout@v4
      # The following steps restore the cached deps.
      # If the cache is not hit, they build and install the dependencies.
      # If the cache is hit, building the deps is skipped and only make install is run.
      - uses: actions/cache/restore@v4
        with:
          path: deps
          key: ${{ needs.get_dependencies.outputs.cache_key }}
      - name: Set up plugins dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -qy build-essential \
            gdb \
            curl \
            python3.10 \
            cmake \
            ninja-build \
            pkg-config \
            bison \
            libfl-dev \
            libbenchmark-dev \
            libgmock-dev \
            libz-dev \
            autoconf \
            libtool
      - name: Install cached non-packaged dependencies
        working-directory: deps
        run: ../docker/build_deps.sh
      - name: Create dependency fetcher working directory
        run: mkdir -p deps
      - name: Fetch & build non-packaged plugins dependencies
        working-directory: deps
        run: ${{ github.workspace }}/src/plugins/bmqprometheus/build_prometheus_deps.sh
      - name: Build plugins
        env:
          PKG_CONFIG_PATH: /usr/lib/x86_64-linux-gnu/pkgconfig:/opt/bb/lib64/pkgconfig
        run: |
          cmake -S . -B build/blazingmq -G Ninja \
            -DCMAKE_TOOLCHAIN_FILE=${{ github.workspace }}/deps/srcs/bde-tools/BdeBuildSystem/toolchains/linux/gcc-default.cmake \
            -DCMAKE_BUILD_TYPE=Debug \
            -DBDE_BUILD_TARGET_SAFE=ON \
            -DBDE_BUILD_TARGET_64=ON \
            -DBDE_BUILD_TARGET_CPP17=ON \
            -DCMAKE_PREFIX_PATH=${{ github.workspace }}/deps/srcs/bde-tools/BdeBuildSystem \
            -DCMAKE_INSTALL_LIBDIR=lib64 \
            -DINSTALL_TARGETS="prometheus;bmqbrkr;bmqtool"
          echo "::add-matcher::.github/workflows/problem-matchers/cpp.json"
          cmake --build build/blazingmq --parallel 8 --target bmqbrkr bmqtool bmqprometheus
          echo "::remove-matcher owner=cpp"
      - name: Create prometheus dir
        run: mkdir -p prometheus_dir
      - name: Download Prometheus
        run: curl -SL "https://github.com/prometheus/prometheus/releases/download/v2.45.1/prometheus-2.45.1.linux-amd64.tar.gz" | tar -xzC prometheus_dir/
      - name: Run Prometheus
        run: ./prometheus_dir/prometheus-2.45.1.linux-amd64/prometheus --config.file=${{ github.workspace }}/src/plugins/bmqprometheus/tests/prometheus_localhost.yaml --web.enable-lifecycle --storage.tsdb.path=${{ github.workspace }}/prometheus_dir/data &
      - name: Run Pushgateway
        run: |
          docker pull prom/pushgateway
          docker run -d -p 9091:9091 prom/pushgateway
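      # "pull" mode: Prometheus scrapes the plugin's metrics endpoint;
      # "push" mode: the plugin pushes metrics to the Pushgateway started above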
      - name: Run BMQPrometheus plugin integration test in "pull" mode
        run: ${{ github.workspace }}/src/plugins/bmqprometheus/tests/bmqprometheus_prometheusstatconsumer_test.py -p ${{ github.workspace }}/build/blazingmq -m pull --no-docker
      - name: Clear Prometheus database and restart
        run: |
          curl -X POST "http://localhost:9090/-/quit"
          rm -rf prometheus_dir/data
          ./prometheus_dir/prometheus-2.45.1.linux-amd64/prometheus --config.file=${{ github.workspace }}/src/plugins/bmqprometheus/tests/prometheus_localhost.yaml --web.enable-lifecycle --storage.tsdb.path=${{ github.workspace }}/prometheus_dir/data &
      - name: Run Prometheus plugin integration test in "push" mode
        run: ${{ github.workspace }}/src/plugins/bmqprometheus/tests/bmqprometheus_prometheusstatconsumer_test.py -p ${{ github.workspace }}/build/blazingmq -m push --no-docker

  documentation:
    name: "Documentation"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -qy doxygen
      - name: Set up output directory
        run: |
          mkdir -p docs/docs/apidocs
      - name: Build docs
        run: |
          echo "::add-matcher::.github/workflows/problem-matchers/doxygen.json"
          doxygen Doxyfile
          echo "::remove-matcher owner=doxygen"

  shellcheck:
    name: Shellcheck
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Run ShellCheck
        uses: ludeeus/action-shellcheck@master

  docker_build_ubuntu:
    name: "Docker [ubuntu]"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Docker Single-Node Workflow
        run: docker compose -f docker/single-node/docker-compose.yaml up --build -d
      - name: Docker Cluster Workflow
        run: docker compose -f docker/cluster/docker-compose.yaml up --build -d