diff --git a/.github/workflows/backend-ncnn.yml b/.github/workflows/backend-ncnn.yml
index e1584f826f..4a068ea720 100644
--- a/.github/workflows/backend-ncnn.yml
+++ b/.github/workflows/backend-ncnn.yml
@@ -58,6 +58,7 @@ jobs:
echo $(pwd)
ln -s build/bin/mmdeploy_onnx2ncnn ./
python .github/scripts/test_onnx2ncnn.py --run 1
+
build_ncnn:
runs-on: ubuntu-20.04
strategy:
diff --git a/.github/workflows/backend-pplnn.yml b/.github/workflows/backend-pplnn.yml
index 660e0cf8ae..399bd499b0 100644
--- a/.github/workflows/backend-pplnn.yml
+++ b/.github/workflows/backend-pplnn.yml
@@ -42,11 +42,22 @@ jobs:
-Dpplnn_DIR=${pplnn_DIR}
ls build/lib
- name: Install mmdeploy with pplnn
+ id: badge_status
run: |
rm -rf .eggs && python3 -m pip install -e .
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
python3 tools/check_env.py
python3 -c 'import mmdeploy.apis.pplnn as pplnn_api; assert pplnn_api.is_available()'
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_pplnn_cuda
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
build_pplnn_cpu:
runs-on: ubuntu-20.04
@@ -56,8 +67,18 @@ jobs:
with:
submodules: 'recursive'
- name: Install mmdeploy with pplnn
+ id: badge_status
run: |
python -m pip install torch==1.8.2 torchvision==0.9.2 --extra-index-url https://download.pytorch.org/whl/lts/1.8/cpu
python -m pip install mmcv-lite protobuf==3.20.2
python tools/scripts/build_ubuntu_x64_pplnn.py 8
python -c 'import mmdeploy.apis.pplnn as pplnn_api; assert pplnn_api.is_available()'
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_pplnn_cpu
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+      COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4801f0839e..2c477fe8cd 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -64,10 +64,20 @@ jobs:
coverage xml
coverage report -m
- name: Run mmyolo deploy unittests
+ id: badge_status
run: |
python -m pip install xdoctest
cd /home/runner/work/mmyolo
pytest tests/test_deploy
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cpu_model_convert
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cpu_sdk:
runs-on: ubuntu-20.04
@@ -85,6 +95,7 @@ jobs:
sudo apt install -y ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libxrender-dev libc++1-9 libc++abi1-9
sudo apt install libopencv-dev lcov wget
- name: Build and run SDK unit test without backend
+ id: badge_status
run: |
mkdir -p build && pushd build
cmake .. -DCMAKE_CXX_COMPILER=g++ -DMMDEPLOY_CODEBASES=all -DMMDEPLOY_BUILD_SDK=ON -DMMDEPLOY_BUILD_SDK_PYTHON_API=OFF -DMMDEPLOY_TARGET_DEVICES=cpu -DMMDEPLOY_COVERAGE=ON -DMMDEPLOY_BUILD_TEST=ON
@@ -95,6 +106,15 @@ jobs:
lcov --capture --directory . --output-file coverage.info
ls -lah coverage.info
cp coverage.info ../
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cpu_sdk
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
cross_build_aarch64:
runs-on: ubuntu-20.04
@@ -110,8 +130,18 @@ jobs:
with:
python-version: 3.8
- name: gcc-multilib
+ id: badge_status
run: |
sh -ex tools/scripts/ubuntu_cross_build_aarch64.sh
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: cross_build_aarch64
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda102:
runs-on: ubuntu-20.04
@@ -155,10 +185,20 @@ jobs:
rm -rf .eggs && python -m pip install -e .
python tools/check_env.py
- name: Run unittests and generate coverage report
+ id: badge_status
run: |
coverage run --branch --source mmdeploy -m pytest -rsE tests
coverage xml
coverage report -m
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cuda102
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113:
runs-on: ubuntu-20.04
@@ -205,6 +245,7 @@ jobs:
coverage xml
coverage report -m
- name: Upload coverage to Codecov
+ id: badge_status
uses: codecov/codecov-action@v2
with:
file: ./coverage.xml,./coverage.info
@@ -212,6 +253,15 @@ jobs:
env_vars: OS,PYTHON,CPLUS
name: codecov-umbrella
fail_ci_if_error: false
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cuda113
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113_linux:
runs-on: [self-hosted, linux-3090]
@@ -251,10 +301,20 @@ jobs:
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
python3 tools/check_env.py
- name: Test TensorRT pipeline
+ id: badge_status
run: |
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/build/lib:${LD_LIBRARY_PATH}"
export LD_LIBRARY_PATH="/root/workspace/mmdeploy/mmdeploy/lib:${LD_LIBRARY_PATH}"
bash .github/scripts/linux/test_full_pipeline.sh trt cuda
+ - name: create badge
+ if: always()
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cuda113_linux
+ LABEL: 'build'
+ STATUS: ${{ steps.badge_status.conclusion == 'success' && 'passing' || 'failing' }}
+ COLOR: ${{ steps.badge_status.conclusion == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
build_cuda113_windows:
runs-on: [self-hosted, win10-3080]
@@ -316,3 +376,17 @@ jobs:
conda activate $pwd\tmp_env
$env:path = "$pwd\build\bin\Release;" + $env:path
.github\scripts\windows\test_full_pipeline.ps1 -Backend trt -Device cuda
+
+ badge_build_cuda113_windows:
+ needs: build_cuda113_windows
+ if: always()
+ runs-on: ubuntu-20.04
+ steps:
+ - name: create badge
+ uses: RubbaBoy/BYOB@v1.2.1
+ with:
+ NAME: build_cuda113_windows
+ LABEL: 'build'
+ STATUS: ${{ needs.build_cuda113_windows.result == 'success' && 'passing' || needs.build_cuda113_windows.result }}
+ COLOR: ${{ needs.build_cuda113_windows.result == 'success' && 'green' || 'red' }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/README.md b/README.md
index c305282d3f..584103d484 100644
--- a/README.md
+++ b/README.md
@@ -83,18 +83,188 @@ The supported Device-Platform-InferenceBackend matrix is presented as following,
The benchmark can be found from [here](docs/en/03-benchmark/benchmark.md)
-| Device / Platform | Linux | Windows | macOS | Android |
-| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| x86_64 CPU | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ort.yml)ONNXRuntime
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ncnn.yml)ncnn
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-torchscript.yml)LibTorch
OpenVINO
TVM
| [](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime
OpenVINO
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn
| - | - |
-| ARM CPU | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ncnn
| - | - | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn
|
-| RISC-V | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/linux-riscv64-gcc.yml)ncnn
| - | - | - |
-| NVIDIA GPU | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT
LibTorch
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-pplnn.yml)pplnn
| [](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)ONNXRuntime
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/build.yml)TensorRT
| - | - |
-| NVIDIA Jetson | TensorRT
| - | - | - |
-| Huawei ascend310 | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-ascend.yml)CANN
| - | - | - |
-| Rockchip | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)RKNN
| - | - | - |
-| Apple M1 | - | - | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-coreml.yml)CoreML
| - |
-| Adreno GPU | - | - | - | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE
[](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-rknn.yml)ncnn
|
-| Hexagon DSP | - | - | - | [](https://github.com/open-mmlab/mmdeploy/actions/workflows/backend-snpe.yml)SNPE
|
+