Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Test lower protoc version in Linux CI #4365

Merged
merged 28 commits into from
Jul 26, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
3b8add7
use apt-get's libprotobuf-dev
jcwchen Jul 18, 2022
19afa2a
change order
jcwchen Jul 18, 2022
9af3cd3
Merge branch 'main' into jcw/mini-protoc
jcwchen Jul 18, 2022
7dfb18c
use Git instead of operator[]
jcwchen Jul 18, 2022
184a266
use index
jcwchen Jul 18, 2022
0141e7d
-DONNX_USE_PROTOBUF_SHARED_LIBS=ON
jcwchen Jul 18, 2022
3208e82
shape_inference test as well
jcwchen Jul 18, 2022
7d596b0
test onnx_lite in GitHub Action instead
jcwchen Jul 18, 2022
b4e0acc
remove duplicate pip install
jcwchen Jul 18, 2022
11b74b8
fix warnings in script
jcwchen Jul 18, 2022
95383bd
fix the rest of warnings
jcwchen Jul 18, 2022
5f1a443
update document
jcwchen Jul 18, 2022
fc607df
use $4 instead
jcwchen Jul 18, 2022
fc58a56
use sh instead of shell
jcwchen Jul 18, 2022
31a49ea
use $1
jcwchen Jul 18, 2022
0e20f98
failed_wheels
jcwchen Jul 19, 2022
d9b1b02
add doc
jcwchen Jul 19, 2022
f53c103
missing $1
jcwchen Jul 19, 2022
360801e
use $4
jcwchen Jul 19, 2022
9e9df47
use original
jcwchen Jul 19, 2022
9de7c9a
Merge branch 'main' into jcw/mini-protoc
jcwchen Jul 21, 2022
30197fc
Merge branch 'jcw/mini-protoc' of https://github.com/jcwchen/onnx int…
jcwchen Jul 21, 2022
51918fc
onnx_lite does not work with old protoc version
jcwchen Jul 21, 2022
e999882
Merge branch 'main' into jcw/mini-protoc
jcwchen Jul 21, 2022
74c1efa
make Mac use ONNX_USE_LITE_PROTO to sync options for all envs
jcwchen Jul 22, 2022
58d920e
Merge branch 'jcw/mini-protoc' of https://github.com/jcwchen/onnx int…
jcwchen Jul 22, 2022
7fab94f
update CI doc to reflect recent changes
jcwchen Jul 22, 2022
3ebc4e1
Merge branch 'main' into jcw/mini-protoc
jcwchen Jul 22, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
21 changes: 4 additions & 17 deletions .azure-pipelines/Linux-CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,27 +11,22 @@ jobs:
python.version: '3.9'
onnx_ml: 1
onnx_debug: 1
onnx_lite: 0
py38:
python.version: '3.8'
onnx_ml: 0
onnx_debug: 0
onnx_lite: 1
py38-ml:
python.version: '3.8'
onnx_ml: 1
onnx_debug: 0
onnx_lite: 1
py37:
python.version: '3.7'
onnx_ml: 0
onnx_debug: 0
onnx_lite: 1
py37-ml:
python.version: '3.7'
onnx_ml: 1
onnx_debug: 0
onnx_lite: 1
maxParallel: 6

steps:
Expand All @@ -45,12 +40,12 @@ jobs:
python -m virtualenv py$(python.version)
source py$(python.version)/bin/activate

source workflow_scripts/protobuf/build_protobuf_unix.sh $(nproc)

python -m pip install -q --upgrade pip setuptools wheel
sudo apt-get install libprotobuf-dev protobuf-compiler
python -m pip install -q --upgrade pip
python -m pip install -q -r requirements-release.txt

sudo apt-get install -qq -o=Dpkg::Use-Pty=0 -y --no-install-recommends dos2unix

git submodule update --init --recursive
export ONNX_BUILD_TESTS=1
if [ '$(onnx_debug)' == '1' ]; then
Expand All @@ -59,10 +54,7 @@ jobs:
if [ '$(onnx_ml)' == '1' ]; then
export ONNX_ML=1
fi
export CMAKE_ARGS="-DONNXIFI_DUMMY_BACKEND=ON -DONNX_WERROR=ON"
if [ '$(onnx_lite)' == '1' ]; then
export CMAKE_ARGS="${CMAKE_ARGS} -DONNX_USE_LITE_PROTO=ON"
fi
export CMAKE_ARGS="-DONNXIFI_DUMMY_BACKEND=ON -DONNX_WERROR=ON -DONNX_USE_PROTOBUF_SHARED_LIBS=ON"
export ONNX_NAMESPACE=ONNX_NAMESPACE_FOO_BAR_FOR_CI
python setup.py --quiet install
displayName: 'Install ONNX and dependencies'
Expand Down Expand Up @@ -122,11 +114,6 @@ jobs:
exit 1
fi

# Mypy only works with our generated _pb.py files when we install in develop mode, so let's do that
python -m pip uninstall -y onnx
rm -rf .setuptools-cmake-build
python -m pip install -q .

displayName: 'Run ONNX tests'

- script: |
Expand Down
10 changes: 5 additions & 5 deletions .github/workflows/manylinux/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@ export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib

# Compile wheels
# Need to be updated if there is a new Python Version
if [ `uname -m` == 'aarch64' ]; then
if [ "$(uname -m)" == "aarch64" ]; then
PIP_INSTALL_COMMAND="$PY_VERSION -m pip install --no-cache-dir -q"
PYTHON_COMMAND="$PY_VERSION"
else
declare -A python_map=( ["3.7"]="cp37-cp37m" ["3.8"]="cp38-cp38" ["3.9"]="cp39-cp39" ["3.10"]="cp310-cp310")
declare -A python_include=( ["3.7"]="3.7m" ["3.8"]="3.8" ["3.9"]="3.9" ["3.10"]="3.10")
PY_VER=${python_map[$PY_VERSION]}
PIP_INSTALL_COMMAND="/opt/python/${PY_VER}/bin/pip install --no-cache-dir -q"
PYTHON_COMMAND="/opt/python/"${PY_VER}"/bin/python"
PYTHON_COMMAND="/opt/python/${PY_VER}/bin/python"
fi

# Update pip
Expand All @@ -28,11 +28,11 @@ $PIP_INSTALL_COMMAND cmake

# Build protobuf from source
yum install -y wget
source workflow_scripts/protobuf/build_protobuf_unix.sh $(nproc) $(pwd)/protobuf/protobuf_install
source workflow_scripts/protobuf/build_protobuf_unix.sh "$(nproc)" "$(pwd)"/protobuf/protobuf_install

# set ONNX build environments
export ONNX_ML=1
export CMAKE_ARGS="-DPYTHON_INCLUDE_DIR=/opt/python/${PY_VER}/include/python${python_include[$PY_VERSION]}"
export CMAKE_ARGS="-DPYTHON_INCLUDE_DIR=/opt/python/${PY_VER}/include/python${python_include[$PY_VERSION]} -DONNX_USE_LITE_PROTO=ON"

# Install Python dependency
$PIP_INSTALL_COMMAND -r requirements-release.txt || { echo "Installing Python requirements failed."; exit 1; }
Expand All @@ -48,7 +48,7 @@ fi
# find -exec does not preserve failed exit codes, so use an output file for failures
failed_wheels=$PWD/failed-wheels
rm -f "$failed_wheels"
find . -type f -iname "*-linux*.whl" -exec sh -c "auditwheel repair '{}' -w \$(dirname '{}') --plat '${PLAT}' || { echo 'Repairing wheels failed.'; auditwheel show '{}' >> "$failed_wheels"; }" \;
find . -type f -iname "*-linux*.whl" -exec sh -c "auditwheel repair '{}' -w \$(dirname '{}') --plat '${PLAT}' || { echo 'Repairing wheels failed.'; auditwheel show '{}' >> '$failed_wheels'; }" \;

if [[ -f "$failed_wheels" ]]; then
echo "Repairing wheels failed:"
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/release_mac.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ jobs:
# Install protobuf from source
export NUM_CORES=`sysctl -n hw.logicalcpu`
source workflow_scripts/protobuf/build_protobuf_unix.sh $NUM_CORES $(pwd)/protobuf/protobuf_install

# Currently GitHub Action agent is using 10.14 Python, use -p to force change the final MACOSX_DEPLOYMENT_TARGET
export CMAKE_ARGS="-DONNX_USE_LITE_PROTO=ON"
# Currently GitHub Action agent is using MacOS 10.15, use -p to force change the final MACOSX_DEPLOYMENT_TARGET
# Change -p if MACOSX_DEPLOYMENT_TARGET is different
if [ '${{ github.event_name }}' == 'schedule' ]; then
python setup.py bdist_wheel -p macosx_10_12_x86_64 --weekly_build
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ pip install -e .

### Linux

First, you need to install protobuf. The minimum Protobuf compiler (protoc) version required by ONNX is 3.0.0.
First, you need to install protobuf. The minimum Protobuf compiler (protoc) version required by ONNX is 3.0.0. Please note that old protoc versions might not work with `CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON`.

Ubuntu 18.04 (and newer) users may choose to install protobuf via
```bash
Expand Down Expand Up @@ -175,8 +175,8 @@ Then you can build ONNX as:
git clone https://github.com/onnx/onnx.git
cd onnx
git submodule update --init --recursive
# prefer lite proto
set CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON
# Optional: prefer lite proto
export CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON
pip install -e .
```

Expand All @@ -201,7 +201,7 @@ Then you can build ONNX as:
```
git clone --recursive https://github.com/onnx/onnx.git
cd onnx
# prefer lite proto
# Optional: prefer lite proto
set CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON
pip install -e .
```
Expand Down
14 changes: 7 additions & 7 deletions docs/CIPipelines.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,14 @@

|   | When it runs | Config | Test |
-- | -- | -- | -- |
[Linux-CI](/.azure-pipelines/Linux-CI.yml) | Every PR | <ul><li>Ubuntu-18.04</li><li>DEBUG=1 or 0</li><li>ONNX_USE_LITE_PROTO=ON or OFF</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li><li>ONNX_ML=1 or 0</li></ul>| <ul><li>ONNX C++ tests</li><li>Style check (flake8, mypy, and clang-format)</li><li>Test doc generation</li><li>Test proto generation</li><li>Verify backend node data</li><li>Verify node test generation</li></ul> |
[Windows-CI](/.azure-pipelines/Windows-CI.yml) | Every PR  | <ul><li>vs2017-win2016</li><li>ONNX_USE_LITE_PROTO=ON</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=ON</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li><li>ONNX_ML=1 or 0</li></ul>| <ul><li>Test building ONNX in conda environment</li><li>Test doc generation</li><li>Test proto generation</li></ul> |
[Mac-CI](/.azure-pipelines/MacOS-CI.yml) | Every PR  | <ul><li>macOS-10.14</li><li>DEBUG=1</li><li>ONNX_USE_LITE_PROTO=ON or OFF</li><li>ONNX_ML=1 or 0</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li></ul>| <ul><li>ONNX C++ tests</li><li>Test doc generation</li><li>Test proto generation</li></ul>|
[Linux-CI](/.azure-pipelines/Linux-CI.yml) | Every PR | <ul><li>Ubuntu-18.04</li><li>DEBUG=1 or 0</li><li>ONNX_USE_LITE_PROTO=OFF</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li><li>ONNX_ML=1 or 0</li></ul>| <ul><li>ONNX C++ tests</li><li>Style check (flake8, mypy, and clang-format)</li><li>Test doc generation</li><li>Test proto generation</li><li>Verify node test generation</li></ul> |
[Windows-CI](/.azure-pipelines/Windows-CI.yml) | Every PR  | <ul><li>windows-2019</li><li>ONNX_USE_LITE_PROTO=ON</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=ON</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li><li>ONNX_ML=1 or 0</li></ul>| <ul><li>Test building ONNX in conda environment</li><li>Test doc generation</li><li>Test proto generation</li><li>Verify node test generation</li></ul> |
[Mac-CI](/.azure-pipelines/MacOS-CI.yml) | Every PR  | <ul><li>macOS-10.15</li><li>DEBUG=1</li><li>ONNX_USE_LITE_PROTO=ON or OFF</li><li>ONNX_ML=1 or 0</li><li>ONNX_BUILD_TESTS=1</li><li>ONNX_WERROR=ON</li></ul>| <ul><li>ONNX C++ tests</li><li>Test doc generation</li><li>Test proto generation</li><li>Verify node test generation</li></ul>|
[Windows_No_Exception CI](/.github/workflows/win_no_exception_ci.yml) | Every PR  | <ul><li>vs2019-winlatest</li><li>ONNX_DISABLE_EXCEPTIONS=ON</li><li>ONNX_USE_LITE_PROTO=ON</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_MSVC_STATIC_RUNTIME=ON</li><li>ONNX_DISABLE_STATIC_REGISTRATION=ON or OFF</li></ul>| <ul><li>Only ONNX C++ tests</li><li>Test selective schema loading</li></ul> |
[WindowsRelease](/.github/workflows/release_win.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly(1)</li></ul> | <ul><li>Latest Windows</li><li>x86 and x64</li><li>ONNX_USE_LITE_PROTO=ON</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_MSVC_STATIC_RUNTIME=OFF</li></ul>| <ul><li> Release Windows wheel</li><li>Release onnx-weekly package</li><li>Verify backend node data</li><li>Verify node test generation</li><li>Verify with different dependency versions - latest and min supported numpy version, latest and min supported protobuf version(2)</li><li>Verify ONNX with the latest [ONNX Runtime PyPI package](https://pypi.org/project/onnxruntime/)(3).</li></ul> |
[LinuxRelease_aarch64](/.github/workflows/release_linux_aarch64.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul>  | <ul><li>Latest manylinux2014_aarch64</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li></ul>| <ul><li> Release Linux aarch64 wheel</li><li>Release onnx-weekly package</li><li>Verify backend node data</li><li>Verify node test generation</li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package.</li></ul> |
[LinuxRelease_x86_64](/.github/workflows/release_linux_x86_64.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul> | <ul><li>Latest LinuxRelease_x86_64</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li></ul>| <ul><li> Release Linux x86_64 wheel</li><li>Release onnx-weekly package</li><li>Test TEST_HUB=1(4)</li><li>Verify backend node data</li><li>Verify node test generation</li></li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package.</li></ul> |
[MacRelease](/.github/workflows/release_win.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul> | <ul><li>macos-10.15</li><li> MACOSX_DEPLOYMENT_TARGET=10.12(5) </li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li></ul>| <ul><li>Release Mac wheel</li><li>Release onnx-weekly package</li><li>Verify backend node data</li><li>Verify node test generation</li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package.</li><li>Test source distribution generation</li><li>Test build with source distribution</li><li>Release onnx-weekly source distribution</li></ul>
[WindowsRelease](/.github/workflows/release_win.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly(1)</li></ul> | <ul><li>Latest Windows</li><li>x86 and x64</li><li>ONNX_USE_LITE_PROTO=ON</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_MSVC_STATIC_RUNTIME=OFF</li></ul>| <ul><li> Release Windows wheel</li><li>Release onnx-weekly package</li><li>Verify with different dependency versions - latest and min supported numpy version, latest and min supported protobuf version(2)</li><li>Verify ONNX with the latest [ONNX Runtime PyPI package](https://pypi.org/project/onnxruntime/)(3).</li></ul> |
[LinuxRelease_aarch64](/.github/workflows/release_linux_aarch64.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul>  | <ul><li>Latest manylinux2014_aarch64</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_LITE_PROTO=ON</li></ul>| <ul><li> Release Linux aarch64 wheel</li><li>Release onnx-weekly package</li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package</li></ul> |
[LinuxRelease_x86_64](/.github/workflows/release_linux_x86_64.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul> | <ul><li>Latest LinuxRelease_x86_64</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_LITE_PROTO=ON</li></ul>| <ul><li> Release Linux x86_64 wheel</li><li>Release onnx-weekly package</li><li>Test TEST_HUB=1(4)</li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package.</li></ul> |
[MacRelease](/.github/workflows/release_win.yml) | <ul><li>Main branch</li><li>Release branch</li><li>Weekly</li></ul> | <ul><li>macos-10.15</li><li> MACOSX_DEPLOYMENT_TARGET=10.12(5) </li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li><li>ONNX_USE_LITE_PROTO=ON</li></ul>| <ul><li>Release Mac wheel</li><li>Release onnx-weekly package</li><li>Verify with different dependency versions - latest numpy version, latest and min supported protobuf version</li><li>Verify ONNX with the latest ONNX Runtime PyPI package.</li><li>Test source distribution generation</li><li>Test build with source distribution</li><li>Release onnx-weekly source distribution</li></ul>
[Weekly CI with latest onnx.checker](/.github/workflows/weekly_mac_ci.yml) | weekly(6) |<ul><li>macos-latest</li><li>MACOSX_DEPLOYMENT_TARGET=10.15</li><li>ONNX_USE_PROTOBUF_SHARED_LIBS=OFF</li><li>ONNX_ML=1</li></ul>| <ul><li>Test latest ONNX checker</li><li>Test latest ONNX shape inference</li><li>With all models from [onnx/models](https://github.com/onnx/models)(7)</li></ul> |

* (1) When the release CIs will run:
Expand Down
3 changes: 3 additions & 0 deletions docs/CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,10 @@ from onnx root dir should work.
[Operator docs in Operators.md](Operators.md) are automatically generated based on C++ operator definitions and backend Python snippets. To refresh these docs, run the following commands from the repo root and commit the results. Note `ONNX_ML=0` updates Operators.md whereas `ONNX_ML=1` updates Operators-ml.md:

```
# Windows
set ONNX_ML=0
# UNIX
# export ONNX_ML=0
pip install setup.py
python onnx/defs/gen_doc.py
```
Expand Down
2 changes: 1 addition & 1 deletion onnx/test/cpp/data_propagation_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ TensorShapeProto RunDataPropagation(const char* graphCode, int domainVersion = 1

// Assuming the graph being tested only has 1 output.
// If this ever changes then fixes are required here.
const auto inputShapeDataIter = generatedShapeDataByName.find(graph.output()[0].name());
const auto inputShapeDataIter = generatedShapeDataByName.find(graph.output(0).name());
EXPECT_TRUE(inputShapeDataIter != generatedShapeDataByName.cend());

inferredShape.CopyFrom(inputShapeDataIter->second);
Expand Down
2 changes: 1 addition & 1 deletion onnx/test/cpp/shape_inference_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -513,7 +513,7 @@ void RunReshapeShapeInfTest(const char* modelStr, TensorShapeProto& expectedShap
ShapeInferenceOptions options{true, 1, true};
ONNX_NAMESPACE::shape_inference::InferShapes(model, ONNX_NAMESPACE::OpSchemaRegistry::Instance(), options);

const auto inferredShape = model.graph().output()[0].type().tensor_type().shape();
const auto inferredShape = model.graph().output(0).type().tensor_type().shape();
EXPECT_TRUE(inferredShape.dim_size() == expectedShape.dim_size());

for (int i = 0; i < inferredShape.dim_size(); i++) {
Expand Down