diff --git a/.appveyor.yml b/.appveyor.yml.back similarity index 100% rename from .appveyor.yml rename to .appveyor.yml.back diff --git a/.travis.yml b/.travis.yml.back similarity index 97% rename from .travis.yml rename to .travis.yml.back index 9fbe9ec5..51a07588 100644 --- a/.travis.yml +++ b/.travis.yml.back @@ -135,6 +135,6 @@ after_success: - pip install wheelhouse-uploader twine - python -m twine check ${TRAVIS_BUILD_DIR}/wheelhouse/* - python -m wheelhouse_uploader upload --local-folder - ${TRAVIS_BUILD_DIR}/wheelhouse/ - $UPLOAD_ARGS - $CONTAINER + ${TRAVIS_BUILD_DIR}/wheelhouse/ + $UPLOAD_ARGS + $CONTAINER diff --git a/README.rst b/README.rst index cd04d69c..3fce86c1 100644 --- a/README.rst +++ b/README.rst @@ -1,7 +1,4 @@ -.. image:: https://travis-ci.org/MacPython/scikit-learn-wheels.svg?branch=master - :target: https://travis-ci.org/MacPython/scikit-learn-wheels -.. image:: https://ci.appveyor.com/api/projects/status/0vgnsltgf2ghhbr2/branch/master?svg=true - :target: https://ci.appveyor.com/project/sklearn-wheels/scikit-learn-wheels +TODO: add Azure Pipelines badge here ########################## Scikit-learn wheel builder @@ -9,15 +6,17 @@ Scikit-learn wheel builder Repository to build scikit-learn wheels. -Edit `appveyor.yml` and `.travis.yml` to change the `BUILD_COMMIT` environment -variable to set the name of the git tag to build, commit and push (to master). +Edit `azure/windows.yml` and `azure/posix.yml` to change the `BUILD_COMMIT` +environment variable to set the name of the git tag to build, commit and push +(to master). -Travis and appveyor should automatically build and test that version on -Windows, Linux and OSX for various versions of Python (both 32 bit and 64 bit). +Azure Pipelines should automatically build and test that version on Windows, +Linux and OSX for various versions of Python (both 32 bit and 64 bit). 
If the tests pass, the resulting wheels should show up on: - http://wheels.scipy.org + - https://anaconda.org/scikit-learn-wheels-staging for release wheels staging; + - https://anaconda.org/scipy-wheels-nightly for nightly builds. The following tool can be useful to download all the wheels for a specific release: @@ -25,4 +24,4 @@ release: https://github.com/ogrisel/wheelhouse-uploader and then use `twine` to publish all the wheels along with the locally built -source tarball of the release all at once to PyPI. +source tarball of the release all at once to PyPI. diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..f113b606 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,88 @@ +schedules: +- cron: "27 3 */1 * *" + # 3:27am UTC every day + displayName: Nightly build + branches: + include: + - master + always: true + +jobs: + - template: azure/windows.yml + parameters: + name: windows + vmImage: vs2017-win2016 + matrix: + py_3.6_32: + PYTHON_VERSION: "3.6.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.12.1" + py_3.6_64: + PYTHON_VERSION: "3.6.x" + NP_BUILD_DEP: "1.12.1" + py_3.7_32: + PYTHON_VERSION: "3.7.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.14.5" + NIGHTLY_BUILD: "true" + py_3.7_64: + PYTHON_VERSION: "3.7.x" + NP_BUILD_DEP: "1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_32: + PYTHON_VERSION: "3.8.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.17.3" + SCIPY_BUILD_DEP: "1.4.1" + NIGHTLY_BUILD: "true" + py_3.8_64: + PYTHON_VERSION: "3.8.x" + NP_BUILD_DEP: "1.17.3" + SCIPY_BUILD_DEP: "1.4.1" + NIGHTLY_BUILD: "true" + + - template: azure/posix.yml + parameters: + name: linux + vmImage: ubuntu-16.04 + matrix: + py_3.6_32: + MB_PYTHON_VERSION: "3.6" + PLAT: "i686" + py_3.6_64: + MB_PYTHON_VERSION: "3.6" + py_3.7_32: + MB_PYTHON_VERSION: "3.7" + PLAT: "i686" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.7_64: + MB_PYTHON_VERSION: "3.7" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_32: 
+ MB_PYTHON_VERSION: "3.8" + PLAT: "i686" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" + py_3.8_64: + MB_PYTHON_VERSION: "3.8" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" + + - template: azure/posix.yml + parameters: + name: macOS + vmImage: xcode9-macos10.13 + matrix: + py_3.6_64: + MB_PYTHON_VERSION: "3.6" + NP_BUILD_DEP: "numpy==1.13.3" + py_3.7_64: + MB_PYTHON_VERSION: "3.7" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_64: + MB_PYTHON_VERSION: "3.8" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" diff --git a/azure/posix.yml b/azure/posix.yml new file mode 100644 index 00000000..6e28200c --- /dev/null +++ b/azure/posix.yml @@ -0,0 +1,141 @@ +parameters: + name: "" + vmImage: "" + matrix: [] + +jobs: + - job: ${{ parameters.name }} + pool: + vmImage: ${{ parameters.vmImage }} + variables: + REPO_DIR: "scikit-learn" + BUILD_COMMIT: "0.22.1" + PLAT: "x86_64" + NP_BUILD_DEP: "numpy==1.11.0" + CYTHON_BUILD_DEP: "cython==0.29.14" + SCIPY_BUILD_DEP: "scipy" + JOBLIB_BUILD_DEP: "joblib==0.11" + NIGHTLY_BUILD_COMMIT: "master" + NIGHTLY_BUILD: "false" + TEST_DEPENDS: "pytest" + JUNITXML: "test-data.xml" + TEST_DIR: "tmp_for_test" + strategy: + matrix: + ${{ insert }}: ${{ parameters.matrix }} + + steps: + - checkout: self + submodules: true + + - task: UsePythonVersion@0 + inputs: + versionSpec: $(MB_PYTHON_VERSION) + displayName: Set python version + + - bash: | + set -e + + SKIP_BUILD="false" + if [ "$BUILD_REASON" == "Schedule" ]; then + BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT + if [ "$NIGHTLY_BUILD" != "true" ]; then + SKIP_BUILD="true" + fi + fi + echo "Building scikit-learn@$BUILD_COMMIT" + echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT" + echo "##vso[task.setvariable variable=SKIP_BUILD]$SKIP_BUILD" + + # Platform variables used in multibuild scripts + if [ `uname` == 'Darwin' ]; then + echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]osx" + echo "##vso[task.setvariable 
variable=MACOSX_DEPLOYMENT_TARGET]10.9" + else + echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]linux" + fi + + # Store original Python path to be able to create test_venv pointing + # to same Python version. + PYTHON_EXE=`which python` + echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE" + displayName: Define build env variables + + - bash: | + set -e + pip install virtualenv + BUILD_DEPENDS="$NP_BUILD_DEP $CYTHON_BUILD_DEP $SCIPY_BUILD_DEP" + + source multibuild/common_utils.sh + source multibuild/travis_steps.sh + source extra_functions.sh + + # Setup build dependencies + before_install + + # OpenMP is not present on macOS by default + setup_compiler + clean_code $REPO_DIR $BUILD_COMMIT + build_wheel $REPO_DIR $PLAT + teardown_compiler + displayName: Build wheel + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + set -xe + source multibuild/common_utils.sh + source multibuild/travis_steps.sh + source extra_functions.sh + setup_test_venv + install_run $PLAT + teardown_test_venv + displayName: Install wheel and test + condition: eq(variables['SKIP_BUILD'], 'false') + + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(TEST_DIR)/$(JUNITXML)' + testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }} + displayName: 'Publish Test Results' + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + echo "##vso[task.prependpath]$CONDA/bin" + sudo chown -R $USER $CONDA + displayName: Add conda to PATH + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: conda install -q -y anaconda-client + displayName: Install anaconda-client + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: | + set -e + if [ "$BUILD_REASON" == "Schedule" ]; then + ANACONDA_ORG="scipy-wheels-nightly" + TOKEN="$SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN" + else + ANACONDA_ORG="scikit-learn-wheels-staging" 
+ TOKEN="$SCIKIT_LEARN_STAGING_UPLOAD_TOKEN" + fi + if [ "$TOKEN" == "" ]; then + echo "##[warning] Could not find anaconda.org upload token in secret variables" + fi + echo "##vso[task.setvariable variable=TOKEN]$TOKEN" + echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG" + displayName: Retrieve secret upload token + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + env: + # Secret variables need to be mapped to env variables explicitly: + SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN: $(SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN) + SCIKIT_LEARN_STAGING_UPLOAD_TOKEN: $(SCIKIT_LEARN_STAGING_UPLOAD_TOKEN) + + - bash: | + set -e + # The --force option forces a replacement if the remote file already + # exists. + ls wheelhouse/*.whl + anaconda -t $TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl + echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" + displayName: Upload to anaconda.org (only if secret token is retrieved) + condition: ne(variables['TOKEN'], '') diff --git a/azure/windows.yml b/azure/windows.yml new file mode 100644 index 00000000..50b85e78 --- /dev/null +++ b/azure/windows.yml @@ -0,0 +1,148 @@ +parameters: + name: "" + vmImage: "" + matrix: [] + +jobs: + - job: ${{ parameters.name }} + pool: + vmImage: ${{ parameters.vmImage }} + variables: + BUILD_COMMIT: "0.22.1" + SKLEARN_SKIP_NETWORK_TESTS: "1" + NP_BUILD_DEP: "1.11.0" + CYTHON_BUILD_DEP: "0.29.14" + SCIPY_BUILD_DEP: "1.1.0" + NIGHTLY_BUILD_COMMIT: "master" + NIGHTLY_BUILD: "false" + PYTHON_ARCH: "x64" + TEST_DEPENDS: "pytest" + JUNITXML: "test-data.xml" + TEST_DIR: '$(Agent.WorkFolder)/tmp_for_test' + strategy: + matrix: + ${{ insert }}: ${{ parameters.matrix }} + steps: + - checkout: self + submodules: true + + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + architecture: $(PYTHON_ARCH) + displayName: Set python version + + - bash: | + set -e + + echo PYTHON $PYTHON_VERSION $PYTHON_ARCH + 
echo Build Reason: $BUILD_REASON + python --version + python -c "import struct; print(struct.calcsize('P') * 8)" + pip --version + displayName: Check that we have the expected version and architecture for Python + + - bash: | + set -e + + SKIP_BUILD="false" + if [ "$BUILD_REASON" == "Schedule" ]; then + BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT + if [ "$NIGHTLY_BUILD" != "true" ]; then + SKIP_BUILD="true" + fi + fi + echo "Building scikit-learn@$BUILD_COMMIT" + echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT" + echo "##vso[task.setvariable variable=SKIP_BUILD]$SKIP_BUILD" + + # Store original Python path to be able to create test_venv pointing + # to same Python version. + PYTHON_EXE=`which python` + echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE" + displayName: Define build env variables + + - bash: | + set -e + cd scikit-learn + git checkout $BUILD_COMMIT + git clean -fxd + git reset --hard + displayName: Checkout scikit-learn commit + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + set -e + + pip install --timeout=60 numpy==$NP_BUILD_DEP + pip install --timeout=60 pytest wheel joblib scipy==$SCIPY_BUILD_DEP Cython==$CYTHON_BUILD_DEP + pip install twine + + pushd scikit-learn + python setup.py build + python ../appveyor/vendor_vcomp140.py + python setup.py bdist_wheel + ls dist + twine check dist/* + popd + displayName: Build wheel + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + set -e + source extra_functions.sh + setup_test_venv + pip install scikit-learn/dist/scikit_learn-*.whl + mkdir $TEST_DIR + pushd $TEST_DIR + pytest -rs -l --junitxml=$JUNITXML --pyargs sklearn + popd + teardown_test_venv + displayName: Install wheel and test + condition: eq(variables['SKIP_BUILD'], 'false') + + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(TEST_DIR)/$(JUNITXML)' + testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }} + displayName: 'Publish Test Results' + condition: 
eq(variables['SKIP_BUILD'], 'false') + + - bash: echo "##vso[task.prependpath]$CONDA/Scripts" + displayName: Add conda to PATH + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: conda install -q -y anaconda-client + displayName: Install anaconda-client + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: | + set -e + if [ "$BUILD_REASON" == "Schedule" ]; then + ANACONDA_ORG="scipy-wheels-nightly" + TOKEN="$SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN" + else + ANACONDA_ORG="scikit-learn-wheels-staging" + TOKEN="$SCIKIT_LEARN_STAGING_UPLOAD_TOKEN" + fi + if [ "$TOKEN" == "" ]; then + echo "##[warning] Could not find anaconda.org upload token in secret variables" + fi + echo "##vso[task.setvariable variable=TOKEN]$TOKEN" + echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG" + displayName: Retrieve secret upload token + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + env: + # Secret variables need to be mapped to env variables explicitly: + SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN: $(SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN) + SCIKIT_LEARN_STAGING_UPLOAD_TOKEN: $(SCIKIT_LEARN_STAGING_UPLOAD_TOKEN) + + - bash: | + set -e + # The --force option forces a replacement if the remote file already + # exists. 
+ ls scikit-learn/dist/scikit_learn-*.whl + anaconda -t $TOKEN upload --force -u $ANACONDA_ORG scikit-learn/dist/scikit_learn-*.whl + echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" + displayName: Upload to anaconda.org (only if secret token is retrieved) + condition: ne(variables['TOKEN'], '') diff --git a/config.sh b/config.sh index ef85678d..a9d40c73 100644 --- a/config.sh +++ b/config.sh @@ -7,22 +7,10 @@ function pre_build { : } +# XXX: test-data.xml is hard-coded because the JUNITXML env variable +# is not forwarded to the docker container environment. function run_tests { # Runs tests on installed distribution from an empty directory python --version - pytest -l --pyargs sklearn -} - - function enable_openmp { - # Install OpenMP - brew install libomp - export CPPFLAGS="$CPPFLAGS -Xpreprocessor -fopenmp" - export CFLAGS="$CFLAGS -I/usr/local/opt/libomp/include" - export CXXFLAGS="$CXXFLAGS -I/usr/local/opt/libomp/include" - export LDFLAGS="$LDFLAGS -L/usr/local/opt/libomp/lib -lomp" - export DYLD_LIBRARY_PATH=/usr/local/opt/libomp/lib -} - -function disable_system_openmp { - brew uninstall libomp + pytest -l --junitxml=test-data.xml --pyargs sklearn } diff --git a/extra_functions.sh b/extra_functions.sh new file mode 100644 index 00000000..92d20fbf --- /dev/null +++ b/extra_functions.sh @@ -0,0 +1,48 @@ + + function setup_compiler { + # Install OpenMP support on macOS + if [ $(uname) == "Darwin" ]; then + brew install libomp + export CPPFLAGS="$CPPFLAGS -Xpreprocessor -fopenmp" + export CFLAGS="$CFLAGS -I/usr/local/opt/libomp/include" + export CXXFLAGS="$CXXFLAGS -I/usr/local/opt/libomp/include" + export LDFLAGS="$LDFLAGS -L/usr/local/opt/libomp/lib -lomp" + export DYLD_LIBRARY_PATH=/usr/local/opt/libomp/lib + fi +} + +function teardown_compiler { + if [ $(uname) == "Darwin" ]; then + brew uninstall libomp + fi +} + +function setup_test_venv { + # Create a new empty venv dedicated to testing for non-Linux platforms. 
On + # Linux the tests are run in a Docker container. + if [ $(uname) != "Linux" ]; then + deactivate || echo "" + $PYTHON_EXE -m venv test_venv + if [ $(uname) == "Darwin" ]; then + source test_venv/bin/activate + else + source test_venv/Scripts/activate + fi + # Note: the idiom "python -m pip install ..." is necessary to upgrade + # pip itself on Windows. Otherwise one would get a permission error on + # pip.exe. + python -m pip install --upgrade pip wheel + if [ "$TEST_DEPENDS" != "" ]; then + pip install $TEST_DEPENDS + fi + fi +} + +function teardown_test_venv { + if [ $(uname) != "Linux" ]; then + deactivate || echo "" + if [ $(uname) == "Darwin" ]; then + source venv/bin/activate + fi + fi +} diff --git a/multibuild b/multibuild index 68a4af04..6b0ddb52 160000 --- a/multibuild +++ b/multibuild @@ -1 +1 @@ -Subproject commit 68a4af043e2adb0d9353d4a0e1f3d871203237aa +Subproject commit 6b0ddb5281f59d976c8026c082c9d73faf274790