diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index f716995416..15a3856159 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -7,6 +7,6 @@
  - [ ] Adds description and name entries in the appropriate "what's new" file in [`docs/sphinx/source/whatsnew`](https://github.com/pvlib/pvlib-python/tree/master/docs/sphinx/source/whatsnew) for all changes. Includes link to the GitHub Issue with `` :issue:`num` `` or this Pull Request with `` :pull:`num` ``. Includes contributor name and/or GitHub username (link with `` :ghuser:`user` ``).
  - [ ] New code is fully documented. Includes [numpydoc](https://numpydoc.readthedocs.io/en/latest/format.html) compliant docstrings, examples, and comments where necessary.
  - [ ] Pull request is nearly complete and ready for detailed review.
-- [ ] Maintainer: Appropriate GitHub Labels and Milestone are assigned to the Pull Request and linked Issue.
+- [ ] Maintainer: Appropriate GitHub Labels (including `remote-data`) and Milestone are assigned to the Pull Request and linked Issue.
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index e404ca7d5d..8e342564a6 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -1,11 +1,10 @@
 name: Publish distributions to PyPI
 
-# if this workflow is modified to be a generic CI workflow then
-# add an if statement to the publish step so it only runs on tags.
 on:
+  pull_request:
   push:
-    tags:
-      - "v*"
+    branches:
+      - master
 
 jobs:
   build-n-publish:
@@ -31,7 +30,9 @@ jobs:
     - name: Build packages
       run: python setup.py sdist bdist_wheel
 
+    # only publish distribution to PyPI for tagged commits
     - name: Publish distribution to PyPI
+      if: startsWith(github.ref, 'refs/tags/v')
       uses: pypa/gh-action-pypi-publish@master
       with:
         user: __token__
diff --git a/.github/workflows/pytest-remote-data.yml b/.github/workflows/pytest-remote-data.yml
new file mode 100644
index 0000000000..f9a726af41
--- /dev/null
+++ b/.github/workflows/pytest-remote-data.yml
@@ -0,0 +1,110 @@
+# A secondary test job that only runs the iotools tests if explicitly requested
+# (for pull requests) or on a push to the master branch.
+# Because the iotools tests require GitHub secrets, we need to be careful about
+# malicious PRs accessing the secrets and exposing them externally.
+#
+# We prevent this by only running this workflow when a maintainer has looked
+# over the PR's diff and verified that nothing malicious seems to be going on.
+# The maintainer then adds the "remote-data" label to the PR, which will then
+# trigger this workflow via the combination of the "on: ... types:"
+# and "if:" sections below.  The first restricts the workflow to only run when
+# a label is added to the PR, and the second requires that one of the PR's labels
+# is the "remote-data" label.  Technically this is slightly different from
+# triggering only when the "remote-data" label is added, since it will also
+# re-run when some other label is added later while "remote-data" is already
+# present.  Maybe there's a better way to do this.
+#
+# But wait, you say!  Can't a malicious PR get around this by modifying
+# this workflow file and removing the label requirement?  I think the answer
+# is "no" as long as we trigger the workflow on "pull_request_target" instead
+# of the usual "pull_request".  The difference is what context the workflow
+# runs inside: "pull_request" runs in the context of the fork, where changes
+# to the workflow definition will take immediate effect, while "pull_request_target"
+# runs in the context of the main pvlib repository, where the original (non-fork)
+# workflow definition is used instead.  Of course by switching away from the fork's
+# context to keep our original workflow definitions, we're also keeping all the
+# original code, so the tests won't be run against the PR's new code.  To fix this
+# we explicitly check out the PR's code as the first step of the workflow.
+# This allows the job to run modified pvlib & pytest code, but only ever via
+# the original workflow file.
+# So long as a maintainer always verifies that the PR's code is not malicious prior to
+# adding the label and triggering this workflow, I think this should not present
+# a security risk.
+#
+# Note that this workflow can be triggered again by removing and re-adding the
+# "remote-data" label to the PR.
+#
+# Note also that "pull_request_target" is the only way for the secrets
+# to be accessible in the first place.
+#
+# Further reading:
+# - https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
+# - https://github.community/t/can-workflow-changes-be-used-with-pull-request-target/178626/7
+
+name: pytest-remote-data
+
+on:
+  pull_request_target:
+    types: [labeled]
+  push:
+    branches:
+      - master
+
+jobs:
+  test:
+
+    strategy:
+      fail-fast: false  # don't cancel other matrix jobs when one fails
+      matrix:
+        python-version: [3.6, 3.7, 3.8, 3.9]
+        suffix: ['']  # the alternative to "-min"
+        include:
+          - python-version: 3.6
+            suffix: -min
+
+    runs-on: ubuntu-latest
+    if: (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'remote-data')) || (github.event_name == 'push')
+
+    steps:
+      - uses: actions/checkout@v3
+        if: github.event_name == 'pull_request_target'
+        # pull_request_target runs in the context of the target branch (pvlib/master),
+        # but what we need is the hypothetical merge commit from the PR:
+        with:
+          ref: "refs/pull/${{ github.event.number }}/merge"
+
+      - uses: actions/checkout@v2
+        if: github.event_name == 'push'
+
+      - name: Set up conda environment
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          activate-environment: test_env
+          environment-file: ${{ env.REQUIREMENTS }}
+          python-version: ${{ matrix.python-version }}
+          auto-activate-base: false
+        env:
+          # build requirement filename.  First replacement is for the python
+          # version, second is to add "-min" if needed
+          REQUIREMENTS: ci/requirements-py${{ matrix.python-version }}${{ matrix.suffix }}.yml
+
+      - name: List installed package versions
+        shell: bash -l {0}  # necessary for conda env to be active
+        run: conda list
+
+      - name: Run tests
+        shell: bash -l {0}  # necessary for conda env to be active
+        env:
+          # copy GitHub Secrets into environment variables for the tests to access
+          NREL_API_KEY: ${{ secrets.NRELAPIKEY }}
+          BSRN_FTP_USERNAME: ${{ secrets.BSRN_FTP_USERNAME }}
+          BSRN_FTP_PASSWORD: ${{ secrets.BSRN_FTP_PASSWORD }}
+        run: pytest pvlib/tests/iotools pvlib/tests/test_forecast.py --cov=./ --cov-report=xml --remote-data
+
+      - name: Upload coverage to Codecov
+        if: matrix.python-version == 3.6 && matrix.suffix == ''
+        uses: codecov/codecov-action@v2
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          flags: remote-data  # flags are configured in codecov.yml
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
new file mode 100644
index 0000000000..07c0045a48
--- /dev/null
+++ b/.github/workflows/pytest.yml
@@ -0,0 +1,78 @@
+name: pytest
+
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+
+jobs:
+  test:
+
+    strategy:
+      fail-fast: false  # don't cancel other matrix jobs when one fails
+      matrix:
+        # use macos-10.15 instead of macos-latest for py3.6 support
+        os: [ubuntu-latest, macos-10.15, windows-latest]
+        python-version: [3.6, 3.7, 3.8, 3.9]
+        environment-type: [conda, bare]
+        suffix: ['']  # placeholder as an alternative to "-min"
+        include:
+          - os: ubuntu-latest
+            python-version: 3.6
+            environment-type: conda
+            suffix: -min
+        exclude:
+          - os: macos-10.15
+            environment-type: conda
+          - os: windows-latest
+            environment-type: bare
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Set up conda environment
+        if: matrix.environment-type == 'conda'
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          activate-environment: test_env
+          environment-file: ${{ env.REQUIREMENTS }}
+          python-version: ${{ matrix.python-version }}
+          auto-activate-base: false
+        env:
+          # build requirement filename.  First replacement is for the python
+          # version, second is to add "-min" if needed
+          REQUIREMENTS: ci/requirements-py${{ matrix.python-version }}${{ matrix.suffix }}.yml
+
+      - name: List installed package versions (conda)
+        if: matrix.environment-type == 'conda'
+        shell: bash -l {0}  # necessary for conda env to be active
+        run: conda list
+
+      - name: Install bare Python ${{ matrix.python-version }}${{ matrix.suffix }}
+        if: matrix.environment-type == 'bare'
+        uses: actions/setup-python@v1
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Set up bare environment
+        if: matrix.environment-type == 'bare'
+        run: |
+          pip install .[test]
+          pip freeze
+
+      - name: Run tests
+        shell: bash -l {0}  # necessary for conda env to be active
+        run: |
+          # ignore iotools & forecast; those tests are run in a separate workflow
+          pytest pvlib --cov=./ --cov-report=xml --ignore=pvlib/tests/iotools --ignore=pvlib/tests/test_forecast.py
+
+      - name: Upload coverage to Codecov
+        if: matrix.python-version == 3.6 && matrix.suffix == '' && matrix.os == 'ubuntu-latest' && matrix.environment-type == 'conda'
+        uses: codecov/codecov-action@v2
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          flags: core  # flags are configured in codecov.yml
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index dc048dd04a..0000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-# https://docs.microsoft.com/azure/devops/pipelines/languages/python
-
-trigger:
-- master
-
-
-jobs:
-
-- template: ci/azure/posix.yml
-  parameters:
-    name: Test_bare_Linux
-    vmImage: ubuntu-20.04
-
-
-- template: ci/azure/posix.yml
-  parameters:
-    name: Test_bare_macOS
-    vmImage: macOS-10.15
-
-
-- template: ci/azure/conda_linux.yml
-  parameters:
-    name: Test_conda_linux
-    vmImage: ubuntu-20.04
-
-
-- template: ci/azure/conda_windows.yml
-  parameters:
-    name: Test_conda_windows
-    vmImage: windows-latest
-
-
-- job: 'Publish'
-  dependsOn: 'Test_conda_linux'
-  pool:
-    vmImage: 'ubuntu-latest'
-
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.x'
-      architecture: 'x64'
-
-  - script: python setup.py sdist
-    displayName: 'Build sdist'
diff --git a/ci/azure/conda_linux.yml b/ci/azure/conda_linux.yml
deleted file mode 100644
index 3bf8215cc4..0000000000
--- a/ci/azure/conda_linux.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-parameters:
-  name: ''
-  vmImage: ''
-
-jobs:
-- job: ${{ parameters.name }}
-  pool:
-    vmImage: ${{ parameters.vmImage }}
-  strategy:
-    matrix:
-      Python36-min:
-        python.version: '36'
-        suffix: '-min'
-      Python36:
-        python.version: '36'
-        coverage: true
-      Python37:
-        python.version: '37'
-      Python38:
-        python.version: '38'
-      Python39:
-        python.version: '39'
-
-  steps:
-  - bash: echo "##vso[task.prependpath]/usr/share/miniconda/bin"
-    displayName: Add conda to PATH
-  - script: conda env create --quiet --file ci/requirements-py$(python.version)$(suffix).yml
-    displayName: Create Anaconda environment
-  - script: |
-      source activate test_env
-      pip install pytest-azurepipelines
-      pip install -e .
-    displayName: 'pip dependencies'
-  - script: |
-      source activate test_env
-      conda list
-    displayName: 'List installed dependencies'
-  - script: |
-      source activate test_env
-      export NREL_API_KEY=$(nrelApiKey)
-      export BSRN_FTP_USERNAME=$(BSRN_FTP_USERNAME)
-      export BSRN_FTP_PASSWORD=$(BSRN_FTP_PASSWORD)
-      pytest pvlib --remote-data --junitxml=junit/test-results.xml --cov --cov-report=xml --cov-report=html
-    displayName: 'pytest'
-  - task: PublishTestResults@2
-    inputs:
-      testResultsFiles: '**/test-results.xml'
-      testRunTitle: 'Linux $(python.version)'
-  - task: PublishCodeCoverageResults@1
-    inputs:
-      codeCoverageTool: Cobertura
-      summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
-      reportDirectory: '$(System.DefaultWorkingDirectory)/**/htmlcov'
-    condition: eq(variables['coverage'], true)
-  - script: |
-      bash <(curl https://codecov.io/bash) -t bbc2bdbe-5e67-4fef-9cb7-f52fe0b703a8 -f coverage.xml -F adder -F subtractor -F conda
-    displayName: 'codecov'
-    condition: eq(variables['coverage'], true)
diff --git a/ci/azure/conda_windows.yml b/ci/azure/conda_windows.yml
deleted file mode 100644
index 4b8cf61911..0000000000
--- a/ci/azure/conda_windows.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-parameters:
-  name: ''
-  vmImage: ''
-
-jobs:
-- job: ${{ parameters.name }}
-  pool:
-    vmImage: ${{ parameters.vmImage }}
-  strategy:
-    matrix:
-      Python36-windows:
-        python.version: '36'
-      Python37-windows:
-        python.version: '37'
-      Python38-windows:
-        python.version: '38'
-      Python39-windows:
-        python.version: '39'
-
-  steps:
-  - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
-    displayName: Add conda to PATH
-  - script: conda env create --quiet --file ci/requirements-py$(python.version).yml
-    displayName: Create Anaconda environment
-  - script: |
-      call activate test_env
-      pip install pytest-azurepipelines
-      pip install -e .
-    displayName: 'pip dependencies'
-  - script: |
-      call activate test_env
-      conda list
-    displayName: 'List installed dependencies'
-  - script: |
-      call activate test_env
-      pytest pvlib --junitxml=junit/test-results.xml
-    displayName: 'pytest'
-  - task: PublishTestResults@2
-    inputs:
-      testResultsFiles: '**/test-results.xml'
-      testRunTitle: 'Windows $(python.version)'
diff --git a/ci/azure/posix.yml b/ci/azure/posix.yml
deleted file mode 100644
index 086f03dd69..0000000000
--- a/ci/azure/posix.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-parameters:
-  name: ''
-  vmImage: ''
-
-jobs:
-- job: ${{ parameters.name }}
-  pool:
-    vmImage: ${{ parameters.vmImage }}
-  strategy:
-    matrix:
-      Python36:
-        python.version: '3.6'
-      Python37:
-        python.version: '3.7'
-      Python38:
-        python.version: '3.8'
-      Python39:
-        python.version: '3.9'
-
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '$(python.version)'
-
-  - script: |
-      pip install pytest pytest-cov pytest-mock requests-mock pytest-timeout pytest-azurepipelines pytest-rerunfailures pytest-remotedata
-      pip install -e .
-      pytest pvlib --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html
-    displayName: 'Test with pytest'
-
-  - task: PublishTestResults@2
-    condition: succeededOrFailed()
-    inputs:
-      testResultsFiles: '**/test-*.xml'
-      testRunTitle: 'Publish test results for Python $(python.version)'
-
-  - task: PublishCodeCoverageResults@1
-    inputs:
-      codeCoverageTool: Cobertura
-      summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
-      reportDirectory: '$(System.DefaultWorkingDirectory)/**/htmlcov'
diff --git a/ci/requirements-py36-min.yml b/ci/requirements-py3.6-min.yml
similarity index 100%
rename from ci/requirements-py36-min.yml
rename to ci/requirements-py3.6-min.yml
diff --git a/ci/requirements-py36.yml b/ci/requirements-py3.6.yml
similarity index 100%
rename from ci/requirements-py36.yml
rename to ci/requirements-py3.6.yml
diff --git a/ci/requirements-py37.yml b/ci/requirements-py3.7.yml
similarity index 100%
rename from ci/requirements-py37.yml
rename to ci/requirements-py3.7.yml
diff --git a/ci/requirements-py38.yml b/ci/requirements-py3.8.yml
similarity index 100%
rename from ci/requirements-py38.yml
rename to ci/requirements-py3.8.yml
diff --git a/ci/requirements-py39.yml b/ci/requirements-py3.9.yml
similarity index 100%
rename from ci/requirements-py39.yml
rename to ci/requirements-py3.9.yml
diff --git a/codecov.yml b/codecov.yml
index dbcb6f075f..c407f28d16 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -2,26 +2,68 @@ codecov:
   notify:
     require_ci_to_pass: no
 
+# "flags" are used to identify subsets of the overall codebase when calculating coverage.
+# Currently used for "remote-data" (pvlib.iotools & forecast) and "core" (everything else).
+# Because we only run the remote-data tests sometimes, we need to split them out so that
+# codecov doesn't report a big drop in coverage when we don't run them.
+# We also use "carryforward: true" so that, when we don't run the remote-data tests, the
+# last known coverage is carried forward and used in place of the missing coverage.
+# https://docs.codecov.com/docs/flags
+flags:
+
+  core:
+    paths:
+      - pvlib/
+      - '!pvlib/iotools/'
+      - '!pvlib/tests/iotools/'
+      - '!pvlib/forecast.py'
+      - '!pvlib/tests/test_forecast.py'
+    carryforward: false
+
+  remote-data:
+    paths:
+      - pvlib/iotools/
+      - pvlib/tests/iotools
+      - pvlib/forecast.py
+      - pvlib/tests/test_forecast.py
+    carryforward: true  # if not run, use coverage from previous commit
+
+
 coverage:
-  status:
+  status:  # each entry here represents a check status to report to GitHub
     patch:
       default:
         target: 100%
-        if_no_uploads: error
-        if_not_found: success
-        if_ci_failed: failure
+
     project:
-      default: false
-      library:
+      default: off
+
+      core:
         target: auto
-        if_no_uploads: error
-        if_not_found: success
-        if_ci_failed: failure
+        flags:
+          - core
+
+      remote-data:
+        target: auto
+        flags:
+          - remote-data
+
+      tests-core:
+        target: 95%
         paths:
-          - "pvlib/.*"
-      tests:
+          - 'pvlib/tests/.*'
+          - '!pvlib/tests/iotools/.*'
+          - '!pvlib/tests/test_forecast.py'
+        flags:
+          - core
+
+      tests-remote-data:
         target: 95%
         paths:
-          - "pvlib/tests/.*"
+          - 'pvlib/tests/iotools/.*'
+          - 'pvlib/tests/test_forecast.py'
+        flags:
+          - remote-data
+
 comment: off
diff --git a/docs/sphinx/source/whatsnew/v0.9.2.rst b/docs/sphinx/source/whatsnew/v0.9.2.rst
index 05b7ed58ab..39a741c080 100644
--- a/docs/sphinx/source/whatsnew/v0.9.2.rst
+++ b/docs/sphinx/source/whatsnew/v0.9.2.rst
@@ -22,6 +22,8 @@ Bug fixes
 Testing
 ~~~~~~~
 
+* Switched CI testing provider from Azure to GitHub Actions (:pull:`1306`)
+
 
 Documentation
 ~~~~~~~~~~~~~
@@ -40,3 +42,4 @@ Contributors
 * Naman Priyadarshi (:ghuser:`Naman-Priyadarshi`)
 * Chencheng Luo (:ghuser:`roger-lcc`)
 * Prajwal Borkar (:ghuser:`PrajwalBorkar`)
+* Kevin Anderson (:ghuser:`kanderso-nrel`)
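For reference, a rough sketch of reproducing the gated remote-data job locally; it simply restates the run step and secret names from pytest-remote-data.yml above, with placeholder values you would replace with your own credentials, and assumes a development install with the test extras (pip install -e .[test]):

    # same environment variable names the workflow exports from GitHub Secrets
    export NREL_API_KEY="<your key>"
    export BSRN_FTP_USERNAME="<your username>"
    export BSRN_FTP_PASSWORD="<your password>"
    # --remote-data enables the network-dependent tests (pytest-remotedata plugin)
    pytest pvlib/tests/iotools pvlib/tests/test_forecast.py --remote-data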