@@ -1,8 +1,8 @@
 # This file defines our primary CI workflow that runs on pull requests
 # and also on pushes to special branches (auto, try).
 #
-# The actual definition of the executed jobs is calculated by a Python
-# script located at src/ci/github-actions/ci.py, which
+# The actual definition of the executed jobs is calculated by the
+# `src/ci/citool` crate, which
 # uses job definition data from src/ci/github-actions/jobs.yml.
 # You should primarily modify the `jobs.yml` file if you want to modify
 # what jobs are executed in CI.
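For context, `jobs.yml` groups job definitions by the run type that triggers them, and `citool` expands those definitions into the concrete job matrix. A minimal sketch of what an entry might look like; the section names and fields below are illustrative assumptions, not copied from the real file:

```yaml
# Hypothetical jobs.yml sketch; all names and fields are illustrative.
pr:                       # jobs executed on pull requests
  - name: mingw-check     # feeds the job's full_name in the matrix
    env:
      IMAGE: mingw-check  # Docker image the builder runs (assumed field)
auto:                     # jobs executed on the bors `auto` branch
  - name: x86_64-gnu
```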
@@ -56,7 +56,10 @@
       - name: Calculate the CI job matrix
         env:
           COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
-        run: python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT
+        run: |
+          cd src/ci/citool
+          cargo test
+          cargo run calculate-job-matrix >> $GITHUB_OUTPUT
         id: jobs
   job:
     name: ${{ matrix.full_name }}
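The `calculate-job-matrix` invocation prints a `key=value` line whose value is a JSON array of jobs; appending it to `$GITHUB_OUTPUT` turns it into an output of the `id: jobs` step. A hedged sketch of the consuming side, assuming the output key is `jobs` and that the `calculate_matrix` job re-exports it:

```yaml
# Sketch of fanning the `job` job out over the computed matrix (output key assumed).
calculate_matrix:
  runs-on: ubuntu-latest
  outputs:
    jobs: ${{ steps.jobs.outputs.jobs }}  # re-export the step output at job level
  # ... the "Calculate the CI job matrix" step shown above, with id: jobs ...
job:
  needs: [ calculate_matrix ]
  strategy:
    matrix:
      # Parse the JSON array emitted by `citool calculate-job-matrix`.
      include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}
  name: ${{ matrix.full_name }}
```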
@@ -173,52 +176,54 @@ jobs:
       - name: ensure the stable version number is correct
         run: src/ci/scripts/verify-stable-version-number.sh
 
-      - name: run the build
-        # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
-        run: src/ci/scripts/run-build-from-ci.sh 2>&1
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
-
-      - name: create github artifacts
-        run: src/ci/scripts/create-doc-artifacts.sh
-
-      - name: print disk usage
+      # Pre-build citool before the following step uninstalls rustup
+      - name: Build citool
         run: |
-          echo "disk usage:"
-          df -h
-
-      - name: upload artifacts to github
-        uses: actions/upload-artifact@v4
-        with:
-          # name is set in previous step
-          name: ${{ env.DOC_ARTIFACT_NAME }}
-          path: obj/artifacts/doc
-          if-no-files-found: ignore
-          retention-days: 5
-
-      - name: upload artifacts to S3
-        run: src/ci/scripts/upload-artifacts.sh
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
-        # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
-        # builders *should* have the AWS credentials available. Still, explicitly
-        # adding the condition is helpful as this way CI will not silently skip
-        # deploying artifacts from a dist builder if the variables are misconfigured,
-        # erroring about invalid credentials instead.
-        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
+          cd src/ci/citool
+          cargo build
+
+#      - name: run the build
+#        # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
+#        run: src/ci/scripts/run-build-from-ci.sh 2>&1
+#        env:
+#          AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
+#          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
+#
+#      - name: create github artifacts
+#        run: src/ci/scripts/create-doc-artifacts.sh
+#
+#      - name: print disk usage
+#        run: |
+#          echo "disk usage:"
+#          df -h
+#
+#      - name: upload artifacts to github
+#        uses: actions/upload-artifact@v4
+#        with:
+#          # name is set in previous step
+#          name: ${{ env.DOC_ARTIFACT_NAME }}
+#          path: obj/artifacts/doc
+#          if-no-files-found: ignore
+#          retention-days: 5
+#
+#      - name: upload artifacts to S3
+#        run: src/ci/scripts/upload-artifacts.sh
+#        env:
+#          AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
+#          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
+#        # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
+#        # builders *should* have the AWS credentials available. Still, explicitly
+#        # adding the condition is helpful as this way CI will not silently skip
+#        # deploying artifacts from a dist builder if the variables are misconfigured,
+#        # erroring about invalid credentials instead.
+#        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
 
       - name: upload job metrics to DataDog
         if: needs.calculate_matrix.outputs.run_type != 'pr'
         env:
-          DATADOG_SITE: datadoghq.com
           DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
           DD_GITHUB_JOB_NAME: ${{ matrix.full_name }}
-        run: |
-          cd src/ci
-          npm ci
-          python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv
+        run: ./src/ci/citool/target/debug/citool upload-build-metrics build/cpu-usage.csv
 
 # This job is used to tell bors the final status of the build, as there is no practical way to detect
 # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
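One consequence of this hunk worth spelling out: the DataDog step now invokes the binary from `target/debug` directly rather than going through `cargo run`, apparently because the toolchain is uninstalled by an intermediate step (per the "Pre-build citool" comment). A minimal sketch of the pattern in isolation, with the uninstall step name assumed:

```yaml
# Sketch: prebuild a helper, then invoke the binary after the toolchain is removed.
- name: Build citool
  run: |
    cd src/ci/citool
    cargo build   # binary lands in src/ci/citool/target/debug/citool
- name: Remove the Rust toolchain   # assumed step name; after it, cargo is unavailable
  run: rustup self uninstall -y
- name: upload job metrics to DataDog
  run: ./src/ci/citool/target/debug/citool upload-build-metrics build/cpu-usage.csv
```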