Skip to content

Commit a4e8dc6

Browse files
committed
Auto merge of rust-lang#136977 - Kobzol:citool-datadog, r=<try>
[WIP] Upload Datadog metrics with citool Opening as a draft for testing. r? `@ghost`
2 parents 905b1bf + 38ad38d commit a4e8dc6

File tree

19 files changed

+1877
-5462
lines changed

19 files changed

+1877
-5462
lines changed

.github/workflows/ci.yml

+47-42
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# This file defines our primary CI workflow that runs on pull requests
22
# and also on pushes to special branches (auto, try).
33
#
4-
# The actual definition of the executed jobs is calculated by a Python
5-
# script located at src/ci/github-actions/ci.py, which
4+
# The actual definition of the executed jobs is calculated by the
5+
# `src/ci/citool` crate, which
66
# uses job definition data from src/ci/github-actions/jobs.yml.
77
# You should primarily modify the `jobs.yml` file if you want to modify
88
# what jobs are executed in CI.
@@ -56,7 +56,10 @@ jobs:
5656
- name: Calculate the CI job matrix
5757
env:
5858
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
59-
run: python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT
59+
run: |
60+
cd src/ci/citool
61+
cargo test
62+
cargo run calculate-job-matrix >> $GITHUB_OUTPUT
6063
id: jobs
6164
job:
6265
name: ${{ matrix.full_name }}
@@ -173,52 +176,54 @@ jobs:
173176
- name: ensure the stable version number is correct
174177
run: src/ci/scripts/verify-stable-version-number.sh
175178

176-
- name: run the build
177-
# Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
178-
run: src/ci/scripts/run-build-from-ci.sh 2>&1
179-
env:
180-
AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
181-
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
182-
183-
- name: create github artifacts
184-
run: src/ci/scripts/create-doc-artifacts.sh
185-
186-
- name: print disk usage
179+
# Prebuild citool before the following step uninstalls rustup
180+
- name: Build citool
187181
run: |
188-
echo "disk usage:"
189-
df -h
190-
191-
- name: upload artifacts to github
192-
uses: actions/upload-artifact@v4
193-
with:
194-
# name is set in previous step
195-
name: ${{ env.DOC_ARTIFACT_NAME }}
196-
path: obj/artifacts/doc
197-
if-no-files-found: ignore
198-
retention-days: 5
199-
200-
- name: upload artifacts to S3
201-
run: src/ci/scripts/upload-artifacts.sh
202-
env:
203-
AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
204-
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
205-
# Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
206-
# builders *should* have the AWS credentials available. Still, explicitly
207-
# adding the condition is helpful as this way CI will not silently skip
208-
# deploying artifacts from a dist builder if the variables are misconfigured,
209-
# erroring about invalid credentials instead.
210-
if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
182+
cd src/ci/citool
183+
cargo build
184+
185+
# - name: run the build
186+
# # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
187+
# run: src/ci/scripts/run-build-from-ci.sh 2>&1
188+
# env:
189+
# AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
190+
# AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
191+
#
192+
# - name: create github artifacts
193+
# run: src/ci/scripts/create-doc-artifacts.sh
194+
#
195+
# - name: print disk usage
196+
# run: |
197+
# echo "disk usage:"
198+
# df -h
199+
#
200+
# - name: upload artifacts to github
201+
# uses: actions/upload-artifact@v4
202+
# with:
203+
# # name is set in previous step
204+
# name: ${{ env.DOC_ARTIFACT_NAME }}
205+
# path: obj/artifacts/doc
206+
# if-no-files-found: ignore
207+
# retention-days: 5
208+
#
209+
# - name: upload artifacts to S3
210+
# run: src/ci/scripts/upload-artifacts.sh
211+
# env:
212+
# AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
213+
# AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
214+
# # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
215+
# # builders *should* have the AWS credentials available. Still, explicitly
216+
# # adding the condition is helpful as this way CI will not silently skip
217+
# # deploying artifacts from a dist builder if the variables are misconfigured,
218+
# # erroring about invalid credentials instead.
219+
# if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
211220

212221
- name: upload job metrics to DataDog
213222
if: needs.calculate_matrix.outputs.run_type != 'pr'
214223
env:
215-
DATADOG_SITE: datadoghq.com
216224
DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
217225
DD_GITHUB_JOB_NAME: ${{ matrix.full_name }}
218-
run: |
219-
cd src/ci
220-
npm ci
221-
python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv
226+
run: ./src/ci/citool/target/debug/citool upload-build-metrics build/cpu-usage.csv
222227

223228
# This job isused to tell bors the final status of the build, as there is no practical way to detect
224229
# when a workflow is successful listening to webhooks only in our current bors implementation (homu).

0 commit comments

Comments
 (0)