Auto merge of #124366 - Kobzol:remove-yaml-expansion, r=pietroalbini
CI: remove `expand-yaml-anchors`

This PR unifies all CI outcome jobs into a single job, and then removes the `expand-yaml-anchors` tool, since it is no longer needed after this change. I have tested try builds for both situations with the new `outcome` job (note that these two workflow runs use a different step structure in the outcome job; I have since simplified it):

- [Success](https://github.com/rust-lang-ci/rust/actions/runs/8831529677/job/24251135366)
- [Failure](https://github.com/rust-lang-ci/rust/actions/runs/8833052319/job/24251628792)

r? `@ghost`
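For context on how the single `outcome` job decides the final status: it serializes the `needs` context to JSON and feeds it to `jq`. Below is a minimal sketch of that check run against a hand-written payload; the job names and results in the JSON are made up for illustration and only mimic the shape GitHub exposes via `${{ toJson(needs) }}`.

```bash
# Illustrative stand-in for `${{ toJson(needs) }}`; a real run would contain the
# actual dependency jobs (calculate_matrix, job) and their results.
needs='{
  "calculate_matrix": { "result": "success" },
  "job": { "result": "skipped" }
}'

# Same expression as the workflow's "calculate the correct exit status" step:
# exits 0 only if every dependent job ended in "success" or "skipped".
jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< "$needs"
echo "exit status: $?"   # 0 here; a "failure" or "cancelled" result would make it 1
```

Because bors only watches the single `bors build finished` job, this one check replaces the four separate try-success/try-failure/auto-success/auto-failure jobs from the old anchored configuration.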
This commit is contained in: commit 72f616273c

.github/workflows/ci.yml
@@ -1,21 +1,14 @@
#############################################################
# WARNING: automatically generated file, DO NOT CHANGE! #
#############################################################
# This file defines our primary CI workflow that runs on pull requests
# and also on pushes to special branches (auto, try).
#
# The actual definition of the executed jobs is calculated by a Python
# script located at src/ci/github-actions/calculate-job-matrix.py, which
# uses job definition data from src/ci/github-actions/jobs.yml.
# You should primarily modify the `jobs.yml` file if you want to modify
# what jobs are executed in CI.

# This file was automatically generated by the expand-yaml-anchors tool. The
# source file that generated this one is:
#
# src/ci/github-actions/ci.yml
#
# Once you make changes to that file you need to run:
#
# ./x.py run src/tools/expand-yaml-anchors/
#
# The CI build will fail if the tool is not run after changes to this file.

---
name: CI
"on":
on:
push:
branches:
- auto
@@ -25,23 +18,36 @@ name: CI
pull_request:
branches:
- "**"

permissions:
contents: read
packages: write

defaults:
run:
# On Linux, macOS, and Windows, use the system-provided bash as the default
# shell. (This should only make a difference on Windows, where the default
# shell is PowerShell.)
shell: bash

concurrency:
group: "${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}"
# For a given workflow, if we push to the same branch, cancel all previous builds on that branch.
# We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which
# are all triggered on the same branch, but which should be able to run concurrently.
group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}
cancel-in-progress: true
env:
TOOLSTATE_REPO: "https://github.com/rust-lang-nursery/rust-toolstate"
jobs:
# The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml.
# It calculates which jobs should be executed, based on the data of the ${{ github }} context.
# If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml.
calculate_matrix:
name: Calculate job matrix
runs-on: ubuntu-latest
outputs:
jobs: "${{ steps.jobs.outputs.jobs }}"
jobs: ${{ steps.jobs.outputs.jobs }}
run_type: ${{ steps.jobs.outputs.run_type }}
steps:
- name: Checkout the source code
uses: actions/checkout@v4
@@ -49,152 +55,195 @@ jobs:
run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT
id: jobs
job:
name: "${{ matrix.name }}"
needs:
- calculate_matrix
name: ${{ matrix.name }}
needs: [ calculate_matrix ]
runs-on: "${{ matrix.os }}"
defaults:
run:
shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}"
shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}
timeout-minutes: 600
env:
CI_JOB_NAME: "${{ matrix.image }}"
CI_JOB_NAME: ${{ matrix.image }}
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
HEAD_SHA: "${{ github.event.pull_request.head.sha || github.sha }}"
DOCKER_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
# commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs.
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SCCACHE_BUCKET: rust-lang-ci-sccache2
CACHE_DOMAIN: ci-caches.rust-lang.org
continue-on-error: "${{ matrix.continue_on_error || false }}"
continue-on-error: ${{ matrix.continue_on_error || false }}
strategy:
matrix:
include: "${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}"
if: "fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null"
# Check the `calculate_matrix` job to see how the matrix is defined.
include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}
# GitHub Actions fails the workflow if an empty list of jobs is provided to
# the workflow, so we need to skip this job if nothing was produced by
# the Python script.
#
# Unfortunately checking whether a list is empty is not possible in a nice
# way due to GitHub Actions expressions limits.
# This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82
if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null
steps:
- if: "contains(matrix.os, 'windows')"
- if: contains(matrix.os, 'windows')
uses: msys2/setup-msys2@v2.22.0
with:
msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}"
# i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64.
msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}
# don't try to download updates for already installed packages
update: false
# don't try to use the msys that comes built-in to the github runner,
# so we can control what is installed (i.e. not python)
release: true
# Inherit the full path from the Windows environment, with MSYS2's */bin/
# dirs placed in front. This lets us run Windows-native Python etc.
path-type: inherit
install: "make dos2unix diffutils\n"
install: >
make
dos2unix
diffutils

- name: disable git crlf conversion
run: git config --global core.autocrlf false

- name: checkout the source code
uses: actions/checkout@v4
with:
fetch-depth: 2

# Rust Log Analyzer can't currently detect the PR number of a GitHub
# Actions build on its own, so a hint in the log message is needed to
# point it in the right direction.
- name: configure the PR in which the error message will be posted
run: "echo \"[CI_PR_NUMBER=$num]\""
run: echo "[CI_PR_NUMBER=$num]"
env:
num: "${{ github.event.number }}"
if: "success() && github.event_name == 'pull_request'"
num: ${{ github.event.number }}
if: needs.calculate_matrix.outputs.run_type == 'pr'

- name: add extra environment variables
run: src/ci/scripts/setup-environment.sh
env:
EXTRA_VARIABLES: "${{ toJson(matrix.env) }}"
# Since it's not possible to merge `${{ matrix.env }}` with the other
# variables in `job.<name>.env`, the variables defined in the matrix
# are passed to the `setup-environment.sh` script encoded in JSON,
# which then uses log commands to actually set them.
EXTRA_VARIABLES: ${{ toJson(matrix.env) }}

- name: ensure the channel matches the target branch
run: src/ci/scripts/verify-channel.sh

- name: collect CPU statistics
run: src/ci/scripts/collect-cpu-stats.sh

- name: show the current environment
run: src/ci/scripts/dump-environment.sh

- name: install awscli
run: src/ci/scripts/install-awscli.sh

- name: install sccache
run: src/ci/scripts/install-sccache.sh

- name: select Xcode
run: src/ci/scripts/select-xcode.sh

- name: install clang
run: src/ci/scripts/install-clang.sh

- name: install tidy
run: src/ci/scripts/install-tidy.sh

- name: install WIX
run: src/ci/scripts/install-wix.sh

- name: disable git crlf conversion
run: src/ci/scripts/disable-git-crlf-conversion.sh

- name: checkout submodules
run: src/ci/scripts/checkout-submodules.sh

- name: install MSYS2
run: src/ci/scripts/install-msys2.sh

- name: install MinGW
run: src/ci/scripts/install-mingw.sh

- name: install ninja
run: src/ci/scripts/install-ninja.sh

- name: enable ipv6 on Docker
run: src/ci/scripts/enable-docker-ipv6.sh

# Disable automatic line ending conversion (again). On Windows, when we're
# installing dependencies, something switches the git configuration directory or
# re-enables autocrlf. We've not tracked down the exact cause -- and there may
# be multiple -- but this should ensure submodules are checked out with the
# appropriate line endings.
- name: disable git crlf conversion
run: src/ci/scripts/disable-git-crlf-conversion.sh

- name: ensure line endings are correct
run: src/ci/scripts/verify-line-endings.sh

- name: ensure backported commits are in upstream branches
run: src/ci/scripts/verify-backported-commits.sh

- name: ensure the stable version number is correct
run: src/ci/scripts/verify-stable-version-number.sh

- name: run the build
# Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
run: src/ci/scripts/run-build-from-ci.sh 2>&1
env:
AWS_ACCESS_KEY_ID: "${{ env.CACHES_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}"
TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}"
AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}

- name: create github artifacts
run: src/ci/scripts/create-doc-artifacts.sh

- name: upload artifacts to github
uses: actions/upload-artifact@v4
with:
name: "${{ env.DOC_ARTIFACT_NAME }}"
# name is set in previous step
name: ${{ env.DOC_ARTIFACT_NAME }}
path: obj/artifacts/doc
if-no-files-found: ignore
retention-days: 5

- name: upload artifacts to S3
run: src/ci/scripts/upload-artifacts.sh
env:
AWS_ACCESS_KEY_ID: "${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}"
if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')"
try-success:
needs:
- job
if: "success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'"
steps:
- name: mark the job as a success
run: exit 0
shell: bash
AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
# Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
# builders *should* have the AWS credentials available. Still, explicitly
# adding the condition is helpful as this way CI will not silently skip
# deploying artifacts from a dist builder if the variables are misconfigured,
# erroring about invalid credentials instead.
if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'

# This job is used to tell bors the final status of the build, as there is no practical way to detect
# when a workflow is successful listening to webhooks only in our current bors implementation (homu).
outcome:
name: bors build finished
runs-on: ubuntu-latest
try-failure:
needs:
- job
if: "!success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'"
steps:
- name: mark the job as a failure
run: exit 1
shell: bash
name: bors build finished
runs-on: ubuntu-latest
auto-success:
needs:
- job
if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'"
needs: [ calculate_matrix, job ]
# !cancelled() executes the job regardless of whether the previous jobs passed or failed
if: ${{ !cancelled() && contains(fromJSON('["auto", "try"]'), needs.calculate_matrix.outputs.run_type) }}
steps:
- name: checkout the source code
uses: actions/checkout@v4
with:
fetch-depth: 2
# Calculate the exit status of the whole CI workflow.
# If all dependent jobs were successful, this exits with 0 (and the outcome job continues successfully).
# If some dependent job has failed, this exits with 1.
- name: calculate the correct exit status
run: jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}'
# Publish the toolstate if an auto build succeeds (just before push to master)
- name: publish toolstate
run: src/ci/publish_toolstate.sh
shell: bash
if: needs.calculate_matrix.outputs.run_type == 'auto'
env:
TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}"
name: bors build finished
runs-on: ubuntu-latest
auto-failure:
needs:
- job
if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'"
steps:
- name: mark the job as a failure
run: exit 1
shell: bash
name: bors build finished
runs-on: ubuntu-latest
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}
Cargo.lock
@@ -1254,14 +1254,6 @@ dependencies = [
"mdbook",
]

[[package]]
name = "expand-yaml-anchors"
version = "0.1.0"
dependencies = [
"yaml-merge-keys",
"yaml-rust",
]

[[package]]
name = "expect-test"
version = "1.5.0"
@@ -2234,12 +2226,6 @@ dependencies = [
"regex",
]

[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"

[[package]]
name = "lint-docs"
version = "0.1.0"
@@ -6540,26 +6526,6 @@ dependencies = [
"lzma-sys",
]

[[package]]
name = "yaml-merge-keys"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd236a7dc9bb598f349fe4a8754f49181fee50284daa15cd1ba652d722280004"
dependencies = [
"lazy_static",
"thiserror",
"yaml-rust",
]

[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]

[[package]]
name = "yansi-term"
version = "0.1.2"
@@ -31,7 +31,6 @@ members = [
"src/tools/miri/cargo-miri",
"src/tools/rustdoc-themes",
"src/tools/unicode-table-generator",
"src/tools/expand-yaml-anchors",
"src/tools/jsondocck",
"src/tools/jsondoclint",
"src/tools/llvm-bitcode-linker",
@@ -15,32 +15,6 @@
use crate::utils::helpers::output;
use crate::Mode;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;

impl Step for ExpandYamlAnchors {
type Output = ();

/// Runs the `expand-yaml-anchors` tool.
///
/// This tool in `src/tools` reads the CI configuration files written in YAML and expands the
/// anchors in them, since GitHub Actions doesn't support them.
fn run(self, builder: &Builder<'_>) {
builder.info("Expanding YAML anchors in the GitHub Actions configuration");
builder.run_delaying_failure(
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
);
}

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/expand-yaml-anchors")
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(ExpandYamlAnchors);
}
}

#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
pub struct BuildManifest;
@@ -1146,8 +1146,6 @@ fn run(self, builder: &Builder<'_>) {
builder.info("tidy check");
builder.run_delaying_failure(&mut cmd);

builder.ensure(ExpandYamlAnchors);

builder.info("x.py completions check");
let [bash, zsh, fish, powershell] = ["x.py.sh", "x.py.zsh", "x.py.fish", "x.py.ps1"]
.map(|filename| builder.src.join("src/etc/completions").join(filename));
@@ -1175,39 +1173,6 @@ fn make_run(run: RunConfig<'_>) {
}
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ExpandYamlAnchors;

impl Step for ExpandYamlAnchors {
type Output = ();
const ONLY_HOSTS: bool = true;

/// Ensure the `generate-ci-config` tool was run locally.
///
/// The tool in `src/tools` reads the CI definition in `src/ci/builders.yml` and generates the
/// appropriate configuration for all our CI providers. This step ensures the tool was called
/// by the user before committing CI changes.
fn run(self, builder: &Builder<'_>) {
// NOTE: `.github/` is not included in dist-src tarballs
if !builder.src.join(".github/workflows/ci.yml").exists() {
builder.info("Skipping YAML anchors check: GitHub Actions config not found");
return;
}
builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded");
builder.run_delaying_failure(
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
);
}

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/expand-yaml-anchors")
}

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(ExpandYamlAnchors);
}
}

fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf {
builder.out.join(host.triple).join("test")
}
@@ -302,7 +302,6 @@ fn run(self, builder: &Builder<'_>) -> PathBuf {
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client";
RustInstaller, "src/tools/rust-installer", "rust-installer";
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";
LintDocs, "src/tools/lint-docs", "lint-docs";
JsonDocCk, "src/tools/jsondocck", "jsondocck";
JsonDocLint, "src/tools/jsondoclint", "jsondoclint";
@@ -791,7 +791,6 @@ macro_rules! describe {
),
Kind::Test => describe!(
crate::core::build_steps::toolstate::ToolStateCheck,
test::ExpandYamlAnchors,
test::Tidy,
test::Ui,
test::Crashes,
@@ -933,7 +932,6 @@ macro_rules! describe {
install::Src,
),
Kind::Run => describe!(
run::ExpandYamlAnchors,
run::BuildManifest,
run::BumpStage0,
run::ReplaceVersionPlaceholder,
@@ -420,7 +420,7 @@ pub enum Subcommand {
Arguments:
This subcommand accepts a number of paths to tools to build and run. For
example:
./x.py run src/tools/expand-yaml-anchors
./x.py run src/tools/bump-stage0
At least a tool needs to be called.")]
/// Run tools contained in this repository
Run {
@@ -42,11 +42,10 @@ COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/

ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1

ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
# Check library crates on all tier 1 targets.
# We disable optimized compiler built-ins because that requires a C toolchain for the target.
# We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs.
python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
# Check library crates on all tier 1 targets.
# We disable optimized compiler built-ins because that requires a C toolchain for the target.
# We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs.
ENV SCRIPT python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
python3 ../x.py clippy bootstrap -Dwarnings && \
python3 ../x.py clippy compiler library -Aclippy::all -Dclippy::correctness && \
@@ -44,7 +44,7 @@ def add_base_env(jobs: List[Job], environment: Dict[str, str]) -> List[Job]:
return jobs


class JobType(enum.Enum):
class WorkflowRunType(enum.Enum):
PR = enum.auto()
Try = enum.auto()
Auto = enum.auto()
@@ -57,9 +57,9 @@ class GitHubCtx:
repository: str


def find_job_type(ctx: GitHubCtx) -> Optional[JobType]:
def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
if ctx.event_name == "pull_request":
return JobType.PR
return WorkflowRunType.PR
elif ctx.event_name == "push":
old_bors_try_build = (
ctx.ref in ("refs/heads/try", "refs/heads/try-perf") and
@@ -72,20 +72,20 @@ def find_job_type(ctx: GitHubCtx) -> Optional[JobType]:
try_build = old_bors_try_build or new_bors_try_build

if try_build:
return JobType.Try
return WorkflowRunType.Try

if ctx.ref == "refs/heads/auto" and ctx.repository == "rust-lang-ci/rust":
return JobType.Auto
return WorkflowRunType.Auto

return None


def calculate_jobs(job_type: JobType, job_data: Dict[str, Any]) -> List[Job]:
if job_type == JobType.PR:
def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[Job]:
if run_type == WorkflowRunType.PR:
return add_base_env(name_jobs(job_data["pr"], "PR"), job_data["envs"]["pr"])
elif job_type == JobType.Try:
elif run_type == WorkflowRunType.Try:
return add_base_env(name_jobs(job_data["try"], "try"), job_data["envs"]["try"])
elif job_type == JobType.Auto:
elif run_type == WorkflowRunType.Auto:
return add_base_env(name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"])

return []
@@ -106,6 +106,17 @@ def get_github_ctx() -> GitHubCtx:
)


def format_run_type(run_type: WorkflowRunType) -> str:
if run_type == WorkflowRunType.PR:
return "pr"
elif run_type == WorkflowRunType.Auto:
return "auto"
elif run_type == WorkflowRunType.Try:
return "try"
else:
raise AssertionError()


if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)

@@ -114,16 +125,18 @@ if __name__ == "__main__":

github_ctx = get_github_ctx()

job_type = find_job_type(github_ctx)
logging.info(f"Job type: {job_type}")
run_type = find_run_type(github_ctx)
logging.info(f"Job type: {run_type}")

with open(CI_DIR / "channel") as f:
channel = f.read().strip()

jobs = []
if job_type is not None:
jobs = calculate_jobs(job_type, data)
if run_type is not None:
jobs = calculate_jobs(run_type, data)
jobs = skip_jobs(jobs, channel)
run_type = format_run_type(run_type)

logging.info(f"Output:\n{yaml.dump(jobs, indent=4)}")
logging.info(f"Output:\n{yaml.dump(dict(jobs=jobs, run_type=run_type), indent=4)}")
print(f"jobs={json.dumps(jobs)}")
print(f"run_type={run_type}")
@@ -1,302 +0,0 @@
######################################################
# WARNING! Action needed when changing this file #
######################################################

# Due to GitHub Actions limitations, we can't use YAML Anchors directly in the
# CI configuration stored on the repository. To work around that this file is
# expanded by a tool in the repository, and the expansion is committed as well.
#
# After you make any change to the file you'll need to run this command:
#
# ./x.py run src/tools/expand-yaml-anchors
#
# ...and commit the file it updated in addition to this one. If you forget this
# step CI will fail.

---
###############################
# YAML Anchors Definition #
###############################

# This key contains most of the YAML anchors that will be used later in the
# document. YAML anchors allows us to greatly reduce duplication inside the CI
# configuration by reusing parts of the configuration.
#
# YAML anchors work by defining an anchor with `&anchor-name` and reusing its
# content in another place with `*anchor-name`. The special `<<` map key merges
# the content of the map with the content of the anchor (or list of anchors).
#
# The expand-yaml-anchors tool will automatically remove this block from the
# output YAML file.
x--expand-yaml-anchors--remove:
# These snippets are used by the try-success, try-failure, auto-success and auto-failure jobs.
# Check out their documentation for more information on why they're needed.

- &base-outcome-job
name: bors build finished
runs-on: ubuntu-latest

- &base-success-job
steps:
- name: mark the job as a success
run: exit 0
shell: bash
<<: *base-outcome-job

- &base-failure-job
steps:
- name: mark the job as a failure
run: exit 1
shell: bash
<<: *base-outcome-job

###########################
# Builders definition #
###########################

name: CI
on:
push:
branches:
- auto
- try
- try-perf
- automation/bors/try
pull_request:
branches:
- "**"

permissions:
contents: read
packages: write

defaults:
run:
# On Linux, macOS, and Windows, use the system-provided bash as the default
# shell. (This should only make a difference on Windows, where the default
# shell is PowerShell.)
shell: bash

concurrency:
# For a given workflow, if we push to the same branch, cancel all previous builds on that branch.
# We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which
# are all triggered on the same branch, but which should be able to run concurrently.
group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}
cancel-in-progress: true

env:
TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate

jobs:
# The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml.
# It calculates which jobs should be executed, based on the data of the ${{ github }} context.
# If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml.
calculate_matrix:
name: Calculate job matrix
runs-on: ubuntu-latest
outputs:
jobs: ${{ steps.jobs.outputs.jobs }}
steps:
- name: Checkout the source code
uses: actions/checkout@v4
- name: Calculate the CI job matrix
run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT
id: jobs
job:
name: ${{ matrix.name }}
needs: [ calculate_matrix ]
runs-on: "${{ matrix.os }}"
defaults:
run:
shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}
timeout-minutes: 600
env:
CI_JOB_NAME: ${{ matrix.image }}
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
# commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs.
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SCCACHE_BUCKET: rust-lang-ci-sccache2
CACHE_DOMAIN: ci-caches.rust-lang.org
continue-on-error: ${{ matrix.continue_on_error || false }}
strategy:
matrix:
# Check the `calculate_matrix` job to see how is the matrix defined.
include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}
# GitHub Actions fails the workflow if an empty list of jobs is provided to
# the workflow, so we need to skip this job if nothing was produced by
# the Python script.
#
# Unfortunately checking whether a list is empty is not possible in a nice
# way due to GitHub Actions expressions limits.
# This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82
if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null
steps:
- if: contains(matrix.os, 'windows')
uses: msys2/setup-msys2@v2.22.0
with:
# i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64.
msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}
# don't try to download updates for already installed packages
update: false
# don't try to use the msys that comes built-in to the github runner,
# so we can control what is installed (i.e. not python)
release: true
# Inherit the full path from the Windows environment, with MSYS2's */bin/
# dirs placed in front. This lets us run Windows-native Python etc.
path-type: inherit
install: >
make
dos2unix
diffutils

- name: disable git crlf conversion
run: git config --global core.autocrlf false

- name: checkout the source code
uses: actions/checkout@v4
with:
fetch-depth: 2

# Rust Log Analyzer can't currently detect the PR number of a GitHub
# Actions build on its own, so a hint in the log message is needed to
# point it in the right direction.
- name: configure the PR in which the error message will be posted
run: echo "[CI_PR_NUMBER=$num]"
env:
num: ${{ github.event.number }}
if: success() && github.event_name == 'pull_request'

- name: add extra environment variables
run: src/ci/scripts/setup-environment.sh
env:
# Since it's not possible to merge `${{ matrix.env }}` with the other
# variables in `job.<name>.env`, the variables defined in the matrix
# are passed to the `setup-environment.sh` script encoded in JSON,
# which then uses log commands to actually set them.
EXTRA_VARIABLES: ${{ toJson(matrix.env) }}

- name: ensure the channel matches the target branch
run: src/ci/scripts/verify-channel.sh

- name: collect CPU statistics
run: src/ci/scripts/collect-cpu-stats.sh

- name: show the current environment
run: src/ci/scripts/dump-environment.sh

- name: install awscli
run: src/ci/scripts/install-awscli.sh

- name: install sccache
run: src/ci/scripts/install-sccache.sh

- name: select Xcode
run: src/ci/scripts/select-xcode.sh

- name: install clang
run: src/ci/scripts/install-clang.sh

- name: install tidy
run: src/ci/scripts/install-tidy.sh

- name: install WIX
run: src/ci/scripts/install-wix.sh

- name: disable git crlf conversion
run: src/ci/scripts/disable-git-crlf-conversion.sh

- name: checkout submodules
run: src/ci/scripts/checkout-submodules.sh

- name: install MSYS2
run: src/ci/scripts/install-msys2.sh

- name: install MinGW
run: src/ci/scripts/install-mingw.sh

- name: install ninja
run: src/ci/scripts/install-ninja.sh

- name: enable ipv6 on Docker
run: src/ci/scripts/enable-docker-ipv6.sh

# Disable automatic line ending conversion (again). On Windows, when we're
# installing dependencies, something switches the git configuration directory or
# re-enables autocrlf. We've not tracked down the exact cause -- and there may
# be multiple -- but this should ensure submodules are checked out with the
# appropriate line endings.
- name: disable git crlf conversion
run: src/ci/scripts/disable-git-crlf-conversion.sh

- name: ensure line endings are correct
run: src/ci/scripts/verify-line-endings.sh

- name: ensure backported commits are in upstream branches
run: src/ci/scripts/verify-backported-commits.sh

- name: ensure the stable version number is correct
run: src/ci/scripts/verify-stable-version-number.sh

- name: run the build
# Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
run: src/ci/scripts/run-build-from-ci.sh 2>&1
env:
AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}

- name: create github artifacts
run: src/ci/scripts/create-doc-artifacts.sh

- name: upload artifacts to github
uses: actions/upload-artifact@v4
with:
# name is set in previous step
name: ${{ env.DOC_ARTIFACT_NAME }}
path: obj/artifacts/doc
if-no-files-found: ignore
retention-days: 5

- name: upload artifacts to S3
run: src/ci/scripts/upload-artifacts.sh
env:
AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
# Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
# builders *should* have the AWS credentials available. Still, explicitly
# adding the condition is helpful as this way CI will not silently skip
# deploying artifacts from a dist builder if the variables are misconfigured,
# erroring about invalid credentials instead.
if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')

# These jobs don't actually test anything, but they're used to tell bors the
# build completed, as there is no practical way to detect when a workflow is
# successful listening to webhooks only.
try-success:
needs: [ job ]
if: "success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'"
<<: *base-success-job
try-failure:
needs: [ job ]
if: "!success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'"
<<: *base-failure-job
auto-success:
needs: [ job ]
if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'"
<<: *base-outcome-job
steps:
- name: checkout the source code
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: publish toolstate
run: src/ci/publish_toolstate.sh
shell: bash
env:
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}

auto-failure:
needs: [ job ]
if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'"
<<: *base-failure-job
@@ -1,7 +1,5 @@
# This file contains definitions of CI job parameters that are loaded
# dynamically in CI from ci.yml.
# You *do not* need to re-run `src/tools/expand-yaml-anchors` when you
# modify this file.
runners:
- &base-job
env: { }
@@ -1,8 +0,0 @@
[package]
name = "expand-yaml-anchors"
version = "0.1.0"
edition = "2021"

[dependencies]
yaml-rust = "0.4.3"
yaml-merge-keys = "0.4.0"
@@ -1,198 +0,0 @@
use std::error::Error;
use std::path::{Path, PathBuf};
use yaml_rust::{Yaml, YamlEmitter, YamlLoader};

/// List of files to expand. The first tuple element is the source
/// file, while the second tuple element is the destination file.
#[rustfmt::skip]
static TO_EXPAND: &[(&str, &str)] = &[
("src/ci/github-actions/ci.yml", ".github/workflows/ci.yml"),
];

/// Name of a special key that will be removed from all the maps in expanded configuration files.
/// This key can then be used to contain shared anchors.
static REMOVE_MAP_KEY: &str = "x--expand-yaml-anchors--remove";

/// Message that will be included at the top of all the expanded files. {source} will be replaced
/// with the source filename relative to the base path.
static HEADER_MESSAGE: &str = "\
#############################################################
# WARNING: automatically generated file, DO NOT CHANGE! #
#############################################################

# This file was automatically generated by the expand-yaml-anchors tool. The
# source file that generated this one is:
#
# {source}
#
# Once you make changes to that file you need to run:
#
# ./x.py run src/tools/expand-yaml-anchors/
#
# The CI build will fail if the tool is not run after changes to this file.

";

enum Mode {
Check,
Generate,
}

struct App {
mode: Mode,
base: PathBuf,
}

impl App {
fn from_args() -> Result<Self, Box<dyn Error>> {
// Parse CLI arguments
let args = std::env::args().skip(1).collect::<Vec<_>>();
let (mode, base) = match args.iter().map(|s| s.as_str()).collect::<Vec<_>>().as_slice() {
["generate", ref base] => (Mode::Generate, PathBuf::from(base)),
["check", ref base] => (Mode::Check, PathBuf::from(base)),
_ => {
eprintln!("usage: expand-yaml-anchors <generate|check> <base-dir>");
std::process::exit(1);
}
};

Ok(App { mode, base })
}

fn run(&self) -> Result<(), Box<dyn Error>> {
for (source, dest) in TO_EXPAND {
let source = self.base.join(source);
let dest_path = self.base.join(dest);

self.expand(&source, &dest_path).with_context(|| match self.mode {
Mode::Generate => format!(
"failed to expand {} into {}",
self.path(&source),
self.path(&dest_path)
),
Mode::Check => format!(
"{} is not up to date; please run \
`x.py run src/tools/expand-yaml-anchors`.",
self.path(&dest_path)
),
})?;
}
Ok(())
}

fn expand(&self, source: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
let content = std::fs::read_to_string(source)
.with_context(|| format!("failed to read {}", self.path(source)))?;

let mut buf =
HEADER_MESSAGE.replace("{source}", &self.path(source).to_string().replace("\\", "/"));

let documents = YamlLoader::load_from_str(&content)
.with_context(|| format!("failed to parse {}", self.path(source)))?;
for mut document in documents.into_iter() {
document = yaml_merge_keys::merge_keys(document)
.with_context(|| format!("failed to expand {}", self.path(source)))?;
document = filter_document(document);

YamlEmitter::new(&mut buf).dump(&document).map_err(|err| WithContext {
context: "failed to serialize the expanded yaml".into(),
source: Box::new(err),
})?;
buf.push('\n');
}

match self.mode {
Mode::Check => {
let old = std::fs::read_to_string(dest)
.with_context(|| format!("failed to read {}", self.path(dest)))?;
if old != buf {
return Err(Box::new(StrError(format!(
"{} and {} are different",
self.path(source),
self.path(dest),
))));
}
}
Mode::Generate => {
std::fs::write(dest, buf.as_bytes())
.with_context(|| format!("failed to write to {}", self.path(dest)))?;
}
}
Ok(())
}

fn path<'a>(&self, path: &'a Path) -> impl std::fmt::Display + 'a {
path.strip_prefix(&self.base).unwrap_or(path).display()
}
}

fn filter_document(document: Yaml) -> Yaml {
match document {
Yaml::Hash(map) => Yaml::Hash(
map.into_iter()
.filter(|(key, _)| {
if let Yaml::String(string) = &key { string != REMOVE_MAP_KEY } else { true }
})
.map(|(key, value)| (filter_document(key), filter_document(value)))
.collect(),
),
Yaml::Array(vec) => Yaml::Array(vec.into_iter().map(filter_document).collect()),
other => other,
}
}

fn main() {
if let Err(err) = App::from_args().and_then(|app| app.run()) {
eprintln!("error: {}", err);

let mut source = err.as_ref() as &dyn Error;
while let Some(err) = source.source() {
eprintln!("caused by: {}", err);
source = err;
}

std::process::exit(1);
}
}

#[derive(Debug)]
struct StrError(String);

impl Error for StrError {}

impl std::fmt::Display for StrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.0, f)
}
}

#[derive(Debug)]
struct WithContext {
context: String,
source: Box<dyn Error>,
}

impl std::fmt::Display for WithContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.context)
}
}

impl Error for WithContext {
fn source(&self) -> Option<&(dyn Error + 'static)> {
Some(self.source.as_ref())
}
}

pub(crate) trait ResultExt<T> {
fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>>;
}

impl<T, E: Into<Box<dyn Error>>> ResultExt<T> for Result<T, E> {
fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>> {
match self {
Ok(ok) => Ok(ok),
Err(err) => Err(WithContext { source: err.into(), context: f() }.into()),
}
}
}