use std::ffi::OsStr;
use std::fs;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::process::Command;

use crate::build_sysroot::STDLIB_SRC;
use crate::path::{Dirs, RelPath};
use crate::rustc_info::get_default_sysroot;
use crate::utils::{
    copy_dir_recursively, git_command, remove_dir_if_exists, retry_spawn_and_wait, spawn_and_wait,
};

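// Download the `rand` and `regex` test crates declared in `crate::tests` into
// the download directory.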
pub(crate) fn prepare(dirs: &Dirs) {
    RelPath::DOWNLOAD.ensure_exists(dirs);
    crate::tests::RAND_REPO.fetch(dirs);
    crate::tests::REGEX_REPO.fetch(dirs);
}

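// Copy the standard library sources out of the given rustc's sysroot, apply the
// local patches and write a minimal Cargo workspace around `library/sysroot`
// together with the pinned `stdlib-lock.toml` lockfile.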
pub(crate) fn prepare_stdlib(dirs: &Dirs, rustc: &Path) {
    let sysroot_src_orig = get_default_sysroot(rustc).join("lib/rustlib/src/rust");
    assert!(sysroot_src_orig.exists());

    apply_patches(dirs, "stdlib", &sysroot_src_orig, &STDLIB_SRC.to_path(dirs));

    std::fs::write(
        STDLIB_SRC.to_path(dirs).join("Cargo.toml"),
        r#"
[workspace]
resolver = "1"
members = ["./library/sysroot"]

[patch.crates-io]
rustc-std-workspace-core = { path = "./library/rustc-std-workspace-core" }
rustc-std-workspace-alloc = { path = "./library/rustc-std-workspace-alloc" }
rustc-std-workspace-std = { path = "./library/rustc-std-workspace-std" }

# Mandatory for correctly compiling compiler-builtins
[profile.dev.package.compiler_builtins]
debug-assertions = false
overflow-checks = false
codegen-units = 10000

[profile.release.package.compiler_builtins]
debug-assertions = false
overflow-checks = false
codegen-units = 10000
"#,
    )
    .unwrap();

    let source_lockfile = RelPath::PATCHES.to_path(dirs).join("stdlib-lock.toml");
    let target_lockfile = STDLIB_SRC.to_path(dirs).join("Cargo.lock");
    fs::copy(source_lockfile, target_lockfile).unwrap();
}

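// An external git repository pinned to a specific revision, together with the
// content hash used to verify downloads and the name prefix of the patches
// that apply to it.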
pub(crate) struct GitRepo {
    url: GitRepoUrl,
    rev: &'static str,
    content_hash: &'static str,
    patch_name: &'static str,
}

enum GitRepoUrl {
    Github { user: &'static str, repo: &'static str },
}

// Note: This uses a hasher which is not cryptographically secure. This is fine as the hash is meant
// to protect against accidental modification and outdated downloads, not against manipulation.
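// Hash the raw contents of a single file.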
fn hash_file(file: &std::path::Path) -> u64 {
    let contents = std::fs::read(file).unwrap();
    #[allow(deprecated)]
    let mut hasher = std::hash::SipHasher::new();
    // The following is equivalent to
    //   std::hash::Hash::hash(&contents, &mut hasher);
    // but gives the same result independent of host byte order.
    hasher.write_usize(contents.len().to_le());
    Hash::hash_slice(&contents, &mut hasher);
    std::hash::Hasher::finish(&hasher)
}

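// Recursively hash a directory. Entries are collected into a `BTreeMap` keyed by
// file name, so the result does not depend on the order `read_dir` returns them in.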
fn hash_dir(dir: &std::path::Path) -> u64 {
    let mut sub_hashes = std::collections::BTreeMap::new();
    for entry in std::fs::read_dir(dir).unwrap() {
        let entry = entry.unwrap();
        if entry.file_type().unwrap().is_dir() {
            sub_hashes.insert(
                entry.file_name().to_str().unwrap().to_owned(),
                hash_dir(&entry.path()).to_le(),
            );
        } else {
            sub_hashes.insert(
                entry.file_name().to_str().unwrap().to_owned(),
                hash_file(&entry.path()).to_le(),
            );
        }
    }
    #[allow(deprecated)]
    let mut hasher = std::hash::SipHasher::new();
    // The following is equivalent to
    //   std::hash::Hash::hash(&sub_hashes, &mut hasher);
    // but gives the same result independent of host byte order.
    hasher.write_usize(sub_hashes.len().to_le());
    for elt in sub_hashes {
        elt.hash(&mut hasher);
    }
    std::hash::Hasher::finish(&hasher)
}

impl GitRepo {
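    // Construct a `GitRepo` pinned to a GitHub repository at a specific revision.
    // As an illustration only (the real definitions live in `crate::tests` with
    // their actual revisions and content hashes), a repo might be declared as:
    //
    //     GitRepo::github("rust-random", "rand", "<rev>", "<content hash>", "rand")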
    pub(crate) const fn github(
        user: &'static str,
        repo: &'static str,
        rev: &'static str,
        content_hash: &'static str,
        patch_name: &'static str,
    ) -> GitRepo {
        GitRepo { url: GitRepoUrl::Github { user, repo }, rev, content_hash, patch_name }
    }

    fn download_dir(&self, dirs: &Dirs) -> PathBuf {
        match self.url {
            GitRepoUrl::Github { user: _, repo } => RelPath::DOWNLOAD.join(repo).to_path(dirs),
        }
    }

    pub(crate) const fn source_dir(&self) -> RelPath {
        match self.url {
            GitRepoUrl::Github { user: _, repo } => RelPath::BUILD.join(repo),
        }
    }

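    // Download the repository if it is missing or its content hash doesn't match,
    // put the pinned lockfile in place and verify the hash of the result.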
    pub(crate) fn fetch(&self, dirs: &Dirs) {
        let download_dir = self.download_dir(dirs);

        if download_dir.exists() {
            let actual_hash = format!("{:016x}", hash_dir(&download_dir));
            if actual_hash == self.content_hash {
                eprintln!("[FRESH] {}", download_dir.display());
                return;
            } else {
                eprintln!(
                    "Mismatched content hash for {download_dir}: {actual_hash} != {content_hash}. Downloading again.",
                    download_dir = download_dir.display(),
                    content_hash = self.content_hash,
                );
            }
        }

        match self.url {
            GitRepoUrl::Github { user, repo } => {
                clone_repo_shallow_github(dirs, &download_dir, user, repo, self.rev);
            }
        }

        let source_lockfile =
            RelPath::PATCHES.to_path(dirs).join(format!("{}-lock.toml", self.patch_name));
        let target_lockfile = download_dir.join("Cargo.lock");
        if source_lockfile.exists() {
            assert!(!target_lockfile.exists());
            fs::copy(source_lockfile, target_lockfile).unwrap();
        } else {
            assert!(target_lockfile.exists());
        }

        let actual_hash = format!("{:016x}", hash_dir(&download_dir));
        if actual_hash != self.content_hash {
            eprintln!(
                "Download of {download_dir} failed with mismatched content hash: {actual_hash} != {content_hash}",
                download_dir = download_dir.display(),
                content_hash = self.content_hash,
            );
            std::process::exit(1);
        }
    }

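    // Copy the downloaded sources into the build directory and apply this
    // repository's patches on top.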
    pub(crate) fn patch(&self, dirs: &Dirs) {
        apply_patches(
            dirs,
            self.patch_name,
            &self.download_dir(dirs),
            &self.source_dir().to_path(dirs),
        );
    }
}

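// Clone `repo` with git, check out `rev` and strip the `.git` directory so the
// result is a plain source tree.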
#[allow(dead_code)]
fn clone_repo(download_dir: &Path, repo: &str, rev: &str) {
    eprintln!("[CLONE] {}", repo);
    // Ignore exit code as the repo may already have been checked out
    git_command(None, "clone").arg(repo).arg(download_dir).spawn().unwrap().wait().unwrap();

    let mut clean_cmd = git_command(download_dir, "checkout");
    clean_cmd.arg("--").arg(".");
    spawn_and_wait(clean_cmd);

    let mut checkout_cmd = git_command(download_dir, "checkout");
    checkout_cmd.arg("-q").arg(rev);
    spawn_and_wait(checkout_cmd);

    std::fs::remove_dir_all(download_dir.join(".git")).unwrap();
}

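// Download a source archive of the given revision from GitHub with curl and
// unpack it with tar; on Windows fall back to a full git clone instead.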
fn clone_repo_shallow_github(dirs: &Dirs, download_dir: &Path, user: &str, repo: &str, rev: &str) {
    if cfg!(windows) {
        // Older Windows versions don't have tar or curl by default. Fall back to using git.
        clone_repo(download_dir, &format!("https://github.com/{}/{}.git", user, repo), rev);
        return;
    }

    let archive_url = format!("https://github.com/{}/{}/archive/{}.tar.gz", user, repo, rev);
    let archive_file = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}.tar.gz", rev));
    let archive_dir = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}-{}", repo, rev));

    eprintln!("[DOWNLOAD] {}/{} from {}", user, repo, archive_url);

    // Remove previous results if they exist
    let _ = std::fs::remove_file(&archive_file);
    let _ = std::fs::remove_dir_all(&archive_dir);
    let _ = std::fs::remove_dir_all(&download_dir);

    // Download the .tar.gz archive, resuming a partial download if one exists and
    // giving up if the transfer stalls (below 10 bytes/s for 30 seconds) or takes
    // longer than 10 minutes overall.
    let mut download_cmd = Command::new("curl");
    download_cmd
        .arg("--max-time")
        .arg("600")
        .arg("-y")
        .arg("30")
        .arg("-Y")
        .arg("10")
        .arg("--connect-timeout")
        .arg("30")
        .arg("--continue-at")
        .arg("-")
        .arg("--location")
        .arg("--output")
        .arg(&archive_file)
        .arg(archive_url);
    retry_spawn_and_wait(5, download_cmd);

    // Unpack the tar archive
    let mut unpack_cmd = Command::new("tar");
    unpack_cmd.arg("xf").arg(&archive_file).current_dir(RelPath::DOWNLOAD.to_path(dirs));
    spawn_and_wait(unpack_cmd);

    // Rename the unpacked dir to the expected name
    std::fs::rename(archive_dir, &download_dir).unwrap();

    // Cleanup
    std::fs::remove_file(archive_file).unwrap();
}

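// Turn a plain directory into a git repository with a single commit, so that
// patches can later be applied on top with `git am`.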
fn init_git_repo(repo_dir: &Path) {
    let mut git_init_cmd = git_command(repo_dir, "init");
    git_init_cmd.arg("-q");
    spawn_and_wait(git_init_cmd);

    let mut git_add_cmd = git_command(repo_dir, "add");
    git_add_cmd.arg(".");
    spawn_and_wait(git_add_cmd);

    let mut git_commit_cmd = git_command(repo_dir, "commit");
    git_commit_cmd.arg("-m").arg("Initial commit").arg("-q");
    spawn_and_wait(git_commit_cmd);
}

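// Collect all `*.patch` files from the patches directory whose name (after the
// leading number) starts with `crate_name`, sorted so they apply in order.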
fn get_patches(dirs: &Dirs, crate_name: &str) -> Vec<PathBuf> {
    let mut patches: Vec<_> = fs::read_dir(RelPath::PATCHES.to_path(dirs))
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.extension() == Some(OsStr::new("patch")))
        .filter(|path| {
            path.file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .split_once("-")
                .unwrap()
                .1
                .starts_with(crate_name)
        })
        .collect();
    patches.sort();
    patches
}

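// Copy `source_dir` to `target_dir` (only the `library/` subdirectory for the
// stdlib), turn the copy into a git repository and apply all matching patches
// with `git am`.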
pub(crate) fn apply_patches(dirs: &Dirs, crate_name: &str, source_dir: &Path, target_dir: &Path) {
    // FIXME avoid copy and patch if src, patches and target are unchanged

    eprintln!("[COPY] {crate_name} source");

    remove_dir_if_exists(target_dir);
    fs::create_dir_all(target_dir).unwrap();
    if crate_name == "stdlib" {
        fs::create_dir(target_dir.join("library")).unwrap();
        copy_dir_recursively(&source_dir.join("library"), &target_dir.join("library"));
    } else {
        copy_dir_recursively(source_dir, target_dir);
    }

    init_git_repo(target_dir);

    if crate_name == "<none>" {
        return;
    }

    for patch in get_patches(dirs, crate_name) {
        eprintln!(
            "[PATCH] {:?} <- {:?}",
            target_dir.file_name().unwrap(),
            patch.file_name().unwrap()
        );
        let mut apply_patch_cmd = git_command(target_dir, "am");
        apply_patch_cmd.arg(patch).arg("-q");
        spawn_and_wait(apply_patch_cmd);
    }
}