Move download functions from native to builder.rs

This has no logic changes, just a move.
parent 1b5e1215ef
commit 61dd278ade
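The pattern of the move, as a minimal sketch (simplified, hypothetical types and names — not the actual bootstrap code; the real diff follows below): free functions in native.rs that took `builder: &Builder<'_>` as their first argument become methods on `Builder` in builder.rs, so call sites change from `download_component(builder, ...)` to `builder.download_component(...)`.

use std::path::PathBuf;

struct Builder {
    out: PathBuf,
}

// Before: a free function in native.rs that takes the builder explicitly.
fn download_component_free(builder: &Builder, url: &str) {
    println!("downloading {} into {}", url, builder.out.display());
}

impl Builder {
    // After: the same body as a method on Builder in builder.rs; call sites
    // change from `download_component_free(builder, url)` to
    // `builder.download_component(url)`.
    fn download_component(&self, url: &str) {
        println!("downloading {} into {}", url, self.out.display());
    }
}

fn main() {
    let builder = Builder { out: PathBuf::from("build") };
    download_component_free(&builder, "https://example.invalid/rust-dev.tar.xz");
    builder.download_component("https://example.invalid/rust-dev.tar.xz");
}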
@@ -2,13 +2,14 @@ use std::any::{type_name, Any};
 use std::cell::{Cell, RefCell};
 use std::collections::BTreeSet;
 use std::env;
-use std::ffi::OsStr;
+use std::ffi::{OsStr, OsString};
 use std::fmt::{Debug, Write};
-use std::fs;
+use std::fs::{self, File};
 use std::hash::Hash;
+use std::io::{BufRead, BufReader, ErrorKind};
 use std::ops::Deref;
 use std::path::{Component, Path, PathBuf};
-use std::process::Command;
+use std::process::{Command, Stdio};
 use std::time::{Duration, Instant};
 
 use crate::cache::{Cache, Interned, INTERNER};
@@ -29,7 +30,8 @@ use crate::{Build, CLang, DocTests, GitRepo, Mode};
 
 pub use crate::Compiler;
 // FIXME: replace with std::lazy after it gets stabilized and reaches beta
-use once_cell::sync::Lazy;
+use once_cell::sync::{Lazy, OnceCell};
+use xz2::bufread::XzDecoder;
 
 pub struct Builder<'a> {
     pub build: &'a Build,
@@ -758,6 +760,207 @@ impl<'a> Builder<'a> {
         StepDescription::run(v, self, paths);
     }
 
+    /// Modifies the interpreter section of 'fname' to fix the dynamic linker,
+    /// or the RPATH section, to fix the dynamic library search path
+    ///
+    /// This is only required on NixOS and uses the PatchELF utility to
+    /// change the interpreter/RPATH of ELF executables.
+    ///
+    /// Please see https://nixos.org/patchelf.html for more information
+    pub(crate) fn fix_bin_or_dylib(&self, fname: &Path) {
+        // FIXME: cache NixOS detection?
+        match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() {
+            Err(_) => return,
+            Ok(output) if !output.status.success() => return,
+            Ok(output) => {
+                let mut s = output.stdout;
+                if s.last() == Some(&b'\n') {
+                    s.pop();
+                }
+                if s != b"Linux" {
+                    return;
+                }
+            }
+        }
+
+        // If the user has asked binaries to be patched for Nix, then
+        // don't check for NixOS or `/lib`, just continue to the patching.
+        // FIXME: shouldn't this take precedence over the `uname` check above?
+        if !self.config.patch_binaries_for_nix {
+            // Use `/etc/os-release` instead of `/etc/NIXOS`.
+            // The latter one does not exist on NixOS when using tmpfs as root.
+            const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""];
+            let os_release = match File::open("/etc/os-release") {
+                Err(e) if e.kind() == ErrorKind::NotFound => return,
+                Err(e) => panic!("failed to access /etc/os-release: {}", e),
+                Ok(f) => f,
+            };
+            if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) {
+                return;
+            }
+            if Path::new("/lib").exists() {
+                return;
+            }
+        }
+
+        // At this point we're pretty sure the user is running NixOS or using Nix
+        println!("info: you seem to be using Nix. Attempting to patch {}", fname.display());
+
+        // Only build `.nix-deps` once.
+        static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new();
+        let mut nix_build_succeeded = true;
+        let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| {
+            // Run `nix-build` to "build" each dependency (which will likely reuse
+            // the existing `/nix/store` copy, or at most download a pre-built copy).
+            //
+            // Importantly, we create a gc-root called `.nix-deps` in the `build/`
+            // directory, but still reference the actual `/nix/store` path in the rpath
+            // as it makes it significantly more robust against changes to the location of
+            // the `.nix-deps` location.
+            //
+            // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
+            // zlib: Needed as a system dependency of `libLLVM-*.so`.
+            // patchelf: Needed for patching ELF binaries (see doc comment above).
+            let nix_deps_dir = self.out.join(".nix-deps");
+            const NIX_EXPR: &str = "
+            with (import <nixpkgs> {});
+            symlinkJoin {
+                name = \"rust-stage0-dependencies\";
+                paths = [
+                    zlib
+                    patchelf
+                    stdenv.cc.bintools
+                ];
+            }
+            ";
+            nix_build_succeeded = self.try_run(Command::new("nix-build").args(&[
+                Path::new("-E"),
+                Path::new(NIX_EXPR),
+                Path::new("-o"),
+                &nix_deps_dir,
+            ]));
+            nix_deps_dir
+        });
+        if !nix_build_succeeded {
+            return;
+        }
+
+        let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf"));
+        let rpath_entries = {
+            // ORIGIN is a relative default, all binary and dynamic libraries we ship
+            // appear to have this (even when `../lib` is redundant).
+            // NOTE: there are only two paths here, delimited by a `:`
+            let mut entries = OsString::from("$ORIGIN/../lib:");
+            entries.push(t!(fs::canonicalize(nix_deps_dir)));
+            entries.push("/lib");
+            entries
+        };
+        patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
+        if !fname.extension().map_or(false, |ext| ext == "so") {
+            // Finally, set the corret .interp for binaries
+            let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
+            // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
+            let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
+            patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
+        }
+
+        self.try_run(patchelf.arg(fname));
+    }
+
+    pub(crate) fn download_component(&self, base: &str, url: &str, dest_path: &Path) {
+        // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/.
+        let tempfile = self.tempdir().join(dest_path.file_name().unwrap());
+        // FIXME: support `do_verify` (only really needed for nightly rustfmt)
+        // FIXME: support non-utf8 paths?
+        self.download_with_retries(tempfile.to_str().unwrap(), &format!("{}/{}", base, url));
+        t!(std::fs::rename(&tempfile, dest_path));
+    }
+
+    fn download_with_retries(&self, tempfile: &str, url: &str) {
+        println!("downloading {}", url);
+        // Try curl. If that fails and we are on windows, fallback to PowerShell.
+        if !self.check_run(Command::new("curl").args(&[
+            "-#",
+            "-y",
+            "30",
+            "-Y",
+            "10", // timeout if speed is < 10 bytes/sec for > 30 seconds
+            "--connect-timeout",
+            "30", // timeout if cannot connect within 30 seconds
+            "--retry",
+            "3",
+            "-Sf",
+            "-o",
+            tempfile,
+            url,
+        ])) {
+            if self.build.build.contains("windows-msvc") {
+                println!("Fallback to PowerShell");
+                for _ in 0..3 {
+                    if self.try_run(Command::new("PowerShell.exe").args(&[
+                        "/nologo",
+                        "-Command",
+                        "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
+                        &format!(
+                            "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
+                            url, tempfile
+                        ),
+                    ])) {
+                        return;
+                    }
+                    println!("\nspurious failure, trying again");
+                }
+            }
+            std::process::exit(1);
+        }
+    }
+
+    pub(crate) fn unpack(&self, tarball: &Path, dst: &Path) {
+        println!("extracting {} to {}", tarball.display(), dst.display());
+        if !dst.exists() {
+            t!(fs::create_dir_all(dst));
+        }
+
+        // FIXME: will need to be a parameter once `download-rustc` is moved to rustbuild
+        const MATCH: &str = "rust-dev";
+
+        // `tarball` ends with `.tar.xz`; strip that suffix
+        // example: `rust-dev-nightly-x86_64-unknown-linux-gnu`
+        let uncompressed_filename =
+            Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap();
+        let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
+
+        // decompress the file
+        let data = t!(File::open(tarball));
+        let decompressor = XzDecoder::new(BufReader::new(data));
+
+        let mut tar = tar::Archive::new(decompressor);
+        for member in t!(tar.entries()) {
+            let mut member = t!(member);
+            let original_path = t!(member.path()).into_owned();
+            // skip the top-level directory
+            if original_path == directory_prefix {
+                continue;
+            }
+            let mut short_path = t!(original_path.strip_prefix(directory_prefix));
+            if !short_path.starts_with(MATCH) {
+                continue;
+            }
+            short_path = t!(short_path.strip_prefix(MATCH));
+            let dst_path = dst.join(short_path);
+            self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display()));
+            if !t!(member.unpack_in(dst)) {
+                panic!("path traversal attack ??");
+            }
+            let src_path = dst.join(original_path);
+            if src_path.is_dir() && dst_path.exists() {
+                continue;
+            }
+            t!(fs::rename(src_path, dst_path));
+        }
+        t!(fs::remove_dir_all(dst.join(directory_prefix)));
+    }
+
     /// Obtain a compiler at a given stage and for a given host. Explicitly does
     /// not take `Compiler` since all `Compiler` instances are meant to be
     /// obtained through this function, since it ensures that they are valid
@@ -12,16 +12,13 @@ use std::env;
 use std::env::consts::EXE_EXTENSION;
 use std::ffi::{OsStr, OsString};
 use std::fs::{self, File};
-use std::io::{self, BufRead, BufReader, ErrorKind};
+use std::io;
 use std::path::{Path, PathBuf};
-use std::process::{Command, Stdio};
-
-use once_cell::sync::OnceCell;
-use xz2::bufread::XzDecoder;
+use std::process::Command;
 
 use crate::builder::{Builder, RunConfig, ShouldRun, Step};
 use crate::config::TargetSelection;
-use crate::util::{self, exe, output, t, up_to_date};
+use crate::util::{self, exe, output, program_out_of_date, t, up_to_date};
 use crate::{CLang, GitRepo};
 
 pub struct Meta {
@@ -151,13 +148,13 @@ pub(crate) fn maybe_download_ci_llvm(builder: &Builder<'_>) {
     if program_out_of_date(&llvm_stamp, &key) && !config.dry_run {
         download_ci_llvm(builder, &llvm_sha);
         for binary in ["llvm-config", "FileCheck"] {
-            fix_bin_or_dylib(builder, &llvm_root.join("bin").join(binary));
+            builder.fix_bin_or_dylib(&llvm_root.join("bin").join(binary));
         }
         let llvm_lib = llvm_root.join("lib");
         for entry in t!(fs::read_dir(&llvm_lib)) {
             let lib = t!(entry).path();
             if lib.extension().map_or(false, |ext| ext == "so") {
-                fix_bin_or_dylib(builder, &lib);
+                builder.fix_bin_or_dylib(&lib);
             }
         }
         t!(fs::write(llvm_stamp, key));
@@ -182,218 +179,10 @@ fn download_ci_llvm(builder: &Builder<'_>, llvm_sha: &str) {
     let filename = format!("rust-dev-nightly-{}.tar.xz", builder.build.build.triple);
     let tarball = rustc_cache.join(&filename);
     if !tarball.exists() {
-        download_component(builder, base, &format!("{}/{}", url, filename), &tarball);
+        builder.download_component(base, &format!("{}/{}", url, filename), &tarball);
     }
     let llvm_root = builder.config.ci_llvm_root();
-    unpack(builder, &tarball, &llvm_root);
-}
-
-/// Modifies the interpreter section of 'fname' to fix the dynamic linker,
-/// or the RPATH section, to fix the dynamic library search path
-///
-/// This is only required on NixOS and uses the PatchELF utility to
-/// change the interpreter/RPATH of ELF executables.
-///
-/// Please see https://nixos.org/patchelf.html for more information
-fn fix_bin_or_dylib(builder: &Builder<'_>, fname: &Path) {
-    // FIXME: cache NixOS detection?
-    match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() {
-        Err(_) => return,
-        Ok(output) if !output.status.success() => return,
-        Ok(output) => {
-            let mut s = output.stdout;
-            if s.last() == Some(&b'\n') {
-                s.pop();
-            }
-            if s != b"Linux" {
-                return;
-            }
-        }
-    }
-
-    // If the user has asked binaries to be patched for Nix, then
-    // don't check for NixOS or `/lib`, just continue to the patching.
-    // FIXME: shouldn't this take precedence over the `uname` check above?
-    if !builder.config.patch_binaries_for_nix {
-        // Use `/etc/os-release` instead of `/etc/NIXOS`.
-        // The latter one does not exist on NixOS when using tmpfs as root.
-        const NIX_IDS: &[&str] = &["ID=nixos", "ID='nixos'", "ID=\"nixos\""];
-        let os_release = match File::open("/etc/os-release") {
-            Err(e) if e.kind() == ErrorKind::NotFound => return,
-            Err(e) => panic!("failed to access /etc/os-release: {}", e),
-            Ok(f) => f,
-        };
-        if !BufReader::new(os_release).lines().any(|l| NIX_IDS.contains(&t!(l).trim())) {
-            return;
-        }
-        if Path::new("/lib").exists() {
-            return;
-        }
-    }
-
-    // At this point we're pretty sure the user is running NixOS or using Nix
-    println!("info: you seem to be using Nix. Attempting to patch {}", fname.display());
-
-    // Only build `.nix-deps` once.
-    static NIX_DEPS_DIR: OnceCell<PathBuf> = OnceCell::new();
-    let mut nix_build_succeeded = true;
-    let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| {
-        // Run `nix-build` to "build" each dependency (which will likely reuse
-        // the existing `/nix/store` copy, or at most download a pre-built copy).
-        //
-        // Importantly, we create a gc-root called `.nix-deps` in the `build/`
-        // directory, but still reference the actual `/nix/store` path in the rpath
-        // as it makes it significantly more robust against changes to the location of
-        // the `.nix-deps` location.
-        //
-        // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
-        // zlib: Needed as a system dependency of `libLLVM-*.so`.
-        // patchelf: Needed for patching ELF binaries (see doc comment above).
-        let nix_deps_dir = builder.out.join(".nix-deps");
-        const NIX_EXPR: &str = "
-        with (import <nixpkgs> {});
-        symlinkJoin {
-            name = \"rust-stage0-dependencies\";
-            paths = [
-                zlib
-                patchelf
-                stdenv.cc.bintools
-            ];
-        }
-        ";
-        nix_build_succeeded = builder.try_run(Command::new("nix-build").args(&[
-            Path::new("-E"),
-            Path::new(NIX_EXPR),
-            Path::new("-o"),
-            &nix_deps_dir,
-        ]));
-        nix_deps_dir
-    });
-    if !nix_build_succeeded {
-        return;
-    }
-
-    let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf"));
-    let rpath_entries = {
-        // ORIGIN is a relative default, all binary and dynamic libraries we ship
-        // appear to have this (even when `../lib` is redundant).
-        // NOTE: there are only two paths here, delimited by a `:`
-        let mut entries = OsString::from("$ORIGIN/../lib:");
-        entries.push(t!(fs::canonicalize(nix_deps_dir)));
-        entries.push("/lib");
-        entries
-    };
-    patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]);
-    if !fname.extension().map_or(false, |ext| ext == "so") {
-        // Finally, set the corret .interp for binaries
-        let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
-        // FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
-        let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
-        patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
-    }
-
-    builder.try_run(patchelf.arg(fname));
-}
-
-fn download_component(builder: &Builder<'_>, base: &str, url: &str, dest_path: &Path) {
-    // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/.
-    let tempfile = builder.tempdir().join(dest_path.file_name().unwrap());
-    // FIXME: support `do_verify` (only really needed for nightly rustfmt)
-    // FIXME: support non-utf8 paths?
-    download_with_retries(builder, tempfile.to_str().unwrap(), &format!("{}/{}", base, url));
-    t!(std::fs::rename(&tempfile, dest_path));
-}
-
-fn download_with_retries(builder: &Builder<'_>, tempfile: &str, url: &str) {
-    println!("downloading {}", url);
-    // Try curl. If that fails and we are on windows, fallback to PowerShell.
-    if !builder.check_run(Command::new("curl").args(&[
-        "-#",
-        "-y",
-        "30",
-        "-Y",
-        "10", // timeout if speed is < 10 bytes/sec for > 30 seconds
-        "--connect-timeout",
-        "30", // timeout if cannot connect within 30 seconds
-        "--retry",
-        "3",
-        "-Sf",
-        "-o",
-        tempfile,
-        url,
-    ])) {
-        if builder.build.build.contains("windows-msvc") {
-            println!("Fallback to PowerShell");
-            for _ in 0..3 {
-                if builder.try_run(Command::new("PowerShell.exe").args(&[
-                    "/nologo",
-                    "-Command",
-                    "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
-                    &format!(
-                        "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
-                        url, tempfile
-                    ),
-                ])) {
-                    return;
-                }
-                println!("\nspurious failure, trying again");
-            }
-        }
-        std::process::exit(1);
-    }
-}
-
-fn unpack(builder: &Builder<'_>, tarball: &Path, dst: &Path) {
-    println!("extracting {} to {}", tarball.display(), dst.display());
-    if !dst.exists() {
-        t!(fs::create_dir_all(dst));
-    }
-
-    // FIXME: will need to be a parameter once `download-rustc` is moved to rustbuild
-    const MATCH: &str = "rust-dev";
-
-    // `tarball` ends with `.tar.xz`; strip that suffix
-    // example: `rust-dev-nightly-x86_64-unknown-linux-gnu`
-    let uncompressed_filename =
-        Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap();
-    let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap());
-
-    // decompress the file
-    let data = t!(File::open(tarball));
-    let decompressor = XzDecoder::new(BufReader::new(data));
-
-    let mut tar = tar::Archive::new(decompressor);
-    for member in t!(tar.entries()) {
-        let mut member = t!(member);
-        let original_path = t!(member.path()).into_owned();
-        // skip the top-level directory
-        if original_path == directory_prefix {
-            continue;
-        }
-        let mut short_path = t!(original_path.strip_prefix(directory_prefix));
-        if !short_path.starts_with(MATCH) {
-            continue;
-        }
-        short_path = t!(short_path.strip_prefix(MATCH));
-        let dst_path = dst.join(short_path);
-        builder.verbose(&format!("extracting {} to {}", original_path.display(), dst.display()));
-        if !t!(member.unpack_in(dst)) {
-            panic!("path traversal attack ??");
-        }
-        let src_path = dst.join(original_path);
-        if src_path.is_dir() && dst_path.exists() {
-            continue;
-        }
-        t!(fs::rename(src_path, dst_path));
-    }
-    t!(fs::remove_dir_all(dst.join(directory_prefix)));
-}
-
-fn program_out_of_date(stamp: &Path, key: &str) -> bool {
-    if !stamp.exists() {
-        return true;
-    }
-    t!(fs::read_to_string(stamp)) != key
+    builder.unpack(&tarball, &llvm_root);
 }
 
 #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@@ -115,6 +115,14 @@ impl Drop for TimeIt {
     }
 }
 
+/// Used for download caching
+pub(crate) fn program_out_of_date(stamp: &Path, key: &str) -> bool {
+    if !stamp.exists() {
+        return true;
+    }
+    t!(fs::read_to_string(stamp)) != key
+}
+
 /// Symlinks two directories, using junctions on Windows and normal symlinks on
 /// Unix.
 pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {