Auto merge of #110214 - compiler-errors:rollup-mkig4t6, r=compiler-errors
Rollup of 10 pull requests

Successful merges:

 - #96971 (Initial support for loongarch64-unknown-linux-gnu)
 - #109894 (Remove Errors section from var_os docs)
 - #110000 (Rename tests/ui/unique to tests/ui/box/unit)
 - #110018 (Pass host linker to compiletest.)
 - #110104 (Reword the docstring in todo! macro definition, fixing a typo)
 - #110113 (Fix `x test ui --target foo` when download-rustc is enabled)
 - #110126 (Support safe transmute in new solver)
 - #110155 (Fix typos in librustdoc, tools and config files)
 - #110162 (rustdoc: remove redundant expandSection code from main.js)
 - #110173 (kmc-solid: Implement `Socket::read_buf`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 0d7ed3ba84
@@ -15,6 +15,7 @@
const MIN_ALIGN: usize = 8;
#[cfg(any(target_arch = "x86_64",
          target_arch = "aarch64",
          target_arch = "loongarch64",
          target_arch = "mips64",
          target_arch = "s390x",
          target_arch = "sparc64"))]
@@ -127,6 +127,7 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
        "msp430" => Architecture::Msp430,
        "hexagon" => Architecture::Hexagon,
        "bpf" => Architecture::Bpf,
        "loongarch64" => Architecture::LoongArch64,
        // Unsupported architecture.
        _ => return None,
    };
@@ -190,6 +191,10 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
            }
            e_flags
        }
        Architecture::LoongArch64 => {
            // Source: https://loongson.github.io/LoongArch-Documentation/LoongArch-ELF-ABI-EN.html#_e_flags_identifies_abi_type_and_version
            elf::EF_LARCH_OBJABI_V1 | elf::EF_LARCH_ABI_DOUBLE_FLOAT
        }
        _ => 0,
    };
    // adapted from LLVM's `MCELFObjectTargetWriter::getOSABI`
@@ -10,6 +10,7 @@ const OPTIONAL_COMPONENTS: &[&str] = &[
    "aarch64",
    "amdgpu",
    "avr",
    "loongarch",
    "m68k",
    "mips",
    "powerpc",
@@ -146,6 +146,12 @@ extern "C" void LLVMTimeTraceProfilerFinish(const char* FileName) {
#define SUBTARGET_HEXAGON
#endif

#ifdef LLVM_COMPONENT_LOONGARCH
#define SUBTARGET_LOONGARCH SUBTARGET(LoongArch)
#else
#define SUBTARGET_LOONGARCH
#endif

#define GEN_SUBTARGETS \
    SUBTARGET_X86 \
    SUBTARGET_ARM \
@@ -159,6 +165,7 @@ extern "C" void LLVMTimeTraceProfilerFinish(const char* FileName) {
    SUBTARGET_SPARC \
    SUBTARGET_HEXAGON \
    SUBTARGET_RISCV \
    SUBTARGET_LOONGARCH \

#define SUBTARGET(x) \
    namespace llvm { \
@@ -102,6 +102,14 @@ pub fn initialize_available_targets() {
        LLVMInitializeM68kAsmPrinter,
        LLVMInitializeM68kAsmParser
    );
    init_target!(
        llvm_component = "loongarch",
        LLVMInitializeLoongArchTargetInfo,
        LLVMInitializeLoongArchTarget,
        LLVMInitializeLoongArchTargetMC,
        LLVMInitializeLoongArchAsmPrinter,
        LLVMInitializeLoongArchAsmParser
    );
    init_target!(
        llvm_component = "mips",
        LLVMInitializeMipsTargetInfo,
@@ -83,6 +83,9 @@ pub trait TypeVisitableExt<'tcx>: TypeVisitable<TyCtxt<'tcx>> {
                | TypeFlags::HAS_CT_PLACEHOLDER,
        )
    }
    fn has_non_region_placeholders(&self) -> bool {
        self.has_type_flags(TypeFlags::HAS_TY_PLACEHOLDER | TypeFlags::HAS_CT_PLACEHOLDER)
    }
    fn needs_subst(&self) -> bool {
        self.has_type_flags(TypeFlags::NEEDS_SUBST)
    }
@@ -0,0 +1,17 @@
use crate::spec::{Target, TargetOptions};

pub fn target() -> Target {
    Target {
        llvm_target: "loongarch64-unknown-linux-gnu".into(),
        pointer_width: 64,
        data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
        arch: "loongarch64".into(),
        options: TargetOptions {
            cpu: "generic".into(),
            features: "+f,+d".into(),
            llvm_abiname: "lp64d".into(),
            max_atomic_width: Some(64),
            ..super::linux_gnu_base::opts()
        },
    }
}
@@ -1021,6 +1021,7 @@ supported_targets! {
    ("x86_64-unknown-linux-gnux32", x86_64_unknown_linux_gnux32),
    ("i686-unknown-linux-gnu", i686_unknown_linux_gnu),
    ("i586-unknown-linux-gnu", i586_unknown_linux_gnu),
    ("loongarch64-unknown-linux-gnu", loongarch64_unknown_linux_gnu),
    ("m68k-unknown-linux-gnu", m68k_unknown_linux_gnu),
    ("mips-unknown-linux-gnu", mips_unknown_linux_gnu),
    ("mips64-unknown-linux-gnuabi64", mips64_unknown_linux_gnuabi64),
@@ -225,6 +225,11 @@ pub(super) trait GoalKind<'tcx>: TypeFoldable<TyCtxt<'tcx>> + Copy + Eq {
        ecx: &mut EvalCtxt<'_, 'tcx>,
        goal: Goal<'tcx, Self>,
    ) -> QueryResult<'tcx>;

    fn consider_builtin_transmute_candidate(
        ecx: &mut EvalCtxt<'_, 'tcx>,
        goal: Goal<'tcx, Self>,
    ) -> QueryResult<'tcx>;
}

impl<'tcx> EvalCtxt<'_, 'tcx> {
@@ -373,6 +378,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
            G::consider_builtin_discriminant_kind_candidate(self, goal)
        } else if lang_items.destruct_trait() == Some(trait_def_id) {
            G::consider_builtin_destruct_candidate(self, goal)
        } else if lang_items.transmute_trait() == Some(trait_def_id) {
            G::consider_builtin_transmute_candidate(self, goal)
        } else {
            Err(NoSolution)
        };
@@ -639,4 +639,25 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
        crate::traits::wf::unnormalized_obligations(self.infcx, param_env, arg)
            .map(|obligations| obligations.into_iter().map(|obligation| obligation.into()))
    }

    pub(super) fn is_transmutable(
        &self,
        src_and_dst: rustc_transmute::Types<'tcx>,
        scope: Ty<'tcx>,
        assume: rustc_transmute::Assume,
    ) -> Result<Certainty, NoSolution> {
        // FIXME(transmutability): This really should be returning nested goals for `Answer::If*`
        match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable(
            ObligationCause::dummy(),
            ty::Binder::dummy(src_and_dst),
            scope,
            assume,
        ) {
            rustc_transmute::Answer::Yes => Ok(Certainty::Yes),
            rustc_transmute::Answer::No(_)
            | rustc_transmute::Answer::IfTransmutable { .. }
            | rustc_transmute::Answer::IfAll(_)
            | rustc_transmute::Answer::IfAny(_) => Err(NoSolution),
        }
    }
}
@@ -524,6 +524,13 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
    ) -> QueryResult<'tcx> {
        bug!("`Destruct` does not have an associated type: {:?}", goal);
    }

    fn consider_builtin_transmute_candidate(
        _ecx: &mut EvalCtxt<'_, 'tcx>,
        goal: Goal<'tcx, Self>,
    ) -> QueryResult<'tcx> {
        bug!("`BikeshedIntrinsicFrom` does not have an associated type: {:?}", goal)
    }
}

/// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code.
@@ -598,6 +598,35 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
            Err(NoSolution)
        }
    }

    fn consider_builtin_transmute_candidate(
        ecx: &mut EvalCtxt<'_, 'tcx>,
        goal: Goal<'tcx, Self>,
    ) -> QueryResult<'tcx> {
        // `rustc_transmute` does not have support for type or const params
        if goal.has_non_region_placeholders() {
            return Err(NoSolution);
        }

        // Erase regions because we compute layouts in `rustc_transmute`,
        // which will ICE for region vars.
        let substs = ecx.tcx().erase_regions(goal.predicate.trait_ref.substs);

        let Some(assume) = rustc_transmute::Assume::from_const(
            ecx.tcx(),
            goal.param_env,
            substs.const_at(3),
        ) else {
            return Err(NoSolution);
        };

        let certainty = ecx.is_transmutable(
            rustc_transmute::Types { dst: substs.type_at(0), src: substs.type_at(1) },
            substs.type_at(2),
            assume,
        )?;
        ecx.evaluate_added_goals_and_make_canonical_response(certainty)
    }
}

impl<'tcx> EvalCtxt<'_, 'tcx> {
@@ -88,7 +88,7 @@ changelog-seen = 2
# the resulting rustc being unable to compile for the disabled architectures.
#
# To add support for new targets, see https://rustc-dev-guide.rust-lang.org/building/new-target.html.
#targets = "AArch64;ARM;BPF;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;Sparc;SystemZ;WebAssembly;X86"
#targets = "AArch64;ARM;BPF;Hexagon;LoongArch;MSP430;Mips;NVPTX;PowerPC;RISCV;Sparc;SystemZ;WebAssembly;X86"

# LLVM experimental targets to build support for. These targets are specified in
# the same format as above, but since these targets are experimental, they are
@@ -257,7 +257,7 @@ changelog-seen = 2
#python = "python"

# The path to the REUSE executable to use. Note that REUSE is not required in
# most cases, as our tooling relies on a cached (and shrinked) copy of the
# most cases, as our tooling relies on a cached (and shrunk) copy of the
# REUSE output present in the git repository and in our source tarballs.
#
# REUSE is only needed if your changes caused the overall licensing of the
@@ -712,8 +712,8 @@ macro_rules! unimplemented {

/// Indicates unfinished code.
///
/// This can be useful if you are prototyping and are just looking to have your
/// code typecheck.
/// This can be useful if you are prototyping and just
/// want a placeholder to let your code pass type analysis.
///
/// The difference between [`unimplemented!`] and `todo!` is that while `todo!` conveys
/// an intent of implementing the functionality later and the message is "not yet
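The reworded docstring describes using `todo!()` to get a skeleton compiling before the real logic exists. A minimal sketch of that workflow (not part of the diff; the `Message` type and `encrypt` function are made up for illustration):

```rust
struct Message {
    plaintext: String,
}

// Prototype: the signature is settled, the body is not. `todo!()` lets the
// whole program pass type analysis; calling `encrypt` panics with
// "not yet implemented".
fn encrypt(_msg: &Message) -> Vec<u8> {
    todo!()
}

fn main() {
    let msg = Message { plaintext: "hello".to_string() };
    // Type-checks today; uncommenting the next line still compiles,
    // but panics at runtime until `encrypt` is implemented.
    // let _bytes = encrypt(&msg);
    println!("prototyping with {} bytes of plaintext", msg.plaintext.len());
}
```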
@@ -236,21 +236,14 @@ fn _var(key: &OsStr) -> Result<String, VarError> {
}

/// Fetches the environment variable `key` from the current process, returning
/// [`None`] if the variable isn't set or there's another error.
/// [`None`] if the variable isn't set or if there is another error.
///
/// Note that the method will not check if the environment variable
/// is valid Unicode. If you want to have an error on invalid UTF-8,
/// use the [`var`] function instead.
///
/// # Errors
///
/// This function returns an error if the environment variable isn't set.
///
/// This function may return an error if the environment variable's name contains
/// It may return `None` if the environment variable's name contains
/// the equal sign character (`=`) or the NUL character.
///
/// This function may return an error if the environment variable's value contains
/// the NUL character.
/// Note that this function will not check if the environment variable
/// is valid Unicode. If you want to have an error on invalid UTF-8,
/// use the [`var`] function instead.
///
/// # Examples
///
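The revised documentation makes `var_os` a pure `Option` API: `None` covers both "not set" and names containing `=` or NUL, and non-Unicode values are returned as-is. A small usage sketch (not part of the diff):

```rust
use std::env;

fn main() {
    // `var_os` never errors on non-Unicode values and returns `None` instead
    // of an error when the variable is absent or its name is invalid.
    match env::var_os("PATH") {
        Some(path) => println!("PATH is {} bytes long", path.len()),
        None => println!("PATH is not set"),
    }
}
```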
@@ -895,6 +888,7 @@ pub mod consts {
    /// - x86_64
    /// - arm
    /// - aarch64
    /// - loongarch64
    /// - m68k
    /// - mips
    /// - mips64
@@ -231,6 +231,7 @@ mod arch {
}

#[cfg(any(
    target_arch = "loongarch64",
    target_arch = "mips64",
    target_arch = "s390x",
    target_arch = "sparc64",
@@ -77,6 +77,9 @@ const UNWIND_DATA_REG: (i32, i32) = (0, 1); // R0, R1
#[cfg(any(target_arch = "riscv64", target_arch = "riscv32"))]
const UNWIND_DATA_REG: (i32, i32) = (10, 11); // x10, x11

#[cfg(target_arch = "loongarch64")]
const UNWIND_DATA_REG: (i32, i32) = (4, 5); // a0, a1

// The following code is based on GCC's C and C++ personality routines. For reference, see:
// https://github.com/gcc-mirror/gcc/blob/master/libstdc++-v3/libsupc++/eh_personality.cc
// https://github.com/gcc-mirror/gcc/blob/trunk/libgcc/unwind-c.c
@@ -22,6 +22,7 @@ pub const MIN_ALIGN: usize = 8;
#[cfg(any(
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "loongarch64",
    target_arch = "mips64",
    target_arch = "s390x",
    target_arch = "sparc64",
@@ -2,7 +2,7 @@ use super::abi;
use crate::{
    cmp,
    ffi::CStr,
    io::{self, ErrorKind, IoSlice, IoSliceMut},
    io::{self, BorrowedBuf, BorrowedCursor, ErrorKind, IoSlice, IoSliceMut},
    mem,
    net::{Shutdown, SocketAddr},
    ptr, str,
@@ -294,19 +294,30 @@ impl Socket {
        self.0.duplicate().map(Socket)
    }

    fn recv_with_flags(&self, buf: &mut [u8], flags: c_int) -> io::Result<usize> {
    fn recv_with_flags(&self, mut buf: BorrowedCursor<'_>, flags: c_int) -> io::Result<()> {
        let ret = cvt(unsafe {
            netc::recv(self.0.raw(), buf.as_mut_ptr() as *mut c_void, buf.len(), flags)
            netc::recv(self.0.raw(), buf.as_mut().as_mut_ptr().cast(), buf.capacity(), flags)
        })?;
        Ok(ret as usize)
        unsafe {
            buf.advance(ret as usize);
        }
        Ok(())
    }

    pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
        self.recv_with_flags(buf, 0)
        let mut buf = BorrowedBuf::from(buf);
        self.recv_with_flags(buf.unfilled(), 0)?;
        Ok(buf.len())
    }

    pub fn peek(&self, buf: &mut [u8]) -> io::Result<usize> {
        self.recv_with_flags(buf, MSG_PEEK)
        let mut buf = BorrowedBuf::from(buf);
        self.recv_with_flags(buf.unfilled(), MSG_PEEK)?;
        Ok(buf.len())
    }

    pub fn read_buf(&self, buf: BorrowedCursor<'_>) -> io::Result<()> {
        self.recv_with_flags(buf, 0)
    }

    pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
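The change above routes `read`, `peek`, and the new `read_buf` through a single cursor-based `recv_with_flags`, so `read`'s return value is simply the buffer's filled length afterwards. `BorrowedBuf`/`BorrowedCursor` are nightly-only, so this rough sketch (an assumption, not the std API) mimics the same shape with a tiny stand-in cursor type:

```rust
/// Stand-in for `BorrowedCursor`: tracks how much of a caller's buffer is filled.
struct FillCursor<'a> {
    buf: &'a mut [u8],
    filled: usize,
}

impl<'a> FillCursor<'a> {
    fn new(buf: &'a mut [u8]) -> Self {
        FillCursor { buf, filled: 0 }
    }
    fn capacity(&self) -> usize {
        self.buf.len()
    }
    fn fill_from(&mut self, src: &[u8]) {
        // Copy as many bytes as fit in the unfilled part and advance the cursor.
        let n = src.len().min(self.capacity() - self.filled);
        self.buf[self.filled..self.filled + n].copy_from_slice(&src[..n]);
        self.filled += n;
    }
}

/// Analogue of `recv_with_flags`: fills the cursor and returns no count of its own.
fn recv_into(cursor: &mut FillCursor<'_>) {
    cursor.fill_from(b"hello from the socket");
}

/// Analogue of `Socket::read`: wrap the slice, delegate, then report the filled length.
fn read(buf: &mut [u8]) -> usize {
    let mut cursor = FillCursor::new(buf);
    recv_into(&mut cursor);
    cursor.filled
}

fn main() {
    let mut buf = [0u8; 8];
    let n = read(&mut buf);
    assert_eq!(&buf[..n], b"hello fr");
    println!("read {n} bytes");
}
```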
@@ -75,6 +75,9 @@ pub const unwinder_private_data_size: usize = 20;
#[cfg(all(target_arch = "hexagon", target_os = "linux"))]
pub const unwinder_private_data_size: usize = 35;

#[cfg(target_arch = "loongarch64")]
pub const unwinder_private_data_size: usize = 2;

#[repr(C)]
pub struct _Unwind_Exception {
    pub exception_class: _Unwind_Exception_Class,
@@ -304,6 +304,7 @@ def default_build_triple(verbose):
        'i486': 'i686',
        'i686': 'i686',
        'i786': 'i686',
        'loongarch64': 'loongarch64',
        'm68k': 'm68k',
        'powerpc': 'powerpc',
        'powerpc64': 'powerpc64',
@@ -139,7 +139,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
            sha: sha.to_owned(),
            short_sha: short_sha.to_owned(),
        },
        _ => panic!("the `git-comit-info` file is malformed"),
        _ => panic!("the `git-commit-info` file is malformed"),
    };
    Some(info)
} else {
@@ -83,11 +83,11 @@ impl Step for Std {
        let target = self.target;
        let compiler = self.compiler;

        // These artifacts were already copied (in `impl Step for Sysroot`).
        // Don't recompile them.
        // When using `download-rustc`, we already have artifacts for the host available
        // (they were copied in `impl Step for Sysroot`). Don't recompile them.
        // NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler,
        // so its artifacts can't be reused.
        if builder.download_rustc() && compiler.stage != 0 {
        if builder.download_rustc() && compiler.stage != 0 && target == builder.build.build {
            return;
        }
@@ -129,7 +129,8 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)]
    /* Extra values not defined in the built-in targets yet, but used in std */
    (Some(Mode::Std), "target_env", Some(&["libnx"])),
    // (Some(Mode::Std), "target_os", Some(&[])),
    (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])),
    // #[cfg(bootstrap)] loongarch64
    (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa", "loongarch64"])),
    /* Extra names used by dependencies */
    // FIXME: Used by serde_json, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "no_btreemap_remove_entry", None),
@@ -291,7 +291,7 @@ impl Step for Llvm {
        let llvm_targets = match &builder.config.llvm_targets {
            Some(s) => s,
            None => {
                "AArch64;ARM;BPF;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\
                "AArch64;ARM;BPF;Hexagon;LoongArch;MSP430;Mips;NVPTX;PowerPC;RISCV;\
                 Sparc;SystemZ;WebAssembly;X86"
            }
        };
@@ -1535,7 +1535,10 @@ note: if you're sure you want to do this, please open an issue as to why. In the
        flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string()));

        if let Some(linker) = builder.linker(target) {
            cmd.arg("--linker").arg(linker);
            cmd.arg("--target-linker").arg(linker);
        }
        if let Some(linker) = builder.linker(compiler.host) {
            cmd.arg("--host-linker").arg(linker);
        }

        let mut hostflags = flags.clone();
@@ -29,6 +29,7 @@
- [\*-linux-ohos](platform-support/openharmony.md)
- [\*-unknown-fuchsia](platform-support/fuchsia.md)
- [\*-kmc-solid_\*](platform-support/kmc-solid.md)
- [loongarch\*-unknown-linux-\*](platform-support/loongarch-linux.md)
- [m68k-unknown-linux-gnu](platform-support/m68k-unknown-linux-gnu.md)
- [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md)
- [mipsel-sony-psx](platform-support/mipsel-sony-psx.md)
@@ -266,6 +266,7 @@ target | std | host | notes
`i686-uwp-windows-gnu` | ? | |
`i686-uwp-windows-msvc` | ? | |
`i686-wrs-vxworks` | ? | |
[`loongarch64-unknown-linux-gnu`](platform-support/loongarch-linux.md) | ? | | LoongArch64 Linux (LP64D ABI)
[`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? | | Motorola 680x0 Linux
`mips-unknown-linux-uclibc` | ✓ | | MIPS Linux with uClibc
[`mips64-openwrt-linux-musl`](platform-support/mips64-openwrt-linux-musl.md) | ? | | MIPS64 for OpenWrt Linux MUSL
src/doc/rustc/src/platform-support/loongarch-linux.md (new file, 92 lines)
@@ -0,0 +1,92 @@
# loongarch\*-unknown-linux-\*

**Tier: 3**

[LoongArch] is a new RISC ISA developed by Loongson Technology Corporation Limited.

[LoongArch]: https://loongson.github.io/LoongArch-Documentation/README-EN.html

The target name follows this format: `<machine>-<vendor>-<os><fabi_suffix>`, where `<machine>` specifies the CPU family/model, `<vendor>` specifies the vendor and `<os>` the operating system name.
While the integer base ABI is implied by the machine field, the floating point base ABI type is encoded into the os field of the specifier using the string suffix `<fabi-suffix>`.

| `<fabi-suffix>` | `Description` |
|------------------------|--------------------------------------------------------------------|
| f64 | The base ABI uses 64-bit FPRs for parameter passing. (lp64d) |
| f32 | The base ABI uses 32-bit FPRs for parameter passing. (lp64f) |
| sf | The base ABI uses no FPR for parameter passing. (lp64s) |

|`ABI type(Base ABI/ABI extension)`| `C library` | `kernel` | `target tuple` |
|----------------------------------|-------------|----------|----------------------------------|
| lp64d/base | glibc | linux | loongarch64-unknown-linux-gnu |
| lp64f/base | glibc | linux | loongarch64-unknown-linux-gnuf32 |
| lp64s/base | glibc | linux | loongarch64-unknown-linux-gnusf |
| lp64d/base | musl libc | linux | loongarch64-unknown-linux-musl |
| lp64f/base | musl libc | linux | loongarch64-unknown-linux-muslf32 |
| lp64s/base | musl libc | linux | loongarch64-unknown-linux-muslsf |

## Target maintainers

- [ZHAI xiaojuan](https://github.com/zhaixiaojuan) `zhaixiaojuan@loongson.cn`
- [WANG rui](https://github.com/heiher) `wangrui@loongson.cn`
- [ZHAI xiang](https://github.com/xiangzhai) `zhaixiang@loongson.cn`
- [WANG Xuerui](https://github.com/xen0n) `git@xen0n.name`

## Requirements

This target is cross-compiled.
A GNU toolchain for the LoongArch target is required. It can be downloaded from https://github.com/loongson/build-tools/releases, or built from the source code of GCC (12.1.0 or later) and Binutils (2.40 or later).

## Building the target

The target can be built by enabling it for a `rustc` build.

```toml
[build]
target = ["loongarch64-unknown-linux-gnu"]
```

Make sure `loongarch64-unknown-linux-gnu-gcc` can be found in the directories specified in `$PATH`. Alternatively, you can use a GNU LoongArch toolchain by adding the following to `config.toml`:

```toml
[target.loongarch64-unknown-linux-gnu]
# ADJUST THIS PATH TO POINT AT YOUR TOOLCHAIN
cc = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc"
cxx = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-g++"
ar = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-ar"
ranlib = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-ranlib"
linker = "/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc"
```

## Cross-compilation

This target can be cross-compiled on a `x86_64-unknown-linux-gnu` host. Cross-compilation on other hosts may work but is not tested.

## Testing

To test a cross-compiled binary on your build system, install a qemu binary that supports the LoongArch architecture and execute the following commands.

```text
CC_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc \
CXX_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-g++ \
AR_loongarch64_unknown_linux_gnu=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc-ar \
CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER=/TOOLCHAIN_PATH/bin/loongarch64-unknown-linux-gnu-gcc \
# SET TARGET SYSTEM LIBRARY PATH
CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_RUNNER="qemu-loongarch64 -L /TOOLCHAIN_PATH/TARGET_LIBRARY_PATH" \
cargo run --target loongarch64-unknown-linux-gnu --release
```

Tested on the x86 architecture; other architectures have not been tested.

## Building Rust programs

Rust does not yet ship pre-compiled artifacts for this target. To compile for this target, you will either need to build Rust with the target enabled (see "Building the target" above), or build your own copy of `std` by using `build-std` or similar.

If `rustc` has support for that target and the library artifacts are available, then Rust static libraries can be built for that target:

```shell
$ rustc --target loongarch64-unknown-linux-gnu your-code.rs --crate-type staticlib
$ ls libyour_code.a
```

On Rust Nightly it's possible to build without the target artifacts available:

```text
cargo build -Z build-std --target loongarch64-unknown-linux-gnu
```
@@ -643,7 +643,7 @@ Examples:
```rust
match foo {
    foo => bar,
    a_very_long_patten | another_pattern if an_expression() => {
    a_very_long_pattern | another_pattern if an_expression() => {
        no_room_for_this_expression()
    }
    foo => {
@@ -3,4 +3,4 @@
--------------------

The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`.
The default is markdown; currently JSON is also supported. JSON can be useful for programatically manipulating the results (e.g. to find the item that took the longest to compile).
The default is markdown; currently JSON is also supported. JSON can be useful for programmatically manipulating the results (e.g. to find the item that took the longest to compile).
@@ -119,7 +119,7 @@
<SetProperty Sequence="ui" Before="CostFinalize"
    Id="WixAppFolder" Value="WixPerUserFolder">NOT ALLUSERS</SetProperty>

<!-- UI sets ALLUSERS per user selection; progagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<!-- UI sets ALLUSERS per user selection; propagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<SetProperty Sequence="ui" Before="ExecuteAction"
    Id="MSIINSTALLPERUSER" Value="1">NOT ALLUSERS</SetProperty>
@@ -517,6 +517,7 @@ impl<'a> fmt::Display for Display<'a> {
            "aarch64" => "AArch64",
            "arm" => "ARM",
            "asmjs" => "JavaScript",
            "loongarch64" => "LoongArch LA64",
            "m68k" => "M68k",
            "mips" => "MIPS",
            "mips64" => "MIPS-64",
@@ -230,7 +230,7 @@ pub(crate) struct RenderOptions {
    pub(crate) extension_css: Option<PathBuf>,
    /// A map of crate names to the URL to use instead of querying the crate's `html_root_url`.
    pub(crate) extern_html_root_urls: BTreeMap<String, String>,
    /// Whether to give precedence to `html_root_url` or `--exten-html-root-url`.
    /// Whether to give precedence to `html_root_url` or `--extern-html-root-url`.
    pub(crate) extern_html_root_takes_precedence: bool,
    /// A map of the default settings (values are as for DOM storage API). Keys should lack the
    /// `rustdoc-` prefix.
@@ -349,10 +349,10 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
        let mut br_with_padding = String::with_capacity(6 * indent + 28);
        br_with_padding.push_str("\n");

        let padding_amout =
        let padding_amount =
            if ending == Ending::Newline { indent + 4 } else { indent + "fn where ".len() };

        for _ in 0..padding_amout {
        for _ in 0..padding_amount {
            br_with_padding.push_str(" ");
        }
        let where_preds = where_preds.to_string().replace('\n', &br_with_padding);
@@ -1796,10 +1796,11 @@ fn render_struct(
    }
    match ty {
        None => {
            let where_diplayed = g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
            let where_displayed =
                g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);

            // If there wasn't a `where` clause, we add a whitespace.
            if !where_diplayed {
            if !where_displayed {
                w.write_str(" {");
            } else {
                w.write_str("{");
@@ -331,10 +331,6 @@ function preLoadCss(cssUrl) {
        },
    };

    function getPageId() {
        return window.location.hash.replace(/^#/, "");
    }

    const toggleAllDocsId = "toggle-all-docs";
    let savedHash = "";

@@ -355,12 +351,12 @@ function preLoadCss(cssUrl) {
            }
        }
        // This part is used in case an element is not visible.
        if (savedHash !== window.location.hash) {
            savedHash = window.location.hash;
            if (savedHash.length === 0) {
                return;
        const pageId = window.location.hash.replace(/^#/, "");
        if (savedHash !== pageId) {
            savedHash = pageId;
            if (pageId !== "") {
                expandSection(pageId);
            }
            expandSection(savedHash.slice(1)); // we remove the '#'
        }
    }

@@ -699,11 +695,6 @@ function preLoadCss(cssUrl) {
        }

    });

    const pageId = getPageId();
    if (pageId !== "") {
        expandSection(pageId);
    }
}());

window.rustdoc_add_line_numbers_to_examples = () => {
@@ -550,7 +550,7 @@ pub enum Type {
    DynTrait(DynTrait),
    /// Parameterized types
    Generic(String),
    /// Built in numberic (i*, u*, f*) types, bool, and char
    /// Built in numeric (i*, u*, f*) types, bool, and char
    Primitive(String),
    /// `extern "ABI" fn`
    FunctionPointer(Box<FunctionPointer>),
@@ -10,8 +10,8 @@ use std::path::{Path, PathBuf};
#[derive(serde::Serialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node<L> {
    Root { childs: Vec<Node<L>> },
    Directory { name: PathBuf, childs: Vec<Node<L>>, license: Option<L> },
    Root { children: Vec<Node<L>> },
    Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
    File { name: PathBuf, license: L },
    Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
    Empty,
@@ -48,14 +48,14 @@ impl Node<LicenseId> {
    /// ```
    fn merge_directories(&mut self) {
        match self {
            Node::Root { childs } | Node::Directory { childs, license: None, .. } => {
            Node::Root { children } | Node::Directory { children, license: None, .. } => {
                let mut directories = BTreeMap::new();
                let mut files = Vec::new();

                for child in childs.drain(..) {
                for child in children.drain(..) {
                    match child {
                        Node::Directory { name, mut childs, license: None } => {
                            directories.entry(name).or_insert_with(Vec::new).append(&mut childs);
                        Node::Directory { name, mut children, license: None } => {
                            directories.entry(name).or_insert_with(Vec::new).append(&mut children);
                        }
                        file @ Node::File { .. } => {
                            files.push(file);
@@ -73,14 +73,14 @@ impl Node<LicenseId> {
                }
            }

            childs.extend(directories.into_iter().map(|(name, childs)| Node::Directory {
            children.extend(directories.into_iter().map(|(name, children)| Node::Directory {
                name,
                childs,
                children,
                license: None,
            }));
            childs.append(&mut files);
            children.append(&mut files);

            for child in &mut *childs {
            for child in &mut *children {
                child.merge_directories();
            }
        }
@@ -105,13 +105,13 @@ impl Node<LicenseId> {
    /// our inclusion of LLVM.
    fn collapse_in_licensed_directories(&mut self) {
        match self {
            Node::Directory { childs, license, .. } => {
                for child in &mut *childs {
            Node::Directory { children, license, .. } => {
                for child in &mut *children {
                    child.collapse_in_licensed_directories();
                }

                let mut licenses_count = BTreeMap::new();
                for child in &*childs {
                for child in &*children {
                    let Some(license) = child.license() else { continue };
                    *licenses_count.entry(license).or_insert(0) += 1;
                }
@@ -122,12 +122,12 @@ impl Node<LicenseId> {
                    .map(|(license, _)| license);

                if let Some(most_popular_license) = most_popular_license {
                    childs.retain(|child| child.license() != Some(most_popular_license));
                    children.retain(|child| child.license() != Some(most_popular_license));
                    *license = Some(most_popular_license);
                }
            }
            Node::Root { childs } => {
                for child in &mut *childs {
            Node::Root { children } => {
                for child in &mut *children {
                    child.collapse_in_licensed_directories();
                }
            }
@@ -138,29 +138,29 @@ impl Node<LicenseId> {
    }

    /// Reduce the depth of the tree by merging subdirectories with the same license as their
    /// parent directory into their parent, and adjusting the paths of the childs accordingly.
    /// parent directory into their parent, and adjusting the paths of the children accordingly.
    fn merge_directory_licenses(&mut self) {
        match self {
            Node::Root { childs } => {
                for child in &mut *childs {
            Node::Root { children } => {
                for child in &mut *children {
                    child.merge_directory_licenses();
                }
            }
            Node::Directory { childs, license, .. } => {
            Node::Directory { children, license, .. } => {
                let mut to_add = Vec::new();
                for child in &mut *childs {
                for child in &mut *children {
                    child.merge_directory_licenses();

                    let Node::Directory {
                        name: child_name,
                        childs: child_childs,
                        children: child_children,
                        license: child_license,
                    } = child else { continue };

                    if child_license != license {
                        continue;
                    }
                    for mut child_child in child_childs.drain(..) {
                    for mut child_child in child_children.drain(..) {
                        match &mut child_child {
                            Node::Root { .. } => {
                                panic!("can't have a root inside another element");
@@ -181,7 +181,7 @@ impl Node<LicenseId> {

                    *child = Node::Empty;
                }
                childs.append(&mut to_add);
                children.append(&mut to_add);
            }
            Node::Empty => {}
            Node::File { .. } => {}
@@ -203,14 +203,14 @@ impl Node<LicenseId> {
            directories: Vec<PathBuf>,
        }
        match self {
            Node::Root { childs } | Node::Directory { childs, .. } => {
            Node::Root { children } | Node::Directory { children, .. } => {
                let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new();

                for child in &mut *childs {
                for child in &mut *children {
                    child.merge_groups();
                    match child {
                        Node::Directory { name, childs, license: Some(license) } => {
                            if childs.is_empty() {
                        Node::Directory { name, children, license: Some(license) } => {
                            if children.is_empty() {
                                grouped
                                    .entry(*license)
                                    .or_insert_with(Grouped::default)
@@ -234,16 +234,16 @@ impl Node<LicenseId> {
                for (license, mut grouped) in grouped.into_iter() {
                    if grouped.files.len() + grouped.directories.len() <= 1 {
                        if let Some(name) = grouped.files.pop() {
                            childs.push(Node::File { license, name });
                            children.push(Node::File { license, name });
                        } else if let Some(name) = grouped.directories.pop() {
                            childs.push(Node::Directory {
                            children.push(Node::Directory {
                                name,
                                childs: Vec::new(),
                                children: Vec::new(),
                                license: Some(license),
                            });
                        }
                    } else {
                        childs.push(Node::Group {
                        children.push(Node::Group {
                            license,
                            files: grouped.files,
                            directories: grouped.directories,
@@ -261,11 +261,11 @@ impl Node<LicenseId> {
    /// sure to remove them from the tree.
    fn remove_empty(&mut self) {
        match self {
            Node::Root { childs } | Node::Directory { childs, .. } => {
                for child in &mut *childs {
            Node::Root { children } | Node::Directory { children, .. } => {
                for child in &mut *children {
                    child.remove_empty();
                }
                childs.retain(|child| !matches!(child, Node::Empty));
                children.retain(|child| !matches!(child, Node::Empty));
            }
            Node::Group { .. } => {}
            Node::File { .. } => {}
@@ -275,7 +275,7 @@ impl Node<LicenseId> {

    fn license(&self) -> Option<LicenseId> {
        match self {
            Node::Directory { childs, license: Some(license), .. } if childs.is_empty() => {
            Node::Directory { children, license: Some(license), .. } if children.is_empty() => {
                Some(*license)
            }
            Node::File { license, .. } => Some(*license),
@@ -285,7 +285,7 @@ impl Node<LicenseId> {
}

pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
    let mut childs = Vec::new();
    let mut children = Vec::new();

    // Ensure reproducibility of all future steps.
    input.sort();
@@ -295,15 +295,15 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
        for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
            node = Node::Directory {
                name: component.as_os_str().into(),
                childs: vec![node],
                children: vec![node],
                license: None,
            };
        }

        childs.push(node);
        children.push(node);
    }

    Node::Root { childs }
    Node::Root { children }
}

/// Convert a `Node<LicenseId>` into a `Node<&License>`, expanding all interned license IDs with a
@@ -313,14 +313,14 @@ pub(crate) fn expand_interned_licenses(
    interner: &LicensesInterner,
) -> Node<&License> {
    match node {
        Node::Root { childs } => Node::Root {
            childs: childs
        Node::Root { children } => Node::Root {
            children: children
                .into_iter()
                .map(|child| expand_interned_licenses(child, interner))
                .collect(),
        },
        Node::Directory { name, childs, license } => Node::Directory {
            childs: childs
        Node::Directory { name, children, license } => Node::Directory {
            children: children
                .into_iter()
                .map(|child| expand_interned_licenses(child, interner))
                .collect(),
@@ -313,7 +313,8 @@ pub struct Config {
    pub cflags: String,
    pub cxxflags: String,
    pub ar: String,
    pub linker: Option<String>,
    pub target_linker: Option<String>,
    pub host_linker: Option<String>,
    pub llvm_components: String,

    /// Path to a NodeJS executable. Used for JS doctests, emscripten and WASM tests
@@ -134,7 +134,8 @@ pub fn parse_config(args: Vec<String>) -> Config {
        .reqopt("", "cflags", "flags for the C compiler", "FLAGS")
        .reqopt("", "cxxflags", "flags for the CXX compiler", "FLAGS")
        .optopt("", "ar", "path to an archiver", "PATH")
        .optopt("", "linker", "path to a linker", "PATH")
        .optopt("", "target-linker", "path to a linker for the target", "PATH")
        .optopt("", "host-linker", "path to a linker for the host", "PATH")
        .reqopt("", "llvm-components", "list of LLVM components built in", "LIST")
        .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH")
        .optopt("", "nodejs", "the name of nodejs", "PATH")
@@ -307,7 +308,8 @@ pub fn parse_config(args: Vec<String>) -> Config {
        cflags: matches.opt_str("cflags").unwrap(),
        cxxflags: matches.opt_str("cxxflags").unwrap(),
        ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")),
        linker: matches.opt_str("linker"),
        target_linker: matches.opt_str("target-linker"),
        host_linker: matches.opt_str("host-linker"),
        llvm_components: matches.opt_str("llvm-components").unwrap(),
        nodejs: matches.opt_str("nodejs"),
        npm: matches.opt_str("npm"),
@@ -350,7 +352,8 @@ pub fn log_config(config: &Config) {
    logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
    logv(c, format!("adb_device_status: {}", config.adb_device_status));
    logv(c, format!("ar: {}", config.ar));
    logv(c, format!("linker: {:?}", config.linker));
    logv(c, format!("target-linker: {:?}", config.target_linker));
    logv(c, format!("host-linker: {:?}", config.host_linker));
    logv(c, format!("verbose: {}", config.verbose));
    logv(c, format!("format: {:?}", config.format));
    logv(c, "\n".to_string());
@@ -1570,7 +1570,7 @@ impl<'test> TestCx<'test> {
            rustdoc.arg("--output-format").arg("json").arg("-Zunstable-options");
        }

        if let Some(ref linker) = self.config.linker {
        if let Some(ref linker) = self.config.target_linker {
            rustdoc.arg(format!("-Clinker={}", linker));
        }

@@ -2083,10 +2083,15 @@ impl<'test> TestCx<'test> {

        if self.props.force_host {
            self.maybe_add_external_args(&mut rustc, &self.config.host_rustcflags);
            if !is_rustdoc {
                if let Some(ref linker) = self.config.host_linker {
                    rustc.arg(format!("-Clinker={}", linker));
                }
            }
        } else {
            self.maybe_add_external_args(&mut rustc, &self.config.target_rustcflags);
            if !is_rustdoc {
                if let Some(ref linker) = self.config.linker {
                if let Some(ref linker) = self.config.target_linker {
                    rustc.arg(format!("-Clinker={}", linker));
                }
            }
@@ -3039,7 +3044,7 @@ impl<'test> TestCx<'test> {
            cmd.env("NODE", node);
        }

        if let Some(ref linker) = self.config.linker {
        if let Some(ref linker) = self.config.target_linker {
            cmd.env("RUSTC_LINKER", linker);
        }
@@ -20,17 +20,17 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
    let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>();

    match node {
        Node::Root { childs } => {
            for child in childs {
        Node::Root { children } => {
            for child in children {
                render_recursive(child, buffer, depth)?;
            }
        }
        Node::Directory { name, childs, license } => {
        Node::Directory { name, children, license } => {
            render_license(&prefix, std::iter::once(name), license, buffer)?;
            if !childs.is_empty() {
            if !children.is_empty() {
                writeln!(buffer, "{prefix}")?;
                writeln!(buffer, "{prefix}*Exceptions:*")?;
                for child in childs {
                for child in children {
                    writeln!(buffer, "{prefix}")?;
                    render_recursive(child, buffer, depth + 1)?;
                }
@@ -73,8 +73,8 @@ struct Metadata {
#[derive(serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node {
    Root { childs: Vec<Node> },
    Directory { name: String, childs: Vec<Node>, license: License },
    Root { children: Vec<Node> },
    Directory { name: String, children: Vec<Node>, license: License },
    File { name: String, license: License },
    Group { files: Vec<String>, directories: Vec<String>, license: License },
}
@@ -237,7 +237,7 @@ fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {

            // Serde json doesn't implement Ord or Hash for Value, so we must
            // use a Vec here. While in theory that makes setwize equality
            // O(n^2), in practice n will never be large enought to matter.
            // O(n^2), in practice n will never be large enough to matter.
            let expected_values =
                values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
            if expected_values.len() != got_values.len() {
@@ -1,6 +1,6 @@
use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary};

/// A univeral way to represent an [`ItemEnum`] or [`ItemKind`]
/// A universal way to represent an [`ItemEnum`] or [`ItemKind`]
#[derive(Debug, Clone, Copy)]
pub(crate) enum Kind {
    Module,
@@ -53,7 +53,7 @@ impl Kind {
            Primitive => true,
            ForeignType => true,

            // FIXME(adotinthevoid): I'm not sure if these are corrent
            // FIXME(adotinthevoid): I'm not sure if these are correct
            Keyword => false,
            OpaqueTy => false,
            ProcAttribute => false,
@@ -72,7 +72,7 @@ fn main() -> Result<()> {
                    )
                }
                [sel] => eprintln!(
                    "{} not in index or paths, but refered to at '{}'",
                    "{} not in index or paths, but referred to at '{}'",
                    err.id.0,
                    json_find::to_jsonpath(&sel)
                ),
@@ -85,12 +85,12 @@ fn main() -> Result<()> {
                        .collect::<Vec<_>>()
                        .join(", ");
                    eprintln!(
                        "{} not in index or paths, but refered to at {sels}",
                        "{} not in index or paths, but referred to at {sels}",
                        err.id.0
                    );
                } else {
                    eprintln!(
                        "{} not in index or paths, but refered to at '{}' and {} more",
                        "{} not in index or paths, but referred to at '{}' and {} more",
                        err.id.0,
                        json_find::to_jsonpath(&sel),
                        sels.len() - 1,
@@ -86,7 +86,7 @@ def gh_url():
    return os.environ['TOOLSTATE_ISSUES_API_URL']


def maybe_delink(message):
def maybe_remove_mention(message):
    # type: (str) -> str
    if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
        return message.replace("@", "")
@@ -109,7 +109,7 @@ def issue(
    else:
        status_description = 'no longer builds'
    request = json.dumps({
        'body': maybe_delink(textwrap.dedent('''\
        'body': maybe_remove_mention(textwrap.dedent('''\
        Hello, this is your friendly neighborhood mergebot.
        After merging PR {}, I observed that the tool {} {}.
        A follow-up PR to the repository {} is needed to fix the fallout.
@@ -285,7 +285,7 @@ try:
        issue_url = gh_url() + '/{}/comments'.format(number)
        response = urllib2.urlopen(urllib2.Request(
            issue_url,
            json.dumps({'body': maybe_delink(message)}).encode(),
            json.dumps({'body': maybe_remove_mention(message)}).encode(),
            {
                'Authorization': 'token ' + github_token,
                'Content-Type': 'application/json',
@@ -4,7 +4,7 @@ warning: unexpected `cfg` condition value
LL | #[cfg(target(os = "linux", arch = "X"))]
| ^^^^^^^^^^
|
= note: expected values for `target_arch` are: aarch64, arm, avr, bpf, hexagon, m68k, mips, mips64, msp430, nvptx64, powerpc, powerpc64, riscv32, riscv64, s390x, sparc, sparc64, wasm32, wasm64, x86, x86_64
= note: expected values for `target_arch` are: aarch64, arm, avr, bpf, hexagon, loongarch64, m68k, mips, mips64, msp430, nvptx64, powerpc, powerpc64, riscv32, riscv64, s390x, sparc, sparc64, wasm32, wasm64, x86, x86_64
= note: `#[warn(unexpected_cfgs)]` on by default

warning: 1 warning emitted
@@ -1,12 +1,12 @@
error[E0277]: `u8` cannot be safely transmuted into `bool` in the defining scope of `assert::Context`.
--> $DIR/bool.rs:22:35
--> $DIR/bool.rs:24:35
|
LL | assert::is_transmutable::<u8, bool>();
| ^^^^ `u8` cannot be safely transmuted into `bool` in the defining scope of `assert::Context`.
|
= help: the trait `BikeshedIntrinsicFrom<u8, assert::Context, Assume { alignment: false, lifetimes: false, safety: true, validity: false }>` is not implemented for `bool`
note: required by a bound in `is_transmutable`
--> $DIR/bool.rs:12:14
--> $DIR/bool.rs:14:14
|
LL | pub fn is_transmutable<Src, Dst>()
| --------------- required by a bound in this function
tests/ui/transmutability/primitives/bool.next.stderr (new file, 19 lines)
@@ -0,0 +1,19 @@
error[E0277]: `u8` cannot be safely transmuted into `bool` in the defining scope of `assert::Context`.
--> $DIR/bool.rs:24:35
|
LL | assert::is_transmutable::<u8, bool>();
| ^^^^ `u8` cannot be safely transmuted into `bool` in the defining scope of `assert::Context`.
|
= help: the trait `BikeshedIntrinsicFrom<u8, assert::Context, Assume { alignment: false, lifetimes: false, safety: true, validity: false }>` is not implemented for `bool`
note: required by a bound in `is_transmutable`
--> $DIR/bool.rs:14:14
|
LL | pub fn is_transmutable<Src, Dst>()
| --------------- required by a bound in this function
LL | where
LL | Dst: BikeshedIntrinsicFrom<Src, Context, { Assume::SAFETY }>
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `is_transmutable`

error: aborting due to previous error

For more information about this error, try `rustc --explain E0277`.
@@ -1,8 +1,10 @@
// revisions: current next
//[next] compile-flags: -Ztrait-solver=next

#![crate_type = "lib"]
#![feature(transmutability)]
#![allow(dead_code)]
#![allow(incomplete_features)]

mod assert {
    use std::mem::{Assume, BikeshedIntrinsicFrom};
    pub struct Context;
Some files were not shown because too many files have changed in this diff.