Auto merge of #100123 - matthiaskrgr:rollup-aylwvyc, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

 - #98877 (Set llvm configs when building lld)
 - #100068 (Fix backwards-compatibility check for tests with `+whole-archive`)
 - #100083 (rustdoc: use a more compact encoding for source-files.js)
 - #100102 (Fix typo)
 - #100104 (Remove more Clean trait implementations)
 - #100105 (Add regression test for #90871)
 - #100107 (fix trailing whitespace in error message)
 - #100111 (Provide suggestion on missing `let` in binding statement)
 - #100119 (FilesTimes support does not build for ESP-IDF)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 1ad56350cd
@@ -2,7 +2,7 @@ use rustc_ast as ast;
 use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
 use rustc_ast::{AssocConstraint, AssocConstraintKind, NodeId};
 use rustc_ast::{PatKind, RangeEnd, VariantData};
-use rustc_errors::struct_span_err;
+use rustc_errors::{struct_span_err, Applicability};
 use rustc_feature::{AttributeGate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
 use rustc_feature::{Features, GateIssue};
 use rustc_session::parse::{feature_err, feature_err_issue};
@@ -577,6 +577,32 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
         }
     }
 
+    fn visit_stmt(&mut self, stmt: &'a ast::Stmt) {
+        if let ast::StmtKind::Semi(expr) = &stmt.kind
+            && let ast::ExprKind::Assign(lhs, _, _) = &expr.kind
+            && let ast::ExprKind::Type(..) = lhs.kind
+            && self.sess.parse_sess.span_diagnostic.err_count() == 0
+            && !self.features.type_ascription
+            && !lhs.span.allows_unstable(sym::type_ascription)
+        {
+            // When we encounter a statement of the form `foo: Ty = val;`, this will emit a type
+            // ascription error, but the likely intention was to write a `let` statement. (#78907).
+            feature_err_issue(
+                &self.sess.parse_sess,
+                sym::type_ascription,
+                lhs.span,
+                GateIssue::Language,
+                "type ascription is experimental",
+            ).span_suggestion_verbose(
+                lhs.span.shrink_to_lo(),
+                "you might have meant to introduce a new binding",
+                "let ".to_string(),
+                Applicability::MachineApplicable,
+            ).emit();
+        }
+        visit::walk_stmt(self, stmt);
+    }
+
     fn visit_expr(&mut self, e: &'a ast::Expr) {
         match e.kind {
             ast::ExprKind::Box(_) => {
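For context, a before/after of the new suggestion, adapted from the missing-let-in-binding test added at the end of this diff: on a nightly compiler without `#![feature(type_ascription)]`, the first form now gets a MachineApplicable fix that inserts `let `.

    // Before: emits E0658 "type ascription is experimental" plus the new suggestion.
    fn main() {
        let mut _foo: i32 = 1;
        _foo: i32 = 4;
    }

    // After applying the suggested fix:
    fn main() {
        let mut _foo: i32 = 1;
        let _foo: i32 = 4;
    }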
@@ -795,8 +821,6 @@ fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
     // checks if `#![feature]` has been used to enable any lang feature
     // does not check the same for lib features unless there's at least one
     // declared lang feature
-    use rustc_errors::Applicability;
-
     if !sess.opts.unstable_features.is_nightly_build() {
         let lang_features = &sess.features_untracked().declared_lang_features;
         if lang_features.len() == 0 {
@@ -2736,7 +2736,7 @@ impl<'b, 'v> Visitor<'v> for ConditionVisitor<'b> {
                 self.errors.push((
                     e.span,
                     format!(
-                        "if the `for` loop runs 0 times, {} is not initialized ",
+                        "if the `for` loop runs 0 times, {} is not initialized",
                         self.name
                     ),
                 ));
@@ -2267,7 +2267,7 @@ fn add_local_native_libraries(
             // be added explicitly if necessary, see the error in `fn link_rlib`) compiled
             // as an executable due to `--test`. Use whole-archive implicitly, like before
             // the introduction of native lib modifiers.
-            || (bundle != Some(false) && sess.opts.test)
+            || (whole_archive == None && bundle != Some(false) && sess.opts.test)
         {
             cmd.link_whole_staticlib(
                 name,
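The extra `whole_archive == None` guard means the implicit whole-archive fallback for `--test` executables now applies only when the user gave no explicit modifier, so invocations like these two, quoted from the run-make test added later in this diff, keep the semantics they request:

    $(RUSTC) directly_linked_test_plus_whole_archive.rs --test -l static:+whole-archive=c_static_lib_with_constructor
    $(RUSTC) directly_linked_test_minus_whole_archive.rs --test -l static:-whole-archive=c_static_lib_with_constructor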
@@ -17,7 +17,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
     ///
     /// This is implemented by first entering a new universe.
     /// We then replace all bound variables in `sup` with placeholders,
-    /// and all bound variables in `sup` with inference vars.
+    /// and all bound variables in `sub` with inference vars.
     /// We can then just relate the two resulting types as normal.
     ///
     /// Note: this is a subtle algorithm. For a full explanation, please see
@@ -544,9 +544,11 @@ impl Default for FileTimes {
     fn default() -> Self {
         // Redox doesn't appear to support `UTIME_OMIT`, so we stub it out here, and always return
         // an error in `set_times`.
-        #[cfg(target_os = "redox")]
+        // ESP-IDF does not support `futimens` at all and the behavior for that OS is therefore
+        // the same as for Redox.
+        #[cfg(any(target_os = "redox", target_os = "espidf"))]
         let omit = libc::timespec { tv_sec: 0, tv_nsec: 0 };
-        #[cfg(not(target_os = "redox"))]
+        #[cfg(not(any(target_os = "redox", target_os = "espidf")))]
         let omit = libc::timespec { tv_sec: 0, tv_nsec: libc::UTIME_OMIT as _ };
         Self([omit; 2])
     }
@@ -1077,8 +1079,10 @@ impl File {
 
     pub fn set_times(&self, times: FileTimes) -> io::Result<()> {
         cfg_if::cfg_if! {
-            if #[cfg(target_os = "redox")] {
+            if #[cfg(any(target_os = "redox", target_os = "espidf"))] {
                 // Redox doesn't appear to support `UTIME_OMIT`.
+                // ESP-IDF does not support `futimens` at all and the behavior for that OS is therefore
+                // the same as for Redox.
                 drop(times);
                 Err(io::const_io_error!(
                     io::ErrorKind::Unsupported,
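For readers who have not used the API touched here, a short usage sketch follows (my illustration, not part of the diff; on the nightly toolchains of this period `FileTimes` and `File::set_times` sat behind the unstable `file_set_times` feature):

    #![feature(file_set_times)]

    use std::fs::{File, FileTimes};

    fn copy_times(from: &str, to: &str) -> std::io::Result<()> {
        // Read the source file's timestamps.
        let src = File::open(from)?.metadata()?;
        let times = FileTimes::new()
            .set_accessed(src.accessed()?)
            .set_modified(src.modified()?);
        // On Redox and, with this change, ESP-IDF the stubbed implementation above
        // returns `ErrorKind::Unsupported` instead of calling `futimens`.
        File::options().write(true).open(to)?.set_times(times)
    }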
@@ -345,13 +345,6 @@ impl Step for Llvm {
             cfg.define("LLVM_ENABLE_ZLIB", "OFF");
         }
 
-        if builder.config.llvm_thin_lto {
-            cfg.define("LLVM_ENABLE_LTO", "Thin");
-            if !target.contains("apple") {
-                cfg.define("LLVM_ENABLE_LLD", "ON");
-            }
-        }
-
         // This setting makes the LLVM tools link to the dynamic LLVM library,
         // which saves both memory during parallel links and overall disk space
         // for the tools. We don't do this on every platform as it doesn't work
@@ -463,15 +456,8 @@ impl Step for Llvm {
             cfg.define("LLVM_VERSION_SUFFIX", suffix);
         }
 
-        if let Some(ref linker) = builder.config.llvm_use_linker {
-            cfg.define("LLVM_USE_LINKER", linker);
-        }
-
-        if builder.config.llvm_allow_old_toolchain {
-            cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
-        }
-
         configure_cmake(builder, target, &mut cfg, true, ldflags);
+        configure_llvm(builder, target, &mut cfg);
 
         for (key, val) in &builder.config.llvm_build_config {
             cfg.define(key, val);
@@ -731,6 +717,25 @@ fn configure_cmake(
     }
 }
 
+fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmake::Config) {
+    // ThinLTO is only available when building with LLVM, enabling LLD is required.
+    // Apple's linker ld64 supports ThinLTO out of the box though, so don't use LLD on Darwin.
+    if builder.config.llvm_thin_lto {
+        cfg.define("LLVM_ENABLE_LTO", "Thin");
+        if !target.contains("apple") {
+            cfg.define("LLVM_ENABLE_LLD", "ON");
+        }
+    }
+
+    if let Some(ref linker) = builder.config.llvm_use_linker {
+        cfg.define("LLVM_USE_LINKER", linker);
+    }
+
+    if builder.config.llvm_allow_old_toolchain {
+        cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
+    }
+}
+
 // Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365
 fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
     let kind = if host == target { "HOST" } else { "TARGET" };
@@ -794,6 +799,7 @@ impl Step for Lld {
         }
 
         configure_cmake(builder, target, &mut cfg, true, ldflags);
+        configure_llvm(builder, target, &mut cfg);
 
         // This is an awful, awful hack. Discovered when we migrated to using
         // clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
@@ -825,10 +831,6 @@ impl Step for Lld {
             .define("LLVM_CONFIG_PATH", llvm_config_shim)
             .define("LLVM_INCLUDE_TESTS", "OFF");
 
-        if builder.config.llvm_allow_old_toolchain {
-            cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
-        }
-
         // While we're using this horrible workaround to shim the execution of
         // llvm-config, let's just pile on more. I can't seem to figure out how
         // to build LLD as a standalone project and also cross-compile it at the
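For context, the three settings that the new `configure_llvm` helper now applies to both the `Llvm` and `Lld` steps come from `config.toml`; a sketch of the relevant section, with option names assumed from the `builder.config` fields above rather than spelled out in this diff:

    [llvm]
    thin-lto = true              # builder.config.llvm_thin_lto
    use-linker = "lld"           # builder.config.llvm_use_linker
    allow-old-toolchain = true   # builder.config.llvm_allow_old_toolchain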
@@ -247,30 +247,28 @@ pub(crate) fn clean_middle_const<'tcx>(
     }
 }
 
-impl<'tcx> Clean<'tcx, Option<Lifetime>> for ty::Region<'tcx> {
-    fn clean(&self, _cx: &mut DocContext<'_>) -> Option<Lifetime> {
-        match **self {
-            ty::ReStatic => Some(Lifetime::statik()),
-            ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
-                if name != kw::UnderscoreLifetime { Some(Lifetime(name)) } else { None }
-            }
-            ty::ReEarlyBound(ref data) => {
-                if data.name != kw::UnderscoreLifetime {
-                    Some(Lifetime(data.name))
-                } else {
-                    None
-                }
-            }
-            ty::ReLateBound(..)
-            | ty::ReFree(..)
-            | ty::ReVar(..)
-            | ty::RePlaceholder(..)
-            | ty::ReEmpty(_)
-            | ty::ReErased => {
-                debug!("cannot clean region {:?}", self);
+pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Lifetime> {
+    match *region {
+        ty::ReStatic => Some(Lifetime::statik()),
+        ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
+            if name != kw::UnderscoreLifetime { Some(Lifetime(name)) } else { None }
+        }
+        ty::ReEarlyBound(ref data) => {
+            if data.name != kw::UnderscoreLifetime {
+                Some(Lifetime(data.name))
+            } else {
+                None
+            }
+        }
+        ty::ReLateBound(..)
+        | ty::ReFree(..)
+        | ty::ReVar(..)
+        | ty::RePlaceholder(..)
+        | ty::ReEmpty(_)
+        | ty::ReErased => {
+            debug!("cannot clean region {:?}", region);
             None
         }
     }
 }
 
@@ -321,7 +319,7 @@ impl<'tcx> Clean<'tcx, Option<WherePredicate>> for ty::Predicate<'tcx> {
             ty::PredicateKind::Trait(pred) => {
                 clean_poly_trait_predicate(bound_predicate.rebind(pred), cx)
             }
-            ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred, cx),
+            ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred),
             ty::PredicateKind::TypeOutlives(pred) => clean_type_outlives_predicate(pred, cx),
             ty::PredicateKind::Projection(pred) => Some(clean_projection_predicate(pred, cx)),
             ty::PredicateKind::ConstEvaluatable(..) => None,
@ -358,7 +356,6 @@ fn clean_poly_trait_predicate<'tcx>(
|
||||
|
||||
fn clean_region_outlives_predicate<'tcx>(
|
||||
pred: ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>,
|
||||
cx: &mut DocContext<'tcx>,
|
||||
) -> Option<WherePredicate> {
|
||||
let ty::OutlivesPredicate(a, b) = pred;
|
||||
|
||||
@@ -367,8 +364,10 @@ fn clean_region_outlives_predicate<'tcx>(
     }
 
     Some(WherePredicate::RegionPredicate {
-        lifetime: a.clean(cx).expect("failed to clean lifetime"),
-        bounds: vec![GenericBound::Outlives(b.clean(cx).expect("failed to clean bounds"))],
+        lifetime: clean_middle_region(a).expect("failed to clean lifetime"),
+        bounds: vec![GenericBound::Outlives(
+            clean_middle_region(b).expect("failed to clean bounds"),
+        )],
     })
 }
 
@@ -384,7 +383,9 @@ fn clean_type_outlives_predicate<'tcx>(
 
     Some(WherePredicate::BoundPredicate {
         ty: clean_middle_ty(ty, cx, None),
-        bounds: vec![GenericBound::Outlives(lt.clean(cx).expect("failed to clean lifetimes"))],
+        bounds: vec![GenericBound::Outlives(
+            clean_middle_region(lt).expect("failed to clean lifetimes"),
+        )],
         bound_params: Vec::new(),
     })
 }
@@ -999,15 +1000,6 @@ impl<'tcx> Clean<'tcx, FnRetTy> for hir::FnRetTy<'tcx> {
     }
 }
 
-impl<'tcx> Clean<'tcx, bool> for hir::IsAuto {
-    fn clean(&self, _: &mut DocContext<'tcx>) -> bool {
-        match *self {
-            hir::IsAuto::Yes => true,
-            hir::IsAuto::No => false,
-        }
-    }
-}
-
 impl<'tcx> Clean<'tcx, Path> for hir::TraitRef<'tcx> {
     fn clean(&self, cx: &mut DocContext<'tcx>) -> Path {
         let path = clean_path(self.path, cx);
|
||||
}
|
||||
ty::RawPtr(mt) => RawPointer(mt.mutbl, Box::new(clean_middle_ty(mt.ty, cx, None))),
|
||||
ty::Ref(r, ty, mutbl) => BorrowedRef {
|
||||
lifetime: r.clean(cx),
|
||||
lifetime: clean_middle_region(r),
|
||||
mutability: mutbl,
|
||||
type_: Box::new(clean_middle_ty(ty, cx, None)),
|
||||
},
|
||||
@ -1644,7 +1636,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
@@ -1644,7 +1636,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
 
             inline::record_extern_fqn(cx, did, ItemType::Trait);
 
-            let lifetime = reg.clean(cx);
+            let lifetime = clean_middle_region(*reg);
             let mut bounds = vec![];
 
             for did in dids {
@@ -1710,7 +1702,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
                 let trait_ref = match bound_predicate.skip_binder() {
                     ty::PredicateKind::Trait(tr) => bound_predicate.rebind(tr.trait_ref),
                     ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(_ty, reg)) => {
-                        if let Some(r) = reg.clean(cx) {
+                        if let Some(r) = clean_middle_region(reg) {
                             regions.push(GenericBound::Outlives(r));
                         }
                         return None;
@@ -2,9 +2,9 @@ use crate::clean::auto_trait::AutoTraitFinder;
 use crate::clean::blanket_impl::BlanketImplFinder;
 use crate::clean::render_macro_matchers::render_macro_matcher;
 use crate::clean::{
-    clean_middle_const, clean_middle_ty, inline, Clean, Crate, ExternalCrate, Generic, GenericArg,
-    GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment, Primitive,
-    PrimitiveType, Type, TypeBinding, Visibility,
+    clean_middle_const, clean_middle_region, clean_middle_ty, inline, Clean, Crate, ExternalCrate,
+    Generic, GenericArg, GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment,
+    Primitive, PrimitiveType, Type, TypeBinding, Visibility,
 };
 use crate::core::DocContext;
 use crate::formats::item_type::ItemType;
@@ -86,7 +86,7 @@ pub(crate) fn substs_to_args<'tcx>(
         Vec::with_capacity(substs.len().saturating_sub(if skip_first { 1 } else { 0 }));
     ret_val.extend(substs.iter().filter_map(|kind| match kind.unpack() {
         GenericArgKind::Lifetime(lt) => {
-            Some(GenericArg::Lifetime(lt.clean(cx).unwrap_or(Lifetime::elided())))
+            Some(GenericArg::Lifetime(clean_middle_region(lt).unwrap_or(Lifetime::elided())))
         }
         GenericArgKind::Type(_) if skip_first => {
             skip_first = false;
@@ -366,13 +366,15 @@ pub(super) fn write_shared(
                 .collect::<Vec<_>>();
             files.sort_unstable();
             let subs = subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(",");
-            let dirs =
-                if subs.is_empty() { String::new() } else { format!(",\"dirs\":[{}]", subs) };
+            let dirs = if subs.is_empty() && files.is_empty() {
+                String::new()
+            } else {
+                format!(",[{}]", subs)
+            };
             let files = files.join(",");
-            let files =
-                if files.is_empty() { String::new() } else { format!(",\"files\":[{}]", files) };
+            let files = if files.is_empty() { String::new() } else { format!(",[{}]", files) };
             format!(
-                "{{\"name\":\"{name}\"{dirs}{files}}}",
+                "[\"{name}\"{dirs}{files}]",
                 name = self.elem.to_str().expect("invalid osstring conversion"),
                 dirs = dirs,
                 files = files
@@ -411,18 +413,23 @@ pub(super) fn write_shared(
     let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix));
     let make_sources = || {
         let (mut all_sources, _krates) =
-            try_err!(collect(&dst, krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
+            try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
         all_sources.push(format!(
-            "sourcesIndex[\"{}\"] = {};",
+            r#""{}":{}"#,
             &krate.name(cx.tcx()),
-            hierarchy.to_json_string()
+            hierarchy
+                .to_json_string()
+                // All these `replace` calls are because we have to go through JS string for JSON content.
+                .replace('\\', r"\\")
+                .replace('\'', r"\'")
+                // We need to escape double quotes for the JSON.
+                .replace("\\\"", "\\\\\"")
         ));
         all_sources.sort();
-        Ok(format!(
-            "var sourcesIndex = {{}};\n{}\ncreateSourceSidebar();\n",
-            all_sources.join("\n")
-        )
-        .into_bytes())
+        let mut v = String::from("var sourcesIndex = JSON.parse('{\\\n");
+        v.push_str(&all_sources.join(",\\\n"));
+        v.push_str("\\\n}');\ncreateSourceSidebar();\n");
+        Ok(v.into_bytes())
     };
     write_crate("source-files.js", &make_sources)?;
 }
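To illustrate the more compact encoding, a hypothetical directory `util` holding a single file `mod.rs` and no subdirectories (names invented for illustration) changes roughly like this, with the positional slots matching the NAME_OFFSET/DIRS_OFFSET/FILES_OFFSET constants introduced in source-script.js below:

    Before: {"name":"util","files":["mod.rs"]}
    After:  ["util",[],["mod.rs"]]

In addition, instead of emitting one `sourcesIndex["crate"] = ...;` statement per crate, all entries are now written as `"crate":entry` pairs inside a single `JSON.parse('{...}')` call.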
@@ -12,6 +12,10 @@
 const rootPath = document.getElementById("rustdoc-vars").attributes["data-root-path"].value;
 let oldScrollPosition = 0;
 
+const NAME_OFFSET = 0;
+const DIRS_OFFSET = 1;
+const FILES_OFFSET = 2;
+
 function closeSidebarIfMobile() {
     if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) {
         updateLocalStorage("source-sidebar-show", "false");
@@ -24,15 +28,15 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
 
     dirEntry.className = "dir-entry";
 
-    fullPath += elem["name"] + "/";
+    fullPath += elem[NAME_OFFSET] + "/";
 
-    summary.innerText = elem["name"];
+    summary.innerText = elem[NAME_OFFSET];
     dirEntry.appendChild(summary);
 
     const folders = document.createElement("div");
     folders.className = "folders";
-    if (elem.dirs) {
-        for (const dir of elem.dirs) {
+    if (elem[DIRS_OFFSET]) {
+        for (const dir of elem[DIRS_OFFSET]) {
             if (createDirEntry(dir, folders, fullPath, false)) {
                 dirEntry.open = true;
                 hasFoundFile = true;
@@ -43,8 +47,8 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) {
 
     const files = document.createElement("div");
     files.className = "files";
-    if (elem.files) {
-        for (const file_text of elem.files) {
+    if (elem[FILES_OFFSET]) {
+        for (const file_text of elem[FILES_OFFSET]) {
             const file = document.createElement("a");
             file.innerText = file_text;
             file.href = rootPath + "src/" + fullPath + file_text + ".html";
@@ -125,7 +129,7 @@ function createSourceSidebar() {
     title.innerText = "Files";
     sidebar.appendChild(title);
     Object.keys(sourcesIndex).forEach(key => {
-        sourcesIndex[key].name = key;
+        sourcesIndex[key][NAME_OFFSET] = key;
         hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "",
             hasFoundFile);
     });
@@ -1,7 +1,7 @@
 # ignore-cross-compile -- compiling C++ code does not work well when cross-compiling
 
-# This test case makes sure that native libraries are linked with --whole-archive semantics
-# when the `-bundle,+whole-archive` modifiers are applied to them.
+# This test case makes sure that native libraries are linked with appropriate semantics
+# when the `[+-]bundle,[+-]whole-archive` modifiers are applied to them.
 #
 # The test works by checking that the resulting executables produce the expected output,
 # part of which is emitted by otherwise unreferenced C code. If +whole-archive didn't work
@@ -10,8 +10,14 @@
 
 -include ../../run-make-fulldeps/tools.mk
 
-all: $(TMPDIR)/$(call BIN,directly_linked) $(TMPDIR)/$(call BIN,indirectly_linked) $(TMPDIR)/$(call BIN,indirectly_linked_via_attr)
+all: $(TMPDIR)/$(call BIN,directly_linked) \
+     $(TMPDIR)/$(call BIN,directly_linked_test_plus_whole_archive) \
+     $(TMPDIR)/$(call BIN,directly_linked_test_minus_whole_archive) \
+     $(TMPDIR)/$(call BIN,indirectly_linked) \
+     $(TMPDIR)/$(call BIN,indirectly_linked_via_attr)
 	$(call RUN,directly_linked) | $(CGREP) 'static-initializer.directly_linked.'
+	$(call RUN,directly_linked_test_plus_whole_archive) --nocapture | $(CGREP) 'static-initializer.'
+	$(call RUN,directly_linked_test_minus_whole_archive) --nocapture | $(CGREP) -v 'static-initializer.'
 	$(call RUN,indirectly_linked) | $(CGREP) 'static-initializer.indirectly_linked.'
 	$(call RUN,indirectly_linked_via_attr) | $(CGREP) 'static-initializer.native_lib_in_src.'
 
@@ -19,6 +25,13 @@ all: $(TMPDIR)/$(call BIN,directly_linked) $(TMPDIR)/$(call BIN,indirectly_linke
 $(TMPDIR)/$(call BIN,directly_linked): $(call NATIVE_STATICLIB,c_static_lib_with_constructor)
 	$(RUSTC) directly_linked.rs -l static:+whole-archive=c_static_lib_with_constructor
 
+# Native lib linked into test executable, +whole-archive
+$(TMPDIR)/$(call BIN,directly_linked_test_plus_whole_archive): $(call NATIVE_STATICLIB,c_static_lib_with_constructor)
+	$(RUSTC) directly_linked_test_plus_whole_archive.rs --test -l static:+whole-archive=c_static_lib_with_constructor
+# Native lib linked into test executable, -whole-archive
+$(TMPDIR)/$(call BIN,directly_linked_test_minus_whole_archive): $(call NATIVE_STATICLIB,c_static_lib_with_constructor)
+	$(RUSTC) directly_linked_test_minus_whole_archive.rs --test -l static:-whole-archive=c_static_lib_with_constructor
+
 # Native lib linked into RLIB via `-l static:-bundle,+whole-archive`, RLIB linked into executable
 $(TMPDIR)/$(call BIN,indirectly_linked): $(TMPDIR)/librlib_with_cmdline_native_lib.rlib
 	$(RUSTC) indirectly_linked.rs
 
@@ -0,0 +1,7 @@
+use std::io::Write;
+
+#[test]
+fn test_thing() {
+    print!("ran the test");
+    std::io::stdout().flush().unwrap();
+}
@@ -0,0 +1,7 @@
+use std::io::Write;
+
+#[test]
+fn test_thing() {
+    print!("ran the test");
+    std::io::stdout().flush().unwrap();
+}
@@ -4,7 +4,7 @@ error[E0381]: used binding `x` is possibly-uninitialized
 LL |     let mut x: isize;
    |         ----- binding declared here but left uninitialized
 LL |     for _ in 0..0 { x = 10; }
-   |     ---- if the `for` loop runs 0 times, `x` is not initialized 
+   |     ---- if the `for` loop runs 0 times, `x` is not initialized
 LL |     return x;
    |            ^ `x` used here but it is possibly-uninitialized
 
src/test/ui/closures/issue-90871.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
+fn main() {
+    2: n([u8; || 1])
+    //~^ ERROR cannot find type `n` in this scope
+    //~| ERROR mismatched types
+}
src/test/ui/closures/issue-90871.stderr (new file, 23 lines)
@@ -0,0 +1,23 @@
+error[E0412]: cannot find type `n` in this scope
+  --> $DIR/issue-90871.rs:2:8
+   |
+LL |     2: n([u8; || 1])
+   |        ^ expecting a type here because of type ascription
+
+error[E0308]: mismatched types
+  --> $DIR/issue-90871.rs:2:15
+   |
+LL |     2: n([u8; || 1])
+   |               ^^^^ expected `usize`, found closure
+   |
+   = note: expected type `usize`
+             found closure `[closure@$DIR/issue-90871.rs:2:15: 2:17]`
+help: use parentheses to call this closure
+   |
+LL |     2: n([u8; (|| 1)()])
+   |               +    +++
+   |
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0308, E0412.
+For more information about an error, try `rustc --explain E0308`.
src/test/ui/type/missing-let-in-binding.fixed (new file, 5 lines)
@@ -0,0 +1,5 @@
+// run-rustfix
+fn main() {
+    let mut _foo: i32 = 1;
+    let _foo: i32 = 4; //~ ERROR type ascription is experimental
+}
src/test/ui/type/missing-let-in-binding.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
+// run-rustfix
+fn main() {
+    let mut _foo: i32 = 1;
+    _foo: i32 = 4; //~ ERROR type ascription is experimental
+}
src/test/ui/type/missing-let-in-binding.stderr (new file, 16 lines)
@@ -0,0 +1,16 @@
+error[E0658]: type ascription is experimental
+  --> $DIR/missing-let-in-binding.rs:4:5
+   |
+LL |     _foo: i32 = 4;
+   |     ^^^^^^^^^
+   |
+   = note: see issue #23416 <https://github.com/rust-lang/rust/issues/23416> for more information
+   = help: add `#![feature(type_ascription)]` to the crate attributes to enable
+help: you might have meant to introduce a new binding
+   |
+LL |     let _foo: i32 = 4;
+   |     +++
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.