Auto merge of #40091 - eddyb:rollup, r=eddyb
Rollup of 28 pull requests

- Successful merges: #39859, #39864, #39888, #39903, #39905, #39914, #39945, #39950, #39953, #39961, #39980, #39988, #39993, #39995, #40019, #40020, #40022, #40024, #40025, #40026, #40027, #40031, #40035, #40037, #40038, #40064, #40069, #40086
- Failed merges: #39927, #40008, #40047
commit 1572bf104d
@@ -68,6 +68,7 @@ fn main() {
};
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let mut on_fail = env::var_os("RUSTC_ON_FAIL").map(|of| Command::new(of));

let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));

@@ -217,9 +218,20 @@ fn main() {
}

// Actually run the compiler!
std::process::exit(match exec_cmd(&mut cmd) {
Ok(s) => s.code().unwrap_or(0xfe),
Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
std::process::exit(if let Some(ref mut on_fail) = on_fail {
match cmd.status() {
Ok(s) if s.success() => 0,
_ => {
println!("\nDid not run successfully:\n{:?}\n-------------", cmd);
exec_cmd(on_fail).expect("could not run the backup command");
1
}
}
} else {
std::process::exit(match exec_cmd(&mut cmd) {
Ok(s) => s.code().unwrap_or(0xfe),
Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
})
})
}
@@ -355,8 +355,12 @@ class RustBuild(object):
env = os.environ.copy()
env["CARGO_TARGET_DIR"] = build_dir
env["RUSTC"] = self.rustc()
env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib")
env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib")
env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["LD_LIBRARY_PATH"]) \
if "LD_LIBRARY_PATH" in env else ""
env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
(os.pathsep + env["DYLD_LIBRARY_PATH"]) \
if "DYLD_LIBRARY_PATH" in env else ""
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):

@@ -485,6 +489,8 @@ class RustBuild(object):
ostype += 'abi64'
elif cputype in {'powerpc', 'ppc', 'ppc64'}:
cputype = 'powerpc'
elif cputype == 'sparcv9':
pass
elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
cputype = 'x86_64'
else:
@@ -28,6 +28,7 @@ use step;
/// Deserialized version of all flags for this compile.
pub struct Flags {
pub verbose: usize, // verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
pub on_fail: Option<String>,
pub stage: Option<u32>,
pub keep_stage: Option<u32>,
pub build: String,

@@ -81,6 +82,7 @@ impl Flags {
opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
opts.optmulti("", "host", "host targets to build", "HOST");
opts.optmulti("", "target", "target targets to build", "TARGET");
opts.optopt("", "on-fail", "command to run on failure", "CMD");
opts.optopt("", "stage", "stage to build", "N");
opts.optopt("", "keep-stage", "stage to keep without recompiling", "N");
opts.optopt("", "src", "path to the root of the rust checkout", "DIR");

@@ -283,6 +285,7 @@ To learn more about a subcommand, run `./x.py <command> -h`
Flags {
verbose: m.opt_count("v"),
stage: stage,
on_fail: m.opt_str("on-fail"),
keep_stage: m.opt_str("keep-stage").map(|j| j.parse().unwrap()),
build: m.opt_str("build").unwrap_or_else(|| {
env::var("BUILD").unwrap()
@@ -499,6 +499,10 @@ impl Build {
cargo.env("RUSTC_INCREMENTAL", incr_dir);
}

if let Some(ref on_fail) = self.flags.on_fail {
cargo.env("RUSTC_ON_FAIL", on_fail);
}

let verbose = cmp::max(self.config.verbose, self.flags.verbose);
cargo.env("RUSTC_VERBOSE", format!("{}", verbose));

@@ -828,17 +832,6 @@ impl Build {
if target.contains("apple-darwin") {
base.push("-stdlib=libc++".into());
}
// This is a hack, because newer binutils broke things on some vms/distros
// (i.e., linking against unknown relocs disabled by the following flag)
// See: https://github.com/rust-lang/rust/issues/34978
match target {
"i586-unknown-linux-gnu" |
"i686-unknown-linux-musl" |
"x86_64-unknown-linux-musl" => {
base.push("-Wa,-mrelax-relocations=no".into());
},
_ => {},
}
return base
}
@@ -34,6 +34,14 @@ ENV RUST_CONFIGURE_ARGS \
--musl-root-x86_64=/musl-x86_64 \
--musl-root-i686=/musl-i686

# Newer binutils broke things on some vms/distros (i.e., linking against
# unknown relocs disabled by the following flag), so we need to go out of our
# way to produce "super compatible" binaries.
#
# See: https://github.com/rust-lang/rust/issues/34978
ENV CFLAGS_i686_unknown_linux_gnu=-Wa,-mrelax-relocations=no \
CFLAGS_x86_64_unknown_linux_gnu=-Wa,-mrelax-relocations=no

ENV SCRIPT \
python2.7 ../x.py test \
--target x86_64-unknown-linux-musl \
@@ -11,7 +11,10 @@

set -ex

export CFLAGS="-fPIC"
# We need to mitigate rust-lang/rust#34978 when compiling musl itself as well
export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
export CXXFLAGS="-Wa,-mrelax-relocations=no"

MUSL=musl-1.1.14
curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
cd $MUSL
@@ -82,5 +82,23 @@ standard library made a utility for itself called `Unique<T>` which:

* wraps a `*const T` for variance
* includes a `PhantomData<T>`
* auto-derives Send/Sync as if T was contained
* marks the pointer as NonZero for the null-pointer optimization
* auto-derives `Send`/`Sync` as if T was contained
* marks the pointer as `NonZero` for the null-pointer optimization

## Table of `PhantomData` patterns

Here’s a table of all the wonderful ways `PhantomData` could be used:

| Phantom type                | `'a`      | `T`                       |
|-----------------------------|-----------|---------------------------|
| `PhantomData<T>`            | -         | variant (with drop check) |
| `PhantomData<&'a T>`        | variant   | variant                   |
| `PhantomData<&'a mut T>`    | variant   | invariant                 |
| `PhantomData<*const T>`     | -         | variant                   |
| `PhantomData<*mut T>`       | -         | invariant                 |
| `PhantomData<fn(T)>`        | -         | contravariant (*)         |
| `PhantomData<fn() -> T>`    | -         | variant                   |
| `PhantomData<fn(T) -> T>`   | -         | invariant                 |
| `PhantomData<Cell<&'a ()>>` | invariant | -                         |

(*) If contravariance gets scrapped, this would be invariant.
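To make the `Unique<T>` bullets above concrete, here is a minimal sketch of such a wrapper (a hypothetical `MyUnique`, not the standard library's actual definition; it assumes only `std::marker::PhantomData` and leaves the `NonZero` niche optimization out):

```rust
use std::marker::PhantomData;

// Covariant over T via *const T; PhantomData<T> tells the compiler the
// wrapper logically owns a T (drop check, auto traits "as if T was contained").
struct MyUnique<T> {
    ptr: *const T,
    _own: PhantomData<T>,
}

// Raw pointers are !Send/!Sync, so Send/Sync are restored explicitly,
// conditional on T.
unsafe impl<T: Send> Send for MyUnique<T> {}
unsafe impl<T: Sync> Sync for MyUnique<T> {}

impl<T> MyUnique<T> {
    fn new(ptr: *mut T) -> MyUnique<T> {
        MyUnique { ptr: ptr as *const T, _own: PhantomData }
    }
}
```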
@@ -92,7 +92,15 @@ fn main() {
// compiler-rt's build system already
cfg.flag("-fno-builtin");
cfg.flag("-fvisibility=hidden");
cfg.flag("-fomit-frame-pointer");
// Accepted practice on Solaris is to never omit frame pointer so that
// system observability tools work as expected. In addition, at least
// on Solaris, -fomit-frame-pointer on sparcv9 appears to generate
// references to data outside of the current stack frame. A search of
// the gcc bug database provides a variety of issues surrounding
// -fomit-frame-pointer on non-x86 platforms.
if !target.contains("solaris") && !target.contains("sparc") {
cfg.flag("-fomit-frame-pointer");
}
cfg.flag("-ffreestanding");
cfg.define("VISIBILITY_HIDDEN", None);
}
@@ -1 +1 @@
Subproject commit 7a3754f2bb5e65eb39adacb189c3626173032177
Subproject commit 64d954c6a76e896fbf7ed5c17e77c40e388abe84
@@ -220,15 +220,24 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
// Note that `break` and `continue` statements
// may cause additional edges.

// Is the condition considered part of the loop?
let loopback = self.add_dummy_node(&[pred]); // 1
let cond_exit = self.expr(&cond, loopback); // 2
let expr_exit = self.add_ast_node(expr.id, &[cond_exit]); // 3

// Create expr_exit without pred (cond_exit)
let expr_exit = self.add_ast_node(expr.id, &[]); // 3

// The LoopScope needs to be on the loop_scopes stack while evaluating the
// condition and the body of the loop (both can break out of the loop)
self.loop_scopes.push(LoopScope {
loop_id: expr.id,
continue_index: loopback,
break_index: expr_exit
});

let cond_exit = self.expr(&cond, loopback); // 2

// Add pred (cond_exit) to expr_exit
self.add_contained_edge(cond_exit, expr_exit);

let body_exit = self.block(&body, cond_exit); // 4
self.add_contained_edge(body_exit, loopback); // 5
self.loop_scopes.pop();

@@ -294,17 +303,17 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
self.add_unreachable_node()
}

hir::ExprBreak(label, ref opt_expr) => {
hir::ExprBreak(destination, ref opt_expr) => {
let v = self.opt_expr(opt_expr, pred);
let loop_scope = self.find_scope(expr, label);
let loop_scope = self.find_scope(expr, destination);
let b = self.add_ast_node(expr.id, &[v]);
self.add_exiting_edge(expr, b,
loop_scope, loop_scope.break_index);
self.add_unreachable_node()
}

hir::ExprAgain(label) => {
let loop_scope = self.find_scope(expr, label);
hir::ExprAgain(destination) => {
let loop_scope = self.find_scope(expr, destination);
let a = self.add_ast_node(expr.id, &[pred]);
self.add_exiting_edge(expr, a,
loop_scope, loop_scope.continue_index);

@@ -579,17 +588,18 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {

fn find_scope(&self,
expr: &hir::Expr,
label: Option<hir::Label>) -> LoopScope {
match label {
None => *self.loop_scopes.last().unwrap(),
Some(label) => {
destination: hir::Destination) -> LoopScope {

match destination.loop_id.into() {
Ok(loop_id) => {
for l in &self.loop_scopes {
if l.loop_id == label.loop_id {
if l.loop_id == loop_id {
return *l;
}
}
span_bug!(expr.span, "no loop scope for id {}", label.loop_id);
span_bug!(expr.span, "no loop scope for id {}", loop_id);
}
Err(err) => span_bug!(expr.span, "loop scope error: {}", err)
}
}
}
@@ -11,6 +11,7 @@
use hir::def_id::DefId;
use util::nodemap::NodeMap;
use syntax::ast;
use syntax::ext::base::MacroKind;
use hir;

#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]

@@ -53,7 +54,7 @@ pub enum Def {
Label(ast::NodeId),

// Macro namespace
Macro(DefId),
Macro(DefId, MacroKind),

// Both namespaces
Err,

@@ -141,7 +142,7 @@ impl Def {
Def::Variant(id) | Def::VariantCtor(id, ..) | Def::Enum(id) | Def::TyAlias(id) |
Def::AssociatedTy(id) | Def::TyParam(id) | Def::Struct(id) | Def::StructCtor(id, ..) |
Def::Union(id) | Def::Trait(id) | Def::Method(id) | Def::Const(id) |
Def::AssociatedConst(id) | Def::Local(id) | Def::Upvar(id, ..) | Def::Macro(id) => {
Def::AssociatedConst(id) | Def::Local(id) | Def::Upvar(id, ..) | Def::Macro(id, ..) => {
id
}
@@ -1006,18 +1006,22 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
ExprPath(ref qpath) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
}
ExprBreak(None, ref opt_expr) => {
ExprBreak(label, ref opt_expr) => {
label.ident.map(|ident| {
if let Ok(loop_id) = label.loop_id.into() {
visitor.visit_def_mention(Def::Label(loop_id));
}
visitor.visit_name(ident.span, ident.node.name);
});
walk_list!(visitor, visit_expr, opt_expr);
}
ExprBreak(Some(label), ref opt_expr) => {
visitor.visit_def_mention(Def::Label(label.loop_id));
visitor.visit_name(label.span, label.name);
walk_list!(visitor, visit_expr, opt_expr);
}
ExprAgain(None) => {}
ExprAgain(Some(label)) => {
visitor.visit_def_mention(Def::Label(label.loop_id));
visitor.visit_name(label.span, label.name);
ExprAgain(label) => {
label.ident.map(|ident| {
if let Ok(loop_id) = label.loop_id.into() {
visitor.visit_def_mention(Def::Label(loop_id));
}
visitor.visit_name(ident.span, ident.node.name);
});
}
ExprRet(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
@@ -50,6 +50,7 @@ use util::nodemap::{DefIdMap, NodeMap, FxHashMap};

use std::collections::BTreeMap;
use std::iter;
use std::mem;

use syntax::attr;
use syntax::ast::*;

@@ -79,6 +80,9 @@ pub struct LoweringContext<'a> {
impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
bodies: FxHashMap<hir::BodyId, hir::Body>,

loop_scopes: Vec<NodeId>,
is_in_loop_condition: bool,

type_def_lifetime_params: DefIdMap<usize>,
}

@@ -112,6 +116,8 @@ pub fn lower_crate(sess: &Session,
trait_items: BTreeMap::new(),
impl_items: BTreeMap::new(),
bodies: FxHashMap(),
loop_scopes: Vec::new(),
is_in_loop_condition: false,
type_def_lifetime_params: DefIdMap(),
}.lower_crate(krate)
}

@@ -244,6 +250,55 @@ impl<'a> LoweringContext<'a> {
span
}

fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
// We're no longer in the base loop's condition; we're in another loop.
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;

let len = self.loop_scopes.len();
self.loop_scopes.push(loop_id);

let result = f(self);
assert_eq!(len + 1, self.loop_scopes.len(),
"Loop scopes should be added and removed in stack order");

self.loop_scopes.pop().unwrap();

self.is_in_loop_condition = was_in_loop_condition;

result
}

fn with_loop_condition_scope<T, F>(&mut self, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = true;

let result = f(self);

self.is_in_loop_condition = was_in_loop_condition;

result
}

fn with_new_loop_scopes<T, F>(&mut self, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;

let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
let result = f(self);
mem::replace(&mut self.loop_scopes, loop_scopes);

self.is_in_loop_condition = was_in_loop_condition;

result
}

fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
@@ -271,17 +326,24 @@ impl<'a> LoweringContext<'a> {
o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
}

fn lower_label(&mut self, id: NodeId, label: Option<Spanned<Ident>>) -> Option<hir::Label> {
label.map(|sp_ident| {
hir::Label {
span: sp_ident.span,
name: sp_ident.node.name,
loop_id: match self.expect_full_def(id) {
Def::Label(loop_id) => loop_id,
_ => DUMMY_NODE_ID
fn lower_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
-> hir::Destination
{
match destination {
Some((id, label_ident)) => hir::Destination {
ident: Some(label_ident),
loop_id: if let Def::Label(loop_id) = self.expect_full_def(id) {
hir::LoopIdResult::Ok(loop_id)
} else {
hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
}
},
None => hir::Destination {
ident: None,
loop_id: self.loop_scopes.last().map(|innermost_loop_id| Ok(*innermost_loop_id))
.unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)).into()
}
})
}
}

fn lower_attrs(&mut self, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {

@@ -992,15 +1054,17 @@ impl<'a> LoweringContext<'a> {
self.record_body(value, None))
}
ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
let body = self.lower_block(body);
let body = self.expr_block(body, ThinVec::new());
let body_id = self.record_body(body, Some(decl));
hir::ItemFn(self.lower_fn_decl(decl),
self.lower_unsafety(unsafety),
self.lower_constness(constness),
abi,
self.lower_generics(generics),
body_id)
self.with_new_loop_scopes(|this| {
let body = this.lower_block(body);
let body = this.expr_block(body, ThinVec::new());
let body_id = this.record_body(body, Some(decl));
hir::ItemFn(this.lower_fn_decl(decl),
this.lower_unsafety(unsafety),
this.lower_constness(constness),
abi,
this.lower_generics(generics),
body_id)
})
}
ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)),
ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)),

@@ -1562,13 +1626,17 @@ impl<'a> LoweringContext<'a> {
hir::ExprIf(P(self.lower_expr(cond)), self.lower_block(blk), else_opt)
}
ExprKind::While(ref cond, ref body, opt_ident) => {
hir::ExprWhile(P(self.lower_expr(cond)), self.lower_block(body),
self.lower_opt_sp_ident(opt_ident))
self.with_loop_scope(e.id, |this|
hir::ExprWhile(
this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
this.lower_block(body),
this.lower_opt_sp_ident(opt_ident)))
}
ExprKind::Loop(ref body, opt_ident) => {
hir::ExprLoop(self.lower_block(body),
self.lower_opt_sp_ident(opt_ident),
hir::LoopSource::Loop)
self.with_loop_scope(e.id, |this|
hir::ExprLoop(this.lower_block(body),
this.lower_opt_sp_ident(opt_ident),
hir::LoopSource::Loop))
}
ExprKind::Match(ref expr, ref arms) => {
hir::ExprMatch(P(self.lower_expr(expr)),

@@ -1576,12 +1644,14 @@ impl<'a> LoweringContext<'a> {
hir::MatchSource::Normal)
}
ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
self.with_parent_def(e.id, |this| {
let expr = this.lower_expr(body);
hir::ExprClosure(this.lower_capture_clause(capture_clause),
this.lower_fn_decl(decl),
this.record_body(expr, Some(decl)),
fn_decl_span)
self.with_new_loop_scopes(|this| {
this.with_parent_def(e.id, |this| {
let expr = this.lower_expr(body);
hir::ExprClosure(this.lower_capture_clause(capture_clause),
this.lower_fn_decl(decl),
this.record_body(expr, Some(decl)),
fn_decl_span)
})
})
}
ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk)),
@@ -1660,10 +1730,29 @@ impl<'a> LoweringContext<'a> {
hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional))
}
ExprKind::Break(opt_ident, ref opt_expr) => {
hir::ExprBreak(self.lower_label(e.id, opt_ident),
opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
hir::Destination {
ident: opt_ident,
loop_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into(),
}
} else {
self.lower_destination(opt_ident.map(|ident| (e.id, ident)))
};
hir::ExprBreak(
label_result,
opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
}
ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_label(e.id, opt_ident)),
ExprKind::Continue(opt_ident) =>
hir::ExprAgain(
if self.is_in_loop_condition && opt_ident.is_none() {
hir::Destination {
ident: opt_ident,
loop_id: Err(
hir::LoopIdError::UnlabeledCfInWhileCondition).into(),
}
} else {
self.lower_destination(opt_ident.map( |ident| (e.id, ident)))
}),
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
ExprKind::InlineAsm(ref asm) => {
let hir_asm = hir::InlineAsm {

@@ -1804,9 +1893,16 @@ impl<'a> LoweringContext<'a> {
// }
// }

// Note that the block AND the condition are evaluated in the loop scope.
// This is done to allow `break` from inside the condition of the loop.
let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| (
this.lower_block(body),
this.expr_break(e.span, ThinVec::new()),
this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))),
));

// `<pat> => <body>`
let pat_arm = {
let body = self.lower_block(body);
let body_expr = P(self.expr_block(body, ThinVec::new()));
let pat = self.lower_pat(pat);
self.arm(hir_vec![pat], body_expr)

@@ -1815,13 +1911,11 @@ impl<'a> LoweringContext<'a> {
// `_ => break`
let break_arm = {
let pat_under = self.pat_wild(e.span);
let break_expr = self.expr_break(e.span, ThinVec::new());
self.arm(hir_vec![pat_under], break_expr)
};

// `match <sub_expr> { ... }`
let arms = hir_vec![pat_arm, break_arm];
let sub_expr = P(self.lower_expr(sub_expr));
let match_expr = self.expr(e.span,
hir::ExprMatch(sub_expr,
arms,

@@ -1863,7 +1957,7 @@ impl<'a> LoweringContext<'a> {

// `::std::option::Option::Some(<pat>) => <body>`
let pat_arm = {
let body_block = self.lower_block(body);
let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body));
let body_expr = P(self.expr_block(body_block, ThinVec::new()));
let pat = self.lower_pat(pat);
let some_pat = self.pat_some(e.span, pat);

@@ -1873,7 +1967,8 @@ impl<'a> LoweringContext<'a> {

// `::std::option::Option::None => break`
let break_arm = {
let break_expr = self.expr_break(e.span, ThinVec::new());
let break_expr = self.with_loop_scope(e.id, |this|
this.expr_break(e.span, ThinVec::new()));
let pat = self.pat_none(e.span);
self.arm(hir_vec![pat], break_expr)
};

@@ -2151,7 +2246,8 @@ impl<'a> LoweringContext<'a> {
}

fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
P(self.expr(span, hir::ExprBreak(None, None), attrs))
let expr_break = hir::ExprBreak(self.lower_destination(None), None);
P(self.expr(span, expr_break, attrs))
}

fn expr_call(&mut self, span: Span, e: P<hir::Expr>, args: hir::HirVec<hir::Expr>)
@@ -36,7 +36,7 @@ use util::nodemap::{NodeMap, FxHashMap, FxHashSet};
use syntax_pos::{Span, ExpnId, DUMMY_SP};
use syntax::codemap::{self, Spanned};
use syntax::abi::Abi;
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};

@@ -959,9 +959,9 @@ pub enum Expr_ {
/// A referencing operation (`&a` or `&mut a`)
ExprAddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break
ExprBreak(Option<Label>, Option<P<Expr>>),
ExprBreak(Destination, Option<P<Expr>>),
/// A `continue`, with an optional label
ExprAgain(Option<Label>),
ExprAgain(Destination),
/// A `return`, with an optional value to be returned
ExprRet(Option<P<Expr>>),

@@ -1030,12 +1030,56 @@ pub enum LoopSource {
ForLoop,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdError {
OutsideLoopScope,
UnlabeledCfInWhileCondition,
UnresolvedLabel,
}

impl fmt::Display for LoopIdError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(match *self {
LoopIdError::OutsideLoopScope => "not inside loop scope",
LoopIdError::UnlabeledCfInWhileCondition =>
"unlabeled control flow (break or continue) in while condition",
LoopIdError::UnresolvedLabel => "label not found",
}, f)
}
}

// FIXME(cramertj) this should use `Result` once master compiles w/ a vesion of Rust where
// `Result` implements `Encodable`/`Decodable`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdResult {
Ok(NodeId),
Err(LoopIdError),
}
impl Into<Result<NodeId, LoopIdError>> for LoopIdResult {
fn into(self) -> Result<NodeId, LoopIdError> {
match self {
LoopIdResult::Ok(ok) => Ok(ok),
LoopIdResult::Err(err) => Err(err),
}
}
}
impl From<Result<NodeId, LoopIdError>> for LoopIdResult {
fn from(res: Result<NodeId, LoopIdError>) -> Self {
match res {
Ok(ok) => LoopIdResult::Ok(ok),
Err(err) => LoopIdResult::Err(err),
}
}
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Label {
pub span: Span,
pub name: Name,
pub loop_id: NodeId
pub struct Destination {
// This is `Some(_)` iff there is an explicit user-specified `label
pub ident: Option<Spanned<Ident>>,

// These errors are caught and then reported during the diagnostics pass in
// librustc_passes/loops.rs
pub loop_id: LoopIdResult,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -1354,11 +1354,11 @@ impl<'a> State<'a> {
hir::ExprPath(ref qpath) => {
self.print_qpath(qpath, true)?
}
hir::ExprBreak(opt_label, ref opt_expr) => {
hir::ExprBreak(label, ref opt_expr) => {
word(&mut self.s, "break")?;
space(&mut self.s)?;
if let Some(label) = opt_label {
self.print_name(label.name)?;
if let Some(label_ident) = label.ident {
self.print_name(label_ident.node.name)?;
space(&mut self.s)?;
}
if let Some(ref expr) = *opt_expr {

@@ -1366,11 +1366,11 @@ impl<'a> State<'a> {
space(&mut self.s)?;
}
}
hir::ExprAgain(opt_label) => {
hir::ExprAgain(label) => {
word(&mut self.s, "continue")?;
space(&mut self.s)?;
if let Some(label) = opt_label {
self.print_name(label.name)?;
if let Some(label_ident) = label.ident {
self.print_name(label_ident.node.name)?;
space(&mut self.s)?
}
}
@@ -379,40 +379,41 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
values: Option<ValuePairs<'tcx>>,
terr: &TypeError<'tcx>)
{
let expected_found = match values {
None => None,
Some(values) => match self.values_str(&values) {
Some((expected, found)) => Some((expected, found)),
None => {
// Derived error. Cancel the emitter.
self.tcx.sess.diagnostic().cancel(diag);
return
}
let (expected_found, is_simple_error) = match values {
None => (None, false),
Some(values) => {
let is_simple_error = match values {
ValuePairs::Types(exp_found) => {
exp_found.expected.is_primitive() && exp_found.found.is_primitive()
}
_ => false,
};
let vals = match self.values_str(&values) {
Some((expected, found)) => Some((expected, found)),
None => {
// Derived error. Cancel the emitter.
self.tcx.sess.diagnostic().cancel(diag);
return
}
};
(vals, is_simple_error)
}
};

let span = cause.span;

if let Some((expected, found)) = expected_found {
let is_simple_error = if let &TypeError::Sorts(ref values) = terr {
values.expected.is_primitive() && values.found.is_primitive()
} else {
false
};

if !is_simple_error {
if expected == found {
if let &TypeError::Sorts(ref values) = terr {
diag.note_expected_found_extra(
&"type", &expected, &found,
&format!(" ({})", values.expected.sort_string(self.tcx)),
&format!(" ({})", values.found.sort_string(self.tcx)));
} else {
diag.note_expected_found(&"type", &expected, &found);
}
} else {
match (terr, is_simple_error, expected == found) {
(&TypeError::Sorts(ref values), false, true) => {
diag.note_expected_found_extra(
&"type", &expected, &found,
&format!(" ({})", values.expected.sort_string(self.tcx)),
&format!(" ({})", values.found.sort_string(self.tcx)));
}
(_, false, _) => {
diag.note_expected_found(&"type", &expected, &found);
}
_ => (),
}
}
@@ -715,6 +715,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
adjustment::Adjust::NeverToAny |
adjustment::Adjust::ReifyFnPointer |
adjustment::Adjust::UnsafeFnPointer |
adjustment::Adjust::ClosureFnPointer |
adjustment::Adjust::MutToConstPointer => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
@@ -675,23 +675,6 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
Ok(())
}

fn find_loop_scope(&self,
opt_label: Option<hir::Label>,
sp: Span)
-> NodeId {
match opt_label {
Some(label) => label.loop_id,
None => {
// Vanilla 'break' or 'continue', so use the enclosing
// loop scope
if self.loop_scope.is_empty() {
span_bug!(sp, "break outside loop");
} else {
*self.loop_scope.last().unwrap()
}
}
}
}

#[allow(unused_must_use)]
fn ln_str(&self, ln: LiveNode) -> String {

@@ -1018,9 +1001,12 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
}

hir::ExprBreak(opt_label, ref opt_expr) => {
hir::ExprBreak(label, ref opt_expr) => {
// Find which label this break jumps to
let sc = self.find_loop_scope(opt_label, expr.span);
let sc = match label.loop_id.into() {
Ok(loop_id) => loop_id,
Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
};

// Now that we know the label we're going to,
// look it up in the break loop nodes table

@@ -1031,9 +1017,13 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
}
}

hir::ExprAgain(opt_label) => {
hir::ExprAgain(label) => {
// Find which label this expr continues to
let sc = self.find_loop_scope(opt_label, expr.span);
let sc = match label.loop_id.into() {
Ok(loop_id) => loop_id,
Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
};

// Now that we know the label we're going to,
// look it up in the continue loop nodes table

@@ -1297,12 +1287,13 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
debug!("propagate_through_loop: using id for loop body {} {}",
expr.id, self.ir.tcx.hir.node_to_pretty_string(body.id));

let cond_ln = match kind {
LoopLoop => ln,
WhileLoop(ref cond) => self.propagate_through_expr(&cond, ln),
};
let body_ln = self.with_loop_nodes(expr.id, succ, ln, |this| {
this.propagate_through_block(body, cond_ln)
let (cond_ln, body_ln) = self.with_loop_nodes(expr.id, succ, ln, |this| {
let cond_ln = match kind {
LoopLoop => ln,
WhileLoop(ref cond) => this.propagate_through_expr(&cond, ln),
};
let body_ln = this.propagate_through_block(body, cond_ln);
(cond_ln, body_ln)
});

// repeat until fixed point is reached:
@@ -464,6 +464,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
adjustment::Adjust::NeverToAny |
adjustment::Adjust::ReifyFnPointer |
adjustment::Adjust::UnsafeFnPointer |
adjustment::Adjust::ClosureFnPointer |
adjustment::Adjust::MutToConstPointer |
adjustment::Adjust::DerefRef {..} => {
debug!("cat_expr({:?}): {:?}",
@@ -1022,6 +1022,9 @@ pub enum CastKind {
/// Convert unique, zero-sized type for a fn to fn()
ReifyFnPointer,

/// Convert non capturing closure to fn()
ClosureFnPointer,

/// Convert safe fn() to unsafe fn()
UnsafeFnPointer,
@@ -51,7 +51,7 @@ pub struct Config {
pub uint_type: UintTy,
}

#[derive(Clone)]
#[derive(Clone, Hash)]
pub enum Sanitizer {
Address,
Leak,

@@ -288,7 +288,7 @@ top_level_options!(
// much sense: The search path can stay the same while the
// things discovered there might have changed on disk.
search_paths: SearchPaths [TRACKED],
libs: Vec<(String, Option<String>, cstore::NativeLibraryKind)> [TRACKED],
libs: Vec<(String, Option<String>, Option<cstore::NativeLibraryKind>)> [TRACKED],
maybe_sysroot: Option<PathBuf> [TRACKED],

target_triple: String [TRACKED],

@@ -804,6 +804,8 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
"save all temporary output files during compilation"),
rpath: bool = (false, parse_bool, [UNTRACKED],
"set rpath values in libs/exes"),
overflow_checks: Option<bool> = (None, parse_opt_bool, [TRACKED],
"use overflow checks for integer arithmetic"),
no_prepopulate_passes: bool = (false, parse_bool, [TRACKED],
"don't pre-populate the pass manager with a list of passes"),
no_vectorize_loops: bool = (false, parse_bool, [TRACKED],

@@ -970,7 +972,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"encode MIR of all functions into the crate metadata"),
osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
"pass `-install_name @rpath/...` to the OSX linker"),
sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [UNTRACKED],
sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],
"Use a sanitizer"),
}

@@ -1495,18 +1497,18 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
let mut parts = s.splitn(2, '=');
let kind = parts.next().unwrap();
let (name, kind) = match (parts.next(), kind) {
(None, name) |
(Some(name), "dylib") => (name, cstore::NativeUnknown),
(Some(name), "framework") => (name, cstore::NativeFramework),
(Some(name), "static") => (name, cstore::NativeStatic),
(Some(name), "static-nobundle") => (name, cstore::NativeStaticNobundle),
(None, name) => (name, None),
(Some(name), "dylib") => (name, Some(cstore::NativeUnknown)),
(Some(name), "framework") => (name, Some(cstore::NativeFramework)),
(Some(name), "static") => (name, Some(cstore::NativeStatic)),
(Some(name), "static-nobundle") => (name, Some(cstore::NativeStaticNobundle)),
(_, s) => {
early_error(error_format, &format!("unknown library kind `{}`, expected \
one of dylib, framework, or static",
s));
}
};
if kind == cstore::NativeStaticNobundle && !nightly_options::is_nightly_build() {
if kind == Some(cstore::NativeStaticNobundle) && !nightly_options::is_nightly_build() {
early_error(error_format, &format!("the library kind 'static-nobundle' is only \
accepted on the nightly compiler"));
}

@@ -1728,7 +1730,7 @@ mod dep_tracking {
use std::path::PathBuf;
use std::collections::hash_map::DefaultHasher;
use super::{Passes, CrateType, OptLevel, DebugInfoLevel,
OutputTypes, Externs, ErrorOutputType};
OutputTypes, Externs, ErrorOutputType, Sanitizer};
use syntax::feature_gate::UnstableFeatures;
use rustc_back::PanicStrategy;

@@ -1772,6 +1774,7 @@ mod dep_tracking {
impl_dep_tracking_hash_via_hash!(Option<PanicStrategy>);
impl_dep_tracking_hash_via_hash!(Option<lint::Level>);
impl_dep_tracking_hash_via_hash!(Option<PathBuf>);
impl_dep_tracking_hash_via_hash!(Option<cstore::NativeLibraryKind>);
impl_dep_tracking_hash_via_hash!(CrateType);
impl_dep_tracking_hash_via_hash!(PanicStrategy);
impl_dep_tracking_hash_via_hash!(Passes);

@@ -1781,12 +1784,14 @@ mod dep_tracking {
impl_dep_tracking_hash_via_hash!(Externs);
impl_dep_tracking_hash_via_hash!(OutputTypes);
impl_dep_tracking_hash_via_hash!(cstore::NativeLibraryKind);
impl_dep_tracking_hash_via_hash!(Sanitizer);
impl_dep_tracking_hash_via_hash!(Option<Sanitizer>);

impl_dep_tracking_hash_for_sortable_vec_of!(String);
impl_dep_tracking_hash_for_sortable_vec_of!(CrateType);
impl_dep_tracking_hash_for_sortable_vec_of!((String, lint::Level));
impl_dep_tracking_hash_for_sortable_vec_of!((String, Option<String>,
cstore::NativeLibraryKind));
Option<cstore::NativeLibraryKind>));
impl DepTrackingHash for SearchPaths {
fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) {
let mut elems: Vec<_> = self
@@ -2230,24 +2235,24 @@ mod tests {
let mut v4 = super::basic_options();

// Reference
v1.libs = vec![(String::from("a"), None, cstore::NativeStatic),
(String::from("b"), None, cstore::NativeFramework),
(String::from("c"), None, cstore::NativeUnknown)];
v1.libs = vec![(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("b"), None, Some(cstore::NativeFramework)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

// Change label
v2.libs = vec![(String::from("a"), None, cstore::NativeStatic),
(String::from("X"), None, cstore::NativeFramework),
(String::from("c"), None, cstore::NativeUnknown)];
v2.libs = vec![(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("X"), None, Some(cstore::NativeFramework)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

// Change kind
v3.libs = vec![(String::from("a"), None, cstore::NativeStatic),
(String::from("b"), None, cstore::NativeStatic),
(String::from("c"), None, cstore::NativeUnknown)];
v3.libs = vec![(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("b"), None, Some(cstore::NativeStatic)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

// Change new-name
v4.libs = vec![(String::from("a"), None, cstore::NativeStatic),
(String::from("b"), Some(String::from("X")), cstore::NativeFramework),
(String::from("c"), None, cstore::NativeUnknown)];
v4.libs = vec![(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("b"), Some(String::from("X")), Some(cstore::NativeFramework)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());

@@ -2267,17 +2272,17 @@ mod tests {
let mut v3 = super::basic_options();

// Reference
v1.libs = vec![(String::from("a"), None, cstore::NativeStatic),
(String::from("b"), None, cstore::NativeFramework),
(String::from("c"), None, cstore::NativeUnknown)];
v1.libs = vec![(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("b"), None, Some(cstore::NativeFramework)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

v2.libs = vec![(String::from("b"), None, cstore::NativeFramework),
(String::from("a"), None, cstore::NativeStatic),
(String::from("c"), None, cstore::NativeUnknown)];
v2.libs = vec![(String::from("b"), None, Some(cstore::NativeFramework)),
(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("c"), None, Some(cstore::NativeUnknown))];

v3.libs = vec![(String::from("c"), None, cstore::NativeUnknown),
(String::from("a"), None, cstore::NativeStatic),
(String::from("b"), None, cstore::NativeFramework)];
v3.libs = vec![(String::from("c"), None, Some(cstore::NativeUnknown)),
(String::from("a"), None, Some(cstore::NativeStatic)),
(String::from("b"), None, Some(cstore::NativeFramework))];

assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());

@@ -2345,6 +2350,10 @@ mod tests {
opts.cg.llvm_args = vec![String::from("1"), String::from("2")];
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());

opts = reference.clone();
opts.cg.overflow_checks = Some(true);
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());

opts = reference.clone();
opts.cg.no_prepopulate_passes = true;
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
@@ -372,6 +372,11 @@ impl Session {
pub fn nonzeroing_move_hints(&self) -> bool {
self.opts.debugging_opts.enable_nonzeroing_move_hints
}
pub fn overflow_checks(&self) -> bool {
self.opts.cg.overflow_checks
.or(self.opts.debugging_opts.force_overflow_checks)
.unwrap_or(self.opts.debug_assertions)
}

pub fn must_not_eliminate_frame_pointers(&self) -> bool {
self.opts.debuginfo != DebugInfoLevel::NoDebugInfo ||
@@ -33,6 +33,9 @@ pub enum Adjust<'tcx> {
/// Go from a safe fn pointer to an unsafe fn pointer.
UnsafeFnPointer,

// Go from a non-capturing closure to an fn pointer.
ClosureFnPointer,

/// Go from a mut raw pointer to a const raw pointer.
MutToConstPointer,

@@ -120,6 +123,7 @@ impl<'tcx> Adjustment<'tcx> {

Adjust::ReifyFnPointer |
Adjust::UnsafeFnPointer |
Adjust::ClosureFnPointer |
Adjust::MutToConstPointer |
Adjust::DerefRef {..} => false,
}
@@ -200,6 +200,7 @@ supported_targets! {
("armv7s-apple-ios", armv7s_apple_ios),

("x86_64-sun-solaris", x86_64_sun_solaris),
("sparcv9-sun-solaris", sparcv9_sun_solaris),

("x86_64-pc-windows-gnu", x86_64_pc_windows_gnu),
("i686-pc-windows-gnu", i686_pc_windows_gnu),
src/librustc_back/target/sparcv9_sun_solaris.rs (new file, 35 lines)
@@ -0,0 +1,35 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use target::{Target, TargetResult};

pub fn target() -> TargetResult {
let mut base = super::solaris_base::opts();
base.pre_link_args.push("-m64".to_string());
// llvm calls this "v9"
base.cpu = "v9".to_string();
base.max_atomic_width = Some(64);

Ok(Target {
llvm_target: "sparcv9-sun-solaris".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "64".to_string(),
data_layout: "E-m:e-i64:64-n32:64-S128".to_string(),
// Use "sparc64" instead of "sparcv9" here, since the former is already
// used widely in the source base. If we ever needed ABI
// differentiation from the sparc64, we could, but that would probably
// just be confusing.
arch: "sparc64".to_string(),
target_os: "solaris".to_string(),
target_env: "".to_string(),
target_vendor: "sun".to_string(),
options: base,
})
}
@@ -196,6 +196,28 @@ impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
}
}).clone()
}

fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
if self.tcx.sess.features.borrow().never_type {
ty.is_uninhabited_from(self.module, self.tcx)
} else {
false
}
}

fn is_variant_uninhabited(&self,
variant: &'tcx ty::VariantDef,
substs: &'tcx ty::subst::Substs<'tcx>) -> bool
{
if self.tcx.sess.features.borrow().never_type {
let forest = variant.uninhabited_from(
&mut FxHashMap::default(), self.tcx, substs, AdtKind::Enum
);
forest.contains(self.tcx, self.module)
} else {
false
}
}
}

#[derive(Clone, Debug, PartialEq)]
@@ -379,48 +401,32 @@ impl<'tcx> Witness<'tcx> {
fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
pcx: PatternContext<'tcx>) -> Vec<Constructor>
{
let check_inhabited = cx.tcx.sess.features.borrow().never_type;
debug!("all_constructors({:?})", pcx.ty);
match pcx.ty.sty {
ty::TyBool =>
[true, false].iter().map(|b| ConstantValue(ConstVal::Bool(*b))).collect(),
ty::TySlice(ref sub_ty) => {
if sub_ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited
{
if cx.is_uninhabited(sub_ty) {
vec![Slice(0)]
} else {
(0..pcx.max_slice_length+1).map(|length| Slice(length)).collect()
}
}
ty::TyArray(ref sub_ty, length) => {
if length == 0 || !(sub_ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited)
{
vec![Slice(length)]
} else {
if length > 0 && cx.is_uninhabited(sub_ty) {
vec![]
} else {
vec![Slice(length)]
}
}
ty::TyAdt(def, substs) if def.is_enum() && def.variants.len() != 1 => {
def.variants.iter().filter_map(|v| {
let mut visited = FxHashMap::default();
let forest = v.uninhabited_from(&mut visited,
cx.tcx, substs,
AdtKind::Enum);
if forest.contains(cx.tcx, cx.module)
&& check_inhabited
{
None
} else {
Some(Variant(v.did))
}
}).collect()
def.variants.iter()
.filter(|v| !cx.is_variant_uninhabited(v, substs))
.map(|v| Variant(v.did))
.collect()
}
_ => {
if pcx.ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited
{
if cx.is_uninhabited(pcx.ty) {
vec![]
} else {
vec![Single]
@@ -564,7 +570,6 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,

assert!(rows.iter().all(|r| r.len() == v.len()));

let pcx = PatternContext {
ty: rows.iter().map(|r| r[0].ty).find(|ty| !ty.references_error())
.unwrap_or(v[0].ty),

@@ -590,7 +595,6 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
let missing_ctors: Vec<Constructor> = all_ctors.iter().filter(|c| {
!used_ctors.contains(*c)
}).cloned().collect();
debug!("missing_ctors = {:?}", missing_ctors);

// `missing_ctors` is the set of constructors from the same type as the
// first column of `matrix` that are matched only by wildcard patterns

@@ -599,8 +603,23 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
// Therefore, if there is some pattern that is unmatched by `matrix`,
// it will still be unmatched if the first constructor is replaced by
// any of the constructors in `missing_ctors`
//
// However, if our scrutinee is *privately* an empty enum, we
// must treat it as though it had an "unknown" constructor (in
// that case, all other patterns obviously can't be variants)
// to avoid exposing its emptyness. See the `match_privately_empty`
// test for details.
//
// FIXME: currently the only way I know of something can
// be a privately-empty enum is when the never_type
// feature flag is not present, so this is only
// needed for that case.

if missing_ctors.is_empty() {
let is_privately_empty =
all_ctors.is_empty() && !cx.is_uninhabited(pcx.ty);
debug!("missing_ctors={:?} is_privately_empty={:?}", missing_ctors,
is_privately_empty);
if missing_ctors.is_empty() && !is_privately_empty {
all_ctors.into_iter().map(|c| {
is_useful_specialized(cx, matrix, v, c.clone(), pcx.ty, witness)
}).find(|result| result.is_useful()).unwrap_or(NotUseful)

@@ -649,6 +668,7 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>(
lty: Ty<'tcx>,
witness: WitnessPreference) -> Usefulness<'tcx>
{
debug!("is_useful_specialized({:?}, {:?}, {:?})", v, ctor, lty);
let sub_pat_tys = constructor_sub_pattern_tys(cx, &ctor, lty);
let wild_patterns_owned: Vec<_> = sub_pat_tys.iter().map(|ty| {
Pattern {

@@ -754,7 +774,19 @@ fn constructor_sub_pattern_tys<'a, 'tcx: 'a>(cx: &MatchCheckCtxt<'a, 'tcx>,
ty::TyRef(_, ref ty_and_mut) => vec![ty_and_mut.ty],
ty::TyAdt(adt, substs) => {
adt.variants[ctor.variant_index_for_adt(adt)].fields.iter().map(|field| {
field.ty(cx.tcx, substs)
let is_visible = adt.is_enum()
|| field.vis.is_accessible_from(cx.module, cx.tcx);
if is_visible {
field.ty(cx.tcx, substs)
} else {
// Treat all non-visible fields as nil. They
// can't appear in any other pattern from
// this match (because they are private),
// so their type does not matter - but
// we don't want to know they are
// uninhabited.
cx.tcx.mk_nil()
}
}).collect()
}
_ => vec![],
@@ -177,6 +177,31 @@ impl<'a, 'tcx> MatchVisitor<'a, 'tcx> {
// Fourth, check for unreachable arms.
check_arms(cx, &inlined_arms, source);

// Then, if the match has no arms, check whether the scrutinee
// is uninhabited.
let pat_ty = self.tables.node_id_to_type(scrut.id);
let module = self.tcx.hir.local_def_id(self.tcx.hir.get_module_parent(scrut.id));
if inlined_arms.is_empty() {
let scrutinee_is_uninhabited = if self.tcx.sess.features.borrow().never_type {
pat_ty.is_uninhabited_from(module, self.tcx)
} else {
self.conservative_is_uninhabited(pat_ty)
};
if !scrutinee_is_uninhabited {
// We know the type is inhabited, so this must be wrong
let mut err = create_e0004(self.tcx.sess, scrut.span,
format!("non-exhaustive patterns: type {} \
is non-empty",
pat_ty));
span_help!(&mut err, scrut.span,
"Please ensure that all possible cases are being handled; \
possibly adding wildcards or more match arms.");
err.emit();
}
// If the type *is* uninhabited, it's vacuously exhaustive
return;
}

let matrix: Matrix = inlined_arms
.iter()
.filter(|&&(_, guard)| guard.is_none())

@@ -188,6 +213,15 @@ impl<'a, 'tcx> MatchVisitor<'a, 'tcx> {
})
}

fn conservative_is_uninhabited(&self, scrutinee_ty: Ty<'tcx>) -> bool {
// "rustc-1.0-style" uncontentious uninhabitableness check
match scrutinee_ty.sty {
ty::TyNever => true,
ty::TyAdt(def, _) => def.variants.is_empty(),
_ => false
}
}

fn check_irrefutable(&self, pat: &Pat, is_fn_arg: bool) {
let origin = if is_fn_arg {
"function argument"
@@ -63,8 +63,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
hash_spans: bool,
hash_bodies: bool)
-> Self {
let check_overflow = tcx.sess.opts.debugging_opts.force_overflow_checks
.unwrap_or(tcx.sess.opts.debug_assertions);
let check_overflow = tcx.sess.overflow_checks();

StrictVersionHashVisitor {
st: st,

@@ -338,8 +337,10 @@ fn saw_expr<'a>(node: &'a Expr_,
ExprIndex(..) => (SawExprIndex, true),
ExprPath(_) => (SawExprPath, false),
ExprAddrOf(m, _) => (SawExprAddrOf(m), false),
ExprBreak(label, _) => (SawExprBreak(label.map(|l| l.name.as_str())), false),
ExprAgain(label) => (SawExprAgain(label.map(|l| l.name.as_str())), false),
ExprBreak(label, _) => (SawExprBreak(label.ident.map(|i|
i.node.name.as_str())), false),
ExprAgain(label) => (SawExprAgain(label.ident.map(|i|
i.node.name.as_str())), false),
ExprRet(..) => (SawExprRet, false),
ExprInlineAsm(ref a,..) => (SawExprInlineAsm(StableInlineAsm(a)), false),
ExprStruct(..) => (SawExprStruct, false),
@@ -23,6 +23,11 @@ use super::directory::DefPathIndex;
pub struct SerializedDepGraph {
pub edges: Vec<SerializedEdgeSet>,

/// These are output nodes that have no incoming edges. We track
/// these separately so that when we reload all edges, we don't
/// lose track of these nodes.
pub bootstrap_outputs: Vec<DepNode<DefPathIndex>>,

/// These are hashes of two things:
/// - the HIR nodes in this crate
/// - the metadata nodes from dependent crates we use
@@ -184,6 +184,18 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}

// Recreate bootstrap outputs, which are outputs that have no incoming edges (and hence cannot
// be dirty).
for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
if let Some(n) = retraced.map(bootstrap_output) {
if let DepNode::WorkProduct(ref wp) = n {
clean_work_products.insert(wp.clone());
}

tcx.dep_graph.with_task(n, || ()); // create the node with no inputs
}
}

// Subtle. Sometimes we have intermediate nodes that we can't recreate in the new graph.
// This is pretty unusual but it arises in a scenario like this:
//
@ -11,7 +11,7 @@
use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir::def_id::DefId;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::Graph;
use rustc_data_structures::graph::{Graph, NodeIndex};

use super::hash::*;
use ich::Fingerprint;
@ -28,6 +28,14 @@ pub struct Predecessors<'query> {
    // of the graph down.
    pub reduced_graph: Graph<&'query DepNode<DefId>, ()>,

    // These are output nodes that have no incoming edges. We have to
    // track these specially because, when we load the data back up
    // again, we want to make sure and recreate these nodes (we want
    // to recreate the nodes where all incoming edges are clean; but
    // since we ordinarily just serialize edges, we wind up just
    // forgetting that bootstrap outputs even exist in that case.)
    pub bootstrap_outputs: Vec<&'query DepNode<DefId>>,

    // For the inputs (hir/foreign-metadata), we include hashes.
    pub hashes: FxHashMap<&'query DepNode<DefId>, Fingerprint>,
}
@ -57,7 +65,7 @@ impl<'q> Predecessors<'q> {

        // Reduce the graph to the most important nodes.
        let compress::Reduction { graph, input_nodes } =
            compress::reduce_graph(&query.graph, HashContext::is_hashable, is_output);
            compress::reduce_graph(&query.graph, HashContext::is_hashable, |n| is_output(n));

        let mut hashes = FxHashMap();
        for input_index in input_nodes {
@ -67,8 +75,17 @@ impl<'q> Predecessors<'q> {
                .or_insert_with(|| hcx.hash(input).unwrap());
        }

        let bootstrap_outputs: Vec<&'q DepNode<DefId>> =
            (0 .. graph.len_nodes())
                .map(NodeIndex)
                .filter(|&n| graph.incoming_edges(n).next().is_none())
                .map(|n| *graph.node_data(n))
                .filter(|n| is_output(n))
                .collect();

        Predecessors {
            reduced_graph: graph,
            bootstrap_outputs: bootstrap_outputs,
            hashes: hashes,
        }
    }
@ -204,11 +204,15 @@ pub fn encode_dep_graph(preds: &Predecessors,
    }

    // Create the serialized dep-graph.
    let bootstrap_outputs = preds.bootstrap_outputs.iter()
        .map(|n| builder.map(n))
        .collect();
    let edges = edges.into_iter()
        .map(|(k, v)| SerializedEdgeSet { source: k, targets: v })
        .collect();
    let graph = SerializedDepGraph {
        edges: edges,
        bootstrap_outputs,
        edges,
        hashes: preds.hashes
            .iter()
            .map(|(&dep_node, &hash)| {
@ -221,6 +225,7 @@ pub fn encode_dep_graph(preds: &Predecessors,
    };

    if tcx.sess.opts.debugging_opts.incremental_info {
        println!("incremental: {} nodes in reduced dep-graph", preds.reduced_graph.len_nodes());
        println!("incremental: {} edges in serialized dep-graph", graph.edges.len());
        println!("incremental: {} hashes in serialized dep-graph", graph.hashes.len());
    }
@ -1077,10 +1077,20 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {
        let mut found = false;
        for lib in self.cstore.get_used_libraries().borrow_mut().iter_mut() {
            if lib.name == name as &str {
                lib.kind = kind;
                let mut changed = false;
                if let Some(k) = kind {
                    lib.kind = k;
                    changed = true;
                }
                if let &Some(ref new_name) = new_name {
                    lib.name = Symbol::intern(new_name);
                    changed = true;
                }
                if !changed {
                    self.sess.warn(&format!("redundant linker flag specified for library `{}`",
                                            name));
                }

                found = true;
            }
        }
@ -1089,7 +1099,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {
            let new_name = new_name.as_ref().map(|s| &**s); // &Option<String> -> Option<&str>
            let lib = NativeLibrary {
                name: Symbol::intern(new_name.unwrap_or(name)),
                kind: kind,
                kind: if let Some(k) = kind { k } else { cstore::NativeUnknown },
                cfg: None,
                foreign_items: Vec::new(),
            };
|
@ -39,6 +39,7 @@ use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque};
|
||||
use syntax::attr;
|
||||
use syntax::ast;
|
||||
use syntax::codemap;
|
||||
use syntax::ext::base::MacroKind;
|
||||
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP};
|
||||
|
||||
pub struct DecodeContext<'a, 'tcx: 'a> {
|
||||
@ -434,7 +435,7 @@ impl<'tcx> EntryKind<'tcx> {
|
||||
EntryKind::Variant(_) => Def::Variant(did),
|
||||
EntryKind::Trait(_) => Def::Trait(did),
|
||||
EntryKind::Enum(..) => Def::Enum(did),
|
||||
EntryKind::MacroDef(_) => Def::Macro(did),
|
||||
EntryKind::MacroDef(_) => Def::Macro(did, MacroKind::Bang),
|
||||
|
||||
EntryKind::ForeignMod |
|
||||
EntryKind::Impl(_) |
|
||||
@ -483,9 +484,11 @@ impl<'a, 'tcx> CrateMetadata {
|
||||
}
|
||||
|
||||
pub fn get_def(&self, index: DefIndex) -> Option<Def> {
|
||||
match self.is_proc_macro(index) {
|
||||
true => Some(Def::Macro(self.local_def_id(index))),
|
||||
false => self.entry(index).kind.to_def(self.local_def_id(index)),
|
||||
if !self.is_proc_macro(index) {
|
||||
self.entry(index).kind.to_def(self.local_def_id(index))
|
||||
} else {
|
||||
let kind = self.proc_macros.as_ref().unwrap()[index.as_usize() - 1].1.kind();
|
||||
Some(Def::Macro(self.local_def_id(index), kind))
|
||||
}
|
||||
}
|
||||
|
||||
@ -688,8 +691,14 @@ impl<'a, 'tcx> CrateMetadata {
|
||||
{
|
||||
if let Some(ref proc_macros) = self.proc_macros {
|
||||
if id == CRATE_DEF_INDEX {
|
||||
for (id, &(name, _)) in proc_macros.iter().enumerate() {
|
||||
let def = Def::Macro(DefId { krate: self.cnum, index: DefIndex::new(id + 1) });
|
||||
for (id, &(name, ref ext)) in proc_macros.iter().enumerate() {
|
||||
let def = Def::Macro(
|
||||
DefId {
|
||||
krate: self.cnum,
|
||||
index: DefIndex::new(id + 1)
|
||||
},
|
||||
ext.kind()
|
||||
);
|
||||
callback(def::Export { name: name, def: def });
|
||||
}
|
||||
}
|
||||
|
@ -99,6 +99,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
ExprKind::Use { .. } |
|
||||
ExprKind::NeverToAny { .. } |
|
||||
ExprKind::ReifyFnPointer { .. } |
|
||||
ExprKind::ClosureFnPointer { .. } |
|
||||
ExprKind::UnsafeFnPointer { .. } |
|
||||
ExprKind::Unsize { .. } |
|
||||
ExprKind::Repeat { .. } |
|
||||
|
@ -112,6 +112,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
let source = unpack!(block = this.as_operand(block, source));
|
||||
block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty))
|
||||
}
|
||||
ExprKind::ClosureFnPointer { source } => {
|
||||
let source = unpack!(block = this.as_operand(block, source));
|
||||
block.and(Rvalue::Cast(CastKind::ClosureFnPointer, source, expr.ty))
|
||||
}
|
||||
ExprKind::Unsize { source } => {
|
||||
let source = unpack!(block = this.as_operand(block, source));
|
||||
block.and(Rvalue::Cast(CastKind::Unsize, source, expr.ty))
|
||||
|
@ -70,6 +70,7 @@ impl Category {
|
||||
ExprKind::Cast { .. } |
|
||||
ExprKind::Use { .. } |
|
||||
ExprKind::ReifyFnPointer { .. } |
|
||||
ExprKind::ClosureFnPointer { .. } |
|
||||
ExprKind::UnsafeFnPointer { .. } |
|
||||
ExprKind::Unsize { .. } |
|
||||
ExprKind::Repeat { .. } |
|
||||
|
@ -244,6 +244,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
ExprKind::Cast { .. } |
|
||||
ExprKind::Use { .. } |
|
||||
ExprKind::ReifyFnPointer { .. } |
|
||||
ExprKind::ClosureFnPointer { .. } |
|
||||
ExprKind::UnsafeFnPointer { .. } |
|
||||
ExprKind::Unsize { .. } |
|
||||
ExprKind::Repeat { .. } |
|
||||
|
@ -385,22 +385,14 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
/// resolving `break` and `continue`.
|
||||
pub fn find_loop_scope(&mut self,
|
||||
span: Span,
|
||||
label: Option<CodeExtent>)
|
||||
label: CodeExtent)
|
||||
-> &mut LoopScope<'tcx> {
|
||||
let loop_scopes = &mut self.loop_scopes;
|
||||
match label {
|
||||
None => {
|
||||
// no label? return the innermost loop scope
|
||||
loop_scopes.iter_mut().rev().next()
|
||||
}
|
||||
Some(label) => {
|
||||
// otherwise, find the loop-scope with the correct id
|
||||
loop_scopes.iter_mut()
|
||||
.rev()
|
||||
.filter(|loop_scope| loop_scope.extent == label)
|
||||
.next()
|
||||
}
|
||||
}.unwrap_or_else(|| span_bug!(span, "no enclosing loop scope found?"))
|
||||
// find the loop-scope with the correct id
|
||||
self.loop_scopes.iter_mut()
|
||||
.rev()
|
||||
.filter(|loop_scope| loop_scope.extent == label)
|
||||
.next()
|
||||
.unwrap_or_else(|| span_bug!(span, "no enclosing loop scope found?"))
|
||||
}
|
||||
|
||||
/// Given a span and the current visibility scope, make a SourceInfo.
|
||||
|
@ -60,6 +60,15 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
kind: ExprKind::UnsafeFnPointer { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some((ty::adjustment::Adjust::ClosureFnPointer, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
temp_lifetime_was_shrunk: was_shrunk,
|
||||
ty: adjusted_ty,
|
||||
span: self.span,
|
||||
kind: ExprKind::ClosureFnPointer { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some((ty::adjustment::Adjust::NeverToAny, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
@ -605,14 +614,21 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
hir::ExprRet(ref v) => ExprKind::Return { value: v.to_ref() },
|
||||
hir::ExprBreak(label, ref value) => {
|
||||
ExprKind::Break {
|
||||
label: label.map(|label| cx.tcx.region_maps.node_extent(label.loop_id)),
|
||||
value: value.to_ref(),
|
||||
match label.loop_id.into() {
|
||||
Ok(loop_id) => ExprKind::Break {
|
||||
label: cx.tcx.region_maps.node_extent(loop_id),
|
||||
value: value.to_ref(),
|
||||
},
|
||||
Err(err) => bug!("invalid loop id for break: {}", err)
|
||||
}
|
||||
|
||||
}
|
||||
hir::ExprAgain(label) => {
|
||||
ExprKind::Continue {
|
||||
label: label.map(|label| cx.tcx.region_maps.node_extent(label.loop_id)),
|
||||
match label.loop_id.into() {
|
||||
Ok(loop_id) => ExprKind::Continue {
|
||||
label: cx.tcx.region_maps.node_extent(loop_id),
|
||||
},
|
||||
Err(err) => bug!("invalid loop id for continue: {}", err)
|
||||
}
|
||||
}
|
||||
hir::ExprMatch(ref discr, ref arms, _) => {
|
||||
|
@ -59,13 +59,8 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
|
||||
let mut check_overflow = attrs.iter()
|
||||
.any(|item| item.check_name("rustc_inherit_overflow_checks"));
|
||||
|
||||
// Respect -Z force-overflow-checks=on and -C debug-assertions.
|
||||
check_overflow |= infcx.tcx
|
||||
.sess
|
||||
.opts
|
||||
.debugging_opts
|
||||
.force_overflow_checks
|
||||
.unwrap_or(infcx.tcx.sess.opts.debug_assertions);
|
||||
// Respect -C overflow-checks.
|
||||
check_overflow |= infcx.tcx.sess.overflow_checks();
|
||||
|
||||
// Constants and const fn's always need overflow checks.
|
||||
check_overflow |= constness == hir::Constness::Const;
|
||||
|
@ -152,6 +152,9 @@ pub enum ExprKind<'tcx> {
|
||||
ReifyFnPointer {
|
||||
source: ExprRef<'tcx>,
|
||||
},
|
||||
ClosureFnPointer {
|
||||
source: ExprRef<'tcx>,
|
||||
},
|
||||
UnsafeFnPointer {
|
||||
source: ExprRef<'tcx>,
|
||||
},
|
||||
@ -205,11 +208,11 @@ pub enum ExprKind<'tcx> {
|
||||
arg: ExprRef<'tcx>,
|
||||
},
|
||||
Break {
|
||||
label: Option<CodeExtent>,
|
||||
label: CodeExtent,
|
||||
value: Option<ExprRef<'tcx>>,
|
||||
},
|
||||
Continue {
|
||||
label: Option<CodeExtent>,
|
||||
label: CodeExtent,
|
||||
},
|
||||
Return {
|
||||
value: Option<ExprRef<'tcx>>,
|
||||
|
@ -619,6 +619,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
|
||||
Rvalue::CheckedBinaryOp(..) |
|
||||
Rvalue::Cast(CastKind::ReifyFnPointer, ..) |
|
||||
Rvalue::Cast(CastKind::UnsafeFnPointer, ..) |
|
||||
Rvalue::Cast(CastKind::ClosureFnPointer, ..) |
|
||||
Rvalue::Cast(CastKind::Unsize, ..) => {}
|
||||
|
||||
Rvalue::Len(_) => {
|
||||
|
@ -447,6 +447,7 @@ fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Exp
|
||||
Some(Adjust::NeverToAny) |
|
||||
Some(Adjust::ReifyFnPointer) |
|
||||
Some(Adjust::UnsafeFnPointer) |
|
||||
Some(Adjust::ClosureFnPointer) |
|
||||
Some(Adjust::MutToConstPointer) => {}
|
||||
|
||||
Some(Adjust::DerefRef { autoderefs, .. }) => {
|
||||
|
@ -241,6 +241,22 @@ match 5u32 {
}
```
"##,

E0590: r##"
`break` or `continue` must include a label when used in the condition of a
`while` loop.

Example of erroneous code:

```compile_fail
while break {}
```

To fix this, add a label specifying which loop is being broken out of:
```
'foo: while break 'foo {}
```
"##
}

register_diagnostics! {
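A runnable restatement of the E0590 guidance above; only the surrounding `main` and the comments are added here, the erroneous and fixed forms are the ones from the diagnostic text:

```rust
fn main() {
    // Rejected with E0590 (unlabeled `break` in a `while` condition):
    // while break {}

    // Accepted: the label says exactly which loop the `break` leaves.
    'foo: while break 'foo {}
}
```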
@ -87,23 +87,26 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
|
||||
self.with_context(Closure, |v| v.visit_nested_body(b));
|
||||
}
|
||||
hir::ExprBreak(label, ref opt_expr) => {
|
||||
let loop_id = match label.loop_id.into() {
|
||||
Ok(loop_id) => loop_id,
|
||||
Err(hir::LoopIdError::OutsideLoopScope) => ast::DUMMY_NODE_ID,
|
||||
Err(hir::LoopIdError::UnlabeledCfInWhileCondition) => {
|
||||
self.emit_unlabled_cf_in_while_condition(e.span, "break");
|
||||
ast::DUMMY_NODE_ID
|
||||
},
|
||||
Err(hir::LoopIdError::UnresolvedLabel) => ast::DUMMY_NODE_ID,
|
||||
};
|
||||
|
||||
if opt_expr.is_some() {
|
||||
let loop_kind = if let Some(label) = label {
|
||||
if label.loop_id == ast::DUMMY_NODE_ID {
|
||||
None
|
||||
} else {
|
||||
Some(match self.hir_map.expect_expr(label.loop_id).node {
|
||||
hir::ExprWhile(..) => LoopKind::WhileLoop,
|
||||
hir::ExprLoop(_, _, source) => LoopKind::Loop(source),
|
||||
ref r => span_bug!(e.span,
|
||||
"break label resolved to a non-loop: {:?}", r),
|
||||
})
|
||||
}
|
||||
} else if let Loop(kind) = self.cx {
|
||||
Some(kind)
|
||||
} else {
|
||||
// `break` outside a loop - caught below
|
||||
let loop_kind = if loop_id == ast::DUMMY_NODE_ID {
|
||||
None
|
||||
} else {
|
||||
Some(match self.hir_map.expect_expr(loop_id).node {
|
||||
hir::ExprWhile(..) => LoopKind::WhileLoop,
|
||||
hir::ExprLoop(_, _, source) => LoopKind::Loop(source),
|
||||
ref r => span_bug!(e.span,
|
||||
"break label resolved to a non-loop: {:?}", r),
|
||||
})
|
||||
};
|
||||
match loop_kind {
|
||||
None | Some(LoopKind::Loop(hir::LoopSource::Loop)) => (),
|
||||
@ -117,9 +120,15 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.require_loop("break", e.span);
|
||||
}
|
||||
hir::ExprAgain(_) => self.require_loop("continue", e.span),
|
||||
hir::ExprAgain(label) => {
|
||||
if let Err(hir::LoopIdError::UnlabeledCfInWhileCondition) = label.loop_id.into() {
|
||||
self.emit_unlabled_cf_in_while_condition(e.span, "continue");
|
||||
}
|
||||
self.require_loop("continue", e.span)
|
||||
},
|
||||
_ => intravisit::walk_expr(self, e),
|
||||
}
|
||||
}
|
||||
@ -150,4 +159,12 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn emit_unlabled_cf_in_while_condition(&mut self, span: Span, cf_type: &str) {
|
||||
struct_span_err!(self.sess, span, E0590,
|
||||
"`break` or `continue` with no label in the condition of a `while` loop")
|
||||
.span_label(span,
|
||||
&format!("unlabeled `{}` in the condition of a `while` loop", cf_type))
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
|
@ -18,7 +18,6 @@ use rustc::hir::def::{Def, CtorKind};
|
||||
use rustc::util::nodemap::{NodeMap, NodeSet};
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::feature_gate::{GateIssue, emit_feature_err};
|
||||
use syntax_pos::Span;
|
||||
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
|
||||
use rustc::hir;
|
||||
@ -43,7 +42,7 @@ impl<'a, 'hir: 'a> Visitor<'hir> for CheckCrateVisitor<'a, 'hir> {
|
||||
match it.node {
|
||||
hir::ItemStatic(..) |
|
||||
hir::ItemConst(..) => {
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self, &it.span);
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self);
|
||||
recursion_visitor.visit_item(it);
|
||||
}
|
||||
hir::ItemEnum(ref enum_def, ref generics) => {
|
||||
@ -52,8 +51,7 @@ impl<'a, 'hir: 'a> Visitor<'hir> for CheckCrateVisitor<'a, 'hir> {
|
||||
// less redundant output.
|
||||
for variant in &enum_def.variants {
|
||||
if let Some(_) = variant.node.disr_expr {
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self,
|
||||
&variant.span);
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self);
|
||||
recursion_visitor.populate_enum_discriminants(enum_def);
|
||||
recursion_visitor.visit_variant(variant, generics, it.id);
|
||||
}
|
||||
@ -68,7 +66,7 @@ impl<'a, 'hir: 'a> Visitor<'hir> for CheckCrateVisitor<'a, 'hir> {
|
||||
match ti.node {
|
||||
hir::TraitItemKind::Const(_, ref default) => {
|
||||
if let Some(_) = *default {
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self, &ti.span);
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self);
|
||||
recursion_visitor.visit_trait_item(ti);
|
||||
}
|
||||
}
|
||||
@ -80,7 +78,7 @@ impl<'a, 'hir: 'a> Visitor<'hir> for CheckCrateVisitor<'a, 'hir> {
|
||||
fn visit_impl_item(&mut self, ii: &'hir hir::ImplItem) {
|
||||
match ii.node {
|
||||
hir::ImplItemKind::Const(..) => {
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self, &ii.span);
|
||||
let mut recursion_visitor = CheckItemRecursionVisitor::new(self);
|
||||
recursion_visitor.visit_impl_item(ii);
|
||||
}
|
||||
_ => {}
|
||||
@ -105,7 +103,6 @@ pub fn check_crate<'hir>(sess: &Session, hir_map: &hir_map::Map<'hir>) -> Compil
|
||||
}
|
||||
|
||||
struct CheckItemRecursionVisitor<'a, 'b: 'a, 'hir: 'b> {
|
||||
root_span: &'b Span,
|
||||
sess: &'b Session,
|
||||
hir_map: &'b hir_map::Map<'hir>,
|
||||
discriminant_map: &'a mut NodeMap<Option<hir::BodyId>>,
|
||||
@ -114,9 +111,8 @@ struct CheckItemRecursionVisitor<'a, 'b: 'a, 'hir: 'b> {
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, 'hir: 'b> CheckItemRecursionVisitor<'a, 'b, 'hir> {
|
||||
fn new(v: &'a mut CheckCrateVisitor<'b, 'hir>, span: &'b Span) -> Self {
|
||||
fn new(v: &'a mut CheckCrateVisitor<'b, 'hir>) -> Self {
|
||||
CheckItemRecursionVisitor {
|
||||
root_span: span,
|
||||
sess: v.sess,
|
||||
hir_map: v.hir_map,
|
||||
discriminant_map: &mut v.discriminant_map,
|
||||
@ -143,15 +139,7 @@ impl<'a, 'b: 'a, 'hir: 'b> CheckItemRecursionVisitor<'a, 'b, 'hir> {
|
||||
false
|
||||
}
|
||||
});
|
||||
if any_static {
|
||||
if !self.sess.features.borrow().static_recursion {
|
||||
emit_feature_err(&self.sess.parse_sess,
|
||||
"static_recursion",
|
||||
*self.root_span,
|
||||
GateIssue::Language,
|
||||
"recursive static");
|
||||
}
|
||||
} else {
|
||||
if !any_static {
|
||||
struct_span_err!(self.sess, span, E0265, "recursive constant")
|
||||
.span_label(span, &format!("recursion not allowed in constant"))
|
||||
.emit();
|
||||
|
@ -495,7 +495,7 @@ impl<'a> Resolver<'a> {
|
||||
|
||||
pub fn get_macro(&mut self, def: Def) -> Rc<SyntaxExtension> {
|
||||
let def_id = match def {
|
||||
Def::Macro(def_id) => def_id,
|
||||
Def::Macro(def_id, ..) => def_id,
|
||||
_ => panic!("Expected Def::Macro(..)"),
|
||||
};
|
||||
if let Some(ext) = self.macro_map.get(&def_id) {
|
||||
@ -537,7 +537,6 @@ impl<'a> Resolver<'a> {
|
||||
binding: &'a NameBinding<'a>,
|
||||
span: Span,
|
||||
allow_shadowing: bool) {
|
||||
self.macro_names.insert(name);
|
||||
if self.builtin_macros.insert(name, binding).is_some() && !allow_shadowing {
|
||||
let msg = format!("`{}` is already in scope", name);
|
||||
let note =
|
||||
|
@ -1265,7 +1265,7 @@ impl<'a> Resolver<'a> {
|
||||
ribs: PerNS {
|
||||
value_ns: vec![Rib::new(ModuleRibKind(graph_root))],
|
||||
type_ns: vec![Rib::new(ModuleRibKind(graph_root))],
|
||||
macro_ns: None,
|
||||
macro_ns: Some(vec![Rib::new(ModuleRibKind(graph_root))]),
|
||||
},
|
||||
label_ribs: Vec::new(),
|
||||
|
||||
@ -2328,10 +2328,13 @@ impl<'a> Resolver<'a> {
|
||||
};
|
||||
}
|
||||
}
|
||||
if primary_ns != MacroNS && path.len() == 1 &&
|
||||
self.macro_names.contains(&path[0].name) {
|
||||
let is_builtin = self.builtin_macros.get(&path[0].name).cloned()
|
||||
.map(|binding| binding.get_macro(self).kind() == MacroKind::Bang).unwrap_or(false);
|
||||
if primary_ns != MacroNS && (is_builtin || self.macro_names.contains(&path[0].name)) {
|
||||
// Return some dummy definition, it's enough for error reporting.
|
||||
return Some(PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX))));
|
||||
return Some(
|
||||
PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX), MacroKind::Bang))
|
||||
);
|
||||
}
|
||||
fin_res
|
||||
}
|
||||
@ -2768,18 +2771,24 @@ impl<'a> Resolver<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) {
|
||||
fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F)
|
||||
where F: FnOnce(&mut Resolver)
|
||||
{
|
||||
if let Some(label) = label {
|
||||
let def = Def::Label(id);
|
||||
self.with_label_rib(|this| {
|
||||
this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def);
|
||||
this.visit_block(block);
|
||||
f(this);
|
||||
});
|
||||
} else {
|
||||
self.visit_block(block);
|
||||
f(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) {
|
||||
self.with_resolved_label(label, id, |this| this.visit_block(block));
|
||||
}
|
||||
|
||||
fn resolve_expr(&mut self, expr: &Expr, parent: Option<&ExprKind>) {
|
||||
// First, record candidate traits for this expression if it could
|
||||
// result in the invocation of a method call.
|
||||
@ -2833,18 +2842,18 @@ impl<'a> Resolver<'a> {
|
||||
ExprKind::Loop(ref block, label) => self.resolve_labeled_block(label, expr.id, &block),
|
||||
|
||||
ExprKind::While(ref subexpression, ref block, label) => {
|
||||
self.visit_expr(subexpression);
|
||||
self.resolve_labeled_block(label, expr.id, &block);
|
||||
self.with_resolved_label(label, expr.id, |this| {
|
||||
this.visit_expr(subexpression);
|
||||
this.visit_block(block);
|
||||
});
|
||||
}
|
||||
|
||||
ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => {
|
||||
self.visit_expr(subexpression);
|
||||
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
|
||||
self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FxHashMap());
|
||||
|
||||
self.resolve_labeled_block(label, expr.id, block);
|
||||
|
||||
self.ribs[ValueNS].pop();
|
||||
self.with_resolved_label(label, expr.id, |this| {
|
||||
this.visit_expr(subexpression);
|
||||
this.resolve_pattern(pattern, PatternSource::WhileLet, &mut FxHashMap());
|
||||
this.visit_block(block);
|
||||
});
|
||||
}
|
||||
|
||||
ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => {
|
||||
|
@ -23,7 +23,7 @@ use syntax::ast::{self, Name, Ident};
|
||||
use syntax::attr;
|
||||
use syntax::errors::DiagnosticBuilder;
|
||||
use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator};
|
||||
use syntax::ext::base::{NormalTT, Resolver as SyntaxResolver, SyntaxExtension};
|
||||
use syntax::ext::base::{Resolver as SyntaxResolver, SyntaxExtension};
|
||||
use syntax::ext::base::MacroKind;
|
||||
use syntax::ext::expand::{Expansion, mark_tts};
|
||||
use syntax::ext::hygiene::Mark;
|
||||
@ -152,16 +152,14 @@ impl<'a> base::Resolver for Resolver<'a> {
|
||||
}
|
||||
|
||||
fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
|
||||
if let NormalTT(..) = *ext {
|
||||
self.macro_names.insert(ident.name);
|
||||
}
|
||||
let def_id = DefId {
|
||||
krate: BUILTIN_MACROS_CRATE,
|
||||
index: DefIndex::new(self.macro_map.len()),
|
||||
};
|
||||
let kind = ext.kind();
|
||||
self.macro_map.insert(def_id, ext);
|
||||
let binding = self.arenas.alloc_name_binding(NameBinding {
|
||||
kind: NameBindingKind::Def(Def::Macro(def_id)),
|
||||
kind: NameBindingKind::Def(Def::Macro(def_id, kind)),
|
||||
span: DUMMY_SP,
|
||||
vis: ty::Visibility::Invisible,
|
||||
expansion: Mark::root(),
|
||||
@ -470,24 +468,40 @@ impl<'a> Resolver<'a> {
|
||||
|
||||
fn suggest_macro_name(&mut self, name: &str, kind: MacroKind,
|
||||
err: &mut DiagnosticBuilder<'a>) {
|
||||
let suggestion = match kind {
|
||||
MacroKind::Bang =>
|
||||
find_best_match_for_name(self.macro_names.iter(), name, None),
|
||||
MacroKind::Attr |
|
||||
MacroKind::Derive => {
|
||||
// Find a suggestion from the legacy namespace.
|
||||
// FIXME: get_macro needs an &mut Resolver, can we do it without cloning?
|
||||
let builtin_macros = self.builtin_macros.clone();
|
||||
let names = builtin_macros.iter().filter_map(|(name, binding)| {
|
||||
if binding.get_macro(self).kind() == kind {
|
||||
Some(name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
find_best_match_for_name(names, name, None)
|
||||
// First check if this is a locally-defined bang macro.
|
||||
let suggestion = if let MacroKind::Bang = kind {
|
||||
find_best_match_for_name(self.macro_names.iter(), name, None)
|
||||
} else {
|
||||
None
|
||||
// Then check builtin macros.
|
||||
}.or_else(|| {
|
||||
// FIXME: get_macro needs an &mut Resolver, can we do it without cloning?
|
||||
let builtin_macros = self.builtin_macros.clone();
|
||||
let names = builtin_macros.iter().filter_map(|(name, binding)| {
|
||||
if binding.get_macro(self).kind() == kind {
|
||||
Some(name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
find_best_match_for_name(names, name, None)
|
||||
// Then check modules.
|
||||
}).or_else(|| {
|
||||
if !self.use_extern_macros {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
let is_macro = |def| {
|
||||
if let Def::Macro(_, def_kind) = def {
|
||||
def_kind == kind
|
||||
} else {
|
||||
false
|
||||
}
|
||||
};
|
||||
let ident = Ident::from_str(name);
|
||||
self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro)
|
||||
.as_ref().map(|s| Symbol::intern(s))
|
||||
});
|
||||
|
||||
if let Some(suggestion) = suggestion {
|
||||
if suggestion != name {
|
||||
if let MacroKind::Bang = kind {
|
||||
@ -566,7 +580,7 @@ impl<'a> Resolver<'a> {
|
||||
});
|
||||
self.macro_exports.push(Export {
|
||||
name: def.ident.name,
|
||||
def: Def::Macro(self.definitions.local_def_id(def.id)),
|
||||
def: Def::Macro(self.definitions.local_def_id(def.id), MacroKind::Bang),
|
||||
});
|
||||
self.exported_macros.push(def);
|
||||
}
|
||||
|
@ -336,7 +336,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
|
||||
Def::AssociatedTy(..) |
|
||||
Def::AssociatedConst(..) |
|
||||
Def::PrimTy(_) |
|
||||
Def::Macro(_) |
|
||||
Def::Macro(..) |
|
||||
Def::Err => {
|
||||
span_bug!(span,
|
||||
"process_def_kind for unexpected item: {:?}",
|
||||
|
@ -506,7 +506,11 @@ impl FnType {
|
||||
if let Some(inner) = rust_ptr_attrs(ty, &mut data) {
|
||||
data.attrs.set(ArgAttribute::NonNull);
|
||||
if ccx.tcx().struct_tail(inner).is_trait() {
|
||||
// vtables can be safely marked non-null, readonly
|
||||
// and noalias.
|
||||
info.attrs.set(ArgAttribute::NonNull);
|
||||
info.attrs.set(ArgAttribute::ReadOnly);
|
||||
info.attrs.set(ArgAttribute::NoAlias);
|
||||
}
|
||||
}
|
||||
args.push(data);
|
||||
|
@ -472,8 +472,15 @@ pub fn load_fat_ptr<'a, 'tcx>(
|
||||
b.load(ptr, alignment.to_align())
|
||||
};
|
||||
|
||||
// FIXME: emit metadata on `meta`.
|
||||
let meta = b.load(get_meta(b, src), alignment.to_align());
|
||||
let meta = get_meta(b, src);
|
||||
let meta_ty = val_ty(meta);
|
||||
// If the 'meta' field is a pointer, it's a vtable, so use load_nonnull
|
||||
// instead
|
||||
let meta = if meta_ty.element_type().kind() == llvm::TypeKind::Pointer {
|
||||
b.load_nonnull(meta, None)
|
||||
} else {
|
||||
b.load(meta, None)
|
||||
};
|
||||
|
||||
(ptr, meta)
|
||||
}
|
||||
@ -1132,11 +1139,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
|
||||
let exported_symbols = find_exported_symbols(tcx, reachable);
|
||||
|
||||
let check_overflow = if let Some(v) = tcx.sess.opts.debugging_opts.force_overflow_checks {
|
||||
v
|
||||
} else {
|
||||
tcx.sess.opts.debug_assertions
|
||||
};
|
||||
let check_overflow = tcx.sess.overflow_checks();
|
||||
|
||||
let link_meta = link::build_link_meta(incremental_hashes_map, &name);
|
||||
|
||||
|
@ -1149,6 +1149,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_invariant_load(&self, load: ValueRef) {
|
||||
unsafe {
|
||||
llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
|
||||
llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the ptr value that should be used for storing `val`.
|
||||
fn check_store<'b>(&self,
|
||||
val: ValueRef,
|
||||
@ -1181,7 +1188,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
|
||||
assert!(fn_ty.kind() == llvm::TypeKind::Function,
|
||||
"builder::{} not passed a function", typ);
|
||||
"builder::{} not passed a function, but {:?}", typ, fn_ty);
|
||||
|
||||
let param_tys = fn_ty.func_params();
|
||||
|
||||
|
@ -489,6 +489,20 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
self.output);
|
||||
}
|
||||
}
|
||||
mir::Rvalue::Cast(mir::CastKind::ClosureFnPointer, ref operand, _) => {
|
||||
let source_ty = operand.ty(self.mir, self.scx.tcx());
|
||||
match source_ty.sty {
|
||||
ty::TyClosure(def_id, substs) => {
|
||||
let closure_trans_item =
|
||||
create_fn_trans_item(self.scx,
|
||||
def_id,
|
||||
substs.substs,
|
||||
self.param_substs);
|
||||
self.output.push(closure_trans_item);
|
||||
}
|
||||
_ => bug!(),
|
||||
}
|
||||
}
|
||||
mir::Rvalue::Box(..) => {
|
||||
let exchange_malloc_fn_def_id =
|
||||
self.scx
|
||||
@ -615,19 +629,13 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
def_id: DefId)
|
||||
-> bool {
|
||||
match tcx.item_type(def_id).sty {
|
||||
ty::TyFnDef(def_id, _, f) => {
|
||||
ty::TyFnDef(def_id, _, _) => {
|
||||
// Some constructors also have type TyFnDef but they are
|
||||
// always instantiated inline and don't result in a
|
||||
// translation item. Same for FFI functions.
|
||||
if let Some(hir_map::NodeForeignItem(_)) = tcx.hir.get_if_local(def_id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(adt_def) = f.sig.output().skip_binder().ty_adt_def() {
|
||||
if adt_def.variants.iter().any(|v| def_id == v.did) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
ty::TyClosure(..) => {}
|
||||
_ => return false
|
||||
@ -689,6 +697,16 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
fn should_trans_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_id: DefId)
|
||||
-> bool {
|
||||
if let ty::TyFnDef(_, _, f) = tcx.item_type(def_id).sty {
|
||||
if let Some(adt_def) = f.sig.output().skip_binder().ty_adt_def() {
|
||||
if adt_def.variants.iter().any(|v| def_id == v.did) {
|
||||
// HACK: ADT constructors are translated in-place and
|
||||
// do not have a trans-item.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if def_id.is_local() {
|
||||
true
|
||||
} else {
|
||||
|
@ -386,7 +386,15 @@ pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, inf
|
||||
let info = bcx.pointercast(info, Type::int(bcx.ccx).ptr_to());
|
||||
let size_ptr = bcx.gepi(info, &[1]);
|
||||
let align_ptr = bcx.gepi(info, &[2]);
|
||||
(bcx.load(size_ptr, None), bcx.load(align_ptr, None))
|
||||
|
||||
let size = bcx.load(size_ptr, None);
|
||||
let align = bcx.load(align_ptr, None);
|
||||
|
||||
// Vtable loads are invariant
|
||||
bcx.set_invariant_load(size);
|
||||
bcx.set_invariant_load(align);
|
||||
|
||||
(size, align)
|
||||
}
|
||||
ty::TySlice(_) | ty::TyStr => {
|
||||
let unit_ty = t.sequence_element_type(bcx.tcx());
|
||||
|
@ -30,13 +30,15 @@ const VTABLE_OFFSET: usize = 3;
|
||||
/// Extracts a method from a trait object's vtable, at the specified index.
|
||||
pub fn get_virtual_method<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
|
||||
llvtable: ValueRef,
|
||||
vtable_index: usize)
|
||||
-> ValueRef {
|
||||
vtable_index: usize) -> ValueRef {
|
||||
// Load the data pointer from the object.
|
||||
debug!("get_virtual_method(vtable_index={}, llvtable={:?})",
|
||||
vtable_index, Value(llvtable));
|
||||
|
||||
bcx.load(bcx.gepi(llvtable, &[vtable_index + VTABLE_OFFSET]), None)
|
||||
let ptr = bcx.load_nonnull(bcx.gepi(llvtable, &[vtable_index + VTABLE_OFFSET]), None);
|
||||
// Vtable loads are invariant
|
||||
bcx.set_invariant_load(ptr);
|
||||
ptr
|
||||
}
|
||||
|
||||
/// Generate a shim function that allows an object type like `SomeTrait` to
|
||||
|
@ -20,7 +20,7 @@ use rustc::mir;
|
||||
use rustc::mir::tcx::LvalueTy;
|
||||
use rustc::ty::{self, layout, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc::ty::cast::{CastTy, IntTy};
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::ty::subst::{Kind, Substs};
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use {abi, adt, base, Disr, machine};
|
||||
use callee::Callee;
|
||||
@ -578,6 +578,27 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::CastKind::ClosureFnPointer => {
|
||||
match operand.ty.sty {
|
||||
ty::TyClosure(def_id, substs) => {
|
||||
// Get the def_id for FnOnce::call_once
|
||||
let fn_once = tcx.lang_items.fn_once_trait().unwrap();
|
||||
let call_once = tcx
|
||||
.global_tcx().associated_items(fn_once)
|
||||
.find(|it| it.kind == ty::AssociatedKind::Method)
|
||||
.unwrap().def_id;
|
||||
// Now create its substs [Closure, Tuple]
|
||||
let input = tcx.closure_type(def_id, substs).sig.input(0);
|
||||
let substs = tcx.mk_substs([operand.ty, input.skip_binder()]
|
||||
.iter().cloned().map(Kind::from));
|
||||
Callee::def(self.ccx, call_once, substs)
|
||||
.reify(self.ccx)
|
||||
}
|
||||
_ => {
|
||||
bug!("{} cannot be cast to a fn ptr", operand.ty)
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::CastKind::UnsafeFnPointer => {
|
||||
// this is a no-op at the LLVM level
|
||||
operand.llval
|
||||
|
@ -12,6 +12,7 @@ use llvm::{self, ValueRef};
|
||||
use rustc::ty::{self, Ty};
|
||||
use rustc::ty::cast::{CastTy, IntTy};
|
||||
use rustc::ty::layout::Layout;
|
||||
use rustc::ty::subst::Kind;
|
||||
use rustc::mir::tcx::LvalueTy;
|
||||
use rustc::mir;
|
||||
use middle::lang_items::ExchangeMallocFnLangItem;
|
||||
@ -190,6 +191,28 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::CastKind::ClosureFnPointer => {
|
||||
match operand.ty.sty {
|
||||
ty::TyClosure(def_id, substs) => {
|
||||
// Get the def_id for FnOnce::call_once
|
||||
let fn_once = bcx.tcx().lang_items.fn_once_trait().unwrap();
|
||||
let call_once = bcx.tcx()
|
||||
.global_tcx().associated_items(fn_once)
|
||||
.find(|it| it.kind == ty::AssociatedKind::Method)
|
||||
.unwrap().def_id;
|
||||
// Now create its substs [Closure, Tuple]
|
||||
let input = bcx.tcx().closure_type(def_id, substs).sig.input(0);
|
||||
let substs = bcx.tcx().mk_substs([operand.ty, input.skip_binder()]
|
||||
.iter().cloned().map(Kind::from));
|
||||
OperandValue::Immediate(
|
||||
Callee::def(bcx.ccx, call_once, substs)
|
||||
.reify(bcx.ccx))
|
||||
}
|
||||
_ => {
|
||||
bug!("{} cannot be cast to a fn ptr", operand.ty)
|
||||
}
|
||||
}
|
||||
}
|
||||
mir::CastKind::UnsafeFnPointer => {
|
||||
// this is a no-op at the LLVM level
|
||||
operand.val
|
||||
|
@ -63,13 +63,17 @@
|
||||
use check::FnCtxt;
|
||||
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::infer::{Coercion, InferOk, TypeTrace};
|
||||
use rustc::traits::{self, ObligationCause, ObligationCauseCode};
|
||||
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
|
||||
use rustc::ty::{self, LvaluePreference, TypeAndMut, Ty};
|
||||
use rustc::ty::{self, LvaluePreference, TypeAndMut,
|
||||
Ty, ClosureSubsts};
|
||||
use rustc::ty::fold::TypeFoldable;
|
||||
use rustc::ty::error::TypeError;
|
||||
use rustc::ty::relate::RelateResult;
|
||||
use syntax::abi;
|
||||
use syntax::feature_gate;
|
||||
use util::common::indent;
|
||||
|
||||
use std::cell::RefCell;
|
||||
@ -196,6 +200,11 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
// unsafe qualifier.
|
||||
self.coerce_from_fn_pointer(a, a_f, b)
|
||||
}
|
||||
ty::TyClosure(def_id_a, substs_a) => {
|
||||
// Non-capturing closures are coercible to
|
||||
// function pointers
|
||||
self.coerce_closure_to_fn(a, def_id_a, substs_a, b)
|
||||
}
|
||||
_ => {
|
||||
// Otherwise, just use unification rules.
|
||||
self.unify_and_identity(a, b)
|
||||
@ -551,6 +560,60 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn coerce_closure_to_fn(&self,
|
||||
a: Ty<'tcx>,
|
||||
def_id_a: DefId,
|
||||
substs_a: ClosureSubsts<'tcx>,
|
||||
b: Ty<'tcx>)
|
||||
-> CoerceResult<'tcx> {
|
||||
//! Attempts to coerce from the type of a non-capturing closure
|
||||
//! into a function pointer.
|
||||
//!
|
||||
|
||||
let b = self.shallow_resolve(b);
|
||||
|
||||
let node_id_a = self.tcx.hir.as_local_node_id(def_id_a).unwrap();
|
||||
match b.sty {
|
||||
ty::TyFnPtr(_) if self.tcx.with_freevars(node_id_a, |v| v.is_empty()) => {
|
||||
if !self.tcx.sess.features.borrow().closure_to_fn_coercion {
|
||||
feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
|
||||
"closure_to_fn_coercion",
|
||||
self.cause.span,
|
||||
feature_gate::GateIssue::Language,
|
||||
feature_gate::CLOSURE_TO_FN_COERCION);
|
||||
return self.unify_and_identity(a, b);
|
||||
}
|
||||
// We coerce the closure, which has fn type
|
||||
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
|
||||
// to
|
||||
// `fn(arg0,arg1,...) -> _`
|
||||
let sig = self.closure_type(def_id_a, substs_a).sig;
|
||||
let converted_sig = sig.map_bound(|s| {
|
||||
let params_iter = match s.inputs()[0].sty {
|
||||
ty::TyTuple(params, _) => {
|
||||
params.into_iter().cloned()
|
||||
}
|
||||
_ => bug!(),
|
||||
};
|
||||
self.tcx.mk_fn_sig(params_iter,
|
||||
s.output(),
|
||||
s.variadic)
|
||||
});
|
||||
let fn_ty = self.tcx.mk_bare_fn(ty::BareFnTy {
|
||||
unsafety: hir::Unsafety::Normal,
|
||||
abi: abi::Abi::Rust,
|
||||
sig: converted_sig,
|
||||
});
|
||||
let pointer_ty = self.tcx.mk_fn_ptr(&fn_ty);
|
||||
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})",
|
||||
a, b, pointer_ty);
|
||||
self.unify_and_identity(pointer_ty, b)
|
||||
.map(|(ty, _)| (ty, Adjust::ClosureFnPointer))
|
||||
}
|
||||
_ => self.unify_and_identity(a, b),
|
||||
}
|
||||
}
|
||||
|
||||
fn coerce_unsafe_ptr(&self,
|
||||
a: Ty<'tcx>,
|
||||
b: Ty<'tcx>,
|
||||
|
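A hedged sketch of the user-visible effect of `coerce_closure_to_fn` above: once the `closure_to_fn_coercion` gate checked a few lines up is enabled, a non-capturing closure coerces to a plain `fn` pointer. The `apply` helper and the `i32` signature are illustrative assumptions, not taken from this commit:

```rust
#![feature(closure_to_fn_coercion)]

// `|x, y| x + y` captures nothing, so its internal
// `extern "rust-call" fn((i32, i32)) -> i32` signature can be flattened
// into the ordinary `fn(i32, i32) -> i32` built in `coerce_closure_to_fn`.
fn apply(f: fn(i32, i32) -> i32, a: i32, b: i32) -> i32 {
    f(a, b)
}

fn main() {
    let add: fn(i32, i32) -> i32 = |x, y| x + y;
    println!("{}", apply(add, 2, 3)); // prints 5
}
```

A capturing closure is still rejected; the new `closure-no-fn.rs` compile-fail test added later in this commit covers that case.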
@ -425,15 +425,12 @@ pub struct EnclosingLoops<'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'gcx, 'tcx> EnclosingLoops<'gcx, 'tcx> {
|
||||
fn find_loop(&mut self, id: Option<ast::NodeId>) -> Option<&mut LoopCtxt<'gcx, 'tcx>> {
|
||||
if let Some(id) = id {
|
||||
if let Some(ix) = self.by_id.get(&id).cloned() {
|
||||
Some(&mut self.stack[ix])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
fn find_loop(&mut self, id: hir::LoopIdResult) -> Option<&mut LoopCtxt<'gcx, 'tcx>> {
|
||||
let id_res: Result<_,_> = id.into();
|
||||
if let Some(ix) = id_res.ok().and_then(|id| self.by_id.get(&id).cloned()) {
|
||||
Some(&mut self.stack[ix])
|
||||
} else {
|
||||
self.stack.last_mut()
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3596,10 +3593,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
tcx.mk_nil()
|
||||
}
|
||||
hir::ExprBreak(label, ref expr_opt) => {
|
||||
let loop_id = label.map(|l| l.loop_id);
|
||||
let coerce_to = {
|
||||
let mut enclosing_loops = self.enclosing_loops.borrow_mut();
|
||||
enclosing_loops.find_loop(loop_id).map(|ctxt| ctxt.coerce_to)
|
||||
enclosing_loops.find_loop(label.loop_id).map(|ctxt| ctxt.coerce_to)
|
||||
};
|
||||
if let Some(coerce_to) = coerce_to {
|
||||
let e_ty;
|
||||
@ -3614,8 +3610,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
e_ty = tcx.mk_nil();
|
||||
cause = self.misc(expr.span);
|
||||
}
|
||||
|
||||
let mut enclosing_loops = self.enclosing_loops.borrow_mut();
|
||||
let ctxt = enclosing_loops.find_loop(loop_id).unwrap();
|
||||
let ctxt = enclosing_loops.find_loop(label.loop_id).unwrap();
|
||||
|
||||
let result = if let Some(ref e) = *expr_opt {
|
||||
// Special-case the first element, as it has no "previous expressions".
|
||||
|
@ -412,6 +412,10 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
|
||||
adjustment::Adjust::MutToConstPointer
|
||||
}
|
||||
|
||||
adjustment::Adjust::ClosureFnPointer => {
|
||||
adjustment::Adjust::ClosureFnPointer
|
||||
}
|
||||
|
||||
adjustment::Adjust::UnsafeFnPointer => {
|
||||
adjustment::Adjust::UnsafeFnPointer
|
||||
}
|
||||
|
@ -12,7 +12,7 @@ use std::env;
|
||||
use std::ffi::OsString;
|
||||
use std::io::prelude::*;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::panic::{self, AssertUnwindSafe};
|
||||
use std::process::Command;
|
||||
use std::rc::Rc;
|
||||
@ -485,7 +485,15 @@ impl Collector {
|
||||
|
||||
pub fn get_filename(&self) -> String {
|
||||
if let Some(ref codemap) = self.codemap {
|
||||
codemap.span_to_filename(self.position)
|
||||
let filename = codemap.span_to_filename(self.position);
|
||||
if let Ok(cur_dir) = env::current_dir() {
|
||||
if let Ok(path) = Path::new(&filename).strip_prefix(&cur_dir) {
|
||||
if let Some(path) = path.to_str() {
|
||||
return path.to_owned();
|
||||
}
|
||||
}
|
||||
}
|
||||
filename
|
||||
} else if let Some(ref filename) = self.filename {
|
||||
filename.clone()
|
||||
} else {
|
||||
|
@ -199,7 +199,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
|
||||
self.inside_public_path = orig_inside_public_path;
|
||||
if let Some(exports) = self.cx.export_map.get(&id) {
|
||||
for export in exports {
|
||||
if let Def::Macro(def_id) = export.def {
|
||||
if let Def::Macro(def_id, ..) = export.def {
|
||||
if def_id.krate == LOCAL_CRATE {
|
||||
continue // These are `krate.exported_macros`, handled in `self.visit()`.
|
||||
}
|
||||
|
@ -59,6 +59,10 @@ fn main() {
|
||||
println!("cargo:rustc-link-lib=userenv");
|
||||
println!("cargo:rustc-link-lib=shell32");
|
||||
} else if target.contains("fuchsia") {
|
||||
// use system-provided libbacktrace
|
||||
if cfg!(feature = "backtrace") {
|
||||
println!("cargo:rustc-link-lib=backtrace");
|
||||
}
|
||||
println!("cargo:rustc-link-lib=magenta");
|
||||
println!("cargo:rustc-link-lib=mxio");
|
||||
println!("cargo:rustc-link-lib=launchpad"); // for std::process
|
||||
|
@ -182,12 +182,12 @@ impl DefaultResizePolicy {
|
||||
// ----------------------
|
||||
// To protect against degenerate performance scenarios (including DOS attacks),
|
||||
// the implementation includes an adaptive behavior that can resize the map
|
||||
// early (before its capacity is exceeded) when suspiciously long probe or
|
||||
// forward shifts sequences are encountered.
|
||||
// early (before its capacity is exceeded) when suspiciously long probe sequences
|
||||
// are encountered.
|
||||
//
|
||||
// With this algorithm in place it would be possible to turn a CPU attack into
|
||||
// a memory attack due to the aggressive resizing. To prevent that the
|
||||
// adaptive behavior only triggers when the map occupancy is half the maximum occupancy.
|
||||
// adaptive behavior only triggers when the map is at least half full.
|
||||
// This reduces the effectiveness of the algorithm but also makes it completely safe.
|
||||
//
|
||||
// The previous safety measure also prevents degenerate interactions with
|
||||
@ -195,16 +195,11 @@ impl DefaultResizePolicy {
|
||||
// DOS attack.
|
||||
//
|
||||
const DISPLACEMENT_THRESHOLD: usize = 128;
|
||||
const FORWARD_SHIFT_THRESHOLD: usize = 512;
|
||||
//
|
||||
// The thresholds of 128 and 512 are chosen to minimize the chance of exceeding them.
|
||||
// The threshold of 128 is chosen to minimize the chance of exceeding it.
|
||||
// In particular, we want that chance to be less than 10^-8 with a load of 90%.
|
||||
// For displacement, the smallest constant that fits our needs is 90,
|
||||
// so we round that up to 128. For the number of forward-shifted buckets,
|
||||
// we choose k=512. Keep in mind that the run length is a sum of the displacement and
|
||||
// the number of forward-shifted buckets, so its threshold is 128+512=640.
|
||||
// Even though the probability of having a run length of more than 640 buckets may be
|
||||
// higher than the probability we want, it should be low enough.
|
||||
// so we round that up to 128.
|
||||
//
// At a load factor of α, the odds of finding the target bucket after exactly n
// unsuccessful probes[1] are
@ -212,16 +207,12 @@ const FORWARD_SHIFT_THRESHOLD: usize = 512;
// Pr_α{displacement = n} =
// (1 - α) / α * ∑_{k≥1} e^(-kα) * (kα)^(k+n) / (k + n)! * (1 - kα / (k + n + 1))
//
// We use this formula to find the probability of loading half of triggering the adaptive behavior
// We use this formula to find the probability of triggering the adaptive behavior
//
// Pr_0.909{displacement > 128} = 1.601 * 10^-11
//
// FIXME: Extend with math for shift threshold in [2]
//
// 1. Alfredo Viola (2005). Distributional analysis of Robin Hood linear probing
//    hashing with buckets.
// 2. http://www.cs.tau.ac.il/~zwick/Adv-Alg-2015/Linear-Probing.pdf

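The displacement-probability expression from the comment above, restated in LaTeX purely for readability; it is the same formula with the same α = 0.909 figure, nothing new is claimed:

```latex
P_\alpha\{\text{displacement} = n\}
  = \frac{1-\alpha}{\alpha}
    \sum_{k \ge 1} \frac{e^{-k\alpha}\,(k\alpha)^{k+n}}{(k+n)!}
    \left(1 - \frac{k\alpha}{k+n+1}\right),
\qquad
P_{0.909}\{\text{displacement} > 128\} \approx 1.601 \times 10^{-11}.
```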
/// A hash map implementation which uses linear probing with Robin Hood bucket
|
||||
/// stealing.
|
||||
@ -494,7 +485,7 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>,
|
||||
mut hash: SafeHash,
|
||||
mut key: K,
|
||||
mut val: V)
|
||||
-> (usize, &'a mut V) {
|
||||
-> &'a mut V {
|
||||
let start_index = bucket.index();
|
||||
let size = bucket.table().size();
|
||||
// Save the *starting point*.
|
||||
@ -519,7 +510,6 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>,
|
||||
Empty(bucket) => {
|
||||
// Found a hole!
|
||||
let bucket = bucket.put(hash, key, val);
|
||||
let end_index = bucket.index();
|
||||
// Now that it's stolen, just read the value's pointer
|
||||
// right out of the table! Go back to the *starting point*.
|
||||
//
|
||||
@ -527,7 +517,7 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>,
|
||||
// bucket, which is a FullBucket on top of a
|
||||
// FullBucketMut, into just one FullBucketMut. The "table"
|
||||
// refers to the inner FullBucketMut in this context.
|
||||
return (end_index - start_index, bucket.into_table().into_mut_refs().1);
|
||||
return bucket.into_table().into_mut_refs().1;
|
||||
}
|
||||
Full(bucket) => bucket,
|
||||
};
|
||||
@ -2128,18 +2118,16 @@ impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> {
|
||||
pub fn insert(self, value: V) -> &'a mut V {
|
||||
match self.elem {
|
||||
NeqElem(bucket, disp) => {
|
||||
let (shift, v_ref) = robin_hood(bucket, disp, self.hash, self.key, value);
|
||||
if disp >= DISPLACEMENT_THRESHOLD || shift >= FORWARD_SHIFT_THRESHOLD {
|
||||
if disp >= DISPLACEMENT_THRESHOLD {
|
||||
*self.long_probes = true;
|
||||
}
|
||||
v_ref
|
||||
robin_hood(bucket, disp, self.hash, self.key, value)
|
||||
},
|
||||
NoElem(bucket, disp) => {
|
||||
if disp >= DISPLACEMENT_THRESHOLD {
|
||||
*self.long_probes = true;
|
||||
}
|
||||
let bucket = bucket.put(self.hash, self.key, value);
|
||||
bucket.into_mut_refs().1
|
||||
bucket.put(self.hash, self.key, value).into_mut_refs().1
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ impl<T: RefUnwindSafe + ?Sized> UnwindSafe for *const T {}
|
||||
#[stable(feature = "catch_unwind", since = "1.9.0")]
|
||||
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for *mut T {}
|
||||
#[unstable(feature = "unique", issue = "27730")]
|
||||
impl<T: UnwindSafe> UnwindSafe for Unique<T> {}
|
||||
impl<T: UnwindSafe + ?Sized> UnwindSafe for Unique<T> {}
|
||||
#[unstable(feature = "shared", issue = "27730")]
|
||||
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Shared<T> {}
|
||||
#[stable(feature = "catch_unwind", since = "1.9.0")]
|
||||
|
@ -13,7 +13,7 @@
|
||||
use io::{self, ErrorKind};
|
||||
|
||||
pub mod args;
|
||||
#[cfg(any(not(cargobuild), feature = "backtrace"))]
|
||||
#[cfg(feature = "backtrace")]
|
||||
pub mod backtrace;
|
||||
pub mod condvar;
|
||||
pub mod env;
|
||||
|
@ -63,6 +63,10 @@ impl TcpStream {
|
||||
Ok(path_to_local_addr(path.to_str().unwrap_or("")))
|
||||
}
|
||||
|
||||
pub fn peek(&self, _buf: &mut [u8]) -> Result<usize> {
|
||||
Err(Error::new(ErrorKind::Other, "TcpStream::peek not implemented"))
|
||||
}
|
||||
|
||||
pub fn shutdown(&self, _how: Shutdown) -> Result<()> {
|
||||
Err(Error::new(ErrorKind::Other, "TcpStream::shutdown not implemented"))
|
||||
}
|
||||
|
@ -87,6 +87,14 @@ impl UdpSocket {
|
||||
Ok(path_to_local_addr(path.to_str().unwrap_or("")))
|
||||
}
|
||||
|
||||
pub fn peek(&self, _buf: &mut [u8]) -> Result<usize> {
|
||||
Err(Error::new(ErrorKind::Other, "UdpSocket::peek not implemented"))
|
||||
}
|
||||
|
||||
pub fn peek_from(&self, _buf: &mut [u8]) -> Result<(usize, SocketAddr)> {
|
||||
Err(Error::new(ErrorKind::Other, "UdpSocket::peek_from not implemented"))
|
||||
}
|
||||
|
||||
pub fn broadcast(&self) -> Result<bool> {
|
||||
Err(Error::new(ErrorKind::Other, "UdpSocket::broadcast not implemented"))
|
||||
}
|
||||
|
@ -35,7 +35,8 @@ use libc::{stat as stat64, fstat as fstat64, lstat as lstat64, off_t as off64_t,
|
||||
ftruncate as ftruncate64, lseek as lseek64, dirent as dirent64, open as open64};
|
||||
#[cfg(not(any(target_os = "linux",
|
||||
target_os = "emscripten",
|
||||
target_os = "solaris")))]
|
||||
target_os = "solaris",
|
||||
target_os = "fuchsia")))]
|
||||
use libc::{readdir_r as readdir64_r};
|
||||
|
||||
pub struct File(FileDesc);
|
||||
@ -59,10 +60,10 @@ pub struct DirEntry {
|
||||
entry: dirent64,
|
||||
root: Arc<PathBuf>,
|
||||
// We need to store an owned copy of the directory name
|
||||
// on Solaris because a) it uses a zero-length array to
|
||||
// store the name, b) its lifetime between readdir calls
|
||||
// is not guaranteed.
|
||||
#[cfg(target_os = "solaris")]
|
||||
// on Solaris and Fuchsia because a) it uses a zero-length
|
||||
// array to store the name, b) its lifetime between readdir
|
||||
// calls is not guaranteed.
|
||||
#[cfg(any(target_os = "solaris", target_os = "fuchsia"))]
|
||||
name: Box<[u8]>
|
||||
}
|
||||
|
||||
@ -205,14 +206,14 @@ impl fmt::Debug for ReadDir {
|
||||
impl Iterator for ReadDir {
|
||||
type Item = io::Result<DirEntry>;
|
||||
|
||||
#[cfg(target_os = "solaris")]
|
||||
#[cfg(any(target_os = "solaris", target_os = "fuchsia"))]
|
||||
fn next(&mut self) -> Option<io::Result<DirEntry>> {
|
||||
unsafe {
|
||||
loop {
|
||||
// Although readdir_r(3) would be a correct function to use here because
|
||||
// of the thread safety, on Illumos the readdir(3C) function is safe to use
|
||||
// in threaded applications and it is generally preferred over the
|
||||
// readdir_r(3C) function.
|
||||
// of the thread safety, on Illumos and Fuchsia the readdir(3C) function
|
||||
// is safe to use in threaded applications and it is generally preferred
|
||||
// over the readdir_r(3C) function.
|
||||
super::os::set_errno(0);
|
||||
let entry_ptr = libc::readdir(self.dirp.0);
|
||||
if entry_ptr.is_null() {
|
||||
@ -240,7 +241,7 @@ impl Iterator for ReadDir {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "solaris"))]
|
||||
#[cfg(not(any(target_os = "solaris", target_os = "fuchsia")))]
|
||||
fn next(&mut self) -> Option<io::Result<DirEntry>> {
|
||||
unsafe {
|
||||
let mut ret = DirEntry {
|
||||
@ -344,14 +345,14 @@ impl DirEntry {
|
||||
#[cfg(any(target_os = "android",
|
||||
target_os = "linux",
|
||||
target_os = "emscripten",
|
||||
target_os = "haiku",
|
||||
target_os = "fuchsia"))]
|
||||
target_os = "haiku"))]
|
||||
fn name_bytes(&self) -> &[u8] {
|
||||
unsafe {
|
||||
CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes()
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "solaris")]
|
||||
#[cfg(any(target_os = "solaris",
|
||||
target_os = "fuchsia"))]
|
||||
fn name_bytes(&self) -> &[u8] {
|
||||
&*self.name
|
||||
}
|
||||
|
@ -64,7 +64,7 @@ pub fn errno() -> i32 {
|
||||
}
|
||||
|
||||
/// Sets the platform-specific value of errno
|
||||
#[cfg(target_os = "solaris")] // only needed for readdir so far
|
||||
#[cfg(any(target_os = "solaris", target_os = "fuchsia"))] // only needed for readdir so far
|
||||
pub fn set_errno(e: i32) {
|
||||
unsafe {
|
||||
*errno_location() = e as c_int
|
||||
|
@ -111,7 +111,7 @@ extern {
|
||||
pub fn mx_handle_duplicate(handle: mx_handle_t, rights: mx_rights_t,
|
||||
out: *const mx_handle_t) -> mx_handle_t;
|
||||
|
||||
pub fn mx_handle_wait_one(handle: mx_handle_t, signals: mx_signals_t, timeout: mx_time_t,
|
||||
pub fn mx_object_wait_one(handle: mx_handle_t, signals: mx_signals_t, timeout: mx_time_t,
|
||||
pending: *mut mx_signals_t) -> mx_status_t;
|
||||
|
||||
pub fn mx_object_get_info(handle: mx_handle_t, topic: u32, buffer: *mut c_void,
|
||||
|
@ -151,7 +151,7 @@ impl Process {
|
||||
let mut avail: mx_size_t = 0;
|
||||
|
||||
unsafe {
|
||||
mx_cvt(mx_handle_wait_one(self.handle.raw(), MX_TASK_TERMINATED,
|
||||
mx_cvt(mx_object_wait_one(self.handle.raw(), MX_TASK_TERMINATED,
|
||||
MX_TIME_INFINITE, ptr::null_mut()))?;
|
||||
mx_cvt(mx_object_get_info(self.handle.raw(), MX_INFO_PROCESS,
|
||||
&mut proc_info as *mut _ as *mut libc::c_void,
|
||||
@ -174,7 +174,7 @@ impl Process {
|
||||
let mut avail: mx_size_t = 0;
|
||||
|
||||
unsafe {
|
||||
let status = mx_handle_wait_one(self.handle.raw(), MX_TASK_TERMINATED,
|
||||
let status = mx_object_wait_one(self.handle.raw(), MX_TASK_TERMINATED,
|
||||
0, ptr::null_mut());
|
||||
match status {
|
||||
0 => { }, // Success
|
||||
|
@ -475,7 +475,7 @@ pub type BuiltinDeriveFn =
|
||||
for<'cx> fn(&'cx mut ExtCtxt, Span, &MetaItem, &Annotatable, &mut FnMut(Annotatable));
|
||||
|
||||
/// Represents different kinds of macro invocations that can be resolved.
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||
pub enum MacroKind {
|
||||
/// A bang macro - foo!()
|
||||
Bang,
|
||||
|
@ -200,9 +200,6 @@ declare_features! (
|
||||
// rustc internal
|
||||
(active, prelude_import, "1.2.0", None),
|
||||
|
||||
// Allows the definition recursive static items.
|
||||
(active, static_recursion, "1.3.0", Some(29719)),
|
||||
|
||||
// Allows default type parameters to influence type inference.
|
||||
(active, default_type_parameter_fallback, "1.3.0", Some(27336)),
|
||||
|
||||
@ -326,6 +323,10 @@ declare_features! (
|
||||
// `extern "msp430-interrupt" fn()`
|
||||
(active, abi_msp430_interrupt, "1.16.0", Some(38487)),
|
||||
|
||||
// Used to identify crates that contain sanitizer runtimes
|
||||
// rustc internal
|
||||
(active, closure_to_fn_coercion, "1.17.0", Some(39817)),
|
||||
|
||||
// Used to identify crates that contain sanitizer runtimes
|
||||
// rustc internal
|
||||
(active, sanitizer_runtime, "1.17.0", None),
|
||||
@ -387,6 +388,8 @@ declare_features! (
|
||||
(accepted, static_in_const, "1.17.0", Some(35897)),
|
||||
// Allows field shorthands (`x` meaning `x: x`) in struct literal expressions.
|
||||
(accepted, field_init_shorthand, "1.17.0", Some(37340)),
|
||||
// Allows the definition recursive static items.
|
||||
(accepted, static_recursion, "1.17.0", Some(29719)),
|
||||
);
|
||||
// If you change this, please modify src/doc/unstable-book as well. You must
|
||||
// move that documentation into the relevant place in the other docs, and
|
||||
@ -982,6 +985,9 @@ pub const EXPLAIN_DERIVE_UNDERSCORE: &'static str =
|
||||
pub const EXPLAIN_PLACEMENT_IN: &'static str =
|
||||
"placement-in expression syntax is experimental and subject to change.";
|
||||
|
||||
pub const CLOSURE_TO_FN_COERCION: &'static str =
|
||||
"non-capturing closure to fn coercion is experimental";
|
||||
|
||||
struct PostExpansionVisitor<'a> {
|
||||
context: &'a Context<'a>,
|
||||
}
|
||||
|
@ -121,13 +121,13 @@ pub fn unsafe_slice(_: &[UnsafeInner]) {
|
||||
fn str(_: &[u8]) {
|
||||
}
|
||||
|
||||
// CHECK: @trait_borrow(i8* nonnull, void (i8*)** nonnull)
|
||||
// CHECK: @trait_borrow(i8* nonnull, void (i8*)** noalias nonnull readonly)
|
||||
// FIXME #25759 This should also have `nocapture`
|
||||
#[no_mangle]
|
||||
fn trait_borrow(_: &Drop) {
|
||||
}
|
||||
|
||||
// CHECK: @trait_box(i8* noalias nonnull, void (i8*)** nonnull)
|
||||
// CHECK: @trait_box(i8* noalias nonnull, void (i8*)** noalias nonnull readonly)
|
||||
#[no_mangle]
|
||||
fn trait_box(_: Box<Drop>) {
|
||||
}
|
||||
|
@ -22,15 +22,24 @@ extern crate attr_proc_macro;

use attr_proc_macro::attr_proc_macro;

#[derive(FooWithLongNam)]
//~^ ERROR cannot find derive macro `FooWithLongNam` in this scope
macro_rules! FooWithLongNam {
    () => {}
}

#[derive(FooWithLongNan)]
//~^ ERROR cannot find derive macro `FooWithLongNan` in this scope
//~^^ HELP did you mean `FooWithLongName`?
struct Foo;

#[attr_proc_macra]
//~^ ERROR cannot find attribute macro `attr_proc_macra` in this scope
//~^^ HELP did you mean `attr_proc_macro`?
struct Bar;

#[FooWithLongNan]
//~^ ERROR cannot find attribute macro `FooWithLongNan` in this scope
struct Asdf;

#[derive(Dlone)]
//~^ ERROR cannot find derive macro `Dlone` in this scope
//~^^ HELP did you mean `Clone`?

@ -41,4 +50,18 @@ struct A;
//~^^ HELP did you mean `Clona`?
struct B;

fn main() {}
#[derive(attr_proc_macra)]
//~^ ERROR cannot find derive macro `attr_proc_macra` in this scope
struct C;

fn main() {
    FooWithLongNama!();
    //~^ ERROR cannot find macro `FooWithLongNama!` in this scope
    //~^^ HELP did you mean `FooWithLongNam!`?

    attr_proc_macra!();
    //~^ ERROR cannot find macro `attr_proc_macra!` in this scope

    Dlona!();
    //~^ ERROR cannot find macro `Dlona!` in this scope
}

@ -13,6 +13,7 @@
// ignore-aarch64
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm, rustc_attrs)]

@ -10,6 +10,7 @@
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm)]

@ -13,6 +13,7 @@
// ignore-aarch64
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm, rustc_attrs)]

@ -10,6 +10,7 @@
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm)]

@ -10,6 +10,7 @@
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm)]

@ -10,6 +10,7 @@
// ignore-s390x
// ignore-emscripten
// ignore-powerpc

#![feature(asm)]

24  src/test/compile-fail/closure-no-fn.rs  Normal file
@ -0,0 +1,24 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Ensure that capturing closures are never coerced to fns
// Especially interesting as non-capturing closures can be.

fn main() {
    let mut a = 0u8;
    let foo: fn(u8) -> u8 = |v: u8| { a += v; a };
    //~^ ERROR mismatched types
    let b = 0u8;
    let bar: fn() -> u8 = || { b };
    //~^ ERROR mismatched types
    let baz: fn() -> u8 = || { b } as fn() -> u8;
    //~^ ERROR mismatched types
    //~^^ ERROR non-scalar cast
}

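The new test above and the feature-gate test later in this diff hinge on the same rule: a closure may be coerced to a `fn` pointer only if it captures nothing. A minimal standalone sketch of that rule (an illustration assuming a toolchain where the coercion is available, not a file from this commit):

// Sketch only: the coercion the `closure_to_fn_coercion` gate controls.
fn main() {
    // No captured state, so the closure can become a plain `fn` pointer.
    let add_one: fn(u8) -> u8 = |v| v + 1;
    assert_eq!(add_one(41), 42);

    // Capturing `base` rules the coercion out; the value stays a closure type.
    let base = 10u8;
    let add_base = |v: u8| v + base;
    assert_eq!(add_base(32), 42);
}
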
@ -23,8 +23,6 @@ fn main() {
    // Here, F is instantiated with $0=uint
    let x = foo();
    //~^ ERROR: mismatched types
    //~| expected type `usize`
    //~| found type `isize`
    //~| NOTE: conflicting type parameter defaults `usize` and `isize`
    //~| NOTE: conflicting type parameter defaults `usize` and `isize`
    //~| NOTE: ...that was applied to an unconstrained type variable here

@ -29,6 +29,4 @@ fn main() {
    //~| NOTE: conflicting type parameter defaults `bool` and `char`
    //~| a second default is defined on `default_param_test::bleh`
    //~| NOTE: ...that was applied to an unconstrained type variable here
    //~| expected type `bool`
    //~| found type `char`
}

45  src/test/compile-fail/feature-gate-closure_to_fn_coercion.rs  Normal file
@ -0,0 +1,45 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// ignore-stage0: new feature, remove this when SNAP
// revisions: a b

#[cfg(a)]
mod a {
    const FOO: fn(u8) -> u8 = |v: u8| { v };
    //[a]~^ ERROR non-capturing closure to fn coercion is experimental
    //[a]~^^ ERROR mismatched types

    const BAR: [fn(&mut u32); 1] = [
        |v: &mut u32| *v += 1,
        //[a]~^ ERROR non-capturing closure to fn coercion is experimental
        //[a]~^^ ERROR mismatched types
    ];
}

#[cfg(b)]
mod b {
    fn func_specific() -> (fn() -> u32) {
        || return 42
        //[b]~^ ERROR non-capturing closure to fn coercion is experimental
        //[b]~^^ ERROR mismatched types
    }
    fn foo() {
        // Items
        assert_eq!(func_specific()(), 42);
        let foo: fn(u8) -> u8 = |v: u8| { v };
        //[b]~^ ERROR non-capturing closure to fn coercion is experimental
        //[b]~^^ ERROR mismatched types
    }

}

@ -1,49 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

static mut S: *const u8 = unsafe { &S as *const *const u8 as *const u8 };
//~^ ERROR recursive static (see issue #29719)

struct StaticDoubleLinked {
    prev: &'static StaticDoubleLinked,
    next: &'static StaticDoubleLinked,
    data: i32,
    head: bool,
}

static L1: StaticDoubleLinked = StaticDoubleLinked{prev: &L3, next: &L2, data: 1, head: true};
//~^ ERROR recursive static (see issue #29719)
//~^^ ERROR recursive static (see issue #29719)
//~^^^ ERROR recursive static (see issue #29719)
static L2: StaticDoubleLinked = StaticDoubleLinked{prev: &L1, next: &L3, data: 2, head: false};
static L3: StaticDoubleLinked = StaticDoubleLinked{prev: &L2, next: &L1, data: 3, head: false};

pub fn main() {
    unsafe { assert_eq!(S, *(S as *const *const u8)); }

    let mut test_vec = Vec::new();
    let mut cur = &L1;
    loop {
        test_vec.push(cur.data);
        cur = cur.next;
        if cur.head { break }
    }
    assert_eq!(&test_vec, &[1,2,3]);

    let mut test_vec = Vec::new();
    let mut cur = &L1;
    loop {
        cur = cur.prev;
        test_vec.push(cur.data);
        if cur.head { break }
    }
    assert_eq!(&test_vec, &[3,2,1]);
}

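The deletion above accompanies `static_recursion` graduating to an accepted feature earlier in this diff; a minimal sketch (not taken from this commit) of the mutually recursive statics that should now build with no feature attribute at all:

// Sketch only: recursive statics without #![feature(static_recursion)].
struct Node {
    next: &'static Node,
    data: i32,
}

static A: Node = Node { next: &B, data: 1 };
static B: Node = Node { next: &A, data: 2 };

fn main() {
    assert_eq!(A.next.data, 2);
    assert_eq!(B.next.data, 1);
}
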
@ -8,8 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(static_recursion)]

enum foo { foo_(bar) }
struct bar { x: bar }
//~^ ERROR E0072

@ -23,15 +23,19 @@ impl Foo for Bar {
    fn foo(_: fn(u16) -> ()) {}
    //~^ ERROR method `foo` has an incompatible type for trait
    //~| NOTE expected u8
    //~| NOTE expected type `fn(fn(u8))`
    fn bar(_: Option<u16>) {}
    //~^ ERROR method `bar` has an incompatible type for trait
    //~| NOTE expected u8
    //~| NOTE expected type `fn(std::option::Option<u8>)`
    fn baz(_: (u16, u16)) {}
    //~^ ERROR method `baz` has an incompatible type for trait
    //~| NOTE expected u8
    //~| NOTE expected type `fn((u8, u16))`
    fn qux() -> u16 { 5u16 }
    //~^ ERROR method `qux` has an incompatible type for trait
    //~| NOTE expected u8
    //~| NOTE expected type `fn() -> u8`
}

fn main() {}

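The expected-type notes above pin down the trait's signatures even though its definition sits outside this hunk. A reconstructed sketch, inferred from those notes rather than copied from the test, of an impl that would satisfy the trait:

// Sketch only: trait signatures reconstructed from the `expected type` notes.
trait Foo {
    fn foo(_: fn(u8));
    fn bar(_: Option<u8>);
    fn baz(_: (u8, u16));
    fn qux() -> u8;
}

struct Bar;

impl Foo for Bar {
    fn foo(_: fn(u8)) {}
    fn bar(_: Option<u8>) {}
    fn baz(_: (u8, u16)) {}
    fn qux() -> u8 { 5 }
}

fn main() {}
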
@ -11,8 +11,8 @@
// Tests that compiling for a target which is not installed will result in a helpful
// error message.

// compile-flags: --target=s390x-unknown-linux-gnu
// ignore s390x
// compile-flags: --target=thumbv6m-none-eabi
// ignore-arm

// error-pattern:target may not be installed
fn main() { }

55  src/test/compile-fail/issue-37576.rs  Normal file
@ -0,0 +1,55 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

fn main() {
    'test_1: while break 'test_1 {}
    while break {}
    //~^ ERROR `break` or `continue` with no label

    'test_2: while let true = break 'test_2 {}
    while let true = break {}
    //~^ ERROR `break` or `continue` with no label

    loop { 'test_3: while break 'test_3 {} }
    loop { while break {} }
    //~^ ERROR `break` or `continue` with no label

    loop {
        'test_4: while break 'test_4 {}
        break;
    }
    loop {
        while break {}
        //~^ ERROR `break` or `continue` with no label
        break;
    }

    'test_5: while continue 'test_5 {}
    while continue {}
    //~^ ERROR `break` or `continue` with no label

    'test_6: while let true = continue 'test_6 {}
    while let true = continue {}
    //~^ ERROR `break` or `continue` with no label

    loop { 'test_7: while continue 'test_7 {} }
    loop { while continue {} }
    //~^ ERROR `break` or `continue` with no label

    loop {
        'test_8: while continue 'test_8 {}
        continue;
    }
    loop {
        while continue {}
        //~^ ERROR `break` or `continue` with no label
        continue;
    }
}

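A condensed sketch of the rule the new test enforces (illustration only, not another test file): a `break` or `continue` that appears inside a `while` condition is ambiguous unless it names a loop label.

// Sketch only: the labeled form compiles; the commented-out bare form is the
// error case the test expects.
fn main() {
    'outer: while break 'outer {}   // fine: the label says which loop to break
    // while break {}               // error: `break` with no label in the condition
}
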
30  src/test/compile-fail/match-privately-empty.rs  Normal file
@ -0,0 +1,30 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(never_type)]

mod private {
    pub struct Private {
        _bot: !,
        pub misc: bool,
    }
    pub const DATA: Option<Private> = None;
}

fn main() {
    match private::DATA {
    //~^ ERROR non-exhaustive patterns: `Some(Private { misc: true, .. })` not covered
        None => {}
        Some(private::Private {
            misc: false,
            ..
        }) => {}
    }
}

@ -17,8 +17,7 @@ fn f() {
    }
}

// issue #37353
loop { 'w: while break 'w { } } //~ ERROR use of undeclared label
loop { 'w: while break 'w { } }
}

fn main() {}

@ -19,16 +19,13 @@ fn main() {
    };

    let x: &Void = unsafe { std::mem::uninitialized() };
    let _ = match x {};
    //~^ ERROR non-exhaustive
    let _ = match x {}; //~ ERROR non-exhaustive

    let x: (Void,) = unsafe { std::mem::uninitialized() };
    let _ = match x {};
    //~^ ERROR non-exhaustive
    let _ = match x {}; //~ ERROR non-exhaustive

    let x: [Void; 1] = unsafe { std::mem::uninitialized() };
    let _ = match x {};
    //~^ ERROR non-exhaustive
    let _ = match x {}; //~ ERROR non-exhaustive

    let x: &[Void] = unsafe { std::mem::uninitialized() };
    let _ = match x { //~ ERROR non-exhaustive

@ -47,4 +44,3 @@ fn main() {
    let Ok(x) = x;
    //~^ ERROR refutable
}

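The hunk above only compresses the error annotations for matches on an uninhabited type. As a reminder of the underlying rule (sketch only, not part of the diff), a match on a genuinely empty type needs no arms at all:

// Sketch only: an empty enum has no values, so `match` can be exhaustive with zero arms.
enum Void {}

fn absurd(v: Void) -> u32 {
    match v {} // exhaustive despite having no arms
}

fn main() {
    // `Void` has no constructor, so `absurd` can never actually be called.
    let _ = absurd as fn(Void) -> u32;
}
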
18  src/test/incremental/issue-39828/auxiliary/generic.rs  Normal file
@ -0,0 +1,18 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// revisions:rpass1 rpass2
// compile-flags: -Z query-dep-graph

#![rustc_partition_reused(module="__rustc_fallback_codegen_unit", cfg="rpass2")]
#![feature(rustc_attrs)]

#![crate_type="rlib"]
pub fn foo<T>() { }

22  src/test/incremental/issue-39828/issue-39828.rs  Normal file
@ -0,0 +1,22 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Regression test for #39828. If you make use of a module that
// consists only of generics, no code is generated, just a dummy
// module. The reduced graph consists of a single node (for that
// module) with no inputs. Since we only serialize edges, when we
// reload, we would consider that node dirty since it is not recreated
// (it is not the target of any edges).

// revisions:rpass1 rpass2
// aux-build:generic.rs

extern crate generic;
fn main() { }

@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// gate-test-inclusive_range_syntax

// Make sure that #![feature(inclusive_range_syntax)] is required.

// #![feature(inclusive_range_syntax, inclusive_range)]

Some files were not shown because too many files have changed in this diff.