Auto merge of #49008 - kennytm:rollup, r=kennytm
Rollup of 12 pull requests - Successful merges: #48765, #48831, #48840, #48964, #48970, #48971, #48981, #48988, #48991, #48966, #48993, #48874 - Failed merges:
commit 521d91c6be
@@ -19,15 +19,19 @@ running `rustdoc --test foo.rs` will extract this example, and then run it as a

Please note that by default, if no language is set for the block code, `rustdoc`
assumes it is `Rust` code. So the following:

``````markdown
```rust
let x = 5;
```
``````

is strictly equivalent to:

``````markdown
```
let x = 5;
```
``````

There's some subtlety though! Read on for more details.

@@ -1379,27 +1379,159 @@ impl<'a> Formatter<'a> {
        }
    }

    /// Optionally specified integer width that the output should be
    /// Optionally specified integer width that the output should be.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         if let Some(width) = formatter.width() {
    ///             // If we received a width, we use it
    ///             write!(formatter, "{:width$}", &format!("Foo({})", self.0), width = width)
    ///         } else {
    ///             // Otherwise we do nothing special
    ///             write!(formatter, "Foo({})", self.0)
    ///         }
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:10}", Foo(23)), "Foo(23)   ");
    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn width(&self) -> Option<usize> { self.width }

    /// Optionally specified precision for numeric types
    /// Optionally specified precision for numeric types.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(f32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         if let Some(precision) = formatter.precision() {
    ///             // If we received a precision, we use it.
    ///             write!(formatter, "Foo({1:.*})", precision, self.0)
    ///         } else {
    ///             // Otherwise we default to 2.
    ///             write!(formatter, "Foo({:.2})", self.0)
    ///         }
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
    /// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn precision(&self) -> Option<usize> { self.precision }

    /// Determines if the `+` flag was specified.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         if formatter.sign_plus() {
    ///             write!(formatter,
    ///                    "Foo({}{})",
    ///                    if self.0 < 0 { '-' } else { '+' },
    ///                    self.0)
    ///         } else {
    ///             write!(formatter, "Foo({})", self.0)
    ///         }
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:+}", Foo(23)), "Foo(+23)");
    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn sign_plus(&self) -> bool { self.flags & (1 << FlagV1::SignPlus as u32) != 0 }

    /// Determines if the `-` flag was specified.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         if formatter.sign_minus() {
    ///             // You want a minus sign? Have one!
    ///             write!(formatter, "-Foo({})", self.0)
    ///         } else {
    ///             write!(formatter, "Foo({})", self.0)
    ///         }
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:-}", Foo(23)), "-Foo(23)");
    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn sign_minus(&self) -> bool { self.flags & (1 << FlagV1::SignMinus as u32) != 0 }

    /// Determines if the `#` flag was specified.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         if formatter.alternate() {
    ///             write!(formatter, "Foo({})", self.0)
    ///         } else {
    ///             write!(formatter, "{}", self.0)
    ///         }
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:#}", Foo(23)), "Foo(23)");
    /// assert_eq!(&format!("{}", Foo(23)), "23");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn alternate(&self) -> bool { self.flags & (1 << FlagV1::Alternate as u32) != 0 }

    /// Determines if the `0` flag was specified.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         assert!(formatter.sign_aware_zero_pad());
    ///         assert_eq!(formatter.width(), Some(4));
    ///         // We ignore the formatter's options.
    ///         write!(formatter, "{}", self.0)
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{:04}", Foo(23)), "23");
    /// ```
    #[stable(feature = "fmt_flags", since = "1.5.0")]
    pub fn sign_aware_zero_pad(&self) -> bool {
        self.flags & (1 << FlagV1::SignAwareZeroPad as u32) != 0

@@ -13,6 +13,7 @@ use dep_graph::{DepGraph, DepKind, DepNodeIndex};
use hir::def_id::{LOCAL_CRATE, CrateNum};
use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::svh::Svh;
use ich::Fingerprint;
use middle::cstore::CrateStore;
use session::CrateDisambiguator;
use std::iter::repeat;

@@ -121,21 +122,24 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
        collector
    }

    pub(super) fn finalize_and_compute_crate_hash(self,
    pub(super) fn finalize_and_compute_crate_hash(mut self,
                                                  crate_disambiguator: CrateDisambiguator,
                                                  cstore: &dyn CrateStore,
                                                  codemap: &CodeMap,
                                                  commandline_args_hash: u64)
                                                  -> (Vec<MapEntry<'hir>>, Svh) {
        let mut node_hashes: Vec<_> = self
        self
            .hir_body_nodes
            .sort_unstable_by(|&(ref d1, _), &(ref d2, _)| d1.cmp(d2));

        let node_hashes = self
            .hir_body_nodes
            .iter()
            .map(|&(def_path_hash, dep_node_index)| {
                (def_path_hash, self.dep_graph.fingerprint_of(dep_node_index))
            })
            .collect();

        node_hashes.sort_unstable_by(|&(ref d1, _), &(ref d2, _)| d1.cmp(d2));
            .fold(Fingerprint::ZERO, |fingerprint , &(def_path_hash, dep_node_index)| {
                fingerprint.combine(
                    def_path_hash.0.combine(self.dep_graph.fingerprint_of(dep_node_index))
                )
            });

        let mut upstream_crates: Vec<_> = cstore.crates_untracked().iter().map(|&cnum| {
            let name = cstore.crate_name_untracked(cnum).as_str();

@@ -1641,7 +1641,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
        opt::multi_s(
            "",
            "remap-path-prefix",
            "remap source names in output",
            "Remap source names in all output (compiler messages and output files)",
            "FROM=TO",
        ),
    ]);

@@ -25,7 +25,7 @@ pub fn target() -> TargetResult {
        linker_flavor: LinkerFlavor::Gcc,
        options: TargetOptions {
            cpu: "mips32r2".to_string(),
            features: "+mips32r2".to_string(),
            features: "+mips32r2,+fpxx,+nooddspreg".to_string(),
            max_atomic_width: Some(32),

            // see #36994

@@ -25,8 +25,8 @@ pub fn target() -> TargetResult {
        linker_flavor: LinkerFlavor::Gcc,

        options: TargetOptions {
            cpu: "mips32".to_string(),
            features: "+mips32".to_string(),
            cpu: "mips32r2".to_string(),
            features: "+mips32r2,+fpxx,+nooddspreg".to_string(),
            max_atomic_width: Some(32),

            // see #36994

@@ -13,8 +13,8 @@ use target::{Target, TargetResult};

pub fn target() -> TargetResult {
    let mut base = super::linux_musl_base::opts();
    base.cpu = "mips32".to_string();
    base.features = "+mips32,+soft-float".to_string();
    base.cpu = "mips32r2".to_string();
    base.features = "+mips32r2,+soft-float".to_string();
    base.max_atomic_width = Some(32);
    // see #36994
    base.exe_allocation_crate = None;

@@ -25,8 +25,8 @@ pub fn target() -> TargetResult {
        linker_flavor: LinkerFlavor::Gcc,

        options: TargetOptions {
            cpu: "mips32".to_string(),
            features: "+mips32,+soft-float".to_string(),
            cpu: "mips32r2".to_string(),
            features: "+mips32r2,+soft-float".to_string(),
            max_atomic_width: Some(32),

            // see #36994

@@ -224,70 +224,6 @@ impl<T: Idx> IdxSet<T> {
            _pd: PhantomData,
        }
    }

    /// Calls `f` on each index value held in this set, up to the
    /// bound `max_bits` on the size of universe of indexes.
    pub fn each_bit<F>(&self, max_bits: usize, f: F) where F: FnMut(T) {
        each_bit(self, max_bits, f)
    }

    /// Removes all elements from this set.
    pub fn reset_to_empty(&mut self) {
        for word in self.words_mut() { *word = 0; }
    }

    pub fn elems(&self, universe_size: usize) -> Elems<T> {
        Elems { i: 0, set: self, universe_size: universe_size }
    }
}

pub struct Elems<'a, T: Idx> { i: usize, set: &'a IdxSet<T>, universe_size: usize }

impl<'a, T: Idx> Iterator for Elems<'a, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.i >= self.universe_size { return None; }
        let mut i = self.i;
        loop {
            if i >= self.universe_size {
                self.i = i; // (mark iteration as complete.)
                return None;
            }
            if self.set.contains(&T::new(i)) {
                self.i = i + 1; // (next element to start at.)
                return Some(T::new(i));
            }
            i = i + 1;
        }
    }
}

fn each_bit<T: Idx, F>(words: &IdxSet<T>, max_bits: usize, mut f: F) where F: FnMut(T) {
    let usize_bits: usize = mem::size_of::<usize>() * 8;

    for (word_index, &word) in words.words().iter().enumerate() {
        if word != 0 {
            let base_index = word_index * usize_bits;
            for offset in 0..usize_bits {
                let bit = 1 << offset;
                if (word & bit) != 0 {
                    // NB: we round up the total number of bits
                    // that we store in any given bit set so that
                    // it is an even multiple of usize::BITS. This
                    // means that there may be some stray bits at
                    // the end that do not correspond to any
                    // actual value; that's why we first check
                    // that we are in range of bits_per_block.
                    let bit_index = base_index + offset as usize;
                    if bit_index >= max_bits {
                        return;
                    } else {
                        f(Idx::new(bit_index));
                    }
                }
            }
        }
    }
}

pub struct Iter<'a, T: Idx> {

@@ -1147,6 +1147,15 @@ fn usage(verbose: bool, include_unstable_options: bool) {
             verbose_help);
}

fn print_wall_help() {
    println!("
The flag `-Wall` does not exist in `rustc`. Most useful lints are enabled by
default. Use `rustc -W help` to see all available lints. It's more common to put
warning settings in the crate root using `#![warn(LINT_NAME)]` instead of using
the command line flag directly.
");
}
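
As a concrete illustration of the advice printed by this help text (an added editorial sketch; the lint name below is just an example, not something this patch adds), warning settings can live in the crate root as an attribute instead of a `-W` flag:

```rust
// Crate root (e.g. lib.rs). Roughly equivalent to passing `-W missing-docs`
// on the command line, but kept with the code itself.
#![warn(missing_docs)]

//! Example crate that warns when public items lack documentation.

/// This item is documented, so no warning is emitted for it.
pub fn documented() {}
```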

fn describe_lints(sess: &Session, lint_store: &lint::LintStore, loaded_plugins: bool) {
    println!("
Available lint options:

@@ -1391,6 +1400,13 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
        return None;
    }

    // Handle the special case of -Wall.
    let wall = matches.opt_strs("W");
    if wall.iter().any(|x| *x == "all") {
        print_wall_help();
        return None;
    }

    // Don't handle -W help here, because we might first load plugins.
    let r = matches.opt_strs("Z");
    if r.iter().any(|x| *x == "help") {

@@ -1468,6 +1484,12 @@ fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
        args.push(arg.to_string_lossy().to_string());
    }

    // Avoid printing help because of empty args. This can suggest the compiler
    // itself is not the program root (consider RLS).
    if args.len() < 2 {
        return None;
    }

    let matches = if let Some(matches) = handle_options(&args) {
        matches
    } else {

@@ -530,7 +530,7 @@ impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx
                // Look for any active borrows to locals
                let domain = flow_state.borrows.operator();
                let data = domain.borrows();
                flow_state.borrows.with_elems_outgoing(|borrows| {
                flow_state.borrows.with_iter_outgoing(|borrows| {
                    for i in borrows {
                        let borrow = &data[i.borrow_index()];
                        self.check_for_local_borrow(borrow, span);

@@ -546,7 +546,7 @@ impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx
                // so this "extra check" serves as a kind of backup.
                let domain = flow_state.borrows.operator();
                let data = domain.borrows();
                flow_state.borrows.with_elems_outgoing(|borrows| {
                flow_state.borrows.with_iter_outgoing(|borrows| {
                    for i in borrows {
                        let borrow = &data[i.borrow_index()];
                        let context = ContextKind::StorageDead.new(loc);

@@ -1292,7 +1292,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
            place
        );

        for i in flow_state.ever_inits.elems_incoming() {
        for i in flow_state.ever_inits.iter_incoming() {
            let init = self.move_data.inits[i];
            let init_place = &self.move_data.move_paths[init.path].place;
            if self.places_conflict(&init_place, place, Deep) {

@@ -2129,8 +2129,8 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {

        // check for loan restricting path P being used. Accounts for
        // borrows of P, P.a.b, etc.
        let mut elems_incoming = flow_state.borrows.elems_incoming();
        while let Some(i) = elems_incoming.next() {
        let mut iter_incoming = flow_state.borrows.iter_incoming();
        while let Some(i) = iter_incoming.next() {
            let borrowed = &data[i.borrow_index()];

            if self.places_conflict(&borrowed.borrowed_place, place, access) {

@@ -12,7 +12,7 @@
//! locations.

use rustc::mir::{BasicBlock, Location};
use rustc_data_structures::indexed_set::{self, IdxSetBuf};
use rustc_data_structures::indexed_set::{IdxSetBuf, Iter};
use rustc_data_structures::indexed_vec::Idx;

use dataflow::{BitDenotation, BlockSets, DataflowResults};

@@ -81,8 +81,7 @@ where
    where
        F: FnMut(BD::Idx),
    {
        self.curr_state
            .each_bit(self.base_results.operator().bits_per_block(), f)
        self.curr_state.iter().for_each(f)
    }

    /// Iterate over each `gen` bit in the current effect (invoke

@@ -92,8 +91,7 @@ where
    where
        F: FnMut(BD::Idx),
    {
        self.stmt_gen
            .each_bit(self.base_results.operator().bits_per_block(), f)
        self.stmt_gen.iter().for_each(f)
    }

    pub fn new(results: DataflowResults<BD>) -> Self {

@@ -119,23 +117,21 @@ where
    }

    /// Returns an iterator over the elements present in the current state.
    pub fn elems_incoming(&self) -> iter::Peekable<indexed_set::Elems<BD::Idx>> {
        let univ = self.base_results.sets().bits_per_block();
        self.curr_state.elems(univ).peekable()
    pub fn iter_incoming(&self) -> iter::Peekable<Iter<BD::Idx>> {
        self.curr_state.iter().peekable()
    }

    /// Creates a clone of the current state and applies the local
    /// effects to the clone (leaving the state of self intact).
    /// Invokes `f` with an iterator over the resulting state.
    pub fn with_elems_outgoing<F>(&self, f: F)
    pub fn with_iter_outgoing<F>(&self, f: F)
    where
        F: FnOnce(indexed_set::Elems<BD::Idx>),
        F: FnOnce(Iter<BD::Idx>),
    {
        let mut curr_state = self.curr_state.clone();
        curr_state.union(&self.stmt_gen);
        curr_state.subtract(&self.stmt_kill);
        let univ = self.base_results.sets().bits_per_block();
        f(curr_state.elems(univ));
        f(curr_state.iter());
    }
}

@@ -147,8 +143,8 @@ impl<BD> FlowsAtLocation for FlowAtLocation<BD>
    }

    fn reconstruct_statement_effect(&mut self, loc: Location) {
        self.stmt_gen.reset_to_empty();
        self.stmt_kill.reset_to_empty();
        self.stmt_gen.clear();
        self.stmt_kill.clear();
        {
            let mut sets = BlockSets {
                on_entry: &mut self.curr_state,

@@ -172,8 +168,8 @@ impl<BD> FlowsAtLocation for FlowAtLocation<BD>
    }

    fn reconstruct_terminator_effect(&mut self, loc: Location) {
        self.stmt_gen.reset_to_empty();
        self.stmt_kill.reset_to_empty();
        self.stmt_gen.clear();
        self.stmt_kill.clear();
        {
            let mut sets = BlockSets {
                on_entry: &mut self.curr_state,

@@ -444,8 +444,7 @@ pub struct DataflowState<O: BitDenotation>
impl<O: BitDenotation> DataflowState<O> {
    pub fn each_bit<F>(&self, words: &IdxSet<O::Idx>, f: F) where F: FnMut(O::Idx)
    {
        let bits_per_block = self.operator.bits_per_block();
        words.each_bit(bits_per_block, f)
        words.iter().for_each(f)
    }

    pub(crate) fn interpret_set<'c, P>(&self,

@@ -132,6 +132,13 @@ impl Command {
            return false
        }

        // Right now LLD doesn't support the `@` syntax of passing an argument
        // through files, so regardless of the platform we try to go to the OS
        // on this one.
        if let Program::Lld(..) = self.program {
            return false
        }

        // Ok so on Windows to spawn a process is 32,768 characters in its
        // command line [1]. Unfortunately we don't actually have access to that
        // as it's calculated just before spawning. Instead we perform a

@@ -827,11 +827,14 @@ fn exec_linker(sess: &Session, cmd: &mut Command, tmpdir: &Path)
    if !cmd.very_likely_to_exceed_some_spawn_limit() {
        match cmd.command().stdout(Stdio::piped()).stderr(Stdio::piped()).spawn() {
            Ok(child) => return child.wait_with_output(),
            Err(ref e) if command_line_too_big(e) => {}
            Err(ref e) if command_line_too_big(e) => {
                info!("command line to linker was too big: {}", e);
            }
            Err(e) => return Err(e)
        }
    }

    info!("falling back to passing arguments to linker via an @-file");
    let mut cmd2 = cmd.clone();
    let mut args = String::new();
    for arg in cmd2.take_args() {

@@ -856,6 +859,7 @@ fn exec_linker(sess: &Session, cmd: &mut Command, tmpdir: &Path)
    };
    fs::write(&file, &bytes)?;
    cmd2.arg(format!("@{}", file.display()));
    info!("invoking linker {:?}", cmd2);
    return cmd2.output();

    #[cfg(unix)]

@@ -104,7 +104,7 @@ const POWERPC_WHITELIST: &'static [&'static str] = &["altivec",
                                                      "power8-vector", "power9-vector",
                                                      "vsx"];

const MIPS_WHITELIST: &'static [&'static str] = &["msa"];
const MIPS_WHITELIST: &'static [&'static str] = &["fp64", "msa"];

pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {
    let arch = if sess.target.target.arch == "x86_64" {

@@ -1,48 +1,5 @@
NB: This crate is part of the Rust compiler. For an overview of the
compiler as a whole, see
[the README.md file found in `librustc`](../librustc/README.md).
For a high-level intro to how type checking works in rustc, see the
[type checking] chapter of the [rustc guide].

The `rustc_typeck` crate contains the source for "type collection" and
"type checking", as well as a few other bits of related functionality.
(It draws heavily on the [type inferencing][infer] and
[trait solving][traits] code found in librustc.)

[infer]: ../librustc/infer/README.md
[traits]: ../librustc/traits/README.md

## Type collection

Type "collection" is the process of converting the types found in the
HIR (`hir::Ty`), which represent the syntactic things that the user
wrote, into the **internal representation** used by the compiler
(`Ty<'tcx>`) -- we also do similar conversions for where-clauses and
other bits of the function signature.

To try and get a sense for the difference, consider this function:

```rust
struct Foo { }
fn foo(x: Foo, y: self::Foo) { .. }
//        ^^^     ^^^^^^^^^
```

Those two parameters `x` and `y` each have the same type: but they
will have distinct `hir::Ty` nodes. Those nodes will have different
spans, and of course they encode the path somewhat differently. But
once they are "collected" into `Ty<'tcx>` nodes, they will be
represented by the exact same internal type.

Collection is defined as a bundle of queries (e.g., `type_of`) for
computing information about the various functions, traits, and other
items in the crate being compiled. Note that each of these queries is
concerned with *interprocedural* things -- for example, for a function
definition, collection will figure out the type and signature of the
function, but it will not visit the *body* of the function in any way,
nor examine type annotations on local variables (that's the job of
type *checking*).

For more details, see the `collect` module.

## Type checking

TODO

[type checking]: https://rust-lang-nursery.github.io/rustc-guide/type-checking.html
[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/

@@ -1,111 +0,0 @@
# Method lookup

Method lookup can be rather complex due to the interaction of a number
of factors, such as self types, autoderef, trait lookup, etc. This
file provides an overview of the process. More detailed notes are in
the code itself, naturally.

One way to think of method lookup is that we convert an expression of
the form:

    receiver.method(...)

into a more explicit UFCS form:

    Trait::method(ADJ(receiver), ...)        // for a trait call
    ReceiverType::method(ADJ(receiver), ...) // for an inherent method call

Here `ADJ` is some kind of adjustment, which is typically a series of
autoderefs and then possibly an autoref (e.g., `&**receiver`). However
we sometimes do other adjustments and coercions along the way, in
particular unsizing (e.g., converting from `[T; n]` to `[T]`).
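
A small sketch of that rewrite (an added illustration, not part of the original README; the `Greet` trait is made up): all three calls below are equivalent, the later ones simply spelling out what the method-call syntax does for you.

```rust
trait Greet {
    fn greet(&self) -> String;
}

struct Person;

impl Greet for Person {
    fn greet(&self) -> String {
        "hello".to_string()
    }
}

fn main() {
    let p = Person;
    // Method-call syntax: the compiler inserts the adjustment for us.
    let a = p.greet();
    // Explicit UFCS form, with ADJ(receiver) = &p written out by hand.
    let b = Greet::greet(&p);
    // Fully qualified form naming the implementing type as well.
    let c = <Person as Greet>::greet(&p);
    assert_eq!(a, b);
    assert_eq!(b, c);
}
```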

## The Two Phases

Method lookup is divided into two major phases: probing (`probe.rs`)
and confirmation (`confirm.rs`). The probe phase is when we decide
what method to call and how to adjust the receiver. The confirmation
phase "applies" this selection, updating the side-tables, unifying
type variables, and otherwise doing side-effectful things.

One reason for this division is to be more amenable to caching. The
probe phase produces a "pick" (`probe::Pick`), which is designed to be
cacheable across method-call sites. Therefore, it does not include
inference variables or other information.

## Probe phase

The probe phase (`probe.rs`) decides what method is being called and
how to adjust the receiver.

### Steps

The first thing that the probe phase does is to create a series of
*steps*. This is done by progressively dereferencing the receiver type
until it cannot be deref'd anymore, as well as applying an optional
"unsize" step. So if the receiver has type `Rc<Box<[T; 3]>>`, this
might yield:

    Rc<Box<[T; 3]>>
    Box<[T; 3]>
    [T; 3]
    [T]
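
For instance (an added sketch, not from the original README), a call written against exactly that receiver type resolves by walking these steps until a candidate matches on the slice type:

```rust
use std::rc::Rc;

fn main() {
    let r: Rc<Box<[i32; 3]>> = Rc::new(Box::new([1, 2, 3]));
    // `r.len()` probes Rc<Box<[i32; 3]>>, then Box<[i32; 3]>, then [i32; 3],
    // and finally the unsized step [i32], where `len` matches (with an autoref).
    assert_eq!(r.len(), 3);
}
```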

### Candidate assembly

We then search along those steps to create a list of *candidates*. A
`Candidate` is a method item that might plausibly be the method being
invoked. For each candidate, we'll derive a "transformed self type"
that takes into account explicit self.

Candidates are grouped into two kinds, inherent and extension.

**Inherent candidates** are those that are derived from the
type of the receiver itself. So, if you have a receiver of some
nominal type `Foo` (e.g., a struct), any methods defined within an
impl like `impl Foo` are inherent methods. Nothing needs to be
imported to use an inherent method; they are associated with the type
itself (note that inherent impls can only be defined in the same
module as the type itself).

FIXME: Inherent candidates are not always derived from impls. If you
have a trait object, such as a value of type `Box<ToString>`, then the
trait methods (`to_string()`, in this case) are inherently associated
with it. Another case is type parameters, in which case the methods of
their bounds are inherent. However, this part of the rules is subject
to change: when DST's "impl Trait for Trait" is complete, trait object
dispatch could be subsumed into trait matching, and the type parameter
behavior should be reconsidered in light of where clauses.

**Extension candidates** are derived from imported traits. If I have
the trait `ToString` imported, and I call `to_string()` on a value of
type `T`, then we will go off to find out whether there is an impl of
`ToString` for `T`. These kinds of method calls are called "extension
methods". They can be defined in any module, not only the one that
defined `T`. Furthermore, you must import the trait to call such a
method.
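
A minimal sketch of an extension candidate (added for illustration; the `Shout` trait is made up): the method only resolves while the trait is in scope.

```rust
mod ext {
    pub trait Shout {
        fn shout(&self) -> String;
    }

    impl Shout for str {
        fn shout(&self) -> String {
            self.to_uppercase()
        }
    }
}

// Without this import, `"hi".shout()` below would fail to resolve.
use ext::Shout;

fn main() {
    assert_eq!("hi".shout(), "HI");
}
```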

So, let's continue our example. Imagine that we were calling a method
`foo` with the receiver `Rc<Box<[T; 3]>>` and there is a trait `Foo`
that defines it with `&self` for the type `Rc<U>` as well as a method
on the type `Box` that defines `Foo` but with `&mut self`. Then we
might have two candidates:

    &Rc<Box<[T; 3]>>   from the impl of `Foo` for `Rc<U>` where `U=Box<[T; 3]>`
    &mut Box<[T; 3]>   from the inherent impl on `Box<U>` where `U=[T; 3]`

### Candidate search

Finally, to actually pick the method, we will search down the steps,
trying to match the receiver type against the candidate types. At
each step, we also consider an auto-ref and auto-mut-ref to see whether
that makes any of the candidates match. We pick the first step where
we find a match.

In the case of our example, the first step is `Rc<Box<[T; 3]>>`,
which does not itself match any candidate. But when we autoref it, we
get the type `&Rc<Box<[T; 3]>>` which does match. We would then
recursively consider all where-clauses that appear on the impl: if
those match (or we cannot rule out that they do), then this is the
method we would pick. Otherwise, we would continue down the series of
steps.

@@ -8,7 +8,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Method lookup: the secret sauce of Rust. See `README.md`.
//! Method lookup: the secret sauce of Rust. See the [rustc guide] chapter.
//!
//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/method-lookup.html

use check::FnCtxt;
use hir::def::Def;

@@ -1,276 +0,0 @@
## Variance of type and lifetime parameters

This file infers the variance of type and lifetime parameters. The
algorithm is taken from Section 4 of the paper "Taming the Wildcards:
Combining Definition- and Use-Site Variance" published in PLDI'11 and
written by Altidor et al., and hereafter referred to as The Paper.

This inference is explicitly designed *not* to consider the uses of
types within code. To determine the variance of type parameters
defined on type `X`, we only consider the definition of the type `X`
and the definitions of any types it references.

We only infer variance for type parameters found on *data types*
like structs and enums. In these cases, there is a fairly straightforward
explanation for what variance means. The variance of the type
or lifetime parameters defines whether `T<A>` is a subtype of `T<B>`
(resp. `T<'a>` and `T<'b>`) based on the relationship of `A` and `B`
(resp. `'a` and `'b`).
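
As an added concrete sketch (not part of the original README): `Vec<T>` is covariant in `T` and `&'a str` is covariant in `'a`, so a `Vec<&'static str>` may be supplied where a `Vec<&'a str>` is expected.

```rust
fn print_all<'a>(v: Vec<&'a str>) {
    for s in v {
        println!("{}", s);
    }
}

fn main() {
    let statics: Vec<&'static str> = vec!["hello", "world"];
    // Allowed because Vec<&'static str> <: Vec<&'a str> whenever 'static: 'a.
    print_all(statics);
}
```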

We do not infer variance for type parameters found on traits, fns,
or impls. Variance on trait parameters can indeed make sense
(and we used to compute it) but it is actually rather subtle in
meaning and not that useful in practice, so we removed it. See the
addendum for some details. Variances on fn/impl parameters, otoh,
don't make sense because these parameters are instantiated and
then forgotten; they don't persist in types or compiled
byproducts.

### The algorithm

The basic idea is quite straightforward. We iterate over the types
defined and, for each use of a type parameter X, accumulate a
constraint indicating that the variance of X must be valid for the
variance of that use site. We then iteratively refine the variance of
X until all constraints are met. There is *always* a solution, because at
the limit we can declare all type parameters to be invariant and all
constraints will be satisfied.

As a simple example, consider:

    enum Option<A> { Some(A), None }
    enum OptionalFn<B> { Some(|B|), None }
    enum OptionalMap<C> { Some(|C| -> C), None }

Here, we will generate the constraints:

    1. V(A) <= +
    2. V(B) <= -
    3. V(C) <= +
    4. V(C) <= -

These indicate that (1) the variance of A must be at most covariant;
(2) the variance of B must be at most contravariant; and (3, 4) the
variance of C must be at most covariant *and* contravariant. All of these
results are based on a variance lattice defined as follows:

       *      Top (bivariant)
    -     +
       o      Bottom (invariant)

Based on this lattice, the solution `V(A)=+`, `V(B)=-`, `V(C)=o` is the
optimal solution. Note that there is always a naive solution which
just declares all variables to be invariant.

You may be wondering why fixed-point iteration is required. The reason
is that the variance of a use site may itself be a function of the
variance of other type parameters. In full generality, our constraints
take the form:

    V(X) <= Term
    Term := + | - | * | o | V(X) | Term x Term

Here the notation `V(X)` indicates the variance of a type/region
parameter `X` with respect to its defining class. `Term x Term`
represents the "variance transform" as defined in the paper:

> If the variance of a type variable `X` in type expression `E` is `V2`
> and the definition-site variance of the [corresponding] type parameter
> of a class `C` is `V1`, then the variance of `X` in the type expression
> `C<E>` is `V3 = V1.xform(V2)`.
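
As a small worked instance of that transform (added for illustration, not from the
original text): in the type `Vec<fn(X)>`, the parameter of `Vec` is used
covariantly (`V1 = +`) and a `fn` argument position is contravariant (`V2 = -`), so

    V(X in Vec<fn(X)>) = (+).xform(-) = -

i.e. `X` ends up constrained to be at most contravariant in that expression.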

### Constraints

If I have a struct or enum with where clauses:

    struct Foo<T:Bar> { ... }

you might wonder whether the variance of `T` with respect to `Bar`
affects the variance `T` with respect to `Foo`. I claim no. The
reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
`Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
`X <: Y`. However, while `X : Bar`, `Y : Bar` does not hold. In that
case, the upcast will be illegal, but not because of a variance
failure, but rather because the target type `Foo<Y>` is itself just
not well-formed. Basically we get to assume well-formedness of all
types involved before considering variance.

#### Dependency graph management

Because variance is a whole-crate inference, its dependency graph
can become quite muddled if we are not careful. To resolve this, we refactor
into two queries:

- `crate_variances` computes the variance for all items in the current crate.
- `variances_of` accesses the variance for an individual reading; it
  works by requesting `crate_variances` and extracting the relevant data.

If you limit yourself to reading `variances_of`, your code will then
depend only on the variances inferred for that particular item.

Ultimately, this setup relies on the red-green algorithm.
In particular, every variance query ultimately depends on -- effectively --
all type definitions in the entire crate (through `crate_variances`),
but since most changes will not result in a change
to the actual results from variance inference,
the `variances_of` query will wind up being considered green after it is re-evaluated.

### Addendum: Variance on traits

As mentioned above, we used to permit variance on traits. This was
computed based on the appearance of trait type parameters in
method signatures and was used to represent the compatibility of
vtables in trait objects (and also "virtual" vtables or dictionary
in trait bounds). One complication was that variance for
associated types is less obvious, since they can be projected out
and put to myriad uses, so it's not clear when it is safe to allow
`X<A>::Bar` to vary (or indeed just what that means). Moreover (as
covered below) all inputs on any trait with an associated type had
to be invariant, limiting the applicability. Finally, the
annotations (`MarkerTrait`, `PhantomFn`) needed to ensure that all
trait type parameters had a variance were confusing and annoying
for little benefit.

Just for historical reference, I am going to preserve some text indicating
how one could interpret variance and trait matching.

#### Variance and object types

Just as with structs and enums, we can decide the subtyping
relationship between two object types `&Trait<A>` and `&Trait<B>`
based on the relationship of `A` and `B`. Note that for object
types we ignore the `Self` type parameter -- it is unknown, and
the nature of dynamic dispatch ensures that we will always call a
function that expects the appropriate `Self` type. However, we
must be careful with the other type parameters, or else we could
end up calling a function that is expecting one type but provided
another.

To see what I mean, consider a trait like so:

    trait ConvertTo<A> {
        fn convertTo(&self) -> A;
    }

Intuitively, if we had one object `O=&ConvertTo<Object>` and another
`S=&ConvertTo<String>`, then `S <: O` because `String <: Object`
(presuming Java-like "string" and "object" types, my go-to examples
for subtyping). The actual algorithm would be to compare the
(explicit) type parameters pairwise respecting their variance: here,
the type parameter A is covariant (it appears only in a return
position), and hence we require that `String <: Object`.

You'll note though that we did not consider the binding for the
(implicit) `Self` type parameter: in fact, it is unknown, so that's
good. The reason we can ignore that parameter is precisely because we
don't need to know its value until a call occurs, and at that time (as
you said) the dynamic nature of virtual dispatch means the code we run
will be correct for whatever value `Self` happens to be bound to for
the particular object whose method we called. `Self` is thus different
from `A`, because the caller requires that `A` be known in order to
know the return type of the method `convertTo()`. (As an aside, we
have rules preventing methods where `Self` appears outside of the
receiver position from being called via an object.)

#### Trait variance and vtable resolution

But traits aren't only used with objects. They're also used when
deciding whether a given impl satisfies a given trait bound. To set the
scene here, imagine I had a function:

    fn convertAll<A,T:ConvertTo<A>>(v: &[T]) {
        ...
    }

Now imagine that I have an implementation of `ConvertTo` for `Object`:

    impl ConvertTo<i32> for Object { ... }

And I want to call `convertAll` on an array of strings. Suppose
further that for whatever reason I specifically supply the value of
`String` for the type parameter `T`:

    let mut vector = vec!["string", ...];
    convertAll::<i32, String>(vector);

Is this legal? To put it another way, can we apply the `impl` for
`Object` to the type `String`? The answer is yes, but to see why
we have to expand out what will happen:

- `convertAll` will create a pointer to one of the entries in the
  vector, which will have type `&String`
- It will then call the impl of `convertTo()` that is intended
  for use with objects. This has the type:

      fn(self: &Object) -> i32

  It is ok to provide a value for `self` of type `&String` because
  `&String <: &Object`.

OK, so intuitively we want this to be legal, so let's bring this back
to variance and see whether we are computing the correct result. We
must first figure out how to phrase the question "is an impl for
`Object,i32` usable where an impl for `String,i32` is expected?"

Maybe it's helpful to think of a dictionary-passing implementation of
type classes. In that case, `convertAll()` takes an implicit parameter
representing the impl. In short, we *have* an impl of type:

    V_O = ConvertTo<i32> for Object

and the function prototype expects an impl of type:

    V_S = ConvertTo<i32> for String

As with any argument, this is legal if the type of the value given
(`V_O`) is a subtype of the type expected (`V_S`). So is `V_O <: V_S`?
The answer will depend on the variance of the various parameters. In
this case, because the `Self` parameter is contravariant and `A` is
covariant, it means that:

    V_O <: V_S iff
        i32 <: i32
        String <: Object

These conditions are satisfied and so we are happy.

#### Variance and associated types

Traits with associated types -- or at minimum projection
expressions -- must be invariant with respect to all of their
inputs. To see why this makes sense, consider what subtyping for a
trait reference means:

    <T as Trait> <: <U as Trait>

means that if I know that `T as Trait`, I also know that `U as
Trait`. Moreover, if you think of it as dictionary passing style,
it means that a dictionary for `<T as Trait>` is safe to use where
a dictionary for `<U as Trait>` is expected.

The problem is that when you can project types out from `<T as
Trait>`, the relationship to types projected out of `<U as Trait>`
is completely unknown unless `T==U` (see #21726 for more
details). Making `Trait` invariant ensures that this is true.

Another related reason is that if we didn't make traits with
associated types invariant, then projection is no longer a
function with a single result. Consider:

```
trait Identity { type Out; fn foo(&self); }
impl<T> Identity for T { type Out = T; ... }
```

Now if I have `<&'static () as Identity>::Out`, this can be
validly derived as `&'a ()` for any `'a`:

    <&'a () as Identity> <: <&'static () as Identity>
    if &'static () <: &'a ()   -- Identity is contravariant in Self
    if 'static : 'a            -- Subtyping rules for relations

This change otoh means that `<&'static () as Identity>::Out` is
always `&'static ()` (which might then be upcast to `&'a ()`,
separately). This was helpful in solving #21750.

@@ -8,8 +8,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Module for inferring the variance of type and lifetime
//! parameters. See README.md for details.
//! Module for inferring the variance of type and lifetime parameters. See the [rustc guide]
//! chapter for more info.
//!
//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/variance.html

use arena;
use rustc::hir;

@@ -87,7 +87,10 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>
        lang_items: lang_items(tcx),
    };

    // See README.md for a discussion on dep-graph management.
    // See the following for a discussion on dep-graph management.
    //
    // - https://rust-lang-nursery.github.io/rustc-guide/query.html
    // - https://rust-lang-nursery.github.io/rustc-guide/variance.html
    tcx.hir.krate().visit_all_item_likes(&mut terms_cx);

    terms_cx

@@ -44,8 +44,12 @@ function switchTheme(styleElem, mainStyleElem, newTheme) {
    var fullBasicCss = "rustdoc" + resourcesSuffix + ".css";
    var fullNewTheme = newTheme + resourcesSuffix + ".css";
    var newHref = mainStyleElem.href.replace(fullBasicCss, fullNewTheme);
    var found = false;

    if (styleElem.href === newHref) {
        return;
    }

    var found = false;
    if (savedHref.length === 0) {
        onEach(document.getElementsByTagName("link"), function(el) {
            savedHref.push(el.href);

src/test/ui/nll/issue-48070.rs (new file, 35 lines)
@@ -0,0 +1,35 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// run-pass
// revisions: lxl nll

#![cfg_attr(nll, feature(nll))]

struct Foo {
    x: u32
}

impl Foo {
    fn twiddle(&mut self) -> &mut Self { self }
    fn twaddle(&mut self) -> &mut Self { self }
    fn emit(&mut self) {
        self.x += 1;
    }
}

fn main() {
    let mut foo = Foo { x: 0 };
    match 22 {
        22 => &mut foo,
        44 => foo.twiddle(),
        _ => foo.twaddle(),
    }.emit();
}