Auto merge of #50413 - kennytm:rollup, r=kennytm
Rollup of 12 pull requests

Successful merges:

- #50302 (Add query search order check)
- #50320 (Fix invalid path generation in rustdoc search)
- #50349 (Rename "show type declaration" to "show declaration")
- #50360 (Clarify wordings of the `unstable_name_collision` lint.)
- #50365 (Use two vectors in nearest_common_ancestor.)
- #50393 (Allow unaligned reads in constants)
- #50401 (Revert "Implement FromStr for PathBuf")
- #50406 (Forbid constructing empty identifiers from concat_idents)
- #50407 (Always inline simple BytePos and CharPos methods.)
- #50416 (check if the token is a lifetime before parsing)
- #50417 (Update Cargo)
- #50421 (Fix ICE when using a..=b in a closure.)

Failed merges:

commit e82261dfbb
@@ -606,8 +606,8 @@ mod builtin {
     #[macro_export]
     #[cfg(dox)]
     macro_rules! concat_idents {
-        ($($e:ident),*) => ({ /* compiler built-in */ });
-        ($($e:ident,)*) => ({ /* compiler built-in */ });
+        ($($e:ident),+) => ({ /* compiler built-in */ });
+        ($($e:ident,)+) => ({ /* compiler built-in */ });
     }

     /// Concatenates literals into a static string slice.
@@ -3121,9 +3121,9 @@ impl<'a> LoweringContext<'a> {
            }
            // Desugar `<start>..=<end>` to `std::ops::RangeInclusive::new(<start>, <end>)`
            ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
-                // FIXME: Use head_sp directly after RangeInclusive::new() is stabilized in stage0.
+                // FIXME: Use e.span directly after RangeInclusive::new() is stabilized in stage0.
                let span = self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
-                let id = self.lower_node_id(e.id);
+                let id = self.next_id();
                let e1 = self.lower_expr(e1);
                let e2 = self.lower_expr(e2);
                let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], false));
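
The comment in this hunk spells out the desugaring: `a..=b` becomes a call to `std::ops::RangeInclusive::new(a, b)`. As a rough, hand-written illustration of the case the ICE fix (#50421) covers, not compiler output, the closure from the new run-pass test behaves like:

    fn main() {
        // `|| 0..=1` is lowered to a call to `RangeInclusive::new`, roughly:
        let f = || std::ops::RangeInclusive::new(0, 1);
        assert_eq!(f(), 0..=1);
    }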
@@ -507,7 +507,7 @@ pub fn struct_lint_level<'a>(sess: &'a Session,

    let explanation = if lint_id == LintId::of(::lint::builtin::UNSTABLE_NAME_COLLISION) {
        "once this method is added to the standard library, \
-         there will be ambiguity here, which will cause a hard error!"
+         the ambiguity may cause an error or change in behavior!"
            .to_owned()
    } else if let Some(edition) = future_incompatible.edition {
        format!("{} in the {} edition!", STANDARD_MESSAGE, edition)
@@ -690,21 +690,22 @@ impl<'tcx> ScopeTree {
        // the start. So this algorithm is faster.
        let mut ma = Some(scope_a);
        let mut mb = Some(scope_b);
-        let mut seen: SmallVec<[Scope; 32]> = SmallVec::new();
+        let mut seen_a: SmallVec<[Scope; 32]> = SmallVec::new();
+        let mut seen_b: SmallVec<[Scope; 32]> = SmallVec::new();
        loop {
            if let Some(a) = ma {
-                if seen.iter().position(|s| *s == a).is_some() {
+                if seen_b.iter().position(|s| *s == a).is_some() {
                    return a;
                }
-                seen.push(a);
+                seen_a.push(a);
                ma = self.parent_map.get(&a).map(|s| *s);
            }

            if let Some(b) = mb {
-                if seen.iter().position(|s| *s == b).is_some() {
+                if seen_a.iter().position(|s| *s == b).is_some() {
                    return b;
                }
-                seen.push(b);
+                seen_b.push(b);
                mb = self.parent_map.get(&b).map(|s| *s);
            }

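
For readers following #50365: the change above replaces one shared `seen` vector with per-path `seen_a`/`seen_b` vectors, so each walk records only its own path and scans only the other's. A self-contained sketch of the same idea, assuming a plain parent map and integer ids (illustrative names and types, not the compiler's `Scope`/`SmallVec`):

    use std::collections::HashMap;

    // Walk both nodes toward the root in lockstep; a node already recorded on
    // the *other* walk's path is the nearest common ancestor.
    fn nearest_common_ancestor(parent: &HashMap<u32, u32>, a: u32, b: u32) -> u32 {
        let (mut ma, mut mb) = (Some(a), Some(b));
        let (mut seen_a, mut seen_b) = (Vec::new(), Vec::new());
        loop {
            if let Some(a) = ma {
                if seen_b.contains(&a) { return a; }
                seen_a.push(a);
                ma = parent.get(&a).copied();
            }
            if let Some(b) = mb {
                if seen_a.contains(&b) { return b; }
                seen_b.push(b);
                mb = parent.get(&b).copied();
            }
        }
    }

    fn main() {
        // 0 is the root; 1 and 2 are children of 0; 3 is a child of 1.
        let parent: HashMap<u32, u32> = [(1, 0), (2, 0), (3, 1)].iter().cloned().collect();
        assert_eq!(nearest_common_ancestor(&parent, 3, 2), 0);
    }

The sketch assumes both ids live in the same tree (as scopes do), so the loop always terminates at a shared ancestor.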
@@ -792,7 +792,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> {
            ConstVal::Value(miri) => const_val_field(
                self.tcx, self.param_env, instance,
                variant_opt, field, miri, cv.ty,
-            ).unwrap(),
+            ).expect("field access failed"),
            _ => bug!("{:#?} is not a valid adt", cv),
        };
        self.const_to_pat(instance, val, id, span)
@@ -1340,9 +1340,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
        use syntax::ast::FloatTy;

        let layout = self.layout_of(ty)?;
-        // do the strongest layout check of the two
-        let align = layout.align.max(ptr_align);
-        self.memory.check_align(ptr, align)?;
+        self.memory.check_align(ptr, ptr_align)?;

        if layout.size.bytes() == 0 {
            return Ok(Some(Value::ByVal(PrimVal::Undef)));
@@ -1427,7 +1427,7 @@ impl<'a> Cache {
        }
        if let Some(ref item_name) = item.name {
            let path = self.paths.get(&item.def_id)
-                           .map(|p| p.0.join("::").to_string())
+                           .map(|p| p.0[..p.0.len() - 1].join("::"))
                           .unwrap_or("std".to_owned());
            for alias in item.attrs.lists("doc")
                         .filter(|a| a.check_name("alias"))
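
The rustdoc change above (#50320) stops the stored alias path from repeating the item's own name: the last path segment is dropped so the alias points at the parent module, falling back to "std". A minimal sketch of the difference, assuming `p.0` is a vector of path segments that ends with the item name (as the surrounding code suggests):

    fn main() {
        // Fully qualified path for `std::ops::Add`, ending in the item's own name.
        let segments: Vec<String> = vec!["std".into(), "ops".into(), "Add".into()];

        let old_path = segments.join("::");                        // "std::ops::Add"
        let new_path = segments[..segments.len() - 1].join("::");  // "std::ops"

        assert_eq!(old_path, "std::ops::Add");
        assert_eq!(new_path, "std::ops");
    }

This matches the expectation changes in the search tests below, where entries like { 'path': 'std::ops::Add', 'name': 'Add' } become { 'path': 'std::ops', 'name': 'Add' }.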
@@ -1996,7 +1996,7 @@
        if (e.parentNode.id === "main") {
            var otherMessage;
            if (hasClass(e, "type-decl")) {
-                otherMessage = ' Show type declaration';
+                otherMessage = ' Show declaration';
            }
            e.parentNode.insertBefore(createToggle(otherMessage), e);
            if (otherMessage && getCurrentValue('rustdoc-item-declarations') !== "false") {
@@ -450,8 +450,8 @@ pub mod builtin {
     #[unstable(feature = "concat_idents_macro", issue = "29599")]
     #[macro_export]
     macro_rules! concat_idents {
-        ($($e:ident),*) => ({ /* compiler built-in */ });
-        ($($e:ident,)*) => ({ /* compiler built-in */ });
+        ($($e:ident),+) => ({ /* compiler built-in */ });
+        ($($e:ident,)+) => ({ /* compiler built-in */ });
     }

     /// Concatenates literals into a static string slice.
@@ -87,7 +87,6 @@ use io;
 use iter::{self, FusedIterator};
 use ops::{self, Deref};
 use rc::Rc;
-use str::FromStr;
 use sync::Arc;

 use ffi::{OsStr, OsString};
@@ -1441,32 +1440,6 @@ impl From<String> for PathBuf {
     }
 }

-/// Error returned from [`PathBuf::from_str`][`from_str`].
-///
-/// Note that parsing a path will never fail. This error is just a placeholder
-/// for implementing `FromStr` for `PathBuf`.
-///
-/// [`from_str`]: struct.PathBuf.html#method.from_str
-#[derive(Debug, Clone, PartialEq, Eq)]
-#[stable(feature = "path_from_str", since = "1.26.0")]
-pub enum ParsePathError {}
-
-#[stable(feature = "path_from_str", since = "1.26.0")]
-impl fmt::Display for ParsePathError {
-    fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {
-        match *self {}
-    }
-}
-
-#[stable(feature = "path_from_str", since = "1.26.0")]
-impl FromStr for PathBuf {
-    type Err = ParsePathError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Ok(PathBuf::from(s))
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<P: AsRef<Path>> iter::FromIterator<P> for PathBuf {
     fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> PathBuf {
@@ -835,7 +835,13 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
        "path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),
        "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
        "vis" => token::NtVis(panictry!(p.parse_visibility(true))),
-        "lifetime" => token::NtLifetime(p.expect_lifetime().ident),
+        "lifetime" => if p.check_lifetime() {
+            token::NtLifetime(p.expect_lifetime().ident)
+        } else {
+            let token_str = pprust::token_to_string(&p.token);
+            p.fatal(&format!("expected a lifetime, found `{}`", &token_str)).emit();
+            FatalError.raise();
+        }
        // this is not supposed to happen, since it has been checked
        // when compiling the macro.
        _ => p.span_bug(sp, "invalid fragment specifier"),
@@ -2042,7 +2042,7 @@ impl<'a> Parser<'a> {
        })
    }

-    fn check_lifetime(&mut self) -> bool {
+    pub fn check_lifetime(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Lifetime);
        self.token.is_lifetime()
    }
@@ -31,6 +31,11 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
        return base::DummyResult::expr(sp);
    }

+    if tts.is_empty() {
+        cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
+        return DummyResult::expr(sp);
+    }
+
    let mut res_str = String::new();
    for (i, e) in tts.iter().enumerate() {
        if i & 1 == 1 {
@@ -1150,13 +1150,17 @@ pub struct CharPos(pub usize);
 // have been unsuccessful

 impl Pos for BytePos {
+    #[inline(always)]
     fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
+
+    #[inline(always)]
     fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
 }

 impl Add for BytePos {
     type Output = BytePos;

+    #[inline(always)]
     fn add(self, rhs: BytePos) -> BytePos {
         BytePos((self.to_usize() + rhs.to_usize()) as u32)
     }
@@ -1165,6 +1169,7 @@ impl Add for BytePos {
 impl Sub for BytePos {
     type Output = BytePos;

+    #[inline(always)]
     fn sub(self, rhs: BytePos) -> BytePos {
         BytePos((self.to_usize() - rhs.to_usize()) as u32)
     }
@@ -1183,13 +1188,17 @@ impl Decodable for BytePos {
 }

 impl Pos for CharPos {
+    #[inline(always)]
     fn from_usize(n: usize) -> CharPos { CharPos(n) }
+
+    #[inline(always)]
     fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
 }

 impl Add for CharPos {
     type Output = CharPos;

+    #[inline(always)]
     fn add(self, rhs: CharPos) -> CharPos {
         CharPos(self.to_usize() + rhs.to_usize())
     }
@@ -1198,6 +1207,7 @@ impl Add for CharPos {
 impl Sub for CharPos {
     type Output = CharPos;

+    #[inline(always)]
     fn sub(self, rhs: CharPos) -> CharPos {
         CharPos(self.to_usize() - rhs.to_usize())
     }

src/test/compile-fail/macro-non-lifetime.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test for issue #50381: non-lifetime passed to :lifetime.
+
+#![feature(macro_lifetime_matcher)]
+
+macro_rules! m { ($x:lifetime) => { } }
+
+fn main() {
+    m!(a);
+    //~^ ERROR expected a lifetime, found `a`
+}

src/test/run-pass/issue-50415.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    // -------- Simplified test case --------
+
+    let _ = || 0..=1;
+
+    // -------- Original test case --------
+
+    let full_length = 1024;
+    let range = {
+        // do some stuff, omit here
+        None
+    };
+
+    let range = range.map(|(s, t)| s..=t).unwrap_or(0..=(full_length-1));
+
+    assert_eq!(range, 0..=1023);
+}
@@ -8,11 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+// ignore-order
+
 const QUERY = '+';

 const EXPECTED = {
     'others': [
-        { 'path': 'std::ops::AddAssign', 'name': 'AddAssign' },
-        { 'path': 'std::ops::Add', 'name': 'Add' },
+        { 'path': 'std::ops', 'name': 'AddAssign' },
+        { 'path': 'std::ops', 'name': 'Add' },
     ],
 };
@@ -8,12 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+// ignore-order
+
 const QUERY = '[';

 const EXPECTED = {
     'others': [
         { 'path': 'std', 'name': 'slice' },
-        { 'path': 'std::ops::IndexMut', 'name': 'IndexMut' },
-        { 'path': 'std::ops::Index', 'name': 'Index' },
+        { 'path': 'std::ops', 'name': 'IndexMut' },
+        { 'path': 'std::ops', 'name': 'Index' },
     ],
 };
@@ -13,8 +13,8 @@ const QUERY = 'String';
 const EXPECTED = {
     'others': [
         { 'path': 'std::string', 'name': 'String' },
-        { 'path': 'std::ffi', 'name': 'OsString' },
         { 'path': 'std::ffi', 'name': 'CString' },
+        { 'path': 'std::ffi', 'name': 'OsString' },
     ],
     'in_args': [
         { 'path': 'std::str', 'name': 'eq' },

src/test/ui/const-eval/ice-packed.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+#[derive(Copy, Clone, PartialEq, Eq)]
+#[repr(packed)]
+pub struct Num(u64);
+
+impl Num {
+    pub const ZERO: Self = Num(0);
+}
+
+pub fn decrement(a: Num) -> Num {
+    match a {
+        Num::ZERO => Num::ZERO,
+        a => Num(a.0 - 1)
+    }
+}
+
+fn main() {
+}
@@ -25,5 +25,5 @@ use inference_unstable_itertools::IpuItertools;
 fn main() {
     assert_eq!('x'.ipu_flatten(), 1);
     //~^ WARN a method with this name may be added to the standard library in the future
-    //~^^ WARN once this method is added to the standard library, there will be ambiguity here
+    //~^^ WARN once this method is added to the standard library, the ambiguity may cause an error
 }
@@ -5,7 +5,7 @@ LL |     assert_eq!('x'.ipu_flatten(), 1);
    |                    ^^^^^^^^^^^
    |
    = note: #[warn(unstable_name_collision)] on by default
-   = warning: once this method is added to the standard library, there will be ambiguity here, which will cause a hard error!
+   = warning: once this method is added to the standard library, the ambiguity may cause an error or change in behavior!
    = note: for more information, see issue #48919 <https://github.com/rust-lang/rust/issues/48919>
    = help: call with fully qualified syntax `inference_unstable_itertools::IpuItertools::ipu_flatten(...)` to keep using the current method
    = note: add #![feature(ipu_flatten)] to the crate attributes to enable `inference_unstable_iterator::IpuIterator::ipu_flatten`

src/test/ui/issue-50403.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(concat_idents)]
+
+fn main() {
+    let x = concat_idents!(); //~ ERROR concat_idents! takes 1 or more arguments
+}

src/test/ui/issue-50403.stderr (new file, 8 lines)
@@ -0,0 +1,8 @@
+error: concat_idents! takes 1 or more arguments.
+  --> $DIR/issue-50403.rs:14:13
+   |
+LL |     let x = concat_idents!(); //~ ERROR concat_idents! takes 1 or more arguments
+   |             ^^^^^^^^^^^^^^^^
+   |
+error: aborting due to previous error
+
@@ -1 +1 @@
-Subproject commit 122fd5be5201913d42e219e132d6569493583bca
+Subproject commit 66b0ffa81c560be1b79511b51f49cbb23bc78651
@@ -87,6 +87,7 @@ function loadContent(content) {
     var Module = module.constructor;
     var m = new Module();
     m._compile(content, "tmp.js");
+    m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1;
     return m.exports;
 }

@@ -130,10 +131,10 @@ function lookForEntry(entry, data) {
            }
        }
        if (allGood === true) {
-            return true;
+            return i;
        }
    }
-    return false;
+    return null;
 }

 function main(argv) {
@@ -177,6 +178,7 @@ function main(argv) {
                          'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
        const expected = loadedFile.EXPECTED;
        const query = loadedFile.QUERY;
+        const ignore_order = loadedFile.ignore_order;
        var results = loaded.execSearch(loaded.getQuery(query), index);
        process.stdout.write('Checking "' + file + '" ... ');
        var error_text = [];
@@ -189,13 +191,17 @@ function main(argv) {
                break;
            }
            var entry = expected[key];
-            var found = false;
+            var prev_pos = 0;
            for (var i = 0; i < entry.length; ++i) {
-                if (lookForEntry(entry[i], results[key]) === true) {
-                    found = true;
-                } else {
+                var entry_pos = lookForEntry(entry[i], results[key]);
+                if (entry_pos === null) {
                    error_text.push("==> Result not found in '" + key + "': '" +
                                    JSON.stringify(entry[i]) + "'");
+                } else if (entry_pos < prev_pos && ignore_order === false) {
+                    error_text.push("==> '" + JSON.stringify(entry[i]) + "' was supposed to be " +
+                                    " before '" + JSON.stringify(results[key][entry_pos]) + "'");
+                } else {
+                    prev_pos = entry_pos;
                }
            }

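
Taken together, the tester.js changes implement the ordering check from #50302: `lookForEntry` now returns the matched index (or null), and `main` requires each expected entry to be found at a non-decreasing position unless the test opts out with `// ignore-order`. A rough Rust rendering of that rule, with hypothetical names, just to make it explicit:

    // Each expected entry must exist in `results`; unless `ignore_order` is set,
    // it must not appear earlier than the previously matched entry.
    fn check_order(expected: &[&str], results: &[&str], ignore_order: bool) -> Result<(), String> {
        let mut prev_pos = 0;
        for want in expected {
            match results.iter().position(|r| r == want) {
                None => return Err(format!("result not found: {}", want)),
                Some(pos) if pos < prev_pos && !ignore_order => {
                    return Err(format!("{} appeared out of order", want))
                }
                Some(pos) => prev_pos = pos,
            }
        }
        Ok(())
    }

    fn main() {
        let results = ["std::string::String", "std::ffi::CString", "std::ffi::OsString"];
        assert!(check_order(&["std::string::String", "std::ffi::OsString"], &results, false).is_ok());
        assert!(check_order(&["std::ffi::OsString", "std::string::String"], &results, false).is_err());
    }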