Auto merge of #89430 - GuillaumeGomez:rustdoc-clippy-lints, r=jyn514,camelid,notriddle
Fix clippy lints in librustdoc

I ran clippy on librustdoc and simply fixed the lints. :)

r? `@notriddle`
commit 29b1248025
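The changes below are mostly mechanical applications of a handful of clippy suggestions: dropping needless borrows, `iter().cloned()` on `Copy` items becoming `iter().copied()`, eager `or`/`unwrap_or` arguments becoming `or_else`/`unwrap_or_else`, redundant `'static` lifetimes on `&str` constants, and `len() > 0` checks becoming `!is_empty()`. As a standalone sketch (toy code invented for illustration, not taken from this PR), the same patterns look like this after the fixes:

// Illustration only: the identifiers (GREETING, nums, doubled) are invented,
// but the comments name the actual clippy lints being addressed.
const GREETING: &str = "hello"; // was `&'static str`: clippy::redundant_static_lifetimes

fn main() {
    let nums = vec![1_u32, 2, 3];

    // clippy::cloned_instead_of_copied: `iter().cloned()` -> `iter().copied()` for Copy items.
    let doubled: Vec<u32> = nums.iter().copied().map(|n| n * 2).collect();

    // clippy::or_fun_call: compute the fallback lazily with `unwrap_or_else`.
    let first = nums.first().copied().unwrap_or_else(|| GREETING.len() as u32);

    // clippy::len_zero: `!doubled.is_empty()` instead of `doubled.len() > 0`.
    if !doubled.is_empty() {
        println!("{} {:?} {}", GREETING, doubled, first);
    }
}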
@@ -136,7 +136,7 @@ fn generate_for_trait(
 let f = auto_trait::AutoTraitFinder::new(tcx);
 debug!("get_auto_trait_impls({:?})", ty);
-let auto_traits: Vec<_> = self.cx.auto_traits.iter().cloned().collect();
+let auto_traits: Vec<_> = self.cx.auto_traits.iter().copied().collect();
 let mut auto_traits: Vec<Item> = auto_traits
 .into_iter()
 .filter_map(|trait_def_id| {

@@ -193,8 +193,8 @@ fn handle_lifetimes<'cx>(
 // to its smaller and larger regions. Note that 'larger' regions correspond
 // to sub-regions in Rust code (e.g., in 'a: 'b, 'a is the larger region).
 for constraint in regions.constraints.keys() {
-match constraint {
-&Constraint::VarSubVar(r1, r2) => {
+match *constraint {
+Constraint::VarSubVar(r1, r2) => {
 {
 let deps1 = vid_map.entry(RegionTarget::RegionVid(r1)).or_default();
 deps1.larger.insert(RegionTarget::RegionVid(r2));

@@ -203,15 +203,15 @@ fn handle_lifetimes<'cx>(
 let deps2 = vid_map.entry(RegionTarget::RegionVid(r2)).or_default();
 deps2.smaller.insert(RegionTarget::RegionVid(r1));
 }
-&Constraint::RegSubVar(region, vid) => {
+Constraint::RegSubVar(region, vid) => {
 let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
 deps.smaller.insert(RegionTarget::Region(region));
 }
-&Constraint::VarSubReg(vid, region) => {
+Constraint::VarSubReg(vid, region) => {
 let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
 deps.larger.insert(RegionTarget::Region(region));
 }
-&Constraint::RegSubReg(r1, r2) => {
+Constraint::RegSubReg(r1, r2) => {
 // The constraint is already in the form that we want, so we're done with it
 // Desired order is 'larger, smaller', so flip then
 if region_name(r1) != region_name(r2) {
@@ -513,8 +513,8 @@ fn param_env_to_generics(
 // as we want to combine them with any 'Output' qpaths
 // later
-let is_fn = match &mut b {
-&mut GenericBound::TraitBound(ref mut p, _) => {
+let is_fn = match b {
+GenericBound::TraitBound(ref mut p, _) => {
 // Insert regions into the for_generics hash map first, to ensure
 // that we don't end up with duplicate bounds (e.g., for<'b, 'b>)
 for_generics.extend(p.generic_params.clone());

@@ -699,8 +699,8 @@ fn is_fn_trait(&self, path: &Path) -> bool {
 }
 fn region_name(region: Region<'_>) -> Option<Symbol> {
-match region {
-&ty::ReEarlyBound(r) => Some(r.name),
+match *region {
+ty::ReEarlyBound(r) => Some(r.name),
 _ => None,
 }
 }

@@ -717,8 +717,8 @@ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
 }
 fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-(match r {
-&ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(),
+(match *r {
+ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(),
 _ => None,
 })
 .unwrap_or_else(|| r.super_fold_with(self))
@@ -216,17 +216,15 @@ fn clean(&self, cx: &mut DocContext<'_>) -> GenericBound {
 impl Clean<Lifetime> for hir::Lifetime {
 fn clean(&self, cx: &mut DocContext<'_>) -> Lifetime {
 let def = cx.tcx.named_region(self.hir_id);
-match def {
-Some(
-rl::Region::EarlyBound(_, node_id, _)
-| rl::Region::LateBound(_, _, node_id, _)
-| rl::Region::Free(_, node_id),
-) => {
-if let Some(lt) = cx.lt_substs.get(&node_id).cloned() {
-return lt;
-}
+if let Some(
+rl::Region::EarlyBound(_, node_id, _)
+| rl::Region::LateBound(_, _, node_id, _)
+| rl::Region::Free(_, node_id),
+) = def
+{
+if let Some(lt) = cx.lt_substs.get(&node_id).cloned() {
+return lt;
 }
-_ => {}
 }
 Lifetime(self.name.ident().name)
 }

@@ -828,7 +826,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Arguments {
 .iter()
 .enumerate()
 .map(|(i, ty)| Argument {
-name: name_from_pat(&body.params[i].pat),
+name: name_from_pat(body.params[i].pat),
 type_: ty.clean(cx),
 })
 .collect(),

@@ -924,7 +922,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Item {
 }
 MethodItem(m, None)
 }
-hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(ref names)) => {
+hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => {
 let (generics, decl) = enter_impl_trait(cx, |cx| {
 (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
 });

@@ -936,7 +934,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Item {
 }
 TyMethodItem(t)
 }
-hir::TraitItemKind::Type(ref bounds, ref default) => {
+hir::TraitItemKind::Type(bounds, ref default) => {
 AssocTypeItem(bounds.clean(cx), default.clean(cx))
 }
 };
@@ -1260,7 +1258,7 @@ fn clean_qpath(hir_ty: &hir::Ty<'_>, cx: &mut DocContext<'_>) -> Type {
 let path = path.clean(cx);
 resolve_type(cx, path)
 }
-hir::QPath::Resolved(Some(ref qself), ref p) => {
+hir::QPath::Resolved(Some(ref qself), p) => {
 // Try to normalize `<X as Y>::T` to a type
 let ty = hir_ty_to_ty(cx.tcx, hir_ty);
 if let Some(normalized_value) = normalize(cx, ty) {

@@ -1281,7 +1279,7 @@ fn clean_qpath(hir_ty: &hir::Ty<'_>, cx: &mut DocContext<'_>) -> Type {
 trait_,
 }
 }
-hir::QPath::TypeRelative(ref qself, ref segment) => {
+hir::QPath::TypeRelative(ref qself, segment) => {
 let ty = hir_ty_to_ty(cx.tcx, hir_ty);
 let res = match ty.kind() {
 ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id),

@@ -1337,7 +1335,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Type {
 let length = print_const(cx, ct.eval(cx.tcx, param_env));
 Array(box ty.clean(cx), length)
 }
-TyKind::Tup(ref tys) => Tuple(tys.clean(cx)),
+TyKind::Tup(tys) => Tuple(tys.clean(cx)),
 TyKind::OpaqueDef(item_id, _) => {
 let item = cx.tcx.hir().item(item_id);
 if let hir::ItemKind::OpaqueTy(ref ty) = item.kind {

@@ -1346,8 +1344,8 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Type {
 unreachable!()
 }
 }
-TyKind::Path(_) => clean_qpath(&self, cx),
-TyKind::TraitObject(ref bounds, ref lifetime, _) => {
+TyKind::Path(_) => clean_qpath(self, cx),
+TyKind::TraitObject(bounds, ref lifetime, _) => {
 let bounds = bounds.iter().map(|bound| bound.clean(cx)).collect();
 let lifetime = if !lifetime.is_elided() { Some(lifetime.clean(cx)) } else { None };
 DynTrait(bounds, lifetime)

@@ -1441,7 +1439,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Type {
 let path = external_path(cx, did, false, vec![], InternalSubsts::empty());
 ResolvedPath { path, did }
 }
-ty::Dynamic(ref obj, ref reg) => {
+ty::Dynamic(obj, ref reg) => {
 // HACK: pick the first `did` as the `did` of the trait object. Someone
 // might want to implement "native" support for marker-trait-only
 // trait objects.

@@ -1481,9 +1479,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Type {
 DynTrait(bounds, lifetime)
 }
-ty::Tuple(ref t) => {
-Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx))
-}
+ty::Tuple(t) => Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx)),
 ty::Projection(ref data) => data.clean(cx),
@@ -1821,9 +1817,9 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Vec<Item> {
 clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
 }
 ItemKind::Macro(ref macro_def) => MacroItem(Macro {
-source: display_macro_source(cx, name, &macro_def, def_id, &item.vis),
+source: display_macro_source(cx, name, macro_def, def_id, &item.vis),
 }),
-ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref item_ids) => {
+ItemKind::Trait(is_auto, unsafety, ref generics, bounds, item_ids) => {
 let items = item_ids
 .iter()
 .map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx))

@@ -2065,10 +2061,10 @@ fn clean(&self, cx: &mut DocContext<'_>) -> Item {
 let def_id = item.def_id.to_def_id();
 cx.with_param_env(def_id, |cx| {
 let kind = match item.kind {
-hir::ForeignItemKind::Fn(ref decl, ref names, ref generics) => {
+hir::ForeignItemKind::Fn(decl, names, ref generics) => {
 let abi = cx.tcx.hir().get_foreign_abi(item.hir_id());
 let (generics, decl) = enter_impl_trait(cx, |cx| {
-(generics.clean(cx), (&**decl, &names[..]).clean(cx))
+(generics.clean(cx), (&*decl, &names[..]).clean(cx))
 });
 ForeignFunctionItem(Function {
 decl,

@@ -2113,7 +2109,7 @@ fn clean(&self, cx: &mut DocContext<'_>) -> TypeBindingKind {
 hir::TypeBindingKind::Equality { ref ty } => {
 TypeBindingKind::Equality { ty: ty.clean(cx) }
 }
-hir::TypeBindingKind::Constraint { ref bounds } => {
+hir::TypeBindingKind::Constraint { bounds } => {
 TypeBindingKind::Constraint { bounds: bounds.iter().map(|b| b.clean(cx)).collect() }
 }
 }
@@ -204,7 +204,7 @@ fn to_remote(url: impl ToString) -> ExternalLocation {
 .filter_map(|a| a.value_str())
 .map(to_remote)
 .next()
-.or(extern_url.map(to_remote)) // NOTE: only matters if `extern_url_takes_precedence` is false
+.or_else(|| extern_url.map(to_remote)) // NOTE: only matters if `extern_url_takes_precedence` is false
 .unwrap_or(Unknown) // Well, at least we tried.
 }

@@ -238,7 +238,7 @@ fn to_remote(url: impl ToString) -> ExternalLocation {
 hir::ItemKind::Mod(_) => {
 as_keyword(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
 }
-hir::ItemKind::Use(ref path, hir::UseKind::Single)
+hir::ItemKind::Use(path, hir::UseKind::Single)
 if item.vis.node.is_pub() =>
 {
 as_keyword(path.res.expect_non_local())

@@ -304,7 +304,7 @@ fn to_remote(url: impl ToString) -> ExternalLocation {
 hir::ItemKind::Mod(_) => {
 as_primitive(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
 }
-hir::ItemKind::Use(ref path, hir::UseKind::Single)
+hir::ItemKind::Use(path, hir::UseKind::Single)
 if item.vis.node.is_pub() =>
 {
 as_primitive(path.res.expect_non_local()).map(|(_, prim)| {

@@ -381,7 +381,7 @@ impl Item {
 {
 *span
 } else {
-self.def_id.as_def_id().map(|did| rustc_span(did, tcx)).unwrap_or_else(|| Span::dummy())
+self.def_id.as_def_id().map(|did| rustc_span(did, tcx)).unwrap_or_else(Span::dummy)
 }
 }
@@ -562,7 +562,7 @@ pub fn from_def_id_and_attrs_and_parts(
 }
 crate fn stability_class(&self, tcx: TyCtxt<'_>) -> Option<String> {
-self.stability(tcx).as_ref().and_then(|ref s| {
+self.stability(tcx).as_ref().and_then(|s| {
 let mut classes = Vec::with_capacity(2);
 if s.level.is_unstable() {

@@ -820,9 +820,9 @@ fn single<T: IntoIterator>(it: T) -> Option<T::Item> {
 // #[doc(cfg(...))]
 if let Some(cfg_mi) = item
 .meta_item()
-.and_then(|item| rustc_expand::config::parse_cfg(&item, sess))
+.and_then(|item| rustc_expand::config::parse_cfg(item, sess))
 {
-match Cfg::parse(&cfg_mi) {
+match Cfg::parse(cfg_mi) {
 Ok(new_cfg) => cfg &= new_cfg,
 Err(e) => sess.span_err(e.span, e.msg),
 }

@@ -934,7 +934,7 @@ fn from_iter<T>(iter: T) -> Self
 T: IntoIterator<Item = &'a DocFragment>,
 {
 iter.into_iter().fold(String::new(), |mut acc, frag| {
-add_doc_fragment(&mut acc, &frag);
+add_doc_fragment(&mut acc, frag);
 acc
 })
 }

@@ -1061,12 +1061,12 @@ fn update_need_backline(doc_strings: &mut Vec<DocFragment>) {
 let ori = iter.next()?;
 let mut out = String::new();
-add_doc_fragment(&mut out, &ori);
-while let Some(new_frag) = iter.next() {
+add_doc_fragment(&mut out, ori);
+for new_frag in iter {
 if new_frag.kind != ori.kind || new_frag.parent_module != ori.parent_module {
 break;
 }
-add_doc_fragment(&mut out, &new_frag);
+add_doc_fragment(&mut out, new_frag);
 }
 if out.is_empty() { None } else { Some(out) }
 }

@@ -1079,7 +1079,7 @@ fn update_need_backline(doc_strings: &mut Vec<DocFragment>) {
 for new_frag in self.doc_strings.iter() {
 let out = ret.entry(new_frag.parent_module).or_default();
-add_doc_fragment(out, &new_frag);
+add_doc_fragment(out, new_frag);
 }
 ret
 }
@@ -171,8 +171,8 @@ pub(super) fn external_path(
 crate fn qpath_to_string(p: &hir::QPath<'_>) -> String {
 let segments = match *p {
-hir::QPath::Resolved(_, ref path) => &path.segments,
-hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
+hir::QPath::Resolved(_, path) => &path.segments,
+hir::QPath::TypeRelative(_, segment) => return segment.ident.to_string(),
 hir::QPath::LangItem(lang_item, ..) => return lang_item.name().to_string(),
 };

@@ -217,15 +217,15 @@ pub(super) fn external_path(
 PatKind::Wild | PatKind::Struct(..) => return kw::Underscore,
 PatKind::Binding(_, _, ident, _) => return ident.name,
 PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
-PatKind::Or(ref pats) => {
+PatKind::Or(pats) => {
 pats.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(" | ")
 }
-PatKind::Tuple(ref elts, _) => format!(
+PatKind::Tuple(elts, _) => format!(
 "({})",
 elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
 ),
-PatKind::Box(ref p) => return name_from_pat(&**p),
-PatKind::Ref(ref p, _) => return name_from_pat(&**p),
+PatKind::Box(p) => return name_from_pat(&*p),
+PatKind::Ref(p, _) => return name_from_pat(&*p),
 PatKind::Lit(..) => {
 warn!(
 "tried to get argument name from PatKind::Lit, which is silly in function arguments"

@@ -233,7 +233,7 @@ pub(super) fn external_path(
 return Symbol::intern("()");
 }
 PatKind::Range(..) => return kw::Underscore,
-PatKind::Slice(ref begin, ref mid, ref end) => {
+PatKind::Slice(begin, ref mid, end) => {
 let begin = begin.iter().map(|p| name_from_pat(p).to_string());
 let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
 let end = end.iter().map(|p| name_from_pat(p).to_string());

@@ -507,7 +507,7 @@ fn print_const_with_custom_print_scalar(tcx: TyCtxt<'_>, ct: &'tcx ty::Const<'tc
 /// so that the channel is consistent.
 ///
 /// Set by `bootstrap::Builder::doc_rust_lang_org_channel` in order to keep tests passing on beta/stable.
-crate const DOC_RUST_LANG_ORG_CHANNEL: &'static str = env!("DOC_RUST_LANG_ORG_CHANNEL");
+crate const DOC_RUST_LANG_ORG_CHANNEL: &str = env!("DOC_RUST_LANG_ORG_CHANNEL");
 /// Render a sequence of macro arms in a format suitable for displaying to the user
 /// as part of an item declaration.
@@ -321,13 +321,13 @@ impl Options {
 /// been printed, returns `Err` with the exit code.
 crate fn from_matches(matches: &getopts::Matches) -> Result<Options, i32> {
 // Check for unstable options.
-nightly_options::check_nightly_options(&matches, &opts());
+nightly_options::check_nightly_options(matches, &opts());
 if matches.opt_present("h") || matches.opt_present("help") {
 crate::usage("rustdoc");
 return Err(0);
 } else if matches.opt_present("version") {
-rustc_driver::version("rustdoc", &matches);
+rustc_driver::version("rustdoc", matches);
 return Err(0);
 }

@@ -363,10 +363,10 @@ fn println_condition(condition: Condition) {
 return Err(0);
 }
-let color = config::parse_color(&matches);
+let color = config::parse_color(matches);
 let config::JsonConfig { json_rendered, json_unused_externs, .. } =
-config::parse_json(&matches);
-let error_format = config::parse_error_format(&matches, color, json_rendered);
+config::parse_json(matches);
+let error_format = config::parse_error_format(matches, color, json_rendered);
 let codegen_options = CodegenOptions::build(matches, error_format);
 let debugging_opts = DebuggingOptions::build(matches, error_format);

@@ -374,7 +374,7 @@ fn println_condition(condition: Condition) {
 let diag = new_handler(error_format, None, &debugging_opts);
 // check for deprecated options
-check_deprecated_options(&matches, &diag);
+check_deprecated_options(matches, &diag);
 let mut emit = Vec::new();
 for list in matches.opt_strs("emit") {

@@ -440,8 +440,8 @@ fn println_condition(condition: Condition) {
 .iter()
 .map(|s| SearchPath::from_cli_opt(s, error_format))
 .collect();
-let externs = parse_externs(&matches, &debugging_opts, error_format);
-let extern_html_root_urls = match parse_extern_html_roots(&matches) {
+let externs = parse_externs(matches, &debugging_opts, error_format);
+let extern_html_root_urls = match parse_extern_html_roots(matches) {
 Ok(ex) => ex,
 Err(err) => {
 diag.struct_err(err).emit();

@@ -560,7 +560,7 @@ fn println_condition(condition: Condition) {
 }
 }
-let edition = config::parse_crate_edition(&matches);
+let edition = config::parse_crate_edition(matches);
 let mut id_map = html::markdown::IdMap::new();
 let external_html = match ExternalHtml::load(

@@ -569,7 +569,7 @@ fn println_condition(condition: Condition) {
 &matches.opt_strs("html-after-content"),
 &matches.opt_strs("markdown-before-content"),
 &matches.opt_strs("markdown-after-content"),
-nightly_options::match_is_nightly_build(&matches),
+nightly_options::match_is_nightly_build(matches),
 &diag,
 &mut id_map,
 edition,
@@ -85,7 +85,7 @@
 impl<'tcx> DocContext<'tcx> {
 crate fn sess(&self) -> &'tcx Session {
-&self.tcx.sess
+self.tcx.sess
 }
 crate fn with_param_env<T, F: FnOnce(&mut Self) -> T>(&mut self, def_id: DefId, f: F) -> T {

@@ -464,7 +464,7 @@ fn report_deprecated_attr(name: &str, diag: &rustc_errors::Handler, sp: Span) {
 _ => continue,
 };
 for name in value.as_str().split_whitespace() {
-let span = attr.name_value_literal_span().unwrap_or(attr.span());
+let span = attr.name_value_literal_span().unwrap_or_else(|| attr.span());
 manual_passes.extend(parse_pass(name, Some(span)));
 }
 }

@@ -73,7 +73,7 @@
 search_paths: options.libs.clone(),
 crate_types,
 lint_opts: if !options.display_doctest_warnings { lint_opts } else { vec![] },
-lint_cap: Some(options.lint_cap.unwrap_or_else(|| lint::Forbid)),
+lint_cap: Some(options.lint_cap.unwrap_or(lint::Forbid)),
 cg: options.codegen_options.clone(),
 externs: options.externs.clone(),
 unstable_features: options.render_options.unstable_features,

@@ -176,7 +176,7 @@
 .iter()
 .map(|uexts| uexts.unused_extern_names.iter().collect::<FxHashSet<&String>>())
 .fold(extern_names, |uextsa, uextsb| {
-uextsa.intersection(&uextsb).map(|v| *v).collect::<FxHashSet<&String>>()
+uextsa.intersection(&uextsb).copied().collect::<FxHashSet<&String>>()
 })
 .iter()
 .map(|v| (*v).clone())

@@ -423,7 +423,7 @@ fn drop(&mut self) {
 // Add a \n to the end to properly terminate the last line,
 // but only if there was output to be printed
-if out_lines.len() > 0 {
+if !out_lines.is_empty() {
 out_lines.push("");
 }

@@ -1124,7 +1124,7 @@ fn visit_testable<F: FnOnce(&mut Self)>(
 let mut attrs = Attributes::from_ast(ast_attrs, None);
 if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) {
-if !cfg.matches(&self.sess.parse_sess, Some(&self.sess.features_untracked())) {
+if !cfg.matches(&self.sess.parse_sess, Some(self.sess.features_untracked())) {
 return;
 }
 }
@@ -292,7 +292,7 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
 // inserted later on when serializing the search-index.
 if item.def_id.index().map_or(false, |idx| idx != CRATE_DEF_INDEX) {
 let desc = item.doc_value().map_or_else(String::new, |x| {
-short_markdown_summary(&x.as_str(), &item.link_names(&self.cache))
+short_markdown_summary(x.as_str(), &item.link_names(self.cache))
 });
 self.cache.search_index.push(IndexItem {
 ty: item.type_(),

@@ -462,7 +462,7 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
 let impl_item = Impl { impl_item: item };
 if impl_item.trait_did().map_or(true, |d| self.cache.traits.contains_key(&d)) {
 for did in dids {
-self.cache.impls.entry(did).or_insert(vec![]).push(impl_item.clone());
+self.cache.impls.entry(did).or_insert_with(Vec::new).push(impl_item.clone());
 }
 } else {
 let trait_did = impl_item.trait_did().expect("no trait did");

@@ -597,7 +597,7 @@ fn to_module_fqp(shortty: ItemType, fqp: &[String]) -> &[String] {
 /// Used when rendering a `ResolvedPath` structure. This invokes the `path`
 /// rendering function with the necessary arguments for linking to a local path.
-fn resolved_path<'a, 'cx: 'a>(
+fn resolved_path<'cx>(
 w: &mut fmt::Formatter<'_>,
 did: DefId,
 path: &clean::Path,

@@ -696,7 +696,7 @@ fn primitive_link(
 /// Helper to render type parameters
 fn tybounds<'a, 'tcx: 'a>(
-bounds: &'a Vec<clean::PolyTrait>,
+bounds: &'a [clean::PolyTrait],
 lt: &'a Option<clean::Lifetime>,
 cx: &'a Context<'tcx>,
 ) -> impl fmt::Display + 'a + Captures<'tcx> {

@@ -886,7 +886,7 @@ fn fmt_type<'cx>(
 if bounds.len() > 1 || trait_lt.is_some() =>
 {
 write!(f, "{}{}{}(", amp, lt, m)?;
-fmt_type(&ty, f, use_absolute, cx)?;
+fmt_type(ty, f, use_absolute, cx)?;
 write!(f, ")")
 }
 clean::Generic(..) => {

@@ -896,11 +896,11 @@ fn fmt_type<'cx>(
 &format!("{}{}{}", amp, lt, m),
 cx,
 )?;
-fmt_type(&ty, f, use_absolute, cx)
+fmt_type(ty, f, use_absolute, cx)
 }
 _ => {
 write!(f, "{}{}{}", amp, lt, m)?;
-fmt_type(&ty, f, use_absolute, cx)
+fmt_type(ty, f, use_absolute, cx)
 }
 }
 }
@@ -62,7 +62,7 @@
 }
 write_header(out, class, extra_content);
-write_code(out, &src, edition, context_info, decoration_info);
+write_code(out, src, edition, context_info, decoration_info);
 write_footer(out, playground_button);
 }

@@ -718,7 +718,7 @@ fn string<T: Display>(
 .map(|(url, _, _)| url)
 }
 LinkFromSrc::Primitive(prim) => format::href_with_root_path(
-PrimitiveType::primitive_locations(context.tcx())[&prim],
+PrimitiveType::primitive_locations(context.tcx())[prim],
 context,
 Some(context_info.root_path),
 )

@@ -68,10 +68,8 @@ struct PageLayout<'a> {
 let krate_with_trailing_slash = ensure_trailing_slash(&layout.krate).to_string();
 let style_files = style_files
 .iter()
-.filter_map(|t| {
-if let Some(stem) = t.path.file_stem() { Some((stem, t.disabled)) } else { None }
-})
-.filter_map(|t| if let Some(path) = t.0.to_str() { Some((path, t.1)) } else { None })
+.filter_map(|t| t.path.file_stem().map(|stem| (stem, t.disabled)))
+.filter_map(|t| t.0.to_str().map(|path| (path, t.1)))
 .map(|t| {
 format!(
 r#"<link rel="stylesheet" type="text/css" href="{}.css" {} {}>"#,
@@ -178,7 +178,7 @@ fn map_line(s: &str) -> Line<'_> {
 Line::Shown(Cow::Owned(s.replacen("##", "#", 1)))
 } else if let Some(stripped) = trimmed.strip_prefix("# ") {
 // # text
-Line::Hidden(&stripped)
+Line::Hidden(stripped)
 } else if trimmed == "#" {
 // We cannot handle '#text' because it could be #[attr].
 Line::Hidden("")

@@ -258,7 +258,7 @@ fn next(&mut self) -> Option<Self::Item> {
 let parse_result = match kind {
 CodeBlockKind::Fenced(ref lang) => {
 let parse_result =
-LangString::parse_without_check(&lang, self.check_error_codes, false);
+LangString::parse_without_check(lang, self.check_error_codes, false);
 if !parse_result.rust {
 return Some(Event::Html(
 format!(

@@ -669,7 +669,7 @@ fn next(&mut self) -> Option<Self::Item> {
 loop {
 match self.inner.next() {
 Some((Event::FootnoteReference(ref reference), range)) => {
-let entry = self.get_entry(&reference);
+let entry = self.get_entry(reference);
 let reference = format!(
 "<sup id=\"fnref{0}\"><a href=\"#fn{0}\">{0}</a></sup>",
 (*entry).1

@@ -904,7 +904,7 @@ fn tokens(string: &str) -> impl Iterator<Item = &str> {
 string
 .split(|c| c == ',' || c == ' ' || c == '\t')
 .map(str::trim)
-.map(|token| if token.chars().next() == Some('.') { &token[1..] } else { token })
+.map(|token| token.strip_prefix('.').unwrap_or(token))
 .filter(|token| !token.is_empty())
 }

@@ -974,7 +974,10 @@ fn parse(
 }
 x if extra.is_some() => {
 let s = x.to_lowercase();
-match if s == "compile-fail" || s == "compile_fail" || s == "compilefail" {
+if let Some((flag, help)) = if s == "compile-fail"
+|| s == "compile_fail"
+|| s == "compilefail"
+{
 Some((
 "compile_fail",
 "the code block will either not be tested if not marked as a rust one \
@@ -1007,15 +1010,12 @@ fn parse(
 } else {
 None
 } {
-Some((flag, help)) => {
-if let Some(ref extra) = extra {
-extra.error_invalid_codeblock_attr(
-&format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
-help,
-);
-}
+if let Some(extra) = extra {
+extra.error_invalid_codeblock_attr(
+&format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
+help,
+);
 }
-None => {}
 }
 seen_other_tags = true;
 }

@@ -1051,13 +1051,10 @@ pub fn into_string(self) -> String {
 return String::new();
 }
 let mut replacer = |broken_link: BrokenLink<'_>| {
-if let Some(link) =
-links.iter().find(|link| &*link.original_text == broken_link.reference)
-{
-Some((link.href.as_str().into(), link.new_text.as_str().into()))
-} else {
-None
-}
+links
+.iter()
+.find(|link| &*link.original_text == broken_link.reference)
+.map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
 };
 let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer));

@@ -1135,13 +1132,10 @@ impl MarkdownSummaryLine<'_> {
 }
 let mut replacer = |broken_link: BrokenLink<'_>| {
-if let Some(link) =
-links.iter().find(|link| &*link.original_text == broken_link.reference)
-{
-Some((link.href.as_str().into(), link.new_text.as_str().into()))
-} else {
-None
-}
+links
+.iter()
+.find(|link| &*link.original_text == broken_link.reference)
+.map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
 };
 let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));

@@ -1172,13 +1166,10 @@ fn markdown_summary_with_limit(
 }
 let mut replacer = |broken_link: BrokenLink<'_>| {
-if let Some(link) =
-link_names.iter().find(|link| &*link.original_text == broken_link.reference)
-{
-Some((link.href.as_str().into(), link.new_text.as_str().into()))
-} else {
-None
-}
+link_names
+.iter()
+.find(|link| &*link.original_text == broken_link.reference)
+.map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
 };
 let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));

@@ -1413,7 +1404,7 @@ fn markdown_summary_with_limit(
 CodeBlockKind::Indented => {
 // The ending of the offset goes too far sometime so we reduce it by one in
 // these cases.
-if offset.end > offset.start && md.get(offset.end..=offset.end) == Some(&"\n") {
+if offset.end > offset.start && md.get(offset.end..=offset.end) == Some("\n") {
 (
 LangString::default(),
 offset.start,
@@ -1,3 +1,4 @@
+use std::collections::hash_map::Entry;
 use std::collections::BTreeMap;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};

@@ -36,7 +37,7 @@
 if let Some(&(ref fqp, _)) = cache.paths.get(&did) {
 let desc = item
 .doc_value()
-.map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(&cache)));
+.map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(cache)));
 cache.search_index.push(IndexItem {
 ty: item.type_(),
 name: item.name.unwrap().to_string(),

@@ -44,7 +45,7 @@
 desc,
 parent: Some(did),
 parent_idx: None,
-search_type: get_index_search_type(&item, tcx),
+search_type: get_index_search_type(item, tcx),
 aliases: item.attrs.get_doc_aliases(),
 });
 }

@@ -53,7 +54,7 @@
 let crate_doc = krate
 .module
 .doc_value()
-.map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(&cache)));
+.map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(cache)));
 let Cache { ref mut search_index, ref paths, .. } = *cache;

@@ -72,7 +73,7 @@
 // Set up alias indexes.
 for (i, item) in search_index.iter().enumerate() {
 for alias in &item.aliases[..] {
-aliases.entry(alias.to_lowercase()).or_insert(Vec::new()).push(i);
+aliases.entry(alias.to_lowercase()).or_insert_with(Vec::new).push(i);
 }
 }

@@ -82,12 +83,11 @@
 let mut lastpathid = 0usize;
 for item in search_index {
-item.parent_idx = item.parent.and_then(|defid| {
-if defid_to_pathid.contains_key(&defid) {
-defid_to_pathid.get(&defid).copied()
-} else {
+item.parent_idx = item.parent.and_then(|defid| match defid_to_pathid.entry(defid) {
+Entry::Occupied(entry) => Some(*entry.get()),
+Entry::Vacant(entry) => {
 let pathid = lastpathid;
-defid_to_pathid.insert(defid, pathid);
+entry.insert(pathid);
 lastpathid += 1;
 if let Some(&(ref fqp, short)) = paths.get(&defid) {
@@ -203,12 +203,12 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 let inputs = all_types
 .iter()
-.map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
+.map(|(ty, kind)| TypeWithKind::from((get_index_type(ty), *kind)))
 .filter(|a| a.ty.name.is_some())
 .collect();
 let output = ret_types
 .iter()
-.map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
+.map(|(ty, kind)| TypeWithKind::from((get_index_type(ty), *kind)))
 .filter(|a| a.ty.name.is_some())
 .collect::<Vec<_>>();
 let output = if output.is_empty() { None } else { Some(output) };

@@ -296,7 +296,7 @@ fn insert(res: &mut FxHashSet<(Type, ItemType)>, tcx: TyCtxt<'_>, ty: Type) -> u
 }
 let mut nb_added = 0;
-if let &Type::Generic(arg_s) = arg {
+if let Type::Generic(arg_s) = *arg {
 if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g {
 WherePredicate::BoundPredicate { ty, .. } => ty.def_id() == arg.def_id(),
 _ => false,

@@ -374,7 +374,7 @@ fn insert(res: &mut FxHashSet<(Type, ItemType)>, tcx: TyCtxt<'_>, ty: Type) -> u
 let ret_types = match decl.output {
 FnRetTy::Return(ref return_type) => {
 let mut ret = FxHashSet::default();
-get_real_types(generics, &return_type, tcx, 0, &mut ret);
+get_real_types(generics, return_type, tcx, 0, &mut ret);
 if ret.is_empty() {
 if let Some(kind) = return_type.def_id().map(|did| tcx.def_kind(did).into()) {
 ret.insert((return_type.clone(), kind));

@@ -160,7 +160,7 @@ pub(crate) fn cache(&self) -> &Cache {
 }
 pub(super) fn sess(&self) -> &'tcx Session {
-&self.shared.tcx.sess
+self.shared.tcx.sess
 }
 pub(super) fn derive_id(&self, id: String) -> String {

@@ -188,7 +188,7 @@ fn render_item(&self, it: &clean::Item, is_module: bool) -> String {
 };
 title.push_str(" - Rust");
 let tyname = it.type_();
-let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(&doc));
+let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc));
 let desc = if let Some(desc) = desc {
 desc
 } else if it.is_crate() {

@@ -126,8 +126,8 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 // If we couldn't figure out a type, just write `null`.
 let mut iter = self.inputs.iter();
 if match self.output {
-Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()),
-None => iter.any(|ref i| i.ty.name.is_none()),
+Some(ref output) => iter.chain(output.iter()).any(|i| i.ty.name.is_none()),
+None => iter.any(|i| i.ty.name.is_none()),
 } {
 serializer.serialize_none()
 } else {
@@ -906,7 +906,7 @@ fn method(
 AssocItemLink::GotoSource(did, provided_methods) => {
 // We're creating a link from an impl-item to the corresponding
 // trait-item and need to map the anchored type accordingly.
-let ty = if provided_methods.contains(&name) {
+let ty = if provided_methods.contains(name) {
 ItemType::Method
 } else {
 ItemType::TyMethod

@@ -965,7 +965,7 @@ fn method(
 name = name,
 generics = g.print(cx),
 decl = d.full_print(header_len, indent, header.asyncness, cx),
-notable_traits = notable_traits_decl(&d, cx),
+notable_traits = notable_traits_decl(d, cx),
 where_clause = print_where_clause(g, cx, indent, end_newline),
 )
 }

@@ -1008,7 +1008,7 @@ fn attributes(it: &clean::Item) -> Vec<String> {
 .iter()
 .filter_map(|attr| {
 if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
-Some(pprust::attribute_to_string(&attr).replace("\n", "").replace("  ", " "))
+Some(pprust::attribute_to_string(attr).replace("\n", "").replace("  ", " "))
 } else {
 None
 }

@@ -1041,7 +1041,7 @@ enum AssocItemLink<'a> {
 impl<'a> AssocItemLink<'a> {
 fn anchor(&self, id: &'a str) -> Self {
 match *self {
-AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(&id)),
+AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(id)),
 ref other => *other,
 }
 }

@@ -1120,7 +1120,7 @@ fn render_assoc_items(
 let (blanket_impl, concrete): (Vec<&&Impl>, _) =
 concrete.into_iter().partition(|t| t.inner_impl().blanket_impl.is_some());
-let mut impls = Buffer::empty_from(&w);
+let mut impls = Buffer::empty_from(w);
 render_impls(cx, &mut impls, &concrete, containing_item);
 let impls = impls.into_inner();
 if !impls.is_empty() {

@@ -1333,7 +1333,7 @@ fn doc_impl_item(
 && match render_mode {
 RenderMode::Normal => true,
 RenderMode::ForDeref { mut_: deref_mut_ } => {
-should_render_item(&item, deref_mut_, cx.tcx())
+should_render_item(item, deref_mut_, cx.tcx())
 }
 };

@@ -1566,7 +1566,7 @@ fn render_default_items(
 &mut impl_items,
 cx,
 &t.trait_,
-&i.inner_impl(),
+i.inner_impl(),
 &i.impl_item,
 parent,
 render_mode,
@@ -2060,7 +2060,7 @@ fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
 }
 }
-fn sidebar_deref_methods(cx: &Context<'_>, out: &mut Buffer, impl_: &Impl, v: &Vec<Impl>) {
+fn sidebar_deref_methods(cx: &Context<'_>, out: &mut Buffer, impl_: &Impl, v: &[Impl]) {
 let c = cx.cache();
 debug!("found Deref: {:?}", impl_);

@@ -2159,16 +2159,14 @@ fn get_id_for_impl_on_foreign_type(
 fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> {
 match *item.kind {
 clean::ItemKind::ImplItem(ref i) => {
-if let Some(ref trait_) = i.trait_ {
+i.trait_.as_ref().map(|trait_| {
 // Alternative format produces no URLs,
 // so this parameter does nothing.
-Some((
+(
 format!("{:#}", i.for_.print(cx)),
 get_id_for_impl_on_foreign_type(&i.for_, trait_, cx),
-))
-} else {
-None
-}
+)
+})
 }
 _ => None,
 }

@@ -2343,9 +2341,10 @@ fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean:
 let mut variants = e
 .variants
 .iter()
-.filter_map(|v| match v.name {
-Some(ref name) => Some(format!("<a href=\"#variant.{name}\">{name}</a>", name = name)),
-_ => None,
+.filter_map(|v| {
+v.name
+.as_ref()
+.map(|name| format!("<a href=\"#variant.{name}\">{name}</a>", name = name))
 })
 .collect::<Vec<_>>();
 if !variants.is_empty() {
@@ -34,10 +34,10 @@
 use serde::Serialize;
-const ITEM_TABLE_OPEN: &'static str = "<div class=\"item-table\">";
-const ITEM_TABLE_CLOSE: &'static str = "</div>";
-const ITEM_TABLE_ROW_OPEN: &'static str = "<div class=\"item-row\">";
-const ITEM_TABLE_ROW_CLOSE: &'static str = "</div>";
+const ITEM_TABLE_OPEN: &str = "<div class=\"item-table\">";
+const ITEM_TABLE_CLOSE: &str = "</div>";
+const ITEM_TABLE_ROW_OPEN: &str = "<div class=\"item-row\">";
+const ITEM_TABLE_ROW_CLOSE: &str = "</div>";
 // A component in a `use` path, like `string` in std::string::ToString
 #[derive(Serialize)]

@@ -761,7 +761,7 @@ fn trait_item(w: &mut Buffer, cx: &Context<'_>, m: &clean::Item, t: &clean::Item
 render_impl(
 w,
 cx,
-&implementor,
+implementor,
 it,
 assoc_link,
 RenderMode::Normal,

@@ -1497,7 +1497,7 @@ fn render_union(
 );
 if let Some(g) = g {
 write!(w, "{}", g.print(cx));
-write!(w, "{}", print_where_clause(&g, cx, 0, true));
+write!(w, "{}", print_where_clause(g, cx, 0, true));
 }
 write!(w, " {{\n{}", tab);

@@ -105,7 +105,7 @@ fn visit_generic_param(&mut self, p: &'tcx GenericParam<'tcx>) {
 }
 for bound in p.bounds {
 if let Some(trait_ref) = bound.trait_ref() {
-self.handle_path(&trait_ref.path, None);
+self.handle_path(trait_ref.path, None);
 }
 }
 }
@@ -121,42 +121,33 @@ fn visit_mod(&mut self, m: &'tcx Mod<'tcx>, span: Span, id: HirId) {
 if !span.overlaps(m.inner) {
 // Now that we confirmed it's a file import, we want to get the span for the module
 // name only and not all the "mod foo;".
-if let Some(node) = self.tcx.hir().find(id) {
-match node {
-Node::Item(item) => {
-self.matches
-.insert(item.ident.span, LinkFromSrc::Local(clean::Span::new(m.inner)));
-}
-_ => {}
-}
+if let Some(Node::Item(item)) = self.tcx.hir().find(id) {
+self.matches.insert(item.ident.span, LinkFromSrc::Local(clean::Span::new(m.inner)));
 }
 }
 intravisit::walk_mod(self, m, id);
 }
 fn visit_expr(&mut self, expr: &'tcx rustc_hir::Expr<'tcx>) {
-match expr.kind {
-ExprKind::MethodCall(segment, method_span, _, _) => {
-if let Some(hir_id) = segment.hir_id {
-let hir = self.tcx.hir();
-let body_id = hir.enclosing_body_owner(hir_id);
-let typeck_results = self.tcx.sess.with_disabled_diagnostic(|| {
-self.tcx.typeck_body(
-hir.maybe_body_owned_by(body_id).expect("a body which isn't a body"),
-)
-});
-if let Some(def_id) = typeck_results.type_dependent_def_id(expr.hir_id) {
-self.matches.insert(
-method_span,
-match hir.span_if_local(def_id) {
-Some(span) => LinkFromSrc::Local(clean::Span::new(span)),
-None => LinkFromSrc::External(def_id),
-},
-);
-}
+if let ExprKind::MethodCall(segment, method_span, _, _) = expr.kind {
+if let Some(hir_id) = segment.hir_id {
+let hir = self.tcx.hir();
+let body_id = hir.enclosing_body_owner(hir_id);
+let typeck_results = self.tcx.sess.with_disabled_diagnostic(|| {
+self.tcx.typeck_body(
+hir.maybe_body_owned_by(body_id).expect("a body which isn't a body"),
+)
+});
+if let Some(def_id) = typeck_results.type_dependent_def_id(expr.hir_id) {
+self.matches.insert(
+method_span,
+match hir.span_if_local(def_id) {
+Some(span) => LinkFromSrc::Local(clean::Span::new(span)),
+None => LinkFromSrc::External(def_id),
+},
+);
+}
 }
 }
-_ => {}
-}
 intravisit::walk_expr(self, expr);
 }
@@ -128,7 +128,7 @@ fn write_minify(
 ) -> Result<(), Error> {
 if minify {
 let contents = contents.as_ref();
-let contents = if resource.extension() == Some(&OsStr::new("css")) {
+let contents = if resource.extension() == Some(OsStr::new("css")) {
 minifier::css::minify(contents).map_err(|e| {
 Error::new(format!("failed to minify CSS file: {}", e), resource.path(self))
 })?

@@ -67,7 +67,7 @@ fn add_local_source(&mut self, item: &clean::Item) {
 }
 let mut href = String::new();
-clean_path(&self.src_root, &p, false, |component| {
+clean_path(self.src_root, &p, false, |component| {
 href.push_str(&component.to_string_lossy());
 href.push('/');
 });

@@ -168,7 +168,7 @@ fn emit_source(
 };
 // Remove the utf-8 BOM if any
-let contents = if contents.starts_with('\u{feff}') { &contents[3..] } else { &contents };
+let contents = contents.strip_prefix('\u{feff}').unwrap_or(&contents);
 // Create the intermediate directories
 let mut cur = self.dst.clone();

@@ -209,7 +209,7 @@ fn emit_source(
 contents,
 self.cx.shared.edition(),
 file_span,
-&self.cx,
+self.cx,
 &root_path,
 None,
 SourceContext::Standalone,

@@ -412,7 +412,7 @@ fn from_tcx(ty: clean::Type, tcx: TyCtxt<'_>) -> Self {
 .map(|t| {
 clean::GenericBound::TraitBound(t, rustc_hir::TraitBoundModifier::None)
 })
-.chain(lt.into_iter().map(|lt| clean::GenericBound::Outlives(lt)))
+.chain(lt.into_iter().map(clean::GenericBound::Outlives))
 .map(|bound| bound.into_tcx(tcx))
 .collect(),
 }

@@ -775,7 +775,7 @@ fn main_options(options: config::Options) -> MainResult {
 // We need to hold on to the complete resolver, so we cause everything to be
 // cloned for the analysis passes to use. Suboptimal, but necessary in the
 // current architecture.
-let resolver = core::create_resolver(queries, &sess);
+let resolver = core::create_resolver(queries, sess);
 if sess.has_errors() {
 sess.fatal("Compilation failed, aborting rustdoc");
@@ -39,7 +39,7 @@ fn find_raw_urls(
 ) {
 trace!("looking for raw urls in {}", text);
 // For now, we only check "full" URLs (meaning, starting with "http://" or "https://").
-for match_ in URL_REGEX.find_iter(&text) {
+for match_ in URL_REGEX.find_iter(text) {
 let url = match_.as_str();
 let url_range = match_.range();
 f(

@@ -36,7 +36,7 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
 let source = dox[code_block.code].to_owned();
 let sess = ParseSess::with_span_handler(handler, sm);
-let edition = code_block.lang_string.edition.unwrap_or(self.cx.tcx.sess.edition());
+let edition = code_block.lang_string.edition.unwrap_or_else(|| self.cx.tcx.sess.edition());
 let expn_data = ExpnData::default(
 ExpnKind::AstPass(AstPass::TestHarness),
 DUMMY_SP,

@@ -77,7 +77,7 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
 // The span and whether it is precise or not.
 let (sp, precise_span) = match super::source_span_for_markdown_range(
 self.cx.tcx,
-&dox,
+dox,
 &code_block.range,
 &item.attrs,
 ) {

@@ -123,7 +123,7 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
 // FIXME(#67563): Provide more context for these errors by displaying the spans inline.
 for message in buffer.messages.iter() {
-diag.note(&message);
+diag.note(message);
 }
 diag.emit();

@@ -150,8 +150,8 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
 item.def_id.expect_def_id(),
 sp,
 );
-for code_block in markdown::rust_code_blocks(&dox, &extra) {
-self.check_rust_syntax(&item, &dox, code_block);
+for code_block in markdown::rust_code_blocks(dox, &extra) {
+self.check_rust_syntax(&item, dox, code_block);
 }
 }

@@ -115,10 +115,10 @@ fn add_test(&mut self, _: String, config: LangString, _: usize) {
 let mut tests = Tests { found_tests: 0 };
-find_testable_code(&dox, &mut tests, ErrorCodes::No, false, None);
+find_testable_code(dox, &mut tests, ErrorCodes::No, false, None);
 if tests.found_tests == 0 && cx.tcx.sess.is_nightly_build() {
-if should_have_doc_example(cx, &item) {
+if should_have_doc_example(cx, item) {
 debug!("reporting error for {:?} (hir_id={:?})", item, hir_id);
 let sp = item.attr_span(cx.tcx);
 cx.tcx.struct_span_lint_hir(
@@ -289,7 +289,7 @@ fn variant_field(
 ) -> Result<(Res, Option<String>), ErrorKind<'path>> {
 let tcx = self.cx.tcx;
 let no_res = || ResolutionFailure::NotResolved {
-module_id: module_id,
+module_id,
 partial_res: None,
 unresolved: path_str.into(),
 };

@@ -437,7 +437,7 @@ fn resolve_macro(
 fn resolve_path(&self, path_str: &str, ns: Namespace, module_id: DefId) -> Option<Res> {
 let result = self.cx.enter_resolver(|resolver| {
 resolver
-.resolve_str_path_error(DUMMY_SP, &path_str, ns, module_id)
+.resolve_str_path_error(DUMMY_SP, path_str, ns, module_id)
 .and_then(|(_, res)| res.try_into())
 });
 debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns);

@@ -543,7 +543,7 @@ fn def_id_to_res(&self, ty_id: DefId) -> Option<Res> {
 ty::Uint(uty) => Res::Primitive(uty.into()),
 ty::Float(fty) => Res::Primitive(fty.into()),
 ty::Str => Res::Primitive(Str),
-ty::Tuple(ref tys) if tys.is_empty() => Res::Primitive(Unit),
+ty::Tuple(tys) if tys.is_empty() => Res::Primitive(Unit),
 ty::Tuple(_) => Res::Primitive(Tuple),
 ty::Array(..) => Res::Primitive(Array),
 ty::Slice(_) => Res::Primitive(Slice),

@@ -978,13 +978,13 @@ fn preprocess_link<'a>(
 }
 // Parse and strip the disambiguator from the link, if present.
-let (disambiguator, path_str, link_text) = match Disambiguator::from_str(&link) {
+let (disambiguator, path_str, link_text) = match Disambiguator::from_str(link) {
 Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()),
 Ok(None) => (None, link.trim(), link.trim()),
 Err((err_msg, relative_range)) => {
 // Only report error if we would not have ignored this link. See issue #83859.
 if !should_ignore_link_with_disambiguators(link) {
-let no_backticks_range = range_between_backticks(&ori_link);
+let no_backticks_range = range_between_backticks(ori_link);
 let disambiguator_range = (no_backticks_range.start + relative_range.start)
 ..(no_backticks_range.start + relative_range.end);
 return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
@@ -1000,7 +1000,7 @@ fn preprocess_link<'a>(
 // Strip generics from the path.
 let path_str = if path_str.contains(['<', '>'].as_slice()) {
-match strip_generics_from_path(&path_str) {
+match strip_generics_from_path(path_str) {
 Ok(path) => path,
 Err(err_kind) => {
 debug!("link has malformed generics: {}", path_str);

@@ -1228,7 +1228,7 @@ fn resolve_link(
 if self.cx.tcx.privacy_access_levels(()).is_exported(src_id)
 && !self.cx.tcx.privacy_access_levels(()).is_exported(dst_id)
 {
-privacy_error(self.cx, &diag_info, &path_str);
+privacy_error(self.cx, &diag_info, path_str);
 }
 }

@@ -1766,8 +1766,8 @@ fn report_diagnostic(
 let span =
 super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
-if dox.bytes().nth(link_range.start) == Some(b'`')
-&& dox.bytes().nth(link_range.end - 1) == Some(b'`')
+if dox.as_bytes().get(link_range.start) == Some(&b'`')
+&& dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
 {
 sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
 } else {

@@ -1868,8 +1868,7 @@ fn split(path: &str) -> Option<(&str, &str)> {
 };
 name = start;
 for ns in [TypeNS, ValueNS, MacroNS] {
-if let Some(res) =
-collector.check_full_res(ns, &start, module_id, &None)
+if let Some(res) = collector.check_full_res(ns, start, module_id, &None)
 {
 debug!("found partial_res={:?}", res);
 *partial_res = Some(res);

@@ -34,7 +34,7 @@ fn load_links_in_attrs(&mut self, attrs: &[ast::Attribute], span: Span) {
 let attrs = crate::clean::Attributes::from_ast(attrs, None);
 for (parent_module, doc) in attrs.collapsed_doc_value_by_module_level() {
 debug!(?doc);
-for link in markdown_links(&doc.as_str()) {
+for link in markdown_links(doc.as_str()) {
 debug!(?link.link);
 let path_str = if let Some(Ok(x)) = preprocess_link(&link) {
 x.path_str

@@ -46,7 +46,7 @@ fn load_links_in_attrs(&mut self, attrs: &[ast::Attribute], span: Span) {
 span,
 &path_str,
 TypeNS,
-parent_module.unwrap_or(self.current_mod.to_def_id()),
+parent_module.unwrap_or_else(|| self.current_mod.to_def_id()),
 );
 });
 }
@@ -9,7 +9,6 @@
 use rustc_hir::CRATE_HIR_ID;
 use rustc_middle::middle::privacy::AccessLevel;
 use rustc_middle::ty::TyCtxt;
-use rustc_span;
 use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
 use rustc_span::source_map::Spanned;
 use rustc_span::symbol::{kw, sym, Symbol};

@@ -277,7 +276,7 @@ fn visit_item(
 _ if self.inlining && !is_pub => {}
 hir::ItemKind::GlobalAsm(..) => {}
 hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
-hir::ItemKind::Use(ref path, kind) => {
+hir::ItemKind::Use(path, kind) => {
 let is_glob = kind == hir::UseKind::Glob;
 // Struct and variant constructors and proc macro stubs always show up alongside