Move render_macro_matcher to own module
parent 0904614751
commit 81b1e32790
@@ -5,6 +5,7 @@
 mod blanket_impl;
 crate mod cfg;
 crate mod inline;
+mod render_macro_matchers;
 mod simplify;
 crate mod types;
 crate mod utils;
src/librustdoc/clean/render_macro_matchers.rs  Normal file  (+191)
@@ -0,0 +1,191 @@
+use rustc_ast::token::{self, BinOpToken, DelimToken};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast_pretty::pprust::state::State as Printer;
+use rustc_ast_pretty::pprust::PrintState;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::parse::ParseSess;
+use rustc_span::source_map::FilePathMapping;
+use rustc_span::symbol::Symbol;
+
+/// Render a macro matcher in a format suitable for displaying to the user
+/// as part of an item declaration.
+pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> String {
+    if let Some(snippet) = snippet_equal_to_token(tcx, matcher) {
+        // If the original source code is known, we display the matcher exactly
+        // as present in the source code.
+        return snippet;
+    }
+
+    // If the matcher is macro-generated or some other reason the source code
+    // snippet is not available, we attempt to nicely render the token tree.
+    let mut printer = Printer::new();
+
+    // If the inner ibox fits on one line, we get:
+    //
+    //     macro_rules! macroname {
+    //         (the matcher) => {...};
+    //     }
+    //
+    // If the inner ibox gets wrapped, the cbox will break and get indented:
+    //
+    //     macro_rules! macroname {
+    //         (
+    //             the matcher ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    //             ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!
+    //         ) => {...};
+    //     }
+    printer.cbox(8);
+    printer.word("(");
+    printer.zerobreak();
+    printer.ibox(0);
+    match matcher {
+        TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
+        // Matcher which is not a Delimited is unexpected and should've failed
+        // to compile, but we render whatever it is wrapped in parens.
+        TokenTree::Token(_) => print_tt(&mut printer, matcher),
+    }
+    printer.end();
+    printer.break_offset_if_not_bol(0, -4);
+    printer.word(")");
+    printer.end();
+    printer.s.eof()
+}
+
+/// Find the source snippet for this token's Span, reparse it, and return the
+/// snippet if the reparsed TokenTree matches the argument TokenTree.
+fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String> {
+    // Find what rustc thinks is the source snippet.
+    // This may not actually be anything meaningful if this matcher was itself
+    // generated by a macro.
+    let source_map = tcx.sess.source_map();
+    let span = matcher.span();
+    let snippet = source_map.span_to_snippet(span).ok()?;
+
+    // Create a Parser.
+    let sess = ParseSess::new(FilePathMapping::empty());
+    let file_name = source_map.span_to_filename(span);
+    let mut parser =
+        match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
+            Ok(parser) => parser,
+            Err(diagnostics) => {
+                for mut diagnostic in diagnostics {
+                    diagnostic.cancel();
+                }
+                return None;
+            }
+        };
+
+    // Reparse a single token tree.
+    let mut reparsed_trees = match parser.parse_all_token_trees() {
+        Ok(reparsed_trees) => reparsed_trees,
+        Err(mut diagnostic) => {
+            diagnostic.cancel();
+            return None;
+        }
+    };
+    if reparsed_trees.len() != 1 {
+        return None;
+    }
+    let reparsed_tree = reparsed_trees.pop().unwrap();
+
+    // Compare against the original tree.
+    if reparsed_tree.eq_unspanned(matcher) { Some(snippet) } else { None }
+}
+
+fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
+    match tt {
+        TokenTree::Token(token) => {
+            let token_str = printer.token_to_string(token);
+            printer.word(token_str);
+            if let token::DocComment(..) = token.kind {
+                printer.hardbreak()
+            }
+        }
+        TokenTree::Delimited(_span, delim, tts) => {
+            let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
+            printer.word(open_delim);
+            if !tts.is_empty() {
+                if *delim == DelimToken::Brace {
+                    printer.space();
+                }
+                print_tts(printer, tts);
+                if *delim == DelimToken::Brace {
+                    printer.space();
+                }
+            }
+            let close_delim = printer.token_kind_to_string(&token::CloseDelim(*delim));
+            printer.word(close_delim);
+        }
+    }
+}
+
+fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
+    #[derive(Copy, Clone, PartialEq)]
+    enum State {
+        Start,
+        Dollar,
+        DollarIdent,
+        DollarIdentColon,
+        DollarParen,
+        DollarParenSep,
+        Pound,
+        PoundBang,
+        Ident,
+        Other,
+    }
+
+    use State::*;
+
+    let mut state = Start;
+    for tt in tts.trees() {
+        let (needs_space, next_state) = match &tt {
+            TokenTree::Token(tt) => match (state, &tt.kind) {
+                (Dollar, token::Ident(..)) => (false, DollarIdent),
+                (DollarIdent, token::Colon) => (false, DollarIdentColon),
+                (DollarIdentColon, token::Ident(..)) => (false, Other),
+                (
+                    DollarParen,
+                    token::BinOp(BinOpToken::Plus | BinOpToken::Star) | token::Question,
+                ) => (false, Other),
+                (DollarParen, _) => (false, DollarParenSep),
+                (DollarParenSep, token::BinOp(BinOpToken::Plus | BinOpToken::Star)) => {
+                    (false, Other)
+                }
+                (Pound, token::Not) => (false, PoundBang),
+                (_, token::Ident(symbol, /* is_raw */ false))
+                    if !usually_needs_space_between_keyword_and_open_delim(*symbol) =>
+                {
+                    (true, Ident)
+                }
+                (_, token::Comma | token::Semi) => (false, Other),
+                (_, token::Dollar) => (true, Dollar),
+                (_, token::Pound) => (true, Pound),
+                (_, _) => (true, Other),
+            },
+            TokenTree::Delimited(_, delim, _) => match (state, delim) {
+                (Dollar, DelimToken::Paren) => (false, DollarParen),
+                (Pound | PoundBang, DelimToken::Bracket) => (false, Other),
+                (Ident, DelimToken::Paren | DelimToken::Bracket) => (false, Other),
+                (_, _) => (true, Other),
+            },
+        };
+        if state != Start && needs_space {
+            printer.space();
+        }
+        print_tt(printer, &tt);
+        state = next_state;
+    }
+}
+
+// This rough subset of keywords is listed here to distinguish tokens resembling
+// `f(0)` (no space between ident and paren) from tokens resembling `if let (0,
+// 0) = x` (space between ident and paren).
+fn usually_needs_space_between_keyword_and_open_delim(symbol: Symbol) -> bool {
+    match symbol.as_str() {
+        "as" | "box" | "break" | "const" | "continue" | "crate" | "else" | "enum" | "extern"
+        | "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" | "mod" | "move"
+        | "mut" | "ref" | "return" | "static" | "struct" | "trait" | "type" | "unsafe" | "use"
+        | "where" | "while" | "yield" => true,
+        _ => false,
+    }
+}
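
Aside, not part of the diff: the fallback path in print_tts above decides spacing with a small state machine keyed on the previous token. The sketch below imitates that idea on plain string tokens so it runs outside the compiler; State, render, and the trimmed keyword list are hypothetical stand-ins, not rustdoc API. Running it shows why "$x:expr" prints with no internal spaces while "if let (...)" keeps the space after the keywords.

// Standalone sketch of the spacing heuristic; only the approach mirrors print_tts.
#[derive(Copy, Clone, PartialEq)]
enum State {
    Start,
    Dollar,
    DollarIdent,
    DollarIdentColon,
    Ident,
    Other,
}

fn render(tokens: &[&str]) -> String {
    use State::*;

    let mut out = String::new();
    let mut state = Start;
    for &tok in tokens {
        let is_ident = !tok.is_empty() && tok.chars().all(|c| c.is_alphanumeric() || c == '_');
        // Rough stand-in for usually_needs_space_between_keyword_and_open_delim.
        let is_keyword = matches!(tok, "if" | "let" | "match" | "while" | "for");
        let (needs_space, next_state) = match (state, tok) {
            (Dollar, _) if is_ident => (false, DollarIdent),
            (DollarIdent, ":") => (false, DollarIdentColon),
            (DollarIdentColon, _) if is_ident => (false, Other),
            (_, "," | ";") => (false, Other),
            (_, "$") => (true, Dollar),
            // Closing delimiters hug the preceding token.
            (_, ")" | "]" | "}") => (false, Other),
            // A plain identifier binds tightly to a following `(` or `[` ...
            (Ident, "(" | "[") => (false, Start),
            // ... while after anything else the delimiter gets a space, and the
            // first token inside the group starts fresh (no leading space).
            (_, "(" | "[") => (true, Start),
            _ if is_ident && !is_keyword => (true, Ident),
            _ => (true, Other),
        };
        if state != Start && needs_space {
            out.push(' ');
        }
        out.push_str(tok);
        state = next_state;
    }
    out
}

fn main() {
    let matcher = ["$", "x", ":", "expr", ",", "f", "(", "0", ")"];
    assert_eq!(render(&matcher), "$x:expr, f(0)");

    let keyword_case = ["if", "let", "(", "$", "p", ":", "pat", ")"];
    assert_eq!(render(&keyword_case), "if let ($p:pat)");
}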
@@ -1,5 +1,6 @@
 use crate::clean::auto_trait::AutoTraitFinder;
 use crate::clean::blanket_impl::BlanketImplFinder;
+use crate::clean::render_macro_matchers::render_macro_matcher;
 use crate::clean::{
     inline, Clean, Crate, ExternalCrate, Generic, GenericArg, GenericArgs, ImportSource, Item,
     ItemKind, Lifetime, Path, PathSegment, Primitive, PrimitiveType, Type, TypeBinding, Visibility,
@@ -9,10 +10,7 @@
 use crate::visit_lib::LibEmbargoVisitor;

 use rustc_ast as ast;
-use rustc_ast::token::{self, BinOpToken, DelimToken};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
-use rustc_ast_pretty::pprust::state::State as Printer;
-use rustc_ast_pretty::pprust::PrintState;
+use rustc_ast::tokenstream::TokenTree;
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
@@ -20,8 +18,6 @@
 use rustc_middle::mir::interpret::ConstValue;
 use rustc_middle::ty::subst::{GenericArgKind, SubstsRef};
 use rustc_middle::ty::{self, DefIdTree, TyCtxt};
-use rustc_session::parse::ParseSess;
-use rustc_span::source_map::FilePathMapping;
 use rustc_span::symbol::{kw, sym, Symbol};
 use std::fmt::Write as _;
 use std::mem;
@@ -503,189 +499,6 @@ pub(super) fn render_macro_arms<'a>(
     out
 }

-/// Render a macro matcher in a format suitable for displaying to the user
-/// as part of an item declaration.
-pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> String {
-    if let Some(snippet) = snippet_equal_to_token(tcx, matcher) {
-        // If the original source code is known, we display the matcher exactly
-        // as present in the source code.
-        return snippet;
-    }
-
-    // If the matcher is macro-generated or some other reason the source code
-    // snippet is not available, we attempt to nicely render the token tree.
-    let mut printer = Printer::new();
-
-    // If the inner ibox fits on one line, we get:
-    //
-    //     macro_rules! macroname {
-    //         (the matcher) => {...};
-    //     }
-    //
-    // If the inner ibox gets wrapped, the cbox will break and get indented:
-    //
-    //     macro_rules! macroname {
-    //         (
-    //             the matcher ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    //             ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~!
-    //         ) => {...};
-    //     }
-    printer.cbox(8);
-    printer.word("(");
-    printer.zerobreak();
-    printer.ibox(0);
-    match matcher {
-        TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
-        // Matcher which is not a Delimited is unexpected and should've failed
-        // to compile, but we render whatever it is wrapped in parens.
-        TokenTree::Token(_) => print_tt(&mut printer, matcher),
-    }
-    printer.end();
-    printer.break_offset_if_not_bol(0, -4);
-    printer.word(")");
-    printer.end();
-    printer.s.eof()
-}
-
-/// Find the source snippet for this token's Span, reparse it, and return the
-/// snippet if the reparsed TokenTree matches the argument TokenTree.
-fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String> {
-    // Find what rustc thinks is the source snippet.
-    // This may not actually be anything meaningful if this matcher was itself
-    // generated by a macro.
-    let source_map = tcx.sess.source_map();
-    let span = matcher.span();
-    let snippet = source_map.span_to_snippet(span).ok()?;
-
-    // Create a Parser.
-    let sess = ParseSess::new(FilePathMapping::empty());
-    let file_name = source_map.span_to_filename(span);
-    let mut parser =
-        match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
-            Ok(parser) => parser,
-            Err(diagnostics) => {
-                for mut diagnostic in diagnostics {
-                    diagnostic.cancel();
-                }
-                return None;
-            }
-        };
-
-    // Reparse a single token tree.
-    let mut reparsed_trees = match parser.parse_all_token_trees() {
-        Ok(reparsed_trees) => reparsed_trees,
-        Err(mut diagnostic) => {
-            diagnostic.cancel();
-            return None;
-        }
-    };
-    if reparsed_trees.len() != 1 {
-        return None;
-    }
-    let reparsed_tree = reparsed_trees.pop().unwrap();
-
-    // Compare against the original tree.
-    if reparsed_tree.eq_unspanned(matcher) { Some(snippet) } else { None }
-}
-
-fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
-    match tt {
-        TokenTree::Token(token) => {
-            let token_str = printer.token_to_string(token);
-            printer.word(token_str);
-            if let token::DocComment(..) = token.kind {
-                printer.hardbreak()
-            }
-        }
-        TokenTree::Delimited(_span, delim, tts) => {
-            let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
-            printer.word(open_delim);
-            if !tts.is_empty() {
-                if *delim == DelimToken::Brace {
-                    printer.space();
-                }
-                print_tts(printer, tts);
-                if *delim == DelimToken::Brace {
-                    printer.space();
-                }
-            }
-            let close_delim = printer.token_kind_to_string(&token::CloseDelim(*delim));
-            printer.word(close_delim);
-        }
-    }
-}
-
-fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
-    #[derive(Copy, Clone, PartialEq)]
-    enum State {
-        Start,
-        Dollar,
-        DollarIdent,
-        DollarIdentColon,
-        DollarParen,
-        DollarParenSep,
-        Pound,
-        PoundBang,
-        Ident,
-        Other,
-    }
-
-    use State::*;
-
-    let mut state = Start;
-    for tt in tts.trees() {
-        let (needs_space, next_state) = match &tt {
-            TokenTree::Token(tt) => match (state, &tt.kind) {
-                (Dollar, token::Ident(..)) => (false, DollarIdent),
-                (DollarIdent, token::Colon) => (false, DollarIdentColon),
-                (DollarIdentColon, token::Ident(..)) => (false, Other),
-                (
-                    DollarParen,
-                    token::BinOp(BinOpToken::Plus | BinOpToken::Star) | token::Question,
-                ) => (false, Other),
-                (DollarParen, _) => (false, DollarParenSep),
-                (DollarParenSep, token::BinOp(BinOpToken::Plus | BinOpToken::Star)) => {
-                    (false, Other)
-                }
-                (Pound, token::Not) => (false, PoundBang),
-                (_, token::Ident(symbol, /* is_raw */ false))
-                    if !usually_needs_space_between_keyword_and_open_delim(*symbol) =>
-                {
-                    (true, Ident)
-                }
-                (_, token::Comma | token::Semi) => (false, Other),
-                (_, token::Dollar) => (true, Dollar),
-                (_, token::Pound) => (true, Pound),
-                (_, _) => (true, Other),
-            },
-            TokenTree::Delimited(_, delim, _) => match (state, delim) {
-                (Dollar, DelimToken::Paren) => (false, DollarParen),
-                (Pound | PoundBang, DelimToken::Bracket) => (false, Other),
-                (Ident, DelimToken::Paren | DelimToken::Bracket) => (false, Other),
-                (_, _) => (true, Other),
-            },
-        };
-        if state != Start && needs_space {
-            printer.space();
-        }
-        print_tt(printer, &tt);
-        state = next_state;
-    }
-}
-
-// This rough subset of keywords is listed here to distinguish tokens resembling
-// `f(0)` (no space between ident and paren) from tokens resembling `if let (0,
-// 0) = x` (space between ident and paren).
-fn usually_needs_space_between_keyword_and_open_delim(symbol: Symbol) -> bool {
-    match symbol.as_str() {
-        "as" | "box" | "break" | "const" | "continue" | "crate" | "else" | "enum" | "extern"
-        | "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" | "mod" | "move"
-        | "mut" | "ref" | "return" | "static" | "struct" | "trait" | "type" | "unsafe" | "use"
-        | "where" | "while" | "yield" => true,
-        _ => false,
-    }
-}
-
 pub(super) fn display_macro_source(
     cx: &mut DocContext<'_>,
     name: Symbol,
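
Note, illustration only (not part of the commit): snippet_equal_to_token trusts the source-map snippet only when reparsing it reproduces the original token tree; otherwise render_macro_matcher falls back to the pretty-printer. A minimal std-only sketch of that round-trip guard, with a whitespace tokenizer standing in for rustc_parse and slice equality standing in for TokenTree::eq_unspanned; all names here are hypothetical.

fn tokenize(src: &str) -> Vec<String> {
    // Stand-in "parser": the real code reparses with rustc_parse and compares
    // TokenTrees; a whitespace split is enough to show the shape of the check.
    src.split_whitespace().map(|t| t.to_owned()).collect()
}

fn snippet_if_roundtrips(snippet: &str, original: &[String]) -> Option<String> {
    // Keep the snippet verbatim only if re-deriving the structure from it
    // matches what we already have; otherwise signal the caller to fall back.
    if tokenize(snippet).as_slice() == original { Some(snippet.to_owned()) } else { None }
}

fn main() {
    let original = tokenize("( $ e : expr )");
    // The snippet round-trips, so it can be shown exactly as written.
    assert!(snippet_if_roundtrips("( $ e : expr )", &original).is_some());
    // A macro-generated matcher's "snippet" may be unrelated text; reject it
    // and let the caller pretty-print the token tree instead.
    assert!(snippet_if_roundtrips("fn unrelated() {}", &original).is_none());
}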