Cleanup highlighting code

Removes some unused code and de-publicizes structs
This commit is contained in:
Mark Rousskov 2018-07-22 14:10:10 -06:00
parent 7bea518d3a
commit 8e65a46268
3 changed files with 23 additions and 42 deletions

View File

@ -13,12 +13,7 @@
//! This module uses libsyntax's lexer to provide token-based highlighting for
//! the HTML documentation generated by rustdoc.
//!
//! If you just want syntax highlighting for a Rust program, then you can use
//! the `render_inner_with_highlighting` or `render_with_highlighting`
//! functions. For more advanced use cases (if you want to supply your own css
//! classes or control how the HTML is generated, or even generate something
//! other than HTML), then you should implement the `Writer` trait and use a
//! `Classifier`.
//! Use the `render_with_highlighting` function to highlight some Rust code.
use html::escape::Escape;
@ -33,7 +28,7 @@
use syntax_pos::{Span, FileName};
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
pub fn render_with_highlighting(src: &str, class: Option<&str>,
extension: Option<&str>,
tooltip: Option<(&str, &str)>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
@ -46,7 +41,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
class='tooltiptext'>{}</span></div></div>",
class, tooltip).unwrap();
}
write_header(class, id, &mut out).unwrap();
write_header(class, &mut out).unwrap();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm, None), sess.codemap());
if let Err(_) = classifier.write_source(&mut out) {
@ -63,7 +58,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
/// Processes a program (nested in the internal `lexer`), classifying strings of
/// text by highlighting category (`Class`). Calls out to a `Writer` to write
/// each span of text in sequence.
pub struct Classifier<'a> {
struct Classifier<'a> {
lexer: lexer::StringReader<'a>,
codemap: &'a CodeMap,
@ -75,7 +70,7 @@ pub struct Classifier<'a> {
/// How a span of text is classified. Mostly corresponds to token kinds.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Class {
enum Class {
None,
Comment,
DocComment,
@ -103,7 +98,7 @@ pub enum Class {
/// The classifier will call into the `Writer` implementation as it finds spans
/// of text to highlight. Exactly how that text should be highlighted is up to
/// the implementation.
pub trait Writer {
trait Writer {
/// Called when we start processing a span of text that should be highlighted.
/// The `Class` argument specifies how it should be highlighted.
fn enter_span(&mut self, _: Class) -> io::Result<()>;
@ -111,11 +106,9 @@ pub trait Writer {
/// Called at the end of a span of highlighted text.
fn exit_span(&mut self) -> io::Result<()>;
/// Called for a span of text, usually, but not always, a single token. If
/// the string of text (`T`) does correspond to a token, then the token will
/// also be passed. If the text should be highlighted differently from the
/// surrounding text, then the `Class` argument will be a value other than
/// `None`.
/// Called for a span of text. If the text should be highlighted differently from the
/// surrounding text, then the `Class` argument will be a value other than `None`.
///
/// The following sequences of callbacks are equivalent:
/// ```plain
/// enter_span(Foo), string("text", None), exit_span()
@ -125,8 +118,7 @@ pub trait Writer {
/// more flexible.
fn string<T: Display>(&mut self,
text: T,
klass: Class,
tok: Option<&TokenAndSpan>)
klass: Class)
-> io::Result<()>;
}
@ -135,8 +127,7 @@ fn string<T: Display>(&mut self,
impl<U: Write> Writer for U {
fn string<T: Display>(&mut self,
text: T,
klass: Class,
_tas: Option<&TokenAndSpan>)
klass: Class)
-> io::Result<()> {
match klass {
Class::None => write!(self, "{}", text),
@ -154,7 +145,7 @@ fn exit_span(&mut self) -> io::Result<()> {
}
impl<'a> Classifier<'a> {
pub fn new(lexer: lexer::StringReader<'a>, codemap: &'a CodeMap) -> Classifier<'a> {
fn new(lexer: lexer::StringReader<'a>, codemap: &'a CodeMap) -> Classifier<'a> {
Classifier {
lexer,
codemap,
@ -186,7 +177,7 @@ fn try_next_token(&mut self) -> io::Result<TokenAndSpan> {
/// is used. All source code emission is done as slices from the source map,
/// not from the tokens themselves, in order to stay true to the original
/// source.
pub fn write_source<W: Writer>(&mut self,
fn write_source<W: Writer>(&mut self,
out: &mut W)
-> io::Result<()> {
loop {
@ -208,7 +199,7 @@ fn write_token<W: Writer>(&mut self,
-> io::Result<()> {
let klass = match tas.tok {
token::Shebang(s) => {
out.string(Escape(&s.as_str()), Class::None, Some(&tas))?;
out.string(Escape(&s.as_str()), Class::None)?;
return Ok(());
},
@ -272,8 +263,8 @@ fn write_token<W: Writer>(&mut self,
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
out.string("#", Class::None, None)?;
out.string("!", Class::None, None)?;
out.string("#", Class::None)?;
out.string("!", Class::None)?;
return Ok(());
}
@ -282,13 +273,13 @@ fn write_token<W: Writer>(&mut self,
self.in_attribute = true;
out.enter_span(Class::Attribute)?;
}
out.string("#", Class::None, None)?;
out.string("#", Class::None)?;
return Ok(());
}
token::CloseDelim(token::Bracket) => {
if self.in_attribute {
self.in_attribute = false;
out.string("]", Class::None, None)?;
out.string("]", Class::None)?;
out.exit_span()?;
return Ok(());
} else {
@ -344,7 +335,7 @@ fn write_token<W: Writer>(&mut self,
// Anything that didn't return above is the simple case where the
// class just spans a single token, so we can use the `string` method.
out.string(Escape(&self.snip(tas.sp)), klass, Some(&tas))
out.string(Escape(&self.snip(tas.sp)), klass)
}
// Helper function to get a snippet from the codemap.
@ -355,7 +346,7 @@ fn snip(&self, sp: Span) -> String {
impl Class {
/// Returns the css class expected by rustdoc for each `Class`.
pub fn rustdoc_class(self) -> &'static str {
fn rustdoc_class(self) -> &'static str {
match self {
Class::None => "",
Class::Comment => "comment",
@ -379,15 +370,8 @@ pub fn rustdoc_class(self) -> &'static str {
}
}
fn write_header(class: Option<&str>,
id: Option<&str>,
out: &mut dyn Write)
-> io::Result<()> {
write!(out, "<pre ")?;
if let Some(id) = id {
write!(out, "id='{}' ", id)?;
}
write!(out, "class=\"rust {}\">\n", class.unwrap_or(""))
fn write_header(class: Option<&str>, out: &mut Write) -> io::Result<()> {
write!(out, "<pre class=\"rust {}\">\n", class.unwrap_or(""))
}
fn write_footer(out: &mut dyn Write) -> io::Result<()> {

View File

@ -247,7 +247,6 @@ fn dont_escape(c: u8) -> bool {
if ignore { " ignore" }
else if compile_fail { " compile_fail" }
else { "" })),
None,
playground_button.as_ref().map(String::as_str),
tooltip));
Some(Event::Html(s.into()))

View File

@ -2239,7 +2239,6 @@ fn render_assoc_const_value(item: &clean::Item) -> String {
None,
None,
None,
None,
)
}
_ => String::new(),
@ -4551,7 +4550,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
}
write!(fmt, "</pre>")?;
write!(fmt, "{}",
highlight::render_with_highlighting(s, None, None, None, None))?;
highlight::render_with_highlighting(s, None, None, None))?;
Ok(())
}
}
@ -4562,7 +4561,6 @@ fn item_macro(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
w.write_str(&highlight::render_with_highlighting(&t.source,
Some("macro"),
None,
None,
None))
})?;
document(w, cx, it)