2014-02-20 01:14:51 -08:00
|
|
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
|
|
|
//! Basic html highlighting functionality
|
|
|
|
//!
|
|
|
|
//! This module uses libsyntax's lexer to provide token-based highlighting for
|
|
|
|
//! the HTML documentation generated by rustdoc.
|
|
|
|
|
|
|
|
use html::escape::Escape;
|
|
|
|
|
2015-03-11 15:24:14 -07:00
|
|
|
use std::io;
|
|
|
|
use std::io::prelude::*;
|
2016-04-04 11:07:41 +12:00
|
|
|
use syntax::parse::lexer::{self, Reader};
|
2014-10-27 19:22:52 +11:00
|
|
|
use syntax::parse::token;
|
2014-08-18 08:29:44 -07:00
|
|
|
use syntax::parse;
|
2014-02-20 01:14:51 -08:00
|
|
|
|
2016-04-04 11:07:41 +12:00
|
|
|
/// Highlights `src`, returning the HTML output.
|
|
|
|
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String {
|
2014-05-10 17:39:08 -07:00
|
|
|
debug!("highlighting: ================\n{}\n==============", src);
|
2015-05-13 23:00:17 +03:00
|
|
|
let sess = parse::ParseSess::new();
|
2015-05-13 23:08:02 +03:00
|
|
|
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
|
2014-02-20 01:14:51 -08:00
|
|
|
|
2014-11-11 16:01:29 -05:00
|
|
|
let mut out = Vec::new();
|
2016-04-04 11:07:41 +12:00
|
|
|
write_header(class, id, &mut out).unwrap();
|
|
|
|
write_source(&sess,
|
|
|
|
lexer::StringReader::new(&sess.span_diagnostic, fm),
|
|
|
|
&mut out).unwrap();
|
|
|
|
write_footer(&mut out).unwrap();
|
|
|
|
String::from_utf8_lossy(&out[..]).into_owned()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Highlights `src`, returning the HTML output. Returns only the inner html to
|
|
|
|
/// be inserted into an element. C.f., `render_with_highlighting` which includes
|
|
|
|
/// an enclosing `<pre>` block.
|
|
|
|
pub fn render_inner_with_highlighting(src: &str) -> String {
|
|
|
|
let sess = parse::ParseSess::new();
|
|
|
|
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
|
|
|
|
|
|
|
|
let mut out = Vec::new();
|
|
|
|
write_source(&sess,
|
|
|
|
lexer::StringReader::new(&sess.span_diagnostic, fm),
|
|
|
|
&mut out).unwrap();
|
2015-02-18 14:48:57 -05:00
|
|
|
String::from_utf8_lossy(&out[..]).into_owned()
|
2014-02-20 01:14:51 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Exhausts the `lexer` writing the output into `out`.
///
/// The general structure for this method is to iterate over each token,
/// possibly giving it an HTML span with a class specifying what flavor of token
/// it's used. All source code emission is done as slices from the source map,
/// not from the tokens themselves, in order to stay true to the original
/// source.
fn write_source(sess: &parse::ParseSess,
                mut lexer: lexer::StringReader,
                out: &mut Write)
                -> io::Result<()> {
    // State carried across tokens: inside a `#[...]` attribute span, after an
    // identifier that starts a macro invocation (`foo!`), and after a `$`
    // that introduces a macro nonterminal (`$x`).
    let mut is_attribute = false;
    let mut is_macro = false;
    let mut is_macro_nonterminal = false;
    loop {
        let next = lexer.next_token();

        // Slice the original source text for a span. Unwrap: spans produced
        // by this lexer come from the filemap registered in `sess`.
        let snip = |sp| sess.codemap().span_to_snippet(sp).unwrap();

        if next.tok == token::Eof { break }

        // Map the token to a CSS class. Arms that `continue` have already
        // written their own output and skip the shared emission below.
        let klass = match next.tok {
            token::Whitespace => {
                // Whitespace is emitted verbatim (escaped), with no span.
                write!(out, "{}", Escape(&snip(next.sp)))?;
                continue
            },
            token::Comment => {
                write!(out, "<span class='comment'>{}</span>",
                       Escape(&snip(next.sp)))?;
                continue
            },
            token::Shebang(s) => {
                // Shebang text is carried on the token itself, not re-sliced
                // from the codemap.
                write!(out, "{}", Escape(&s.as_str()))?;
                continue
            },
            // If this '&' token is directly adjacent to another token, assume
            // that it's the address-of operator instead of the and-operator.
            // This allows us to give all pointers their own class (`Box` and
            // `@` are below).
            token::BinOp(token::And) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
            token::At | token::Tilde => "kw-2",

            // consider this as part of a macro invocation if there was a
            // leading identifier
            token::Not if is_macro => { is_macro = false; "macro" }

            // operators
            token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
                token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
                token::BinOpEq(..) | token::FatArrow => "op",

            // miscellaneous, no highlighting
            token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
                token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
                token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
                token::Question => "",
            token::Dollar => {
                // `$ident` inside a macro definition is a nonterminal; flag it
                // so the following identifier gets the same class.
                if lexer.peek().tok.is_ident() {
                    is_macro_nonterminal = true;
                    "macro-nonterminal"
                } else {
                    ""
                }
            }

            // This is the start of an attribute. We're going to want to
            // continue highlighting it as an attribute until the ending ']' is
            // seen, so skip out early. Down below we terminate the attribute
            // span when we see the ']'.
            token::Pound => {
                is_attribute = true;
                write!(out, r"<span class='attribute'>#")?;
                continue
            }
            token::CloseDelim(token::Bracket) => {
                if is_attribute {
                    // Close the attribute span opened at the `#` above.
                    is_attribute = false;
                    write!(out, "]</span>")?;
                    continue
                } else {
                    ""
                }
            }

            token::Literal(lit, _suf) => {
                match lit {
                    // text literals
                    token::Byte(..) | token::Char(..) |
                        token::ByteStr(..) | token::ByteStrRaw(..) |
                        token::Str_(..) | token::StrRaw(..) => "string",

                    // number literals
                    token::Integer(..) | token::Float(..) => "number",
                }
            }

            // keywords are also included in the identifier set
            token::Ident(ident) => {
                match &*ident.name.as_str() {
                    "ref" | "mut" => "kw-2",

                    "self" => "self",
                    "false" | "true" => "boolval",

                    // Highlight common prelude names specially even though
                    // they are ordinary identifiers to the lexer.
                    "Option" | "Result" => "prelude-ty",
                    "Some" | "None" | "Ok" | "Err" => "prelude-val",

                    _ if next.tok.is_any_keyword() => "kw",
                    _ => {
                        if is_macro_nonterminal {
                            // The identifier right after a `$`.
                            is_macro_nonterminal = false;
                            "macro-nonterminal"
                        } else if lexer.peek().tok == token::Not {
                            // `ident` followed by `!` starts a macro
                            // invocation; the `!` arm above finishes it.
                            is_macro = true;
                            "macro"
                        } else {
                            "ident"
                        }
                    }
                }
            }

            // Special macro vars are like keywords
            token::SpecialVarNt(_) => "kw-2",

            token::Lifetime(..) => "lifetime",
            token::DocComment(..) => "doccomment",
            token::Underscore | token::Eof | token::Interpolated(..) |
                token::MatchNt(..) | token::SubstNt(..) => "",
        };

        // as mentioned above, use the original source code instead of
        // stringifying this token
        let snip = sess.codemap().span_to_snippet(next.sp).unwrap();
        if klass == "" {
            write!(out, "{}", Escape(&snip))?;
        } else {
            write!(out, "<span class='{}'>{}</span>", klass, Escape(&snip))?;
        }
    }

    Ok(())
}
|
|
|
|
|
|
|
|
fn write_header(class: Option<&str>,
|
|
|
|
id: Option<&str>,
|
|
|
|
out: &mut Write)
|
|
|
|
-> io::Result<()> {
|
|
|
|
write!(out, "<pre ")?;
|
|
|
|
match id {
|
|
|
|
Some(id) => write!(out, "id='{}' ", id)?,
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
write!(out, "class='rust {}'>\n", class.unwrap_or(""))
|
|
|
|
}
|
|
|
|
|
|
|
|
fn write_footer(out: &mut Write) -> io::Result<()> {
|
2014-02-20 01:14:51 -08:00
|
|
|
write!(out, "</pre>\n")
|
|
|
|
}
|