// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use rustc::session::Session;

use generated_code;

use std::cell::Cell;
use std::env;
use std::path::Path;

use syntax::ast;
use syntax::parse::filemap_to_tts;
use syntax::parse::lexer::{self, StringReader};
use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax::tokenstream::TokenTree;
use syntax_pos::*;

#[derive(Clone)]
pub struct SpanUtils<'a> {
    pub sess: &'a Session,
    // FIXME given that we clone SpanUtils all over the place, this err_count is
    // probably useless and any logic relying on it is bogus.
    pub err_count: Cell<isize>,
}

impl<'a> SpanUtils<'a> {
    pub fn new(sess: &'a Session) -> SpanUtils<'a> {
        SpanUtils {
            sess: sess,
            err_count: Cell::new(0),
        }
    }

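    // Make an absolute path string out of `file_name`: absolute paths are returned
    // as-is, relative paths are resolved against the current working directory.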
    pub fn make_path_string(file_name: &str) -> String {
        let path = Path::new(file_name);
        if path.is_absolute() {
            path.display().to_string()
        } else {
            env::current_dir().unwrap().join(&path).display().to_string()
        }
    }

    // sub_span starts at span.lo, so we need to adjust the positions etc.
    // If sub_span is None, we don't need to adjust.
    pub fn make_sub_span(&self, span: Span, sub_span: Option<Span>) -> Option<Span> {
        match sub_span {
            None => None,
            Some(sub) => {
                let FileMapAndBytePos { fm, pos } = self.sess.codemap().lookup_byte_offset(span.lo);
                let base = pos + fm.start_pos;
                Some(Span {
                    lo: base + self.sess.codemap().lookup_byte_offset(sub.lo).pos,
                    hi: base + self.sess.codemap().lookup_byte_offset(sub.hi).pos,
                    expn_id: span.expn_id,
                })
            }
        }
    }

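    // Return the source text for `span`, or an empty string if it is unavailable.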
    pub fn snippet(&self, span: Span) -> String {
        match self.sess.codemap().span_to_snippet(span) {
            Ok(s) => s,
            Err(_) => String::new(),
        }
    }

    pub fn retokenise_span(&self, span: Span) -> StringReader<'a> {
        // sadness - we don't have spans for sub-expressions nor access to the tokens
        // so in order to get extents for the function name itself (which dxr expects)
        // we need to re-tokenise the fn definition

        // Note: this is a bit awful - it adds the contents of span to the end of
        // the codemap as a new filemap. This is mostly OK, but means we should
        // not iterate over the codemap. Also, any spans over the new filemap
        // are incompatible with spans over other filemaps.
        let filemap = self.sess
            .codemap()
            .new_filemap(String::from("<anon-dxr>"), None, self.snippet(span));
        lexer::StringReader::new(&self.sess.parse_sess, filemap)
    }

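    // Re-parse the source of `span` into a stream of token trees. Like
    // `retokenise_span`, this adds a new filemap to the codemap.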
    fn span_to_tts(&self, span: Span) -> Vec<TokenTree> {
        let filename = String::from("<anon-dxr>");
        let filemap = self.sess.codemap().new_filemap(filename, None, self.snippet(span));
        filemap_to_tts(&self.sess.parse_sess, filemap)
    }

    // Re-parses a path and returns the span for the last identifier in the path.
    pub fn span_for_last_ident(&self, span: Span) -> Option<Span> {
        let mut result = None;

        let mut toks = self.retokenise_span(span);
        let mut bracket_count = 0;
        loop {
            let ts = toks.real_token();
            if ts.tok == token::Eof {
                return self.make_sub_span(span, result);
            }
            if bracket_count == 0 && (ts.tok.is_ident() || ts.tok.is_keyword(keywords::SelfValue)) {
                result = Some(ts.sp);
            }

            bracket_count += match ts.tok {
                token::Lt => 1,
                token::Gt => -1,
                token::BinOp(token::Shr) => -2,
                _ => 0,
            }
        }
    }

    // Return the span for the first identifier in the path.
    pub fn span_for_first_ident(&self, span: Span) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        let mut bracket_count = 0;
        loop {
            let ts = toks.real_token();
            if ts.tok == token::Eof {
                return None;
            }
            if bracket_count == 0 && (ts.tok.is_ident() || ts.tok.is_keyword(keywords::SelfValue)) {
                return self.make_sub_span(span, Some(ts.sp));
            }

            bracket_count += match ts.tok {
                token::Lt => 1,
                token::Gt => -1,
                token::BinOp(token::Shr) => -2,
                _ => 0,
            }
        }
    }

    // Return the span for the last ident before a `(` or `<` or `::<` and outside
    // any brackets, or the last span.
    pub fn sub_span_for_meth_name(&self, span: Span) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        let mut prev = toks.real_token();
        let mut result = None;
        let mut bracket_count = 0;
        let mut prev_span = None;
        while prev.tok != token::Eof {
            prev_span = None;
            let mut next = toks.real_token();

            if (next.tok == token::OpenDelim(token::Paren) || next.tok == token::Lt) &&
               bracket_count == 0 && prev.tok.is_ident() {
                result = Some(prev.sp);
            }

            if bracket_count == 0 && next.tok == token::ModSep {
                let old = prev;
                prev = next;
                next = toks.real_token();
                if next.tok == token::Lt && old.tok.is_ident() {
                    result = Some(old.sp);
                }
            }

            bracket_count += match prev.tok {
                token::OpenDelim(token::Paren) | token::Lt => 1,
                token::CloseDelim(token::Paren) | token::Gt => -1,
                token::BinOp(token::Shr) => -2,
                _ => 0,
            };

            if prev.tok.is_ident() && bracket_count == 0 {
                prev_span = Some(prev.sp);
            }
            prev = next;
        }
        if result.is_none() && prev_span.is_some() {
            return self.make_sub_span(span, prev_span);
        }
        return self.make_sub_span(span, result);
    }

    // Return the span for the last ident before a `<` and outside any
    // angle brackets, or the last span.
    pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        let mut prev = toks.real_token();
        let mut result = None;

        // We keep track of the following two counts - the depth of nesting of
        // angle brackets, and the depth of nesting of square brackets. For the
        // angle bracket count, we only count tokens which occur outside of any
        // square brackets (i.e. bracket_count == 0). The intuition here is
        // that we want to count angle brackets in the type, but not any which
        // could be in expression context (because these could mean 'less than',
        // etc.).
        let mut angle_count = 0;
        let mut bracket_count = 0;
        loop {
            let next = toks.real_token();

            if (next.tok == token::Lt || next.tok == token::Colon) &&
               angle_count == 0 &&
               bracket_count == 0 &&
               prev.tok.is_ident() {
                result = Some(prev.sp);
            }

            if bracket_count == 0 {
                angle_count += match prev.tok {
                    token::Lt => 1,
                    token::Gt => -1,
                    token::BinOp(token::Shl) => 2,
                    token::BinOp(token::Shr) => -2,
                    _ => 0,
                };
            }

            bracket_count += match prev.tok {
                token::OpenDelim(token::Bracket) => 1,
                token::CloseDelim(token::Bracket) => -1,
                _ => 0,
            };

            if next.tok == token::Eof {
                break;
            }
            prev = next;
        }
        if angle_count != 0 || bracket_count != 0 {
            let loc = self.sess.codemap().lookup_char_pos(span.lo);
            span_bug!(span,
                      "Mis-counted brackets when breaking path? Parsing '{}' \
                       in {}, line {}",
                      self.snippet(span),
                      loc.file.name,
                      loc.line);
        }
        if result.is_none() && prev.tok.is_ident() && angle_count == 0 {
            return self.make_sub_span(span, Some(prev.sp));
        }
        self.make_sub_span(span, result)
    }

    // Reparse span and return an owned vector of sub spans of the first limit
    // identifier tokens in the given nesting level.
    // Example with Foo<Bar<T,V>, Bar<T,V>>:
    // Nesting = 0: all idents outside of angle brackets: [Foo]
    // Nesting = 1: idents within one level of angle brackets: [Bar, Bar]
    pub fn spans_with_brackets(&self, span: Span, nesting: isize, limit: isize) -> Vec<Span> {
        let mut result: Vec<Span> = vec![];

        let mut toks = self.retokenise_span(span);
        // We keep track of how many brackets we're nested in.
        let mut angle_count: isize = 0;
        let mut bracket_count: isize = 0;
        let mut found_ufcs_sep = false;
        loop {
            let ts = toks.real_token();
            if ts.tok == token::Eof {
                if angle_count != 0 || bracket_count != 0 {
                    if generated_code(span) {
                        return vec![];
                    }
                    let loc = self.sess.codemap().lookup_char_pos(span.lo);
                    span_bug!(span,
                              "Mis-counted brackets when breaking path? \
                               Parsing '{}' in {}, line {}",
                              self.snippet(span),
                              loc.file.name,
                              loc.line);
                }
                return result;
            }
            if (result.len() as isize) == limit {
                return result;
            }
            bracket_count += match ts.tok {
                token::OpenDelim(token::Bracket) => 1,
                token::CloseDelim(token::Bracket) => -1,
                _ => 0,
            };
            if bracket_count > 0 {
                continue;
            }
            angle_count += match ts.tok {
                token::Lt => 1,
                token::Gt => -1,
                token::BinOp(token::Shl) => 2,
                token::BinOp(token::Shr) => -2,
                _ => 0,
            };

            // Ignore the `>::` in `<Type as Trait>::AssocTy`.

            // The root cause of this hack is that the AST representation of
            // qpaths is horrible. It treats <A as B>::C as a path with two
            // segments, B and C and notes that there is also a self type A at
            // position 0. Because we don't have spans for individual idents,
            // only the whole path, we have to iterate over the tokens in the
            // path, trying to pull out the non-nested idents (e.g., avoiding 'a
            // in `<A as B<'a>>::C`). So we end up with a span for `B>::C` from
            // the start of the first ident to the end of the path.
            if !found_ufcs_sep && angle_count == -1 {
                found_ufcs_sep = true;
                angle_count += 1;
            }
            if ts.tok.is_ident() && angle_count == nesting {
                result.push(self.make_sub_span(span, Some(ts.sp)).unwrap());
            }
        }
    }

    /// `span` must be the span for an item such as a function or struct. This
    /// function returns the program text from the start of the span until the
    /// end of the 'signature' part, that is up to, but not including, an opening
    /// brace or semicolon.
    pub fn signature_string_for_span(&self, span: Span) -> String {
        let mut toks = self.span_to_tts(span).into_iter();
        let mut prev = toks.next().unwrap();
        let first_span = prev.get_span();
        let mut angle_count = 0;
        for tok in toks {
            if let TokenTree::Token(_, ref tok) = prev {
                angle_count += match *tok {
                    token::Eof => { break; }
                    token::Lt => 1,
                    token::Gt => -1,
                    token::BinOp(token::Shl) => 2,
                    token::BinOp(token::Shr) => -2,
                    _ => 0,
                };
            }
            if angle_count > 0 {
                prev = tok;
                continue;
            }
            if let TokenTree::Token(_, token::Semi) = tok {
                return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
            } else if let TokenTree::Delimited(_, ref d) = tok {
                if d.delim == token::Brace {
                    return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
                }
            }
            prev = tok;
        }
        self.snippet(span)
    }

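    // Return the sub-span of the token immediately before the first occurrence of `tok`.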
    pub fn sub_span_before_token(&self, span: Span, tok: Token) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        let mut prev = toks.real_token();
        loop {
            if prev.tok == token::Eof {
                return None;
            }
            let next = toks.real_token();
            if next.tok == tok {
                return self.make_sub_span(span, Some(prev.sp));
            }
            prev = next;
        }
    }

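    // Return the sub-span of the first occurrence of `tok` within `span`.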
    pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        loop {
            let next = toks.real_token();
            if next.tok == token::Eof {
                return None;
            }
            if next.tok == tok {
                return self.make_sub_span(span, Some(next.sp));
            }
        }
    }

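    // Return the sub-span of the token immediately following the first occurrence of `keyword`.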
    pub fn sub_span_after_keyword(&self, span: Span, keyword: keywords::Keyword) -> Option<Span> {
        self.sub_span_after(span, |t| t.is_keyword(keyword))
    }

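    // Return the sub-span of the token immediately following the first occurrence of `tok`.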
    pub fn sub_span_after_token(&self, span: Span, tok: Token) -> Option<Span> {
        self.sub_span_after(span, |t| t == tok)
    }

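    // Return the sub-span of the token immediately following the first token matching `f`,
    // or None if there is no such token.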
    fn sub_span_after<F: Fn(Token) -> bool>(&self, span: Span, f: F) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        loop {
            let ts = toks.real_token();
            if ts.tok == token::Eof {
                return None;
            }
            if f(ts.tok) {
                let ts = toks.real_token();
                if ts.tok == token::Eof {
                    return None;
                } else {
                    return self.make_sub_span(span, Some(ts.sp));
                }
            }
        }
    }

    // Returns a list of the spans of idents in a path.
    // E.g., for `foo::bar<x,t>::baz`, we return [foo, bar, baz] (well, their spans).
    pub fn spans_for_path_segments(&self, path: &ast::Path) -> Vec<Span> {
        self.spans_with_brackets(path.span, 0, -1)
    }

    // Return an owned vector of the subspans of the param identifier
    // tokens found in span.
    pub fn spans_for_ty_params(&self, span: Span, number: isize) -> Vec<Span> {
        // Type params are nested within one level of brackets:
        // i.e. we want Vec<A, B> from Foo<A, B<T,U>>
        self.spans_with_brackets(span, 1, number)
    }

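    // Log that a sub-span could not be found and bump `err_count`; gives up
    // entirely after 1000 such errors.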
    pub fn report_span_err(&self, kind: &str, span: Span) {
        let loc = self.sess.codemap().lookup_char_pos(span.lo);
        info!("({}) Could not find sub_span in `{}` in {}, line {}",
              kind,
              self.snippet(span),
              loc.file.name,
              loc.line);
        self.err_count.set(self.err_count.get() + 1);
        if self.err_count.get() > 1000 {
            bug!("span errors reached 1000, giving up");
        }
    }

    // Return the name for a macro definition (identifier after first `!`).
    pub fn span_for_macro_def_name(&self, span: Span) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        loop {
            let ts = toks.real_token();
            if ts.tok == token::Eof {
                return None;
            }
            if ts.tok == token::Not {
                let ts = toks.real_token();
                if ts.tok.is_ident() {
                    return self.make_sub_span(span, Some(ts.sp));
                } else {
                    return None;
                }
            }
        }
    }

    // Return the name for a macro use (identifier before first `!`).
    pub fn span_for_macro_use_name(&self, span: Span) -> Option<Span> {
        let mut toks = self.retokenise_span(span);
        let mut prev = toks.real_token();
        loop {
            if prev.tok == token::Eof {
                return None;
            }
            let ts = toks.real_token();
            if ts.tok == token::Not {
                if prev.tok.is_ident() {
                    return self.make_sub_span(span, Some(prev.sp));
                } else {
                    return None;
                }
            }
            prev = ts;
        }
    }

    /// Return true if the span is generated code, and
    /// it is not a subspan of the root callsite.
    ///
    /// Used to filter out spans of minimal value,
    /// such as references to macro internal variables.
    pub fn filter_generated(&self, sub_span: Option<Span>, parent: Span) -> bool {
        if !generated_code(parent) {
            if sub_span.is_none() {
                // Edge case - this occurs on generated code with incorrect expansion info.
                return true;
            }
            return false;
        }
        // If sub_span is none, filter out generated code.
        if sub_span.is_none() {
            return true;
        }

        // If the span comes from a fake filemap, filter it.
        if !self.sess.codemap().lookup_char_pos(parent.lo).file.is_real_file() {
            return true;
        }

        // Otherwise, a generated span is deemed invalid if it is not a sub-span of the root
        // callsite. This filters out macro internal variables and most malformed spans.
        let span = self.sess.codemap().source_callsite(parent);
        !(span.contains(parent))
    }
}

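// Helper macro: early-return (with `None`, or with no value) when `filter_generated`
// says the span should be filtered out.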
macro_rules! filter {
    ($util: expr, $span: ident, $parent: expr, None) => {
        if $util.filter_generated($span, $parent) {
            return None;
        }
    };
    ($util: expr, $span: ident, $parent: expr) => {
        if $util.filter_generated($span, $parent) {
            return;
        }
    };
}