//! The source positions and related helper functions.
//!
//! ## Note
//!
//! This API is completely unstable and subject to change.

#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(crate_visibility_modifier)]
#![feature(const_if_match)]
#![feature(const_fn)]
#![feature(const_panic)]
#![cfg_attr(not(bootstrap), feature(negative_impls))]
#![feature(nll)]
#![feature(optin_builtin_traits)]
#![feature(specialization)]

use rustc_data_structures::AtomicRef;
use rustc_macros::HashStable_Generic;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

mod caching_source_map_view;
pub mod source_map;
pub use self::caching_source_map_view::CachingSourceMapView;

pub mod edition;
use edition::Edition;
pub mod hygiene;
use hygiene::Transparency;
pub use hygiene::{DesugaringKind, ExpnData, ExpnId, ExpnKind, MacroKind, SyntaxContext};
pub mod def_id;
use def_id::{CrateNum, DefId, LOCAL_CRATE};
mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP};

pub mod symbol;
pub use symbol::{sym, Symbol};

mod analyze_source_file;
pub mod fatal_error;

use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{Lock, Lrc};

use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::{self, Ordering};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Add, Sub};
use std::path::PathBuf;

#[cfg(test)]
mod tests;

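/// Per-session shared state: the interners for symbols, spans, and hygiene data.
/// Stored in a scoped thread-local (see `GLOBALS` below) so that span and symbol
/// APIs can reach them without threading an explicit handle through every caller.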
pub struct Globals {
    symbol_interner: Lock<symbol::Interner>,
    span_interner: Lock<span_encoding::SpanInterner>,
    hygiene_data: Lock<hygiene::HygieneData>,
}

impl Globals {
    pub fn new(edition: Edition) -> Globals {
        Globals {
            symbol_interner: Lock::new(symbol::Interner::fresh()),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
        }
    }
}

scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals);
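// Usage sketch (an assumption about the caller, not enforced here): the compiler driver is
// expected to call `GLOBALS.set(&Globals::new(edition), || ...)` once per session, and code
// running inside that closure can then read the interners via `GLOBALS.with(|globals| ...)`.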

/// Differentiates between real files and common virtual files.
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
#[derive(HashStable_Generic)]
pub enum FileName {
    Real(PathBuf),
    /// Call to `quote!`.
    QuoteExpansion(u64),
    /// Command line.
    Anon(u64),
    /// Hack in `src/librustc_ast/parse.rs`.
    // FIXME(jseyfried)
    MacroExpansion(u64),
    ProcMacroSourceCode(u64),
    /// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`.
    CfgSpec(u64),
    /// Strings provided as crate attributes in the CLI.
    CliCrateAttr(u64),
    /// Custom sources for explicit parser calls from plugins and drivers.
    Custom(String),
    DocTest(PathBuf, isize),
}

impl std::fmt::Display for FileName {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use FileName::*;
        match *self {
            Real(ref path) => write!(fmt, "{}", path.display()),
            QuoteExpansion(_) => write!(fmt, "<quote expansion>"),
            MacroExpansion(_) => write!(fmt, "<macro expansion>"),
            Anon(_) => write!(fmt, "<anon>"),
            ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
            CfgSpec(_) => write!(fmt, "<cfgspec>"),
            CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
            Custom(ref s) => write!(fmt, "<{}>", s),
            DocTest(ref path, _) => write!(fmt, "{}", path.display()),
        }
    }
}

impl From<PathBuf> for FileName {
    fn from(p: PathBuf) -> Self {
        assert!(!p.to_string_lossy().ends_with('>'));
        FileName::Real(p)
    }
}

impl FileName {
    pub fn is_real(&self) -> bool {
        use FileName::*;
        match *self {
            Real(_) => true,
            Anon(_)
            | MacroExpansion(_)
            | ProcMacroSourceCode(_)
            | CfgSpec(_)
            | CliCrateAttr(_)
            | Custom(_)
            | QuoteExpansion(_)
            | DocTest(_, _) => false,
        }
    }

    pub fn quote_expansion_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::QuoteExpansion(hasher.finish())
    }

    pub fn macro_expansion_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::MacroExpansion(hasher.finish())
    }

    pub fn anon_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::Anon(hasher.finish())
    }

    pub fn proc_macro_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::ProcMacroSourceCode(hasher.finish())
    }

    pub fn cfg_spec_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::QuoteExpansion(hasher.finish())
    }

    pub fn cli_crate_attr_source_code(src: &str) -> FileName {
        let mut hasher = StableHasher::new();
        src.hash(&mut hasher);
        FileName::CliCrateAttr(hasher.finish())
    }

    pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
        FileName::DocTest(path, line)
    }
}

/// Spans represent a region of code, used for error reporting. Positions in spans
/// are *absolute* positions from the beginning of the source_map, not positions
/// relative to `SourceFile`s. Methods on the `SourceMap` can be used to relate spans back
/// to the original source.
/// You must be careful if the span crosses more than one file - you will not be
/// able to use many of the functions on spans in source_map and you cannot assume
/// that the length of the `span = hi - lo`; there may be space in the `BytePos`
/// range between files.
///
/// `SpanData` is public because `Span` uses a thread-local interner and can't be
/// sent to other threads, but some pieces of performance infra run in a separate thread.
/// Using `Span` is generally preferred.
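///
/// A sketch of the intended round-trip between the two types (illustrative only, not a doctest):
///
/// ```rust,ignore
/// let data: SpanData = span.data();       // detach the span from the thread-local interner
/// let moved: Span = data.with_lo(new_lo); // build a new interned `Span` from the pieces
/// ```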
#[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct SpanData {
    pub lo: BytePos,
    pub hi: BytePos,
    /// Information about where the macro came from, if this piece of
    /// code was created by a macro expansion.
    pub ctxt: SyntaxContext,
}

impl SpanData {
    #[inline]
    pub fn with_lo(&self, lo: BytePos) -> Span {
        Span::new(lo, self.hi, self.ctxt)
    }
    #[inline]
    pub fn with_hi(&self, hi: BytePos) -> Span {
        Span::new(self.lo, hi, self.ctxt)
    }
    #[inline]
    pub fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
        Span::new(self.lo, self.hi, ctxt)
    }
}

// The interner is pointed to by a thread-local value which is only set on the main thread
// when parallelization is disabled. So we don't allow `Span` to transfer between threads
// to avoid panics and other errors, even though it would be memory safe to do so.
#[cfg(not(parallel_compiler))]
impl !Send for Span {}
#[cfg(not(parallel_compiler))]
impl !Sync for Span {}

impl PartialOrd for Span {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&self.data(), &rhs.data())
    }
}
impl Ord for Span {
    fn cmp(&self, rhs: &Self) -> Ordering {
        Ord::cmp(&self.data(), &rhs.data())
    }
}

/// A collection of spans. Spans have two orthogonal attributes:
///
/// - They can be *primary spans*. In this case they are the locus of
///   the error, and would be rendered with `^^^`.
/// - They can have a *label*. In this case, the label is written next
///   to the mark in the snippet when we render.
#[derive(Clone, Debug, Hash, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub struct MultiSpan {
    primary_spans: Vec<Span>,
    span_labels: Vec<(Span, String)>,
}

impl Span {
    #[inline]
    pub fn lo(self) -> BytePos {
        self.data().lo
    }
    #[inline]
    pub fn with_lo(self, lo: BytePos) -> Span {
        self.data().with_lo(lo)
    }
    #[inline]
    pub fn hi(self) -> BytePos {
        self.data().hi
    }
    #[inline]
    pub fn with_hi(self, hi: BytePos) -> Span {
        self.data().with_hi(hi)
    }
    #[inline]
    pub fn ctxt(self) -> SyntaxContext {
        self.data().ctxt
    }
    #[inline]
    pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
        self.data().with_ctxt(ctxt)
    }

    /// Returns `true` if this is a dummy span with any hygienic context.
    #[inline]
    pub fn is_dummy(self) -> bool {
        let span = self.data();
        span.lo.0 == 0 && span.hi.0 == 0
    }

    /// Returns `true` if this span comes from a macro or desugaring.
    #[inline]
    pub fn from_expansion(self) -> bool {
        self.ctxt() != SyntaxContext::root()
    }

    /// Returns `true` if `span` originates in a derive-macro's expansion.
    pub fn in_derive_expansion(self) -> bool {
        matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _))
    }

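    /// Creates a fresh span with the given bounds and the root (non-macro) syntax context.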
    #[inline]
    pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
        Span::new(lo, hi, SyntaxContext::root())
    }

    /// Returns a new span representing an empty span at the beginning of this span.
    #[inline]
    pub fn shrink_to_lo(self) -> Span {
        let span = self.data();
        span.with_hi(span.lo)
    }
    /// Returns a new span representing an empty span at the end of this span.
    #[inline]
    pub fn shrink_to_hi(self) -> Span {
        let span = self.data();
        span.with_lo(span.hi)
    }

    /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
    pub fn substitute_dummy(self, other: Span) -> Span {
        if self.is_dummy() { other } else { self }
    }

    /// Returns `true` if `self` fully encloses `other`.
    pub fn contains(self, other: Span) -> bool {
        let span = self.data();
        let other = other.data();
        span.lo <= other.lo && other.hi <= span.hi
    }

    /// Returns `true` if `self` touches `other`.
    pub fn overlaps(self, other: Span) -> bool {
        let span = self.data();
        let other = other.data();
        span.lo < other.hi && other.lo < span.hi
    }

    /// Returns `true` if the spans are equal with regard to the source text.
    ///
    /// Use this instead of `==` when either span could be generated code,
    /// and you only care that they point to the same bytes of source text.
    pub fn source_equal(&self, other: &Span) -> bool {
        let span = self.data();
        let other = other.data();
        span.lo == other.lo && span.hi == other.hi
    }

    /// Returns `Some(span)`, where the start is trimmed by the end of `other`.
    pub fn trim_start(self, other: Span) -> Option<Span> {
        let span = self.data();
        let other = other.data();
        if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None }
    }

    /// Returns the source span -- this is either the supplied span, or the span for
    /// the macro callsite that expanded to it.
    pub fn source_callsite(self) -> Span {
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self }
    }

    /// The `Span` for the tokens in the previous macro expansion from which `self` was generated,
    /// if any.
    pub fn parent(self) -> Option<Span> {
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(expn_data.call_site) } else { None }
    }

    /// Edition of the crate from which this span came.
    pub fn edition(self) -> edition::Edition {
        self.ctxt().outer_expn_data().edition
    }

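    /// Returns `true` if the edition associated with this span is Rust 2015.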
    #[inline]
    pub fn rust_2015(&self) -> bool {
        self.edition() == edition::Edition::Edition2015
    }

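    /// Returns `true` if the edition associated with this span is Rust 2018 or later.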
    #[inline]
    pub fn rust_2018(&self) -> bool {
        self.edition() >= edition::Edition::Edition2018
    }

    /// Returns the source callee.
    ///
    /// Returns `None` if the supplied span has no expansion trace,
    /// else returns the `ExpnData` for the macro definition
    /// corresponding to the source callsite.
    pub fn source_callee(self) -> Option<ExpnData> {
        fn source_callee(expn_data: ExpnData) -> ExpnData {
            let next_expn_data = expn_data.call_site.ctxt().outer_expn_data();
            if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data }
        }
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None }
    }

    /// Checks if a span is "internal" to a macro in which `#[unstable]`
    /// items can be used (that is, a macro marked with
    /// `#[allow_internal_unstable]`).
    pub fn allows_unstable(&self, feature: Symbol) -> bool {
        self.ctxt().outer_expn_data().allow_internal_unstable.map_or(false, |features| {
            features
                .iter()
                .any(|&f| f == feature || f == sym::allow_internal_unstable_backcompat_hack)
        })
    }

    /// Checks if this span arises from a compiler desugaring of kind `kind`.
    pub fn is_desugaring(&self, kind: DesugaringKind) -> bool {
        match self.ctxt().outer_expn_data().kind {
            ExpnKind::Desugaring(k) => k == kind,
            _ => false,
        }
    }

    /// Returns the compiler desugaring that created this span, or `None`
    /// if this span is not from a desugaring.
    pub fn desugaring_kind(&self) -> Option<DesugaringKind> {
        match self.ctxt().outer_expn_data().kind {
            ExpnKind::Desugaring(k) => Some(k),
            _ => None,
        }
    }

    /// Checks if a span is "internal" to a macro in which `unsafe`
    /// can be used without triggering the `unsafe_code` lint
    /// (that is, a macro marked with `#[allow_internal_unsafe]`).
    pub fn allows_unsafe(&self) -> bool {
        self.ctxt().outer_expn_data().allow_internal_unsafe
    }

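    /// Walks up the chain of macro expansions that produced this span, yielding the
    /// `ExpnData` of each enclosing expansion and skipping directly recursive invocations.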
    pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> {
        let mut prev_span = DUMMY_SP;
        std::iter::from_fn(move || {
            loop {
                let expn_data = self.ctxt().outer_expn_data();
                if expn_data.is_root() {
                    return None;
                }

                let is_recursive = expn_data.call_site.source_equal(&prev_span);

                prev_span = self;
                self = expn_data.call_site;

                // Don't print recursive invocations.
                if !is_recursive {
                    return Some(expn_data);
                }
            }
        })
    }

    /// Returns a `Span` that would enclose both `self` and `end`.
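    ///
    /// ```text
    ///     ____             ___
    ///     self lorem ipsum end
    ///     ^^^^^^^^^^^^^^^^^^^^
    /// ```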
    pub fn to(self, end: Span) -> Span {
        let span_data = self.data();
        let end_data = end.data();
        // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480).
        // Return the macro span on its own to avoid weird diagnostic output. It is preferable to
        // have an incomplete span than a completely nonsensical one.
        if span_data.ctxt != end_data.ctxt {
            if span_data.ctxt == SyntaxContext::root() {
                return end;
            } else if end_data.ctxt == SyntaxContext::root() {
                return self;
            }
            // Both spans fall within a macro.
            // FIXME(estebank): check if it is the *same* macro.
        }
        Span::new(
            cmp::min(span_data.lo, end_data.lo),
            cmp::max(span_data.hi, end_data.hi),
            if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt },
        )
    }

    /// Returns a `Span` between the end of `self` and the beginning of `end`.
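    ///
    /// ```text
    ///     ____             ___
    ///     self lorem ipsum end
    ///         ^^^^^^^^^^^^^
    /// ```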
    pub fn between(self, end: Span) -> Span {
        let span = self.data();
        let end = end.data();
        Span::new(
            span.hi,
            end.lo,
            if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
        )
    }

    /// Returns a `Span` from the beginning of `self` until the beginning of `end`.
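    ///
    /// ```text
    ///     ____             ___
    ///     self lorem ipsum end
    ///     ^^^^^^^^^^^^^^^^^
    /// ```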
    pub fn until(self, end: Span) -> Span {
        let span = self.data();
        let end = end.data();
        Span::new(
            span.lo,
            end.lo,
            if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
        )
    }

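    /// Builds a span covering the byte range `inner`, interpreted relative to the start of
    /// `self`, keeping `self`'s syntax context.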
    pub fn from_inner(self, inner: InnerSpan) -> Span {
        let span = self.data();
        Span::new(
            span.lo + BytePos::from_usize(inner.start),
            span.lo + BytePos::from_usize(inner.end),
            span.ctxt,
        )
    }

    /// Equivalent of `Span::def_site` from the proc macro API,
    /// except that the location is taken from the `self` span.
    pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::Opaque)
    }

    /// Equivalent of `Span::call_site` from the proc macro API,
    /// except that the location is taken from the `self` span.
    pub fn with_call_site_ctxt(&self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
    }

    /// Equivalent of `Span::mixed_site` from the proc macro API,
    /// except that the location is taken from the `self` span.
    pub fn with_mixed_site_ctxt(&self, expn_id: ExpnId) -> Span {
        self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent)
    }

    /// Produces a span with the same location as `self` and context produced by a macro with the
    /// given ID and transparency, assuming that macro was defined directly and not produced by
    /// some other macro (which is the case for built-in and procedural macros).
    pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
        self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency))
    }

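    /// Applies the expansion mark `expn_id` with the given `transparency` to this span's
    /// hygiene context, keeping its location unchanged.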
    #[inline]
    pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
        let span = self.data();
        span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency))
    }

    #[inline]
    pub fn remove_mark(&mut self) -> ExpnId {
        let mut span = self.data();
        let mark = span.ctxt.remove_mark();
        *self = Span::new(span.lo, span.hi, span.ctxt);
        mark
    }

    #[inline]
    pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        let mut span = self.data();
        let mark = span.ctxt.adjust(expn_id);
        *self = Span::new(span.lo, span.hi, span.ctxt);
        mark
    }

    #[inline]
    pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        let mut span = self.data();
        let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
        *self = Span::new(span.lo, span.hi, span.ctxt);
        mark
    }

    #[inline]
    pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
        let mut span = self.data();
        let mark = span.ctxt.glob_adjust(expn_id, glob_span);
        *self = Span::new(span.lo, span.hi, span.ctxt);
        mark
    }

    #[inline]
    pub fn reverse_glob_adjust(
        &mut self,
        expn_id: ExpnId,
        glob_span: Span,
    ) -> Option<Option<ExpnId>> {
        let mut span = self.data();
        let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span);
        *self = Span::new(span.lo, span.hi, span.ctxt);
        mark
    }

    #[inline]
    pub fn normalize_to_macros_2_0(self) -> Span {
        let span = self.data();
        span.with_ctxt(span.ctxt.normalize_to_macros_2_0())
    }

    #[inline]
    pub fn normalize_to_macro_rules(self) -> Span {
        let span = self.data();
        span.with_ctxt(span.ctxt.normalize_to_macro_rules())
    }
}

#[derive(Clone, Debug)]
pub struct SpanLabel {
    /// The span we are going to include in the final snippet.
    pub span: Span,

    /// Is this a primary span? This is the "locus" of the message,
    /// and is indicated with a `^^^^` underline, versus `----`.
    pub is_primary: bool,

    /// What label should we attach to this span (if any)?
    pub label: Option<String>,
}

impl Default for Span {
    fn default() -> Self {
        DUMMY_SP
    }
}

impl rustc_serialize::UseSpecializedEncodable for Span {
    fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        let span = self.data();
        s.emit_struct("Span", 2, |s| {
            s.emit_struct_field("lo", 0, |s| span.lo.encode(s))?;
            s.emit_struct_field("hi", 1, |s| span.hi.encode(s))
        })
    }
}

impl rustc_serialize::UseSpecializedDecodable for Span {
    fn default_decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
        d.read_struct("Span", 2, |d| {
            let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
            let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
            Ok(Span::with_root_ctxt(lo, hi))
        })
    }
}

pub fn default_span_debug(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.debug_struct("Span")
        .field("lo", &span.lo())
        .field("hi", &span.hi())
        .field("ctxt", &span.ctxt())
        .finish()
}

impl fmt::Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*SPAN_DEBUG)(*self, f)
    }
}

impl fmt::Debug for SpanData {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*SPAN_DEBUG)(Span::new(self.lo, self.hi, self.ctxt), f)
    }
}

impl MultiSpan {
    #[inline]
    pub fn new() -> MultiSpan {
        MultiSpan { primary_spans: vec![], span_labels: vec![] }
    }

    pub fn from_span(primary_span: Span) -> MultiSpan {
        MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] }
    }

    pub fn from_spans(vec: Vec<Span>) -> MultiSpan {
        MultiSpan { primary_spans: vec, span_labels: vec![] }
    }

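    /// Attaches `label` to `span`; the label is rendered next to that span in diagnostics.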
    pub fn push_span_label(&mut self, span: Span, label: String) {
        self.span_labels.push((span, label));
    }

    /// Selects the first primary span (if any).
    pub fn primary_span(&self) -> Option<Span> {
        self.primary_spans.first().cloned()
    }

    /// Returns all primary spans.
    pub fn primary_spans(&self) -> &[Span] {
        &self.primary_spans
    }

    /// Returns `true` if any of the primary spans are displayable.
    pub fn has_primary_spans(&self) -> bool {
        self.primary_spans.iter().any(|sp| !sp.is_dummy())
    }

    /// Returns `true` if this contains only a dummy primary span with any hygienic context.
    pub fn is_dummy(&self) -> bool {
        let mut is_dummy = true;
        for span in &self.primary_spans {
            if !span.is_dummy() {
                is_dummy = false;
            }
        }
        is_dummy
    }

    /// Replaces all occurrences of one Span with another. Used to move `Span`s in areas that don't
    /// display well (like std macros). Returns whether replacements occurred.
    pub fn replace(&mut self, before: Span, after: Span) -> bool {
        let mut replacements_occurred = false;
        for primary_span in &mut self.primary_spans {
            if *primary_span == before {
                *primary_span = after;
                replacements_occurred = true;
            }
        }
        for span_label in &mut self.span_labels {
            if span_label.0 == before {
                span_label.0 = after;
                replacements_occurred = true;
            }
        }
        replacements_occurred
    }

    /// Returns the strings to highlight. We always ensure that there
    /// is an entry for each of the primary spans -- for each primary
    /// span `P`, if there is at least one label with span `P`, we return
    /// those labels (marked as primary). But otherwise we return
    /// `SpanLabel` instances with empty labels.
    pub fn span_labels(&self) -> Vec<SpanLabel> {
        let is_primary = |span| self.primary_spans.contains(&span);

        let mut span_labels = self
            .span_labels
            .iter()
            .map(|&(span, ref label)| SpanLabel {
                span,
                is_primary: is_primary(span),
                label: Some(label.clone()),
            })
            .collect::<Vec<_>>();

        for &span in &self.primary_spans {
            if !span_labels.iter().any(|sl| sl.span == span) {
                span_labels.push(SpanLabel { span, is_primary: true, label: None });
            }
        }

        span_labels
    }

    /// Returns `true` if any of the span labels is displayable.
    pub fn has_span_labels(&self) -> bool {
        self.span_labels.iter().any(|(sp, _)| !sp.is_dummy())
    }
}

impl From<Span> for MultiSpan {
    fn from(span: Span) -> MultiSpan {
        MultiSpan::from_span(span)
    }
}

impl From<Vec<Span>> for MultiSpan {
    fn from(spans: Vec<Span>) -> MultiSpan {
        MultiSpan::from_spans(spans)
    }
}

/// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {
    /// The absolute offset of the character in the `SourceMap`.
    pub pos: BytePos,
    /// The number of bytes, `>= 2`.
    pub bytes: u8,
}

/// Identifies an offset of a non-narrow character in a `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub enum NonNarrowChar {
    /// Represents a zero-width character.
    ZeroWidth(BytePos),
    /// Represents a wide (full-width) character.
    Wide(BytePos),
    /// Represents a tab character, represented visually with a width of 4 characters.
    Tab(BytePos),
}

impl NonNarrowChar {
    fn new(pos: BytePos, width: usize) -> Self {
        match width {
            0 => NonNarrowChar::ZeroWidth(pos),
            2 => NonNarrowChar::Wide(pos),
            4 => NonNarrowChar::Tab(pos),
            _ => panic!("width {} given for non-narrow character", width),
        }
    }

    /// Returns the absolute offset of the character in the `SourceMap`.
    pub fn pos(&self) -> BytePos {
        match *self {
            NonNarrowChar::ZeroWidth(p) | NonNarrowChar::Wide(p) | NonNarrowChar::Tab(p) => p,
        }
    }

    /// Returns the width of the character: 0 (zero-width), 2 (wide), or 4 (tab).
    pub fn width(&self) -> usize {
        match *self {
            NonNarrowChar::ZeroWidth(_) => 0,
            NonNarrowChar::Wide(_) => 2,
            NonNarrowChar::Tab(_) => 4,
        }
    }
}

impl Add<BytePos> for NonNarrowChar {
    type Output = Self;

    fn add(self, rhs: BytePos) -> Self {
        match self {
            NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs),
            NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs),
            NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs),
        }
    }
}

impl Sub<BytePos> for NonNarrowChar {
    type Output = Self;

    fn sub(self, rhs: BytePos) -> Self {
        match self {
            NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs),
            NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs),
            NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs),
        }
    }
}

/// Identifies an offset of a character that was normalized away from `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct NormalizedPos {
    /// The absolute offset of the character in the `SourceMap`.
    pub pos: BytePos,
    /// The difference between original and normalized string at position.
    pub diff: u32,
}

#[derive(PartialEq, Eq, Clone, Debug)]
pub enum ExternalSource {
    /// No external source has to be loaded, since the `SourceFile` represents a local crate.
    Unneeded,
    Foreign {
        kind: ExternalSourceKind,
        /// This `SourceFile`'s byte-offset within the source_map of its original crate.
        original_start_pos: BytePos,
        /// The end of this `SourceFile` within the source_map of its original crate.
        original_end_pos: BytePos,
    },
}

/// The state of the lazy external source loading mechanism of a `SourceFile`.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum ExternalSourceKind {
    /// The external source has been loaded already.
    Present(Lrc<String>),
    /// No attempt has been made to load the external source.
    AbsentOk,
    /// A failed attempt has been made to load the external source.
    AbsentErr,
    Unneeded,
}

impl ExternalSource {
    pub fn is_absent(&self) -> bool {
        match self {
            ExternalSource::Foreign { kind: ExternalSourceKind::Present(_), .. } => false,
            _ => true,
        }
    }

    pub fn get_source(&self) -> Option<&Lrc<String>> {
        match self {
            ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src),
            _ => None,
        }
    }
}

#[derive(Debug)]
pub struct OffsetOverflowError;

/// A single source in the `SourceMap`.
#[derive(Clone)]
pub struct SourceFile {
    /// The name of the file that the source came from. Source that doesn't
    /// originate from files has names between angle brackets by convention
    /// (e.g., `<anon>`).
    pub name: FileName,
    /// `true` if the `name` field above has been modified by `--remap-path-prefix`.
    pub name_was_remapped: bool,
    /// The unmapped path of the file that the source came from.
    /// Set to `None` if the `SourceFile` was imported from an external crate.
    pub unmapped_path: Option<FileName>,
    /// The complete source code.
    pub src: Option<Lrc<String>>,
    /// The source code's hash.
    pub src_hash: u128,
    /// The external source code (used for external crates, which will have a `None`
    /// value as `self.src`).
    pub external_src: Lock<ExternalSource>,
    /// The start position of this source in the `SourceMap`.
    pub start_pos: BytePos,
    /// The end position of this source in the `SourceMap`.
    pub end_pos: BytePos,
    /// Locations of line beginnings in the source code.
    pub lines: Vec<BytePos>,
    /// Locations of multi-byte characters in the source code.
    pub multibyte_chars: Vec<MultiByteChar>,
    /// Width of characters that are not narrow in the source code.
    pub non_narrow_chars: Vec<NonNarrowChar>,
    /// Locations of characters removed during normalization.
    pub normalized_pos: Vec<NormalizedPos>,
    /// A hash of the filename, used for speeding up hashing in incremental compilation.
    pub name_hash: u128,
    /// Indicates which crate this `SourceFile` was imported from.
    pub cnum: CrateNum,
}

impl Encodable for SourceFile {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("SourceFile", 8, |s| {
            s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
            s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?;
            s.emit_struct_field("src_hash", 2, |s| self.src_hash.encode(s))?;
            s.emit_struct_field("start_pos", 3, |s| self.start_pos.encode(s))?;
            s.emit_struct_field("end_pos", 4, |s| self.end_pos.encode(s))?;
            s.emit_struct_field("lines", 5, |s| {
                let lines = &self.lines[..];
                // Store the length.
                s.emit_u32(lines.len() as u32)?;

                if !lines.is_empty() {
                    // In order to preserve some space, we exploit the fact that
                    // the lines list is sorted and individual lines are
                    // probably not that long. Because of that we can store lines
                    // as a difference list, using as little space as possible
                    // for the differences.
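                    // For example (illustrative): line starts `[0, 7, 19]` are stored as the
                    // length `3`, `bytes_per_diff = 1` (the largest diff, 12, fits in a byte),
                    // the first element `0`, and then the diffs `7` and `12` as single bytes.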
                    let max_line_length = if lines.len() == 1 {
                        0
                    } else {
                        lines.windows(2).map(|w| w[1] - w[0]).map(|bp| bp.to_usize()).max().unwrap()
                    };

                    let bytes_per_diff: u8 = match max_line_length {
                        0..=0xFF => 1,
                        0x100..=0xFFFF => 2,
                        _ => 4,
                    };

                    // Encode the number of bytes used per diff.
                    bytes_per_diff.encode(s)?;

                    // Encode the first element.
                    lines[0].encode(s)?;

                    let diff_iter = (&lines[..]).windows(2).map(|w| (w[1] - w[0]));

                    match bytes_per_diff {
                        1 => {
                            for diff in diff_iter {
                                (diff.0 as u8).encode(s)?
                            }
                        }
                        2 => {
                            for diff in diff_iter {
                                (diff.0 as u16).encode(s)?
                            }
                        }
                        4 => {
                            for diff in diff_iter {
                                diff.0.encode(s)?
                            }
                        }
                        _ => unreachable!(),
                    }
                }

                Ok(())
            })?;
            s.emit_struct_field("multibyte_chars", 6, |s| self.multibyte_chars.encode(s))?;
            s.emit_struct_field("non_narrow_chars", 7, |s| self.non_narrow_chars.encode(s))?;
            s.emit_struct_field("name_hash", 8, |s| self.name_hash.encode(s))?;
            s.emit_struct_field("normalized_pos", 9, |s| self.normalized_pos.encode(s))?;
            s.emit_struct_field("cnum", 10, |s| self.cnum.encode(s))
        })
    }
}

impl Decodable for SourceFile {
    fn decode<D: Decoder>(d: &mut D) -> Result<SourceFile, D::Error> {
        d.read_struct("SourceFile", 8, |d| {
            let name: FileName = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
            let name_was_remapped: bool =
                d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?;
            let src_hash: u128 = d.read_struct_field("src_hash", 2, |d| Decodable::decode(d))?;
            let start_pos: BytePos =
                d.read_struct_field("start_pos", 3, |d| Decodable::decode(d))?;
            let end_pos: BytePos = d.read_struct_field("end_pos", 4, |d| Decodable::decode(d))?;
            let lines: Vec<BytePos> = d.read_struct_field("lines", 5, |d| {
                let num_lines: u32 = Decodable::decode(d)?;
                let mut lines = Vec::with_capacity(num_lines as usize);

                if num_lines > 0 {
                    // Read the number of bytes used per diff.
                    let bytes_per_diff: u8 = Decodable::decode(d)?;

                    // Read the first element.
                    let mut line_start: BytePos = Decodable::decode(d)?;
                    lines.push(line_start);

                    for _ in 1..num_lines {
                        let diff = match bytes_per_diff {
                            1 => d.read_u8()? as u32,
                            2 => d.read_u16()? as u32,
                            4 => d.read_u32()?,
                            _ => unreachable!(),
                        };

                        line_start = line_start + BytePos(diff);

                        lines.push(line_start);
                    }
                }

                Ok(lines)
            })?;
            let multibyte_chars: Vec<MultiByteChar> =
                d.read_struct_field("multibyte_chars", 6, |d| Decodable::decode(d))?;
            let non_narrow_chars: Vec<NonNarrowChar> =
                d.read_struct_field("non_narrow_chars", 7, |d| Decodable::decode(d))?;
            let name_hash: u128 = d.read_struct_field("name_hash", 8, |d| Decodable::decode(d))?;
            let normalized_pos: Vec<NormalizedPos> =
                d.read_struct_field("normalized_pos", 9, |d| Decodable::decode(d))?;
            let cnum: CrateNum = d.read_struct_field("cnum", 10, |d| Decodable::decode(d))?;
            Ok(SourceFile {
                name,
                name_was_remapped,
                unmapped_path: None,
                start_pos,
                end_pos,
                src: None,
                src_hash,
                // Unused - the metadata decoder will construct
                // a new SourceFile, filling in `external_src` properly
                external_src: Lock::new(ExternalSource::Unneeded),
                lines,
                multibyte_chars,
                non_narrow_chars,
                normalized_pos,
                name_hash,
                cnum,
            })
        })
    }
}

impl fmt::Debug for SourceFile {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "SourceFile({})", self.name)
    }
}

2018-08-18 05:13:52 -05:00
|
|
|
impl SourceFile {
|
2019-12-22 16:42:04 -06:00
|
|
|
pub fn new(
|
|
|
|
name: FileName,
|
|
|
|
name_was_remapped: bool,
|
|
|
|
unmapped_path: FileName,
|
|
|
|
mut src: String,
|
|
|
|
start_pos: BytePos,
|
2020-02-18 11:24:36 -06:00
|
|
|
) -> Self {
|
2019-10-02 19:55:31 -05:00
|
|
|
let normalized_pos = normalize_src(&mut src, start_pos);
|
2017-06-10 09:09:51 -05:00
|
|
|
|
2017-12-19 08:14:41 -06:00
|
|
|
let src_hash = {
|
2019-09-26 17:54:39 -05:00
|
|
|
let mut hasher: StableHasher = StableHasher::new();
|
2017-12-19 08:14:41 -06:00
|
|
|
hasher.write(src.as_bytes());
|
2019-09-26 17:54:39 -05:00
|
|
|
hasher.finish::<u128>()
|
2017-12-19 08:14:41 -06:00
|
|
|
};
|
|
|
|
let name_hash = {
|
2019-09-26 17:54:39 -05:00
|
|
|
let mut hasher: StableHasher = StableHasher::new();
|
2017-12-19 08:14:41 -06:00
|
|
|
name.hash(&mut hasher);
|
2019-09-26 17:54:39 -05:00
|
|
|
hasher.finish::<u128>()
|
2017-12-19 08:14:41 -06:00
|
|
|
};
|
2017-06-10 09:09:51 -05:00
|
|
|
let end_pos = start_pos.to_usize() + src.len();
|
2020-02-18 11:24:36 -06:00
|
|
|
assert!(end_pos <= u32::max_value() as usize);
|
2017-06-10 09:09:51 -05:00
|
|
|
|
2018-05-23 08:59:42 -05:00
|
|
|
let (lines, multibyte_chars, non_narrow_chars) =
|
2018-08-18 05:13:56 -05:00
|
|
|
analyze_source_file::analyze_source_file(&src[..], start_pos);
|
2018-05-23 08:59:42 -05:00
|
|
|
|
2020-02-18 11:24:36 -06:00
|
|
|
SourceFile {
|
2017-08-07 00:54:09 -05:00
|
|
|
name,
|
|
|
|
name_was_remapped,
|
2017-10-03 04:44:58 -05:00
|
|
|
unmapped_path: Some(unmapped_path),
|
2018-02-27 10:11:14 -06:00
|
|
|
src: Some(Lrc::new(src)),
|
2017-08-07 00:54:09 -05:00
|
|
|
src_hash,
|
2018-03-09 23:40:17 -06:00
|
|
|
external_src: Lock::new(ExternalSource::Unneeded),
|
2017-08-07 00:54:09 -05:00
|
|
|
start_pos,
|
2017-06-10 09:09:51 -05:00
|
|
|
end_pos: Pos::from_usize(end_pos),
|
2018-05-23 08:59:42 -05:00
|
|
|
lines,
|
|
|
|
multibyte_chars,
|
|
|
|
non_narrow_chars,
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos,
|
2017-12-19 08:14:41 -06:00
|
|
|
name_hash,
|
2020-02-07 13:02:24 -06:00
|
|
|
cnum: LOCAL_CRATE,
|
2020-02-18 11:24:36 -06:00
|
|
|
}
|
2017-06-10 09:09:51 -05:00
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns the `BytePos` of the beginning of the current line.
|
2018-06-28 03:45:57 -05:00
|
|
|
pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
|
|
|
|
let line_index = self.lookup_line(pos).unwrap();
|
|
|
|
self.lines[line_index]
|
2018-05-08 04:58:54 -05:00
|
|
|
}
|
|
|
|
|
2017-06-12 08:37:26 -05:00
|
|
|
/// Add externally loaded source.
|
|
|
|
/// If the hash of the input doesn't match or no input is supplied via None,
|
2017-06-11 06:48:54 -05:00
|
|
|
/// it is interpreted as an error and the corresponding enum variant is set.
|
2017-06-12 08:37:26 -05:00
|
|
|
/// The return value signifies whether some kind of source is present.
|
2017-08-01 06:17:11 -05:00
|
|
|
pub fn add_external_src<F>(&self, get_src: F) -> bool
|
2019-12-22 16:42:04 -06:00
|
|
|
where
|
|
|
|
F: FnOnce() -> Option<String>,
|
2017-08-01 06:17:11 -05:00
|
|
|
{
|
2020-02-07 13:02:24 -06:00
|
|
|
if matches!(
|
|
|
|
*self.external_src.borrow(),
|
|
|
|
ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. }
|
|
|
|
) {
|
2017-08-01 06:17:11 -05:00
|
|
|
let src = get_src();
|
2017-06-12 08:37:26 -05:00
|
|
|
let mut external_src = self.external_src.borrow_mut();
|
2018-03-09 23:40:17 -06:00
|
|
|
// Check that no-one else have provided the source while we were getting it
|
2020-02-07 13:02:24 -06:00
|
|
|
if let ExternalSource::Foreign {
|
|
|
|
kind: src_kind @ ExternalSourceKind::AbsentOk, ..
|
|
|
|
} = &mut *external_src
|
|
|
|
{
|
2018-03-09 23:40:17 -06:00
|
|
|
if let Some(src) = src {
|
2019-09-26 17:54:39 -05:00
|
|
|
let mut hasher: StableHasher = StableHasher::new();
|
2018-03-09 23:40:17 -06:00
|
|
|
hasher.write(src.as_bytes());
|
|
|
|
|
2019-09-26 17:54:39 -05:00
|
|
|
if hasher.finish::<u128>() == self.src_hash {
|
2020-03-20 01:00:06 -05:00
|
|
|
*src_kind = ExternalSourceKind::Present(Lrc::new(src));
|
2018-03-09 23:40:17 -06:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
} else {
|
2020-02-07 13:02:24 -06:00
|
|
|
*src_kind = ExternalSourceKind::AbsentErr;
|
2017-06-12 08:37:26 -05:00
|
|
|
}
|
2018-03-09 23:40:17 -06:00
|
|
|
|
|
|
|
false
|
2017-06-12 08:37:26 -05:00
|
|
|
} else {
|
2018-03-09 23:40:17 -06:00
|
|
|
self.src.is_some() || external_src.get_source().is_some()
|
2017-06-11 06:48:54 -05:00
|
|
|
}
|
|
|
|
} else {
|
2017-06-12 08:37:26 -05:00
|
|
|
self.src.is_some() || self.external_src.borrow().get_source().is_some()
|
2017-06-11 06:48:54 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Gets a line from the list of pre-computed line-beginnings.
|
2017-06-12 08:37:26 -05:00
|
|
|
/// The line number here is 0-based.
|
2019-02-03 12:42:27 -06:00
|
|
|
pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
|
2017-06-11 06:31:40 -05:00
|
|
|
fn get_until_newline(src: &str, begin: usize) -> &str {
|
|
|
|
// We can't use `lines.get(line_number+1)` because we might
|
|
|
|
// be parsing when we call this function and thus the current
|
|
|
|
// line is the last one we have line info for.
|
|
|
|
let slice = &src[begin..];
|
|
|
|
match slice.find('\n') {
|
|
|
|
Some(e) => &slice[..e],
|
2019-12-22 16:42:04 -06:00
|
|
|
None => slice,
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
2017-06-11 06:31:40 -05:00
|
|
|
}
|
|
|
|
|
2018-03-09 23:40:17 -06:00
|
|
|
let begin = {
|
2020-03-05 14:50:44 -06:00
|
|
|
let line = self.lines.get(line_number)?;
|
2018-03-09 23:40:17 -06:00
|
|
|
let begin: BytePos = *line - self.start_pos;
|
|
|
|
begin.to_usize()
|
2017-06-11 06:31:40 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
if let Some(ref src) = self.src {
|
|
|
|
Some(Cow::from(get_until_newline(src, begin)))
|
|
|
|
} else if let Some(src) = self.external_src.borrow().get_source() {
|
|
|
|
Some(Cow::Owned(String::from(get_until_newline(src, begin))))
|
|
|
|
} else {
|
|
|
|
None
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
}
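    // Sketch (hypothetical helper): line numbers are 0-based and the returned slice
    // never includes the trailing newline, regardless of whether the source is stored
    // inline (borrowed `Cow`) or was loaded externally (owned `Cow`).
    #[cfg(test)]
    fn _get_line_example(sf: &SourceFile) {
        if let Some(first_line) = sf.get_line(0) {
            assert!(!first_line.contains('\n'));
        }
    }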
|
|
|
|
|
|
|
|
pub fn is_real_file(&self) -> bool {
|
2017-12-14 01:09:19 -06:00
|
|
|
self.name.is_real()
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn is_imported(&self) -> bool {
|
2017-06-11 06:31:40 -05:00
|
|
|
self.src.is_none()
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2016-10-28 10:12:54 -05:00
|
|
|
pub fn byte_length(&self) -> u32 {
|
|
|
|
self.end_pos.0 - self.start_pos.0
|
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
pub fn count_lines(&self) -> usize {
|
2018-05-23 08:59:42 -05:00
|
|
|
self.lines.len()
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
2016-08-24 16:06:31 -05:00
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Finds the line containing the given position. The return value is the
|
2018-11-26 20:59:49 -06:00
|
|
|
/// index into the `lines` array of this `SourceFile`, not the 1-based line
|
2018-08-18 05:13:56 -05:00
|
|
|
/// number. If the `SourceFile` is empty or the position is located before the
|
2018-11-26 20:59:49 -06:00
|
|
|
/// first line, `None` is returned.
|
2016-08-24 16:06:31 -05:00
|
|
|
pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
|
2020-02-28 07:20:33 -06:00
|
|
|
if self.lines.is_empty() {
|
2016-08-24 16:06:31 -05:00
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
2018-05-23 08:59:42 -05:00
|
|
|
let line_index = lookup_line(&self.lines[..], pos);
|
|
|
|
assert!(line_index < self.lines.len() as isize);
|
2020-01-09 04:56:38 -06:00
|
|
|
if line_index >= 0 {
|
|
|
|
Some(line_index as usize)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2016-08-24 16:06:31 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn line_bounds(&self, line_index: usize) -> (BytePos, BytePos) {
|
|
|
|
if self.start_pos == self.end_pos {
|
|
|
|
return (self.start_pos, self.end_pos);
|
|
|
|
}
|
|
|
|
|
2018-05-23 08:59:42 -05:00
|
|
|
assert!(line_index < self.lines.len());
|
|
|
|
if line_index == (self.lines.len() - 1) {
|
|
|
|
(self.lines[line_index], self.end_pos)
|
2016-08-24 16:06:31 -05:00
|
|
|
} else {
|
2018-05-23 08:59:42 -05:00
|
|
|
(self.lines[line_index], self.lines[line_index + 1])
|
2016-08-24 16:06:31 -05:00
|
|
|
}
|
|
|
|
}
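    // Sketch (hypothetical helper): the bounds of line `i` start at `lines[i]` and end
    // where the next line starts, or at `end_pos` for the last line of the file.
    #[cfg(test)]
    fn _line_bounds_example(sf: &SourceFile) {
        if sf.count_lines() > 0 {
            let (lo, hi) = sf.line_bounds(0);
            assert!(lo <= hi && lo == sf.lines[0] && hi <= sf.end_pos);
        }
    }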
|
2017-11-24 07:00:33 -06:00
|
|
|
|
|
|
|
#[inline]
|
|
|
|
pub fn contains(&self, byte_pos: BytePos) -> bool {
|
|
|
|
byte_pos >= self.start_pos && byte_pos <= self.end_pos
|
|
|
|
}
|
2019-10-02 19:55:31 -05:00
|
|
|
|
|
|
|
/// Calculates the original byte position relative to the start of the file
|
|
|
|
/// based on the given byte position.
|
|
|
|
pub fn original_relative_byte_pos(&self, pos: BytePos) -> BytePos {
|
|
|
|
// Diff before any records is 0. Otherwise use the previously recorded
|
|
|
|
// diff as that applies to the following characters until a new diff
|
|
|
|
// is recorded.
|
2019-12-22 16:42:04 -06:00
|
|
|
let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) {
|
2019-10-02 19:55:31 -05:00
|
|
|
Ok(i) => self.normalized_pos[i].diff,
|
|
|
|
Err(i) if i == 0 => 0,
|
2019-12-22 16:42:04 -06:00
|
|
|
Err(i) => self.normalized_pos[i - 1].diff,
|
2019-10-02 19:55:31 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
BytePos::from_u32(pos.0 - self.start_pos.0 + diff)
|
|
|
|
}
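    // Worked sketch (hypothetical values): if a 3-byte BOM was stripped at the start of
    // the file, `normalized_pos` holds one record with `diff: 3`, so a normalized
    // position `p` maps back to `p - start_pos + 3` in the original bytes. With no
    // normalizations recorded, the answer is simply `p - start_pos`.
    #[cfg(test)]
    fn _original_relative_byte_pos_example(sf: &SourceFile) {
        if sf.normalized_pos.is_empty() {
            assert_eq!(sf.original_relative_byte_pos(sf.start_pos), BytePos(0));
        }
    }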
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Normalizes the source code and records the normalizations.
|
|
|
|
fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec<NormalizedPos> {
|
|
|
|
let mut normalized_pos = vec![];
|
|
|
|
remove_bom(src, &mut normalized_pos);
|
|
|
|
normalize_newlines(src, &mut normalized_pos);
|
|
|
|
|
|
|
|
// Offset all the positions by start_pos to match the final file positions.
|
|
|
|
for np in &mut normalized_pos {
|
|
|
|
np.pos.0 += start_pos.0;
|
|
|
|
}
|
|
|
|
|
|
|
|
normalized_pos
|
2016-06-21 17:08:13 -05:00
|
|
|
}
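// A small end-to-end sketch (hypothetical input) of what `normalize_src` records:
// stripping the BOM shifts everything by 3 bytes, and each removed `\r` adds one more.
#[cfg(test)]
fn _normalize_src_example() {
    let mut src = String::from("\u{feff}a\r\nb");
    let normalized_pos = normalize_src(&mut src, BytePos(0));
    assert_eq!(src, "a\nb");
    // One record for the BOM (diff 3) and one for the removed `\r` (cumulative diff 4).
    assert_eq!(normalized_pos.len(), 2);
    assert_eq!((normalized_pos[0].pos, normalized_pos[0].diff), (BytePos(0), 3));
    assert_eq!((normalized_pos[1].pos, normalized_pos[1].diff), (BytePos(2), 4));
}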
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Removes UTF-8 BOM, if any.
|
2019-10-02 19:55:31 -05:00
|
|
|
fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
|
2017-06-10 09:09:51 -05:00
|
|
|
if src.starts_with("\u{feff}") {
|
|
|
|
src.drain(..3);
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 });
|
2017-06-10 09:09:51 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-14 07:35:12 -05:00
|
|
|
/// Replaces `\r\n` with `\n` in-place in `src`.
|
|
|
|
///
|
|
|
|
/// Lone `\r` characters not followed by `\n` are left in place unchanged.
|
2019-10-02 19:55:31 -05:00
|
|
|
fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
|
2019-08-14 07:35:12 -05:00
|
|
|
if !src.as_bytes().contains(&b'\r') {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
|
|
|
|
// While we *can* call `as_mut_vec` and do surgery on the live string
|
|
|
|
// directly, we instead steal the contents of `src`. This makes the code
|
|
|
|
// safe even if a panic occurs.
|
|
|
|
|
|
|
|
let mut buf = std::mem::replace(src, String::new()).into_bytes();
|
|
|
|
let mut gap_len = 0;
|
|
|
|
let mut tail = buf.as_mut_slice();
|
2019-10-02 19:55:31 -05:00
|
|
|
let mut cursor = 0;
|
|
|
|
let original_gap = normalized_pos.last().map_or(0, |l| l.diff);
|
2019-08-14 07:35:12 -05:00
|
|
|
loop {
|
|
|
|
let idx = match find_crlf(&tail[gap_len..]) {
|
|
|
|
None => tail.len(),
|
|
|
|
Some(idx) => idx + gap_len,
|
|
|
|
};
|
|
|
|
tail.copy_within(gap_len..idx, 0);
|
|
|
|
tail = &mut tail[idx - gap_len..];
|
|
|
|
if tail.len() == gap_len {
|
|
|
|
break;
|
|
|
|
}
|
2019-10-02 19:55:31 -05:00
|
|
|
cursor += idx - gap_len;
|
2019-08-14 07:35:12 -05:00
|
|
|
gap_len += 1;
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos.push(NormalizedPos {
|
|
|
|
pos: BytePos::from_usize(cursor + 1),
|
|
|
|
diff: original_gap + gap_len as u32,
|
|
|
|
});
|
2019-08-14 07:35:12 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// Account for removed `\r`.
|
|
|
|
// After `set_len`, `buf` is guaranteed to contain utf-8 again.
|
|
|
|
let new_len = buf.len() - gap_len;
|
|
|
|
unsafe {
|
|
|
|
buf.set_len(new_len);
|
|
|
|
*src = String::from_utf8_unchecked(buf);
|
|
|
|
}
|
|
|
|
|
|
|
|
fn find_crlf(src: &[u8]) -> Option<usize> {
|
|
|
|
let mut search_idx = 0;
|
|
|
|
while let Some(idx) = find_cr(&src[search_idx..]) {
|
|
|
|
if src[search_idx..].get(idx + 1) != Some(&b'\n') {
|
|
|
|
search_idx += idx + 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
return Some(search_idx + idx);
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
|
|
|
|
|
|
|
fn find_cr(src: &[u8]) -> Option<usize> {
|
|
|
|
src.iter().position(|&b| b == b'\r')
|
|
|
|
}
|
|
|
|
}
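// Sketch (hypothetical input): a lone `\r` is not part of a `\r\n` pair, so it is left
// in place and no `NormalizedPos` record is emitted for it.
#[cfg(test)]
fn _lone_cr_example() {
    let mut src = String::from("a\rb\r\nc");
    let mut normalized_pos = vec![];
    normalize_newlines(&mut src, &mut normalized_pos);
    assert_eq!(src, "a\rb\nc");
    assert_eq!(normalized_pos.len(), 1);
}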
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
// _____________________________________________________________________________
|
|
|
|
// Pos, BytePos, CharPos
|
|
|
|
//
|
|
|
|
|
|
|
|
pub trait Pos {
|
|
|
|
fn from_usize(n: usize) -> Self;
|
|
|
|
fn to_usize(&self) -> usize;
|
2018-05-29 10:49:35 -05:00
|
|
|
fn from_u32(n: u32) -> Self;
|
|
|
|
fn to_u32(&self) -> u32;
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/// A byte offset. Keep this small (currently 32 bits), as the AST contains
|
|
|
|
/// a lot of them.
|
|
|
|
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
|
|
|
|
pub struct BytePos(pub u32);
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// A character offset. Because of multibyte UTF-8 characters, a byte offset
|
|
|
|
/// is not equivalent to a character offset. The `SourceMap` will convert `BytePos`
|
|
|
|
/// values to `CharPos` values as necessary.
|
2019-10-19 23:54:53 -05:00
|
|
|
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
|
2016-06-21 17:08:13 -05:00
|
|
|
pub struct CharPos(pub usize);
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
// FIXME: lots of boilerplate in these impls, but so far my attempts to fix it
|
|
|
|
// have been unsuccessful.
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
impl Pos for BytePos {
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn from_usize(n: usize) -> BytePos {
|
|
|
|
BytePos(n as u32)
|
|
|
|
}
|
2018-05-02 18:12:26 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn to_usize(&self) -> usize {
|
|
|
|
self.0 as usize
|
|
|
|
}
|
2018-05-29 10:49:35 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn from_u32(n: u32) -> BytePos {
|
|
|
|
BytePos(n)
|
|
|
|
}
|
2018-05-29 10:49:35 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn to_u32(&self) -> u32 {
|
|
|
|
self.0
|
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
impl Add for BytePos {
|
|
|
|
type Output = BytePos;
|
|
|
|
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2016-06-21 17:08:13 -05:00
|
|
|
fn add(self, rhs: BytePos) -> BytePos {
|
|
|
|
BytePos((self.to_usize() + rhs.to_usize()) as u32)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Sub for BytePos {
|
|
|
|
type Output = BytePos;
|
|
|
|
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2016-06-21 17:08:13 -05:00
|
|
|
fn sub(self, rhs: BytePos) -> BytePos {
|
|
|
|
BytePos((self.to_usize() - rhs.to_usize()) as u32)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Encodable for BytePos {
|
|
|
|
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
|
|
|
|
s.emit_u32(self.0)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Decodable for BytePos {
|
|
|
|
fn decode<D: Decoder>(d: &mut D) -> Result<BytePos, D::Error> {
|
|
|
|
Ok(BytePos(d.read_u32()?))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Pos for CharPos {
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn from_usize(n: usize) -> CharPos {
|
|
|
|
CharPos(n)
|
|
|
|
}
|
2018-05-02 18:12:26 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn to_usize(&self) -> usize {
|
|
|
|
self.0
|
|
|
|
}
|
2018-05-29 10:49:35 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn from_u32(n: u32) -> CharPos {
|
|
|
|
CharPos(n as usize)
|
|
|
|
}
|
2018-05-29 10:49:35 -05:00
|
|
|
|
|
|
|
#[inline(always)]
|
2019-12-22 16:42:04 -06:00
|
|
|
fn to_u32(&self) -> u32 {
|
|
|
|
self.0 as u32
|
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
impl Add for CharPos {
|
|
|
|
type Output = CharPos;
|
|
|
|
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2016-06-21 17:08:13 -05:00
|
|
|
fn add(self, rhs: CharPos) -> CharPos {
|
|
|
|
CharPos(self.to_usize() + rhs.to_usize())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Sub for CharPos {
|
|
|
|
type Output = CharPos;
|
|
|
|
|
2018-05-02 18:12:26 -05:00
|
|
|
#[inline(always)]
|
2016-06-21 17:08:13 -05:00
|
|
|
fn sub(self, rhs: CharPos) -> CharPos {
|
|
|
|
CharPos(self.to_usize() - rhs.to_usize())
|
|
|
|
}
|
|
|
|
}
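// Sketch contrasting the two offset types (hypothetical string): "é" occupies two bytes
// but one character, so byte offsets and character offsets diverge after it.
#[cfg(test)]
fn _byte_vs_char_pos_example() {
    let s = "é!";
    assert_eq!(BytePos::from_usize(s.len()), BytePos(3));
    assert_eq!(CharPos::from_usize(s.chars().count()), CharPos(2));
}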
|
|
|
|
|
|
|
|
// _____________________________________________________________________________
|
2019-04-05 14:42:40 -05:00
|
|
|
// Loc, SourceFileAndLine, SourceFileAndBytePos
|
2016-06-21 17:08:13 -05:00
|
|
|
//
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// A source code location used for error reporting.
|
2016-07-11 15:02:03 -05:00
|
|
|
#[derive(Debug, Clone)]
|
2016-06-21 17:08:13 -05:00
|
|
|
pub struct Loc {
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Information about the original source.
|
2018-08-18 05:13:52 -05:00
|
|
|
pub file: Lrc<SourceFile>,
|
2018-11-26 20:59:49 -06:00
|
|
|
/// The (1-based) line number.
|
2016-06-21 17:08:13 -05:00
|
|
|
pub line: usize,
|
2018-11-26 20:59:49 -06:00
|
|
|
/// The (0-based) column offset.
|
2017-11-01 20:25:54 -05:00
|
|
|
pub col: CharPos,
|
2018-11-26 20:59:49 -06:00
|
|
|
/// The (0-based) column offset when displayed.
|
2017-11-01 20:25:54 -05:00
|
|
|
pub col_display: usize,
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
// Used to be structural records.
|
2016-06-21 17:08:13 -05:00
|
|
|
#[derive(Debug)]
|
2019-12-22 16:42:04 -06:00
|
|
|
pub struct SourceFileAndLine {
|
|
|
|
pub sf: Lrc<SourceFile>,
|
|
|
|
pub line: usize,
|
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
#[derive(Debug)]
|
2019-12-22 16:42:04 -06:00
|
|
|
pub struct SourceFileAndBytePos {
|
|
|
|
pub sf: Lrc<SourceFile>,
|
|
|
|
pub pos: BytePos,
|
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
|
|
|
pub struct LineInfo {
|
|
|
|
/// Index of line, starting from 0.
|
|
|
|
pub line_index: usize,
|
|
|
|
|
|
|
|
/// Column in line where span begins, starting from 0.
|
|
|
|
pub start_col: CharPos,
|
|
|
|
|
|
|
|
/// Column in line where span ends, starting from 0, exclusive.
|
|
|
|
pub end_col: CharPos,
|
|
|
|
}
|
|
|
|
|
|
|
|
pub struct FileLines {
|
2018-08-18 05:13:52 -05:00
|
|
|
pub file: Lrc<SourceFile>,
|
2019-12-22 16:42:04 -06:00
|
|
|
pub lines: Vec<LineInfo>,
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2019-12-25 12:38:57 -06:00
|
|
|
pub static SPAN_DEBUG: AtomicRef<fn(Span, &mut fmt::Formatter<'_>) -> fmt::Result> =
|
|
|
|
AtomicRef::new(&(default_span_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
// _____________________________________________________________________________
|
2018-10-29 15:26:13 -05:00
|
|
|
// SpanLinesError, SpanSnippetError, DistinctSources, MalformedSourceMapPositions
|
2016-06-21 17:08:13 -05:00
|
|
|
//
|
|
|
|
|
|
|
|
pub type FileLinesResult = Result<FileLines, SpanLinesError>;
|
|
|
|
|
|
|
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
|
|
pub enum SpanLinesError {
|
|
|
|
DistinctSources(DistinctSources),
|
|
|
|
}
|
|
|
|
|
|
|
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
|
|
pub enum SpanSnippetError {
|
|
|
|
IllFormedSpan(Span),
|
|
|
|
DistinctSources(DistinctSources),
|
2018-10-29 15:26:13 -05:00
|
|
|
MalformedForSourcemap(MalformedSourceMapPositions),
|
2019-12-22 16:42:04 -06:00
|
|
|
SourceNotAvailable { filename: FileName },
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
|
|
pub struct DistinctSources {
|
2017-12-14 01:09:19 -06:00
|
|
|
pub begin: (FileName, BytePos),
|
2019-12-22 16:42:04 -06:00
|
|
|
pub end: (FileName, BytePos),
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
2018-10-29 15:26:13 -05:00
|
|
|
pub struct MalformedSourceMapPositions {
|
2017-12-14 01:09:19 -06:00
|
|
|
pub name: FileName,
|
2016-06-21 17:08:13 -05:00
|
|
|
pub source_len: usize,
|
|
|
|
pub begin_pos: BytePos,
|
2019-12-22 16:42:04 -06:00
|
|
|
pub end_pos: BytePos,
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2019-07-29 23:43:54 -05:00
|
|
|
/// Range inside of a `Span` used for diagnostics when we only have access to relative positions.
|
2019-06-04 10:03:43 -05:00
|
|
|
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
|
|
|
pub struct InnerSpan {
|
|
|
|
pub start: usize,
|
|
|
|
pub end: usize,
|
|
|
|
}
|
|
|
|
|
|
|
|
impl InnerSpan {
|
|
|
|
pub fn new(start: usize, end: usize) -> InnerSpan {
|
|
|
|
InnerSpan { start, end }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-08-24 16:06:31 -05:00
|
|
|
// Given a slice of line start positions and a position, returns the index of
|
|
|
|
// the line the position is on. Returns -1 if the position is located before
|
|
|
|
// the first line.
|
|
|
|
fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
|
|
|
|
match lines.binary_search(&pos) {
|
|
|
|
Ok(line) => line as isize,
|
2019-12-22 16:42:04 -06:00
|
|
|
Err(line) => line as isize - 1,
|
2016-08-24 16:06:31 -05:00
|
|
|
}
|
|
|
|
}
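// Sketch (hypothetical line table): positions before the first line start map to -1,
// while positions on a line map to that line's 0-based index.
#[cfg(test)]
fn _lookup_line_example() {
    let lines = [BytePos(5), BytePos(10)];
    assert_eq!(lookup_line(&lines, BytePos(0)), -1);
    assert_eq!(lookup_line(&lines, BytePos(5)), 0);
    assert_eq!(lookup_line(&lines, BytePos(7)), 0);
    assert_eq!(lookup_line(&lines, BytePos(12)), 1);
}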
|
2019-11-10 10:19:08 -06:00
|
|
|
|
|
|
|
/// Requirements for a `StableHashingContext` to be used in this crate.
|
|
|
|
/// This is a hack to allow using the `HashStable_Generic` derive macro
|
|
|
|
/// instead of implementing everything in librustc.
|
2019-11-23 06:58:17 -06:00
|
|
|
pub trait HashStableContext {
|
2019-11-23 07:39:00 -06:00
|
|
|
fn hash_spans(&self) -> bool;
|
2020-02-08 14:06:31 -06:00
|
|
|
fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher);
|
2019-12-22 16:42:04 -06:00
|
|
|
fn byte_pos_to_line_and_col(
|
|
|
|
&mut self,
|
|
|
|
byte: BytePos,
|
|
|
|
) -> Option<(Lrc<SourceFile>, usize, BytePos)>;
|
2019-11-23 07:39:00 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
impl<CTX> HashStable<CTX> for Span
|
2019-12-22 16:42:04 -06:00
|
|
|
where
|
|
|
|
CTX: HashStableContext,
|
2019-11-23 07:39:00 -06:00
|
|
|
{
|
|
|
|
/// Hashes a span in a stable way. We can't directly hash the span's `BytePos`
|
|
|
|
/// fields (that would be similar to hashing pointers, since those are just
|
|
|
|
/// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
|
|
|
|
/// triple, which stays the same even if the containing `SourceFile` has moved
|
|
|
|
/// within the `SourceMap`.
|
|
|
|
/// Also note that we are hashing byte offsets for the column, not unicode
|
|
|
|
/// codepoint offsets. For the purpose of the hash that's sufficient.
|
|
|
|
/// Also, hashing filenames is expensive so we avoid doing it twice when the
|
|
|
|
/// span starts and ends in the same file, which is almost always the case.
|
|
|
|
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
|
|
|
const TAG_VALID_SPAN: u8 = 0;
|
|
|
|
const TAG_INVALID_SPAN: u8 = 1;
|
|
|
|
const TAG_EXPANSION: u8 = 0;
|
|
|
|
const TAG_NO_EXPANSION: u8 = 1;
|
|
|
|
|
|
|
|
if !ctx.hash_spans() {
|
2019-12-22 16:42:04 -06:00
|
|
|
return;
|
2019-11-23 07:39:00 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
if *self == DUMMY_SP {
|
|
|
|
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
|
|
|
|
}
|
|
|
|
|
|
|
|
// If this is not an empty or invalid span, we want to hash the last
|
|
|
|
// position that belongs to it, as opposed to hashing the first
|
|
|
|
// position past it.
|
|
|
|
let span = self.data();
|
|
|
|
let (file_lo, line_lo, col_lo) = match ctx.byte_pos_to_line_and_col(span.lo) {
|
|
|
|
Some(pos) => pos,
|
|
|
|
None => {
|
|
|
|
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
if !file_lo.contains(span.hi) {
|
|
|
|
return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
|
|
|
|
}
|
|
|
|
|
|
|
|
std::hash::Hash::hash(&TAG_VALID_SPAN, hasher);
|
|
|
|
// We truncate the stable ID hash and line and column numbers. The chances
|
|
|
|
// of causing a collision this way should be minimal.
|
|
|
|
std::hash::Hash::hash(&(file_lo.name_hash as u64), hasher);
|
|
|
|
|
|
|
|
let col = (col_lo.0 as u64) & 0xFF;
|
|
|
|
let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
|
|
|
|
let len = ((span.hi - span.lo).0 as u64) << 32;
|
|
|
|
let line_col_len = col | line | len;
|
|
|
|
std::hash::Hash::hash(&line_col_len, hasher);
|
|
|
|
|
|
|
|
if span.ctxt == SyntaxContext::root() {
|
|
|
|
TAG_NO_EXPANSION.hash_stable(ctx, hasher);
|
|
|
|
} else {
|
|
|
|
TAG_EXPANSION.hash_stable(ctx, hasher);
|
|
|
|
|
|
|
|
// Since the same expansion context is usually referenced many
|
|
|
|
// times, we cache a stable hash of it and hash that instead of
|
|
|
|
// recursing every time.
|
|
|
|
thread_local! {
|
|
|
|
static CACHE: RefCell<FxHashMap<hygiene::ExpnId, u64>> = Default::default();
|
|
|
|
}
|
|
|
|
|
|
|
|
let sub_hash: u64 = CACHE.with(|cache| {
|
|
|
|
let expn_id = span.ctxt.outer_expn();
|
|
|
|
|
|
|
|
if let Some(&sub_hash) = cache.borrow().get(&expn_id) {
|
|
|
|
return sub_hash;
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
expn_id.expn_data().hash_stable(ctx, &mut hasher);
|
|
|
|
let sub_hash: Fingerprint = hasher.finish();
|
|
|
|
let sub_hash = sub_hash.to_smaller_hash();
|
|
|
|
cache.borrow_mut().insert(expn_id, sub_hash);
|
|
|
|
sub_hash
|
|
|
|
});
|
|
|
|
|
|
|
|
sub_hash.hash_stable(ctx, hasher);
|
|
|
|
}
|
|
|
|
}
|
2019-11-10 10:31:21 -06:00
|
|
|
}
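// Worked sketch of the packing used above (hypothetical values): the column goes into
// the low 8 bits, the line into the next 24, and the span length into the high 32.
#[cfg(test)]
fn _line_col_len_packing_example() {
    let (col, line, len) = (3u64, 7u64, 12u64);
    let packed = (col & 0xFF) | ((line & 0xFF_FF_FF) << 8) | (len << 32);
    assert_eq!(packed, 3 | (7 << 8) | (12 << 32));
    // Values wider than their field are silently truncated, e.g. column 259 hashes as 3.
    assert_eq!(259u64 & 0xFF, 3);
}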
|