2018-11-26 20:59:49 -06:00
|
|
|
//! The source positions and related helper functions.
|
2016-06-21 17:08:13 -05:00
|
|
|
//!
|
2018-11-26 20:59:49 -06:00
|
|
|
//! ## Note
|
2016-06-21 17:08:13 -05:00
|
|
|
//!
|
|
|
|
//! This API is completely unstable and subject to change.
|
|
|
|
|
2020-09-23 14:51:56 -05:00
|
|
|
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
2020-09-17 02:28:14 -05:00
|
|
|
#![feature(array_windows)]
|
2018-06-30 11:35:00 -05:00
|
|
|
#![feature(crate_visibility_modifier)]
|
2020-03-10 15:41:33 -05:00
|
|
|
#![feature(const_fn)]
|
|
|
|
#![feature(const_panic)]
|
2020-04-22 14:45:35 -05:00
|
|
|
#![feature(negative_impls)]
|
2019-02-10 01:13:30 -06:00
|
|
|
#![feature(nll)]
|
2020-05-09 06:59:21 -05:00
|
|
|
#![feature(min_specialization)]
|
2020-03-17 10:45:02 -05:00
|
|
|
#![feature(option_expect_none)]
|
2016-06-21 17:08:13 -05:00
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
#[macro_use]
|
2020-05-07 23:13:50 -05:00
|
|
|
extern crate rustc_macros;
|
|
|
|
|
2019-12-25 12:38:57 -06:00
|
|
|
use rustc_data_structures::AtomicRef;
|
2019-11-09 15:25:30 -06:00
|
|
|
use rustc_macros::HashStable_Generic;
|
2019-12-22 16:42:04 -06:00
|
|
|
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
|
2016-06-21 17:08:13 -05:00
|
|
|
|
2019-11-23 07:17:37 -06:00
|
|
|
mod caching_source_map_view;
|
2019-12-22 16:42:04 -06:00
|
|
|
pub mod source_map;
|
2019-11-23 07:17:37 -06:00
|
|
|
pub use self::caching_source_map_view::CachingSourceMapView;
|
2020-05-26 12:21:58 -05:00
|
|
|
use source_map::SourceMap;
|
2019-11-15 07:27:09 -06:00
|
|
|
|
2018-04-22 17:44:19 -05:00
|
|
|
pub mod edition;
|
2019-04-05 17:15:49 -05:00
|
|
|
use edition::Edition;
|
2017-03-16 05:23:33 -05:00
|
|
|
pub mod hygiene;
|
2020-06-11 12:48:46 -05:00
|
|
|
pub use hygiene::SyntaxContext;
|
2020-08-04 02:16:01 -05:00
|
|
|
use hygiene::Transparency;
|
2020-06-11 12:48:46 -05:00
|
|
|
pub use hygiene::{DesugaringKind, ExpnData, ExpnId, ExpnKind, ForLoopLoc, MacroKind};
|
2020-02-08 14:06:31 -06:00
|
|
|
pub mod def_id;
|
2020-02-07 13:02:24 -06:00
|
|
|
use def_id::{CrateNum, DefId, LOCAL_CRATE};
|
2017-09-16 13:43:05 -05:00
|
|
|
mod span_encoding;
|
|
|
|
pub use span_encoding::{Span, DUMMY_SP};
|
|
|
|
|
2017-03-16 23:04:41 -05:00
|
|
|
pub mod symbol;
|
2019-12-22 16:42:04 -06:00
|
|
|
pub use symbol::{sym, Symbol};
|
2017-03-16 05:23:33 -05:00
|
|
|
|
2018-08-18 05:13:56 -05:00
|
|
|
mod analyze_source_file;
|
2019-11-14 13:01:03 -06:00
|
|
|
pub mod fatal_error;
|
2018-05-29 10:50:13 -05:00
|
|
|
|
2019-11-23 07:39:00 -06:00
|
|
|
use rustc_data_structures::fingerprint::Fingerprint;
|
2019-12-22 16:42:04 -06:00
|
|
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
|
|
|
use rustc_data_structures::sync::{Lock, Lrc};
|
2018-11-26 20:59:49 -06:00
|
|
|
|
|
|
|
use std::borrow::Cow;
|
2019-12-25 12:38:57 -06:00
|
|
|
use std::cell::RefCell;
|
2018-11-26 20:59:49 -06:00
|
|
|
use std::cmp::{self, Ordering};
|
|
|
|
use std::fmt;
|
2020-03-31 00:17:15 -05:00
|
|
|
use std::hash::Hash;
|
2018-11-26 20:59:49 -06:00
|
|
|
use std::ops::{Add, Sub};
|
2020-05-29 10:31:55 -05:00
|
|
|
use std::path::{Path, PathBuf};
|
2020-03-31 00:17:15 -05:00
|
|
|
use std::str::FromStr;
|
|
|
|
|
|
|
|
use md5::Md5;
|
|
|
|
use sha1::Digest;
|
|
|
|
use sha1::Sha1;
|
2018-11-26 20:59:49 -06:00
|
|
|
|
Updates to experimental coverage counter injection
This is a combination of 18 commits.
Commit #2:
Additional examples and some small improvements.
Commit #3:
fixed mir-opt non-mir extensions and spanview title elements
Corrected a fairly recent assumption in runtest.rs that all MIR dump
files end in .mir. (It was appending .mir to the graphviz .dot and
spanview .html file names when generating blessed output files. That
also left outdated files in the baseline alongside the files with the
incorrect names, which I've now removed.)
Updated spanview HTML title elements to match their content, replacing a
hardcoded and incorrect name that was left in accidentally when
originally submitted.
Commit #4:
added more test examples
also improved Makefiles with support for non-zero exit status and to
force validation of tests unless a specific test overrides it with a
specific comment.
Commit #5:
Fixed rare issues after testing on real-world crate
Commit #6:
Addressed PR feedback, and removed temporary -Zexperimental-coverage
-Zinstrument-coverage once again supports the latest capabilities of
LLVM instrprof coverage instrumentation.
Also fixed a bug in spanview.
Commit #7:
Fix closure handling, add tests for closures and inner items
And cleaned up other tests for consistency, and to make it more clear
where spans start/end by breaking up lines.
Commit #8:
renamed "typical" test results "expected"
Now that the `llvm-cov show` tests are improved to normally expect
matching actuals, and to allow individual tests to override that
expectation.
Commit #9:
test coverage of inline generic struct function
Commit #10:
Addressed review feedback
* Removed unnecessary Unreachable filter.
* Replaced a match wildcard with remining variants.
* Added more comments to help clarify the role of successors() in the
CFG traversal
Commit #11:
refactoring based on feedback
* refactored `fn coverage_spans()`.
* changed the way I expand an empty coverage span to improve performance
* fixed a typo that I had accidentally left in, in visit.rs
Commit #12:
Optimized use of SourceMap and SourceFile
Commit #13:
Fixed a regression, and synched with upstream
Some generated test file names changed due to some new change upstream.
Commit #14:
Stripping out crate disambiguators from demangled names
These can vary depending on the test platform.
Commit #15:
Ignore llvm-cov show diff on test with generics, expand IO error message
Tests with generics produce llvm-cov show results with demangled names
that can include an unstable "crate disambiguator" (hex value). The
value changes when run in the Rust CI Windows environment. I added a sed
filter to strip them out (in a prior commit), but sed also appears to
fail in the same environment. Until I can figure out a workaround, I'm
just going to ignore this specific test result. I added a FIXME to
follow up later, but it's not that critical.
I also saw an error with Windows GNU, but the IO error did not
specify a path for the directory or file that triggered the error. I
updated the error messages to provide more info for next time, but also
noticed some other tests with similar steps did not fail. Looks
spurious.
Commit #16:
Modify rust-demangler to strip disambiguators by default
Commit #17:
Remove std::process::exit from coverage tests
Due to Issue #77553, programs that call std::process::exit() do not
generate coverage results on Windows MSVC.
Commit #18:
fix: test file paths exceeding Windows max path len
2020-09-01 18:15:17 -05:00
|
|
|
use tracing::debug;
|
|
|
|
|
2019-07-31 19:20:23 -05:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests;
|
|
|
|
|
2020-07-05 19:53:14 -05:00
|
|
|
// Per-session global variables: this struct is stored in thread-local storage
|
|
|
|
// in such a way that it is accessible without any kind of handle to all
|
|
|
|
// threads within the compilation session, but is not accessible outside the
|
|
|
|
// session.
|
|
|
|
pub struct SessionGlobals {
    // Interner for `Symbol`s; wrapped in a `Lock` so all session threads can share it.
    symbol_interner: Lock<symbol::Interner>,
    // Interner backing `Span` (see `span_encoding`), keeping `Span` itself small.
    span_interner: Lock<span_encoding::SpanInterner>,
    // All macro-hygiene bookkeeping for this compilation session.
    hygiene_data: Lock<hygiene::HygieneData>,
    // Lazily-installed handle to the session's `SourceMap` (starts out `None`).
    source_map: Lock<Option<Lrc<SourceMap>>>,
}
|
|
|
|
|
2020-07-05 19:53:14 -05:00
|
|
|
impl SessionGlobals {
    /// Creates a fresh set of per-session globals. The `edition` seeds the
    /// hygiene data; everything else starts empty.
    pub fn new(edition: Edition) -> SessionGlobals {
        SessionGlobals {
            symbol_interner: Lock::new(symbol::Interner::fresh()),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
            // No `SourceMap` yet; one may be installed later in the session.
            source_map: Lock::new(None),
        }
    }
}
|
|
|
|
|
2020-07-29 20:27:50 -05:00
|
|
|
/// Runs `f` with a fresh set of `SessionGlobals` (built for `edition`)
/// installed in the `SESSION_GLOBALS` thread-local for the duration of the call.
pub fn with_session_globals<R>(edition: Edition, f: impl FnOnce() -> R) -> R {
    let session_globals = SessionGlobals::new(edition);
    SESSION_GLOBALS.set(&session_globals, f)
}
|
|
|
|
|
|
|
|
/// Convenience wrapper around `with_session_globals` using the default edition.
pub fn with_default_session_globals<R>(f: impl FnOnce() -> R) -> R {
    with_session_globals(edition::DEFAULT_EDITION, f)
}
|
|
|
|
|
2020-03-17 10:45:02 -05:00
|
|
|
// If this ever becomes non thread-local, `decode_syntax_context`
|
|
|
|
// and `decode_expn_id` will need to be updated to handle concurrent
|
|
|
|
// deserialization.
|
2020-07-05 19:53:14 -05:00
|
|
|
scoped_tls::scoped_thread_local!(pub static SESSION_GLOBALS: SessionGlobals);
|
2018-03-06 19:44:10 -06:00
|
|
|
|
2020-05-30 07:19:35 -05:00
|
|
|
// FIXME: Perhaps this should not implement Rustc{Decodable, Encodable}
|
|
|
|
//
|
|
|
|
// FIXME: We should use this enum or something like it to get rid of the
|
|
|
|
// use of magic `/rust/1.x/...` paths across the board.
|
2020-06-11 09:49:57 -05:00
|
|
|
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)]
#[derive(HashStable_Generic, Decodable, Encodable)]
pub enum RealFileName {
    /// An ordinary path on the local host's file system.
    Named(PathBuf),
    /// For de-virtualized paths (namely paths into libstd that have been mapped
    /// to the appropriate spot on the local host's file system),
    Devirtualized {
        /// `local_path` is the (host-dependent) local path to the file.
        local_path: PathBuf,
        /// `virtual_name` is the stable path rustc will store internally within
        /// build artifacts.
        virtual_name: PathBuf,
    },
}
|
|
|
|
|
|
|
|
impl RealFileName {
    /// Returns the path suitable for reading from the file system on the local host.
    /// Avoid embedding this in build artifacts; see `stable_name` for that.
    pub fn local_path(&self) -> &Path {
        match self {
            RealFileName::Named(p)
            | RealFileName::Devirtualized { local_path: p, virtual_name: _ } => &p,
        }
    }

    /// Like `local_path`, but consumes `self` and yields the owned `PathBuf`.
    /// Avoid embedding this in build artifacts; see `stable_name` for that.
    pub fn into_local_path(self) -> PathBuf {
        match self {
            RealFileName::Named(p)
            | RealFileName::Devirtualized { local_path: p, virtual_name: _ } => p,
        }
    }

    /// Returns the path suitable for embedding into build artifacts. Note that
    /// a virtualized path will not correspond to a valid file system path; see
    /// `local_path` for something that is more likely to return paths into the
    /// local host file system.
    pub fn stable_name(&self) -> &Path {
        match self {
            RealFileName::Named(p)
            | RealFileName::Devirtualized { local_path: _, virtual_name: p } => &p,
        }
    }
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Differentiates between real files and common virtual files.
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)]
#[derive(HashStable_Generic, Decodable, Encodable)]
pub enum FileName {
    /// A real (possibly de-virtualized) file; see `RealFileName`.
    Real(RealFileName),
    /// Call to `quote!`.
    QuoteExpansion(u64),
    /// Command line.
    Anon(u64),
    /// Hack in `src/librustc_ast/parse.rs`.
    // FIXME(jseyfried)
    MacroExpansion(u64),
    /// Source text produced by a procedural macro; the `u64` is a stable hash
    /// of the text (see `proc_macro_source_code`).
    ProcMacroSourceCode(u64),
    /// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`.
    CfgSpec(u64),
    /// Strings provided as crate attributes in the CLI.
    CliCrateAttr(u64),
    /// Custom sources for explicit parser calls from plugins and drivers.
    Custom(String),
    /// A doctest; the `isize` is the `line` argument passed to `doc_test_source_code`.
    DocTest(PathBuf, isize),
    /// Post-substitution inline assembly from LLVM
    InlineAsm(u64),
}
|
|
|
|
|
|
|
|
impl std::fmt::Display for FileName {
    // Virtual sources render as an angle-bracketed tag (e.g. `<anon>`);
    // real files and doctests render their path.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use FileName::*;
        match *self {
            Real(RealFileName::Named(ref path)) => write!(fmt, "{}", path.display()),
            // FIXME: might be nice to display both components of Devirtualized.
            // But for now (to backport fix for issue #70924), best to not
            // perturb diagnostics so it's obvious the test suite still works.
            Real(RealFileName::Devirtualized { ref local_path, virtual_name: _ }) => {
                write!(fmt, "{}", local_path.display())
            }
            QuoteExpansion(_) => write!(fmt, "<quote expansion>"),
            MacroExpansion(_) => write!(fmt, "<macro expansion>"),
            Anon(_) => write!(fmt, "<anon>"),
            ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
            CfgSpec(_) => write!(fmt, "<cfgspec>"),
            CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
            Custom(ref s) => write!(fmt, "<{}>", s),
            DocTest(ref path, _) => write!(fmt, "{}", path.display()),
            InlineAsm(_) => write!(fmt, "<inline asm>"),
        }
    }
}
|
|
|
|
|
|
|
|
impl From<PathBuf> for FileName {
    fn from(p: PathBuf) -> Self {
        // A trailing '>' would make the path indistinguishable from the
        // `<...>` tags the `Display` impl uses for virtual file names.
        assert!(!p.to_string_lossy().ends_with('>'));
        FileName::Real(RealFileName::Named(p))
    }
}
|
|
|
|
|
|
|
|
impl FileName {
|
|
|
|
pub fn is_real(&self) -> bool {
|
2019-02-03 12:42:27 -06:00
|
|
|
use FileName::*;
|
2017-12-14 01:09:19 -06:00
|
|
|
match *self {
|
|
|
|
Real(_) => true,
|
2019-11-13 06:01:43 -06:00
|
|
|
Anon(_)
|
2019-12-22 16:42:04 -06:00
|
|
|
| MacroExpansion(_)
|
|
|
|
| ProcMacroSourceCode(_)
|
|
|
|
| CfgSpec(_)
|
|
|
|
| CliCrateAttr(_)
|
|
|
|
| Custom(_)
|
|
|
|
| QuoteExpansion(_)
|
2020-05-26 14:07:59 -05:00
|
|
|
| DocTest(_, _)
|
|
|
|
| InlineAsm(_) => false,
|
2017-12-14 01:09:19 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-10-30 09:11:24 -05:00
|
|
|
pub fn macro_expansion_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::MacroExpansion(hasher.finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn anon_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::Anon(hasher.finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn proc_macro_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::ProcMacroSourceCode(hasher.finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn cfg_spec_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::QuoteExpansion(hasher.finish())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn cli_crate_attr_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::CliCrateAttr(hasher.finish())
|
|
|
|
}
|
2018-12-04 14:18:03 -06:00
|
|
|
|
2019-12-22 16:42:04 -06:00
|
|
|
pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
|
2018-12-04 14:18:03 -06:00
|
|
|
FileName::DocTest(path, line)
|
|
|
|
}
|
2020-05-26 14:07:59 -05:00
|
|
|
|
|
|
|
pub fn inline_asm_source_code(src: &str) -> FileName {
|
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
src.hash(&mut hasher);
|
|
|
|
FileName::InlineAsm(hasher.finish())
|
|
|
|
}
|
2017-12-14 01:09:19 -06:00
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
/// Spans represent a region of code, used for error reporting. Positions in spans
|
2018-08-18 05:14:14 -05:00
|
|
|
/// are *absolute* positions from the beginning of the source_map, not positions
|
2018-11-26 20:59:49 -06:00
|
|
|
/// relative to `SourceFile`s. Methods on the `SourceMap` can be used to relate spans back
|
2016-06-21 17:08:13 -05:00
|
|
|
/// to the original source.
|
|
|
|
/// You must be careful if the span crosses more than one file - you will not be
|
2018-08-18 05:14:14 -05:00
|
|
|
/// able to use many of the functions on spans in source_map and you cannot assume
|
2018-11-26 20:59:49 -06:00
|
|
|
/// that the length of the `span = hi - lo`; there may be space in the `BytePos`
|
2016-06-21 17:08:13 -05:00
|
|
|
/// range between files.
|
2017-09-16 13:43:05 -05:00
|
|
|
///
|
|
|
|
/// `SpanData` is public because `Span` uses a thread-local interner and can't be
|
|
|
|
/// sent to other threads, but some pieces of performance infra run in a separate thread.
|
|
|
|
/// Using `Span` is generally preferred.
|
2016-10-26 01:24:09 -05:00
|
|
|
#[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct SpanData {
    /// The start of the span, as an absolute byte position from the
    /// beginning of the source map.
    pub lo: BytePos,
    /// The end of the span, as an absolute byte position from the
    /// beginning of the source map.
    pub hi: BytePos,
    /// Information about where the macro came from, if this piece of
    /// code was created by a macro expansion.
    pub ctxt: SyntaxContext,
}
|
|
|
|
|
2017-10-21 09:21:02 -05:00
|
|
|
impl SpanData {
    /// Returns a new `Span` with the start position replaced by `lo`.
    #[inline]
    pub fn with_lo(&self, lo: BytePos) -> Span {
        Span::new(lo, self.hi, self.ctxt)
    }
    /// Returns a new `Span` with the end position replaced by `hi`.
    #[inline]
    pub fn with_hi(&self, hi: BytePos) -> Span {
        Span::new(self.lo, hi, self.ctxt)
    }
    /// Returns a new `Span` with the hygiene context replaced by `ctxt`.
    #[inline]
    pub fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
        Span::new(self.lo, self.hi, ctxt)
    }
}
|
|
|
|
|
2017-12-03 07:37:23 -06:00
|
|
|
// The interner is pointed to by a thread local value which is only set on the main thread
|
2018-11-26 20:59:49 -06:00
|
|
|
// with parallelization is disabled. So we don't allow `Span` to transfer between threads
|
2017-12-03 07:37:23 -06:00
|
|
|
// to avoid panics and other errors, even though it would be memory safe to do so.
|
2019-01-28 08:51:47 -06:00
|
|
|
#[cfg(not(parallel_compiler))]
|
2017-09-16 13:43:05 -05:00
|
|
|
impl !Send for Span {}
|
2019-01-28 08:51:47 -06:00
|
|
|
#[cfg(not(parallel_compiler))]
|
2017-09-16 13:43:05 -05:00
|
|
|
impl !Sync for Span {}
|
|
|
|
|
|
|
|
impl PartialOrd for Span {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        // `Span` is a compact interned handle (see `span_encoding`), so order
        // is defined on the underlying `SpanData`, not the handle bits.
        PartialOrd::partial_cmp(&self.data(), &rhs.data())
    }
}
impl Ord for Span {
    fn cmp(&self, rhs: &Self) -> Ordering {
        // Must agree with `PartialOrd`: compare the underlying `SpanData`.
        Ord::cmp(&self.data(), &rhs.data())
    }
}
|
2017-07-31 15:04:34 -05:00
|
|
|
|
2020-06-28 17:38:39 -05:00
|
|
|
/// A collection of `Span`s.
|
|
|
|
///
|
|
|
|
/// Spans have two orthogonal attributes:
|
2016-06-21 17:08:13 -05:00
|
|
|
///
|
2018-11-26 20:59:49 -06:00
|
|
|
/// - They can be *primary spans*. In this case they are the locus of
|
2016-06-21 17:08:13 -05:00
|
|
|
/// the error, and would be rendered with `^^^`.
|
2018-11-26 20:59:49 -06:00
|
|
|
/// - They can have a *label*. In this case, the label is written next
|
2016-06-21 17:08:13 -05:00
|
|
|
/// to the mark in the snippet when we render.
|
2020-06-11 09:49:57 -05:00
|
|
|
#[derive(Clone, Debug, Hash, PartialEq, Eq, Encodable, Decodable)]
pub struct MultiSpan {
    // The primary spans: the locus of the error, rendered with `^^^`.
    primary_spans: Vec<Span>,
    // Labeled spans: the label is written next to the mark when rendered.
    span_labels: Vec<(Span, String)>,
}
|
|
|
|
|
|
|
|
impl Span {
|
2017-07-31 15:04:34 -05:00
|
|
|
    /// Returns the span's start position (absolute byte offset).
    #[inline]
    pub fn lo(self) -> BytePos {
        self.data().lo
    }
    /// Returns a copy of this span with the start position replaced by `lo`.
    #[inline]
    pub fn with_lo(self, lo: BytePos) -> Span {
        self.data().with_lo(lo)
    }
    /// Returns the span's end position (absolute byte offset).
    #[inline]
    pub fn hi(self) -> BytePos {
        self.data().hi
    }
    /// Returns a copy of this span with the end position replaced by `hi`.
    #[inline]
    pub fn with_hi(self, hi: BytePos) -> Span {
        self.data().with_hi(hi)
    }
    /// Returns the span's hygiene context.
    #[inline]
    pub fn ctxt(self) -> SyntaxContext {
        self.data().ctxt
    }
    /// Returns a copy of this span with the hygiene context replaced by `ctxt`.
    #[inline]
    pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
        self.data().with_ctxt(ctxt)
    }
|
|
|
|
|
2018-06-24 17:00:21 -05:00
|
|
|
    /// Returns `true` if this is a dummy span with any hygienic context.
    #[inline]
    pub fn is_dummy(self) -> bool {
        let span = self.data();
        // A dummy span has both positions at byte 0; `ctxt` is deliberately ignored.
        span.lo.0 == 0 && span.hi.0 == 0
    }

    /// Returns `true` if this span comes from a macro or desugaring.
    #[inline]
    pub fn from_expansion(self) -> bool {
        // Only code produced by some expansion carries a non-root context.
        self.ctxt() != SyntaxContext::root()
    }

    /// Returns `true` if `span` originates in a derive-macro's expansion.
    pub fn in_derive_expansion(self) -> bool {
        matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _))
    }
|
|
|
|
|
2019-08-10 17:44:55 -05:00
|
|
|
    /// Creates a span for the byte range `lo..hi` with the root
    /// (non-expansion) hygiene context.
    #[inline]
    pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
        Span::new(lo, hi, SyntaxContext::root())
    }

    /// Returns a new span representing an empty span at the beginning of this span.
    #[inline]
    pub fn shrink_to_lo(self) -> Span {
        let span = self.data();
        span.with_hi(span.lo)
    }
    /// Returns a new span representing an empty span at the end of this span.
    #[inline]
    pub fn shrink_to_hi(self) -> Span {
        let span = self.data();
        span.with_lo(span.hi)
    }
|
|
|
|
|
Adds two source span utility functions used in source-based coverage
`span.is_empty()` - returns true if `lo()` and `hi()` are equal. This is
not only a convenience, but makes it clear that a `Span` can be empty
(that is, retrieving the source for an empty `Span` will return an empty
string), and codifies the (otherwise undocumented--in the rustc_span
package, at least) fact that `Span` is a half-open interval (where
`hi()` is the open end).
`source_map.lookup_file_span()` - returns an enclosing `Span`
representing the start and end positions of the file enclosing the given
`BytePos`. This gives developers a clear way to quickly determine if
any other `BytePos` or `Span` is also from the same file (for example,
by simply calling `file_span.contains(span)`).
This results in much simpler code and is much more runtime efficient
compared with the obvious alternative: calling `source_map.lookup_line()`
for any two `Span`'s byte positions, handle both arms of the `Result`
(both contain the file), and then compare files. It is also more
efficient than the non-public method `lookup_source_file_idx()` for each
`BytePos`, because, while comparing the internal source file indexes
would be efficient, looking up the source file index for every `BytePos`
or `Span` to be compared requires a binary search (worst case
performance being O(log n) for every lookup).
`source_map.lookup_file_span()` performs the binary search only once, to
get the `file_span` result that can be used to compare to any number of
other `BytePos` or `Span` values and those comparisons are always O(1).
2020-08-27 15:58:01 -05:00
|
|
|
#[inline]
|
|
|
|
/// Returns true if hi == lo
|
|
|
|
pub fn is_empty(&self) -> bool {
|
|
|
|
let span = self.data();
|
|
|
|
span.hi == span.lo
|
|
|
|
}
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
    /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
    pub fn substitute_dummy(self, other: Span) -> Span {
        if self.is_dummy() { other } else { self }
    }

    /// Returns `true` if `self` fully encloses `other`.
    pub fn contains(self, other: Span) -> bool {
        let span = self.data();
        let other = other.data();
        // Enclosure is decided on byte positions only; hygiene contexts are not compared.
        span.lo <= other.lo && other.hi <= span.hi
    }
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns `true` if `self` touches `other`.
|
2018-12-20 18:52:52 -06:00
|
|
|
pub fn overlaps(self, other: Span) -> bool {
|
|
|
|
let span = self.data();
|
|
|
|
let other = other.data();
|
|
|
|
span.lo < other.hi && other.lo < span.hi
|
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
    /// Returns `true` if the spans are equal with regards to the source text.
    ///
    /// Use this instead of `==` when either span could be generated code,
    /// and you only care that they point to the same bytes of source text.
    pub fn source_equal(&self, other: &Span) -> bool {
        let span = self.data();
        let other = other.data();
        // Deliberately ignores `ctxt`, unlike `==` on `Span`.
        span.lo == other.lo && span.hi == other.hi
    }

    /// Returns `Some(span)`, where the start is trimmed by the end of `other`.
    pub fn trim_start(self, other: Span) -> Option<Span> {
        let span = self.data();
        let other = other.data();
        // `None` when `other` reaches (or passes) the end of `self`.
        if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None }
    }
|
2017-03-16 23:04:41 -05:00
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
    /// Returns the source span -- this is either the supplied span, or the span for
    /// the macro callsite that expanded to it.
    pub fn source_callsite(self) -> Span {
        let expn_data = self.ctxt().outer_expn_data();
        // Walk call sites recursively until reaching code not produced by expansion.
        if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self }
    }

    /// The `Span` for the tokens in the previous macro expansion from which `self` was generated,
    /// if any.
    pub fn parent(self) -> Option<Span> {
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(expn_data.call_site) } else { None }
    }

    /// Edition of the crate from which this span came.
    pub fn edition(self) -> edition::Edition {
        self.ctxt().outer_expn_data().edition
    }
|
|
|
|
|
2018-11-17 18:25:59 -06:00
|
|
|
    /// Returns `true` if this span's crate edition is exactly 2015.
    #[inline]
    pub fn rust_2015(&self) -> bool {
        self.edition() == edition::Edition::Edition2015
    }

    /// Returns `true` if this span's crate edition is 2018 *or later*
    /// (note `>=`, unlike `rust_2015`, which tests exact equality).
    #[inline]
    pub fn rust_2018(&self) -> bool {
        self.edition() >= edition::Edition::Edition2018
    }
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
    /// Returns the source callee.
    ///
    /// Returns `None` if the supplied span has no expansion trace,
    /// else returns the `ExpnData` for the macro definition
    /// corresponding to the source callsite.
    pub fn source_callee(self) -> Option<ExpnData> {
        // Follows `call_site` contexts outward; the last expansion before
        // reaching the root context is the "source callee".
        fn source_callee(expn_data: ExpnData) -> ExpnData {
            let next_expn_data = expn_data.call_site.ctxt().outer_expn_data();
            if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data }
        }
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None }
    }
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
    /// Checks if a span is "internal" to a macro in which `#[unstable]`
    /// items can be used (that is, a macro marked with
    /// `#[allow_internal_unstable]`).
    pub fn allows_unstable(&self, feature: Symbol) -> bool {
        // `allow_internal_unstable` is `None` unless the expansion opted in;
        // the backcompat-hack symbol matches regardless of the feature asked for.
        self.ctxt().outer_expn_data().allow_internal_unstable.map_or(false, |features| {
            features
                .iter()
                .any(|&f| f == feature || f == sym::allow_internal_unstable_backcompat_hack)
        })
    }
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Checks if this span arises from a compiler desugaring of kind `kind`.
|
2019-06-18 17:08:45 -05:00
|
|
|
pub fn is_desugaring(&self, kind: DesugaringKind) -> bool {
|
2019-08-13 15:56:42 -05:00
|
|
|
match self.ctxt().outer_expn_data().kind {
|
2019-08-10 19:00:05 -05:00
|
|
|
ExpnKind::Desugaring(k) => k == kind,
|
|
|
|
_ => false,
|
2017-08-12 19:43:43 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns the compiler desugaring that created this span, or `None`
|
2017-08-30 16:12:34 -05:00
|
|
|
/// if this span is not from a desugaring.
|
2019-06-18 17:08:45 -05:00
|
|
|
pub fn desugaring_kind(&self) -> Option<DesugaringKind> {
|
2019-08-13 15:56:42 -05:00
|
|
|
match self.ctxt().outer_expn_data().kind {
|
2019-08-10 19:00:05 -05:00
|
|
|
ExpnKind::Desugaring(k) => Some(k),
|
2019-12-22 16:42:04 -06:00
|
|
|
_ => None,
|
2017-08-30 16:12:34 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
    /// Checks if a span is "internal" to a macro in which `unsafe`
    /// can be used without triggering the `unsafe_code` lint
    /// (that is, a macro marked with `#[allow_internal_unsafe]`).
    pub fn allows_unsafe(&self) -> bool {
        self.ctxt().outer_expn_data().allow_internal_unsafe
    }
|
|
|
|
|
2020-01-20 17:46:53 -06:00
|
|
|
    /// Returns an iterator over the macro expansions that produced this span,
    /// innermost expansion first, skipping directly-recursive invocations.
    pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> {
        // Tracks the span whose call site we stepped from, to detect recursion.
        let mut prev_span = DUMMY_SP;
        std::iter::from_fn(move || {
            loop {
                let expn_data = self.ctxt().outer_expn_data();
                // Root context: we have walked out of all expansions.
                if expn_data.is_root() {
                    return None;
                }

                let is_recursive = expn_data.call_site.source_equal(&prev_span);

                // Step outward to the call site for the next iteration.
                prev_span = self;
                self = expn_data.call_site;

                // Don't print recursive invocations.
                if !is_recursive {
                    return Some(expn_data);
                }
            }
        })
    }
|
2017-03-14 19:22:48 -05:00
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns a `Span` that would enclose both `self` and `end`.
///
/// The resulting span runs from the smaller `lo` to the larger `hi` of
/// the two spans.
///
/// ```text
///     ____             ___
///     self lorem ipsum end
///     ^^^^^^^^^^^^^^^^^^^^
/// ```
pub fn to(self, end: Span) -> Span {
    let span_data = self.data();
    let end_data = end.data();
    // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480).
    // Return the macro span on its own to avoid weird diagnostic output. It is preferable to
    // have an incomplete span than a completely nonsensical one.
    if span_data.ctxt != end_data.ctxt {
        // If exactly one side is unexpanded (root context), prefer the
        // macro-expanded side unchanged.
        if span_data.ctxt == SyntaxContext::root() {
            return end;
        } else if end_data.ctxt == SyntaxContext::root() {
            return self;
        }
        // Both spans fall within a macro.
        // FIXME(estebank): check if it is the *same* macro.
    }
    Span::new(
        cmp::min(span_data.lo, end_data.lo),
        cmp::max(span_data.hi, end_data.hi),
        // Carry over the non-root context when `self` is unexpanded.
        if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt },
    )
}
|
2017-04-03 06:58:48 -05:00
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns a `Span` between the end of `self` to the beginning of `end`.
|
2020-09-13 07:48:15 -05:00
|
|
|
///
|
|
|
|
/// ```text
|
|
|
|
/// ____ ___
|
|
|
|
/// self lorem ipsum end
|
|
|
|
/// ^^^^^^^^^^^^^
|
|
|
|
/// ```
|
2017-04-03 06:58:48 -05:00
|
|
|
pub fn between(self, end: Span) -> Span {
|
2017-10-21 09:21:02 -05:00
|
|
|
let span = self.data();
|
|
|
|
let end = end.data();
|
2017-07-31 15:04:34 -05:00
|
|
|
Span::new(
|
2017-10-21 09:21:02 -05:00
|
|
|
span.hi,
|
|
|
|
end.lo,
|
2019-08-10 17:44:55 -05:00
|
|
|
if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
|
2017-07-31 15:04:34 -05:00
|
|
|
)
|
2017-04-03 06:58:48 -05:00
|
|
|
}
|
|
|
|
|
2020-09-13 07:48:15 -05:00
|
|
|
/// Returns a `Span` from the beginning of `self` until the beginning of `end`.
|
|
|
|
///
|
|
|
|
/// ```text
|
|
|
|
/// ____ ___
|
|
|
|
/// self lorem ipsum end
|
|
|
|
/// ^^^^^^^^^^^^^^^^^
|
|
|
|
/// ```
|
2017-04-03 06:58:48 -05:00
|
|
|
pub fn until(self, end: Span) -> Span {
|
2017-10-21 09:21:02 -05:00
|
|
|
let span = self.data();
|
|
|
|
let end = end.data();
|
2017-07-31 15:04:34 -05:00
|
|
|
Span::new(
|
2017-10-21 09:21:02 -05:00
|
|
|
span.lo,
|
|
|
|
end.lo,
|
2019-08-10 17:44:55 -05:00
|
|
|
if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
|
2017-07-31 15:04:34 -05:00
|
|
|
)
|
2017-04-03 06:58:48 -05:00
|
|
|
}
|
2018-03-17 18:57:23 -05:00
|
|
|
|
2019-06-04 10:03:43 -05:00
|
|
|
pub fn from_inner(self, inner: InnerSpan) -> Span {
|
2018-05-10 11:09:58 -05:00
|
|
|
let span = self.data();
|
2019-12-22 16:42:04 -06:00
|
|
|
Span::new(
|
|
|
|
span.lo + BytePos::from_usize(inner.start),
|
|
|
|
span.lo + BytePos::from_usize(inner.end),
|
|
|
|
span.ctxt,
|
|
|
|
)
|
2018-05-10 11:09:58 -05:00
|
|
|
}
|
|
|
|
|
2019-08-28 04:41:29 -05:00
|
|
|
/// Equivalent of `Span::def_site` from the proc macro API,
|
|
|
|
/// except that the location is taken from the `self` span.
|
|
|
|
pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span {
|
|
|
|
self.with_ctxt_from_mark(expn_id, Transparency::Opaque)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Equivalent of `Span::call_site` from the proc macro API,
|
|
|
|
/// except that the location is taken from the `self` span.
|
|
|
|
pub fn with_call_site_ctxt(&self, expn_id: ExpnId) -> Span {
|
|
|
|
self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
|
|
|
|
}
|
|
|
|
|
2019-09-22 10:38:02 -05:00
|
|
|
/// Equivalent of `Span::mixed_site` from the proc macro API,
|
|
|
|
/// except that the location is taken from the `self` span.
|
|
|
|
pub fn with_mixed_site_ctxt(&self, expn_id: ExpnId) -> Span {
|
|
|
|
self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent)
|
|
|
|
}
|
|
|
|
|
2019-08-21 13:28:22 -05:00
|
|
|
/// Produces a span with the same location as `self` and context produced by a macro with the
|
|
|
|
/// given ID and transparency, assuming that macro was defined directly and not produced by
|
|
|
|
/// some other macro (which is the case for built-in and procedural macros).
|
|
|
|
pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
|
2019-08-22 17:31:01 -05:00
|
|
|
self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency))
|
2019-08-21 13:28:22 -05:00
|
|
|
}
|
|
|
|
|
2018-03-17 18:57:23 -05:00
|
|
|
#[inline]
|
2019-08-22 17:31:01 -05:00
|
|
|
pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
|
2018-03-17 18:57:23 -05:00
|
|
|
let span = self.data();
|
2019-08-22 17:31:01 -05:00
|
|
|
span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency))
|
2018-03-17 18:57:23 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2019-07-15 17:04:05 -05:00
|
|
|
pub fn remove_mark(&mut self) -> ExpnId {
|
2018-03-17 18:57:23 -05:00
|
|
|
let mut span = self.data();
|
|
|
|
let mark = span.ctxt.remove_mark();
|
|
|
|
*self = Span::new(span.lo, span.hi, span.ctxt);
|
|
|
|
mark
|
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2019-07-15 17:42:58 -05:00
|
|
|
pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
|
2018-03-17 18:57:23 -05:00
|
|
|
let mut span = self.data();
|
2019-07-15 17:42:58 -05:00
|
|
|
let mark = span.ctxt.adjust(expn_id);
|
2018-03-17 18:57:23 -05:00
|
|
|
*self = Span::new(span.lo, span.hi, span.ctxt);
|
|
|
|
mark
|
|
|
|
}
|
|
|
|
|
2019-06-03 01:10:03 -05:00
|
|
|
#[inline]
|
2020-03-13 17:36:46 -05:00
|
|
|
pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
|
2019-06-03 01:10:03 -05:00
|
|
|
let mut span = self.data();
|
2020-03-13 17:36:46 -05:00
|
|
|
let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id);
|
2019-06-03 01:10:03 -05:00
|
|
|
*self = Span::new(span.lo, span.hi, span.ctxt);
|
|
|
|
mark
|
|
|
|
}
|
|
|
|
|
2018-03-17 18:57:23 -05:00
|
|
|
#[inline]
|
2019-07-15 17:42:58 -05:00
|
|
|
pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
|
2018-03-17 18:57:23 -05:00
|
|
|
let mut span = self.data();
|
2019-07-15 17:42:58 -05:00
|
|
|
let mark = span.ctxt.glob_adjust(expn_id, glob_span);
|
2018-03-17 18:57:23 -05:00
|
|
|
*self = Span::new(span.lo, span.hi, span.ctxt);
|
|
|
|
mark
|
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2019-12-22 16:42:04 -06:00
|
|
|
pub fn reverse_glob_adjust(
|
|
|
|
&mut self,
|
|
|
|
expn_id: ExpnId,
|
|
|
|
glob_span: Span,
|
|
|
|
) -> Option<Option<ExpnId>> {
|
2018-03-17 18:57:23 -05:00
|
|
|
let mut span = self.data();
|
2019-07-15 17:42:58 -05:00
|
|
|
let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span);
|
2018-03-17 18:57:23 -05:00
|
|
|
*self = Span::new(span.lo, span.hi, span.ctxt);
|
|
|
|
mark
|
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
2020-03-13 17:36:46 -05:00
|
|
|
pub fn normalize_to_macros_2_0(self) -> Span {
|
2018-03-17 18:57:23 -05:00
|
|
|
let span = self.data();
|
2020-03-13 17:36:46 -05:00
|
|
|
span.with_ctxt(span.ctxt.normalize_to_macros_2_0())
|
2018-03-17 18:57:23 -05:00
|
|
|
}
|
2018-06-24 11:54:23 -05:00
|
|
|
|
|
|
|
#[inline]
|
2020-03-13 17:36:46 -05:00
|
|
|
pub fn normalize_to_macro_rules(self) -> Span {
|
2018-06-24 11:54:23 -05:00
|
|
|
let span = self.data();
|
2020-03-13 17:36:46 -05:00
|
|
|
span.with_ctxt(span.ctxt.normalize_to_macro_rules())
|
2018-06-24 11:54:23 -05:00
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/// A span paired with the information needed to render it in a diagnostic
/// snippet.
#[derive(Clone, Debug)]
pub struct SpanLabel {
    /// The span we are going to include in the final snippet.
    pub span: Span,

    /// Is this a primary span? This is the "locus" of the message,
    /// and is indicated with a `^^^^` underline, versus `----`.
    pub is_primary: bool,

    /// What label should we attach to this span (if any)?
    pub label: Option<String>,
}
|
|
|
|
|
2017-07-23 12:50:56 -05:00
|
|
|
impl Default for Span {
    /// The default span is `DUMMY_SP`, i.e. a span pointing at nothing.
    fn default() -> Self {
        DUMMY_SP
    }
}
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<E: Encoder> Encodable<E> for Span {
    // `default fn` (min_specialization is enabled at the crate root) so
    // specialized encoders can override this implementation.
    default fn encode(&self, s: &mut E) -> Result<(), E::Error> {
        let span = self.data();
        s.emit_struct("Span", 2, |s| {
            // Only `lo` and `hi` are serialized; the syntax context is not
            // (the matching `Decodable` impl restores the root context).
            s.emit_struct_field("lo", 0, |s| span.lo.encode(s))?;
            s.emit_struct_field("hi", 1, |s| span.hi.encode(s))
        })
    }
}
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<D: Decoder> Decodable<D> for Span {
    // `default fn` (min_specialization) so specialized decoders can override.
    default fn decode(s: &mut D) -> Result<Span, D::Error> {
        s.read_struct("Span", 2, |d| {
            let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
            let hi = d.read_struct_field("hi", 1, Decodable::decode)?;

            // The syntax context is not serialized, so every decoded span
            // gets the root context.
            Ok(Span::new(lo, hi, SyntaxContext::root()))
        })
    }
}
|
2016-06-21 17:08:13 -05:00
|
|
|
|
2020-05-26 12:21:58 -05:00
|
|
|
/// Calls the provided closure, using the provided `SourceMap` to format
/// any spans that are debug-printed during the closure's execution.
///
/// Normally, the global `TyCtxt` is used to retrieve the `SourceMap`
/// (see `rustc_interface::callbacks::span_debug1`). However, some parts
/// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before
/// a `TyCtxt` is available. In this case, we fall back to
/// the `SourceMap` provided to this function. If that is not available,
/// we fall back to printing the raw `Span` field values.
pub fn with_source_map<T, F: FnOnce() -> T>(source_map: Lrc<SourceMap>, f: F) -> T {
    // Install the source map into the session globals for the duration of `f`.
    SESSION_GLOBALS.with(|session_globals| {
        *session_globals.source_map.borrow_mut() = Some(source_map);
    });
    // Drop guard that uninstalls the source map again, including on unwind.
    struct ClearSourceMap;
    impl Drop for ClearSourceMap {
        fn drop(&mut self) {
            SESSION_GLOBALS.with(|session_globals| {
                session_globals.source_map.borrow_mut().take();
            });
        }
    }

    let _guard = ClearSourceMap;
    f()
}
|
|
|
|
|
2020-06-04 22:05:28 -05:00
|
|
|
pub fn debug_with_source_map(
|
|
|
|
span: Span,
|
|
|
|
f: &mut fmt::Formatter<'_>,
|
|
|
|
source_map: &SourceMap,
|
|
|
|
) -> fmt::Result {
|
|
|
|
write!(f, "{} ({:?})", source_map.span_to_string(span), span.ctxt())
|
|
|
|
}
|
|
|
|
|
2019-02-03 12:42:27 -06:00
|
|
|
/// Fallback `Debug` formatter for `Span`: uses the session-global
/// `SourceMap` if one has been installed (see `with_source_map` in this
/// file), and otherwise prints the raw `lo`/`hi`/`ctxt` fields.
pub fn default_span_debug(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    SESSION_GLOBALS.with(|session_globals| {
        if let Some(source_map) = &*session_globals.source_map.borrow() {
            debug_with_source_map(span, f, source_map)
        } else {
            // No `SourceMap` available: fall back to the raw field values.
            f.debug_struct("Span")
                .field("lo", &span.lo())
                .field("hi", &span.hi())
                .field("ctxt", &span.ctxt())
                .finish()
        }
    })
}
|
|
|
|
|
|
|
|
impl fmt::Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Dispatch through the `SPAN_DEBUG` hook so a formatter with access
        // to a `SourceMap` can be installed elsewhere in the compiler.
        (*SPAN_DEBUG)(*self, f)
    }
}
|
|
|
|
|
2017-09-16 13:43:05 -05:00
|
|
|
impl fmt::Debug for SpanData {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Re-pack into a `Span` and reuse the `SPAN_DEBUG` hook so both
        // types render identically.
        (*SPAN_DEBUG)(Span::new(self.lo, self.hi, self.ctxt), f)
    }
}
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
impl MultiSpan {
|
2018-11-29 14:13:04 -06:00
|
|
|
#[inline]
|
2016-06-21 17:08:13 -05:00
|
|
|
pub fn new() -> MultiSpan {
|
2019-12-22 16:42:04 -06:00
|
|
|
MultiSpan { primary_spans: vec![], span_labels: vec![] }
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2016-07-28 12:33:31 -05:00
|
|
|
pub fn from_span(primary_span: Span) -> MultiSpan {
|
2019-12-22 16:42:04 -06:00
|
|
|
MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] }
|
2016-07-28 12:33:31 -05:00
|
|
|
}
|
|
|
|
|
2020-04-22 19:15:34 -05:00
|
|
|
pub fn from_spans(mut vec: Vec<Span>) -> MultiSpan {
|
|
|
|
vec.sort();
|
2019-12-22 16:42:04 -06:00
|
|
|
MultiSpan { primary_spans: vec, span_labels: vec![] }
|
2016-07-28 12:33:31 -05:00
|
|
|
}
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
pub fn push_span_label(&mut self, span: Span, label: String) {
|
|
|
|
self.span_labels.push((span, label));
|
|
|
|
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Selects the first primary span (if any).
|
2016-06-21 17:08:13 -05:00
|
|
|
pub fn primary_span(&self) -> Option<Span> {
|
|
|
|
self.primary_spans.first().cloned()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns all primary spans.
|
|
|
|
pub fn primary_spans(&self) -> &[Span] {
|
|
|
|
&self.primary_spans
|
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns `true` if any of the primary spans are displayable.
|
2018-11-25 15:05:06 -06:00
|
|
|
pub fn has_primary_spans(&self) -> bool {
|
2019-01-20 15:53:13 -06:00
|
|
|
self.primary_spans.iter().any(|sp| !sp.is_dummy())
|
2018-11-25 15:05:06 -06:00
|
|
|
}
|
|
|
|
|
2018-10-23 12:07:11 -05:00
|
|
|
/// Returns `true` if this contains only a dummy primary span with any hygienic context.
|
|
|
|
pub fn is_dummy(&self) -> bool {
|
|
|
|
let mut is_dummy = true;
|
|
|
|
for span in &self.primary_spans {
|
|
|
|
if !span.is_dummy() {
|
|
|
|
is_dummy = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
is_dummy
|
|
|
|
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Replaces all occurrences of one Span with another. Used to move `Span`s in areas that don't
|
2019-02-08 07:53:55 -06:00
|
|
|
/// display well (like std macros). Returns whether replacements occurred.
|
2016-08-17 09:20:04 -05:00
|
|
|
pub fn replace(&mut self, before: Span, after: Span) -> bool {
|
|
|
|
let mut replacements_occurred = false;
|
|
|
|
for primary_span in &mut self.primary_spans {
|
|
|
|
if *primary_span == before {
|
|
|
|
*primary_span = after;
|
|
|
|
replacements_occurred = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for span_label in &mut self.span_labels {
|
|
|
|
if span_label.0 == before {
|
|
|
|
span_label.0 = after;
|
|
|
|
replacements_occurred = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
replacements_occurred
|
|
|
|
}
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
/// Returns the strings to highlight. We always ensure that there
|
|
|
|
/// is an entry for each of the primary spans -- for each primary
|
2018-11-26 20:59:49 -06:00
|
|
|
/// span `P`, if there is at least one label with span `P`, we return
|
2016-06-21 17:08:13 -05:00
|
|
|
/// those labels (marked as primary). But otherwise we return
|
|
|
|
/// `SpanLabel` instances with empty labels.
|
|
|
|
pub fn span_labels(&self) -> Vec<SpanLabel> {
|
|
|
|
let is_primary = |span| self.primary_spans.contains(&span);
|
|
|
|
|
2019-12-22 16:42:04 -06:00
|
|
|
let mut span_labels = self
|
|
|
|
.span_labels
|
|
|
|
.iter()
|
|
|
|
.map(|&(span, ref label)| SpanLabel {
|
2017-08-07 00:54:09 -05:00
|
|
|
span,
|
2016-06-21 17:08:13 -05:00
|
|
|
is_primary: is_primary(span),
|
2019-12-22 16:42:04 -06:00
|
|
|
label: Some(label.clone()),
|
|
|
|
})
|
|
|
|
.collect::<Vec<_>>();
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
for &span in &self.primary_spans {
|
|
|
|
if !span_labels.iter().any(|sl| sl.span == span) {
|
2019-12-22 16:42:04 -06:00
|
|
|
span_labels.push(SpanLabel { span, is_primary: true, label: None });
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
span_labels
|
|
|
|
}
|
2018-11-25 15:05:06 -06:00
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Returns `true` if any of the span labels is displayable.
|
2018-11-25 15:05:06 -06:00
|
|
|
pub fn has_span_labels(&self) -> bool {
|
2019-01-20 15:53:13 -06:00
|
|
|
self.span_labels.iter().any(|(sp, _)| !sp.is_dummy())
|
2018-11-25 15:05:06 -06:00
|
|
|
}
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
impl From<Span> for MultiSpan {
    /// A single span converts into a `MultiSpan` with that span as the
    /// sole primary span.
    fn from(span: Span) -> MultiSpan {
        MultiSpan::from_span(span)
    }
}
|
|
|
|
|
2017-09-09 04:07:31 -05:00
|
|
|
impl From<Vec<Span>> for MultiSpan {
    /// A list of spans converts into a `MultiSpan` whose primary spans are
    /// the (sorted) list.
    fn from(spans: Vec<Span>) -> MultiSpan {
        MultiSpan::from_spans(spans)
    }
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {
    /// The absolute offset of the character in the `SourceMap`.
    pub pos: BytePos,
    /// The number of bytes, `>= 2`.
    pub bytes: u8,
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// Identifies an offset of a non-narrow character in a `SourceFile`.
/// "Non-narrow" characters occupy a display width other than one column.
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
pub enum NonNarrowChar {
    /// Represents a zero-width character.
    ZeroWidth(BytePos),
    /// Represents a wide (full-width) character.
    Wide(BytePos),
    /// Represents a tab character, represented visually with a width of 4 characters.
    Tab(BytePos),
}
|
|
|
|
|
|
|
|
impl NonNarrowChar {
    /// Builds a `NonNarrowChar` from its position and display width.
    /// `width` must be 0 (zero-width), 2 (wide), or 4 (tab); any other
    /// value is a caller bug and panics.
    fn new(pos: BytePos, width: usize) -> Self {
        match width {
            0 => NonNarrowChar::ZeroWidth(pos),
            2 => NonNarrowChar::Wide(pos),
            4 => NonNarrowChar::Tab(pos),
            _ => panic!("width {} given for non-narrow character", width),
        }
    }

    /// Returns the absolute offset of the character in the `SourceMap`.
    pub fn pos(&self) -> BytePos {
        match *self {
            NonNarrowChar::ZeroWidth(p) | NonNarrowChar::Wide(p) | NonNarrowChar::Tab(p) => p,
        }
    }

    /// Returns the width of the character: 0 (zero-width), 2 (wide), or 4 (tab).
    pub fn width(&self) -> usize {
        match *self {
            NonNarrowChar::ZeroWidth(_) => 0,
            NonNarrowChar::Wide(_) => 2,
            NonNarrowChar::Tab(_) => 4,
        }
    }
}
|
|
|
|
|
|
|
|
impl Add<BytePos> for NonNarrowChar {
|
|
|
|
type Output = Self;
|
|
|
|
|
|
|
|
fn add(self, rhs: BytePos) -> Self {
|
|
|
|
match self {
|
|
|
|
NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs),
|
|
|
|
NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs),
|
2017-11-13 00:06:00 -06:00
|
|
|
NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs),
|
2017-11-01 20:25:54 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Sub<BytePos> for NonNarrowChar {
|
|
|
|
type Output = Self;
|
|
|
|
|
|
|
|
fn sub(self, rhs: BytePos) -> Self {
|
|
|
|
match self {
|
|
|
|
NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs),
|
|
|
|
NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs),
|
2017-11-13 00:06:00 -06:00
|
|
|
NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs),
|
2017-11-01 20:25:54 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-02 19:55:31 -05:00
|
|
|
/// Identifies an offset of a character that was normalized away from `SourceFile`.
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
pub struct NormalizedPos {
    /// The absolute offset of the character in the `SourceMap`.
    pub pos: BytePos,
    /// The difference between original and normalized string at position.
    pub diff: u32,
}
|
|
|
|
|
2020-02-07 13:02:24 -06:00
|
|
|
/// Where (if anywhere) a `SourceFile`'s text must be loaded from.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum ExternalSource {
    /// No external source has to be loaded, since the `SourceFile` represents a local crate.
    Unneeded,
    /// The source lives in another crate and may need to be loaded lazily.
    Foreign {
        kind: ExternalSourceKind,
        /// This `SourceFile`'s byte-offset within the source map of its original crate.
        original_start_pos: BytePos,
        /// The end of this `SourceFile` within the source map of its original crate.
        original_end_pos: BytePos,
    },
}
|
|
|
|
|
|
|
|
/// The state of the lazy external source loading mechanism of a `SourceFile`.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum ExternalSourceKind {
    /// The external source has been loaded already.
    Present(Lrc<String>),
    /// No attempt has been made to load the external source.
    AbsentOk,
    /// A failed attempt has been made to load the external source.
    AbsentErr,
    /// No source needs to be loaded for this file.
    Unneeded,
}
|
|
|
|
|
2017-06-11 03:19:46 -05:00
|
|
|
impl ExternalSource {
|
|
|
|
pub fn is_absent(&self) -> bool {
|
2020-02-07 13:02:24 -06:00
|
|
|
match self {
|
|
|
|
ExternalSource::Foreign { kind: ExternalSourceKind::Present(_), .. } => false,
|
2017-06-11 03:19:46 -05:00
|
|
|
_ => true,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-20 01:00:06 -05:00
|
|
|
pub fn get_source(&self) -> Option<&Lrc<String>> {
|
2020-02-07 13:02:24 -06:00
|
|
|
match self {
|
|
|
|
ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src),
|
2017-06-11 03:19:46 -05:00
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-17 03:35:26 -05:00
|
|
|
/// Zero-sized error type signaling that an offset computation overflowed.
#[derive(Debug)]
pub struct OffsetOverflowError;
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
/// The hashing algorithm used to produce a `SourceFileHash`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
pub enum SourceFileHashAlgorithm {
    Md5,
    Sha1,
}
|
|
|
|
|
|
|
|
impl FromStr for SourceFileHashAlgorithm {
|
|
|
|
type Err = ();
|
|
|
|
|
|
|
|
fn from_str(s: &str) -> Result<SourceFileHashAlgorithm, ()> {
|
|
|
|
match s {
|
|
|
|
"md5" => Ok(SourceFileHashAlgorithm::Md5),
|
|
|
|
"sha1" => Ok(SourceFileHashAlgorithm::Sha1),
|
|
|
|
_ => Err(()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Stable-hash `SourceFileHashAlgorithm` by delegating to its `Hash` impl.
rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm);
|
|
|
|
|
|
|
|
/// The hash of the on-disk source file used for debug info.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[derive(HashStable_Generic, Encodable, Decodable)]
pub struct SourceFileHash {
    /// Which algorithm produced `value`.
    pub kind: SourceFileHashAlgorithm,
    // Fixed-size buffer sized for the largest supported digest (SHA-1,
    // 20 bytes); only the first `hash_len()` bytes are meaningful.
    value: [u8; 20],
}
|
|
|
|
|
|
|
|
impl SourceFileHash {
    /// Hashes `src` with the given algorithm. Only the first `hash_len()`
    /// bytes of `value` are written; the remainder stays zeroed.
    pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash {
        let mut hash = SourceFileHash { kind, value: Default::default() };
        let len = hash.hash_len();
        let value = &mut hash.value[..len];
        let data = src.as_bytes();
        match kind {
            SourceFileHashAlgorithm::Md5 => {
                value.copy_from_slice(&Md5::digest(data));
            }
            SourceFileHashAlgorithm::Sha1 => {
                value.copy_from_slice(&Sha1::digest(data));
            }
        }
        hash
    }

    /// Check if the stored hash matches the hash of the string.
    pub fn matches(&self, src: &str) -> bool {
        Self::new(self.kind, src) == *self
    }

    /// The bytes of the hash.
    pub fn hash_bytes(&self) -> &[u8] {
        let len = self.hash_len();
        &self.value[..len]
    }

    /// Digest length in bytes for the stored algorithm.
    fn hash_len(&self) -> usize {
        match self.kind {
            SourceFileHashAlgorithm::Md5 => 16,
            SourceFileHashAlgorithm::Sha1 => 20,
        }
    }
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// A single source in the `SourceMap`.
#[derive(Clone)]
pub struct SourceFile {
    /// The name of the file that the source came from. Source that doesn't
    /// originate from files has names between angle brackets by convention
    /// (e.g., `<anon>`).
    pub name: FileName,
    /// `true` if the `name` field above has been modified by `--remap-path-prefix`.
    pub name_was_remapped: bool,
    /// The unmapped path of the file that the source came from.
    /// Set to `None` if the `SourceFile` was imported from an external crate.
    pub unmapped_path: Option<FileName>,
    /// The complete source code.
    pub src: Option<Lrc<String>>,
    /// The source code's hash.
    pub src_hash: SourceFileHash,
    /// The external source code (used for external crates, which will have a `None`
    /// value as `self.src`).
    pub external_src: Lock<ExternalSource>,
    /// The start position of this source in the `SourceMap`.
    pub start_pos: BytePos,
    /// The end position of this source in the `SourceMap`.
    pub end_pos: BytePos,
    /// Locations of lines beginnings in the source code.
    pub lines: Vec<BytePos>,
    /// Locations of multi-byte characters in the source code.
    pub multibyte_chars: Vec<MultiByteChar>,
    /// Width of characters that are not narrow in the source code.
    pub non_narrow_chars: Vec<NonNarrowChar>,
    /// Locations of characters removed during normalization.
    pub normalized_pos: Vec<NormalizedPos>,
    /// A hash of the filename, used for speeding up hashing in incremental compilation.
    pub name_hash: u128,
    /// Indicates which crate this `SourceFile` was imported from.
    pub cnum: CrateNum,
}
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<S: Encoder> Encodable<S> for SourceFile {
    fn encode(&self, s: &mut S) -> Result<(), S::Error> {
        // NOTE(review): the declared field count (8) is out of sync with the
        // 11 fields actually emitted below — presumably harmless because the
        // encoders used ignore the count; confirm before relying on it.
        s.emit_struct("SourceFile", 8, |s| {
            s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
            s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?;
            s.emit_struct_field("src_hash", 2, |s| self.src_hash.encode(s))?;
            s.emit_struct_field("start_pos", 3, |s| self.start_pos.encode(s))?;
            s.emit_struct_field("end_pos", 4, |s| self.end_pos.encode(s))?;
            // The line table is delta-compressed: length, then bytes-per-diff,
            // then the first absolute position, then per-line differences.
            s.emit_struct_field("lines", 5, |s| {
                let lines = &self.lines[..];
                // Store the length.
                s.emit_u32(lines.len() as u32)?;

                if !lines.is_empty() {
                    // In order to preserve some space, we exploit the fact that
                    // the lines list is sorted and individual lines are
                    // probably not that long. Because of that we can store lines
                    // as a difference list, using as little space as possible
                    // for the differences.
                    let max_line_length = if lines.len() == 1 {
                        0
                    } else {
                        lines
                            .array_windows()
                            .map(|&[fst, snd]| snd - fst)
                            .map(|bp| bp.to_usize())
                            .max()
                            .unwrap()
                    };

                    // Pick the smallest integer width that can hold every diff.
                    let bytes_per_diff: u8 = match max_line_length {
                        0..=0xFF => 1,
                        0x100..=0xFFFF => 2,
                        _ => 4,
                    };

                    // Encode the number of bytes used per diff.
                    bytes_per_diff.encode(s)?;

                    // Encode the first element.
                    lines[0].encode(s)?;

                    let diff_iter = lines[..].array_windows().map(|&[fst, snd]| snd - fst);

                    match bytes_per_diff {
                        1 => {
                            for diff in diff_iter {
                                (diff.0 as u8).encode(s)?
                            }
                        }
                        2 => {
                            for diff in diff_iter {
                                (diff.0 as u16).encode(s)?
                            }
                        }
                        4 => {
                            for diff in diff_iter {
                                diff.0.encode(s)?
                            }
                        }
                        _ => unreachable!(),
                    }
                }

                Ok(())
            })?;
            s.emit_struct_field("multibyte_chars", 6, |s| self.multibyte_chars.encode(s))?;
            s.emit_struct_field("non_narrow_chars", 7, |s| self.non_narrow_chars.encode(s))?;
            s.emit_struct_field("name_hash", 8, |s| self.name_hash.encode(s))?;
            s.emit_struct_field("normalized_pos", 9, |s| self.normalized_pos.encode(s))?;
            s.emit_struct_field("cnum", 10, |s| self.cnum.encode(s))
        })
    }
}
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<D: Decoder> Decodable<D> for SourceFile {
|
|
|
|
fn decode(d: &mut D) -> Result<SourceFile, D::Error> {
|
2018-08-18 05:13:52 -05:00
|
|
|
d.read_struct("SourceFile", 8, |d| {
|
2017-12-14 01:09:19 -06:00
|
|
|
let name: FileName = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
|
2017-04-24 12:01:19 -05:00
|
|
|
let name_was_remapped: bool =
|
|
|
|
d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?;
|
2020-03-31 00:17:15 -05:00
|
|
|
let src_hash: SourceFileHash =
|
|
|
|
d.read_struct_field("src_hash", 2, |d| Decodable::decode(d))?;
|
2017-06-10 06:39:39 -05:00
|
|
|
let start_pos: BytePos =
|
2019-09-29 19:06:00 -05:00
|
|
|
d.read_struct_field("start_pos", 3, |d| Decodable::decode(d))?;
|
|
|
|
let end_pos: BytePos = d.read_struct_field("end_pos", 4, |d| Decodable::decode(d))?;
|
|
|
|
let lines: Vec<BytePos> = d.read_struct_field("lines", 5, |d| {
|
2016-06-21 17:08:13 -05:00
|
|
|
let num_lines: u32 = Decodable::decode(d)?;
|
|
|
|
let mut lines = Vec::with_capacity(num_lines as usize);
|
|
|
|
|
|
|
|
if num_lines > 0 {
|
|
|
|
// Read the number of bytes used per diff.
|
|
|
|
let bytes_per_diff: u8 = Decodable::decode(d)?;
|
|
|
|
|
|
|
|
// Read the first element.
|
|
|
|
let mut line_start: BytePos = Decodable::decode(d)?;
|
|
|
|
lines.push(line_start);
|
|
|
|
|
|
|
|
for _ in 1..num_lines {
|
|
|
|
let diff = match bytes_per_diff {
|
|
|
|
1 => d.read_u8()? as u32,
|
|
|
|
2 => d.read_u16()? as u32,
|
|
|
|
4 => d.read_u32()?,
|
2019-12-22 16:42:04 -06:00
|
|
|
_ => unreachable!(),
|
2016-06-21 17:08:13 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
line_start = line_start + BytePos(diff);
|
|
|
|
|
|
|
|
lines.push(line_start);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(lines)
|
|
|
|
})?;
|
|
|
|
let multibyte_chars: Vec<MultiByteChar> =
|
2019-09-29 19:06:00 -05:00
|
|
|
d.read_struct_field("multibyte_chars", 6, |d| Decodable::decode(d))?;
|
2017-11-01 20:25:54 -05:00
|
|
|
let non_narrow_chars: Vec<NonNarrowChar> =
|
2019-09-29 19:06:00 -05:00
|
|
|
d.read_struct_field("non_narrow_chars", 7, |d| Decodable::decode(d))?;
|
2019-12-22 16:42:04 -06:00
|
|
|
let name_hash: u128 = d.read_struct_field("name_hash", 8, |d| Decodable::decode(d))?;
|
2019-10-02 19:55:31 -05:00
|
|
|
let normalized_pos: Vec<NormalizedPos> =
|
|
|
|
d.read_struct_field("normalized_pos", 9, |d| Decodable::decode(d))?;
|
2020-02-07 13:02:24 -06:00
|
|
|
let cnum: CrateNum = d.read_struct_field("cnum", 10, |d| Decodable::decode(d))?;
|
2018-08-18 05:13:52 -05:00
|
|
|
Ok(SourceFile {
|
2017-08-07 00:54:09 -05:00
|
|
|
name,
|
|
|
|
name_was_remapped,
|
2017-10-03 04:44:58 -05:00
|
|
|
unmapped_path: None,
|
2017-08-07 00:54:09 -05:00
|
|
|
start_pos,
|
|
|
|
end_pos,
|
2016-06-21 17:08:13 -05:00
|
|
|
src: None,
|
2017-08-07 00:54:09 -05:00
|
|
|
src_hash,
|
2020-02-07 13:02:24 -06:00
|
|
|
// Unused - the metadata decoder will construct
|
|
|
|
// a new SourceFile, filling in `external_src` properly
|
|
|
|
external_src: Lock::new(ExternalSource::Unneeded),
|
2018-05-23 08:59:42 -05:00
|
|
|
lines,
|
|
|
|
multibyte_chars,
|
|
|
|
non_narrow_chars,
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos,
|
2017-12-19 08:14:41 -06:00
|
|
|
name_hash,
|
2020-02-07 13:02:24 -06:00
|
|
|
cnum,
|
2016-06-21 17:08:13 -05:00
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-18 05:13:52 -05:00
|
|
|
impl fmt::Debug for SourceFile {
|
2019-02-03 12:42:27 -06:00
|
|
|
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
2018-08-18 05:13:52 -05:00
|
|
|
write!(fmt, "SourceFile({})", self.name)
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-18 05:13:52 -05:00
|
|
|
impl SourceFile {
    /// Creates a `SourceFile` for a local (in-memory) source string.
    ///
    /// Hashes the *unnormalized* source, then normalizes it (BOM removal,
    /// `\r\n` -> `\n`), analyzes it for line starts / multibyte chars /
    /// non-narrow chars, and records everything alongside the text.
    pub fn new(
        name: FileName,
        name_was_remapped: bool,
        unmapped_path: FileName,
        mut src: String,
        start_pos: BytePos,
        hash_kind: SourceFileHashAlgorithm,
    ) -> Self {
        // Compute the file hash before any normalization.
        let src_hash = SourceFileHash::new(hash_kind, &src);
        let normalized_pos = normalize_src(&mut src, start_pos);

        let name_hash = {
            let mut hasher: StableHasher = StableHasher::new();
            name.hash(&mut hasher);
            hasher.finish::<u128>()
        };
        let end_pos = start_pos.to_usize() + src.len();
        // Positions are stored as `u32`, so the file must end within that range.
        assert!(end_pos <= u32::MAX as usize);

        let (lines, multibyte_chars, non_narrow_chars) =
            analyze_source_file::analyze_source_file(&src[..], start_pos);

        SourceFile {
            name,
            name_was_remapped,
            unmapped_path: Some(unmapped_path),
            src: Some(Lrc::new(src)),
            src_hash,
            external_src: Lock::new(ExternalSource::Unneeded),
            start_pos,
            end_pos: Pos::from_usize(end_pos),
            lines,
            multibyte_chars,
            non_narrow_chars,
            normalized_pos,
            name_hash,
            cnum: LOCAL_CRATE,
        }
    }

    /// Returns the `BytePos` of the beginning of the current line.
    ///
    /// Panics (via `unwrap`) if `pos` precedes the first line of the file.
    pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
        let line_index = self.lookup_line(pos).unwrap();
        self.lines[line_index]
    }

    /// Add externally loaded source.
    /// If the hash of the input doesn't match or no input is supplied via None,
    /// it is interpreted as an error and the corresponding enum variant is set.
    /// The return value signifies whether some kind of source is present.
    pub fn add_external_src<F>(&self, get_src: F) -> bool
    where
        F: FnOnce() -> Option<String>,
    {
        // Only attempt to load if the source is foreign and known to be absent
        // (but loadable); otherwise just report whether any source is available.
        if matches!(
            *self.external_src.borrow(),
            ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. }
        ) {
            // Call `get_src` *before* re-borrowing mutably: it may take time,
            // and another thread could fill the source in the meantime.
            let src = get_src();
            let mut external_src = self.external_src.borrow_mut();
            // Check that no-one else has provided the source while we were getting it.
            if let ExternalSource::Foreign {
                kind: src_kind @ ExternalSourceKind::AbsentOk, ..
            } = &mut *external_src
            {
                if let Some(mut src) = src {
                    // The src_hash needs to be computed on the pre-normalized src.
                    if self.src_hash.matches(&src) {
                        normalize_src(&mut src, BytePos::from_usize(0));
                        *src_kind = ExternalSourceKind::Present(Lrc::new(src));
                        return true;
                    }
                } else {
                    // No source was supplied: record the failure.
                    *src_kind = ExternalSourceKind::AbsentErr;
                }

                false
            } else {
                // Someone else filled it in while we were fetching.
                self.src.is_some() || external_src.get_source().is_some()
            }
        } else {
            self.src.is_some() || self.external_src.borrow().get_source().is_some()
        }
    }

    /// Gets a line from the list of pre-computed line-beginnings.
    /// The line number here is 0-based.
    ///
    /// Returns `None` if the line index is out of range or no source text
    /// (local or external) is available.
    pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
        // Slice from `begin` up to (not including) the next '\n', or to EOF.
        fn get_until_newline(src: &str, begin: usize) -> &str {
            // We can't use `lines.get(line_number+1)` because we might
            // be parsing when we call this function and thus the current
            // line is the last one we have line info for.
            let slice = &src[begin..];
            match slice.find('\n') {
                Some(e) => &slice[..e],
                None => slice,
            }
        }

        let begin = {
            let line = self.lines.get(line_number)?;
            // `lines` stores absolute positions; convert to a file-relative offset.
            let begin: BytePos = *line - self.start_pos;
            begin.to_usize()
        };

        if let Some(ref src) = self.src {
            Some(Cow::from(get_until_newline(src, begin)))
        } else if let Some(src) = self.external_src.borrow().get_source() {
            // External source is behind a borrow, so the line must be copied out.
            Some(Cow::Owned(String::from(get_until_newline(src, begin))))
        } else {
            None
        }
    }

    /// Returns `true` if the file name refers to a real on-disk file.
    pub fn is_real_file(&self) -> bool {
        self.name.is_real()
    }

    /// Returns `true` if this file was imported (has no locally stored source).
    pub fn is_imported(&self) -> bool {
        self.src.is_none()
    }

    /// Length of the file in bytes.
    pub fn byte_length(&self) -> u32 {
        self.end_pos.0 - self.start_pos.0
    }
    /// Number of lines in the file.
    pub fn count_lines(&self) -> usize {
        self.lines.len()
    }

    /// Finds the line containing the given position. The return value is the
    /// index into the `lines` array of this `SourceFile`, not the 1-based line
    /// number. If the source_file is empty or the position is located before the
    /// first line, `None` is returned.
    pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
        if self.lines.is_empty() {
            return None;
        }

        let line_index = lookup_line(&self.lines[..], pos);
        assert!(line_index < self.lines.len() as isize);
        // `lookup_line` returns -1 when `pos` precedes the first line.
        if line_index >= 0 { Some(line_index as usize) } else { None }
    }

    /// Returns the `[start, end)` byte positions of the given line
    /// (0-based index). The end bound is the start of the next line, or
    /// `end_pos` for the last line.
    pub fn line_bounds(&self, line_index: usize) -> (BytePos, BytePos) {
        // Degenerate empty file: one shared bound.
        if self.start_pos == self.end_pos {
            return (self.start_pos, self.end_pos);
        }

        assert!(line_index < self.lines.len());
        if line_index == (self.lines.len() - 1) {
            (self.lines[line_index], self.end_pos)
        } else {
            (self.lines[line_index], self.lines[line_index + 1])
        }
    }

    /// Returns `true` if `byte_pos` lies within this file's span
    /// (note: `end_pos` is included).
    #[inline]
    pub fn contains(&self, byte_pos: BytePos) -> bool {
        byte_pos >= self.start_pos && byte_pos <= self.end_pos
    }

    /// Calculates the original byte position relative to the start of the file
    /// based on the given byte position.
    pub fn original_relative_byte_pos(&self, pos: BytePos) -> BytePos {
        // Diff before any records is 0. Otherwise use the previously recorded
        // diff as that applies to the following characters until a new diff
        // is recorded.
        let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) {
            Ok(i) => self.normalized_pos[i].diff,
            Err(i) if i == 0 => 0,
            Err(i) => self.normalized_pos[i - 1].diff,
        };

        BytePos::from_u32(pos.0 - self.start_pos.0 + diff)
    }

    /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`.
    pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
        // The number of extra bytes due to multibyte chars in the `SourceFile`.
        let mut total_extra_bytes = 0;

        // `multibyte_chars` is ordered by position, so we can stop at the
        // first entry at or past `bpos`.
        for mbc in self.multibyte_chars.iter() {
            debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
            if mbc.pos < bpos {
                // Every character is at least one byte, so we only
                // count the actual extra bytes.
                total_extra_bytes += mbc.bytes as u32 - 1;
                // We should never see a byte position in the middle of a
                // character.
                assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
            } else {
                break;
            }
        }

        assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
        CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize)
    }

    /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a
    /// given `BytePos`.
    ///
    /// When the position precedes the first line, returns line 0 with the
    /// absolute char offset as the "column".
    pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) {
        let chpos = self.bytepos_to_file_charpos(pos);
        match self.lookup_line(pos) {
            Some(a) => {
                let line = a + 1; // Line numbers start at 1
                let linebpos = self.lines[a];
                let linechpos = self.bytepos_to_file_charpos(linebpos);
                let col = chpos - linechpos;
                debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
                debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos);
                debug!("byte is on line: {}", line);
                assert!(chpos >= linechpos);
                (line, col)
            }
            None => (0, chpos),
        }
    }

    /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based)
    /// column offset when displayed, for a given `BytePos`.
    ///
    /// The display column adjusts for non-narrow characters (each contributes
    /// its `width()` instead of 1).
    pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) {
        let (line, col_or_chpos) = self.lookup_file_pos(pos);
        if line > 0 {
            let col = col_or_chpos;
            let linebpos = self.lines[line - 1];
            let col_display = {
                // Count non-narrow chars between the line start and `pos`:
                // subtract one column per such char, then add its real width.
                let start_width_idx = self
                    .non_narrow_chars
                    .binary_search_by_key(&linebpos, |x| x.pos())
                    .unwrap_or_else(|x| x);
                let end_width_idx = self
                    .non_narrow_chars
                    .binary_search_by_key(&pos, |x| x.pos())
                    .unwrap_or_else(|x| x);
                let special_chars = end_width_idx - start_width_idx;
                let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx]
                    .iter()
                    .map(|x| x.width())
                    .sum();
                col.0 - special_chars + non_narrow
            };
            (line, col, col_display)
        } else {
            // `pos` precedes the first line; adjust from the start of the file.
            let chpos = col_or_chpos;
            let col_display = {
                let end_width_idx = self
                    .non_narrow_chars
                    .binary_search_by_key(&pos, |x| x.pos())
                    .unwrap_or_else(|x| x);
                let non_narrow: usize =
                    self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum();
                chpos.0 - end_width_idx + non_narrow
            };
            (0, chpos, col_display)
        }
    }
}
|
|
|
|
|
|
|
|
/// Normalizes the source code and records the normalizations.
|
|
|
|
fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec<NormalizedPos> {
|
|
|
|
let mut normalized_pos = vec![];
|
|
|
|
remove_bom(src, &mut normalized_pos);
|
|
|
|
normalize_newlines(src, &mut normalized_pos);
|
|
|
|
|
|
|
|
// Offset all the positions by start_pos to match the final file positions.
|
|
|
|
for np in &mut normalized_pos {
|
|
|
|
np.pos.0 += start_pos.0;
|
|
|
|
}
|
|
|
|
|
|
|
|
normalized_pos
|
2016-06-21 17:08:13 -05:00
|
|
|
}
|
|
|
|
|
2019-02-08 07:53:55 -06:00
|
|
|
/// Removes UTF-8 BOM, if any.
|
2019-10-02 19:55:31 -05:00
|
|
|
fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
|
2017-06-10 09:09:51 -05:00
|
|
|
if src.starts_with("\u{feff}") {
|
|
|
|
src.drain(..3);
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 });
|
2017-06-10 09:09:51 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-14 07:35:12 -05:00
|
|
|
/// Replaces `\r\n` with `\n` in-place in `src`.
|
|
|
|
///
|
|
|
|
/// Returns error if there's a lone `\r` in the string
|
2019-10-02 19:55:31 -05:00
|
|
|
fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
|
2019-08-14 07:35:12 -05:00
|
|
|
if !src.as_bytes().contains(&b'\r') {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
|
|
|
|
// While we *can* call `as_mut_vec` and do surgery on the live string
|
|
|
|
// directly, let's rather steal the contents of `src`. This makes the code
|
|
|
|
// safe even if a panic occurs.
|
|
|
|
|
|
|
|
let mut buf = std::mem::replace(src, String::new()).into_bytes();
|
|
|
|
let mut gap_len = 0;
|
|
|
|
let mut tail = buf.as_mut_slice();
|
2019-10-02 19:55:31 -05:00
|
|
|
let mut cursor = 0;
|
|
|
|
let original_gap = normalized_pos.last().map_or(0, |l| l.diff);
|
2019-08-14 07:35:12 -05:00
|
|
|
loop {
|
|
|
|
let idx = match find_crlf(&tail[gap_len..]) {
|
|
|
|
None => tail.len(),
|
|
|
|
Some(idx) => idx + gap_len,
|
|
|
|
};
|
|
|
|
tail.copy_within(gap_len..idx, 0);
|
|
|
|
tail = &mut tail[idx - gap_len..];
|
|
|
|
if tail.len() == gap_len {
|
|
|
|
break;
|
|
|
|
}
|
2019-10-02 19:55:31 -05:00
|
|
|
cursor += idx - gap_len;
|
2019-08-14 07:35:12 -05:00
|
|
|
gap_len += 1;
|
2019-10-02 19:55:31 -05:00
|
|
|
normalized_pos.push(NormalizedPos {
|
|
|
|
pos: BytePos::from_usize(cursor + 1),
|
|
|
|
diff: original_gap + gap_len as u32,
|
|
|
|
});
|
2019-08-14 07:35:12 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// Account for removed `\r`.
|
|
|
|
// After `set_len`, `buf` is guaranteed to contain utf-8 again.
|
|
|
|
let new_len = buf.len() - gap_len;
|
|
|
|
unsafe {
|
|
|
|
buf.set_len(new_len);
|
|
|
|
*src = String::from_utf8_unchecked(buf);
|
|
|
|
}
|
|
|
|
|
|
|
|
fn find_crlf(src: &[u8]) -> Option<usize> {
|
|
|
|
let mut search_idx = 0;
|
|
|
|
while let Some(idx) = find_cr(&src[search_idx..]) {
|
|
|
|
if src[search_idx..].get(idx + 1) != Some(&b'\n') {
|
|
|
|
search_idx += idx + 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
return Some(search_idx + idx);
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
|
|
|
|
|
|
|
fn find_cr(src: &[u8]) -> Option<usize> {
|
|
|
|
src.iter().position(|&b| b == b'\r')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-21 17:08:13 -05:00
|
|
|
// _____________________________________________________________________________
|
|
|
|
// Pos, BytePos, CharPos
|
|
|
|
//
|
|
|
|
|
|
|
|
/// A position type that can be converted to/from `usize` and `u32`.
/// Implemented for `BytePos` and `CharPos` via the `impl_pos!` macro below.
pub trait Pos {
    /// Builds a position from a `usize` offset.
    fn from_usize(n: usize) -> Self;
    /// Returns the position as a `usize` offset.
    fn to_usize(&self) -> usize;
    /// Builds a position from a `u32` offset.
    fn from_u32(n: u32) -> Self;
    /// Returns the position as a `u32` offset.
    fn to_u32(&self) -> u32;
}
|
|
|
|
|
2020-09-21 12:27:43 -05:00
|
|
|
// Generates a newtype position struct plus its `Pos`, `Add`, and `Sub`
// impls. Each input item is of the form `$vis struct Name($vis inner_ty);`
// with optional attributes (derives, doc comments) carried through.
macro_rules! impl_pos {
    (
        $(
            $(#[$attr:meta])*
            $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty);
        )*
    ) => {
        $(
            $(#[$attr])*
            $vis struct $ident($inner_vis $inner_ty);

            impl Pos for $ident {
                #[inline(always)]
                fn from_usize(n: usize) -> $ident {
                    $ident(n as $inner_ty)
                }

                #[inline(always)]
                fn to_usize(&self) -> usize {
                    self.0 as usize
                }

                #[inline(always)]
                fn from_u32(n: u32) -> $ident {
                    $ident(n as $inner_ty)
                }

                #[inline(always)]
                fn to_u32(&self) -> u32 {
                    self.0 as u32
                }
            }

            impl Add for $ident {
                type Output = $ident;

                #[inline(always)]
                fn add(self, rhs: $ident) -> $ident {
                    $ident(self.0 + rhs.0)
                }
            }

            impl Sub for $ident {
                type Output = $ident;

                #[inline(always)]
                fn sub(self, rhs: $ident) -> $ident {
                    $ident(self.0 - rhs.0)
                }
            }
        )*
    };
}
|
|
|
|
|
2020-09-21 12:27:43 -05:00
|
|
|
impl_pos! {
    /// A byte offset. Keep this small (currently 32-bits), as AST contains
    /// a lot of them.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
    pub struct BytePos(pub u32);

    /// A character offset. Because of multibyte UTF-8 characters, a byte offset
    /// is not equivalent to a character offset. The `SourceMap` will convert `BytePos`
    /// values to `CharPos` values as necessary.
    // Note: `CharPos` uses `usize`, unlike `BytePos`, so it does not derive `Hash`.
    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    pub struct CharPos(pub usize);
}
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<S: rustc_serialize::Encoder> Encodable<S> for BytePos {
|
|
|
|
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
|
2016-06-21 17:08:13 -05:00
|
|
|
s.emit_u32(self.0)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-11 09:49:57 -05:00
|
|
|
impl<D: rustc_serialize::Decoder> Decodable<D> for BytePos {
|
|
|
|
fn decode(d: &mut D) -> Result<BytePos, D::Error> {
|
2016-06-21 17:08:13 -05:00
|
|
|
Ok(BytePos(d.read_u32()?))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// _____________________________________________________________________________
|
2019-04-05 14:42:40 -05:00
|
|
|
// Loc, SourceFileAndLine, SourceFileAndBytePos
|
2016-06-21 17:08:13 -05:00
|
|
|
//
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
/// A source code location used for error reporting.
#[derive(Debug, Clone)]
pub struct Loc {
    /// Information about the original source.
    pub file: Lrc<SourceFile>,
    /// The (1-based) line number.
    pub line: usize,
    /// The (0-based) column offset, in characters.
    pub col: CharPos,
    /// The (0-based) column offset when displayed (accounts for
    /// non-narrow characters).
    pub col_display: usize,
}
|
|
|
|
|
2018-11-26 20:59:49 -06:00
|
|
|
// Used to be structural records.
/// A `SourceFile` paired with a (0-based) line index into it.
#[derive(Debug)]
pub struct SourceFileAndLine {
    pub sf: Lrc<SourceFile>,
    pub line: usize,
}
/// A `SourceFile` paired with a byte position into it.
#[derive(Debug)]
pub struct SourceFileAndBytePos {
    pub sf: Lrc<SourceFile>,
    pub pos: BytePos,
}
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
/// A line of a file touched by a span, with the span's column range
/// within that line.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineInfo {
    /// Index of line, starting from 0.
    pub line_index: usize,

    /// Column in line where span begins, starting from 0.
    pub start_col: CharPos,

    /// Column in line where span ends, starting from 0, exclusive.
    pub end_col: CharPos,
}
|
|
|
|
|
|
|
|
/// The set of lines (with column ranges) covered by a span, all within
/// a single `SourceFile`.
pub struct FileLines {
    pub file: Lrc<SourceFile>,
    pub lines: Vec<LineInfo>,
}
|
|
|
|
|
2019-12-25 12:38:57 -06:00
|
|
|
// Swappable hook used by `Span`'s `Debug` impl; starts out pointing at
// `default_span_debug` and can be replaced at runtime via `AtomicRef`.
pub static SPAN_DEBUG: AtomicRef<fn(Span, &mut fmt::Formatter<'_>) -> fmt::Result> =
    AtomicRef::new(&(default_span_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
|
2016-06-21 17:08:13 -05:00
|
|
|
|
|
|
|
// _____________________________________________________________________________
|
2018-10-29 15:26:13 -05:00
|
|
|
// SpanLinesError, SpanSnippetError, DistinctSources, MalformedSourceMapPositions
|
2016-06-21 17:08:13 -05:00
|
|
|
//
|
|
|
|
|
|
|
|
/// Result of looking up the lines covered by a span.
pub type FileLinesResult = Result<FileLines, SpanLinesError>;

/// Failure modes when computing the lines of a span.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum SpanLinesError {
    /// The span's endpoints lie in different source files.
    DistinctSources(DistinctSources),
}

/// Failure modes when extracting the source text of a span.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum SpanSnippetError {
    IllFormedSpan(Span),
    DistinctSources(DistinctSources),
    MalformedForSourcemap(MalformedSourceMapPositions),
    SourceNotAvailable { filename: FileName },
}

/// The two (file, position) endpoints of a span that straddles files.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DistinctSources {
    pub begin: (FileName, BytePos),
    pub end: (FileName, BytePos),
}

/// Span positions that fall outside the bounds of their source file.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MalformedSourceMapPositions {
    pub name: FileName,
    pub source_len: usize,
    pub begin_pos: BytePos,
    pub end_pos: BytePos,
}
|
|
|
|
|
2019-07-29 23:43:54 -05:00
|
|
|
/// Range inside of a `Span` used for diagnostics when we only have access to relative positions.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct InnerSpan {
    pub start: usize,
    pub end: usize,
}

impl InnerSpan {
    /// Creates a relative range covering `start..end`.
    pub fn new(start: usize, end: usize) -> InnerSpan {
        Self { start, end }
    }
}
|
|
|
|
|
2016-08-24 16:06:31 -05:00
|
|
|
// Given a slice of line start positions and a position, returns the index of
|
|
|
|
// the line the position is on. Returns -1 if the position is located before
|
|
|
|
// the first line.
|
|
|
|
fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
|
|
|
|
match lines.binary_search(&pos) {
|
|
|
|
Ok(line) => line as isize,
|
2019-12-22 16:42:04 -06:00
|
|
|
Err(line) => line as isize - 1,
|
2016-08-24 16:06:31 -05:00
|
|
|
}
|
|
|
|
}
|
2019-11-10 10:19:08 -06:00
|
|
|
|
|
|
|
/// Requirements for a `StableHashingContext` to be used in this crate.
/// This is a hack to allow using the `HashStable_Generic` derive macro
/// instead of implementing everything in librustc_middle.
pub trait HashStableContext {
    /// Feeds a `DefId` into the stable hasher.
    fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher);
    /// Feeds a `CrateNum` into the stable hasher.
    fn hash_crate_num(&mut self, _: CrateNum, hasher: &mut StableHasher);
    /// Whether spans should be hashed at all (see `HashStable for Span` below).
    fn hash_spans(&self) -> bool;
    /// Resolves a byte position to its source file, (0-based) line index,
    /// and the line's starting position.
    fn byte_pos_to_line_and_col(
        &mut self,
        byte: BytePos,
    ) -> Option<(Lrc<SourceFile>, usize, BytePos)>;
}
|
|
|
|
|
|
|
|
impl<CTX> HashStable<CTX> for Span
where
    CTX: HashStableContext,
{
    /// Hashes a span in a stable way. We can't directly hash the span's `BytePos`
    /// fields (that would be similar to hashing pointers, since those are just
    /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
    /// triple, which stays the same even if the containing `SourceFile` has moved
    /// within the `SourceMap`.
    /// Also note that we are hashing byte offsets for the column, not unicode
    /// codepoint offsets. For the purpose of the hash that's sufficient.
    /// Also, hashing filenames is expensive so we avoid doing it twice when the
    /// span starts and ends in the same file, which is almost always the case.
    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        // Tag emitted first so valid and invalid spans can't collide.
        const TAG_VALID_SPAN: u8 = 0;
        const TAG_INVALID_SPAN: u8 = 1;

        // The context may opt out of span hashing entirely.
        if !ctx.hash_spans() {
            return;
        }

        if *self == DUMMY_SP {
            Hash::hash(&TAG_INVALID_SPAN, hasher);
            return;
        }

        // If this is not an empty or invalid span, we want to hash the last
        // position that belongs to it, as opposed to hashing the first
        // position past it.
        let span = self.data();
        let (file_lo, line_lo, col_lo) = match ctx.byte_pos_to_line_and_col(span.lo) {
            Some(pos) => pos,
            None => {
                // Unresolvable start position: hash as invalid (plus context).
                Hash::hash(&TAG_INVALID_SPAN, hasher);
                span.ctxt.hash_stable(ctx, hasher);
                return;
            }
        };

        // A span whose end lies outside the start's file is also invalid.
        if !file_lo.contains(span.hi) {
            Hash::hash(&TAG_INVALID_SPAN, hasher);
            span.ctxt.hash_stable(ctx, hasher);
            return;
        }

        Hash::hash(&TAG_VALID_SPAN, hasher);
        // We truncate the stable ID hash and line and column numbers. The chances
        // of causing a collision this way should be minimal.
        Hash::hash(&(file_lo.name_hash as u64), hasher);

        // Pack column (8 bits), line (24 bits), and length (32 bits) into one u64.
        let col = (col_lo.0 as u64) & 0xFF;
        let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
        let len = ((span.hi - span.lo).0 as u64) << 32;
        let line_col_len = col | line | len;
        Hash::hash(&line_col_len, hasher);
        span.ctxt.hash_stable(ctx, hasher);
    }
}
|
2019-11-23 07:39:00 -06:00
|
|
|
|
2020-03-17 10:45:02 -05:00
|
|
|
impl<CTX: HashStableContext> HashStable<CTX> for SyntaxContext {
|
|
|
|
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
|
|
|
|
const TAG_EXPANSION: u8 = 0;
|
|
|
|
const TAG_NO_EXPANSION: u8 = 1;
|
|
|
|
|
|
|
|
if *self == SyntaxContext::root() {
|
2019-11-23 07:39:00 -06:00
|
|
|
TAG_NO_EXPANSION.hash_stable(ctx, hasher);
|
|
|
|
} else {
|
|
|
|
TAG_EXPANSION.hash_stable(ctx, hasher);
|
2020-08-04 02:16:01 -05:00
|
|
|
let (expn_id, transparency) = self.outer_mark();
|
|
|
|
expn_id.hash_stable(ctx, hasher);
|
|
|
|
transparency.hash_stable(ctx, hasher);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-11-23 07:39:00 -06:00
|
|
|
|
2020-08-04 02:16:01 -05:00
|
|
|
impl<CTX: HashStableContext> HashStable<CTX> for ExpnId {
    /// Stable-hashes an `ExpnId` by hashing a fingerprint of its `ExpnData`,
    /// memoized per thread so repeatedly-hashed expansions are cheap.
    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        // Since the same expansion context is usually referenced many
        // times, we cache a stable hash of it and hash that instead of
        // recursing every time.
        //
        // The cache is indexed by the `ExpnId`'s raw `u32`; a slot is `None`
        // until that expansion's fingerprint has been computed.
        thread_local! {
            static CACHE: RefCell<Vec<Option<Fingerprint>>> = Default::default();
        }

        // One-byte tag so the root expansion can never collide with a
        // non-root one in the hash stream.
        const TAG_ROOT: u8 = 0;
        const TAG_NOT_ROOT: u8 = 1;

        if *self == ExpnId::root() {
            TAG_ROOT.hash_stable(ctx, hasher);
            return;
        }

        TAG_NOT_ROOT.hash_stable(ctx, hasher);
        let index = self.as_u32() as usize;
        // Look up the memoized fingerprint. The borrow is released before
        // any further work so the recursive hashing below can re-enter
        // `CACHE` (e.g. when `expn_data()` transitively hashes parent
        // expansion ids) without a `RefCell` double-borrow panic.
        let res = CACHE.with(|cache| cache.borrow().get(index).copied().flatten());

        if let Some(res) = res {
            res.hash_stable(ctx, hasher);
        } else {
            let new_len = index + 1;
            // Compute the fingerprint of this expansion's data with a fresh
            // sub-hasher; note this may recursively hash other `ExpnId`s.
            let mut sub_hasher = StableHasher::new();
            self.expn_data().hash_stable(ctx, &mut sub_hasher);
            let sub_hash: Fingerprint = sub_hasher.finish();

            CACHE.with(|cache| {
                let mut cache = cache.borrow_mut();
                // Grow the dense vector just enough to hold this slot.
                if cache.len() < new_len {
                    cache.resize(new_len, None);
                }
                // Invariant check: no other computation should have filled
                // this slot in the meantime (`expect_none` panics if the
                // slot already held a fingerprint).
                cache[index].replace(sub_hash).expect_none("Cache slot was filled");
            });
            sub_hash.hash_stable(ctx, hasher);
        }
    }
}
|