//! Assorted testing utilities.
//!
//! Most notable things are:
//!
//! * Rich text comparison, which outputs a diff.
//! * Extracting markup (mainly, `<|>` markers) out of fixture strings.
//! * marks (see the eponymous module).

#[macro_use]
pub mod marks;

use std::{
    fs,
    path::{Path, PathBuf},
};

use serde_json::Value;
use text_unit::{TextRange, TextUnit};

pub use difference::Changeset as __Changeset;

pub const CURSOR_MARKER: &str = "<|>";

/// Asserts that two strings are equal, otherwise displays a rich diff between them.
///
/// The diff shows changes from the "original" left string to the "actual" right string.
///
/// All arguments starting from and including the 3rd one are passed to
/// the `eprintln!()` macro in case of text inequality.
#[macro_export]
macro_rules! assert_eq_text {
    ($left:expr, $right:expr) => {
        assert_eq_text!($left, $right,)
    };
    ($left:expr, $right:expr, $($tt:tt)*) => {{
        let left = $left;
        let right = $right;
        if left != right {
            if left.trim() == right.trim() {
                eprintln!("Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n", left, right);
            } else {
                let changeset = $crate::__Changeset::new(right, left, "\n");
                eprintln!("Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n", left, right, changeset);
            }
            eprintln!($($tt)*);
            panic!("text differs");
        }
    }};
}
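
// A small usage sketch: with equal inputs the macro is silent; on mismatch it
// prints a line diff (plus the trailing format arguments) and panics.
#[test]
fn assert_eq_text_accepts_equal_input() {
    assert_eq_text!("fn f() {}\n", "fn f() {}\n", "context: {}", "smoke test");
}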

/// Infallible version of `try_extract_offset()`.
pub fn extract_offset(text: &str) -> (TextUnit, String) {
    match try_extract_offset(text) {
        None => panic!("text should contain cursor marker"),
        Some(result) => result,
    }
}

/// Returns the offset of the first occurrence of the `<|>` marker and a copy
/// of `text` without the marker.
fn try_extract_offset(text: &str) -> Option<(TextUnit, String)> {
    let cursor_pos = text.find(CURSOR_MARKER)?;
    let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len());
    new_text.push_str(&text[..cursor_pos]);
    new_text.push_str(&text[cursor_pos + CURSOR_MARKER.len()..]);
    let cursor_pos = TextUnit::from(cursor_pos as u32);
    Some((cursor_pos, new_text))
}
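
// A minimal sanity check sketching the intended use of `extract_offset`;
// the literal input and expected offset below are illustrative.
#[test]
fn extract_offset_works() {
    let (offset, text) = extract_offset("fn f() {<|>}");
    assert_eq!(offset, TextUnit::from(8u32));
    assert_eq!(text, "fn f() {}");
}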

/// Infallible version of `try_extract_range()`.
pub fn extract_range(text: &str) -> (TextRange, String) {
    match try_extract_range(text) {
        None => panic!("text should contain cursor marker"),
        Some(result) => result,
    }
}

/// Returns the `TextRange` between the first two markers `<|>...<|>` and a copy
/// of `text` without both of these markers.
fn try_extract_range(text: &str) -> Option<(TextRange, String)> {
    let (start, text) = try_extract_offset(text)?;
    let (end, text) = try_extract_offset(&text)?;
    Some((TextRange::from_to(start, end), text))
}
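
// A short sketch of the two-marker form; the snippet and offsets are
// illustrative.
#[test]
fn extract_range_works() {
    let (range, text) = extract_range("fn f() {<|>92<|>}");
    assert_eq!(range, TextRange::from_to(TextUnit::from(8u32), TextUnit::from(10u32)));
    assert_eq!(text, "fn f() {92}");
}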

#[derive(Clone, Copy)]
pub enum RangeOrOffset {
    Range(TextRange),
    Offset(TextUnit),
}

impl From<RangeOrOffset> for TextRange {
    fn from(selection: RangeOrOffset) -> Self {
        match selection {
            RangeOrOffset::Range(it) => it,
            RangeOrOffset::Offset(it) => TextRange::from_to(it, it),
        }
    }
}

/// Extracts `TextRange` or `TextUnit` depending on the number of `<|>` markers
/// found in `text`.
///
/// # Panics
/// Panics if no `<|>` marker is present in the `text`.
pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
    if let Some((range, text)) = try_extract_range(text) {
        return (RangeOrOffset::Range(range), text);
    }
    let (offset, text) = extract_offset(text);
    (RangeOrOffset::Offset(offset), text)
}
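
// Illustrative sketch: one marker yields an `Offset`, two markers yield a
// `Range`; the `From` impl above collapses both into a `TextRange`.
#[test]
fn extract_range_or_offset_works() {
    let (sel, text) = extract_range_or_offset("foo <|>bar");
    assert_eq!(text, "foo bar");
    assert_eq!(TextRange::from(sel), TextRange::from_to(TextUnit::from(4u32), TextUnit::from(4u32)));

    let (sel, text) = extract_range_or_offset("foo <|>ba<|>r");
    assert_eq!(text, "foo bar");
    assert_eq!(TextRange::from(sel), TextRange::from_to(TextUnit::from(4u32), TextUnit::from(6u32)));
}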

/// Extracts ranges marked with `<tag>...</tag>` pairs from `text`, returning
/// both the ranges and the text with the tags removed.
pub fn extract_ranges(mut text: &str, tag: &str) -> (Vec<TextRange>, String) {
    let open = format!("<{}>", tag);
    let close = format!("</{}>", tag);
    let mut ranges = Vec::new();
    let mut res = String::new();
    let mut stack = Vec::new();
    loop {
        match text.find('<') {
            None => {
                res.push_str(text);
                break;
            }
            Some(i) => {
                res.push_str(&text[..i]);
                text = &text[i..];
                if text.starts_with(&open) {
                    text = &text[open.len()..];
                    let from = TextUnit::of_str(&res);
                    stack.push(from);
                } else if text.starts_with(&close) {
                    text = &text[close.len()..];
                    let from = stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
                    let to = TextUnit::of_str(&res);
                    ranges.push(TextRange::from_to(from, to));
                }
            }
        }
    }
    assert!(stack.is_empty(), "unmatched <{}>", tag);
    ranges.sort_by_key(|r| (r.start(), r.end()));
    (ranges, res)
}
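
// Illustrative sketch of tag extraction; the tag name "tag" and the input
// snippet are arbitrary.
#[test]
fn extract_ranges_works() {
    let (ranges, text) = extract_ranges("fn <tag>main</tag>() {}", "tag");
    assert_eq!(text, "fn main() {}");
    assert_eq!(ranges, vec![TextRange::from_to(TextUnit::from(3u32), TextUnit::from(7u32))]);
}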

/// Inserts a `<|>` marker into `text` at `offset`.
pub fn add_cursor(text: &str, offset: TextUnit) -> String {
    let offset: usize = offset.to_usize();
    let mut res = String::new();
    res.push_str(&text[..offset]);
    res.push_str("<|>");
    res.push_str(&text[offset..]);
    res
}
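
// Round-trip sketch: `add_cursor` is the inverse of `extract_offset`.
#[test]
fn add_cursor_roundtrips_with_extract_offset() {
    let (offset, text) = extract_offset("foo <|>bar");
    assert_eq!(add_cursor(&text, offset), "foo <|>bar");
}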

#[derive(Debug)]
pub struct FixtureEntry {
    pub meta: String,
    pub text: String,
}

/// Parses text which looks like this:
///
/// ```not_rust
/// //- some meta
/// line 1
/// line 2
/// //- other meta
/// ```
pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> {
    let margin = fixture
        .lines()
        .filter(|it| it.trim_start().starts_with("//-"))
        .map(|it| it.len() - it.trim_start().len())
        .next()
        .expect("empty fixture");

    let mut lines = fixture
        .split('\n') // don't use `.lines()`: it would strip the `\r` from `\r\n`
        .filter_map(|line| {
            if line.len() >= margin {
                assert!(line[..margin].trim().is_empty());
                Some(&line[margin..])
            } else {
                assert!(line.trim().is_empty());
                None
            }
        });

    let mut res: Vec<FixtureEntry> = Vec::new();
    for line in lines.by_ref() {
        if line.starts_with("//-") {
            let meta = line["//-".len()..].trim().to_string();
            res.push(FixtureEntry { meta, text: String::new() })
        } else if let Some(entry) = res.last_mut() {
            entry.text.push_str(line);
            entry.text.push('\n');
        }
    }
    res
}
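
// A minimal sketch of the fixture format in action; the file names in the
// meta lines are made up. The common 8-space margin is stripped.
#[test]
fn parse_fixture_works() {
    let fixture = "
        //- /main.rs
        fn main() {}
        //- /lib.rs
        struct S;
    ";
    let entries = parse_fixture(fixture);
    assert_eq!(entries.len(), 2);
    assert_eq!(entries[0].meta, "/main.rs");
    assert_eq!(entries[0].text, "fn main() {}\n");
    assert_eq!(entries[1].meta, "/lib.rs");
    assert_eq!(entries[1].text, "struct S;\n");
}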

/// Same as `parse_fixture`, except that it allows a fixture without any `//-`
/// meta comments, returning `None` in that case.
pub fn parse_single_fixture(fixture: &str) -> Option<FixtureEntry> {
    if !fixture.lines().any(|it| it.trim_start().starts_with("//-")) {
        return None;
    }

    let fixtures = parse_fixture(fixture);
    if fixtures.len() > 1 {
        panic!("too many fixtures");
    }
    fixtures.into_iter().next()
}
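
// Sketch of the fallible behaviour: no `//-` meta line means `None`.
#[test]
fn parse_single_fixture_works() {
    assert!(parse_single_fixture("fn main() {}").is_none());
    let entry = parse_single_fixture("//- /main.rs\nfn main() {}").unwrap();
    assert_eq!(entry.meta, "/main.rs");
    assert_eq!(entry.text, "fn main() {}\n");
}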

// Comparison functionality borrowed from cargo:

/// Compare a line with an expected pattern.
/// - Use `[..]` as a wildcard to match 0 or more characters on the same line
///   (similar to `.*` in a regex).
pub fn lines_match(expected: &str, actual: &str) -> bool {
    // Let's not deal with / vs \ (windows...)
    // First replace backslash-escaped backslashes with forward slashes
    // which can occur in, for example, JSON output
    let expected = expected.replace(r"\\", "/").replace(r"\", "/");
    let mut actual: &str = &actual.replace(r"\\", "/").replace(r"\", "/");
    for (i, part) in expected.split("[..]").enumerate() {
        match actual.find(part) {
            Some(j) => {
                if i == 0 && j != 0 {
                    return false;
                }
                actual = &actual[j + part.len()..];
            }
            None => return false,
        }
    }
    actual.is_empty() || expected.ends_with("[..]")
}

#[test]
fn lines_match_works() {
    assert!(lines_match("a b", "a b"));
    assert!(lines_match("a[..]b", "a b"));
    assert!(lines_match("a[..]", "a b"));
    assert!(lines_match("[..]", "a b"));
    assert!(lines_match("[..]b", "a b"));

    assert!(!lines_match("[..]b", "c"));
    assert!(!lines_match("b", "c"));
    assert!(!lines_match("b", "cb"));
}

/// Compares JSON objects for approximate equality.
///
/// You can use the `[..]` wildcard in strings (useful for OS-dependent things
/// such as paths). You can use a `"{...}"` string literal as a wildcard for
/// arbitrary nested JSON. Arrays are compared without regard to the order of
/// their elements.
pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
    use serde_json::Value::*;
    match (expected, actual) {
        (&Number(ref l), &Number(ref r)) if l == r => None,
        (&Bool(l), &Bool(r)) if l == r => None,
        (&String(ref l), &String(ref r)) if lines_match(l, r) => None,
        (&Array(ref l), &Array(ref r)) => {
            if l.len() != r.len() {
                return Some((expected, actual));
            }

            let mut l = l.iter().collect::<Vec<_>>();
            let mut r = r.iter().collect::<Vec<_>>();

            l.retain(|l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
                Some(i) => {
                    r.remove(i);
                    false
                }
                None => true,
            });

            if !l.is_empty() {
                assert!(!r.is_empty());
                Some((&l[0], &r[0]))
            } else {
                assert_eq!(r.len(), 0);
                None
            }
        }
        (&Object(ref l), &Object(ref r)) => {
            let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
            if !same_keys {
                return Some((expected, actual));
            }

            l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).next()
        }
        (&Null, &Null) => None,
        // magic string literal "{...}" acts as wildcard for any sub-JSON
        (&String(ref l), _) if l == "{...}" => None,
        _ => Some((expected, actual)),
    }
}
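
// Sketch of the wildcard semantics: `[..]` inside strings and the `"{...}"`
// literal both act as wildcards; a real difference is reported as a pair.
// The JSON values here are made up for illustration.
#[test]
fn find_mismatch_works() {
    let expected = serde_json::json!({ "path": "[..]/foo.rs", "rest": "{...}" });
    let actual = serde_json::json!({ "path": "/tmp/foo.rs", "rest": { "anything": 92 } });
    assert!(find_mismatch(&expected, &actual).is_none());

    let expected = serde_json::json!({ "value": 1 });
    let actual = serde_json::json!({ "value": 2 });
    assert!(find_mismatch(&expected, &actual).is_some());
}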

/// Calls the callback `f` with the input code and file path for each `.rs`
/// file in the `test_data_dir` subdirectories defined by `paths`.
///
/// If the content of the matching output file differs from the output of `f()`,
/// the test will fail.
///
/// If there is no matching output file, it will be created and filled with the
/// output of `f()`, but the test will fail.
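///
/// A hypothetical invocation (the data directory, the sub-paths, and the
/// checking closure below are all made up for illustration):
///
/// ```ignore
/// dir_tests(&project_dir().join("crates/foo/test_data"), &["ok", "err"], "txt", |code, path| {
///     format!("{} bytes in {}", code.len(), path.display())
/// });
/// ```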
pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
where
    F: Fn(&str, &Path) -> String,
{
    for (path, input_code) in collect_rust_files(test_data_dir, paths) {
        let actual = f(&input_code, &path);
        let path = path.with_extension(outfile_extension);
        if !path.exists() {
            println!("\nfile: {}", path.display());
            println!("No .txt file with expected result, creating...\n");
            println!("{}\n{}", input_code, actual);
            fs::write(&path, &actual).unwrap();
            panic!("No expected result");
        }
        let expected = read_text(&path);
        assert_equal_text(&expected, &actual, &path);
    }
}

/// Collects all `.rs` files from `root_dir` subdirectories defined by `paths`.
pub fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
    paths
        .iter()
        .flat_map(|path| {
            let path = root_dir.to_owned().join(path);
            rust_files_in_dir(&path).into_iter()
        })
        .map(|path| {
            let text = read_text(&path);
            (path, text)
        })
        .collect()
}

/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
    let mut acc = Vec::new();
    for file in fs::read_dir(&dir).unwrap() {
        let file = file.unwrap();
        let path = file.path();
        if path.extension().unwrap_or_default() == "rs" {
            acc.push(path);
        }
    }
    acc.sort();
    acc
}

/// Returns the path to the root directory of the `rust-analyzer` project.
pub fn project_dir() -> PathBuf {
    let dir = env!("CARGO_MANIFEST_DIR");
    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
}

/// Reads the file and normalizes newlines.
///
/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
///
/// ```
/// let s = "
/// ";
/// assert_eq!(s.as_bytes(), &[10]);
/// ```
///
/// so this should always be correct.
pub fn read_text(path: &Path) -> String {
    fs::read_to_string(path)
        .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
        .replace("\r\n", "\n")
}

/// Returns `true` if slow tests should be skipped. Otherwise returns `false`
/// and also creates a file at `./target/.slow_tests_cookie` which serves as a
/// flag that slow tests did run.
pub fn skip_slow_tests() -> bool {
    let should_skip = std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err();
    if should_skip {
        eprintln!("ignoring slow test")
    } else {
        let path = project_dir().join("./target/.slow_tests_cookie");
        fs::write(&path, ".").unwrap();
    }
    should_skip
}

const REWRITE: bool = false;

/// Asserts that `expected` and `actual` strings are equal. If they differ only
/// in trailing or leading whitespace the test won't fail and
/// the contents of `actual` will be written to the file located at `path`.
fn assert_equal_text(expected: &str, actual: &str, path: &Path) {
    if expected == actual {
        return;
    }
    let dir = project_dir();
    let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path);
    if expected.trim() == actual.trim() {
        println!("whitespace difference, rewriting");
        println!("file: {}\n", pretty_path.display());
        fs::write(path, actual).unwrap();
        return;
    }
    if REWRITE {
        println!("rewriting {}", pretty_path.display());
        fs::write(path, actual).unwrap();
        return;
    }
    assert_eq_text!(expected, actual, "file: {}", pretty_path.display());
}