// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#[doc(hidden)];

// Support code for rustc's built in test runner generator. Currently,
// none of this is meant for users. It is intended to support the
// simplest interface possible for representing and running tests
// while providing a base that other test frameworks may build off of.

use getopts;
use getopts::groups;
use json::ToJson;
use json;
use serialize::Decodable;
use sort;
use stats::Stats;
use stats;
use term;
use time::precise_time_ns;
use treemap::TreeMap;

use std::clone::Clone;
use std::comm::{stream, SharedChan};
use std::libc;
use std::either;
use std::io;
use std::result;
use std::task;
use std::to_str::ToStr;
use std::u64;
use std::f64;
use std::os;
// The name of a test. By convention this follows the rules for rust
// paths; i.e. it should be a series of identifiers separated by double
// colons. This way if some test runner wants to arrange the tests
// hierarchically it may.

#[deriving(Clone)]
pub enum TestName {
    StaticTestName(&'static str),
    DynTestName(~str)
}

impl ToStr for TestName {
    fn to_str(&self) -> ~str {
        match (*self).clone() {
            StaticTestName(s) => s.to_str(),
            DynTestName(s) => s.to_str()
        }
    }
}

// A function that runs a test. If the function returns successfully,
// the test succeeds; if the function fails then the test fails. We
// may need to come up with a more clever definition of test in order
// to support isolation of tests into tasks.
pub enum TestFn {
    StaticTestFn(extern fn()),
    StaticBenchFn(extern fn(&mut BenchHarness)),
    StaticMetricFn(~fn(&mut MetricMap)),
    DynTestFn(~fn()),
    DynMetricFn(~fn(&mut MetricMap)),
    DynBenchFn(~fn(&mut BenchHarness))
}

// Structure passed to BenchFns
pub struct BenchHarness {
    iterations: u64,
    ns_start: u64,
    ns_end: u64,
    bytes: u64
}
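
// A hedged usage sketch (not part of this module's definitions): a benchmark
// function receives `&mut BenchHarness` and wraps only the work to be timed
// in `iter`; the harness manages the field values itself. The function name
// and body below are invented for illustration.
//
//     #[bench]
//     fn bench_push(bh: &mut BenchHarness) {
//         do bh.iter {
//             let mut v = ~[];
//             v.push(1);
//         }
//     }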

// The definition of a single test. A test runner will run a list of
// these.
#[deriving(Clone)]
pub struct TestDesc {
    name: TestName,
    ignore: bool,
    should_fail: bool
}

pub struct TestDescAndFn {
    desc: TestDesc,
    testfn: TestFn,
}

#[deriving(Clone, Encodable, Decodable, Eq)]
pub struct Metric {
    value: f64,
    noise: f64
}

#[deriving(Eq)]
pub struct MetricMap(TreeMap<~str,Metric>);

impl Clone for MetricMap {
    pub fn clone(&self) -> MetricMap {
        MetricMap((**self).clone())
    }
}

/// Analysis of a single change in metric
#[deriving(Eq)]
pub enum MetricChange {
    LikelyNoise,
    MetricAdded,
    MetricRemoved,
    Improvement(f64),
    Regression(f64)
}

pub type MetricDiff = TreeMap<~str,MetricChange>;

// The default console test runner. It accepts the command line
// arguments and a vector of test_descs.
pub fn test_main(args: &[~str], tests: ~[TestDescAndFn]) {
    let opts =
        match parse_opts(args) {
            either::Left(o) => o,
            either::Right(m) => fail!(m)
        };
    if !run_tests_console(&opts, tests) { fail!("Some tests failed"); }
}

// A variant optimized for invocation with a static test vector.
// This will fail (intentionally) when fed any dynamic tests, because
// it is copying the static values out into a dynamic vector and cannot
// copy dynamic values. It is doing this because from this point on
// a ~[TestDescAndFn] is used in order to effect ownership-transfer
// semantics into parallel test runners, which in turn requires a ~[]
// rather than a &[].
pub fn test_main_static(args: &[~str], tests: &[TestDescAndFn]) {
    let owned_tests = do tests.map |t| {
        match t.testfn {
            StaticTestFn(f) =>
                TestDescAndFn { testfn: StaticTestFn(f), desc: t.desc.clone() },

            StaticBenchFn(f) =>
                TestDescAndFn { testfn: StaticBenchFn(f), desc: t.desc.clone() },

            _ => {
                fail!("non-static tests passed to test::test_main_static");
            }
        }
    };
    test_main(args, owned_tests)
}

pub struct TestOpts {
    filter: Option<~str>,
    run_ignored: bool,
    run_tests: bool,
    run_benchmarks: bool,
    ratchet_metrics: Option<Path>,
    ratchet_noise_percent: Option<f64>,
    save_metrics: Option<Path>,
    logfile: Option<Path>
}

type OptRes = Either<TestOpts, ~str>;

fn optgroups() -> ~[getopts::groups::OptGroup] {
    ~[groups::optflag("", "ignored", "Run ignored tests"),
      groups::optflag("", "test", "Run tests and not benchmarks"),
      groups::optflag("", "bench", "Run benchmarks instead of tests"),
      groups::optflag("h", "help", "Display this message (longer with --help)"),
      groups::optopt("", "save-metrics", "Location to save bench metrics",
                     "PATH"),
      groups::optopt("", "ratchet-metrics",
                     "Location to load and save metrics from. The metrics \
                      loaded can cause benchmarks to fail if they run too \
                      slowly", "PATH"),
      groups::optopt("", "ratchet-noise-percent",
                     "Tests within N% of the recorded metrics will be \
                      considered as passing", "PERCENTAGE"),
      groups::optopt("", "logfile", "Write logs to the specified file instead \
                      of stdout", "PATH")]
}

fn usage(binary: &str, helpstr: &str) -> ! {
    let message = fmt!("Usage: %s [OPTIONS] [FILTER]", binary);
    println(groups::usage(message, optgroups()));
    if helpstr == "help" {
        println("\
The FILTER is matched against the name of all tests to run, and if any tests
have a substring match, only those tests are run.

By default, all tests are run in parallel. This can be altered with the
RUST_THREADS environment variable when running tests (set it to 1).

Test Attributes:

    #[test]        - Indicates a function is a test to be run. This function
                     takes no arguments.
    #[bench]       - Indicates a function is a benchmark to be run. This
                     function takes one argument (extra::test::BenchHarness).
    #[should_fail] - This function (also labeled with #[test]) will only pass if
                     the code causes a failure (an assertion failure or fail!)
    #[ignore]      - When applied to a function which is already attributed as a
                     test, then the test runner will ignore these tests during
                     normal test runs. Running with --ignored will run these
                     tests. This may also be written as #[ignore(cfg(...))] to
                     ignore the test on certain configurations.");
    }
    unsafe { libc::exit(0) }
}
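
// A hedged sketch of the attributes described in the help text above, as they
// might appear in user code (the function names and bodies are invented for
// illustration):
//
//     #[test]
//     fn adds() { assert_eq!(2 + 2, 4); }
//
//     #[test] #[should_fail]
//     fn overflows() { fail!("this failure is expected"); }
//
//     #[test] #[ignore(cfg(windows))]
//     fn unix_only_behaviour() { /* ... */ }
//
//     #[bench]
//     fn bench_sum(bh: &mut extra::test::BenchHarness) {
//         do bh.iter { let _x = 2 + 2; }
//     }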
// Parses command line arguments into test options
|
2013-01-08 21:37:25 -06:00
|
|
|
pub fn parse_opts(args: &[~str]) -> OptRes {
|
2013-06-27 07:36:27 -05:00
|
|
|
let args_ = args.tail();
|
2012-07-31 18:38:41 -05:00
|
|
|
let matches =
|
2013-07-16 22:08:01 -05:00
|
|
|
match groups::getopts(args_, optgroups()) {
|
2013-02-15 01:30:30 -06:00
|
|
|
Ok(m) => m,
|
|
|
|
Err(f) => return either::Right(getopts::fail_str(f))
|
2011-07-27 07:19:39 -05:00
|
|
|
};
|
|
|
|
|
2013-07-16 22:08:01 -05:00
|
|
|
if getopts::opt_present(&matches, "h") { usage(args[0], "h"); }
|
|
|
|
if getopts::opt_present(&matches, "help") { usage(args[0], "help"); }
|
|
|
|
|
2011-07-27 07:19:39 -05:00
|
|
|
let filter =
|
2013-05-14 04:52:12 -05:00
|
|
|
if matches.free.len() > 0 {
|
2013-07-02 14:47:32 -05:00
|
|
|
Some((matches).free[0].clone())
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
2011-07-14 18:05:33 -05:00
|
|
|
|
2013-05-19 00:07:44 -05:00
|
|
|
let run_ignored = getopts::opt_present(&matches, "ignored");
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-05-19 00:07:44 -05:00
|
|
|
let logfile = getopts::opt_maybe_str(&matches, "logfile");
|
2013-02-13 13:46:14 -06:00
|
|
|
let logfile = logfile.map(|s| Path(*s));
|
|
|
|
|
2013-05-19 00:07:44 -05:00
|
|
|
let run_benchmarks = getopts::opt_present(&matches, "bench");
|
2013-02-13 13:46:14 -06:00
|
|
|
let run_tests = ! run_benchmarks ||
|
2013-05-19 00:07:44 -05:00
|
|
|
getopts::opt_present(&matches, "test");
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
let ratchet_metrics = getopts::opt_maybe_str(&matches, "ratchet-metrics");
|
|
|
|
let ratchet_metrics = ratchet_metrics.map(|s| Path(*s));
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-11 17:16:11 -05:00
|
|
|
let ratchet_noise_percent =
|
|
|
|
getopts::opt_maybe_str(&matches, "ratchet-noise-percent");
|
|
|
|
let ratchet_noise_percent = ratchet_noise_percent.map(|s| f64::from_str(*s).get());
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
let save_metrics = getopts::opt_maybe_str(&matches, "save-metrics");
|
|
|
|
let save_metrics = save_metrics.map(|s| Path(*s));
|
2011-07-14 18:05:33 -05:00
|
|
|
|
2013-01-22 10:44:24 -06:00
|
|
|
let test_opts = TestOpts {
|
|
|
|
filter: filter,
|
|
|
|
run_ignored: run_ignored,
|
2013-02-13 13:46:14 -06:00
|
|
|
run_tests: run_tests,
|
|
|
|
run_benchmarks: run_benchmarks,
|
2013-07-10 18:17:41 -05:00
|
|
|
ratchet_metrics: ratchet_metrics,
|
2013-07-11 17:16:11 -05:00
|
|
|
ratchet_noise_percent: ratchet_noise_percent,
|
2013-07-10 18:17:41 -05:00
|
|
|
save_metrics: save_metrics,
|
2013-02-13 13:46:14 -06:00
|
|
|
logfile: logfile
|
2013-01-22 10:44:24 -06:00
|
|
|
};
|
2011-07-11 18:33:21 -05:00
|
|
|
|
2013-01-22 10:44:24 -06:00
|
|
|
either::Left(test_opts)
|
2011-07-11 18:33:21 -05:00
|
|
|
}
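
// A hedged sketch of driving the parser above directly; the argument vector
// is invented, but the flags and the Either result are the ones defined in
// this module:
//
//     let args = ~[~"testbin", ~"math::", ~"--ignored", ~"--logfile", ~"log.txt"];
//     match parse_opts(args) {
//         either::Left(opts) => {
//             assert_eq!(opts.filter, Some(~"math::"));
//             assert!(opts.run_ignored);
//         }
//         either::Right(msg) => fail!(msg)
//     }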
|
|
|
|
|
2013-07-02 14:47:32 -05:00
|
|
|
#[deriving(Clone, Eq)]
|
2013-02-13 13:46:14 -06:00
|
|
|
pub struct BenchSamples {
|
2013-07-07 17:43:31 -05:00
|
|
|
ns_iter_summ: stats::Summary,
|
2013-02-13 13:46:14 -06:00
|
|
|
mb_s: uint
|
|
|
|
}
|
|
|
|
|
2013-07-02 14:47:32 -05:00
|
|
|
#[deriving(Clone, Eq)]
|
2013-07-15 20:50:32 -05:00
|
|
|
pub enum TestResult {
|
|
|
|
TrOk,
|
|
|
|
TrFailed,
|
|
|
|
TrIgnored,
|
|
|
|
TrMetrics(MetricMap),
|
2013-07-02 14:47:32 -05:00
|
|
|
TrBench(BenchSamples),
|
2013-07-15 20:50:32 -05:00
|
|
|
}
|
2011-07-14 13:29:54 -05:00
|
|
|
|
2013-02-04 18:48:52 -06:00
|
|
|
struct ConsoleTestState {
|
2013-03-12 15:00:50 -05:00
|
|
|
out: @io::Writer,
|
|
|
|
log_out: Option<@io::Writer>,
|
2013-07-09 19:18:02 -05:00
|
|
|
term: Option<term::Terminal>,
|
2013-02-04 18:48:52 -06:00
|
|
|
use_color: bool,
|
2013-03-24 11:41:19 -05:00
|
|
|
total: uint,
|
|
|
|
passed: uint,
|
|
|
|
failed: uint,
|
|
|
|
ignored: uint,
|
2013-07-15 20:50:32 -05:00
|
|
|
measured: uint,
|
2013-07-10 18:17:41 -05:00
|
|
|
metrics: MetricMap,
|
2013-03-24 11:41:19 -05:00
|
|
|
failures: ~[TestDesc]
|
2013-02-04 18:48:52 -06:00
|
|
|
}
|
2012-03-12 19:31:03 -05:00
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
impl ConsoleTestState {
|
|
|
|
pub fn new(opts: &TestOpts) -> ConsoleTestState {
|
|
|
|
let log_out = match opts.logfile {
|
|
|
|
Some(ref path) => match io::file_writer(path,
|
|
|
|
[io::Create,
|
|
|
|
io::Truncate]) {
|
|
|
|
result::Ok(w) => Some(w),
|
|
|
|
result::Err(ref s) => {
|
|
|
|
fail!("can't open output file: %s", *s)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
None => None
|
|
|
|
};
|
|
|
|
let out = io::stdout();
|
|
|
|
let term = match term::Terminal::new(out) {
|
|
|
|
Err(_) => None,
|
|
|
|
Ok(t) => Some(t)
|
|
|
|
};
|
|
|
|
ConsoleTestState {
|
|
|
|
out: out,
|
|
|
|
log_out: log_out,
|
|
|
|
use_color: use_color(),
|
|
|
|
term: term,
|
|
|
|
total: 0u,
|
|
|
|
passed: 0u,
|
|
|
|
failed: 0u,
|
|
|
|
ignored: 0u,
|
2013-07-15 20:50:32 -05:00
|
|
|
measured: 0u,
|
2013-07-10 18:17:41 -05:00
|
|
|
metrics: MetricMap::new(),
|
2013-07-09 19:18:02 -05:00
|
|
|
failures: ~[]
|
2011-07-11 13:19:32 -05:00
|
|
|
}
|
2011-07-09 18:08:03 -05:00
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_ok(&self) {
|
|
|
|
self.write_pretty("ok", term::color::GREEN);
|
|
|
|
}
|
2011-07-11 13:19:32 -05:00
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_failed(&self) {
|
|
|
|
self.write_pretty("FAILED", term::color::RED);
|
2011-07-22 00:26:53 -05:00
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_ignored(&self) {
|
|
|
|
self.write_pretty("ignored", term::color::YELLOW);
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2011-07-11 13:19:32 -05:00
|
|
|
|
2013-07-15 20:50:32 -05:00
|
|
|
pub fn write_metric(&self) {
|
|
|
|
self.write_pretty("metric", term::color::CYAN);
|
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_bench(&self) {
|
|
|
|
self.write_pretty("bench", term::color::CYAN);
|
|
|
|
}
|
2011-07-11 13:19:32 -05:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
pub fn write_added(&self) {
|
|
|
|
self.write_pretty("added", term::color::GREEN);
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn write_improved(&self) {
|
|
|
|
self.write_pretty("improved", term::color::GREEN);
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn write_removed(&self) {
|
|
|
|
self.write_pretty("removed", term::color::YELLOW);
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn write_regressed(&self) {
|
|
|
|
self.write_pretty("regressed", term::color::RED);
|
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_pretty(&self,
|
|
|
|
word: &str,
|
|
|
|
color: term::color::Color) {
|
|
|
|
match self.term {
|
|
|
|
None => self.out.write_str(word),
|
|
|
|
Some(ref t) => {
|
|
|
|
if self.use_color {
|
|
|
|
t.fg(color);
|
|
|
|
}
|
|
|
|
self.out.write_str(word);
|
|
|
|
if self.use_color {
|
|
|
|
t.reset();
|
|
|
|
}
|
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_run_start(&mut self, len: uint) {
|
|
|
|
self.total = len;
|
|
|
|
let noun = if len != 1 { &"tests" } else { &"test" };
|
|
|
|
self.out.write_line(fmt!("\nrunning %u %s", len, noun));
|
2012-04-03 10:27:51 -05:00
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_test_start(&self, test: &TestDesc) {
|
|
|
|
self.out.write_str(fmt!("test %s ... ", test.name.to_str()));
|
2011-07-27 07:19:39 -05:00
|
|
|
}
|
2011-07-11 13:19:32 -05:00
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_result(&self, result: &TestResult) {
|
|
|
|
match *result {
|
|
|
|
TrOk => self.write_ok(),
|
|
|
|
TrFailed => self.write_failed(),
|
|
|
|
TrIgnored => self.write_ignored(),
|
2013-07-15 20:50:32 -05:00
|
|
|
TrMetrics(ref mm) => {
|
|
|
|
self.write_metric();
|
|
|
|
self.out.write_str(": " + fmt_metrics(mm));
|
|
|
|
}
|
2013-07-09 19:18:02 -05:00
|
|
|
TrBench(ref bs) => {
|
|
|
|
self.write_bench();
|
|
|
|
self.out.write_str(": " + fmt_bench_samples(bs))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
self.out.write_str(&"\n");
|
2011-07-11 13:19:32 -05:00
|
|
|
}
|
2011-07-14 13:29:54 -05:00
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_log(&self, test: &TestDesc, result: &TestResult) {
|
|
|
|
match self.log_out {
|
|
|
|
None => (),
|
|
|
|
Some(out) => {
|
|
|
|
out.write_line(fmt!("%s %s",
|
|
|
|
match *result {
|
|
|
|
TrOk => ~"ok",
|
|
|
|
TrFailed => ~"failed",
|
|
|
|
TrIgnored => ~"ignored",
|
2013-07-15 20:50:32 -05:00
|
|
|
TrMetrics(ref mm) => fmt_metrics(mm),
|
2013-07-09 19:18:02 -05:00
|
|
|
TrBench(ref bs) => fmt_bench_samples(bs)
|
|
|
|
}, test.name.to_str()));
|
|
|
|
}
|
|
|
|
}
|
2011-07-15 02:31:00 -05:00
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn write_failures(&self) {
|
|
|
|
self.out.write_line("\nfailures:");
|
|
|
|
let mut failures = ~[];
|
|
|
|
for self.failures.iter().advance() |f| {
|
|
|
|
failures.push(f.name.to_str());
|
|
|
|
}
|
|
|
|
sort::tim_sort(failures);
|
|
|
|
for failures.iter().advance |name| {
|
|
|
|
self.out.write_line(fmt!(" %s", name.to_str()));
|
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
pub fn write_metric_diff(&self, diff: &MetricDiff) {
|
|
|
|
let mut noise = 0;
|
|
|
|
let mut improved = 0;
|
|
|
|
let mut regressed = 0;
|
|
|
|
let mut added = 0;
|
|
|
|
let mut removed = 0;
|
|
|
|
|
|
|
|
for diff.iter().advance() |(k, v)| {
|
|
|
|
match *v {
|
|
|
|
LikelyNoise => noise += 1,
|
|
|
|
MetricAdded => {
|
|
|
|
added += 1;
|
|
|
|
self.write_added();
|
|
|
|
self.out.write_line(fmt!(": %s", *k));
|
|
|
|
}
|
|
|
|
MetricRemoved => {
|
|
|
|
removed += 1;
|
|
|
|
self.write_removed();
|
|
|
|
self.out.write_line(fmt!(": %s", *k));
|
|
|
|
}
|
|
|
|
Improvement(pct) => {
|
|
|
|
improved += 1;
|
|
|
|
self.out.write_str(*k);
|
|
|
|
self.out.write_str(": ");
|
|
|
|
self.write_improved();
|
|
|
|
self.out.write_line(fmt!(" by %.2f%%", pct as float))
|
|
|
|
}
|
|
|
|
Regression(pct) => {
|
|
|
|
regressed += 1;
|
|
|
|
self.out.write_str(*k);
|
|
|
|
self.out.write_str(": ");
|
|
|
|
self.write_regressed();
|
|
|
|
self.out.write_line(fmt!(" by %.2f%%", pct as float))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
self.out.write_line(fmt!("result of ratchet: %u metrics added, %u removed, \
|
|
|
|
%u improved, %u regressed, %u noise",
|
|
|
|
added, removed, improved, regressed, noise));
|
|
|
|
if regressed == 0 {
|
|
|
|
self.out.write_line("updated ratchet file")
|
|
|
|
} else {
|
|
|
|
self.out.write_line("left ratchet file untouched")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-07-11 17:16:11 -05:00
|
|
|
pub fn write_run_finish(&self,
|
|
|
|
ratchet_metrics: &Option<Path>,
|
|
|
|
ratchet_pct: Option<f64>) -> bool {
|
2013-07-15 20:50:32 -05:00
|
|
|
assert!(self.passed + self.failed + self.ignored + self.measured == self.total);
|
2013-07-10 18:17:41 -05:00
|
|
|
|
|
|
|
let ratchet_success = match *ratchet_metrics {
|
|
|
|
None => true,
|
|
|
|
Some(ref pth) => {
|
|
|
|
self.out.write_str(fmt!("\nusing metrics ratchet: %s\n", pth.to_str()));
|
2013-07-11 17:16:11 -05:00
|
|
|
match ratchet_pct {
|
|
|
|
None => (),
|
|
|
|
Some(pct) =>
|
|
|
|
self.out.write_str(fmt!("with noise-tolerance forced to: %f%%\n",
|
|
|
|
pct as float))
|
|
|
|
}
|
|
|
|
let (diff, ok) = self.metrics.ratchet(pth, ratchet_pct);
|
2013-07-10 18:17:41 -05:00
|
|
|
self.write_metric_diff(&diff);
|
|
|
|
ok
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
let test_success = self.failed == 0u;
|
|
|
|
if !test_success {
|
2013-07-09 19:18:02 -05:00
|
|
|
self.write_failures();
|
2011-07-14 13:29:54 -05:00
|
|
|
}
|
2013-07-09 19:18:02 -05:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
let success = ratchet_success && test_success;
|
|
|
|
|
|
|
|
self.out.write_str("\ntest result: ");
|
2013-07-09 19:18:02 -05:00
|
|
|
if success {
|
|
|
|
// There's no parallelism at this point so it's safe to use color
|
|
|
|
self.write_ok();
|
|
|
|
} else {
|
|
|
|
self.write_failed();
|
|
|
|
}
|
2013-07-15 20:50:32 -05:00
|
|
|
self.out.write_str(fmt!(". %u passed; %u failed; %u ignored; %u measured\n\n",
|
|
|
|
self.passed, self.failed, self.ignored, self.measured));
|
2013-07-09 19:18:02 -05:00
|
|
|
return success;
|
2011-07-14 13:29:54 -05:00
|
|
|
}
|
2011-07-09 18:08:03 -05:00
|
|
|
}
|
|
|
|
|
2013-07-15 20:50:32 -05:00
|
|
|
pub fn fmt_metrics(mm: &MetricMap) -> ~str {
|
|
|
|
use std::iterator::IteratorUtil;
|
|
|
|
let v : ~[~str] = mm.iter()
|
|
|
|
.transform(|(k,v)| fmt!("%s: %f (+/- %f)",
|
|
|
|
*k,
|
|
|
|
v.value as float,
|
|
|
|
v.noise as float))
|
|
|
|
.collect();
|
|
|
|
v.connect(", ")
|
|
|
|
}
|
|
|
|
|
2013-07-09 19:18:02 -05:00
|
|
|
pub fn fmt_bench_samples(bs: &BenchSamples) -> ~str {
|
|
|
|
if bs.mb_s != 0 {
|
|
|
|
fmt!("%u ns/iter (+/- %u) = %u MB/s",
|
|
|
|
bs.ns_iter_summ.median as uint,
|
|
|
|
(bs.ns_iter_summ.max - bs.ns_iter_summ.min) as uint,
|
|
|
|
bs.mb_s)
|
|
|
|
} else {
|
|
|
|
fmt!("%u ns/iter (+/- %u)",
|
|
|
|
bs.ns_iter_summ.median as uint,
|
|
|
|
(bs.ns_iter_summ.max - bs.ns_iter_summ.min) as uint)
|
2013-03-16 13:11:31 -05:00
|
|
|
}
|
2013-07-09 19:18:02 -05:00
|
|
|
}
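
// For illustration only (the numbers are invented), a benchmark line as
// rendered by write_test_start, write_bench and the formatter above:
//
//     test vec::bench_push ... bench: 52 ns/iter (+/- 4) = 310 MB/s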
|
|
|
|
|
|
|
|
// A simple console test runner
|
|
|
|
pub fn run_tests_console(opts: &TestOpts,
|
|
|
|
tests: ~[TestDescAndFn]) -> bool {
|
|
|
|
fn callback(event: &TestEvent, st: &mut ConsoleTestState) {
|
|
|
|
debug!("callback(event=%?)", event);
|
2013-07-02 14:47:32 -05:00
|
|
|
match (*event).clone() {
|
2013-07-09 19:18:02 -05:00
|
|
|
TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
|
|
|
|
TeWait(ref test) => st.write_test_start(test),
|
|
|
|
TeResult(test, result) => {
|
|
|
|
st.write_log(&test, &result);
|
|
|
|
st.write_result(&result);
|
|
|
|
match result {
|
|
|
|
TrOk => st.passed += 1,
|
|
|
|
TrIgnored => st.ignored += 1,
|
2013-07-15 20:50:32 -05:00
|
|
|
TrMetrics(mm) => {
|
|
|
|
let tname = test.name.to_str();
|
|
|
|
for mm.iter().advance() |(k,v)| {
|
|
|
|
st.metrics.insert_metric(tname + "." + *k,
|
|
|
|
v.value, v.noise);
|
|
|
|
}
|
|
|
|
st.measured += 1
|
|
|
|
}
|
2013-07-10 18:17:41 -05:00
|
|
|
TrBench(bs) => {
|
|
|
|
st.metrics.insert_metric(test.name.to_str(),
|
|
|
|
bs.ns_iter_summ.median,
|
|
|
|
bs.ns_iter_summ.max - bs.ns_iter_summ.min);
|
2013-07-15 20:50:32 -05:00
|
|
|
st.measured += 1
|
2013-07-10 18:17:41 -05:00
|
|
|
}
|
2013-07-09 19:18:02 -05:00
|
|
|
TrFailed => {
|
|
|
|
st.failed += 1;
|
|
|
|
st.failures.push(test);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2012-03-12 19:31:03 -05:00
|
|
|
}
|
2013-07-09 19:18:02 -05:00
|
|
|
let st = @mut ConsoleTestState::new(opts);
|
|
|
|
run_tests(opts, tests, |x| callback(&x, st));
|
2013-07-10 18:17:41 -05:00
|
|
|
match opts.save_metrics {
|
|
|
|
None => (),
|
|
|
|
Some(ref pth) => {
|
|
|
|
st.metrics.save(pth);
|
|
|
|
st.out.write_str(fmt!("\nmetrics saved to: %s", pth.to_str()));
|
|
|
|
}
|
|
|
|
}
|
2013-07-11 17:16:11 -05:00
|
|
|
return st.write_run_finish(&opts.ratchet_metrics, opts.ratchet_noise_percent);
|
2012-03-12 19:31:03 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn should_sort_failures_before_printing_them() {
|
2013-01-31 19:12:29 -06:00
|
|
|
fn dummy() {}
|
|
|
|
|
2012-09-14 11:40:28 -05:00
|
|
|
let s = do io::with_str_writer |wr| {
|
2013-01-08 16:00:45 -06:00
|
|
|
let test_a = TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("a"),
|
2012-09-14 11:40:28 -05:00
|
|
|
ignore: false,
|
|
|
|
should_fail: false
|
|
|
|
};
|
2012-03-12 19:31:03 -05:00
|
|
|
|
2013-01-08 16:00:45 -06:00
|
|
|
let test_b = TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("b"),
|
2012-09-14 11:40:28 -05:00
|
|
|
ignore: false,
|
|
|
|
should_fail: false
|
|
|
|
};
|
2012-03-12 19:31:03 -05:00
|
|
|
|
2013-02-13 13:46:14 -06:00
|
|
|
let st = @ConsoleTestState {
|
|
|
|
out: wr,
|
2013-07-11 17:16:11 -05:00
|
|
|
log_out: None,
|
|
|
|
term: None,
|
2013-02-13 13:46:14 -06:00
|
|
|
use_color: false,
|
2013-03-24 11:41:19 -05:00
|
|
|
total: 0u,
|
|
|
|
passed: 0u,
|
|
|
|
failed: 0u,
|
|
|
|
ignored: 0u,
|
2013-07-15 20:50:32 -05:00
|
|
|
measured: 0u,
|
2013-07-11 17:16:11 -05:00
|
|
|
metrics: MetricMap::new(),
|
2013-03-24 11:41:19 -05:00
|
|
|
failures: ~[test_b, test_a]
|
2013-02-13 13:46:14 -06:00
|
|
|
};
|
2012-03-12 19:31:03 -05:00
|
|
|
|
2013-07-11 17:16:11 -05:00
|
|
|
st.write_failures();
|
2012-09-14 11:40:28 -05:00
|
|
|
};
|
2012-03-12 19:31:03 -05:00
|
|
|
|
2013-06-10 01:23:05 -05:00
|
|
|
let apos = s.find_str("a").get();
|
|
|
|
let bpos = s.find_str("b").get();
|
2013-03-28 20:39:09 -05:00
|
|
|
assert!(apos < bpos);
|
2012-03-12 19:31:03 -05:00
|
|
|
}
|
|
|
|
|
2013-02-04 18:48:52 -06:00
|
|
|
fn use_color() -> bool { return get_concurrency() == 1; }
|
2012-04-13 05:34:41 -05:00
|
|
|
|
2013-07-02 14:47:32 -05:00
|
|
|
#[deriving(Clone)]
|
2012-09-04 20:05:57 -05:00
|
|
|
enum TestEvent {
|
|
|
|
TeFiltered(~[TestDesc]),
|
|
|
|
TeWait(TestDesc),
|
|
|
|
TeResult(TestDesc, TestResult),
|
2011-07-29 21:54:05 -05:00
|
|
|
}
|
|
|
|
|
2012-09-04 20:05:57 -05:00
|
|
|
type MonitorMsg = (TestDesc, TestResult);
|
2012-01-19 16:36:11 -06:00
|
|
|
|
2013-01-08 21:37:25 -06:00
|
|
|
fn run_tests(opts: &TestOpts,
|
2013-01-31 19:12:29 -06:00
|
|
|
tests: ~[TestDescAndFn],
|
2013-06-19 21:06:50 -05:00
|
|
|
callback: &fn(e: TestEvent)) {
|
2013-01-31 19:12:29 -06:00
|
|
|
|
2013-05-09 15:27:24 -05:00
|
|
|
let filtered_tests = filter_tests(opts, tests);
|
2013-07-02 14:47:32 -05:00
|
|
|
let filtered_descs = filtered_tests.map(|t| t.desc.clone());
|
2013-05-09 15:27:24 -05:00
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
callback(TeFiltered(filtered_descs));
|
2011-07-29 21:54:05 -05:00
|
|
|
|
2013-07-15 20:50:32 -05:00
|
|
|
let (filtered_tests, filtered_benchs_and_metrics) =
|
2013-06-27 09:10:18 -05:00
|
|
|
do filtered_tests.partition |e| {
|
2013-02-13 13:46:14 -06:00
|
|
|
match e.testfn {
|
|
|
|
StaticTestFn(_) | DynTestFn(_) => true,
|
2013-07-15 20:50:32 -05:00
|
|
|
_ => false
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2012-01-19 15:44:07 -06:00
|
|
|
// It's tempting to just spawn all the tests at once, but since we have
|
|
|
|
// many tests that run in other processes we would be making a big mess.
|
2011-07-29 21:54:05 -05:00
|
|
|
let concurrency = get_concurrency();
|
2012-08-22 19:24:52 -05:00
|
|
|
debug!("using %u test tasks", concurrency);
|
2012-01-19 16:36:11 -06:00
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
let mut remaining = filtered_tests;
|
2013-06-28 11:54:03 -05:00
|
|
|
remaining.reverse();
|
2013-01-31 19:12:29 -06:00
|
|
|
let mut pending = 0;
|
2011-07-29 21:54:05 -05:00
|
|
|
|
2013-01-25 02:52:50 -06:00
|
|
|
let (p, ch) = stream();
|
2013-04-17 01:45:29 -05:00
|
|
|
let ch = SharedChan::new(ch);
|
2012-01-19 16:36:11 -06:00
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
while pending > 0 || !remaining.is_empty() {
|
|
|
|
while pending < concurrency && !remaining.is_empty() {
|
|
|
|
let test = remaining.pop();
|
2012-09-10 19:50:48 -05:00
|
|
|
if concurrency == 1 {
|
2012-02-18 18:30:07 -06:00
|
|
|
// We are doing one test at a time so we can print the name
|
|
|
|
// of the test before we run it. Useful for debugging tests
|
|
|
|
// that hang forever.
|
2013-07-02 14:47:32 -05:00
|
|
|
callback(TeWait(test.desc.clone()));
|
2012-02-18 18:30:07 -06:00
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(!opts.run_tests, test, ch.clone());
|
2013-01-31 19:12:29 -06:00
|
|
|
pending += 1;
|
2011-07-29 21:54:05 -05:00
|
|
|
}
|
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
let (desc, result) = p.recv();
|
2012-09-10 19:50:48 -05:00
|
|
|
if concurrency != 1 {
|
2013-07-02 14:47:32 -05:00
|
|
|
callback(TeWait(desc.clone()));
|
2012-02-18 18:30:07 -06:00
|
|
|
}
|
2013-01-31 19:12:29 -06:00
|
|
|
callback(TeResult(desc, result));
|
|
|
|
pending -= 1;
|
2011-07-29 21:54:05 -05:00
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
|
|
|
|
// All benchmarks run at the end, in serial.
|
2013-07-15 20:50:32 -05:00
|
|
|
// (this includes metric fns)
|
|
|
|
for filtered_benchs_and_metrics.consume_iter().advance |b| {
|
2013-07-02 14:47:32 -05:00
|
|
|
callback(TeWait(b.desc.clone()));
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(!opts.run_benchmarks, b, ch.clone());
|
|
|
|
let (test, result) = p.recv();
|
2013-02-15 01:30:30 -06:00
|
|
|
callback(TeResult(test, result));
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2011-07-29 21:54:05 -05:00
|
|
|
}
|
|
|
|
|
2012-02-07 20:55:02 -06:00
|
|
|
// Windows tends to dislike being overloaded with threads.
|
2012-06-07 23:38:25 -05:00
|
|
|
#[cfg(windows)]
|
2013-06-30 22:51:13 -05:00
|
|
|
static SCHED_OVERCOMMIT : uint = 1;
|
2012-02-07 20:55:02 -06:00
|
|
|
|
2012-06-07 23:38:25 -05:00
|
|
|
#[cfg(unix)]
|
2013-06-30 22:51:13 -05:00
|
|
|
static SCHED_OVERCOMMIT : uint = 4u;
|
2012-02-07 20:55:02 -06:00
|
|
|
|
2012-01-19 16:43:56 -06:00
|
|
|
fn get_concurrency() -> uint {
|
2013-07-02 19:36:58 -05:00
|
|
|
use std::rt;
|
2013-06-23 20:22:57 -05:00
|
|
|
let threads = rt::util::default_sched_threads();
|
|
|
|
if threads == 1 { 1 }
|
2013-07-08 18:20:38 -05:00
|
|
|
else { threads * SCHED_OVERCOMMIT }
|
2012-01-19 16:43:56 -06:00
|
|
|
}
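
// Hedged worked example of the policy above: on unix with, say, 8 scheduler
// threads the harness runs 8 * 4 = 32 test tasks concurrently, while forcing
// RUST_THREADS=1 (as the --help text suggests) serializes the run and, via
// use_color() below, also enables colored output.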
|
2011-07-25 17:21:36 -05:00
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
pub fn filter_tests(
|
|
|
|
opts: &TestOpts,
|
|
|
|
tests: ~[TestDescAndFn]) -> ~[TestDescAndFn]
|
|
|
|
{
|
|
|
|
let mut filtered = tests;
|
2011-07-11 18:33:21 -05:00
|
|
|
|
2011-07-16 19:04:20 -05:00
|
|
|
// Remove tests that don't match the test filter
|
2012-09-21 21:37:57 -05:00
|
|
|
filtered = if opts.filter.is_none() {
|
2013-02-15 01:30:30 -06:00
|
|
|
filtered
|
2011-10-13 18:42:43 -05:00
|
|
|
} else {
|
2013-05-29 18:59:33 -05:00
|
|
|
let filter_str = match opts.filter {
|
2013-07-02 14:47:32 -05:00
|
|
|
Some(ref f) => (*f).clone(),
|
2013-07-11 17:16:11 -05:00
|
|
|
None => ~""
|
2011-07-27 07:19:39 -05:00
|
|
|
};
|
2011-07-14 18:05:33 -05:00
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
fn filter_fn(test: TestDescAndFn, filter_str: &str) ->
|
|
|
|
Option<TestDescAndFn> {
|
2013-06-10 02:32:36 -05:00
|
|
|
if test.desc.name.to_str().contains(filter_str) {
|
2013-07-11 17:16:11 -05:00
|
|
|
return Some(test);
|
|
|
|
} else {
|
|
|
|
return None;
|
|
|
|
}
|
2011-10-13 18:42:43 -05:00
|
|
|
}
|
|
|
|
|
2013-07-01 21:38:19 -05:00
|
|
|
filtered.consume_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
|
2011-10-13 18:42:43 -05:00
|
|
|
};
|
|
|
|
|
2011-07-16 19:04:20 -05:00
|
|
|
// Maybe pull out the ignored test and unignore them
|
2011-10-13 18:42:43 -05:00
|
|
|
filtered = if !opts.run_ignored {
|
2013-02-15 01:30:30 -06:00
|
|
|
filtered
|
2011-10-13 18:42:43 -05:00
|
|
|
} else {
|
2013-01-31 19:12:29 -06:00
|
|
|
fn filter(test: TestDescAndFn) -> Option<TestDescAndFn> {
|
|
|
|
if test.desc.ignore {
|
|
|
|
let TestDescAndFn {desc, testfn} = test;
|
|
|
|
Some(TestDescAndFn {
|
|
|
|
desc: TestDesc {ignore: false, ..desc},
|
|
|
|
testfn: testfn
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2011-07-14 18:05:33 -05:00
|
|
|
};
|
2013-07-01 21:38:19 -05:00
|
|
|
filtered.consume_iter().filter_map(|x| filter(x)).collect()
|
2011-10-13 18:42:43 -05:00
|
|
|
};
|
|
|
|
|
2011-07-16 19:04:20 -05:00
|
|
|
// Sort the tests alphabetically
|
2013-03-21 23:34:30 -05:00
|
|
|
fn lteq(t1: &TestDescAndFn, t2: &TestDescAndFn) -> bool {
|
2013-06-13 22:37:47 -05:00
|
|
|
t1.desc.name.to_str() < t2.desc.name.to_str()
|
2013-01-31 19:12:29 -06:00
|
|
|
}
|
|
|
|
sort::quick_sort(filtered, lteq);
|
2011-07-11 18:33:21 -05:00
|
|
|
|
2013-02-15 01:30:30 -06:00
|
|
|
filtered
|
2011-07-11 18:33:21 -05:00
|
|
|
}
|
2011-07-09 18:08:03 -05:00
|
|
|
|
2013-01-22 10:44:24 -06:00
|
|
|
struct TestFuture {
|
|
|
|
test: TestDesc,
|
2013-03-01 16:15:15 -06:00
|
|
|
wait: @fn() -> TestResult,
|
2013-01-22 10:44:24 -06:00
|
|
|
}
|
2011-07-25 17:21:36 -05:00
|
|
|
|
2013-02-13 13:46:14 -06:00
|
|
|
pub fn run_test(force_ignore: bool,
|
|
|
|
test: TestDescAndFn,
|
|
|
|
monitor_ch: SharedChan<MonitorMsg>) {
|
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
let TestDescAndFn {desc, testfn} = test;
|
|
|
|
|
2013-02-13 13:46:14 -06:00
|
|
|
if force_ignore || desc.ignore {
|
2013-01-31 19:12:29 -06:00
|
|
|
monitor_ch.send((desc, TrIgnored));
|
2012-08-01 19:30:05 -05:00
|
|
|
return;
|
2011-11-01 12:31:23 -05:00
|
|
|
}
|
|
|
|
|
2013-02-13 13:46:14 -06:00
|
|
|
fn run_test_inner(desc: TestDesc,
|
|
|
|
monitor_ch: SharedChan<MonitorMsg>,
|
|
|
|
testfn: ~fn()) {
|
2013-06-28 17:32:26 -05:00
|
|
|
let testfn_cell = ::std::cell::Cell::new(testfn);
|
2013-02-13 13:46:14 -06:00
|
|
|
do task::spawn {
|
|
|
|
let mut result_future = None; // task::future_result(builder);
|
2013-05-03 15:21:33 -05:00
|
|
|
|
|
|
|
let mut task = task::task();
|
|
|
|
task.unlinked();
|
2013-05-07 16:20:56 -05:00
|
|
|
task.future_result(|r| { result_future = Some(r) });
|
2013-05-03 15:21:33 -05:00
|
|
|
task.spawn(testfn_cell.take());
|
|
|
|
|
2013-03-16 14:49:12 -05:00
|
|
|
let task_result = result_future.unwrap().recv();
|
2013-02-13 13:46:14 -06:00
|
|
|
let test_result = calc_result(&desc,
|
|
|
|
task_result == task::Success);
|
2013-07-02 14:47:32 -05:00
|
|
|
monitor_ch.send((desc.clone(), test_result));
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
match testfn {
|
|
|
|
DynBenchFn(benchfn) => {
|
|
|
|
let bs = ::test::bench::benchmark(benchfn);
|
|
|
|
monitor_ch.send((desc, TrBench(bs)));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
StaticBenchFn(benchfn) => {
|
|
|
|
let bs = ::test::bench::benchmark(benchfn);
|
|
|
|
monitor_ch.send((desc, TrBench(bs)));
|
|
|
|
return;
|
|
|
|
}
|
2013-07-15 20:50:32 -05:00
|
|
|
DynMetricFn(f) => {
|
|
|
|
let mut mm = MetricMap::new();
|
|
|
|
f(&mut mm);
|
|
|
|
monitor_ch.send((desc, TrMetrics(mm)));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
StaticMetricFn(f) => {
|
|
|
|
let mut mm = MetricMap::new();
|
|
|
|
f(&mut mm);
|
|
|
|
monitor_ch.send((desc, TrMetrics(mm)));
|
|
|
|
return;
|
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
DynTestFn(f) => run_test_inner(desc, monitor_ch, f),
|
|
|
|
StaticTestFn(f) => run_test_inner(desc, monitor_ch, || f())
|
|
|
|
}
|
2011-07-14 13:29:54 -05:00
|
|
|
}
|
|
|
|
|
2013-01-31 19:12:29 -06:00
|
|
|
fn calc_result(desc: &TestDesc, task_succeeded: bool) -> TestResult {
|
2012-01-19 16:36:11 -06:00
|
|
|
if task_succeeded {
|
2013-01-31 19:12:29 -06:00
|
|
|
if desc.should_fail { TrFailed }
|
2012-09-04 20:05:57 -05:00
|
|
|
else { TrOk }
|
2012-01-19 16:36:11 -06:00
|
|
|
} else {
|
2013-01-31 19:12:29 -06:00
|
|
|
if desc.should_fail { TrOk }
|
2012-09-04 20:05:57 -05:00
|
|
|
else { TrFailed }
|
2012-01-19 16:36:11 -06:00
|
|
|
}
|
2011-07-15 00:24:19 -05:00
|
|
|
}
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
|
|
|
|
impl ToJson for Metric {
|
|
|
|
fn to_json(&self) -> json::Json {
|
2013-07-18 14:37:40 -05:00
|
|
|
let mut map = ~TreeMap::new();
|
2013-07-10 18:17:41 -05:00
|
|
|
map.insert(~"value", json::Number(self.value as float));
|
|
|
|
map.insert(~"noise", json::Number(self.noise as float));
|
|
|
|
json::Object(map)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl MetricMap {
|
|
|
|
|
2013-07-15 20:50:32 -05:00
|
|
|
pub fn new() -> MetricMap {
|
2013-07-10 18:17:41 -05:00
|
|
|
MetricMap(TreeMap::new())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Load MetricDiff from a file.
|
2013-07-15 20:50:32 -05:00
|
|
|
pub fn load(p: &Path) -> MetricMap {
|
2013-07-10 18:17:41 -05:00
|
|
|
assert!(os::path_exists(p));
|
2013-07-02 14:47:32 -05:00
|
|
|
let f = io::file_reader(p).unwrap();
|
2013-07-10 18:17:41 -05:00
|
|
|
let mut decoder = json::Decoder(json::from_reader(f).get());
|
|
|
|
MetricMap(Decodable::decode(&mut decoder))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Write MetricDiff to a file.
|
|
|
|
pub fn save(&self, p: &Path) {
|
2013-07-02 14:47:32 -05:00
|
|
|
let f = io::file_writer(p, [io::Create, io::Truncate]).unwrap();
|
2013-07-10 18:17:41 -05:00
|
|
|
json::to_pretty_writer(f, &self.to_json());
|
|
|
|
}
|
|
|
|
|
2013-07-11 19:05:23 -05:00
|
|
|
/// Compare against another MetricMap. Optionally compare all
|
|
|
|
/// measurements in the maps using the provided `noise_pct` as a
|
|
|
|
/// percentage of each value to consider noise. If `None`, each
|
|
|
|
/// measurement's noise threshold is independently chosen as the
|
|
|
|
/// maximum of that measurement's recorded noise quantity in either
|
|
|
|
/// map.
|
|
|
|
pub fn compare_to_old(&self, old: &MetricMap,
|
2013-07-11 17:16:11 -05:00
|
|
|
noise_pct: Option<f64>) -> MetricDiff {
|
2013-07-10 18:17:41 -05:00
|
|
|
let mut diff : MetricDiff = TreeMap::new();
|
|
|
|
for old.iter().advance |(k, vold)| {
|
|
|
|
let r = match self.find(k) {
|
|
|
|
None => MetricRemoved,
|
|
|
|
Some(v) => {
|
|
|
|
let delta = (v.value - vold.value);
|
2013-07-11 17:16:11 -05:00
|
|
|
let noise = match noise_pct {
|
|
|
|
None => f64::max(vold.noise.abs(), v.noise.abs()),
|
|
|
|
Some(pct) => vold.value * pct / 100.0
|
|
|
|
};
|
2013-07-15 20:50:32 -05:00
|
|
|
if delta.abs() <= noise {
|
2013-07-10 18:17:41 -05:00
|
|
|
LikelyNoise
|
|
|
|
} else {
|
2013-07-17 14:28:48 -05:00
|
|
|
let pct = delta.abs() / (vold.value).max(&f64::epsilon) * 100.0;
|
2013-07-10 18:17:41 -05:00
|
|
|
if vold.noise < 0.0 {
|
|
|
|
// When 'noise' is negative, it means we want
|
|
|
|
// to see deltas that go up over time, and can
|
|
|
|
// only tolerate slight negative movement.
|
|
|
|
if delta < 0.0 {
|
|
|
|
Regression(pct)
|
|
|
|
} else {
|
|
|
|
Improvement(pct)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// When 'noise' is positive, it means we want
|
|
|
|
// to see deltas that go down over time, and
|
|
|
|
// can only tolerate slight positive movements.
|
|
|
|
if delta < 0.0 {
|
|
|
|
Improvement(pct)
|
|
|
|
} else {
|
|
|
|
Regression(pct)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2013-07-02 14:47:32 -05:00
|
|
|
diff.insert((*k).clone(), r);
|
2013-07-10 18:17:41 -05:00
|
|
|
}
|
|
|
|
for self.iter().advance |(k, _)| {
|
|
|
|
if !diff.contains_key(k) {
|
2013-07-02 14:47:32 -05:00
|
|
|
diff.insert((*k).clone(), MetricAdded);
|
2013-07-10 18:17:41 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
diff
|
|
|
|
}
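
// A hedged sketch of the comparison semantics above (values are invented):
// with the default per-metric noise, a delta inside the noise band is
// LikelyNoise, anything beyond it becomes a percentage relative to the old
// value:
//
//     let mut old = MetricMap::new();
//     let mut new = MetricMap::new();
//     old.insert_metric("alloc-bytes", 1000.0, 50.0);  // positive noise: smaller is better
//     new.insert_metric("alloc-bytes", 1200.0, 50.0);  // grew by 200, past the +/- 50 band
//     let diff = new.compare_to_old(&old, None);
//     assert_eq!(*diff.find(&~"alloc-bytes").get(), Regression(20.0));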
|
|
|
|
|
|
|
|
/// Insert a named `value` (+/- `noise`) metric into the map. The value
|
|
|
|
/// must be non-negative. The `noise` indicates the uncertainty of the
|
|
|
|
/// metric, which doubles as the "noise range" of acceptable
|
|
|
|
/// pairwise-regressions on this named value, when comparing from one
|
|
|
|
/// metric to the next using `compare_to_old`.
|
|
|
|
///
|
|
|
|
/// If `noise` is positive, then it means this metric is of a value
|
|
|
|
/// you want to see grow smaller, so a change larger than `noise` in the
|
|
|
|
/// positive direction represents a regression.
|
|
|
|
///
|
|
|
|
/// If `noise` is negative, then it means this metric is of a value
|
|
|
|
/// you want to see grow larger, so a change larger than `noise` in the
|
|
|
|
/// negative direction represents a regression.
|
|
|
|
pub fn insert_metric(&mut self, name: &str, value: f64, noise: f64) {
|
|
|
|
let m = Metric {
|
|
|
|
value: value,
|
|
|
|
noise: noise
|
|
|
|
};
|
|
|
|
self.insert(name.to_owned(), m);
|
|
|
|
}
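
// A hedged illustration of the sign convention documented above (the metric
// names and numbers are invented):
//
//     let mut mm = MetricMap::new();
//     mm.insert_metric("compile-time-ms", 1500.0, 20.0);  // lower is better
//     mm.insert_metric("throughput-mb-s", 250.0, -5.0);   // higher is better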
|
|
|
|
|
|
|
|
/// Attempt to "ratchet" an external metric file. This involves loading
|
|
|
|
/// metrics from a metric file (if it exists), comparing against
|
|
|
|
/// the metrics in `self` using `compare_to_old`, and rewriting the
|
|
|
|
/// file to contain the metrics in `self` if none of the
|
|
|
|
/// `MetricChange`s are `Regression`. Returns the diff as well
|
|
|
|
/// as a boolean indicating whether the ratchet succeeded.
|
2013-07-11 17:16:11 -05:00
|
|
|
pub fn ratchet(&self, p: &Path, pct: Option<f64>) -> (MetricDiff, bool) {
|
2013-07-10 18:17:41 -05:00
|
|
|
let old = if os::path_exists(p) {
|
|
|
|
MetricMap::load(p)
|
|
|
|
} else {
|
|
|
|
MetricMap::new()
|
|
|
|
};
|
|
|
|
|
2013-07-11 19:05:23 -05:00
|
|
|
let diff : MetricDiff = self.compare_to_old(&old, pct);
|
2013-07-10 18:17:41 -05:00
|
|
|
let ok = do diff.iter().all() |(_, v)| {
|
|
|
|
match *v {
|
|
|
|
Regression(_) => false,
|
|
|
|
_ => true
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
if ok {
|
|
|
|
debug!("rewriting file '%s' with updated metrics");
|
|
|
|
self.save(p);
|
|
|
|
}
|
|
|
|
return (diff, ok)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Benchmarking
|
|
|
|
|
2013-06-14 20:21:47 -05:00
|
|
|
impl BenchHarness {
|
|
|
|
/// Callback for benchmark functions to run in their body.
|
|
|
|
pub fn iter(&mut self, inner:&fn()) {
|
|
|
|
self.ns_start = precise_time_ns();
|
|
|
|
let k = self.iterations;
|
|
|
|
for u64::range(0, k) |_| {
|
|
|
|
inner();
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2013-06-14 20:21:47 -05:00
|
|
|
self.ns_end = precise_time_ns();
|
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-06-14 20:21:47 -05:00
|
|
|
pub fn ns_elapsed(&mut self) -> u64 {
|
|
|
|
if self.ns_start == 0 || self.ns_end == 0 {
|
|
|
|
0
|
|
|
|
} else {
|
|
|
|
self.ns_end - self.ns_start
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2013-06-14 20:21:47 -05:00
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-06-14 20:21:47 -05:00
|
|
|
pub fn ns_per_iter(&mut self) -> u64 {
|
|
|
|
if self.iterations == 0 {
|
|
|
|
0
|
|
|
|
} else {
|
2013-07-17 14:28:48 -05:00
|
|
|
self.ns_elapsed() / self.iterations.max(&1)
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2013-06-14 20:21:47 -05:00
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-06-14 20:21:47 -05:00
|
|
|
pub fn bench_n(&mut self, n: u64, f: &fn(&mut BenchHarness)) {
|
|
|
|
self.iterations = n;
|
|
|
|
debug!("running benchmark for %u iterations",
|
|
|
|
n as uint);
|
|
|
|
f(self);
|
|
|
|
}
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
// This is a more statistics-driven benchmark algorithm
|
|
|
|
pub fn auto_bench(&mut self, f: &fn(&mut BenchHarness)) -> stats::Summary {
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-06-14 20:21:47 -05:00
|
|
|
// Initial bench run to get ballpark figure.
|
|
|
|
let mut n = 1_u64;
|
2013-06-21 19:08:35 -05:00
|
|
|
self.bench_n(n, |x| f(x));
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
// Try to estimate iter count for 1ms falling back to 1m
|
|
|
|
// iterations if first run took < 1ns.
|
|
|
|
if self.ns_per_iter() == 0 {
|
|
|
|
n = 1_000_000;
|
|
|
|
} else {
|
2013-07-17 14:28:48 -05:00
|
|
|
n = 1_000_000 / self.ns_per_iter().max(&1);
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
let mut total_run = 0;
|
|
|
|
let samples : &mut [f64] = [0.0_f64, ..50];
|
2013-06-14 20:21:47 -05:00
|
|
|
loop {
|
2013-07-07 17:43:31 -05:00
|
|
|
let loop_start = precise_time_ns();
|
2013-06-14 20:21:47 -05:00
|
|
|
|
2013-07-07 17:43:31 -05:00
|
|
|
for samples.mut_iter().advance() |p| {
|
2013-07-10 18:17:41 -05:00
|
|
|
self.bench_n(n as u64, |x| f(x));
|
2013-07-07 17:43:31 -05:00
|
|
|
*p = self.ns_per_iter() as f64;
|
2013-06-14 20:21:47 -05:00
|
|
|
};
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
stats::winsorize(samples, 5.0);
|
2013-07-07 17:43:31 -05:00
|
|
|
let summ = stats::Summary::new(samples);
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
for samples.mut_iter().advance() |p| {
|
|
|
|
self.bench_n(5 * n as u64, |x| f(x));
|
|
|
|
*p = self.ns_per_iter() as f64;
|
|
|
|
};
|
|
|
|
|
|
|
|
stats::winsorize(samples, 5.0);
|
|
|
|
let summ5 = stats::Summary::new(samples);
|
|
|
|
|
2013-07-07 17:43:31 -05:00
|
|
|
debug!("%u samples, median %f, MAD=%f, MADP=%f",
|
|
|
|
samples.len(),
|
|
|
|
summ.median as float,
|
|
|
|
summ.median_abs_dev as float,
|
|
|
|
summ.median_abs_dev_pct as float);
|
|
|
|
|
|
|
|
let now = precise_time_ns();
|
|
|
|
let loop_run = now - loop_start;
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
// If we've run for 100ms and seem to have converged to a
|
|
|
|
// stable median.
|
|
|
|
if loop_run > 100_000_000 &&
|
|
|
|
summ.median_abs_dev_pct < 1.0 &&
|
|
|
|
summ.median - summ5.median < summ5.median_abs_dev {
|
|
|
|
return summ5;
|
2013-07-07 17:43:31 -05:00
|
|
|
}
|
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
total_run += loop_run;
|
2013-07-15 22:34:11 -05:00
|
|
|
// Longest we ever run for is 3s.
|
|
|
|
if total_run > 3_000_000_000 {
|
2013-07-10 18:17:41 -05:00
|
|
|
return summ5;
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
2013-06-14 20:21:47 -05:00
|
|
|
|
2013-07-10 18:17:41 -05:00
|
|
|
n *= 2;
|
2013-02-13 13:46:14 -06:00
|
|
|
}
|
|
|
|
}
}
|
|
|
|
|
|
|
|
pub mod bench {
|
|
|
|
use test::{BenchHarness, BenchSamples};
|
2013-02-13 13:46:14 -06:00
|
|
|
|
|
|
|
pub fn benchmark(f: &fn(&mut BenchHarness)) -> BenchSamples {
|
|
|
|
|
|
|
|
let mut bs = BenchHarness {
|
|
|
|
iterations: 0,
|
|
|
|
ns_start: 0,
|
|
|
|
ns_end: 0,
|
|
|
|
bytes: 0
|
|
|
|
};
|
|
|
|
|
2013-07-07 17:43:31 -05:00
|
|
|
let ns_iter_summ = bs.auto_bench(f);
|
2013-02-13 13:46:14 -06:00
|
|
|
|
2013-07-17 14:28:48 -05:00
|
|
|
let ns_iter = (ns_iter_summ.median as u64).max(&1);
|
|
|
|
let iter_s = 1_000_000_000 / ns_iter;
|
2013-02-13 13:46:14 -06:00
|
|
|
let mb_s = (bs.bytes * iter_s) / 1_000_000;
|
|
|
|
|
|
|
|
BenchSamples {
|
2013-07-07 17:43:31 -05:00
|
|
|
ns_iter_summ: ns_iter_summ,
|
2013-02-13 13:46:14 -06:00
|
|
|
mb_s: mb_s as uint
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-01-17 21:05:07 -06:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests {
|
2013-01-31 19:12:29 -06:00
|
|
|
use test::{TrFailed, TrIgnored, TrOk, filter_tests, parse_opts,
|
2013-02-13 13:46:14 -06:00
|
|
|
TestDesc, TestDescAndFn,
|
2013-07-11 19:05:23 -05:00
|
|
|
Metric, MetricMap, MetricAdded, MetricRemoved,
|
|
|
|
Improvement, Regression, LikelyNoise,
|
2013-02-13 13:46:14 -06:00
|
|
|
StaticTestName, DynTestName, DynTestFn};
|
2013-01-22 10:44:24 -06:00
|
|
|
use test::{TestOpts, run_test};
|
2013-01-08 21:37:25 -06:00
|
|
|
|
2013-06-28 17:32:26 -05:00
|
|
|
use std::either;
|
|
|
|
use std::comm::{stream, SharedChan};
|
|
|
|
use std::vec;
|
2013-07-11 19:05:23 -05:00
|
|
|
use tempfile;
|
|
|
|
use std::os;
|
2012-12-27 20:24:18 -06:00
|
|
|
|
2012-01-17 21:05:07 -06:00
|
|
|
#[test]
|
2013-01-29 14:06:09 -06:00
|
|
|
pub fn do_not_run_ignored_tests() {
|
2013-02-11 21:26:38 -06:00
|
|
|
fn f() { fail!(); }
|
2013-01-31 19:12:29 -06:00
|
|
|
let desc = TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("whatever"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: true,
|
|
|
|
should_fail: false
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| f()),
|
2012-01-17 21:05:07 -06:00
|
|
|
};
|
2013-01-25 02:52:50 -06:00
|
|
|
let (p, ch) = stream();
|
2013-04-17 01:45:29 -05:00
|
|
|
let ch = SharedChan::new(ch);
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(false, desc, ch);
|
2013-01-25 02:52:50 -06:00
|
|
|
let (_, res) = p.recv();
|
2013-03-28 20:39:09 -05:00
|
|
|
assert!(res != TrOk);
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-01-29 14:06:09 -06:00
|
|
|
pub fn ignored_tests_result_in_ignored() {
|
2012-01-17 21:05:07 -06:00
|
|
|
fn f() { }
|
2013-01-31 19:12:29 -06:00
|
|
|
let desc = TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("whatever"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: true,
|
|
|
|
should_fail: false
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| f()),
|
2012-01-17 21:05:07 -06:00
|
|
|
};
|
2013-01-25 02:52:50 -06:00
|
|
|
let (p, ch) = stream();
|
2013-04-17 01:45:29 -05:00
|
|
|
let ch = SharedChan::new(ch);
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(false, desc, ch);
|
2013-01-25 02:52:50 -06:00
|
|
|
let (_, res) = p.recv();
|
2013-05-18 21:02:45 -05:00
|
|
|
assert_eq!(res, TrIgnored);
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2012-06-07 23:38:25 -05:00
|
|
|
#[ignore(cfg(windows))]
|
2013-04-15 10:08:52 -05:00
|
|
|
fn test_should_fail() {
|
2013-02-11 21:26:38 -06:00
|
|
|
fn f() { fail!(); }
|
2013-01-31 19:12:29 -06:00
|
|
|
let desc = TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("whatever"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: false,
|
|
|
|
should_fail: true
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| f()),
|
2012-01-17 21:05:07 -06:00
|
|
|
};
|
2013-01-25 02:52:50 -06:00
|
|
|
let (p, ch) = stream();
|
2013-04-17 01:45:29 -05:00
|
|
|
let ch = SharedChan::new(ch);
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(false, desc, ch);
|
2013-01-25 02:52:50 -06:00
|
|
|
let (_, res) = p.recv();
|
2013-05-18 21:02:45 -05:00
|
|
|
assert_eq!(res, TrOk);
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-04-15 10:08:52 -05:00
|
|
|
fn test_should_fail_but_succeeds() {
|
2012-01-17 21:05:07 -06:00
|
|
|
fn f() { }
|
2013-01-31 19:12:29 -06:00
|
|
|
let desc = TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("whatever"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: false,
|
|
|
|
should_fail: true
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| f()),
|
2012-01-17 21:05:07 -06:00
|
|
|
};
|
2013-01-25 02:52:50 -06:00
|
|
|
let (p, ch) = stream();
|
2013-04-17 01:45:29 -05:00
|
|
|
let ch = SharedChan::new(ch);
|
2013-02-13 13:46:14 -06:00
|
|
|
run_test(false, desc, ch);
|
2013-01-25 02:52:50 -06:00
|
|
|
let (_, res) = p.recv();
|
2013-05-18 21:02:45 -05:00
|
|
|
assert_eq!(res, TrFailed);
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-04-15 10:08:52 -05:00
|
|
|
fn first_free_arg_should_be_a_filter() {
|
2012-07-14 00:57:48 -05:00
|
|
|
let args = ~[~"progname", ~"filter"];
|
2012-08-06 14:34:08 -05:00
|
|
|
let opts = match parse_opts(args) {
|
2013-05-29 18:59:33 -05:00
|
|
|
either::Left(o) => o,
|
2013-05-05 17:18:51 -05:00
|
|
|
_ => fail!("Malformed arg in first_free_arg_should_be_a_filter")
|
2012-08-03 21:59:04 -05:00
|
|
|
};
|
2013-07-02 14:47:32 -05:00
|
|
|
assert!("filter" == opts.filter.clone().get());
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-04-15 10:08:52 -05:00
|
|
|
fn parse_ignored_flag() {
|
2012-07-14 00:57:48 -05:00
|
|
|
let args = ~[~"progname", ~"filter", ~"--ignored"];
|
2012-08-06 14:34:08 -05:00
|
|
|
let opts = match parse_opts(args) {
|
2013-05-29 18:59:33 -05:00
|
|
|
either::Left(o) => o,
|
2013-05-05 17:18:51 -05:00
|
|
|
_ => fail!("Malformed arg in parse_ignored_flag")
|
2012-08-03 21:59:04 -05:00
|
|
|
};
|
2013-03-28 20:39:09 -05:00
|
|
|
assert!((opts.run_ignored));
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-01-29 14:06:09 -06:00
|
|
|
pub fn filter_for_ignored_option() {
|
2013-01-31 19:12:29 -06:00
|
|
|
fn dummy() {}
|
|
|
|
|
2012-01-17 21:05:07 -06:00
|
|
|
// When we run ignored tests the test filter should filter out all the
|
|
|
|
// unignored tests and flip the ignore flag on the rest to false
|
|
|
|
|
2013-01-22 10:44:24 -06:00
|
|
|
let opts = TestOpts {
|
2013-07-11 17:16:11 -05:00
|
|
|
filter: None,
|
2013-01-22 10:44:24 -06:00
|
|
|
run_ignored: true,
|
2013-07-11 17:16:11 -05:00
|
|
|
logfile: None,
|
2013-02-13 13:46:14 -06:00
|
|
|
run_tests: true,
|
|
|
|
run_benchmarks: false,
|
2013-07-11 17:16:11 -05:00
|
|
|
ratchet_noise_percent: None,
|
|
|
|
ratchet_metrics: None,
|
|
|
|
save_metrics: None,
|
2013-01-22 10:44:24 -06:00
|
|
|
};
|
|
|
|
|
|
|
|
let tests = ~[
|
2013-01-31 19:12:29 -06:00
|
|
|
TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("1"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: true,
|
|
|
|
should_fail: false,
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| {}),
|
2013-01-22 10:44:24 -06:00
|
|
|
},
|
2013-01-31 19:12:29 -06:00
|
|
|
TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-02-13 13:46:14 -06:00
|
|
|
name: StaticTestName("2"),
|
2013-01-31 19:12:29 -06:00
|
|
|
ignore: false,
|
|
|
|
should_fail: false
|
|
|
|
},
|
2013-03-01 16:15:15 -06:00
|
|
|
testfn: DynTestFn(|| {}),
|
2013-01-22 10:44:24 -06:00
|
|
|
},
|
|
|
|
];
|
2012-09-19 20:51:35 -05:00
|
|
|
let filtered = filter_tests(&opts, tests);
|
2012-01-17 21:05:07 -06:00
|
|
|
|
2013-05-18 21:02:45 -05:00
|
|
|
assert_eq!(filtered.len(), 1);
|
|
|
|
assert_eq!(filtered[0].desc.name.to_str(), ~"1");
|
|
|
|
assert!(filtered[0].desc.ignore == false);
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2013-01-29 14:06:09 -06:00
|
|
|
pub fn sort_tests() {
|
2013-01-22 10:44:24 -06:00
|
|
|
let opts = TestOpts {
|
2013-07-11 17:16:11 -05:00
|
|
|
filter: None,
|
2013-01-22 10:44:24 -06:00
|
|
|
run_ignored: false,
|
2013-07-11 17:16:11 -05:00
|
|
|
logfile: None,
|
2013-02-13 13:46:14 -06:00
|
|
|
run_tests: true,
|
|
|
|
run_benchmarks: false,
|
2013-07-11 17:16:11 -05:00
|
|
|
ratchet_noise_percent: None,
|
|
|
|
ratchet_metrics: None,
|
|
|
|
save_metrics: None,
|
2013-01-22 10:44:24 -06:00
|
|
|
};
|
2012-01-17 21:05:07 -06:00
|
|
|
|
|
|
|
let names =
|
2012-07-14 00:57:48 -05:00
|
|
|
~[~"sha1::test", ~"int::test_to_str", ~"int::test_pow",
|
|
|
|
~"test::do_not_run_ignored_tests",
|
|
|
|
~"test::ignored_tests_result_in_ignored",
|
|
|
|
~"test::first_free_arg_should_be_a_filter",
|
|
|
|
~"test::parse_ignored_flag", ~"test::filter_for_ignored_option",
|
|
|
|
~"test::sort_tests"];
|
2012-01-17 21:05:07 -06:00
|
|
|
let tests =
|
|
|
|
{
|
2013-01-31 19:12:29 -06:00
|
|
|
fn testfn() { }
|
2012-09-18 23:41:37 -05:00
|
|
|
let mut tests = ~[];
|
2013-06-21 07:29:53 -05:00
|
|
|
for names.iter().advance |name| {
|
2013-01-31 19:12:29 -06:00
|
|
|
let test = TestDescAndFn {
|
|
|
|
desc: TestDesc {
|
2013-07-02 14:47:32 -05:00
|
|
|
name: DynTestName((*name).clone()),
|
2013-02-13 13:46:14 -06:00
|
|
|
ignore: false,
|
2013-01-31 19:12:29 -06:00
|
|
|
should_fail: false
|
|
|
|
},
|
2013-07-11 14:05:17 -05:00
|
|
|
testfn: DynTestFn(testfn),
|
2013-01-31 19:12:29 -06:00
|
|
|
};
|
2013-02-15 01:30:30 -06:00
|
|
|
tests.push(test);
|
2012-09-18 23:41:37 -05:00
|
|
|
}
|
2013-02-15 01:30:30 -06:00
|
|
|
tests
|
2012-09-18 23:41:37 -05:00
|
|
|
};
|
2012-09-19 20:51:35 -05:00
|
|
|
let filtered = filter_tests(&opts, tests);
|
2012-01-17 21:05:07 -06:00
|
|
|
|
2012-09-18 23:41:37 -05:00
|
|
|
let expected =
|
|
|
|
~[~"int::test_pow", ~"int::test_to_str", ~"sha1::test",
|
|
|
|
~"test::do_not_run_ignored_tests",
|
|
|
|
~"test::filter_for_ignored_option",
|
|
|
|
~"test::first_free_arg_should_be_a_filter",
|
|
|
|
~"test::ignored_tests_result_in_ignored",
|
|
|
|
~"test::parse_ignored_flag",
|
|
|
|
~"test::sort_tests"];
|
2012-01-17 21:05:07 -06:00
|
|
|
|
2013-02-15 01:30:30 -06:00
|
|
|
let pairs = vec::zip(expected, filtered);
|
2012-01-17 21:05:07 -06:00
|
|
|
|
2013-06-21 07:29:53 -05:00
|
|
|
for pairs.iter().advance |p| {
|
2012-09-18 23:41:37 -05:00
|
|
|
match *p {
|
2013-03-06 21:09:17 -06:00
|
|
|
(ref a, ref b) => {
|
2013-05-18 21:02:45 -05:00
|
|
|
assert!(*a == b.desc.name.to_str());
|
2013-03-06 21:09:17 -06:00
|
|
|
}
|
2012-09-18 23:41:37 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2013-07-11 19:05:23 -05:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
pub fn test_metricmap_compare() {
|
|
|
|
let mut m1 = MetricMap::new();
|
|
|
|
let mut m2 = MetricMap::new();
|
|
|
|
m1.insert_metric("in-both-noise", 1000.0, 200.0);
|
|
|
|
m2.insert_metric("in-both-noise", 1100.0, 200.0);
|
|
|
|
|
|
|
|
m1.insert_metric("in-first-noise", 1000.0, 2.0);
|
|
|
|
m2.insert_metric("in-second-noise", 1000.0, 2.0);
|
|
|
|
|
|
|
|
m1.insert_metric("in-both-want-downwards-but-regressed", 1000.0, 10.0);
|
|
|
|
m2.insert_metric("in-both-want-downwards-but-regressed", 2000.0, 10.0);
|
|
|
|
|
|
|
|
m1.insert_metric("in-both-want-downwards-and-improved", 2000.0, 10.0);
|
|
|
|
m2.insert_metric("in-both-want-downwards-and-improved", 1000.0, 10.0);
|
|
|
|
|
|
|
|
m1.insert_metric("in-both-want-upwards-but-regressed", 2000.0, -10.0);
|
|
|
|
m2.insert_metric("in-both-want-upwards-but-regressed", 1000.0, -10.0);
|
|
|
|
|
|
|
|
m1.insert_metric("in-both-want-upwards-and-improved", 1000.0, -10.0);
|
|
|
|
m2.insert_metric("in-both-want-upwards-and-improved", 2000.0, -10.0);
|
|
|
|
|
|
|
|
let diff1 = m2.compare_to_old(&m1, None);
|
|
|
|
|
|
|
|
assert_eq!(*(diff1.find(&~"in-both-noise").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff1.find(&~"in-first-noise").get()), MetricRemoved);
|
|
|
|
assert_eq!(*(diff1.find(&~"in-second-noise").get()), MetricAdded);
|
2013-07-15 20:50:32 -05:00
|
|
|
assert_eq!(*(diff1.find(&~"in-both-want-downwards-but-regressed").get()),
|
|
|
|
Regression(100.0));
|
|
|
|
assert_eq!(*(diff1.find(&~"in-both-want-downwards-and-improved").get()),
|
|
|
|
Improvement(50.0));
|
|
|
|
assert_eq!(*(diff1.find(&~"in-both-want-upwards-but-regressed").get()),
|
|
|
|
Regression(50.0));
|
|
|
|
assert_eq!(*(diff1.find(&~"in-both-want-upwards-and-improved").get()),
|
|
|
|
Improvement(100.0));
|
2013-07-11 19:05:23 -05:00
|
|
|
assert_eq!(diff1.len(), 7);
|
|
|
|
|
|
|
|
let diff2 = m2.compare_to_old(&m1, Some(200.0));
|
|
|
|
|
|
|
|
assert_eq!(*(diff2.find(&~"in-both-noise").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-first-noise").get()), MetricRemoved);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-second-noise").get()), MetricAdded);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-both-want-downwards-but-regressed").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-both-want-downwards-and-improved").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-both-want-upwards-but-regressed").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff2.find(&~"in-both-want-upwards-and-improved").get()), LikelyNoise);
|
|
|
|
assert_eq!(diff2.len(), 7);
|
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
pub fn ratchet_test() {
|
|
|
|
|
|
|
|
let dpth = tempfile::mkdtemp(&os::tmpdir(),
|
|
|
|
"test-ratchet").expect("missing test for ratchet");
|
|
|
|
let pth = dpth.push("ratchet.json");
|
|
|
|
|
|
|
|
let mut m1 = MetricMap::new();
|
|
|
|
m1.insert_metric("runtime", 1000.0, 2.0);
|
|
|
|
m1.insert_metric("throughput", 50.0, 2.0);
|
|
|
|
|
|
|
|
let mut m2 = MetricMap::new();
|
|
|
|
m2.insert_metric("runtime", 1100.0, 2.0);
|
|
|
|
m2.insert_metric("throughput", 50.0, 2.0);
|
|
|
|
|
|
|
|
m1.save(&pth);
|
|
|
|
|
|
|
|
// Ask for a ratchet that should fail to advance.
|
|
|
|
let (diff1, ok1) = m2.ratchet(&pth, None);
|
|
|
|
assert_eq!(ok1, false);
|
|
|
|
assert_eq!(diff1.len(), 2);
|
|
|
|
assert_eq!(*(diff1.find(&~"runtime").get()), Regression(10.0));
|
|
|
|
assert_eq!(*(diff1.find(&~"throughput").get()), LikelyNoise);
|
|
|
|
|
|
|
|
// Check that it was not rewritten.
|
|
|
|
let m3 = MetricMap::load(&pth);
|
|
|
|
assert_eq!(m3.len(), 2);
|
|
|
|
assert_eq!(*(m3.find(&~"runtime").get()), Metric { value: 1000.0, noise: 2.0 });
|
|
|
|
assert_eq!(*(m3.find(&~"throughput").get()), Metric { value: 50.0, noise: 2.0 });
|
|
|
|
|
|
|
|
// Ask for a ratchet with an explicit noise-percentage override,
|
|
|
|
// that should advance.
|
|
|
|
let (diff2, ok2) = m2.ratchet(&pth, Some(10.0));
|
|
|
|
assert_eq!(ok2, true);
|
|
|
|
assert_eq!(diff2.len(), 2);
|
|
|
|
assert_eq!(*(diff2.find(&~"runtime").get()), LikelyNoise);
|
|
|
|
assert_eq!(*(diff2.find(&~"throughput").get()), LikelyNoise);
|
|
|
|
|
|
|
|
// Check that it was rewritten.
|
|
|
|
let m4 = MetricMap::load(&pth);
|
|
|
|
assert_eq!(m4.len(), 2);
|
|
|
|
assert_eq!(*(m4.find(&~"runtime").get()), Metric { value: 1100.0, noise: 2.0 });
|
|
|
|
assert_eq!(*(m4.find(&~"throughput").get()), Metric { value: 50.0, noise: 2.0 });
|
|
|
|
|
|
|
|
os::remove_dir_recursive(&dpth);
|
|
|
|
}
|
2012-01-17 21:05:07 -06:00
|
|
|
}
|