Fix a bug where cached stacks weren't re-used

The condition was in the wrong direction, and it also didn't take equality
into account: only cached stacks strictly smaller than the requested size
ever matched, so suitable stacks were never reused. Tests were added for
both cases.
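
For illustration, here is a minimal sketch of the corrected lookup outside of
libgreen (the `Pool`/`CachedStack` types below are simplified stand-ins for
`StackPool`/`Stack`, using `usize` rather than the old `uint`). The old
predicate `s.min_size < min_size` only matched cached stacks strictly smaller
than the request, so a big-enough cached stack was never handed back; flipping
it to `min_size <= s.min_size` accepts any cached stack at least as large as
the request, including an exact match:

    // Simplified stand-ins for libgreen's Stack/StackPool.
    struct CachedStack {
        min_size: usize,
    }

    struct Pool {
        stacks: Vec<CachedStack>,
    }

    impl Pool {
        fn take_stack(&mut self, min_size: usize) -> CachedStack {
            // Reuse any cached stack at least as large as the request;
            // otherwise fall back to allocating a fresh one.
            match self.stacks.iter().position(|s| min_size <= s.min_size) {
                Some(idx) => self.stacks.swap_remove(idx),
                None => CachedStack { min_size },
            }
        }

        fn give_stack(&mut self, stack: CachedStack) {
            self.stacks.push(stack);
        }
    }

    fn main() {
        let mut pool = Pool { stacks: Vec::new() };
        pool.give_stack(CachedStack { min_size: 10 });
        // A request for 4 can now reuse the cached stack of size 10.
        assert_eq!(pool.take_stack(4).min_size, 10);
    }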

For the small benchmark of `task::try(proc() {}).unwrap()`, this change takes
the iteration time on OSX from 15119 ns/iter down to 6179 ns/iter (timed with
RUST_THREADS=1).

cc #11389
Alex Crichton 2014-02-10 14:17:10 -08:00
parent 89b1686bd7
commit d5e0622f95

@@ -138,9 +138,9 @@ pub fn new() -> StackPool {
     pub fn take_stack(&mut self, min_size: uint) -> Stack {
         // Ideally this would be a binary search
-        match self.stacks.iter().position(|s| s.min_size < min_size) {
+        match self.stacks.iter().position(|s| min_size <= s.min_size) {
             Some(idx) => self.stacks.swap_remove(idx),
             None => Stack::new(min_size)
         }
     }
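
The code comment notes that this lookup would ideally be a binary search. As a
hedged sketch (not part of this commit), one way to get there is to keep the
pool's vector sorted by `min_size` and use `partition_point` to find the first
cached stack that is large enough; the types below are simplified stand-ins,
written against current Rust rather than the 2014 libgreen API:

    // Sketch only: a pool that keeps its cached stacks sorted by min_size
    // so lookups can binary-search instead of scanning linearly.
    struct Stack {
        min_size: usize,
    }

    struct StackPool {
        stacks: Vec<Stack>, // kept sorted by min_size
    }

    impl StackPool {
        fn take_stack(&mut self, min_size: usize) -> Stack {
            // Index of the first cached stack with min_size >= the request.
            let idx = self.stacks.partition_point(|s| s.min_size < min_size);
            if idx < self.stacks.len() {
                // remove() (not swap_remove) keeps the rest of the vector sorted.
                self.stacks.remove(idx)
            } else {
                Stack { min_size }
            }
        }

        fn give_stack(&mut self, stack: Stack) {
            // Insert at the sorted position so later lookups stay O(log n).
            let idx = self.stacks.partition_point(|s| s.min_size < stack.min_size);
            self.stacks.insert(idx, stack);
        }
    }

    fn main() {
        let mut pool = StackPool { stacks: Vec::new() };
        pool.give_stack(Stack { min_size: 8 });
        pool.give_stack(Stack { min_size: 16 });
        // A request for 10 picks the smallest cached stack that fits (16).
        assert_eq!(pool.take_stack(10).min_size, 16);
    }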
@@ -156,3 +156,33 @@ fn rust_valgrind_stack_register(start: *libc::uintptr_t,
                                     end: *libc::uintptr_t) -> libc::c_uint;
     fn rust_valgrind_stack_deregister(id: libc::c_uint);
 }
+
+#[cfg(test)]
+mod tests {
+    use super::StackPool;
+
+    #[test]
+    fn stack_pool_caches() {
+        let mut p = StackPool::new();
+        let s = p.take_stack(10);
+        p.give_stack(s);
+        let s = p.take_stack(4);
+        assert_eq!(s.min_size, 10);
+        p.give_stack(s);
+        let s = p.take_stack(14);
+        assert_eq!(s.min_size, 14);
+        p.give_stack(s);
+    }
+
+    #[test]
+    fn stack_pool_caches_exact() {
+        let mut p = StackPool::new();
+        let mut s = p.take_stack(10);
+        s.valgrind_id = 100;
+        p.give_stack(s);
+        let s = p.take_stack(10);
+        assert_eq!(s.min_size, 10);
+        assert_eq!(s.valgrind_id, 100);
+    }
+}