auto merge of #14764 : jbcrail/rust/fix-more-comments, r=alexcrichton

bors 2014-06-10 15:17:01 -07:00
commit b1302f9c4f
48 changed files with 64 additions and 64 deletions

View File

@@ -184,7 +184,7 @@ fn drop(&mut self) {
 // This fence is needed to prevent reordering of use of the data and
 // deletion of the data. Because it is marked `Release`, the
-// decreasing of the reference count sychronizes with this `Acquire`
+// decreasing of the reference count synchronizes with this `Acquire`
 // fence. This means that use of the data happens before decreasing
 // the refernce count, which happens before this fence, which
 // happens before the deletion of the data.

View File

@@ -539,7 +539,7 @@ pub fn pad(&mut self, s: &str) -> Result {
 }
 /// Runs a callback, emitting the correct padding either before or
-/// afterwards depending on whether right or left alingment is requested.
+/// afterwards depending on whether right or left alignment is requested.
 fn with_padding(&mut self,
 padding: uint,
 default: rt::Alignment,

View File

@@ -105,7 +105,7 @@
 //! *Note: The actual definition of `Writer` uses `IoResult`, which
 //! is just a synonym for `Result<T, IoError>`.*
 //!
-//! This method doesn`t produce a value, but the write may
+//! This method doesn't produce a value, but the write may
 //! fail. It's crucial to handle the error case, and *not* write
 //! something like this:
 //!

View File

@@ -163,7 +163,7 @@ pub struct OptGroup {
 pub occur: Occur
 }
-/// Describes wether an option is given at all or has a value.
+/// Describes whether an option is given at all or has a value.
 #[deriving(Clone, PartialEq)]
 enum Optval {
 Val(String),

View File

@@ -227,7 +227,7 @@ fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
 regs[RUSTRT_R14] = procedure.env as uint;
 regs[RUSTRT_R15] = fptr as uint;
-// These registers are picked up by the regulard context switch paths. These
+// These registers are picked up by the regular context switch paths. These
 // will put us in "mostly the right context" except for frobbing all the
 // arguments to the right place. We have the small trampoline code inside of
 // rust_bootstrap_green_task to do that.

View File

@@ -82,7 +82,7 @@ pub struct Scheduler {
 run_anything: bool,
 /// A fast XorShift rng for scheduler use
 rng: XorShiftRng,
-/// A togglable idle callback
+/// A toggleable idle callback
 idle_callback: Option<Box<PausableIdleCallback:Send>>,
 /// A countdown that starts at a random value and is decremented
 /// every time a yield check is performed. When it hits 0 a task
@@ -287,7 +287,7 @@ fn run_sched_once(mut ~self, stask: Box<GreenTask>) {
 // After processing a message, we consider doing some more work on the
 // event loop. The "keep going" condition changes after the first
-// iteration becase we don't want to spin here infinitely.
+// iteration because we don't want to spin here infinitely.
 //
 // Once we start doing work we can keep doing work so long as the
 // iteration does something. Note that we don't want to starve the

View File

@@ -291,7 +291,7 @@
 extern {}
 /// A wrapper for a nullable pointer. Don't use this except for interacting
-/// with libc. Basically Option, but without the dependance on libstd.
+/// with libc. Basically Option, but without the dependence on libstd.
 // If/when libprim happens, this can be removed in favor of that
 pub enum Nullable<T> {
 Null,
@@ -3497,7 +3497,7 @@ pub mod sysconf {
 pub mod funcs {
-// Thankfull most of c95 is universally available and does not vary by OS
+// Thankfully most of c95 is universally available and does not vary by OS
 // or anything. The same is not true of POSIX.
 pub mod c95 {

View File

@@ -50,7 +50,7 @@
 //! it sounded like named pipes just weren't built for this kind of interaction,
 //! and the suggested solution was to use overlapped I/O.
 //!
-//! I don't realy know what overlapped I/O is, but my basic understanding after
+//! I don't really know what overlapped I/O is, but my basic understanding after
 //! reading about it is that you have an external Event which is used to signal
 //! I/O completion, passed around in some OVERLAPPED structures. As to what this
 //! is, I'm not exactly sure.

View File

@@ -923,7 +923,7 @@ fn waitpid(pid: pid_t, deadline: u64) -> IoResult<rtio::ProcessExit> {
 // Register a new SIGCHLD handler, returning the reading half of the
 // self-pipe plus the old handler registered (return value of sigaction).
 //
-// Be sure to set up the self-pipe first because as soon as we reigster a
+// Be sure to set up the self-pipe first because as soon as we register a
 // handler we're going to start receiving signals.
 fn register_sigchld() -> (libc::c_int, c::sigaction) {
 unsafe {

View File

@@ -166,7 +166,7 @@ fn can_block(&self) -> bool { true }
 //
 // On a mildly unrelated note, it should also be pointed out that OS
 // condition variables are susceptible to spurious wakeups, which we need to
-// be ready for. In order to accomodate for this fact, we have an extra
+// be ready for. In order to accommodate for this fact, we have an extra
 // `awoken` field which indicates whether we were actually woken up via some
 // invocation of `reawaken`. This flag is only ever accessed inside the
 // lock, so there's no need to make it atomic.

View File

@@ -34,7 +34,7 @@ pub enum Inst {
 // The CharClass instruction tries to match one input character against
 // the range of characters given.
-// The flags indicate whether to do a case insentivie match and whether
+// The flags indicate whether to do a case insensitive match and whether
 // the character class is negated or not.
 CharClass(Vec<(char, char)>, Flags),
@@ -48,7 +48,7 @@ pub enum Inst {
 EmptyBegin(Flags),
 // Matches the end of the string, consumes no characters.
-// The flags indicate whether it matches if the proceding character
+// The flags indicate whether it matches if the proceeding character
 // is a new line.
 EmptyEnd(Flags),

View File

@@ -189,7 +189,7 @@ fn describe_codegen_flags() {
 }
 }
-/// Process command line options. Emits messages as appropirate.If compilation
+/// Process command line options. Emits messages as appropriate. If compilation
 /// should continue, returns a getopts::Matches object parsed from args, otherwise
 /// returns None.
 pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {

View File

@@ -551,7 +551,7 @@ struct defined in `middle::borrowck`. Formally, we define `LOAN` as
 The reasoning here is that a mutable borrow must be the only writer,
 therefore it prevents other writes (`MUTATE`), mutable borrows
 (`CLAIM`), and immutable borrows (`FREEZE`). An immutable borrow
-permits other immutable borrows but forbids writes and mutable borows.
+permits other immutable borrows but forbids writes and mutable borrows.
 Finally, a const borrow just wants to be sure that the value is not
 moved out from under it, so no actions are forbidden.

View File

@@ -438,7 +438,7 @@ fn walk_callee(&mut self, call: &ast::Expr, callee: &ast::Expr) {
 None => {
 self.tcx().sess.span_bug(
 callee.span,
-format!("unxpected callee type {}",
+format!("unexpected callee type {}",
 callee_ty.repr(self.tcx())).as_slice());
 }
 }

View File

@@ -257,7 +257,7 @@ pub fn each_adjacent_edge<'a>(&'a self,
 //
 // A common use for graphs in our compiler is to perform
 // fixed-point iteration. In this case, each edge represents a
-// constaint, and the nodes themselves are associated with
+// constraint, and the nodes themselves are associated with
 // variables or other bitsets. This method facilitates such a
 // computation.

View File

@@ -31,7 +31,7 @@
 * is the address of the lvalue. If Expr is an rvalue, this is the address of
 * some temporary spot in memory where the result is stored.
 *
-* Now, cat_expr() classies the expression Expr and the address A=ToAddr(Expr)
+* Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr)
 * as follows:
 *
 * - cat: what kind of expression was this? This is a subset of the
@@ -42,7 +42,7 @@
 *
 * The resulting categorization tree differs somewhat from the expressions
 * themselves. For example, auto-derefs are explicit. Also, an index a[b] is
-* decomposed into two operations: a derefence to reach the array data and
+* decomposed into two operations: a dereference to reach the array data and
 * then an index to jump forward to the relevant item.
 *
 * ## By-reference upvars

View File

@@ -39,7 +39,7 @@
 - `scope_map` maps from a scope id to the enclosing scope id; this is
 usually corresponding to the lexical nesting, though in the case of
-closures the parent scope is the innermost conditinal expression or repeating
+closures the parent scope is the innermost conditional expression or repeating
 block
 - `var_map` maps from a variable or binding id to the block in which

View File

@@ -717,7 +717,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
 let ty = type_of::type_of(bcx.ccx(), *nullfields.get(ix));
 assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
 // The contents of memory at this pointer can't matter, but use
-// the value that's "reasonable" in case of pointer comparision.
+// the value that's "reasonable" in case of pointer comparison.
 PointerCast(bcx, val, ty.ptr_to())
 }
 RawNullablePointer { nndiscr, nnty, .. } => {

View File

@@ -1573,7 +1573,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span,
 for var in variants.iter() {
 let mut size = 0;
 for field in var.fields.iter().skip(1) {
-// skip the dicriminant
+// skip the discriminant
 size += llsize_of_real(ccx, sizing_type_of(ccx, *field));
 }
 sizes.push(size);
@@ -2320,7 +2320,7 @@ pub fn trans_crate(krate: ast::Crate,
 // LLVM code generator emits a ".file filename" directive
 // for ELF backends. Value of the "filename" is set as the
 // LLVM module identifier. Due to a LLVM MC bug[1], LLVM
-// crashes if the module identifer is same as other symbols
+// crashes if the module identifier is same as other symbols
 // such as a function name in the module.
 // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
 let mut llmod_id = link_meta.crateid.name.clone();

View File

@@ -1527,7 +1527,7 @@ fn create_member_descriptions(&self, cx: &CrateContext) -> Vec<MemberDescription
 // As far as debuginfo is concerned, the pointer this enum represents is still
 // wrapped in a struct. This is to make the DWARF representation of enums uniform.
-// First create a description of the artifical wrapper struct:
+// First create a description of the artificial wrapper struct:
 let non_null_variant = self.variants.get(non_null_variant_index as uint);
 let non_null_variant_ident = non_null_variant.name;
 let non_null_variant_name = token::get_ident(non_null_variant_ident);

View File

@@ -204,7 +204,7 @@ pub fn recurse(&mut self, blk: &ast::Block) -> FnStyleState {
 }
 /// Whether `check_binop` is part of an assignment or not.
-/// Used to know wether we allow user overloads and to print
+/// Used to know whether we allow user overloads and to print
 /// better messages on error.
 #[deriving(PartialEq)]
 enum IsBinopAssignment{
@@ -3702,7 +3702,7 @@ pub fn check_const_with_ty(fcx: &FnCtxt,
 e: &ast::Expr,
 declty: ty::t) {
 // Gather locals in statics (because of block expressions).
-// This is technically uneccessary because locals in static items are forbidden,
+// This is technically unnecessary because locals in static items are forbidden,
 // but prevents type checking from blowing up before const checking can properly
 // emit a error.
 GatherLocalsVisitor { fcx: fcx }.visit_expr(e, ());
@@ -4174,7 +4174,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
 }
 None => {
 fcx.tcx().sess.span_bug(span,
-"missing default for a not explicitely provided type param")
+"missing default for a not explicitly provided type param")
 }
 }
 }

View File

@@ -180,7 +180,7 @@ pub fn tys(&self, a: ty::t, b: ty::t) -> CoerceResult {
 self.unpack_actual_value(a, |sty_a| {
 match *sty_a {
 ty::ty_bare_fn(ref a_f) => {
-// Bare functions are coercable to any closure type.
+// Bare functions are coercible to any closure type.
 //
 // FIXME(#3320) this should go away and be
 // replaced with proper inference, got a patch

View File

@@ -372,7 +372,7 @@ fn argvecs<C:Combine>(this: &C, a_args: &[ty::t], b_args: &[ty::t]) -> cres<Vec<
 pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
-// This is a horible hack - historically, [T] was not treated as a type,
+// This is a horrible hack - historically, [T] was not treated as a type,
 // so, for example, &T and &[U] should not unify. In fact the only thing
 // &[U] should unify with is &[T]. We preserve that behaviour with this
 // check.

View File

@@ -17,7 +17,7 @@
 the relevant line of code has been type-checked. Therefore, there is
 an elaborate system to track why a particular constraint in the
 inference graph arose so that we can explain to the user what gave
-rise to a patricular error.
+rise to a particular error.
 The basis of the system are the "origin" types. An "origin" is the
 reason that a constraint or inference variable arose. There are

View File

@@ -19,7 +19,7 @@
 * The code in here is defined quite generically so that it can be
 * applied both to type variables, which represent types being inferred,
 * and fn variables, which represent function types being inferred.
-* It may eventually be applied to ther types as well, who knows.
+* It may eventually be applied to their types as well, who knows.
 * In some cases, the functions are also generic with respect to the
 * operation on the lattice (GLB vs LUB).
 *

View File

@@ -362,7 +362,7 @@ fn foo() { // 'foo is the function body
 Note that such components must consist solely of region variables; all
 of these variables can effectively be unified into a single variable.
 Once SCCs are removed, we are left with a DAG. At this point, we
-could walk the DAG in toplogical order once to compute the expanding
+could walk the DAG in topological order once to compute the expanding
 nodes, and again in reverse topological order to compute the
 contracting nodes. However, as I said, this does not work given the
 current treatment of closure bounds, but perhaps in the future we can
@@ -617,7 +617,7 @@ fn<a,b>(&a, &b, &a) fn<x,y>(&x, &y, &y) fn<a>(&a, &a, &a) fn<a,b,c>(&a,&b,&c)
 contains 'intermediate' variables created to represent the LUB/GLB of
 individual regions. Basically, when asked to compute the LUB/GLB of a
 region variable with another region, the inferencer cannot oblige
-immediately since the valuese of that variables are not known.
+immediately since the values of that variables are not known.
 Therefore, it creates a new variable that is related to the two
 regions. For example, the LUB of two variables `$x` and `$y` is a
 fresh variable `$z` that is constrained such that `$x <= $z` and `$y

View File

@@ -483,7 +483,7 @@ fn visit_item(&mut self, item: &ast::Item, _: ()) {
 let variant =
 ty::VariantInfo::from_ast_variant(tcx,
 ast_variant,
-/*discrimant*/ 0);
+/*discriminant*/ 0);
 for &arg_ty in variant.args.iter() {
 self.add_constraints_from_ty(arg_ty, self.covariant);
 }

View File

@@ -61,12 +61,12 @@ fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
 let (new_high_bits, new_low_bits) = bytes.to_bits();
 if new_high_bits > Zero::zero() {
-fail!("numeric overflow occured.")
+fail!("numeric overflow occurred.")
 }
 match bits.checked_add(&new_low_bits) {
 Some(x) => return x,
-None => fail!("numeric overflow occured.")
+None => fail!("numeric overflow occurred.")
 }
 }

View File

@@ -391,7 +391,7 @@ fn parse_lang_string(string: &str) -> (bool,bool,bool,bool) {
 }
 /// By default this markdown renderer generates anchors for each header in the
-/// rendered document. The anchor name is the contents of the header spearated
+/// rendered document. The anchor name is the contents of the header separated
 /// by hyphens, and a task-local map is used to disambiguate among duplicate
 /// headers (numbers are appended).
 ///

View File

@@ -49,11 +49,11 @@ pub struct TocEntry {
 #[deriving(PartialEq)]
 pub struct TocBuilder {
 top_level: Toc,
-/// The current heirachy of parent headings, the levels are
+/// The current hierarchy of parent headings, the levels are
 /// strictly increasing (i.e. chain[0].level < chain[1].level <
-/// ...) with each entry being the most recent occurance of a
+/// ...) with each entry being the most recent occurrence of a
 /// heading with that level (it doesn't include the most recent
-/// occurences of every level, just, if *is* in `chain` then is is
+/// occurrences of every level, just, if *is* in `chain` then is is
 /// the most recent one).
 ///
 /// We also have `chain[0].level <= top_level.entries[last]`.
@@ -123,7 +123,7 @@ fn fold_until(&mut self, level: u32) {
 }
 /// Push a level `level` heading into the appropriate place in the
-/// heirarchy, returning a string containing the section number in
+/// hierarchy, returning a string containing the section number in
 /// `<num>.<num>.<num>` format.
 pub fn push<'a>(&'a mut self, level: u32, name: String, id: String) -> &'a str {
 assert!(level >= 1);

View File

@@ -383,7 +383,7 @@ fn should_leave_multiple_indent_levels() {
 #[test]
 fn should_ignore_first_line_indent() {
-// Thi first line of the first paragraph may not be indented as
+// The first line of the first paragraph may not be indented as
 // far due to the way the doc string was written:
 //
 // #[doc = "Start way over here

View File

@@ -163,7 +163,7 @@ pub fn run(~self, mut f: ||) -> Box<Task> {
 // Here we must unsafely borrow the task in order to not remove it from
 // TLS. When collecting failure, we may attempt to send on a channel (or
-// just run aribitrary code), so we must be sure to still have a local
+// just run arbitrary code), so we must be sure to still have a local
 // task in TLS.
 unsafe {
 let me: *mut Task = Local::unsafe_borrow();

View File

@@ -395,7 +395,7 @@ pub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> !
 /// The core of the unwinding.
 ///
 /// This is non-generic to avoid instantiation bloat in other crates
-/// (which makes compilation of small crates noticably slower). (Note:
+/// (which makes compilation of small crates noticeably slower). (Note:
 /// we need the `Any` object anyway, we're not just creating it to
 /// avoid being generic.)
 ///
@@ -408,7 +408,7 @@ fn begin_unwind_inner(msg: Box<Any:Send>,
 // First, invoke call the user-defined callbacks triggered on task failure.
 //
 // By the time that we see a callback has been registered (by reading
-// MAX_CALLBACKS), the actuall callback itself may have not been stored yet,
+// MAX_CALLBACKS), the actual callback itself may have not been stored yet,
 // so we just chalk it up to a race condition and move on to the next
 // callback. Additionally, CALLBACK_CNT may briefly be higher than
 // MAX_CALLBACKS, so we're sure to clamp it as necessary.

View File

@@ -212,7 +212,7 @@ fn new(s: &'static str) -> ForbidSwitch {
 impl Drop for ForbidSwitch {
 fn drop(&mut self) {
 assert!(self.io == homing::local_id(),
-"didnt want a scheduler switch: {}",
+"didn't want a scheduler switch: {}",
 self.msg);
 }
 }

View File

@@ -147,7 +147,7 @@ pub fn write(&mut self, buf: &[u8], may_timeout: bool) -> Result<(), UvError> {
 // function is why that wording exists.
 //
 // Implementation-wise, we must be careful when passing a buffer down to
-// libuv. Most of this implementation avoids allocations becuase of the
+// libuv. Most of this implementation avoids allocations because of the
 // blocking guarantee (all stack local variables are valid for the
 // entire read/write request). If our write request can be timed out,
 // however, we must heap allocate the data and pass that to the libuv
@@ -164,7 +164,7 @@ pub fn write(&mut self, buf: &[u8], may_timeout: bool) -> Result<(), UvError> {
 };
 // Send off the request, but be careful to not block until we're sure
-// that the write reqeust is queued. If the reqeust couldn't be queued,
+// that the write request is queued. If the request couldn't be queued,
 // then we should return immediately with an error.
 match unsafe {
 uvll::uv_write(req.handle, self.handle, [uv_buf], write_cb)

View File

@@ -542,7 +542,7 @@ pub fn send(&self, t: T) {
 /// ```
 pub fn send_opt(&self, t: T) -> Result<(), T> {
 // In order to prevent starvation of other tasks in situations where
-// a task sends repeatedly without ever receiving, we occassionally
+// a task sends repeatedly without ever receiving, we occasionally
 // yield instead of doing a send immediately.
 //
 // Don't unconditionally attempt to yield because the TLS overhead can

View File

@@ -513,7 +513,7 @@ fn test_mutex_arc_poison() {
 #[test]
 fn test_mutex_arc_nested() {
 // Tests nested mutexes and access
-// to underlaying data.
+// to underlying data.
 let arc = Arc::new(Mutex::new(1));
 let arc2 = Arc::new(Mutex::new(arc));
 task::spawn(proc() {

View File

@@ -71,7 +71,7 @@ pub fn doit(&self, f: ||) {
 // Implementation-wise, this would seem like a fairly trivial primitive.
 // The stickler part is where our mutexes currently require an
-// allocation, and usage of a `Once` should't leak this allocation.
+// allocation, and usage of a `Once` shouldn't leak this allocation.
 //
 // This means that there must be a deterministic destroyer of the mutex
 // contained within (because it's not needed after the initialization

View File

@@ -195,7 +195,7 @@ pub fn any(sp: Span) -> Box<MacResult> {
 /// Create a default MacResult that can only be an expression.
 ///
 /// Use this for macros that must expand to an expression, so even
-/// if an error is encountered internally, the user will recieve
+/// if an error is encountered internally, the user will receive
 /// an error that they also used it in the wrong place.
 pub fn expr(sp: Span) -> Box<MacResult> {
 box DummyResult { expr_only: true, span: sp } as Box<MacResult>

View File

@@ -77,7 +77,7 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, substr: &Substru
 ```
 The optimiser should remove the redundancy. We explicitly
-get use the binops to avoid auto-deref derefencing too many
+get use the binops to avoid auto-deref dereferencing too many
 layers of pointers, if the type includes pointers.
 */
 let other_f = match other_fs {

View File

@@ -723,7 +723,7 @@ fn expand_struct_method_body(&self,
 &Struct(fields));
 // make a series of nested matches, to destructure the
-// structs. This is actually right-to-left, but it shoudn't
+// structs. This is actually right-to-left, but it shouldn't
 // matter.
 for (&arg_expr, &pat) in self_args.iter().zip(patterns.iter()) {
 body = cx.expr_match(trait_.span, arg_expr,

View File

@@ -58,7 +58,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
 dotdotdoted: false,
 sep: None,
 }),
-interpolations: match interp { /* just a convienience */
+interpolations: match interp { /* just a convenience */
 None => HashMap::new(),
 Some(x) => x,
 },

View File

@@ -112,7 +112,7 @@ fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
 }
 // Parse attributes that appear after the opening of an item. These should
-// be preceded by an exclaimation mark, but we accept and warn about one
+// be preceded by an exclamation mark, but we accept and warn about one
 // terminated by a semicolon. In addition to a vector of inner attributes,
 // this function also returns a vector that may contain the first outer
 // attribute of the next item (since we can't know whether the attribute

View File

@@ -2582,7 +2582,7 @@ pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::Ident>) -> @Expr {
 self.mk_expr(lo, hi, ExprLoop(body, opt_ident))
 }
-// For distingishing between struct literals and blocks
+// For distinguishing between struct literals and blocks
 fn looking_at_struct_literal(&mut self) -> bool {
 self.token == token::LBRACE &&
 ((self.look_ahead(1, |t| token::is_plain_ident(t)) &&

View File

@@ -233,7 +233,7 @@ pub fn mk_printer(out: Box<io::Writer>, linewidth: uint) -> Printer {
 *
 * There is a parallel ring buffer, 'size', that holds the calculated size of
 * each token. Why calculated? Because for Begin/End pairs, the "size"
-* includes everything betwen the pair. That is, the "size" of Begin is
+* includes everything between the pair. That is, the "size" of Begin is
 * actually the sum of the sizes of everything between Begin and the paired
 * End that follows. Since that is arbitrarily far in the future, 'size' is
 * being rewritten regularly while the printer runs; in fact most of the
@@ -434,7 +434,7 @@ pub fn advance_right(&mut self) {
 assert!((self.right != self.left));
 }
 pub fn advance_left(&mut self, x: Token, l: int) -> io::IoResult<()> {
-debug!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right,
+debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
 self.left, l);
 if l >= 0 {
 let ret = self.print(x.clone(), l);

View File

@@ -62,7 +62,7 @@ pub fn generics_of_fn(fk: &FnKind) -> Generics {
 }
 /// Each method of the Visitor trait is a hook to be potentially
-/// overriden. Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
 /// the substructure of the input via the corresponding `walk` method;
 /// e.g. the `visit_mod` method by default calls `visit::walk_mod`.
 ///

View File

@@ -32,7 +32,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> {
 Some(dir) => dirs_to_search.push(Path::new(dir)),
 None => {
 if homedir.is_some() {
-// ncurses compatability;
+// ncurses compatibility;
 dirs_to_search.push(homedir.unwrap().join(".terminfo"))
 }
 match getenv("TERMINFO_DIRS") {

View File

@@ -462,7 +462,7 @@ impl FromStr for Uuid {
 /// Parse a hex string and interpret as a UUID
 ///
 /// Accepted formats are a sequence of 32 hexadecimal characters,
-/// with or without hypens (grouped as 8, 4, 4, 4, 12).
+/// with or without hyphens (grouped as 8, 4, 4, 4, 12).
 fn from_str(us: &str) -> Option<Uuid> {
 let result = Uuid::parse_string(us);
 match result {
@@ -492,7 +492,7 @@ impl Eq for Uuid {}
 // FIXME #9845: Test these more thoroughly
 impl<T: Encoder<E>, E> Encodable<T, E> for Uuid {
-/// Encode a UUID as a hypenated string
+/// Encode a UUID as a hyphenated string
 fn encode(&self, e: &mut T) -> Result<(), E> {
 e.emit_str(self.to_hyphenated_str().as_slice())
 }