fix the remaining warnings

Erick Tryzelaar 2014-10-06 21:50:22 -07:00
parent da34268be1
commit 538fcf244f
2 changed files with 32 additions and 30 deletions

View File

@@ -107,7 +107,7 @@ pub enum TokenKind {
     EndKind,
 }

-static primitive_token_kinds: [TokenKind, .. 12] = [
+static PRIMITIVE_TOKEN_KINDS: [TokenKind, .. 12] = [
     IntKind,
     I8Kind,
     I16Kind,
@@ -122,12 +122,12 @@ static primitive_token_kinds: [TokenKind, .. 12] = [
     F64Kind,
 ];

-static str_token_kinds: [TokenKind, .. 2] = [
+static STR_TOKEN_KINDS: [TokenKind, .. 2] = [
     StrKind,
     StringKind,
 ];

-static compound_token_kinds: [TokenKind, .. 6] = [
+static COMPOUND_TOKEN_KINDS: [TokenKind, .. 6] = [
     OptionKind,
     EnumStartKind,
     StructStartKind,
@@ -236,19 +236,19 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
     #[inline]
     fn expect_num<T: NumCast>(&mut self, token: Token) -> Result<T, E> {
         match token {
-            Int(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            I8(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            I16(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            I32(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            I64(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            Uint(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            U8(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            U16(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            U32(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            U64(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            F32(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            F64(x) => to_result!(num::cast(x), self.syntax_error(token, primitive_token_kinds)),
-            token => Err(self.syntax_error(token, primitive_token_kinds)),
+            Int(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            I8(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            I16(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            I32(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            I64(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            Uint(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            U8(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            U16(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            U32(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            U64(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            F32(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            F64(x) => to_result!(num::cast(x), self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
+            token => Err(self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
         }
     }
@@ -267,7 +267,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
             U64(x) => to_result!(num::from_u64(x), self.conversion_error(token)),
             F32(x) => to_result!(num::from_f32(x), self.conversion_error(token)),
             F64(x) => to_result!(num::from_f64(x), self.conversion_error(token)),
-            token => Err(self.syntax_error(token, primitive_token_kinds)),
+            token => Err(self.syntax_error(token, PRIMITIVE_TOKEN_KINDS)),
         }
     }
@@ -289,7 +289,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
     fn expect_str(&mut self, token: Token) -> Result<&'static str, E> {
         match token {
             Str(value) => Ok(value),
-            token => Err(self.syntax_error(token, str_token_kinds)),
+            token => Err(self.syntax_error(token, STR_TOKEN_KINDS)),
         }
     }
@@ -299,7 +299,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
             Char(value) => Ok(value.to_string()),
             Str(value) => Ok(value.to_string()),
             String(value) => Ok(value),
-            token => Err(self.syntax_error(token, str_token_kinds)),
+            token => Err(self.syntax_error(token, STR_TOKEN_KINDS)),
         }
     }
@@ -371,7 +371,7 @@ pub trait Deserializer<E>: Iterator<Result<Token, E>> {
                 }
             }
             token => {
-                return Err(self.syntax_error(token, str_token_kinds));
+                return Err(self.syntax_error(token, STR_TOKEN_KINDS));
             }
         }
@@ -842,7 +842,7 @@ impl<D: Deserializer<E>, E> Deserializable<D, E> for IgnoreTokens {
             }
             End => {
-                Err(d.syntax_error(token, compound_token_kinds))
+                Err(d.syntax_error(token, COMPOUND_TOKEN_KINDS))
             }
             _ => Ok(IgnoreTokens),
@@ -903,7 +903,7 @@ impl GatherTokens {
                 self.gather_map(d)
             }
             End => {
-                Err(d.syntax_error(token, compound_token_kinds))
+                Err(d.syntax_error(token, COMPOUND_TOKEN_KINDS))
             }
             token => {
                 self.tokens.push(token);

View File

@@ -906,21 +906,22 @@ fn fmt_f64_or_null<W: Writer>(wr: &mut W, v: f64) -> IoResult<()> {
 }

 fn spaces<W: Writer>(wr: &mut W, mut n: uint) -> IoResult<()> {
-    static len: uint = 16;
-    static buf: [u8, ..len] = [b' ', ..len];
+    static LEN: uint = 16;
+    static BUF: [u8, ..LEN] = [b' ', ..LEN];

-    while n >= len {
-        try!(wr.write(buf));
-        n -= len;
+    while n >= LEN {
+        try!(wr.write(BUF));
+        n -= LEN;
     }

     if n > 0 {
-        wr.write(buf.slice_to(n))
+        wr.write(BUF.slice_to(n))
     } else {
         Ok(())
     }
 }

+/*
 #[deriving(Show)]
 enum SerializerState {
     ValueState,
@@ -928,6 +929,7 @@ enum SerializerState {
     StructState,
     EnumState,
 }
+*/

 /// A structure for implementing serialization to JSON.
 pub struct Serializer<W> {
@@ -1484,7 +1486,7 @@ enum ParserState {
     // Parse ',' or ']' after an element in an object.
     ParseObjectCommaOrEnd,
     // Parse a key in an object.
-    ParseObjectKey,
+    //ParseObjectKey,
     // Parse a value in an object.
     ParseObjectValue,
 }
@@ -1669,7 +1671,7 @@ impl<Iter: Iterator<char>> Iterator<Result<de::Token, ParserError>> for Parser<I
             ParseListCommaOrEnd => Some(self.parse_list_comma_or_end()),
             ParseObjectStart => Some(self.parse_object_start()),
             ParseObjectCommaOrEnd => Some(self.parse_object_comma_or_end()),
-            ParseObjectKey => Some(self.parse_object_key()),
+            //ParseObjectKey => Some(self.parse_object_key()),
             ParseObjectValue => Some(self.parse_object_value()),
         }
     }