auto merge of #14520 : Ryman/rust/SnakeCaseLint, r=alexcrichton

This enforces `snake_case` for functions and methods only. It might be worth extending it to fields and locals too at some point in the future.

A number of breaking changes, each detailed in the attached commits.
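For illustration, a minimal sketch of what the new lint warns on and how a crate can opt out. The function names below are hypothetical; the `#[allow(non_snake_case_functions)]` attribute and the "should have a snake case identifier" wording are the ones introduced by this patch (later Rust folds this into the general `non_snake_case` lint).

```rust
// Warns: "function `compute_Total` should have a snake case identifier"
fn compute_Total(v: uint) -> uint { v + 1 }

// OK: snake_case name, no warning
fn compute_total(v: uint) -> uint { v + 1 }

// FFI shims and generated code can silence the lint per item or per module,
// as several modules touched by this patch do.
#[allow(non_snake_case_functions)]
fn CamelCaseFfiShim() {}

fn main() {
    compute_Total(1u);
    compute_total(1u);
    CamelCaseFfiShim();
}
```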
This commit is contained in:
bors 2014-05-30 11:01:37 -07:00
commit 24e489f1e1
71 changed files with 609 additions and 492 deletions

View File

@ -73,7 +73,7 @@ fn run_cfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if proc_res.status.success() {
fatal_ProcRes("compile-fail test compiled successfully!".to_string(),
fatal_proc_rec("compile-fail test compiled successfully!".to_string(),
&proc_res);
}
@ -97,7 +97,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
exec_compiled_test(config, props, testfile)
@ -108,7 +108,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
// The value our Makefile configures valgrind to return on failure
static VALGRIND_ERR: int = 100;
if proc_res.status.matches_exit_status(VALGRIND_ERR) {
fatal_ProcRes("run-fail test isn't valgrind-clean!".to_string(),
fatal_proc_rec("run-fail test isn't valgrind-clean!".to_string(),
&proc_res);
}
@ -120,7 +120,7 @@ fn check_correct_failure_status(proc_res: &ProcRes) {
// The value the rust runtime returns on failure
static RUST_ERR: int = 101;
if !proc_res.status.matches_exit_status(RUST_ERR) {
fatal_ProcRes(
fatal_proc_rec(
format!("failure produced the wrong error: {}", proc_res.status),
proc_res);
}
@ -131,19 +131,19 @@ fn run_rpass_test(config: &Config, props: &TestProps, testfile: &Path) {
let mut proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = exec_compiled_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("test run failed!".to_string(), &proc_res);
fatal_proc_rec("test run failed!".to_string(), &proc_res);
}
} else {
let proc_res = jit_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("jit failed!".to_string(), &proc_res);
fatal_proc_rec("jit failed!".to_string(), &proc_res);
}
}
}
@ -172,7 +172,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
"normal");
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printing failed in round {}", round),
fatal_proc_rec(format!("pretty-printing failed in round {}", round),
&proc_res);
}
@ -204,7 +204,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = typecheck_source(config, props, testfile, actual);
if !proc_res.status.success() {
fatal_ProcRes("pretty-printed source does not typecheck".to_string(),
fatal_proc_rec("pretty-printed source does not typecheck".to_string(),
&proc_res);
}
if props.no_pretty_expanded { return }
@ -212,13 +212,13 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
// additionally, run `--pretty expanded` and try to build it.
let proc_res = print_source(config, props, testfile, (*srcs.get(round)).clone(), "expanded");
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printing (expanded) failed"), &proc_res);
fatal_proc_rec(format!("pretty-printing (expanded) failed"), &proc_res);
}
let ProcRes{ stdout: expanded_src, .. } = proc_res;
let proc_res = typecheck_source(config, props, testfile, expanded_src);
if !proc_res.status.success() {
fatal_ProcRes(format!("pretty-printed source (expanded) does \
fatal_proc_rec(format!("pretty-printed source (expanded) does \
not typecheck"),
&proc_res);
}
@ -326,7 +326,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
// compile test file (it should have 'compile-flags:-g' in the header)
let compiler_run_result = compile_test(config, props, testfile);
if !compiler_run_result.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &compiler_run_result);
fatal_proc_rec("compilation failed!".to_string(), &compiler_run_result);
}
let exe_file = make_exe_name(config, testfile);
@ -517,7 +517,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
// compile test file (it should have 'compile-flags:-g' in the header)
let compile_result = compile_test(config, props, testfile);
if !compile_result.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &compile_result);
fatal_proc_rec("compilation failed!".to_string(), &compile_result);
}
let exe_file = make_exe_name(config, testfile);
@ -560,7 +560,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
let debugger_run_result = run_lldb(config, &exe_file, &debugger_script);
if !debugger_run_result.status.success() {
fatal_ProcRes("Error while running LLDB".to_string(),
fatal_proc_rec("Error while running LLDB".to_string(),
&debugger_run_result);
}
@ -720,7 +720,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
}
}
if i != num_check_lines {
fatal_ProcRes(format!("line not found in debugger output: {}",
fatal_proc_rec(format!("line not found in debugger output: {}",
check_lines.get(i).unwrap()),
debugger_run_result);
}
@ -764,14 +764,14 @@ fn check_error_patterns(props: &TestProps,
let missing_patterns =
props.error_patterns.slice(next_err_idx, props.error_patterns.len());
if missing_patterns.len() == 1u {
fatal_ProcRes(format!("error pattern '{}' not found!",
fatal_proc_rec(format!("error pattern '{}' not found!",
missing_patterns[0]),
proc_res);
} else {
for pattern in missing_patterns.iter() {
error(format!("error pattern '{}' not found!", *pattern));
}
fatal_ProcRes("multiple error patterns not found".to_string(),
fatal_proc_rec("multiple error patterns not found".to_string(),
proc_res);
}
}
@ -779,7 +779,7 @@ fn check_error_patterns(props: &TestProps,
fn check_no_compiler_crash(proc_res: &ProcRes) {
for line in proc_res.stderr.as_slice().lines() {
if line.starts_with("error: internal compiler error:") {
fatal_ProcRes("compiler encountered internal error".to_string(),
fatal_proc_rec("compiler encountered internal error".to_string(),
proc_res);
}
}
@ -857,7 +857,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
}
if !was_expected && is_compiler_error_or_warning(line) {
fatal_ProcRes(format!("unexpected compiler error or warning: '{}'",
fatal_proc_rec(format!("unexpected compiler error or warning: '{}'",
line),
proc_res);
}
@ -866,7 +866,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
for (i, &flag) in found_flags.iter().enumerate() {
if !flag {
let ee = expected_errors.get(i);
fatal_ProcRes(format!("expected {} on line {} not found: {}",
fatal_proc_rec(format!("expected {} on line {} not found: {}",
ee.kind, ee.line, ee.msg),
proc_res);
}
@ -1047,7 +1047,7 @@ fn compose_and_run_compiler(
config.compile_lib_path.as_slice(),
None);
if !auxres.status.success() {
fatal_ProcRes(
fatal_proc_rec(
format!("auxiliary build of {} failed to compile: ",
abs_ab.display()),
&auxres);
@ -1286,7 +1286,7 @@ fn error(err: String) { println!("\nerror: {}", err); }
fn fatal(err: String) -> ! { error(err); fail!(); }
fn fatal_ProcRes(err: String, proc_res: &ProcRes) -> ! {
fn fatal_proc_rec(err: String, proc_res: &ProcRes) -> ! {
print!("\n\
error: {}\n\
status: {}\n\
@ -1562,35 +1562,35 @@ fn run_codegen_test(config: &Config, props: &TestProps,
let mut proc_res = compile_test_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_string(),
fatal_proc_rec("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
fatal_proc_rec("disassembling extract failed".to_string(), &proc_res);
}
let mut proc_res = compile_cc_with_clang_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
fatal_proc_rec("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_string(),
fatal_proc_rec("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
fatal_proc_rec("disassembling extract failed".to_string(), &proc_res);
}
let base = output_base_name(config, testfile);

View File

@ -476,6 +476,7 @@ extern crate libc;
#[cfg(target_os = "win32", target_arch = "x86")]
#[link(name = "kernel32")]
#[allow(non_snake_case_functions)]
extern "stdcall" {
fn SetEnvironmentVariableA(n: *u8, v: *u8) -> libc::c_int;
}

View File

@ -296,20 +296,20 @@ mod test_map {
// given a new key, initialize it with this new count,
// given an existing key, add more to its count
fn addMoreToCount(_k: uint, v0: uint, v1: uint) -> uint {
fn add_more_to_count(_k: uint, v0: uint, v1: uint) -> uint {
v0 + v1
}
fn addMoreToCount_simple(v0: uint, v1: uint) -> uint {
fn add_more_to_count_simple(v0: uint, v1: uint) -> uint {
v0 + v1
}
// count integers
map.update(3, 1, addMoreToCount_simple);
map.update_with_key(9, 1, addMoreToCount);
map.update(3, 7, addMoreToCount_simple);
map.update_with_key(5, 3, addMoreToCount);
map.update_with_key(3, 2, addMoreToCount);
map.update(3, 1, add_more_to_count_simple);
map.update_with_key(9, 1, add_more_to_count);
map.update(3, 7, add_more_to_count_simple);
map.update_with_key(5, 3, add_more_to_count);
map.update_with_key(3, 2, add_more_to_count);
// check the total counts
assert_eq!(map.find(&3).unwrap(), &10);

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -23,6 +23,7 @@
//! however the converse is not always true due to the above range limits
//! and, as such, should be performed via the `from_u32` function.
#![allow(non_snake_case_functions)]
use mem::transmute;
use option::{None, Option, Some};

View File

@ -10,7 +10,7 @@
// NOTE: The following code was generated by "src/etc/unicode.py", do not edit directly
#![allow(missing_doc, non_uppercase_statics)]
#![allow(missing_doc, non_uppercase_statics, non_snake_case_functions)]
fn bsearch_range_table(c: char, r: &'static [(char,char)]) -> bool {

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -42,11 +42,12 @@ pub fn align(size: uint, align: uint) -> uint {
pub struct MovePtrAdaptor<V> {
inner: V
}
pub fn MovePtrAdaptor<V:TyVisitor + MovePtr>(v: V) -> MovePtrAdaptor<V> {
MovePtrAdaptor { inner: v }
}
impl<V:TyVisitor + MovePtr> MovePtrAdaptor<V> {
pub fn new(v: V) -> MovePtrAdaptor<V> {
MovePtrAdaptor { inner: v }
}
#[inline]
pub fn bump(&mut self, sz: uint) {
self.inner.move_ptr(|p| ((p as uint) + sz) as *u8)

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -99,17 +99,6 @@ pub struct ReprVisitor<'a> {
last_err: Option<io::IoError>,
}
pub fn ReprVisitor<'a>(ptr: *u8,
writer: &'a mut io::Writer) -> ReprVisitor<'a> {
ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: writer,
last_err: None,
}
}
impl<'a> MovePtr for ReprVisitor<'a> {
#[inline]
fn move_ptr(&mut self, adjustment: |*u8| -> *u8) {
@ -125,6 +114,15 @@ impl<'a> MovePtr for ReprVisitor<'a> {
impl<'a> ReprVisitor<'a> {
// Various helpers for the TyVisitor impl
pub fn new(ptr: *u8, writer: &'a mut io::Writer) -> ReprVisitor<'a> {
ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: writer,
last_err: None,
}
}
#[inline]
pub fn get<T>(&mut self, f: |&mut ReprVisitor, &T| -> bool) -> bool {
@ -141,16 +139,8 @@ impl<'a> ReprVisitor<'a> {
#[inline]
pub fn visit_ptr_inner(&mut self, ptr: *u8, inner: *TyDesc) -> bool {
unsafe {
// This should call the constructor up above, but due to limiting
// issues we have to recreate it here.
let u = ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: mem::transmute_copy(&self.writer),
last_err: None,
};
let mut v = reflect::MovePtrAdaptor(u);
let u = ReprVisitor::new(ptr, mem::transmute_copy(&self.writer));
let mut v = reflect::MovePtrAdaptor::new(u);
// Obviously this should not be a thing, but blame #8401 for now
visit_tydesc(inner, &mut v as &mut TyVisitor);
match v.unwrap().last_err {
@ -584,8 +574,8 @@ pub fn write_repr<T>(writer: &mut io::Writer, object: &T) -> io::IoResult<()> {
unsafe {
let ptr = object as *T as *u8;
let tydesc = get_tydesc::<T>();
let u = ReprVisitor(ptr, writer);
let mut v = reflect::MovePtrAdaptor(u);
let u = ReprVisitor::new(ptr, writer);
let mut v = reflect::MovePtrAdaptor::new(u);
visit_tydesc(tydesc, &mut v as &mut TyVisitor);
match v.unwrap().last_err {
Some(e) => Err(e),

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -71,6 +71,7 @@
*/
#![allow(non_camel_case_types)]
#![allow(non_snake_case_functions)]
#![allow(non_uppercase_statics)]
#![allow(missing_doc)]
#![allow(uppercase_variables)]

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -21,6 +21,8 @@
//! play. The only dependencies of these modules are the normal system libraries
//! that you would find on the respective platform.
#![allow(non_snake_case_functions)]
use libc::c_int;
use libc;
use std::c_str::CString;

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -758,6 +758,7 @@ fn free_handle(_handle: *()) {
#[cfg(unix)]
fn translate_status(status: c_int) -> p::ProcessExit {
#![allow(non_snake_case_functions)]
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
mod imp {

View File

@ -7,6 +7,7 @@
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_snake_case_functions)]
use std::rand::{Rng, task_rng};
use std::str;

View File

@ -69,7 +69,7 @@ pub fn llvm_err(sess: &Session, msg: String) -> ! {
}
}
pub fn WriteOutputFile(
pub fn write_output_file(
sess: &Session,
target: lib::llvm::TargetMachineRef,
pm: lib::llvm::PassManagerRef,
@ -90,7 +90,7 @@ pub fn WriteOutputFile(
pub mod write {
use back::lto;
use back::link::{WriteOutputFile, OutputType};
use back::link::{write_output_file, OutputType};
use back::link::{OutputTypeAssembly, OutputTypeBitcode};
use back::link::{OutputTypeExe, OutputTypeLlvmAssembly};
use back::link::{OutputTypeObject};
@ -310,7 +310,7 @@ pub mod write {
output.temp_path(OutputTypeAssembly)
};
with_codegen(tm, llmod, trans.no_builtins, |cpm| {
WriteOutputFile(sess, tm, cpm, llmod, &path,
write_output_file(sess, tm, cpm, llmod, &path,
lib::llvm::AssemblyFile);
});
}
@ -328,7 +328,7 @@ pub mod write {
match object_file {
Some(ref path) => {
with_codegen(tm, llmod, trans.no_builtins, |cpm| {
WriteOutputFile(sess, tm, cpm, llmod, path,
write_output_file(sess, tm, cpm, llmod, path,
lib::llvm::ObjectFile);
});
}
@ -339,7 +339,7 @@ pub mod write {
trans.no_builtins, |cpm| {
let out = output.temp_path(OutputTypeObject)
.with_extension("metadata.o");
WriteOutputFile(sess, tm, cpm,
write_output_file(sess, tm, cpm,
trans.metadata_module, &out,
lib::llvm::ObjectFile);
})

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -10,6 +10,7 @@
#![allow(non_uppercase_pattern_statics)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case_functions)]
#![allow(dead_code)]
use std::c_str::ToCStr;

View File

@ -19,7 +19,8 @@ use middle::lang_items;
use middle::ty;
use middle::typeck;
use reader = serialize::ebml::reader;
use serialize::ebml;
use serialize::ebml::reader;
use std::rc::Rc;
use syntax::ast;
use syntax::ast_map;
@ -206,7 +207,7 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId,
def: ast::DefId) -> ty::ty_param_bounds_and_ty {
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(class_id.krate);
let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
let all_items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_items);
let class_doc = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(class_id.node, all_items),
|| {

View File

@ -99,7 +99,7 @@ fn find_item<'a>(item_id: ast::NodeId, items: ebml::Doc<'a>) -> ebml::Doc<'a> {
// Looks up an item in the given metadata and returns an ebml doc pointing
// to the item data.
fn lookup_item<'a>(item_id: ast::NodeId, data: &'a [u8]) -> ebml::Doc<'a> {
let items = reader::get_doc(reader::Doc(data), tag_items);
let items = reader::get_doc(ebml::Doc::new(data), tag_items);
find_item(item_id, items)
}
@ -383,7 +383,7 @@ pub fn get_trait_def(cdata: Cmd,
tag_items_data_item_ty_param_bounds);
let rp_defs = item_region_param_defs(item_doc, cdata);
let sized = item_sized(item_doc);
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
// Collect the builtin bounds from the encoded supertraits.
// FIXME(#8559): They should be encoded directly.
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
@ -443,7 +443,7 @@ pub fn get_impl_vtables(cdata: Cmd,
{
let item_doc = lookup_item(id, cdata.data());
let vtables_doc = reader::get_doc(item_doc, tag_item_impl_vtables);
let mut decoder = reader::Decoder(vtables_doc);
let mut decoder = reader::Decoder::new(vtables_doc);
typeck::impl_res {
trait_vtables: decoder.read_vtable_res(tcx, cdata),
@ -466,7 +466,7 @@ pub enum DefLike {
/// Iterates over the language items in the given crate.
pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let root = reader::Doc(cdata.data());
let root = ebml::Doc::new(cdata.data());
let lang_items = reader::get_doc(root, tag_lang_items);
reader::tagged_docs(lang_items, tag_lang_items_item, |item_doc| {
let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
@ -506,7 +506,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
None => cdata
};
let other_crates_items = reader::get_doc(reader::Doc(crate_data.data()), tag_items);
let other_crates_items = reader::get_doc(ebml::Doc::new(crate_data.data()), tag_items);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
@ -534,7 +534,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
|inherent_impl_def_id_doc| {
let inherent_impl_def_id = item_def_id(inherent_impl_def_id_doc,
cdata);
let items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
let items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_items);
match maybe_find_item(inherent_impl_def_id.node, items) {
None => {}
Some(inherent_impl_doc) => {
@ -599,7 +599,7 @@ fn each_child_of_item_or_crate(intr: Rc<IdentInterner>,
None => cdata
};
let other_crates_items = reader::get_doc(reader::Doc(crate_data.data()), tag_items);
let other_crates_items = reader::get_doc(ebml::Doc::new(crate_data.data()), tag_items);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
@ -626,7 +626,7 @@ pub fn each_child_of_item(intr: Rc<IdentInterner>,
get_crate_data: GetCrateDataCb,
callback: |DefLike, ast::Ident, ast::Visibility|) {
// Find the item.
let root_doc = reader::Doc(cdata.data());
let root_doc = ebml::Doc::new(cdata.data());
let items = reader::get_doc(root_doc, tag_items);
let item_doc = match maybe_find_item(id, items) {
None => return,
@ -647,7 +647,7 @@ pub fn each_top_level_item_of_crate(intr: Rc<IdentInterner>,
callback: |DefLike,
ast::Ident,
ast::Visibility|) {
let root_doc = reader::Doc(cdata.data());
let root_doc = ebml::Doc::new(cdata.data());
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
let crate_items_doc = reader::get_doc(misc_info_doc,
tag_misc_info_crate_items);
@ -696,7 +696,7 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: &ty::ctxt, id: ast::NodeId,
pub fn get_enum_variants(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId,
tcx: &ty::ctxt) -> Vec<Rc<ty::VariantInfo>> {
let data = cdata.data();
let items = reader::get_doc(reader::Doc(data), tag_items);
let items = reader::get_doc(ebml::Doc::new(data), tag_items);
let item = find_item(id, items);
let mut disr_val = 0;
enum_variant_ids(item, cdata).iter().map(|did| {
@ -829,7 +829,7 @@ pub fn get_item_variances(cdata: Cmd, id: ast::NodeId) -> ty::ItemVariances {
let data = cdata.data();
let item_doc = lookup_item(id, data);
let variance_doc = reader::get_doc(item_doc, tag_item_variances);
let mut decoder = reader::Decoder(variance_doc);
let mut decoder = reader::Decoder::new(variance_doc);
Decodable::decode(&mut decoder).unwrap()
}
@ -1078,7 +1078,7 @@ fn list_crate_attributes(md: ebml::Doc, hash: &Svh,
}
pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
get_attributes(reader::Doc(data))
get_attributes(ebml::Doc::new(data))
}
#[deriving(Clone)]
@ -1090,7 +1090,7 @@ pub struct CrateDep {
pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let mut deps: Vec<CrateDep> = Vec::new();
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> String {
@ -1123,40 +1123,40 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
}
pub fn maybe_get_crate_hash(data: &[u8]) -> Option<Svh> {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| {
Svh::new(doc.as_str_slice())
})
}
pub fn get_crate_hash(data: &[u8]) -> Svh {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
Svh::new(hashdoc.as_str_slice())
}
pub fn maybe_get_crate_id(data: &[u8]) -> Option<CrateId> {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_crateid).map(|doc| {
from_str(doc.as_str_slice()).unwrap()
})
}
pub fn get_crate_triple(data: &[u8]) -> String {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.expect("No triple in crate").as_str().to_string()
}
pub fn get_crate_id(data: &[u8]) -> CrateId {
let cratedoc = reader::Doc(data);
let cratedoc = ebml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_crateid);
from_str(hashdoc.as_str_slice()).unwrap()
}
pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Writer) -> io::IoResult<()> {
let hash = get_crate_hash(bytes);
let md = reader::Doc(bytes);
let md = ebml::Doc::new(bytes);
try!(list_crate_attributes(md, &hash, out));
list_crate_deps(bytes, out)
}
@ -1183,7 +1183,7 @@ pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
}
pub fn each_impl(cdata: Cmd, callback: |ast::DefId|) {
let impls_doc = reader::get_doc(reader::Doc(cdata.data()), tag_impls);
let impls_doc = reader::get_doc(ebml::Doc::new(cdata.data()), tag_impls);
let _ = reader::tagged_docs(impls_doc, tag_impls_impl, |impl_doc| {
callback(item_def_id(impl_doc, cdata));
true
@ -1239,7 +1239,7 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt)
pub fn get_native_libraries(cdata: Cmd)
-> Vec<(cstore::NativeLibaryKind, String)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()),
let libraries = reader::get_doc(ebml::Doc::new(cdata.data()),
tag_native_libraries);
let mut result = Vec::new();
reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| {
@ -1255,12 +1255,12 @@ pub fn get_native_libraries(cdata: Cmd)
}
pub fn get_macro_registrar_fn(data: &[u8]) -> Option<ast::NodeId> {
reader::maybe_get_doc(reader::Doc(data), tag_macro_registrar_fn)
reader::maybe_get_doc(ebml::Doc::new(data), tag_macro_registrar_fn)
.map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap())
}
pub fn get_exported_macros(data: &[u8]) -> Vec<String> {
let macros = reader::get_doc(reader::Doc(data),
let macros = reader::get_doc(ebml::Doc::new(data),
tag_exported_macros);
let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
@ -1273,7 +1273,7 @@ pub fn get_exported_macros(data: &[u8]) -> Vec<String> {
pub fn get_dylib_dependency_formats(cdata: Cmd)
-> Vec<(ast::CrateNum, cstore::LinkagePreference)>
{
let formats = reader::get_doc(reader::Doc(cdata.data()),
let formats = reader::get_doc(ebml::Doc::new(cdata.data()),
tag_dylib_dependency_formats);
let mut result = Vec::new();
@ -1299,7 +1299,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd)
pub fn get_missing_lang_items(cdata: Cmd)
-> Vec<lang_items::LangItem>
{
let items = reader::get_doc(reader::Doc(cdata.data()), tag_lang_items);
let items = reader::get_doc(ebml::Doc::new(cdata.data()), tag_lang_items);
let mut result = Vec::new();
reader::tagged_docs(items, tag_lang_items_missing, |missing_doc| {
let item: lang_items::LangItem =

View File

@ -1813,7 +1813,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
type_abbrevs: RefCell::new(HashMap::new()),
};
let mut ebml_w = writer::Encoder(wr);
let mut ebml_w = writer::Encoder::new(wr);
encode_crate_id(&mut ebml_w, &ecx.link_meta.crateid);
encode_crate_triple(&mut ebml_w,

View File

@ -556,7 +556,7 @@ fn parse_type_param_def(st: &mut PState, conv: conv_did) -> ty::TypeParameterDef
fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: Vec::new()
};
loop {

View File

@ -120,7 +120,7 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata,
path_as_str = Some(s);
path_as_str.as_ref().map(|x| x.as_slice())
});
let mut ast_dsr = reader::Decoder(ast_doc);
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let to_id_range = reserve_id_range(&dcx.tcx.sess, from_id_range);
let xcx = &ExtendedDecodeContext {
@ -349,7 +349,7 @@ fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem {
fn decode_ast(par_doc: ebml::Doc) -> ast::InlinedItem {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder(chi_doc);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
@ -395,7 +395,7 @@ fn renumber_and_map_ast(xcx: &ExtendedDecodeContext,
// Encoding and decoding of ast::def
fn decode_def(xcx: &ExtendedDecodeContext, doc: ebml::Doc) -> ast::Def {
let mut dsr = reader::Decoder(doc);
let mut dsr = reader::Decoder::new(doc);
let def: ast::Def = Decodable::decode(&mut dsr).unwrap();
def.tr(xcx)
}
@ -1317,7 +1317,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext,
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
let mut val_dsr = reader::Decoder(val_doc);
let mut val_dsr = reader::Decoder::new(val_doc);
let val_dsr = &mut val_dsr;
match value {
@ -1402,7 +1402,7 @@ fn encode_item_ast(ebml_w: &mut Encoder, item: @ast::Item) {
#[cfg(test)]
fn decode_item_ast(par_doc: ebml::Doc) -> @ast::Item {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder(chi_doc);
let mut d = reader::Decoder::new(chi_doc);
@Decodable::decode(&mut d).unwrap()
}
@ -1444,10 +1444,10 @@ fn roundtrip(in_item: Option<@ast::Item>) {
let in_item = in_item.unwrap();
let mut wr = MemWriter::new();
{
let mut ebml_w = writer::Encoder(&mut wr);
let mut ebml_w = writer::Encoder::new(&mut wr);
encode_item_ast(&mut ebml_w, in_item);
}
let ebml_doc = reader::Doc(wr.get_ref());
let ebml_doc = ebml::Doc::new(wr.get_ref());
let out_item = decode_item_ast(ebml_doc);
assert!(in_item == out_item);

View File

@ -371,7 +371,7 @@ pub fn check_builtin_bounds(cx: &Context,
bounds: ty::BuiltinBounds,
any_missing: |ty::BuiltinBounds|) {
let kind = ty::type_contents(cx.tcx, ty);
let mut missing = ty::EmptyBuiltinBounds();
let mut missing = ty::empty_builtin_bounds();
for bound in bounds.iter() {
if !kind.meets_bound(cx.tcx, bound) {
missing.add(bound);
@ -565,6 +565,7 @@ pub fn check_cast_for_escaping_regions(
}
});
#[allow(non_snake_case_functions)]
fn is_ReScope(r: ty::Region) -> bool {
match r {
ty::ReScope(..) => true,

View File

@ -83,6 +83,7 @@ pub enum Lint {
NonCamelCaseTypes,
NonUppercaseStatics,
NonUppercasePatternStatics,
NonSnakeCaseFunctions,
UppercaseVariables,
UnnecessaryParens,
TypeLimits,
@ -220,6 +221,13 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
default: Warn
}),
("non_snake_case_functions",
LintSpec {
lint: NonSnakeCaseFunctions,
desc: "methods and functions should have snake case names",
default: Warn
}),
("uppercase_variables",
LintSpec {
lint: UppercaseVariables,
@ -1342,6 +1350,30 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::Item) {
}
}
fn check_snake_case(cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
fn is_snake_case(ident: ast::Ident) -> bool {
let ident = token::get_ident(ident);
assert!(!ident.get().is_empty());
let ident = ident.get().trim_chars('_');
let mut allow_underscore = true;
ident.chars().all(|c| {
allow_underscore = match c {
c if c.is_lowercase() || c.is_digit() => true,
'_' if allow_underscore => false,
_ => return false,
};
true
})
}
if !is_snake_case(ident) {
cx.span_lint(NonSnakeCaseFunctions, span,
format!("{} `{}` should have a snake case identifier",
sort, token::get_ident(ident)).as_slice());
}
}
fn check_item_non_uppercase_statics(cx: &Context, it: &ast::Item) {
match it.node {
// only check static constants
@ -1618,7 +1650,27 @@ fn check_missing_doc_item(cx: &Context, it: &ast::Item) {
desc);
}
#[deriving(Eq)]
enum MethodContext {
TraitDefaultImpl,
TraitImpl,
PlainImpl
}
fn check_missing_doc_method(cx: &Context, m: &ast::Method) {
// If the method is an impl for a trait, don't doc.
if method_context(cx, m) == TraitImpl { return; }
// Otherwise, doc according to privacy. This will also check
// doc for default methods defined on traits.
check_missing_doc_attrs(cx,
Some(m.id),
m.attrs.as_slice(),
m.span,
"a method");
}
fn method_context(cx: &Context, m: &ast::Method) -> MethodContext {
let did = ast::DefId {
krate: ast::LOCAL_CRATE,
node: m.id
@ -1628,25 +1680,16 @@ fn check_missing_doc_method(cx: &Context, m: &ast::Method) {
None => cx.tcx.sess.span_bug(m.span, "missing method descriptor?!"),
Some(md) => {
match md.container {
// Always check default methods defined on traits.
ty::TraitContainer(..) => {}
// For methods defined on impls, it depends on whether
// it is an implementation for a trait or is a plain
// impl.
ty::TraitContainer(..) => TraitDefaultImpl,
ty::ImplContainer(cid) => {
match ty::impl_trait_ref(cx.tcx, cid) {
Some(..) => return, // impl for trait: don't doc
None => {} // plain impl: doc according to privacy
Some(..) => TraitImpl,
None => PlainImpl
}
}
}
}
}
check_missing_doc_attrs(cx,
Some(m.id),
m.attrs.as_slice(),
m.span,
"a method");
}
fn check_missing_doc_ty_method(cx: &Context, tm: &ast::TypeMethod) {
@ -1889,26 +1932,36 @@ impl<'a> Visitor<()> for Context<'a> {
}
match *fk {
visit::FkMethod(_, _, m) => {
visit::FkMethod(ident, _, m) => {
self.with_lint_attrs(m.attrs.as_slice(), |cx| {
check_missing_doc_method(cx, m);
check_attrs_usage(cx, m.attrs.as_slice());
match method_context(cx, m) {
PlainImpl => check_snake_case(cx, "method", ident, span),
TraitDefaultImpl => check_snake_case(cx, "trait method", ident, span),
_ => (),
}
cx.visit_ids(|v| {
v.visit_fn(fk, decl, body, span, id, ());
});
recurse(cx);
})
},
visit::FkItemFn(ident, _, _, _) => {
check_snake_case(self, "function", ident, span);
recurse(self);
}
_ => recurse(self),
}
}
fn visit_ty_method(&mut self, t: &ast::TypeMethod, _: ()) {
self.with_lint_attrs(t.attrs.as_slice(), |cx| {
check_missing_doc_ty_method(cx, t);
check_attrs_usage(cx, t.attrs.as_slice());
check_snake_case(cx, "trait method", t.ident, t.span);
visit::walk_ty_method(cx, t, ());
})

View File

@ -177,7 +177,7 @@ impl<'a> Visitor<()> for IrMaps<'a> {
pub fn check_crate(tcx: &ty::ctxt,
krate: &Crate) {
visit::walk_crate(&mut IrMaps(tcx), krate, ());
visit::walk_crate(&mut IrMaps::new(tcx), krate, ());
tcx.sess.abort_if_errors();
}
@ -260,21 +260,20 @@ struct IrMaps<'a> {
lnks: Vec<LiveNodeKind>,
}
fn IrMaps<'a>(tcx: &'a ty::ctxt)
-> IrMaps<'a> {
IrMaps {
tcx: tcx,
num_live_nodes: 0,
num_vars: 0,
live_node_map: NodeMap::new(),
variable_map: NodeMap::new(),
capture_info_map: NodeMap::new(),
var_kinds: Vec::new(),
lnks: Vec::new(),
}
}
impl<'a> IrMaps<'a> {
fn new(tcx: &'a ty::ctxt) -> IrMaps<'a> {
IrMaps {
tcx: tcx,
num_live_nodes: 0,
num_vars: 0,
live_node_map: NodeMap::new(),
variable_map: NodeMap::new(),
capture_info_map: NodeMap::new(),
var_kinds: Vec::new(),
lnks: Vec::new(),
}
}
fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
let ln = LiveNode(self.num_live_nodes);
self.lnks.push(lnk);
@ -365,7 +364,7 @@ fn visit_fn(ir: &mut IrMaps,
let _i = ::util::common::indenter();
// swap in a new set of IR maps for this function body:
let mut fn_maps = IrMaps(ir.tcx);
let mut fn_maps = IrMaps::new(ir.tcx);
unsafe {
debug!("creating fn_maps: {}", transmute::<&IrMaps, *IrMaps>(&fn_maps));
@ -396,7 +395,7 @@ fn visit_fn(ir: &mut IrMaps,
};
// compute liveness
let mut lsets = Liveness(&mut fn_maps, specials);
let mut lsets = Liveness::new(&mut fn_maps, specials);
let entry_ln = lsets.compute(decl, body);
// check for various error conditions
@ -584,19 +583,19 @@ struct Liveness<'a> {
cont_ln: NodeMap<LiveNode>
}
fn Liveness<'a>(ir: &'a mut IrMaps<'a>, specials: Specials) -> Liveness<'a> {
Liveness {
ir: ir,
s: specials,
successors: Vec::from_elem(ir.num_live_nodes, invalid_node()),
users: Vec::from_elem(ir.num_live_nodes * ir.num_vars, invalid_users()),
loop_scope: Vec::new(),
break_ln: NodeMap::new(),
cont_ln: NodeMap::new(),
}
}
impl<'a> Liveness<'a> {
fn new(ir: &'a mut IrMaps<'a>, specials: Specials) -> Liveness<'a> {
Liveness {
ir: ir,
s: specials,
successors: Vec::from_elem(ir.num_live_nodes, invalid_node()),
users: Vec::from_elem(ir.num_live_nodes * ir.num_vars, invalid_users()),
loop_scope: Vec::new(),
break_ln: NodeMap::new(),
cont_ln: NodeMap::new(),
}
}
fn live_node(&self, node_id: NodeId, span: Span) -> LiveNode {
match self.ir.live_node_map.find(&node_id) {
Some(&ln) => ln,

View File

@ -549,6 +549,13 @@ enum TraitReferenceType {
}
impl NameBindings {
fn new() -> NameBindings {
NameBindings {
type_def: RefCell::new(None),
value_def: RefCell::new(None),
}
}
/// Creates a new module in this set of name bindings.
fn define_module(&self,
parent_link: ParentLink,
@ -749,49 +756,42 @@ impl NameBindings {
}
}
fn NameBindings() -> NameBindings {
NameBindings {
type_def: RefCell::new(None),
value_def: RefCell::new(None),
}
}
/// Interns the names of the primitive types.
struct PrimitiveTypeTable {
primitive_types: HashMap<Name, PrimTy>,
}
impl PrimitiveTypeTable {
fn new() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable {
primitive_types: HashMap::new()
};
table.intern("bool", TyBool);
table.intern("char", TyChar);
table.intern("f32", TyFloat(TyF32));
table.intern("f64", TyFloat(TyF64));
table.intern("f128", TyFloat(TyF128));
table.intern("int", TyInt(TyI));
table.intern("i8", TyInt(TyI8));
table.intern("i16", TyInt(TyI16));
table.intern("i32", TyInt(TyI32));
table.intern("i64", TyInt(TyI64));
table.intern("str", TyStr);
table.intern("uint", TyUint(TyU));
table.intern("u8", TyUint(TyU8));
table.intern("u16", TyUint(TyU16));
table.intern("u32", TyUint(TyU32));
table.intern("u64", TyUint(TyU64));
table
}
fn intern(&mut self, string: &str, primitive_type: PrimTy) {
self.primitive_types.insert(token::intern(string), primitive_type);
}
}
fn PrimitiveTypeTable() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable {
primitive_types: HashMap::new()
};
table.intern("bool", TyBool);
table.intern("char", TyChar);
table.intern("f32", TyFloat(TyF32));
table.intern("f64", TyFloat(TyF64));
table.intern("f128", TyFloat(TyF128));
table.intern("int", TyInt(TyI));
table.intern("i8", TyInt(TyI8));
table.intern("i16", TyInt(TyI16));
table.intern("i32", TyInt(TyI32));
table.intern("i64", TyInt(TyI64));
table.intern("str", TyStr);
table.intern("uint", TyUint(TyU));
table.intern("u8", TyUint(TyU8));
table.intern("u16", TyUint(TyU16));
table.intern("u32", TyUint(TyU32));
table.intern("u64", TyUint(TyU64));
return table;
}
fn namespace_error_to_str(ns: NamespaceError) -> &'static str {
match ns {
@ -802,62 +802,6 @@ fn namespace_error_to_str(ns: NamespaceError) -> &'static str {
}
}
fn Resolver<'a>(session: &'a Session,
lang_items: &'a LanguageItems,
crate_span: Span) -> Resolver<'a> {
let graph_root = NameBindings();
graph_root.define_module(NoParentLink,
Some(DefId { krate: 0, node: 0 }),
NormalModuleKind,
false,
true,
crate_span);
let current_module = graph_root.get_module();
let this = Resolver {
session: session,
lang_items: lang_items,
// The outermost module has def ID 0; this is not reflected in the
// AST.
graph_root: graph_root,
method_map: RefCell::new(FnvHashMap::new()),
structs: FnvHashMap::new(),
unresolved_imports: 0,
current_module: current_module,
value_ribs: RefCell::new(Vec::new()),
type_ribs: RefCell::new(Vec::new()),
label_ribs: RefCell::new(Vec::new()),
current_trait_ref: None,
current_self_type: None,
self_ident: special_idents::self_,
type_self_ident: special_idents::type_self,
primitive_type_table: PrimitiveTypeTable(),
namespaces: vec!(TypeNS, ValueNS),
def_map: RefCell::new(NodeMap::new()),
export_map2: RefCell::new(NodeMap::new()),
trait_map: NodeMap::new(),
used_imports: HashSet::new(),
external_exports: DefIdSet::new(),
last_private: NodeMap::new(),
emit_errors: true,
};
this
}
/// The main resolver class.
struct Resolver<'a> {
session: &'a Session,
@ -957,6 +901,57 @@ impl<'a, 'b> Visitor<()> for UnusedImportCheckVisitor<'a, 'b> {
}
impl<'a> Resolver<'a> {
fn new(session: &'a Session, lang_items: &'a LanguageItems, crate_span: Span) -> Resolver<'a> {
let graph_root = NameBindings::new();
graph_root.define_module(NoParentLink,
Some(DefId { krate: 0, node: 0 }),
NormalModuleKind,
false,
true,
crate_span);
let current_module = graph_root.get_module();
Resolver {
session: session,
lang_items: lang_items,
// The outermost module has def ID 0; this is not reflected in the
// AST.
graph_root: graph_root,
method_map: RefCell::new(FnvHashMap::new()),
structs: FnvHashMap::new(),
unresolved_imports: 0,
current_module: current_module,
value_ribs: RefCell::new(Vec::new()),
type_ribs: RefCell::new(Vec::new()),
label_ribs: RefCell::new(Vec::new()),
current_trait_ref: None,
current_self_type: None,
self_ident: special_idents::self_,
type_self_ident: special_idents::type_self,
primitive_type_table: PrimitiveTypeTable::new(),
namespaces: vec!(TypeNS, ValueNS),
def_map: RefCell::new(NodeMap::new()),
export_map2: RefCell::new(NodeMap::new()),
trait_map: NodeMap::new(),
used_imports: HashSet::new(),
external_exports: DefIdSet::new(),
last_private: NodeMap::new(),
emit_errors: true,
}
}
/// The main name resolution procedure.
fn resolve(&mut self, krate: &ast::Crate) {
self.build_reduced_graph(krate);
@ -1017,7 +1012,7 @@ impl<'a> Resolver<'a> {
let child = module_.children.borrow().find_copy(&name.name);
match child {
None => {
let child = Rc::new(NameBindings());
let child = Rc::new(NameBindings::new());
module_.children.borrow_mut().insert(name.name, child.clone());
child
}
@ -5574,7 +5569,7 @@ pub fn resolve_crate(session: &Session,
lang_items: &LanguageItems,
krate: &Crate)
-> CrateMap {
let mut resolver = Resolver(session, lang_items, krate.span);
let mut resolver = Resolver::new(session, lang_items, krate.span);
resolver.resolve(krate);
let Resolver { def_map, export_map2, trait_map, last_private,
external_exports, .. } = resolver;

View File

@ -997,7 +997,7 @@ fn match_datum(bcx: &Block,
*/
let ty = node_id_type(bcx, pat_id);
Datum(val, ty, Lvalue)
Datum::new(val, ty, Lvalue)
}
@ -1297,7 +1297,7 @@ fn store_non_ref_bindings<'a>(
match binding_info.trmode {
TrByValue(lldest) => {
let llval = Load(bcx, binding_info.llmatch); // get a T*
let datum = Datum(llval, binding_info.ty, Lvalue);
let datum = Datum::new(llval, binding_info.ty, Lvalue);
bcx = datum.store_to(bcx, lldest);
match opt_cleanup_scope {
@ -1334,7 +1334,7 @@ fn insert_lllocals<'a>(bcx: &'a Block<'a>,
TrByRef => binding_info.llmatch
};
let datum = Datum(llval, binding_info.ty, Lvalue);
let datum = Datum::new(llval, binding_info.ty, Lvalue);
fcx.schedule_drop_mem(cleanup_scope, llval, binding_info.ty);
debug!("binding {:?} to {}",
@ -2081,7 +2081,7 @@ pub fn store_arg<'a>(mut bcx: &'a Block<'a>,
// we emit extra-debug-info, which requires local allocas :(.
let arg_val = arg.add_clean(bcx.fcx, arg_scope);
bcx.fcx.llargs.borrow_mut()
.insert(pat.id, Datum(arg_val, arg_ty, Lvalue));
.insert(pat.id, Datum::new(arg_val, arg_ty, Lvalue));
bcx
} else {
mk_binding_alloca(
@ -2122,7 +2122,7 @@ fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
// Now that memory is initialized and has cleanup scheduled,
// create the datum and insert into the local variable map.
let datum = Datum(llval, var_ty, Lvalue);
let datum = Datum::new(llval, var_ty, Lvalue);
let mut llmap = match binding_mode {
BindLocal => bcx.fcx.lllocals.borrow_mut(),
BindArgument => bcx.fcx.llargs.borrow_mut()
@ -2183,7 +2183,7 @@ fn bind_irrefutable_pat<'a>(
ast::BindByValue(_) => {
// By value binding: move the value that `val`
// points at into the binding's stack slot.
let d = Datum(val, ty, Lvalue);
let d = Datum::new(val, ty, Lvalue);
d.store_to(bcx, llval)
}

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -70,7 +70,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
.connect(",")
.as_slice());
let mut clobbers = getClobbers();
let mut clobbers = get_clobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = format!("{},{}", ia.clobbers.get(), clobbers);
} else {
@ -135,12 +135,12 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
fn getClobbers() -> String {
fn get_clobbers() -> String {
"".to_string()
}
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
fn getClobbers() -> String {
fn get_clobbers() -> String {
"~{dirflag},~{fpsr},~{flags}".to_string()
}

View File

@ -1210,7 +1210,7 @@ pub fn create_datums_for_fn_args(fcx: &FunctionContext,
let llarg = unsafe {
llvm::LLVMGetParam(fcx.llfn, fcx.arg_pos(i) as c_uint)
};
datum::Datum(llarg, arg_ty, arg_kind(fcx, arg_ty))
datum::Datum::new(llarg, arg_ty, arg_kind(fcx, arg_ty))
}).collect()
}

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -9,6 +9,7 @@
// except according to those terms.
#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case_functions)]
use lib::llvm::llvm;
use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -460,5 +460,5 @@ pub fn make_closure_from_bare_fn<'a>(bcx: &'a Block<'a>,
let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true);
fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx())));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
#![allow(non_camel_case_types, non_snake_case_functions)]
//! Code that is useful in various trans modules.
@ -21,7 +21,6 @@ use middle::lang_items::LangItem;
use middle::trans::build;
use middle::trans::cleanup;
use middle::trans::datum;
use middle::trans::datum::{Datum, Lvalue};
use middle::trans::debuginfo;
use middle::trans::type_::Type;
use middle::ty;

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -71,8 +71,10 @@ pub struct Rvalue {
pub mode: RvalueMode
}
pub fn Rvalue(m: RvalueMode) -> Rvalue {
Rvalue { mode: m }
impl Rvalue {
pub fn new(m: RvalueMode) -> Rvalue {
Rvalue { mode: m }
}
}
// Make Datum linear for more type safety.
@ -89,25 +91,15 @@ pub enum RvalueMode {
ByValue,
}
pub fn Datum<K:KindOps>(val: ValueRef, ty: ty::t, kind: K) -> Datum<K> {
Datum { val: val, ty: ty, kind: kind }
}
pub fn DatumBlock<'a, K>(bcx: &'a Block<'a>,
datum: Datum<K>)
-> DatumBlock<'a, K> {
DatumBlock { bcx: bcx, datum: datum }
}
pub fn immediate_rvalue(val: ValueRef, ty: ty::t) -> Datum<Rvalue> {
return Datum(val, ty, Rvalue(ByValue));
return Datum::new(val, ty, Rvalue::new(ByValue));
}
pub fn immediate_rvalue_bcx<'a>(bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> DatumBlock<'a, Rvalue> {
return DatumBlock(bcx, immediate_rvalue(val, ty))
return DatumBlock::new(bcx, immediate_rvalue(val, ty))
}
@ -136,7 +128,7 @@ pub fn lvalue_scratch_datum<'a, A>(bcx: &'a Block<'a>,
let bcx = populate(arg, bcx, scratch);
bcx.fcx.schedule_drop_mem(scope, scratch, ty);
DatumBlock(bcx, Datum(scratch, ty, Lvalue))
DatumBlock::new(bcx, Datum::new(scratch, ty, Lvalue))
}
pub fn rvalue_scratch_datum(bcx: &Block,
@ -155,7 +147,7 @@ pub fn rvalue_scratch_datum(bcx: &Block,
let llty = type_of::type_of(bcx.ccx(), ty);
let scratch = alloca_maybe_zeroed(bcx, llty, name, false);
Datum(scratch, ty, Rvalue(ByRef))
Datum::new(scratch, ty, Rvalue::new(ByRef))
}
pub fn appropriate_rvalue_mode(ccx: &CrateContext, ty: ty::t) -> RvalueMode {
@ -320,7 +312,7 @@ impl Datum<Rvalue> {
match self.kind.mode {
ByRef => {
add_rvalue_clean(ByRef, fcx, scope, self.val, self.ty);
DatumBlock(bcx, Datum(self.val, self.ty, Lvalue))
DatumBlock::new(bcx, Datum::new(self.val, self.ty, Lvalue))
}
ByValue => {
@ -334,11 +326,11 @@ impl Datum<Rvalue> {
pub fn to_ref_datum<'a>(self, bcx: &'a Block<'a>) -> DatumBlock<'a, Rvalue> {
let mut bcx = bcx;
match self.kind.mode {
ByRef => DatumBlock(bcx, self),
ByRef => DatumBlock::new(bcx, self),
ByValue => {
let scratch = rvalue_scratch_datum(bcx, self.ty, "to_ref");
bcx = self.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
DatumBlock::new(bcx, scratch)
}
}
}
@ -352,10 +344,10 @@ impl Datum<Rvalue> {
}
ByValue => {
match self.kind.mode {
ByValue => DatumBlock(bcx, self),
ByValue => DatumBlock::new(bcx, self),
ByRef => {
let llval = load(bcx, self.val, self.ty);
DatumBlock(bcx, Datum(llval, self.ty, Rvalue(ByValue)))
DatumBlock::new(bcx, Datum::new(llval, self.ty, Rvalue::new(ByValue)))
}
}
}
@ -378,8 +370,8 @@ impl Datum<Expr> {
-> R {
let Datum { val, ty, kind } = self;
match kind {
LvalueExpr => if_lvalue(Datum(val, ty, Lvalue)),
RvalueExpr(r) => if_rvalue(Datum(val, ty, r)),
LvalueExpr => if_lvalue(Datum::new(val, ty, Lvalue)),
RvalueExpr(r) => if_rvalue(Datum::new(val, ty, r)),
}
}
@ -455,7 +447,7 @@ impl Datum<Expr> {
expr_id: ast::NodeId)
-> DatumBlock<'a, Lvalue> {
self.match_kind(
|l| DatumBlock(bcx, l),
|l| DatumBlock::new(bcx, l),
|r| {
let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
r.to_lvalue_datum_in_scope(bcx, name, scope)
@ -478,16 +470,16 @@ impl Datum<Expr> {
ByRef => {
let scratch = rvalue_scratch_datum(bcx, l.ty, name);
bcx = l.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
DatumBlock::new(bcx, scratch)
}
ByValue => {
let v = load(bcx, l.val, l.ty);
bcx = l.kind.post_store(bcx, l.val, l.ty);
DatumBlock(bcx, Datum(v, l.ty, Rvalue(ByValue)))
DatumBlock::new(bcx, Datum::new(v, l.ty, Rvalue::new(ByValue)))
}
}
},
|r| DatumBlock(bcx, r))
|r| DatumBlock::new(bcx, r))
}
}
@ -550,6 +542,10 @@ fn load<'a>(bcx: &'a Block<'a>, llptr: ValueRef, ty: ty::t) -> ValueRef {
* Generic methods applicable to any sort of datum.
*/
impl<K:KindOps> Datum<K> {
pub fn new(val: ValueRef, ty: ty::t, kind: K) -> Datum<K> {
Datum { val: val, ty: ty, kind: kind }
}
pub fn to_expr_datum(self) -> Datum<Expr> {
let Datum { val, ty, kind } = self;
Datum { val: val, ty: ty, kind: kind.to_expr_kind() }
@ -663,9 +659,15 @@ impl<K:KindOps> Datum<K> {
}
}
impl <'a, K> DatumBlock<'a, K> {
pub fn new(bcx: &'a Block<'a>, datum: Datum<K>) -> DatumBlock<'a, K> {
DatumBlock { bcx: bcx, datum: datum }
}
}
impl<'a, K:KindOps> DatumBlock<'a, K> {
pub fn to_expr_datumblock(self) -> DatumBlock<'a, Expr> {
DatumBlock(self.bcx, self.datum.to_expr_datum())
DatumBlock::new(self.bcx, self.datum.to_expr_datum())
}
}

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -941,6 +941,7 @@ fn is_node_local_to_unit(cx: &CrateContext, node_id: ast::NodeId) -> bool
!cx.reachable.contains(&node_id)
}
#[allow(non_snake_case_functions)]
fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray {
return unsafe {
llvm::LLVMDIBuilderGetOrCreateArray(builder, arr.as_ptr(), arr.len() as u32)
@ -2487,6 +2488,7 @@ fn debug_context<'a>(cx: &'a CrateContext) -> &'a CrateDebugContext {
}
#[inline]
#[allow(non_snake_case_functions)]
fn DIB(cx: &CrateContext) -> DIBuilderRef {
cx.dbg_cx.get_ref().builder
}

View File

@ -152,7 +152,7 @@ pub fn trans<'a>(bcx: &'a Block<'a>,
let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
return DatumBlock(bcx, datum);
return DatumBlock::new(bcx, datum);
}
fn apply_adjustments<'a>(bcx: &'a Block<'a>,
@ -168,7 +168,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let mut datum = datum;
let adjustment = match bcx.tcx().adjustments.borrow().find_copy(&expr.id) {
None => {
return DatumBlock(bcx, datum);
return DatumBlock::new(bcx, datum);
}
Some(adj) => { adj }
};
@ -244,7 +244,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let scratch = rvalue_scratch_datum(bcx, slice_ty, "__adjust");
Store(bcx, base, GEPi(bcx, scratch.val, [0u, abi::slice_elt_base]));
Store(bcx, len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len]));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}
fn add_env<'a>(bcx: &'a Block<'a>,
@ -282,7 +282,7 @@ fn apply_adjustments<'a>(bcx: &'a Block<'a>,
let mut datum = source_datum.to_expr_datum();
datum.ty = target_obj_ty;
DatumBlock(bcx, datum)
DatumBlock::new(bcx, datum)
}
}
@ -357,7 +357,7 @@ fn trans_unadjusted<'a>(bcx: &'a Block<'a>,
let scratch = unpack_datum!(
bcx, scratch.to_appropriate_datum(bcx));
DatumBlock(bcx, scratch.to_expr_datum())
DatumBlock::new(bcx, scratch.to_expr_datum())
}
}
};
@ -365,7 +365,7 @@ fn trans_unadjusted<'a>(bcx: &'a Block<'a>,
fn nil<'a>(bcx: &'a Block<'a>, ty: ty::t) -> DatumBlock<'a, Expr> {
let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
let datum = immediate_rvalue(llval, ty);
DatumBlock(bcx, datum.to_expr_datum())
DatumBlock::new(bcx, datum.to_expr_datum())
}
}
@ -394,7 +394,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
let datum = unpack_datum!(
bcx, tvec::trans_uniq_vstore(bcx, expr, contents));
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, contents.id);
DatumBlock(bcx, datum)
DatumBlock::new(bcx, datum)
}
ast::ExprBox(_, contents) => {
// Special case for `box T`. (The other case, for GC, is handled
@ -494,7 +494,7 @@ fn trans_index<'a>(bcx: &'a Block<'a>,
});
let elt = InBoundsGEP(bcx, base, [ix_val]);
let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
DatumBlock(bcx, Datum(elt, vt.unit_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(elt, vt.unit_ty, LvalueExpr))
}
fn trans_def<'a>(bcx: &'a Block<'a>,
@ -559,10 +559,10 @@ fn trans_def<'a>(bcx: &'a Block<'a>,
let did = get_did(bcx.ccx(), did);
let val = get_val(bcx, did, const_ty);
DatumBlock(bcx, Datum(val, const_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
}
_ => {
DatumBlock(bcx, trans_local_var(bcx, def).to_expr_datum())
DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
}
}
}
@ -845,7 +845,7 @@ fn trans_def_fn_unadjusted<'a>(bcx: &'a Block<'a>,
};
let fn_ty = expr_ty(bcx, ref_expr);
DatumBlock(bcx, Datum(llfn, fn_ty, RvalueExpr(Rvalue(ByValue))))
DatumBlock::new(bcx, Datum::new(llfn, fn_ty, RvalueExpr(Rvalue::new(ByValue))))
}
pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
@ -863,7 +863,7 @@ pub fn trans_local_var<'a>(bcx: &'a Block<'a>,
// Can't move upvars, so this is never a ZeroMemLastUse.
let local_ty = node_id_type(bcx, nid);
match bcx.fcx.llupvars.borrow().find(&nid) {
Some(&val) => Datum(val, local_ty, Lvalue),
Some(&val) => Datum::new(val, local_ty, Lvalue),
None => {
bcx.sess().bug(format!(
"trans_local_var: no llval for upvar {:?} found",
@ -1664,7 +1664,7 @@ fn auto_ref<'a>(bcx: &'a Block<'a>,
// Construct the resulting datum, using what was the "by ref"
// ValueRef of type `referent_ty` to be the "by value" ValueRef
// of type `&referent_ty`.
DatumBlock(bcx, Datum(llref, ptr_ty, RvalueExpr(Rvalue(ByValue))))
DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
}
fn deref_multiple<'a>(bcx: &'a Block<'a>,
@ -1717,7 +1717,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
let val = unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
datum, None, None));
let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty));
Datum(val, ref_ty, RvalueExpr(Rvalue(ByValue)))
Datum::new(val, ref_ty, RvalueExpr(Rvalue::new(ByValue)))
}
None => {
// Not overloaded. We already have a pointer we know how to deref.
@ -1740,7 +1740,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
let llptrref = datum.to_llref();
let llptr = Load(bcx, llptrref);
let llbody = GEPi(bcx, llptr, [0u, abi::box_field_body]);
DatumBlock(bcx, Datum(llbody, content_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(llbody, content_ty, LvalueExpr))
}
ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
@ -1758,7 +1758,7 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
// rvalue for non-owning pointers like &T or *T, in which
// case cleanup *is* scheduled elsewhere, by the true
// owner (or, in the case of *T, by the user).
DatumBlock(bcx, Datum(ptr, content_ty, LvalueExpr))
DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
}
}
}
@ -1818,10 +1818,10 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
(Load(bcx, datum.val), LvalueExpr)
}
RvalueExpr(Rvalue { mode: ByRef }) => {
(Load(bcx, datum.val), RvalueExpr(Rvalue(ByRef)))
(Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
}
RvalueExpr(Rvalue { mode: ByValue }) => {
(datum.val, RvalueExpr(Rvalue(ByRef)))
(datum.val, RvalueExpr(Rvalue::new(ByRef)))
}
};

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -309,7 +309,7 @@ pub fn trans_intrinsic(ccx: &CrateContext,
let mode = appropriate_rvalue_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty,
kind: Rvalue(mode)};
kind: Rvalue::new(mode)};
bcx = src.store_to(bcx, get_param(decl, first_real_arg));
RetVoid(bcx);
}

View File

@ -257,7 +257,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
alloc_fn,
[ llptrval, llsizeval ],
Some(expr::SaveIn(lldestval.val))).bcx;
return DatumBlock(bcx, lldestval).to_expr_datumblock();
return DatumBlock::new(bcx, lldestval).to_expr_datumblock();
}
_ => {}
}

View File

@ -840,11 +840,11 @@ pub enum BuiltinBound {
BoundShare,
}
pub fn EmptyBuiltinBounds() -> BuiltinBounds {
pub fn empty_builtin_bounds() -> BuiltinBounds {
EnumSet::empty()
}
pub fn AllBuiltinBounds() -> BuiltinBounds {
pub fn all_builtin_bounds() -> BuiltinBounds {
let mut set = EnumSet::empty();
set.add(BoundStatic);
set.add(BoundSend);
@ -2833,7 +2833,7 @@ pub fn adjust_ty(cx: &ctxt,
ty::ClosureTy {fn_style: b.fn_style,
onceness: ast::Many,
store: store,
bounds: ty::AllBuiltinBounds(),
bounds: ty::all_builtin_bounds(),
sig: b.sig.clone()})
}
ref b => {
@ -4303,7 +4303,7 @@ pub fn visitor_object_ty(tcx: &ctxt,
trait_ref.def_id,
trait_ref.substs.clone(),
RegionTraitStore(region, ast::MutMutable),
EmptyBuiltinBounds())))
empty_builtin_bounds())))
}
pub fn item_variances(tcx: &ctxt, item_id: ast::DefId) -> Rc<ItemVariances> {

View File

@ -907,7 +907,7 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
match (ast_bounds, store) {
(&Some(ref bound_vec), _) => {
let mut builtin_bounds = ty::EmptyBuiltinBounds();
let mut builtin_bounds = ty::empty_builtin_bounds();
for ast_bound in bound_vec.iter() {
match *ast_bound {
ast::TraitTyParamBound(ref b) => {
@ -942,10 +942,10 @@ fn conv_builtin_bounds(tcx: &ty::ctxt, ast_bounds: &Option<OwnedSlice<ast::TyPar
},
// &'static Trait is sugar for &'static Trait:'static.
(&None, ty::RegionTraitStore(ty::ReStatic, _)) => {
let mut set = ty::EmptyBuiltinBounds(); set.add(ty::BoundStatic); set
let mut set = ty::empty_builtin_bounds(); set.add(ty::BoundStatic); set
}
// No bounds are automatically applied for &'r Trait or ~Trait
(&None, ty::RegionTraitStore(..)) |
(&None, ty::UniqTraitStore) => ty::EmptyBuiltinBounds(),
(&None, ty::UniqTraitStore) => ty::empty_builtin_bounds(),
}
}

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -274,12 +274,12 @@ fn construct_transformed_self_ty_for_object(
let r = r.subst(tcx, &substs); // handle Early-Bound lifetime
ty::mk_trait(tcx, trait_def_id, substs,
RegionTraitStore(r, mt.mutbl),
ty::EmptyBuiltinBounds())
ty::empty_builtin_bounds())
}
ty::ty_uniq(_) => { // must be SelfUniq
ty::mk_trait(tcx, trait_def_id, substs,
UniqTraitStore,
ty::EmptyBuiltinBounds())
ty::empty_builtin_bounds())
}
_ => {
tcx.sess.span_bug(span,

View File

@ -2335,7 +2335,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
}
_ => {
// Not an error! Means we're inferring the closure type
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
let onceness = match expr.node {
ast::ExprProc(..) => {
bounds.add(ty::BoundSend);

View File

@ -472,7 +472,7 @@ fn fixup_substs(vcx: &VtableContext,
let t = ty::mk_trait(tcx,
id, substs,
ty::RegionTraitStore(ty::ReStatic, ast::MutImmutable),
ty::EmptyBuiltinBounds());
ty::empty_builtin_bounds());
fixup_ty(vcx, span, t, is_early).map(|t_f| {
match ty::get(t_f).sty {
ty::ty_trait(ref inner) => inner.substs.clone(),
@ -574,7 +574,7 @@ pub fn early_resolve_expr(ex: &ast::Expr, fcx: &FnCtxt, is_early: bool) {
});
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(target_trait_ref)
};
let vtables =
@ -766,7 +766,7 @@ pub fn resolve_impl(tcx: &ty::ctxt,
// purpose of this is to check for supertrait impls,
// but that falls out of doing this.
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(Rc::new(impl_trait_ref))
};
let t = ty::node_id_to_type(tcx, impl_item.id);

View File

@ -347,7 +347,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
ident: special_idents::self_,
def_id: dummy_defid,
bounds: Rc::new(ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: vec!(self_trait_ref)
}),
default: None
@ -418,7 +418,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
let self_ty = ty::mk_self(ccx.tcx, local_def(id));
let mut ty_trait_refs: Vec<Rc<ty::TraitRef>> = Vec::new();
let mut bounds = ty::EmptyBuiltinBounds();
let mut bounds = ty::empty_builtin_bounds();
for ast_trait_ref in ast_trait_refs.iter() {
let trait_def_id = ty::trait_ref_to_def_id(ccx.tcx, ast_trait_ref);
// FIXME(#8559): Need to instantiate the trait_ref whether or not it's a
@ -1094,7 +1094,7 @@ fn ty_generics(ccx: &CrateCtxt,
*/
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
builtin_bounds: ty::empty_builtin_bounds(),
trait_bounds: Vec::new()
};
for ast_bound in ast_bounds.iter() {

View File

@ -22,7 +22,6 @@ pub use middle::typeck::infer::resolve::{resolve_nested_tvar};
pub use middle::typeck::infer::resolve::{resolve_rvar};
use collections::HashMap;
use collections::SmallIntMap;
use middle::ty::{TyVid, IntVid, FloatVid, RegionVid, Vid};
use middle::ty;
use middle::ty_fold;
@ -258,27 +257,20 @@ pub fn fixup_err_to_str(f: fixup_err) -> String {
}
}
fn new_ValsAndBindings<V:Clone,T:Clone>() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: SmallIntMap::new(),
bindings: Vec::new()
}
}
pub fn new_infer_ctxt<'a>(tcx: &'a ty::ctxt) -> InferCtxt<'a> {
InferCtxt {
tcx: tcx,
ty_var_bindings: RefCell::new(new_ValsAndBindings()),
ty_var_bindings: RefCell::new(ValsAndBindings::new()),
ty_var_counter: Cell::new(0),
int_var_bindings: RefCell::new(new_ValsAndBindings()),
int_var_bindings: RefCell::new(ValsAndBindings::new()),
int_var_counter: Cell::new(0),
float_var_bindings: RefCell::new(new_ValsAndBindings()),
float_var_bindings: RefCell::new(ValsAndBindings::new()),
float_var_counter: Cell::new(0),
region_vars: RegionVarBindings(tcx),
region_vars: RegionVarBindings::new(tcx),
}
}
@ -679,7 +671,7 @@ impl<'a> InferCtxt<'a> {
trait_ref.def_id,
trait_ref.substs.clone(),
ty::UniqTraitStore,
ty::EmptyBuiltinBounds());
ty::empty_builtin_bounds());
let dummy1 = self.resolve_type_vars_if_possible(dummy0);
match ty::get(dummy1).sty {
ty::ty_trait(box ty::TyTrait { ref def_id, ref substs, .. }) => {

View File

@ -143,21 +143,21 @@ pub struct RegionVarBindings<'a> {
values: RefCell<Option<Vec<VarValue> >>,
}
pub fn RegionVarBindings<'a>(tcx: &'a ty::ctxt) -> RegionVarBindings<'a> {
RegionVarBindings {
tcx: tcx,
var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new())
}
}
impl<'a> RegionVarBindings<'a> {
pub fn new(tcx: &'a ty::ctxt) -> RegionVarBindings<'a> {
RegionVarBindings {
tcx: tcx,
var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new())
}
}
pub fn in_snapshot(&self) -> bool {
self.undo_log.borrow().len() > 0
}
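The hunk above shows the commit's main mechanical pattern: a free CamelCase constructor function is folded into the type's impl as an associated new. A minimal standalone sketch of that pattern, using a hypothetical Registry type rather than anything from this diff:

    // Before (now rejected by non_snake_case_functions):
    //     pub fn Registry(capacity: uint) -> Registry { ... }
    // After: the body moves into an associated function on the type.
    pub struct Registry {
        names: Vec<String>,
    }

    impl Registry {
        pub fn new(capacity: uint) -> Registry {
            Registry { names: Vec::with_capacity(capacity) }
        }
    }

    fn main() {
        let r = Registry::new(8);
        assert_eq!(r.names.len(), 0);
    }

Call sites change in lockstep, which is why most of this commit consists of one-line rewrites from Foo(args) to Foo::new(args).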

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -30,6 +30,15 @@ pub struct ValsAndBindings<V, T> {
pub bindings: Vec<(V, VarValue<V, T>)> ,
}
impl<V:Clone, T:Clone> ValsAndBindings<V, T> {
pub fn new() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: SmallIntMap::new(),
bindings: Vec::new()
}
}
}
pub struct Node<V, T> {
pub root: V,
pub possible_types: T,

View File

@ -141,6 +141,7 @@ mod imp {
static LOCKFILE_EXCLUSIVE_LOCK: libc::DWORD = 0x00000002;
#[allow(non_snake_case_functions)]
extern "system" {
fn LockFileEx(hFile: libc::HANDLE,
dwFlags: libc::DWORD,

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -88,7 +88,7 @@ pub trait HomingIO {
/// This function will move tasks to run on their home I/O scheduler. Note
/// that this function does *not* pin the task to the I/O scheduler, but
/// rather it simply moves it to running on the I/O scheduler.
fn go_to_IO_home(&mut self) -> uint {
fn go_to_io_home(&mut self) -> uint {
let _f = ForbidUnwind::new("going home");
let cur_loop_id = local_id();
@ -118,7 +118,7 @@ pub trait HomingIO {
/// move the local task to its I/O scheduler and then return an RAII wrapper
/// which will return the task home.
fn fire_homing_missile(&mut self) -> HomingMissile {
HomingMissile { io_home: self.go_to_IO_home() }
HomingMissile { io_home: self.go_to_io_home() }
}
}

View File

@ -26,6 +26,10 @@ pub struct Doc<'a> {
}
impl<'doc> Doc<'doc> {
pub fn new(data: &'doc [u8]) -> Doc<'doc> {
Doc { data: data, start: 0u, end: data.len() }
}
pub fn get<'a>(&'a self, tag: uint) -> Doc<'a> {
reader::get_doc(*self, tag)
}
@ -192,10 +196,6 @@ pub mod reader {
}
}
pub fn Doc<'a>(data: &'a [u8]) -> Doc<'a> {
Doc { data: data, start: 0u, end: data.len() }
}
pub fn doc_at<'a>(data: &'a [u8], start: uint) -> DecodeResult<TaggedDoc<'a>> {
let elt_tag = try!(vuint_at(data, start));
let elt_size = try!(vuint_at(data, elt_tag.next));
@ -296,14 +296,14 @@ pub mod reader {
pos: uint,
}
pub fn Decoder<'a>(d: Doc<'a>) -> Decoder<'a> {
Decoder {
parent: d,
pos: d.start
}
}
impl<'doc> Decoder<'doc> {
pub fn new(d: Doc<'doc>) -> Decoder<'doc> {
Decoder {
parent: d,
pos: d.start
}
}
fn _check_label(&mut self, lbl: &str) -> DecodeResult<()> {
if self.pos < self.parent.end {
let TaggedDoc { tag: r_tag, doc: r_doc } =
@ -673,15 +673,15 @@ pub mod writer {
})
}
pub fn Encoder<'a, W: Writer + Seek>(w: &'a mut W) -> Encoder<'a, W> {
Encoder {
writer: w,
size_positions: vec!(),
}
}
// FIXME (#2741): Provide a function to write the standard ebml header.
impl<'a, W: Writer + Seek> Encoder<'a, W> {
pub fn new(w: &'a mut W) -> Encoder<'a, W> {
Encoder {
writer: w,
size_positions: vec!(),
}
}
/// FIXME(pcwalton): Workaround for badness in trans. DO NOT USE ME.
pub unsafe fn unsafe_clone(&self) -> Encoder<'a, W> {
Encoder {
@ -1020,6 +1020,7 @@ pub mod writer {
#[cfg(test)]
mod tests {
use super::Doc;
use ebml::reader;
use ebml::writer;
use {Encodable, Decodable};
@ -1081,11 +1082,11 @@ mod tests {
debug!("v == {}", v);
let mut wr = MemWriter::new();
{
let mut ebml_w = writer::Encoder(&mut wr);
let mut ebml_w = writer::Encoder::new(&mut wr);
let _ = v.encode(&mut ebml_w);
}
let ebml_doc = reader::Doc(wr.get_ref());
let mut deser = reader::Decoder(ebml_doc);
let ebml_doc = Doc::new(wr.get_ref());
let mut deser = reader::Decoder::new(ebml_doc);
let v1 = Decodable::decode(&mut deser).unwrap();
debug!("v1 == {}", v1);
assert_eq!(v, v1);
@ -1099,6 +1100,7 @@ mod tests {
#[cfg(test)]
mod bench {
#![allow(non_snake_case_functions)]
extern crate test;
use self::test::Bencher;
use ebml::reader;

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -27,6 +27,7 @@
*/
#![allow(missing_doc)]
#![allow(non_snake_case_functions)]
use clone::Clone;
use container::Container;

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -93,6 +93,7 @@ mod imp {
static CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000;
static NTE_BAD_SIGNATURE: DWORD = 0x80090006;
#[allow(non_snake_case_functions)]
extern "system" {
fn CryptAcquireContextA(phProv: *mut HCRYPTPROV,
pszContainer: LPCSTR,

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -42,11 +42,12 @@ pub fn align(size: uint, align: uint) -> uint {
pub struct MovePtrAdaptor<V> {
inner: V
}
pub fn MovePtrAdaptor<V:TyVisitor + MovePtr>(v: V) -> MovePtrAdaptor<V> {
MovePtrAdaptor { inner: v }
}
impl<V:TyVisitor + MovePtr> MovePtrAdaptor<V> {
pub fn new(v: V) -> MovePtrAdaptor<V> {
MovePtrAdaptor { inner: v }
}
#[inline]
pub fn bump(&mut self, sz: uint) {
self.inner.move_ptr(|p| ((p as uint) + sz) as *u8)

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -110,17 +110,6 @@ pub struct ReprVisitor<'a> {
last_err: Option<io::IoError>,
}
pub fn ReprVisitor<'a>(ptr: *u8,
writer: &'a mut io::Writer) -> ReprVisitor<'a> {
ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: writer,
last_err: None,
}
}
impl<'a> MovePtr for ReprVisitor<'a> {
#[inline]
fn move_ptr(&mut self, adjustment: |*u8| -> *u8) {
@ -136,6 +125,15 @@ impl<'a> MovePtr for ReprVisitor<'a> {
impl<'a> ReprVisitor<'a> {
// Various helpers for the TyVisitor impl
pub fn new(ptr: *u8, writer: &'a mut io::Writer) -> ReprVisitor<'a> {
ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: writer,
last_err: None,
}
}
#[inline]
pub fn get<T>(&mut self, f: |&mut ReprVisitor, &T| -> bool) -> bool {
@ -152,16 +150,8 @@ impl<'a> ReprVisitor<'a> {
#[inline]
pub fn visit_ptr_inner(&mut self, ptr: *u8, inner: *TyDesc) -> bool {
unsafe {
// This should call the constructor up above, but due to limiting
// issues we have to recreate it here.
let u = ReprVisitor {
ptr: ptr,
ptr_stk: vec!(),
var_stk: vec!(),
writer: ::mem::transmute_copy(&self.writer),
last_err: None,
};
let mut v = reflect::MovePtrAdaptor(u);
let u = ReprVisitor::new(ptr, ::mem::transmute_copy(&self.writer));
let mut v = reflect::MovePtrAdaptor::new(u);
// Obviously this should not be a thing, but blame #8401 for now
visit_tydesc(inner, &mut v as &mut TyVisitor);
match v.unwrap().last_err {
@ -592,8 +582,8 @@ pub fn write_repr<T>(writer: &mut io::Writer, object: &T) -> io::IoResult<()> {
unsafe {
let ptr = object as *T as *u8;
let tydesc = get_tydesc::<T>();
let u = ReprVisitor(ptr, writer);
let mut v = reflect::MovePtrAdaptor(u);
let u = ReprVisitor::new(ptr, writer);
let mut v = reflect::MovePtrAdaptor::new(u);
visit_tydesc(tydesc, &mut v as &mut TyVisitor);
match v.unwrap().last_err {
Some(e) => Err(e),

View File

@ -518,6 +518,7 @@ mod imp {
use unstable::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
use slice::ImmutableVector;
#[allow(non_snake_case_functions)]
extern "system" {
fn GetCurrentProcess() -> libc::HANDLE;
fn GetCurrentThread() -> libc::HANDLE;

View File

@ -11,6 +11,7 @@
//! Unwind library interface
#![allow(non_camel_case_types)]
#![allow(non_snake_case_functions)]
#![allow(dead_code)] // these are just bindings
use libc;

View File

@ -199,6 +199,7 @@ mod imp {
SwitchToThread();
}
#[allow(non_snake_case_functions)]
extern "system" {
fn CreateThread(lpThreadAttributes: LPSECURITY_ATTRIBUTES,
dwStackSize: SIZE_T,

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -86,6 +86,7 @@ pub unsafe fn destroy(key: Key) {
}
#[cfg(windows)]
#[allow(non_snake_case_functions)]
extern "system" {
fn TlsAlloc() -> DWORD;
fn TlsFree(dwTlsIndex: DWORD) -> BOOL;

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -317,6 +317,7 @@ pub mod dl {
FreeLibrary(handle as *libc::c_void); ()
}
#[allow(non_snake_case_functions)]
extern "system" {
fn SetLastError(error: libc::size_t);
fn LoadLibraryW(name: *libc::c_void) -> *libc::c_void;

View File

@ -543,6 +543,7 @@ mod imp {
libc::CloseHandle(block);
}
#[allow(non_snake_case_functions)]
extern "system" {
fn CreateEventA(lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
bManualReset: BOOL,

View File

@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -159,6 +159,7 @@ impl fmt::Show for Abi {
}
}
#[allow(non_snake_case_functions)]
#[test]
fn lookup_Rust() {
let abi = lookup("Rust");

View File

@ -26,6 +26,7 @@ use serialize::{Encodable, Decodable, Encoder, Decoder};
/// A pointer abstraction. FIXME(eddyb) #10676 use Rc<T> in the future.
pub type P<T> = @T;
#[allow(non_snake_case_functions)]
/// Construct a P<T> from a T value.
pub fn P<T: 'static>(value: T) -> P<T> {
@value
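The allow attribute above is needed because P deliberately shares its name with the type it builds; functions and types live in separate namespaces, so the CamelCase name is intentional rather than an oversight. A minimal standalone sketch of the same naming pattern, with a hypothetical Wrapper type standing in for libsyntax's P:

    #![allow(non_snake_case_functions)]

    struct Wrapper<T> {
        value: T,
    }

    /// Construct a Wrapper<T> from a T value, mirroring the P<T> helper above.
    fn Wrapper<T>(value: T) -> Wrapper<T> {
        Wrapper { value: value }
    }

    fn main() {
        let w = Wrapper(42u);
        assert_eq!(w.value, 42u);
    }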

View File

@ -223,7 +223,7 @@ fn marksof_internal(ctxt: SyntaxContext,
return result;
},
Mark(mark, tl) => {
xorPush(&mut result, mark);
xor_push(&mut result, mark);
loopvar = tl;
},
Rename(_,name,tl) => {
@ -253,7 +253,7 @@ pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
// Push a name... unless it matches the one on top, in which
// case pop and discard (so two of the same marks cancel)
fn xorPush(marks: &mut Vec<Mrk>, mark: Mrk) {
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
@ -264,26 +264,27 @@ fn xorPush(marks: &mut Vec<Mrk>, mark: Mrk) {
#[cfg(test)]
mod tests {
use ast::*;
use super::{resolve, xorPush, new_mark_internal, new_sctable_internal};
use super::{resolve, xor_push, new_mark_internal, new_sctable_internal};
use super::{new_rename_internal, marksof_internal, resolve_internal};
use super::{SCTable, EmptyCtxt, Mark, Rename, IllegalCtxt};
use collections::HashMap;
#[test] fn xorpush_test () {
#[test]
fn xorpush_test () {
let mut s = Vec::new();
xorPush(&mut s, 14);
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xorPush(&mut s, 14);
xor_push(&mut s, 14);
assert_eq!(s.clone(), Vec::new());
xorPush(&mut s, 14);
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xorPush(&mut s, 15);
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14, 15));
xorPush(&mut s, 16);
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15, 16));
xorPush(&mut s, 16);
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15));
xorPush(&mut s, 15);
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14));
}
@ -331,7 +332,8 @@ mod tests {
}
}
#[test] fn test_unfold_refold(){
#[test]
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),14),M(9));
@ -364,7 +366,8 @@ mod tests {
}
}
#[test] fn test_marksof () {
#[test]
fn test_marksof () {
let stopname = 242;
let name1 = 243;
let mut t = new_sctable_internal();
@ -397,7 +400,8 @@ mod tests {
}
#[test] fn resolve_tests () {
#[test]
fn resolve_tests () {
let a = 40;
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
@ -447,13 +451,15 @@ mod tests {
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),50);}
}
#[test] fn mtwt_resolve_test(){
#[test]
fn mtwt_resolve_test(){
let a = 40;
assert_eq!(resolve(id(a,EMPTY_CTXT)),a);
}
#[test] fn hashing_tests () {
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(new_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(new_mark_internal(13,EMPTY_CTXT,&mut t),3);
@ -462,7 +468,8 @@ mod tests {
// I'm assuming that the rename table will behave the same....
}
#[test] fn resolve_table_hashing_tests() {
#[test]
fn resolve_table_hashing_tests() {
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
assert_eq!(rt.len(),0);

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -402,7 +402,7 @@ pub fn parse(sess: &ParseSess,
}
rdr.next_token();
} else /* bb_eis.len() == 1 */ {
let mut rust_parser = Parser(sess, cfg.clone(), box rdr.clone());
let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());
let mut ei = bb_eis.pop().unwrap();
match ei.elts.get(ei.idx).node {

View File

@ -171,7 +171,7 @@ fn generic_extension(cx: &ExtCtxt,
let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
Some(named_matches),
rhs);
let p = Parser(cx.parse_sess(), cx.cfg(), box trncbr);
let p = Parser::new(cx.parse_sess(), cx.cfg(), box trncbr);
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
return box ParserAnyMacro {

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -256,7 +256,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
// parsing tt's probably shouldn't require a parser at all.
let cfg = Vec::new();
let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser(sess, cfg, box srdr);
let mut p1 = Parser::new(sess, cfg, box srdr);
p1.parse_all_token_trees()
}
@ -265,7 +265,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<ast::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
Parser(sess, cfg, box trdr)
Parser::new(sess, cfg, box trdr)
}
// abort if necessary

View File

@ -1,4 +1,4 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -19,7 +19,7 @@ removed.
use ast::{Expr, ExprLit, LitNil};
use codemap::{Span, respan};
use parse::parser::Parser;
use parse::parser;
use parse::token;
/// The specific types of unsupported syntax
@ -45,7 +45,7 @@ pub trait ParserObsoleteMethods {
fn eat_obsolete_ident(&mut self, ident: &str) -> bool;
}
impl<'a> ParserObsoleteMethods for Parser<'a> {
impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
/// Reports an obsolete syntax non-fatal error.
fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) {
let (kind_str, desc) = match kind {

View File

@ -278,50 +278,11 @@ struct ParsedItemsAndViewItems {
attrs_remaining: Vec<Attribute> ,
view_items: Vec<ViewItem> ,
items: Vec<@Item> ,
foreign_items: Vec<@ForeignItem> }
foreign_items: Vec<@ForeignItem>
}
/* ident is handled by common.rs */
pub fn Parser<'a>(
sess: &'a ParseSess,
cfg: ast::CrateConfig,
mut rdr: Box<Reader:>)
-> Parser<'a> {
let tok0 = rdr.next_token();
let span = tok0.sp;
let placeholder = TokenAndSpan {
tok: token::UNDERSCORE,
sp: span,
};
Parser {
reader: rdr,
interner: token::get_ident_interner(),
sess: sess,
cfg: cfg,
token: tok0.tok,
span: span,
last_span: span,
last_token: None,
buffer: [
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
],
buffer_start: 0,
buffer_end: 0,
tokens_consumed: 0,
restriction: UNRESTRICTED,
quote_depth: 0,
obsolete_set: HashSet::new(),
mod_path_stack: Vec::new(),
open_braces: Vec::new(),
owns_directory: true,
root_module_name: None,
}
}
pub struct Parser<'a> {
pub sess: &'a ParseSess,
// the current token:
@ -362,6 +323,41 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
}
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess, cfg: ast::CrateConfig, mut rdr: Box<Reader:>) -> Parser<'a> {
let tok0 = rdr.next_token();
let span = tok0.sp;
let placeholder = TokenAndSpan {
tok: token::UNDERSCORE,
sp: span,
};
Parser {
reader: rdr,
interner: token::get_ident_interner(),
sess: sess,
cfg: cfg,
token: tok0.tok,
span: span,
last_span: span,
last_token: None,
buffer: [
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
placeholder.clone(),
],
buffer_start: 0,
buffer_end: 0,
tokens_consumed: 0,
restriction: UNRESTRICTED,
quote_depth: 0,
obsolete_set: HashSet::new(),
mod_path_stack: Vec::new(),
open_braces: Vec::new(),
owns_directory: true,
root_module_name: None,
}
}
// convert a token to a string using self's reader
pub fn token_to_str(token: &token::Token) -> String {
token::to_str(token)

View File

@ -27,6 +27,7 @@ pub struct WinConsole<T> {
background: color::Color,
}
#[allow(non_snake_case_functions)]
#[link(name = "kernel32")]
extern "system" {
fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL;

View File

@ -1,4 +1,4 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -795,14 +795,16 @@ mod test {
#[test]
fn test_serialize_round_trip() {
use serialize::ebml;
use serialize::ebml::Doc;
use serialize::ebml::writer::Encoder;
use serialize::ebml::reader::Decoder;
use serialize::{Encodable, Decodable};
let u = Uuid::new_v4();
let mut wr = MemWriter::new();
let _ = u.encode(&mut ebml::writer::Encoder(&mut wr));
let doc = ebml::reader::Doc(wr.get_ref());
let u2 = Decodable::decode(&mut ebml::reader::Decoder(doc)).unwrap();
let _ = u.encode(&mut Encoder::new(&mut wr));
let doc = Doc::new(wr.get_ref());
let u2 = Decodable::decode(&mut Decoder::new(doc)).unwrap();
assert_eq!(u, u2);
}

View File

@ -9,6 +9,7 @@
// except according to those terms.
#![feature(phase)]
#![allow(non_snake_case_functions)]
#[phase(syntax)] extern crate green;
extern crate sync;

View File

@ -11,6 +11,7 @@
// ignore-pretty very bad with line comments
#![feature(managed_boxes)]
#![allow(non_snake_case_functions)]
use std::io;
use std::io::stdio::StdReader;

View File

@ -0,0 +1,51 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(non_snake_case_functions)]
#![allow(dead_code)]
struct Foo;
impl Foo {
fn Foo_Method() {}
//~^ ERROR method `Foo_Method` should have a snake case identifier
// Don't allow two underscores in a row
fn foo__method(&self) {}
//~^ ERROR method `foo__method` should have a snake case identifier
pub fn xyZ(&mut self) {}
//~^ ERROR method `xyZ` should have a snake case identifier
}
trait X {
fn ABC();
//~^ ERROR trait method `ABC` should have a snake case identifier
fn a_b_C(&self) {}
//~^ ERROR trait method `a_b_C` should have a snake case identifier
fn something__else(&mut self);
//~^ ERROR trait method `something__else` should have a snake case identifier
}
impl X for Foo {
// These errors should be caught at the trait definition not the impl
fn ABC() {}
fn something__else(&mut self) {}
}
fn Cookie() {}
//~^ ERROR function `Cookie` should have a snake case identifier
pub fn bi_S_Cuit() {}
//~^ ERROR function `bi_S_Cuit` should have a snake case identifier
fn main() { }
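Several hunks in this commit silence the lint instead of renaming, chiefly extern blocks whose foreign function names are dictated by the platform ABI. A minimal standalone sketch of that escape hatch, assuming the standard Win32 GetCurrentProcessId binding as the example:

    #![allow(non_snake_case_functions)]
    #![allow(dead_code)]

    #[cfg(windows)]
    extern "system" {
        // The Win32 name is fixed by the ABI; the allow attribute on the
        // crate (or on the extern block itself) keeps the lint quiet.
        fn GetCurrentProcessId() -> u32;
    }

    fn main() {}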

View File

@ -33,12 +33,12 @@ fn test_ebml<'a, 'b, A:
Decodable<EBReader::Decoder<'b>>
>(a1: &A) {
let mut wr = std::io::MemWriter::new();
let mut ebml_w = EBWriter::Encoder(&mut wr);
let mut ebml_w = EBwriter::Encoder::new(&mut wr);
a1.encode(&mut ebml_w);
let bytes = wr.get_ref();
let d: serialize::ebml::Doc<'a> = EBReader::Doc(bytes);
let mut decoder: EBReader::Decoder<'a> = EBReader::Decoder(d);
let d: serialize::ebml::Doc<'a> = EBDoc::new(bytes);
let mut decoder: EBReader::Decoder<'a> = EBreader::Decoder::new(d);
let a2: A = Decodable::decode(&mut decoder);
assert!(*a1 == a2);
}

View File

@ -22,6 +22,7 @@ use std::io::MemWriter;
use rand::{random, Rand};
use serialize::{Encodable, Decodable};
use serialize::ebml;
use serialize::ebml::Doc;
use serialize::ebml::writer::Encoder;
use serialize::ebml::reader::Decoder;
@ -58,10 +59,10 @@ fn roundtrip<'a, T: Rand + Eq + Encodable<Encoder<'a>> +
Decodable<Decoder<'a>>>() {
let obj: T = random();
let mut w = MemWriter::new();
let mut e = Encoder(&mut w);
let mut e = Encoder::new(&mut w);
obj.encode(&mut e);
let doc = ebml::reader::Doc(@w.get_ref());
let mut dec = Decoder(doc);
let doc = ebml::Doc::new(@w.get_ref());
let mut dec = Decoder::new(doc);
let obj2 = Decodable::decode(&mut dec);
assert!(obj == obj2);
}

View File

@ -43,7 +43,7 @@ fn encode_ebml<'a,
T: Encodable<writer::Encoder<'a, MemWriter>,
std::io::IoError>>(val: &T,
wr: &'a mut MemWriter) {
let mut encoder = writer::Encoder(wr);
let mut encoder = writer::Encoder::new(wr);
val.encode(&mut encoder);
}