Auto merge of #49558 - Zoxc:sync-misc, r=michaelwoerister

Even more thread-safety changes

r? @michaelwoerister
bors 2018-04-12 16:22:36 +00:00
commit 6c537493d0
40 changed files with 425 additions and 285 deletions

View File

@ -632,6 +632,7 @@ define_dep_nodes!( <'tcx>
[input] MaybeUnusedTraitImport(DefId),
[input] MaybeUnusedExternCrates,
[eval_always] StabilityIndex,
[eval_always] AllTraits,
[input] AllCrateNums,
[] ExportedSymbols(CrateNum),
[eval_always] CollectAndPartitionTranslationItems,

View File

@ -36,13 +36,13 @@ use rustc_data_structures::indexed_vec::Idx;
use serialize::UseSpecializedDecodable;
use std::fmt::Debug;
use std::ops::Index;
use std::sync::atomic::Ordering;
use syntax::codemap::Span;
use traits::{Obligation, ObligationCause, PredicateObligation};
use ty::{self, CanonicalVar, Lift, Region, Slice, Ty, TyCtxt, TypeFlags};
use ty::subst::{Kind, UnpackedKind};
use ty::fold::{TypeFoldable, TypeFolder};
use util::captures::Captures;
use util::common::CellUsizeExt;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap;
@ -473,7 +473,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
where
V: Canonicalize<'gcx, 'tcx>,
{
self.tcx.sess.perf_stats.queries_canonicalized.increment();
self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
Canonicalizer::canonicalize(
value,
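The hunk above swaps a `Cell<usize>` perf counter (previously bumped via the now-removed `CellUsizeExt::increment` helper) for an `AtomicUsize` bumped with `fetch_add(1, Ordering::Relaxed)`. A minimal standalone sketch of that pattern, using only std types and an illustrative counter name rather than the real `Session` field layout:

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;

fn main() {
    // Illustrative stand-in for a perf-stats counter. `Cell<usize>` is not
    // `Sync`, but an `AtomicUsize` can be bumped from any query thread.
    let queries_canonicalized = Arc::new(AtomicUsize::new(0));

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let counter = Arc::clone(&queries_canonicalized);
            // Relaxed ordering is enough for a pure statistics counter:
            // nothing else is synchronized through it, only the total matters.
            thread::spawn(move || counter.fetch_add(1, Ordering::Relaxed))
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(queries_canonicalized.load(Ordering::Relaxed), 4);
}
```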

View File

@ -408,7 +408,7 @@ fn create_and_seed_worklist<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
// Seed entry point
if let Some((id, _)) = *tcx.sess.entry_fn.borrow() {
if let Some((id, _, _)) = *tcx.sess.entry_fn.borrow() {
worklist.push(id);
}

View File

@ -94,13 +94,14 @@ pub enum Linkage {
pub fn calculate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let sess = &tcx.sess;
let mut fmts = sess.dependency_formats.borrow_mut();
let mut fmts = FxHashMap();
for &ty in sess.crate_types.borrow().iter() {
let linkage = calculate_type(tcx, ty);
verify_ok(tcx, &linkage);
fmts.insert(ty, linkage);
}
sess.abort_if_errors();
sess.dependency_formats.set(fmts);
}
fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
@ -222,7 +223,7 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
//
// Things like allocators and panic runtimes may not have been activated
// quite yet, so do so here.
activate_injected_dep(sess.injected_panic_runtime.get(), &mut ret,
activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret,
&|cnum| tcx.is_panic_runtime(cnum));
activate_injected_allocator(sess, &mut ret);
@ -301,7 +302,7 @@ fn attempt_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<DependencyLis
// Our allocator/panic runtime may not have been linked above if it wasn't
// explicitly linked, which is the case for any injected dependency. Handle
// that here and activate them.
activate_injected_dep(sess.injected_panic_runtime.get(), &mut ret,
activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret,
&|cnum| tcx.is_panic_runtime(cnum));
activate_injected_allocator(sess, &mut ret);
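The change above stops mutating `sess.dependency_formats` through a `RefCell` borrow and instead builds the whole map in a local `FxHashMap`, then publishes it once via `Once::set`. A minimal sketch of that compute-then-publish shape, with `std::sync::OnceLock` and `std::collections::HashMap` standing in for the compiler's `Once` and `FxHashMap` (the map contents are made up for illustration):

```rust
use std::collections::HashMap;
use std::sync::OnceLock;

fn main() {
    // Stand-in for Session::dependency_formats: computed once, read-only after.
    let dependency_formats: OnceLock<HashMap<&'static str, &'static str>> = OnceLock::new();

    // Build the whole value locally, without holding any shared borrow...
    let mut fmts = HashMap::new();
    fmts.insert("bin", "statically linked");
    fmts.insert("rlib", "not linked");

    // ...then publish it exactly once; a second `set` would be rejected.
    dependency_formats.set(fmts).expect("dependency formats already set");

    // Readers get a plain shared reference, with no runtime borrow flag.
    assert_eq!(dependency_formats.get().unwrap()["bin"], "statically linked");
}
```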

View File

@ -63,12 +63,13 @@ pub fn find_entry_point(session: &Session,
});
if !any_exe {
// No need to find a main function
session.entry_fn.set(None);
return
}
// If the user wants no main function at all, then stop here.
if attr::contains_name(&hir_map.krate().attrs, "no_main") {
session.entry_type.set(Some(config::EntryNone));
session.entry_fn.set(None);
return
}
@ -153,17 +154,15 @@ fn find_item(item: &Item, ctxt: &mut EntryContext, at_root: bool) {
}
fn configure_main(this: &mut EntryContext, crate_name: &str) {
if this.start_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.start_fn;
this.session.entry_type.set(Some(config::EntryStart));
} else if this.attr_main_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.attr_main_fn;
this.session.entry_type.set(Some(config::EntryMain));
} else if this.main_fn.is_some() {
*this.session.entry_fn.borrow_mut() = this.main_fn;
this.session.entry_type.set(Some(config::EntryMain));
if let Some((node_id, span)) = this.start_fn {
this.session.entry_fn.set(Some((node_id, span, config::EntryStart)));
} else if let Some((node_id, span)) = this.attr_main_fn {
this.session.entry_fn.set(Some((node_id, span, config::EntryMain)));
} else if let Some((node_id, span)) = this.main_fn {
this.session.entry_fn.set(Some((node_id, span, config::EntryMain)));
} else {
// No main function
this.session.entry_fn.set(None);
let mut err = struct_err!(this.session, E0601,
"`main` function not found in crate `{}`", crate_name);
if !this.non_main_fns.is_empty() {
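The rewrite of `configure_main` above merges the old `entry_fn: RefCell<Option<(NodeId, Span)>>` and `entry_type: Cell<Option<EntryFnType>>` pair into a single write-once triple, and records `None` explicitly on every path so readers never observe an uninitialized cell. A condensed standalone sketch of the same shape, with `std::sync::OnceLock` standing in for `Once` and made-up ids instead of `NodeId`/`Span`:

```rust
use std::sync::OnceLock;

#[derive(Clone, Copy, Debug, PartialEq)]
enum EntryFnType { Main, Start }

// Stand-in for Session::entry_fn: id and entry kind are set together, exactly once.
fn configure_main(
    entry_fn: &OnceLock<Option<(u32, EntryFnType)>>,
    start_fn: Option<u32>,
    main_fn: Option<u32>,
) {
    if let Some(id) = start_fn {
        entry_fn.set(Some((id, EntryFnType::Start))).unwrap();
    } else if let Some(id) = main_fn {
        entry_fn.set(Some((id, EntryFnType::Main))).unwrap();
    } else {
        // "No entry point" is also recorded, so `get()` can always be trusted later.
        entry_fn.set(None).unwrap();
    }
}

fn main() {
    let entry_fn = OnceLock::new();
    configure_main(&entry_fn, None, Some(7));
    assert_eq!(*entry_fn.get().unwrap(), Some((7, EntryFnType::Main)));
}
```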

View File

@ -18,17 +18,17 @@
use session::Session;
use syntax::ast;
use std::cell::Cell;
use rustc_data_structures::sync::Once;
pub fn update_limits(sess: &Session, krate: &ast::Crate) {
update_limit(sess, krate, &sess.recursion_limit, "recursion_limit",
"recursion limit");
"recursion limit", 64);
update_limit(sess, krate, &sess.type_length_limit, "type_length_limit",
"type length limit");
"type length limit", 1048576);
}
fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Cell<usize>,
name: &str, description: &str) {
fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Once<usize>,
name: &str, description: &str, default: usize) {
for attr in &krate.attrs {
if !attr.check_name(name) {
continue;
@ -45,4 +45,5 @@ fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Cell<usize>,
"malformed {} attribute, expected #![{}=\"N\"]",
description, name);
}
limit.set(default);
}
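`update_limit` above now takes a write-once cell plus a default: either a crate attribute supplies the value, or the default is installed after the loop, so the limit ends up set on every path. A minimal standalone sketch under the same assumptions, using `std::sync::OnceLock` in place of `Once` and a plain key/value slice in place of crate attributes:

```rust
use std::sync::OnceLock;

// Stand-in for Session::recursion_limit / type_length_limit.
fn update_limit(limit: &OnceLock<usize>, attrs: &[(&str, &str)], name: &str, default: usize) {
    for &(key, value) in attrs {
        if key != name {
            continue;
        }
        if let Ok(n) = value.parse::<usize>() {
            limit.set(n).expect("limit set twice");
            return;
        }
        eprintln!("malformed {} attribute, expected an integer", name);
    }
    // No (valid) override found: fall back to the default, still set exactly once.
    limit.set(default).expect("limit set twice");
}

fn main() {
    let recursion_limit = OnceLock::new();
    update_limit(&recursion_limit, &[("recursion_limit", "256")], "recursion_limit", 64);
    assert_eq!(*recursion_limit.get().unwrap(), 256);
}
```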

View File

@ -614,13 +614,11 @@ impl Options {
// The type of entry function, so
// users can have their own entry
// functions that don't start a
// scheduler
// functions
#[derive(Copy, Clone, PartialEq)]
pub enum EntryFnType {
EntryMain,
EntryStart,
EntryNone,
}
#[derive(Copy, PartialEq, PartialOrd, Clone, Ord, Eq, Hash, Debug)]
@ -1861,6 +1859,13 @@ pub fn build_session_options_and_crate_config(
);
}
if debugging_opts.query_threads.unwrap_or(1) > 1 && debugging_opts.fuel.is_some() {
early_error(
error_format,
"Optimization fuel is incompatible with multiple query threads",
);
}
if codegen_units == Some(0) {
early_error(
error_format,

View File

@ -22,11 +22,11 @@ use middle::dependency_format;
use session::search_paths::PathKind;
use session::config::{DebugInfoLevel, OutputType};
use ty::tls;
use util::nodemap::{FxHashMap, FxHashSet};
use util::nodemap::{FxHashSet};
use util::common::{duration_to_secs_str, ErrorReported};
use util::common::ProfileQueriesMsg;
use rustc_data_structures::sync::{Lrc, Lock};
use rustc_data_structures::sync::{Lrc, Lock, LockCell, OneThread, Once};
use syntax::ast::NodeId;
use errors::{self, DiagnosticBuilder, DiagnosticId};
@ -46,15 +46,16 @@ use rustc_back::target::{Target, TargetTriple};
use rustc_data_structures::flock;
use jobserver::Client;
use std;
use std::cell::{self, Cell, RefCell};
use std::collections::HashMap;
use std::env;
use std::fmt;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::{Once, ONCE_INIT};
use std::time::Duration;
use std::sync::mpsc;
use std::sync::atomic::{AtomicUsize, Ordering};
mod code_stats;
pub mod config;
@ -69,10 +70,9 @@ pub struct Session {
pub opts: config::Options,
pub parse_sess: ParseSess,
/// For a library crate, this is always none
pub entry_fn: RefCell<Option<(NodeId, Span)>>,
pub entry_type: Cell<Option<config::EntryFnType>>,
pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
pub derive_registrar_fn: Cell<Option<ast::NodeId>>,
pub entry_fn: Once<Option<(NodeId, Span, config::EntryFnType)>>,
pub plugin_registrar_fn: Once<Option<ast::NodeId>>,
pub derive_registrar_fn: Once<Option<ast::NodeId>>,
pub default_sysroot: Option<PathBuf>,
/// The name of the root source file of the crate, in the local file system.
/// `None` means that there is no source file.
@ -80,50 +80,54 @@ pub struct Session {
/// The directory the compiler has been executed in plus a flag indicating
/// if the value stored here has been affected by path remapping.
pub working_dir: (PathBuf, bool),
pub lint_store: RefCell<lint::LintStore>,
pub buffered_lints: RefCell<Option<lint::LintBuffer>>,
// FIXME: lint_store and buffered_lints are not thread-safe,
// but are only used in a single thread
pub lint_store: OneThread<RefCell<lint::LintStore>>,
pub buffered_lints: OneThread<RefCell<Option<lint::LintBuffer>>>,
/// Set of (DiagnosticId, Option<Span>, message) tuples tracking
/// (sub)diagnostics that have been set once, but should not be set again,
/// in order to avoid redundantly verbose output (Issue #24690, #44953).
pub one_time_diagnostics: RefCell<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
pub plugin_llvm_passes: RefCell<Vec<String>>,
pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
pub crate_types: RefCell<Vec<config::CrateType>>,
pub dependency_formats: RefCell<dependency_format::Dependencies>,
pub plugin_llvm_passes: OneThread<RefCell<Vec<String>>>,
pub plugin_attributes: OneThread<RefCell<Vec<(String, AttributeType)>>>,
pub crate_types: Once<Vec<config::CrateType>>,
pub dependency_formats: Once<dependency_format::Dependencies>,
/// The crate_disambiguator is constructed out of all the `-C metadata`
/// arguments passed to the compiler. Its value together with the crate-name
/// forms a unique global identifier for the crate. It is used to allow
/// multiple crates with the same name to coexist. See the
/// trans::back::symbol_names module for more information.
pub crate_disambiguator: RefCell<Option<CrateDisambiguator>>,
pub crate_disambiguator: Once<CrateDisambiguator>,
features: RefCell<Option<feature_gate::Features>>,
features: Once<feature_gate::Features>,
/// The maximum recursion limit for potentially infinitely recursive
/// operations such as auto-dereference and monomorphization.
pub recursion_limit: Cell<usize>,
pub recursion_limit: Once<usize>,
/// The maximum length of types during monomorphization.
pub type_length_limit: Cell<usize>,
pub type_length_limit: Once<usize>,
/// The maximum number of stackframes allowed in const eval
pub const_eval_stack_frame_limit: Cell<usize>,
pub const_eval_stack_frame_limit: usize,
/// The maximum number miri steps per constant
pub const_eval_step_limit: Cell<usize>,
pub const_eval_step_limit: usize,
/// The metadata::creader module may inject an allocator/panic_runtime
/// dependency if it didn't already find one, and this tracks what was
/// injected.
pub injected_allocator: Cell<Option<CrateNum>>,
pub allocator_kind: Cell<Option<AllocatorKind>>,
pub injected_panic_runtime: Cell<Option<CrateNum>>,
pub injected_allocator: Once<Option<CrateNum>>,
pub allocator_kind: Once<Option<AllocatorKind>>,
pub injected_panic_runtime: Once<Option<CrateNum>>,
/// Map from imported macro spans (which consist of
/// the localized span for the macro body) to the
/// macro name and definition span in the source crate.
pub imported_macro_spans: RefCell<HashMap<Span, (String, Span)>>,
pub imported_macro_spans: OneThread<RefCell<HashMap<Span, (String, Span)>>>,
incr_comp_session: RefCell<IncrCompSession>,
incr_comp_session: OneThread<RefCell<IncrCompSession>>,
/// A cache of attributes ignored by StableHashingContext
pub ignored_attr_names: FxHashSet<Symbol>,
@ -135,53 +139,42 @@ pub struct Session {
pub perf_stats: PerfStats,
/// Data about code being compiled, gathered during compilation.
pub code_stats: RefCell<CodeStats>,
pub code_stats: Lock<CodeStats>,
next_node_id: Cell<ast::NodeId>,
next_node_id: OneThread<Cell<ast::NodeId>>,
/// If -zfuel=crate=n is specified, Some(crate).
optimization_fuel_crate: Option<String>,
/// If -zfuel=crate=n is specified, initially set to n. Otherwise 0.
optimization_fuel_limit: Cell<u64>,
optimization_fuel_limit: LockCell<u64>,
/// We're rejecting all further optimizations.
out_of_fuel: Cell<bool>,
out_of_fuel: LockCell<bool>,
// The next two are public because the driver needs to read them.
/// If -zprint-fuel=crate, Some(crate).
pub print_fuel_crate: Option<String>,
/// Always set to zero and incremented so that we can print fuel expended by a crate.
pub print_fuel: Cell<u64>,
pub print_fuel: LockCell<u64>,
/// Loaded up early on in the initialization of this `Session` to avoid
/// false positives about a job server in our environment.
pub jobserver_from_env: Option<Client>,
/// Metadata about the allocators for the current crate being compiled
pub has_global_allocator: Cell<bool>,
pub has_global_allocator: Once<bool>,
}
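Several fields above (`optimization_fuel_limit`, `out_of_fuel`, `print_fuel`) move from `Cell` to `LockCell`. `LockCell` itself is not defined in this diff; as an assumption, it can be read as a `Cell`-like get/set wrapper for `Copy` values made `Sync` by a lock, roughly like this standalone sketch (name and method set are illustrative, not the rustc_data_structures implementation):

```rust
use std::sync::Mutex;

// Assumed minimal shape of a LockCell-like type: Cell's API, backed by a mutex.
struct LockCell<T>(Mutex<T>);

impl<T: Copy> LockCell<T> {
    fn new(value: T) -> Self { LockCell(Mutex::new(value)) }
    fn get(&self) -> T { *self.0.lock().unwrap() }
    fn set(&self, value: T) { *self.0.lock().unwrap() = value; }
}

fn main() {
    // Illustrative stand-in for the optimization fuel counter.
    let fuel = LockCell::new(2u64);
    while fuel.get() > 0 {
        fuel.set(fuel.get() - 1);
    }
    assert_eq!(fuel.get(), 0);
}
```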
pub struct PerfStats {
/// The accumulated time needed for computing the SVH of the crate
pub svh_time: Cell<Duration>,
/// The accumulated time spent on computing incr. comp. hashes
pub incr_comp_hashes_time: Cell<Duration>,
/// The number of incr. comp. hash computations performed
pub incr_comp_hashes_count: Cell<u64>,
/// The number of bytes hashed when computing ICH values
pub incr_comp_bytes_hashed: Cell<u64>,
/// The accumulated time spent on computing symbol hashes
pub symbol_hash_time: Cell<Duration>,
pub symbol_hash_time: Lock<Duration>,
/// The accumulated time spent decoding def path tables from metadata
pub decode_def_path_tables_time: Cell<Duration>,
pub decode_def_path_tables_time: Lock<Duration>,
/// Total number of values canonicalized queries constructed.
pub queries_canonicalized: Cell<usize>,
/// Number of times we canonicalized a value and found that the
/// result had already been canonicalized.
pub canonicalized_values_allocated: Cell<usize>,
pub queries_canonicalized: AtomicUsize,
/// Number of times this query is invoked.
pub normalize_ty_after_erasing_regions: Cell<usize>,
pub normalize_ty_after_erasing_regions: AtomicUsize,
/// Number of times this query is invoked.
pub normalize_projection_ty: Cell<usize>,
pub normalize_projection_ty: AtomicUsize,
}
/// Enum to support dispatch of one-time diagnostics (in Session.diag_once)
@ -209,10 +202,7 @@ impl From<&'static lint::Lint> for DiagnosticMessageId {
impl Session {
pub fn local_crate_disambiguator(&self) -> CrateDisambiguator {
match *self.crate_disambiguator.borrow() {
Some(value) => value,
None => bug!("accessing disambiguator before initialization"),
}
*self.crate_disambiguator.get()
}
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(
@ -539,18 +529,12 @@ impl Session {
/// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents
/// dependency tracking. Use tcx.features() instead.
#[inline]
pub fn features_untracked(&self) -> cell::Ref<feature_gate::Features> {
let features = self.features.borrow();
if features.is_none() {
bug!("Access to Session::features before it is initialized");
}
cell::Ref::map(features, |r| r.as_ref().unwrap())
pub fn features_untracked(&self) -> &feature_gate::Features {
self.features.get()
}
pub fn init_features(&self, features: feature_gate::Features) {
*(self.features.borrow_mut()) = Some(features);
self.features.set(features);
}
/// Calculates the flavor of LTO to use for this compilation.
@ -834,52 +818,26 @@ impl Session {
}
pub fn print_perf_stats(&self) {
println!(
"Total time spent computing SVHs: {}",
duration_to_secs_str(self.perf_stats.svh_time.get())
);
println!(
"Total time spent computing incr. comp. hashes: {}",
duration_to_secs_str(self.perf_stats.incr_comp_hashes_time.get())
);
println!(
"Total number of incr. comp. hashes computed: {}",
self.perf_stats.incr_comp_hashes_count.get()
);
println!(
"Total number of bytes hashed for incr. comp.: {}",
self.perf_stats.incr_comp_bytes_hashed.get()
);
if self.perf_stats.incr_comp_hashes_count.get() != 0 {
println!(
"Average bytes hashed per incr. comp. HIR node: {}",
self.perf_stats.incr_comp_bytes_hashed.get()
/ self.perf_stats.incr_comp_hashes_count.get()
);
} else {
println!("Average bytes hashed per incr. comp. HIR node: N/A");
}
println!(
"Total time spent computing symbol hashes: {}",
duration_to_secs_str(self.perf_stats.symbol_hash_time.get())
duration_to_secs_str(*self.perf_stats.symbol_hash_time.lock())
);
println!(
"Total time spent decoding DefPath tables: {}",
duration_to_secs_str(self.perf_stats.decode_def_path_tables_time.get())
duration_to_secs_str(*self.perf_stats.decode_def_path_tables_time.lock())
);
println!("Total queries canonicalized: {}",
self.perf_stats.queries_canonicalized.get());
println!("Total canonical values interned: {}",
self.perf_stats.canonicalized_values_allocated.get());
self.perf_stats.queries_canonicalized.load(Ordering::Relaxed));
println!("normalize_ty_after_erasing_regions: {}",
self.perf_stats.normalize_ty_after_erasing_regions.get());
self.perf_stats.normalize_ty_after_erasing_regions.load(Ordering::Relaxed));
println!("normalize_projection_ty: {}",
self.perf_stats.normalize_projection_ty.get());
self.perf_stats.normalize_projection_ty.load(Ordering::Relaxed));
}
/// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n.
/// This expends fuel if applicable, and records fuel if applicable.
pub fn consider_optimizing<T: Fn() -> String>(&self, crate_name: &str, msg: T) -> bool {
assert!(self.query_threads() == 1);
let mut ret = true;
match self.optimization_fuel_crate {
Some(ref c) if c == crate_name => {
@ -1109,9 +1067,9 @@ pub fn build_session_(
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel_limit =
Cell::new(sopts.debugging_opts.fuel.as_ref().map(|i| i.1).unwrap_or(0));
LockCell::new(sopts.debugging_opts.fuel.as_ref().map(|i| i.1).unwrap_or(0));
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = Cell::new(0);
let print_fuel = LockCell::new(0);
let working_dir = match env::current_dir() {
Ok(dir) => dir,
@ -1127,52 +1085,46 @@ pub fn build_session_(
opts: sopts,
parse_sess: p_s,
// For a library crate, this is always none
entry_fn: RefCell::new(None),
entry_type: Cell::new(None),
plugin_registrar_fn: Cell::new(None),
derive_registrar_fn: Cell::new(None),
entry_fn: Once::new(),
plugin_registrar_fn: Once::new(),
derive_registrar_fn: Once::new(),
default_sysroot,
local_crate_source_file,
working_dir,
lint_store: RefCell::new(lint::LintStore::new()),
buffered_lints: RefCell::new(Some(lint::LintBuffer::new())),
lint_store: OneThread::new(RefCell::new(lint::LintStore::new())),
buffered_lints: OneThread::new(RefCell::new(Some(lint::LintBuffer::new()))),
one_time_diagnostics: RefCell::new(FxHashSet()),
plugin_llvm_passes: RefCell::new(Vec::new()),
plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FxHashMap()),
crate_disambiguator: RefCell::new(None),
features: RefCell::new(None),
recursion_limit: Cell::new(64),
type_length_limit: Cell::new(1048576),
const_eval_stack_frame_limit: Cell::new(100),
const_eval_step_limit: Cell::new(1_000_000),
next_node_id: Cell::new(NodeId::new(1)),
injected_allocator: Cell::new(None),
allocator_kind: Cell::new(None),
injected_panic_runtime: Cell::new(None),
imported_macro_spans: RefCell::new(HashMap::new()),
incr_comp_session: RefCell::new(IncrCompSession::NotInitialized),
plugin_llvm_passes: OneThread::new(RefCell::new(Vec::new())),
plugin_attributes: OneThread::new(RefCell::new(Vec::new())),
crate_types: Once::new(),
dependency_formats: Once::new(),
crate_disambiguator: Once::new(),
features: Once::new(),
recursion_limit: Once::new(),
type_length_limit: Once::new(),
const_eval_stack_frame_limit: 100,
const_eval_step_limit: 1_000_000,
next_node_id: OneThread::new(Cell::new(NodeId::new(1))),
injected_allocator: Once::new(),
allocator_kind: Once::new(),
injected_panic_runtime: Once::new(),
imported_macro_spans: OneThread::new(RefCell::new(HashMap::new())),
incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)),
ignored_attr_names: ich::compute_ignored_attr_names(),
profile_channel: Lock::new(None),
perf_stats: PerfStats {
svh_time: Cell::new(Duration::from_secs(0)),
incr_comp_hashes_time: Cell::new(Duration::from_secs(0)),
incr_comp_hashes_count: Cell::new(0),
incr_comp_bytes_hashed: Cell::new(0),
symbol_hash_time: Cell::new(Duration::from_secs(0)),
decode_def_path_tables_time: Cell::new(Duration::from_secs(0)),
queries_canonicalized: Cell::new(0),
canonicalized_values_allocated: Cell::new(0),
normalize_ty_after_erasing_regions: Cell::new(0),
normalize_projection_ty: Cell::new(0),
symbol_hash_time: Lock::new(Duration::from_secs(0)),
decode_def_path_tables_time: Lock::new(Duration::from_secs(0)),
queries_canonicalized: AtomicUsize::new(0),
normalize_ty_after_erasing_regions: AtomicUsize::new(0),
normalize_projection_ty: AtomicUsize::new(0),
},
code_stats: RefCell::new(CodeStats::new()),
code_stats: Lock::new(CodeStats::new()),
optimization_fuel_crate,
optimization_fuel_limit,
print_fuel_crate,
print_fuel,
out_of_fuel: Cell::new(false),
out_of_fuel: LockCell::new(false),
// Note that this is unsafe because it may misinterpret file descriptors
// on Unix as jobserver file descriptors. We hopefully execute this near
// the beginning of the process though to ensure we don't get false
@ -1184,13 +1136,13 @@ pub fn build_session_(
// per-process.
jobserver_from_env: unsafe {
static mut GLOBAL_JOBSERVER: *mut Option<Client> = 0 as *mut _;
static INIT: Once = ONCE_INIT;
static INIT: std::sync::Once = std::sync::ONCE_INIT;
INIT.call_once(|| {
GLOBAL_JOBSERVER = Box::into_raw(Box::new(Client::from_env()));
});
(*GLOBAL_JOBSERVER).clone()
},
has_global_allocator: Cell::new(false),
has_global_allocator: Once::new(),
};
sess

View File

@ -345,7 +345,7 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a,
Reveal::UserFacing => ty,
Reveal::All => {
let recursion_limit = self.tcx().sess.recursion_limit.get();
let recursion_limit = *self.tcx().sess.recursion_limit.get();
if self.depth >= recursion_limit {
let obligation = Obligation::with_depth(
self.cause.clone(),
@ -566,7 +566,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
found cache entry: in-progress");
// But for now, let's classify this as an overflow:
let recursion_limit = selcx.tcx().sess.recursion_limit.get();
let recursion_limit = *selcx.tcx().sess.recursion_limit.get();
let obligation = Obligation::with_depth(cause.clone(),
recursion_limit,
param_env,
@ -848,7 +848,7 @@ fn project_type<'cx, 'gcx, 'tcx>(
debug!("project(obligation={:?})",
obligation);
let recursion_limit = selcx.tcx().sess.recursion_limit.get();
let recursion_limit = *selcx.tcx().sess.recursion_limit.get();
if obligation.recursion_depth >= recursion_limit {
debug!("project: overflow!");
selcx.infcx().report_overflow_error(&obligation, true);

View File

@ -109,7 +109,7 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for QueryNormalizer<'cx, 'gcx, 'tcx
Reveal::UserFacing => ty,
Reveal::All => {
let recursion_limit = self.tcx().sess.recursion_limit.get();
let recursion_limit = *self.tcx().sess.recursion_limit.get();
if self.anon_depth >= recursion_limit {
let obligation = Obligation::with_depth(
self.cause.clone(),

View File

@ -997,7 +997,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
{
// Watch out for overflow. This intentionally bypasses (and does
// not update) the cache.
let recursion_limit = self.infcx.tcx.sess.recursion_limit.get();
let recursion_limit = *self.infcx.tcx.sess.recursion_limit.get();
if stack.obligation.recursion_depth >= recursion_limit {
self.infcx().report_overflow_error(&stack.obligation, true);
}

View File

@ -50,7 +50,7 @@ use ty::maps;
use ty::steal::Steal;
use ty::BindingMode;
use ty::CanonicalTy;
use util::nodemap::{NodeMap, DefIdSet, ItemLocalMap};
use util::nodemap::{DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
@ -888,22 +888,12 @@ pub struct GlobalCtxt<'tcx> {
/// Used to prevent layout from recursing too deeply.
pub layout_depth: Cell<usize>,
/// Map from function to the `#[derive]` mode that it's defining. Only used
/// by `proc-macro` crates.
pub derive_macros: RefCell<NodeMap<Symbol>>,
stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
pub interpret_interner: InterpretInterner<'tcx>,
layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
/// A vector of every trait accessible in the whole crate
/// (i.e. including those from subcrates). This is used only for
/// error reporting, and so is lazily initialized and generally
/// shouldn't taint the common path (hence the RefCell).
pub all_traits: RefCell<Option<Vec<DefId>>>,
/// A general purpose channel to throw data out the back towards LLVM worker
/// threads.
///
@ -1280,10 +1270,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
data_layout,
layout_interner: Lock::new(FxHashSet()),
layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
stability_interner: Lock::new(FxHashSet()),
interpret_interner: Default::default(),
all_traits: RefCell::new(None),
tx_to_llvm_workers: Lock::new(tx),
output_filenames: Arc::new(output_filenames.clone()),
};

View File

@ -898,7 +898,7 @@ fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
{
let (param_env, ty) = query.into_parts();
let rec_limit = tcx.sess.recursion_limit.get();
let rec_limit = *tcx.sess.recursion_limit.get();
let depth = tcx.layout_depth.get();
if depth > rec_limit {
tcx.sess.fatal(

View File

@ -586,6 +586,12 @@ impl<'tcx> QueryDescription<'tcx> for queries::stability_index<'tcx> {
}
}
impl<'tcx> QueryDescription<'tcx> for queries::all_traits<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("fetching all foreign and local traits")
}
}
impl<'tcx> QueryDescription<'tcx> for queries::all_crate_nums<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("fetching all foreign CrateNum instances")

View File

@ -386,6 +386,11 @@ define_maps! { <'tcx>
[] fn stability_index: stability_index_node(CrateNum) -> Lrc<stability::Index<'tcx>>,
[] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Lrc<Vec<CrateNum>>,
/// A vector of every trait accessible in the whole crate
/// (i.e. including those from subcrates). This is used only for
/// error reporting.
[] fn all_traits: all_traits_node(CrateNum) -> Lrc<Vec<DefId>>,
[] fn exported_symbols: ExportedSymbols(CrateNum)
-> Arc<Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)>>,
[] fn collect_and_partition_translation_items:
@ -575,6 +580,10 @@ fn all_crate_nums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::AllCrateNums
}
fn all_traits_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::AllTraits
}
fn collect_and_partition_translation_items_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::CollectAndPartitionTranslationItems
}

View File

@ -1124,6 +1124,7 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
}
DepKind::MaybeUnusedExternCrates => { force!(maybe_unused_extern_crates, LOCAL_CRATE); }
DepKind::StabilityIndex => { force!(stability_index, LOCAL_CRATE); }
DepKind::AllTraits => { force!(all_traits, LOCAL_CRATE); }
DepKind::AllCrateNums => { force!(all_crate_nums, LOCAL_CRATE); }
DepKind::ExportedSymbols => { force!(exported_symbols, krate!()); }
DepKind::CollectAndPartitionTranslationItems => {

View File

@ -10,6 +10,8 @@
#![allow(non_camel_case_types)]
use rustc_data_structures::sync::Lock;
use std::cell::{RefCell, Cell};
use std::collections::HashMap;
use std::ffi::CString;
@ -236,13 +238,14 @@ pub fn to_readable_str(mut val: usize) -> String {
groups.join("_")
}
pub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where
pub fn record_time<T, F>(accu: &Lock<Duration>, f: F) -> T where
F: FnOnce() -> T,
{
let start = Instant::now();
let rv = f();
let duration = start.elapsed();
accu.set(duration + accu.get());
let mut accu = accu.lock();
*accu = *accu + duration;
rv
}
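`record_time` above now accumulates into a `Lock<Duration>` instead of a `Cell<Duration>`. A standalone rendering of the same function, with `std::sync::Mutex` standing in for the compiler's `Lock` wrapper:

```rust
use std::sync::Mutex;
use std::time::{Duration, Instant};

// Runs `f`, adding its wall-clock time to a shared, thread-safe accumulator.
fn record_time<T, F>(accu: &Mutex<Duration>, f: F) -> T
where
    F: FnOnce() -> T,
{
    let start = Instant::now();
    let rv = f();
    let duration = start.elapsed();
    let mut accu = accu.lock().unwrap();
    *accu = *accu + duration;
    rv
}

fn main() {
    let symbol_hash_time = Mutex::new(Duration::from_secs(0));
    let sum: u64 = record_time(&symbol_hash_time, || (0..1_000_000u64).sum());
    println!("sum = {}, took {:?}", sum, *symbol_hash_time.lock().unwrap());
}
```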
@ -382,13 +385,3 @@ fn test_to_readable_str() {
assert_eq!("1_000_000", to_readable_str(1_000_000));
assert_eq!("1_234_567", to_readable_str(1_234_567));
}
pub trait CellUsizeExt {
fn increment(&self);
}
impl CellUsizeExt for Cell<usize> {
fn increment(&self) {
self.set(self.get() + 1);
}
}

View File

@ -29,10 +29,15 @@
//! `rustc_erase_owner!` erases a OwningRef owner into Erased or Erased + Send + Sync
//! depending on the value of cfg!(parallel_queries).
use std::collections::HashMap;
use std::hash::{Hash, BuildHasher};
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::fmt::Debug;
use std::fmt::Formatter;
use std::fmt;
use std;
use std::ops::{Deref, DerefMut};
use owning_ref::{Erased, OwningRef};
cfg_if! {
@ -161,6 +166,8 @@ cfg_if! {
use parking_lot::Mutex as InnerLock;
use parking_lot::RwLock as InnerRwLock;
use std::thread;
pub type MetadataRef = OwningRef<Box<Erased + Send + Sync>, [u8]>;
/// This makes locks panic if they are already held.
@ -223,6 +230,146 @@ pub fn assert_sync<T: ?Sized + Sync>() {}
pub fn assert_send_val<T: ?Sized + Send>(_t: &T) {}
pub fn assert_send_sync_val<T: ?Sized + Sync + Send>(_t: &T) {}
pub trait HashMapExt<K, V> {
/// Same as HashMap::insert, but it may panic if there's already an
/// entry for `key` with a value not equal to `value`
fn insert_same(&mut self, key: K, value: V);
}
impl<K: Eq + Hash, V: Eq, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> {
fn insert_same(&mut self, key: K, value: V) {
self.entry(key).and_modify(|old| assert!(*old == value)).or_insert(value);
}
}
/// A type whose inner value can be written once and then will stay read-only
// This contains a PhantomData<T> since this type conceptually owns a T outside the Mutex once
// initialized. This ensures that Once<T> is Sync only if T is. If we did not have PhantomData<T>
// we could send a &Once<Cell<bool>> to multiple threads and call `get` on it to get access
// to &Cell<bool> on those threads.
pub struct Once<T>(Lock<Option<T>>, PhantomData<T>);
impl<T> Once<T> {
/// Creates an Once value which is uninitialized
#[inline(always)]
pub fn new() -> Self {
Once(Lock::new(None), PhantomData)
}
/// Consumes the value and returns Some(T) if it was initialized
#[inline(always)]
pub fn into_inner(self) -> Option<T> {
self.0.into_inner()
}
/// Tries to initialize the inner value to `value`.
    /// Returns `None` if the inner value was uninitialized and `value` was consumed setting it;
    /// otherwise, if the inner value was already set, it returns `value` back to the caller.
#[inline]
pub fn try_set(&self, value: T) -> Option<T> {
let mut lock = self.0.lock();
if lock.is_some() {
return Some(value);
}
*lock = Some(value);
None
}
/// Tries to initialize the inner value to `value`.
    /// Returns `None` if the inner value was uninitialized and `value` was consumed setting it;
    /// otherwise, if the inner value was already set, it asserts that `value` is equal to the inner
    /// value and then returns `value` back to the caller.
#[inline]
pub fn try_set_same(&self, value: T) -> Option<T> where T: Eq {
let mut lock = self.0.lock();
if let Some(ref inner) = *lock {
assert!(*inner == value);
return Some(value);
}
*lock = Some(value);
None
}
/// Tries to initialize the inner value to `value` and panics if it was already initialized
#[inline]
pub fn set(&self, value: T) {
assert!(self.try_set(value).is_none());
}
/// Tries to initialize the inner value by calling the closure while ensuring that no-one else
    /// can access the value in the meantime by holding a lock for the duration of the closure.
    /// If the value was already initialized, the closure is not called and `false` is returned;
    /// otherwise the closure's result initializes the inner value and `true` is returned.
#[inline]
pub fn init_locking<F: FnOnce() -> T>(&self, f: F) -> bool {
let mut lock = self.0.lock();
if lock.is_some() {
return false;
}
*lock = Some(f());
true
}
/// Tries to initialize the inner value by calling the closure without ensuring that no-one
    /// else can access it. This means that when this is called from multiple threads, multiple
    /// closures may concurrently be computing a value which the inner value should take.
    /// Only one of these closures is used to actually initialize the value.
    /// If some other closure already set the value,
    /// we return the value our closure computed, wrapped in `Some`.
    /// If our closure set the value, `None` is returned.
    /// If the value was already initialized, the closure is not called and `None` is returned.
#[inline]
pub fn init_nonlocking<F: FnOnce() -> T>(&self, f: F) -> Option<T> {
if self.0.lock().is_some() {
None
} else {
self.try_set(f())
}
}
/// Tries to initialize the inner value by calling the closure without ensuring that no-one
    /// else can access it. This means that when this is called from multiple threads, multiple
    /// closures may concurrently be computing a value which the inner value should take.
    /// Only one of these closures is used to actually initialize the value.
    /// If some other closure already set the value, we assert that our closure computed
    /// a value equal to the value already set, and then
    /// we return the value our closure computed, wrapped in `Some`.
    /// If our closure set the value, `None` is returned.
    /// If the value was already initialized, the closure is not called and `None` is returned.
#[inline]
pub fn init_nonlocking_same<F: FnOnce() -> T>(&self, f: F) -> Option<T> where T: Eq {
if self.0.lock().is_some() {
None
} else {
self.try_set_same(f())
}
}
/// Tries to get a reference to the inner value, returns `None` if it is not yet initialized
#[inline(always)]
pub fn try_get(&self) -> Option<&T> {
let lock = &*self.0.lock();
if let Some(ref inner) = *lock {
// This is safe since we won't mutate the inner value
unsafe { Some(&*(inner as *const T)) }
} else {
None
}
}
/// Gets reference to the inner value, panics if it is not yet initialized
#[inline(always)]
pub fn get(&self) -> &T {
self.try_get().expect("value was not set")
}
/// Gets reference to the inner value, panics if it is not yet initialized
#[inline(always)]
pub fn borrow(&self) -> &T {
self.get()
}
}
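To make the write-once contract above concrete, here is a condensed standalone stand-in built directly on `std::sync::Mutex` (dropping the `PhantomData` and the locking/non-locking init variants), together with a small race: exactly one thread's `try_set` wins, and the others get their value handed back.

```rust
use std::sync::{Arc, Mutex};
use std::thread;

// Condensed stand-in for the Once<T> above; not the rustc_data_structures type.
struct Once<T>(Mutex<Option<T>>);

impl<T> Once<T> {
    fn new() -> Self {
        Once(Mutex::new(None))
    }

    /// Returns `None` if we initialized the value, `Some(value)` if it was already set.
    fn try_set(&self, value: T) -> Option<T> {
        let mut lock = self.0.lock().unwrap();
        if lock.is_some() {
            return Some(value);
        }
        *lock = Some(value);
        None
    }

    /// Panics if the value was never set.
    fn get(&self) -> T
    where
        T: Copy,
    {
        let guard = self.0.lock().unwrap();
        (*guard).expect("value was not set")
    }
}

fn main() {
    let cell = Arc::new(Once::new());

    let handles: Vec<_> = (0..4)
        .map(|i| {
            let cell = Arc::clone(&cell);
            thread::spawn(move || cell.try_set(i).is_none())
        })
        .collect();

    // Exactly one thread wins the race to initialize the cell.
    let winners = handles
        .into_iter()
        .map(|h| h.join().unwrap())
        .filter(|&won| won)
        .count();
    assert_eq!(winners, 1);
    println!("initialized to {}", cell.get());
}
```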
impl<T: Copy + Debug> Debug for LockCell<T> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_struct("LockCell")
@ -439,3 +586,54 @@ impl<T: Clone> Clone for RwLock<T> {
RwLock::new(self.borrow().clone())
}
}
/// A type which only allows its inner value to be used in one thread.
/// It will panic if it is used on multiple threads.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
pub struct OneThread<T> {
#[cfg(parallel_queries)]
thread: thread::ThreadId,
inner: T,
}
unsafe impl<T> std::marker::Sync for OneThread<T> {}
unsafe impl<T> std::marker::Send for OneThread<T> {}
impl<T> OneThread<T> {
#[inline(always)]
fn check(&self) {
#[cfg(parallel_queries)]
assert_eq!(thread::current().id(), self.thread);
}
#[inline(always)]
pub fn new(inner: T) -> Self {
OneThread {
#[cfg(parallel_queries)]
thread: thread::current().id(),
inner,
}
}
#[inline(always)]
pub fn into_inner(value: Self) -> T {
value.check();
value.inner
}
}
impl<T> Deref for OneThread<T> {
type Target = T;
fn deref(&self) -> &T {
self.check();
&self.inner
}
}
impl<T> DerefMut for OneThread<T> {
fn deref_mut(&mut self) -> &mut T {
self.check();
&mut self.inner
}
}
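And a standalone stand-in for the `OneThread` wrapper above, showing the behaviour it exists for: the wrapper itself may be shared freely, but touching the inner value from any thread other than the creating one trips the assertion (here the check is unconditional, whereas the real type only checks under `cfg(parallel_queries)`):

```rust
use std::cell::RefCell;
use std::thread;

// Standalone stand-in for OneThread<T>; not the rustc_data_structures type.
struct OneThread<T> {
    thread: thread::ThreadId,
    inner: T,
}

// Assumption of the sketch: the inner value is only reachable through `get`,
// which asserts we are on the creating thread, so sharing the wrapper is ok.
unsafe impl<T> Sync for OneThread<T> {}
unsafe impl<T> Send for OneThread<T> {}

impl<T> OneThread<T> {
    fn new(inner: T) -> Self {
        OneThread { thread: thread::current().id(), inner }
    }

    fn get(&self) -> &T {
        assert_eq!(thread::current().id(), self.thread, "OneThread used on the wrong thread");
        &self.inner
    }
}

fn main() {
    let cell = OneThread::new(RefCell::new(0));
    *cell.get().borrow_mut() += 1; // same thread: fine

    // A different thread can see the wrapper, but using it panics.
    let result = thread::scope(|s| s.spawn(|| { let _ = cell.get(); }).join());
    assert!(result.is_err());
}
```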

View File

@ -652,10 +652,11 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
// these need to be set "early" so that expansion sees `quote` if enabled.
sess.init_features(features);
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
let crate_types = collect_crate_types(sess, &krate.attrs);
sess.crate_types.set(crate_types);
let disambiguator = compute_crate_disambiguator(sess);
*sess.crate_disambiguator.borrow_mut() = Some(disambiguator);
sess.crate_disambiguator.set(disambiguator);
rustc_incremental::prepare_session_directory(
sess,
&crate_name,
@ -783,7 +784,7 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
let features = sess.features_untracked();
let cfg = syntax::ext::expand::ExpansionConfig {
features: Some(&features),
recursion_limit: sess.recursion_limit.get(),
recursion_limit: *sess.recursion_limit.get(),
trace_mac: sess.opts.debugging_opts.trace_macros,
should_test: sess.opts.test,
..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string())

View File

@ -614,6 +614,7 @@ impl<'a> CrateLoader<'a> {
});
if !any_non_rlib {
info!("panic runtime injection skipped, only generating rlib");
self.sess.injected_panic_runtime.set(None);
return
}
@ -646,6 +647,7 @@ impl<'a> CrateLoader<'a> {
// we just don't need one at all, then we're done here and there's
// nothing else to do.
if !needs_panic_runtime || runtime_found {
self.sess.injected_panic_runtime.set(None);
return
}
@ -812,9 +814,7 @@ impl<'a> CrateLoader<'a> {
fn inject_allocator_crate(&mut self, krate: &ast::Crate) {
let has_global_allocator = has_global_allocator(krate);
if has_global_allocator {
self.sess.has_global_allocator.set(true);
}
self.sess.has_global_allocator.set(has_global_allocator);
// Check to see if we actually need an allocator. This desire comes
// about through the `#![needs_allocator]` attribute and is typically
@ -825,6 +825,8 @@ impl<'a> CrateLoader<'a> {
needs_allocator = needs_allocator || data.needs_allocator(self.sess);
});
if !needs_allocator {
self.sess.injected_allocator.set(None);
self.sess.allocator_kind.set(None);
return
}
@ -844,6 +846,8 @@ impl<'a> CrateLoader<'a> {
}
}
if !need_lib_alloc && !need_exe_alloc {
self.sess.injected_allocator.set(None);
self.sess.allocator_kind.set(None);
return
}
@ -881,6 +885,7 @@ impl<'a> CrateLoader<'a> {
});
if global_allocator.is_some() {
self.sess.allocator_kind.set(Some(AllocatorKind::Global));
self.sess.injected_allocator.set(None);
return
}
@ -924,6 +929,9 @@ impl<'a> CrateLoader<'a> {
};
let allocation_crate_data = exe_allocation_crate_data.or_else(|| {
// No allocator was injected
self.sess.injected_allocator.set(None);
if attr::contains_name(&krate.attrs, "default_lib_allocator") {
// Prefer self as the allocator if there's a collision
return None;

View File

@ -459,7 +459,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
let has_default_lib_allocator =
attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
let has_global_allocator = tcx.sess.has_global_allocator.get();
let has_global_allocator = *tcx.sess.has_global_allocator.get();
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
extra_filename: tcx.sess.opts.cg.extra_filename.clone(),

View File

@ -194,8 +194,8 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
param_env,
memory: Memory::new(tcx, memory_data),
stack: Vec::new(),
stack_limit: tcx.sess.const_eval_stack_frame_limit.get(),
steps_remaining: tcx.sess.const_eval_step_limit.get(),
stack_limit: tcx.sess.const_eval_stack_frame_limit,
steps_remaining: tcx.sess.const_eval_step_limit,
}
}

View File

@ -325,7 +325,7 @@ fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let mut roots = Vec::new();
{
let entry_fn = tcx.sess.entry_fn.borrow().map(|(node_id, _)| {
let entry_fn = tcx.sess.entry_fn.borrow().map(|(node_id, _, _)| {
tcx.hir.local_def_id(node_id)
});
@ -457,7 +457,7 @@ fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Code that needs to instantiate the same function recursively
// more than the recursion limit is assumed to be causing an
// infinite expansion.
if recursion_depth > tcx.sess.recursion_limit.get() {
if recursion_depth > *tcx.sess.recursion_limit.get() {
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
@ -484,7 +484,7 @@ fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// which means that rustc basically hangs.
//
// Bail out in these cases to avoid that bad user experience.
let type_length_limit = tcx.sess.type_length_limit.get();
let type_length_limit = *tcx.sess.type_length_limit.get();
if type_length > type_length_limit {
// The instance name is already known to be too long for rustc. Use
// `{:.64}` to avoid blasting the user's terminal with thousands of
@ -1038,7 +1038,7 @@ impl<'b, 'a, 'v> RootCollector<'b, 'a, 'v> {
/// the return type of `main`. This is not needed when
/// the user writes their own `start` manually.
fn push_extra_entry_roots(&mut self) {
if self.tcx.sess.entry_type.get() != Some(config::EntryMain) {
if self.tcx.sess.entry_fn.get().map(|e| e.2) != Some(config::EntryMain) {
return
}

View File

@ -92,7 +92,7 @@ pub trait MonoItemExt<'a, 'tcx>: fmt::Debug {
match *self.as_mono_item() {
MonoItem::Fn(ref instance) => {
let entry_def_id =
tcx.sess.entry_fn.borrow().map(|(id, _)| tcx.hir.local_def_id(id));
tcx.sess.entry_fn.borrow().map(|(id, _, _)| tcx.hir.local_def_id(id));
// If this function isn't inlined or otherwise has explicit
// linkage, then we'll be creating a globally shared version.
if self.explicit_linkage(tcx).is_some() ||

View File

@ -153,7 +153,7 @@ fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
span, for_ty, depth, ty
);
if depth >= tcx.sess.recursion_limit.get() {
if depth >= *tcx.sess.recursion_limit.get() {
return Ok(DtorckConstraint {
outlives: vec![],
dtorck_types: vec![],

View File

@ -11,17 +11,14 @@
use rustc::traits::{Normalized, ObligationCause};
use rustc::traits::query::NoSolution;
use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use rustc::util::common::CellUsizeExt;
use std::sync::atomic::Ordering;
crate fn normalize_ty_after_erasing_regions<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Ty<'tcx> {
let ParamEnvAnd { param_env, value } = goal;
tcx.sess
.perf_stats
.normalize_ty_after_erasing_regions
.increment();
tcx.sess.perf_stats.normalize_ty_after_erasing_regions.fetch_add(1, Ordering::Relaxed);
tcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::dummy();
match infcx.at(&cause, param_env).normalize(&value) {

View File

@ -13,11 +13,11 @@ use rustc::traits::{self, FulfillmentContext, Normalized, ObligationCause,
SelectionContext};
use rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};
use rustc::ty::{ParamEnvAnd, TyCtxt};
use rustc::util::common::CellUsizeExt;
use rustc_data_structures::sync::Lrc;
use syntax::ast::DUMMY_NODE_ID;
use syntax_pos::DUMMY_SP;
use util;
use std::sync::atomic::Ordering;
crate fn normalize_projection_ty<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
@ -25,7 +25,7 @@ crate fn normalize_projection_ty<'tcx>(
) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
debug!("normalize_provider(goal={:#?})", goal);
tcx.sess.perf_stats.normalize_projection_ty.increment();
tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
tcx.infer_ctxt().enter(|ref infcx| {
let (
ParamEnvAnd {

View File

@ -157,12 +157,12 @@ fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
})
.collect();
if let Some(id) = tcx.sess.derive_registrar_fn.get() {
if let Some(id) = *tcx.sess.derive_registrar_fn.get() {
let def_id = tcx.hir.local_def_id(id);
reachable_non_generics.insert(def_id, SymbolExportLevel::C);
}
if let Some(id) = tcx.sess.plugin_registrar_fn.get() {
if let Some(id) = *tcx.sess.plugin_registrar_fn.get() {
let def_id = tcx.hir.local_def_id(id);
reachable_non_generics.insert(def_id, SymbolExportLevel::C);
}

View File

@ -154,13 +154,16 @@ fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize {
}
}
pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
target_machine_factory(sess)().unwrap_or_else(|err| {
pub fn create_target_machine(sess: &Session, find_features: bool) -> TargetMachineRef {
target_machine_factory(sess, find_features)().unwrap_or_else(|err| {
llvm_err(sess.diagnostic(), err).raise()
})
}
pub fn target_machine_factory(sess: &Session)
// If find_features is true, this won't access `sess.crate_types` (it assumes
// `is_pie_binary` is false), because `sess.crate_types` is still uninitialized
// while we are discovering the LLVM target features and so cannot be accessed.
pub fn target_machine_factory(sess: &Session, find_features: bool)
-> Arc<Fn() -> Result<TargetMachineRef, String> + Send + Sync>
{
let reloc_model = get_reloc_model(sess);
@ -201,7 +204,7 @@ pub fn target_machine_factory(sess: &Session)
};
let cpu = CString::new(cpu.as_bytes()).unwrap();
let features = CString::new(target_feature(sess).as_bytes()).unwrap();
let is_pie_binary = is_pie_binary(sess);
let is_pie_binary = !find_features && is_pie_binary(sess);
let trap_unreachable = sess.target.target.options.trap_unreachable;
Arc::new(move || {
@ -1510,7 +1513,7 @@ fn start_executing_work(tcx: TyCtxt,
regular_module_config: modules_config,
metadata_module_config: metadata_config,
allocator_module_config: allocator_config,
tm_factory: target_machine_factory(tcx.sess),
tm_factory: target_machine_factory(tcx.sess, false),
total_cgus,
msvc_imps_needed: msvc_imps_needed(tcx),
target_pointer_width: tcx.sess.target.target.target_pointer_width.clone(),

View File

@ -518,7 +518,7 @@ pub fn set_link_section(cx: &CodegenCx,
/// users main function.
fn maybe_create_entry_wrapper(cx: &CodegenCx) {
let (main_def_id, span) = match *cx.sess().entry_fn.borrow() {
Some((id, span)) => {
Some((id, span, _)) => {
(cx.tcx.hir.local_def_id(id), span)
}
None => return,
@ -534,11 +534,11 @@ fn maybe_create_entry_wrapper(cx: &CodegenCx) {
let main_llfn = callee::get_fn(cx, instance);
let et = cx.sess().entry_type.get().unwrap();
let et = cx.sess().entry_fn.get().map(|e| e.2);
match et {
config::EntryMain => create_entry_fn(cx, span, main_llfn, main_def_id, true),
config::EntryStart => create_entry_fn(cx, span, main_llfn, main_def_id, false),
config::EntryNone => {} // Do nothing.
Some(config::EntryMain) => create_entry_fn(cx, span, main_llfn, main_def_id, true),
Some(config::EntryStart) => create_entry_fn(cx, span, main_llfn, main_def_id, false),
None => {} // Do nothing.
}
fn create_entry_fn<'cx>(cx: &'cx CodegenCx,
@ -738,7 +738,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: ModuleSource::Translated(ModuleLlvm {
llcx: metadata_llcx,
llmod: metadata_llmod,
tm: create_target_machine(tcx.sess),
tm: create_target_machine(tcx.sess, false),
}),
kind: ModuleKind::Metadata,
};
@ -796,7 +796,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
codegen_units.len());
// Translate an allocator shim, if any
let allocator_module = if let Some(kind) = tcx.sess.allocator_kind.get() {
let allocator_module = if let Some(kind) = *tcx.sess.allocator_kind.get() {
unsafe {
let llmod_id = "allocator";
let (llcx, llmod) =
@ -804,7 +804,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let modules = ModuleLlvm {
llmod,
llcx,
tm: create_target_machine(tcx.sess),
tm: create_target_machine(tcx.sess, false),
};
time(tcx.sess, "write allocator module", || {
allocator::trans(tcx, &modules, kind)
@ -1261,7 +1261,7 @@ fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let llvm_module = ModuleLlvm {
llcx: cx.llcx,
llmod: cx.llmod,
tm: create_target_machine(cx.sess()),
tm: create_target_machine(cx.sess(), false),
};
ModuleTranslation {

View File

@ -162,7 +162,7 @@ pub unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (Cont
// Ensure the data-layout values hardcoded remain the defaults.
if sess.target.target.options.is_builtin {
let tm = ::back::write::create_target_machine(sess);
let tm = ::back::write::create_target_machine(sess, false);
llvm::LLVMRustSetDataLayoutFromTargetMachine(llmod, tm);
llvm::LLVMRustDisposeTargetMachine(tm);

View File

@ -263,7 +263,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
let local_id = cx.tcx.hir.as_local_node_id(def_id);
match *cx.sess().entry_fn.borrow() {
Some((id, _)) => {
Some((id, _, _)) => {
if local_id == Some(id) {
flags = flags | DIFlags::FlagMainSubprogram;
}

View File

@ -141,7 +141,7 @@ pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {
}
pub fn target_features(sess: &Session) -> Vec<Symbol> {
let target_machine = create_target_machine(sess);
let target_machine = create_target_machine(sess, true);
target_feature_whitelist(sess)
.iter()
.filter(|feature| {
@ -179,7 +179,7 @@ pub fn print_passes() {
pub(crate) fn print(req: PrintRequest, sess: &Session) {
require_inited();
let tm = create_target_machine(sess);
let tm = create_target_machine(sess, true);
unsafe {
match req {
PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),

View File

@ -51,7 +51,7 @@ pub mod symbol_names_test;
/// that actually test that compilation succeeds without
/// reporting an error.
pub fn check_for_rustc_errors_attr(tcx: TyCtxt) {
if let Some((id, span)) = *tcx.sess.entry_fn.borrow() {
if let Some((id, span, _)) = *tcx.sess.entry_fn.borrow() {
let main_def_id = tcx.hir.local_def_id(id);
if tcx.has_attr(main_def_id, "rustc_error") {

View File

@ -244,11 +244,11 @@ fn compute_symbol_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: Instance
let node_id = tcx.hir.as_local_node_id(def_id);
if let Some(id) = node_id {
if tcx.sess.plugin_registrar_fn.get() == Some(id) {
if *tcx.sess.plugin_registrar_fn.get() == Some(id) {
let disambiguator = tcx.sess.local_crate_disambiguator();
return tcx.sess.generate_plugin_registrar_symbol(disambiguator);
}
if tcx.sess.derive_registrar_fn.get() == Some(id) {
if *tcx.sess.derive_registrar_fn.get() == Some(id) {
let disambiguator = tcx.sess.local_crate_disambiguator();
return tcx.sess.generate_derive_registrar_symbol(disambiguator);
}

View File

@ -56,9 +56,9 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> {
return Some((self.cur_ty, 0));
}
if self.steps.len() >= tcx.sess.recursion_limit.get() {
if self.steps.len() >= *tcx.sess.recursion_limit.get() {
// We've reached the recursion limit, error gracefully.
let suggested_limit = tcx.sess.recursion_limit.get() * 2;
let suggested_limit = *tcx.sess.recursion_limit.get() * 2;
let msg = format!("reached the recursion limit while auto-dereferencing {:?}",
self.cur_ty);
let error_id = (DiagnosticMessageId::ErrorId(55), Some(self.span), msg.clone());

View File

@ -31,6 +31,7 @@ use rustc_data_structures::sync::Lrc;
pub use self::MethodError::*;
pub use self::CandidateSource::*;
pub use self::suggest::TraitInfo;
mod confirm;
pub mod probe;
@ -38,6 +39,10 @@ mod suggest;
use self::probe::{IsSuggestion, ProbeScope};
pub fn provide(providers: &mut ty::maps::Providers) {
suggest::provide(providers);
}
#[derive(Clone, Copy, Debug)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.

View File

@ -13,6 +13,7 @@
use check::FnCtxt;
use rustc::hir::map as hir_map;
use rustc_data_structures::sync::Lrc;
use rustc::ty::{self, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable};
use hir::def::Def;
use hir::def_id::{CRATE_DEF_INDEX, DefId};
@ -26,12 +27,12 @@ use syntax::util::lev_distance::find_best_match_for_name;
use errors::DiagnosticBuilder;
use syntax_pos::Span;
use rustc::hir::def_id::LOCAL_CRATE;
use rustc::hir;
use rustc::hir::print;
use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::ty::TyAdt;
use std::cell;
use std::cmp::Ordering;
use super::{MethodError, NoMatchData, CandidateSource};
@ -208,6 +209,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// be used exists at all, and the type is an ambiguous numeric type
// ({integer}/{float}).
let mut candidates = all_traits(self.tcx)
.into_iter()
.filter(|info| {
self.associated_item(info.def_id, item_name, Namespace::Value).is_some()
});
@ -519,6 +521,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// implement, by finding ones that have the item name, and are
// legal to implement.
let mut candidates = all_traits(self.tcx)
.into_iter()
.filter(|info| {
// we approximate the coherence rules to only suggest
// traits that are legal to implement by requiring that
@ -603,18 +606,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
}
}
pub type AllTraitsVec = Vec<DefId>;
#[derive(Copy, Clone)]
pub struct TraitInfo {
pub def_id: DefId,
}
impl TraitInfo {
fn new(def_id: DefId) -> TraitInfo {
TraitInfo { def_id: def_id }
}
}
impl PartialEq for TraitInfo {
fn eq(&self, other: &TraitInfo) -> bool {
self.cmp(other) == Ordering::Equal
@ -638,8 +634,12 @@ impl Ord for TraitInfo {
}
/// Retrieve all traits in this crate and any dependent crates.
pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> AllTraits<'a> {
if tcx.all_traits.borrow().is_none() {
pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<TraitInfo> {
tcx.all_traits(LOCAL_CRATE).iter().map(|&def_id| TraitInfo { def_id }).collect()
}
/// Compute all traits in this crate and any dependent crates.
fn compute_all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<DefId> {
use rustc::hir::itemlikevisit;
let mut traits = vec![];
@ -649,7 +649,7 @@ pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> AllTraits<'a>
// meh.
struct Visitor<'a, 'tcx: 'a> {
map: &'a hir_map::Map<'tcx>,
traits: &'a mut AllTraitsVec,
traits: &'a mut Vec<DefId>,
}
impl<'v, 'a, 'tcx> itemlikevisit::ItemLikeVisitor<'v> for Visitor<'a, 'tcx> {
fn visit_item(&mut self, i: &'v hir::Item) {
@ -676,7 +676,7 @@ pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> AllTraits<'a>
// Cross-crate:
let mut external_mods = FxHashSet();
fn handle_external_def(tcx: TyCtxt,
traits: &mut AllTraitsVec,
traits: &mut Vec<DefId>,
external_mods: &mut FxHashSet<DefId>,
def: Def) {
let def_id = def.def_id();
@ -703,43 +703,16 @@ pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> AllTraits<'a>
handle_external_def(tcx, &mut traits, &mut external_mods, Def::Mod(def_id));
}
*tcx.all_traits.borrow_mut() = Some(traits);
}
let borrow = tcx.all_traits.borrow();
assert!(borrow.is_some());
AllTraits {
borrow,
idx: 0,
}
traits
}
pub struct AllTraits<'a> {
borrow: cell::Ref<'a, Option<AllTraitsVec>>,
idx: usize,
}
impl<'a> Iterator for AllTraits<'a> {
type Item = TraitInfo;
fn next(&mut self) -> Option<TraitInfo> {
let AllTraits { ref borrow, ref mut idx } = *self;
// ugh.
borrow.as_ref().unwrap().get(*idx).map(|info| {
*idx += 1;
TraitInfo::new(*info)
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.borrow.as_ref().unwrap().len() - self.idx;
(len, Some(len))
pub fn provide(providers: &mut ty::maps::Providers) {
providers.all_traits = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
Lrc::new(compute_all_traits(tcx))
}
}
impl<'a> ExactSizeIterator for AllTraits<'a> {}
struct UsePlacementFinder<'a, 'tcx: 'a, 'gcx: 'tcx> {
target_module: ast::NodeId,
span: Option<Span>,

View File

@ -730,6 +730,7 @@ fn check_impl_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: De
}
pub fn provide(providers: &mut Providers) {
method::provide(providers);
*providers = Providers {
typeck_item_bodies,
typeck_tables_of,
@ -1127,10 +1128,10 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
// Check that the main return type implements the termination trait.
if let Some(term_id) = fcx.tcx.lang_items().termination() {
if let Some((id, _)) = *fcx.tcx.sess.entry_fn.borrow() {
if let Some((id, _, entry_type)) = *fcx.tcx.sess.entry_fn.borrow() {
if id == fn_id {
match fcx.sess().entry_type.get() {
Some(config::EntryMain) => {
match entry_type {
config::EntryMain => {
let substs = fcx.tcx.mk_substs(iter::once(Kind::from(ret_ty)));
let trait_ref = ty::TraitRef::new(term_id, substs);
let return_ty_span = decl.output.span();
@ -1141,7 +1142,7 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
traits::Obligation::new(
cause, param_env, trait_ref.to_predicate()));
},
_ => {},
config::EntryStart => {},
}
}
}

View File

@ -288,12 +288,10 @@ fn check_start_fn_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
fn check_for_entry_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
if let Some((id, sp)) = *tcx.sess.entry_fn.borrow() {
match tcx.sess.entry_type.get() {
Some(config::EntryMain) => check_main_fn_ty(tcx, id, sp),
Some(config::EntryStart) => check_start_fn_ty(tcx, id, sp),
Some(config::EntryNone) => {}
None => bug!("entry function without a type")
if let Some((id, sp, entry_type)) = *tcx.sess.entry_fn.borrow() {
match entry_type {
config::EntryMain => check_main_fn_ty(tcx, id, sp),
config::EntryStart => check_start_fn_ty(tcx, id, sp),
}
}
}