Auto merge of #109547 - matthiaskrgr:rollup-zczqgdk, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #108629 (rustdoc: add support for type filters in arguments and generics)
 - #108924 (panic_immediate_abort requires abort as a panic strategy)
 - #108961 (Refine error spans for const args in hir typeck)
 - #108986 (sync LVI tests)
 - #109142 (Add block-based mutex unlocking example)
 - #109368 (fix typo in the creation of OpenOption for RustyHermit)
 - #109493 (Return nested obligations from canonical response var unification)
 - #109515 (Add AixLinker to support linking on AIX)
 - #109536 (resolve: Rename some cstore methods to match queries and add comments)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2023-03-24 02:29:48 +00:00
commit 4c0f5008ce
48 changed files with 717 additions and 289 deletions


@ -133,6 +133,9 @@ pub fn get_linker<'a>(
LinkerFlavor::Unix(Cc::No) if sess.target.os == "l4re" => {
Box::new(L4Bender::new(cmd, sess)) as Box<dyn Linker>
}
LinkerFlavor::Unix(Cc::No) if sess.target.os == "aix" => {
Box::new(AixLinker::new(cmd, sess)) as Box<dyn Linker>
}
LinkerFlavor::WasmLld(Cc::No) => Box::new(WasmLd::new(cmd, sess)) as Box<dyn Linker>,
LinkerFlavor::Gnu(cc, _)
| LinkerFlavor::Darwin(cc, _)
@ -1474,6 +1477,177 @@ impl<'a> L4Bender<'a> {
}
}
/// Linker for AIX.
pub struct AixLinker<'a> {
cmd: Command,
sess: &'a Session,
hinted_static: bool,
}
impl<'a> AixLinker<'a> {
pub fn new(cmd: Command, sess: &'a Session) -> AixLinker<'a> {
AixLinker { cmd: cmd, sess: sess, hinted_static: false }
}
fn hint_static(&mut self) {
if !self.hinted_static {
self.cmd.arg("-bstatic");
self.hinted_static = true;
}
}
fn hint_dynamic(&mut self) {
if self.hinted_static {
self.cmd.arg("-bdynamic");
self.hinted_static = false;
}
}
fn build_dylib(&mut self, _out_filename: &Path) {
self.cmd.arg("-bM:SRE");
self.cmd.arg("-bnoentry");
// FIXME: Use CreateExportList utility to create export list
// and remove -bexpfull.
self.cmd.arg("-bexpfull");
}
}
impl<'a> Linker for AixLinker<'a> {
fn link_dylib(&mut self, lib: &str, _verbatim: bool, _as_needed: bool) {
self.hint_dynamic();
self.cmd.arg(format!("-l{}", lib));
}
fn link_staticlib(&mut self, lib: &str, _verbatim: bool) {
self.hint_static();
self.cmd.arg(format!("-l{}", lib));
}
fn link_rlib(&mut self, lib: &Path) {
self.hint_static();
self.cmd.arg(lib);
}
fn include_path(&mut self, path: &Path) {
self.cmd.arg("-L").arg(path);
}
fn framework_path(&mut self, _: &Path) {
bug!("frameworks are not supported on AIX");
}
fn output_filename(&mut self, path: &Path) {
self.cmd.arg("-o").arg(path);
}
fn add_object(&mut self, path: &Path) {
self.cmd.arg(path);
}
fn full_relro(&mut self) {}
fn partial_relro(&mut self) {}
fn no_relro(&mut self) {}
fn cmd(&mut self) -> &mut Command {
&mut self.cmd
}
fn set_output_kind(&mut self, output_kind: LinkOutputKind, out_filename: &Path) {
match output_kind {
LinkOutputKind::DynamicDylib => {
self.hint_dynamic();
self.build_dylib(out_filename);
}
LinkOutputKind::StaticDylib => {
self.hint_static();
self.build_dylib(out_filename);
}
_ => {}
}
}
fn link_rust_dylib(&mut self, lib: &str, _: &Path) {
self.hint_dynamic();
self.cmd.arg(format!("-l{}", lib));
}
fn link_framework(&mut self, _framework: &str, _as_needed: bool) {
bug!("frameworks not supported on AIX");
}
fn link_whole_staticlib(&mut self, lib: &str, verbatim: bool, search_path: &[PathBuf]) {
self.hint_static();
let lib = find_native_static_library(lib, verbatim, search_path, &self.sess);
self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap()));
}
fn link_whole_rlib(&mut self, lib: &Path) {
self.hint_static();
self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap()));
}
fn gc_sections(&mut self, _keep_metadata: bool) {
self.cmd.arg("-bgc");
}
fn no_gc_sections(&mut self) {
self.cmd.arg("-bnogc");
}
fn optimize(&mut self) {}
fn pgo_gen(&mut self) {}
fn control_flow_guard(&mut self) {}
fn debuginfo(&mut self, strip: Strip, _: &[PathBuf]) {
match strip {
Strip::None => {}
// FIXME: -s strips the symbol table, line number information
// and relocation information.
Strip::Debuginfo | Strip::Symbols => {
self.cmd.arg("-s");
}
}
}
fn no_crt_objects(&mut self) {}
fn no_default_libraries(&mut self) {}
fn export_symbols(&mut self, tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) {
let path = tmpdir.join("list.exp");
let res: io::Result<()> = try {
let mut f = BufWriter::new(File::create(&path)?);
// TODO: use llvm-nm to generate export list.
for symbol in symbols {
debug!(" _{}", symbol);
writeln!(f, " {}", symbol)?;
}
};
if let Err(e) = res {
self.sess.fatal(&format!("failed to write export file: {}", e));
}
self.cmd.arg(format!("-bE:{}", path.to_str().unwrap()));
}
fn subsystem(&mut self, _subsystem: &str) {}
fn reset_per_library_state(&mut self) {
self.hint_dynamic();
}
fn linker_plugin_lto(&mut self) {}
fn add_eh_frame_header(&mut self) {}
fn add_no_exec(&mut self) {}
fn add_as_needed(&mut self) {}
}
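For orientation, a `LinkOutputKind::DynamicDylib` link driven by the methods above passes flags to the native linker roughly like the following. This is an illustration assembled only from the code shown here, not output captured from a real AIX build; the angle-bracketed names are placeholders.

-bM:SRE -bnoentry -bexpfull -o <out> -L<search-dir> -l<lib> -bE:<tmpdir>/list.exp

`-bstatic` and `-bdynamic` are additionally toggled around static and dynamic library groups by `hint_static`/`hint_dynamic`, and `reset_per_library_state` switches back to dynamic between libraries.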
fn for_each_exported_symbols_include_dep<'tcx>(
tcx: TyCtxt<'tcx>,
crate_type: CrateType,


@ -4,7 +4,7 @@ use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
use rustc_infer::traits::ObligationCauseCode;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor};
use rustc_span::{self, Span};
use rustc_span::{self, symbol::kw, Span};
use rustc_trait_selection::traits;
use std::ops::ControlFlow;
@ -25,17 +25,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let generics = self.tcx.generics_of(def_id);
let predicate_substs = match unsubstituted_pred.kind().skip_binder() {
ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => pred.trait_ref.substs,
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => pred.projection_ty.substs,
_ => ty::List::empty(),
ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => pred.trait_ref.substs.to_vec(),
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
pred.projection_ty.substs.to_vec()
}
ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(arg, ty)) => {
vec![ty.into(), arg.into()]
}
ty::PredicateKind::ConstEvaluatable(e) => vec![e.into()],
_ => return false,
};
let find_param_matching = |matches: &dyn Fn(&ty::ParamTy) -> bool| {
predicate_substs.types().find_map(|ty| {
ty.walk().find_map(|arg| {
let find_param_matching = |matches: &dyn Fn(ty::ParamTerm) -> bool| {
predicate_substs.iter().find_map(|arg| {
arg.walk().find_map(|arg| {
if let ty::GenericArgKind::Type(ty) = arg.unpack()
&& let ty::Param(param_ty) = ty.kind()
&& matches(param_ty)
&& let ty::Param(param_ty) = *ty.kind()
&& matches(ty::ParamTerm::Ty(param_ty))
{
Some(arg)
} else if let ty::GenericArgKind::Const(ct) = arg.unpack()
&& let ty::ConstKind::Param(param_ct) = ct.kind()
&& matches(ty::ParamTerm::Const(param_ct))
{
Some(arg)
} else {
@ -47,21 +58,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Prefer generics that are local to the fn item, since these are likely
// to be the cause of the unsatisfied predicate.
let mut param_to_point_at = find_param_matching(&|param_ty| {
self.tcx.parent(generics.type_param(param_ty, self.tcx).def_id) == def_id
let mut param_to_point_at = find_param_matching(&|param_term| {
self.tcx.parent(generics.param_at(param_term.index(), self.tcx).def_id) == def_id
});
// Fall back to generic that isn't local to the fn item. This will come
// from a trait or impl, for example.
let mut fallback_param_to_point_at = find_param_matching(&|param_ty| {
self.tcx.parent(generics.type_param(param_ty, self.tcx).def_id) != def_id
&& param_ty.name != rustc_span::symbol::kw::SelfUpper
let mut fallback_param_to_point_at = find_param_matching(&|param_term| {
self.tcx.parent(generics.param_at(param_term.index(), self.tcx).def_id) != def_id
&& !matches!(param_term, ty::ParamTerm::Ty(ty) if ty.name == kw::SelfUpper)
});
// Finally, the `Self` parameter is possibly the reason that the predicate
// is unsatisfied. This is less likely to be true for methods, because
// method probe means that we already kinda check that the predicates due
// to the `Self` type are true.
let mut self_param_to_point_at =
find_param_matching(&|param_ty| param_ty.name == rustc_span::symbol::kw::SelfUpper);
let mut self_param_to_point_at = find_param_matching(
&|param_term| matches!(param_term, ty::ParamTerm::Ty(ty) if ty.name == kw::SelfUpper),
);
// Finally, for ambiguity-related errors, we actually want to look
// for a parameter that is the source of the inference type left
@ -225,14 +237,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.own_substs(ty::InternalSubsts::identity_for_item(self.tcx, def_id));
let Some((index, _)) = own_substs
.iter()
.filter(|arg| matches!(arg.unpack(), ty::GenericArgKind::Type(_)))
.enumerate()
.find(|(_, arg)| **arg == param_to_point_at) else { return false };
let Some(arg) = segment
.args()
.args
.iter()
.filter(|arg| matches!(arg, hir::GenericArg::Type(_)))
.nth(index) else { return false; };
error.obligation.cause.span = arg
.span()


@ -1041,13 +1041,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
self.root.tables.optimized_mir.get(self, id).is_some()
}
fn module_expansion(self, id: DefIndex, sess: &Session) -> ExpnId {
match self.def_kind(id) {
DefKind::Mod | DefKind::Enum | DefKind::Trait => self.get_expn_that_defined(id, sess),
_ => panic!("Expected module, found {:?}", self.local_def_id(id)),
}
}
fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool {
self.root
.tables


@ -490,6 +490,9 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
.alloc_slice(&CStore::from_tcx(tcx).crate_dependencies_in_postorder(LOCAL_CRATE))
},
crates: |tcx, ()| {
// The list of loaded crates is now frozen in query cache,
// so make sure cstore is not mutably accessed from here on.
tcx.untracked().cstore.leak();
tcx.arena.alloc_from_iter(CStore::from_tcx(tcx).iter_crate_data().map(|(cnum, _)| cnum))
},
..*providers
@ -537,16 +540,16 @@ impl CStore {
)
}
pub fn get_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
pub fn def_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
self.get_crate_data(def_id.krate).get_span(def_id.index, sess)
}
pub fn def_kind(&self, def: DefId) -> DefKind {
pub fn def_kind_untracked(&self, def: DefId) -> DefKind {
self.get_crate_data(def.krate).def_kind(def.index)
}
pub fn module_expansion_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
self.get_crate_data(def_id.krate).module_expansion(def_id.index, sess)
pub fn expn_that_defined_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
self.get_crate_data(def_id.krate).get_expn_that_defined(def_id.index, sess)
}
/// Only public-facing way to traverse all the definitions in a non-local crate.


@ -1051,6 +1051,21 @@ impl<'tcx> TermKind<'tcx> {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum ParamTerm {
Ty(ParamTy),
Const(ParamConst),
}
impl ParamTerm {
pub fn index(self) -> usize {
match self {
ParamTerm::Ty(ty) => ty.index as usize,
ParamTerm::Const(ct) => ct.index as usize,
}
}
}
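As a minimal sketch (not part of this diff) of how the new `ParamTerm` is consumed: `index()` lets callers look up either kind of parameter uniformly through `ty::Generics::param_at`, which is exactly what the reworked `find_param_matching` closures in the typeck hunk above rely on. The helper below is hypothetical and assumes the usual `rustc_middle`/`rustc_hir` items are in scope.

// Hypothetical helper: resolve the DefId of whichever parameter (type or
// const) a ParamTerm refers to, via the shared `index()` accessor.
fn param_def_id<'tcx>(
    tcx: TyCtxt<'tcx>,
    generics: &ty::Generics,
    term: ty::ParamTerm,
) -> DefId {
    // `param_at` takes a plain index, so it works for both `ParamTerm::Ty` and
    // `ParamTerm::Const`, mirroring `generics.param_at(param_term.index(), self.tcx)` above.
    generics.param_at(term.index(), tcx).def_id
}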
/// This kind of predicate has no *direct* correspondent in the
/// syntax, but it roughly corresponds to the syntactic forms:
///


@ -114,13 +114,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
}
if !def_id.is_local() {
let def_kind = self.cstore().def_kind(def_id);
// Query `def_kind` is not used because query system overhead is too expensive here.
let def_kind = self.cstore().def_kind_untracked(def_id);
if let DefKind::Mod | DefKind::Enum | DefKind::Trait = def_kind {
let parent = self
.tcx
.opt_parent(def_id)
.map(|parent_id| self.get_nearest_non_block_module(parent_id));
let expn_id = self.cstore().module_expansion_untracked(def_id, &self.tcx.sess);
// Query `expn_that_defined` is not used because
// hashing spans in its result is expensive.
let expn_id = self.cstore().expn_that_defined_untracked(def_id, &self.tcx.sess);
return Some(self.new_module(
parent,
ModuleKind::Def(def_kind, def_id, self.tcx.item_name(def_id)),
@ -194,6 +197,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
}
pub(crate) fn build_reduced_graph_external(&mut self, module: Module<'a>) {
// Query `module_children` is not used because hashing spans in its result is expensive.
let children =
Vec::from_iter(self.cstore().module_children_untracked(module.def_id(), self.tcx.sess));
for child in children {


@ -1875,7 +1875,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
fn def_span(&self, def_id: DefId) -> Span {
match def_id.as_local() {
Some(def_id) => self.tcx.source_span(def_id),
None => self.cstore().get_span_untracked(def_id, self.tcx.sess),
// Query `def_span` is not used because hashing its result span is expensive.
None => self.cstore().def_span_untracked(def_id, self.tcx.sess),
}
}


@ -99,20 +99,20 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
original_values: Vec<ty::GenericArg<'tcx>>,
response: CanonicalResponse<'tcx>,
) -> Result<Certainty, NoSolution> {
) -> Result<(Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
let substitution = self.compute_query_response_substitution(&original_values, &response);
let Response { var_values, external_constraints, certainty } =
response.substitute(self.tcx(), &substitution);
self.unify_query_var_values(param_env, &original_values, var_values)?;
let nested_goals = self.unify_query_var_values(param_env, &original_values, var_values)?;
// FIXME: implement external constraints.
let ExternalConstraintsData { region_constraints, opaque_types: _ } =
external_constraints.deref();
self.register_region_constraints(region_constraints);
Ok(certainty)
Ok((certainty, nested_goals))
}
/// This returns the substitutions to instantiate the bound variables of
@ -205,21 +205,15 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
original_values: &[ty::GenericArg<'tcx>],
var_values: CanonicalVarValues<'tcx>,
) -> Result<(), NoSolution> {
) -> Result<Vec<Goal<'tcx, ty::Predicate<'tcx>>>, NoSolution> {
assert_eq!(original_values.len(), var_values.len());
let mut nested_goals = vec![];
for (&orig, response) in iter::zip(original_values, var_values.var_values) {
// This can fail due to the occurs check, see
// `tests/ui/typeck/lazy-norm/equating-projection-cyclically.rs` for an example
// where that can happen.
//
// FIXME: To deal with #105787 I also expect us to emit nested obligations here at
// some point. We can figure out how to deal with this once we actually have
// an ICE.
let nested_goals = self.eq_and_get_goals(param_env, orig, response)?;
assert!(nested_goals.is_empty(), "{nested_goals:?}");
nested_goals.extend(self.eq_and_get_goals(param_env, orig, response)?);
}
Ok(())
Ok(nested_goals)
}
fn register_region_constraints(&mut self, region_constraints: &QueryRegionConstraints<'tcx>) {


@ -70,7 +70,7 @@ pub trait InferCtxtEvalExt<'tcx> {
fn evaluate_root_goal(
&self,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
) -> Result<(bool, Certainty), NoSolution>;
) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution>;
}
impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> {
@ -78,9 +78,8 @@ impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> {
fn evaluate_root_goal(
&self,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
) -> Result<(bool, Certainty), NoSolution> {
) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
let mode = if self.intercrate { SolverMode::Coherence } else { SolverMode::Normal };
let mut search_graph = search_graph::SearchGraph::new(self.tcx, mode);
let mut ecx = EvalCtxt {
@ -152,13 +151,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
&mut self,
is_normalizes_to_hack: IsNormalizesToHack,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
) -> Result<(bool, Certainty), NoSolution> {
) -> Result<(bool, Certainty, Vec<Goal<'tcx, ty::Predicate<'tcx>>>), NoSolution> {
let (orig_values, canonical_goal) = self.canonicalize_goal(goal);
let canonical_response =
EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
let has_changed = !canonical_response.value.var_values.is_identity();
let certainty = self.instantiate_and_apply_query_response(
let (certainty, nested_goals) = self.instantiate_and_apply_query_response(
goal.param_env,
orig_values,
canonical_response,
@ -186,7 +185,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
assert_eq!(certainty, canonical_response.value.certainty);
}
Ok((has_changed, certainty))
Ok((has_changed, certainty, nested_goals))
}
fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> {
@ -263,13 +262,14 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
let mut has_changed = Err(Certainty::Yes);
if let Some(goal) = goals.normalizes_to_hack_goal.take() {
let (_, certainty) = match this.evaluate_goal(
let (_, certainty, nested_goals) = match this.evaluate_goal(
IsNormalizesToHack::Yes,
goal.with(this.tcx(), ty::Binder::dummy(goal.predicate)),
) {
Ok(r) => r,
Err(NoSolution) => return Some(Err(NoSolution)),
};
new_goals.goals.extend(nested_goals);
if goal.predicate.projection_ty
!= this.resolve_vars_if_possible(goal.predicate.projection_ty)
@ -308,11 +308,12 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
}
for nested_goal in goals.goals.drain(..) {
let (changed, certainty) =
let (changed, certainty, nested_goals) =
match this.evaluate_goal(IsNormalizesToHack::No, nested_goal) {
Ok(result) => result,
Err(NoSolution) => return Some(Err(NoSolution)),
};
new_goals.goals.extend(nested_goals);
if changed {
has_changed = Ok(());


@ -1,6 +1,7 @@
use std::mem;
use rustc_infer::infer::InferCtxt;
use rustc_infer::traits::Obligation;
use rustc_infer::traits::{
query::NoSolution, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes,
PredicateObligation, SelectionError, TraitEngine,
@ -61,7 +62,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
let mut has_changed = false;
for obligation in mem::take(&mut self.obligations) {
let goal = obligation.clone().into();
let (changed, certainty) = match infcx.evaluate_root_goal(goal) {
let (changed, certainty, nested_goals) = match infcx.evaluate_root_goal(goal) {
Ok(result) => result,
Err(NoSolution) => {
errors.push(FulfillmentError {
@ -125,7 +126,16 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
continue;
}
};
// Push any nested goals that we get from unifying our canonical response
// with our obligation onto the fulfillment context.
self.obligations.extend(nested_goals.into_iter().map(|goal| {
Obligation::new(
infcx.tcx,
obligation.cause.clone(),
goal.param_env,
goal.predicate,
)
}));
has_changed |= changed;
match certainty {
Certainty::Yes => {}


@ -1282,10 +1282,20 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
),
ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(ct, ty)) => {
self.tcx.sess.struct_span_err(
let mut diag = self.tcx.sess.struct_span_err(
span,
&format!("the constant `{}` is not of type `{}`", ct, ty),
)
);
self.note_type_err(
&mut diag,
&obligation.cause,
None,
None,
TypeError::Sorts(ty::error::ExpectedFound::new(true, ty, ct.ty())),
false,
false,
);
diag
}
}
}


@ -1,9 +1,8 @@
use rustc_middle::traits::solve::{Certainty, Goal, MaybeCause};
use rustc_infer::traits::{TraitEngine, TraitEngineExt};
use rustc_middle::ty;
use crate::infer::canonical::OriginalQueryValues;
use crate::infer::InferCtxt;
use crate::solve::InferCtxtEvalExt;
use crate::traits::{EvaluationResult, OverflowError, PredicateObligation, SelectionContext};
pub trait InferCtxtExt<'tcx> {
@ -81,27 +80,20 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
if self.tcx.trait_solver_next() {
self.probe(|snapshot| {
if let Ok((_, certainty)) =
self.evaluate_root_goal(Goal::new(self.tcx, param_env, obligation.predicate))
{
match certainty {
Certainty::Yes => {
if self.opaque_types_added_in_snapshot(snapshot) {
Ok(EvaluationResult::EvaluatedToOkModuloOpaqueTypes)
} else if self.region_constraints_added_in_snapshot(snapshot).is_some()
{
Ok(EvaluationResult::EvaluatedToOkModuloRegions)
} else {
Ok(EvaluationResult::EvaluatedToOk)
}
}
Certainty::Maybe(MaybeCause::Ambiguity) => {
Ok(EvaluationResult::EvaluatedToAmbig)
}
Certainty::Maybe(MaybeCause::Overflow) => Err(OverflowError::Canonical),
}
} else {
let mut fulfill_cx = crate::solve::FulfillmentCtxt::new();
fulfill_cx.register_predicate_obligation(self, obligation.clone());
// True errors
// FIXME(-Ztrait-solver=next): Overflows are reported as ambig here, is that OK?
if !fulfill_cx.select_where_possible(self).is_empty() {
Ok(EvaluationResult::EvaluatedToErr)
} else if !fulfill_cx.select_all_or_error(self).is_empty() {
Ok(EvaluationResult::EvaluatedToAmbig)
} else if self.opaque_types_added_in_snapshot(snapshot) {
Ok(EvaluationResult::EvaluatedToOkModuloOpaqueTypes)
} else if self.region_constraints_added_in_snapshot(snapshot).is_some() {
Ok(EvaluationResult::EvaluatedToOkModuloRegions)
} else {
Ok(EvaluationResult::EvaluatedToOk)
}
})
} else {


@ -618,6 +618,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let mut fulfill_cx = crate::solve::FulfillmentCtxt::new();
fulfill_cx.register_predicate_obligations(self.infcx, predicates);
// True errors
// FIXME(-Ztrait-solver=next): Overflows are reported as ambig here, is that OK?
if !fulfill_cx.select_where_possible(self.infcx).is_empty() {
return Ok(EvaluatedToErr);
}


@ -29,6 +29,9 @@
use crate::fmt;
use crate::panic::{Location, PanicInfo};
#[cfg(feature = "panic_immediate_abort")]
const _: () = assert!(cfg!(panic = "abort"), "panic_immediate_abort requires -C panic=abort");
// First we define the two main entry points that all panics go through.
// In the end both are just convenience wrappers around `panic_impl`.
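The `const _: ()` assertion added above is a compile-time guard: when the `panic_immediate_abort` feature is enabled together with a panic strategy other than `abort`, the build fails with the given message instead of producing an inconsistently configured libcore. A standalone illustration of the same idiom follows; the condition and message are hypothetical, not taken from the diff.

// Fails compilation with the given message unless the cfg predicate holds.
const _: () = assert!(cfg!(target_pointer_width = "64"), "this example assumes a 64-bit target");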


@ -107,8 +107,8 @@ use crate::sys::locks as sys;
/// *guard += 1;
/// ```
///
/// It is sometimes necessary to manually drop the mutex guard to unlock it
/// sooner than the end of the enclosing scope.
/// To unlock a mutex guard sooner than the end of the enclosing scope,
/// either create an inner scope or drop the guard manually.
///
/// ```
/// use std::sync::{Arc, Mutex};
@ -125,11 +125,18 @@ use crate::sys::locks as sys;
/// let res_mutex_clone = Arc::clone(&res_mutex);
///
/// threads.push(thread::spawn(move || {
/// let mut data = data_mutex_clone.lock().unwrap();
/// // This is the result of some important and long-ish work.
/// let result = data.iter().fold(0, |acc, x| acc + x * 2);
/// data.push(result);
/// drop(data);
/// // Here we use a block to limit the lifetime of the lock guard.
/// let result = {
/// let mut data = data_mutex_clone.lock().unwrap();
/// // This is the result of some important and long-ish work.
/// let result = data.iter().fold(0, |acc, x| acc + x * 2);
/// data.push(result);
/// result
/// // The mutex guard gets dropped here, together with any other values
/// // created in the critical section.
/// };
/// // The guard created here is a temporary dropped at the end of the statement, i.e.
/// // the lock would not remain held even if the thread did some additional work.
/// *res_mutex_clone.lock().unwrap() += result;
/// }));
/// });
@ -146,6 +153,8 @@ use crate::sys::locks as sys;
/// // It's even more important here than in the threads because we `.join` the
/// // threads after that. If we had not dropped the mutex guard, a thread could
/// // be waiting forever for it, causing a deadlock.
/// // As in the threads, a block could have been used instead of calling the
/// // `drop` function.
/// drop(data);
/// // Here the mutex guard is not assigned to a variable and so, even if the
/// // scope does not end after this line, the mutex is still released: there is
@ -160,6 +169,7 @@ use crate::sys::locks as sys;
///
/// assert_eq!(*res_mutex.lock().unwrap(), 800);
/// ```
///
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Mutex")]
pub struct Mutex<T: ?Sized> {


@ -202,7 +202,7 @@ impl OpenOptions {
create: false,
create_new: false,
// system-specific
mode: 0x777,
mode: 0o777,
}
}


@ -80,13 +80,20 @@ functions, and "In Return Types" shows matches in the return types of functions.
Both are very useful when looking for a function whose name you can't quite
bring to mind when you know the type you have or want.
When typing in the search bar, you can prefix your search term with a type
followed by a colon (such as `mod:`) to restrict the results to just that
kind of item. (The available items are listed in the help popup.)
Searching for `println!` will search for a macro named `println`, just like
Names in the search interface can be prefixed with an item type followed by a
colon (such as `mod:`) to restrict the results to just that kind of item. Also,
searching for `println!` will search for a macro named `println`, just like
searching for `macro:println` does.
Function signature searches can query generics, wrapped in angle brackets, and
traits are normalized like types in the search engine. For example, a function
with the signature `fn my_function<I: Iterator<Item=u32>>(input: I) -> usize`
can be matched with the following queries:
* `Iterator<u32> -> usize`
* `trait:Iterator<primitive:u32> -> primitive:usize`
* `Iterator -> usize`
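Type filters can also be attached to arguments and to generics inside a signature search. For illustration, the following queries (mirroring the parser tests included in this rollup rather than additional examples from the book) are accepted:

* `-> fn:foo`
* `-> fn:foo<fn:bar>`
* `-> fn:foo<fn:bar, enum : baz::fuzz>`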
### Changing displayed theme
You can change the displayed theme by opening the settings menu (the gear


@ -354,12 +354,15 @@ function initSearch(rawSearchIndex) {
if (isInGenerics) {
parserState.genericsElems += 1;
}
const typeFilter = parserState.typeFilter;
parserState.typeFilter = null;
return {
name: name,
fullPath: pathSegments,
pathWithoutLast: pathSegments.slice(0, pathSegments.length - 1),
pathLast: pathSegments[pathSegments.length - 1],
generics: generics,
typeFilter,
};
}
@ -495,6 +498,11 @@ function initSearch(rawSearchIndex) {
*/
function getItemsBefore(query, parserState, elems, endChar) {
let foundStopChar = true;
let start = parserState.pos;
// If this is a generic, keep the outer item's type filter around.
const oldTypeFilter = parserState.typeFilter;
parserState.typeFilter = null;
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
@ -506,7 +514,25 @@ function initSearch(rawSearchIndex) {
continue;
} else if (c === ":" && isPathStart(parserState)) {
throw ["Unexpected ", "::", ": paths cannot start with ", "::"];
} else if (c === ":" || isEndCharacter(c)) {
} else if (c === ":") {
if (parserState.typeFilter !== null) {
throw ["Unexpected ", ":"];
}
if (elems.length === 0) {
throw ["Expected type filter before ", ":"];
} else if (query.literalSearch) {
throw ["You cannot use quotes on type filter"];
}
// The type filter doesn't count as an element since it's a modifier.
const typeFilterElem = elems.pop();
checkExtraTypeFilterCharacters(start, parserState);
parserState.typeFilter = typeFilterElem.name;
parserState.pos += 1;
parserState.totalElems -= 1;
query.literalSearch = false;
foundStopChar = true;
continue;
} else if (isEndCharacter(c)) {
let extra = "";
if (endChar === ">") {
extra = "<";
@ -540,15 +566,10 @@ function initSearch(rawSearchIndex) {
];
}
const posBefore = parserState.pos;
start = parserState.pos;
getNextElem(query, parserState, elems, endChar === ">");
if (endChar !== "") {
if (parserState.pos >= parserState.length) {
throw ["Unclosed ", "<"];
}
const c2 = parserState.userQuery[parserState.pos];
if (!isSeparatorCharacter(c2) && c2 !== endChar) {
throw ["Expected ", endChar, ", found ", c2];
}
if (endChar !== "" && parserState.pos >= parserState.length) {
throw ["Unclosed ", "<"];
}
// This case can be encountered if `getNextElem` encountered a "stop character" right
// from the start. For example if you have `,,` or `<>`. In this case, we simply move up
@ -564,6 +585,8 @@ function initSearch(rawSearchIndex) {
// We are either at the end of the string or on the `endChar` character, let's move forward
// in any case.
parserState.pos += 1;
parserState.typeFilter = oldTypeFilter;
}
/**
@ -572,10 +595,10 @@ function initSearch(rawSearchIndex) {
*
* @param {ParserState} parserState
*/
function checkExtraTypeFilterCharacters(parserState) {
function checkExtraTypeFilterCharacters(start, parserState) {
const query = parserState.userQuery;
for (let pos = 0; pos < parserState.pos; ++pos) {
for (let pos = start; pos < parserState.pos; ++pos) {
if (!isIdentCharacter(query[pos]) && !isWhitespaceCharacter(query[pos])) {
throw ["Unexpected ", query[pos], " in type filter"];
}
@ -591,6 +614,7 @@ function initSearch(rawSearchIndex) {
*/
function parseInput(query, parserState) {
let foundStopChar = true;
let start = parserState.pos;
while (parserState.pos < parserState.length) {
const c = parserState.userQuery[parserState.pos];
@ -612,16 +636,15 @@ function initSearch(rawSearchIndex) {
}
if (query.elems.length === 0) {
throw ["Expected type filter before ", ":"];
} else if (query.elems.length !== 1 || parserState.totalElems !== 1) {
throw ["Unexpected ", ":"];
} else if (query.literalSearch) {
throw ["You cannot use quotes on type filter"];
}
checkExtraTypeFilterCharacters(parserState);
// The type filter doesn't count as an element since it's a modifier.
parserState.typeFilter = query.elems.pop().name;
const typeFilterElem = query.elems.pop();
checkExtraTypeFilterCharacters(start, parserState);
parserState.typeFilter = typeFilterElem.name;
parserState.pos += 1;
parserState.totalElems = 0;
parserState.totalElems -= 1;
query.literalSearch = false;
foundStopChar = true;
continue;
@ -653,6 +676,7 @@ function initSearch(rawSearchIndex) {
];
}
const before = query.elems.length;
start = parserState.pos;
getNextElem(query, parserState, query.elems, false);
if (query.elems.length === before) {
// Nothing was added, weird... Let's increase the position to not remain stuck.
@ -660,6 +684,9 @@ function initSearch(rawSearchIndex) {
}
foundStopChar = false;
}
if (parserState.typeFilter !== null) {
throw ["Unexpected ", ":", " (expected path after type filter)"];
}
while (parserState.pos < parserState.length) {
if (isReturnArrow(parserState)) {
parserState.pos += 2;
@ -687,7 +714,6 @@ function initSearch(rawSearchIndex) {
return {
original: userQuery,
userQuery: userQuery.toLowerCase(),
typeFilter: NO_TYPE_FILTER,
elems: [],
returned: [],
// Total number of "top" elements (does not include generics).
@ -738,8 +764,8 @@ function initSearch(rawSearchIndex) {
*
* ident = *(ALPHA / DIGIT / "_")
* path = ident *(DOUBLE-COLON ident) [!]
* arg = path [generics]
* arg-without-generic = path
* arg = [type-filter *WS COLON *WS] path [generics]
* arg-without-generic = [type-filter *WS COLON *WS] path
* type-sep = COMMA/WS *(COMMA/WS)
* nonempty-arg-list = *(type-sep) arg *(type-sep arg) *(type-sep)
* nonempty-arg-list-without-generics = *(type-sep) arg-without-generic
@ -749,7 +775,7 @@ function initSearch(rawSearchIndex) {
* return-args = RETURN-ARROW *(type-sep) nonempty-arg-list
*
* exact-search = [type-filter *WS COLON] [ RETURN-ARROW ] *WS QUOTE ident QUOTE [ generics ]
* type-search = [type-filter *WS COLON] [ nonempty-arg-list ] [ return-args ]
* type-search = [ nonempty-arg-list ] [ return-args ]
*
* query = *WS (exact-search / type-search) *WS
*
@ -798,6 +824,20 @@ function initSearch(rawSearchIndex) {
* @return {ParsedQuery} - The parsed query
*/
function parseQuery(userQuery) {
function convertTypeFilterOnElem(elem) {
if (elem.typeFilter !== null) {
let typeFilter = elem.typeFilter;
if (typeFilter === "const") {
typeFilter = "constant";
}
elem.typeFilter = itemTypeFromName(typeFilter);
} else {
elem.typeFilter = NO_TYPE_FILTER;
}
for (const elem2 of elem.generics) {
convertTypeFilterOnElem(elem2);
}
}
userQuery = userQuery.trim();
const parserState = {
length: userQuery.length,
@ -812,17 +852,15 @@ function initSearch(rawSearchIndex) {
try {
parseInput(query, parserState);
if (parserState.typeFilter !== null) {
let typeFilter = parserState.typeFilter;
if (typeFilter === "const") {
typeFilter = "constant";
}
query.typeFilter = itemTypeFromName(typeFilter);
for (const elem of query.elems) {
convertTypeFilterOnElem(elem);
}
for (const elem of query.returned) {
convertTypeFilterOnElem(elem);
}
} catch (err) {
query = newParsedQuery(userQuery);
query.error = err;
query.typeFilter = -1;
return query;
}
@ -1057,12 +1095,10 @@ function initSearch(rawSearchIndex) {
}
// The names match, but we need to be sure that all generics kinda
// match as well.
let elem_name;
if (elem.generics.length > 0 && row.generics.length >= elem.generics.length) {
const elems = Object.create(null);
for (const entry of row.generics) {
elem_name = entry.name;
if (elem_name === "") {
if (entry.name === "") {
// Pure generic, needs to check into it.
if (checkGenerics(entry, elem, maxEditDistance + 1, maxEditDistance)
!== 0) {
@ -1070,19 +1106,19 @@ function initSearch(rawSearchIndex) {
}
continue;
}
if (elems[elem_name] === undefined) {
elems[elem_name] = 0;
if (elems[entry.name] === undefined) {
elems[entry.name] = [];
}
elems[elem_name] += 1;
elems[entry.name].push(entry.ty);
}
// We need to find the type that matches the most to remove it in order
// to move forward.
for (const generic of elem.generics) {
const handleGeneric = generic => {
let match = null;
if (elems[generic.name]) {
match = generic.name;
} else {
for (elem_name in elems) {
for (const elem_name in elems) {
if (!hasOwnPropertyRustdoc(elems, elem_name)) {
continue;
}
@ -1093,11 +1129,31 @@ function initSearch(rawSearchIndex) {
}
}
if (match === null) {
return false;
}
const matchIdx = elems[match].findIndex(tmp_elem =>
typePassesFilter(generic.typeFilter, tmp_elem));
if (matchIdx === -1) {
return false;
}
elems[match].splice(matchIdx, 1);
if (elems[match].length === 0) {
delete elems[match];
}
return true;
};
// To do the right thing with type filters, we first process generics
// that have them, removing matching ones from the "bag," then do the
// ones with no type filter, which can match any entry regardless of its
// own type.
for (const generic of elem.generics) {
if (generic.typeFilter !== -1 && !handleGeneric(generic)) {
return maxEditDistance + 1;
}
elems[match] -= 1;
if (elems[match] === 0) {
delete elems[match];
}
for (const generic of elem.generics) {
if (generic.typeFilter === -1 && !handleGeneric(generic)) {
return maxEditDistance + 1;
}
}
return 0;
@ -1145,14 +1201,20 @@ function initSearch(rawSearchIndex) {
return maxEditDistance + 1;
}
let dist = editDistance(row.name, elem.name, maxEditDistance);
let dist;
if (typePassesFilter(elem.typeFilter, row.ty)) {
dist = editDistance(row.name, elem.name, maxEditDistance);
} else {
dist = maxEditDistance + 1;
}
if (literalSearch) {
if (dist !== 0) {
// The name didn't match, let's try to check if the generics do.
if (elem.generics.length === 0) {
const checkGeneric = row.generics.length > 0;
if (checkGeneric && row.generics
.findIndex(tmp_elem => tmp_elem.name === elem.name) !== -1) {
.findIndex(tmp_elem => tmp_elem.name === elem.name &&
typePassesFilter(elem.typeFilter, tmp_elem.ty)) !== -1) {
return 0;
}
}
@ -1201,22 +1263,21 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter
* @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
* @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns
* `maxEditDistance + 1` and position: -1.
*/
function findArg(row, elem, typeFilter, maxEditDistance, skipPositions) {
function findArg(row, elem, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1;
let position = -1;
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
let i = 0;
for (const input of row.type.inputs) {
if (!typePassesFilter(typeFilter, input.ty) ||
skipPositions.indexOf(i) !== -1) {
if (skipPositions.indexOf(i) !== -1) {
i += 1;
continue;
}
@ -1245,14 +1306,14 @@ function initSearch(rawSearchIndex) {
*
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter
* @param {integer} maxEditDistance
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
* @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns
* `maxEditDistance + 1` and position: -1.
*/
function checkReturned(row, elem, typeFilter, maxEditDistance, skipPositions) {
function checkReturned(row, elem, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1;
let position = -1;
@ -1260,8 +1321,7 @@ function initSearch(rawSearchIndex) {
const ret = row.type.output;
let i = 0;
for (const ret_ty of ret) {
if (!typePassesFilter(typeFilter, ret_ty.ty) ||
skipPositions.indexOf(i) !== -1) {
if (skipPositions.indexOf(i) !== -1) {
i += 1;
continue;
}
@ -1483,15 +1543,15 @@ function initSearch(rawSearchIndex) {
const fullId = row.id;
const searchWord = searchWords[pos];
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxEditDistance, []);
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxEditDistance, []);
const in_args = findArg(row, elem, maxEditDistance, []);
const returned = checkReturned(row, elem, maxEditDistance, []);
// path_dist is 0 because no parent path information is currently stored
// in the search index
addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance);
addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance);
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
if (!typePassesFilter(elem.typeFilter, row.ty)) {
return;
}
@ -1568,7 +1628,6 @@ function initSearch(rawSearchIndex) {
const { dist, position } = callback(
row,
elem,
NO_TYPE_FILTER,
maxEditDistance,
skipPositions
);
@ -1632,7 +1691,6 @@ function initSearch(rawSearchIndex) {
in_returned = checkReturned(
row,
elem,
parsedQuery.typeFilter,
maxEditDistance,
[]
);


@ -79,11 +79,18 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
"foundElems",
"original",
"returned",
"typeFilter",
"userQuery",
"error",
];
} else if (fullPath.endsWith("elems") || fullPath.endsWith("generics")) {
} else if (fullPath.endsWith("elems") || fullPath.endsWith("returned")) {
fieldsToCheck = [
"name",
"fullPath",
"pathWithoutLast",
"pathLast",
"generics",
];
} else if (fullPath.endsWith("generics")) {
fieldsToCheck = [
"name",
"fullPath",


@ -1,6 +1,7 @@
// assembly-output: emit-asm
// min-llvm-version: 15.0
// only-x86_64
// ignore-sgx
// revisions: opt-speed opt-size
// [opt-speed] compile-flags: -Copt-level=1
// [opt-size] compile-flags: -Copt-level=s


@ -1,6 +1,7 @@
// assembly-output: emit-asm
// compile-flags: -Copt-level=1
// only-x86_64
// ignore-sgx
// min-llvm-version: 15.0
#![crate_type = "rlib"]


@ -4,6 +4,7 @@
// assembly-output: emit-asm
// compile-flags: --crate-type=lib -O -C llvm-args=-x86-asm-syntax=intel
// only-x86_64
// ignore-sgx
// CHECK-LABEL: clamp_demo:
#[no_mangle]


@ -11,7 +11,7 @@ pub extern fn plus_one(r: &mut u64) {
// CHECK: plus_one
// CHECK: lfence
// CHECK-NEXT: addq
// CHECK-NEXT: incq
// CHECK: popq [[REGISTER:%[a-z]+]]
// CHECK-NEXT: lfence
// CHECK-NEXT: jmpq *[[REGISTER]]


@ -10,9 +10,7 @@ use std::arch::asm;
pub extern "C" fn get(ptr: *const u64) -> u64 {
let value: u64;
unsafe {
asm!(".start_inline_asm:",
"mov {}, [{}]",
".end_inline_asm:",
asm!("mov {}, [{}]",
out(reg) value,
in(reg) ptr);
}
@ -20,24 +18,17 @@ pub extern "C" fn get(ptr: *const u64) -> u64 {
}
// CHECK: get
// CHECK: .start_inline_asm
// CHECK-NEXT: movq
// CHECK: movq
// CHECK-NEXT: lfence
// CHECK-NEXT: .end_inline_asm
#[no_mangle]
pub extern "C" fn myret() {
unsafe {
asm!(
".start_myret_inline_asm:",
"ret",
".end_myret_inline_asm:",
);
asm!("ret");
}
}
// CHECK: myret
// CHECK: .start_myret_inline_asm
// CHECK-NEXT: shlq $0, (%rsp)
// CHECK: shlq $0, (%rsp)
// CHECK-NEXT: lfence
// CHECK-NEXT: retq


@ -6,6 +6,7 @@
// compile-flags: -O
// [set] compile-flags: -Zno-jump-tables
// only-x86_64
// ignore-sgx
#![crate_type = "lib"]


@ -4,6 +4,7 @@
# ignore-nvptx64-nvidia-cuda FIXME: can't find crate for `std`
# ignore-musl FIXME: this makefile needs teaching how to use a musl toolchain
# (see dist-i586-gnu-i586-i686-musl Dockerfile)
# ignore-sgx
include ../../run-make-fulldeps/tools.mk


@ -1,8 +1,7 @@
CHECK: cc_plus_one_asm
CHECK-NEXT: movl
CHECK-NEXT: lfence
CHECK-NEXT: inc
CHECK-NEXT: notq (%rsp)
CHECK-NEXT: notq (%rsp)
CHECK-NEXT: incl
CHECK-NEXT: shlq $0, (%rsp)
CHECK-NEXT: lfence
CHECK-NEXT: retq


@ -1,8 +1,24 @@
CHECK: libunwind::Registers_x86_64::jumpto
CHECK: __libunwind_Registers_x86_64_jumpto
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: shlq $0, (%rsp)
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK-NEXT: popq [[REGISTER:%[a-z]+]]
CHECK-NEXT: lfence
CHECK-NEXT: retq
CHECK-NEXT: popq [[REGISTER:%[a-z]+]]
CHECK-NEXT: lfence
CHECK-NEXT: jmpq *[[REGISTER]]


@ -2,6 +2,5 @@ CHECK: print
CHECK: lfence
CHECK: lfence
CHECK: lfence
CHECK: popq
CHECK: callq 0x{{[[:xdigit:]]*}} <_Unwind_Resume>
CHECK-NEXT: ud2


@ -20,39 +20,38 @@ function build {
}
function check {
local func=$1
local func_re="$1"
local checks="${TEST_DIR}/$2"
local asm=$(mktemp)
local objdump="${BUILD_DIR}/x86_64-unknown-linux-gnu/llvm/build/bin/llvm-objdump"
local filecheck="${BUILD_DIR}/x86_64-unknown-linux-gnu/llvm/build/bin/FileCheck"
local objdump="${LLVM_BIN_DIR}/llvm-objdump"
local filecheck="${LLVM_BIN_DIR}/FileCheck"
local enclave=${WORK_DIR}/enclave/target/x86_64-fortanix-unknown-sgx/debug/enclave
${objdump} --disassemble-symbols=${func} --demangle \
${WORK_DIR}/enclave/target/x86_64-fortanix-unknown-sgx/debug/enclave > ${asm}
func="$(${objdump} --syms --demangle ${enclave} | \
grep --only-matching -E "[[:blank:]]+${func_re}\$" | \
sed -e 's/^[[:space:]]*//' )"
${objdump} --disassemble-symbols="${func}" --demangle \
${enclave} > ${asm}
${filecheck} --input-file ${asm} ${checks}
}
build
check unw_getcontext unw_getcontext.checks
check "libunwind::Registers_x86_64::jumpto()" jumpto.checks
check "std::io::stdio::_print::h87f0c238421c45bc" print.checks
check rust_plus_one_global_asm rust_plus_one_global_asm.checks \
|| echo "warning: module level assembly currently not hardened"
check "unw_getcontext" unw_getcontext.checks
check "__libunwind_Registers_x86_64_jumpto" jumpto.checks
check 'std::io::stdio::_print::[[:alnum:]]+' print.checks
check rust_plus_one_global_asm rust_plus_one_global_asm.checks
check cc_plus_one_c cc_plus_one_c.checks
check cc_plus_one_c_asm cc_plus_one_c_asm.checks
check cc_plus_one_cxx cc_plus_one_cxx.checks
check cc_plus_one_cxx_asm cc_plus_one_cxx_asm.checks
check cc_plus_one_asm cc_plus_one_asm.checks \
|| echo "warning: the cc crate forwards assembly files to the CC compiler." \
"Clang uses its own integrated assembler, which does not include the LVI passes."
check cc_plus_one_asm cc_plus_one_asm.checks
check cmake_plus_one_c cmake_plus_one_c.checks
check cmake_plus_one_c_asm cmake_plus_one_c_asm.checks
check cmake_plus_one_c_global_asm cmake_plus_one_c_global_asm.checks \
|| echo "warning: module level assembly currently not hardened"
check cmake_plus_one_c_global_asm cmake_plus_one_c_global_asm.checks
check cmake_plus_one_cxx cmake_plus_one_cxx.checks
check cmake_plus_one_cxx_asm cmake_plus_one_cxx_asm.checks
check cmake_plus_one_cxx_global_asm cmake_plus_one_cxx_global_asm.checks \
|| echo "warning: module level assembly currently not hardened"
check cmake_plus_one_cxx_global_asm cmake_plus_one_cxx_global_asm.checks
check cmake_plus_one_asm cmake_plus_one_asm.checks


@ -17,6 +17,7 @@ const QUERY = [
"a b:",
"a (b:",
"_:",
"_:a",
"a-bb",
"a>bb",
"ab'",
@ -48,7 +49,6 @@ const PARSED = [
foundElems: 0,
original: "<P>",
returned: [],
typeFilter: -1,
userQuery: "<p>",
error: "Found generics without a path",
},
@ -57,7 +57,6 @@ const PARSED = [
foundElems: 0,
original: "-> <P>",
returned: [],
typeFilter: -1,
userQuery: "-> <p>",
error: "Found generics without a path",
},
@ -66,7 +65,6 @@ const PARSED = [
foundElems: 0,
original: "a<\"P\">",
returned: [],
typeFilter: -1,
userQuery: "a<\"p\">",
error: "Unexpected `\"` in generics",
},
@ -75,7 +73,6 @@ const PARSED = [
foundElems: 0,
original: "\"P\" \"P\"",
returned: [],
typeFilter: -1,
userQuery: "\"p\" \"p\"",
error: "Cannot have more than one literal search element",
},
@ -84,7 +81,6 @@ const PARSED = [
foundElems: 0,
original: "P \"P\"",
returned: [],
typeFilter: -1,
userQuery: "p \"p\"",
error: "Cannot use literal search when there is more than one element",
},
@ -93,7 +89,6 @@ const PARSED = [
foundElems: 0,
original: "\"p\" p",
returned: [],
typeFilter: -1,
userQuery: "\"p\" p",
error: "You cannot have more than one element if you use quotes",
},
@ -102,7 +97,6 @@ const PARSED = [
foundElems: 0,
original: "\"const\": p",
returned: [],
typeFilter: -1,
userQuery: "\"const\": p",
error: "You cannot use quotes on type filter",
},
@ -111,16 +105,14 @@ const PARSED = [
foundElems: 0,
original: "a<:a>",
returned: [],
typeFilter: -1,
userQuery: "a<:a>",
error: "Unexpected `:` after `<`",
error: "Expected type filter before `:`",
},
{
elems: [],
foundElems: 0,
original: "a<::a>",
returned: [],
typeFilter: -1,
userQuery: "a<::a>",
error: "Unexpected `::`: paths cannot start with `::`",
},
@ -129,7 +121,6 @@ const PARSED = [
foundElems: 0,
original: "((a))",
returned: [],
typeFilter: -1,
userQuery: "((a))",
error: "Unexpected `(`",
},
@ -138,7 +129,6 @@ const PARSED = [
foundElems: 0,
original: "(p -> p",
returned: [],
typeFilter: -1,
userQuery: "(p -> p",
error: "Unexpected `(`",
},
@ -147,7 +137,6 @@ const PARSED = [
foundElems: 0,
original: "::a::b",
returned: [],
typeFilter: -1,
userQuery: "::a::b",
error: "Paths cannot start with `::`",
},
@ -156,7 +145,6 @@ const PARSED = [
foundElems: 0,
original: "a::::b",
returned: [],
typeFilter: -1,
userQuery: "a::::b",
error: "Unexpected `::::`",
},
@ -165,7 +153,6 @@ const PARSED = [
foundElems: 0,
original: "a::b::",
returned: [],
typeFilter: -1,
userQuery: "a::b::",
error: "Paths cannot end with `::`",
},
@ -174,7 +161,6 @@ const PARSED = [
foundElems: 0,
original: ":a",
returned: [],
typeFilter: -1,
userQuery: ":a",
error: "Expected type filter before `:`",
},
@ -183,16 +169,14 @@ const PARSED = [
foundElems: 0,
original: "a b:",
returned: [],
typeFilter: -1,
userQuery: "a b:",
error: "Unexpected `:`",
error: "Unexpected `:` (expected path after type filter)",
},
{
elems: [],
foundElems: 0,
original: "a (b:",
returned: [],
typeFilter: -1,
userQuery: "a (b:",
error: "Unexpected `(`",
},
@ -201,8 +185,15 @@ const PARSED = [
foundElems: 0,
original: "_:",
returned: [],
typeFilter: -1,
userQuery: "_:",
error: "Unexpected `:` (expected path after type filter)",
},
{
elems: [],
foundElems: 0,
original: "_:a",
returned: [],
userQuery: "_:a",
error: "Unknown type filter `_`",
},
{
@ -210,7 +201,6 @@ const PARSED = [
foundElems: 0,
original: "a-bb",
returned: [],
typeFilter: -1,
userQuery: "a-bb",
error: "Unexpected `-` (did you mean `->`?)",
},
@ -219,7 +209,6 @@ const PARSED = [
foundElems: 0,
original: "a>bb",
returned: [],
typeFilter: -1,
userQuery: "a>bb",
error: "Unexpected `>` (did you mean `->`?)",
},
@ -228,7 +217,6 @@ const PARSED = [
foundElems: 0,
original: "ab'",
returned: [],
typeFilter: -1,
userQuery: "ab'",
error: "Unexpected `'`",
},
@ -237,7 +225,6 @@ const PARSED = [
foundElems: 0,
original: "a->",
returned: [],
typeFilter: -1,
userQuery: "a->",
error: "Expected at least one item after `->`",
},
@ -246,7 +233,6 @@ const PARSED = [
foundElems: 0,
original: '"p" <a>',
returned: [],
typeFilter: -1,
userQuery: '"p" <a>',
error: "Found generics without a path",
},
@ -255,7 +241,6 @@ const PARSED = [
foundElems: 0,
original: '"p" a<a>',
returned: [],
typeFilter: -1,
userQuery: '"p" a<a>',
error: "You cannot have more than one element if you use quotes",
},
@ -264,7 +249,6 @@ const PARSED = [
foundElems: 0,
original: 'a,<',
returned: [],
typeFilter: -1,
userQuery: 'a,<',
error: 'Found generics without a path',
},
@ -273,7 +257,6 @@ const PARSED = [
foundElems: 0,
original: 'aaaaa<>b',
returned: [],
typeFilter: -1,
userQuery: 'aaaaa<>b',
error: 'Expected `,`, ` `, `:` or `->`, found `b`',
},
@ -282,16 +265,14 @@ const PARSED = [
foundElems: 0,
original: 'fn:aaaaa<>b',
returned: [],
typeFilter: -1,
userQuery: 'fn:aaaaa<>b',
error: 'Expected `,`, ` ` or `->`, found `b`',
error: 'Expected `,`, ` `, `:` or `->`, found `b`',
},
{
elems: [],
foundElems: 0,
original: '->a<>b',
returned: [],
typeFilter: -1,
userQuery: '->a<>b',
error: 'Expected `,` or ` `, found `b`',
},
@ -300,7 +281,6 @@ const PARSED = [
foundElems: 0,
original: 'a<->',
returned: [],
typeFilter: -1,
userQuery: 'a<->',
error: 'Unexpected `-` after `<`',
},
@ -309,7 +289,6 @@ const PARSED = [
foundElems: 0,
original: 'a:: a',
returned: [],
typeFilter: -1,
userQuery: 'a:: a',
error: 'Paths cannot end with `::`',
},
@ -318,7 +297,6 @@ const PARSED = [
foundElems: 0,
original: 'a ::a',
returned: [],
typeFilter: -1,
userQuery: 'a ::a',
error: 'Paths cannot start with `::`',
},
@ -327,16 +305,14 @@ const PARSED = [
foundElems: 0,
original: "a<a>:",
returned: [],
typeFilter: -1,
userQuery: "a<a>:",
error: 'Unexpected `:`',
error: 'Unexpected `<` in type filter',
},
{
elems: [],
foundElems: 0,
original: "a<>:",
returned: [],
typeFilter: -1,
userQuery: "a<>:",
error: 'Unexpected `<` in type filter',
},
@ -345,7 +321,6 @@ const PARSED = [
foundElems: 0,
original: "a,:",
returned: [],
typeFilter: -1,
userQuery: "a,:",
error: 'Unexpected `,` in type filter',
},
@ -354,7 +329,6 @@ const PARSED = [
foundElems: 0,
original: "a<> :",
returned: [],
typeFilter: -1,
userQuery: "a<> :",
error: 'Unexpected `<` in type filter',
},
@ -363,7 +337,6 @@ const PARSED = [
foundElems: 0,
original: "mod : :",
returned: [],
typeFilter: -1,
userQuery: "mod : :",
error: 'Unexpected `:`',
},
@ -372,7 +345,6 @@ const PARSED = [
foundElems: 0,
original: "a!a",
returned: [],
typeFilter: -1,
userQuery: "a!a",
error: 'Unexpected `!`: it can only be at the end of an ident',
},
@ -381,7 +353,6 @@ const PARSED = [
foundElems: 0,
original: "a!!",
returned: [],
typeFilter: -1,
userQuery: "a!!",
error: 'Cannot have more than one `!` in an ident',
},
@ -390,7 +361,6 @@ const PARSED = [
foundElems: 0,
original: "mod:a!",
returned: [],
typeFilter: -1,
userQuery: "mod:a!",
error: 'Invalid search type: macro `!` and `mod` both specified',
},
@ -399,7 +369,6 @@ const PARSED = [
foundElems: 0,
original: "a!::a",
returned: [],
typeFilter: -1,
userQuery: "a!::a",
error: 'Cannot have associated items in macros',
},
@ -408,7 +377,6 @@ const PARSED = [
foundElems: 0,
original: "a<",
returned: [],
typeFilter: -1,
userQuery: "a<",
error: "Unclosed `<`",
},


@ -1,4 +1,14 @@
const QUERY = ['fn:foo', 'enum : foo', 'macro<f>:foo', 'macro!', 'macro:mac!', 'a::mac!'];
const QUERY = [
'fn:foo',
'enum : foo',
'macro<f>:foo',
'macro!',
'macro:mac!',
'a::mac!',
'-> fn:foo',
'-> fn:foo<fn:bar>',
'-> fn:foo<fn:bar, enum : baz::fuzz>',
];
const PARSED = [
{
@ -8,11 +18,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "foo",
generics: [],
typeFilter: 5,
}],
foundElems: 1,
original: "fn:foo",
returned: [],
typeFilter: 5,
userQuery: "fn:foo",
error: null,
},
@ -23,11 +33,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "foo",
generics: [],
typeFilter: 4,
}],
foundElems: 1,
original: "enum : foo",
returned: [],
typeFilter: 4,
userQuery: "enum : foo",
error: null,
},
@ -36,9 +46,8 @@ const PARSED = [
foundElems: 0,
original: "macro<f>:foo",
returned: [],
typeFilter: -1,
userQuery: "macro<f>:foo",
error: "Unexpected `:`",
error: "Unexpected `<` in type filter",
},
{
elems: [{
@ -47,11 +56,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "macro",
generics: [],
typeFilter: 14,
}],
foundElems: 1,
original: "macro!",
returned: [],
typeFilter: 14,
userQuery: "macro!",
error: null,
},
@ -62,11 +71,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "mac",
generics: [],
typeFilter: 14,
}],
foundElems: 1,
original: "macro:mac!",
returned: [],
typeFilter: 14,
userQuery: "macro:mac!",
error: null,
},
@ -77,12 +86,83 @@ const PARSED = [
pathWithoutLast: ["a"],
pathLast: "mac",
generics: [],
typeFilter: 14,
}],
foundElems: 1,
original: "a::mac!",
returned: [],
typeFilter: 14,
userQuery: "a::mac!",
error: null,
},
{
elems: [],
foundElems: 1,
original: "-> fn:foo",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [],
typeFilter: 5,
}],
userQuery: "-> fn:foo",
error: null,
},
{
elems: [],
foundElems: 1,
original: "-> fn:foo<fn:bar>",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [
{
name: "bar",
fullPath: ["bar"],
pathWithoutLast: [],
pathLast: "bar",
generics: [],
typeFilter: 5,
}
],
typeFilter: 5,
}],
userQuery: "-> fn:foo<fn:bar>",
error: null,
},
{
elems: [],
foundElems: 1,
original: "-> fn:foo<fn:bar, enum : baz::fuzz>",
returned: [{
name: "foo",
fullPath: ["foo"],
pathWithoutLast: [],
pathLast: "foo",
generics: [
{
name: "bar",
fullPath: ["bar"],
pathWithoutLast: [],
pathLast: "bar",
generics: [],
typeFilter: 5,
},
{
name: "baz::fuzz",
fullPath: ["baz", "fuzz"],
pathWithoutLast: ["baz"],
pathLast: "fuzz",
generics: [],
typeFilter: 4,
},
],
typeFilter: 5,
}],
userQuery: "-> fn:foo<fn:bar, enum : baz::fuzz>",
error: null,
},
];


@ -6,7 +6,6 @@ const PARSED = [
foundElems: 0,
original: 'A<B<C<D>, E>',
returned: [],
typeFilter: -1,
userQuery: 'a<b<c<d>, e>',
error: 'Unexpected `<` after `<`',
},
@ -18,6 +17,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "p",
generics: [],
typeFilter: -1,
},
{
name: "u8",
@ -25,12 +25,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "p<> u8",
returned: [],
typeFilter: -1,
userQuery: "p<> u8",
error: null,
},
@ -50,12 +50,12 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
},
],
foundElems: 1,
original: '"p"<a>',
returned: [],
typeFilter: -1,
userQuery: '"p"<a>',
error: null,
},


@ -23,11 +23,11 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
}],
foundElems: 1,
original: "R<!>",
returned: [],
typeFilter: -1,
userQuery: "r<!>",
error: null,
},
@ -38,11 +38,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "!",
generics: [],
typeFilter: -1,
}],
foundElems: 1,
original: "!",
returned: [],
typeFilter: -1,
userQuery: "!",
error: null,
},
@ -53,11 +53,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: 14,
}],
foundElems: 1,
original: "a!",
returned: [],
typeFilter: 14,
userQuery: "a!",
error: null,
},
@ -66,7 +66,6 @@ const PARSED = [
foundElems: 0,
original: "a!::b",
returned: [],
typeFilter: -1,
userQuery: "a!::b",
error: "Cannot have associated items in macros",
},
@ -77,11 +76,11 @@ const PARSED = [
pathWithoutLast: ["!"],
pathLast: "b",
generics: [],
typeFilter: -1,
}],
foundElems: 1,
original: "!::b",
returned: [],
typeFilter: -1,
userQuery: "!::b",
error: null,
},
@ -90,7 +89,6 @@ const PARSED = [
foundElems: 0,
original: "a!::b!",
returned: [],
typeFilter: -1,
userQuery: "a!::b!",
error: "Cannot have associated items in macros",
},


@ -16,11 +16,11 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
}],
foundElems: 1,
original: "R<P>",
returned: [],
typeFilter: -1,
userQuery: "r<p>",
error: null,
}


@ -8,11 +8,11 @@ const PARSED = [
pathWithoutLast: ["a"],
pathLast: "b",
generics: [],
typeFilter: -1,
}],
foundElems: 1,
original: "A::B",
returned: [],
typeFilter: -1,
userQuery: "a::b",
error: null,
},
@ -24,6 +24,7 @@ const PARSED = [
pathWithoutLast: ["a"],
pathLast: "b",
generics: [],
typeFilter: -1,
},
{
name: "c",
@ -31,12 +32,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "c",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: 'A::B,C',
returned: [],
typeFilter: -1,
userQuery: 'a::b,c',
error: null,
},
@ -56,6 +57,7 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
},
{
name: "c",
@ -63,12 +65,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "c",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: 'A::B<f>,C',
returned: [],
typeFilter: -1,
userQuery: 'a::b<f>,c',
error: null,
},
@ -79,11 +81,11 @@ const PARSED = [
pathWithoutLast: ["mod"],
pathLast: "a",
generics: [],
typeFilter: -1,
}],
foundElems: 1,
original: "mod::a",
returned: [],
typeFilter: -1,
userQuery: "mod::a",
error: null,
},


@ -19,8 +19,8 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "p",
generics: [],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: '-> "p"',
error: null,
},
@ -31,11 +31,11 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "p",
generics: [],
typeFilter: -1,
}],
foundElems: 1,
original: '"p",',
returned: [],
typeFilter: -1,
userQuery: '"p",',
error: null,
},
@ -44,7 +44,6 @@ const PARSED = [
foundElems: 0,
original: '"p" -> a',
returned: [],
typeFilter: -1,
userQuery: '"p" -> a',
error: "You cannot have more than one element if you use quotes",
},
@ -53,7 +52,6 @@ const PARSED = [
foundElems: 0,
original: '"a" -> "p"',
returned: [],
typeFilter: -1,
userQuery: '"a" -> "p"',
error: "Cannot have more than one literal search element",
},
@ -62,7 +60,6 @@ const PARSED = [
foundElems: 0,
original: '->"-"',
returned: [],
typeFilter: -1,
userQuery: '->"-"',
error: 'Unexpected `-` in a string element',
},
@ -71,7 +68,6 @@ const PARSED = [
foundElems: 0,
original: '"a',
returned: [],
typeFilter: -1,
userQuery: '"a',
error: 'Unclosed `"`',
},
@ -80,7 +76,6 @@ const PARSED = [
foundElems: 0,
original: '""',
returned: [],
typeFilter: -1,
userQuery: '""',
error: 'Cannot have empty string element',
},

View File

@ -25,8 +25,8 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: "-> f<p>",
error: null,
},
@ -40,8 +40,8 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "p",
generics: [],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: "-> p",
error: null,
},
@ -55,8 +55,8 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: "->,a",
error: null,
},
@ -67,6 +67,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "aaaaa",
generics: [],
typeFilter: -1,
}],
foundElems: 2,
original: "aaaaa->a",
@ -76,8 +77,8 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: "aaaaa->a",
error: null,
},
@ -91,8 +92,8 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "!",
generics: [],
typeFilter: -1,
}],
typeFilter: -1,
userQuery: "-> !",
error: null,
},

View File

@ -19,6 +19,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'aaaaaa',
generics: [],
typeFilter: -1,
},
{
name: 'b',
@ -26,12 +27,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'b',
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "aaaaaa b",
returned: [],
typeFilter: -1,
userQuery: "aaaaaa b",
error: null,
},
@ -43,6 +44,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'a',
generics: [],
typeFilter: -1,
},
{
name: 'b',
@ -50,12 +52,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'b',
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "a b",
returned: [],
typeFilter: -1,
userQuery: "a b",
error: null,
},
@ -67,6 +69,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'a',
generics: [],
typeFilter: -1,
},
{
name: 'b',
@ -74,12 +77,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'b',
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "a,b",
returned: [],
typeFilter: -1,
userQuery: "a,b",
error: null,
},
@ -91,6 +94,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'a',
generics: [],
typeFilter: -1,
},
{
name: 'b',
@ -98,12 +102,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: 'b',
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "a\tb",
returned: [],
typeFilter: -1,
userQuery: "a\tb",
error: null,
},
@ -130,12 +134,12 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
},
],
foundElems: 1,
original: "a<b c>",
returned: [],
typeFilter: -1,
userQuery: "a<b c>",
error: null,
},
@ -162,12 +166,12 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
},
],
foundElems: 1,
original: "a<b,c>",
returned: [],
typeFilter: -1,
userQuery: "a<b,c>",
error: null,
},
@ -194,12 +198,12 @@ const PARSED = [
generics: [],
},
],
typeFilter: -1,
},
],
foundElems: 1,
original: "a<b\tc>",
returned: [],
typeFilter: -1,
userQuery: "a<b\tc>",
error: null,
},

View File

@ -20,6 +20,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: -1,
},
{
name: "b",
@ -27,12 +28,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "b",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "a b",
returned: [],
typeFilter: -1,
userQuery: "a b",
error: null,
},
@ -44,6 +45,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: -1,
},
{
name: "b",
@ -51,12 +53,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "b",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "a b",
returned: [],
typeFilter: -1,
userQuery: "a b",
error: null,
},
@ -65,7 +67,6 @@ const PARSED = [
foundElems: 0,
original: "a,b(c)",
returned: [],
typeFilter: -1,
userQuery: "a,b(c)",
error: "Unexpected `(`",
},
@ -77,6 +78,7 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "aaa",
generics: [],
typeFilter: -1,
},
{
name: "a",
@ -84,12 +86,12 @@ const PARSED = [
pathWithoutLast: [],
pathLast: "a",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "aaa,a",
returned: [],
typeFilter: -1,
userQuery: "aaa,a",
error: null,
},
@ -98,7 +100,6 @@ const PARSED = [
foundElems: 0,
original: ",,,,",
returned: [],
typeFilter: -1,
userQuery: ",,,,",
error: null,
},
@ -107,17 +108,15 @@ const PARSED = [
foundElems: 0,
original: 'mod :',
returned: [],
typeFilter: 0,
userQuery: 'mod :',
error: null,
error: "Unexpected `:` (expected path after type filter)",
},
{
elems: [],
foundElems: 0,
original: 'mod\t:',
returned: [],
typeFilter: 0,
userQuery: 'mod\t:',
error: null,
error: "Unexpected `:` (expected path after type filter)",
},
];
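(The last two cases are a behavior change rather than just a field move: a filter with nothing after the colon, such as `mod :`, previously parsed successfully with a query-level `typeFilter` of 0 and now reports "Unexpected `:` (expected path after type filter)".)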

View File

@ -5,6 +5,8 @@ const QUERY = [
'Aaaaaaa -> bool',
'Aaaaaaa -> usize',
'Read -> u64',
'trait:Read -> u64',
'struct:Read -> u64',
'bool -> u64',
'Ddddddd -> u64',
'-> Ddddddd'
@ -36,6 +38,17 @@ const EXPECTED = [
{ 'path': 'generics_impl::Ddddddd', 'name': 'ggggggg' },
],
},
{
// trait:Read -> u64
'others': [
{ 'path': 'generics_impl::Ddddddd', 'name': 'eeeeeee' },
{ 'path': 'generics_impl::Ddddddd', 'name': 'ggggggg' },
],
},
{
// struct:Read -> u64
'others': [],
},
{
// bool -> u64
'others': [

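For context on the two new queries, here is a rough sketch of the kind of items the `generics_impl` fixture is matching — the names `Ddddddd`, `eeeeeee` and `ggggggg` come from the expected results above, but the signatures below are assumptions, not the actual test crate. Because `Read` is a trait, `trait:Read -> u64` keeps both results while `struct:Read -> u64` finds nothing:

// Hypothetical sketch only (assumed signatures): methods that take a
// `Read` implementor and return `u64`, so they match `Read -> u64` and
// `trait:Read -> u64` but not `struct:Read -> u64`.
use std::io::Read;

pub struct Ddddddd;

impl Ddddddd {
    pub fn eeeeeee<R: Read>(_reader: R) -> u64 {
        0
    }

    pub fn ggggggg(_reader: impl Read) -> u64 {
        0
    }
}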
View File

@ -2,6 +2,8 @@
const QUERY = [
'R<P>',
'R<struct:P>',
'R<enum:P>',
'"P"',
'P',
'ExtraCreditStructMulti<ExtraCreditInnerMulti, ExtraCreditInnerMulti>',
@ -20,6 +22,20 @@ const EXPECTED = [
{ 'path': 'generics', 'name': 'alpha' },
],
},
{
// R<struct:P>
'returned': [
{ 'path': 'generics', 'name': 'alef' },
],
'in_args': [
{ 'path': 'generics', 'name': 'alpha' },
],
},
{
// R<enum:P>
'returned': [],
'in_args': [],
},
{
// "P"
'others': [

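As a rough sketch (the names `R`, `P`, `alef`, `alpha` and the `generics` path come from the fixture; the definitions below are assumptions), the searched crate looks something like this, which is why `R<struct:P>` behaves like `R<P>` while `R<enum:P>` matches nothing — `P` is a struct, not an enum:

// Hypothetical sketch only (assumed definitions), not the actual test crate.
pub struct P;
pub struct R<T>(pub T);

// Listed under "returned" for `R<P>` and `R<struct:P>`.
pub fn alef() -> R<P> {
    R(P)
}

// Listed under "in_args" for the same queries.
pub fn alpha(_arg: R<P>) {}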
View File

@ -3,6 +3,8 @@
const QUERY = [
"i32",
"str",
"primitive:str",
"struct:str",
"TotoIsSomewhere",
];
@ -17,6 +19,14 @@ const EXPECTED = [
{ 'path': 'primitive', 'name': 'foo' },
],
},
{
'returned': [
{ 'path': 'primitive', 'name': 'foo' },
],
},
{
'returned': [],
},
{
'others': [],
'in_args': [],

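(Same idea as the `generics` case above: `str` is indexed as a primitive, so `primitive:str` keeps the `foo` result while `struct:str` drops it.)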
View File

@ -1,8 +1,8 @@
error: unconstrained generic constant
--> $DIR/cross_crate_predicate.rs:7:13
--> $DIR/cross_crate_predicate.rs:7:44
|
LL | let _ = const_evaluatable_lib::test1::<T>();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^
|
= help: try adding a `where` bound using this expression: `where [(); std::mem::size_of::<T>() - 1]:`
note: required by a bound in `test1`
@ -12,10 +12,10 @@ LL | [u8; std::mem::size_of::<T>() - 1]: Sized,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `test1`
error: unconstrained generic constant
--> $DIR/cross_crate_predicate.rs:7:13
--> $DIR/cross_crate_predicate.rs:7:44
|
LL | let _ = const_evaluatable_lib::test1::<T>();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^
|
= help: try adding a `where` bound using this expression: `where [(); std::mem::size_of::<T>() - 1]:`
note: required by a bound in `test1`

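(This .stderr change reflects the refined error spans for const arguments: the "unconstrained generic constant" error now points at the generic argument `T` inside `test1::<T>()` rather than underlining the entire call.)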
View File

@ -1,8 +1,8 @@
error: the constant `N` is not of type `u8`
--> $DIR/type_mismatch.rs:2:5
--> $DIR/type_mismatch.rs:2:11
|
LL | bar::<N>()
| ^^^^^^^^
| ^ expected `u8`, found `usize`
|
note: required by a bound in `bar`
--> $DIR/type_mismatch.rs:6:8

View File

@ -2,7 +2,7 @@ error: the constant `N` is not of type `usize`
--> $DIR/bad-const-wf-doesnt-specialize.rs:8:29
|
LL | impl<const N: i32> Copy for S<N> {}
| ^^^^
| ^^^^ expected `usize`, found `i32`
|
note: required by a bound in `S`
--> $DIR/bad-const-wf-doesnt-specialize.rs:6:10

View File

@ -0,0 +1,40 @@
// check-pass
// compile-flags: -Ztrait-solver=next

trait Foo {
    type Gat<'a>
    where
        Self: 'a;
    fn bar(&self) -> Self::Gat<'_>;
}

enum Option<T> {
    Some(T),
    None,
}

impl<T> Option<T> {
    fn as_ref(&self) -> Option<&T> {
        match self {
            Option::Some(t) => Option::Some(t),
            Option::None => Option::None,
        }
    }

    fn map<U>(self, f: impl FnOnce(T) -> U) -> Option<U> {
        match self {
            Option::Some(t) => Option::Some(f(t)),
            Option::None => Option::None,
        }
    }
}

impl<T: Foo + 'static> Foo for Option<T> {
    type Gat<'a> = Option<<T as Foo>::Gat<'a>> where Self: 'a;

    fn bar(&self) -> Self::Gat<'_> {
        self.as_ref().map(Foo::bar)
    }
}

fn main() {}

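(This new check-pass test runs with `-Ztrait-solver=next`: it checks that the new trait solver can normalize the generic associated type `Option<T>::Gat<'a>` when `bar` is dispatched through `self.as_ref().map(Foo::bar)`.)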
View File

@ -8,7 +8,7 @@ error: the constant `ASSUME_ALIGNMENT` is not of type `Assume`
--> $DIR/issue-101739-1.rs:8:14
|
LL | Dst: BikeshedIntrinsicFrom<Src, Context, ASSUME_ALIGNMENT>,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Assume`, found `bool`
|
note: required by a bound in `BikeshedIntrinsicFrom`
--> $SRC_DIR/core/src/mem/transmutability.rs:LL:COL
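(These last three .stderr updates add an "expected ..., found ..." label on the narrowed span — e.g. "expected `u8`, found `usize`", "expected `Assume`, found `bool`" — so the type mismatch for the offending const argument is stated directly in the primary error.)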