Auto merge of #69402 - GuillaumeGomez:extend-search, r=kinnison
Extend search

I realized that when searching for "struct:String" in the rustdoc search, for example, the "in arguments" and "returned" tabs were always empty. After some investigation, I found that this was because we only provided the name, not the type, which made it impossible to pass the "type filtering" check. To fix this, I now register the type alongside the name.

Note for the future: we could improve on this by registering only the path id and using the path dictionary directly. The problem with that approach (which I already tested) is that it becomes complicated for types from other crates: it would force us to handle both a case with an id and a case with `(name, type)`. The current PR is big enough already, so I didn't include it here, but I think it is definitely worth doing as a follow-up.

About the two tests I added: their only purpose is to check that the search actually returns something when a type filter is applied, with and without literal search. I also had to update the test script a bit to add the new "locally global" (haha) variable I created (`NO_TYPE_FILTER`). I introduced this variable to make the code easier to read than a bare "-1".

r? @kinnison

cc @ollie27
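To make the new index layout concrete, here is a minimal sketch in plain JavaScript. It is not the generated index: `itemTypes` is abridged and `entry` is a hypothetical record for something like `String::add`, but it shows the shape the serializer now produces (each argument/return type is a `[lowercased name, item-type id]` pair instead of a bare name) and how a `struct:` prefix can finally be checked against argument and return types.

```js
// Minimal sketch, not the real rustdoc index. `itemTypes` is abridged and
// `entry` is hypothetical; only the layout mirrors what this PR emits.
var NO_TYPE_FILTER = -1;
var INPUTS_DATA = 0;
var OUTPUT_DATA = 1;

var itemTypes = ["mod", "struct", "enum", "trait", "fn", "primitive"]; // abridged

var entry = {
    name: "add",
    ty: itemTypes.indexOf("fn"),
    type: [
        // inputs: the receiver (a struct) and the right-hand side (a primitive)
        [["string", itemTypes.indexOf("struct")], ["str", itemTypes.indexOf("primitive")]],
        // output: the returned struct
        [["string", itemTypes.indexOf("struct")]],
    ],
};

// Same idea as the real helper: NO_TYPE_FILTER (-1) means "no `foo:` prefix".
function typePassesFilter(filter, type) {
    if (filter <= NO_TYPE_FILTER) return true;
    return filter === type; // simplified: the real code also maps related kinds
}

// A `struct:string` query can now keep or reject candidates by kind:
var wanted = itemTypes.indexOf("struct");
var returnsIt = entry.type[OUTPUT_DATA].some(function(pair) {
    return typePassesFilter(wanted, pair[1]) && pair[0] === "string";
});
var takesIt = entry.type[INPUTS_DATA].some(function(pair) {
    return typePassesFilter(wanted, pair[1]) && pair[0] === "string";
});
console.log(returnsIt, takesIt); // true true
```

In the real `main.js`, `findArg` and `checkReturned` perform this kind check via `typePassesFilter(typeFilter, tmp[1])` before falling back to name and Levenshtein matching.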
commit f4c675c476
@@ -1078,6 +1078,26 @@ impl Clean<PolyTrait> for hir::PolyTraitRef<'_> {
     }
 }
 
+impl Clean<TypeKind> for hir::def::DefKind {
+    fn clean(&self, _: &DocContext<'_>) -> TypeKind {
+        match *self {
+            hir::def::DefKind::Mod => TypeKind::Module,
+            hir::def::DefKind::Struct => TypeKind::Struct,
+            hir::def::DefKind::Union => TypeKind::Union,
+            hir::def::DefKind::Enum => TypeKind::Enum,
+            hir::def::DefKind::Trait => TypeKind::Trait,
+            hir::def::DefKind::TyAlias => TypeKind::Typedef,
+            hir::def::DefKind::ForeignTy => TypeKind::Foreign,
+            hir::def::DefKind::TraitAlias => TypeKind::TraitAlias,
+            hir::def::DefKind::Fn => TypeKind::Function,
+            hir::def::DefKind::Const => TypeKind::Const,
+            hir::def::DefKind::Static => TypeKind::Static,
+            hir::def::DefKind::Macro(_) => TypeKind::Macro,
+            _ => TypeKind::Foreign,
+        }
+    }
+}
+
 impl Clean<Item> for hir::TraitItem<'_> {
     fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.kind {
@@ -836,8 +836,8 @@ pub struct Method {
     pub decl: FnDecl,
     pub header: hir::FnHeader,
     pub defaultness: Option<hir::Defaultness>,
-    pub all_types: Vec<Type>,
-    pub ret_types: Vec<Type>,
+    pub all_types: Vec<(Type, TypeKind)>,
+    pub ret_types: Vec<(Type, TypeKind)>,
 }
 
 #[derive(Clone, Debug)]
@@ -845,8 +845,8 @@ pub struct TyMethod {
     pub header: hir::FnHeader,
     pub decl: FnDecl,
     pub generics: Generics,
-    pub all_types: Vec<Type>,
-    pub ret_types: Vec<Type>,
+    pub all_types: Vec<(Type, TypeKind)>,
+    pub ret_types: Vec<(Type, TypeKind)>,
 }
 
 #[derive(Clone, Debug)]
@@ -854,8 +854,8 @@ pub struct Function {
     pub decl: FnDecl,
     pub generics: Generics,
     pub header: hir::FnHeader,
-    pub all_types: Vec<Type>,
-    pub ret_types: Vec<Type>,
+    pub all_types: Vec<(Type, TypeKind)>,
+    pub ret_types: Vec<(Type, TypeKind)>,
 }
 
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
@@ -1042,7 +1042,7 @@ pub enum PrimitiveType {
     Never,
 }
 
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, PartialEq, Eq, Hash, Copy, Debug)]
 pub enum TypeKind {
     Enum,
     Function,
@@ -184,7 +184,7 @@ pub fn get_real_types(
     arg: &Type,
     cx: &DocContext<'_>,
     recurse: i32,
-) -> FxHashSet<Type> {
+) -> FxHashSet<(Type, TypeKind)> {
     let arg_s = arg.print().to_string();
     let mut res = FxHashSet::default();
     if recurse >= 10 {
@@ -209,7 +209,11 @@ pub fn get_real_types(
                             if !adds.is_empty() {
                                 res.extend(adds);
                             } else if !ty.is_full_generic() {
-                                res.insert(ty);
+                                if let Some(did) = ty.def_id() {
+                                    if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                                        res.insert((ty, kind));
+                                    }
+                                }
                             }
                         }
                     }
@@ -225,13 +229,21 @@ pub fn get_real_types(
                     if !adds.is_empty() {
                         res.extend(adds);
                     } else if !ty.is_full_generic() {
-                        res.insert(ty.clone());
+                        if let Some(did) = ty.def_id() {
+                            if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                                res.insert((ty.clone(), kind));
+                            }
+                        }
                     }
                 }
             }
         }
     } else {
-        res.insert(arg.clone());
+        if let Some(did) = arg.def_id() {
+            if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                res.insert((arg.clone(), kind));
+            }
+        }
         if let Some(gens) = arg.generics() {
             for gen in gens.iter() {
                 if gen.is_full_generic() {
@@ -239,8 +251,10 @@ pub fn get_real_types(
                     if !adds.is_empty() {
                         res.extend(adds);
                     }
-                } else {
-                    res.insert(gen.clone());
+                } else if let Some(did) = gen.def_id() {
+                    if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                        res.insert((gen.clone(), kind));
+                    }
                 }
             }
         }
@@ -256,7 +270,7 @@ pub fn get_all_types(
     generics: &Generics,
     decl: &FnDecl,
     cx: &DocContext<'_>,
-) -> (Vec<Type>, Vec<Type>) {
+) -> (Vec<(Type, TypeKind)>, Vec<(Type, TypeKind)>) {
     let mut all_types = FxHashSet::default();
     for arg in decl.inputs.values.iter() {
         if arg.type_.is_self_type() {
@@ -266,7 +280,11 @@ pub fn get_all_types(
         if !args.is_empty() {
            all_types.extend(args);
         } else {
-            all_types.insert(arg.type_.clone());
+            if let Some(did) = arg.type_.def_id() {
+                if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                    all_types.insert((arg.type_.clone(), kind));
+                }
+            }
         }
     }
 
@@ -274,7 +292,11 @@ pub fn get_all_types(
         FnRetTy::Return(ref return_type) => {
            let mut ret = get_real_types(generics, &return_type, cx, 0);
            if ret.is_empty() {
-                ret.insert(return_type.clone());
+                if let Some(did) = return_type.def_id() {
+                    if let Some(kind) = cx.tcx.def_kind(did).clean(cx) {
+                        ret.insert((return_type.clone(), kind));
+                    }
+                }
            }
            ret.into_iter().collect()
         }
@@ -58,7 +58,7 @@ use rustc_span::symbol::{sym, Symbol};
 use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 
-use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy};
+use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy, TypeKind};
 use crate::config::{OutputFormat, RenderOptions};
 use crate::docfs::{DocFS, ErrorStorage, PathError};
 use crate::doctree;
@@ -302,19 +302,25 @@ impl Serialize for IndexItem {
 
 /// A type used for the search index.
 #[derive(Debug)]
-struct Type {
+struct RenderType {
+    ty: Option<DefId>,
+    idx: Option<usize>,
     name: Option<String>,
-    generics: Option<Vec<String>>,
+    generics: Option<Vec<Generic>>,
 }
 
-impl Serialize for Type {
+impl Serialize for RenderType {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
         if let Some(name) = &self.name {
             let mut seq = serializer.serialize_seq(None)?;
-            seq.serialize_element(&name)?;
+            if let Some(id) = self.idx {
+                seq.serialize_element(&id)?;
+            } else {
+                seq.serialize_element(&name)?;
+            }
             if let Some(generics) = &self.generics {
                 seq.serialize_element(&generics)?;
             }
@@ -325,11 +331,32 @@ impl Serialize for Type {
     }
 }
 
+/// A type used for the search index.
+#[derive(Debug)]
+struct Generic {
+    name: String,
+    defid: Option<DefId>,
+    idx: Option<usize>,
+}
+
+impl Serialize for Generic {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        if let Some(id) = self.idx {
+            serializer.serialize_some(&id)
+        } else {
+            serializer.serialize_some(&self.name)
+        }
+    }
+}
+
 /// Full type of functions/methods in the search index.
 #[derive(Debug)]
 struct IndexItemFunctionType {
-    inputs: Vec<Type>,
-    output: Option<Vec<Type>>,
+    inputs: Vec<TypeWithKind>,
+    output: Option<Vec<TypeWithKind>>,
 }
 
 impl Serialize for IndexItemFunctionType {
@@ -340,8 +367,8 @@ impl Serialize for IndexItemFunctionType {
         // If we couldn't figure out a type, just write `null`.
         let mut iter = self.inputs.iter();
         if match self.output {
-            Some(ref output) => iter.chain(output.iter()).any(|ref i| i.name.is_none()),
-            None => iter.any(|ref i| i.name.is_none()),
+            Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()),
+            None => iter.any(|ref i| i.ty.name.is_none()),
         } {
             serializer.serialize_none()
         } else {
@@ -359,6 +386,31 @@ impl Serialize for IndexItemFunctionType {
     }
 }
 
+#[derive(Debug)]
+pub struct TypeWithKind {
+    ty: RenderType,
+    kind: TypeKind,
+}
+
+impl From<(RenderType, TypeKind)> for TypeWithKind {
+    fn from(x: (RenderType, TypeKind)) -> TypeWithKind {
+        TypeWithKind { ty: x.0, kind: x.1 }
+    }
+}
+
+impl Serialize for TypeWithKind {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let mut seq = serializer.serialize_seq(None)?;
+        seq.serialize_element(&self.ty.name)?;
+        let x: ItemType = self.kind.into();
+        seq.serialize_element(&x)?;
+        seq.end()
+    }
+}
+
 thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
 thread_local!(pub static CURRENT_DEPTH: Cell<usize> = Cell::new(0));
 
@@ -12,7 +12,7 @@ use std::path::{Path, PathBuf};
 use serde::Serialize;
 
 use super::{plain_summary_line, shorten, Impl, IndexItem, IndexItemFunctionType, ItemType};
-use super::{RenderInfo, Type};
+use super::{Generic, RenderInfo, RenderType, TypeWithKind};
 
 /// Indicates where an external crate can be found.
 pub enum ExternalLocation {
@@ -588,17 +588,20 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
     let mut lastpathid = 0usize;
 
     for item in search_index {
-        item.parent_idx = item.parent.map(|defid| {
+        item.parent_idx = item.parent.and_then(|defid| {
             if defid_to_pathid.contains_key(&defid) {
-                *defid_to_pathid.get(&defid).expect("no pathid")
+                defid_to_pathid.get(&defid).map(|x| *x)
             } else {
                 let pathid = lastpathid;
                 defid_to_pathid.insert(defid, pathid);
                 lastpathid += 1;
 
-                let &(ref fqp, short) = paths.get(&defid).unwrap();
-                crate_paths.push((short, fqp.last().unwrap().clone()));
-                pathid
+                if let Some(&(ref fqp, short)) = paths.get(&defid) {
+                    crate_paths.push((short, fqp.last().unwrap().clone()));
+                    Some(pathid)
+                } else {
+                    None
+                }
             }
         });
 
@@ -647,20 +650,25 @@ fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
         _ => return None,
     };
 
-    let inputs =
-        all_types.iter().map(|arg| get_index_type(&arg)).filter(|a| a.name.is_some()).collect();
+    let inputs = all_types
+        .iter()
+        .map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
+        .filter(|a| a.ty.name.is_some())
+        .collect();
     let output = ret_types
         .iter()
-        .map(|arg| get_index_type(&arg))
-        .filter(|a| a.name.is_some())
+        .map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind)))
+        .filter(|a| a.ty.name.is_some())
         .collect::<Vec<_>>();
     let output = if output.is_empty() { None } else { Some(output) };
 
     Some(IndexItemFunctionType { inputs, output })
 }
 
-fn get_index_type(clean_type: &clean::Type) -> Type {
-    let t = Type {
+fn get_index_type(clean_type: &clean::Type) -> RenderType {
+    let t = RenderType {
+        ty: clean_type.def_id(),
+        idx: None,
         name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()),
         generics: get_generics(clean_type),
     };
@@ -685,12 +693,17 @@ fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option
     }
 }
 
-fn get_generics(clean_type: &clean::Type) -> Option<Vec<String>> {
+fn get_generics(clean_type: &clean::Type) -> Option<Vec<Generic>> {
     clean_type.generics().and_then(|types| {
         let r = types
             .iter()
-            .filter_map(|t| get_index_type_name(t, false))
-            .map(|s| s.to_ascii_lowercase())
+            .filter_map(|t| {
+                if let Some(name) = get_index_type_name(t, false) {
+                    Some(Generic { name: name.to_ascii_lowercase(), defid: t.def_id(), idx: None })
+                } else {
+                    None
+                }
+            })
             .collect::<Vec<_>>();
         if r.is_empty() { None } else { Some(r) }
     })
@@ -523,13 +523,14 @@ function getSearchElement() {
 }
 
 function initSearch(rawSearchIndex) {
-    var currentResults, index, searchIndex;
     var MAX_LEV_DISTANCE = 3;
     var MAX_RESULTS = 200;
     var GENERICS_DATA = 1;
     var NAME = 0;
     var INPUTS_DATA = 0;
     var OUTPUT_DATA = 1;
+    var NO_TYPE_FILTER = -1;
+    var currentResults, index, searchIndex;
     var params = getQueryStringParams();
 
     // Populate search bar with query string search term when provided,
@@ -556,7 +557,7 @@ function getSearchElement() {
                    return i;
                }
            }
-            return -1;
+            return NO_TYPE_FILTER;
        }
 
        var valLower = query.query.toLowerCase(),
@@ -719,6 +720,13 @@ function getSearchElement() {
            };
        }
 
+        function getObjectFromId(id) {
+            if (typeof id === "number") {
+                return searchIndex[id];
+            }
+            return {'name': id};
+        }
+
        function checkGenerics(obj, val) {
            // The names match, but we need to be sure that all generics kinda
            // match as well.
@@ -735,8 +743,10 @@ function getSearchElement() {
            for (var y = 0; y < vlength; ++y) {
                var lev = { pos: -1, lev: MAX_LEV_DISTANCE + 1};
                var elength = elems.length;
+                var firstGeneric = getObjectFromId(val.generics[y]).name;
                for (var x = 0; x < elength; ++x) {
-                    var tmp_lev = levenshtein(elems[x], val.generics[y]);
+                    var tmp_lev = levenshtein(getObjectFromId(elems[x]).name,
+                                              firstGeneric);
                    if (tmp_lev < lev.lev) {
                        lev.lev = tmp_lev;
                        lev.pos = x;
@@ -771,8 +781,9 @@ function getSearchElement() {
 
                for (var y = 0; allFound === true && y < val.generics.length; ++y) {
                    allFound = false;
+                    var firstGeneric = getObjectFromId(val.generics[y]).name;
                    for (x = 0; allFound === false && x < elems.length; ++x) {
-                        allFound = elems[x] === val.generics[y];
+                        allFound = getObjectFromId(elems[x]).name === firstGeneric;
                    }
                    if (allFound === true) {
                        elems.splice(x - 1, 1);
@@ -829,16 +840,22 @@ function getSearchElement() {
                return lev_distance + 1;
            }
 
-        function findArg(obj, val, literalSearch) {
+        function findArg(obj, val, literalSearch, typeFilter) {
            var lev_distance = MAX_LEV_DISTANCE + 1;
 
-            if (obj && obj.type && obj.type[INPUTS_DATA] &&
-                  obj.type[INPUTS_DATA].length > 0) {
+            if (obj && obj.type && obj.type[INPUTS_DATA] && obj.type[INPUTS_DATA].length > 0) {
                var length = obj.type[INPUTS_DATA].length;
                for (var i = 0; i < length; i++) {
-                    var tmp = checkType(obj.type[INPUTS_DATA][i], val, literalSearch);
-                    if (literalSearch === true && tmp === true) {
-                        return true;
-                    }
+                    var tmp = obj.type[INPUTS_DATA][i];
+                    if (typePassesFilter(typeFilter, tmp[1]) === false) {
+                        continue;
+                    }
+                    tmp = checkType(tmp, val, literalSearch);
+                    if (literalSearch === true) {
+                        if (tmp === true) {
+                            return true;
+                        }
+                        continue;
+                    }
                    lev_distance = Math.min(tmp, lev_distance);
                    if (lev_distance === 0) {
@@ -849,20 +866,20 @@ function getSearchElement() {
            return literalSearch === true ? false : lev_distance;
        }
 
-        function checkReturned(obj, val, literalSearch) {
+        function checkReturned(obj, val, literalSearch, typeFilter) {
            var lev_distance = MAX_LEV_DISTANCE + 1;
 
            if (obj && obj.type && obj.type.length > OUTPUT_DATA) {
                var ret = obj.type[OUTPUT_DATA];
-                if (!obj.type[OUTPUT_DATA].length) {
+                if (typeof ret[0] === "string") {
                    ret = [ret];
                }
                for (var x = 0; x < ret.length; ++x) {
-                    var r = ret[x];
-                    if (typeof r === "string") {
-                        r = [r];
+                    var tmp = ret[x];
+                    if (typePassesFilter(typeFilter, tmp[1]) === false) {
+                        continue;
                    }
-                    var tmp = checkType(r, val, literalSearch);
+                    tmp = checkType(tmp, val, literalSearch);
                    if (literalSearch === true) {
                        if (tmp === true) {
                            return true;
@@ -917,7 +934,7 @@ function getSearchElement() {
 
        function typePassesFilter(filter, type) {
            // No filter
-            if (filter < 0) return true;
+            if (filter <= NO_TYPE_FILTER) return true;
 
            // Exact match
            if (filter === type) return true;
@@ -926,11 +943,13 @@ function getSearchElement() {
            var name = itemTypes[type];
            switch (itemTypes[filter]) {
                case "constant":
-                    return (name == "associatedconstant");
+                    return name === "associatedconstant";
                case "fn":
-                    return (name == "method" || name == "tymethod");
+                    return name === "method" || name === "tymethod";
                case "type":
-                    return (name == "primitive" || name == "keyword");
+                    return name === "primitive" || name === "associatedtype";
+                case "trait":
+                    return name === "traitalias";
            }
 
            // No match
@@ -959,42 +978,33 @@ function getSearchElement() {
                if (filterCrates !== undefined && searchIndex[i].crate !== filterCrates) {
                    continue;
                }
-                in_args = findArg(searchIndex[i], val, true);
-                returned = checkReturned(searchIndex[i], val, true);
+                in_args = findArg(searchIndex[i], val, true, typeFilter);
+                returned = checkReturned(searchIndex[i], val, true, typeFilter);
                ty = searchIndex[i];
                fullId = generateId(ty);
 
-                if (searchWords[i] === val.name) {
-                    // filter type: ... queries
-                    if (typePassesFilter(typeFilter, searchIndex[i].ty) &&
-                        results[fullId] === undefined)
-                    {
-                        results[fullId] = {id: i, index: -1};
-                    }
-                } else if ((in_args === true || returned === true) &&
-                            typePassesFilter(typeFilter, searchIndex[i].ty)) {
-                    if (in_args === true || returned === true) {
-                        if (in_args === true) {
-                            results_in_args[fullId] = {
-                                id: i,
-                                index: -1,
-                                dontValidate: true,
-                            };
-                        }
-                        if (returned === true) {
-                            results_returned[fullId] = {
-                                id: i,
-                                index: -1,
-                                dontValidate: true,
-                            };
-                        }
-                    } else {
-                        results[fullId] = {
-                            id: i,
-                            index: -1,
-                            dontValidate: true,
-                        };
-                    }
+                if (searchWords[i] === val.name
+                    && typePassesFilter(typeFilter, searchIndex[i].ty)
+                    && results[fullId] === undefined) {
+                    results[fullId] = {
+                        id: i,
+                        index: -1,
+                        dontValidate: true,
+                    };
+                }
+                if (in_args === true && results_in_args[fullId] === undefined) {
+                    results_in_args[fullId] = {
+                        id: i,
+                        index: -1,
+                        dontValidate: true,
+                    };
+                }
+                if (returned === true && results_returned[fullId] === undefined) {
+                    results_returned[fullId] = {
+                        id: i,
+                        index: -1,
+                        dontValidate: true,
+                    };
                }
            }
            query.inputs = [val];
@@ -1025,7 +1035,7 @@ function getSearchElement() {
 
            // allow searching for void (no output) functions as well
            var typeOutput = type.length > OUTPUT_DATA ? type[OUTPUT_DATA].name : "";
-            returned = checkReturned(ty, output, true);
+            returned = checkReturned(ty, output, true, NO_TYPE_FILTER);
            if (output.name === "*" || returned === true) {
                in_args = false;
                var is_module = false;
@@ -1126,16 +1136,8 @@ function getSearchElement() {
                    lev += 1;
                }
            }
-            if ((in_args = findArg(ty, valGenerics)) <= MAX_LEV_DISTANCE) {
-                if (typePassesFilter(typeFilter, ty.ty) === false) {
-                    in_args = MAX_LEV_DISTANCE + 1;
-                }
-            }
-            if ((returned = checkReturned(ty, valGenerics)) <= MAX_LEV_DISTANCE) {
-                if (typePassesFilter(typeFilter, ty.ty) === false) {
-                    returned = MAX_LEV_DISTANCE + 1;
-                }
-            }
+            in_args = findArg(ty, valGenerics, false, typeFilter);
+            returned = checkReturned(ty, valGenerics, false, typeFilter);
 
            lev += lev_add;
            if (lev > 0 && val.length > 3 && searchWords[j].indexOf(val) > -1) {
src/test/rustdoc-js-std/return-specific-literal.js (new file)
@@ -0,0 +1,10 @@
+const QUERY = 'struct:"string"';
+
+const EXPECTED = {
+    'in_args': [
+        { 'path': 'std::string::String', 'name': 'ne' },
+    ],
+    'returned': [
+        { 'path': 'std::string::String', 'name': 'add' },
+    ],
+};
src/test/rustdoc-js-std/return-specific.js (new file)
@@ -0,0 +1,10 @@
+const QUERY = 'struct:string';
+
+const EXPECTED = {
+    'in_args': [
+        { 'path': 'std::string::String', 'name': 'ne' },
+    ],
+    'returned': [
+        { 'path': 'std::string::String', 'name': 'add' },
+    ],
+};
@@ -263,7 +263,7 @@ function main(argv) {
     finalJS = "";
 
     var arraysToLoad = ["itemTypes"];
-    var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS",
+    var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
                            "GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
                            "TY_PRIMITIVE", "TY_KEYWORD",
                            "levenshtein_row2"];
@@ -336,7 +336,7 @@ function main(argv) {
                console.log("OK");
            }
        });
-        return errors;
+        return errors > 0 ? 1 : 0;
    }
 
    process.exit(main(process.argv));
@@ -231,7 +231,7 @@ function load_files(out_folder, crate) {
     finalJS = "";
 
     var arraysToLoad = ["itemTypes"];
-    var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS",
+    var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
                            "GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
                            "TY_PRIMITIVE", "TY_KEYWORD",
                            "levenshtein_row2"];
@@ -328,7 +328,7 @@ function main(argv) {
                console.log("OK");
            }
        }
-        return errors;
+        return errors > 0 ? 1 : 0;
    }
 
    process.exit(main(process.argv));