Auto merge of #70726 - Centril:rollup-zrdkkpt, r=Centril

Rollup of 5 pull requests

Successful merges:

 - #68334 (AArch64 bare-metal targets: Build rust-std)
 - #70224 (Clean up rustdoc js testers)
 - #70532 (Miri engine: stronger type-based sanity check for assignments)
 - #70698 (bootstrap: add `--json-output` for rust-analyzer)
 - #70715 (Fix typo in operands section)

Failed merges:

r? @ghost

commit 548afdbe1a
@@ -935,7 +935,11 @@ pub fn stream_cargo(
    }
    // Instruct Cargo to give us json messages on stdout, critically leaving
    // stderr as piped so we can get those pretty colors.
    let mut message_format = String::from("json-render-diagnostics");
    let mut message_format = if builder.config.json_output {
        String::from("json")
    } else {
        String::from("json-render-diagnostics")
    };
    if let Some(s) = &builder.config.rustc_error_format {
        message_format.push_str(",json-diagnostic-");
        message_format.push_str(s);
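For reference, a minimal standalone sketch of the logic in the hunk above (the free function `message_format` and the `main` below are illustrative only; bootstrap builds the string inline):

```rust
// Hypothetical helper mirroring the inline logic in stream_cargo:
// `--json-output` switches the base format, and `--error-format`
// appends a `json-diagnostic-*` modifier.
fn message_format(json_output: bool, rustc_error_format: Option<&str>) -> String {
    let mut fmt =
        String::from(if json_output { "json" } else { "json-render-diagnostics" });
    if let Some(s) = rustc_error_format {
        fmt.push_str(",json-diagnostic-");
        fmt.push_str(s);
    }
    fmt
}

fn main() {
    assert_eq!(message_format(true, None), "json");
    assert_eq!(
        message_format(false, Some("short")),
        "json-render-diagnostics,json-diagnostic-short"
    );
}
```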
@@ -48,6 +48,7 @@ pub struct Config {
    pub ignore_git: bool,
    pub exclude: Vec<PathBuf>,
    pub rustc_error_format: Option<String>,
    pub json_output: bool,
    pub test_compare_mode: bool,
    pub llvm_libunwind: bool,
@@ -415,6 +416,7 @@ pub fn parse(args: &[String]) -> Config {
    let mut config = Config::default_opts();
    config.exclude = flags.exclude;
    config.rustc_error_format = flags.rustc_error_format;
    config.json_output = flags.json_output;
    config.on_fail = flags.on_fail;
    config.stage = flags.stage;
    config.jobs = flags.jobs.map(threads_from_config);
@@ -31,6 +31,7 @@ pub struct Flags {
    pub incremental: bool,
    pub exclude: Vec<PathBuf>,
    pub rustc_error_format: Option<String>,
    pub json_output: bool,
    pub dry_run: bool,

    // This overrides the deny-warnings configuration option,
@@ -156,6 +157,7 @@ pub fn parse(args: &[String]) -> Flags {
        "VALUE",
    );
    opts.optopt("", "error-format", "rustc error format", "FORMAT");
    opts.optflag("", "json-output", "use message-format=json");
    opts.optopt(
        "",
        "llvm-skip-rebuild",
@@ -503,6 +505,7 @@ pub fn parse(args: &[String]) -> Flags {
        dry_run: matches.opt_present("dry-run"),
        on_fail: matches.opt_str("on-fail"),
        rustc_error_format: matches.opt_str("error-format"),
        json_output: matches.opt_present("json-output"),
        keep_stage: matches
            .opt_strs("keep-stage")
            .into_iter()
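A hedged sketch of the round trip for the new flag, using the same `getopts` calls as the hunks above (`Flags` and `Config` here are trimmed stand-ins, not the real bootstrap structs):

```rust
use getopts::Options;

struct Flags { json_output: bool }
struct Config { json_output: bool }

fn parse(args: &[String]) -> Config {
    let mut opts = Options::new();
    opts.optflag("", "json-output", "use message-format=json");
    let matches = opts.parse(args).expect("failed to parse args");
    // Flags::parse records whether the flag was present ...
    let flags = Flags { json_output: matches.opt_present("json-output") };
    // ... and Config::parse copies it over, as shown above.
    Config { json_output: flags.json_output }
}

fn main() {
    assert!(parse(&["--json-output".to_string()]).json_output);
    assert!(!parse(&[]).json_output);
}
```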
@@ -72,6 +72,9 @@ RUN ./install-mips-musl.sh
COPY dist-various-1/install-mipsel-musl.sh /build
RUN ./install-mipsel-musl.sh

COPY dist-various-1/install-aarch64-none-elf.sh /build
RUN ./install-aarch64-none-elf.sh

# Suppress some warnings in the openwrt toolchains we downloaded
ENV STAGING_DIR=/tmp
@@ -140,6 +143,8 @@ ENV TARGETS=$TARGETS,armv5te-unknown-linux-gnueabi
ENV TARGETS=$TARGETS,armv5te-unknown-linux-musleabi
ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,aarch64-unknown-linux-musl
ENV TARGETS=$TARGETS,aarch64-unknown-none
ENV TARGETS=$TARGETS,aarch64-unknown-none-softfloat
ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu
ENV TARGETS=$TARGETS,x86_64-unknown-redox
ENV TARGETS=$TARGETS,thumbv6m-none-eabi
@@ -178,6 +183,10 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
    CC_armv7a_none_eabihf=arm-none-eabi-gcc \
    CFLAGS_armv7a_none_eabi=-march=armv7-a \
    CFLAGS_armv7a_none_eabihf=-march=armv7-a+vfpv3 \
    CC_aarch64_unknown_none_softfloat=aarch64-none-elf-gcc \
    CFLAGS_aarch64_unknown_none_softfloat=-mstrict-align -march=armv8-a+nofp+nosimd \
    CC_aarch64_unknown_none=aarch64-none-elf-gcc \
    CFLAGS_aarch64_unknown_none=-mstrict-align -march=armv8-a+fp+simd \
    CC_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-gcc \
    AR_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-ar \
    CXX_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-g++ \
src/ci/docker/dist-various-1/install-aarch64-none-elf.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

set -ex

curl -L https://developer.arm.com/-/media/Files/downloads/gnu-a/9.2-2019.12/binrel/gcc-arm-9.2-2019.12-x86_64-aarch64-none-elf.tar.xz \
| tar --extract --xz --strip 1 --directory /usr/local
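For context, these CI changes exist so that rust-std (essentially `core`, `alloc`, and `compiler_builtins`) is built and shipped for `aarch64-unknown-none` and `aarch64-unknown-none-softfloat`. A hedged illustration of the kind of bare-metal crate that can then be compiled with `cargo build --target aarch64-unknown-none` against the prebuilt components; the crate layout and entry-point name below are illustrative, not taken from the PR:

```rust
// Hypothetical minimal bare-metal crate for aarch64-unknown-none.
#![no_std]
#![no_main]

use core::panic::PanicInfo;

// A no_std binary must supply its own panic handler.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
    loop {}
}

// Bare-metal entry point; real firmware would set up the stack and MMU here.
#[no_mangle]
pub extern "C" fn _start() -> ! {
    loop {}
}
```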
@@ -86,7 +86,7 @@ llvm_asm!("xor %eax, %eax" ::: "eax");

Input and output operands follow the same format: `:
"constraints1"(expr1), "constraints2"(expr2), ..."`. Output operand
expressions must be mutable lvalues, or not yet assigned:
expressions must be mutable place, or not yet assigned:

```rust
# #![feature(llvm_asm)]
@@ -18,7 +18,7 @@
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_span::source_map::DUMMY_SP;
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};
use rustc_target::abi::{Abi, Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};

use super::{
    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, OpTy, Operand, Place, PlaceTy,
@@ -210,6 +210,53 @@ fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
    }
}

/// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    if src.ty == dest.ty {
        // Equal types, all is good.
        return true;
    }
    // Type-changing assignments can happen for (at least) two reasons:
    // - `&mut T` -> `&T` gets optimized from a reborrow to a mere assignment.
    // - Subtyping is used. While all normal lifetimes are erased, higher-ranked lifetime
    //   bounds are still around and can lead to type differences.
    // There is no good way to check the latter, so we compare layouts instead -- but only
    // for values with `Scalar`/`ScalarPair` abi.
    // FIXME: Do something more accurate, type-based.
    match &src.abi {
        Abi::Scalar(..) | Abi::ScalarPair(..) => src.layout == dest.layout,
        _ => false,
    }
}

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                assert!(
                    mir_assign_valid_types(check_layout, known_layout),
                    "expected type differs from actual type.\nexpected: {:?}\nactual: {:?}",
                    known_layout.ty,
                    check_layout.ty,
                );
            }
            Ok(known_layout)
        }
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxtAt<'tcx>,
@@ -377,7 +424,7 @@ pub fn layout_of_local(
        // have to support that case (mostly by skipping all caching).
        match frame.locals.get(local).and_then(|state| state.layout.get()) {
            None => {
                let layout = crate::interpret::operand::from_known_layout(layout, || {
                let layout = from_known_layout(layout, || {
                    let local_ty = frame.body.local_decls[local].ty;
                    let local_ty =
                        self.subst_from_frame_and_normalize_erasing_regions(frame, local_ty);
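The two "type-changing assignment" cases that `mir_assign_valid_types` has to accept can be reproduced in ordinary safe code. A hedged illustration (plain Rust, not interpreter code) of the higher-ranked-lifetime case: both sides are a single `Scalar` (a function pointer) with identical layout, but the types differ because the `for<'a>` bound is not erased:

```rust
fn main() {
    // A higher-ranked function pointer ...
    let f: for<'a> fn(&'a u32) -> &'a u32 = |x| x;
    // ... assigned, via subtyping, to a pointer with a concrete lifetime.
    // Same Scalar layout, different type as far as MIR is concerned.
    let g: fn(&'static u32) -> &'static u32 = f;
    let _ = g(&0);
}
```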
@@ -18,19 +18,13 @@
pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in one place: here

pub use self::eval_context::{Frame, InterpCx, LocalState, LocalValue, StackPopCleanup};

pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};

pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};

pub use self::intern::{intern_const_alloc_recursive, InternKind};
pub use self::machine::{AllocMap, Machine, MayLeak, StackPopJump};

pub use self::operand::{ImmTy, Immediate, OpTy, Operand, ScalarMaybeUndef};

pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
pub use self::validity::RefTracking;
pub use self::visitor::{MutValueVisitor, ValueVisitor};

pub use self::validity::RefTracking;

pub use self::intern::{intern_const_alloc_recursive, InternKind};

crate use self::intrinsics::eval_nullary_intrinsic;
use eval_context::{from_known_layout, mir_assign_valid_types};
@@ -2,21 +2,21 @@
//! All high-level functions to read from memory work on operands as sources.

use std::convert::TryFrom;
use std::fmt::Write;

use super::{InterpCx, MPlaceTy, Machine, MemPlace, Place, PlaceTy};
use rustc_hir::def::Namespace;
use rustc_macros::HashStable;
pub use rustc_middle::mir::interpret::ScalarMaybeUndef;
use rustc_middle::mir::interpret::{
    sign_extend, truncate, AllocId, ConstValue, GlobalId, InterpResult, Pointer, Scalar,
};
use rustc_middle::ty::layout::{IntegerExt, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Printer};
use rustc_middle::ty::Ty;
use rustc_middle::{mir, ty};
use rustc_target::abi::{Abi, DiscriminantKind, HasDataLayout, Integer, LayoutOf, Size};
use rustc_target::abi::{VariantIdx, Variants};
use std::fmt::Write;

use super::{
    from_known_layout, sign_extend, truncate, AllocId, ConstValue, GlobalId, InterpCx,
    InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Scalar, ScalarMaybeUndef,
};

/// An `Immediate` represents a single immediate self-contained Rust value.
///
@@ -203,29 +203,6 @@ pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
    }
}

// Use the existing layout if given (but sanity check in debug mode),
// or compute the layout.
#[inline(always)]
pub(super) fn from_known_layout<'tcx>(
    layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match layout {
        None => compute(),
        Some(layout) => {
            if cfg!(debug_assertions) {
                let layout2 = compute()?;
                assert_eq!(
                    layout.layout, layout2.layout,
                    "mismatch in layout of supposedly equal-layout types {:?} and {:?}",
                    layout.ty, layout2.ty
                );
            }
            Ok(layout)
        }
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Normalice `place.ptr` to a `Pointer` if this is a place and not a ZST.
    /// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
@@ -7,16 +7,15 @@

use rustc_macros::HashStable;
use rustc_middle::mir;
use rustc_middle::mir::interpret::truncate;
use rustc_middle::ty::layout::{PrimitiveExt, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::{Abi, Align, DiscriminantKind, FieldsShape};
use rustc_target::abi::{HasDataLayout, LayoutOf, Size, VariantIdx, Variants};

use super::{
    AllocId, AllocMap, Allocation, AllocationExtra, ImmTy, Immediate, InterpCx, InterpResult,
    LocalValue, Machine, MemoryKind, OpTy, Operand, Pointer, PointerArithmetic, RawConst, Scalar,
    ScalarMaybeUndef,
    mir_assign_valid_types, truncate, AllocId, AllocMap, Allocation, AllocationExtra, ImmTy,
    Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy, Operand, Pointer,
    PointerArithmetic, RawConst, Scalar, ScalarMaybeUndef,
};

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)]
@@ -869,10 +868,10 @@ fn copy_op_no_validate(
        // We do NOT compare the types for equality, because well-typed code can
        // actually "transmute" `&mut T` to `&T` in an assignment without a cast.
        assert!(
            src.layout.layout == dest.layout.layout,
            "Layout mismatch when copying!\nsrc: {:#?}\ndest: {:#?}",
            src,
            dest
            mir_assign_valid_types(src.layout, dest.layout),
            "type mismatch when copying!\nsrc: {:?},\ndest: {:?}",
            src.layout.ty,
            dest.layout.ty,
        );

        // Let us see if the layout is simple so we take a shortcut, avoid force_allocation.
@@ -923,7 +922,7 @@ pub fn copy_op_transmute(
        src: OpTy<'tcx, M::PointerTag>,
        dest: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if src.layout.layout == dest.layout.layout {
        if mir_assign_valid_types(src.layout, dest.layout) {
            // Fast path: Just use normal `copy_op`
            return self.copy_op(src, dest);
        }
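A hedged illustration of the `&mut T` to `&T` case mentioned in the comment above (ordinary safe code, not interpreter internals): once the reborrow is optimized into a plain assignment, the interpreter sees two places whose types differ but whose `Scalar` layouts match, which is exactly what `mir_assign_valid_types(src.layout, dest.layout)` now validates instead of raw layout equality:

```rust
fn main() {
    let mut x = 0i32;
    let m: &mut i32 = &mut x;
    // Well-typed "transmute" of &mut i32 into &i32 in a plain assignment,
    // no cast involved; both sides share the same Scalar layout.
    let r: &i32 = m;
    assert_eq!(*r, 0);
}
```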
@@ -56,6 +56,8 @@
    "aarch64-unknown-hermit",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "aarch64-unknown-none",
    "aarch64-unknown-none-softfloat",
    "aarch64-unknown-redox",
    "arm-linux-androideabi",
    "arm-unknown-linux-gnueabi",
src/tools/rustdoc-js-common/lib.js (new file, 319 lines)
@@ -0,0 +1,319 @@
const fs = require('fs');
|
||||
|
||||
function getNextStep(content, pos, stop) {
|
||||
while (pos < content.length && content[pos] !== stop &&
|
||||
(content[pos] === ' ' || content[pos] === '\t' || content[pos] === '\n')) {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
return null;
|
||||
}
|
||||
if (content[pos] !== stop) {
|
||||
return pos * -1;
|
||||
}
|
||||
return pos;
|
||||
}
|
||||
|
||||
// Stupid function extractor based on indent. Doesn't support block
|
||||
// comments. If someone puts a ' or an " in a block comment this
|
||||
// will blow up. Template strings are not tested and might also be
|
||||
// broken.
|
||||
function extractFunction(content, functionName) {
|
||||
var indent = 0;
|
||||
var splitter = "function " + functionName + "(";
|
||||
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = start;
|
||||
while (pos < content.length && content[pos] !== ')') {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
break;
|
||||
}
|
||||
pos = getNextStep(content, pos + 1, '{');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
// Eat single-line comments
|
||||
if (content[pos] === '/' && pos > 0 && content[pos-1] === '/') {
|
||||
do {
|
||||
pos += 1;
|
||||
} while (pos < content.length && content[pos] !== '\n');
|
||||
|
||||
// Eat quoted strings
|
||||
} else if (content[pos] === '"' || content[pos] === "'" || content[pos] === "`") {
|
||||
var stop = content[pos];
|
||||
var is_escaped = false;
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
|
||||
// Otherwise, check for indent
|
||||
} else if (content[pos] === '{') {
|
||||
indent += 1;
|
||||
} else if (content[pos] === '}') {
|
||||
indent -= 1;
|
||||
if (indent === 0) {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for array.
|
||||
function extractArrayVariable(content, arrayName) {
|
||||
var splitter = "var " + arrayName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
pos = getNextStep(content, pos, '[');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ']' &&
|
||||
pos + 1 < content.length &&
|
||||
content[pos + 1] === ';') {
|
||||
return content.slice(start, pos + 2);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for variable.
|
||||
function extractVariable(content, varName) {
|
||||
var splitter = "var " + varName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ';' || content[pos] === ',') {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function loadContent(content) {
|
||||
var Module = module.constructor;
|
||||
var m = new Module();
|
||||
m._compile(content, "tmp.js");
|
||||
m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
|
||||
content.startsWith("// ignore-order\n");
|
||||
m.exports.exact_check = content.indexOf("\n// exact-check\n") !== -1 ||
|
||||
content.startsWith("// exact-check\n");
|
||||
m.exports.should_fail = content.indexOf("\n// should-fail\n") !== -1 ||
|
||||
content.startsWith("// should-fail\n");
|
||||
return m.exports;
|
||||
}
|
||||
|
||||
function readFile(filePath) {
|
||||
return fs.readFileSync(filePath, 'utf8');
|
||||
}
|
||||
|
||||
function loadThings(thingsToLoad, kindOfLoad, funcToCall, fileContent) {
|
||||
var content = '';
|
||||
for (var i = 0; i < thingsToLoad.length; ++i) {
|
||||
var tmp = funcToCall(fileContent, thingsToLoad[i]);
|
||||
if (tmp === null) {
|
||||
console.error('unable to find ' + kindOfLoad + ' "' + thingsToLoad[i] + '"');
|
||||
process.exit(1);
|
||||
}
|
||||
content += tmp;
|
||||
content += 'exports.' + thingsToLoad[i] + ' = ' + thingsToLoad[i] + ';';
|
||||
}
|
||||
return content;
|
||||
}
|
||||
|
||||
function lookForEntry(entry, data) {
|
||||
for (var i = 0; i < data.length; ++i) {
|
||||
var allGood = true;
|
||||
for (var key in entry) {
|
||||
if (!entry.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
var value = data[i][key];
|
||||
// To make our life easier, if there is a "parent" type, we add it to the path.
|
||||
if (key === 'path' && data[i]['parent'] !== undefined) {
|
||||
if (value.length > 0) {
|
||||
value += '::' + data[i]['parent']['name'];
|
||||
} else {
|
||||
value = data[i]['parent']['name'];
|
||||
}
|
||||
}
|
||||
if (value !== entry[key]) {
|
||||
allGood = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (allGood === true) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function loadMainJsAndIndex(mainJs, aliases, searchIndex, crate) {
|
||||
if (searchIndex[searchIndex.length - 1].length === 0) {
|
||||
searchIndex.pop();
|
||||
}
|
||||
searchIndex.pop();
|
||||
searchIndex = loadContent(searchIndex.join("\n") + '\nexports.searchIndex = searchIndex;');
|
||||
finalJS = "";
|
||||
|
||||
var arraysToLoad = ["itemTypes"];
|
||||
var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
|
||||
"GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
|
||||
"TY_PRIMITIVE", "TY_KEYWORD",
|
||||
"levenshtein_row2"];
|
||||
// execQuery first parameter is built in getQuery (which takes in the search input).
|
||||
// execQuery last parameter is built in buildIndex.
|
||||
// buildIndex requires the hashmap from search-index.
|
||||
var functionsToLoad = ["buildHrefAndPath", "pathSplitter", "levenshtein", "validateResult",
|
||||
"getQuery", "buildIndex", "execQuery", "execSearch"];
|
||||
|
||||
finalJS += 'window = { "currentCrate": "' + crate + '" };\n';
|
||||
finalJS += 'var rootPath = "../";\n';
|
||||
finalJS += aliases;
|
||||
finalJS += loadThings(arraysToLoad, 'array', extractArrayVariable, mainJs);
|
||||
finalJS += loadThings(variablesToLoad, 'variable', extractVariable, mainJs);
|
||||
finalJS += loadThings(functionsToLoad, 'function', extractFunction, mainJs);
|
||||
|
||||
var loaded = loadContent(finalJS);
|
||||
var index = loaded.buildIndex(searchIndex.searchIndex);
|
||||
|
||||
return [loaded, index];
|
||||
}
|
||||
|
||||
function runChecks(testFile, loaded, index) {
|
||||
var errors = 0;
|
||||
var loadedFile = loadContent(
|
||||
readFile(testFile) + 'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
|
||||
|
||||
const expected = loadedFile.EXPECTED;
|
||||
const query = loadedFile.QUERY;
|
||||
const filter_crate = loadedFile.FILTER_CRATE;
|
||||
const ignore_order = loadedFile.ignore_order;
|
||||
const exact_check = loadedFile.exact_check;
|
||||
const should_fail = loadedFile.should_fail;
|
||||
|
||||
var results = loaded.execSearch(loaded.getQuery(query), index);
|
||||
var error_text = [];
|
||||
|
||||
for (var key in expected) {
|
||||
if (!expected.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
if (!results.hasOwnProperty(key)) {
|
||||
error_text.push('==> Unknown key "' + key + '"');
|
||||
break;
|
||||
}
|
||||
var entry = expected[key];
|
||||
var prev_pos = -1;
|
||||
for (var i = 0; i < entry.length; ++i) {
|
||||
var entry_pos = lookForEntry(entry[i], results[key]);
|
||||
if (entry_pos === null) {
|
||||
error_text.push("==> Result not found in '" + key + "': '" +
|
||||
JSON.stringify(entry[i]) + "'");
|
||||
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
|
||||
error_text.push("==> Exact check failed at position " + (prev_pos + 1) + ": " +
|
||||
"expected '" + JSON.stringify(entry[i]) + "' but found '" +
|
||||
JSON.stringify(results[key][i]) + "'");
|
||||
} else if (ignore_order === false && entry_pos < prev_pos) {
|
||||
error_text.push("==> '" + JSON.stringify(entry[i]) + "' was supposed to be " +
|
||||
" before '" + JSON.stringify(results[key][entry_pos]) + "'");
|
||||
} else {
|
||||
prev_pos = entry_pos;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (error_text.length === 0 && should_fail === true) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error("==> Test was supposed to fail but all items were found...");
|
||||
} else if (error_text.length !== 0 && should_fail === false) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error(error_text.join("\n"));
|
||||
} else {
|
||||
console.log("OK");
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
'getNextStep': getNextStep,
|
||||
'extractFunction': extractFunction,
|
||||
'extractArrayVariable': extractArrayVariable,
|
||||
'extractVariable': extractVariable,
|
||||
'loadContent': loadContent,
|
||||
'readFile': readFile,
|
||||
'loadThings': loadThings,
|
||||
'lookForEntry': lookForEntry,
|
||||
'loadMainJsAndIndex': loadMainJsAndIndex,
|
||||
'runChecks': runChecks,
|
||||
};
|
@@ -1,225 +1,11 @@
const fs = require('fs');
const path = require('path');
const tools = require('../rustdoc-js-common/lib.js');

function getNextStep(content, pos, stop) {
|
||||
while (pos < content.length && content[pos] !== stop &&
|
||||
(content[pos] === ' ' || content[pos] === '\t' || content[pos] === '\n')) {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
return null;
|
||||
}
|
||||
if (content[pos] !== stop) {
|
||||
return pos * -1;
|
||||
}
|
||||
return pos;
|
||||
}
|
||||
|
||||
// Stupid function extractor based on indent. Doesn't support block
|
||||
// comments. If someone puts a ' or an " in a block comment this
|
||||
// will blow up. Template strings are not tested and might also be
|
||||
// broken.
|
||||
function extractFunction(content, functionName) {
|
||||
var indent = 0;
|
||||
var splitter = "function " + functionName + "(";
|
||||
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = start;
|
||||
while (pos < content.length && content[pos] !== ')') {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
break;
|
||||
}
|
||||
pos = getNextStep(content, pos + 1, '{');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
// Eat single-line comments
|
||||
if (content[pos] === '/' && pos > 0 && content[pos-1] === '/') {
|
||||
do {
|
||||
pos += 1;
|
||||
} while (pos < content.length && content[pos] !== '\n');
|
||||
|
||||
// Eat quoted strings
|
||||
} else if (content[pos] === '"' || content[pos] === "'" || content[pos] === "`") {
|
||||
var stop = content[pos];
|
||||
var is_escaped = false;
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
|
||||
// Otherwise, check for indent
|
||||
} else if (content[pos] === '{') {
|
||||
indent += 1;
|
||||
} else if (content[pos] === '}') {
|
||||
indent -= 1;
|
||||
if (indent === 0) {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for array.
|
||||
function extractArrayVariable(content, arrayName) {
|
||||
var splitter = "var " + arrayName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
pos = getNextStep(content, pos, '[');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ']' &&
|
||||
pos + 1 < content.length &&
|
||||
content[pos + 1] === ';') {
|
||||
return content.slice(start, pos + 2);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for variable.
|
||||
function extractVariable(content, varName) {
|
||||
var splitter = "var " + varName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ';') {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function loadContent(content) {
|
||||
var Module = module.constructor;
|
||||
var m = new Module();
|
||||
m._compile(content, "tmp.js");
|
||||
m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
|
||||
content.startsWith("// ignore-order\n");
|
||||
m.exports.exact_check = content.indexOf("\n// exact-check\n") !== -1 ||
|
||||
content.startsWith("// exact-check\n");
|
||||
m.exports.should_fail = content.indexOf("\n// should-fail\n") !== -1 ||
|
||||
content.startsWith("// should-fail\n");
|
||||
return m.exports;
|
||||
}
|
||||
|
||||
function readFile(filePath) {
|
||||
return fs.readFileSync(filePath, 'utf8');
|
||||
}
|
||||
|
||||
function loadThings(thingsToLoad, kindOfLoad, funcToCall, fileContent) {
|
||||
var content = '';
|
||||
for (var i = 0; i < thingsToLoad.length; ++i) {
|
||||
var tmp = funcToCall(fileContent, thingsToLoad[i]);
|
||||
if (tmp === null) {
|
||||
console.error('unable to find ' + kindOfLoad + ' "' + thingsToLoad[i] + '"');
|
||||
process.exit(1);
|
||||
}
|
||||
content += tmp;
|
||||
content += 'exports.' + thingsToLoad[i] + ' = ' + thingsToLoad[i] + ';';
|
||||
}
|
||||
return content;
|
||||
}
|
||||
|
||||
function lookForEntry(entry, data) {
|
||||
for (var i = 0; i < data.length; ++i) {
|
||||
var allGood = true;
|
||||
for (var key in entry) {
|
||||
if (!entry.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
var value = data[i][key];
|
||||
// To make our life easier, if there is a "parent" type, we add it to the path.
|
||||
if (key === 'path' && data[i]['parent'] !== undefined) {
|
||||
if (value.length > 0) {
|
||||
value += '::' + data[i]['parent']['name'];
|
||||
} else {
|
||||
value = data[i]['parent']['name'];
|
||||
}
|
||||
}
|
||||
if (value !== entry[key]) {
|
||||
allGood = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (allGood === true) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function findFile(dir, name, extension) {
|
||||
var entries = fs.readdirSync(dir);
|
||||
var matches = [];
|
||||
for (var i = 0; i < entries.length; ++i) {
|
||||
var entry = entries[i];
|
||||
var file_type = fs.statSync(dir + entry);
|
||||
@@ -227,10 +13,28 @@ function findFile(dir, name, extension) {
            continue;
        }
        if (entry.startsWith(name) && entry.endsWith(extension)) {
            return entry;
            var version = entry.slice(name.length, entry.length - extension.length);
            version = version.split(".").map(function(x) {
                return parseInt(x);
            });
            var total = 0;
            var mult = 1;
            for (var j = version.length - 1; j >= 0; --j) {
                total += version[j] * mult;
                mult *= 1000;
            }
            matches.push([entry, total]);
        }
    }
    return null;
    if (matches.length === 0) {
        return null;
    }
    // We make a reverse sort to have the "highest" file. Very useful in case you didn't clean up
    // you std doc folder...
    matches.sort(function(a, b) {
        return b[1] - a[1];
    });
    return matches[0][0];
}

function readFileMatching(dir, name, extension) {
@@ -241,7 +45,7 @@ function readFileMatching(dir, name, extension) {
    if (f === null) {
        return "";
    }
    return readFile(dir + f);
    return tools.readFile(dir + f);
}

function main(argv) {
@ -253,88 +57,16 @@ function main(argv) {
|
||||
var test_folder = argv[3];
|
||||
|
||||
var mainJs = readFileMatching(std_docs, "main", ".js");
|
||||
var ALIASES = readFileMatching(std_docs, "aliases", ".js");
|
||||
var aliases = readFileMatching(std_docs, "aliases", ".js");
|
||||
var searchIndex = readFileMatching(std_docs, "search-index", ".js").split("\n");
|
||||
if (searchIndex[searchIndex.length - 1].length === 0) {
|
||||
searchIndex.pop();
|
||||
}
|
||||
searchIndex.pop();
|
||||
searchIndex = loadContent(searchIndex.join("\n") + '\nexports.searchIndex = searchIndex;');
|
||||
finalJS = "";
|
||||
|
||||
var arraysToLoad = ["itemTypes"];
|
||||
var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
|
||||
"GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
|
||||
"TY_PRIMITIVE", "TY_KEYWORD",
|
||||
"levenshtein_row2"];
|
||||
// execQuery first parameter is built in getQuery (which takes in the search input).
|
||||
// execQuery last parameter is built in buildIndex.
|
||||
// buildIndex requires the hashmap from search-index.
|
||||
var functionsToLoad = ["buildHrefAndPath", "pathSplitter", "levenshtein", "validateResult",
|
||||
"getQuery", "buildIndex", "execQuery", "execSearch"];
|
||||
|
||||
finalJS += 'window = { "currentCrate": "std" };\n';
|
||||
finalJS += 'var rootPath = "../";\n';
|
||||
finalJS += ALIASES;
|
||||
finalJS += loadThings(arraysToLoad, 'array', extractArrayVariable, mainJs);
|
||||
finalJS += loadThings(variablesToLoad, 'variable', extractVariable, mainJs);
|
||||
finalJS += loadThings(functionsToLoad, 'function', extractFunction, mainJs);
|
||||
|
||||
var loaded = loadContent(finalJS);
|
||||
var index = loaded.buildIndex(searchIndex.searchIndex);
|
||||
var [loaded, index] = tools.loadMainJsAndIndex(mainJs, aliases, searchIndex, "std");
|
||||
|
||||
var errors = 0;
|
||||
|
||||
fs.readdirSync(test_folder).forEach(function(file) {
|
||||
var loadedFile = loadContent(readFile(path.join(test_folder, file)) +
|
||||
'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
|
||||
const expected = loadedFile.EXPECTED;
|
||||
const query = loadedFile.QUERY;
|
||||
const filter_crate = loadedFile.FILTER_CRATE;
|
||||
const ignore_order = loadedFile.ignore_order;
|
||||
const exact_check = loadedFile.exact_check;
|
||||
const should_fail = loadedFile.should_fail;
|
||||
var results = loaded.execSearch(loaded.getQuery(query), index);
|
||||
process.stdout.write('Checking "' + file + '" ... ');
|
||||
var error_text = [];
|
||||
for (var key in expected) {
|
||||
if (!expected.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
if (!results.hasOwnProperty(key)) {
|
||||
error_text.push('==> Unknown key "' + key + '"');
|
||||
break;
|
||||
}
|
||||
var entry = expected[key];
|
||||
var prev_pos = -1;
|
||||
for (var i = 0; i < entry.length; ++i) {
|
||||
var entry_pos = lookForEntry(entry[i], results[key]);
|
||||
if (entry_pos === null) {
|
||||
error_text.push("==> Result not found in '" + key + "': '" +
|
||||
JSON.stringify(entry[i]) + "'");
|
||||
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
|
||||
error_text.push("==> Exact check failed at position " + (prev_pos + 1) + ": " +
|
||||
"expected '" + JSON.stringify(entry[i]) + "' but found '" +
|
||||
JSON.stringify(results[key][i]) + "'");
|
||||
} else if (ignore_order === false && entry_pos < prev_pos) {
|
||||
error_text.push("==> '" + JSON.stringify(entry[i]) + "' was supposed to be " +
|
||||
" before '" + JSON.stringify(results[key][entry_pos]) + "'");
|
||||
} else {
|
||||
prev_pos = entry_pos;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (error_text.length === 0 && should_fail === true) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error("==> Test was supposed to fail but all items were found...");
|
||||
} else if (error_text.length !== 0 && should_fail === false) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error(error_text.join("\n"));
|
||||
} else {
|
||||
console.log("OK");
|
||||
}
|
||||
errors += tools.runChecks(path.join(test_folder, file), loaded, index);
|
||||
});
|
||||
return errors > 0 ? 1 : 0;
|
||||
}
|
||||
|
@@ -1,255 +1,13 @@
const fs = require('fs');
const path = require('path');
const { spawnSync } = require('child_process');

function getNextStep(content, pos, stop) {
|
||||
while (pos < content.length && content[pos] !== stop &&
|
||||
(content[pos] === ' ' || content[pos] === '\t' || content[pos] === '\n')) {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
return null;
|
||||
}
|
||||
if (content[pos] !== stop) {
|
||||
return pos * -1;
|
||||
}
|
||||
return pos;
|
||||
}
|
||||
|
||||
// Stupid function extractor based on indent. Doesn't support block
|
||||
// comments. If someone puts a ' or an " in a block comment this
|
||||
// will blow up. Template strings are not tested and might also be
|
||||
// broken.
|
||||
function extractFunction(content, functionName) {
|
||||
var indent = 0;
|
||||
var splitter = "function " + functionName + "(";
|
||||
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = start;
|
||||
while (pos < content.length && content[pos] !== ')') {
|
||||
pos += 1;
|
||||
}
|
||||
if (pos >= content.length) {
|
||||
break;
|
||||
}
|
||||
pos = getNextStep(content, pos + 1, '{');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
// Eat single-line comments
|
||||
if (content[pos] === '/' && pos > 0 && content[pos-1] === '/') {
|
||||
do {
|
||||
pos += 1;
|
||||
} while (pos < content.length && content[pos] !== '\n');
|
||||
|
||||
// Eat quoted strings
|
||||
} else if (content[pos] === '"' || content[pos] === "'" || content[pos] === "`") {
|
||||
var stop = content[pos];
|
||||
var is_escaped = false;
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
|
||||
// Otherwise, check for indent
|
||||
} else if (content[pos] === '{') {
|
||||
indent += 1;
|
||||
} else if (content[pos] === '}') {
|
||||
indent -= 1;
|
||||
if (indent === 0) {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for array.
|
||||
function extractArrayVariable(content, arrayName) {
|
||||
var splitter = "var " + arrayName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
pos = getNextStep(content, pos, '[');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ']' &&
|
||||
pos + 1 < content.length &&
|
||||
content[pos + 1] === ';') {
|
||||
return content.slice(start, pos + 2);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Stupid function extractor for variable.
|
||||
function extractVariable(content, varName) {
|
||||
var splitter = "var " + varName;
|
||||
while (true) {
|
||||
var start = content.indexOf(splitter);
|
||||
if (start === -1) {
|
||||
break;
|
||||
}
|
||||
var pos = getNextStep(content, start, '=');
|
||||
if (pos === null) {
|
||||
break;
|
||||
} else if (pos < 0) {
|
||||
content = content.slice(-pos);
|
||||
continue;
|
||||
}
|
||||
while (pos < content.length) {
|
||||
if (content[pos] === '"' || content[pos] === "'") {
|
||||
var stop = content[pos];
|
||||
do {
|
||||
if (content[pos] === '\\') {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
} while (pos < content.length &&
|
||||
(content[pos] !== stop || content[pos - 1] === '\\'));
|
||||
} else if (content[pos] === ';') {
|
||||
return content.slice(start, pos + 1);
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
content = content.slice(start + 1);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function loadContent(content) {
|
||||
var Module = module.constructor;
|
||||
var m = new Module();
|
||||
m._compile(content, "tmp.js");
|
||||
m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
|
||||
content.startsWith("// ignore-order\n");
|
||||
m.exports.exact_check = content.indexOf("\n// exact-check\n") !== -1 ||
|
||||
content.startsWith("// exact-check\n");
|
||||
m.exports.should_fail = content.indexOf("\n// should-fail\n") !== -1 ||
|
||||
content.startsWith("// should-fail\n");
|
||||
return m.exports;
|
||||
}
|
||||
|
||||
function readFile(filePath) {
|
||||
return fs.readFileSync(filePath, 'utf8');
|
||||
}
|
||||
|
||||
function loadThings(thingsToLoad, kindOfLoad, funcToCall, fileContent) {
|
||||
var content = '';
|
||||
for (var i = 0; i < thingsToLoad.length; ++i) {
|
||||
var tmp = funcToCall(fileContent, thingsToLoad[i]);
|
||||
if (tmp === null) {
|
||||
console.error('unable to find ' + kindOfLoad + ' "' + thingsToLoad[i] + '"');
|
||||
process.exit(1);
|
||||
}
|
||||
content += tmp;
|
||||
content += 'exports.' + thingsToLoad[i] + ' = ' + thingsToLoad[i] + ';';
|
||||
}
|
||||
return content;
|
||||
}
|
||||
|
||||
function lookForEntry(entry, data) {
|
||||
for (var i = 0; i < data.length; ++i) {
|
||||
var allGood = true;
|
||||
for (var key in entry) {
|
||||
if (!entry.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
var value = data[i][key];
|
||||
// To make our life easier, if there is a "parent" type, we add it to the path.
|
||||
if (key === 'path' && data[i]['parent'] !== undefined) {
|
||||
if (value.length > 0) {
|
||||
value += '::' + data[i]['parent']['name'];
|
||||
} else {
|
||||
value = data[i]['parent']['name'];
|
||||
}
|
||||
}
|
||||
if (value !== entry[key]) {
|
||||
allGood = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (allGood === true) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
const tools = require('../rustdoc-js-common/lib.js');
|
||||
|
||||
function load_files(out_folder, crate) {
|
||||
var mainJs = readFile(out_folder + "/main.js");
|
||||
var ALIASES = readFile(out_folder + "/aliases.js");
|
||||
var searchIndex = readFile(out_folder + "/search-index.js").split("\n");
|
||||
if (searchIndex[searchIndex.length - 1].length === 0) {
|
||||
searchIndex.pop();
|
||||
}
|
||||
searchIndex.pop();
|
||||
searchIndex = loadContent(searchIndex.join("\n") + '\nexports.searchIndex = searchIndex;');
|
||||
finalJS = "";
|
||||
var mainJs = tools.readFile(out_folder + "/main.js");
|
||||
var aliases = tools.readFile(out_folder + "/aliases.js");
|
||||
var searchIndex = tools.readFile(out_folder + "/search-index.js").split("\n");
|
||||
|
||||
var arraysToLoad = ["itemTypes"];
|
||||
var variablesToLoad = ["MAX_LEV_DISTANCE", "MAX_RESULTS", "NO_TYPE_FILTER",
|
||||
"GENERICS_DATA", "NAME", "INPUTS_DATA", "OUTPUT_DATA",
|
||||
"TY_PRIMITIVE", "TY_KEYWORD",
|
||||
"levenshtein_row2"];
|
||||
// execQuery first parameter is built in getQuery (which takes in the search input).
|
||||
// execQuery last parameter is built in buildIndex.
|
||||
// buildIndex requires the hashmap from search-index.
|
||||
var functionsToLoad = ["buildHrefAndPath", "pathSplitter", "levenshtein", "validateResult",
|
||||
"getQuery", "buildIndex", "execQuery", "execSearch"];
|
||||
|
||||
finalJS += 'window = { "currentCrate": "' + crate + '" };\n';
|
||||
finalJS += 'var rootPath = "../";\n';
|
||||
finalJS += ALIASES;
|
||||
finalJS += loadThings(arraysToLoad, 'array', extractArrayVariable, mainJs);
|
||||
finalJS += loadThings(variablesToLoad, 'variable', extractVariable, mainJs);
|
||||
finalJS += loadThings(functionsToLoad, 'function', extractFunction, mainJs);
|
||||
|
||||
var loaded = loadContent(finalJS);
|
||||
return [loaded, loaded.buildIndex(searchIndex.searchIndex)];
|
||||
return tools.loadMainJsAndIndex(mainJs, aliases, searchIndex, crate);
|
||||
}
|
||||
|
||||
function main(argv) {
|
||||
@ -279,54 +37,7 @@ function main(argv) {
|
||||
const test_out_folder = out_folder + test_name;
|
||||
|
||||
var [loaded, index] = load_files(test_out_folder, test_name);
|
||||
var loadedFile = loadContent(readFile(test_file) +
|
||||
'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
|
||||
const expected = loadedFile.EXPECTED;
|
||||
const query = loadedFile.QUERY;
|
||||
const filter_crate = loadedFile.FILTER_CRATE;
|
||||
const ignore_order = loadedFile.ignore_order;
|
||||
const exact_check = loadedFile.exact_check;
|
||||
const should_fail = loadedFile.should_fail;
|
||||
var results = loaded.execSearch(loaded.getQuery(query), index);
|
||||
var error_text = [];
|
||||
for (var key in expected) {
|
||||
if (!expected.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
if (!results.hasOwnProperty(key)) {
|
||||
error_text.push('==> Unknown key "' + key + '"');
|
||||
break;
|
||||
}
|
||||
var entry = expected[key];
|
||||
var prev_pos = -1;
|
||||
for (var i = 0; i < entry.length; ++i) {
|
||||
var entry_pos = lookForEntry(entry[i], results[key]);
|
||||
if (entry_pos === null) {
|
||||
error_text.push("==> Result not found in '" + key + "': '" +
|
||||
JSON.stringify(entry[i]) + "'");
|
||||
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
|
||||
error_text.push("==> Exact check failed at position " + (prev_pos + 1) + ": " +
|
||||
"expected '" + JSON.stringify(entry[i]) + "' but found '" +
|
||||
JSON.stringify(results[key][i]) + "'");
|
||||
} else if (ignore_order === false && entry_pos < prev_pos) {
|
||||
error_text.push("==> '" + JSON.stringify(entry[i]) + "' was supposed to be " +
|
||||
" before '" + JSON.stringify(results[key][entry_pos]) + "'");
|
||||
} else {
|
||||
prev_pos = entry_pos;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (error_text.length === 0 && should_fail === true) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error("==> Test was supposed to fail but all items were found...");
|
||||
} else if (error_text.length !== 0 && should_fail === false) {
|
||||
errors += 1;
|
||||
console.error("FAILED");
|
||||
console.error(error_text.join("\n"));
|
||||
} else {
|
||||
console.log("OK");
|
||||
}
|
||||
errors += tools.runChecks(test_file, loaded, index);
|
||||
}
|
||||
return errors > 0 ? 1 : 0;
|
||||
}
|
||||
|