2013-09-18 22:18:38 -07:00
|
|
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
//! Rustdoc's HTML Rendering module
|
|
|
|
//!
|
|
|
|
//! This modules contains the bulk of the logic necessary for rendering a
|
|
|
|
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
|
|
|
|
//! rendering process is largely driven by the `format!` syntax extension to
|
|
|
|
//! perform all I/O into files and streams.
|
|
|
|
//!
|
|
|
|
//! The rendering process is largely driven by the `Context` and `Cache`
|
|
|
|
//! structures. The cache is pre-populated by crawling the crate in question,
|
|
|
|
//! and then it is shared among the various rendering tasks. The cache is meant
|
|
|
|
//! to be a fairly large structure not implementing `Clone` (because it's shared
|
|
|
|
//! among tasks). The context, however, should be a lightweight structure. This
|
|
|
|
//! is cloned per-task and contains information about what is currently being
|
|
|
|
//! rendered.
|
|
|
|
//!
|
|
|
|
//! In order to speed up rendering (mostly because of markdown rendering), the
|
|
|
|
//! rendering process has been parallelized. This parallelization is only
|
|
|
|
//! exposed through the `crate` method on the context, and then also from the
|
|
|
|
//! fact that the shared cache is stored in TLS (and must be accessed as such).
|
|
|
|
//!
|
|
|
|
//! In addition to rendering the crate itself, this module is also responsible
|
|
|
|
//! for creating the corresponding search index and source file renderings.
|
|
|
|
//! These tasks are not parallelized (they haven't been a bottleneck yet), and
|
|
|
|
//! both occur before the crate is rendered.
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
use std::fmt;
|
2013-09-27 15:12:23 -07:00
|
|
|
use std::hashmap::{HashMap, HashSet};
|
2013-09-18 22:18:38 -07:00
|
|
|
use std::local_data;
|
2013-11-10 22:46:32 -08:00
|
|
|
use std::io::buffered::BufferedWriter;
|
|
|
|
use std::io;
|
|
|
|
use std::io::fs;
|
|
|
|
use std::io::File;
|
2013-09-27 15:12:23 -07:00
|
|
|
use std::str;
|
2013-09-18 22:18:38 -07:00
|
|
|
use std::vec;
|
|
|
|
|
2013-12-05 18:19:06 -08:00
|
|
|
use extra::arc::Arc;
|
2013-09-18 22:18:38 -07:00
|
|
|
use extra::json::ToJson;
|
|
|
|
use syntax::ast;
|
2013-09-26 12:53:06 -07:00
|
|
|
use syntax::attr;
|
2013-09-18 22:18:38 -07:00
|
|
|
|
|
|
|
use clean;
|
|
|
|
use doctree;
|
|
|
|
use fold::DocFolder;
|
2013-09-27 15:12:23 -07:00
|
|
|
use html::escape::Escape;
|
2013-09-23 20:38:17 -07:00
|
|
|
use html::format::{VisSpace, Method, PuritySpace};
|
2013-09-18 22:18:38 -07:00
|
|
|
use html::layout;
|
|
|
|
use html::markdown::Markdown;
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Major driving force in all rustdoc rendering. This contains information
|
|
|
|
/// about where in the tree-like hierarchy rendering is occurring and controls
|
|
|
|
/// how the current page is being rendered.
|
|
|
|
///
|
|
|
|
/// It is intended that this context is a lightweight object which can be fairly
|
|
|
|
/// easily cloned because it is cloned per work-job (about once per item in the
|
|
|
|
/// rustdoc tree).
|
2013-09-18 22:18:38 -07:00
|
|
|
#[deriving(Clone)]
|
|
|
|
pub struct Context {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Current hierarchy of components leading down to what's currently being
|
|
|
|
/// rendered
|
2013-09-18 22:18:38 -07:00
|
|
|
current: ~[~str],
|
2013-10-03 10:24:40 -07:00
|
|
|
/// String representation of how to get back to the root path of the 'doc/'
|
|
|
|
/// folder in terms of a relative URL.
|
2013-09-18 22:18:38 -07:00
|
|
|
root_path: ~str,
|
2013-10-03 10:24:40 -07:00
|
|
|
/// The current destination folder of where HTML artifacts should be placed.
|
|
|
|
/// This changes as the context descends into the module hierarchy.
|
2013-09-18 22:18:38 -07:00
|
|
|
dst: Path,
|
2013-10-03 10:24:40 -07:00
|
|
|
/// This describes the layout of each page, and is not modified after
|
|
|
|
/// creation of the context (contains info like the favicon)
|
2013-09-18 22:18:38 -07:00
|
|
|
layout: layout::Layout,
|
2013-10-03 10:24:40 -07:00
|
|
|
/// This map is a list of what should be displayed on the sidebar of the
|
|
|
|
/// current page. The key is the section header (traits, modules,
|
|
|
|
/// functions), and the value is the list of containers belonging to this
|
|
|
|
/// header. This map will change depending on the surrounding context of the
|
|
|
|
/// page.
|
2013-09-18 22:18:38 -07:00
|
|
|
sidebar: HashMap<~str, ~[~str]>,
|
2013-10-03 10:24:40 -07:00
|
|
|
/// This flag indicates whether [src] links should be generated or not. If
|
|
|
|
/// the source files are present in the html rendering, then this will be
|
|
|
|
/// `true`.
|
2013-09-27 15:12:23 -07:00
|
|
|
include_sources: bool,
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Indicates where an external crate can be found.
|
2013-10-02 15:39:32 -07:00
|
|
|
pub enum ExternalLocation {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Remote URL root of the external crate
|
|
|
|
Remote(~str),
|
|
|
|
/// This external crate can be found in the local doc/ folder
|
|
|
|
Local,
|
|
|
|
/// The external crate could not be found.
|
|
|
|
Unknown,
|
2013-10-02 15:39:32 -07:00
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Different ways an implementor of a trait can be rendered.
|
2013-09-18 22:18:38 -07:00
|
|
|
enum Implementor {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Paths are displayed specially by omitting the `impl XX for` cruft
|
2013-09-18 22:18:38 -07:00
|
|
|
PathType(clean::Type),
|
2013-10-03 10:24:40 -07:00
|
|
|
/// This is the generic representation of an trait implementor, used for
|
|
|
|
/// primitive types and otherwise non-path types.
|
2013-09-18 22:18:38 -07:00
|
|
|
OtherType(clean::Generics, /* trait */ clean::Type, /* for */ clean::Type),
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// This cache is used to store information about the `clean::Crate` being
|
|
|
|
/// rendered in order to provide more useful documentation. This contains
|
|
|
|
/// information like all implementors of a trait, all traits a type implements,
|
|
|
|
/// documentation for all known traits, etc.
|
|
|
|
///
|
|
|
|
/// This structure purposefully does not implement `Clone` because it's intended
|
|
|
|
/// to be a fairly large and expensive structure to clone. Instead this adheres
|
2013-12-05 18:19:06 -08:00
|
|
|
/// to both `Send` and `Freeze` so it may be stored in a `Arc` instance and
|
2013-10-03 10:24:40 -07:00
|
|
|
/// shared among the various rendering tasks.
|
2013-10-05 14:44:37 -07:00
|
|
|
pub struct Cache {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Mapping of typaram ids to the name of the type parameter. This is used
|
|
|
|
/// when pretty-printing a type (so pretty printing doesn't have to
|
|
|
|
/// painfully maintain a context like this)
|
2013-09-18 22:18:38 -07:00
|
|
|
typarams: HashMap<ast::NodeId, ~str>,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// Maps a type id to all known implementations for that type. This is only
|
|
|
|
/// recognized for intra-crate `ResolvedPath` types, and is used to print
|
|
|
|
/// out extra documentation on the page of an enum/struct.
|
|
|
|
///
|
|
|
|
/// The values of the map are a list of implementations and documentation
|
|
|
|
/// found on that implementation.
|
2013-09-30 17:04:14 -07:00
|
|
|
impls: HashMap<ast::NodeId, ~[(clean::Impl, Option<~str>)]>,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// Maintains a mapping of local crate node ids to the fully qualified name
|
|
|
|
/// and "short type description" of that node. This is used when generating
|
|
|
|
/// URLs when a type is being linked to. External paths are not located in
|
|
|
|
/// this map because the `External` type itself has all the information
|
|
|
|
/// necessary.
|
2013-09-18 22:18:38 -07:00
|
|
|
paths: HashMap<ast::NodeId, (~[~str], &'static str)>,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// This map contains information about all known traits of this crate.
|
|
|
|
/// Implementations of a crate should inherit the documentation of the
|
2013-10-21 11:33:04 -07:00
|
|
|
/// parent trait if no extra documentation is specified, and default methods
|
|
|
|
/// should show up in documentation about trait implementations.
|
|
|
|
traits: HashMap<ast::NodeId, clean::Trait>,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// When rendering traits, it's often useful to be able to list all
|
|
|
|
/// implementors of the trait, and this mapping is exactly, that: a mapping
|
|
|
|
/// of trait ids to the list of known implementors of the trait
|
2013-09-18 22:18:38 -07:00
|
|
|
implementors: HashMap<ast::NodeId, ~[Implementor]>,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// Cache of where external crate documentation can be found.
|
2013-10-02 15:39:32 -07:00
|
|
|
extern_locations: HashMap<ast::CrateNum, ExternalLocation>,
|
2013-09-18 22:18:38 -07:00
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
// Private fields only used when initially crawling a crate to build a cache
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
priv stack: ~[~str],
|
|
|
|
priv parent_stack: ~[ast::NodeId],
|
|
|
|
priv search_index: ~[IndexItem],
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Helper struct to render all source code to HTML pages
|
2013-12-09 23:16:18 -08:00
|
|
|
struct SourceCollector<'a> {
|
|
|
|
cx: &'a mut Context,
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
/// Processed source-file paths
|
2013-09-27 15:12:23 -07:00
|
|
|
seen: HashSet<~str>,
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Root destination to place all HTML output into
|
2013-09-27 15:12:23 -07:00
|
|
|
dst: Path,
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Wrapper struct to render the source code of a file. This will do things like
|
|
|
|
/// adding line numbers to the left-hand side.
|
2013-12-09 23:16:18 -08:00
|
|
|
struct Source<'a>(&'a str);
|
2013-10-03 10:24:40 -07:00
|
|
|
|
|
|
|
// Helper structs for rendering items/sidebars and carrying along contextual
|
|
|
|
// information
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
struct Item<'a> { cx: &'a Context, item: &'a clean::Item, }
|
|
|
|
struct Sidebar<'a> { cx: &'a Context, item: &'a clean::Item, }
|
2013-09-18 22:18:38 -07:00
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Struct representing one entry in the JS search index. These are all emitted
|
|
|
|
/// by hand to a large JS file at the end of cache-creation.
|
2013-09-18 22:18:38 -07:00
|
|
|
struct IndexItem {
|
|
|
|
ty: &'static str,
|
|
|
|
name: ~str,
|
|
|
|
path: ~str,
|
|
|
|
desc: ~str,
|
|
|
|
parent: Option<ast::NodeId>,
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
// TLS keys used to carry information around during rendering.
|
2013-09-27 15:12:23 -07:00
|
|
|
|
2013-12-05 18:19:06 -08:00
|
|
|
local_data_key!(pub cache_key: Arc<Cache>)
|
2013-09-18 22:18:38 -07:00
|
|
|
local_data_key!(pub current_location_key: ~[~str])
|
|
|
|
|
|
|
|
/// Generates the documentation for `crate` into the directory `dst`
|
|
|
|
pub fn run(mut crate: clean::Crate, dst: Path) {
|
|
|
|
let mut cx = Context {
|
|
|
|
dst: dst,
|
|
|
|
current: ~[],
|
|
|
|
root_path: ~"",
|
|
|
|
sidebar: HashMap::new(),
|
|
|
|
layout: layout::Layout {
|
|
|
|
logo: ~"",
|
|
|
|
favicon: ~"",
|
|
|
|
crate: crate.name.clone(),
|
|
|
|
},
|
2013-09-27 15:12:23 -07:00
|
|
|
include_sources: true,
|
2013-09-18 22:18:38 -07:00
|
|
|
};
|
|
|
|
mkdir(&cx.dst);
|
|
|
|
|
2013-10-12 14:58:37 -07:00
|
|
|
match crate.module.as_ref().map(|m| m.doc_list().unwrap_or(&[])) {
|
2013-09-18 22:18:38 -07:00
|
|
|
Some(attrs) => {
|
|
|
|
for attr in attrs.iter() {
|
|
|
|
match *attr {
|
|
|
|
clean::NameValue(~"html_favicon_url", ref s) => {
|
|
|
|
cx.layout.favicon = s.to_owned();
|
|
|
|
}
|
|
|
|
clean::NameValue(~"html_logo_url", ref s) => {
|
|
|
|
cx.layout.logo = s.to_owned();
|
|
|
|
}
|
2013-09-27 15:12:23 -07:00
|
|
|
clean::Word(~"html_no_source") => {
|
|
|
|
cx.include_sources = false;
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Crawl the crate to build various caches used for the output
|
|
|
|
let mut cache = Cache {
|
|
|
|
impls: HashMap::new(),
|
|
|
|
typarams: HashMap::new(),
|
|
|
|
paths: HashMap::new(),
|
|
|
|
traits: HashMap::new(),
|
|
|
|
implementors: HashMap::new(),
|
|
|
|
stack: ~[],
|
|
|
|
parent_stack: ~[],
|
|
|
|
search_index: ~[],
|
2013-10-02 15:39:32 -07:00
|
|
|
extern_locations: HashMap::new(),
|
2013-09-18 22:18:38 -07:00
|
|
|
};
|
|
|
|
cache.stack.push(crate.name.clone());
|
|
|
|
crate = cache.fold_crate(crate);
|
|
|
|
|
|
|
|
// Add all the static files
|
2013-10-05 19:49:32 -07:00
|
|
|
let mut dst = cx.dst.join(crate.name.as_slice());
|
2013-09-22 20:09:42 -07:00
|
|
|
mkdir(&dst);
|
2013-10-05 19:49:32 -07:00
|
|
|
write(dst.join("jquery.js"), include_str!("static/jquery-2.0.3.min.js"));
|
|
|
|
write(dst.join("main.js"), include_str!("static/main.js"));
|
|
|
|
write(dst.join("main.css"), include_str!("static/main.css"));
|
|
|
|
write(dst.join("normalize.css"), include_str!("static/normalize.css"));
|
2013-09-18 22:18:38 -07:00
|
|
|
|
2013-10-02 15:39:32 -07:00
|
|
|
// Publish the search index
|
2013-09-18 22:18:38 -07:00
|
|
|
{
|
2013-10-05 19:49:32 -07:00
|
|
|
dst.push("search-index.js");
|
2013-10-29 23:31:07 -07:00
|
|
|
let mut w = BufferedWriter::new(File::create(&dst).unwrap());
|
2013-10-25 17:04:37 -07:00
|
|
|
let w = &mut w as &mut Writer;
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "var searchIndex = [");
|
|
|
|
for (i, item) in cache.search_index.iter().enumerate() {
|
|
|
|
if i > 0 { write!(w, ","); }
|
|
|
|
write!(w, "\\{ty:\"{}\",name:\"{}\",path:\"{}\",desc:{}",
|
|
|
|
item.ty, item.name, item.path,
|
|
|
|
item.desc.to_json().to_str())
|
|
|
|
match item.parent {
|
|
|
|
Some(id) => { write!(w, ",parent:'{}'", id); }
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
write!(w, "\\}");
|
|
|
|
}
|
|
|
|
write!(w, "];");
|
|
|
|
write!(w, "var allPaths = \\{");
|
|
|
|
for (i, (&id, &(ref fqp, short))) in cache.paths.iter().enumerate() {
|
|
|
|
if i > 0 { write!(w, ","); }
|
|
|
|
write!(w, "'{}':\\{type:'{}',name:'{}'\\}", id, short, *fqp.last());
|
|
|
|
}
|
|
|
|
write!(w, "\\};");
|
|
|
|
w.flush();
|
|
|
|
}
|
|
|
|
|
2013-10-02 15:39:32 -07:00
|
|
|
// Render all source files (this may turn into a giant no-op)
|
2013-09-30 12:58:18 -07:00
|
|
|
{
|
2013-10-21 13:08:31 -07:00
|
|
|
info!("emitting source files");
|
2013-10-05 19:49:32 -07:00
|
|
|
let dst = cx.dst.join("src");
|
2013-09-27 15:12:23 -07:00
|
|
|
mkdir(&dst);
|
2013-10-05 19:49:32 -07:00
|
|
|
let dst = dst.join(crate.name.as_slice());
|
2013-09-27 15:12:23 -07:00
|
|
|
mkdir(&dst);
|
|
|
|
let mut folder = SourceCollector {
|
|
|
|
dst: dst,
|
|
|
|
seen: HashSet::new(),
|
2013-09-30 12:58:18 -07:00
|
|
|
cx: &mut cx,
|
2013-09-27 15:12:23 -07:00
|
|
|
};
|
|
|
|
crate = folder.fold_crate(crate);
|
|
|
|
}
|
|
|
|
|
2013-10-02 15:39:32 -07:00
|
|
|
for (&n, e) in crate.externs.iter() {
|
|
|
|
cache.extern_locations.insert(n, extern_location(e, &cx.dst));
|
|
|
|
}
|
|
|
|
|
|
|
|
// And finally render the whole crate's documentation
|
2013-09-18 22:18:38 -07:00
|
|
|
cx.crate(crate, cache);
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Writes the entire contents of a string to a destination, not attempting to
|
|
|
|
/// catch any errors.
|
2013-09-18 22:18:38 -07:00
|
|
|
fn write(dst: Path, contents: &str) {
|
2013-10-29 23:31:07 -07:00
|
|
|
File::create(&dst).write(contents.as_bytes());
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Makes a directory on the filesystem, failing the task if an error occurs and
|
|
|
|
/// skipping if the directory already exists.
|
2013-09-18 22:18:38 -07:00
|
|
|
fn mkdir(path: &Path) {
|
2013-11-21 15:42:55 -08:00
|
|
|
io::io_error::cond.trap(|err| {
|
2013-10-21 13:08:31 -07:00
|
|
|
error!("Couldn't create directory `{}`: {}",
|
2013-09-26 17:21:59 -07:00
|
|
|
path.display(), err.desc);
|
2013-10-21 13:08:31 -07:00
|
|
|
fail!()
|
2013-11-21 15:42:55 -08:00
|
|
|
}).inside(|| {
|
2013-09-18 22:18:38 -07:00
|
|
|
if !path.is_dir() {
|
2013-10-31 15:15:30 -07:00
|
|
|
fs::mkdir(path, io::UserRWX);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-11-21 15:42:55 -08:00
|
|
|
})
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Takes a path to a source file and cleans the path to it. This canonicalizes
|
2013-09-26 17:21:59 -07:00
|
|
|
/// things like ".." to components which preserve the "top down" hierarchy of a
|
|
|
|
/// static HTML tree.
|
|
|
|
// FIXME (#9639): The closure should deal with &[u8] instead of &str
|
2013-11-19 17:36:32 -08:00
|
|
|
fn clean_srcpath(src: &[u8], f: |&str|) {
|
2013-12-03 19:15:12 -08:00
|
|
|
let p = Path::new(src);
|
2013-09-26 17:21:59 -07:00
|
|
|
if p.as_vec() != bytes!(".") {
|
2013-11-23 11:18:51 +01:00
|
|
|
for c in p.str_components().map(|x|x.unwrap()) {
|
2013-09-26 17:21:59 -07:00
|
|
|
if ".." == c {
|
|
|
|
f("up");
|
|
|
|
} else {
|
|
|
|
f(c.as_slice())
|
|
|
|
}
|
2013-09-27 15:12:23 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Attempts to find where an external crate is located, given that we're
|
|
|
|
/// rendering in to the specified source destination.
|
2013-10-02 15:39:32 -07:00
|
|
|
fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation {
|
|
|
|
// See if there's documentation generated into the local directory
|
2013-10-05 19:49:32 -07:00
|
|
|
let local_location = dst.join(e.name.as_slice());
|
2013-10-02 15:39:32 -07:00
|
|
|
if local_location.is_dir() {
|
|
|
|
return Local;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Failing that, see if there's an attribute specifying where to find this
|
|
|
|
// external crate
|
|
|
|
for attr in e.attrs.iter() {
|
|
|
|
match *attr {
|
|
|
|
clean::List(~"doc", ref list) => {
|
|
|
|
for attr in list.iter() {
|
|
|
|
match *attr {
|
|
|
|
clean::NameValue(~"html_root_url", ref s) => {
|
|
|
|
if s.ends_with("/") {
|
|
|
|
return Remote(s.to_owned());
|
|
|
|
}
|
|
|
|
return Remote(*s + "/");
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Well, at least we tried.
|
|
|
|
return Unknown;
|
|
|
|
}
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
impl<'a> DocFolder for SourceCollector<'a> {
|
2013-09-27 15:12:23 -07:00
|
|
|
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
|
2013-10-03 10:24:40 -07:00
|
|
|
// If we're including source files, and we haven't seen this file yet,
|
|
|
|
// then we need to render it out to the filesystem
|
2013-09-30 12:58:18 -07:00
|
|
|
if self.cx.include_sources && !self.seen.contains(&item.source.filename) {
|
2013-10-03 10:24:40 -07:00
|
|
|
|
2013-09-30 12:58:18 -07:00
|
|
|
// If it turns out that we couldn't read this file, then we probably
|
|
|
|
// can't read any of the files (generating html output from json or
|
|
|
|
// something like that), so just don't include sources for the
|
|
|
|
// entire crate. The other option is maintaining this mapping on a
|
|
|
|
// per-file basis, but that's probably not worth it...
|
|
|
|
self.cx.include_sources = self.emit_source(item.source.filename);
|
2013-09-27 15:12:23 -07:00
|
|
|
self.seen.insert(item.source.filename.clone());
|
2013-09-30 12:58:18 -07:00
|
|
|
|
|
|
|
if !self.cx.include_sources {
|
|
|
|
println!("warning: source code was requested to be rendered, \
|
|
|
|
but `{}` is a missing source file.",
|
|
|
|
item.source.filename);
|
|
|
|
println!(" skipping rendering of source code");
|
|
|
|
}
|
2013-09-27 15:12:23 -07:00
|
|
|
}
|
2013-10-03 10:24:40 -07:00
|
|
|
|
2013-09-27 15:12:23 -07:00
|
|
|
self.fold_item_recur(item)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
impl<'a> SourceCollector<'a> {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Renders the given filename into its corresponding HTML source file.
|
2013-09-30 12:58:18 -07:00
|
|
|
fn emit_source(&mut self, filename: &str) -> bool {
|
2013-12-03 19:15:12 -08:00
|
|
|
let p = Path::new(filename);
|
2013-09-27 15:12:23 -07:00
|
|
|
|
|
|
|
// Read the contents of the file
|
|
|
|
let mut contents = ~[];
|
|
|
|
{
|
|
|
|
let mut buf = [0, ..1024];
|
|
|
|
// If we couldn't open this file, then just returns because it
|
|
|
|
// probably means that it's some standard library macro thing and we
|
|
|
|
// can't have the source to it anyway.
|
2013-10-29 23:31:07 -07:00
|
|
|
let mut r = match io::result(|| File::open(&p)) {
|
2013-10-25 17:04:37 -07:00
|
|
|
Ok(r) => r,
|
2013-09-30 12:58:18 -07:00
|
|
|
// eew macro hacks
|
2013-11-28 12:22:53 -08:00
|
|
|
Err(..) => return filename == "<std-macros>"
|
2013-09-30 12:58:18 -07:00
|
|
|
};
|
2013-09-27 15:12:23 -07:00
|
|
|
|
|
|
|
// read everything
|
|
|
|
loop {
|
|
|
|
match r.read(buf) {
|
|
|
|
Some(n) => contents.push_all(buf.slice_to(n)),
|
|
|
|
None => break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
let contents = str::from_utf8_owned(contents);
|
|
|
|
|
|
|
|
// Create the intermediate directories
|
|
|
|
let mut cur = self.dst.clone();
|
|
|
|
let mut root_path = ~"../../";
|
2013-11-21 15:42:55 -08:00
|
|
|
clean_srcpath(p.dirname(), |component| {
|
2013-10-05 19:49:32 -07:00
|
|
|
cur.push(component);
|
2013-09-27 15:12:23 -07:00
|
|
|
mkdir(&cur);
|
|
|
|
root_path.push_str("../");
|
2013-11-21 15:42:55 -08:00
|
|
|
});
|
2013-09-27 15:12:23 -07:00
|
|
|
|
2013-09-26 17:21:59 -07:00
|
|
|
cur.push(p.filename().expect("source has no filename") + bytes!(".html"));
|
2013-10-29 23:31:07 -07:00
|
|
|
let mut w = BufferedWriter::new(File::create(&cur).unwrap());
|
2013-09-27 15:12:23 -07:00
|
|
|
|
2013-10-06 18:51:49 -07:00
|
|
|
let title = cur.filename_display().with_str(|s| format!("{} -- source", s));
|
2013-09-27 15:12:23 -07:00
|
|
|
let page = layout::Page {
|
|
|
|
title: title,
|
|
|
|
ty: "source",
|
|
|
|
root_path: root_path,
|
|
|
|
};
|
2013-10-25 17:04:37 -07:00
|
|
|
layout::render(&mut w as &mut Writer, &self.cx.layout,
|
2013-09-27 15:12:23 -07:00
|
|
|
&page, &(""), &Source(contents.as_slice()));
|
2013-09-30 16:08:12 -07:00
|
|
|
w.flush();
|
2013-09-30 12:58:18 -07:00
|
|
|
return true;
|
2013-09-27 15:12:23 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl DocFolder for Cache {
|
2013-09-18 22:18:38 -07:00
|
|
|
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
|
|
|
|
// Register any generics to their corresponding string. This is used
|
|
|
|
// when pretty-printing types
|
|
|
|
match item.inner {
|
2013-11-21 13:17:46 -08:00
|
|
|
clean::StructItem(ref s) => self.generics(&s.generics),
|
|
|
|
clean::EnumItem(ref e) => self.generics(&e.generics),
|
|
|
|
clean::FunctionItem(ref f) => self.generics(&f.generics),
|
|
|
|
clean::TypedefItem(ref t) => self.generics(&t.generics),
|
|
|
|
clean::TraitItem(ref t) => self.generics(&t.generics),
|
|
|
|
clean::ImplItem(ref i) => self.generics(&i.generics),
|
|
|
|
clean::TyMethodItem(ref i) => self.generics(&i.generics),
|
|
|
|
clean::MethodItem(ref i) => self.generics(&i.generics),
|
|
|
|
clean::ForeignFunctionItem(ref f) => self.generics(&f.generics),
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Propagate a trait methods' documentation to all implementors of the
|
|
|
|
// trait
|
|
|
|
match item.inner {
|
|
|
|
clean::TraitItem(ref t) => {
|
2013-10-21 11:33:04 -07:00
|
|
|
self.traits.insert(item.id, t.clone());
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Collect all the implementors of traits.
|
|
|
|
match item.inner {
|
|
|
|
clean::ImplItem(ref i) => {
|
|
|
|
match i.trait_ {
|
2013-11-28 12:22:53 -08:00
|
|
|
Some(clean::ResolvedPath{ id, .. }) => {
|
2013-11-21 15:42:55 -08:00
|
|
|
let v = self.implementors.find_or_insert_with(id, |_|{
|
2013-09-18 22:18:38 -07:00
|
|
|
~[]
|
2013-11-21 15:42:55 -08:00
|
|
|
});
|
2013-09-18 22:18:38 -07:00
|
|
|
match i.for_ {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ResolvedPath{..} => {
|
2013-09-18 22:18:38 -07:00
|
|
|
v.unshift(PathType(i.for_.clone()));
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
v.push(OtherType(i.generics.clone(),
|
|
|
|
i.trait_.get_ref().clone(),
|
|
|
|
i.for_.clone()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2013-11-28 12:22:53 -08:00
|
|
|
Some(..) | None => {}
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Index this method for searching later on
|
|
|
|
match item.name {
|
|
|
|
Some(ref s) => {
|
|
|
|
let parent = match item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::TyMethodItem(..) |
|
|
|
|
clean::StructFieldItem(..) |
|
|
|
|
clean::VariantItem(..) => {
|
2013-09-18 22:18:38 -07:00
|
|
|
Some((Some(*self.parent_stack.last()),
|
|
|
|
self.stack.slice_to(self.stack.len() - 1)))
|
|
|
|
|
|
|
|
}
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::MethodItem(..) => {
|
2013-09-18 22:18:38 -07:00
|
|
|
if self.parent_stack.len() == 0 {
|
|
|
|
None
|
|
|
|
} else {
|
2013-09-27 10:45:09 -07:00
|
|
|
let last = self.parent_stack.last();
|
|
|
|
let amt = match self.paths.find(last) {
|
|
|
|
Some(&(_, "trait")) => self.stack.len() - 1,
|
2013-11-28 12:22:53 -08:00
|
|
|
Some(..) | None => self.stack.len(),
|
2013-09-27 10:45:09 -07:00
|
|
|
};
|
|
|
|
Some((Some(*last), self.stack.slice_to(amt)))
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => Some((None, self.stack.as_slice()))
|
|
|
|
};
|
|
|
|
match parent {
|
|
|
|
Some((parent, path)) => {
|
|
|
|
self.search_index.push(IndexItem {
|
|
|
|
ty: shortty(&item),
|
|
|
|
name: s.to_owned(),
|
|
|
|
path: path.connect("::"),
|
|
|
|
desc: shorter(item.doc_value()).to_owned(),
|
|
|
|
parent: parent,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Keep track of the fully qualified path for this item.
|
|
|
|
let pushed = if item.name.is_some() {
|
|
|
|
let n = item.name.get_ref();
|
|
|
|
if n.len() > 0 {
|
|
|
|
self.stack.push(n.to_owned());
|
|
|
|
true
|
|
|
|
} else { false }
|
|
|
|
} else { false };
|
|
|
|
match item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::StructItem(..) | clean::EnumItem(..) |
|
|
|
|
clean::TypedefItem(..) | clean::TraitItem(..) |
|
|
|
|
clean::FunctionItem(..) | clean::ModuleItem(..) |
|
|
|
|
clean::ForeignFunctionItem(..) | clean::VariantItem(..) => {
|
2013-09-18 22:18:38 -07:00
|
|
|
self.paths.insert(item.id, (self.stack.clone(), shortty(&item)));
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Maintain the parent stack
|
|
|
|
let parent_pushed = match item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::TraitItem(..) | clean::EnumItem(..) | clean::StructItem(..) => {
|
2013-09-18 22:18:38 -07:00
|
|
|
self.parent_stack.push(item.id); true
|
|
|
|
}
|
|
|
|
clean::ImplItem(ref i) => {
|
|
|
|
match i.for_ {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ResolvedPath{ id, .. } => {
|
2013-10-02 15:39:32 -07:00
|
|
|
self.parent_stack.push(id); true
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
_ => false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => false
|
|
|
|
};
|
|
|
|
|
|
|
|
// Once we've recursively found all the generics, then hoard off all the
|
|
|
|
// implementations elsewhere
|
|
|
|
let ret = match self.fold_item_recur(item) {
|
|
|
|
Some(item) => {
|
2013-09-30 17:04:14 -07:00
|
|
|
match item {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::Item{ attrs, inner: clean::ImplItem(i), .. } => {
|
2013-09-18 22:18:38 -07:00
|
|
|
match i.for_ {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ResolvedPath { id, .. } => {
|
2013-11-21 15:42:55 -08:00
|
|
|
let v = self.impls.find_or_insert_with(id, |_| {
|
2013-09-18 22:18:38 -07:00
|
|
|
~[]
|
2013-11-21 15:42:55 -08:00
|
|
|
});
|
2013-09-30 17:04:14 -07:00
|
|
|
// extract relevant documentation for this impl
|
|
|
|
match attrs.move_iter().find(|a| {
|
|
|
|
match *a {
|
|
|
|
clean::NameValue(~"doc", _) => true,
|
|
|
|
_ => false
|
|
|
|
}
|
|
|
|
}) {
|
|
|
|
Some(clean::NameValue(_, dox)) => {
|
|
|
|
v.push((i, Some(dox)));
|
|
|
|
}
|
2013-11-28 12:22:53 -08:00
|
|
|
Some(..) | None => {
|
2013-09-30 17:04:14 -07:00
|
|
|
v.push((i, None));
|
|
|
|
}
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
2013-11-01 23:32:58 -07:00
|
|
|
// Private modules may survive the strip-private pass if
|
|
|
|
// they contain impls for public types, but those will get
|
|
|
|
// stripped here
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::Item { inner: clean::ModuleItem(ref m), .. }
|
2013-11-01 23:32:58 -07:00
|
|
|
if m.items.len() == 0 => None,
|
2013-09-30 17:04:14 -07:00
|
|
|
i => Some(i),
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
i => i,
|
|
|
|
};
|
|
|
|
|
|
|
|
if pushed { self.stack.pop(); }
|
|
|
|
if parent_pushed { self.parent_stack.pop(); }
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
impl<'a> Cache {
|
2013-09-18 22:18:38 -07:00
|
|
|
fn generics(&mut self, generics: &clean::Generics) {
|
|
|
|
for typ in generics.type_params.iter() {
|
|
|
|
self.typarams.insert(typ.id, typ.name.clone());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Context {
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Recurse in the directory structure and change the "root path" to make
|
|
|
|
/// sure it always points to the top (relatively)
|
2013-11-19 17:36:32 -08:00
|
|
|
fn recurse<T>(&mut self, s: ~str, f: |&mut Context| -> T) -> T {
|
2013-09-18 22:18:38 -07:00
|
|
|
if s.len() == 0 {
|
2013-10-21 13:08:31 -07:00
|
|
|
fail!("what {:?}", self);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-09-26 17:21:59 -07:00
|
|
|
let prev = self.dst.clone();
|
2013-10-05 19:49:32 -07:00
|
|
|
self.dst.push(s.as_slice());
|
2013-09-18 22:18:38 -07:00
|
|
|
self.root_path.push_str("../");
|
|
|
|
self.current.push(s);
|
|
|
|
|
2013-12-17 11:19:14 -05:00
|
|
|
info!("Recursing into {}", self.dst.display());
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
mkdir(&self.dst);
|
|
|
|
let ret = f(self);
|
|
|
|
|
2013-12-17 11:19:14 -05:00
|
|
|
info!("Recursed; leaving {}", self.dst.display());
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
// Go back to where we were at
|
|
|
|
self.dst = prev;
|
|
|
|
let len = self.root_path.len();
|
|
|
|
self.root_path.truncate(len - 3);
|
|
|
|
self.current.pop();
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2013-12-05 18:19:06 -08:00
|
|
|
/// Main method for rendering a crate.
|
|
|
|
///
|
|
|
|
/// This currently isn't parallelized, but it'd be pretty easy to add
|
|
|
|
/// parallelization to this function.
|
|
|
|
fn crate(mut self, mut crate: clean::Crate, cache: Cache) {
|
2013-09-18 22:18:38 -07:00
|
|
|
let mut item = match crate.module.take() {
|
|
|
|
Some(i) => i,
|
|
|
|
None => return
|
|
|
|
};
|
|
|
|
item.name = Some(crate.name);
|
|
|
|
|
2013-12-05 18:19:06 -08:00
|
|
|
// using a rwarc makes this parallelizable in the future
|
|
|
|
local_data::set(cache_key, Arc::new(cache));
|
2013-10-03 10:24:40 -07:00
|
|
|
|
2013-12-18 12:26:19 -05:00
|
|
|
let mut work = ~[(self, item)];
|
|
|
|
while work.len() > 0 {
|
|
|
|
let (mut cx, item) = work.pop();
|
|
|
|
cx.item(item, |cx, item| {
|
|
|
|
work.push((cx.clone(), item));
|
|
|
|
})
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
|
2013-10-03 10:24:40 -07:00
|
|
|
/// Non-parellelized version of rendering an item. This will take the input
|
|
|
|
/// item, render its contents, and then invoke the specified closure with
|
|
|
|
/// all sub-items which need to be rendered.
|
|
|
|
///
|
|
|
|
/// The rendering driver uses this closure to queue up more work.
|
2013-12-18 12:26:19 -05:00
|
|
|
fn item(&mut self, item: clean::Item, f: |&mut Context, clean::Item|) {
|
2013-10-29 23:31:07 -07:00
|
|
|
fn render(w: io::File, cx: &mut Context, it: &clean::Item,
|
2013-09-18 22:18:38 -07:00
|
|
|
pushname: bool) {
|
2013-12-17 11:19:14 -05:00
|
|
|
info!("Rendering an item to {}", w.path().display());
|
2013-09-18 22:18:38 -07:00
|
|
|
// A little unfortunate that this is done like this, but it sure
|
|
|
|
// does make formatting *a lot* nicer.
|
|
|
|
local_data::set(current_location_key, cx.current.clone());
|
|
|
|
|
|
|
|
let mut title = cx.current.connect("::");
|
|
|
|
if pushname {
|
|
|
|
if title.len() > 0 { title.push_str("::"); }
|
|
|
|
title.push_str(*it.name.get_ref());
|
|
|
|
}
|
|
|
|
title.push_str(" - Rust");
|
|
|
|
let page = layout::Page {
|
|
|
|
ty: shortty(it),
|
|
|
|
root_path: cx.root_path,
|
|
|
|
title: title,
|
|
|
|
};
|
|
|
|
|
|
|
|
// We have a huge number of calls to write, so try to alleviate some
|
|
|
|
// of the pain by using a buffered writer instead of invoking the
|
|
|
|
// write sycall all the time.
|
|
|
|
let mut writer = BufferedWriter::new(w);
|
2013-10-25 17:04:37 -07:00
|
|
|
layout::render(&mut writer as &mut Writer, &cx.layout, &page,
|
2013-09-18 22:18:38 -07:00
|
|
|
&Sidebar{ cx: cx, item: it },
|
|
|
|
&Item{ cx: cx, item: it });
|
|
|
|
writer.flush();
|
|
|
|
}
|
|
|
|
|
|
|
|
match item.inner {
|
2013-09-24 13:56:52 -07:00
|
|
|
// modules are special because they add a namespace. We also need to
|
|
|
|
// recurse into the items of the module as well.
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => {
|
2013-09-18 22:18:38 -07:00
|
|
|
let name = item.name.get_ref().to_owned();
|
2013-12-04 21:28:47 -08:00
|
|
|
let mut item = Some(item);
|
2013-11-21 15:42:55 -08:00
|
|
|
self.recurse(name, |this| {
|
2013-12-04 21:28:47 -08:00
|
|
|
let item = item.take_unwrap();
|
2013-10-05 19:49:32 -07:00
|
|
|
let dst = this.dst.join("index.html");
|
2013-10-29 23:31:07 -07:00
|
|
|
render(File::create(&dst).unwrap(), this, &item, false);
|
2013-09-18 22:18:38 -07:00
|
|
|
|
|
|
|
let m = match item.inner {
|
|
|
|
clean::ModuleItem(m) => m,
|
|
|
|
_ => unreachable!()
|
|
|
|
};
|
|
|
|
this.sidebar = build_sidebar(&m);
|
|
|
|
for item in m.items.move_iter() {
|
2013-12-18 12:26:19 -05:00
|
|
|
f(this,item);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-11-21 15:42:55 -08:00
|
|
|
})
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-09-24 13:56:52 -07:00
|
|
|
|
|
|
|
// Things which don't have names (like impls) don't get special
|
|
|
|
// pages dedicated to them.
|
2013-09-18 22:18:38 -07:00
|
|
|
_ if item.name.is_some() => {
|
2013-10-05 19:49:32 -07:00
|
|
|
let dst = self.dst.join(item_path(&item));
|
2013-10-29 23:31:07 -07:00
|
|
|
render(File::create(&dst).unwrap(), self, &item, true);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-09-24 13:56:52 -07:00
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn shortty(item: &clean::Item) -> &'static str {
|
|
|
|
match item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => "mod",
|
|
|
|
clean::StructItem(..) => "struct",
|
|
|
|
clean::EnumItem(..) => "enum",
|
|
|
|
clean::FunctionItem(..) => "fn",
|
|
|
|
clean::TypedefItem(..) => "typedef",
|
|
|
|
clean::StaticItem(..) => "static",
|
|
|
|
clean::TraitItem(..) => "trait",
|
|
|
|
clean::ImplItem(..) => "impl",
|
|
|
|
clean::ViewItemItem(..) => "viewitem",
|
|
|
|
clean::TyMethodItem(..) => "tymethod",
|
|
|
|
clean::MethodItem(..) => "method",
|
|
|
|
clean::StructFieldItem(..) => "structfield",
|
|
|
|
clean::VariantItem(..) => "variant",
|
|
|
|
clean::ForeignFunctionItem(..) => "ffi",
|
|
|
|
clean::ForeignStaticItem(..) => "ffs",
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
impl<'a> Item<'a> {
|
2013-09-18 22:18:38 -07:00
|
|
|
fn ismodule(&self) -> bool {
|
|
|
|
match self.item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => true, _ => false
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-09 23:16:18 -08:00
|
|
|
impl<'a> fmt::Default for Item<'a> {
|
|
|
|
fn fmt(it: &Item<'a>, fmt: &mut fmt::Formatter) {
|
2013-09-26 12:53:06 -07:00
|
|
|
match attr::find_stability(it.item.attrs.iter()) {
|
|
|
|
Some(stability) => {
|
|
|
|
write!(fmt.buf,
|
|
|
|
"<a class='stability {lvl}' title='{reason}'>{lvl}</a>",
|
|
|
|
lvl = stability.level.to_str(),
|
|
|
|
reason = match stability.text {
|
|
|
|
Some(s) => s, None => @"",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
|
2013-09-27 15:12:23 -07:00
|
|
|
if it.cx.include_sources {
|
|
|
|
let mut path = ~[];
|
2013-11-21 15:42:55 -08:00
|
|
|
clean_srcpath(it.item.source.filename.as_bytes(), |component| {
|
2013-09-27 15:12:23 -07:00
|
|
|
path.push(component.to_owned());
|
2013-11-21 15:42:55 -08:00
|
|
|
});
|
2013-10-02 19:35:30 +02:00
|
|
|
let href = if it.item.source.loline == it.item.source.hiline {
|
|
|
|
format!("{}", it.item.source.loline)
|
|
|
|
} else {
|
|
|
|
format!("{}-{}", it.item.source.loline, it.item.source.hiline)
|
|
|
|
};
|
2013-09-27 15:12:23 -07:00
|
|
|
write!(fmt.buf,
|
|
|
|
"<a class='source'
|
2013-10-02 19:35:30 +02:00
|
|
|
href='{root}src/{crate}/{path}.html\\#{href}'>[src]</a>",
|
2013-09-27 15:12:23 -07:00
|
|
|
root = it.cx.root_path,
|
|
|
|
crate = it.cx.layout.crate,
|
|
|
|
path = path.connect("/"),
|
2013-10-02 19:35:30 +02:00
|
|
|
href = href);
|
2013-09-27 15:12:23 -07:00
|
|
|
}
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
// Write the breadcrumb trail header for the top
|
|
|
|
write!(fmt.buf, "<h1 class='fqn'>");
|
|
|
|
match it.item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => write!(fmt.buf, "Module "),
|
|
|
|
clean::FunctionItem(..) => write!(fmt.buf, "Function "),
|
|
|
|
clean::TraitItem(..) => write!(fmt.buf, "Trait "),
|
|
|
|
clean::StructItem(..) => write!(fmt.buf, "Struct "),
|
|
|
|
clean::EnumItem(..) => write!(fmt.buf, "Enum "),
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
let cur = it.cx.current.as_slice();
|
|
|
|
let amt = if it.ismodule() { cur.len() - 1 } else { cur.len() };
|
|
|
|
for (i, component) in cur.iter().enumerate().take(amt) {
|
|
|
|
let mut trail = ~"";
|
|
|
|
for _ in range(0, cur.len() - i - 1) {
|
|
|
|
trail.push_str("../");
|
|
|
|
}
|
|
|
|
write!(fmt.buf, "<a href='{}index.html'>{}</a>::",
|
|
|
|
trail, component.as_slice());
|
|
|
|
}
|
|
|
|
write!(fmt.buf, "<a class='{}' href=''>{}</a></h1>",
|
|
|
|
shortty(it.item), it.item.name.get_ref().as_slice());
|
|
|
|
|
|
|
|
match it.item.inner {
|
|
|
|
clean::ModuleItem(ref m) => item_module(fmt.buf, it.cx,
|
|
|
|
it.item, m.items),
|
2013-09-26 11:57:25 -07:00
|
|
|
clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) =>
|
|
|
|
item_function(fmt.buf, it.item, f),
|
2013-09-18 22:18:38 -07:00
|
|
|
clean::TraitItem(ref t) => item_trait(fmt.buf, it.item, t),
|
|
|
|
clean::StructItem(ref s) => item_struct(fmt.buf, it.item, s),
|
|
|
|
clean::EnumItem(ref e) => item_enum(fmt.buf, it.item, e),
|
|
|
|
clean::TypedefItem(ref t) => item_typedef(fmt.buf, it.item, t),
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn item_path(item: &clean::Item) -> ~str {
|
|
|
|
match item.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => *item.name.get_ref() + "/index.html",
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => shortty(item) + "." + *item.name.get_ref() + ".html"
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn full_path(cx: &Context, item: &clean::Item) -> ~str {
|
|
|
|
let mut s = cx.current.connect("::");
|
|
|
|
s.push_str("::");
|
|
|
|
s.push_str(item.name.get_ref().as_slice());
|
|
|
|
return s;
|
|
|
|
}
|
|
|
|
|
|
|
|
fn blank<'a>(s: Option<&'a str>) -> &'a str {
|
|
|
|
match s {
|
|
|
|
Some(s) => s,
|
|
|
|
None => ""
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn shorter<'a>(s: Option<&'a str>) -> &'a str {
|
|
|
|
match s {
|
|
|
|
Some(s) => match s.find_str("\n\n") {
|
|
|
|
Some(pos) => s.slice_to(pos),
|
|
|
|
None => s,
|
|
|
|
},
|
|
|
|
None => ""
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn document(w: &mut Writer, item: &clean::Item) {
|
2013-09-18 22:18:38 -07:00
|
|
|
match item.doc_value() {
|
|
|
|
Some(s) => {
|
|
|
|
write!(w, "<div class='docblock'>{}</div>", Markdown(s));
|
|
|
|
}
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn item_module(w: &mut Writer, cx: &Context,
|
2013-09-18 22:18:38 -07:00
|
|
|
item: &clean::Item, items: &[clean::Item]) {
|
|
|
|
document(w, item);
|
2013-10-21 13:08:31 -07:00
|
|
|
debug!("{:?}", items);
|
2013-09-18 22:18:38 -07:00
|
|
|
let mut indices = vec::from_fn(items.len(), |i| i);
|
|
|
|
|
2013-12-19 16:53:02 +11:00
|
|
|
fn le(i1: &clean::Item, i2: &clean::Item, idx1: uint, idx2: uint) -> bool {
|
2013-09-18 22:18:38 -07:00
|
|
|
if shortty(i1) == shortty(i2) {
|
2013-12-19 16:53:02 +11:00
|
|
|
return i1.name <= i2.name;
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
match (&i1.inner, &i2.inner) {
|
2013-09-24 13:55:22 -07:00
|
|
|
(&clean::ViewItemItem(ref a), &clean::ViewItemItem(ref b)) => {
|
|
|
|
match (&a.inner, &b.inner) {
|
2013-11-28 12:22:53 -08:00
|
|
|
(&clean::ExternMod(..), _) => true,
|
|
|
|
(_, &clean::ExternMod(..)) => false,
|
2013-12-19 16:53:02 +11:00
|
|
|
_ => idx1 <= idx2,
|
2013-09-24 13:55:22 -07:00
|
|
|
}
|
|
|
|
}
|
2013-11-28 12:22:53 -08:00
|
|
|
(&clean::ViewItemItem(..), _) => true,
|
|
|
|
(_, &clean::ViewItemItem(..)) => false,
|
|
|
|
(&clean::ModuleItem(..), _) => true,
|
|
|
|
(_, &clean::ModuleItem(..)) => false,
|
|
|
|
(&clean::StructItem(..), _) => true,
|
|
|
|
(_, &clean::StructItem(..)) => false,
|
|
|
|
(&clean::EnumItem(..), _) => true,
|
|
|
|
(_, &clean::EnumItem(..)) => false,
|
|
|
|
(&clean::StaticItem(..), _) => true,
|
|
|
|
(_, &clean::StaticItem(..)) => false,
|
|
|
|
(&clean::ForeignFunctionItem(..), _) => true,
|
|
|
|
(_, &clean::ForeignFunctionItem(..)) => false,
|
|
|
|
(&clean::ForeignStaticItem(..), _) => true,
|
|
|
|
(_, &clean::ForeignStaticItem(..)) => false,
|
|
|
|
(&clean::TraitItem(..), _) => true,
|
|
|
|
(_, &clean::TraitItem(..)) => false,
|
|
|
|
(&clean::FunctionItem(..), _) => true,
|
|
|
|
(_, &clean::FunctionItem(..)) => false,
|
|
|
|
(&clean::TypedefItem(..), _) => true,
|
|
|
|
(_, &clean::TypedefItem(..)) => false,
|
2013-12-19 16:53:02 +11:00
|
|
|
_ => idx1 <= idx2,
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-21 13:08:31 -07:00
|
|
|
debug!("{:?}", indices);
|
2013-12-19 23:03:11 +11:00
|
|
|
indices.sort_by(|&i1, &i2| le(&items[i1], &items[i2], i1, i2));
|
2013-09-18 22:18:38 -07:00
|
|
|
|
2013-10-21 13:08:31 -07:00
|
|
|
debug!("{:?}", indices);
|
2013-09-18 22:18:38 -07:00
|
|
|
let mut curty = "";
|
|
|
|
for &idx in indices.iter() {
|
|
|
|
let myitem = &items[idx];
|
|
|
|
|
|
|
|
let myty = shortty(myitem);
|
|
|
|
if myty != curty {
|
|
|
|
if curty != "" {
|
|
|
|
write!(w, "</table>");
|
|
|
|
}
|
|
|
|
curty = myty;
|
|
|
|
write!(w, "<h2>{}</h2>\n<table>", match myitem.inner {
|
2013-11-28 12:22:53 -08:00
|
|
|
clean::ModuleItem(..) => "Modules",
|
|
|
|
clean::StructItem(..) => "Structs",
|
|
|
|
clean::EnumItem(..) => "Enums",
|
|
|
|
clean::FunctionItem(..) => "Functions",
|
|
|
|
clean::TypedefItem(..) => "Type Definitions",
|
|
|
|
clean::StaticItem(..) => "Statics",
|
|
|
|
clean::TraitItem(..) => "Traits",
|
|
|
|
clean::ImplItem(..) => "Implementations",
|
|
|
|
clean::ViewItemItem(..) => "Reexports",
|
|
|
|
clean::TyMethodItem(..) => "Type Methods",
|
|
|
|
clean::MethodItem(..) => "Methods",
|
|
|
|
clean::StructFieldItem(..) => "Struct Fields",
|
|
|
|
clean::VariantItem(..) => "Variants",
|
|
|
|
clean::ForeignFunctionItem(..) => "Foreign Functions",
|
|
|
|
clean::ForeignStaticItem(..) => "Foreign Statics",
|
2013-09-18 22:18:38 -07:00
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
match myitem.inner {
|
2013-09-26 11:57:25 -07:00
|
|
|
clean::StaticItem(ref s) | clean::ForeignStaticItem(ref s) => {
|
2013-12-09 23:16:18 -08:00
|
|
|
struct Initializer<'a>(&'a str);
|
|
|
|
impl<'a> fmt::Default for Initializer<'a> {
|
|
|
|
fn fmt(s: &Initializer<'a>, f: &mut fmt::Formatter) {
|
2013-09-26 11:57:25 -07:00
|
|
|
if s.len() == 0 { return; }
|
|
|
|
write!(f.buf, "<code> = </code>");
|
2013-09-18 22:18:38 -07:00
|
|
|
let tag = if s.contains("\n") { "pre" } else { "code" };
|
|
|
|
write!(f.buf, "<{tag}>{}</{tag}>",
|
|
|
|
s.as_slice(), tag=tag);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
write!(w, "
|
|
|
|
<tr>
|
2013-09-26 11:57:25 -07:00
|
|
|
<td><code>{}static {}: {}</code>{}</td>
|
2013-09-18 22:18:38 -07:00
|
|
|
<td class='docblock'>{} </td>
|
|
|
|
</tr>
|
|
|
|
",
|
2013-09-24 13:56:52 -07:00
|
|
|
VisSpace(myitem.visibility),
|
2013-09-18 22:18:38 -07:00
|
|
|
*myitem.name.get_ref(),
|
|
|
|
s.type_,
|
|
|
|
Initializer(s.expr),
|
|
|
|
Markdown(blank(myitem.doc_value())));
|
|
|
|
}
|
|
|
|
|
2013-09-24 13:56:52 -07:00
|
|
|
clean::ViewItemItem(ref item) => {
|
|
|
|
match item.inner {
|
|
|
|
clean::ExternMod(ref name, ref src, _, _) => {
|
|
|
|
write!(w, "<tr><td><code>extern mod {}",
|
|
|
|
name.as_slice());
|
|
|
|
match *src {
|
|
|
|
Some(ref src) => write!(w, " = \"{}\"",
|
|
|
|
src.as_slice()),
|
|
|
|
None => {}
|
|
|
|
}
|
|
|
|
write!(w, ";</code></td></tr>");
|
|
|
|
}
|
|
|
|
|
|
|
|
clean::Import(ref imports) => {
|
|
|
|
for import in imports.iter() {
|
|
|
|
write!(w, "<tr><td><code>{}{}</code></td></tr>",
|
|
|
|
VisSpace(myitem.visibility),
|
|
|
|
*import);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2013-09-18 22:18:38 -07:00
|
|
|
_ => {
|
2013-10-01 14:31:03 -07:00
|
|
|
if myitem.name.is_none() { continue }
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "
|
|
|
|
<tr>
|
|
|
|
<td><a class='{class}' href='{href}'
|
|
|
|
title='{title}'>{}</a></td>
|
|
|
|
<td class='docblock short'>{}</td>
|
|
|
|
</tr>
|
|
|
|
",
|
|
|
|
*myitem.name.get_ref(),
|
|
|
|
Markdown(shorter(myitem.doc_value())),
|
|
|
|
class = shortty(myitem),
|
|
|
|
href = item_path(myitem),
|
|
|
|
title = full_path(cx, myitem));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
write!(w, "</table>");
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn item_function(w: &mut Writer, it: &clean::Item, f: &clean::Function) {
|
2013-09-23 20:38:17 -07:00
|
|
|
write!(w, "<pre class='fn'>{vis}{purity}fn {name}{generics}{decl}</pre>",
|
2013-09-18 22:18:38 -07:00
|
|
|
vis = VisSpace(it.visibility),
|
2013-09-23 20:38:17 -07:00
|
|
|
purity = PuritySpace(f.purity),
|
2013-09-18 22:18:38 -07:00
|
|
|
name = it.name.get_ref().as_slice(),
|
|
|
|
generics = f.generics,
|
|
|
|
decl = f.decl);
|
|
|
|
document(w, it);
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn item_trait(w: &mut Writer, it: &clean::Item, t: &clean::Trait) {
|
2013-09-18 22:18:38 -07:00
|
|
|
let mut parents = ~"";
|
|
|
|
if t.parents.len() > 0 {
|
|
|
|
parents.push_str(": ");
|
|
|
|
for (i, p) in t.parents.iter().enumerate() {
|
|
|
|
if i > 0 { parents.push_str(" + "); }
|
|
|
|
parents.push_str(format!("{}", *p));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Output the trait definition
|
|
|
|
write!(w, "<pre class='trait'>{}trait {}{}{} ",
|
|
|
|
VisSpace(it.visibility),
|
|
|
|
it.name.get_ref().as_slice(),
|
|
|
|
t.generics,
|
|
|
|
parents);
|
|
|
|
let required = t.methods.iter().filter(|m| m.is_req()).to_owned_vec();
|
|
|
|
let provided = t.methods.iter().filter(|m| !m.is_req()).to_owned_vec();
|
|
|
|
|
|
|
|
if t.methods.len() == 0 {
|
|
|
|
write!(w, "\\{ \\}");
|
|
|
|
} else {
|
|
|
|
write!(w, "\\{\n");
|
|
|
|
for m in required.iter() {
|
|
|
|
write!(w, " ");
|
|
|
|
render_method(w, m.item(), true);
|
|
|
|
write!(w, ";\n");
|
|
|
|
}
|
|
|
|
if required.len() > 0 && provided.len() > 0 {
|
|
|
|
w.write("\n".as_bytes());
|
|
|
|
}
|
|
|
|
for m in provided.iter() {
|
|
|
|
write!(w, " ");
|
|
|
|
render_method(w, m.item(), true);
|
|
|
|
write!(w, " \\{ ... \\}\n");
|
|
|
|
}
|
|
|
|
write!(w, "\\}");
|
|
|
|
}
|
|
|
|
write!(w, "</pre>");
|
|
|
|
|
|
|
|
// Trait documentation
|
|
|
|
document(w, it);
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn meth(w: &mut Writer, m: &clean::TraitMethod) {
|
2013-09-27 10:40:41 -07:00
|
|
|
write!(w, "<h3 id='{}.{}' class='method'><code>",
|
|
|
|
shortty(m.item()),
|
2013-09-18 22:18:38 -07:00
|
|
|
*m.item().name.get_ref());
|
|
|
|
render_method(w, m.item(), false);
|
|
|
|
write!(w, "</code></h3>");
|
|
|
|
document(w, m.item());
|
|
|
|
}
|
|
|
|
|
|
|
|
// Output the documentation for each function individually
|
|
|
|
if required.len() > 0 {
|
|
|
|
write!(w, "
|
|
|
|
<h2 id='required-methods'>Required Methods</h2>
|
|
|
|
<div class='methods'>
|
|
|
|
");
|
|
|
|
for m in required.iter() {
|
|
|
|
meth(w, *m);
|
|
|
|
}
|
|
|
|
write!(w, "</div>");
|
|
|
|
}
|
|
|
|
if provided.len() > 0 {
|
|
|
|
write!(w, "
|
|
|
|
<h2 id='provided-methods'>Provided Methods</h2>
|
|
|
|
<div class='methods'>
|
|
|
|
");
|
|
|
|
for m in provided.iter() {
|
|
|
|
meth(w, *m);
|
|
|
|
}
|
|
|
|
write!(w, "</div>");
|
|
|
|
}
|
|
|
|
|
2013-11-21 15:42:55 -08:00
|
|
|
local_data::get(cache_key, |cache| {
|
2013-12-05 18:19:06 -08:00
|
|
|
let cache = cache.unwrap().get();
|
|
|
|
match cache.implementors.find(&it.id) {
|
|
|
|
Some(implementors) => {
|
|
|
|
write!(w, "
|
|
|
|
<h2 id='implementors'>Implementors</h2>
|
|
|
|
<ul class='item-list'>
|
|
|
|
");
|
|
|
|
for i in implementors.iter() {
|
|
|
|
match *i {
|
|
|
|
PathType(ref ty) => {
|
|
|
|
write!(w, "<li><code>{}</code></li>", *ty);
|
|
|
|
}
|
|
|
|
OtherType(ref generics, ref trait_, ref for_) => {
|
|
|
|
write!(w, "<li><code>impl{} {} for {}</code></li>",
|
|
|
|
*generics, *trait_, *for_);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2013-12-05 18:19:06 -08:00
|
|
|
write!(w, "</ul>");
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-12-05 18:19:06 -08:00
|
|
|
None => {}
|
|
|
|
}
|
2013-11-21 15:42:55 -08:00
|
|
|
})
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn render_method(w: &mut Writer, meth: &clean::Item, withlink: bool) {
|
|
|
|
fn fun(w: &mut Writer, it: &clean::Item, purity: ast::purity,
|
2013-09-18 22:18:38 -07:00
|
|
|
g: &clean::Generics, selfty: &clean::SelfTy, d: &clean::FnDecl,
|
|
|
|
withlink: bool) {
|
|
|
|
write!(w, "{}fn {withlink, select,
|
2013-09-27 10:40:41 -07:00
|
|
|
true{<a href='\\#{ty}.{name}'
|
|
|
|
class='fnname'>{name}</a>}
|
2013-09-24 14:07:13 -07:00
|
|
|
other{<span class='fnname'>{name}</span>}
|
2013-09-18 22:18:38 -07:00
|
|
|
}{generics}{decl}",
|
|
|
|
match purity {
|
|
|
|
ast::unsafe_fn => "unsafe ",
|
|
|
|
_ => "",
|
|
|
|
},
|
2013-09-27 10:40:41 -07:00
|
|
|
ty = shortty(it),
|
2013-09-18 22:18:38 -07:00
|
|
|
name = it.name.get_ref().as_slice(),
|
|
|
|
generics = *g,
|
|
|
|
decl = Method(selfty, d),
|
|
|
|
withlink = if withlink {"true"} else {"false"});
|
|
|
|
}
|
|
|
|
match meth.inner {
|
|
|
|
clean::TyMethodItem(ref m) => {
|
|
|
|
fun(w, meth, m.purity, &m.generics, &m.self_, &m.decl, withlink);
|
|
|
|
}
|
|
|
|
clean::MethodItem(ref m) => {
|
|
|
|
fun(w, meth, m.purity, &m.generics, &m.self_, &m.decl, withlink);
|
|
|
|
}
|
|
|
|
_ => unreachable!()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn item_struct(w: &mut Writer, it: &clean::Item, s: &clean::Struct) {
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "<pre class='struct'>");
|
2013-10-13 20:37:43 -07:00
|
|
|
render_struct(w, it, Some(&s.generics), s.struct_type, s.fields,
|
|
|
|
s.fields_stripped, "", true);
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "</pre>");
|
|
|
|
|
|
|
|
document(w, it);
|
2013-09-30 16:31:35 -07:00
|
|
|
match s.struct_type {
|
|
|
|
doctree::Plain => {
|
|
|
|
write!(w, "<h2 class='fields'>Fields</h2>\n<table>");
|
|
|
|
for field in s.fields.iter() {
|
|
|
|
write!(w, "<tr><td id='structfield.{name}'>\
|
|
|
|
<code>{name}</code></td><td>",
|
|
|
|
name = field.name.get_ref().as_slice());
|
|
|
|
document(w, field);
|
|
|
|
write!(w, "</td></tr>");
|
|
|
|
}
|
|
|
|
write!(w, "</table>");
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
render_methods(w, it);
|
|
|
|
}
|
|
|
|
|
2013-10-25 17:04:37 -07:00
|
|
|
fn item_enum(w: &mut Writer, it: &clean::Item, e: &clean::Enum) {
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "<pre class='enum'>{}enum {}{}",
|
|
|
|
VisSpace(it.visibility),
|
|
|
|
it.name.get_ref().as_slice(),
|
|
|
|
e.generics);
|
2013-12-16 21:30:02 -08:00
|
|
|
if e.variants.len() == 0 && !e.variants_stripped {
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, " \\{\\}");
|
|
|
|
} else {
|
|
|
|
write!(w, " \\{\n");
|
|
|
|
for v in e.variants.iter() {
|
2013-09-30 16:31:35 -07:00
|
|
|
write!(w, " ");
|
|
|
|
let name = v.name.get_ref().as_slice();
|
2013-09-18 22:18:38 -07:00
|
|
|
match v.inner {
|
|
|
|
clean::VariantItem(ref var) => {
|
|
|
|
match var.kind {
|
2013-09-30 16:31:35 -07:00
|
|
|
clean::CLikeVariant => write!(w, "{}", name),
|
2013-09-18 22:18:38 -07:00
|
|
|
clean::TupleVariant(ref tys) => {
|
2013-09-30 16:31:35 -07:00
|
|
|
write!(w, "{}(", name);
|
2013-09-18 22:18:38 -07:00
|
|
|
for (i, ty) in tys.iter().enumerate() {
|
|
|
|
if i > 0 { write!(w, ", ") }
|
|
|
|
write!(w, "{}", *ty);
|
|
|
|
}
|
2013-09-30 16:31:35 -07:00
|
|
|
write!(w, ")");
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
clean::StructVariant(ref s) => {
|
|
|
|
render_struct(w, v, None, s.struct_type, s.fields,
|
2013-10-13 20:37:43 -07:00
|
|
|
s.fields_stripped, " ", false);
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => unreachable!()
|
|
|
|
}
|
2013-09-30 16:31:35 -07:00
|
|
|
write!(w, ",\n");
|
2013-09-18 22:18:38 -07:00
|
|
|
}
|
2013-10-13 20:37:43 -07:00
|
|
|
|
|
|
|
if e.variants_stripped {
|
|
|
|
write!(w, " // some variants omitted\n");
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
write!(w, "\\}");
|
|
|
|
}
|
|
|
|
write!(w, "</pre>");
|
|
|
|
|
|
|
|
document(w, it);
|
2013-09-30 16:31:35 -07:00
|
|
|
if e.variants.len() > 0 {
|
|
|
|
write!(w, "<h2 class='variants'>Variants</h2>\n<table>");
|
|
|
|
for variant in e.variants.iter() {
|
|
|
|
write!(w, "<tr><td id='variant.{name}'><code>{name}</code></td><td>",
|
|
|
|
name = variant.name.get_ref().as_slice());
|
|
|
|
document(w, variant);
|
2013-10-18 22:00:08 -07:00
|
|
|
match variant.inner {
|
|
|
|
clean::VariantItem(ref var) => {
|
|
|
|
match var.kind {
|
|
|
|
clean::StructVariant(ref s) => {
|
|
|
|
write!(w, "<h3 class='fields'>Fields</h3>\n<table>");
|
|
|
|
for field in s.fields.iter() {
|
|
|
|
write!(w, "<tr><td id='variant.{v}.field.{f}'>\
|
|
|
|
<code>{f}</code></td><td>",
|
|
|
|
v = variant.name.get_ref().as_slice(),
|
|
|
|
f = field.name.get_ref().as_slice());
|
|
|
|
document(w, field);
|
|
|
|
write!(w, "</td></tr>");
|
|
|
|
}
|
|
|
|
write!(w, "</table>");
|
|
|
|
}
|
|
|
|
_ => ()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => ()
|
|
|
|
}
|
2013-09-30 16:31:35 -07:00
|
|
|
write!(w, "</td></tr>");
|
|
|
|
}
|
|
|
|
write!(w, "</table>");
|
|
|
|
|
|
|
|
}
|
2013-09-18 22:18:38 -07:00
|
|
|
render_methods(w, it);
|
|
|
|
}
|
|
|
|
|
fn render_struct(w: &mut Writer, it: &clean::Item,
                 g: Option<&clean::Generics>,
                 ty: doctree::StructType,
                 fields: &[clean::Item],
                 fields_stripped: bool,
                 tab: &str,
                 structhead: bool) {
    write!(w, "{}{}{}",
           VisSpace(it.visibility),
           if structhead {"struct "} else {""},
           it.name.get_ref().as_slice());
    match g {
        Some(g) => write!(w, "{}", *g),
        None => {}
    }
    match ty {
        doctree::Plain => {
            write!(w, " \\{\n{}", tab);
            for field in fields.iter() {
                match field.inner {
                    clean::StructFieldItem(ref ty) => {
                        write!(w, "    {}{}: {},\n{}",
                               VisSpace(field.visibility),
                               field.name.get_ref().as_slice(),
                               ty.type_,
                               tab);
                    }
                    _ => unreachable!()
                }
            }

            if fields_stripped {
                write!(w, "    // some fields omitted\n{}", tab);
            }
            write!(w, "\\}");
        }
        doctree::Tuple | doctree::Newtype => {
            write!(w, "(");
            for (i, field) in fields.iter().enumerate() {
                if i > 0 { write!(w, ", ") }
                match field.inner {
                    clean::StructFieldItem(ref field) => {
                        write!(w, "{}", field.type_);
                    }
                    _ => unreachable!()
                }
            }
            write!(w, ");");
        }
        doctree::Unit => { write!(w, ";"); }
    }
}

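/// Renders the "Methods" and "Trait Implementations" sections for an item by
/// looking up its impls in the shared cache stored in task-local storage.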
fn render_methods(w: &mut Writer, it: &clean::Item) {
    local_data::get(cache_key, |cache| {
        let c = cache.unwrap().get();
        match c.impls.find(&it.id) {
            Some(v) => {
                let mut non_trait = v.iter().filter(|p| {
                    p.n0_ref().trait_.is_none()
                });
                let non_trait = non_trait.to_owned_vec();
                let mut traits = v.iter().filter(|p| {
                    p.n0_ref().trait_.is_some()
                });
                let traits = traits.to_owned_vec();

                if non_trait.len() > 0 {
                    write!(w, "<h2 id='methods'>Methods</h2>");
                    for &(ref i, ref dox) in non_trait.move_iter() {
                        render_impl(w, i, dox);
                    }
                }
                if traits.len() > 0 {
                    write!(w, "<h2 id='implementations'>Trait \
                               Implementations</h2>");
                    for &(ref i, ref dox) in traits.move_iter() {
                        render_impl(w, i, dox);
                    }
                }
            }
            None => {}
        }
    })
}

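/// Renders a single impl block: its header, any documentation attached to the
/// impl itself, and each of its methods. Undocumented methods fall back to the
/// documentation of the corresponding trait method, and default trait methods
/// that were not overridden in the impl are emitted as well.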
fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
    write!(w, "<h3 class='impl'><code>impl{} ", i.generics);
    let trait_id = match i.trait_ {
        Some(ref ty) => {
            write!(w, "{} for ", *ty);
            match *ty {
                clean::ResolvedPath { id, .. } => Some(id),
                _ => None,
            }
        }
        None => None
    };
    write!(w, "{}</code></h3>", i.for_);
    match *dox {
        Some(ref dox) => {
            write!(w, "<div class='docblock'>{}</div>",
                   Markdown(dox.as_slice()));
        }
        None => {}
    }

    fn docmeth(w: &mut Writer, item: &clean::Item) -> bool {
        write!(w, "<h4 id='method.{}' class='method'><code>",
               *item.name.get_ref());
        render_method(w, item, false);
        write!(w, "</code></h4>\n");
        match item.doc_value() {
            Some(s) => {
                write!(w, "<div class='docblock'>{}</div>", Markdown(s));
                true
            }
            None => false
        }
    }

    write!(w, "<div class='methods'>");
    for meth in i.methods.iter() {
        if docmeth(w, meth) {
            continue
        }

        // No documentation? Attempt to slurp in the trait's documentation
        let trait_id = match trait_id {
            None => continue,
            Some(id) => id,
        };
        local_data::get(cache_key, |cache| {
            let cache = cache.unwrap().get();
            match cache.traits.find(&trait_id) {
                Some(t) => {
                    let name = meth.name.clone();
                    match t.methods.iter().find(|t| t.item().name == name) {
                        Some(method) => {
                            match method.item().doc_value() {
                                Some(s) => {
                                    write!(w,
                                           "<div class='docblock'>{}</div>",
                                           Markdown(s));
                                }
                                None => {}
                            }
                        }
                        None => {}
                    }
                }
                None => {}
            }
        })
    }

    // If we've implemented a trait, then also emit documentation for all
    // default methods which weren't overridden in the implementation block.
    match trait_id {
        None => {}
        Some(id) => {
            local_data::get(cache_key, |cache| {
                let cache = cache.unwrap().get();
                match cache.traits.find(&id) {
                    Some(t) => {
                        for method in t.methods.iter() {
                            let n = method.item().name.clone();
                            match i.methods.iter().find(|m| m.name == n) {
                                Some(..) => continue,
                                None => {}
                            }

                            docmeth(w, method.item());
                        }
                    }
                    None => {}
                }
            })
        }
    }
    write!(w, "</div>");
}

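/// Renders a type alias: the `type` signature followed by its documentation.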
fn item_typedef(w: &mut Writer, it: &clean::Item, t: &clean::Typedef) {
    write!(w, "<pre class='typedef'>type {}{} = {};</pre>",
           it.name.get_ref().as_slice(),
           t.generics,
           t.type_);

    document(w, it);
}

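// Renders the sidebar for the current page: the module-path breadcrumbs
// followed by one block per item kind (modules, structs, enums, traits,
// functions), built from the `Context`'s sidebar map.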
impl<'a> fmt::Default for Sidebar<'a> {
    fn fmt(s: &Sidebar<'a>, fmt: &mut fmt::Formatter) {
        let cx = s.cx;
        let it = s.item;
        write!(fmt.buf, "<p class='location'>");
        let len = cx.current.len() - if it.is_mod() {1} else {0};
        for (i, name) in cx.current.iter().take(len).enumerate() {
            if i > 0 { write!(fmt.buf, "&\\#8203;::") }
            write!(fmt.buf, "<a href='{}index.html'>{}</a>",
                   cx.root_path.slice_to((cx.current.len() - i - 1) * 3), *name);
        }
        write!(fmt.buf, "</p>");

        fn block(w: &mut Writer, short: &str, longty: &str,
                 cur: &clean::Item, cx: &Context) {
            let items = match cx.sidebar.find_equiv(&short) {
                Some(items) => items.as_slice(),
                None => return
            };
            write!(w, "<div class='block {}'><h2>{}</h2>", short, longty);
            for item in items.iter() {
                let class = if cur.name.get_ref() == item &&
                               short == shortty(cur) { "current" } else { "" };
                write!(w, "<a class='{ty} {class}' href='{curty, select,
                                mod{../}
                                other{}
                           }{tysel, select,
                                mod{{name}/index.html}
                                other{#.{name}.html}
                           }'>{name}</a><br/>",
                       ty = short,
                       tysel = short,
                       class = class,
                       curty = shortty(cur),
                       name = item.as_slice());
            }
            write!(w, "</div>");
        }

        block(fmt.buf, "mod", "Modules", it, cx);
        block(fmt.buf, "struct", "Structs", it, cx);
        block(fmt.buf, "enum", "Enums", it, cx);
        block(fmt.buf, "trait", "Traits", it, cx);
        block(fmt.buf, "fn", "Functions", it, cx);
    }
}

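/// Collects the items of a module into a map from item kind (as reported by
/// `shortty`) to a sorted list of item names, used to populate the sidebar.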
fn build_sidebar(m: &clean::Module) -> HashMap<~str, ~[~str]> {
    let mut map = HashMap::new();
    for item in m.items.iter() {
        let short = shortty(item);
        let myname = match item.name {
            None => continue,
            Some(ref s) => s.to_owned(),
        };
        let v = map.find_or_insert_with(short.to_owned(), |_| ~[]);
        v.push(myname);
    }

    for (_, items) in map.mut_iter() {
        items.sort(|i1, i2| i1 <= i2);
    }
    return map;
}

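// Renders a source file listing: a column of line numbers padded to the width
// of the largest line number, followed by the escaped contents of the file.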
impl<'a> fmt::Default for Source<'a> {
    fn fmt(s: &Source<'a>, fmt: &mut fmt::Formatter) {
        let lines = s.lines().len();
        let mut cols = 0;
        let mut tmp = lines;
        while tmp > 0 {
            cols += 1;
            tmp /= 10;
        }
        write!(fmt.buf, "<pre class='line-numbers'>");
        for i in range(1, lines + 1) {
            write!(fmt.buf, "<span id='{0:u}'>{0:1$u}</span>\n", i, cols);
        }
        write!(fmt.buf, "</pre>");
        write!(fmt.buf, "<pre class='rust'>");
        write!(fmt.buf, "{}", Escape(s.as_slice()));
        write!(fmt.buf, "</pre>");
    }
}