,
+}
+
+/// Paths (relative to the doc root) and their pre-merge contents
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(transparent)]
+struct PartsAndLocations<P> {
+    parts: Vec<(PathBuf, P)>,
+}
-    #[derive(Debug, Default)]
-    struct Hierarchy {
-        parent: Weak<Self>,
-        elem: OsString,
-        children: RefCell<FxHashMap<OsString, Rc<Self>>>,
-        elems: RefCell<FxHashSet<OsString>>,
+impl<P> Default for PartsAndLocations<P> {
+    fn default() -> Self {
+        Self { parts: Vec::default() }
+    }
+}
-    impl Hierarchy {
-        fn with_parent(elem: OsString, parent: &Rc<Self>) -> Self {
-            Self { elem, parent: Rc::downgrade(parent), ..Self::default() }
-        }
+impl<T, U> PartsAndLocations<Part<T, U>> {
+    fn push(&mut self, path: PathBuf, item: U) {
+        self.parts.push((path, Part { _artifact: PhantomData, item }));
+    }
- fn to_json_string(&self) -> String {
- let borrow = self.children.borrow();
- let mut subs: Vec<_> = borrow.values().collect();
- subs.sort_unstable_by(|a, b| a.elem.cmp(&b.elem));
- let mut files = self
- .elems
- .borrow()
- .iter()
- .map(|s| format!("\"{}\"", s.to_str().expect("invalid osstring conversion")))
-                .collect::<Vec<_>>();
-            files.sort_unstable();
-            let subs = subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(",");
- let dirs = if subs.is_empty() && files.is_empty() {
- String::new()
- } else {
- format!(",[{subs}]")
- };
- let files = files.join(",");
- let files = if files.is_empty() { String::new() } else { format!(",[{files}]") };
- format!(
- "[\"{name}\"{dirs}{files}]",
- name = self.elem.to_str().expect("invalid osstring conversion"),
- dirs = dirs,
- files = files
- )
- }
+ /// Singleton part, one file
+ fn with(path: PathBuf, part: U) -> Self {
+ let mut ret = Self::default();
+ ret.push(path, part);
+ ret
+ }
+}
-        fn add_path(self: &Rc<Self>, path: &Path) {
- let mut h = Rc::clone(&self);
- let mut elems = path
- .components()
- .filter_map(|s| match s {
- Component::Normal(s) => Some(s.to_owned()),
- Component::ParentDir => Some(OsString::from("..")),
- _ => None,
- })
- .peekable();
- loop {
- let cur_elem = elems.next().expect("empty file path");
- if cur_elem == ".." {
- if let Some(parent) = h.parent.upgrade() {
- h = parent;
- }
- continue;
- }
- if elems.peek().is_none() {
- h.elems.borrow_mut().insert(cur_elem);
- break;
- } else {
- let entry = Rc::clone(
- h.children
- .borrow_mut()
- .entry(cur_elem.clone())
- .or_insert_with(|| Rc::new(Self::with_parent(cur_elem, &h))),
- );
- h = entry;
- }
- }
+/// A piece of one of the shared artifacts for documentation (search index, sources, alias list, etc.)
+///
+/// Merged at a user specified time and written to the `doc/` directory
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(transparent)]
+struct Part<T, U> {
+    #[serde(skip)]
+    _artifact: PhantomData<T>,
+    item: U,
+}
+
+impl<T, U: fmt::Display> fmt::Display for Part<T, U> {
+ /// Writes serialized JSON
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.item)
+ }
+}
+
+/// Wrapper trait for `Part`
+trait CciPart: Sized + fmt::Display + DeserializeOwned + 'static {
+ /// Identifies the file format of the cross-crate information
+ type FileFormat: sorted_template::FileFormat;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self>;
+}
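// Illustrative sketch (not part of this diff): a new kind of cross-crate artifact would
// plug in exactly like the parts below, by pairing a marker type with an item payload and
// pointing `from_crate_info` at a matching `CrateInfo` field. `Redirects` and
// `crate_info.redirects` are hypothetical names used only for this example.
//
//     #[derive(Serialize, Deserialize, Clone, Default, Debug)]
//     struct Redirects;
//     type RedirectsPart = Part<Redirects, OrderedJson>;
//     impl CciPart for RedirectsPart {
//         type FileFormat = sorted_template::Js;
//         fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
//             &crate_info.redirects // hypothetical field
//         }
//     }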
+
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct SearchIndex;
+type SearchIndexPart = Part<SearchIndex, EscapedJson>;
+impl CciPart for SearchIndexPart {
+    type FileFormat = sorted_template::Js;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.search_index_js
+ }
+}
+
+impl SearchIndexPart {
+    fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
+ SortedTemplate::from_before_after(
+ r"var searchIndex = new Map(JSON.parse('[",
+ r"]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);",
+ )
+ }
+
+ fn get(
+ search_index: OrderedJson,
+ resource_suffix: &str,
+    ) -> Result<PartsAndLocations<Self>, Error> {
+ let path = suffix_path("search-index.js", resource_suffix);
+ let search_index = EscapedJson::from(search_index);
+ Ok(PartsAndLocations::with(path, search_index))
+ }
+}
+
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct AllCrates;
+type AllCratesPart = Part<AllCrates, OrderedJson>;
+impl CciPart for AllCratesPart {
+    type FileFormat = sorted_template::Js;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.all_crates
+ }
+}
+
+impl AllCratesPart {
+    fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
+ SortedTemplate::from_before_after("window.ALL_CRATES = [", "];")
+ }
+
+    fn get(crate_name_json: OrderedJson) -> Result<PartsAndLocations<Self>, Error> {
+ // external hack_get_external_crate_names not needed here, because
+ // there's no way that we write the search index but not crates.js
+ let path = PathBuf::from("crates.js");
+ Ok(PartsAndLocations::with(path, crate_name_json))
+ }
+}
+
+/// Reads `crates.js`, which seems like the best
+/// place to obtain the list of externally documented crates if the index
+/// page was disabled when documenting the deps.
+///
+/// This is to match the current behavior of rustdoc, which allows you to get all crates
+/// on the index page, even if --enable-index-page is only passed to the last crate.
+fn hack_get_external_crate_names(doc_root: &Path) -> Result<Vec<String>, Error> {
+ let path = doc_root.join("crates.js");
+ let Ok(content) = fs::read_to_string(&path) else {
+ // they didn't emit invocation specific, so we just say there were no crates
+ return Ok(Vec::default());
+ };
+ // this is only run once so it's fine not to cache it
+ // !dot_matches_new_line: all crates on same line. greedy: match last bracket
+ let regex = Regex::new(r"\[.*\]").unwrap();
+ let Some(content) = regex.find(&content) else {
+ return Err(Error::new("could not find crates list in crates.js", path));
+ };
+    let content: Vec<String> = try_err!(serde_json::from_str(content.as_str()), &path);
+ Ok(content)
+}
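// Illustrative sketch (not part of this diff): for a `crates.js` written by the machinery
// above, e.g.
//
//     window.ALL_CRATES = ["foo","bar"];
//
// the greedy `\[.*\]` match is `["foo","bar"]`, which deserializes into
// `vec!["foo".to_string(), "bar".to_string()]`; a missing file yields an empty list instead.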
+
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct CratesIndex;
+type CratesIndexPart = Part<CratesIndex, String>;
+impl CciPart for CratesIndexPart {
+    type FileFormat = sorted_template::Html;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.crates_index
+ }
+}
+
+impl CratesIndexPart {
+    fn blank(cx: &Context<'_>) -> SortedTemplate<<Self as CciPart>::FileFormat> {
+ let page = layout::Page {
+ title: "Index of crates",
+ css_class: "mod sys",
+ root_path: "./",
+ static_root_path: cx.shared.static_root_path.as_deref(),
+ description: "List of crates",
+ resource_suffix: &cx.shared.resource_suffix,
+ rust_logo: true,
+ };
+ let layout = &cx.shared.layout;
+ let style_files = &cx.shared.style_files;
+ const DELIMITER: &str = "\u{FFFC}"; // users are being naughty if they have this
+        let content =
+            format!("<h1>List of all crates</h1><ul class=\"all-items\">{DELIMITER}</ul>");
+ let template = layout::render(layout, &page, "", content, &style_files);
+ match SortedTemplate::from_template(&template, DELIMITER) {
+ Ok(template) => template,
+ Err(e) => panic!(
+ "Object Replacement Character (U+FFFC) should not appear in the --index-page: {e}"
+ ),
}
}
- if cx.include_sources {
- let hierarchy = Rc::new(Hierarchy::default());
- for source in cx
- .shared
- .local_sources
- .iter()
- .filter_map(|p| p.0.strip_prefix(&cx.shared.src_root).ok())
- {
- hierarchy.add_path(source);
+    /// Might return parts that duplicate ones in a preexisting `index.html`
+    fn get(crate_name: &str, external_crates: &[String]) -> Result<PartsAndLocations<Self>, Error> {
+ let mut ret = PartsAndLocations::default();
+ let path = PathBuf::from("index.html");
+ for crate_name in external_crates.iter().map(|s| s.as_str()).chain(once(crate_name)) {
+ let part = format!(
+                "<li><a href=\"{trailing_slash}index.html\">{crate_name}</a></li>",
+ trailing_slash = ensure_trailing_slash(crate_name),
+ );
+ ret.push(path.clone(), part);
}
- let hierarchy = Rc::try_unwrap(hierarchy).unwrap();
- let dst = cx.dst.join(&format!("src-files{}.js", cx.shared.resource_suffix));
- let make_sources = || {
- let (mut all_sources, _krates) =
- try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
- all_sources.push(format!(
- r#"["{}",{}]"#,
- &krate.name(cx.tcx()),
- hierarchy
- .to_json_string()
- // All these `replace` calls are because we have to go through JS string for JSON content.
- .replace('\\', r"\\")
- .replace('\'', r"\'")
- // We need to escape double quotes for the JSON.
- .replace("\\\"", "\\\\\"")
- ));
- all_sources.sort();
- // This needs to be `var`, not `const`.
- // This variable needs declared in the current global scope so that if
- // src-script.js loads first, it can pick it up.
- let mut v = String::from("var srcIndex = new Map(JSON.parse('[\\\n");
- v.push_str(&all_sources.join(",\\\n"));
- v.push_str("\\\n]'));\ncreateSrcSidebar();\n");
- Ok(v.into_bytes())
- };
- write_invocation_specific("src-files.js", &make_sources)?;
+ Ok(ret)
}
+}
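// Illustrative sketch (not part of this diff): the blank index page above is rendered once
// with U+FFFC as a placeholder, and `SortedTemplate::from_template` splits the rendered page
// at that character so later `<li>` parts can be appended between the two halves in sorted
// order. Roughly (literal strings shortened for the example):
//
//     let rendered = "<ul class=\"all-items\">\u{FFFC}</ul>";
//     let mut template =
//         SortedTemplate::<sorted_template::Html>::from_template(rendered, "\u{FFFC}")?;
//     template.append("<li><a href=\"foo/index.html\">foo</a></li>".to_string());
//     // `template.to_string()` now has the `<li>` inserted where the placeholder was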
- // Update the search index and crate list.
- let dst = cx.dst.join(&format!("search-index{}.js", cx.shared.resource_suffix));
- let (mut all_indexes, mut krates) =
- try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
- all_indexes.push(search_index.index);
- krates.push(krate.name(cx.tcx()).to_string());
- krates.sort();
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct Sources;
+type SourcesPart = Part<Sources, EscapedJson>;
+impl CciPart for SourcesPart {
+    type FileFormat = sorted_template::Js;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.src_files_js
+ }
+}
- // Sort the indexes by crate so the file will be generated identically even
- // with rustdoc running in parallel.
- all_indexes.sort();
- write_invocation_specific("search-index.js", &|| {
+impl SourcesPart {
+    fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
// This needs to be `var`, not `const`.
        // This variable needs to be declared in the current global scope so that if
- // search.js loads first, it can pick it up.
- let mut v = String::from("var searchIndex = new Map(JSON.parse('[\\\n");
- v.push_str(&all_indexes.join(",\\\n"));
- v.push_str(
- r#"\
-]'));
-if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
-else if (window.initSearch) window.initSearch(searchIndex);
-"#,
- );
- Ok(v.into_bytes())
- })?;
-
- let search_desc_dir = cx.dst.join(format!("search.desc/{krate}", krate = krate.name(cx.tcx())));
- if Path::new(&search_desc_dir).exists() {
- try_err!(std::fs::remove_dir_all(&search_desc_dir), &search_desc_dir);
- }
- try_err!(std::fs::create_dir_all(&search_desc_dir), &search_desc_dir);
- let kratename = krate.name(cx.tcx()).to_string();
- for (i, (_, data)) in search_index.desc.into_iter().enumerate() {
- let output_filename = static_files::suffix_path(
- &format!("{kratename}-desc-{i}-.js"),
- &cx.shared.resource_suffix,
- );
- let path = search_desc_dir.join(output_filename);
- try_err!(
- std::fs::write(
- &path,
- &format!(
- r##"searchState.loadedDescShard({kratename}, {i}, {data})"##,
- kratename = serde_json::to_string(&kratename).unwrap(),
- data = serde_json::to_string(&data).unwrap(),
- )
- .into_bytes()
- ),
- &path
- );
+ // src-script.js loads first, it can pick it up.
+ SortedTemplate::from_before_after(
+ r"var srcIndex = new Map(JSON.parse('[",
+ r"]'));
+createSrcSidebar();",
+ )
}
- write_invocation_specific("crates.js", &|| {
- let krates = krates.iter().map(|k| format!("\"{k}\"")).join(",");
- Ok(format!("window.ALL_CRATES = [{krates}];").into_bytes())
- })?;
+    fn get(cx: &Context<'_>, crate_name: &OrderedJson) -> Result<PartsAndLocations<Self>, Error> {
+ let hierarchy = Rc::new(Hierarchy::default());
+ cx.shared
+ .local_sources
+ .iter()
+ .filter_map(|p| p.0.strip_prefix(&cx.shared.src_root).ok())
+ .for_each(|source| hierarchy.add_path(source));
+ let path = suffix_path("src-files.js", &cx.shared.resource_suffix);
+ let hierarchy = hierarchy.to_json_string();
+ let part = OrderedJson::array_unsorted([crate_name, &hierarchy]);
+ let part = EscapedJson::from(part);
+ Ok(PartsAndLocations::with(path, part))
+ }
+}
- if options.enable_index_page {
- if let Some(index_page) = options.index_page.clone() {
- let mut md_opts = options.clone();
- md_opts.output = cx.dst.clone();
- md_opts.external_html = (*cx.shared).layout.external_html.clone();
+/// Source files directory tree
+#[derive(Debug, Default)]
+struct Hierarchy {
+    parent: Weak<Self>,
+    elem: OsString,
+    children: RefCell<FxHashMap<OsString, Rc<Self>>>,
+    elems: RefCell<FxHashSet<OsString>>,
+}
- crate::markdown::render(&index_page, md_opts, cx.shared.edition())
- .map_err(|e| Error::new(e, &index_page))?;
- } else {
- let shared = Rc::clone(&cx.shared);
- let dst = cx.dst.join("index.html");
- let page = layout::Page {
- title: "Index of crates",
- css_class: "mod sys",
- root_path: "./",
- static_root_path: shared.static_root_path.as_deref(),
- description: "List of crates",
- resource_suffix: &shared.resource_suffix,
- rust_logo: true,
- };
+impl Hierarchy {
+    fn with_parent(elem: OsString, parent: &Rc<Self>) -> Self {
+ Self { elem, parent: Rc::downgrade(parent), ..Self::default() }
+ }
-            let content = format!(
-                "<h1>List of all crates</h1><ul class=\"all-items\">{}</ul>",
-                krates.iter().format_with("", |k, f| {
-                    f(&format_args!(
-                        "<li><a href=\"{trailing_slash}index.html\">{k}</a></li>",
-                        trailing_slash = ensure_trailing_slash(k),
-                    ))
-                })
-            );
- let v = layout::render(&shared.layout, &page, "", content, &shared.style_files);
- shared.fs.write(dst, v)?;
+ fn to_json_string(&self) -> OrderedJson {
+ let subs = self.children.borrow();
+ let files = self.elems.borrow();
+ let name = OrderedJson::serialize(self.elem.to_str().expect("invalid osstring conversion"))
+ .unwrap();
+ let mut out = Vec::from([name]);
+ if !subs.is_empty() || !files.is_empty() {
+ let subs = subs.iter().map(|(_, s)| s.to_json_string());
+ out.push(OrderedJson::array_sorted(subs));
+ }
+ if !files.is_empty() {
+ let files = files
+ .iter()
+ .map(|s| OrderedJson::serialize(s.to_str().expect("invalid osstring")).unwrap());
+ out.push(OrderedJson::array_sorted(files));
}
+ OrderedJson::array_unsorted(out)
}
- let cloned_shared = Rc::clone(&cx.shared);
- let cache = &cloned_shared.cache;
-
- // Collect the list of aliased types and their aliases.
- //
- //
- // The clean AST has type aliases that point at their types, but
- // this visitor works to reverse that: `aliased_types` is a map
- // from target to the aliases that reference it, and each one
- // will generate one file.
- struct TypeImplCollector<'cx, 'cache> {
- // Map from DefId-of-aliased-type to its data.
-        aliased_types: IndexMap<DefId, AliasedType<'cache>>,
-        visited_aliases: FxHashSet<DefId>,
- cache: &'cache Cache,
- cx: &'cache mut Context<'cx>,
- }
- // Data for an aliased type.
- //
- // In the final file, the format will be roughly:
- //
- // ```json
- // // type.impl/CRATE/TYPENAME.js
- // JSONP(
- // "CRATE": [
- // ["IMPL1 HTML", "ALIAS1", "ALIAS2", ...],
- // ["IMPL2 HTML", "ALIAS3", "ALIAS4", ...],
- // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ struct AliasedType
- // ...
- // ]
- // )
- // ```
- struct AliasedType<'cache> {
- // This is used to generate the actual filename of this aliased type.
- target_fqp: &'cache [Symbol],
- target_type: ItemType,
- // This is the data stored inside the file.
- // ItemId is used to deduplicate impls.
-        impl_: IndexMap<ItemId, AliasedTypeImpl<'cache>>,
- }
- // The `impl_` contains data that's used to figure out if an alias will work,
- // and to generate the HTML at the end.
- //
- // The `type_aliases` list is built up with each type alias that matches.
- struct AliasedTypeImpl<'cache> {
- impl_: &'cache Impl,
- type_aliases: Vec<(&'cache [Symbol], Item)>,
- }
- impl<'cx, 'cache> DocVisitor for TypeImplCollector<'cx, 'cache> {
- fn visit_item(&mut self, it: &Item) {
- self.visit_item_recur(it);
- let cache = self.cache;
- let ItemKind::TypeAliasItem(ref t) = *it.kind else { return };
- let Some(self_did) = it.item_id.as_def_id() else { return };
- if !self.visited_aliases.insert(self_did) {
- return;
- }
- let Some(target_did) = t.type_.def_id(cache) else { return };
- let get_extern = { || cache.external_paths.get(&target_did) };
- let Some(&(ref target_fqp, target_type)) =
- cache.paths.get(&target_did).or_else(get_extern)
- else {
- return;
- };
- let aliased_type = self.aliased_types.entry(target_did).or_insert_with(|| {
- let impl_ = cache
- .impls
- .get(&target_did)
- .map(|v| &v[..])
- .unwrap_or_default()
- .iter()
- .map(|impl_| {
- (
- impl_.impl_item.item_id,
- AliasedTypeImpl { impl_, type_aliases: Vec::new() },
- )
- })
- .collect();
- AliasedType { target_fqp: &target_fqp[..], target_type, impl_ }
- });
- let get_local = { || cache.paths.get(&self_did).map(|(p, _)| p) };
- let Some(self_fqp) = cache.exact_paths.get(&self_did).or_else(get_local) else {
- return;
- };
- let aliased_ty = self.cx.tcx().type_of(self_did).skip_binder();
- // Exclude impls that are directly on this type. They're already in the HTML.
- // Some inlining scenarios can cause there to be two versions of the same
- // impl: one on the type alias and one on the underlying target type.
-            let mut seen_impls: FxHashSet<ItemId> = cache
- .impls
- .get(&self_did)
- .map(|s| &s[..])
- .unwrap_or_default()
- .iter()
- .map(|i| i.impl_item.item_id)
- .collect();
- for (impl_item_id, aliased_type_impl) in &mut aliased_type.impl_ {
- // Only include this impl if it actually unifies with this alias.
- // Synthetic impls are not included; those are also included in the HTML.
- //
- // FIXME(lazy_type_alias): Once the feature is complete or stable, rewrite this
- // to use type unification.
- // Be aware of `tests/rustdoc/type-alias/deeply-nested-112515.rs` which might regress.
- let Some(impl_did) = impl_item_id.as_def_id() else { continue };
- let for_ty = self.cx.tcx().type_of(impl_did).skip_binder();
- let reject_cx = DeepRejectCtxt::new(self.cx.tcx(), TreatParams::AsCandidateKey);
- if !reject_cx.types_may_unify(aliased_ty, for_ty) {
- continue;
- }
- // Avoid duplicates
- if !seen_impls.insert(*impl_item_id) {
- continue;
+    fn add_path(self: &Rc<Self>, path: &Path) {
+ let mut h = Rc::clone(&self);
+ let mut elems = path
+ .components()
+ .filter_map(|s| match s {
+ Component::Normal(s) => Some(s.to_owned()),
+ Component::ParentDir => Some(OsString::from("..")),
+ _ => None,
+ })
+ .peekable();
+ loop {
+ let cur_elem = elems.next().expect("empty file path");
+ if cur_elem == ".." {
+ if let Some(parent) = h.parent.upgrade() {
+ h = parent;
}
- // This impl was not found in the set of rejected impls
- aliased_type_impl.type_aliases.push((&self_fqp[..], it.clone()));
+ continue;
}
- }
- }
- let mut type_impl_collector = TypeImplCollector {
- aliased_types: IndexMap::default(),
- visited_aliases: FxHashSet::default(),
- cache,
- cx,
- };
- DocVisitor::visit_crate(&mut type_impl_collector, &krate);
- // Final serialized form of the alias impl
- struct AliasSerializableImpl {
- text: String,
-        trait_: Option<String>,
-        aliases: Vec<String>,
- }
- impl Serialize for AliasSerializableImpl {
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: Serializer,
- {
- let mut seq = serializer.serialize_seq(None)?;
- seq.serialize_element(&self.text)?;
- if let Some(trait_) = &self.trait_ {
- seq.serialize_element(trait_)?;
+ if elems.peek().is_none() {
+ h.elems.borrow_mut().insert(cur_elem);
+ break;
} else {
- seq.serialize_element(&0)?;
- }
- for type_ in &self.aliases {
- seq.serialize_element(type_)?;
+ let entry = Rc::clone(
+ h.children
+ .borrow_mut()
+ .entry(cur_elem.clone())
+ .or_insert_with(|| Rc::new(Self::with_parent(cur_elem, &h))),
+ );
+ h = entry;
}
- seq.end()
}
}
- let cx = type_impl_collector.cx;
- let dst = cx.dst.join("type.impl");
- let aliased_types = type_impl_collector.aliased_types;
- for aliased_type in aliased_types.values() {
- let impls = aliased_type
- .impl_
- .values()
- .flat_map(|AliasedTypeImpl { impl_, type_aliases }| {
- let mut ret = Vec::new();
- let trait_ = impl_
- .inner_impl()
- .trait_
- .as_ref()
- .map(|trait_| format!("{:#}", trait_.print(cx)));
- // render_impl will filter out "impossible-to-call" methods
- // to make that functionality work here, it needs to be called with
- // each type alias, and if it gives a different result, split the impl
- for &(type_alias_fqp, ref type_alias_item) in type_aliases {
- let mut buf = Buffer::html();
- cx.id_map = Default::default();
- cx.deref_id_map = Default::default();
- let target_did = impl_
+}
+
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct TypeAlias;
+type TypeAliasPart = Part<TypeAlias, OrderedJson>;
+impl CciPart for TypeAliasPart {
+    type FileFormat = sorted_template::Js;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.type_impl
+ }
+}
+
+impl TypeAliasPart {
+    fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
+ SortedTemplate::from_before_after(
+ r"(function() {
+ var type_impls = Object.fromEntries([",
+ r"]);
+ if (window.register_type_impls) {
+ window.register_type_impls(type_impls);
+ } else {
+ window.pending_type_impls = type_impls;
+ }
+})()",
+ )
+ }
+
+ fn get(
+ cx: &mut Context<'_>,
+ krate: &Crate,
+ crate_name_json: &OrderedJson,
+    ) -> Result<PartsAndLocations<Self>, Error> {
+ let cache = &Rc::clone(&cx.shared).cache;
+ let mut path_parts = PartsAndLocations::default();
+
+ let mut type_impl_collector = TypeImplCollector {
+ aliased_types: IndexMap::default(),
+ visited_aliases: FxHashSet::default(),
+ cache,
+ cx,
+ };
+ DocVisitor::visit_crate(&mut type_impl_collector, &krate);
+ let cx = type_impl_collector.cx;
+ let aliased_types = type_impl_collector.aliased_types;
+ for aliased_type in aliased_types.values() {
+ let impls = aliased_type
+ .impl_
+ .values()
+ .flat_map(|AliasedTypeImpl { impl_, type_aliases }| {
+ let mut ret = Vec::new();
+ let trait_ = impl_
.inner_impl()
.trait_
.as_ref()
- .map(|trait_| trait_.def_id())
- .or_else(|| impl_.inner_impl().for_.def_id(cache));
- let provided_methods;
- let assoc_link = if let Some(target_did) = target_did {
- provided_methods = impl_.inner_impl().provided_trait_methods(cx.tcx());
- AssocItemLink::GotoSource(ItemId::DefId(target_did), &provided_methods)
- } else {
- AssocItemLink::Anchor(None)
- };
- super::render_impl(
- &mut buf,
- cx,
- *impl_,
- &type_alias_item,
- assoc_link,
- RenderMode::Normal,
- None,
- &[],
- ImplRenderingParameters {
- show_def_docs: true,
- show_default_items: true,
- show_non_assoc_items: true,
- toggle_open_by_default: true,
- },
- );
- let text = buf.into_inner();
- let type_alias_fqp = (*type_alias_fqp).iter().join("::");
- if Some(&text) == ret.last().map(|s: &AliasSerializableImpl| &s.text) {
- ret.last_mut()
- .expect("already established that ret.last() is Some()")
- .aliases
- .push(type_alias_fqp);
+ .map(|trait_| format!("{:#}", trait_.print(cx)));
+ // render_impl will filter out "impossible-to-call" methods
+ // to make that functionality work here, it needs to be called with
+ // each type alias, and if it gives a different result, split the impl
+ for &(type_alias_fqp, ref type_alias_item) in type_aliases {
+ let mut buf = Buffer::html();
+ cx.id_map = Default::default();
+ cx.deref_id_map = Default::default();
+ let target_did = impl_
+ .inner_impl()
+ .trait_
+ .as_ref()
+ .map(|trait_| trait_.def_id())
+ .or_else(|| impl_.inner_impl().for_.def_id(cache));
+ let provided_methods;
+ let assoc_link = if let Some(target_did) = target_did {
+ provided_methods = impl_.inner_impl().provided_trait_methods(cx.tcx());
+ AssocItemLink::GotoSource(ItemId::DefId(target_did), &provided_methods)
+ } else {
+ AssocItemLink::Anchor(None)
+ };
+ super::render_impl(
+ &mut buf,
+ cx,
+ *impl_,
+ &type_alias_item,
+ assoc_link,
+ RenderMode::Normal,
+ None,
+ &[],
+ ImplRenderingParameters {
+ show_def_docs: true,
+ show_default_items: true,
+ show_non_assoc_items: true,
+ toggle_open_by_default: true,
+ },
+ );
+ let text = buf.into_inner();
+ let type_alias_fqp = (*type_alias_fqp).iter().join("::");
+ if Some(&text) == ret.last().map(|s: &AliasSerializableImpl| &s.text) {
+ ret.last_mut()
+ .expect("already established that ret.last() is Some()")
+ .aliases
+ .push(type_alias_fqp);
+ } else {
+ ret.push(AliasSerializableImpl {
+ text,
+ trait_: trait_.clone(),
+ aliases: vec![type_alias_fqp],
+ })
+ }
+ }
+ ret
+ })
+            .collect::<Vec<_>>();
+
+ let mut path = PathBuf::from("type.impl");
+ for component in &aliased_type.target_fqp[..aliased_type.target_fqp.len() - 1] {
+ path.push(component.as_str());
+ }
+ let aliased_item_type = aliased_type.target_type;
+ path.push(&format!(
+ "{aliased_item_type}.{}.js",
+ aliased_type.target_fqp[aliased_type.target_fqp.len() - 1]
+ ));
+
+ let part = OrderedJson::array_sorted(
+            impls.iter().map(OrderedJson::serialize).collect::<Result<Vec<_>, _>>().unwrap(),
+ );
+ path_parts.push(path, OrderedJson::array_unsorted([crate_name_json, &part]));
+ }
+ Ok(path_parts)
+ }
+}
+
+#[derive(Serialize, Deserialize, Clone, Default, Debug)]
+struct TraitAlias;
+type TraitAliasPart = Part<TraitAlias, OrderedJson>;
+impl CciPart for TraitAliasPart {
+    type FileFormat = sorted_template::Js;
+    fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
+ &crate_info.trait_impl
+ }
+}
+
+impl TraitAliasPart {
+    fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
+ SortedTemplate::from_before_after(
+ r"(function() {
+ var implementors = Object.fromEntries([",
+ r"]);
+ if (window.register_implementors) {
+ window.register_implementors(implementors);
+ } else {
+ window.pending_implementors = implementors;
+ }
+})()",
+ )
+ }
+
+ fn get(
+ cx: &mut Context<'_>,
+ crate_name_json: &OrderedJson,
+    ) -> Result<PartsAndLocations<Self>, Error> {
+ let cache = &cx.shared.cache;
+ let mut path_parts = PartsAndLocations::default();
+ // Update the list of all implementors for traits
+ //
+ for (&did, imps) in &cache.implementors {
+ // Private modules can leak through to this phase of rustdoc, which
+ // could contain implementations for otherwise private types. In some
+ // rare cases we could find an implementation for an item which wasn't
+ // indexed, so we just skip this step in that case.
+ //
+ // FIXME: this is a vague explanation for why this can't be a `get`, in
+ // theory it should be...
+ let (remote_path, remote_item_type) = match cache.exact_paths.get(&did) {
+ Some(p) => match cache.paths.get(&did).or_else(|| cache.external_paths.get(&did)) {
+ Some((_, t)) => (p, t),
+ None => continue,
+ },
+ None => match cache.external_paths.get(&did) {
+ Some((p, t)) => (p, t),
+ None => continue,
+ },
+ };
+
+ let implementors = imps
+ .iter()
+ .filter_map(|imp| {
+ // If the trait and implementation are in the same crate, then
+ // there's no need to emit information about it (there's inlining
+ // going on). If they're in different crates then the crate defining
+ // the trait will be interested in our implementation.
+ //
+ // If the implementation is from another crate then that crate
+ // should add it.
+ if imp.impl_item.item_id.krate() == did.krate
+ || !imp.impl_item.item_id.is_local()
+ {
+ None
} else {
- ret.push(AliasSerializableImpl {
- text,
- trait_: trait_.clone(),
- aliases: vec![type_alias_fqp],
+ Some(Implementor {
+ text: imp.inner_impl().print(false, cx).to_string(),
+ synthetic: imp.inner_impl().kind.is_auto(),
+ types: collect_paths_for_type(imp.inner_impl().for_.clone(), cache),
})
}
- }
- ret
- })
-            .collect::<Vec<_>>();
+            })
+            .collect::<Vec<_>>();
- // FIXME: this fixes only rustdoc part of instability of trait impls
- // for js files, see #120371
- // Manually collect to string and sort to make list not depend on order
- let mut impls = impls
- .iter()
- .map(|i| serde_json::to_string(i).expect("failed serde conversion"))
-            .collect::<Vec<_>>();
- impls.sort();
+ // Only create a js file if we have impls to add to it. If the trait is
+ // documented locally though we always create the file to avoid dead
+ // links.
+ if implementors.is_empty() && !cache.paths.contains_key(&did) {
+ continue;
+ }
- let impls = format!(r#""{}":[{}]"#, krate.name(cx.tcx()), impls.join(","));
+ let mut path = PathBuf::from("trait.impl");
+ for component in &remote_path[..remote_path.len() - 1] {
+ path.push(component.as_str());
+ }
+ path.push(&format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
- let mut mydst = dst.clone();
- for part in &aliased_type.target_fqp[..aliased_type.target_fqp.len() - 1] {
- mydst.push(part.to_string());
+ let part = OrderedJson::array_sorted(
+ implementors
+ .iter()
+ .map(OrderedJson::serialize)
+                .collect::<Result<Vec<_>, _>>()
+ .unwrap(),
+ );
+ path_parts.push(path, OrderedJson::array_unsorted([crate_name_json, &part]));
}
- cx.shared.ensure_dir(&mydst)?;
- let aliased_item_type = aliased_type.target_type;
- mydst.push(&format!(
- "{aliased_item_type}.{}.js",
- aliased_type.target_fqp[aliased_type.target_fqp.len() - 1]
- ));
-
- let (mut all_impls, _) = try_err!(collect(&mydst, krate.name(cx.tcx()).as_str()), &mydst);
- all_impls.push(impls);
- // Sort the implementors by crate so the file will be generated
- // identically even with rustdoc running in parallel.
- all_impls.sort();
-
- let mut v = String::from("(function() {var type_impls = {\n");
- v.push_str(&all_impls.join(",\n"));
- v.push_str("\n};");
- v.push_str(
- "if (window.register_type_impls) {\
- window.register_type_impls(type_impls);\
- } else {\
- window.pending_type_impls = type_impls;\
- }",
- );
- v.push_str("})()");
- cx.shared.fs.write(mydst, v)?;
- }
-
- // Update the list of all implementors for traits
- //
- let dst = cx.dst.join("trait.impl");
- for (&did, imps) in &cache.implementors {
- // Private modules can leak through to this phase of rustdoc, which
- // could contain implementations for otherwise private types. In some
- // rare cases we could find an implementation for an item which wasn't
- // indexed, so we just skip this step in that case.
- //
- // FIXME: this is a vague explanation for why this can't be a `get`, in
- // theory it should be...
- let (remote_path, remote_item_type) = match cache.exact_paths.get(&did) {
- Some(p) => match cache.paths.get(&did).or_else(|| cache.external_paths.get(&did)) {
- Some((_, t)) => (p, t),
- None => continue,
- },
- None => match cache.external_paths.get(&did) {
- Some((p, t)) => (p, t),
- None => continue,
- },
- };
+ Ok(path_parts)
+ }
+}
-    struct Implementor {
-        text: String,
-        synthetic: bool,
-        types: Vec<String>,
+struct Implementor {
+    text: String,
+    synthetic: bool,
+    types: Vec<String>,
+}
+
+impl Serialize for Implementor {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut seq = serializer.serialize_seq(None)?;
+ seq.serialize_element(&self.text)?;
+ if self.synthetic {
+ seq.serialize_element(&1)?;
+ seq.serialize_element(&self.types)?;
}
+ seq.end()
+ }
+}
- impl Serialize for Implementor {
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where
- S: Serializer,
- {
- let mut seq = serializer.serialize_seq(None)?;
- seq.serialize_element(&self.text)?;
- if self.synthetic {
- seq.serialize_element(&1)?;
- seq.serialize_element(&self.types)?;
- }
- seq.end()
+/// Collect the list of aliased types and their aliases.
+///
+///
+/// The clean AST has type aliases that point at their types, but
+/// this visitor works to reverse that: `aliased_types` is a map
+/// from target to the aliases that reference it, and each one
+/// will generate one file.
+struct TypeImplCollector<'cx, 'cache> {
+ /// Map from DefId-of-aliased-type to its data.
+    aliased_types: IndexMap<DefId, AliasedType<'cache>>,
+    visited_aliases: FxHashSet<DefId>,
+ cache: &'cache Cache,
+ cx: &'cache mut Context<'cx>,
+}
+
+/// Data for an aliased type.
+///
+/// In the final file, the format will be roughly:
+///
+/// ```json
+/// // type.impl/CRATE/TYPENAME.js
+/// JSONP(
+/// "CRATE": [
+/// ["IMPL1 HTML", "ALIAS1", "ALIAS2", ...],
+/// ["IMPL2 HTML", "ALIAS3", "ALIAS4", ...],
+/// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ struct AliasedType
+/// ...
+/// ]
+/// )
+/// ```
+struct AliasedType<'cache> {
+ /// This is used to generate the actual filename of this aliased type.
+ target_fqp: &'cache [Symbol],
+ target_type: ItemType,
+ /// This is the data stored inside the file.
+ /// ItemId is used to deduplicate impls.
+    impl_: IndexMap<ItemId, AliasedTypeImpl<'cache>>,
+}
+
+/// The `impl_` contains data that's used to figure out if an alias will work,
+/// and to generate the HTML at the end.
+///
+/// The `type_aliases` list is built up with each type alias that matches.
+struct AliasedTypeImpl<'cache> {
+ impl_: &'cache Impl,
+ type_aliases: Vec<(&'cache [Symbol], Item)>,
+}
+
+impl<'cx, 'cache> DocVisitor for TypeImplCollector<'cx, 'cache> {
+ fn visit_item(&mut self, it: &Item) {
+ self.visit_item_recur(it);
+ let cache = self.cache;
+ let ItemKind::TypeAliasItem(ref t) = *it.kind else { return };
+ let Some(self_did) = it.item_id.as_def_id() else { return };
+ if !self.visited_aliases.insert(self_did) {
+ return;
+ }
+ let Some(target_did) = t.type_.def_id(cache) else { return };
+ let get_extern = { || cache.external_paths.get(&target_did) };
+ let Some(&(ref target_fqp, target_type)) = cache.paths.get(&target_did).or_else(get_extern)
+ else {
+ return;
+ };
+ let aliased_type = self.aliased_types.entry(target_did).or_insert_with(|| {
+ let impl_ = cache
+ .impls
+ .get(&target_did)
+ .map(|v| &v[..])
+ .unwrap_or_default()
+ .iter()
+ .map(|impl_| {
+ (impl_.impl_item.item_id, AliasedTypeImpl { impl_, type_aliases: Vec::new() })
+ })
+ .collect();
+ AliasedType { target_fqp: &target_fqp[..], target_type, impl_ }
+ });
+ let get_local = { || cache.paths.get(&self_did).map(|(p, _)| p) };
+ let Some(self_fqp) = cache.exact_paths.get(&self_did).or_else(get_local) else {
+ return;
+ };
+ let aliased_ty = self.cx.tcx().type_of(self_did).skip_binder();
+ // Exclude impls that are directly on this type. They're already in the HTML.
+ // Some inlining scenarios can cause there to be two versions of the same
+ // impl: one on the type alias and one on the underlying target type.
+        let mut seen_impls: FxHashSet<ItemId> = cache
+ .impls
+ .get(&self_did)
+ .map(|s| &s[..])
+ .unwrap_or_default()
+ .iter()
+ .map(|i| i.impl_item.item_id)
+ .collect();
+ for (impl_item_id, aliased_type_impl) in &mut aliased_type.impl_ {
+ // Only include this impl if it actually unifies with this alias.
+ // Synthetic impls are not included; those are also included in the HTML.
+ //
+ // FIXME(lazy_type_alias): Once the feature is complete or stable, rewrite this
+ // to use type unification.
+ // Be aware of `tests/rustdoc/type-alias/deeply-nested-112515.rs` which might regress.
+ let Some(impl_did) = impl_item_id.as_def_id() else { continue };
+ let for_ty = self.cx.tcx().type_of(impl_did).skip_binder();
+ let reject_cx = DeepRejectCtxt::new(self.cx.tcx(), TreatParams::AsCandidateKey);
+ if !reject_cx.types_may_unify(aliased_ty, for_ty) {
+ continue;
+ }
+ // Avoid duplicates
+ if !seen_impls.insert(*impl_item_id) {
+ continue;
}
+ // This impl was not found in the set of rejected impls
+ aliased_type_impl.type_aliases.push((&self_fqp[..], it.clone()));
}
+ }
+}
- let implementors = imps
- .iter()
- .filter_map(|imp| {
- // If the trait and implementation are in the same crate, then
- // there's no need to emit information about it (there's inlining
- // going on). If they're in different crates then the crate defining
- // the trait will be interested in our implementation.
- //
- // If the implementation is from another crate then that crate
- // should add it.
- if imp.impl_item.item_id.krate() == did.krate || !imp.impl_item.item_id.is_local() {
- None
- } else {
- Some(Implementor {
- text: imp.inner_impl().print(false, cx).to_string(),
- synthetic: imp.inner_impl().kind.is_auto(),
- types: collect_paths_for_type(imp.inner_impl().for_.clone(), cache),
- })
- }
- })
-            .collect::<Vec<_>>();
+/// Final serialized form of the alias impl
+struct AliasSerializableImpl {
+ text: String,
+    trait_: Option<String>,
+    aliases: Vec<String>,
+}
- // Only create a js file if we have impls to add to it. If the trait is
- // documented locally though we always create the file to avoid dead
- // links.
- if implementors.is_empty() && !cache.paths.contains_key(&did) {
- continue;
+impl Serialize for AliasSerializableImpl {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut seq = serializer.serialize_seq(None)?;
+ seq.serialize_element(&self.text)?;
+ if let Some(trait_) = &self.trait_ {
+ seq.serialize_element(trait_)?;
+ } else {
+ seq.serialize_element(&0)?;
+ }
+ for type_ in &self.aliases {
+ seq.serialize_element(type_)?;
}
+ seq.end()
+ }
+}
- // FIXME: this fixes only rustdoc part of instability of trait impls
- // for js files, see #120371
- // Manually collect to string and sort to make list not depend on order
- let mut implementors = implementors
- .iter()
- .map(|i| serde_json::to_string(i).expect("failed serde conversion"))
-            .collect::<Vec<_>>();
- implementors.sort();
+fn get_path_parts<T: CciPart>(
+    dst: &Path,
+    crates_info: &[CrateInfo],
+) -> FxHashMap<PathBuf, Vec<String>> {
+    let mut templates: FxHashMap<PathBuf, Vec<String>> = FxHashMap::default();
+ crates_info
+ .iter()
+ .map(|crate_info| T::from_crate_info(crate_info).parts.iter())
+ .flatten()
+ .for_each(|(path, part)| {
+ let path = dst.join(&path);
+ let part = part.to_string();
+ templates.entry(path).or_default().push(part);
+ });
+ templates
+}
- let implementors = format!(r#""{}":[{}]"#, krate.name(cx.tcx()), implementors.join(","));
+/// Create all parents
+fn create_parents(path: &Path) -> Result<(), Error> {
+ let parent = path.parent().expect("should not have an empty path here");
+ try_err!(fs::create_dir_all(parent), parent);
+ Ok(())
+}
+
+/// Returns a blank template unless we could find one to append to
+fn read_template_or_blank<F, T: FileFormat>(
+    mut make_blank: F,
+    path: &Path,
+) -> Result<SortedTemplate<T>, Error>
+where
+    F: FnMut() -> SortedTemplate<T>,
+{
+ match fs::read_to_string(&path) {
+ Ok(template) => Ok(try_err!(SortedTemplate::from_str(&template), &path)),
+ Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(make_blank()),
+ Err(e) => Err(Error::new(e, &path)),
+ }
+}
- let mut mydst = dst.clone();
- for part in &remote_path[..remote_path.len() - 1] {
- mydst.push(part.to_string());
+/// info from this crate and the --include-info-json'd crates
+fn write_rendered_cci<T: CciPart, F>(
+    mut make_blank: F,
+    dst: &Path,
+    crates_info: &[CrateInfo],
+) -> Result<(), Error>
+where
+    F: FnMut() -> SortedTemplate<T::FileFormat>,
+{
+ // write the merged cci to disk
+    for (path, parts) in get_path_parts::<T>(dst, crates_info) {
+ create_parents(&path)?;
+ // read previous rendered cci from storage, append to them
+ let mut template = read_template_or_blank::<_, T::FileFormat>(&mut make_blank, &path)?;
+ for part in parts {
+ template.append(part);
}
- cx.shared.ensure_dir(&mydst)?;
- mydst.push(&format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
-
- let (mut all_implementors, _) =
- try_err!(collect(&mydst, krate.name(cx.tcx()).as_str()), &mydst);
- all_implementors.push(implementors);
- // Sort the implementors by crate so the file will be generated
- // identically even with rustdoc running in parallel.
- all_implementors.sort();
-
- let mut v = String::from("(function() {var implementors = {\n");
- v.push_str(&all_implementors.join(",\n"));
- v.push_str("\n};");
- v.push_str(
- "if (window.register_implementors) {\
- window.register_implementors(implementors);\
- } else {\
- window.pending_implementors = implementors;\
- }",
- );
- v.push_str("})()");
- cx.shared.fs.write(mydst, v)?;
+ let file = try_err!(File::create(&path), &path);
+ let mut file = BufWriter::new(file);
+ try_err!(write!(file, "{template}"), &path);
+ try_err!(file.flush(), &path);
}
Ok(())
}
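// Illustrative sketch (not part of this diff): the render driver is expected to call
// `write_rendered_cci` once per part type when the shared files are (re)generated,
// with `doc_root` and `crates_info` coming from the surrounding code:
//
//     write_rendered_cci::<SearchIndexPart, _>(SearchIndexPart::blank, doc_root, &crates_info)?;
//     write_rendered_cci::<AllCratesPart, _>(AllCratesPart::blank, doc_root, &crates_info)?;
//     write_rendered_cci::<SourcesPart, _>(SourcesPart::blank, doc_root, &crates_info)?;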
+
+#[cfg(test)]
+mod tests;
diff --git a/src/librustdoc/html/render/write_shared/tests.rs b/src/librustdoc/html/render/write_shared/tests.rs
new file mode 100644
index 0000000000000..4d1874b7df5f9
--- /dev/null
+++ b/src/librustdoc/html/render/write_shared/tests.rs
@@ -0,0 +1,207 @@
+use crate::html::render::ordered_json::{EscapedJson, OrderedJson};
+use crate::html::render::sorted_template::{Html, SortedTemplate};
+use crate::html::render::write_shared::*;
+
+#[test]
+fn hack_external_crate_names() {
+ let path = tempfile::TempDir::new().unwrap();
+ let path = path.path();
+ let crates = hack_get_external_crate_names(&path).unwrap();
+ assert!(crates.is_empty());
+ fs::write(path.join("crates.js"), r#"window.ALL_CRATES = ["a","b","c"];"#).unwrap();
+ let crates = hack_get_external_crate_names(&path).unwrap();
+ assert_eq!(crates, ["a".to_string(), "b".to_string(), "c".to_string()]);
+}
+
+fn but_last_line(s: &str) -> &str {
+ let (before, _) = s.rsplit_once("\n").unwrap();
+ before
+}
+
+#[test]
+fn sources_template() {
+ let mut template = SourcesPart::blank();
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r"var srcIndex = new Map(JSON.parse('[]'));
+createSrcSidebar();"
+ );
+ template.append(EscapedJson::from(OrderedJson::serialize("u").unwrap()).to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"var srcIndex = new Map(JSON.parse('["u"]'));
+createSrcSidebar();"#
+ );
+ template.append(EscapedJson::from(OrderedJson::serialize("v").unwrap()).to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"var srcIndex = new Map(JSON.parse('["u","v"]'));
+createSrcSidebar();"#
+ );
+}
+
+#[test]
+fn sources_parts() {
+ let parts =
+ SearchIndexPart::get(OrderedJson::serialize(["foo", "bar"]).unwrap(), "suffix").unwrap();
+ assert_eq!(&parts.parts[0].0, Path::new("search-indexsuffix.js"));
+ assert_eq!(&parts.parts[0].1.to_string(), r#"["foo","bar"]"#);
+}
+
+#[test]
+fn all_crates_template() {
+ let mut template = AllCratesPart::blank();
+ assert_eq!(but_last_line(&template.to_string()), r"window.ALL_CRATES = [];");
+ template.append(EscapedJson::from(OrderedJson::serialize("b").unwrap()).to_string());
+ assert_eq!(but_last_line(&template.to_string()), r#"window.ALL_CRATES = ["b"];"#);
+ template.append(EscapedJson::from(OrderedJson::serialize("a").unwrap()).to_string());
+ assert_eq!(but_last_line(&template.to_string()), r#"window.ALL_CRATES = ["a","b"];"#);
+}
+
+#[test]
+fn all_crates_parts() {
+ let parts = AllCratesPart::get(OrderedJson::serialize("crate").unwrap()).unwrap();
+ assert_eq!(&parts.parts[0].0, Path::new("crates.js"));
+ assert_eq!(&parts.parts[0].1.to_string(), r#""crate""#);
+}
+
+#[test]
+fn search_index_template() {
+ let mut template = SearchIndexPart::blank();
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r"var searchIndex = new Map(JSON.parse('[]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);"
+ );
+ template.append(EscapedJson::from(OrderedJson::serialize([1, 2]).unwrap()).to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r"var searchIndex = new Map(JSON.parse('[[1,2]]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);"
+ );
+ template.append(EscapedJson::from(OrderedJson::serialize([4, 3]).unwrap()).to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r"var searchIndex = new Map(JSON.parse('[[1,2],[4,3]]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);"
+ );
+}
+
+#[test]
+fn crates_index_part() {
+ let external_crates = ["bar".to_string(), "baz".to_string()];
+ let mut parts = CratesIndexPart::get("foo", &external_crates).unwrap();
+ parts.parts.sort_by(|a, b| a.1.to_string().cmp(&b.1.to_string()));
+
+ assert_eq!(&parts.parts[0].0, Path::new("index.html"));
+    assert_eq!(&parts.parts[0].1.to_string(), r#"<li><a href="bar/index.html">bar</a></li>"#);
+
+ assert_eq!(&parts.parts[1].0, Path::new("index.html"));
+    assert_eq!(&parts.parts[1].1.to_string(), r#"<li><a href="baz/index.html">baz</a></li>"#);
+
+ assert_eq!(&parts.parts[2].0, Path::new("index.html"));
+    assert_eq!(&parts.parts[2].1.to_string(), r#"<li><a href="foo/index.html">foo</a></li>"#);
+}
+
+#[test]
+fn trait_alias_template() {
+ let mut template = TraitAliasPart::blank();
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var implementors = Object.fromEntries([]);
+ if (window.register_implementors) {
+ window.register_implementors(implementors);
+ } else {
+ window.pending_implementors = implementors;
+ }
+})()"#,
+ );
+ template.append(OrderedJson::serialize(["a"]).unwrap().to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var implementors = Object.fromEntries([["a"]]);
+ if (window.register_implementors) {
+ window.register_implementors(implementors);
+ } else {
+ window.pending_implementors = implementors;
+ }
+})()"#,
+ );
+ template.append(OrderedJson::serialize(["b"]).unwrap().to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var implementors = Object.fromEntries([["a"],["b"]]);
+ if (window.register_implementors) {
+ window.register_implementors(implementors);
+ } else {
+ window.pending_implementors = implementors;
+ }
+})()"#,
+ );
+}
+
+#[test]
+fn type_alias_template() {
+ let mut template = TypeAliasPart::blank();
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var type_impls = Object.fromEntries([]);
+ if (window.register_type_impls) {
+ window.register_type_impls(type_impls);
+ } else {
+ window.pending_type_impls = type_impls;
+ }
+})()"#,
+ );
+ template.append(OrderedJson::serialize(["a"]).unwrap().to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var type_impls = Object.fromEntries([["a"]]);
+ if (window.register_type_impls) {
+ window.register_type_impls(type_impls);
+ } else {
+ window.pending_type_impls = type_impls;
+ }
+})()"#,
+ );
+ template.append(OrderedJson::serialize(["b"]).unwrap().to_string());
+ assert_eq!(
+ but_last_line(&template.to_string()),
+ r#"(function() {
+ var type_impls = Object.fromEntries([["a"],["b"]]);
+ if (window.register_type_impls) {
+ window.register_type_impls(type_impls);
+ } else {
+ window.pending_type_impls = type_impls;
+ }
+})()"#,
+ );
+}
+
+#[test]
+fn read_template_test() {
+ let path = tempfile::TempDir::new().unwrap();
+ let path = path.path().join("file.html");
+    let make_blank = || SortedTemplate::<Html>::from_before_after("<div>", "</div>");
+
+ let template = read_template_or_blank(make_blank, &path).unwrap();
+    assert_eq!(but_last_line(&template.to_string()), "<div></div>");
+    fs::write(&path, template.to_string()).unwrap();
+    let mut template = read_template_or_blank(make_blank, &path).unwrap();
+    template.append("<br/>".to_string());
+    fs::write(&path, template.to_string()).unwrap();
+    let mut template = read_template_or_blank(make_blank, &path).unwrap();
+    template.append("<img/>".to_string());
+    fs::write(&path, template.to_string()).unwrap();
+    let template = read_template_or_blank(make_blank, &path).unwrap();
+
+    assert_eq!(but_last_line(&template.to_string()), "<div><br/><img/></div>");
+}