diff --git a/CHANGELOG.md b/CHANGELOG.md index 22e07a49..696e94ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - Switch to pulldown-cmark anchor rather than ours, some (very niche) edge cases are not supported anymore, you can also specify classes on headers now +- Now outputs empty taxonomies instead of ignoring them ### Other - Fix markup for fenced code with linenos diff --git a/Cargo.lock b/Cargo.lock index 5a863840..d6f1e1af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,6 +14,17 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.5", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.18" @@ -379,6 +390,21 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "content" +version = "0.1.0" +dependencies = [ + "config", + "errors", + "libs", + "markdown", + "serde", + "tempfile", + "test-case", + "time 0.3.9", + "utils", +] + [[package]] name = "core-foundation" version = "0.9.3" @@ -751,18 +777,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "front_matter" -version = "0.1.0" -dependencies = [ - "errors", - "libs", - "serde", - "test-case", - "time 0.3.9", - "utils", -] - [[package]] name = "fsevent" version = "0.4.0" @@ -1371,24 +1385,11 @@ version = "0.2.121" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f" -[[package]] -name = "library" -version = "0.1.0" -dependencies = [ - "config", - "errors", - "front_matter", - "libs", - "rendering", - "serde", - "tempfile", - "utils", -] - [[package]] name = "libs" version = "0.1.0" dependencies = [ + "ahash", "ammonia", 
"base64", "csv", @@ -1414,7 +1415,6 @@ dependencies = [ "serde_json", "serde_yaml", "sha2", - "slotmap", "slug", "svg_metadata", "syntect", @@ -1572,6 +1572,22 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" +[[package]] +name = "markdown" +version = "0.1.0" +dependencies = [ + "config", + "errors", + "insta", + "libs", + "link_checker", + "pest", + "pest_derive", + "serde", + "templates", + "utils", +] + [[package]] name = "markup5ever" version = "0.10.1" @@ -2575,23 +2591,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "rendering" -version = "0.1.0" -dependencies = [ - "config", - "errors", - "front_matter", - "insta", - "libs", - "link_checker", - "pest", - "pest_derive", - "serde", - "templates", - "utils", -] - [[package]] name = "reqwest" version = "0.11.10" @@ -2768,8 +2767,8 @@ name = "search" version = "0.1.0" dependencies = [ "config", + "content", "errors", - "library", "libs", ] @@ -2901,10 +2900,9 @@ name = "site" version = "0.1.0" dependencies = [ "config", + "content", "errors", - "front_matter", "imageproc", - "library", "libs", "link_checker", "path-slash", @@ -2921,15 +2919,6 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" -[[package]] -name = "slotmap" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342" -dependencies = [ - "version_check", -] - [[package]] name = "slug" version = "0.1.4" @@ -3107,12 +3096,12 @@ name = "templates" version = "0.1.0" dependencies = [ "config", + "content", "errors", "imageproc", - "library", "libs", + "markdown", "mockito", - "rendering", "serde", "tempfile", "utils", @@ -3923,9 +3912,9 @@ dependencies = [ "atty", "clap 3.1.6", "clap_complete", + "content", "ctrlc", 
"errors", - "front_matter", "hyper", "libs", "mime_guess", diff --git a/Cargo.toml b/Cargo.toml index c05001ff..9509b26b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,7 +39,7 @@ mime_guess = "2.0" site = { path = "components/site" } errors = { path = "components/errors" } -front_matter = { path = "components/front_matter" } +content = { path = "components/content" } utils = { path = "components/utils" } search = { path = "components/search" } libs = { path = "components/libs" } diff --git a/components/config/src/config/languages.rs b/components/config/src/config/languages.rs index ac953766..fa1b3bad 100644 --- a/components/config/src/config/languages.rs +++ b/components/config/src/config/languages.rs @@ -19,7 +19,7 @@ pub struct LanguageOptions { /// The filename to use for feeds. Used to find the template, too. /// Defaults to "atom.xml", with "rss.xml" also having a template provided out of the box. pub feed_filename: String, - pub taxonomies: Vec, + pub taxonomies: Vec, /// Whether to generate search index for that language, defaults to `false` pub build_search_index: bool, /// The search config, telling what to include in the search index for that language diff --git a/components/config/src/config/mod.rs b/components/config/src/config/mod.rs index f001503f..81cd36ef 100644 --- a/components/config/src/config/mod.rs +++ b/components/config/src/config/mod.rs @@ -56,7 +56,7 @@ pub struct Config { /// If set, files from static/ will be hardlinked instead of copied to the output dir. 
pub hard_link_static: bool, - pub taxonomies: Vec, + pub taxonomies: Vec, /// Whether to compile the `sass` directory and output the css files into the static folder pub compile_sass: bool, @@ -99,7 +99,7 @@ pub struct SerializedConfig<'a> { default_language: &'a str, generate_feed: bool, feed_filename: &'a str, - taxonomies: &'a [taxonomies::Taxonomy], + taxonomies: &'a [taxonomies::TaxonomyConfig], build_search_index: bool, extra: &'a HashMap, } @@ -246,6 +246,10 @@ impl Config { others } + pub fn other_languages_codes(&self) -> Vec<&str> { + self.languages.keys().filter(|k| *k != &self.default_language).map(|k| k.as_str()).collect() + } + /// Is this site using i18n? pub fn is_multilingual(&self) -> bool { !self.other_languages().is_empty() diff --git a/components/config/src/config/taxonomies.rs b/components/config/src/config/taxonomies.rs index 468e5015..6d324a65 100644 --- a/components/config/src/config/taxonomies.rs +++ b/components/config/src/config/taxonomies.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] #[serde(default)] -pub struct Taxonomy { +pub struct TaxonomyConfig { /// The name used in the URL, usually the plural pub name: String, /// If this is set, the list of individual taxonomy term page will be paginated @@ -13,7 +13,7 @@ pub struct Taxonomy { pub feed: bool, } -impl Taxonomy { +impl TaxonomyConfig { pub fn is_paginated(&self) -> bool { if let Some(paginate_by) = self.paginate_by { paginate_by > 0 diff --git a/components/config/src/lib.rs b/components/config/src/lib.rs index a915cbfc..23ec6f45 100644 --- a/components/config/src/lib.rs +++ b/components/config/src/lib.rs @@ -6,7 +6,7 @@ use std::path::Path; pub use crate::config::{ languages::LanguageOptions, link_checker::LinkChecker, search::Search, slugify::Slugify, - taxonomies::Taxonomy, Config, + taxonomies::TaxonomyConfig, Config, }; use errors::Result; diff --git a/components/front_matter/Cargo.toml 
b/components/content/Cargo.toml similarity index 75% rename from components/front_matter/Cargo.toml rename to components/content/Cargo.toml index 530e433a..9a958bad 100644 --- a/components/front_matter/Cargo.toml +++ b/components/content/Cargo.toml @@ -1,14 +1,20 @@ [package] -name = "front_matter" +name = "content" version = "0.1.0" edition = "2021" [dependencies] serde = {version = "1.0", features = ["derive"] } time = { version = "0.3", features = ["macros"] } + errors = { path = "../errors" } utils = { path = "../utils" } libs = { path = "../libs" } +config = { path = "../config" } + +# TODO: remove it? +markdown = { path = "../markdown" } [dev-dependencies] test-case = "2" # TODO: can we solve that usecase in src/page.rs in a simpler way? A custom macro_rules! maybe +tempfile = "3.3.0" diff --git a/components/library/src/content/file_info.rs b/components/content/src/file_info.rs similarity index 84% rename from components/library/src/content/file_info.rs rename to components/content/src/file_info.rs index 606d1d46..941d8ea1 100644 --- a/components/library/src/content/file_info.rs +++ b/components/content/src/file_info.rs @@ -1,6 +1,5 @@ use std::path::{Path, PathBuf}; -use config::Config; use errors::{bail, Result}; /// Takes a full path to a file and returns only the components after the first `content` directory @@ -115,14 +114,18 @@ impl FileInfo { /// Look for a language in the filename. /// If a language has been found, update the name of the file in this struct to /// remove it and return the language code - pub fn find_language(&mut self, config: &Config) -> Result { + pub fn find_language( + &mut self, + default_language: &str, + other_languages: &[&str], + ) -> Result { // No languages? 
Nothing to do - if !config.is_multilingual() { - return Ok(config.default_language.clone()); + if other_languages.is_empty() { + return Ok(default_language.to_owned()); } if !self.name.contains('.') { - return Ok(config.default_language.clone()); + return Ok(default_language.to_owned()); } // Go with the assumption that no one is using `.` in filenames when using i18n @@ -130,13 +133,13 @@ impl FileInfo { let mut parts: Vec = self.name.splitn(2, '.').map(|s| s.to_string()).collect(); // If language code is same as default language, go for default - if config.default_language == parts[1].as_str() { - return Ok(config.default_language.clone()); + if default_language == parts[1].as_str() { + return Ok(default_language.to_owned()); } // The language code is not present in the config: typo or the user forgot to add it to the // config - if !config.other_languages().contains_key(&parts[1].as_ref()) { + if !other_languages.contains(&parts[1].as_ref()) { bail!("File {:?} has a language code of {} which isn't present in the config.toml `languages`", self.path, parts[1]); } @@ -152,8 +155,6 @@ impl FileInfo { mod tests { use std::path::{Path, PathBuf}; - use config::{Config, LanguageOptions}; - use super::{find_content_components, FileInfo}; #[test] @@ -183,77 +184,66 @@ mod tests { #[test] fn can_find_valid_language_in_page() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } #[test] fn can_find_valid_language_with_default_locale() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( 
Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); - assert_eq!(res.unwrap(), config.default_language); + assert_eq!(res.unwrap(), "en"); } #[test] fn can_find_valid_language_in_page_with_assets() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"), &PathBuf::new(), ); assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } #[test] fn do_nothing_on_unknown_language_in_page_with_i18n_off() { - let config = Config::default(); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &[]); assert!(res.is_ok()); - assert_eq!(res.unwrap(), config.default_language); + assert_eq!(res.unwrap(), "en"); } #[test] fn errors_on_unknown_language_in_page_with_i18n_on() { - let mut config = Config::default(); - config.languages.insert("it".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["it"]); assert!(res.is_err()); } #[test] fn can_find_valid_language_in_section() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_section( Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + 
let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!(res.unwrap(), "fr"); } @@ -274,13 +264,11 @@ mod tests { /// Regression test for https://github.com/getzola/zola/issues/854 #[test] fn correct_canonical_after_find_language() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); let mut file = FileInfo::new_page( Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"), &PathBuf::new(), ); - let res = file.find_language(&config); + let res = file.find_language("en", &["fr"]); assert!(res.is_ok()); assert_eq!( file.canonical, diff --git a/components/content/src/front_matter/mod.rs b/components/content/src/front_matter/mod.rs new file mode 100644 index 00000000..94c5f51d --- /dev/null +++ b/components/content/src/front_matter/mod.rs @@ -0,0 +1,7 @@ +mod page; +mod section; +mod split; + +pub use page::PageFrontMatter; +pub use section::SectionFrontMatter; +pub use split::{split_page_content, split_section_content}; diff --git a/components/front_matter/src/page.rs b/components/content/src/front_matter/page.rs similarity index 98% rename from components/front_matter/src/page.rs rename to components/content/src/front_matter/page.rs index 8d116cac..8213b671 100644 --- a/components/front_matter/src/page.rs +++ b/components/content/src/front_matter/page.rs @@ -9,7 +9,7 @@ use time::{Date, OffsetDateTime, PrimitiveDateTime}; use errors::{bail, Result}; use utils::de::{fix_toml_dates, from_toml_datetime}; -use crate::RawFrontMatter; +use crate::front_matter::split::RawFrontMatter; /// The front matter of every page #[derive(Debug, Clone, PartialEq, Deserialize)] @@ -131,6 +131,7 @@ impl PageFrontMatter { impl Default for PageFrontMatter { fn default() -> PageFrontMatter { PageFrontMatter { + in_search_index: true, title: None, description: None, updated: None, @@ -145,7 +146,6 @@ impl Default for PageFrontMatter { taxonomies: HashMap::new(), weight: None, aliases: 
Vec::new(), - in_search_index: true, template: None, extra: Map::new(), } @@ -154,8 +154,8 @@ impl Default for PageFrontMatter { #[cfg(test)] mod tests { - use super::PageFrontMatter; - use super::RawFrontMatter; + use crate::front_matter::page::PageFrontMatter; + use crate::front_matter::split::RawFrontMatter; use libs::tera::to_value; use test_case::test_case; use time::macros::datetime; diff --git a/components/front_matter/src/section.rs b/components/content/src/front_matter/section.rs similarity index 97% rename from components/front_matter/src/section.rs rename to components/content/src/front_matter/section.rs index 9424fb7a..d1bef86d 100644 --- a/components/front_matter/src/section.rs +++ b/components/content/src/front_matter/section.rs @@ -1,11 +1,12 @@ use libs::tera::{Map, Value}; use serde::{Deserialize, Serialize}; -use super::{InsertAnchor, SortBy}; use errors::Result; use utils::de::fix_toml_dates; +use utils::types::InsertAnchor; -use crate::RawFrontMatter; +use crate::front_matter::split::RawFrontMatter; +use crate::SortBy; static DEFAULT_PAGINATE_PATH: &str = "page"; diff --git a/components/front_matter/src/lib.rs b/components/content/src/front_matter/split.rs similarity index 90% rename from components/front_matter/src/lib.rs rename to components/content/src/front_matter/split.rs index 35b3ab89..d7aede9c 100644 --- a/components/front_matter/src/lib.rs +++ b/components/content/src/front_matter/split.rs @@ -1,17 +1,12 @@ use std::path::Path; -use libs::once_cell::sync::Lazy; -use serde::{Deserialize, Serialize}; - use errors::{bail, Context, Result}; +use libs::once_cell::sync::Lazy; use libs::regex::Regex; use libs::{serde_yaml, toml}; -mod page; -mod section; - -pub use page::PageFrontMatter; -pub use section::SectionFrontMatter; +use crate::front_matter::page::PageFrontMatter; +use crate::front_matter::section::SectionFrontMatter; static TOML_RE: Lazy = Lazy::new(|| { Regex::new( @@ -31,7 +26,7 @@ pub enum RawFrontMatter<'a> { } impl 
RawFrontMatter<'_> { - fn deserialize(&self) -> Result + pub(crate) fn deserialize(&self) -> Result where T: serde::de::DeserializeOwned, { @@ -46,29 +41,6 @@ impl RawFrontMatter<'_> { } } -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum SortBy { - /// Most recent to oldest - Date, - /// Most recent to oldest - UpdateDate, - /// Sort by title - Title, - /// Lower weight comes first - Weight, - /// No sorting - None, -} - -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum InsertAnchor { - Left, - Right, - None, -} - /// Split a file between the front matter and its content /// Will return an error if the front matter wasn't found fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(RawFrontMatter<'c>, &'c str)> { diff --git a/components/content/src/lib.rs b/components/content/src/lib.rs new file mode 100644 index 00000000..661b365c --- /dev/null +++ b/components/content/src/lib.rs @@ -0,0 +1,24 @@ +mod front_matter; + +mod file_info; +mod library; +mod page; +mod pagination; +mod section; +mod ser; +mod sorting; +mod taxonomies; +mod types; +mod utils; + +pub use file_info::FileInfo; +pub use front_matter::{PageFrontMatter, SectionFrontMatter}; +pub use library::Library; +pub use page::Page; +pub use pagination::Paginator; +pub use section::Section; +pub use taxonomies::{Taxonomy, TaxonomyItem}; +pub use types::*; + +// TODO +// 3. 
add more tests diff --git a/components/content/src/library.rs b/components/content/src/library.rs new file mode 100644 index 00000000..9b0dac00 --- /dev/null +++ b/components/content/src/library.rs @@ -0,0 +1,525 @@ +use std::path::{Path, PathBuf}; + +use config::Config; +use errors::Result; +use libs::ahash::{AHashMap, AHashSet}; + +use crate::ser::TranslatedContent; +use crate::sorting::sort_pages; +use crate::taxonomies::{find_taxonomies, Taxonomy}; +use crate::{Page, Section, SortBy}; + +#[derive(Debug)] +pub struct Library { + pub pages: AHashMap, + pub sections: AHashMap, + pub taxonomies: Vec, + // aliases -> files, so we can easily check for conflicts + pub reverse_aliases: AHashMap>, + pub translations: AHashMap>, +} + +impl Library { + pub fn new() -> Self { + Self { + pages: AHashMap::new(), + sections: AHashMap::new(), + taxonomies: Vec::new(), + reverse_aliases: AHashMap::new(), + translations: AHashMap::new(), + } + } + + fn insert_reverse_aliases(&mut self, file_path: &Path, entries: Vec) { + for entry in entries { + self.reverse_aliases + .entry(entry) + .and_modify(|s| { + s.insert(file_path.to_path_buf()); + }) + .or_insert_with(|| { + let mut s = AHashSet::new(); + s.insert(file_path.to_path_buf()); + s + }); + } + } + + /// This will check every section/page paths + the aliases and ensure none of them + /// are colliding. 
+ /// Returns Vec<(path colliding, [list of files causing that collision])> + pub fn find_path_collisions(&self) -> Vec<(String, Vec)> { + self.reverse_aliases + .iter() + .filter_map(|(alias, files)| { + if files.len() > 1 { + Some((alias.clone(), files.clone().into_iter().collect::>())) + } else { + None + } + }) + .collect() + } + + pub fn insert_page(&mut self, page: Page) { + let file_path = page.file.path.clone(); + let mut entries = vec![page.path.clone()]; + entries.extend(page.meta.aliases.to_vec()); + self.insert_reverse_aliases(&file_path, entries); + self.pages.insert(file_path, page); + } + + pub fn insert_section(&mut self, section: Section) { + let file_path = section.file.path.clone(); + let mut entries = vec![section.path.clone()]; + entries.extend(section.meta.aliases.to_vec()); + self.insert_reverse_aliases(&file_path, entries); + self.sections.insert(file_path, section); + } + + /// Separate from `populate_sections` as it's called _before_ markdown the pages/sections + pub fn populate_taxonomies(&mut self, config: &Config) -> Result<()> { + self.taxonomies = find_taxonomies(config, &self.pages)?; + Ok(()) + } + + /// Sort all sections pages according to sorting method given + /// Pages that cannot be sorted are set to the section.ignored_pages instead + pub fn sort_section_pages(&mut self) { + let mut updates = AHashMap::new(); + for (path, section) in &self.sections { + let pages: Vec<_> = section.pages.iter().map(|p| &self.pages[p]).collect(); + let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { + SortBy::None => continue, + _ => sort_pages(&pages, section.meta.sort_by), + }; + + updates + .insert(path.clone(), (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by)); + } + + for (path, (sorted, unsortable, _)) in updates { + if !self.sections[&path].meta.transparent { + // Fill siblings + for (i, page_path) in sorted.iter().enumerate() { + let mut p = self.pages.get_mut(page_path).unwrap(); + if i > 0 { + // 
lighter / later / title_prev + p.lower = Some(sorted[i - 1].clone()); + } + + if i < sorted.len() - 1 { + // heavier / earlier / title_next + p.higher = Some(sorted[i + 1].clone()); + } + } + } + + if let Some(s) = self.sections.get_mut(&path) { + s.pages = sorted; + s.ignored_pages = unsortable; + } + } + } + + /// Find out the direct subsections of each subsection if there are some + /// as well as the pages for each section + pub fn populate_sections(&mut self, config: &Config) { + let mut add_translation = |entry: &Path, path: &Path| { + if config.is_multilingual() { + self.translations + .entry(entry.to_path_buf()) + .and_modify(|trans| { + trans.insert(path.to_path_buf()); + }) + .or_insert({ + let mut s = AHashSet::new(); + s.insert(path.to_path_buf()); + s + }); + } + }; + + let root_path = + self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap(); + let mut ancestors = AHashMap::new(); + let mut subsections = AHashMap::new(); + let mut sections_weight = AHashMap::new(); + + // We iterate over the sections twice + // The first time to build up the list of ancestors for each section + for (path, section) in &self.sections { + sections_weight.insert(path.clone(), section.meta.weight); + if let Some(ref grand_parent) = section.file.grand_parent { + subsections + // Using the original filename to work for multi-lingual sections + .entry(grand_parent.join(§ion.file.filename)) + .or_insert_with(Vec::new) + .push(section.file.path.clone()); + } + + add_translation(§ion.file.canonical, path); + + // Root sections have no ancestors + if section.is_index() { + ancestors.insert(section.file.path.clone(), vec![]); + continue; + } + + // Index section is the first ancestor of every single section + let mut cur_path = root_path.clone(); + let mut parents = vec![section.file.filename.clone()]; + for component in §ion.file.components { + cur_path = cur_path.join(component); + // Skip itself + if cur_path == section.file.parent { + continue; + 
} + + let index_path = cur_path.join(§ion.file.filename); + if let Some(s) = self.sections.get(&index_path) { + parents.push(s.file.relative.clone()); + } + } + ancestors.insert(section.file.path.clone(), parents); + } + + // The second time we actually assign ancestors and order subsections based on their weights + for (path, section) in self.sections.iter_mut() { + section.subsections.clear(); + section.pages.clear(); + section.ignored_pages.clear(); + section.ancestors.clear(); + + if let Some(children) = subsections.get(&*path) { + let mut children: Vec<_> = children.clone(); + children.sort_by(|a, b| sections_weight[a].cmp(§ions_weight[b])); + section.subsections = children; + } + if let Some(parents) = ancestors.get(&*path) { + section.ancestors = parents.clone(); + } + } + + // We pre-build the index filename for each language + let mut index_filename_by_lang = AHashMap::with_capacity(config.languages.len()); + for code in config.languages.keys() { + if code == &config.default_language { + index_filename_by_lang.insert(code, "_index.md".to_owned()); + } else { + index_filename_by_lang.insert(code, format!("_index.{}.md", code)); + } + } + + // Then once we took care of the sections, we find the pages of each section + for (path, page) in self.pages.iter_mut() { + let parent_filename = &index_filename_by_lang[&page.lang]; + add_translation(&page.file.canonical, path); + let mut parent_section_path = page.file.parent.join(&parent_filename); + + while let Some(parent_section) = self.sections.get_mut(&parent_section_path) { + let is_transparent = parent_section.meta.transparent; + parent_section.pages.push(path.clone()); + page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_default(); + // Don't forget to push the actual parent + page.ancestors.push(parent_section.file.relative.clone()); + + // Find the page template if one of a parent has page_template set + // Stops after the first one found, keep in mind page.ancestors + // is [index, ..., 
parent] so we need to reverse it first + if page.meta.template.is_none() { + for ancestor in page.ancestors.iter().rev() { + let s = self.sections.get(&root_path.join(ancestor)).unwrap(); + if let Some(ref tpl) = s.meta.page_template { + page.meta.template = Some(tpl.clone()); + break; + } + } + } + + if !is_transparent { + break; + } + + // We've added `_index(.{LANG})?.md` so if we are here so we need to go up twice + match parent_section_path.clone().parent().unwrap().parent() { + Some(parent) => parent_section_path = parent.join(&parent_filename), + None => break, + } + } + } + + // And once we have all the pages assigned to their section, we sort them + self.sort_section_pages(); + } + + /// Find all the orphan pages: pages that are in a folder without an `_index.md` + pub fn get_all_orphan_pages(&self) -> Vec<&Page> { + self.pages.iter().filter(|(_, p)| p.ancestors.is_empty()).map(|(_, p)| p).collect() + } + + /// Find all the translated content for a given canonical path. + /// The translated content can be either for a section or a page + pub fn find_translations(&self, canonical_path: &Path) -> Vec> { + let mut translations = vec![]; + + if let Some(paths) = self.translations.get(canonical_path) { + for path in paths { + let (lang, permalink, title, path) = { + if self.sections.contains_key(path) { + let s = &self.sections[path]; + (&s.lang, &s.permalink, &s.meta.title, &s.file.path) + } else { + let s = &self.pages[path]; + (&s.lang, &s.permalink, &s.meta.title, &s.file.path) + } + }; + translations.push(TranslatedContent { lang, permalink, title, path }); + } + } + + translations + } + + pub fn find_pages_by_path(&self, paths: &[PathBuf]) -> Vec<&Page> { + paths.iter().map(|p| &self.pages[p]).collect() + } + + pub fn find_sections_by_path(&self, paths: &[PathBuf]) -> Vec<&Section> { + paths.iter().map(|p| &self.sections[p]).collect() + } + + pub fn find_taxonomies(&self, config: &Config) -> Result> { + find_taxonomies(config, &self.pages) + } +} + 
+#[cfg(test)] +mod tests { + use super::*; + use crate::FileInfo; + use config::LanguageOptions; + + #[test] + fn can_find_collisions_with_paths() { + let mut library = Library::new(); + let mut section = Section { path: "hello".to_owned(), ..Default::default() }; + section.file.path = PathBuf::from("hello.md"); + library.insert_section(section.clone()); + let mut section2 = Section { path: "hello".to_owned(), ..Default::default() }; + section2.file.path = PathBuf::from("bonjour.md"); + library.insert_section(section2.clone()); + + let collisions = library.find_path_collisions(); + assert_eq!(collisions.len(), 1); + assert_eq!(collisions[0].0, "hello"); + assert!(collisions[0].1.contains(§ion.file.path)); + assert!(collisions[0].1.contains(§ion2.file.path)); + } + + #[test] + fn can_find_collisions_with_aliases() { + let mut library = Library::new(); + let mut section = Section { path: "hello".to_owned(), ..Default::default() }; + section.file.path = PathBuf::from("hello.md"); + library.insert_section(section.clone()); + let mut section2 = Section { path: "world".to_owned(), ..Default::default() }; + section2.file.path = PathBuf::from("bonjour.md"); + section2.meta.aliases = vec!["hello".to_owned()]; + library.insert_section(section2.clone()); + + let collisions = library.find_path_collisions(); + assert_eq!(collisions.len(), 1); + assert_eq!(collisions[0].0, "hello"); + assert!(collisions[0].1.contains(§ion.file.path)); + assert!(collisions[0].1.contains(§ion2.file.path)); + } + + #[derive(Debug, Clone)] + enum PageSort { + None, + Date(&'static str), + Title(&'static str), + Weight(usize), + } + + fn create_page(file_path: &str, lang: &str, page_sort: PageSort) -> Page { + let mut page = Page::default(); + page.lang = lang.to_owned(); + page.file = FileInfo::new_page(Path::new(file_path), &PathBuf::new()); + match page_sort { + PageSort::None => (), + PageSort::Date(date) => { + page.meta.date = Some(date.to_owned()); + page.meta.date_to_datetime(); + } + 
PageSort::Title(title) => { + page.meta.title = Some(title.to_owned()); + } + PageSort::Weight(w) => { + page.meta.weight = Some(w); + } + } + page.file.find_language("en", &["fr"]).unwrap(); + page + } + + fn create_section( + file_path: &str, + lang: &str, + weight: usize, + transparent: bool, + sort_by: SortBy, + ) -> Section { + let mut section = Section::default(); + section.lang = lang.to_owned(); + section.file = FileInfo::new_section(Path::new(file_path), &PathBuf::new()); + section.meta.weight = weight; + section.meta.transparent = transparent; + section.meta.sort_by = sort_by; + section.meta.page_template = Some("new_page.html".to_owned()); + section.file.find_language("en", &["fr"]).unwrap(); + section + } + + #[test] + fn can_populate_sections() { + let mut config = Config::default_for_test(); + config.languages.insert("fr".to_owned(), LanguageOptions::default()); + let mut library = Library::new(); + let sections = vec![ + ("content/_index.md", "en", 0, false, SortBy::None), + ("content/_index.fr.md", "fr", 0, false, SortBy::None), + ("content/blog/_index.md", "en", 0, false, SortBy::Date), + ("content/wiki/_index.md", "en", 0, false, SortBy::Weight), + ("content/wiki/_index.fr.md", "fr", 0, false, SortBy::Weight), + ("content/wiki/recipes/_index.md", "en", 1, true, SortBy::Weight), + ("content/wiki/recipes/_index.fr.md", "fr", 1, true, SortBy::Weight), + ("content/wiki/programming/_index.md", "en", 10, true, SortBy::Weight), + ("content/wiki/programming/_index.fr.md", "fr", 10, true, SortBy::Weight), + ("content/novels/_index.md", "en", 10, false, SortBy::Title), + ("content/novels/_index.fr.md", "fr", 10, false, SortBy::Title), + ]; + for (p, l, w, t, s) in sections.clone() { + library.insert_section(create_section(p, l, w, t, s)); + } + + let pages = vec![ + ("content/about.md", "en", PageSort::None), + ("content/about.fr.md", "en", PageSort::None), + ("content/blog/rust.md", "en", PageSort::Date("2022-01-01")), + ("content/blog/python.md", "en", 
PageSort::Date("2022-03-03")), + ("content/blog/docker.md", "en", PageSort::Date("2022-02-02")), + ("content/wiki/recipes/chocolate-cake.md", "en", PageSort::Weight(100)), + ("content/wiki/recipes/chocolate-cake.fr.md", "fr", PageSort::Weight(100)), + ("content/wiki/recipes/rendang.md", "en", PageSort::Weight(5)), + ("content/wiki/recipes/rendang.fr.md", "fr", PageSort::Weight(5)), + ("content/wiki/programming/rust.md", "en", PageSort::Weight(1)), + ("content/wiki/programming/rust.fr.md", "fr", PageSort::Weight(1)), + ("content/wiki/programming/zola.md", "en", PageSort::Weight(10)), + ("content/wiki/programming/python.md", "en", PageSort::None), + ("content/novels/the-colour-of-magic.md", "en", PageSort::Title("The Colour of Magic")), + ( + "content/novels/the-colour-of-magic.fr.md", + "en", + PageSort::Title("La Huitième Couleur"), + ), + ("content/novels/reaper.md", "en", PageSort::Title("Reaper")), + ("content/novels/reaper.fr.md", "fr", PageSort::Title("Reaper (fr)")), + ("content/random/hello.md", "en", PageSort::None), + ]; + for (p, l, s) in pages.clone() { + library.insert_page(create_page(p, l, s)); + } + library.populate_sections(&config); + assert_eq!(library.sections.len(), sections.len()); + assert_eq!(library.pages.len(), pages.len()); + let blog_section = &library.sections[&PathBuf::from("content/blog/_index.md")]; + assert_eq!(blog_section.pages.len(), 3); + // sorted by date in desc order + assert_eq!( + blog_section.pages, + vec![ + PathBuf::from("content/blog/python.md"), + PathBuf::from("content/blog/docker.md"), + PathBuf::from("content/blog/rust.md") + ] + ); + assert_eq!(blog_section.ignored_pages.len(), 0); + assert!(&library.pages[&PathBuf::from("content/blog/python.md")].lower.is_none()); + assert_eq!( + &library.pages[&PathBuf::from("content/blog/python.md")].higher, + &Some(PathBuf::from("content/blog/docker.md")) + ); + assert_eq!( + library.pages[&PathBuf::from("content/blog/python.md")].meta.template, + 
Some("new_page.html".to_owned()) + ); + + let wiki = &library.sections[&PathBuf::from("content/wiki/_index.md")]; + assert_eq!(wiki.pages.len(), 4); + // sorted by weight, in asc order + assert_eq!( + wiki.pages, + vec![ + PathBuf::from("content/wiki/programming/rust.md"), + PathBuf::from("content/wiki/recipes/rendang.md"), + PathBuf::from("content/wiki/programming/zola.md"), + PathBuf::from("content/wiki/recipes/chocolate-cake.md"), + ] + ); + assert_eq!(wiki.ignored_pages.len(), 1); + assert_eq!(wiki.ignored_pages, vec![PathBuf::from("content/wiki/programming/python.md")]); + assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].lower, + &Some(PathBuf::from("content/wiki/programming/rust.md")) + ); + assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].higher, + &Some(PathBuf::from("content/wiki/programming/zola.md")) + ); + assert_eq!( + wiki.subsections, + vec![ + PathBuf::from("content/wiki/recipes/_index.md"), + PathBuf::from("content/wiki/programming/_index.md") + ] + ); + assert_eq!(wiki.ancestors, vec!["_index.md".to_owned()]); + assert_eq!( + library.sections[&PathBuf::from("content/wiki/recipes/_index.md")].ancestors, + vec!["_index.md".to_owned(), "wiki/_index.md".to_owned()] + ); + + // also works for other languages + let french_wiki = &library.sections[&PathBuf::from("content/wiki/_index.fr.md")]; + assert_eq!(french_wiki.pages.len(), 3); + // sorted by weight, in asc order + assert_eq!( + french_wiki.pages, + vec![ + PathBuf::from("content/wiki/programming/rust.fr.md"), + PathBuf::from("content/wiki/recipes/rendang.fr.md"), + PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md"), + ] + ); + assert_eq!(french_wiki.ignored_pages.len(), 0); + assert!(&library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")] + .higher + .is_none()); + assert_eq!( + &library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")].lower, + 
&Some(PathBuf::from("content/wiki/recipes/rendang.fr.md")) + ); + + let orphans = library.get_all_orphan_pages(); + assert_eq!(orphans.len(), 1); + assert_eq!(orphans[0].file.path, PathBuf::from("content/random/hello.md")); + + // And translations should be filled in + let translations = library.find_translations(&PathBuf::from("content/novels/reaper")); + assert_eq!(translations.len(), 2); + assert!(translations[0].title.is_some()); + assert!(translations[1].title.is_some()); + } +} diff --git a/components/library/src/content/page.rs b/components/content/src/page.rs similarity index 94% rename from components/library/src/content/page.rs rename to components/content/src/page.rs index 364903da..add40199 100644 --- a/components/library/src/content/page.rs +++ b/components/content/src/page.rs @@ -4,21 +4,22 @@ use std::path::{Path, PathBuf}; use libs::once_cell::sync::Lazy; use libs::regex::Regex; -use libs::slotmap::DefaultKey; use libs::tera::{Context as TeraContext, Tera}; -use crate::library::Library; use config::Config; use errors::{Context, Result}; -use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; -use rendering::{render_content, Heading, RenderContext}; -use utils::site::get_reading_analytics; +use markdown::{render_content, RenderContext}; use utils::slugs::slugify_paths; +use utils::table_of_contents::Heading; use utils::templates::{render_template, ShortcodeDefinition}; +use utils::types::InsertAnchor; -use crate::content::file_info::FileInfo; -use crate::content::ser::SerializingPage; -use crate::content::{find_related_assets, has_anchor}; +use crate::file_info::FileInfo; +use crate::front_matter::{split_page_content, PageFrontMatter}; +use crate::library::Library; +use crate::ser::SerializingPage; +use crate::utils::get_reading_analytics; +use crate::utils::{find_related_assets, has_anchor}; use utils::fs::read_file; use utils::links::has_anchor_id; @@ -38,8 +39,8 @@ pub struct Page { pub file: FileInfo, /// The front matter 
meta-data pub meta: PageFrontMatter, - /// The list of parent sections - pub ancestors: Vec, + /// The list of parent sections relative paths + pub ancestors: Vec, /// The actual content of the page, in markdown pub raw_content: String, /// All the non-md files we found next to the .md file @@ -61,22 +62,10 @@ pub struct Page { /// When is found in the text, will take the content up to that part /// as summary pub summary: Option, - /// The earlier updated page, for pages sorted by updated date - pub earlier_updated: Option, - /// The later updated page, for pages sorted by updated date - pub later_updated: Option, - /// The earlier page, for pages sorted by date - pub earlier: Option, - /// The later page, for pages sorted by date - pub later: Option, - /// The previous page, for pages sorted by title - pub title_prev: Option, - /// The next page, for pages sorted by title - pub title_next: Option, - /// The lighter page, for pages sorted by weight - pub lighter: Option, - /// The heavier page, for pages sorted by weight - pub heavier: Option, + /// The previous page when sorting: earlier/earlier_updated/lighter/prev + pub lower: Option, + /// The next page when sorting: later/later_updated/heavier/next + pub higher: Option, /// Toc made from the headings of the markdown file pub toc: Vec, /// How many words in the raw content @@ -88,7 +77,7 @@ pub struct Page { /// Corresponds to the lang in the {slug}.{lang}.md file scheme pub lang: String, /// Contains all the translated version of that page - pub translations: Vec, + pub translations: Vec, /// The list of all internal links (as path to markdown file), with optional anchor fragments. /// We can only check the anchor after all pages have been built and their ToC compiled. /// The page itself should exist otherwise it would have errored before getting there. 
@@ -116,7 +105,8 @@ impl Page { let (meta, content) = split_page_content(file_path, content)?; let mut page = Page::new(file_path, meta, base_path); - page.lang = page.file.find_language(config)?; + page.lang = + page.file.find_language(&config.default_language, &config.other_languages_codes())?; page.raw_content = content.to_string(); let (word_count, reading_time) = get_reading_analytics(&page.raw_content); @@ -201,6 +191,8 @@ impl Page { Ok(page) } + pub fn find_language(&mut self) {} + /// Read and parse a .md file into a Page struct pub fn from_file>(path: P, config: &Config, base_path: &Path) -> Result { let path = path.as_ref(); @@ -238,7 +230,7 @@ impl Page { ); context.set_shortcode_definitions(shortcode_definitions); context.set_current_page_path(&self.file.relative); - context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); + context.tera_context.insert("page", &SerializingPage::new(self, None, false)); let res = render_content(&self.raw_content, &context) .with_context(|| format!("Failed to render content of {}", self.file.path.display()))?; @@ -266,7 +258,7 @@ impl Page { context.insert("config", &config.serialize(&self.lang)); context.insert("current_url", &self.permalink); context.insert("current_path", &self.path); - context.insert("page", &self.to_serialized(library)); + context.insert("page", &self.serialize(library)); context.insert("lang", &self.lang); render_template(tpl_name, tera, context, &config.theme) @@ -303,12 +295,12 @@ impl Page { has_anchor_id(&self.content, id) } - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { - SerializingPage::from_page(self, library) + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { + SerializingPage::new(self, Some(library), true) } - pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { - SerializingPage::from_page_basic(self, Some(library)) + pub fn serialize_without_siblings<'a>(&'a 
self, library: &'a Library) -> SerializingPage<'a> { + SerializingPage::new(self, Some(library), false) } } @@ -323,10 +315,10 @@ mod tests { use libs::tera::Tera; use tempfile::tempdir; - use super::Page; + use crate::Page; use config::{Config, LanguageOptions}; - use front_matter::InsertAnchor; use utils::slugs::SlugifyStrategy; + use utils::types::InsertAnchor; #[test] fn can_parse_a_valid_page() { diff --git a/components/library/src/pagination/mod.rs b/components/content/src/pagination.rs similarity index 77% rename from components/library/src/pagination/mod.rs rename to components/content/src/pagination.rs index 02fd8da7..70d731db 100644 --- a/components/library/src/pagination/mod.rs +++ b/components/content/src/pagination.rs @@ -1,17 +1,17 @@ +use config::Config; +use serde::Serialize; use std::borrow::Cow; use std::collections::HashMap; +use std::path::PathBuf; -use libs::slotmap::DefaultKey; -use libs::tera::{to_value, Context, Tera, Value}; -use serde::Serialize; - -use config::Config; use errors::{Context as ErrorContext, Result}; +use libs::tera::{to_value, Context, Tera, Value}; use utils::templates::{check_template_fallbacks, render_template}; -use crate::content::{Section, SerializingPage, SerializingSection}; use crate::library::Library; +use crate::ser::{SectionSerMode, SerializingPage, SerializingSection}; use crate::taxonomies::{Taxonomy, TaxonomyItem}; +use crate::Section; #[derive(Clone, Debug, PartialEq)] enum PaginationRoot<'a> { @@ -25,11 +25,11 @@ pub struct Pager<'a> { /// The page number in the paginator (1-indexed) pub index: usize, /// Permalink to that page - permalink: String, + pub permalink: String, /// Path to that page - path: String, + pub path: String, /// All pages for the pager - pages: Vec>, + pub pages: Vec>, } impl<'a> Pager<'a> { @@ -46,7 +46,7 @@ impl<'a> Pager<'a> { #[derive(Clone, Debug, PartialEq)] pub struct Paginator<'a> { /// All pages in the section/taxonomy - all_pages: Cow<'a, [DefaultKey]>, + all_pages: Cow<'a, 
[PathBuf]>, /// Pages split in chunks of `paginate_by` pub pagers: Vec>, /// How many content pages on a paginated page at max @@ -69,12 +69,11 @@ impl<'a> Paginator<'a> { /// It will always at least create one pager (the first) even if there are not enough pages to paginate pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> { let paginate_by = section.meta.paginate_by.unwrap(); - let paginate_reversed = section.meta.paginate_reversed; let mut paginator = Paginator { all_pages: Cow::from(§ion.pages[..]), pagers: Vec::with_capacity(section.pages.len() / paginate_by), paginate_by, - paginate_reversed, + paginate_reversed: section.meta.paginate_reversed, root: PaginationRoot::Section(section), permalink: section.permalink.clone(), path: section.path.clone(), @@ -133,9 +132,9 @@ impl<'a> Paginator<'a> { self.all_pages.to_mut().reverse(); } - for key in self.all_pages.to_mut().iter_mut() { - let page = library.get_page_by_key(*key); - current_page.push(page.to_serialized_basic(library)); + for p in &*self.all_pages { + let page = &library.pages[p]; + current_page.push(SerializingPage::new(page, Some(library), false)); if current_page.len() == self.paginate_by { pages.push(current_page); @@ -230,8 +229,10 @@ impl<'a> Paginator<'a> { let mut context = Context::new(); match self.root { PaginationRoot::Section(s) => { - context - .insert("section", &SerializingSection::from_section_basic(s, Some(library))); + context.insert( + "section", + &SerializingSection::new(s, SectionSerMode::MetadataOnly(library)), + ); context.insert("lang", &s.lang); context.insert("config", &config.serialize(&s.lang)); } @@ -253,18 +254,12 @@ impl<'a> Paginator<'a> { #[cfg(test)] mod tests { - use libs::tera::{to_value, Tera}; - use std::path::PathBuf; - - use crate::content::{Page, Section}; - use crate::library::Library; - use crate::taxonomies::{Taxonomy, TaxonomyItem}; - use config::Taxonomy as TaxonomyConfig; - - use super::Paginator; + use super::*; + use 
crate::{Page, SectionFrontMatter}; + use config::TaxonomyConfig; fn create_section(is_index: bool, paginate_reversed: bool) -> Section { - let f = front_matter::SectionFrontMatter { + let f = SectionFrontMatter { paginate_by: Some(2), paginate_path: "page".to_string(), paginate_reversed, @@ -275,9 +270,11 @@ mod tests { if !is_index { s.path = "/posts/".to_string(); s.permalink = "https://vincent.is/posts/".to_string(); + s.file.path = PathBuf::from("posts/_index.md"); s.file.components = vec!["posts".to_string()]; } else { s.path = "/".into(); + s.file.path = PathBuf::from("_index.md"); s.permalink = "https://vincent.is/".to_string(); } s @@ -288,89 +285,64 @@ mod tests { num_pages: usize, paginate_reversed: bool, ) -> (Section, Library) { - let mut library = Library::new(num_pages, 0, false); + let mut library = Library::new(); for i in 1..=num_pages { let mut page = Page::default(); page.meta.title = Some(i.to_string()); + page.file.path = PathBuf::from(&format!("{}.md", i)); library.insert_page(page); } - let mut draft = Page::default(); - draft.meta.draft = true; - library.insert_page(draft); let mut section = create_section(is_index, paginate_reversed); - section.pages = library.pages().keys().collect(); + section.pages = library.pages.keys().cloned().collect(); + section.pages.sort(); library.insert_section(section.clone()); (section, library) } #[test] - fn test_can_create_paginator() { + fn test_can_create_section_paginator() { let (section, library) = create_library(false, 3, false); let paginator = Paginator::from_section(§ion, &library); assert_eq!(paginator.pagers.len(), 2); assert_eq!(paginator.pagers[0].index, 1); assert_eq!(paginator.pagers[0].pages.len(), 2); + assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "1"); + assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2"); assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/"); assert_eq!(paginator.pagers[0].path, "/posts/"); 
assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); + assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "3"); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/"); assert_eq!(paginator.pagers[1].path, "/posts/page/2/"); } #[test] - fn test_can_create_reversed_paginator() { - // 6 pages, 5 normal and 1 draft - let (section, library) = create_library(false, 5, true); + fn test_can_create_reversed_section_paginator() { + let (section, library) = create_library(false, 3, true); let paginator = Paginator::from_section(§ion, &library); - assert_eq!(paginator.pagers.len(), 3); + assert_eq!(paginator.pagers.len(), 2); assert_eq!(paginator.pagers[0].index, 1); assert_eq!(paginator.pagers[0].pages.len(), 2); + assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "3"); + assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2"); assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/"); assert_eq!(paginator.pagers[0].path, "/posts/"); - assert_eq!( - vec!["".to_string(), "5".to_string()], - paginator.pagers[0] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); + assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "1"); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/"); assert_eq!(paginator.pagers[1].path, "/posts/page/2/"); - assert_eq!( - vec!["4".to_string(), "3".to_string()], - paginator.pagers[1] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); - - assert_eq!(paginator.pagers[2].index, 3); - assert_eq!(paginator.pagers[2].pages.len(), 2); - assert_eq!(paginator.pagers[2].permalink, 
"https://vincent.is/posts/page/3/"); - assert_eq!(paginator.pagers[2].path, "/posts/page/3/"); - assert_eq!( - vec!["2".to_string(), "1".to_string()], - paginator.pagers[2] - .pages - .iter() - .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string()) - .collect::>() - ); } #[test] - fn test_can_create_paginator_for_index() { + fn can_create_paginator_for_index() { let (section, library) = create_library(true, 3, false); let paginator = Paginator::from_section(§ion, &library); assert_eq!(paginator.pagers.len(), 2); @@ -381,7 +353,7 @@ mod tests { assert_eq!(paginator.pagers[0].path, "/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/page/2/"); assert_eq!(paginator.pagers[1].path, "/page/2/"); } @@ -399,6 +371,7 @@ mod tests { assert_eq!(context["previous"], to_value::>(None).unwrap()); assert_eq!(context["next"], to_value("https://vincent.is/posts/page/2/").unwrap()); assert_eq!(context["current_index"], to_value(1).unwrap()); + assert_eq!(context["pages"].as_array().unwrap().len(), 2); let context = paginator.build_paginator_context(&paginator.pagers[1]); assert_eq!(context["paginate_by"], to_value(2).unwrap()); @@ -407,48 +380,12 @@ mod tests { assert_eq!(context["next"], to_value::>(None).unwrap()); assert_eq!(context["previous"], to_value("https://vincent.is/posts/").unwrap()); assert_eq!(context["current_index"], to_value(2).unwrap()); - assert_eq!(context["total_pages"], to_value(4).unwrap()); + assert_eq!(context["total_pages"], to_value(3).unwrap()); + assert_eq!(context["pages"].as_array().unwrap().len(), 1); } #[test] fn test_can_create_paginator_for_taxonomy() { - let (_, library) = create_library(false, 3, false); - let tera = Tera::default(); - let taxonomy_def = TaxonomyConfig { - name: "tags".to_string(), - paginate_by: Some(2), - ..TaxonomyConfig::default() - }; - let 
taxonomy_item = TaxonomyItem { - name: "Something".to_string(), - slug: "something".to_string(), - path: "/tags/something".to_string(), - permalink: "https://vincent.is/tags/something/".to_string(), - pages: library.pages().keys().collect(), - }; - let taxonomy = Taxonomy { - kind: taxonomy_def, - lang: "en".to_owned(), - slug: "tags".to_string(), - permalink: "/tags/".to_string(), - items: vec![taxonomy_item.clone()], - }; - let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library, &tera, &None); - assert_eq!(paginator.pagers.len(), 2); - - assert_eq!(paginator.pagers[0].index, 1); - assert_eq!(paginator.pagers[0].pages.len(), 2); - assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/tags/something/"); - assert_eq!(paginator.pagers[0].path, "/tags/something/"); - - assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); - assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/tags/something/page/2/"); - assert_eq!(paginator.pagers[1].path, "/tags/something/page/2/"); - } - - #[test] - fn test_can_create_paginator_for_slugified_taxonomy() { let (_, library) = create_library(false, 3, false); let tera = Tera::default(); let taxonomy_def = TaxonomyConfig { @@ -461,7 +398,7 @@ mod tests { slug: "something".to_string(), path: "/some-tags/something/".to_string(), permalink: "https://vincent.is/some-tags/something/".to_string(), - pages: library.pages().keys().collect(), + pages: library.pages.keys().cloned().collect(), }; let taxonomy = Taxonomy { kind: taxonomy_def, @@ -479,7 +416,7 @@ mod tests { assert_eq!(paginator.pagers[0].path, "/some-tags/something/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/some-tags/something/page/2/"); assert_eq!(paginator.pagers[1].path, "/some-tags/something/page/2/"); } @@ -498,7 +435,7 @@ mod tests { 
assert_eq!(paginator.pagers[0].path, "/posts/"); assert_eq!(paginator.pagers[1].index, 2); - assert_eq!(paginator.pagers[1].pages.len(), 2); + assert_eq!(paginator.pagers[1].pages.len(), 1); assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/2/"); assert_eq!(paginator.pagers[1].path, "/posts/2/"); diff --git a/components/library/src/content/section.rs b/components/content/src/section.rs similarity index 91% rename from components/library/src/content/section.rs rename to components/content/src/section.rs index bfe7baa0..0d422ca2 100644 --- a/components/library/src/content/section.rs +++ b/components/content/src/section.rs @@ -1,21 +1,20 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use libs::slotmap::DefaultKey; use libs::tera::{Context as TeraContext, Tera}; use config::Config; use errors::{Context, Result}; -use front_matter::{split_section_content, SectionFrontMatter}; -use rendering::{render_content, Heading, RenderContext}; +use markdown::{render_content, RenderContext}; use utils::fs::read_file; -use utils::site::get_reading_analytics; +use utils::table_of_contents::Heading; use utils::templates::{render_template, ShortcodeDefinition}; -use crate::content::file_info::FileInfo; -use crate::content::ser::SerializingSection; -use crate::content::{find_related_assets, has_anchor}; +use crate::file_info::FileInfo; +use crate::front_matter::{split_section_content, SectionFrontMatter}; use crate::library::Library; +use crate::ser::{SectionSerMode, SerializingSection}; +use crate::utils::{find_related_assets, get_reading_analytics, has_anchor}; // Default is used to create a default index section if there is no _index.md in the root content directory #[derive(Clone, Debug, Default, PartialEq)] @@ -39,13 +38,13 @@ pub struct Section { /// All the non-md files we found next to the .md file as string pub serialized_assets: Vec, /// All direct pages of that section - pub pages: Vec, + pub pages: Vec, /// All pages that cannot be 
sorted in this section - pub ignored_pages: Vec, - /// The list of parent sections - pub ancestors: Vec, + pub ignored_pages: Vec, + /// The list of parent sections relative paths + pub ancestors: Vec, /// All direct subsections - pub subsections: Vec, + pub subsections: Vec, /// Toc made from the headings of the markdown file pub toc: Vec, /// How many words in the raw content @@ -83,7 +82,9 @@ impl Section { ) -> Result
{ let (meta, content) = split_section_content(file_path, content)?; let mut section = Section::new(file_path, meta, base_path); - section.lang = section.file.find_language(config)?; + section.lang = section + .file + .find_language(&config.default_language, &config.other_languages_codes())?; section.raw_content = content.to_string(); let (word_count, reading_time) = get_reading_analytics(§ion.raw_content); section.word_count = Some(word_count); @@ -159,7 +160,9 @@ impl Section { ); context.set_shortcode_definitions(shortcode_definitions); context.set_current_page_path(&self.file.relative); - context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None)); + context + .tera_context + .insert("section", &SerializingSection::new(self, SectionSerMode::ForMarkdown)); let res = render_content(&self.raw_content, &context) .with_context(|| format!("Failed to render content of {}", self.file.path.display()))?; @@ -179,7 +182,7 @@ impl Section { context.insert("config", &config.serialize(&self.lang)); context.insert("current_url", &self.permalink); context.insert("current_path", &self.path); - context.insert("section", &self.to_serialized(library)); + context.insert("section", &SerializingSection::new(&self, SectionSerMode::Full(library))); context.insert("lang", &self.lang); render_template(tpl_name, tera, context, &config.theme) @@ -205,14 +208,6 @@ impl Section { has_anchor(&self.toc, anchor) } - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { - SerializingSection::from_section(self, library) - } - - pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { - SerializingSection::from_section_basic(self, Some(library)) - } - pub fn paginate_by(&self) -> Option { match self.meta.paginate_by { None => None, @@ -222,6 +217,14 @@ impl Section { }, } } + + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { + SerializingSection::new(self, 
SectionSerMode::Full(library)) + } + + pub fn serialize_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { + SerializingSection::new(self, SectionSerMode::MetadataOnly(library)) + } } #[cfg(test)] diff --git a/components/content/src/ser.rs b/components/content/src/ser.rs new file mode 100644 index 00000000..021b901d --- /dev/null +++ b/components/content/src/ser.rs @@ -0,0 +1,189 @@ +use std::collections::HashMap; +use std::path::Path; + +use serde::Serialize; + +use crate::library::Library; +use crate::{Page, Section}; +use libs::tera::{Map, Value}; +use utils::table_of_contents::Heading; + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct TranslatedContent<'a> { + pub lang: &'a str, + pub permalink: &'a str, + pub title: &'a Option, + /// The path to the markdown file + pub path: &'a Path, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SerializingPage<'a> { + relative_path: &'a str, + content: &'a str, + permalink: &'a str, + slug: &'a str, + ancestors: &'a [String], + pub(crate) title: &'a Option, + description: &'a Option, + updated: &'a Option, + date: &'a Option, + year: Option, + month: Option, + day: Option, + taxonomies: &'a HashMap>, + extra: &'a Map, + path: &'a str, + components: &'a [String], + summary: &'a Option, + toc: &'a [Heading], + word_count: Option, + reading_time: Option, + assets: &'a [String], + draft: bool, + lang: &'a str, + lower: Option>>, + higher: Option>>, + translations: Vec>, +} + +impl<'a> SerializingPage<'a> { + pub fn new(page: &'a Page, library: Option<&'a Library>, include_siblings: bool) -> Self { + let mut year = None; + let mut month = None; + let mut day = None; + if let Some(d) = page.meta.datetime_tuple { + year = Some(d.0); + month = Some(d.1); + day = Some(d.2); + } + let mut lower = None; + let mut higher = None; + let mut translations = vec![]; + + if let Some(lib) = library { + translations = lib.find_translations(&page.file.canonical); + + if include_siblings { + lower = 
page + .lower + .as_ref() + .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false))); + higher = page + .higher + .as_ref() + .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false))); + } + } + + Self { + relative_path: &page.file.relative, + ancestors: &page.ancestors, + content: &page.content, + permalink: &page.permalink, + slug: &page.slug, + title: &page.meta.title, + description: &page.meta.description, + extra: &page.meta.extra, + updated: &page.meta.updated, + date: &page.meta.date, + year, + month, + day, + taxonomies: &page.meta.taxonomies, + path: &page.path, + components: &page.components, + summary: &page.summary, + toc: &page.toc, + word_count: page.word_count, + reading_time: page.reading_time, + assets: &page.serialized_assets, + draft: page.meta.draft, + lang: &page.lang, + lower, + higher, + translations, + } + } +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct SerializingSection<'a> { + relative_path: &'a str, + content: &'a str, + permalink: &'a str, + draft: bool, + ancestors: &'a [String], + title: &'a Option, + description: &'a Option, + extra: &'a Map, + path: &'a str, + components: &'a [String], + toc: &'a [Heading], + word_count: Option, + reading_time: Option, + lang: &'a str, + assets: &'a [String], + pages: Vec>, + subsections: Vec<&'a str>, + translations: Vec>, +} + +#[derive(Debug)] +pub enum SectionSerMode<'a> { + /// Just itself, no pages or subsections + /// TODO: I believe we can get rid of it? 
+ ForMarkdown, + /// Fetches subsections/ancestors/translations but not the pages + MetadataOnly(&'a Library), + /// Fetches everything + Full(&'a Library), +} + +impl<'a> SerializingSection<'a> { + pub fn new(section: &'a Section, mode: SectionSerMode<'a>) -> Self { + let mut pages = Vec::with_capacity(section.pages.len()); + let mut subsections = Vec::with_capacity(section.subsections.len()); + let mut translations = Vec::new(); + + match mode { + SectionSerMode::ForMarkdown => {} + SectionSerMode::MetadataOnly(lib) | SectionSerMode::Full(lib) => { + translations = lib.find_translations(§ion.file.canonical); + subsections = section + .subsections + .iter() + .map(|p| lib.sections[p].file.relative.as_str()) + .collect(); + + // Fetching pages on top + if let SectionSerMode::Full(_) = mode { + for p in §ion.pages { + pages.push(SerializingPage::new(&lib.pages[p], Some(lib), true)); + } + } + } + } + + Self { + relative_path: §ion.file.relative, + ancestors: §ion.ancestors, + draft: section.meta.draft, + content: §ion.content, + permalink: §ion.permalink, + title: §ion.meta.title, + description: §ion.meta.description, + extra: §ion.meta.extra, + path: §ion.path, + components: §ion.components, + toc: §ion.toc, + word_count: section.word_count, + reading_time: section.reading_time, + assets: §ion.serialized_assets, + lang: §ion.lang, + pages, + subsections, + translations, + } + } +} diff --git a/components/content/src/sorting.rs b/components/content/src/sorting.rs new file mode 100644 index 00000000..d4369488 --- /dev/null +++ b/components/content/src/sorting.rs @@ -0,0 +1,161 @@ +use std::cmp::Ordering; +use std::path::PathBuf; + +use crate::{Page, SortBy}; +use libs::lexical_sort::natural_lexical_cmp; +use libs::rayon::prelude::*; + +/// Sort by the field picked by the function. 
+/// The pages permalinks are used to break the ties +pub fn sort_pages(pages: &[&Page], sort_by: SortBy) -> (Vec, Vec) { + let (mut can_be_sorted, cannot_be_sorted): (Vec<&Page>, Vec<_>) = + pages.par_iter().partition(|page| match sort_by { + SortBy::Date => page.meta.datetime.is_some(), + SortBy::UpdateDate => { + page.meta.datetime.is_some() || page.meta.updated_datetime.is_some() + } + SortBy::Title => page.meta.title.is_some(), + SortBy::Weight => page.meta.weight.is_some(), + SortBy::None => unreachable!(), + }); + + can_be_sorted.par_sort_unstable_by(|a, b| { + let ord = match sort_by { + SortBy::Date => b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()), + SortBy::UpdateDate => std::cmp::max(b.meta.datetime, b.meta.updated_datetime) + .unwrap() + .cmp(&std::cmp::max(a.meta.datetime, a.meta.updated_datetime).unwrap()), + SortBy::Title => { + natural_lexical_cmp(a.meta.title.as_ref().unwrap(), b.meta.title.as_ref().unwrap()) + } + SortBy::Weight => a.meta.weight.unwrap().cmp(&b.meta.weight.unwrap()), + SortBy::None => unreachable!(), + }; + + if ord == Ordering::Equal { + a.permalink.cmp(&b.permalink) + } else { + ord + } + }); + + ( + can_be_sorted.iter().map(|p| p.file.path.clone()).collect(), + cannot_be_sorted.iter().map(|p: &&Page| p.file.path.clone()).collect(), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::PageFrontMatter; + + fn create_page_with_date(date: &str, updated_date: Option<&str>) -> Page { + let mut front_matter = PageFrontMatter { + date: Some(date.to_string()), + updated: updated_date.map(|c| c.to_string()), + ..Default::default() + }; + front_matter.date_to_datetime(); + Page::new(format!("content/hello-{}.md", date), front_matter, &PathBuf::new()) + } + + fn create_page_with_title(title: &str) -> Page { + let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() }; + Page::new(format!("content/hello-{}.md", title), front_matter, &PathBuf::new()) + } + + fn 
create_page_with_weight(weight: usize) -> Page { + let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() }; + Page::new(format!("content/hello-{}.md", weight), front_matter, &PathBuf::new()) + } + + #[test] + fn can_sort_by_dates() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_date("2017-01-01", None); + let page3 = create_page_with_date("2019-01-01", None); + let (pages, ignored_pages) = sort_pages(&vec![&page1, &page2, &page3], SortBy::Date); + assert_eq!(pages[0], page3.file.path); + assert_eq!(pages[1], page1.file.path); + assert_eq!(pages[2], page2.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_updated_dates() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_date("2017-01-01", Some("2022-02-01")); + let page3 = create_page_with_date("2019-01-01", None); + let (pages, ignored_pages) = sort_pages(&vec![&page1, &page2, &page3], SortBy::UpdateDate); + assert_eq!(pages[0], page2.file.path); + assert_eq!(pages[1], page3.file.path); + assert_eq!(pages[2], page1.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_weight() { + let page1 = create_page_with_weight(2); + let page2 = create_page_with_weight(3); + let page3 = create_page_with_weight(1); + let (pages, ignored_pages) = sort_pages(&vec![&page1, &page2, &page3], SortBy::Weight); + // Should be sorted by weight + assert_eq!(pages[0], page3.file.path); + assert_eq!(pages[1], page1.file.path); + assert_eq!(pages[2], page2.file.path); + assert_eq!(ignored_pages.len(), 0); + } + + #[test] + fn can_sort_by_title() { + let titles = vec![ + "bagel", + "track_3", + "microkernel", + "métro", + "BART", + "Underground", + "track_13", + "μ-kernel", + "meter", + "track_1", + ]; + let pages: Vec = titles.iter().map(|title| create_page_with_title(title)).collect(); + let (sorted_pages, ignored_pages) = + sort_pages(&pages.iter().map(|p| 
p).collect::>(), SortBy::Title); + // Should be sorted by title in lexical order + let sorted_titles: Vec<_> = sorted_pages + .iter() + .map(|key| { + pages.iter().find(|p| &p.file.path == key).unwrap().meta.title.as_ref().unwrap() + }) + .collect(); + assert_eq!(ignored_pages.len(), 0); + assert_eq!( + sorted_titles, + vec![ + "bagel", + "BART", + "μ-kernel", + "meter", + "métro", + "microkernel", + "track_1", + "track_3", + "track_13", + "Underground", + ] + ); + } + + #[test] + fn can_find_ignored_pages() { + let page1 = create_page_with_date("2018-01-01", None); + let page2 = create_page_with_weight(1); + let (pages, ignored_pages) = sort_pages(&vec![&page1, &page2], SortBy::Date); + assert_eq!(pages[0], page1.file.path); + assert_eq!(ignored_pages.len(), 1); + assert_eq!(ignored_pages[0], page2.file.path); + } +} diff --git a/components/content/src/taxonomies.rs b/components/content/src/taxonomies.rs new file mode 100644 index 00000000..15cc7da4 --- /dev/null +++ b/components/content/src/taxonomies.rs @@ -0,0 +1,463 @@ +use std::cmp::Ordering; +use std::path::PathBuf; + +use serde::Serialize; + +use config::{Config, TaxonomyConfig}; +use errors::{bail, Context as ErrorContext, Result}; +use libs::ahash::AHashMap; +use libs::tera::{Context, Tera}; +use utils::slugs::slugify_paths; +use utils::templates::{check_template_fallbacks, render_template}; + +use crate::library::Library; +use crate::ser::SerializingPage; +use crate::{Page, SortBy}; + +use crate::sorting::sort_pages; + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub struct SerializedTaxonomyItem<'a> { + name: &'a str, + slug: &'a str, + path: &'a str, + permalink: &'a str, + pages: Vec>, +} + +impl<'a> SerializedTaxonomyItem<'a> { + pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self { + let mut pages = vec![]; + + for p in &item.pages { + pages.push(SerializingPage::new(&library.pages[p], Some(library), false)); + } + + SerializedTaxonomyItem { + name: &item.name, + slug: 
&item.slug, + path: &item.path, + permalink: &item.permalink, + pages, + } + } +} + +/// A taxonomy with all its pages +#[derive(Debug, Clone)] +pub struct TaxonomyItem { + pub name: String, + pub slug: String, + pub path: String, + pub permalink: String, + pub pages: Vec, +} + +impl TaxonomyItem { + pub fn new( + name: &str, + lang: &str, + taxo_slug: &str, + taxo_pages: &[&Page], + config: &Config, + ) -> Self { + let item_slug = slugify_paths(name, config.slugify.taxonomies); + let path = if lang != config.default_language { + format!("/{}/{}/{}/", lang, taxo_slug, item_slug) + } else { + format!("/{}/{}/", taxo_slug, item_slug) + }; + let permalink = config.make_permalink(&path); + + // Taxonomy are almost always used for blogs so we filter by dates + // and it's not like we can sort things across sections by anything other + // than dates + let (mut pages, ignored_pages) = sort_pages(taxo_pages, SortBy::Date); + // We still append pages without dates at the end + pages.extend(ignored_pages); + TaxonomyItem { name: name.to_string(), permalink, path, slug: item_slug, pages } + } + + pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyItem<'a> { + SerializedTaxonomyItem::from_item(self, library) + } + + pub fn merge(&mut self, other: Self) { + self.pages.extend(other.pages); + } +} + +impl PartialEq for TaxonomyItem { + fn eq(&self, other: &Self) -> bool { + self.permalink == other.permalink + } +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub struct SerializedTaxonomy<'a> { + kind: &'a TaxonomyConfig, + lang: &'a str, + permalink: &'a str, + items: Vec>, +} + +impl<'a> SerializedTaxonomy<'a> { + pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { + let items: Vec = + taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); + SerializedTaxonomy { + kind: &taxonomy.kind, + lang: &taxonomy.lang, + permalink: &taxonomy.permalink, + items, + } + } +} +/// All different taxonomies we 
have and their content +#[derive(Debug, Clone, PartialEq)] +pub struct Taxonomy { + pub kind: TaxonomyConfig, + pub lang: String, + pub slug: String, + pub permalink: String, + // this vec is sorted by the count of item + pub items: Vec, +} + +impl Taxonomy { + fn new(tax_found: TaxonomyFound, config: &Config) -> Self { + let mut sorted_items = vec![]; + let slug = tax_found.slug; + for (name, pages) in tax_found.terms { + sorted_items.push(TaxonomyItem::new(name, tax_found.lang, &slug, &pages, config)); + } + + sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) { + Ordering::Less => Ordering::Less, + Ordering::Greater => Ordering::Greater, + Ordering::Equal => a.name.cmp(&b.name), + }); + sorted_items.dedup_by(|a, b| { + // custom Eq impl checks for equal permalinks + // here we make sure all pages from a get copied to b + // before dedup gets rid of it + if a == b { + b.merge(a.to_owned()); + true + } else { + false + } + }); + let path = if tax_found.lang != config.default_language { + format!("/{}/{}/", tax_found.lang, slug) + } else { + format!("/{}/", slug) + }; + let permalink = config.make_permalink(&path); + + Taxonomy { + slug, + lang: tax_found.lang.to_owned(), + kind: tax_found.config.clone(), + permalink, + items: sorted_items, + } + } + + pub fn render_term( + &self, + item: &TaxonomyItem, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result { + let mut context = Context::new(); + context.insert("config", &config.serialize(&self.lang)); + context.insert("lang", &self.lang); + context.insert("term", &SerializedTaxonomyItem::from_item(item, library)); + context.insert("taxonomy", &self.kind); + context.insert( + "current_url", + &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)), + ); + context.insert("current_path", &format!("/{}/{}/", self.kind.name, item.slug)); + + // Check for taxon-specific template, or use generic as fallback. 
+ let specific_template = format!("{}/single.html", self.kind.name); + let template = check_template_fallbacks(&specific_template, tera, &config.theme) + .unwrap_or("taxonomy_single.html"); + + render_template(template, tera, context, &config.theme) + .with_context(|| format!("Failed to render single term {} page.", self.kind.name)) + } + + pub fn render_all_terms( + &self, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result { + let mut context = Context::new(); + context.insert("config", &config.serialize(&self.lang)); + let terms: Vec = + self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); + context.insert("terms", &terms); + context.insert("lang", &self.lang); + context.insert("taxonomy", &self.kind); + context.insert("current_url", &config.make_permalink(&self.kind.name)); + context.insert("current_path", &format!("/{}/", self.kind.name)); + + // Check for taxon-specific template, or use generic as fallback. + let specific_template = format!("{}/list.html", self.kind.name); + let template = check_template_fallbacks(&specific_template, tera, &config.theme) + .unwrap_or("taxonomy_list.html"); + + render_template(template, tera, context, &config.theme) + .with_context(|| format!("Failed to render a list of {} page.", self.kind.name)) + } + + pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { + SerializedTaxonomy::from_taxonomy(self, library) + } + + pub fn len(&self) -> usize { + self.items.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +/// Only used while building the taxonomies +#[derive(Debug, PartialEq)] +struct TaxonomyFound<'a> { + pub lang: &'a str, + pub slug: String, + pub config: &'a TaxonomyConfig, + pub terms: AHashMap<&'a str, Vec<&'a Page>>, +} + +impl<'a> TaxonomyFound<'a> { + pub fn new(slug: String, lang: &'a str, config: &'a TaxonomyConfig) -> Self { + Self { slug, lang, config, terms: AHashMap::new() } + } +} + +pub fn 
find_taxonomies(config: &Config, pages: &AHashMap) -> Result> { + // lang -> tax names -> def + let mut taxonomies_def = AHashMap::new(); + let mut taxonomies_slug = AHashMap::new(); + + for (code, options) in &config.languages { + let mut taxo_lang_def = AHashMap::new(); + for t in &options.taxonomies { + let slug = slugify_paths(&t.name, config.slugify.taxonomies); + taxonomies_slug.insert(&t.name, slug.clone()); + taxo_lang_def.insert(slug.clone(), TaxonomyFound::new(slug, code, t)); + } + taxonomies_def.insert(code, taxo_lang_def); + } + + for (_, page) in pages { + for (name, terms) in &page.meta.taxonomies { + let slug = taxonomies_slug.get(name); + let mut exists = slug.is_some(); + if let Some(s) = slug { + if !taxonomies_def[&page.lang].contains_key(s) { + exists = false; + } + } + if !exists { + bail!( + "Page `{}` has taxonomy `{}` which is not defined in config.toml", + page.file.path.display(), + name + ); + } + let slug = slug.unwrap(); + + let taxonomy_found = taxonomies_def.get_mut(&page.lang).unwrap().get_mut(slug).unwrap(); + for term in terms { + taxonomy_found.terms.entry(term).or_insert_with(Vec::new).push(page); + } + } + } + + // And now generates the actual taxonomies + let mut taxonomies = vec![]; + for (_, vals) in taxonomies_def { + for (_, tax_found) in vals { + taxonomies.push(Taxonomy::new(tax_found, config)); + } + } + + Ok(taxonomies) +} + +#[cfg(test)] +mod tests { + use super::*; + use config::LanguageOptions; + use std::collections::HashMap; + use utils::slugs::SlugifyStrategy; + + macro_rules! 
taxonomies { + ($config:expr, [$($page:expr),+]) => {{ + let mut pages = AHashMap::new(); + $( + pages.insert($page.file.path.clone(), $page.clone()); + )+ + find_taxonomies(&$config, &pages).unwrap() + }}; + } + + fn create_page(path: &str, lang: &str, taxo: Vec<(&str, Vec<&str>)>) -> Page { + let mut page = Page::default(); + page.file.path = PathBuf::from(path); + page.lang = lang.to_owned(); + let mut taxonomies = HashMap::new(); + for (name, terms) in taxo { + taxonomies.insert(name.to_owned(), terms.iter().map(|t| t.to_string()).collect()); + } + page.meta.taxonomies = taxonomies; + page + } + + #[test] + fn errors_on_unknown_taxonomy() { + let config = Config::default_for_test(); + let page1 = create_page("unknown/taxo.md", "en", vec![("tags", vec!["rust", "db"])]); + let mut pages = AHashMap::new(); + pages.insert(page1.file.path.clone(), page1); + let taxonomies = find_taxonomies(&config, &pages); + assert!(taxonomies.is_err()); + let err = taxonomies.unwrap_err(); + assert_eq!( + err.to_string(), + "Page `unknown/taxo.md` has taxonomy `tags` which is not defined in config.toml" + ); + } + + #[test] + fn can_make_taxonomies() { + let mut config = Config::default_for_test(); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, + ]; + + let page1 = create_page( + "a.md", + "en", + vec![("tags", vec!["rust", "db"]), ("categories", vec!["tutorials"])], + ); + let page2 = create_page( + "b.md", + "en", + vec![("tags", vec!["rust", "js"]), ("categories", vec!["others"])], + ); + let page3 = create_page( + "c.md", + "en", + vec![("tags", vec!["js"]), ("authors", vec!["Vincent Prouillet"])], + ); + let taxonomies = taxonomies!(config, [page1, page2, page3]); + + let tags = taxonomies.iter().find(|t| t.kind.name == 
"tags").unwrap(); + assert_eq!(tags.len(), 3); + assert_eq!(tags.items[0].name, "db"); + assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); + assert_eq!(tags.items[0].pages.len(), 1); + assert_eq!(tags.items[1].name, "js"); + assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); + assert_eq!(tags.items[1].pages.len(), 2); + assert_eq!(tags.items[2].name, "rust"); + assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); + assert_eq!(tags.items[2].pages.len(), 2); + + let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap(); + assert_eq!(categories.items.len(), 2); + assert_eq!(categories.items[0].name, "others"); + assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/others/"); + assert_eq!(categories.items[0].pages.len(), 1); + + let authors = taxonomies.iter().find(|t| t.kind.name == "authors").unwrap(); + assert_eq!(authors.items.len(), 1); + assert_eq!(authors.items[0].permalink, "http://a-website.com/authors/vincent-prouillet/"); + } + + #[test] + fn can_make_multiple_language_taxonomies() { + let mut config = Config::default_for_test(); + config.slugify.taxonomies = SlugifyStrategy::Safe; + config.languages.insert("fr".to_owned(), LanguageOptions::default()); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + ]; + config.languages.get_mut("fr").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, + ]; + + let page1 = create_page("a.md", "en", vec![("categories", vec!["rust"])]); + let page2 = create_page("b.md", "en", vec![("tags", vec!["rust"])]); + let page3 = create_page("c.md", "fr", vec![("catégories", vec!["rust"])]); + let taxonomies = taxonomies!(config, 
[page1, page2, page3]); + + let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap(); + assert_eq!(categories.len(), 1); + assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/rust/"); + let tags = taxonomies.iter().find(|t| t.kind.name == "tags" && t.lang == "en").unwrap(); + assert_eq!(tags.len(), 1); + assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/rust/"); + let fr_categories = taxonomies.iter().find(|t| t.kind.name == "catégories").unwrap(); + assert_eq!(fr_categories.len(), 1); + assert_eq!(fr_categories.items[0].permalink, "http://a-website.com/fr/catégories/rust/"); + } + + #[test] + fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() { + let mut config = Config::default_for_test(); + config.languages.get_mut("en").unwrap().taxonomies = vec![ + TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, + ]; + let page1 = create_page("a.md", "en", vec![("test-taxonomy", vec!["Ecole"])]); + let page2 = create_page("b.md", "en", vec![("test taxonomy", vec!["École"])]); + let page3 = create_page("c.md", "en", vec![("test-taxonomy ", vec!["ecole"])]); + let page4 = create_page("d.md", "en", vec![("Test-Taxonomy ", vec!["école"])]); + let taxonomies = taxonomies!(config, [page1, page2, page3, page4]); + assert_eq!(taxonomies.len(), 1); + + let tax = &taxonomies[0]; + // under the default slugify strategy all of the provided terms should be the same + assert_eq!(tax.items.len(), 1); + let term1 = &tax.items[0]; + assert_eq!(term1.name, "Ecole"); + assert_eq!(term1.slug, "ecole"); + assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/"); + assert_eq!(term1.pages.len(), 4); + } + + #[test] + fn 
taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() { + let mut config = Config::default_for_test(); + config.slugify.taxonomies = SlugifyStrategy::Safe; + config.languages.get_mut("en").unwrap().taxonomies = + vec![TaxonomyConfig { name: "test".to_string(), ..TaxonomyConfig::default() }]; + let page1 = create_page("a.md", "en", vec![("test", vec!["Ecole"])]); + let page2 = create_page("b.md", "en", vec![("test", vec!["École"])]); + let page3 = create_page("c.md", "en", vec![("test", vec!["ecole"])]); + let page4 = create_page("d.md", "en", vec![("test", vec!["école"])]); + let taxonomies = taxonomies!(config, [page1, page2, page3, page4]); + assert_eq!(taxonomies.len(), 1); + let tax = &taxonomies[0]; + // under the safe slugify strategy all terms should be distinct + assert_eq!(tax.items.len(), 4); + } +} diff --git a/components/content/src/types.rs b/components/content/src/types.rs new file mode 100644 index 00000000..39174e96 --- /dev/null +++ b/components/content/src/types.rs @@ -0,0 +1,16 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SortBy { + /// Most recent to oldest + Date, + /// Most recent to oldest + UpdateDate, + /// Sort by title lexicographically + Title, + /// Lower weight comes first + Weight, + /// No sorting + None, +} diff --git a/components/library/src/content/mod.rs b/components/content/src/utils.rs similarity index 84% rename from components/library/src/content/mod.rs rename to components/content/src/utils.rs index ff1c9e90..3153571d 100644 --- a/components/library/src/content/mod.rs +++ b/components/content/src/utils.rs @@ -1,19 +1,10 @@ use std::path::{Path, PathBuf}; +use libs::unicode_segmentation::UnicodeSegmentation; use libs::walkdir::WalkDir; -mod file_info; -mod page; -mod section; -mod ser; - -pub use self::file_info::FileInfo; -pub use self::page::Page; -pub use self::section::Section; -pub use 
self::ser::{SerializingPage, SerializingSection}; - use config::Config; -use rendering::Heading; +use utils::table_of_contents::Heading; pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool { for heading in headings { @@ -60,6 +51,15 @@ pub fn find_related_assets(path: &Path, config: &Config, recursive: bool) -> Vec assets } +/// Get word count and estimated reading time +pub fn get_reading_analytics(content: &str) -> (usize, usize) { + let word_count: usize = content.unicode_words().count(); + + // https://help.medium.com/hc/en-us/articles/214991667-Read-time + // 275 seems a bit too high though + (word_count, ((word_count + 199) / 200)) +} + #[cfg(test)] mod tests { use super::*; @@ -194,4 +194,29 @@ mod tests { assert!(has_anchor(&input, "1-2")); } + + #[test] + fn reading_analytics_empty_text() { + let (word_count, reading_time) = get_reading_analytics(" "); + assert_eq!(word_count, 0); + assert_eq!(reading_time, 0); + } + + #[test] + fn reading_analytics_short_text() { + let (word_count, reading_time) = get_reading_analytics("Hello World"); + assert_eq!(word_count, 2); + assert_eq!(reading_time, 1); + } + + #[test] + fn reading_analytics_long_text() { + let mut content = String::new(); + for _ in 0..1000 { + content.push_str(" Hello world"); + } + let (word_count, reading_time) = get_reading_analytics(&content); + assert_eq!(word_count, 2000); + assert_eq!(reading_time, 10); + } } diff --git a/components/library/Cargo.toml b/components/library/Cargo.toml deleted file mode 100644 index 3b105150..00000000 --- a/components/library/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "library" -version = "0.1.0" -edition = "2018" - -[dependencies] -serde = {version = "1.0", features = ["derive"] } - -front_matter = { path = "../front_matter" } -config = { path = "../config" } -utils = { path = "../utils" } -rendering = { path = "../rendering" } -errors = { path = "../errors" } -libs = { path = "../libs" } - -[dev-dependencies] -tempfile = "3" diff 
--git a/components/library/src/content/ser.rs b/components/library/src/content/ser.rs deleted file mode 100644 index b7a77752..00000000 --- a/components/library/src/content/ser.rs +++ /dev/null @@ -1,351 +0,0 @@ -//! What we are sending to the templates when rendering them -use std::collections::{HashMap, HashSet}; -use std::path::Path; - -use libs::tera::{Map, Value}; -use serde::Serialize; - -use crate::content::{Page, Section}; -use crate::library::Library; -use rendering::Heading; - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct TranslatedContent<'a> { - lang: &'a str, - permalink: &'a str, - title: &'a Option, - /// The path to the markdown file; useful for retrieving the full page through - /// the `get_page` function. - path: &'a Path, -} - -impl<'a> TranslatedContent<'a> { - // copypaste eh, not worth creating an enum imo - pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec { - let mut translations = vec![]; - - #[allow(clippy::or_fun_call)] - for key in library - .translations - .get(§ion.file.canonical) - .or(Some(&HashSet::new())) - .unwrap() - .iter() - { - let other = library.get_section_by_key(*key); - translations.push(TranslatedContent { - lang: &other.lang, - permalink: &other.permalink, - title: &other.meta.title, - path: &other.file.path, - }); - } - - translations - } - - pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec { - let mut translations = vec![]; - - #[allow(clippy::or_fun_call)] - for key in - library.translations.get(&page.file.canonical).or(Some(&HashSet::new())).unwrap().iter() - { - let other = library.get_page_by_key(*key); - translations.push(TranslatedContent { - lang: &other.lang, - permalink: &other.permalink, - title: &other.meta.title, - path: &other.file.path, - }); - } - - translations - } -} - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct SerializingPage<'a> { - relative_path: &'a str, - content: &'a str, - permalink: &'a str, - slug: &'a str, - ancestors: 
Vec<&'a str>, - title: &'a Option, - description: &'a Option, - updated: &'a Option, - date: &'a Option, - year: Option, - month: Option, - day: Option, - taxonomies: &'a HashMap>, - extra: &'a Map, - path: &'a str, - components: &'a [String], - summary: &'a Option, - toc: &'a [Heading], - word_count: Option, - reading_time: Option, - assets: &'a [String], - draft: bool, - lang: &'a str, - lighter: Option>>, - heavier: Option>>, - earlier_updated: Option>>, - later_updated: Option>>, - earlier: Option>>, - later: Option>>, - title_prev: Option>>, - title_next: Option>>, - translations: Vec>, -} - -impl<'a> SerializingPage<'a> { - /// Grabs all the data from a page, including sibling pages - pub fn from_page(page: &'a Page, library: &'a Library) -> Self { - let mut year = None; - let mut month = None; - let mut day = None; - if let Some(d) = page.meta.datetime_tuple { - year = Some(d.0); - month = Some(d.1); - day = Some(d.2); - } - let pages = library.pages(); - let lighter = page - .lighter - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let heavier = page - .heavier - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let earlier_updated = page - .earlier_updated - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let later_updated = page - .later_updated - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let earlier = page - .earlier - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let later = page - .later - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let title_prev = page - .title_prev - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let title_next = page - .title_next - .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let ancestors = page - .ancestors - .iter() - .map(|k| 
library.get_section_by_key(*k).file.relative.as_str()) - .collect(); - - let translations = TranslatedContent::find_all_pages(page, library); - - SerializingPage { - relative_path: &page.file.relative, - ancestors, - content: &page.content, - permalink: &page.permalink, - slug: &page.slug, - title: &page.meta.title, - description: &page.meta.description, - extra: &page.meta.extra, - updated: &page.meta.updated, - date: &page.meta.date, - year, - month, - day, - taxonomies: &page.meta.taxonomies, - path: &page.path, - components: &page.components, - summary: &page.summary, - toc: &page.toc, - word_count: page.word_count, - reading_time: page.reading_time, - assets: &page.serialized_assets, - draft: page.meta.draft, - lang: &page.lang, - lighter, - heavier, - earlier_updated, - later_updated, - earlier, - later, - title_prev, - title_next, - translations, - } - } - - /// currently only used in testing - pub fn get_title(&'a self) -> &'a Option { - self.title - } - - /// Same as from_page but does not fill sibling pages - pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self { - let mut year = None; - let mut month = None; - let mut day = None; - if let Some(d) = page.meta.datetime_tuple { - year = Some(d.0); - month = Some(d.1); - day = Some(d.2); - } - let ancestors = if let Some(lib) = library { - page.ancestors - .iter() - .map(|k| lib.get_section_by_key(*k).file.relative.as_str()) - .collect() - } else { - vec![] - }; - - let translations = if let Some(lib) = library { - TranslatedContent::find_all_pages(page, lib) - } else { - vec![] - }; - - SerializingPage { - relative_path: &page.file.relative, - ancestors, - content: &page.content, - permalink: &page.permalink, - slug: &page.slug, - title: &page.meta.title, - description: &page.meta.description, - extra: &page.meta.extra, - updated: &page.meta.updated, - date: &page.meta.date, - year, - month, - day, - taxonomies: &page.meta.taxonomies, - path: &page.path, - components: 
&page.components, - summary: &page.summary, - toc: &page.toc, - word_count: page.word_count, - reading_time: page.reading_time, - assets: &page.serialized_assets, - draft: page.meta.draft, - lang: &page.lang, - lighter: None, - heavier: None, - earlier_updated: None, - later_updated: None, - earlier: None, - later: None, - title_prev: None, - title_next: None, - translations, - } - } -} - -#[derive(Clone, Debug, PartialEq, Serialize)] -pub struct SerializingSection<'a> { - relative_path: &'a str, - content: &'a str, - permalink: &'a str, - draft: bool, - ancestors: Vec<&'a str>, - title: &'a Option, - description: &'a Option, - extra: &'a Map, - path: &'a str, - components: &'a [String], - toc: &'a [Heading], - word_count: Option, - reading_time: Option, - lang: &'a str, - assets: &'a [String], - pages: Vec>, - subsections: Vec<&'a str>, - translations: Vec>, -} - -impl<'a> SerializingSection<'a> { - pub fn from_section(section: &'a Section, library: &'a Library) -> Self { - let mut pages = Vec::with_capacity(section.pages.len()); - let mut subsections = Vec::with_capacity(section.subsections.len()); - - for k in §ion.pages { - pages.push(library.get_page_by_key(*k).to_serialized_basic(library)); - } - - for k in §ion.subsections { - subsections.push(library.get_section_path_by_key(*k)); - } - - let ancestors = section - .ancestors - .iter() - .map(|k| library.get_section_by_key(*k).file.relative.as_str()) - .collect(); - let translations = TranslatedContent::find_all_sections(section, library); - - SerializingSection { - relative_path: §ion.file.relative, - ancestors, - draft: section.meta.draft, - content: §ion.content, - permalink: §ion.permalink, - title: §ion.meta.title, - description: §ion.meta.description, - extra: §ion.meta.extra, - path: §ion.path, - components: §ion.components, - toc: §ion.toc, - word_count: section.word_count, - reading_time: section.reading_time, - assets: §ion.serialized_assets, - lang: §ion.lang, - pages, - subsections, - 
translations, - } - } - - /// Same as from_section but doesn't fetch pages - pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self { - let mut ancestors = vec![]; - let mut translations = vec![]; - let mut subsections = vec![]; - if let Some(lib) = library { - ancestors = section - .ancestors - .iter() - .map(|k| lib.get_section_by_key(*k).file.relative.as_str()) - .collect(); - translations = TranslatedContent::find_all_sections(section, lib); - subsections = - section.subsections.iter().map(|k| lib.get_section_path_by_key(*k)).collect(); - } - - SerializingSection { - relative_path: §ion.file.relative, - ancestors, - draft: section.meta.draft, - content: §ion.content, - permalink: §ion.permalink, - title: §ion.meta.title, - description: §ion.meta.description, - extra: §ion.meta.extra, - path: §ion.path, - components: §ion.components, - toc: §ion.toc, - word_count: section.word_count, - reading_time: section.reading_time, - assets: §ion.serialized_assets, - lang: §ion.lang, - pages: vec![], - subsections, - translations, - } - } -} diff --git a/components/library/src/lib.rs b/components/library/src/lib.rs deleted file mode 100644 index 6e2f0abb..00000000 --- a/components/library/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -mod content; -mod library; -mod pagination; -mod sorting; -mod taxonomies; - -pub use libs::slotmap::{DenseSlotMap, Key}; - -pub use crate::library::Library; -pub use content::{Page, Section, SerializingPage, SerializingSection}; -pub use pagination::Paginator; -pub use sorting::sort_actual_pages_by_date; -pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem}; diff --git a/components/library/src/library.rs b/components/library/src/library.rs deleted file mode 100644 index 1128160f..00000000 --- a/components/library/src/library.rs +++ /dev/null @@ -1,499 +0,0 @@ -use std::collections::{HashMap, HashSet}; -use std::path::{Path, PathBuf}; - -use libs::slotmap::{DefaultKey, DenseSlotMap}; - -use 
crate::content::{Page, Section}; -use crate::sorting::{ - find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight, -}; -use config::Config; -use front_matter::{PageFrontMatter, SortBy}; - -// Like vec! but for HashSet -macro_rules! set { - ( $( $x:expr ),* ) => { - { - let mut s = HashSet::new(); - $( - s.insert($x); - )* - s - } - }; -} - -/// Houses everything about pages and sections -/// Think of it as a database where each page and section has an id (Key here) -/// that can be used to find the actual value -/// Sections and pages can then refer to other elements by those keys, which are very cheap to -/// copy. -/// We can assume the keys are always existing as removing a page/section deletes all references -/// to that key. -#[derive(Debug)] -pub struct Library { - /// All the pages of the site - pages: DenseSlotMap, - /// All the sections of the site - sections: DenseSlotMap, - /// A mapping path -> key for pages so we can easily get their key - pub paths_to_pages: HashMap, - /// A mapping path -> key for sections so we can easily get their key - pub paths_to_sections: HashMap, - /// Whether we need to look for translations - is_multilingual: bool, - - // aliases -> files, - // so we can easily check for conflicts - pub reverse_aliases: HashMap>, - - pub translations: HashMap>, -} - -impl Library { - pub fn new(cap_pages: usize, cap_sections: usize, is_multilingual: bool) -> Self { - Library { - pages: DenseSlotMap::with_capacity(cap_pages), - sections: DenseSlotMap::with_capacity(cap_sections), - paths_to_pages: HashMap::with_capacity(cap_pages), - paths_to_sections: HashMap::with_capacity(cap_sections), - is_multilingual, - reverse_aliases: HashMap::new(), - translations: HashMap::new(), - } - } - - fn insert_reverse_aliases(&mut self, entries: Vec, file_rel_path: &str) { - for entry in entries { - self.reverse_aliases - .entry(entry) - .and_modify(|s| { - s.insert(file_rel_path.to_owned()); - }) - .or_insert_with(|| { - let mut s = 
HashSet::new(); - s.insert(file_rel_path.to_owned()); - s - }); - } - } - - /// Add a section and return its Key - pub fn insert_section(&mut self, section: Section) -> DefaultKey { - let file_path = section.file.path.clone(); - let rel_path = section.path.clone(); - - let mut entries = vec![rel_path]; - entries.extend(section.meta.aliases.to_vec()); - self.insert_reverse_aliases(entries, §ion.file.relative); - - let key = self.sections.insert(section); - self.paths_to_sections.insert(file_path, key); - key - } - - /// Add a page and return its Key - pub fn insert_page(&mut self, page: Page) -> DefaultKey { - let file_path = page.file.path.clone(); - let rel_path = page.path.clone(); - - let mut entries = vec![rel_path]; - entries.extend(page.meta.aliases.to_vec()); - self.insert_reverse_aliases(entries, &page.file.relative); - - let key = self.pages.insert(page); - - self.paths_to_pages.insert(file_path, key); - key - } - - pub fn pages(&self) -> &DenseSlotMap { - &self.pages - } - - pub fn pages_mut(&mut self) -> &mut DenseSlotMap { - &mut self.pages - } - - pub fn pages_values(&self) -> Vec<&Page> { - self.pages.values().collect::>() - } - - pub fn sections(&self) -> &DenseSlotMap { - &self.sections - } - - pub fn sections_mut(&mut self) -> &mut DenseSlotMap { - &mut self.sections - } - - pub fn sections_values(&self) -> Vec<&Section> { - self.sections.values().collect::>() - } - - /// Find out the direct subsections of each subsection if there are some - /// as well as the pages for each section - pub fn populate_sections(&mut self, config: &Config) { - let root_path = - self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap(); - // We are going to get both the ancestors and grandparents for each section in one go - let mut ancestors: HashMap> = HashMap::new(); - let mut subsections: HashMap> = HashMap::new(); - - for (key, section) in self.sections.iter_mut() { - // Make sure the pages of a section are empty since we can call that 
many times on `serve` - section.pages = vec![]; - section.ignored_pages = vec![]; - - if let Some(ref grand_parent) = section.file.grand_parent { - subsections - // Using the original filename to work for multi-lingual sections - .entry(grand_parent.join(§ion.file.filename)) - .or_insert_with(Vec::new) - .push(section.file.path.clone()); - } - - // populate translations if necessary - if self.is_multilingual { - self.translations - .entry(section.file.canonical.clone()) - .and_modify(|trans| { - trans.insert(key); - }) - .or_insert(set![key]); - }; - - // Index has no ancestors, no need to go through it - if section.is_index() { - ancestors.insert(section.file.path.clone(), vec![]); - continue; - } - - let mut path = root_path.clone(); - let root_key = self.paths_to_sections[&root_path.join(§ion.file.filename)]; - // Index section is the first ancestor of every single section - let mut parents = vec![root_key]; - for component in §ion.file.components { - path = path.join(component); - // Skip itself - if path == section.file.parent { - continue; - } - if let Some(section_key) = - self.paths_to_sections.get(&path.join(§ion.file.filename)) - { - parents.push(*section_key); - } - } - ancestors.insert(section.file.path.clone(), parents); - } - - for (key, page) in &mut self.pages { - let parent_filename = if page.lang != config.default_language { - format!("_index.{}.md", page.lang) - } else { - "_index.md".to_string() - }; - let mut parent_section_path = page.file.parent.join(&parent_filename); - while let Some(section_key) = self.paths_to_sections.get(&parent_section_path) { - let parent_is_transparent; - // We need to get a reference to a section later so keep the scope of borrowing small - { - let section = self.sections.get_mut(*section_key).unwrap(); - section.pages.push(key); - parent_is_transparent = section.meta.transparent; - } - page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_default(); - // Don't forget to push the actual parent - 
page.ancestors.push(*section_key); - - // Find the page template if one of a parent has page_template set - // Stops after the first one found, keep in mind page.ancestors - // is [index, ..., parent] so we need to reverse it first - if page.meta.template.is_none() { - for ancestor in page.ancestors.iter().rev() { - let s = self.sections.get(*ancestor).unwrap(); - if s.meta.page_template.is_some() { - page.meta.template = s.meta.page_template.clone(); - break; - } - } - } - - if !parent_is_transparent { - break; - } - - // We've added `_index(.{LANG})?.md` so if we are here so we need to go up twice - match parent_section_path.clone().parent().unwrap().parent() { - Some(parent) => parent_section_path = parent.join(&parent_filename), - None => break, - } - } - - // populate translations if necessary - if self.is_multilingual { - self.translations - .entry(page.file.canonical.clone()) - .and_modify(|trans| { - trans.insert(key); - }) - .or_insert(set![key]); - }; - } - - self.sort_sections_pages(); - - let sections = self.paths_to_sections.clone(); - let mut sections_weight = HashMap::new(); - for (key, section) in &self.sections { - sections_weight.insert(key, section.meta.weight); - } - - for section in self.sections.values_mut() { - if let Some(children) = subsections.get(§ion.file.path) { - let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect(); - children.sort_by(|a, b| sections_weight[a].cmp(§ions_weight[b])); - section.subsections = children; - } - section.ancestors = ancestors.get(§ion.file.path).cloned().unwrap_or_default(); - } - } - - /// Sort all sections pages according to sorting method given - /// Pages that cannot be sorted are set to the section.ignored_pages instead - pub fn sort_sections_pages(&mut self) { - fn get_data<'a, T>( - section: &'a Section, - pages: &'a DenseSlotMap, - field: impl Fn(&'a PageFrontMatter) -> Option, - ) -> Vec<(&'a DefaultKey, Option, &'a str)> { - section - .pages - .iter() - .map(|k| { - if let 
Some(page) = pages.get(*k) { - (k, field(&page.meta), page.permalink.as_ref()) - } else { - unreachable!("Sorting got an unknown page") - } - }) - .collect() - } - - let mut updates = HashMap::new(); - for (key, section) in &self.sections { - let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { - SortBy::None => continue, - SortBy::Date => { - let data = get_data(section, &self.pages, |meta| meta.datetime); - - sort_pages_by_date(data) - } - SortBy::UpdateDate => { - let data = get_data(section, &self.pages, |meta| { - std::cmp::max(meta.datetime, meta.updated_datetime) - }); - - sort_pages_by_date(data) - } - SortBy::Title => { - let data = get_data(section, &self.pages, |meta| meta.title.as_deref()); - - sort_pages_by_title(data) - } - SortBy::Weight => { - let data = get_data(section, &self.pages, |meta| meta.weight); - - sort_pages_by_weight(data) - } - }; - updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by)); - } - - for (key, (sorted, cannot_be_sorted, sort_by)) in updates { - let section_is_transparent = if let Some(section) = self.sections.get(key) { - section.meta.transparent - } else { - false - }; - - if !section_is_transparent { - // Find sibling between sorted pages first - let with_siblings = find_siblings(&sorted); - - for (k2, val1, val2) in with_siblings { - if let Some(page) = self.pages.get_mut(k2) { - match sort_by { - SortBy::Date => { - page.earlier = val2; - page.later = val1; - } - SortBy::UpdateDate => { - page.earlier_updated = val2; - page.later_updated = val1; - } - SortBy::Title => { - page.title_prev = val1; - page.title_next = val2; - } - SortBy::Weight => { - page.lighter = val1; - page.heavier = val2; - } - SortBy::None => { - unreachable!("Impossible to find siblings in SortBy::None") - } - } - } else { - unreachable!("Sorting got an unknown page") - } - } - } - - if let Some(s) = self.sections.get_mut(key) { - s.pages = sorted; - s.ignored_pages = cannot_be_sorted; - } - } - } - - 
/// Find all the orphan pages: pages that are in a folder without an `_index.md` - pub fn get_all_orphan_pages(&self) -> Vec<&Page> { - let pages_in_sections = - self.sections.values().flat_map(|s| &s.pages).collect::>(); - - self.pages - .iter() - .filter(|(key, _)| !pages_in_sections.contains(&key)) - .map(|(_, page)| page) - .collect() - } - - /// Used in integration tests - pub fn get_section_key>(&self, path: P) -> Option<&DefaultKey> { - self.paths_to_sections.get(path.as_ref()) - } - - pub fn get_section>(&self, path: P) -> Option<&Section> { - self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default()) - } - - /// Used in integration tests - pub fn get_section_mut>(&mut self, path: P) -> Option<&mut Section> { - self.sections - .get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default()) - } - - pub fn get_section_by_key(&self, key: DefaultKey) -> &Section { - self.sections.get(key).unwrap() - } - - pub fn get_section_path_by_key(&self, key: DefaultKey) -> &str { - &self.get_section_by_key(key).file.relative - } - - pub fn get_page>(&self, path: P) -> Option<&Page> { - self.pages.get(self.paths_to_pages.get(path.as_ref()).cloned().unwrap_or_default()) - } - - pub fn get_page_by_key(&self, key: DefaultKey) -> &Page { - self.pages.get(key).unwrap() - } - - pub fn remove_section>(&mut self, path: P) -> Option
{ - if let Some(k) = self.paths_to_sections.remove(path.as_ref()) { - self.sections.remove(k) - } else { - None - } - } - - pub fn remove_page>(&mut self, path: P) -> Option { - if let Some(k) = self.paths_to_pages.remove(path.as_ref()) { - self.pages.remove(k) - } else { - None - } - } - - pub fn contains_section>(&self, path: P) -> bool { - self.paths_to_sections.contains_key(path.as_ref()) - } - - /// This will check every section/page paths + the aliases and ensure none of them - /// are colliding. - /// Returns (path colliding, [list of files causing that collision]) - pub fn check_for_path_collisions(&self) -> Vec<(String, Vec)> { - self.reverse_aliases - .iter() - .filter_map(|(alias, files)| { - if files.len() > 1 { - Some((alias.clone(), files.clone().into_iter().collect::>())) - } else { - None - } - }) - .collect() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_find_no_collisions() { - let mut library = Library::new(10, 10, false); - let page = Page { path: "hello".to_string(), ..Default::default() }; - let page2 = Page { path: "hello-world".to_string(), ..Default::default() }; - let section = Section { path: "blog".to_string(), ..Default::default() }; - library.insert_page(page); - library.insert_page(page2); - library.insert_section(section); - - let collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 0); - } - - #[test] - fn can_find_collisions_between_pages() { - let mut library = Library::new(10, 10, false); - let mut page = Page { path: "hello".to_string(), ..Default::default() }; - page.file.relative = "hello".to_string(); - let mut page2 = Page { path: "hello".to_string(), ..Default::default() }; - page2.file.relative = "hello-world".to_string(); - let mut section = Section { path: "blog".to_string(), ..Default::default() }; - section.file.relative = "hello-world".to_string(); - library.insert_page(page.clone()); - library.insert_page(page2.clone()); - library.insert_section(section); - - let 
collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 1); - assert_eq!(collisions[0].0, page.path); - assert!(collisions[0].1.contains(&page.file.relative)); - assert!(collisions[0].1.contains(&page2.file.relative)); - } - - #[test] - fn can_find_collisions_with_an_alias() { - let mut library = Library::new(10, 10, false); - let mut page = Page { path: "hello".to_string(), ..Default::default() }; - page.file.relative = "hello".to_string(); - let mut page2 = Page { path: "hello".to_string(), ..Default::default() }; - page2.file.relative = "hello-world".to_string(); - page2.meta.aliases = vec!["hello".to_string()]; - let mut section = Section { path: "blog".to_string(), ..Default::default() }; - section.file.relative = "hello-world".to_string(); - library.insert_page(page.clone()); - library.insert_page(page2.clone()); - library.insert_section(section); - - let collisions = library.check_for_path_collisions(); - assert_eq!(collisions.len(), 1); - assert_eq!(collisions[0].0, page.path); - assert!(collisions[0].1.contains(&page.file.relative)); - assert!(collisions[0].1.contains(&page2.file.relative)); - } -} diff --git a/components/library/src/sorting.rs b/components/library/src/sorting.rs deleted file mode 100644 index 8d27e61a..00000000 --- a/components/library/src/sorting.rs +++ /dev/null @@ -1,271 +0,0 @@ -use std::cmp::Ordering; - -use libs::lexical_sort::natural_lexical_cmp; -use libs::rayon::prelude::*; -use libs::slotmap::DefaultKey; -use libs::time::OffsetDateTime; - -use crate::content::Page; - -/// Used by the feed -/// There to not have to import sorting stuff in the site crate -#[allow(clippy::trivially_copy_pass_by_ref)] -pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering { - let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()); - if ord == Ordering::Equal { - a.permalink.cmp(&b.permalink) - } else { - ord - } -} - -/// Takes a list of (page key, date, permalink) and sort them by dates if possible 
-/// Pages without date will be put in the unsortable bucket -/// The permalink is used to break ties -pub fn sort_pages_by_date( - pages: Vec<(&DefaultKey, Option, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = b.1.unwrap().cmp(&a.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Takes a list of (page key, title, permalink) and sort them by title if possible. -/// Uses the a natural lexical comparison as defined by the lexical_sort crate. -/// Pages without title will be put in the unsortable bucket. -/// The permalink is used to break ties. -pub fn sort_pages_by_title( - pages: Vec<(&DefaultKey, Option<&str>, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = natural_lexical_cmp(a.1.unwrap(), b.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Takes a list of (page key, weight, permalink) and sort them by weight if possible -/// Pages without weight will be put in the unsortable bucket -/// The permalink is used to break ties -pub fn sort_pages_by_weight( - pages: Vec<(&DefaultKey, Option, &str)>, -) -> (Vec, Vec) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = - pages.into_par_iter().partition(|page| page.1.is_some()); - - can_be_sorted.par_sort_unstable_by(|a, b| { - let ord = a.1.unwrap().cmp(&b.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(b.2) - } else { - ord - } - }); - - (can_be_sorted.iter().map(|p| *p.0).collect(), 
cannot_be_sorted.iter().map(|p| *p.0).collect()) -} - -/// Find the lighter/heavier, earlier/later, and title_prev/title_next -/// pages for all pages having a date/weight/title -pub fn find_siblings( - sorted: &[DefaultKey], -) -> Vec<(DefaultKey, Option, Option)> { - let mut res = Vec::with_capacity(sorted.len()); - let length = sorted.len(); - - for (i, key) in sorted.iter().enumerate() { - let mut with_siblings = (*key, None, None); - - if i > 0 { - // lighter / later / title_prev - with_siblings.1 = Some(sorted[i - 1]); - } - - if i < length - 1 { - // heavier / earlier / title_next - with_siblings.2 = Some(sorted[i + 1]); - } - res.push(with_siblings); - } - - res -} - -#[cfg(test)] -mod tests { - use libs::slotmap::DenseSlotMap; - use std::path::PathBuf; - - use super::{find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight}; - use crate::content::Page; - use front_matter::PageFrontMatter; - - fn create_page_with_date(date: &str) -> Page { - let mut front_matter = - PageFrontMatter { date: Some(date.to_string()), ..Default::default() }; - front_matter.date_to_datetime(); - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - fn create_page_with_title(title: &str) -> Page { - let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() }; - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - fn create_page_with_weight(weight: usize) -> Page { - let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() }; - Page::new("content/hello.md", front_matter, &PathBuf::new()) - } - - #[test] - fn can_sort_by_dates() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_date("2018-01-01"); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_date("2017-01-01"); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_date("2019-01-01"); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, 
page1.meta.datetime, page1.permalink.as_ref()), - (&key2, page2.meta.datetime, page2.permalink.as_ref()), - (&key3, page3.meta.datetime, page3.permalink.as_ref()), - ]; - let (pages, _) = sort_pages_by_date(input); - // Should be sorted by date - assert_eq!(pages[0], key3); - assert_eq!(pages[1], key1); - assert_eq!(pages[2], key2); - } - - #[test] - fn can_sort_by_titles() { - let titles = vec![ - "bagel", - "track_3", - "microkernel", - "métro", - "BART", - "Underground", - "track_13", - "μ-kernel", - "meter", - "track_1", - ]; - let pages: Vec = titles.iter().map(|title| create_page_with_title(title)).collect(); - let mut dense = DenseSlotMap::new(); - let keys: Vec<_> = pages.iter().map(|p| dense.insert(p)).collect(); - let input: Vec<_> = pages - .iter() - .enumerate() - .map(|(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref())) - .collect(); - let (sorted, _) = sort_pages_by_title(input); - // Should be sorted by title - let sorted_titles: Vec<_> = sorted - .iter() - .map(|key| dense.get(*key).unwrap().meta.title.as_ref().unwrap()) - .collect(); - assert_eq!( - sorted_titles, - vec![ - "bagel", - "BART", - "μ-kernel", - "meter", - "métro", - "microkernel", - "track_1", - "track_3", - "track_13", - "Underground", - ] - ); - } - - #[test] - fn can_sort_by_weight() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(2); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_weight(3); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_weight(1); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, page1.meta.weight, page1.permalink.as_ref()), - (&key2, page2.meta.weight, page2.permalink.as_ref()), - (&key3, page3.meta.weight, page3.permalink.as_ref()), - ]; - let (pages, _) = sort_pages_by_weight(input); - // Should be sorted by weight - assert_eq!(pages[0], key3); - assert_eq!(pages[1], key1); - assert_eq!(pages[2], key2); - } - - #[test] - fn 
ignore_page_with_missing_field() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(2); - let key1 = dense.insert(page1.clone()); - let page2 = create_page_with_weight(3); - let key2 = dense.insert(page2.clone()); - let page3 = create_page_with_date("2019-01-01"); - let key3 = dense.insert(page3.clone()); - - let input = vec![ - (&key1, page1.meta.weight, page1.permalink.as_ref()), - (&key2, page2.meta.weight, page2.permalink.as_ref()), - (&key3, page3.meta.weight, page3.permalink.as_ref()), - ]; - - let (pages, unsorted) = sort_pages_by_weight(input); - assert_eq!(pages.len(), 2); - assert_eq!(unsorted.len(), 1); - } - - #[test] - fn can_find_siblings() { - let mut dense = DenseSlotMap::new(); - let page1 = create_page_with_weight(1); - let key1 = dense.insert(page1); - let page2 = create_page_with_weight(2); - let key2 = dense.insert(page2); - let page3 = create_page_with_weight(3); - let key3 = dense.insert(page3); - - let input = vec![key1, key2, key3]; - - let pages = find_siblings(&input); - - assert_eq!(pages[0].1, None); - assert_eq!(pages[0].2, Some(key2)); - - assert_eq!(pages[1].1, Some(key1)); - assert_eq!(pages[1].2, Some(key3)); - - assert_eq!(pages[2].1, Some(key2)); - assert_eq!(pages[2].2, None); - } -} diff --git a/components/library/src/taxonomies/mod.rs b/components/library/src/taxonomies/mod.rs deleted file mode 100644 index bdd9a295..00000000 --- a/components/library/src/taxonomies/mod.rs +++ /dev/null @@ -1,924 +0,0 @@ -use std::cmp::Ordering; -use std::collections::HashMap; - -use libs::slotmap::DefaultKey; -use libs::tera::{Context, Tera}; -use serde::Serialize; - -use config::{Config, Taxonomy as TaxonomyConfig}; -use errors::{bail, Context as ErrorContext, Result}; -use utils::templates::{check_template_fallbacks, render_template}; - -use crate::content::SerializingPage; -use crate::library::Library; -use crate::sorting::sort_pages_by_date; -use utils::slugs::slugify_paths; - -#[derive(Debug, Clone, PartialEq, 
Serialize)] -pub struct SerializedTaxonomyItem<'a> { - name: &'a str, - slug: &'a str, - path: &'a str, - permalink: &'a str, - pages: Vec>, -} - -impl<'a> SerializedTaxonomyItem<'a> { - pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self { - let mut pages = vec![]; - - for key in &item.pages { - let page = library.get_page_by_key(*key); - pages.push(page.to_serialized_basic(library)); - } - - SerializedTaxonomyItem { - name: &item.name, - slug: &item.slug, - path: &item.path, - permalink: &item.permalink, - pages, - } - } -} - -/// A taxonomy with all its pages -#[derive(Debug, Clone)] -pub struct TaxonomyItem { - pub name: String, - pub slug: String, - pub path: String, - pub permalink: String, - pub pages: Vec, -} - -impl TaxonomyItem { - pub fn new( - name: &str, - lang: &str, - taxo_slug: &str, - config: &Config, - keys: Vec, - library: &Library, - ) -> Self { - // Taxonomy are almost always used for blogs so we filter by dates - // and it's not like we can sort things across sections by anything other - // than dates - let data = keys - .iter() - .map(|k| { - if let Some(page) = library.pages().get(*k) { - (k, page.meta.datetime, page.permalink.as_ref()) - } else { - unreachable!("Sorting got an unknown page") - } - }) - .collect(); - let (mut pages, ignored_pages) = sort_pages_by_date(data); - let item_slug = slugify_paths(name, config.slugify.taxonomies); - let path = if lang != config.default_language { - format!("/{}/{}/{}/", lang, taxo_slug, item_slug) - } else { - format!("/{}/{}/", taxo_slug, item_slug) - }; - let permalink = config.make_permalink(&path); - - // We still append pages without dates at the end - pages.extend(ignored_pages); - - TaxonomyItem { name: name.to_string(), permalink, path, slug: item_slug, pages } - } - - pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyItem<'a> { - SerializedTaxonomyItem::from_item(self, library) - } - - pub fn merge(&mut self, other: Self) { - 
self.pages.extend(other.pages); - } -} - -impl PartialEq for TaxonomyItem { - fn eq(&self, other: &Self) -> bool { - self.permalink == other.permalink - } -} - -#[derive(Debug, Clone, PartialEq, Serialize)] -pub struct SerializedTaxonomy<'a> { - kind: &'a TaxonomyConfig, - lang: &'a str, - permalink: &'a str, - items: Vec>, -} - -impl<'a> SerializedTaxonomy<'a> { - pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { - let items: Vec = - taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); - SerializedTaxonomy { - kind: &taxonomy.kind, - lang: &taxonomy.lang, - permalink: &taxonomy.permalink, - items, - } - } -} - -/// All different taxonomies we have and their content -#[derive(Debug, Clone, PartialEq)] -pub struct Taxonomy { - pub kind: TaxonomyConfig, - pub lang: String, - pub slug: String, - pub permalink: String, - // this vec is sorted by the count of item - pub items: Vec, -} - -impl Taxonomy { - fn new( - kind: TaxonomyConfig, - lang: &str, - config: &Config, - items: HashMap>, - library: &Library, - ) -> Taxonomy { - let mut sorted_items = vec![]; - let slug = slugify_paths(&kind.name, config.slugify.taxonomies); - for (name, pages) in items { - sorted_items.push(TaxonomyItem::new(&name, lang, &slug, config, pages, library)); - } - //sorted_items.sort_by(|a, b| a.name.cmp(&b.name)); - sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) { - Ordering::Less => Ordering::Less, - Ordering::Greater => Ordering::Greater, - Ordering::Equal => a.name.cmp(&b.name), - }); - sorted_items.dedup_by(|a, b| { - // custom Eq impl checks for equal permalinks - // here we make sure all pages from a get copied to b - // before dedup gets rid of it - if a == b { - b.merge(a.to_owned()); - true - } else { - false - } - }); - let path = if lang != config.default_language { - format!("/{}/{}/", lang, slug) - } else { - format!("/{}/", slug) - }; - let permalink = config.make_permalink(&path); - - Taxonomy { kind, slug, 
lang: lang.to_owned(), permalink, items: sorted_items } - } - - pub fn len(&self) -> usize { - self.items.len() - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - pub fn render_term( - &self, - item: &TaxonomyItem, - tera: &Tera, - config: &Config, - library: &Library, - ) -> Result { - let mut context = Context::new(); - context.insert("config", &config.serialize(&self.lang)); - context.insert("lang", &self.lang); - context.insert("term", &SerializedTaxonomyItem::from_item(item, library)); - context.insert("taxonomy", &self.kind); - context.insert( - "current_url", - &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)), - ); - context.insert("current_path", &format!("/{}/{}/", self.kind.name, item.slug)); - - // Check for taxon-specific template, or use generic as fallback. - let specific_template = format!("{}/single.html", self.kind.name); - let template = check_template_fallbacks(&specific_template, tera, &config.theme) - .unwrap_or("taxonomy_single.html"); - - render_template(template, tera, context, &config.theme) - .with_context(|| format!("Failed to render single term {} page.", self.kind.name)) - } - - pub fn render_all_terms( - &self, - tera: &Tera, - config: &Config, - library: &Library, - ) -> Result { - let mut context = Context::new(); - context.insert("config", &config.serialize(&self.lang)); - let terms: Vec = - self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); - context.insert("terms", &terms); - context.insert("lang", &self.lang); - context.insert("taxonomy", &self.kind); - context.insert("current_url", &config.make_permalink(&self.kind.name)); - context.insert("current_path", &format!("/{}/", self.kind.name)); - - // Check for taxon-specific template, or use generic as fallback. 
- let specific_template = format!("{}/list.html", self.kind.name); - let template = check_template_fallbacks(&specific_template, tera, &config.theme) - .unwrap_or("taxonomy_list.html"); - - render_template(template, tera, context, &config.theme) - .with_context(|| format!("Failed to render a list of {} page.", self.kind.name)) - } - - pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { - SerializedTaxonomy::from_taxonomy(self, library) - } -} - -pub fn find_taxonomies(config: &Config, library: &Library) -> Result> { - let mut slugs_to_lang = HashMap::new(); - - let taxonomies_def = { - let mut m = HashMap::new(); - // the default language taxonomies - for t in &config.taxonomies { - let slug = slugify_paths(&t.name, config.slugify.taxonomies); - let key = format!("{}-{}", slug, config.default_language); - slugs_to_lang.insert(key.clone(), config.default_language.as_str()); - m.insert(key, t); - } - - // other languages taxonomies - for (code, options) in config.other_languages() { - for t in &options.taxonomies { - let slug = slugify_paths(&t.name, config.slugify.taxonomies); - let key = format!("{}-{}", slug, code); - slugs_to_lang.insert(key.clone(), code); - m.insert(key, t); - } - } - m - }; - - let mut all_taxonomies = HashMap::new(); - for (key, page) in library.pages() { - for (name, taxo_term) in &page.meta.taxonomies { - let taxo_slug = slugify_paths(name, config.slugify.taxonomies); - let taxo_key = format!("{}-{}", &taxo_slug, page.lang); - if taxonomies_def.contains_key(&taxo_key) { - all_taxonomies.entry(taxo_key.clone()).or_insert_with(HashMap::new); - - for term in taxo_term { - all_taxonomies - .get_mut(&taxo_key) - .unwrap() - .entry(term.to_string()) - .or_insert_with(Vec::new) - .push(key); - } - } else { - bail!( - "Page `{}` has taxonomy `{}` which is not defined in config.toml", - page.file.path.display(), - name - ); - } - } - } - - let mut taxonomies = vec![]; - - for (name, taxo) in all_taxonomies { - 
taxonomies.push(Taxonomy::new( - taxonomies_def[&name].clone(), - slugs_to_lang[&name], - config, - taxo, - library, - )); - } - - Ok(taxonomies) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::collections::HashMap; - - use crate::content::Page; - use crate::library::Library; - use config::{Config, LanguageOptions, Slugify, Taxonomy as TaxonomyConfig}; - use utils::slugs::SlugifyStrategy; - - #[test] - fn can_make_taxonomies() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); - taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = 
None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => t = Some(x), - "categories" => c = Some(x), - "authors" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - assert_eq!(tags.items.len(), 3); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].path, "/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "js"); - assert_eq!(tags.items[1].slug, "js"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(tags.items[2].name, "rust"); - assert_eq!(tags.items[2].slug, "rust"); - assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[2].pages.len(), 2); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn can_make_slugified_taxonomies() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut 
taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); - taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => t = Some(x), - "categories" => c = Some(x), - "authors" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - assert_eq!(tags.items.len(), 3); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "js"); - assert_eq!(tags.items[1].slug, "js"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/"); - assert_eq!(tags.items[1].path, "/tags/js/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(tags.items[2].name, "rust"); - 
assert_eq!(tags.items[2].slug, "rust"); - assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[2].pages.len(), 2); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn errors_on_unknown_taxonomy() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = - vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }]; - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let taxonomies = find_taxonomies(&config, &library); - assert!(taxonomies.is_err()); - let err = taxonomies.unwrap_err(); - // no path as this is created by Default - assert_eq!( - format!("{}", err), - "Page `` has taxonomy `tags` which is not defined in config.toml" - ); - } - - #[test] - fn can_make_taxonomies_in_multiple_languages() { - let mut config = Config::default(); - config.languages.insert("fr".to_owned(), LanguageOptions::default()); - let mut library = Library::new(2, 0, true); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let french_taxo = vec![ - TaxonomyConfig { name: "auteurs".to_string(), 
..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => { - if x.lang == "en" { - t = Some(x) - } - } - "categories" => c = Some(x), - "auteurs" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - - assert_eq!(tags.items.len(), 2); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, 
"http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "rust"); - assert_eq!(tags.items[1].slug, "rust"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(authors.items[0].name, "Vincent Prouillet"); - assert_eq!(authors.items[0].slug, "vincent-prouillet"); - assert_eq!( - authors.items[0].permalink, - "http://a-website.com/fr/auteurs/vincent-prouillet/" - ); - assert_eq!(authors.items[0].pages.len(), 1); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn can_make_utf8_taxonomies() { - let mut config = Config::default(); - config.slugify.taxonomies = SlugifyStrategy::Safe; - let mut library = Library::new(2, 0, true); - - let french_taxo = - vec![TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - - let mut page = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page = HashMap::new(); - taxo_page.insert("catégories".to_string(), vec!["Écologie".to_string()]); - page.meta.taxonomies = taxo_page; - library.insert_page(page); - - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let categories = &taxonomies[0]; - - assert_eq!(categories.items.len(), 1); - assert_eq!(categories.items[0].name, 
"Écologie"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/fr/catégories/Écologie/"); - assert_eq!(categories.items[0].pages.len(), 1); - } - - #[test] - fn can_make_slugified_taxonomies_in_multiple_languages() { - let mut config = Config::default(); - config.slugify.taxonomies = SlugifyStrategy::On; - let mut library = Library::new(2, 0, true); - - config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let french_taxo = vec![ - TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, - ]; - let lang_options = config::LanguageOptions { - taxonomies: french_taxo, - ..config::LanguageOptions::default() - }; - config.languages.insert("fr".to_owned(), lang_options); - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); - taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page { lang: "fr".to_string(), ..Default::default() }; - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]); - taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]); - page3.meta.taxonomies = taxo_page3; - library.insert_page(page3); - - let taxonomies = find_taxonomies(&config, 
&library).unwrap(); - let (tags, categories, authors) = { - let mut t = None; - let mut c = None; - let mut a = None; - for x in taxonomies { - match x.kind.name.as_ref() { - "tags" => { - if x.lang == "en" { - t = Some(x) - } - } - "categories" => c = Some(x), - "auteurs" => a = Some(x), - _ => unreachable!(), - } - } - (t.unwrap(), c.unwrap(), a.unwrap()) - }; - - assert_eq!(tags.items.len(), 2); - assert_eq!(categories.items.len(), 2); - assert_eq!(authors.items.len(), 1); - - assert_eq!(tags.items[0].name, "db"); - assert_eq!(tags.items[0].slug, "db"); - assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/"); - assert_eq!(tags.items[0].pages.len(), 1); - - assert_eq!(tags.items[1].name, "rust"); - assert_eq!(tags.items[1].slug, "rust"); - assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/"); - assert_eq!(tags.items[1].pages.len(), 2); - - assert_eq!(authors.items[0].name, "Vincent Prouillet"); - assert_eq!(authors.items[0].slug, "vincent-prouillet"); - assert_eq!( - authors.items[0].permalink, - "http://a-website.com/fr/auteurs/vincent-prouillet/" - ); - assert_eq!(authors.items[0].pages.len(), 1); - - assert_eq!(categories.items[0].name, "Other"); - assert_eq!(categories.items[0].slug, "other"); - assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/"); - assert_eq!(categories.items[0].pages.len(), 1); - - assert_eq!(categories.items[1].name, "Programming tutorials"); - assert_eq!(categories.items[1].slug, "programming-tutorials"); - assert_eq!( - categories.items[1].permalink, - "http://a-website.com/categories/programming-tutorials/" - ); - assert_eq!(categories.items[1].pages.len(), 1); - } - - #[test] - fn taxonomies_are_groupted_by_permalink() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), 
..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert( - "test-taxonomy".to_string(), - vec!["term one".to_string(), "term two".to_string()], - ); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert( - "test taxonomy".to_string(), - vec!["Term Two".to_string(), "term-one".to_string()], - ); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy ".to_string(), vec!["term one ".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["Term-Two ".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - assert_eq!(taxonomies.len(), 1); - - let tax = &taxonomies[0]; - - // terms should be "term one", "term two" - assert_eq!(tax.items.len(), 2); - - let term1 = &tax.items[0]; - let term2 = &tax.items[1]; - - assert_eq!(term1.name, "term one"); - assert_eq!(term1.slug, "term-one"); - assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/term-one/"); - assert_eq!(term1.pages.len(), 3); - - assert_eq!(term2.name, "Term Two"); - assert_eq!(term2.slug, "term-two"); - assert_eq!(term2.permalink, 
"http://a-website.com/test-taxonomy/term-two/"); - assert_eq!(term2.pages.len(), 3); - } - - #[test] - fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() { - let mut config = Config::default(); - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("test taxonomy".to_string(), vec!["École".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy ".to_string(), vec!["ecole".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["école".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - assert_eq!(taxonomies.len(), 1); - - let tax = &taxonomies[0]; - - // under the default slugify stratagy all of the provided terms should be the same - assert_eq!(tax.items.len(), 
1); - - let term1 = &tax.items[0]; - - assert_eq!(term1.name, "Ecole"); - assert_eq!(term1.slug, "ecole"); - assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/"); - assert_eq!(term1.pages.len(), 4); - } - - #[test] - fn taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() { - let mut config = Config::default(); - config.slugify = Slugify { - paths: SlugifyStrategy::Safe, - taxonomies: SlugifyStrategy::Safe, - anchors: SlugifyStrategy::Safe, - }; - let mut library = Library::new(2, 0, false); - - config.taxonomies = vec![ - TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() }, - ]; - - let mut page1 = Page::default(); - let mut taxo_page1 = HashMap::new(); - taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]); - page1.meta.taxonomies = taxo_page1; - page1.lang = config.default_language.clone(); - library.insert_page(page1); - - let mut page2 = Page::default(); - let mut taxo_page2 = HashMap::new(); - taxo_page2.insert("test-taxonomy".to_string(), vec!["École".to_string()]); - page2.meta.taxonomies = taxo_page2; - page2.lang = config.default_language.clone(); - library.insert_page(page2); - - let mut page3 = Page::default(); - let mut taxo_page3 = HashMap::new(); - taxo_page3.insert("test-taxonomy".to_string(), vec!["ecole".to_string()]); - page3.meta.taxonomies = taxo_page3; - page3.lang = config.default_language.clone(); - library.insert_page(page3); - - let mut page4 = Page::default(); - let mut taxo_page4 = HashMap::new(); - taxo_page4.insert("test-taxonomy".to_string(), vec!["école".to_string()]); - page4.meta.taxonomies = taxo_page4; - page4.lang = config.default_language.clone(); - library.insert_page(page4); - - // 
taxonomies should all be the same - let taxonomies = find_taxonomies(&config, &library).unwrap(); - let tax = &taxonomies[0]; - - // if names are different permalinks should also be different so - // the items are still accessible - for term1 in tax.items.iter() { - for term2 in tax.items.iter() { - assert!(term1.name == term2.name || term1.permalink != term2.permalink); - } - } - - // under the safe slugify strategy all terms should be distinct - assert_eq!(tax.items.len(), 4); - } -} diff --git a/components/libs/Cargo.toml b/components/libs/Cargo.toml index dd88e18a..af6d8ae1 100644 --- a/components/libs/Cargo.toml +++ b/components/libs/Cargo.toml @@ -4,46 +4,48 @@ version = "0.1.0" edition = "2021" [dependencies] -tera = { version = "1", features = ["preserve_order"] } -toml = "0.5" -csv = "1" -base64 = "0.13" -serde_json = "1" -serde_yaml = "0.8" -quickxml_to_serde = "0.5" -url = "2" -syntect = "4" -once_cell = "1" -globset = "0.4" -unic-langid = "0.9" -image = "0.24" -regex = "1" -time = { version = "0.3" } -rayon = "1" -webp = "0.2" -svg_metadata = "0.4" -slotmap = "1" -lexical-sort = "0.3" -walkdir = "2" -pulldown-cmark = { version = "0.9", default-features = false, features = ["simd"] } -gh-emoji = "1" -elasticlunr-rs = {version = "2", default-features = false, features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] } +ahash = "0.7.6" ammonia = "3" +base64 = "0.13" +csv = "1" +elasticlunr-rs = {version = "2", default-features = false, features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] } +filetime = "0.2" +gh-emoji = "1" +glob = "0.3" +globset = "0.4" +image = "0.24" +lexical-sort = "0.3" +minify-html = "0.8" nom-bibtex = "0.3" num-format = "0.4" -sha2 = "0.10" -unicode-segmentation = "1.2" -slug = "0.1" +once_cell = "1" percent-encoding = "2" -filetime = "0.2" -minify-html = "0.8" -reqwest = {version = "0.11", default-features = false, features = ["blocking"]} -sass-rs = "0.2" 
+pulldown-cmark = { version = "0.9", default-features = false, features = ["simd"] } +quickxml_to_serde = "0.5" +rayon = "1" +regex = "1" relative-path = "1" -glob = "0.3" +reqwest = { version = "0.11", default-features = false, features = ["blocking"] } +sass-rs = "0.2" +serde_json = "1" +serde_yaml = "0.8" +sha2 = "0.10" +slug = "0.1" +svg_metadata = "0.4" +syntect = "4" +tera = { version = "1", features = ["preserve_order"] } +time = "0.3" +toml = "0.5" +unic-langid = "0.9" +unicode-segmentation = "1.2" +url = "2" +walkdir = "2" +webp = "0.2" [features] +# TODO: fix me, it doesn't pick up the reqwuest feature if not set as default +default = ["rust-tls"] rust-tls = ["reqwest/rustls-tls"] native-tls = ["reqwest/default-tls"] indexing-zh = ["elasticlunr-rs/zh"] diff --git a/components/libs/src/lib.rs b/components/libs/src/lib.rs index ebf29693..d2cb9981 100644 --- a/components/libs/src/lib.rs +++ b/components/libs/src/lib.rs @@ -4,6 +4,7 @@ //! to define features, it is done in a single place. //! It doesn't work for crates exporting macros like `serde` or dev deps but that's ok for most. 
+pub use ahash; pub use ammonia; pub use base64; pub use csv; @@ -29,7 +30,6 @@ pub use sass_rs; pub use serde_json; pub use serde_yaml; pub use sha2; -pub use slotmap; pub use slug; pub use svg_metadata; pub use syntect; diff --git a/components/link_checker/src/lib.rs b/components/link_checker/src/lib.rs index c1d7d399..1123d239 100644 --- a/components/link_checker/src/lib.rs +++ b/components/link_checker/src/lib.rs @@ -43,6 +43,7 @@ pub fn check_url(url: &str, config: &LinkChecker) -> Result { headers.insert(ACCEPT, "text/html".parse().unwrap()); headers.append(ACCEPT, "*/*".parse().unwrap()); + // TODO: pass the client to the check_url, do not pass the config let client = Client::builder() .user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"))) .build() diff --git a/components/rendering/Cargo.toml b/components/markdown/Cargo.toml similarity index 86% rename from components/rendering/Cargo.toml rename to components/markdown/Cargo.toml index 8c449605..90040f9a 100644 --- a/components/rendering/Cargo.toml +++ b/components/markdown/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "rendering" +name = "markdown" version = "0.1.0" edition = "2021" include = ["src/**/*"] @@ -10,7 +10,6 @@ pest = "2" pest_derive = "2" errors = { path = "../errors" } -front_matter = { path = "../front_matter" } utils = { path = "../utils" } config = { path = "../config" } link_checker = { path = "../link_checker" } diff --git a/components/rendering/benches/all.rs b/components/markdown/benches/all.rs similarity index 100% rename from components/rendering/benches/all.rs rename to components/markdown/benches/all.rs diff --git a/components/rendering/src/codeblock/fence.rs b/components/markdown/src/codeblock/fence.rs similarity index 100% rename from components/rendering/src/codeblock/fence.rs rename to components/markdown/src/codeblock/fence.rs diff --git a/components/rendering/src/codeblock/highlight.rs b/components/markdown/src/codeblock/highlight.rs similarity index 100% 
rename from components/rendering/src/codeblock/highlight.rs rename to components/markdown/src/codeblock/highlight.rs diff --git a/components/rendering/src/codeblock/mod.rs b/components/markdown/src/codeblock/mod.rs similarity index 100% rename from components/rendering/src/codeblock/mod.rs rename to components/markdown/src/codeblock/mod.rs diff --git a/components/rendering/src/content.pest b/components/markdown/src/content.pest similarity index 100% rename from components/rendering/src/content.pest rename to components/markdown/src/content.pest diff --git a/components/rendering/src/context.rs b/components/markdown/src/context.rs similarity index 98% rename from components/rendering/src/context.rs rename to components/markdown/src/context.rs index 179dd319..cefce702 100644 --- a/components/rendering/src/context.rs +++ b/components/markdown/src/context.rs @@ -2,9 +2,9 @@ use std::borrow::Cow; use std::collections::HashMap; use config::Config; -use front_matter::InsertAnchor; use libs::tera::{Context, Tera}; use utils::templates::ShortcodeDefinition; +use utils::types::InsertAnchor; /// All the information from the zola site that is needed to render HTML from markdown #[derive(Debug)] diff --git a/components/rendering/src/lib.rs b/components/markdown/src/lib.rs similarity index 90% rename from components/rendering/src/lib.rs rename to components/markdown/src/lib.rs index 11936102..f3744423 100644 --- a/components/rendering/src/lib.rs +++ b/components/markdown/src/lib.rs @@ -2,16 +2,14 @@ mod codeblock; mod context; mod markdown; mod shortcode; -mod table_of_contents; use shortcode::{extract_shortcodes, insert_md_shortcodes}; use errors::Result; +use crate::markdown::markdown_to_html; +pub use crate::markdown::Rendered; pub use context::RenderContext; -use markdown::markdown_to_html; -pub use markdown::Rendered; -pub use table_of_contents::Heading; pub fn render_content(content: &str, context: &RenderContext) -> Result { // avoid parsing the content if needed diff 
--git a/components/rendering/src/markdown.rs b/components/markdown/src/markdown.rs similarity index 99% rename from components/rendering/src/markdown.rs rename to components/markdown/src/markdown.rs index 68ce8aa3..78ecebca 100644 --- a/components/rendering/src/markdown.rs +++ b/components/markdown/src/markdown.rs @@ -6,12 +6,12 @@ use libs::pulldown_cmark as cmark; use libs::tera; use crate::context::RenderContext; -use crate::table_of_contents::{make_table_of_contents, Heading}; use errors::{anyhow, Context, Error, Result}; -use front_matter::InsertAnchor; use libs::pulldown_cmark::escape::escape_html; use utils::site::resolve_internal_link; use utils::slugs::slugify_anchors; +use utils::table_of_contents::{make_table_of_contents, Heading}; +use utils::types::InsertAnchor; use utils::vec::InsertMany; use self::cmark::{Event, LinkType, Options, Parser, Tag}; diff --git a/components/rendering/src/shortcode/mod.rs b/components/markdown/src/shortcode/mod.rs similarity index 100% rename from components/rendering/src/shortcode/mod.rs rename to components/markdown/src/shortcode/mod.rs diff --git a/components/rendering/src/shortcode/parser.rs b/components/markdown/src/shortcode/parser.rs similarity index 100% rename from components/rendering/src/shortcode/parser.rs rename to components/markdown/src/shortcode/parser.rs diff --git a/components/rendering/tests/codeblocks.rs b/components/markdown/tests/codeblocks.rs similarity index 100% rename from components/rendering/tests/codeblocks.rs rename to components/markdown/tests/codeblocks.rs diff --git a/components/rendering/tests/common.rs b/components/markdown/tests/common.rs similarity index 97% rename from components/rendering/tests/common.rs rename to components/markdown/tests/common.rs index 8ac274bf..ae09cd38 100644 --- a/components/rendering/tests/common.rs +++ b/components/markdown/tests/common.rs @@ -7,9 +7,9 @@ use libs::tera::Tera; use config::Config; use errors::Result; -use front_matter::InsertAnchor; -use 
rendering::{render_content, RenderContext, Rendered}; +use markdown::{render_content, RenderContext, Rendered}; use templates::ZOLA_TERA; +use utils::types::InsertAnchor; fn configurable_render( content: &str, diff --git a/components/rendering/tests/links.rs b/components/markdown/tests/links.rs similarity index 100% rename from components/rendering/tests/links.rs rename to components/markdown/tests/links.rs diff --git a/components/rendering/tests/markdown.rs b/components/markdown/tests/markdown.rs similarity index 99% rename from components/rendering/tests/markdown.rs rename to components/markdown/tests/markdown.rs index be3cbd14..bee98409 100644 --- a/components/rendering/tests/markdown.rs +++ b/components/markdown/tests/markdown.rs @@ -3,10 +3,10 @@ use std::collections::HashMap; use libs::tera::Tera; use config::Config; -use front_matter::InsertAnchor; -use rendering::{render_content, RenderContext}; +use markdown::{render_content, RenderContext}; use templates::ZOLA_TERA; use utils::slugs::SlugifyStrategy; +use utils::types::InsertAnchor; mod common; diff --git a/components/rendering/tests/shortcodes.rs b/components/markdown/tests/shortcodes.rs similarity index 100% rename from components/rendering/tests/shortcodes.rs rename to components/markdown/tests/shortcodes.rs diff --git a/components/rendering/tests/snapshots/codeblocks__can_add_line_numbers.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_add_line_numbers.snap rename to components/markdown/tests/snapshots/codeblocks__can_add_line_numbers.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap rename to 
components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_highlight.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap b/components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap rename to components/markdown/tests/snapshots/codeblocks__can_add_line_numbers_with_lineno_start.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_hide_lines.snap b/components/markdown/tests/snapshots/codeblocks__can_hide_lines.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_hide_lines.snap rename to components/markdown/tests/snapshots/codeblocks__can_hide_lines.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_all_lines.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_all_lines.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_all_lines.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_all_lines.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_at_end.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_at_end.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_at_end.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_at_end.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_line_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_line_range.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_line_range.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_line_range.snap diff --git 
a/components/rendering/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_mix_line_ranges.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_out_of_bounds.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_ranges_overlap.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_reversed_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_reversed_range.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_reversed_range.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_reversed_range.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_single_line.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_single_line.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_single_line.snap diff --git 
a/components/rendering/tests/snapshots/codeblocks__can_highlight_single_line_range.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_single_line_range.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_single_line_range.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_single_line_range.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_unknown_lang.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap b/components/markdown/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap rename to components/markdown/tests/snapshots/codeblocks__can_highlight_weird_fence_tokens.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap rename to components/markdown/tests/snapshots/codeblocks__can_render_completely_mixed_codeblock.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap rename to 
components/markdown/tests/snapshots/codeblocks__can_render_multiple_shortcodes_in_codeblock.snap diff --git a/components/rendering/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap b/components/markdown/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap rename to components/markdown/tests/snapshots/codeblocks__can_render_shortcode_in_codeblock.snap diff --git a/components/rendering/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap b/components/markdown/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap similarity index 100% rename from components/rendering/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap rename to components/markdown/tests/snapshots/codeblocks__does_nothing_with_highlighting_disabled.snap diff --git a/components/rendering/tests/snapshots/markdown__all_markdown_features_integration.snap b/components/markdown/tests/snapshots/markdown__all_markdown_features_integration.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__all_markdown_features_integration.snap rename to components/markdown/tests/snapshots/markdown__all_markdown_features_integration.snap diff --git a/components/rendering/tests/snapshots/markdown__can_customise_anchor_template.snap b/components/markdown/tests/snapshots/markdown__can_customise_anchor_template.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_customise_anchor_template.snap rename to components/markdown/tests/snapshots/markdown__can_customise_anchor_template.snap diff --git a/components/rendering/tests/snapshots/markdown__can_handle_heading_ids-2.snap b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids-2.snap similarity index 100% rename from 
components/rendering/tests/snapshots/markdown__can_handle_heading_ids-2.snap rename to components/markdown/tests/snapshots/markdown__can_handle_heading_ids-2.snap diff --git a/components/rendering/tests/snapshots/markdown__can_handle_heading_ids.snap b/components/markdown/tests/snapshots/markdown__can_handle_heading_ids.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_handle_heading_ids.snap rename to components/markdown/tests/snapshots/markdown__can_handle_heading_ids.snap diff --git a/components/rendering/tests/snapshots/markdown__can_insert_anchors-2.snap b/components/markdown/tests/snapshots/markdown__can_insert_anchors-2.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_insert_anchors-2.snap rename to components/markdown/tests/snapshots/markdown__can_insert_anchors-2.snap diff --git a/components/rendering/tests/snapshots/markdown__can_insert_anchors.snap b/components/markdown/tests/snapshots/markdown__can_insert_anchors.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_insert_anchors.snap rename to components/markdown/tests/snapshots/markdown__can_insert_anchors.snap diff --git a/components/rendering/tests/snapshots/markdown__can_make_zola_internal_links.snap b/components/markdown/tests/snapshots/markdown__can_make_zola_internal_links.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_make_zola_internal_links.snap rename to components/markdown/tests/snapshots/markdown__can_make_zola_internal_links.snap diff --git a/components/rendering/tests/snapshots/markdown__can_render_basic_markdown.snap b/components/markdown/tests/snapshots/markdown__can_render_basic_markdown.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_render_basic_markdown.snap rename to components/markdown/tests/snapshots/markdown__can_render_basic_markdown.snap diff --git 
a/components/rendering/tests/snapshots/markdown__can_use_external_links_options-2.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-2.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_external_links_options-2.snap rename to components/markdown/tests/snapshots/markdown__can_use_external_links_options-2.snap diff --git a/components/rendering/tests/snapshots/markdown__can_use_external_links_options-3.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-3.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_external_links_options-3.snap rename to components/markdown/tests/snapshots/markdown__can_use_external_links_options-3.snap diff --git a/components/rendering/tests/snapshots/markdown__can_use_external_links_options-4.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-4.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_external_links_options-4.snap rename to components/markdown/tests/snapshots/markdown__can_use_external_links_options-4.snap diff --git a/components/rendering/tests/snapshots/markdown__can_use_external_links_options-5.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options-5.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_external_links_options-5.snap rename to components/markdown/tests/snapshots/markdown__can_use_external_links_options-5.snap diff --git a/components/rendering/tests/snapshots/markdown__can_use_external_links_options.snap b/components/markdown/tests/snapshots/markdown__can_use_external_links_options.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_external_links_options.snap rename to components/markdown/tests/snapshots/markdown__can_use_external_links_options.snap diff --git 
a/components/rendering/tests/snapshots/markdown__can_use_smart_punctuation.snap b/components/markdown/tests/snapshots/markdown__can_use_smart_punctuation.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__can_use_smart_punctuation.snap rename to components/markdown/tests/snapshots/markdown__can_use_smart_punctuation.snap diff --git a/components/rendering/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap b/components/markdown/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap similarity index 100% rename from components/rendering/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap rename to components/markdown/tests/snapshots/markdown__custom_url_schemes_are_untouched.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap rename to components/markdown/tests/snapshots/shortcodes__can_emit_newlines_and_whitespace_with_shortcode.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_html_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap rename to 
components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_md_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__can_grab_lang_in_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap rename to components/markdown/tests/snapshots/shortcodes__can_passthrough_markdown_from_shortcode.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap b/components/markdown/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_body_shortcode_and_paragraph_after.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_commented_out_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap similarity index 100% rename from 
components/rendering/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_list_with_shortcode.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_markdown_in_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap b/components/markdown/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_shortcodes_with_tabs.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_simple_text_with_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap b/components/markdown/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap rename to components/markdown/tests/snapshots/shortcodes__can_render_two_body_shortcode_and_paragraph_after_with_line_break_between.snap diff --git 
a/components/rendering/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap b/components/markdown/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap rename to components/markdown/tests/snapshots/shortcodes__can_split_shortcode_body_lines.snap diff --git a/components/rendering/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap b/components/markdown/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap rename to components/markdown/tests/snapshots/shortcodes__can_use_shortcodes_in_quotes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__doesnt_escape_html_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap rename to components/markdown/tests/snapshots/shortcodes__doesnt_render_ignored_shortcodes.snap diff --git a/components/rendering/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap rename to 
components/markdown/tests/snapshots/shortcodes__doesnt_try_to_highlight_content_from_shortcode.snap diff --git a/components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-2.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-2.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-2.snap rename to components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-2.snap diff --git a/components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-3.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-3.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-3.snap rename to components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-3.snap diff --git a/components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-4.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-4.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-4.snap rename to components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-4.snap diff --git a/components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-5.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-5.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__html_shortcode_regression-5.snap rename to components/markdown/tests/snapshots/shortcodes__html_shortcode_regression-5.snap diff --git a/components/rendering/tests/snapshots/shortcodes__html_shortcode_regression.snap b/components/markdown/tests/snapshots/shortcodes__html_shortcode_regression.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__html_shortcode_regression.snap rename to 
components/markdown/tests/snapshots/shortcodes__html_shortcode_regression.snap diff --git a/components/rendering/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap b/components/markdown/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap rename to components/markdown/tests/snapshots/shortcodes__invocation_count_increments_in_shortcode.snap diff --git a/components/rendering/tests/snapshots/shortcodes__md_shortcode_regression.snap b/components/markdown/tests/snapshots/shortcodes__md_shortcode_regression.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__md_shortcode_regression.snap rename to components/markdown/tests/snapshots/shortcodes__md_shortcode_regression.snap diff --git a/components/rendering/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap b/components/markdown/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap similarity index 100% rename from components/rendering/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap rename to components/markdown/tests/snapshots/shortcodes__shortcodes_do_not_generate_paragraphs.snap diff --git a/components/rendering/tests/snapshots/summary__basic_summary.snap b/components/markdown/tests/snapshots/summary__basic_summary.snap similarity index 100% rename from components/rendering/tests/snapshots/summary__basic_summary.snap rename to components/markdown/tests/snapshots/summary__basic_summary.snap diff --git a/components/rendering/tests/snapshots/summary__summary_with_shortcodes.snap b/components/markdown/tests/snapshots/summary__summary_with_shortcodes.snap similarity index 100% rename from components/rendering/tests/snapshots/summary__summary_with_shortcodes.snap rename to components/markdown/tests/snapshots/summary__summary_with_shortcodes.snap diff --git 
a/components/rendering/tests/summary.rs b/components/markdown/tests/summary.rs similarity index 100% rename from components/rendering/tests/summary.rs rename to components/markdown/tests/summary.rs diff --git a/components/rendering/tests/toc.rs b/components/markdown/tests/toc.rs similarity index 100% rename from components/rendering/tests/toc.rs rename to components/markdown/tests/toc.rs diff --git a/components/search/Cargo.toml b/components/search/Cargo.toml index f80fcfe5..d2b26b52 100644 --- a/components/search/Cargo.toml +++ b/components/search/Cargo.toml @@ -5,6 +5,6 @@ edition = "2021" [dependencies] errors = { path = "../errors" } -library = { path = "../library" } +content = { path = "../content" } config = { path = "../config" } libs = { path = "../libs" } diff --git a/components/search/src/lib.rs b/components/search/src/lib.rs index daaedc45..02f0a177 100644 --- a/components/search/src/lib.rs +++ b/components/search/src/lib.rs @@ -7,8 +7,8 @@ use libs::elasticlunr::{Index, Language}; use libs::once_cell::sync::Lazy; use config::{Config, Search}; +use content::{Library, Section}; use errors::{bail, Result}; -use library::{Library, Section}; pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js"); @@ -137,7 +137,7 @@ pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result>()[0]; let paginator = Paginator::from_section(section, &library); b.iter(|| site.render_paginated(Vec::new(), &paginator)); diff --git a/components/site/src/feed.rs b/components/site/src/feed.rs index 06f7e99a..7f5ff6d9 100644 --- a/components/site/src/feed.rs +++ b/components/site/src/feed.rs @@ -1,3 +1,4 @@ +use std::cmp::Ordering; use std::path::PathBuf; use libs::rayon::prelude::*; @@ -5,8 +6,8 @@ use libs::tera::Context; use serde::Serialize; use crate::Site; +use content::{Page, TaxonomyItem}; use errors::Result; -use library::{sort_actual_pages_by_date, Page, TaxonomyItem}; use utils::templates::render_template; #[derive(Debug, Clone, PartialEq, 
Serialize)] @@ -40,7 +41,14 @@ pub fn render_feed( return Ok(None); } - pages.par_sort_unstable_by(sort_actual_pages_by_date); + pages.par_sort_unstable_by(|a, b| { + let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()); + if ord == Ordering::Equal { + a.permalink.cmp(&b.permalink) + } else { + ord + } + }); let mut context = Context::new(); context.insert( @@ -55,8 +63,11 @@ pub fn render_feed( let library = site.library.read().unwrap(); // limit to the last n elements if the limit is set; otherwise use all. let num_entries = site.config.feed_limit.unwrap_or(pages.len()); - let p = - pages.iter().take(num_entries).map(|x| x.to_serialized_basic(&library)).collect::>(); + let p = pages + .iter() + .take(num_entries) + .map(|x| x.serialize_without_siblings(&library)) + .collect::>(); context.insert("pages", &p); context.insert("config", &site.config.serialize(lang)); diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs index 60100ec1..c1d6dad9 100644 --- a/components/site/src/lib.rs +++ b/components/site/src/lib.rs @@ -9,16 +9,14 @@ use std::fs::remove_dir_all; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex, RwLock}; -// use lazy_static::lazy_static; use libs::once_cell::sync::Lazy; use libs::rayon::prelude::*; use libs::tera::{Context, Tera}; use libs::walkdir::{DirEntry, WalkDir}; use config::{get_config, Config}; +use content::{Library, Page, Paginator, Section, Taxonomy}; use errors::{anyhow, bail, Context as ErrorContext, Result}; -use front_matter::InsertAnchor; -use library::{find_taxonomies, Library, Page, Paginator, Section, Taxonomy}; use libs::relative_path::RelativePathBuf; use std::time::Instant; use templates::{load_tera, render_redirect_template}; @@ -28,6 +26,7 @@ use utils::fs::{ use utils::minify; use utils::net::get_available_port; use utils::templates::{render_template, ShortcodeDefinition}; +use utils::types::InsertAnchor; pub static SITE_CONTENT: Lazy>>> = Lazy::new(|| 
Arc::new(RwLock::new(HashMap::new()))); @@ -100,7 +99,7 @@ impl Site { permalinks: HashMap::new(), include_drafts: false, // We will allocate it properly later on - library: Arc::new(RwLock::new(Library::new(0, 0, false))), + library: Arc::new(RwLock::new(Library::new())), build_mode: BuildMode::Disk, shortcode_definitions, }; @@ -143,7 +142,7 @@ impl Site { self.live_reload = Some(live_reload_port); } - /// Reloads the templates and rebuild the site without re-rendering the Markdown. + /// Reloads the templates and rebuild the site without re-markdown the Markdown. pub fn reload_templates(&mut self) -> Result<()> { self.tera.full_reload()?; // TODO: be smarter than that, no need to recompile sass for example @@ -165,7 +164,7 @@ impl Site { pub fn load(&mut self) -> Result<()> { let base_path = self.base_path.to_string_lossy().replace('\\', "/"); - self.library = Arc::new(RwLock::new(Library::new(0, 0, self.config.is_multilingual()))); + self.library = Arc::new(RwLock::new(Library::new())); let mut pages_insert_anchors = HashMap::new(); // not the most elegant loop, but this is necessary to use skip_current_dir @@ -272,7 +271,7 @@ impl Site { { let library = self.library.read().unwrap(); - let collisions = library.check_for_path_collisions(); + let collisions = library.find_path_collisions(); if !collisions.is_empty() { let mut msg = String::from("Found path collisions:\n"); for (path, filepaths) in collisions { @@ -305,7 +304,7 @@ impl Site { /// a _index.md to render the index page at the root of the site pub fn create_default_index_sections(&mut self) -> Result<()> { for (index_path, lang) in self.index_section_paths() { - if let Some(index_section) = self.library.read().unwrap().get_section(&index_path) { + if let Some(index_section) = self.library.read().unwrap().sections.get(&index_path) { if self.config.build_search_index && !index_section.meta.in_search_index { bail!( "You have enabled search in the config but disabled it in the index section: \ @@ -316,7 
+315,7 @@ impl Site { } let mut library = self.library.write().expect("Get lock for load"); // Not in else because of borrow checker - if !library.contains_section(&index_path) { + if !library.sections.contains_key(&index_path) { let mut index_section = Section::default(); index_section.file.parent = self.content_path.clone(); index_section.file.filename = @@ -335,7 +334,10 @@ impl Site { index_section.file.relative = "_index.md".to_string(); index_section.path = "/".to_string(); } - index_section.lang = index_section.file.find_language(&self.config)?; + index_section.lang = index_section.file.find_language( + &self.config.default_language, + &self.config.other_languages_codes(), + )?; library.insert_section(index_section); } } @@ -354,7 +356,7 @@ impl Site { // This is needed in the first place because of silly borrow checker let mut pages_insert_anchors = HashMap::new(); - for (_, p) in self.library.read().unwrap().pages() { + for (_, p) in &self.library.read().unwrap().pages { pages_insert_anchors.insert( p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang), @@ -363,7 +365,7 @@ impl Site { let mut library = self.library.write().expect("Get lock for render_markdown"); library - .pages_mut() + .pages .values_mut() .collect::>() .par_iter_mut() @@ -380,7 +382,7 @@ impl Site { .collect::>()?; library - .sections_mut() + .sections .values_mut() .collect::>() .par_iter_mut() @@ -409,7 +411,7 @@ impl Site { } let mut library = self.library.write().expect("Get lock for add_page"); - library.remove_page(&page.file.path); + library.pages.remove(&page.file.path); library.insert_page(page); Ok(()) @@ -423,7 +425,7 @@ impl Site { self.populate_sections(); self.populate_taxonomies()?; let library = self.library.read().unwrap(); - let page = library.get_page(&path).unwrap(); + let page = library.pages.get(path).unwrap(); self.render_page(page) } @@ -440,7 +442,7 @@ impl Site { )?; } let mut library = self.library.write().expect("Get lock 
for add_section"); - library.remove_section(§ion.file.path); + library.sections.remove(§ion.file.path); library.insert_section(section); Ok(()) @@ -453,7 +455,7 @@ impl Site { self.add_section(section, true)?; self.populate_sections(); let library = self.library.read().unwrap(); - let section = library.get_section(&path).unwrap(); + let section = library.sections.get(path).unwrap(); self.render_section(section, true) } @@ -469,7 +471,7 @@ impl Site { } else { parent_path.join("_index.md") }; - match self.library.read().unwrap().get_section(&parent) { + match self.library.read().unwrap().sections.get(&parent) { Some(s) => s.meta.insert_anchor_links, None => InsertAnchor::None, } @@ -488,7 +490,7 @@ impl Site { return Ok(()); } - self.taxonomies = find_taxonomies(&self.config, &self.library.read().unwrap())?; + self.taxonomies = self.library.read().unwrap().find_taxonomies(&self.config)?; Ok(()) } @@ -637,7 +639,7 @@ impl Site { } start = log_time(start, "Cleaned folder"); - // Generate/move all assets before rendering any content + // Generate/move all assets before markdown any content if let Some(ref theme) = self.config.theme { let theme_path = self.base_path.join("themes").join(theme); if theme_path.join("sass").exists() { @@ -669,15 +671,10 @@ impl Site { let library = self.library.read().unwrap(); if self.config.generate_feed { let is_multilingual = self.config.is_multilingual(); - let pages = if is_multilingual { - library - .pages_values() - .iter() - .filter(|p| p.lang == self.config.default_language) - .cloned() - .collect() + let pages: Vec<_> = if is_multilingual { + library.pages.values().filter(|p| p.lang == self.config.default_language).collect() } else { - library.pages_values() + library.pages.values().collect() }; self.render_feed(pages, None, &self.config.default_language, |c| c)?; start = log_time(start, "Generated feed in default language"); @@ -687,8 +684,7 @@ impl Site { if !language.generate_feed { continue; } - let pages = - 
library.pages_values().iter().filter(|p| &p.lang == code).cloned().collect(); + let pages: Vec<_> = library.pages.values().filter(|p| &p.lang == code).collect(); self.render_feed(pages, Some(&PathBuf::from(code)), code, |c| c)?; start = log_time(start, "Generated feed in other language"); } @@ -783,12 +779,12 @@ impl Site { pub fn render_aliases(&self) -> Result<()> { ensure_directory_exists(&self.output_path)?; let library = self.library.read().unwrap(); - for (_, page) in library.pages() { + for (_, page) in &library.pages { for alias in &page.meta.aliases { self.render_alias(alias, &page.permalink)?; } } - for (_, section) in library.sections() { + for (_, section) in &library.sections { for alias in §ion.meta.aliases { self.render_alias(alias, §ion.permalink)?; } @@ -874,7 +870,7 @@ impl Site { if taxonomy.kind.feed { self.render_feed( - item.pages.iter().map(|p| library.get_page_by_key(*p)).collect(), + item.pages.iter().map(|p| library.pages.get(p).unwrap()).collect(), Some(&PathBuf::from(format!("{}/{}", taxonomy.slug, item.slug))), &taxonomy.lang, |mut context: Context| { @@ -1002,13 +998,13 @@ impl Site { if section.meta.generate_feed { let library = &self.library.read().unwrap(); - let pages = section.pages.iter().map(|k| library.get_page_by_key(*k)).collect(); + let pages = section.pages.iter().map(|k| library.pages.get(k).unwrap()).collect(); self.render_feed( pages, Some(&PathBuf::from(§ion.path[1..])), §ion.lang, |mut context: Context| { - context.insert("section", §ion.to_serialized(library)); + context.insert("section", §ion.serialize(library)); context }, )?; @@ -1031,7 +1027,7 @@ impl Site { section .pages .par_iter() - .map(|k| self.render_page(self.library.read().unwrap().get_page_by_key(*k))) + .map(|k| self.render_page(self.library.read().unwrap().pages.get(k).unwrap())) .collect::>()?; } @@ -1071,9 +1067,9 @@ impl Site { self.library .read() .unwrap() - .sections_values() - .into_par_iter() - .map(|s| self.render_section(s, true)) + .sections 
+ .par_iter() + .map(|(_, s)| self.render_section(s, true)) .collect::>() } diff --git a/components/site/src/link_checking.rs b/components/site/src/link_checking.rs index f385cbd6..d9157b12 100644 --- a/components/site/src/link_checking.rs +++ b/components/site/src/link_checking.rs @@ -17,11 +17,11 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> { let library = site.library.write().expect("Get lock for check_internal_links_with_anchors"); // Chain all internal links, from both sections and pages. - let page_links = library.pages().values().flat_map(|p| { + let page_links = library.pages.values().flat_map(|p| { let path = &p.file.path; p.internal_links.iter().map(move |l| (path.clone(), l)) }); - let section_links = library.sections().values().flat_map(|p| { + let section_links = library.sections.values().flat_map(|p| { let path = &p.file.path; p.internal_links.iter().map(move |l| (path.clone(), l)) }); @@ -51,14 +51,14 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> { // as well as any other sring containing "_index." which is now referenced as // unsupported page path in the docs. 
if md_path.contains("_index.") { - let section = library.get_section(&full_path).expect(&format!( + let section = library.sections.get(&full_path).expect(&format!( "Couldn't find section {} in check_internal_links_with_anchors from page {:?}", md_path, page.strip_prefix(&site.base_path).unwrap() )); !section.has_anchor(anchor) } else { - let page = library.get_page(&full_path).expect(&format!( + let page = library.pages.get(&full_path).expect(&format!( "Couldn't find page {} in check_internal_links_with_anchors from page {:?}", md_path, page.strip_prefix(&site.base_path).unwrap() @@ -128,7 +128,7 @@ pub fn check_external_links(site: &Site) -> Result<()> { let mut checked_links: Vec = vec![]; let mut skipped_link_count: u32 = 0; - for p in library.pages_values().into_iter() { + for p in library.pages.values() { for external_link in p.clone().external_links.into_iter() { if should_skip_by_prefix(&external_link, &site.config.link_checker.skip_prefixes) { skipped_link_count += 1; @@ -139,7 +139,7 @@ pub fn check_external_links(site: &Site) -> Result<()> { } } - for s in library.sections_values().into_iter() { + for s in library.sections.values() { for external_link in s.clone().external_links.into_iter() { if should_skip_by_prefix(&external_link, &site.config.link_checker.skip_prefixes) { skipped_link_count += 1; diff --git a/components/site/src/sitemap.rs b/components/site/src/sitemap.rs index c637db66..db63a75a 100644 --- a/components/site/src/sitemap.rs +++ b/components/site/src/sitemap.rs @@ -5,7 +5,7 @@ use std::hash::{Hash, Hasher}; use serde::Serialize; use config::Config; -use library::{Library, Taxonomy}; +use content::{Library, Taxonomy}; use libs::tera::{Map, Value}; use std::cmp::Ordering; @@ -62,8 +62,8 @@ pub fn find_entries<'a>( config: &'a Config, ) -> Vec> { let pages = library - .pages_values() - .iter() + .pages + .values() .map(|p| { let mut entry = SitemapEntry::new( Cow::Borrowed(&p.permalink), @@ -75,8 +75,8 @@ pub fn find_entries<'a>( 
.collect::>(); let mut sections = library - .sections_values() - .iter() + .sections + .values() .filter(|s| s.meta.render) .map(|s| { let mut entry = SitemapEntry::new(Cow::Borrowed(&s.permalink), None); @@ -85,7 +85,7 @@ pub fn find_entries<'a>( }) .collect::>(); - for section in library.sections_values().iter() { + for section in library.sections.values() { if let Some(paginate_by) = section.paginate_by() { let number_pagers = (section.pages.len() as f64 / paginate_by as f64).ceil() as isize; for i in 1..=number_pagers { diff --git a/components/site/tests/common.rs b/components/site/tests/common.rs index 19297f4f..a90ae085 100644 --- a/components/site/tests/common.rs +++ b/components/site/tests/common.rs @@ -186,16 +186,16 @@ impl Translations { let library = site.library.clone(); let library = library.read().unwrap(); // WORKAROUND because site.content_path is private - let unified_path = if let Some(page) = - library.get_page(site.base_path.join("content").join(path)) - { - page.file.canonical.clone() - } else if let Some(section) = library.get_section(site.base_path.join("content").join(path)) - { - section.file.canonical.clone() - } else { - panic!("No such page or section: {}", path); - }; + let unified_path = + if let Some(page) = library.pages.get(&site.base_path.join("content").join(path)) { + page.file.canonical.clone() + } else if let Some(section) = + library.sections.get(&site.base_path.join("content").join(path)) + { + section.file.canonical.clone() + } else { + panic!("No such page or section: {}", path); + }; let translations = library.translations.get(&unified_path); if translations.is_none() { @@ -213,14 +213,14 @@ impl Translations { // Are we looking for a section? 
(no file extension here) if unified_path.ends_with("_index") { //library.get_section_by_key(*key).file.relative.to_string() - let section = library.get_section_by_key(*key); + let section = &library.sections[key]; Translation { lang: section.lang.clone(), permalink: section.permalink.clone(), path: section.file.path.to_str().unwrap().to_string(), } } else { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; Translation { lang: page.lang.clone(), permalink: page.permalink.clone(), diff --git a/components/site/tests/site.rs b/components/site/tests/site.rs index 69b838f4..bb9d8377 100644 --- a/components/site/tests/site.rs +++ b/components/site/tests/site.rs @@ -5,7 +5,7 @@ use std::env; use std::path::Path; use common::{build_site, build_site_with_setup}; -use config::Taxonomy; +use config::TaxonomyConfig; use site::sitemap; use site::Site; @@ -19,71 +19,67 @@ fn can_parse_site() { let library = site.library.read().unwrap(); // Correct number of pages (sections do not count as pages, draft are ignored) - assert_eq!(library.pages().len(), 33); + assert_eq!(library.pages.len(), 33); let posts_path = path.join("content").join("posts"); // Make sure the page with a url doesn't have any sections - let url_post = library.get_page(&posts_path.join("fixed-url.md")).unwrap(); + let url_post = library.pages.get(&posts_path.join("fixed-url.md")).unwrap(); assert_eq!(url_post.path, "/a-fixed-url/"); // Make sure the article in a folder with only asset doesn't get counted as a section let asset_folder_post = - library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap(); + library.pages.get(&posts_path.join("with-assets").join("index.md")).unwrap(); assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]); // That we have the right number of sections - assert_eq!(library.sections().len(), 12); + assert_eq!(library.sections.len(), 12); // And that the sections are correct - let index_section = 
library.get_section(&path.join("content").join("_index.md")).unwrap(); + let index_section = library.sections.get(&path.join("content").join("_index.md")).unwrap(); assert_eq!(index_section.subsections.len(), 5); assert_eq!(index_section.pages.len(), 3); assert!(index_section.ancestors.is_empty()); - let posts_section = library.get_section(&posts_path.join("_index.md")).unwrap(); + let posts_section = library.sections.get(&posts_path.join("_index.md")).unwrap(); assert_eq!(posts_section.subsections.len(), 2); assert_eq!(posts_section.pages.len(), 9); // 10 with 1 draft == 9 - assert_eq!( - posts_section.ancestors, - vec![*library.get_section_key(&index_section.file.path).unwrap()] - ); + assert_eq!(posts_section.ancestors, vec![index_section.file.relative.clone()]); // Make sure we remove all the pwd + content from the sections - let basic = library.get_page(&posts_path.join("simple.md")).unwrap(); + let basic = library.pages.get(&posts_path.join("simple.md")).unwrap(); assert_eq!(basic.file.components, vec!["posts".to_string()]); assert_eq!( basic.ancestors, - vec![ - *library.get_section_key(&index_section.file.path).unwrap(), - *library.get_section_key(&posts_section.file.path).unwrap(), - ] + vec![index_section.file.relative.clone(), posts_section.file.relative.clone(),] ); let tutorials_section = - library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap(); + library.sections.get(&posts_path.join("tutorials").join("_index.md")).unwrap(); assert_eq!(tutorials_section.subsections.len(), 2); - let sub1 = library.get_section_by_key(tutorials_section.subsections[0]); - let sub2 = library.get_section_by_key(tutorials_section.subsections[1]); + let sub1 = &library.sections[&tutorials_section.subsections[0]]; + let sub2 = &library.sections[&tutorials_section.subsections[1]]; assert_eq!(sub1.clone().meta.title.unwrap(), "Programming"); assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps"); assert_eq!(tutorials_section.pages.len(), 0); let 
devops_section = library - .get_section(&posts_path.join("tutorials").join("devops").join("_index.md")) + .sections + .get(&posts_path.join("tutorials").join("devops").join("_index.md")) .unwrap(); assert_eq!(devops_section.subsections.len(), 0); assert_eq!(devops_section.pages.len(), 2); assert_eq!( devops_section.ancestors, vec![ - *library.get_section_key(&index_section.file.path).unwrap(), - *library.get_section_key(&posts_section.file.path).unwrap(), - *library.get_section_key(&tutorials_section.file.path).unwrap(), + index_section.file.relative.clone(), + posts_section.file.relative.clone(), + tutorials_section.file.relative.clone(), ] ); let prog_section = library - .get_section(&posts_path.join("tutorials").join("programming").join("_index.md")) + .sections + .get(&posts_path.join("tutorials").join("programming").join("_index.md")) .unwrap(); assert_eq!(prog_section.subsections.len(), 0); assert_eq!(prog_section.pages.len(), 2); @@ -259,7 +255,7 @@ fn can_build_site_with_live_reload_and_drafts() { // drafted sections are included let library = site.library.read().unwrap(); - assert_eq!(library.sections().len(), 14); + assert_eq!(library.sections.len(), 14); assert!(file_exists!(public, "secret_section/index.html")); assert!(file_exists!(public, "secret_section/draft-page/index.html")); @@ -273,7 +269,7 @@ fn can_build_site_with_taxonomies() { site.load().unwrap(); { let mut library = site.library.write().unwrap(); - for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() { + for (i, (_, page)) in library.pages.iter_mut().enumerate() { page.meta.taxonomies = { let mut taxonomies = HashMap::new(); taxonomies.insert( @@ -289,7 +285,7 @@ fn can_build_site_with_taxonomies() { }); assert!(&public.exists()); - assert_eq!(site.taxonomies.len(), 1); + assert_eq!(site.taxonomies.len(), 2); assert!(file_exists!(public, "index.html")); assert!(file_exists!(public, "sitemap.xml")); @@ -353,7 +349,7 @@ fn can_build_site_with_pagination_for_section() { 
site.load().unwrap(); { let mut library = site.library.write().unwrap(); - for (_, section) in library.sections_mut() { + for (_, section) in library.sections.iter_mut() { if section.is_index() { continue; } @@ -481,7 +477,8 @@ fn can_build_site_with_pagination_for_index() { let mut library = site.library.write().unwrap(); { let index = library - .get_section_mut(&site.base_path.join("content").join("_index.md")) + .sections + .get_mut(&site.base_path.join("content").join("_index.md")) .unwrap(); index.meta.paginate_by = Some(2); index.meta.template = Some("index_paginated.html".to_string()); @@ -544,7 +541,7 @@ fn can_build_site_with_pagination_for_index() { #[test] fn can_build_site_with_pagination_for_taxonomy() { let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { - site.config.taxonomies.push(Taxonomy { + site.config.languages.get_mut("en").unwrap().taxonomies.push(TaxonomyConfig { name: "tags".to_string(), paginate_by: Some(2), paginate_path: None, @@ -554,7 +551,7 @@ fn can_build_site_with_pagination_for_taxonomy() { { let mut library = site.library.write().unwrap(); - for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() { + for (i, (_, page)) in library.pages.iter_mut().enumerate() { page.meta.taxonomies = { let mut taxonomies = HashMap::new(); taxonomies.insert( @@ -678,35 +675,35 @@ fn can_apply_page_templates() { let template_path = path.join("content").join("applying_page_template"); let library = site.library.read().unwrap(); - let template_section = library.get_section(&template_path.join("_index.md")).unwrap(); + let template_section = library.sections.get(&template_path.join("_index.md")).unwrap(); assert_eq!(template_section.subsections.len(), 2); assert_eq!(template_section.pages.len(), 2); - let from_section_config = library.get_page_by_key(template_section.pages[0]); + let from_section_config = &library.pages[&template_section.pages[0]]; assert_eq!(from_section_config.meta.template, 
Some("page_template.html".into())); assert_eq!(from_section_config.meta.title, Some("From section config".into())); - let override_page_template = library.get_page_by_key(template_section.pages[1]); + let override_page_template = &library.pages[&template_section.pages[1]]; assert_eq!(override_page_template.meta.template, Some("page_template_override.html".into())); assert_eq!(override_page_template.meta.title, Some("Override".into())); // It should have applied recursively as well let another_section = - library.get_section(&template_path.join("another_section").join("_index.md")).unwrap(); + library.sections.get(&template_path.join("another_section").join("_index.md")).unwrap(); assert_eq!(another_section.subsections.len(), 0); assert_eq!(another_section.pages.len(), 1); - let changed_recursively = library.get_page_by_key(another_section.pages[0]); + let changed_recursively = &library.pages[&another_section.pages[0]]; assert_eq!(changed_recursively.meta.template, Some("page_template.html".into())); assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into())); // But it should not have override a children page_template let yet_another_section = - library.get_section(&template_path.join("yet_another_section").join("_index.md")).unwrap(); + library.sections.get(&template_path.join("yet_another_section").join("_index.md")).unwrap(); assert_eq!(yet_another_section.subsections.len(), 0); assert_eq!(yet_another_section.pages.len(), 1); - let child = library.get_page_by_key(yet_another_section.pages[0]); + let child = &library.pages[&yet_another_section.pages[0]]; assert_eq!(child.meta.template, Some("page_template_child.html".into())); assert_eq!(child.meta.title, Some("Local section override".into())); } @@ -767,7 +764,7 @@ fn can_get_hash_for_static_files() { } #[test] -fn check_site() { +fn can_check_site() { let (mut site, _tmp_dir, _public) = build_site("test_site"); assert_eq!( diff --git a/components/site/tests/site_i18n.rs 
b/components/site/tests/site_i18n.rs index c57b6cc5..088ee4a3 100644 --- a/components/site/tests/site_i18n.rs +++ b/components/site/tests/site_i18n.rs @@ -14,43 +14,38 @@ fn can_parse_multilingual_site() { site.load().unwrap(); let library = site.library.read().unwrap(); - assert_eq!(library.pages().len(), 11); - assert_eq!(library.sections().len(), 6); + assert_eq!(library.pages.len(), 11); + assert_eq!(library.sections.len(), 6); // default index sections let default_index_section = - library.get_section(&path.join("content").join("_index.md")).unwrap(); + library.sections.get(&path.join("content").join("_index.md")).unwrap(); assert_eq!(default_index_section.pages.len(), 1); assert!(default_index_section.ancestors.is_empty()); - let fr_index_section = library.get_section(&path.join("content").join("_index.fr.md")).unwrap(); + let fr_index_section = + library.sections.get(&path.join("content").join("_index.fr.md")).unwrap(); assert_eq!(fr_index_section.pages.len(), 1); assert!(fr_index_section.ancestors.is_empty()); // blog sections get only their own language pages let blog_path = path.join("content").join("blog"); - let default_blog = library.get_section(&blog_path.join("_index.md")).unwrap(); + let default_blog = library.sections.get(&blog_path.join("_index.md")).unwrap(); assert_eq!(default_blog.subsections.len(), 0); assert_eq!(default_blog.pages.len(), 4); - assert_eq!( - default_blog.ancestors, - vec![*library.get_section_key(&default_index_section.file.path).unwrap()] - ); + assert_eq!(default_blog.ancestors, vec![default_index_section.file.relative.clone()]); for key in &default_blog.pages { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; assert_eq!(page.lang, "en"); } - let fr_blog = library.get_section(&blog_path.join("_index.fr.md")).unwrap(); + let fr_blog = library.sections.get(&blog_path.join("_index.fr.md")).unwrap(); assert_eq!(fr_blog.subsections.len(), 0); - assert_eq!(fr_blog.pages.len(), 3); - assert_eq!( - 
fr_blog.ancestors, - vec![*library.get_section_key(&fr_index_section.file.path).unwrap()] - ); + assert_eq!(fr_blog.pages.len(), 4); + assert_eq!(fr_blog.ancestors, vec![fr_index_section.file.relative.clone()]); for key in &fr_blog.pages { - let page = library.get_page_by_key(*key); + let page = &library.pages[key]; assert_eq!(page.lang, "fr"); } } @@ -188,6 +183,7 @@ fn correct_translations_on_all_pages() { let link = format!("{}index.html", link); // Ensure every permalink has produced a HTML page + println!("{:?}", link); assert!(ensure_output_exists(&public, &site.config.base_url, &link)); // Ensure translations expected here match with those in the library diff --git a/components/templates/Cargo.toml b/components/templates/Cargo.toml index 53925b99..6baf0eb3 100644 --- a/components/templates/Cargo.toml +++ b/components/templates/Cargo.toml @@ -8,10 +8,10 @@ serde = {version = "1.0", features = ["derive"] } errors = { path = "../errors" } utils = { path = "../utils" } -library = { path = "../library" } +content = { path = "../content" } config = { path = "../config" } imageproc = { path = "../imageproc" } -rendering = { path = "../rendering" } +markdown = { path = "../markdown" } libs = { path = "../libs" } diff --git a/components/templates/src/filters.rs b/components/templates/src/filters.rs index eaab13cf..39115b2a 100644 --- a/components/templates/src/filters.rs +++ b/components/templates/src/filters.rs @@ -9,7 +9,7 @@ use libs::tera::{ to_value, try_get_value, Error as TeraError, Filter as TeraFilter, Result as TeraResult, Tera, Value, }; -use rendering::{render_content, RenderContext}; +use markdown::{render_content, RenderContext}; use crate::load_tera; diff --git a/components/templates/src/global_fns/content.rs b/components/templates/src/global_fns/content.rs index dfe0035a..c082dff1 100644 --- a/components/templates/src/global_fns/content.rs +++ b/components/templates/src/global_fns/content.rs @@ -1,4 +1,4 @@ -use library::{Library, Taxonomy}; +use 
content::{Library, Taxonomy}; use libs::tera::{from_value, to_value, Function as TeraFn, Result, Value}; use std::collections::HashMap; use std::path::PathBuf; @@ -90,8 +90,8 @@ impl TeraFn for GetPage { ); let full_path = self.base_path.join(&path); let library = self.library.read().unwrap(); - match library.get_page(&full_path) { - Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()), + match library.pages.get(&full_path) { + Some(p) => Ok(to_value(p.serialize(&library)).unwrap()), None => Err(format!("Page `{}` not found.", path).into()), } } @@ -122,12 +122,12 @@ impl TeraFn for GetSection { let full_path = self.base_path.join(&path); let library = self.library.read().unwrap(); - match library.get_section(&full_path) { + match library.sections.get(&full_path) { Some(s) => { if metadata_only { - Ok(to_value(s.to_serialized_basic(&library)).unwrap()) + Ok(to_value(s.serialize_basic(&library)).unwrap()) } else { - Ok(to_value(s.to_serialized(&library)).unwrap()) + Ok(to_value(s.serialize(&library)).unwrap()) } } None => Err(format!("Section `{}` not found.", path).into()), @@ -185,8 +185,8 @@ impl TeraFn for GetTaxonomy { #[cfg(test)] mod tests { use super::*; - use config::{Config, Taxonomy as TaxonomyConfig}; - use library::TaxonomyItem; + use config::{Config, TaxonomyConfig}; + use content::TaxonomyItem; #[test] fn can_get_taxonomy() { @@ -195,23 +195,9 @@ mod tests { let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let taxo_config_fr = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; - let library = Arc::new(RwLock::new(Library::new(0, 0, false))); - let tag = TaxonomyItem::new( - "Programming", - &config.default_language, - "tags", - &config, - vec![], - &library.read().unwrap(), - ); - let tag_fr = TaxonomyItem::new( - "Programmation", - "fr", - "tags", - &config, - vec![], - &library.read().unwrap(), - ); + let library = Arc::new(RwLock::new(Library::new())); + let tag = 
TaxonomyItem::new("Programming", &config.default_language, "tags", &[], &config); + let tag_fr = TaxonomyItem::new("Programmation", "fr", "tags", &[], &config); let tags = Taxonomy { kind: taxo_config, lang: config.default_language.clone(), @@ -279,16 +265,8 @@ let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let taxo_config_fr = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; - let library = Library::new(0, 0, false); - let tag = TaxonomyItem::new( - "Programming", - &config.default_language, - "tags", - &config, - vec![], - &library, - ); - let tag_fr = TaxonomyItem::new("Programmation", "fr", "tags", &config, vec![], &library); + let tag = TaxonomyItem::new("Programming", &config.default_language, "tags", &[], &config); + let tag_fr = TaxonomyItem::new("Programmation", "fr", "tags", &[], &config); let tags = Taxonomy { kind: taxo_config, lang: config.default_language.clone(), diff --git a/components/utils/src/lib.rs b/components/utils/src/lib.rs index 2a8162d0..44f24e5a 100644 --- a/components/utils/src/lib.rs +++ b/components/utils/src/lib.rs @@ -5,5 +5,7 @@ pub mod minify; pub mod net; pub mod site; pub mod slugs; +pub mod table_of_contents; pub mod templates; +pub mod types; pub mod vec; diff --git a/components/utils/src/minify.rs b/components/utils/src/minify.rs index 5cfde316..46a48f48 100644 --- a/components/utils/src/minify.rs +++ b/components/utils/src/minify.rs @@ -1,6 +1,8 @@ use errors::{bail, Result}; use libs::minify_html::{minify, Cfg}; +// TODO: move to site + pub fn html(html: String) -> Result<String> { let mut cfg = Cfg::spec_compliant(); cfg.keep_html_and_head_opening_tags = true; diff --git a/components/utils/src/site.rs b/components/utils/src/site.rs index 0d57b858..26d4cad4 100644 --- a/components/utils/src/site.rs +++ b/components/utils/src/site.rs @@ -5,6 +5,7 @@ use std::hash::BuildHasher; use errors::{anyhow, Result}; +// TODO: move to content /// Get word count
and estimated reading time pub fn get_reading_analytics(content: &str) -> (usize, usize) { let word_count: usize = content.unicode_words().count(); @@ -22,7 +23,7 @@ pub struct ResolvedInternalLink { /// Internal path to the .md file, without the leading `@/`. pub md_path: String, /// Optional anchor target. - /// We can check whether it exists only after all the markdown rendering is done. + /// We can check whether it exists only after all the markdown rendering is done. pub anchor: Option<String>, } diff --git a/components/rendering/src/table_of_contents.rs b/components/utils/src/table_of_contents.rs similarity index 100% rename from components/rendering/src/table_of_contents.rs rename to components/utils/src/table_of_contents.rs diff --git a/components/utils/src/types.rs b/components/utils/src/types.rs new file mode 100644 index 00000000..3feb9576 --- /dev/null +++ b/components/utils/src/types.rs @@ -0,0 +1,9 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum InsertAnchor { + Left, + Right, + None, +} diff --git a/src/console.rs b/src/console.rs index ba389644..a4729de4 100644 --- a/src/console.rs +++ b/src/console.rs @@ -56,9 +56,9 @@ pub fn notify_site_size(site: &Site) { let library = site.library.read().unwrap(); println!( "-> Creating {} pages ({} orphan) and {} sections", - library.pages().len(), + library.pages.len(), library.get_all_orphan_pages().len(), - library.sections().len() - 1, // -1 since we do not count the index as a section there + library.sections.len() - 1, // -1 since we do not count the index as a section there ); } @@ -68,9 +68,9 @@ pub fn check_site_summary(site: &Site) { let orphans = library.get_all_orphan_pages(); println!( "-> Site content: {} pages ({} orphan), {} sections", - library.pages().len(), + library.pages.len(), orphans.len(), - library.sections().len() - 1, // -1 since we do not count the index as a section there +
library.sections.len() - 1, // -1 since we do not count the index as a section there ); for orphan in orphans { @@ -82,9 +82,9 @@ pub fn check_site_summary(site: &Site) { pub fn warn_about_ignored_pages(site: &Site) { let library = site.library.read().unwrap(); let ignored_pages: Vec<_> = library - .sections_values() - .iter() - .flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone())) + .sections + .values() + .flat_map(|s| s.ignored_pages.iter().map(|k| library.pages[k].file.path.clone())) .collect(); if !ignored_pages.is_empty() { diff --git a/test_site_i18n/content/blog/not-translated-in-default-language.fr.md b/test_site_i18n/content/blog/not-translated-in-default-language.fr.md index a918561c..7d15a08f 100644 --- a/test_site_i18n/content/blog/not-translated-in-default-language.fr.md +++ b/test_site_i18n/content/blog/not-translated-in-default-language.fr.md @@ -1,5 +1,6 @@ +++ title = "Ma page que en français" +date = 2018-08-10 +++ Cette page n'est pas traduite dans la langue par défaut (anglais).