33 CHANGELOG.md
@@ -1,5 +1,38 @@
# Changelog

## 0.16.0 (unreleased)

### Breaking

- Switch to the pulldown-cmark anchor system rather than our own; some (very niche) edge cases are no longer supported. You can now also specify classes on headers.
- Now outputs empty taxonomies instead of ignoring them
- Unify all page sorting variable names in templates to `lower`/`higher` to make it easier to re-use templates (it was also becoming hard to come up with names, to be honest)

### Other

- Fix markup for fenced code with linenos
- Make `ignored_content` work with nested paths and directories
- `zola serve/build` can now run from anywhere in a zola directory
- Add XML support to `load_data`
- Add YAML support to `load_data`
- `skip_prefixes` is now checked before parsing external link URLs
- Add `render` attribute to taxonomies configuration in `config.toml`, for when you don't want to render any pages related to that taxonomy
- Serialize `transparent` field from front-matter of sections
- Use the Zola Tera instance for the markdown filter: this means you have access to the same Tera functions as in shortcodes
- Ignore sections with `render = false` when looking for path collisions
- Add support for backlinks
- Add a warning mode for internal/external link checking in case you don't want zola to stop the build on invalid links
- Always follow symlinks when loading the site/assets
- Add `rel="alternate"` to Atom post links
- Fix taxonomy `current_path`
- Fix feed location for taxonomies not in the default language
- Add `title_bytes` sorting method
- Add `insert_anchor = "heading"`, which allows users to use the entire heading as a link
- Apply orientation transformation based on EXIF data
- Fix generated homepages not having their `translations` filled properly

## 0.15.3 (2022-01-23)

- Fix shortcodes not being rendered in code blocks
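Two of the entries above land in `config.toml`. A minimal sketch of how they can be exercised through `Config::parse` (the entry point used by the tests in this diff); the site values and the `tags` taxonomy are illustrative, and the exact table layout is an assumption rather than something taken from the docs:

```rust
use config::Config;

fn main() {
    // Hypothetical config exercising 0.16.0 features: a taxonomy that keeps
    // its data but renders no pages, and the link checker warning mode.
    let config = Config::parse(
        r#"
base_url = "https://example.com"

taxonomies = [
    { name = "tags", render = false },  # new `render` attribute, defaults to true
]

[link_checker]
internal_level = "warn"  # log broken internal links instead of failing the build
external_level = "warn"
"#,
    )
    .expect("config should parse");
    println!("{:?}", config.taxonomies);
}
```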
1825 Cargo.lock (generated)
54 Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "zola"
version = "0.15.3"
version = "0.16.0"
authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
edition = "2018"
license = "MIT"
@@ -13,62 +13,50 @@ keywords = ["static", "site", "generator", "blog"]
include = ["src/**/*", "LICENSE", "README.md"]

[build-dependencies]
clap = "2"
clap = "3"
clap_complete = "3"
winres = "0.1"
time = "0.3"

[[bin]]
name = "zola"

[dependencies]
atty = "0.2.11"
clap = { version = "2", default-features = false }
chrono = "0.4"
lazy_static = "1.1"
termcolor = "1.0.4"
# Used in init to ensure the url given as base_url is a valid one
url = "2"
clap = { version = "3", features = ["derive"] }
# Below is for the serve cmd
hyper = { version = "0.14.1", default-features = false, features = ["runtime", "server", "http2", "http1"] }
tokio = { version = "1.0.1", default-features = false, features = ["rt", "fs", "time"] }
percent-encoding = "2"
time = { version = "0.3", features = ["formatting", "macros", "local-offset"] }
notify = "4"
ws = "0.9"
ctrlc = "3"
open = "2"
globset = "0.4"
relative-path = "1"
open = "3"
pathdiff = "0.2"
serde_json = "1.0"
# For mimetype detection in serve mode
mime_guess = "2.0"
# For essence_str() function, see https://github.com/getzola/zola/issues/1845
mime = "0.3.16"

site = { path = "components/site" }
errors = { path = "components/errors" }
front_matter = { path = "components/front_matter" }
console = { path = "components/console" }
utils = { path = "components/utils" }
search = { path = "components/search" }
libs = { path = "components/libs" }

[dev-dependencies]
same-file = "1"

[features]
default = ["rust-tls"]
rust-tls = ["site/rust-tls"]
native-tls = ["site/native-tls"]
rust-tls = ["libs/rust-tls"]
native-tls = ["libs/native-tls"]
indexing-zh = ["libs/indexing-zh"]
indexing-ja = ["libs/indexing-ja"]

[workspace]
members = [
    "components/config",
    "components/errors",
    "components/front_matter",
    "components/rendering",
    "components/site",
    "components/templates",
    "components/utils",
    "components/search",
    "components/imageproc",
    "components/link_checker",
    "components/library",
]
members = ["components/*"]

[profile.release]
lto = true
@@ -78,3 +66,7 @@ codegen-units = 1
# Disabling debug info speeds up builds a bunch,
# and we don't rely on it for debugging that much.
debug = 0

[package.metadata.winres]
OriginalFilename = "zola.exe"
InternalName = "zola"
@@ -11,7 +11,7 @@ stages:
  strategy:
    matrix:
      windows-stable:
        imageName: 'vs2017-win2016'
        imageName: 'windows-2019'
        rustup_toolchain: stable
      mac-stable:
        imageName: 'macos-11'
@@ -21,7 +21,7 @@ stages:
        rustup_toolchain: stable
      linux-pinned:
        imageName: 'ubuntu-20.04'
        rustup_toolchain: 1.53.0
        rustup_toolchain: 1.57.0
  pool:
    vmImage: $(imageName)
  steps:
23 build.rs
@@ -2,6 +2,21 @@

include!("src/cli.rs");

fn generate_pe_header() {
    use time::OffsetDateTime;

    let today = OffsetDateTime::now_utc();
    let copyright = format!("Copyright © 2017-{} Vincent Prouillet", today.year());
    let mut res = winres::WindowsResource::new();
    // needed for MinGW cross-compiling
    if cfg!(unix) {
        res.set_windres_path("x86_64-w64-mingw32-windres");
    }
    res.set_icon("docs/static/favicon.ico");
    res.set("LegalCopyright", &copyright);
    res.compile().expect("Failed to compile Windows resources!");
}

fn main() {
    // disabled below as it fails in CI
    // let mut app = build_cli();
@@ -9,4 +24,12 @@ fn main() {
    // app.gen_completions("zola", Shell::Fish, "completions/");
    // app.gen_completions("zola", Shell::Zsh, "completions/");
    // app.gen_completions("zola", Shell::PowerShell, "completions/");
    if std::env::var("CARGO_CFG_TARGET_OS").unwrap() != "windows"
        && std::env::var("PROFILE").unwrap() != "release"
    {
        return;
    }
    if cfg!(windows) {
        generate_pe_header();
    }
}
@@ -1,19 +1,12 @@
[package]
name = "config"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"
edition = "2021"
include = ["src/**/*"]

[dependencies]
toml = "0.5"
serde = "1"
serde_derive = "1"
chrono = "0.4"
globset = "0.4"
lazy_static = "1"
syntect = "4"
unic-langid = "0.9"
serde = {version = "1.0", features = ["derive"] }

errors = { path = "../errors" }
utils = { path = "../utils" }
libs = { path = "../libs" }
@@ -3,14 +3,14 @@
//! Although it is a valid example for serializing syntaxes, you probably won't need
//! to do this yourself unless you want to cache your own compiled grammars.

use libs::syntect::dumps::*;
use libs::syntect::highlighting::ThemeSet;
use libs::syntect::parsing::SyntaxSetBuilder;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::iter::FromIterator;
use std::path::Path;
use syntect::dumps::*;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSetBuilder;

fn usage_and_exit() -> ! {
    println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n
@@ -1,13 +1,13 @@
use std::collections::HashMap;

use errors::{bail, Result};
use serde_derive::{Deserialize, Serialize};
use unic_langid::LanguageIdentifier;
use libs::unic_langid::LanguageIdentifier;
use serde::{Deserialize, Serialize};

use crate::config::search;
use crate::config::taxonomies;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(default)]
pub struct LanguageOptions {
    /// Title of the site. Defaults to None
@@ -19,33 +19,17 @@ pub struct LanguageOptions {
    /// The filename to use for feeds. Used to find the template, too.
    /// Defaults to "atom.xml", with "rss.xml" also having a template provided out of the box.
    pub feed_filename: String,
    pub taxonomies: Vec<taxonomies::Taxonomy>,
    pub taxonomies: Vec<taxonomies::TaxonomyConfig>,
    /// Whether to generate search index for that language, defaults to `false`
    pub build_search_index: bool,
    /// The search config, telling what to include in the search index for that language
    pub search: search::Search,
    /// A toml crate `Table` with String key representing term and value
    /// another `String` representing its translation.
    ///
    /// Use `get_translation()` method for translating key into different languages.
    pub translations: HashMap<String, String>,
}

impl Default for LanguageOptions {
    fn default() -> Self {
        LanguageOptions {
            title: None,
            description: None,
            generate_feed: false,
            feed_filename: String::new(),
            build_search_index: false,
            taxonomies: Vec::new(),
            search: search::Search::default(),
            translations: HashMap::new(),
        }
    }
}

/// We want to ensure the language codes are valid ones
pub fn validate_code(code: &str) -> Result<()> {
    if LanguageIdentifier::from_bytes(code.as_bytes()).is_err() {
@@ -1,4 +1,18 @@
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum LinkCheckerLevel {
    #[serde(rename = "error")]
    Error,
    #[serde(rename = "warn")]
    Warn,
}

impl Default for LinkCheckerLevel {
    fn default() -> Self {
        Self::Error
    }
}

#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
@@ -7,5 +21,8 @@ pub struct LinkChecker {
    pub skip_prefixes: Vec<String>,
    /// Skip anchor checking for these URL prefixes
    pub skip_anchor_prefixes: Vec<String>,
    /// Emit either "error" or "warn" for broken internal links (including anchor links).
    pub internal_level: LinkCheckerLevel,
    /// Emit either "error" or "warn" for broken external links (including anchor links).
    pub external_level: LinkCheckerLevel,
}
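A small sketch of the deserialization behavior of these new fields (assuming the `libs` re-export of `toml` that appears elsewhere in this diff):

```rust
use config::{LinkChecker, LinkCheckerLevel};

fn main() {
    // #[serde(default)]: omitted fields fall back to Default, i.e. Error,
    // so existing configs keep failing the build on broken links.
    let strict: LinkChecker = libs::toml::from_str("").unwrap();
    assert_eq!(strict.internal_level, LinkCheckerLevel::Error);

    // The serde renames map the lowercase config strings onto the variants.
    let lenient: LinkChecker = libs::toml::from_str(r#"external_level = "warn""#).unwrap();
    assert_eq!(lenient.external_level, LinkCheckerLevel::Warn);
}
```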
@@ -1,11 +1,11 @@
use std::{path::Path, sync::Arc};

use serde_derive::{Deserialize, Serialize};
use syntect::{
use libs::syntect::{
    highlighting::{Theme, ThemeSet},
    html::css_for_theme_with_class_style,
    parsing::{SyntaxSet, SyntaxSetBuilder},
};
use serde::{Deserialize, Serialize};

use errors::{bail, Result};

@@ -13,7 +13,7 @@ use crate::highlighting::{CLASS_STYLE, THEME_SET};

pub const DEFAULT_HIGHLIGHT_THEME: &str = "base16-ocean-dark";

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(default)]
pub struct ThemeCss {
    /// Which theme are we generating the CSS from
@@ -22,12 +22,6 @@ pub struct ThemeCss {
    pub filename: String,
}

impl Default for ThemeCss {
    fn default() -> ThemeCss {
        ThemeCss { theme: String::new(), filename: String::new() }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct Markdown {
@@ -66,16 +60,16 @@ impl Markdown {
        if self.highlight_theme == "css" {
            None
        } else {
            Some(self.get_highlight_theme_by_name(&self.highlight_theme))
            self.get_highlight_theme_by_name(&self.highlight_theme)
        }
    }

    /// Gets an arbitrary theme from the THEME_SET or the extra_theme_set
    pub fn get_highlight_theme_by_name(&self, theme_name: &str) -> &Theme {
    pub fn get_highlight_theme_by_name(&self, theme_name: &str) -> Option<&Theme> {
        (*self.extra_theme_set)
            .as_ref()
            .and_then(|ts| ts.themes.get(theme_name))
            .unwrap_or_else(|| &THEME_SET.themes[theme_name])
            .or_else(|| THEME_SET.themes.get(theme_name))
    }

    /// Attempt to load any extra syntaxes and themes found in the extra_syntaxes_and_themes folders
@@ -101,9 +95,13 @@ impl Markdown {
        ))
    }

    pub fn export_theme_css(&self, theme_name: &str) -> String {
        let theme = self.get_highlight_theme_by_name(theme_name);
        css_for_theme_with_class_style(theme, CLASS_STYLE)
    pub fn export_theme_css(&self, theme_name: &str) -> Result<String> {
        if let Some(theme) = self.get_highlight_theme_by_name(theme_name) {
            Ok(css_for_theme_with_class_style(theme, CLASS_STYLE)
                .expect("the function can't even error?"))
        } else {
            bail!("Theme {} not found", theme_name)
        }
    }
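With both lookups now returning `Option`/`Result`, a missing theme surfaces as an error instead of a panic on `THEME_SET.themes[...]`. A sketch of the new call shape (the `markdown` field on `Config` is assumed from the zola codebase; it is not shown in this hunk):

```rust
fn main() {
    let config = config::Config::default();
    // Ok(css) for a bundled or extra theme; Err for an unknown name.
    match config.markdown.export_theme_css("base16-ocean-dark") {
        Ok(css) => println!("generated {} bytes of CSS", css.len()),
        Err(e) => eprintln!("{}", e), // "Theme <name> not found", via the bail! above
    }
}
```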

    pub fn init_extra_syntaxes_and_highlight_themes(&mut self, path: &Path) -> Result<()> {
@@ -114,14 +112,14 @@ impl Markdown {
            self.extra_syntax_set = Some(extra_syntax_set);
        }

        if self.highlight_theme == "css" {
            return Ok(());
        }

        if let Some(extra_theme_set) = loaded_extra_highlight_themes {
            self.extra_theme_set = Arc::new(Some(extra_theme_set));
        }

        if self.highlight_theme == "css" {
            return Ok(());
        }

        // Validate that the chosen highlight_theme exists in the loaded highlight theme sets
        if !THEME_SET.themes.contains_key(&self.highlight_theme) {
            if let Some(extra) = &*self.extra_theme_set {
@@ -8,13 +8,14 @@ pub mod taxonomies;
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use globset::{Glob, GlobSet, GlobSetBuilder};
use serde_derive::{Deserialize, Serialize};
use toml::Value as Toml;
use libs::globset::{Glob, GlobSet, GlobSetBuilder};
use libs::toml::Value as Toml;
use serde::{Deserialize, Serialize};

use crate::theme::Theme;
use errors::{bail, Error, Result};
use errors::{anyhow, bail, Result};
use utils::fs::read_file;
use utils::slugs::slugify_paths;

// We want a default base url for tests
static DEFAULT_BASE_URL: &str = "http://a-website.com";
@@ -55,8 +56,7 @@ pub struct Config {
    pub feed_filename: String,
    /// If set, files from static/ will be hardlinked instead of copied to the output dir.
    pub hard_link_static: bool,

    pub taxonomies: Vec<taxonomies::Taxonomy>,
    pub taxonomies: Vec<taxonomies::TaxonomyConfig>,

    /// Whether to compile the `sass` directory and output the css files into the static folder
    pub compile_sass: bool,
@@ -99,7 +99,7 @@ pub struct SerializedConfig<'a> {
    default_language: &'a str,
    generate_feed: bool,
    feed_filename: &'a str,
    taxonomies: &'a [taxonomies::Taxonomy],
    taxonomies: &'a [taxonomies::TaxonomyConfig],
    build_search_index: bool,
    extra: &'a HashMap<String, Toml>,
}
@@ -109,7 +109,7 @@ impl Config {
    /// Parses a string containing TOML to our Config struct
    /// Any extra parameter will end up in the extra field
    pub fn parse(content: &str) -> Result<Config> {
        let mut config: Config = match toml::from_str(content) {
        let mut config: Config = match libs::toml::from_str(content) {
            Ok(c) => c,
            Err(e) => bail!(e),
        };
@@ -124,6 +124,7 @@ impl Config {
        }

        config.add_default_language();
        config.slugify_taxonomies();

        if !config.ignored_content.is_empty() {
            // Convert the file glob strings into a compiled glob set matcher. We want to do this once,
@@ -149,19 +150,19 @@ impl Config {
    pub fn default_for_test() -> Self {
        let mut config = Config::default();
        config.add_default_language();
        config.slugify_taxonomies();
        config
    }

    /// Parses a config file from the given path
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
        let path = path.as_ref();
        let content =
            read_file(path).map_err(|e| errors::Error::chain("Failed to load config", e))?;
        let content = read_file(path)?;

        let mut config = Config::parse(&content)?;
        let config_dir = path
            .parent()
            .ok_or_else(|| Error::msg("Failed to find directory containing the config file."))?;
            .ok_or_else(|| anyhow!("Failed to find directory containing the config file."))?;

        // this is the step at which missing extra syntax and highlighting themes are raised as errors
        config.markdown.init_extra_syntaxes_and_highlight_themes(config_dir)?;
@@ -169,6 +170,14 @@ impl Config {
        Ok(config)
    }

    pub fn slugify_taxonomies(&mut self) {
        for (_, lang_options) in self.languages.iter_mut() {
            for tax_def in lang_options.taxonomies.iter_mut() {
                tax_def.slug = slugify_paths(&tax_def.name, self.slugify.taxonomies);
            }
        }
    }

    /// Makes a url, taking into account that the base url might have a trailing slash
    pub fn make_permalink(&self, path: &str) -> String {
        let trailing_bit =
@@ -247,6 +256,10 @@ impl Config {
        others
    }

    pub fn other_languages_codes(&self) -> Vec<&str> {
        self.languages.keys().filter(|k| *k != &self.default_language).map(|k| k.as_str()).collect()
    }

    /// Is this site using i18n?
    pub fn is_multilingual(&self) -> bool {
        !self.other_languages().is_empty()
@@ -272,10 +285,7 @@ impl Config {
                .translations
                .get(key)
                .ok_or_else(|| {
                    Error::msg(format!(
                        "Translation key '{}' for language '{}' is missing",
                        key, lang
                    ))
                    anyhow!("Translation key '{}' for language '{}' is missing", key, lang)
                })
                .map(|term| term.to_string())
        } else {
@@ -283,6 +293,14 @@ impl Config {
        }
    }

    pub fn has_taxonomy(&self, name: &str, lang: &str) -> bool {
        if let Some(lang_options) = self.languages.get(lang) {
            lang_options.taxonomies.iter().any(|t| t.name == name)
        } else {
            false
        }
    }

    pub fn serialize(&self, lang: &str) -> SerializedConfig {
        let options = &self.languages[lang];

@@ -325,7 +343,7 @@ pub fn merge(into: &mut Toml, from: &Toml) -> Result<()> {
        }
        _ => {
            // Trying to merge a table with something else
            Err(Error::msg(&format!("Cannot merge config.toml with theme.toml because the following values have incompatibles types:\n- {}\n - {}", into, from)))
            Err(anyhow!("Cannot merge config.toml with theme.toml because the following values have incompatibles types:\n- {}\n - {}", into, from))
        }
    }
}
@@ -561,21 +579,28 @@ ignored_content = []
    let config_str = r#"
title = "My site"
base_url = "example.com"
ignored_content = ["*.{graphml,iso}", "*.py?"]
ignored_content = ["*.{graphml,iso}", "*.py?", "**/{target,temp_folder}"]
    "#;

    let config = Config::parse(config_str).unwrap();
    let v = config.ignored_content;
    assert_eq!(v, vec!["*.{graphml,iso}", "*.py?"]);
    assert_eq!(v, vec!["*.{graphml,iso}", "*.py?", "**/{target,temp_folder}"]);

    let g = config.ignored_content_globset.unwrap();
    assert_eq!(g.len(), 2);
    assert_eq!(g.len(), 3);
    assert!(g.is_match("foo.graphml"));
    assert!(g.is_match("foo/bar/foo.graphml"));
    assert!(g.is_match("foo.iso"));
    assert!(!g.is_match("foo.png"));
    assert!(g.is_match("foo.py2"));
    assert!(g.is_match("foo.py3"));
    assert!(!g.is_match("foo.py"));
    assert!(g.is_match("foo/bar/target"));
    assert!(g.is_match("foo/bar/baz/temp_folder"));
    assert!(g.is_match("foo/bar/baz/temp_folder/target"));
    assert!(g.is_match("temp_folder"));
    assert!(g.is_match("my/isos/foo.iso"));
    assert!(g.is_match("content/poetry/zen.py2"));
}

#[test]
@@ -652,7 +677,7 @@ bar = "baz"
"#;
    let theme = Theme::parse(theme_str).unwrap();
    // We expect an error here
    assert!(!config.add_theme_extra(&theme).is_ok());
    assert!(config.add_theme_extra(&theme).is_err());
}

#[test]
@@ -689,7 +714,7 @@ highlight_theme = "asdf"
"#;

    let config = Config::parse(config);
    assert_eq!(config.is_err(), true);
    assert!(config.is_err());
}

#[test]
@@ -703,7 +728,7 @@ highlight_themes_css = [
"#;

    let config = Config::parse(config);
    assert_eq!(config.is_err(), true);
    assert!(config.is_err());
}

// https://github.com/getzola/zola/issues/1687
@@ -1,4 +1,4 @@
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
@@ -1,4 +1,4 @@
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Serialize};

use utils::slugs::SlugifyStrategy;

@@ -1,19 +1,36 @@
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
pub struct Taxonomy {
pub struct TaxonomyConfig {
    /// The name used in the URL, usually the plural
    pub name: String,
    /// The slug according to the config slugification strategy
    pub slug: String,
    /// If this is set, the list of individual taxonomy term page will be paginated
    /// by this much
    pub paginate_by: Option<usize>,
    pub paginate_path: Option<String>,
    /// Whether to generate a feed only for each taxonomy term, defaults to false
    /// Whether the taxonomy will be rendered, defaults to `true`
    pub render: bool,
    /// Whether to generate a feed only for each taxonomy term, defaults to `false`
    pub feed: bool,
}

impl Taxonomy {
impl Default for TaxonomyConfig {
    fn default() -> Self {
        Self {
            name: String::new(),
            slug: String::new(),
            paginate_by: None,
            paginate_path: None,
            render: true,
            feed: false,
        }
    }
}
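The manual `Default` impl exists so that `render` can default to `true`; a derived `Default` (as in the old code) would give `false`. A minimal sketch of the resulting behavior (crate paths as in this diff; the taxonomy names are illustrative):

```rust
use config::TaxonomyConfig; // re-exported from the config crate's lib.rs below

fn main() {
    // `#[serde(default)]` plus the manual Default impl: a bare taxonomy is
    // rendered, while `render = false` opts out without dropping the data.
    let visible: TaxonomyConfig = libs::toml::from_str(r#"name = "tags""#).unwrap();
    assert!(visible.render);

    let hidden: TaxonomyConfig =
        libs::toml::from_str("name = \"authors\"\nrender = false").unwrap();
    assert!(!hidden.render);
}
```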

impl TaxonomyConfig {
    pub fn is_paginated(&self) -> bool {
        if let Some(paginate_by) = self.paginate_by {
            paginate_by > 0
@@ -1,22 +1,18 @@
use lazy_static::lazy_static;
use syntect::dumps::from_binary;
use syntect::highlighting::{Theme, ThemeSet};
use syntect::html::ClassStyle;
use syntect::parsing::{SyntaxReference, SyntaxSet};
use libs::once_cell::sync::Lazy;
use libs::syntect::dumps::from_binary;
use libs::syntect::highlighting::{Theme, ThemeSet};
use libs::syntect::html::ClassStyle;
use libs::syntect::parsing::{SyntaxReference, SyntaxSet};

use crate::config::Config;

pub const CLASS_STYLE: ClassStyle = ClassStyle::SpacedPrefixed { prefix: "z-" };

lazy_static! {
    pub static ref SYNTAX_SET: SyntaxSet = {
        let ss: SyntaxSet =
            from_binary(include_bytes!("../../../sublime/syntaxes/newlines.packdump"));
        ss
    };
    pub static ref THEME_SET: ThemeSet =
        from_binary(include_bytes!("../../../sublime/themes/all.themedump"));
}
pub static SYNTAX_SET: Lazy<SyntaxSet> =
    Lazy::new(|| from_binary(include_bytes!("../../../sublime/syntaxes/newlines.packdump")));

pub static THEME_SET: Lazy<ThemeSet> =
    Lazy::new(|| from_binary(include_bytes!("../../../sublime/themes/all.themedump")));

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HighlightSource {
@@ -2,14 +2,14 @@ mod config;
pub mod highlighting;
mod theme;

use std::path::Path;

pub use crate::config::{
    languages::LanguageOptions, link_checker::LinkChecker, search::Search, slugify::Slugify,
    taxonomies::Taxonomy, Config,
    languages::LanguageOptions, link_checker::LinkChecker, link_checker::LinkCheckerLevel,
    search::Search, slugify::Slugify, taxonomies::TaxonomyConfig, Config,
};
use errors::Result;

use std::path::Path;

/// Get and parse the config.
/// If it doesn't succeed, exit
pub fn get_config(filename: &Path) -> Result<Config> {
@@ -1,10 +1,10 @@
use std::collections::HashMap;
use std::path::Path;

use serde_derive::{Deserialize, Serialize};
use toml::Value as Toml;
use libs::toml::Value as Toml;
use serde::{Deserialize, Serialize};

use errors::{bail, Result};
use errors::{bail, Context, Result};
use utils::fs::read_file;

/// Holds the data from a `theme.toml` file.
@@ -40,8 +40,8 @@ impl Theme {

    /// Parses a theme file from the given path
    pub fn from_file(path: &Path, theme_name: &str) -> Result<Theme> {
        let content = read_file(path)
            .map_err(|e| errors::Error::chain(format!("Failed to load theme {}", theme_name), e))?;
        let content =
            read_file(path).with_context(|| format!("Failed to load theme {}", theme_name))?;
        Theme::parse(&content)
    }
}
8 components/console/Cargo.toml Normal file
@@ -0,0 +1,8 @@
[package]
name = "console"
version = "0.1.0"
edition = "2021"

[dependencies]
errors = { path = "../errors" }
libs = { path = "../libs" }
57 components/console/src/lib.rs Normal file
@@ -0,0 +1,57 @@
use std::env;
use std::io::Write;

use libs::atty;
use libs::once_cell::sync::Lazy;
use libs::termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

/// Termcolor color choice.
/// We do not rely on ColorChoice::Auto behavior
/// as the check is already performed by has_color.
static COLOR_CHOICE: Lazy<ColorChoice> =
    Lazy::new(|| if has_color() { ColorChoice::Always } else { ColorChoice::Never });

pub fn info(message: &str) {
    colorize(message, ColorSpec::new().set_bold(true), StandardStream::stdout(*COLOR_CHOICE));
}

pub fn warn(message: &str) {
    colorize(
        &format!("{}{}", "Warning: ", message),
        ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)),
        StandardStream::stdout(*COLOR_CHOICE),
    );
}

pub fn success(message: &str) {
    colorize(
        message,
        ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)),
        StandardStream::stdout(*COLOR_CHOICE),
    );
}

pub fn error(message: &str) {
    colorize(
        &format!("{}{}", "Error: ", message),
        ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)),
        StandardStream::stderr(*COLOR_CHOICE),
    );
}

/// Print a colorized message to stdout
fn colorize(message: &str, color: &ColorSpec, mut stream: StandardStream) {
    stream.set_color(color).unwrap();
    write!(stream, "{}", message).unwrap();
    stream.set_color(&ColorSpec::new()).unwrap();
    writeln!(stream).unwrap();
}

/// Check whether to output colors
fn has_color() -> bool {
    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0"
        && env::var("NO_COLOR").is_err();
    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0";

    force_colors || use_colors && atty::is(atty::Stream::Stdout)
}
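The new `console` component centralizes the CLI's colored output and honors the CLICOLOR/CLICOLOR_FORCE/NO_COLOR conventions. A usage sketch (the messages are illustrative):

```rust
fn main() {
    // Falls back to plain text when stdout is not a TTY or colors are disabled.
    console::info("Building site...");
    console::warn("found a draft page"); // prefixed "Warning: ", bold yellow
    console::success("Done in 142ms."); // bold green
    console::error("broken link in about.md"); // prefixed "Error: ", bold red, to stderr
}
```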
20 components/content/Cargo.toml Normal file
@@ -0,0 +1,20 @@
[package]
name = "content"
version = "0.1.0"
edition = "2021"

[dependencies]
serde = {version = "1.0", features = ["derive"] }
time = { version = "0.3", features = ["macros"] }

errors = { path = "../errors" }
utils = { path = "../utils" }
libs = { path = "../libs" }
config = { path = "../config" }

# TODO: remove it?
markdown = { path = "../markdown" }

[dev-dependencies]
test-case = "2" # TODO: can we solve that usecase in src/page.rs in a simpler way? A custom macro_rules! maybe
tempfile = "3.3.0"
@@ -1,6 +1,5 @@
use std::path::{Path, PathBuf};

use config::Config;
use errors::{bail, Result};

/// Takes a full path to a file and returns only the components after the first `content` directory
@@ -115,14 +114,18 @@ impl FileInfo {
    /// Look for a language in the filename.
    /// If a language has been found, update the name of the file in this struct to
    /// remove it and return the language code
    pub fn find_language(&mut self, config: &Config) -> Result<String> {
    pub fn find_language(
        &mut self,
        default_language: &str,
        other_languages: &[&str],
    ) -> Result<String> {
        // No languages? Nothing to do
        if !config.is_multilingual() {
            return Ok(config.default_language.clone());
        if other_languages.is_empty() {
            return Ok(default_language.to_owned());
        }

        if !self.name.contains('.') {
            return Ok(config.default_language.clone());
            return Ok(default_language.to_owned());
        }

        // Go with the assumption that no one is using `.` in filenames when using i18n
@@ -130,13 +133,13 @@ impl FileInfo {
        let mut parts: Vec<String> = self.name.splitn(2, '.').map(|s| s.to_string()).collect();

        // If language code is same as default language, go for default
        if config.default_language == parts[1].as_str() {
            return Ok(config.default_language.clone());
        if default_language == parts[1].as_str() {
            return Ok(default_language.to_owned());
        }

        // The language code is not present in the config: typo or the user forgot to add it to the
        // config
        if !config.other_languages().contains_key(&parts[1].as_ref()) {
        if !other_languages.contains(&parts[1].as_ref()) {
            bail!("File {:?} has a language code of {} which isn't present in the config.toml `languages`", self.path, parts[1]);
        }

@@ -152,8 +155,6 @@ impl FileInfo {
mod tests {
    use std::path::{Path, PathBuf};

    use config::{Config, LanguageOptions};

    use super::{find_content_components, FileInfo};

    #[test]
@@ -183,77 +184,66 @@ mod tests {

    #[test]
    fn can_find_valid_language_in_page() {
        let mut config = Config::default();
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &["fr"]);
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), "fr");
    }

    #[test]
    fn can_find_valid_language_with_default_locale() {
        let mut config = Config::default();
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &["fr"]);
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), config.default_language);
        assert_eq!(res.unwrap(), "en");
    }

    #[test]
    fn can_find_valid_language_in_page_with_assets() {
        let mut config = Config::default();
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
            &PathBuf::new(),
        );
        assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
        let res = file.find_language(&config);
        let res = file.find_language("en", &["fr"]);
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), "fr");
    }

    #[test]
    fn do_nothing_on_unknown_language_in_page_with_i18n_off() {
        let config = Config::default();
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &[]);
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), config.default_language);
        assert_eq!(res.unwrap(), "en");
    }

    #[test]
    fn errors_on_unknown_language_in_page_with_i18n_on() {
        let mut config = Config::default();
        config.languages.insert("it".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &["it"]);
        assert!(res.is_err());
    }

    #[test]
    fn can_find_valid_language_in_section() {
        let mut config = Config::default();
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_section(
            Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &["fr"]);
        assert!(res.is_ok());
        assert_eq!(res.unwrap(), "fr");
    }
@@ -274,13 +264,11 @@ mod tests {
    /// Regression test for https://github.com/getzola/zola/issues/854
    #[test]
    fn correct_canonical_after_find_language() {
        let mut config = Config::default();
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
            &PathBuf::new(),
        );
        let res = file.find_language(&config);
        let res = file.find_language("en", &["fr"]);
        assert!(res.is_ok());
        assert_eq!(
            file.canonical,
7 components/content/src/front_matter/mod.rs Normal file
@@ -0,0 +1,7 @@
mod page;
mod section;
mod split;

pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;
pub use split::{split_page_content, split_section_content};
@@ -1,13 +1,15 @@
use std::collections::HashMap;

use chrono::prelude::*;
use serde_derive::Deserialize;
use tera::{Map, Value};
use libs::tera::{Map, Value};
use serde::Deserialize;
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

use errors::{bail, Result};
use utils::de::{fix_toml_dates, from_toml_datetime};

use crate::RawFrontMatter;
use crate::front_matter::split::RawFrontMatter;

/// The front matter of every page
#[derive(Debug, Clone, PartialEq, Deserialize)]
@@ -20,21 +22,21 @@ pub struct PageFrontMatter {
    /// Updated date
    #[serde(default, deserialize_with = "from_toml_datetime")]
    pub updated: Option<String>,
    /// Chrono converted update datatime
    /// Datetime content was last updated
    #[serde(default, skip_deserializing)]
    pub updated_datetime: Option<NaiveDateTime>,
    pub updated_datetime: Option<OffsetDateTime>,
    /// The converted update datetime into a (year, month, day) tuple
    #[serde(default, skip_deserializing)]
    pub updated_datetime_tuple: Option<(i32, u32, u32)>,
    pub updated_datetime_tuple: Option<(i32, u8, u8)>,
    /// Date if we want to order pages (ie blog post)
    #[serde(default, deserialize_with = "from_toml_datetime")]
    pub date: Option<String>,
    /// Chrono converted datetime
    /// Datetime content was created
    #[serde(default, skip_deserializing)]
    pub datetime: Option<NaiveDateTime>,
    pub datetime: Option<OffsetDateTime>,
    /// The converted date into a (year, month, day) tuple
    #[serde(default, skip_deserializing)]
    pub datetime_tuple: Option<(i32, u32, u32)>,
    pub datetime_tuple: Option<(i32, u8, u8)>,
    /// Whether this page is a draft
    pub draft: bool,
    /// The page slug. Will be used instead of the filename if present
@@ -68,11 +70,13 @@ pub struct PageFrontMatter {
/// 2. a local datetime (RFC3339 with timezone omitted)
/// 3. a local date (YYYY-MM-DD).
/// This tries each in order.
fn parse_datetime(d: &str) -> Option<NaiveDateTime> {
    DateTime::parse_from_rfc3339(d)
        .or_else(|_| DateTime::parse_from_rfc3339(format!("{}Z", d).as_ref()))
        .map(|s| s.naive_local())
        .or_else(|_| NaiveDate::parse_from_str(d, "%Y-%m-%d").map(|s| s.and_hms(0, 0, 0)))
fn parse_datetime(d: &str) -> Option<OffsetDateTime> {
    OffsetDateTime::parse(d, &Rfc3339)
        .or_else(|_| OffsetDateTime::parse(format!("{}Z", d).as_ref(), &Rfc3339))
        .or_else(|_| match Date::parse(d, &format_description!("[year]-[month]-[day]")) {
            Ok(date) => Ok(PrimitiveDateTime::new(date, time!(0:00)).assume_utc()),
            Err(e) => Err(e),
        })
        .ok()
}
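A quick sketch of what the new `parse_datetime` accepts, mirroring the `#[test_case]` coverage further down (`parse_datetime` is private, so this would live in this module's tests):

```rust
use time::macros::datetime;

#[test]
fn parse_datetime_accepts_the_three_documented_forms() {
    // Full RFC 3339; RFC 3339 with the timezone omitted (a `Z` is appended);
    // and a bare date, which becomes midnight UTC.
    assert_eq!(
        parse_datetime("2002-10-02T15:00:00Z"),
        Some(datetime!(2002 - 10 - 02 15:00:00 UTC))
    );
    assert_eq!(
        parse_datetime("2002-10-02T15:00:00"),
        Some(datetime!(2002 - 10 - 02 15:00:00 UTC))
    );
    assert_eq!(parse_datetime("2016-10-10"), Some(datetime!(2016 - 10 - 10 0:00 UTC)));
    assert_eq!(parse_datetime("not a date"), None);
}
```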

@@ -108,15 +112,15 @@ impl PageFrontMatter {
        Ok(f)
    }

    /// Converts the TOML datetime to a Chrono naive datetime
    /// Converts the TOML datetime to a time::OffsetDateTime
    /// Also grabs the year/month/day tuple that will be used in serialization
    pub fn date_to_datetime(&mut self) {
        self.datetime = self.date.as_ref().map(|s| s.as_ref()).and_then(parse_datetime);
        self.datetime_tuple = self.datetime.map(|dt| (dt.year(), dt.month(), dt.day()));
        self.datetime_tuple = self.datetime.map(|dt| (dt.year(), dt.month().into(), dt.day()));

        self.updated_datetime = self.updated.as_ref().map(|s| s.as_ref()).and_then(parse_datetime);
        self.updated_datetime_tuple =
            self.updated_datetime.map(|dt| (dt.year(), dt.month(), dt.day()));
            self.updated_datetime.map(|dt| (dt.year(), dt.month().into(), dt.day()));
    }

    pub fn weight(&self) -> usize {
@@ -127,6 +131,7 @@ impl PageFrontMatter {
impl Default for PageFrontMatter {
    fn default() -> PageFrontMatter {
        PageFrontMatter {
            in_search_index: true,
            title: None,
            description: None,
            updated: None,
@@ -141,7 +146,6 @@ impl Default for PageFrontMatter {
            taxonomies: HashMap::new(),
            weight: None,
            aliases: Vec::new(),
            in_search_index: true,
            template: None,
            extra: Map::new(),
        }
@@ -150,10 +154,11 @@ impl Default for PageFrontMatter {

#[cfg(test)]
mod tests {
    use super::PageFrontMatter;
    use super::RawFrontMatter;
    use tera::to_value;
    use crate::front_matter::page::PageFrontMatter;
    use crate::front_matter::split::RawFrontMatter;
    use libs::tera::to_value;
    use test_case::test_case;
    use time::macros::datetime;

    #[test_case(&RawFrontMatter::Toml(r#"  "#); "toml")]
    #[test_case(&RawFrontMatter::Yaml(r#"  "#); "yaml")]
@@ -229,6 +234,7 @@ date: 2016-10-10
    fn can_parse_date_yyyy_mm_dd(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2016 - 10 - 10 0:00 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -244,6 +250,7 @@ date: 2002-10-02T15:00:00Z
    fn can_parse_date_rfc3339(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -259,6 +266,7 @@ date: 2002-10-02T15:00:00
    fn can_parse_date_rfc3339_without_timezone(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -274,6 +282,7 @@ date: 2002-10-02 15:00:00+02:00
    fn can_parse_date_rfc3339_with_space(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00+02:00));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -289,6 +298,7 @@ date: 2002-10-02 15:00:00
    fn can_parse_date_rfc3339_with_space_without_timezone(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -304,6 +314,7 @@ date: 2002-10-02T15:00:00.123456Z
    fn can_parse_date_rfc3339_with_microseconds(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2002 - 10 - 02 15:00:00.123456 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -349,6 +360,8 @@ date: "2016-10-10"
    fn can_parse_valid_date_as_string(content: &RawFrontMatter) {
        let res = PageFrontMatter::parse(content).unwrap();
        assert!(res.date.is_some());
        assert!(res.datetime.is_some());
        assert_eq!(res.datetime.unwrap(), datetime!(2016 - 10 - 10 0:00 UTC));
    }

    #[test_case(&RawFrontMatter::Toml(r#"
@@ -1,11 +1,12 @@
use serde_derive::{Deserialize, Serialize};
use tera::{Map, Value};
use libs::tera::{Map, Value};
use serde::{Deserialize, Serialize};

use super::{InsertAnchor, SortBy};
use errors::Result;
use utils::de::fix_toml_dates;
use utils::types::InsertAnchor;

use crate::RawFrontMatter;
use crate::front_matter::split::RawFrontMatter;
use crate::SortBy;

static DEFAULT_PAGINATE_PATH: &str = "page";

@@ -58,7 +59,6 @@ pub struct SectionFrontMatter {
    /// Whether the section should pass its pages on to the parent section. Defaults to `false`.
    /// Useful when the section shouldn't split up the parent section, like
    /// sections for each year under a posts section.
    #[serde(skip_serializing)]
    pub transparent: bool,
    /// Optional template for all pages in this section (including the pages of children section)
    #[serde(skip_serializing)]
@@ -1,26 +1,24 @@
use lazy_static::lazy_static;
use serde_derive::{Deserialize, Serialize};

use errors::{bail, Error, Result};
use regex::Regex;
use std::path::Path;

mod page;
mod section;
use errors::{bail, Context, Result};
use libs::once_cell::sync::Lazy;
use libs::regex::Regex;
use libs::{serde_yaml, toml};

pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;
use crate::front_matter::page::PageFrontMatter;
use crate::front_matter::section::SectionFrontMatter;

lazy_static! {
    static ref TOML_RE: Regex = Regex::new(
        r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))"
static TOML_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(
        r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))",
    )
    .unwrap();
    static ref YAML_RE: Regex = Regex::new(
        r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))"
    )
    .unwrap();
}
    .unwrap()
});

static YAML_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))")
        .unwrap()
});

pub enum RawFrontMatter<'a> {
    Toml(&'a str),
@@ -28,7 +26,7 @@ pub enum RawFrontMatter<'a> {
}

impl RawFrontMatter<'_> {
    fn deserialize<T>(&self) -> Result<T>
    pub(crate) fn deserialize<T>(&self) -> Result<T>
    where
        T: serde::de::DeserializeOwned,
    {
@@ -36,36 +34,13 @@ impl RawFrontMatter<'_> {
            RawFrontMatter::Toml(s) => toml::from_str(s)?,
            RawFrontMatter::Yaml(s) => match serde_yaml::from_str(s) {
                Ok(d) => d,
                Err(e) => bail!(format!("YAML deserialize error: {:?}", e)),
                Err(e) => bail!("YAML deserialize error: {:?}", e),
            },
        };
        Ok(f)
    }
}

#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SortBy {
    /// Most recent to oldest
    Date,
    /// Most recent to oldest
    UpdateDate,
    /// Sort by title
    Title,
    /// Lower weight comes first
    Weight,
    /// No sorting
    None,
}

#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum InsertAnchor {
    Left,
    Right,
    None,
}

/// Split a file between the front matter and its content
/// Will return an error if the front matter wasn't found
fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(RawFrontMatter<'c>, &'c str)> {
@@ -102,12 +77,10 @@ pub fn split_section_content<'c>(
    content: &'c str,
) -> Result<(SectionFrontMatter, &'c str)> {
    let (front_matter, content) = split_content(file_path, content)?;
    let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| {
        Error::chain(
            format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()),
            e,
        )
    let meta = SectionFrontMatter::parse(&front_matter).with_context(|| {
        format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy())
    })?;

    Ok((meta, content))
}

@@ -118,11 +91,8 @@ pub fn split_page_content<'c>(
    content: &'c str,
) -> Result<(PageFrontMatter, &'c str)> {
    let (front_matter, content) = split_content(file_path, content)?;
    let meta = PageFrontMatter::parse(&front_matter).map_err(|e| {
        Error::chain(
            format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()),
            e,
        )
    let meta = PageFrontMatter::parse(&front_matter).with_context(|| {
        format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy())
    })?;
    Ok((meta, content))
}
21 components/content/src/lib.rs Normal file
@@ -0,0 +1,21 @@
mod front_matter;

mod file_info;
mod library;
mod page;
mod pagination;
mod section;
mod ser;
mod sorting;
mod taxonomies;
mod types;
mod utils;

pub use file_info::FileInfo;
pub use front_matter::{PageFrontMatter, SectionFrontMatter};
pub use library::Library;
pub use page::Page;
pub use pagination::Paginator;
pub use section::Section;
pub use taxonomies::{Taxonomy, TaxonomyTerm};
pub use types::*;
782 components/content/src/library.rs Normal file
@@ -0,0 +1,782 @@
use std::path::{Path, PathBuf};

use config::Config;
use libs::ahash::{AHashMap, AHashSet};

use crate::ser::TranslatedContent;
use crate::sorting::sort_pages;
use crate::taxonomies::{Taxonomy, TaxonomyFound};
use crate::{Page, Section, SortBy};

macro_rules! set {
    ($($key:expr,)+) => (set!($($key),+));

    ( $($key:expr),* ) => {
        {
            let mut _set = AHashSet::new();
            $(
                _set.insert($key);
            )*
            _set
        }
    };
}

#[derive(Debug, Default)]
pub struct Library {
    pub pages: AHashMap<PathBuf, Page>,
    pub sections: AHashMap<PathBuf, Section>,
    // aliases -> files, so we can easily check for conflicts
    pub reverse_aliases: AHashMap<String, AHashSet<PathBuf>>,
    pub translations: AHashMap<PathBuf, AHashSet<PathBuf>>,
    pub backlinks: AHashMap<String, AHashSet<PathBuf>>,
    // A mapping of {lang -> <slug, {term -> vec<paths>}>>}
    taxonomies_def: AHashMap<String, AHashMap<String, AHashMap<String, Vec<PathBuf>>>>,
    // All the taxonomies from config.toml in their slugified version
    // So we don't need to pass the Config when adding a page to know how to slugify and we only
    // slugify once
    taxo_name_to_slug: AHashMap<String, String>,
}

impl Library {
    pub fn new(config: &Config) -> Self {
        let mut lib = Self::default();

        for (lang, options) in &config.languages {
            let mut taxas = AHashMap::new();
            for tax_def in &options.taxonomies {
                taxas.insert(tax_def.slug.clone(), AHashMap::new());
                lib.taxo_name_to_slug.insert(tax_def.name.clone(), tax_def.slug.clone());
            }
            lib.taxonomies_def.insert(lang.to_string(), taxas);
        }
        lib
    }

    fn insert_reverse_aliases(&mut self, file_path: &Path, entries: Vec<String>) {
        for entry in entries {
            self.reverse_aliases
                .entry(entry)
                .and_modify(|s| {
                    s.insert(file_path.to_path_buf());
                })
                .or_insert_with(|| set! {file_path.to_path_buf()});
        }
    }

    /// This will check every section/page paths + the aliases and ensure none of them
    /// are colliding.
    /// Returns Vec<(path colliding, [list of files causing that collision])>
    pub fn find_path_collisions(&self) -> Vec<(String, Vec<PathBuf>)> {
        self.reverse_aliases
            .iter()
            .filter_map(|(alias, files)| {
                if files.len() > 1 {
                    Some((alias.clone(), files.clone().into_iter().collect::<Vec<_>>()))
                } else {
                    None
                }
            })
            .collect()
    }

    pub fn insert_page(&mut self, page: Page) {
        let file_path = page.file.path.clone();
        let mut entries = vec![page.path.clone()];
        entries.extend(page.meta.aliases.to_vec());
        self.insert_reverse_aliases(&file_path, entries);

        for (taxa_name, terms) in &page.meta.taxonomies {
            for term in terms {
                // Safe unwraps as we create all lang/taxa and we validated that they are correct
                // before getting there
                let taxa_def = self
                    .taxonomies_def
                    .get_mut(&page.lang)
                    .expect("lang not found")
                    .get_mut(&self.taxo_name_to_slug[taxa_name])
                    .expect("taxa not found");

                if !taxa_def.contains_key(term) {
                    taxa_def.insert(term.to_string(), Vec::new());
                }
                taxa_def.get_mut(term).unwrap().push(page.file.path.clone());
            }
        }

        self.pages.insert(file_path, page);
    }

    pub fn insert_section(&mut self, section: Section) {
        let file_path = section.file.path.clone();
        if section.meta.render {
            let mut entries = vec![section.path.clone()];
            entries.extend(section.meta.aliases.to_vec());
            self.insert_reverse_aliases(&file_path, entries);
        }
        self.sections.insert(file_path, section);
    }

    /// Fills a map of target -> {content mentioning it}
    /// This can only be called _after_ rendering markdown as we need to have accumulated all
    /// the links first
    pub fn fill_backlinks(&mut self) {
        self.backlinks.clear();

        let mut add_backlink = |target: &str, source: &Path| {
            self.backlinks
                .entry(target.to_owned())
                .and_modify(|s| {
                    s.insert(source.to_path_buf());
                })
                .or_insert(set! {source.to_path_buf()});
        };

        for (_, page) in &self.pages {
            for (internal_link, _) in &page.internal_links {
                add_backlink(internal_link, &page.file.path);
            }
        }
        for (_, section) in &self.sections {
            for (internal_link, _) in &section.internal_links {
                add_backlink(internal_link, &section.file.path);
            }
        }
    }
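This map is what backs the new backlinks support from the changelog. A minimal consumption sketch (the helper is hypothetical; keys are whatever internal-link targets were accumulated during markdown rendering):

```rust
// Hypothetical helper: list everything that links to `target`.
fn print_backlinks(library: &content::Library, target: &str) {
    if let Some(sources) = library.backlinks.get(target) {
        for source in sources {
            println!("{} <- {}", target, source.display());
        }
    }
}
```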

    /// This is called _before_ rendering the markdown the pages/sections
    pub fn find_taxonomies(&self, config: &Config) -> Vec<Taxonomy> {
        let mut taxonomies = Vec::new();

        for (lang, taxonomies_data) in &self.taxonomies_def {
            for (taxa_slug, terms_pages) in taxonomies_data {
                let taxo_config = &config.languages[lang]
                    .taxonomies
                    .iter()
                    .find(|t| &t.slug == taxa_slug)
                    .expect("taxo should exist");
                let mut taxo_found = TaxonomyFound::new(taxa_slug.to_string(), lang, taxo_config);
                for (term, page_path) in terms_pages {
                    taxo_found
                        .terms
                        .insert(term, page_path.iter().map(|p| &self.pages[p]).collect());
                }

                taxonomies.push(Taxonomy::new(taxo_found, config));
            }
        }

        taxonomies
    }

    /// Sort all sections pages according to sorting method given
    /// Pages that cannot be sorted are set to the section.ignored_pages instead
    pub fn sort_section_pages(&mut self) {
        let mut updates = AHashMap::new();
        for (path, section) in &self.sections {
            let pages: Vec<_> = section.pages.iter().map(|p| &self.pages[p]).collect();
            let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
                SortBy::None => continue,
                _ => sort_pages(&pages, section.meta.sort_by),
            };

            updates
                .insert(path.clone(), (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by));
        }

        for (path, (sorted, unsortable, _)) in updates {
            if !self.sections[&path].meta.transparent {
                // Fill siblings
                for (i, page_path) in sorted.iter().enumerate() {
                    let mut p = self.pages.get_mut(page_path).unwrap();
                    if i > 0 {
                        // lighter / later / title_prev
                        p.lower = Some(sorted[i - 1].clone());
                    }

                    if i < sorted.len() - 1 {
                        // heavier / earlier / title_next
                        p.higher = Some(sorted[i + 1].clone());
                    }
                }
            }

            if let Some(s) = self.sections.get_mut(&path) {
                s.pages = sorted;
                s.ignored_pages = unsortable;
            }
        }
    }

    /// Find out the direct subsections of each subsection if there are some
    /// as well as the pages for each section
    pub fn populate_sections(&mut self, config: &Config, content_path: &Path) {
        let mut add_translation = |entry: &Path, path: &Path| {
            if config.is_multilingual() {
                self.translations
                    .entry(entry.to_path_buf())
                    .and_modify(|trans| {
                        trans.insert(path.to_path_buf());
                    })
                    .or_insert(set! {path.to_path_buf()});
            }
        };

        let mut ancestors = AHashMap::new();
        let mut subsections = AHashMap::new();
        let mut sections_weight = AHashMap::new();

        // We iterate over the sections twice
        // The first time to build up the list of ancestors for each section
        for (path, section) in &self.sections {
            sections_weight.insert(path.clone(), section.meta.weight);
            if let Some(ref grand_parent) = section.file.grand_parent {
                subsections
                    // Using the original filename to work for multi-lingual sections
                    .entry(grand_parent.join(&section.file.filename))
                    .or_insert_with(Vec::new)
                    .push(section.file.path.clone());
            }

            add_translation(&section.file.canonical, path);

            // Root sections have no ancestors
            if section.is_index() {
                ancestors.insert(section.file.path.clone(), vec![]);
                continue;
            }

            // Index section is the first ancestor of every single section
            let mut cur_path = content_path.to_path_buf();
            let mut parents = vec![section.file.filename.clone()];
            for component in &section.file.components {
                cur_path = cur_path.join(component);
                // Skip itself
                if cur_path == section.file.parent {
                    continue;
                }

                let index_path = cur_path.join(&section.file.filename);
                if let Some(s) = self.sections.get(&index_path) {
                    parents.push(s.file.relative.clone());
                }
            }
            ancestors.insert(section.file.path.clone(), parents);
        }

        // The second time we actually assign ancestors and order subsections based on their weights
        for (path, section) in self.sections.iter_mut() {
            section.subsections.clear();
            section.pages.clear();
            section.ignored_pages.clear();
            section.ancestors.clear();

            if let Some(children) = subsections.get(&*path) {
                let mut children: Vec<_> = children.clone();
                children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
                section.subsections = children;
            }
            if let Some(parents) = ancestors.get(&*path) {
                section.ancestors = parents.clone();
            }
        }

        // We pre-build the index filename for each language
        let mut index_filename_by_lang = AHashMap::with_capacity(config.languages.len());
        for code in config.languages.keys() {
            if code == &config.default_language {
                index_filename_by_lang.insert(code, "_index.md".to_owned());
            } else {
                index_filename_by_lang.insert(code, format!("_index.{}.md", code));
            }
        }

        // Then once we took care of the sections, we find the pages of each section
        for (path, page) in self.pages.iter_mut() {
let parent_filename = &index_filename_by_lang[&page.lang];
|
||||
add_translation(&page.file.canonical, path);
|
||||
let mut parent_section_path = page.file.parent.join(&parent_filename);
|
||||
|
||||
while let Some(parent_section) = self.sections.get_mut(&parent_section_path) {
|
||||
let is_transparent = parent_section.meta.transparent;
|
||||
parent_section.pages.push(path.clone());
|
||||
page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_default();
|
||||
// Don't forget to push the actual parent
|
||||
page.ancestors.push(parent_section.file.relative.clone());
|
||||
|
||||
// Find the page template if one of a parent has page_template set
|
||||
// Stops after the first one found, keep in mind page.ancestors
|
||||
// is [index, ..., parent] so we need to reverse it first
|
||||
if page.meta.template.is_none() {
|
||||
for ancestor in page.ancestors.iter().rev() {
|
||||
let s = self.sections.get(&content_path.join(ancestor)).unwrap();
|
||||
if let Some(ref tpl) = s.meta.page_template {
|
||||
page.meta.template = Some(tpl.clone());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !is_transparent {
|
||||
break;
|
||||
}
|
||||
|
||||
// We've added `_index(.{LANG})?.md` so if we are here so we need to go up twice
|
||||
match parent_section_path.clone().parent().unwrap().parent() {
|
||||
Some(parent) => parent_section_path = parent.join(&parent_filename),
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// And once we have all the pages assigned to their section, we sort them
|
||||
self.sort_section_pages();
|
||||
}
|
||||
|
||||
/// Find all the orphan pages: pages that are in a folder without an `_index.md`
|
||||
pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
|
||||
self.pages.iter().filter(|(_, p)| p.ancestors.is_empty()).map(|(_, p)| p).collect()
|
||||
}
|
||||
|
||||
/// Find all the translated content for a given canonical path.
|
||||
/// The translated content can be either for a section or a page
|
||||
pub fn find_translations(&self, canonical_path: &Path) -> Vec<TranslatedContent<'_>> {
|
||||
let mut translations = vec![];
|
||||
|
||||
if let Some(paths) = self.translations.get(canonical_path) {
|
||||
for path in paths {
|
||||
let (lang, permalink, title, path) = {
|
||||
if self.sections.contains_key(path) {
|
||||
let s = &self.sections[path];
|
||||
(&s.lang, &s.permalink, &s.meta.title, &s.file.path)
|
||||
} else {
|
||||
let s = &self.pages[path];
|
||||
(&s.lang, &s.permalink, &s.meta.title, &s.file.path)
|
||||
}
|
||||
};
|
||||
translations.push(TranslatedContent { lang, permalink, title, path });
|
||||
}
|
||||
}
|
||||
|
||||
translations
|
||||
}
|
||||
|
||||
pub fn find_pages_by_path(&self, paths: &[PathBuf]) -> Vec<&Page> {
|
||||
paths.iter().map(|p| &self.pages[p]).collect()
|
||||
}
|
||||
|
||||
pub fn find_sections_by_path(&self, paths: &[PathBuf]) -> Vec<&Section> {
|
||||
paths.iter().map(|p| &self.sections[p]).collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::FileInfo;
|
||||
use config::{LanguageOptions, TaxonomyConfig};
|
||||
use std::collections::HashMap;
|
||||
use utils::slugs::SlugifyStrategy;
|
||||
|
||||
#[test]
|
||||
fn can_find_collisions_with_paths() {
|
||||
let mut library = Library::default();
|
||||
let mut section = Section { path: "hello".to_owned(), ..Default::default() };
|
||||
section.file.path = PathBuf::from("hello.md");
|
||||
library.insert_section(section.clone());
|
||||
let mut section2 = Section { path: "hello".to_owned(), ..Default::default() };
|
||||
section2.file.path = PathBuf::from("bonjour.md");
|
||||
library.insert_section(section2.clone());
|
||||
|
||||
let collisions = library.find_path_collisions();
|
||||
assert_eq!(collisions.len(), 1);
|
||||
assert_eq!(collisions[0].0, "hello");
|
||||
assert!(collisions[0].1.contains(§ion.file.path));
|
||||
assert!(collisions[0].1.contains(§ion2.file.path));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_collisions_with_aliases() {
|
||||
let mut library = Library::default();
|
||||
let mut section = Section { path: "hello".to_owned(), ..Default::default() };
|
||||
section.file.path = PathBuf::from("hello.md");
|
||||
library.insert_section(section.clone());
|
||||
let mut section2 = Section { path: "world".to_owned(), ..Default::default() };
|
||||
section2.file.path = PathBuf::from("bonjour.md");
|
||||
section2.meta.aliases = vec!["hello".to_owned(), "hola".to_owned()];
|
||||
library.insert_section(section2.clone());
|
||||
// Sections with render=false do not collide with anything
|
||||
// https://github.com/getzola/zola/issues/1656
|
||||
let mut section3 = Section { path: "world2".to_owned(), ..Default::default() };
|
||||
section3.meta.render = false;
|
||||
section3.file.path = PathBuf::from("bonjour2.md");
|
||||
section3.meta.aliases = vec!["hola".to_owned()];
|
||||
library.insert_section(section3);
|
||||
|
||||
let collisions = library.find_path_collisions();
|
||||
assert_eq!(collisions.len(), 1);
|
||||
assert_eq!(collisions[0].0, "hello");
|
||||
assert!(collisions[0].1.contains(§ion.file.path));
|
||||
assert!(collisions[0].1.contains(§ion2.file.path));
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum PageSort {
|
||||
None,
|
||||
Date(&'static str),
|
||||
Title(&'static str),
|
||||
Weight(usize),
|
||||
}
|
||||
|
||||
fn create_page(file_path: &str, lang: &str, page_sort: PageSort) -> Page {
|
||||
let mut page = Page::default();
|
||||
page.lang = lang.to_owned();
|
||||
page.file = FileInfo::new_page(Path::new(file_path), &PathBuf::new());
|
||||
match page_sort {
|
||||
PageSort::None => (),
|
||||
PageSort::Date(date) => {
|
||||
page.meta.date = Some(date.to_owned());
|
||||
page.meta.date_to_datetime();
|
||||
}
|
||||
PageSort::Title(title) => {
|
||||
page.meta.title = Some(title.to_owned());
|
||||
}
|
||||
PageSort::Weight(w) => {
|
||||
page.meta.weight = Some(w);
|
||||
}
|
||||
}
|
||||
page.file.find_language("en", &["fr"]).unwrap();
|
||||
page
|
||||
}
|
||||
|
||||
fn create_section(
|
||||
file_path: &str,
|
||||
lang: &str,
|
||||
weight: usize,
|
||||
transparent: bool,
|
||||
sort_by: SortBy,
|
||||
) -> Section {
|
||||
let mut section = Section::default();
|
||||
section.lang = lang.to_owned();
|
||||
section.file = FileInfo::new_section(Path::new(file_path), &PathBuf::new());
|
||||
section.meta.weight = weight;
|
||||
section.meta.transparent = transparent;
|
||||
section.meta.sort_by = sort_by;
|
||||
section.meta.page_template = Some("new_page.html".to_owned());
|
||||
section.file.find_language("en", &["fr"]).unwrap();
|
||||
section
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_populate_sections() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.languages.insert("fr".to_owned(), LanguageOptions::default());
|
||||
let mut library = Library::default();
|
||||
let sections = vec![
|
||||
("content/_index.md", "en", 0, false, SortBy::None),
|
||||
("content/_index.fr.md", "fr", 0, false, SortBy::None),
|
||||
("content/blog/_index.md", "en", 0, false, SortBy::Date),
|
||||
("content/wiki/_index.md", "en", 0, false, SortBy::Weight),
|
||||
("content/wiki/_index.fr.md", "fr", 0, false, SortBy::Weight),
|
||||
("content/wiki/recipes/_index.md", "en", 1, true, SortBy::Weight),
|
||||
("content/wiki/recipes/_index.fr.md", "fr", 1, true, SortBy::Weight),
|
||||
("content/wiki/programming/_index.md", "en", 10, true, SortBy::Weight),
|
||||
("content/wiki/programming/_index.fr.md", "fr", 10, true, SortBy::Weight),
|
||||
("content/novels/_index.md", "en", 10, false, SortBy::Title),
|
||||
("content/novels/_index.fr.md", "fr", 10, false, SortBy::Title),
|
||||
];
|
||||
for (p, l, w, t, s) in sections.clone() {
|
||||
library.insert_section(create_section(p, l, w, t, s));
|
||||
}
|
||||
|
||||
let pages = vec![
|
||||
("content/about.md", "en", PageSort::None),
|
||||
("content/about.fr.md", "en", PageSort::None),
|
||||
("content/blog/rust.md", "en", PageSort::Date("2022-01-01")),
|
||||
("content/blog/python.md", "en", PageSort::Date("2022-03-03")),
|
||||
("content/blog/docker.md", "en", PageSort::Date("2022-02-02")),
|
||||
("content/wiki/recipes/chocolate-cake.md", "en", PageSort::Weight(100)),
|
||||
("content/wiki/recipes/chocolate-cake.fr.md", "fr", PageSort::Weight(100)),
|
||||
("content/wiki/recipes/rendang.md", "en", PageSort::Weight(5)),
|
||||
("content/wiki/recipes/rendang.fr.md", "fr", PageSort::Weight(5)),
|
||||
("content/wiki/programming/rust.md", "en", PageSort::Weight(1)),
|
||||
("content/wiki/programming/rust.fr.md", "fr", PageSort::Weight(1)),
|
||||
("content/wiki/programming/zola.md", "en", PageSort::Weight(10)),
|
||||
("content/wiki/programming/python.md", "en", PageSort::None),
|
||||
("content/novels/the-colour-of-magic.md", "en", PageSort::Title("The Colour of Magic")),
|
||||
(
|
||||
"content/novels/the-colour-of-magic.fr.md",
|
||||
"en",
|
||||
PageSort::Title("La Huitième Couleur"),
|
||||
),
|
||||
("content/novels/reaper.md", "en", PageSort::Title("Reaper")),
|
||||
("content/novels/reaper.fr.md", "fr", PageSort::Title("Reaper (fr)")),
|
||||
("content/random/hello.md", "en", PageSort::None),
|
||||
];
|
||||
for (p, l, s) in pages.clone() {
|
||||
library.insert_page(create_page(p, l, s));
|
||||
}
|
||||
library.populate_sections(&config, Path::new("content"));
|
||||
assert_eq!(library.sections.len(), sections.len());
|
||||
assert_eq!(library.pages.len(), pages.len());
|
||||
let blog_section = &library.sections[&PathBuf::from("content/blog/_index.md")];
|
||||
assert_eq!(blog_section.pages.len(), 3);
|
||||
// sorted by date in desc order
|
||||
assert_eq!(
|
||||
blog_section.pages,
|
||||
vec![
|
||||
PathBuf::from("content/blog/python.md"),
|
||||
PathBuf::from("content/blog/docker.md"),
|
||||
PathBuf::from("content/blog/rust.md")
|
||||
]
|
||||
);
|
||||
assert_eq!(blog_section.ignored_pages.len(), 0);
|
||||
assert!(&library.pages[&PathBuf::from("content/blog/python.md")].lower.is_none());
|
||||
assert_eq!(
|
||||
&library.pages[&PathBuf::from("content/blog/python.md")].higher,
|
||||
&Some(PathBuf::from("content/blog/docker.md"))
|
||||
);
|
||||
assert_eq!(
|
||||
library.pages[&PathBuf::from("content/blog/python.md")].meta.template,
|
||||
Some("new_page.html".to_owned())
|
||||
);
|
||||
|
||||
let wiki = &library.sections[&PathBuf::from("content/wiki/_index.md")];
|
||||
assert_eq!(wiki.pages.len(), 4);
|
||||
// sorted by weight, in asc order
|
||||
assert_eq!(
|
||||
wiki.pages,
|
||||
vec![
|
||||
PathBuf::from("content/wiki/programming/rust.md"),
|
||||
PathBuf::from("content/wiki/recipes/rendang.md"),
|
||||
PathBuf::from("content/wiki/programming/zola.md"),
|
||||
PathBuf::from("content/wiki/recipes/chocolate-cake.md"),
|
||||
]
|
||||
);
|
||||
assert_eq!(wiki.ignored_pages.len(), 1);
|
||||
assert_eq!(wiki.ignored_pages, vec![PathBuf::from("content/wiki/programming/python.md")]);
|
||||
assert_eq!(
|
||||
&library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].lower,
|
||||
&Some(PathBuf::from("content/wiki/programming/rust.md"))
|
||||
);
|
||||
assert_eq!(
|
||||
&library.pages[&PathBuf::from("content/wiki/recipes/rendang.md")].higher,
|
||||
&Some(PathBuf::from("content/wiki/programming/zola.md"))
|
||||
);
|
||||
assert_eq!(
|
||||
wiki.subsections,
|
||||
vec![
|
||||
PathBuf::from("content/wiki/recipes/_index.md"),
|
||||
PathBuf::from("content/wiki/programming/_index.md")
|
||||
]
|
||||
);
|
||||
assert_eq!(wiki.ancestors, vec!["_index.md".to_owned()]);
|
||||
assert_eq!(
|
||||
library.sections[&PathBuf::from("content/wiki/recipes/_index.md")].ancestors,
|
||||
vec!["_index.md".to_owned(), "wiki/_index.md".to_owned()]
|
||||
);
|
||||
|
||||
// also works for other languages
|
||||
let french_wiki = &library.sections[&PathBuf::from("content/wiki/_index.fr.md")];
|
||||
assert_eq!(french_wiki.pages.len(), 3);
|
||||
// sorted by weight, in asc order
|
||||
assert_eq!(
|
||||
french_wiki.pages,
|
||||
vec![
|
||||
PathBuf::from("content/wiki/programming/rust.fr.md"),
|
||||
PathBuf::from("content/wiki/recipes/rendang.fr.md"),
|
||||
PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md"),
|
||||
]
|
||||
);
|
||||
assert_eq!(french_wiki.ignored_pages.len(), 0);
|
||||
assert!(&library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")]
|
||||
.higher
|
||||
.is_none());
|
||||
assert_eq!(
|
||||
&library.pages[&PathBuf::from("content/wiki/recipes/chocolate-cake.fr.md")].lower,
|
||||
&Some(PathBuf::from("content/wiki/recipes/rendang.fr.md"))
|
||||
);
|
||||
|
||||
let orphans = library.get_all_orphan_pages();
|
||||
assert_eq!(orphans.len(), 1);
|
||||
assert_eq!(orphans[0].file.path, PathBuf::from("content/random/hello.md"));
|
||||
|
||||
// And translations should be filled in
|
||||
let translations = library.find_translations(&PathBuf::from("content/novels/reaper"));
|
||||
assert_eq!(translations.len(), 2);
|
||||
assert!(translations[0].title.is_some());
|
||||
assert!(translations[1].title.is_some());
|
||||
}
|
||||
|
||||
macro_rules! taxonomies {
|
||||
($config:expr, [$($page:expr),+]) => {{
|
||||
let mut library = Library::new(&$config);
|
||||
$(
|
||||
library.insert_page($page);
|
||||
)+
|
||||
library.find_taxonomies(&$config)
|
||||
}};
|
||||
}
|
||||
|
||||
fn create_page_w_taxa(path: &str, lang: &str, taxo: Vec<(&str, Vec<&str>)>) -> Page {
|
||||
let mut page = Page::default();
|
||||
page.file.path = PathBuf::from(path);
|
||||
page.lang = lang.to_owned();
|
||||
let mut taxonomies = HashMap::new();
|
||||
for (name, terms) in taxo {
|
||||
taxonomies.insert(name.to_owned(), terms.iter().map(|t| t.to_string()).collect());
|
||||
}
|
||||
page.meta.taxonomies = taxonomies;
|
||||
page
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_make_taxonomies() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.languages.get_mut("en").unwrap().taxonomies = vec![
|
||||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
config.slugify_taxonomies();
|
||||
|
||||
let page1 = create_page_w_taxa(
|
||||
"a.md",
|
||||
"en",
|
||||
vec![("tags", vec!["rust", "db"]), ("categories", vec!["tutorials"])],
|
||||
);
|
||||
let page2 = create_page_w_taxa(
|
||||
"b.md",
|
||||
"en",
|
||||
vec![("tags", vec!["rust", "js"]), ("categories", vec!["others"])],
|
||||
);
|
||||
let page3 = create_page_w_taxa(
|
||||
"c.md",
|
||||
"en",
|
||||
vec![("tags", vec!["js"]), ("authors", vec!["Vincent Prouillet"])],
|
||||
);
|
||||
let taxonomies = taxonomies!(config, [page1, page2, page3]);
|
||||
|
||||
let tags = taxonomies.iter().find(|t| t.kind.name == "tags").unwrap();
|
||||
assert_eq!(tags.len(), 3);
|
||||
assert_eq!(tags.items[0].name, "db");
|
||||
assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
|
||||
assert_eq!(tags.items[0].pages.len(), 1);
|
||||
assert_eq!(tags.items[1].name, "js");
|
||||
assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/");
|
||||
assert_eq!(tags.items[1].pages.len(), 2);
|
||||
assert_eq!(tags.items[2].name, "rust");
|
||||
assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/");
|
||||
assert_eq!(tags.items[2].pages.len(), 2);
|
||||
|
||||
let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap();
|
||||
assert_eq!(categories.items.len(), 2);
|
||||
assert_eq!(categories.items[0].name, "others");
|
||||
assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/others/");
|
||||
assert_eq!(categories.items[0].pages.len(), 1);
|
||||
|
||||
let authors = taxonomies.iter().find(|t| t.kind.name == "authors").unwrap();
|
||||
assert_eq!(authors.items.len(), 1);
|
||||
assert_eq!(authors.items[0].permalink, "http://a-website.com/authors/vincent-prouillet/");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_make_multiple_language_taxonomies() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.slugify.taxonomies = SlugifyStrategy::Safe;
|
||||
config.languages.insert("fr".to_owned(), LanguageOptions::default());
|
||||
config.languages.get_mut("en").unwrap().taxonomies = vec![
|
||||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
config.languages.get_mut("fr").unwrap().taxonomies = vec![
|
||||
TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
config.slugify_taxonomies();
|
||||
|
||||
let page1 = create_page_w_taxa("a.md", "en", vec![("categories", vec!["rust"])]);
|
||||
let page2 = create_page_w_taxa("b.md", "en", vec![("tags", vec!["rust"])]);
|
||||
let page3 = create_page_w_taxa("c.md", "fr", vec![("catégories", vec!["rust"])]);
|
||||
let taxonomies = taxonomies!(config, [page1, page2, page3]);
|
||||
|
||||
let categories = taxonomies.iter().find(|t| t.kind.name == "categories").unwrap();
|
||||
assert_eq!(categories.len(), 1);
|
||||
assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/rust/");
|
||||
let tags = taxonomies.iter().find(|t| t.kind.name == "tags" && t.lang == "en").unwrap();
|
||||
assert_eq!(tags.len(), 1);
|
||||
assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/rust/");
|
||||
let fr_categories = taxonomies.iter().find(|t| t.kind.name == "catégories").unwrap();
|
||||
assert_eq!(fr_categories.len(), 1);
|
||||
assert_eq!(fr_categories.items[0].permalink, "http://a-website.com/fr/catégories/rust/");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.languages.get_mut("en").unwrap().taxonomies = vec![
|
||||
TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
config.slugify_taxonomies();
|
||||
let page1 = create_page_w_taxa("a.md", "en", vec![("test-taxonomy", vec!["Ecole"])]);
|
||||
let page2 = create_page_w_taxa("b.md", "en", vec![("test taxonomy", vec!["École"])]);
|
||||
let page3 = create_page_w_taxa("c.md", "en", vec![("test-taxonomy ", vec!["ecole"])]);
|
||||
let page4 = create_page_w_taxa("d.md", "en", vec![("Test-Taxonomy ", vec!["école"])]);
|
||||
let taxonomies = taxonomies!(config, [page1, page2, page3, page4]);
|
||||
assert_eq!(taxonomies.len(), 1);
|
||||
|
||||
let tax = &taxonomies[0];
|
||||
// under the default slugify strategy all of the provided terms should be the same
|
||||
assert_eq!(tax.items.len(), 1);
|
||||
let term1 = &tax.items[0];
|
||||
assert_eq!(term1.name, "Ecole");
|
||||
assert_eq!(term1.slug, "ecole");
|
||||
assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/");
|
||||
assert_eq!(term1.pages.len(), 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.slugify.taxonomies = SlugifyStrategy::Safe;
|
||||
config.languages.get_mut("en").unwrap().taxonomies =
|
||||
vec![TaxonomyConfig { name: "test".to_string(), ..TaxonomyConfig::default() }];
|
||||
config.slugify_taxonomies();
|
||||
let page1 = create_page_w_taxa("a.md", "en", vec![("test", vec!["Ecole"])]);
|
||||
let page2 = create_page_w_taxa("b.md", "en", vec![("test", vec!["École"])]);
|
||||
let page3 = create_page_w_taxa("c.md", "en", vec![("test", vec!["ecole"])]);
|
||||
let page4 = create_page_w_taxa("d.md", "en", vec![("test", vec!["école"])]);
|
||||
let taxonomies = taxonomies!(config, [page1, page2, page3, page4]);
|
||||
assert_eq!(taxonomies.len(), 1);
|
||||
let tax = &taxonomies[0];
|
||||
// under the safe slugify strategy all terms should be distinct
|
||||
assert_eq!(tax.items.len(), 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_fill_backlinks() {
|
||||
let mut page1 = create_page("page1.md", "en", PageSort::None);
|
||||
page1.internal_links.push(("page2.md".to_owned(), None));
|
||||
let mut page2 = create_page("page2.md", "en", PageSort::None);
|
||||
page2.internal_links.push(("_index.md".to_owned(), None));
|
||||
let mut section1 = create_section("_index.md", "en", 10, false, SortBy::None);
|
||||
section1.internal_links.push(("page1.md".to_owned(), None));
|
||||
section1.internal_links.push(("page2.md".to_owned(), None));
|
||||
let mut library = Library::default();
|
||||
library.insert_page(page1);
|
||||
library.insert_page(page2);
|
||||
library.insert_section(section1);
|
||||
library.fill_backlinks();
|
||||
|
||||
assert_eq!(library.backlinks.len(), 3);
|
||||
assert_eq!(library.backlinks["page1.md"], set! {PathBuf::from("_index.md")});
|
||||
assert_eq!(
|
||||
library.backlinks["page2.md"],
|
||||
set! {PathBuf::from("page1.md"), PathBuf::from("_index.md")}
|
||||
);
|
||||
assert_eq!(library.backlinks["_index.md"], set! {PathBuf::from("page2.md")});
|
||||
}
|
||||
}
|
@ -2,35 +2,36 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use lazy_static::lazy_static;
use regex::Regex;
use slotmap::DefaultKey;
use tera::{Context as TeraContext, Tera};
use libs::once_cell::sync::Lazy;
use libs::regex::Regex;
use libs::tera::{Context as TeraContext, Tera};

use crate::library::Library;
use config::Config;
use errors::{Error, Result};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use utils::site::get_reading_analytics;
use errors::{Context, Result};
use markdown::{render_content, RenderContext};
use utils::slugs::slugify_paths;
use utils::table_of_contents::Heading;
use utils::templates::{render_template, ShortcodeDefinition};
use utils::types::InsertAnchor;

use crate::content::file_info::FileInfo;
use crate::content::ser::SerializingPage;
use crate::content::{find_related_assets, has_anchor};
use crate::file_info::FileInfo;
use crate::front_matter::{split_page_content, PageFrontMatter};
use crate::library::Library;
use crate::ser::SerializingPage;
use crate::utils::get_reading_analytics;
use crate::utils::{find_related_assets, has_anchor};
use utils::anchors::has_anchor_id;
use utils::fs::read_file;
use utils::links::has_anchor_id;

lazy_static! {
    // Based on https://regex101.com/r/H2n38Z/1/tests
    // A regex parsing RFC3339 date followed by {_,-}, some characters and ended by .md
    static ref RFC3339_DATE: Regex = Regex::new(
// Based on https://regex101.com/r/H2n38Z/1/tests
// A regex parsing RFC3339 date followed by {_,-}, some characters and ended by .md
static RFC3339_DATE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(
        r"^(?P<datetime>(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])(T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(Z|(\+|-)([01][0-9]|2[0-3]):([0-5][0-9])))?)\s?(_|-)(?P<slug>.+$)"
    ).unwrap();
    ).unwrap()
});

    static ref FOOTNOTES_RE: Regex = Regex::new(r"<sup\s*.*?>\s*.*?</sup>").unwrap();
}
static FOOTNOTES_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"<sup\s*.*?>\s*.*?</sup>").unwrap());

#[derive(Clone, Debug, Default, PartialEq)]
pub struct Page {
@ -38,8 +39,8 @@ pub struct Page {
    pub file: FileInfo,
    /// The front matter meta-data
    pub meta: PageFrontMatter,
    /// The list of parent sections
    pub ancestors: Vec<DefaultKey>,
    /// The list of parent sections' relative paths
    pub ancestors: Vec<String>,
    /// The actual content of the page, in markdown
    pub raw_content: String,
    /// All the non-md files we found next to the .md file
@ -61,22 +62,10 @@ pub struct Page {
    /// When <!-- more --> is found in the text, will take the content up to that part
    /// as summary
    pub summary: Option<String>,
    /// The earlier updated page, for pages sorted by updated date
    pub earlier_updated: Option<DefaultKey>,
    /// The later updated page, for pages sorted by updated date
    pub later_updated: Option<DefaultKey>,
    /// The earlier page, for pages sorted by date
    pub earlier: Option<DefaultKey>,
    /// The later page, for pages sorted by date
    pub later: Option<DefaultKey>,
    /// The previous page, for pages sorted by title
    pub title_prev: Option<DefaultKey>,
    /// The next page, for pages sorted by title
    pub title_next: Option<DefaultKey>,
    /// The lighter page, for pages sorted by weight
    pub lighter: Option<DefaultKey>,
    /// The heavier page, for pages sorted by weight
    pub heavier: Option<DefaultKey>,
    /// The previous page when sorting: earlier/earlier_updated/lighter/prev
    pub lower: Option<PathBuf>,
    /// The next page when sorting: later/later_updated/heavier/next
    pub higher: Option<PathBuf>,
    /// Toc made from the headings of the markdown file
    pub toc: Vec<Heading>,
    /// How many words in the raw content
@ -88,7 +77,7 @@ pub struct Page {
    /// Corresponds to the lang in the {slug}.{lang}.md file scheme
    pub lang: String,
    /// Contains all the translated versions of that page
    pub translations: Vec<DefaultKey>,
    pub translations: Vec<PathBuf>,
    /// The list of all internal links (as path to markdown file), with optional anchor fragments.
    /// We can only check the anchor after all pages have been built and their ToC compiled.
    /// The page itself should exist otherwise it would have errored before getting there.
@ -116,7 +105,8 @@ impl Page {
        let (meta, content) = split_page_content(file_path, content)?;
        let mut page = Page::new(file_path, meta, base_path);

        page.lang = page.file.find_language(config)?;
        page.lang =
            page.file.find_language(&config.default_language, &config.other_languages_codes())?;

        page.raw_content = content.to_string();
        let (word_count, reading_time) = get_reading_analytics(&page.raw_content);
@ -201,6 +191,8 @@ impl Page {
        Ok(page)
    }

    pub fn find_language(&mut self) {}

    /// Read and parse a .md file into a Page struct
    pub fn from_file<P: AsRef<Path>>(path: P, config: &Config, base_path: &Path) -> Result<Page> {
        let path = path.as_ref();
@ -238,11 +230,10 @@ impl Page {
        );
        context.set_shortcode_definitions(shortcode_definitions);
        context.set_current_page_path(&self.file.relative);
        context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));
        context.tera_context.insert("page", &SerializingPage::new(self, None, false));

        let res = render_content(&self.raw_content, &context).map_err(|e| {
            Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
        })?;
        let res = render_content(&self.raw_content, &context)
            .with_context(|| format!("Failed to render content of {}", self.file.path.display()))?;

        self.summary = res
            .summary_len
@ -267,12 +258,11 @@ impl Page {
        context.insert("config", &config.serialize(&self.lang));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);
        context.insert("page", &self.to_serialized(library));
        context.insert("page", &self.serialize(library));
        context.insert("lang", &self.lang);

        render_template(tpl_name, tera, context, &config.theme).map_err(|e| {
            Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e)
        })
        render_template(tpl_name, tera, context, &config.theme)
            .with_context(|| format!("Failed to render page '{}'", self.file.path.display()))
    }

    /// Creates a vector of asset URLs.
@ -305,12 +295,12 @@ impl Page {
        has_anchor_id(&self.content, id)
    }

    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
        SerializingPage::from_page(self, library)
    pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
        SerializingPage::new(self, Some(library), true)
    }

    pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
        SerializingPage::from_page_basic(self, Some(library))
    pub fn serialize_without_siblings<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
        SerializingPage::new(self, Some(library), false)
    }
}

@ -321,14 +311,14 @@ mod tests {
    use std::io::Write;
    use std::path::{Path, PathBuf};

    use globset::{Glob, GlobSetBuilder};
    use libs::globset::{Glob, GlobSetBuilder};
    use libs::tera::Tera;
    use tempfile::tempdir;
    use tera::Tera;

    use super::Page;
    use crate::Page;
    use config::{Config, LanguageOptions};
    use front_matter::InsertAnchor;
    use utils::slugs::SlugifyStrategy;
    use utils::types::InsertAnchor;

    #[test]
    fn can_parse_a_valid_page() {
@ -573,11 +563,7 @@ And here's another. [^2]
        File::create(nested_path.join("graph.jpg")).unwrap();
        File::create(nested_path.join("fail.png")).unwrap();

        let res = Page::from_file(
            nested_path.join("index.md").as_path(),
            &Config::default(),
            &path.to_path_buf(),
        );
        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path);
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -601,11 +587,7 @@ And here's another. [^2]
        File::create(nested_path.join("graph.jpg")).unwrap();
        File::create(nested_path.join("fail.png")).unwrap();

        let res = Page::from_file(
            nested_path.join("index.md").as_path(),
            &Config::default(),
            &path.to_path_buf(),
        );
        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path);
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -629,11 +611,7 @@ And here's another. [^2]
        File::create(nested_path.join("graph.jpg")).unwrap();
        File::create(nested_path.join("fail.png")).unwrap();

        let res = Page::from_file(
            nested_path.join("index.md").as_path(),
            &Config::default(),
            &path.to_path_buf(),
        );
        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path);
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -659,11 +637,7 @@ And here's another. [^2]
        File::create(nested_path.join("graph.jpg")).unwrap();
        File::create(nested_path.join("fail.png")).unwrap();

        let res = Page::from_file(
            nested_path.join("index.md").as_path(),
            &Config::default(),
            &path.to_path_buf(),
        );
        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default(), path);
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -692,8 +666,7 @@ And here's another. [^2]
        let mut config = Config::default();
        config.ignored_content_globset = Some(gsb.build().unwrap());

        let res =
            Page::from_file(nested_path.join("index.md").as_path(), &config, &path.to_path_buf());
        let res = Page::from_file(nested_path.join("index.md").as_path(), &config, path);

        assert!(res.is_ok());
        let page = res.unwrap();
@ -1,23 +1,22 @@
use std::collections::HashMap;

use serde_derive::Serialize;
use slotmap::DefaultKey;
use tera::{to_value, Context, Tera, Value};

use config::Config;
use errors::{Error, Result};
use serde::Serialize;
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;

use errors::{Context as ErrorContext, Result};
use libs::tera::{to_value, Context, Tera, Value};
use utils::templates::{check_template_fallbacks, render_template};

use crate::content::{Section, SerializingPage, SerializingSection};
use crate::library::Library;
use crate::taxonomies::{Taxonomy, TaxonomyItem};

use std::borrow::Cow;
use crate::ser::{SectionSerMode, SerializingPage, SerializingSection};
use crate::taxonomies::{Taxonomy, TaxonomyTerm};
use crate::Section;

#[derive(Clone, Debug, PartialEq)]
enum PaginationRoot<'a> {
    Section(&'a Section),
    Taxonomy(&'a Taxonomy, &'a TaxonomyItem),
    Taxonomy(&'a Taxonomy, &'a TaxonomyTerm),
}

/// A list of all the pages in the paginator with their index and links
@ -26,11 +25,11 @@ pub struct Pager<'a> {
    /// The page number in the paginator (1-indexed)
    pub index: usize,
    /// Permalink to that page
    permalink: String,
    pub permalink: String,
    /// Path to that page
    path: String,
    pub path: String,
    /// All pages for the pager
    pages: Vec<SerializingPage<'a>>,
    pub pages: Vec<SerializingPage<'a>>,
}

impl<'a> Pager<'a> {
@ -47,7 +46,7 @@ impl<'a> Pager<'a> {
#[derive(Clone, Debug, PartialEq)]
pub struct Paginator<'a> {
    /// All pages in the section/taxonomy
    all_pages: Cow<'a, [DefaultKey]>,
    all_pages: Cow<'a, [PathBuf]>,
    /// Pages split in chunks of `paginate_by`
    pub pagers: Vec<Pager<'a>>,
    /// How many content pages on a paginated page at max
@ -70,12 +69,11 @@ impl<'a> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> {
        let paginate_by = section.meta.paginate_by.unwrap();
        let paginate_reversed = section.meta.paginate_reversed;
        let mut paginator = Paginator {
            all_pages: Cow::from(&section.pages[..]),
            pagers: Vec::with_capacity(section.pages.len() / paginate_by),
            paginate_by,
            paginate_reversed,
            paginate_reversed: section.meta.paginate_reversed,
            root: PaginationRoot::Section(section),
            permalink: section.permalink.clone(),
            path: section.path.clone(),
@ -92,7 +90,7 @@ impl<'a> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_taxonomy(
        taxonomy: &'a Taxonomy,
        item: &'a TaxonomyItem,
        item: &'a TaxonomyTerm,
        library: &'a Library,
        tera: &Tera,
        theme: &Option<String>,
@ -100,10 +98,8 @@ impl<'a> Paginator<'a> {
        let paginate_by = taxonomy.kind.paginate_by.unwrap();
        // Check for taxon-specific template, or use generic as fallback.
        let specific_template = format!("{}/single.html", taxonomy.kind.name);
        let template = match check_template_fallbacks(&specific_template, tera, theme) {
            Some(template) => template,
            None => "taxonomy_single.html",
        };
        let template = check_template_fallbacks(&specific_template, tera, theme)
            .unwrap_or("taxonomy_single.html");
        let mut paginator = Paginator {
            all_pages: Cow::Borrowed(&item.pages),
            pagers: Vec::with_capacity(item.pages.len() / paginate_by),
@ -136,9 +132,9 @@ impl<'a> Paginator<'a> {
            self.all_pages.to_mut().reverse();
        }

        for key in self.all_pages.to_mut().iter_mut() {
            let page = library.get_page_by_key(*key);
            current_page.push(page.to_serialized_basic(library));
        for p in &*self.all_pages {
            let page = &library.pages[p];
            current_page.push(SerializingPage::new(page, Some(library), false));

            if current_page.len() == self.paginate_by {
                pages.push(current_page);
@ -233,8 +229,10 @@ impl<'a> Paginator<'a> {
        let mut context = Context::new();
        match self.root {
            PaginationRoot::Section(s) => {
                context
                    .insert("section", &SerializingSection::from_section_basic(s, Some(library)));
                context.insert(
                    "section",
                    &SerializingSection::new(s, SectionSerMode::MetadataOnly(library)),
                );
                context.insert("lang", &s.lang);
                context.insert("config", &config.serialize(&s.lang));
            }
@ -250,24 +248,18 @@ impl<'a> Paginator<'a> {
        context.insert("paginator", &self.build_paginator_context(pager));

        render_template(&self.template, tera, context, &config.theme)
            .map_err(|e| Error::chain(format!("Failed to render pager {}", pager.index), e))
            .with_context(|| format!("Failed to render pager {}", pager.index))
    }
}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;
    use tera::{to_value, Tera};

    use crate::content::{Page, Section};
    use crate::library::Library;
    use crate::taxonomies::{Taxonomy, TaxonomyItem};
    use config::Taxonomy as TaxonomyConfig;

    use super::Paginator;
    use super::*;
    use crate::{Page, SectionFrontMatter};
    use config::TaxonomyConfig;

    fn create_section(is_index: bool, paginate_reversed: bool) -> Section {
        let f = front_matter::SectionFrontMatter {
        let f = SectionFrontMatter {
            paginate_by: Some(2),
            paginate_path: "page".to_string(),
            paginate_reversed,
@ -278,9 +270,11 @@ mod tests {
        if !is_index {
            s.path = "/posts/".to_string();
            s.permalink = "https://vincent.is/posts/".to_string();
            s.file.path = PathBuf::from("posts/_index.md");
            s.file.components = vec!["posts".to_string()];
        } else {
            s.path = "/".into();
            s.file.path = PathBuf::from("_index.md");
            s.permalink = "https://vincent.is/".to_string();
        }
        s
@ -291,89 +285,64 @@ mod tests {
        num_pages: usize,
        paginate_reversed: bool,
    ) -> (Section, Library) {
        let mut library = Library::new(num_pages, 0, false);
        let mut library = Library::default();
        for i in 1..=num_pages {
            let mut page = Page::default();
            page.meta.title = Some(i.to_string());
            page.file.path = PathBuf::from(&format!("{}.md", i));
            library.insert_page(page);
        }

        let mut draft = Page::default();
        draft.meta.draft = true;
        library.insert_page(draft);
        let mut section = create_section(is_index, paginate_reversed);
        section.pages = library.pages().keys().collect();
        section.pages = library.pages.keys().cloned().collect();
        section.pages.sort();
        library.insert_section(section.clone());

        (section, library)
    }

    #[test]
    fn test_can_create_paginator() {
    fn test_can_create_section_paginator() {
        let (section, library) = create_library(false, 3, false);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
        assert_eq!(paginator.pagers[0].pages.len(), 2);
        assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "1");
        assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2");
        assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/");
        assert_eq!(paginator.pagers[0].path, "/posts/");

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].pages.len(), 1);
        assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "3");
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/");
        assert_eq!(paginator.pagers[1].path, "/posts/page/2/");
    }

    #[test]
    fn test_can_create_reversed_paginator() {
        // 6 pages, 5 normal and 1 draft
        let (section, library) = create_library(false, 5, true);
    fn test_can_create_reversed_section_paginator() {
        let (section, library) = create_library(false, 3, true);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 3);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
        assert_eq!(paginator.pagers[0].pages.len(), 2);
        assert_eq!(paginator.pagers[0].pages[0].title.clone().unwrap(), "3");
        assert_eq!(paginator.pagers[0].pages[1].title.clone().unwrap(), "2");
        assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/posts/");
        assert_eq!(paginator.pagers[0].path, "/posts/");
        assert_eq!(
            vec!["".to_string(), "5".to_string()],
            paginator.pagers[0]
                .pages
                .iter()
                .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string())
                .collect::<Vec<String>>()
        );

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].pages.len(), 1);
        assert_eq!(paginator.pagers[1].pages[0].title.clone().unwrap(), "1");
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/page/2/");
        assert_eq!(paginator.pagers[1].path, "/posts/page/2/");
        assert_eq!(
            vec!["4".to_string(), "3".to_string()],
            paginator.pagers[1]
                .pages
                .iter()
                .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string())
                .collect::<Vec<String>>()
        );

        assert_eq!(paginator.pagers[2].index, 3);
        assert_eq!(paginator.pagers[2].pages.len(), 2);
        assert_eq!(paginator.pagers[2].permalink, "https://vincent.is/posts/page/3/");
        assert_eq!(paginator.pagers[2].path, "/posts/page/3/");
        assert_eq!(
            vec!["2".to_string(), "1".to_string()],
            paginator.pagers[2]
                .pages
                .iter()
                .map(|p| p.get_title().as_ref().unwrap_or(&"".to_string()).to_string())
                .collect::<Vec<String>>()
        );
    }

    #[test]
    fn test_can_create_paginator_for_index() {
    fn can_create_paginator_for_index() {
        let (section, library) = create_library(true, 3, false);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 2);
@ -384,7 +353,7 @@ mod tests {
        assert_eq!(paginator.pagers[0].path, "/");

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].pages.len(), 1);
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/page/2/");
        assert_eq!(paginator.pagers[1].path, "/page/2/");
    }
@ -402,6 +371,7 @@ mod tests {
        assert_eq!(context["previous"], to_value::<Option<()>>(None).unwrap());
        assert_eq!(context["next"], to_value("https://vincent.is/posts/page/2/").unwrap());
        assert_eq!(context["current_index"], to_value(1).unwrap());
        assert_eq!(context["pages"].as_array().unwrap().len(), 2);

        let context = paginator.build_paginator_context(&paginator.pagers[1]);
        assert_eq!(context["paginate_by"], to_value(2).unwrap());
@ -410,48 +380,12 @@ mod tests {
        assert_eq!(context["next"], to_value::<Option<()>>(None).unwrap());
        assert_eq!(context["previous"], to_value("https://vincent.is/posts/").unwrap());
        assert_eq!(context["current_index"], to_value(2).unwrap());
        assert_eq!(context["total_pages"], to_value(4).unwrap());
        assert_eq!(context["total_pages"], to_value(3).unwrap());
        assert_eq!(context["pages"].as_array().unwrap().len(), 1);
    }

    #[test]
    fn test_can_create_paginator_for_taxonomy() {
        let (_, library) = create_library(false, 3, false);
        let tera = Tera::default();
        let taxonomy_def = TaxonomyConfig {
            name: "tags".to_string(),
            paginate_by: Some(2),
            ..TaxonomyConfig::default()
        };
        let taxonomy_item = TaxonomyItem {
            name: "Something".to_string(),
            slug: "something".to_string(),
            path: "/tags/something".to_string(),
            permalink: "https://vincent.is/tags/something/".to_string(),
            pages: library.pages().keys().collect(),
        };
        let taxonomy = Taxonomy {
            kind: taxonomy_def,
            lang: "en".to_owned(),
            slug: "tags".to_string(),
            permalink: "/tags/".to_string(),
            items: vec![taxonomy_item.clone()],
        };
        let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library, &tera, &None);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
        assert_eq!(paginator.pagers[0].pages.len(), 2);
        assert_eq!(paginator.pagers[0].permalink, "https://vincent.is/tags/something/");
        assert_eq!(paginator.pagers[0].path, "/tags/something/");

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/tags/something/page/2/");
        assert_eq!(paginator.pagers[1].path, "/tags/something/page/2/");
    }

    #[test]
    fn test_can_create_paginator_for_slugified_taxonomy() {
        let (_, library) = create_library(false, 3, false);
        let tera = Tera::default();
        let taxonomy_def = TaxonomyConfig {
@ -459,18 +393,19 @@ mod tests {
            paginate_by: Some(2),
            ..TaxonomyConfig::default()
        };
        let taxonomy_item = TaxonomyItem {
        let taxonomy_item = TaxonomyTerm {
            name: "Something".to_string(),
            slug: "something".to_string(),
            path: "/some-tags/something/".to_string(),
            permalink: "https://vincent.is/some-tags/something/".to_string(),
            pages: library.pages().keys().collect(),
            pages: library.pages.keys().cloned().collect(),
        };
        let taxonomy = Taxonomy {
            kind: taxonomy_def,
            lang: "en".to_owned(),
            slug: "some-tags".to_string(),
            permalink: "/some-tags/".to_string(),
            path: "/some-tags/".to_string(),
            permalink: "https://vincent.is/some-tags/".to_string(),
            items: vec![taxonomy_item.clone()],
        };
        let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library, &tera, &None);
@ -482,7 +417,7 @@ mod tests {
        assert_eq!(paginator.pagers[0].path, "/some-tags/something/");

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].pages.len(), 1);
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/some-tags/something/page/2/");
        assert_eq!(paginator.pagers[1].path, "/some-tags/something/page/2/");
    }
@ -501,7 +436,7 @@ mod tests {
        assert_eq!(paginator.pagers[0].path, "/posts/");

        assert_eq!(paginator.pagers[1].index, 2);
        assert_eq!(paginator.pagers[1].pages.len(), 2);
        assert_eq!(paginator.pagers[1].pages.len(), 1);
        assert_eq!(paginator.pagers[1].permalink, "https://vincent.is/posts/2/");
        assert_eq!(paginator.pagers[1].path, "/posts/2/");

@ -1,21 +1,20 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use slotmap::DefaultKey;
use tera::{Context as TeraContext, Tera};
use libs::tera::{Context as TeraContext, Tera};

use config::Config;
use errors::{Error, Result};
use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use errors::{Context, Result};
use markdown::{render_content, RenderContext};
use utils::fs::read_file;
use utils::site::get_reading_analytics;
use utils::table_of_contents::Heading;
use utils::templates::{render_template, ShortcodeDefinition};

use crate::content::file_info::FileInfo;
use crate::content::ser::SerializingSection;
use crate::content::{find_related_assets, has_anchor};
use crate::file_info::FileInfo;
use crate::front_matter::{split_section_content, SectionFrontMatter};
use crate::library::Library;
use crate::ser::{SectionSerMode, SerializingSection};
use crate::utils::{find_related_assets, get_reading_analytics, has_anchor};

// Default is used to create a default index section if there is no _index.md in the root content directory
#[derive(Clone, Debug, Default, PartialEq)]
@ -39,13 +38,13 @@ pub struct Section {
    /// All the non-md files we found next to the .md file as strings
    pub serialized_assets: Vec<String>,
    /// All direct pages of that section
    pub pages: Vec<DefaultKey>,
    pub pages: Vec<PathBuf>,
    /// All pages that cannot be sorted in this section
    pub ignored_pages: Vec<DefaultKey>,
    /// The list of parent sections
    pub ancestors: Vec<DefaultKey>,
    pub ignored_pages: Vec<PathBuf>,
    /// The list of parent sections' relative paths
    pub ancestors: Vec<String>,
    /// All direct subsections
    pub subsections: Vec<DefaultKey>,
    pub subsections: Vec<PathBuf>,
    /// Toc made from the headings of the markdown file
    pub toc: Vec<Heading>,
    /// How many words in the raw content
@ -83,7 +82,9 @@ impl Section {
    ) -> Result<Section> {
        let (meta, content) = split_section_content(file_path, content)?;
        let mut section = Section::new(file_path, meta, base_path);
        section.lang = section.file.find_language(config)?;
        section.lang = section
            .file
            .find_language(&config.default_language, &config.other_languages_codes())?;
        section.raw_content = content.to_string();
        let (word_count, reading_time) = get_reading_analytics(&section.raw_content);
        section.word_count = Some(word_count);
@ -159,11 +160,12 @@ impl Section {
        );
        context.set_shortcode_definitions(shortcode_definitions);
        context.set_current_page_path(&self.file.relative);
        context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None));
        context
            .tera_context
            .insert("section", &SerializingSection::new(self, SectionSerMode::ForMarkdown));

        let res = render_content(&self.raw_content, &context).map_err(|e| {
            Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
        })?;
        let res = render_content(&self.raw_content, &context)
            .with_context(|| format!("Failed to render content of {}", self.file.path.display()))?;
        self.content = res.body;
        self.toc = res.toc;
        self.external_links = res.external_links;
@ -180,12 +182,11 @@ impl Section {
        context.insert("config", &config.serialize(&self.lang));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);
        context.insert("section", &self.to_serialized(library));
        context.insert("section", &SerializingSection::new(self, SectionSerMode::Full(library)));
        context.insert("lang", &self.lang);

        render_template(tpl_name, tera, context, &config.theme).map_err(|e| {
            Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e)
        })
        render_template(tpl_name, tera, context, &config.theme)
            .with_context(|| format!("Failed to render section '{}'", self.file.path.display()))
    }

    /// Is this the index section?
@ -207,14 +208,6 @@ impl Section {
        has_anchor(&self.toc, anchor)
    }

    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> {
        SerializingSection::from_section(self, library)
    }

    pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> {
        SerializingSection::from_section_basic(self, Some(library))
    }

    pub fn paginate_by(&self) -> Option<usize> {
        match self.meta.paginate_by {
            None => None,
@ -224,15 +217,23 @@ impl Section {
            },
        }
    }

    pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> {
        SerializingSection::new(self, SectionSerMode::Full(library))
    }

    pub fn serialize_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> {
        SerializingSection::new(self, SectionSerMode::MetadataOnly(library))
    }
}

#[cfg(test)]
mod tests {
    use std::fs::{create_dir, File};
    use std::fs::{create_dir, create_dir_all, File};
    use std::io::Write;
    use std::path::{Path, PathBuf};

    use globset::{Glob, GlobSetBuilder};
    use libs::globset::{Glob, GlobSetBuilder};
    use tempfile::tempdir;

    use super::Section;
@ -268,23 +269,27 @@ mod tests {
    fn section_with_ignored_assets_filters_out_correct_files() {
        let tmp_dir = tempdir().expect("create temp dir");
        let path = tmp_dir.path();
        create_dir(&path.join("content")).expect("create content temp dir");
        create_dir(&path.join("content").join("posts")).expect("create posts temp dir");
        let nested_path = path.join("content").join("posts").join("with-assets");
        create_dir(&nested_path).expect("create nested temp dir");
        let mut f = File::create(nested_path.join("_index.md")).unwrap();
        let article_path = path.join("content/posts/with-assets");
        create_dir_all(path.join(&article_path).join("foo/bar/baz/quux"))
            .expect("create nested temp dir");
        create_dir_all(path.join(&article_path).join("foo/baz/quux"))
            .expect("create nested temp dir");
        let mut f = File::create(article_path.join("_index.md")).unwrap();
        f.write_all(b"+++\nslug=\"hey\"\n+++\n").unwrap();
        File::create(nested_path.join("example.js")).unwrap();
        File::create(nested_path.join("graph.jpg")).unwrap();
        File::create(nested_path.join("fail.png")).unwrap();
        File::create(article_path.join("example.js")).unwrap();
        File::create(article_path.join("graph.jpg")).unwrap();
        File::create(article_path.join("fail.png")).unwrap();
        File::create(article_path.join("foo/bar/baz/quux/quo.xlsx")).unwrap();
        File::create(article_path.join("foo/bar/baz/quux/quo.docx")).unwrap();

        let mut gsb = GlobSetBuilder::new();
        gsb.add(Glob::new("*.{js,png}").unwrap());
        gsb.add(Glob::new("foo/**/baz").unwrap());
        let mut config = Config::default();
        config.ignored_content_globset = Some(gsb.build().unwrap());

        let res =
            Section::from_file(nested_path.join("_index.md").as_path(), &config, &PathBuf::new());
            Section::from_file(article_path.join("_index.md").as_path(), &config, &PathBuf::new());

        assert!(res.is_ok());
        let page = res.unwrap();
221
components/content/src/ser.rs
Normal file
@ -0,0 +1,221 @@
use std::collections::HashMap;
use std::path::Path;

use serde::Serialize;

use crate::library::Library;
use crate::{Page, Section};
use libs::tera::{Map, Value};
use utils::table_of_contents::Heading;

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct BackLink<'a> {
    pub permalink: &'a str,
    pub title: &'a Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct TranslatedContent<'a> {
    pub lang: &'a str,
    pub permalink: &'a str,
    pub title: &'a Option<String>,
    /// The path to the markdown file
    pub path: &'a Path,
}

fn find_backlinks<'a>(relative_path: &str, library: &'a Library) -> Vec<BackLink<'a>> {
    let mut backlinks = Vec::new();
    if let Some(b) = library.backlinks.get(relative_path) {
        for backlink in b {
            if let Some(p) = library.pages.get(backlink) {
                backlinks.push(BackLink { permalink: &p.permalink, title: &p.meta.title });
            }
            if let Some(s) = library.sections.get(backlink) {
                backlinks.push(BackLink { permalink: &s.permalink, title: &s.meta.title });
            }
        }
        backlinks.sort_by_key(|b| b.permalink);
    }
    backlinks
}
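// A short illustration of the lookup above (hypothetical data; this assumes
// `library.backlinks` maps a relative path such as "blog/post.md" to the keys
// of every page and section whose content links to it):
//
//     let links = find_backlinks("blog/post.md", &library);
//     // -> one BackLink { permalink, title } per linking page/section
//
// Sorting by permalink keeps the order deterministic between builds.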

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingPage<'a> {
    relative_path: &'a str,
    content: &'a str,
    permalink: &'a str,
    slug: &'a str,
    ancestors: &'a [String],
    pub(crate) title: &'a Option<String>,
    description: &'a Option<String>,
    updated: &'a Option<String>,
    date: &'a Option<String>,
    year: Option<i32>,
    month: Option<u8>,
    day: Option<u8>,
    taxonomies: &'a HashMap<String, Vec<String>>,
    extra: &'a Map<String, Value>,
    path: &'a str,
    components: &'a [String],
    summary: &'a Option<String>,
    toc: &'a [Heading],
    word_count: Option<usize>,
    reading_time: Option<usize>,
    assets: &'a [String],
    draft: bool,
    lang: &'a str,
    lower: Option<Box<SerializingPage<'a>>>,
    higher: Option<Box<SerializingPage<'a>>>,
    translations: Vec<TranslatedContent<'a>>,
    backlinks: Vec<BackLink<'a>>,
}

impl<'a> SerializingPage<'a> {
    pub fn new(page: &'a Page, library: Option<&'a Library>, include_siblings: bool) -> Self {
        let mut year = None;
        let mut month = None;
        let mut day = None;
        if let Some(d) = page.meta.datetime_tuple {
            year = Some(d.0);
            month = Some(d.1);
            day = Some(d.2);
        }
        let mut lower = None;
        let mut higher = None;
        let mut translations = vec![];
        let mut backlinks = vec![];

        if let Some(lib) = library {
            translations = lib.find_translations(&page.file.canonical);

            if include_siblings {
                lower = page
                    .lower
                    .as_ref()
                    .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false)));
                higher = page
                    .higher
                    .as_ref()
                    .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false)));
            }

            backlinks = find_backlinks(&page.file.relative, lib);
        }

        Self {
            relative_path: &page.file.relative,
            ancestors: &page.ancestors,
            content: &page.content,
            permalink: &page.permalink,
            slug: &page.slug,
            title: &page.meta.title,
            description: &page.meta.description,
            extra: &page.meta.extra,
            updated: &page.meta.updated,
            date: &page.meta.date,
            year,
            month,
            day,
            taxonomies: &page.meta.taxonomies,
            path: &page.path,
            components: &page.components,
            summary: &page.summary,
            toc: &page.toc,
            word_count: page.word_count,
            reading_time: page.reading_time,
            assets: &page.serialized_assets,
            draft: page.meta.draft,
            lang: &page.lang,
            lower,
            higher,
            translations,
            backlinks,
        }
    }
}
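// Note that both siblings above are built with `include_siblings: false`, so
// `lower`/`higher` are only ever one level deep. A sketch (hypothetical page):
//
//     let ser = SerializingPage::new(&page, Some(&library), true);
//     // ser.lower / ser.higher may be Some, but they never nest further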

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingSection<'a> {
    relative_path: &'a str,
    content: &'a str,
    permalink: &'a str,
    draft: bool,
    ancestors: &'a [String],
    title: &'a Option<String>,
    description: &'a Option<String>,
    extra: &'a Map<String, Value>,
    path: &'a str,
    components: &'a [String],
    toc: &'a [Heading],
    word_count: Option<usize>,
    reading_time: Option<usize>,
    lang: &'a str,
    assets: &'a [String],
    pages: Vec<SerializingPage<'a>>,
    subsections: Vec<&'a str>,
    translations: Vec<TranslatedContent<'a>>,
    backlinks: Vec<BackLink<'a>>,
}

#[derive(Debug)]
pub enum SectionSerMode<'a> {
    /// Just itself, no pages or subsections
    /// TODO: I believe we can get rid of it?
    ForMarkdown,
    /// Fetches subsections/ancestors/translations but not the pages
    MetadataOnly(&'a Library),
    /// Fetches everything
    Full(&'a Library),
}
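// How the three modes are picked elsewhere in this diff (illustrative, not
// exhaustive):
//
//     // rendering the section's own markdown, before the library exists:
//     SerializingSection::new(&section, SectionSerMode::ForMarkdown);
//     // metadata-only serialization, as in Section::serialize_basic above:
//     SerializingSection::new(&section, SectionSerMode::MetadataOnly(&library));
//     // the full `section` value handed to section templates:
//     SerializingSection::new(&section, SectionSerMode::Full(&library));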

impl<'a> SerializingSection<'a> {
    pub fn new(section: &'a Section, mode: SectionSerMode<'a>) -> Self {
        let mut pages = Vec::with_capacity(section.pages.len());
        let mut subsections = Vec::with_capacity(section.subsections.len());
        let mut translations = Vec::new();
        let mut backlinks = Vec::new();

        match mode {
            SectionSerMode::ForMarkdown => {}
            SectionSerMode::MetadataOnly(lib) | SectionSerMode::Full(lib) => {
                translations = lib.find_translations(&section.file.canonical);
                subsections = section
                    .subsections
                    .iter()
                    .map(|p| lib.sections[p].file.relative.as_str())
                    .collect();

                // Fetching pages on top
                if let SectionSerMode::Full(_) = mode {
                    for p in &section.pages {
                        pages.push(SerializingPage::new(&lib.pages[p], Some(lib), true));
                    }
                }

                backlinks = find_backlinks(&section.file.relative, lib);
            }
        }

        Self {
            relative_path: &section.file.relative,
            ancestors: &section.ancestors,
            draft: section.meta.draft,
            content: &section.content,
            permalink: &section.permalink,
            title: &section.meta.title,
            description: &section.meta.description,
            extra: &section.meta.extra,
            path: &section.path,
            components: &section.components,
            toc: &section.toc,
            word_count: section.word_count,
            reading_time: section.reading_time,
            assets: &section.serialized_assets,
            lang: &section.lang,
            pages,
            subsections,
            translations,
            backlinks,
        }
    }
}
197
components/content/src/sorting.rs
Normal file
@ -0,0 +1,197 @@
use std::cmp::Ordering;
use std::path::PathBuf;

use crate::{Page, SortBy};
use libs::lexical_sort::natural_lexical_cmp;
use libs::rayon::prelude::*;

/// Sort by the field picked by the function.
/// The pages permalinks are used to break the ties
pub fn sort_pages(pages: &[&Page], sort_by: SortBy) -> (Vec<PathBuf>, Vec<PathBuf>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<&Page>, Vec<_>) =
        pages.par_iter().partition(|page| match sort_by {
            SortBy::Date => page.meta.datetime.is_some(),
            SortBy::UpdateDate => {
                page.meta.datetime.is_some() || page.meta.updated_datetime.is_some()
            }
            SortBy::Title | SortBy::TitleBytes => page.meta.title.is_some(),
            SortBy::Weight => page.meta.weight.is_some(),
            SortBy::None => unreachable!(),
        });

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = match sort_by {
            SortBy::Date => b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()),
            SortBy::UpdateDate => std::cmp::max(b.meta.datetime, b.meta.updated_datetime)
                .unwrap()
                .cmp(&std::cmp::max(a.meta.datetime, a.meta.updated_datetime).unwrap()),
            SortBy::Title => {
                natural_lexical_cmp(a.meta.title.as_ref().unwrap(), b.meta.title.as_ref().unwrap())
            }
            SortBy::TitleBytes => {
                a.meta.title.as_ref().unwrap().cmp(b.meta.title.as_ref().unwrap())
            }
            SortBy::Weight => a.meta.weight.unwrap().cmp(&b.meta.weight.unwrap()),
            SortBy::None => unreachable!(),
        };

        if ord == Ordering::Equal {
            a.permalink.cmp(&b.permalink)
        } else {
            ord
        }
    });

    (
        can_be_sorted.iter().map(|p| p.file.path.clone()).collect(),
        cannot_be_sorted.iter().map(|p: &&Page| p.file.path.clone()).collect(),
    )
}
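// Worked example mirroring the tests below (hypothetical pages): with
// SortBy::Date, pages dated 2018/2017/2019 come back as [2019, 2018, 2017] in
// the first Vec, while a page with no date lands in the second, "cannot be
// sorted" Vec. Equal keys fall back to the permalink comparison above, so the
// result is stable across builds.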

#[cfg(test)]
mod tests {
    use super::*;
    use crate::PageFrontMatter;

    fn create_page_with_date(date: &str, updated_date: Option<&str>) -> Page {
        let mut front_matter = PageFrontMatter {
            date: Some(date.to_string()),
            updated: updated_date.map(|c| c.to_string()),
            ..Default::default()
        };
        front_matter.date_to_datetime();
        Page::new(format!("content/hello-{}.md", date), front_matter, &PathBuf::new())
    }

    fn create_page_with_title(title: &str) -> Page {
        let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() };
        Page::new(format!("content/hello-{}.md", title), front_matter, &PathBuf::new())
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() };
        Page::new(format!("content/hello-{}.md", weight), front_matter, &PathBuf::new())
    }

    #[test]
    fn can_sort_by_dates() {
        let page1 = create_page_with_date("2018-01-01", None);
        let page2 = create_page_with_date("2017-01-01", None);
        let page3 = create_page_with_date("2019-01-01", None);
        let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::Date);
        assert_eq!(pages[0], page3.file.path);
        assert_eq!(pages[1], page1.file.path);
        assert_eq!(pages[2], page2.file.path);
        assert_eq!(ignored_pages.len(), 0);
    }

    #[test]
    fn can_sort_by_updated_dates() {
        let page1 = create_page_with_date("2018-01-01", None);
        let page2 = create_page_with_date("2017-01-01", Some("2022-02-01"));
        let page3 = create_page_with_date("2019-01-01", None);
        let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::UpdateDate);
        assert_eq!(pages[0], page2.file.path);
        assert_eq!(pages[1], page3.file.path);
        assert_eq!(pages[2], page1.file.path);
        assert_eq!(ignored_pages.len(), 0);
    }

    #[test]
    fn can_sort_by_weight() {
        let page1 = create_page_with_weight(2);
        let page2 = create_page_with_weight(3);
        let page3 = create_page_with_weight(1);
        let (pages, ignored_pages) = sort_pages(&[&page1, &page2, &page3], SortBy::Weight);
        // Should be sorted by weight
        assert_eq!(pages[0], page3.file.path);
        assert_eq!(pages[1], page1.file.path);
        assert_eq!(pages[2], page2.file.path);
        assert_eq!(ignored_pages.len(), 0);
    }

    #[test]
    fn can_sort_by_title() {
        let titles = vec![
            "åland",
            "bagel",
            "track_3",
            "microkernel",
            "Österrike",
            "métro",
            "BART",
            "Underground",
            "track_13",
            "μ-kernel",
            "meter",
            "track_1",
        ];
        let pages: Vec<Page> = titles.iter().map(|title| create_page_with_title(title)).collect();
        let (sorted_pages, ignored_pages) =
            sort_pages(&pages.iter().collect::<Vec<_>>(), SortBy::Title);
        // Should be sorted by title in lexical order
        let sorted_titles: Vec<_> = sorted_pages
            .iter()
            .map(|key| {
                pages.iter().find(|p| &p.file.path == key).unwrap().meta.title.as_ref().unwrap()
            })
            .collect();
        assert_eq!(ignored_pages.len(), 0);
        assert_eq!(
            sorted_titles,
            vec![
                "åland",
                "bagel",
                "BART",
                "μ-kernel",
                "meter",
                "métro",
                "microkernel",
                "Österrike",
                "track_1",
                "track_3",
                "track_13",
                "Underground"
            ]
        );

        let (sorted_pages, ignored_pages) =
            sort_pages(&pages.iter().collect::<Vec<_>>(), SortBy::TitleBytes);
        // Should be sorted by title in bytes order
        let sorted_titles: Vec<_> = sorted_pages
            .iter()
            .map(|key| {
                pages.iter().find(|p| &p.file.path == key).unwrap().meta.title.as_ref().unwrap()
            })
            .collect();
        assert_eq!(ignored_pages.len(), 0);
        assert_eq!(
            sorted_titles,
            vec![
                "BART",
                "Underground",
                "bagel",
                "meter",
                "microkernel",
                "métro",
                "track_1",
                "track_13",
                "track_3",
                // Non-ASCII letters are not merged with their ASCII equivalents (o/a/m here)
"Österrike",
|
||||
"åland",
|
||||
"μ-kernel"
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_find_ignored_pages() {
|
||||
let page1 = create_page_with_date("2018-01-01", None);
|
||||
let page2 = create_page_with_weight(1);
|
||||
let (pages, ignored_pages) = sort_pages(&[&page1, &page2], SortBy::Date);
|
||||
assert_eq!(pages[0], page1.file.path);
|
||||
assert_eq!(ignored_pages.len(), 1);
|
||||
assert_eq!(ignored_pages[0], page2.file.path);
|
||||
}
|
||||
}
|
244
components/content/src/taxonomies.rs
Normal file
@ -0,0 +1,244 @@
use std::cmp::Ordering;
use std::path::PathBuf;

use serde::Serialize;

use config::{Config, TaxonomyConfig};
use errors::{Context as ErrorContext, Result};
use libs::ahash::AHashMap;
use libs::tera::{Context, Tera};
use utils::slugs::slugify_paths;
use utils::templates::{check_template_fallbacks, render_template};

use crate::library::Library;
use crate::ser::SerializingPage;
use crate::{Page, SortBy};

use crate::sorting::sort_pages;

#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct SerializedTaxonomyTerm<'a> {
    name: &'a str,
    slug: &'a str,
    path: &'a str,
    permalink: &'a str,
    pages: Vec<SerializingPage<'a>>,
}

impl<'a> SerializedTaxonomyTerm<'a> {
    pub fn from_item(item: &'a TaxonomyTerm, library: &'a Library) -> Self {
        let mut pages = vec![];

        for p in &item.pages {
            pages.push(SerializingPage::new(&library.pages[p], Some(library), false));
        }

        SerializedTaxonomyTerm {
            name: &item.name,
            slug: &item.slug,
            path: &item.path,
            permalink: &item.permalink,
            pages,
        }
    }
}

/// A taxonomy with all its pages
#[derive(Debug, Clone)]
pub struct TaxonomyTerm {
    pub name: String,
    pub slug: String,
    pub path: String,
    pub permalink: String,
    pub pages: Vec<PathBuf>,
}

impl TaxonomyTerm {
    pub fn new(
        name: &str,
        lang: &str,
        taxo_slug: &str,
        taxo_pages: &[&Page],
        config: &Config,
    ) -> Self {
        let item_slug = slugify_paths(name, config.slugify.taxonomies);
        let path = if lang != config.default_language {
            format!("/{}/{}/{}/", lang, taxo_slug, item_slug)
        } else {
            format!("/{}/{}/", taxo_slug, item_slug)
        };
        let permalink = config.make_permalink(&path);

        // Taxonomies are almost always used for blogs, so we filter by dates,
        // and it's not like we can sort things across sections by anything
        // other than dates
        let (mut pages, ignored_pages) = sort_pages(taxo_pages, SortBy::Date);
        // We still append pages without dates at the end
        pages.extend(ignored_pages);
        TaxonomyTerm { name: name.to_string(), permalink, path, slug: item_slug, pages }
    }
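    // Worked example of the path logic above (hypothetical config with
    // default_language = "en" and default slugification):
    //
    //     TaxonomyTerm::new("Rust Lang", "fr", "tags", &pages, &config);
    //     // -> path "/fr/tags/rust-lang/"
    //     TaxonomyTerm::new("Rust Lang", "en", "tags", &pages, &config);
    //     // -> path "/tags/rust-lang/" (the default language has no prefix)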

    pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyTerm<'a> {
        SerializedTaxonomyTerm::from_item(self, library)
    }

    pub fn merge(&mut self, other: Self) {
        self.pages.extend(other.pages);
    }
}

impl PartialEq for TaxonomyTerm {
    fn eq(&self, other: &Self) -> bool {
        self.permalink == other.permalink
    }
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct SerializedTaxonomy<'a> {
    kind: &'a TaxonomyConfig,
    lang: &'a str,
    permalink: &'a str,
    items: Vec<SerializedTaxonomyTerm<'a>>,
}

impl<'a> SerializedTaxonomy<'a> {
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
        let items: Vec<SerializedTaxonomyTerm> =
            taxonomy.items.iter().map(|i| SerializedTaxonomyTerm::from_item(i, library)).collect();
        SerializedTaxonomy {
            kind: &taxonomy.kind,
            lang: &taxonomy.lang,
            permalink: &taxonomy.permalink,
            items,
        }
    }
}
/// All different taxonomies we have and their content
#[derive(Debug, Clone, PartialEq)]
pub struct Taxonomy {
    pub kind: TaxonomyConfig,
    pub lang: String,
    pub slug: String,
    pub path: String,
    pub permalink: String,
    // this vec is sorted by the count of items
    pub items: Vec<TaxonomyTerm>,
}

impl Taxonomy {
    pub(crate) fn new(tax_found: TaxonomyFound, config: &Config) -> Self {
        let mut sorted_items = vec![];
        let slug = tax_found.slug;
        for (name, pages) in tax_found.terms {
            sorted_items.push(TaxonomyTerm::new(name, tax_found.lang, &slug, &pages, config));
        }

        sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) {
            Ordering::Less => Ordering::Less,
            Ordering::Greater => Ordering::Greater,
            Ordering::Equal => a.name.cmp(&b.name),
        });
        sorted_items.dedup_by(|a, b| {
            // custom Eq impl checks for equal permalinks
            // here we make sure all pages from a get copied to b
            // before dedup gets rid of it
            if a == b {
                b.merge(a.to_owned());
                true
            } else {
                false
            }
        });
        let path = if tax_found.lang != config.default_language {
            format!("/{}/{}/", tax_found.lang, slug)
        } else {
            format!("/{}/", slug)
        };
        let permalink = config.make_permalink(&path);

        Taxonomy {
            slug,
            lang: tax_found.lang.to_owned(),
            kind: tax_found.config.clone(),
            path,
            permalink,
            items: sorted_items,
        }
    }
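    // The sort + dedup_by pair above is what merges colliding terms:
    // TaxonomyTerm's PartialEq compares permalinks, so two names that slugify
    // to the same slug (e.g. "Hello" and "hello" both become "hello") end up
    // adjacent after the sort and are folded into a single term that keeps the
    // pages of both.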

    pub fn render_term(
        &self,
        item: &TaxonomyTerm,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.insert("config", &config.serialize(&self.lang));
        context.insert("lang", &self.lang);
        context.insert("term", &SerializedTaxonomyTerm::from_item(item, library));
        context.insert("taxonomy", &self.kind);
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);

        // Check for taxon-specific template, or use generic as fallback.
        let specific_template = format!("{}/single.html", self.kind.name);
        let template = check_template_fallbacks(&specific_template, tera, &config.theme)
            .unwrap_or("taxonomy_single.html");

        render_template(template, tera, context, &config.theme)
            .with_context(|| format!("Failed to render single term {} page.", self.kind.name))
    }

    pub fn render_all_terms(
        &self,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.insert("config", &config.serialize(&self.lang));
        let terms: Vec<SerializedTaxonomyTerm> =
            self.items.iter().map(|i| SerializedTaxonomyTerm::from_item(i, library)).collect();
        context.insert("terms", &terms);
        context.insert("lang", &self.lang);
        context.insert("taxonomy", &self.kind);
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);

        // Check for taxon-specific template, or use generic as fallback.
        let specific_template = format!("{}/list.html", self.kind.name);
        let template = check_template_fallbacks(&specific_template, tera, &config.theme)
            .unwrap_or("taxonomy_list.html");

        render_template(template, tera, context, &config.theme)
            .with_context(|| format!("Failed to render a list of {} page.", self.kind.name))
    }

    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
        SerializedTaxonomy::from_taxonomy(self, library)
    }

    pub fn len(&self) -> usize {
        self.items.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

/// Only used while building the taxonomies
#[derive(Debug, PartialEq)]
pub(crate) struct TaxonomyFound<'a> {
    pub lang: &'a str,
    pub slug: String,
    pub config: &'a TaxonomyConfig,
    pub terms: AHashMap<&'a str, Vec<&'a Page>>,
}

impl<'a> TaxonomyFound<'a> {
    pub fn new(slug: String, lang: &'a str, config: &'a TaxonomyConfig) -> Self {
        Self { slug, lang, config, terms: AHashMap::new() }
    }
}
20
components/content/src/types.rs
Normal file
@ -0,0 +1,20 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SortBy {
    /// Most recent to oldest
    Date,
    /// Most recent to oldest
    #[serde(rename = "update_date")]
    UpdateDate,
    /// Sort by title lexicographically
    Title,
    /// Sort by titles using the bytes directly
    #[serde(rename = "title_bytes")]
    TitleBytes,
    /// Lower weight comes first
    Weight,
    /// No sorting
    None,
}
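// These serde names are what front matter uses; e.g. in a section's _index.md
// (illustrative):
//
//     sort_by = "title_bytes"   # -> SortBy::TitleBytes
//     sort_by = "update_date"   # -> SortBy::UpdateDate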
@ -1,19 +1,10 @@
mod file_info;
mod page;
mod section;
mod ser;

use std::path::{Path, PathBuf};

use walkdir::WalkDir;

pub use self::file_info::FileInfo;
pub use self::page::Page;
pub use self::section::Section;
pub use self::ser::{SerializingPage, SerializingSection};
use libs::unicode_segmentation::UnicodeSegmentation;
use libs::walkdir::WalkDir;

use config::Config;
use rendering::Heading;
use utils::table_of_contents::Heading;

pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {
    for heading in headings {
@ -36,7 +27,7 @@ pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {
pub fn find_related_assets(path: &Path, config: &Config, recursive: bool) -> Vec<PathBuf> {
    let mut assets = vec![];

    let mut builder = WalkDir::new(path);
    let mut builder = WalkDir::new(path).follow_links(true);
    if !recursive {
        builder = builder.max_depth(1);
    }
@ -54,18 +45,21 @@ pub fn find_related_assets(path: &Path, config: &Config, recursive: bool) -> Vec
    }

    if let Some(ref globset) = config.ignored_content_globset {
        assets = assets
            .into_iter()
            .filter(|p| match p.strip_prefix(path) {
                Err(_) => false,
                Ok(file) => !globset.is_match(file),
            })
            .collect();
        assets = assets.into_iter().filter(|p| !globset.is_match(p)).collect();
    }

    assets
}
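// Note on the ignored_content change above: the globset is now matched against
// the asset path as walked (instead of the path with the section prefix
// stripped), which appears to be what lets directory patterns such as
// "foo/**/baz" (exercised in the section tests earlier in this diff) ignore
// whole nested trees.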

/// Get word count and estimated reading time
pub fn get_reading_analytics(content: &str) -> (usize, usize) {
    let word_count: usize = content.unicode_words().count();

    // https://help.medium.com/hc/en-us/articles/214991667-Read-time
    // 275 seems a bit too high though
    (word_count, ((word_count + 199) / 200))
}
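// `(word_count + 199) / 200` is integer ceiling division at 200 words per
// minute: 1..=200 words -> 1 minute, 201 -> 2 minutes, and so on. The tests
// below (2 words -> 1 minute, 2000 words -> 10 minutes) pin this down.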

#[cfg(test)]
mod tests {
    use super::*;
@ -89,13 +83,10 @@ mod tests {

        let assets = find_related_assets(path, &Config::default(), true);
        assert_eq!(assets.len(), 5);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or("".as_ref()) != "md").count(), 5);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or_default() != "md").count(), 5);

        for asset in vec!["example.js", "graph.jpg", "fail.png", "subdir/example.js", "extensionless"] {
            assert!(assets
                .iter()
                .find(|p| p.strip_prefix(path).unwrap() == Path::new(asset))
                .is_some())
        for asset in ["example.js", "graph.jpg", "fail.png", "subdir/example.js", "extensionless"] {
            assert!(assets.iter().any(|p| p.strip_prefix(path).unwrap() == Path::new(asset)))
        }
    }

@ -113,13 +104,10 @@ mod tests {
        File::create(path.join("subdir").join("example.js")).unwrap();
        let assets = find_related_assets(path, &Config::default(), false);
        assert_eq!(assets.len(), 4);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or("".as_ref()) != "md").count(), 4);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap_or_default() != "md").count(), 4);

        for asset in vec!["example.js", "graph.jpg", "fail.png", "extensionless"] {
            assert!(assets
                .iter()
                .find(|p| p.strip_prefix(path).unwrap() == Path::new(asset))
                .is_some())
        for asset in ["example.js", "graph.jpg", "fail.png", "extensionless"] {
            assert!(assets.iter().any(|p| p.strip_prefix(path).unwrap() == Path::new(asset)))
        }
    }
    #[test]
@ -192,4 +180,29 @@ mod tests {

        assert!(has_anchor(&input, "1-2"));
    }

    #[test]
    fn reading_analytics_empty_text() {
        let (word_count, reading_time) = get_reading_analytics("  ");
        assert_eq!(word_count, 0);
        assert_eq!(reading_time, 0);
    }

    #[test]
    fn reading_analytics_short_text() {
        let (word_count, reading_time) = get_reading_analytics("Hello World");
        assert_eq!(word_count, 2);
        assert_eq!(reading_time, 1);
    }

    #[test]
    fn reading_analytics_long_text() {
        let mut content = String::new();
        for _ in 0..1000 {
            content.push_str(" Hello world");
        }
        let (word_count, reading_time) = get_reading_analytics(&content);
        assert_eq!(word_count, 2000);
        assert_eq!(reading_time, 10);
    }
}
@ -1,11 +1,7 @@
[package]
name = "errors"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
tera = "1"
toml = "0.5"
image = "0.23"
syntect = "4"
anyhow = "1.0.56"
@ -1,119 +1 @@
use std::convert::Into;
use std::error::Error as StdError;
use std::fmt;

#[derive(Debug)]
pub enum ErrorKind {
    Msg(String),
    Tera(tera::Error),
    Io(::std::io::Error),
    Toml(toml::de::Error),
    Image(image::ImageError),
    Syntect(syntect::LoadingError),
}

/// The Error type
#[derive(Debug)]
pub struct Error {
    /// Kind of error
    pub kind: ErrorKind,
    pub source: Option<Box<dyn StdError + Send + Sync>>,
}

impl StdError for Error {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        match self.source {
            Some(ref err) => Some(&**err),
            None => match self.kind {
                ErrorKind::Tera(ref err) => err.source(),
                _ => None,
            },
        }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.kind {
            ErrorKind::Msg(ref message) => write!(f, "{}", message),
            ErrorKind::Tera(ref e) => write!(f, "{}", e),
            ErrorKind::Io(ref e) => write!(f, "{}", e),
            ErrorKind::Toml(ref e) => write!(f, "{}", e),
            ErrorKind::Image(ref e) => write!(f, "{}", e),
            ErrorKind::Syntect(ref e) => write!(f, "{}", e),
        }
    }
}

impl Error {
    /// Creates generic error
    pub fn msg(value: impl ToString) -> Self {
        Self { kind: ErrorKind::Msg(value.to_string()), source: None }
    }

    /// Creates generic error with a cause
    pub fn chain(value: impl ToString, source: impl Into<Box<dyn StdError + Send + Sync>>) -> Self {
        Self { kind: ErrorKind::Msg(value.to_string()), source: Some(source.into()) }
    }

    /// Create an error from a list of path collisions, formatting the output
    pub fn from_collisions(collisions: Vec<(String, Vec<String>)>) -> Self {
        let mut msg = String::from("Found path collisions:\n");

        for (path, filepaths) in collisions {
            let row = format!("- `{}` from files {:?}\n", path, filepaths);
            msg.push_str(&row);
        }

        Self { kind: ErrorKind::Msg(msg), source: None }
    }
}

impl From<&str> for Error {
    fn from(e: &str) -> Self {
        Self::msg(e)
    }
}
impl From<String> for Error {
    fn from(e: String) -> Self {
        Self::msg(e)
    }
}
impl From<toml::de::Error> for Error {
    fn from(e: toml::de::Error) -> Self {
        Self { kind: ErrorKind::Toml(e), source: None }
    }
}
impl From<syntect::LoadingError> for Error {
    fn from(e: syntect::LoadingError) -> Self {
        Self { kind: ErrorKind::Syntect(e), source: None }
    }
}
impl From<tera::Error> for Error {
    fn from(e: tera::Error) -> Self {
        Self { kind: ErrorKind::Tera(e), source: None }
    }
}
impl From<::std::io::Error> for Error {
    fn from(e: ::std::io::Error) -> Self {
        Self { kind: ErrorKind::Io(e), source: None }
    }
}
impl From<image::ImageError> for Error {
    fn from(e: image::ImageError) -> Self {
        Self { kind: ErrorKind::Image(e), source: None }
    }
}
/// Convenient wrapper around std::Result.
pub type Result<T> = ::std::result::Result<T, Error>;

// So we can use bail! in all other crates
#[macro_export]
macro_rules! bail {
    ($e:expr) => {
        return Err($e.into());
    };
    ($fmt:expr, $($arg:tt)+) => {
        return Err(format!($fmt, $($arg)+).into());
    };
}
pub use anyhow::*;
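// All of the hand-rolled Error/ErrorKind machinery above is deleted: the crate
// is now a thin re-export of anyhow, so downstream crates import e.g.
// `use errors::{anyhow, Context, Error, Result};` (as the imageproc changes
// below do) while anyhow supplies the actual types and macros.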
@ -1,21 +0,0 @@
[package]
name = "front_matter"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"

[dependencies]
tera = "1"
chrono = "0.4"
serde = "1"
serde_derive = "1"
serde_yaml = "0.8"
toml = "0.5"
regex = "1"
lazy_static = "1"

errors = { path = "../errors" }
utils = { path = "../utils" }

[dev-dependencies]
test-case = "1"
@ -1,24 +1,16 @@
[package]
name = "imageproc"
version = "0.1.0"
authors = ["Vojtěch Král <vojtech@kral.hk>"]
edition = "2018"
edition = "2021"

[dependencies]
lazy_static = "1"
regex = "1.0"
tera = "1"
image = "0.23"
rayon = "1"
webp = "0.1.1"
serde = { version = "1", features = ["derive"] }
svg_metadata = "0.4.1"
kamadak-exif = "0.5.4"

errors = { path = "../errors" }
utils = { path = "../utils" }
config = { path = "../config" }
libs = { path = "../libs" }

[dev-dependencies]
# TODO: prune
serde_json = "1"
site = { path = "../site" }
tempfile = "3"
@ -1,6 +1,5 @@
use std::collections::hash_map::Entry as HEntry;
use std::collections::HashMap;
use std::error::Error as StdError;
use std::ffi::OsStr;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
@ -11,23 +10,23 @@ use image::error::ImageResult;
use image::io::Reader as ImgReader;
use image::{imageops::FilterType, EncodableLayout};
use image::{ImageFormat, ImageOutputFormat};
use lazy_static::lazy_static;
use libs::image::DynamicImage;
use libs::{image, once_cell, rayon, regex, svg_metadata, webp};
use once_cell::sync::Lazy;
use rayon::prelude::*;
use regex::Regex;
use serde::{Deserialize, Serialize};
use svg_metadata::Metadata as SvgMetadata;

use config::Config;
use errors::{Error, Result};
use errors::{anyhow, Context, Error, Result};
use utils::fs as ufs;

static RESIZED_SUBDIR: &str = "processed_images";
const DEFAULT_Q_JPG: u8 = 75;

lazy_static! {
    pub static ref RESIZED_FILENAME: Regex =
        Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap();
}
static RESIZED_FILENAME: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap());

/// Size and format read cheaply with `image`'s `Reader`.
#[derive(Debug)]
@ -84,22 +83,20 @@ impl ResizeArgs {
        match op {
            "fit_width" => {
                if width.is_none() {
                    return Err("op=\"fit_width\" requires a `width` argument".into());
                    return Err(anyhow!("op=\"fit_width\" requires a `width` argument"));
                }
            }
            "fit_height" => {
                if height.is_none() {
                    return Err("op=\"fit_height\" requires a `height` argument".into());
                    return Err(anyhow!("op=\"fit_height\" requires a `height` argument"));
                }
            }
            "scale" | "fit" | "fill" => {
                if width.is_none() || height.is_none() {
                    return Err(
                        format!("op={} requires a `width` and `height` argument", op).into()
                    );
                    return Err(anyhow!("op={} requires a `width` and `height` argument", op));
                }
            }
            _ => return Err(format!("Invalid image resize operation: {}", op).into()),
            _ => return Err(anyhow!("Invalid image resize operation: {}", op)),
        };

        Ok(match op {
@ -225,7 +222,7 @@ impl Format {
            "jpeg" | "jpg" => Ok(Jpeg(jpg_quality)),
            "png" => Ok(Png),
            "webp" => Ok(WebP(quality)),
            _ => Err(format!("Invalid image format: {}", format).into()),
            _ => Err(anyhow!("Invalid image format: {}", format)),
        }
    }

@ -323,6 +320,8 @@ impl ImageOp {
            None => img,
        };

        let img = fix_orientation(&img, &self.input_path).unwrap_or(img);

        let mut f = File::create(target_path)?;

        match self.format {
@ -333,7 +332,8 @@ impl ImageOp {
                img.write_to(&mut f, ImageOutputFormat::Jpeg(q))?;
            }
            Format::WebP(q) => {
                let encoder = webp::Encoder::from_image(&img);
                let encoder = webp::Encoder::from_image(&img)
                    .map_err(|_| anyhow!("Unable to load this kind of image with webp"))?;
                let memory = match q {
                    Some(q) => encoder.encode(q as f32),
                    None => encoder.encode_lossless(),
@ -346,6 +346,31 @@ impl ImageOp {
        }
    }

/// Apply image rotation based on EXIF data
/// Returns `None` if no transformation is needed
pub fn fix_orientation(img: &DynamicImage, path: &Path) -> Option<DynamicImage> {
    let file = std::fs::File::open(path).ok()?;
    let mut buf_reader = std::io::BufReader::new(&file);
    let exif_reader = exif::Reader::new();
    let exif = exif_reader.read_from_container(&mut buf_reader).ok()?;
    let orientation =
        exif.get_field(exif::Tag::Orientation, exif::In::PRIMARY)?.value.get_uint(0)?;
    match orientation {
        // Values are taken from the page 30 of
        // https://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf
        // For more details check http://sylvana.net/jpegcrop/exif_orientation.html
        1 => None,
        2 => Some(img.fliph()),
        3 => Some(img.rotate180()),
        4 => Some(img.flipv()),
        5 => Some(img.fliph().rotate270()),
        6 => Some(img.rotate90()),
        7 => Some(img.fliph().rotate90()),
        8 => Some(img.rotate270()),
        _ => None,
    }
}
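// In other words, EXIF Orientation values 2-8 each map to the flip/rotation
// that undoes the camera transform (e.g. a file tagged 6 is corrected by
// img.rotate90()), while value 1 ("normal") and any I/O or parse failure
// yield None so the caller keeps the image as-is via the unwrap_or above.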

#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EnqueueResponse {
    /// The final URL for that asset
@ -416,9 +441,8 @@ impl Processor {
        format: &str,
        quality: Option<u8>,
    ) -> Result<EnqueueResponse> {
        let meta = ImageMeta::read(&input_path).map_err(|e| {
            Error::chain(format!("Failed to read image: {}", input_path.display()), e)
        })?;
        let meta = ImageMeta::read(&input_path)
            .with_context(|| format!("Failed to read image: {}", input_path.display()))?;

        let args = ResizeArgs::from_args(op, width, height)?;
        let op = ResizeOp::new(args, meta.size);
@ -530,8 +554,9 @@ impl Processor {
            .map(|(hash, op)| {
                let target =
                    self.output_dir.join(Self::op_filename(*hash, op.collision_id, op.format));
                op.perform(&target).map_err(|e| {
                    Error::chain(format!("Failed to process image: {}", op.input_path.display()), e)

                op.perform(&target).with_context(|| {
                    format!("Failed to process image: {}", op.input_path.display())
                })
            })
            .collect::<Result<()>>()
@ -572,29 +597,28 @@ pub fn read_image_metadata<P: AsRef<Path>>(path: P) -> Result<ImageMetaResponse>
    let path = path.as_ref();
    let ext = path.extension().and_then(OsStr::to_str).unwrap_or("").to_lowercase();

    let error = |e: Box<dyn StdError + Send + Sync>| {
        Error::chain(format!("Failed to read image: {}", path.display()), e)
    };
    let err_context = || format!("Failed to read image: {}", path.display());

    match ext.as_str() {
        "svg" => {
            let img = SvgMetadata::parse_file(&path).map_err(|e| error(e.into()))?;
            let img = SvgMetadata::parse_file(&path).with_context(err_context)?;
            match (img.height(), img.width(), img.view_box()) {
                (Some(h), Some(w), _) => Ok((h, w)),
                (_, _, Some(view_box)) => Ok((view_box.height, view_box.width)),
                _ => Err("Invalid dimensions: SVG width/height and viewbox not set.".into()),
                _ => Err(anyhow!("Invalid dimensions: SVG width/height and viewbox not set.")),
            }
            .map(|(h, w)| ImageMetaResponse::new_svg(h as u32, w as u32))
            // This is not a typo: this returns the correct values for width and height.
            .map(|(h, w)| ImageMetaResponse::new_svg(w as u32, h as u32))
        }
        "webp" => {
            // Unfortunatelly we have to load the entire image here, unlike with the others :|
            let data = fs::read(path).map_err(|e| error(e.into()))?;
            // Unfortunately we have to load the entire image here, unlike with the others :|
            let data = fs::read(path).with_context(err_context)?;
            let decoder = webp::Decoder::new(&data[..]);
            decoder.decode().map(ImageMetaResponse::from).ok_or_else(|| {
                Error::msg(format!("Failed to decode WebP image: {}", path.display()))
            })
        }
        _ => ImageMeta::read(path).map(ImageMetaResponse::from).map_err(|e| error(e.into())),
        _ => ImageMeta::read(path).map(ImageMetaResponse::from).with_context(err_context),
    }
}

@ -1,11 +1,10 @@
use std::env;
use std::path::{PathBuf, MAIN_SEPARATOR as SLASH};

use lazy_static::lazy_static;

use config::Config;
use imageproc::{assert_processed_path_matches, ImageMetaResponse, Processor};
use utils::fs as ufs;
use imageproc::{assert_processed_path_matches, fix_orientation, ImageMetaResponse, Processor};
use libs::image::{self, DynamicImage, GenericImageView, Pixel};
use libs::once_cell::sync::Lazy;

static CONFIG: &str = r#"
title = "imageproc integration tests"
@ -17,18 +16,10 @@ build_search_index = false
highlight_code = false
"#;

lazy_static! {
    static ref TEST_IMGS: PathBuf =
        [env!("CARGO_MANIFEST_DIR"), "tests", "test_imgs"].iter().collect();
    static ref TMPDIR: PathBuf = {
        let tmpdir = option_env!("CARGO_TARGET_TMPDIR").map(PathBuf::from).unwrap_or_else(|| {
            env::current_exe().unwrap().parent().unwrap().parent().unwrap().join("tmpdir")
        });
        ufs::ensure_directory_exists(&tmpdir).unwrap();
        tmpdir
    };
    static ref PROCESSED_PREFIX: String = format!("static{0}processed_images{0}", SLASH);
}
static TEST_IMGS: Lazy<PathBuf> =
    Lazy::new(|| [env!("CARGO_MANIFEST_DIR"), "tests", "test_imgs"].iter().collect());
static PROCESSED_PREFIX: Lazy<String> =
    Lazy::new(|| format!("static{0}processed_images{0}", SLASH));

#[allow(clippy::too_many_arguments)]
fn image_op_test(
@ -44,9 +35,9 @@ fn image_op_test(
    orig_height: u32,
) {
    let source_path = TEST_IMGS.join(source_img);

    let tmpdir = tempfile::tempdir().unwrap().into_path();
    let config = Config::parse(CONFIG).unwrap();
    let mut proc = Processor::new(TMPDIR.clone(), &config);
    let mut proc = Processor::new(tmpdir.clone(), &config);

    let resp =
        proc.enqueue(source_img.into(), source_path, op, width, height, format, None).unwrap();
@ -60,7 +51,7 @@ fn image_op_test(
    proc.do_process().unwrap();

    let processed_path = PathBuf::from(&resp.static_path);
    let processed_size = imageproc::read_image_metadata(&TMPDIR.join(processed_path))
    let processed_size = imageproc::read_image_metadata(&tmpdir.join(processed_path))
        .map(|meta| (meta.width, meta.height))
        .unwrap();
    assert_eq!(processed_size, (expect_width, expect_height));
@ -163,4 +154,75 @@ fn read_image_metadata_webp() {
    );
}

#[test]
fn fix_orientation_test() {
    fn load_img_and_fix_orientation(img_name: &str) -> DynamicImage {
        let path = TEST_IMGS.join(img_name);
        let img = image::open(&path).unwrap();
        fix_orientation(&img, &path).unwrap_or(img)
    }

    let img = image::open(TEST_IMGS.join("exif_1.jpg")).unwrap();
    assert!(check_img(img));
    assert!(check_img(load_img_and_fix_orientation("exif_0.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_1.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_2.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_3.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_4.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_5.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_6.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_7.jpg")));
    assert!(check_img(load_img_and_fix_orientation("exif_8.jpg")));
}

#[test]
fn resize_image_applies_exif_rotation() {
    // No exif metadata
    assert!(resize_and_check("exif_0.jpg"));
    // 1: Horizontal (normal)
    assert!(resize_and_check("exif_1.jpg"));
    // 2: Mirror horizontal
    assert!(resize_and_check("exif_2.jpg"));
    // 3: Rotate 180
    assert!(resize_and_check("exif_3.jpg"));
    // 4: Mirror vertical
    assert!(resize_and_check("exif_4.jpg"));
    // 5: Mirror horizontal and rotate 270 CW
    assert!(resize_and_check("exif_5.jpg"));
    // 6: Rotate 90 CW
    assert!(resize_and_check("exif_6.jpg"));
    // 7: Mirror horizontal and rotate 90 CW
    assert!(resize_and_check("exif_7.jpg"));
    // 8: Rotate 270 CW
    assert!(resize_and_check("exif_8.jpg"));
}

fn resize_and_check(source_img: &str) -> bool {
    let source_path = TEST_IMGS.join(source_img);
    let tmpdir = tempfile::tempdir().unwrap().into_path();
    let config = Config::parse(CONFIG).unwrap();
    let mut proc = Processor::new(tmpdir.clone(), &config);

    let resp = proc
        .enqueue(source_img.into(), source_path, "scale", Some(16), Some(16), "jpg", None)
        .unwrap();

    proc.do_process().unwrap();
    let processed_path = PathBuf::from(&resp.static_path);
    let img = image::open(&tmpdir.join(processed_path)).unwrap();
    check_img(img)
}

// Checks that an image has the correct orientation
fn check_img(img: DynamicImage) -> bool {
    // top left is red
    img.get_pixel(0, 0)[0] > 250 // because of the jpeg compression some colors are a bit less than 255
    // top right is green
    && img.get_pixel(15, 0)[1] > 250
    // bottom left is blue
    && img.get_pixel(0, 15)[2] > 250
    // bottom right is white
    && img.get_pixel(15, 15).channels() == [255, 255, 255, 255]
}
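// Why this works: the 16x16 test fixtures have a distinct colour in each
// corner (red/green/blue/white), so any missed or wrong rotation/flip moves a
// coloured corner and at least one of the four probes above fails. The `> 250`
// slack absorbs JPEG compression noise, as the inline comment notes.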

// TODO: Test that hash remains the same if physical path is changed
BIN
components/imageproc/tests/test_imgs/exif_0.jpg
Normal file
Size: 661 B
BIN
components/imageproc/tests/test_imgs/exif_1.jpg
Normal file
Size: 761 B
BIN
components/imageproc/tests/test_imgs/exif_2.jpg
Normal file
Size: 762 B
BIN
components/imageproc/tests/test_imgs/exif_3.jpg
Normal file
Size: 755 B
BIN
components/imageproc/tests/test_imgs/exif_4.jpg
Normal file
Size: 758 B
BIN
components/imageproc/tests/test_imgs/exif_5.jpg
Normal file
Size: 761 B
BIN
components/imageproc/tests/test_imgs/exif_6.jpg
Normal file
Size: 763 B
BIN
components/imageproc/tests/test_imgs/exif_7.jpg
Normal file
Size: 757 B
BIN
components/imageproc/tests/test_imgs/exif_8.jpg
Normal file
Size: 759 B
@ -1,28 +0,0 @@
[package]
name = "library"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"

[dependencies]
slotmap = "1"
rayon = "1"
chrono = { version = "0.4", features = ["serde"] }
tera = "1"
serde = "1"
serde_derive = "1"
regex = "1"
lazy_static = "1"
lexical-sort = "0.3"
walkdir = "2"

front_matter = { path = "../front_matter" }
config = { path = "../config" }
utils = { path = "../utils" }
rendering = { path = "../rendering" }
errors = { path = "../errors" }

[dev-dependencies]
tempfile = "3"
toml = "0.5"
globset = "0.4"
@ -1,352 +0,0 @@
|
||||
//! What we are sending to the templates when rendering them
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
|
||||
use serde_derive::Serialize;
|
||||
use tera::{Map, Value};
|
||||
|
||||
use crate::content::{Page, Section};
|
||||
use crate::library::Library;
|
||||
use rendering::Heading;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize)]
|
||||
pub struct TranslatedContent<'a> {
|
||||
lang: &'a str,
|
||||
permalink: &'a str,
|
||||
title: &'a Option<String>,
|
||||
/// The path to the markdown file; useful for retrieving the full page through
|
||||
/// the `get_page` function.
|
||||
path: &'a Path,
|
||||
}
|
||||
|
||||
impl<'a> TranslatedContent<'a> {
|
||||
// copypaste eh, not worth creating an enum imo
|
||||
pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec<Self> {
|
||||
let mut translations = vec![];
|
||||
|
||||
#[allow(clippy::or_fun_call)]
|
||||
for key in library
|
||||
.translations
|
||||
.get(§ion.file.canonical)
|
||||
.or(Some(&HashSet::new()))
|
||||
.unwrap()
|
||||
.iter()
|
||||
{
|
||||
let other = library.get_section_by_key(*key);
|
||||
translations.push(TranslatedContent {
|
||||
lang: &other.lang,
|
||||
permalink: &other.permalink,
|
||||
title: &other.meta.title,
|
||||
path: &other.file.path,
|
||||
});
|
||||
}
|
||||
|
||||
translations
|
||||
}
|
||||
|
||||
pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec<Self> {
|
||||
let mut translations = vec![];
|
||||
|
||||
#[allow(clippy::or_fun_call)]
|
||||
for key in
|
||||
library.translations.get(&page.file.canonical).or(Some(&HashSet::new())).unwrap().iter()
|
||||
{
|
||||
let other = library.get_page_by_key(*key);
|
||||
translations.push(TranslatedContent {
|
||||
lang: &other.lang,
|
||||
permalink: &other.permalink,
|
||||
title: &other.meta.title,
|
||||
path: &other.file.path,
|
||||
});
|
||||
}
|
||||
|
||||
translations
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize)]
|
||||
pub struct SerializingPage<'a> {
|
||||
relative_path: &'a str,
|
||||
content: &'a str,
|
||||
permalink: &'a str,
|
||||
slug: &'a str,
|
||||
ancestors: Vec<&'a str>,
|
||||
title: &'a Option<String>,
|
||||
description: &'a Option<String>,
|
||||
updated: &'a Option<String>,
|
||||
date: &'a Option<String>,
|
||||
year: Option<i32>,
|
||||
month: Option<u32>,
|
||||
day: Option<u32>,
|
||||
taxonomies: &'a HashMap<String, Vec<String>>,
|
||||
extra: &'a Map<String, Value>,
|
||||
path: &'a str,
|
||||
components: &'a [String],
|
||||
summary: &'a Option<String>,
|
||||
toc: &'a [Heading],
|
||||
word_count: Option<usize>,
|
    reading_time: Option<usize>,
    assets: &'a [String],
    draft: bool,
    lang: &'a str,
    lighter: Option<Box<SerializingPage<'a>>>,
    heavier: Option<Box<SerializingPage<'a>>>,
    earlier_updated: Option<Box<SerializingPage<'a>>>,
    later_updated: Option<Box<SerializingPage<'a>>>,
    earlier: Option<Box<SerializingPage<'a>>>,
    later: Option<Box<SerializingPage<'a>>>,
    title_prev: Option<Box<SerializingPage<'a>>>,
    title_next: Option<Box<SerializingPage<'a>>>,
    translations: Vec<TranslatedContent<'a>>,
}

impl<'a> SerializingPage<'a> {
    /// Grabs all the data from a page, including sibling pages
    pub fn from_page(page: &'a Page, library: &'a Library) -> Self {
        let mut year = None;
        let mut month = None;
        let mut day = None;
        if let Some(d) = page.meta.datetime_tuple {
            year = Some(d.0);
            month = Some(d.1);
            day = Some(d.2);
        }
        let pages = library.pages();
        let lighter = page
            .lighter
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let heavier = page
            .heavier
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let earlier_updated = page
            .earlier_updated
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let later_updated = page
            .later_updated
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let earlier = page
            .earlier
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let later = page
            .later
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let title_prev = page
            .title_prev
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let title_next = page
            .title_next
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let ancestors = page
            .ancestors
            .iter()
            .map(|k| library.get_section_by_key(*k).file.relative.as_str())
            .collect();

        let translations = TranslatedContent::find_all_pages(page, library);

        SerializingPage {
            relative_path: &page.file.relative,
            ancestors,
            content: &page.content,
            permalink: &page.permalink,
            slug: &page.slug,
            title: &page.meta.title,
            description: &page.meta.description,
            extra: &page.meta.extra,
            updated: &page.meta.updated,
            date: &page.meta.date,
            year,
            month,
            day,
            taxonomies: &page.meta.taxonomies,
            path: &page.path,
            components: &page.components,
            summary: &page.summary,
            toc: &page.toc,
            word_count: page.word_count,
            reading_time: page.reading_time,
            assets: &page.serialized_assets,
            draft: page.meta.draft,
            lang: &page.lang,
            lighter,
            heavier,
            earlier_updated,
            later_updated,
            earlier,
            later,
            title_prev,
            title_next,
            translations,
        }
    }

    /// Currently only used in testing
    pub fn get_title(&'a self) -> &'a Option<String> {
        self.title
    }

    /// Same as `from_page` but does not fill sibling pages
    pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self {
        let mut year = None;
        let mut month = None;
        let mut day = None;
        if let Some(d) = page.meta.datetime_tuple {
            year = Some(d.0);
            month = Some(d.1);
            day = Some(d.2);
        }
        let ancestors = if let Some(lib) = library {
            page.ancestors
                .iter()
                .map(|k| lib.get_section_by_key(*k).file.relative.as_str())
                .collect()
        } else {
            vec![]
        };

        let translations = if let Some(lib) = library {
            TranslatedContent::find_all_pages(page, lib)
        } else {
            vec![]
        };

        SerializingPage {
            relative_path: &page.file.relative,
            ancestors,
            content: &page.content,
            permalink: &page.permalink,
            slug: &page.slug,
            title: &page.meta.title,
            description: &page.meta.description,
            extra: &page.meta.extra,
            updated: &page.meta.updated,
            date: &page.meta.date,
            year,
            month,
            day,
            taxonomies: &page.meta.taxonomies,
            path: &page.path,
            components: &page.components,
            summary: &page.summary,
            toc: &page.toc,
            word_count: page.word_count,
            reading_time: page.reading_time,
            assets: &page.serialized_assets,
            draft: page.meta.draft,
            lang: &page.lang,
            lighter: None,
            heavier: None,
            earlier_updated: None,
            later_updated: None,
            earlier: None,
            later: None,
            title_prev: None,
            title_next: None,
            translations,
        }
    }
}
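
// An illustrative sketch, not part of the original file: siblings are serialized
// with `from_page_basic`, which keeps the serialized graph one level deep. A
// caller rendering a page might look roughly like this (the `page`, `library`
// and context variables here are hypothetical):
//
//     let serialized = SerializingPage::from_page(&page, &library);
//     let mut context = tera::Context::new();
//     context.insert("page", &serialized);
//     // the context is then handed to Tera to render the page template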

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingSection<'a> {
    relative_path: &'a str,
    content: &'a str,
    permalink: &'a str,
    draft: bool,
    ancestors: Vec<&'a str>,
    title: &'a Option<String>,
    description: &'a Option<String>,
    extra: &'a Map<String, Value>,
    path: &'a str,
    components: &'a [String],
    toc: &'a [Heading],
    word_count: Option<usize>,
    reading_time: Option<usize>,
    lang: &'a str,
    assets: &'a [String],
    pages: Vec<SerializingPage<'a>>,
    subsections: Vec<&'a str>,
    translations: Vec<TranslatedContent<'a>>,
}

impl<'a> SerializingSection<'a> {
    pub fn from_section(section: &'a Section, library: &'a Library) -> Self {
        let mut pages = Vec::with_capacity(section.pages.len());
        let mut subsections = Vec::with_capacity(section.subsections.len());

        for k in &section.pages {
            pages.push(library.get_page_by_key(*k).to_serialized_basic(library));
        }

        for k in &section.subsections {
            subsections.push(library.get_section_path_by_key(*k));
        }

        let ancestors = section
            .ancestors
            .iter()
            .map(|k| library.get_section_by_key(*k).file.relative.as_str())
            .collect();
        let translations = TranslatedContent::find_all_sections(section, library);

        SerializingSection {
            relative_path: &section.file.relative,
            ancestors,
            draft: section.meta.draft,
            content: &section.content,
            permalink: &section.permalink,
            title: &section.meta.title,
            description: &section.meta.description,
            extra: &section.meta.extra,
            path: &section.path,
            components: &section.components,
            toc: &section.toc,
            word_count: section.word_count,
            reading_time: section.reading_time,
            assets: &section.serialized_assets,
            lang: &section.lang,
            pages,
            subsections,
            translations,
        }
    }

    /// Same as `from_section` but doesn't fetch pages
    pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
        let mut ancestors = vec![];
        let mut translations = vec![];
        let mut subsections = vec![];
        if let Some(lib) = library {
            ancestors = section
                .ancestors
                .iter()
                .map(|k| lib.get_section_by_key(*k).file.relative.as_str())
                .collect();
            translations = TranslatedContent::find_all_sections(section, lib);
            subsections =
                section.subsections.iter().map(|k| lib.get_section_path_by_key(*k)).collect();
        }

        SerializingSection {
            relative_path: &section.file.relative,
            ancestors,
            draft: section.meta.draft,
            content: &section.content,
            permalink: &section.permalink,
            title: &section.meta.title,
            description: &section.meta.description,
            extra: &section.meta.extra,
            path: &section.path,
            components: &section.components,
            toc: &section.toc,
            word_count: section.word_count,
            reading_time: section.reading_time,
            assets: &section.serialized_assets,
            lang: &section.lang,
            pages: vec![],
            subsections,
            translations,
        }
    }
}
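
// A sketch of why the `basic` constructors exist (illustrative, assuming the
// types above): `from_section` serializes every page with `to_serialized_basic`,
// and `from_page_basic` leaves every sibling field as `None`. Without that
// cut-off, serializing a section would recurse through pages, then their
// siblings, then the siblings' siblings, cloning large parts of the library:
//
//     let section = library.get_section("content/blog/_index.md").unwrap();
//     let serialized = SerializingSection::from_section(section, &library);
//     // serialized.pages[0].lighter is None: siblings are not chased here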

@@ -1,13 +0,0 @@
mod content;
mod library;
mod pagination;
mod sorting;
mod taxonomies;

pub use slotmap::{DenseSlotMap, Key};

pub use crate::library::Library;
pub use content::{Page, Section, SerializingPage, SerializingSection};
pub use pagination::Paginator;
pub use sorting::sort_actual_pages_by_date;
pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem};

@@ -1,500 +0,0 @@
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

use slotmap::{DefaultKey, DenseSlotMap};

use crate::content::{Page, Section};
use crate::sorting::{
    find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight,
};
use config::Config;
use front_matter::{PageFrontMatter, SortBy};

// Like vec! but for HashSet
macro_rules! set {
    ( $( $x:expr ),* ) => {
        {
            let mut s = HashSet::new();
            $(
                s.insert($x);
            )*
            s
        }
    };
}
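
// Illustrative usage of the macro above (not part of the original file):
//
//     let s = set!["a", "b"];   // builds a HashSet containing "a" and "b",
//     assert!(s.contains("a")); // the same way vec!["a", "b"] builds a Vec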

/// Houses everything about pages and sections.
/// Think of it as a database where each page and section has an id (`Key` here)
/// that can be used to find the actual value.
/// Sections and pages can then refer to other elements by those keys, which are
/// very cheap to copy.
/// We can assume the keys always exist, since removing a page/section deletes
/// all references to that key.
#[derive(Debug)]
pub struct Library {
    /// All the pages of the site
    pages: DenseSlotMap<DefaultKey, Page>,
    /// All the sections of the site
    sections: DenseSlotMap<DefaultKey, Section>,
    /// A mapping path -> key for pages so we can easily get their key
    pub paths_to_pages: HashMap<PathBuf, DefaultKey>,
    /// A mapping path -> key for sections so we can easily get their key
    pub paths_to_sections: HashMap<PathBuf, DefaultKey>,
    /// Whether we need to look for translations
    is_multilingual: bool,

    // aliases -> files,
    // so we can easily check for conflicts
    pub reverse_aliases: HashMap<String, HashSet<String>>,

    pub translations: HashMap<PathBuf, HashSet<DefaultKey>>,
}

impl Library {
    pub fn new(cap_pages: usize, cap_sections: usize, is_multilingual: bool) -> Self {
        Library {
            pages: DenseSlotMap::with_capacity(cap_pages),
            sections: DenseSlotMap::with_capacity(cap_sections),
            paths_to_pages: HashMap::with_capacity(cap_pages),
            paths_to_sections: HashMap::with_capacity(cap_sections),
            is_multilingual,
            reverse_aliases: HashMap::new(),
            translations: HashMap::new(),
        }
    }

    fn insert_reverse_aliases(&mut self, entries: Vec<String>, file_rel_path: &str) {
        for entry in entries {
            self.reverse_aliases
                .entry(entry)
                .and_modify(|s| {
                    s.insert(file_rel_path.to_owned());
                })
                .or_insert_with(|| {
                    let mut s = HashSet::new();
                    s.insert(file_rel_path.to_owned());
                    s
                });
        }
    }

    /// Add a section and return its Key
    pub fn insert_section(&mut self, section: Section) -> DefaultKey {
        let file_path = section.file.path.clone();
        let rel_path = section.path.clone();

        let mut entries = vec![rel_path];
        entries.extend(section.meta.aliases.to_vec());
        self.insert_reverse_aliases(entries, &section.file.relative);

        let key = self.sections.insert(section);
        self.paths_to_sections.insert(file_path, key);
        key
    }

    /// Add a page and return its Key
    pub fn insert_page(&mut self, page: Page) -> DefaultKey {
        let file_path = page.file.path.clone();
        let rel_path = page.path.clone();

        let mut entries = vec![rel_path];
        entries.extend(page.meta.aliases.to_vec());
        self.insert_reverse_aliases(entries, &page.file.relative);

        let key = self.pages.insert(page);

        self.paths_to_pages.insert(file_path, key);
        key
    }
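
    // An illustrative round-trip through the insert methods above (mirroring
    // the construction style of the tests at the bottom of this file):
    //
    //     let mut library = Library::new(1, 1, false);
    //     let page = Page { path: "/hello/".to_string(), ..Default::default() };
    //     let file_path = page.file.path.clone();
    //     let key = library.insert_page(page);
    //     assert_eq!(library.paths_to_pages[&file_path], key);
    //     assert!(library.get_page(&file_path).is_some());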

    pub fn pages(&self) -> &DenseSlotMap<DefaultKey, Page> {
        &self.pages
    }

    pub fn pages_mut(&mut self) -> &mut DenseSlotMap<DefaultKey, Page> {
        &mut self.pages
    }

    pub fn pages_values(&self) -> Vec<&Page> {
        self.pages.values().collect::<Vec<_>>()
    }

    pub fn sections(&self) -> &DenseSlotMap<DefaultKey, Section> {
        &self.sections
    }

    pub fn sections_mut(&mut self) -> &mut DenseSlotMap<DefaultKey, Section> {
        &mut self.sections
    }

    pub fn sections_values(&self) -> Vec<&Section> {
        self.sections.values().collect::<Vec<_>>()
    }

    /// Find the direct subsections of each section, if there are any,
    /// as well as the pages belonging to each section
    pub fn populate_sections(&mut self, config: &Config) {
        let root_path =
            self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap();
        // We are going to get both the ancestors and grandparents for each section in one go
        let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
        let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new();

        for (key, section) in self.sections.iter_mut() {
            // Make sure the pages of a section are empty since this can be called many times on `serve`
            section.pages = vec![];
            section.ignored_pages = vec![];

            if let Some(ref grand_parent) = section.file.grand_parent {
                subsections
                    // Using the original filename to work for multi-lingual sections
                    .entry(grand_parent.join(&section.file.filename))
                    .or_insert_with(Vec::new)
                    .push(section.file.path.clone());
            }

            // populate translations if necessary
            if self.is_multilingual {
                self.translations
                    .entry(section.file.canonical.clone())
                    .and_modify(|trans| {
                        trans.insert(key);
                    })
                    .or_insert(set![key]);
            };

            // Index has no ancestors, no need to go through it
            if section.is_index() {
                ancestors.insert(section.file.path.clone(), vec![]);
                continue;
            }

            let mut path = root_path.clone();
            let root_key = self.paths_to_sections[&root_path.join(&section.file.filename)];
            // Index section is the first ancestor of every single section
            let mut parents = vec![root_key];
            for component in &section.file.components {
                path = path.join(component);
                // Skip itself
                if path == section.file.parent {
                    continue;
                }
                if let Some(section_key) =
                    self.paths_to_sections.get(&path.join(&section.file.filename))
                {
                    parents.push(*section_key);
                }
            }
            ancestors.insert(section.file.path.clone(), parents);
        }

        for (key, page) in &mut self.pages {
            let parent_filename = if page.lang != config.default_language {
                format!("_index.{}.md", page.lang)
            } else {
                "_index.md".to_string()
            };
            let mut parent_section_path = page.file.parent.join(&parent_filename);
            while let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
                let parent_is_transparent;
                // We need to get a reference to a section later so keep the scope of borrowing small
                {
                    let section = self.sections.get_mut(*section_key).unwrap();
                    section.pages.push(key);
                    parent_is_transparent = section.meta.transparent;
                }
                page.ancestors =
                    ancestors.get(&parent_section_path).cloned().unwrap_or_else(Vec::new);
                // Don't forget to push the actual parent
                page.ancestors.push(*section_key);

                // Find the page template if one of the parents has `page_template` set.
                // Stops after the first one found; keep in mind `page.ancestors`
                // is `[index, ..., parent]`, so we need to reverse it first
                if page.meta.template.is_none() {
                    for ancestor in page.ancestors.iter().rev() {
                        let s = self.sections.get(*ancestor).unwrap();
                        if s.meta.page_template.is_some() {
                            page.meta.template = s.meta.page_template.clone();
                            break;
                        }
                    }
                }

                if !parent_is_transparent {
                    break;
                }

                // We've added `_index(.{LANG})?.md` so if we are here we need to go up twice
                match parent_section_path.clone().parent().unwrap().parent() {
                    Some(parent) => parent_section_path = parent.join(&parent_filename),
                    None => break,
                }
            }

            // populate translations if necessary
            if self.is_multilingual {
                self.translations
                    .entry(page.file.canonical.clone())
                    .and_modify(|trans| {
                        trans.insert(key);
                    })
                    .or_insert(set![key]);
            };
        }

        self.sort_sections_pages();

        let sections = self.paths_to_sections.clone();
        let mut sections_weight = HashMap::new();
        for (key, section) in &self.sections {
            sections_weight.insert(key, section.meta.weight);
        }

        for section in self.sections.values_mut() {
            if let Some(children) = subsections.get(&section.file.path) {
                let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
                children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
                section.subsections = children;
            }
            section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(Vec::new);
        }
    }
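
    // A worked example of the ancestor computation above, with illustrative
    // paths: for a page at `content/blog/2022/post.md`, the walk over
    // `section.file.components` yields keys in root-to-parent order, so once
    // serialized the page roughly ends up with
    //
    //     page.ancestors == ["_index.md", "blog/_index.md", "blog/2022/_index.md"]
    //
    // and the `while let` loop keeps attaching the page to grandparent sections
    // for as long as `transparent = true` is set in their front-matter.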

    /// Sort all sections' pages according to the sorting method given.
    /// Pages that cannot be sorted are moved to `section.ignored_pages` instead.
    pub fn sort_sections_pages(&mut self) {
        fn get_data<'a, T>(
            section: &'a Section,
            pages: &'a DenseSlotMap<DefaultKey, Page>,
            field: impl Fn(&'a PageFrontMatter) -> Option<T>,
        ) -> Vec<(&'a DefaultKey, Option<T>, &'a str)> {
            section
                .pages
                .iter()
                .map(|k| {
                    if let Some(page) = pages.get(*k) {
                        (k, field(&page.meta), page.permalink.as_ref())
                    } else {
                        unreachable!("Sorting got an unknown page")
                    }
                })
                .collect()
        }

        let mut updates = HashMap::new();
        for (key, section) in &self.sections {
            let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
                SortBy::None => continue,
                SortBy::Date => {
                    let data = get_data(section, &self.pages, |meta| meta.datetime);

                    sort_pages_by_date(data)
                }
                SortBy::UpdateDate => {
                    let data = get_data(section, &self.pages, |meta| {
                        std::cmp::max(meta.datetime, meta.updated_datetime)
                    });

                    sort_pages_by_date(data)
                }
                SortBy::Title => {
                    let data = get_data(section, &self.pages, |meta| meta.title.as_deref());

                    sort_pages_by_title(data)
                }
                SortBy::Weight => {
                    let data = get_data(section, &self.pages, |meta| meta.weight);

                    sort_pages_by_weight(data)
                }
            };
            updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by));
        }

        for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
            let section_is_transparent = if let Some(section) = self.sections.get(key) {
                section.meta.transparent
            } else {
                false
            };

            if !section_is_transparent {
                // Find siblings between sorted pages first
                let with_siblings = find_siblings(&sorted);

                for (k2, val1, val2) in with_siblings {
                    if let Some(page) = self.pages.get_mut(k2) {
                        match sort_by {
                            SortBy::Date => {
                                page.earlier = val2;
                                page.later = val1;
                            }
                            SortBy::UpdateDate => {
                                page.earlier_updated = val2;
                                page.later_updated = val1;
                            }
                            SortBy::Title => {
                                page.title_prev = val1;
                                page.title_next = val2;
                            }
                            SortBy::Weight => {
                                page.lighter = val1;
                                page.heavier = val2;
                            }
                            SortBy::None => {
                                unreachable!("Impossible to find siblings in SortBy::None")
                            }
                        }
                    } else {
                        unreachable!("Sorting got an unknown page")
                    }
                }
            }

            if let Some(s) = self.sections.get_mut(key) {
                s.pages = sorted;
                s.ignored_pages = cannot_be_sorted;
            }
        }
    }
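
    // Orientation note with a small example (illustrative): `find_siblings`
    // returns (previous-in-list, next-in-list), and date-sorted lists are
    // ordered newest first. So for pages dated 2019, 2018, 2017 in sorted
    // order, the 2018 page gets `later` = the 2019 page (previous in the list)
    // and `earlier` = the 2017 page (next in the list), which is why `val1`
    // and `val2` swap meaning between the Date and Title/Weight arms above.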

    /// Find all the orphan pages: pages that are in a folder without an `_index.md`
    pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
        let pages_in_sections =
            self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>();

        self.pages
            .iter()
            .filter(|(key, _)| !pages_in_sections.contains(&key))
            .map(|(_, page)| page)
            .collect()
    }

    /// Used in integration tests
    pub fn get_section_key<P: AsRef<Path>>(&self, path: P) -> Option<&DefaultKey> {
        self.paths_to_sections.get(path.as_ref())
    }

    pub fn get_section<P: AsRef<Path>>(&self, path: P) -> Option<&Section> {
        self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
    }

    /// Used in integration tests
    pub fn get_section_mut<P: AsRef<Path>>(&mut self, path: P) -> Option<&mut Section> {
        self.sections
            .get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
    }

    pub fn get_section_by_key(&self, key: DefaultKey) -> &Section {
        self.sections.get(key).unwrap()
    }

    pub fn get_section_path_by_key(&self, key: DefaultKey) -> &str {
        &self.get_section_by_key(key).file.relative
    }

    pub fn get_page<P: AsRef<Path>>(&self, path: P) -> Option<&Page> {
        self.pages.get(self.paths_to_pages.get(path.as_ref()).cloned().unwrap_or_default())
    }

    pub fn get_page_by_key(&self, key: DefaultKey) -> &Page {
        self.pages.get(key).unwrap()
    }

    pub fn remove_section<P: AsRef<Path>>(&mut self, path: P) -> Option<Section> {
        if let Some(k) = self.paths_to_sections.remove(path.as_ref()) {
            self.sections.remove(k)
        } else {
            None
        }
    }

    pub fn remove_page<P: AsRef<Path>>(&mut self, path: P) -> Option<Page> {
        if let Some(k) = self.paths_to_pages.remove(path.as_ref()) {
            self.pages.remove(k)
        } else {
            None
        }
    }

    pub fn contains_section<P: AsRef<Path>>(&self, path: P) -> bool {
        self.paths_to_sections.contains_key(path.as_ref())
    }

    /// Checks every section/page path plus the aliases and ensures none of them
    /// are colliding.
    /// Returns `(colliding path, [list of files causing that collision])`
    pub fn check_for_path_collisions(&self) -> Vec<(String, Vec<String>)> {
        self.reverse_aliases
            .iter()
            .filter_map(|(alias, files)| {
                if files.len() > 1 {
                    Some((alias.clone(), files.clone().into_iter().collect::<Vec<_>>()))
                } else {
                    None
                }
            })
            .collect()
    }
}
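
// An illustrative collision, mirroring the tests below: two files rendering to
// the same output path show up as one entry in the returned Vec (the files in
// each entry come from a HashSet, so their order is unspecified):
//
//     let mut a = Page { path: "hello".to_string(), ..Default::default() };
//     a.file.relative = "a.md".to_string();
//     let mut b = Page { path: "hello".to_string(), ..Default::default() };
//     b.file.relative = "b.md".to_string();
//     library.insert_page(a);
//     library.insert_page(b);
//     // check_for_path_collisions() now contains ("hello", ["a.md", "b.md"])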

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_find_no_collisions() {
        let mut library = Library::new(10, 10, false);
        let page = Page { path: "hello".to_string(), ..Default::default() };
        let page2 = Page { path: "hello-world".to_string(), ..Default::default() };
        let section = Section { path: "blog".to_string(), ..Default::default() };
        library.insert_page(page);
        library.insert_page(page2);
        library.insert_section(section);

        let collisions = library.check_for_path_collisions();
        assert_eq!(collisions.len(), 0);
    }

    #[test]
    fn can_find_collisions_between_pages() {
        let mut library = Library::new(10, 10, false);
        let mut page = Page { path: "hello".to_string(), ..Default::default() };
        page.file.relative = "hello".to_string();
        let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
        page2.file.relative = "hello-world".to_string();
        let mut section = Section { path: "blog".to_string(), ..Default::default() };
        section.file.relative = "hello-world".to_string();
        library.insert_page(page.clone());
        library.insert_page(page2.clone());
        library.insert_section(section);

        let collisions = library.check_for_path_collisions();
        assert_eq!(collisions.len(), 1);
        assert_eq!(collisions[0].0, page.path);
        assert!(collisions[0].1.contains(&page.file.relative));
        assert!(collisions[0].1.contains(&page2.file.relative));
    }

    #[test]
    fn can_find_collisions_with_an_alias() {
        let mut library = Library::new(10, 10, false);
        let mut page = Page { path: "hello".to_string(), ..Default::default() };
        page.file.relative = "hello".to_string();
        let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
        page2.file.relative = "hello-world".to_string();
        page2.meta.aliases = vec!["hello".to_string()];
        let mut section = Section { path: "blog".to_string(), ..Default::default() };
        section.file.relative = "hello-world".to_string();
        library.insert_page(page.clone());
        library.insert_page(page2.clone());
        library.insert_section(section);

        let collisions = library.check_for_path_collisions();
        assert_eq!(collisions.len(), 1);
        assert_eq!(collisions[0].0, page.path);
        assert!(collisions[0].1.contains(&page.file.relative));
        assert!(collisions[0].1.contains(&page2.file.relative));
    }
}

@@ -1,271 +0,0 @@
use std::cmp::Ordering;

use chrono::NaiveDateTime;
use lexical_sort::natural_lexical_cmp;
use rayon::prelude::*;
use slotmap::DefaultKey;

use crate::content::Page;

/// Used by the feed.
/// Lives here so we don't have to import sorting internals in the site crate.
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
    if ord == Ordering::Equal {
        a.permalink.cmp(&b.permalink)
    } else {
        ord
    }
}

/// Takes a list of (page key, date, permalink) and sorts them by date if possible.
/// Pages without a date are put in the unsortable bucket.
/// The permalink is used to break ties.
pub fn sort_pages_by_date(
    pages: Vec<(&DefaultKey, Option<NaiveDateTime>, &str)>,
) -> (Vec<DefaultKey>, Vec<DefaultKey>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
        pages.into_par_iter().partition(|page| page.1.is_some());

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = b.1.unwrap().cmp(&a.1.unwrap());
        if ord == Ordering::Equal {
            a.2.cmp(b.2)
        } else {
            ord
        }
    });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}
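
// All three sort functions in this file follow the same partition-then-sort
// pattern; a sketch of calling this one directly (`key_a`, `key_b` and the
// permalinks are hypothetical values):
//
//     let input = vec![
//         (&key_a, Some(date_a), "https://example.com/a/"),
//         (&key_b, None, "https://example.com/b/"),
//     ];
//     let (sorted, unsortable) = sort_pages_by_date(input);
//     // `sorted` holds key_a (newest first); `unsortable` holds key_b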

/// Takes a list of (page key, title, permalink) and sorts them by title if possible.
/// Uses a natural lexical comparison as defined by the lexical_sort crate.
/// Pages without a title are put in the unsortable bucket.
/// The permalink is used to break ties.
pub fn sort_pages_by_title(
    pages: Vec<(&DefaultKey, Option<&str>, &str)>,
) -> (Vec<DefaultKey>, Vec<DefaultKey>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
        pages.into_par_iter().partition(|page| page.1.is_some());

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = natural_lexical_cmp(a.1.unwrap(), b.1.unwrap());
        if ord == Ordering::Equal {
            a.2.cmp(b.2)
        } else {
            ord
        }
    });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}

/// Takes a list of (page key, weight, permalink) and sorts them by weight if possible.
/// Pages without a weight are put in the unsortable bucket.
/// The permalink is used to break ties.
pub fn sort_pages_by_weight(
    pages: Vec<(&DefaultKey, Option<usize>, &str)>,
) -> (Vec<DefaultKey>, Vec<DefaultKey>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
        pages.into_par_iter().partition(|page| page.1.is_some());

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = a.1.unwrap().cmp(&b.1.unwrap());
        if ord == Ordering::Equal {
            a.2.cmp(b.2)
        } else {
            ord
        }
    });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}

/// Find the lighter/heavier, earlier/later, and title_prev/title_next
/// pages for all pages having a date/weight/title
pub fn find_siblings(
    sorted: &[DefaultKey],
) -> Vec<(DefaultKey, Option<DefaultKey>, Option<DefaultKey>)> {
    let mut res = Vec::with_capacity(sorted.len());
    let length = sorted.len();

    for (i, key) in sorted.iter().enumerate() {
        let mut with_siblings = (*key, None, None);

        if i > 0 {
            // lighter / later / title_prev
            with_siblings.1 = Some(sorted[i - 1]);
        }

        if i < length - 1 {
            // heavier / earlier / title_next
            with_siblings.2 = Some(sorted[i + 1]);
        }
        res.push(with_siblings);
    }

    res
}
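
// A worked example (illustrative): for `sorted = [k1, k2, k3]` this returns
//
//     [(k1, None,     Some(k2)),
//      (k2, Some(k1), Some(k3)),
//      (k3, Some(k2), None)]
//
// i.e. each entry is (key, previous-in-list, next-in-list), matching the
// `can_find_siblings` test below.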

#[cfg(test)]
mod tests {
    use slotmap::DenseSlotMap;
    use std::path::PathBuf;

    use super::{find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight};
    use crate::content::Page;
    use front_matter::PageFrontMatter;

    fn create_page_with_date(date: &str) -> Page {
        let mut front_matter =
            PageFrontMatter { date: Some(date.to_string()), ..Default::default() };
        front_matter.date_to_datetime();
        Page::new("content/hello.md", front_matter, &PathBuf::new())
    }

    fn create_page_with_title(title: &str) -> Page {
        let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() };
        Page::new("content/hello.md", front_matter, &PathBuf::new())
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() };
        Page::new("content/hello.md", front_matter, &PathBuf::new())
    }

    #[test]
    fn can_sort_by_dates() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_date("2018-01-01");
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_date("2017-01-01");
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.datetime, page1.permalink.as_ref()),
            (&key2, page2.meta.datetime, page2.permalink.as_ref()),
            (&key3, page3.meta.datetime, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_date(input);
        // Should be sorted by date
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn can_sort_by_titles() {
        let titles = vec![
            "bagel",
            "track_3",
            "microkernel",
            "métro",
            "BART",
            "Underground",
            "track_13",
            "μ-kernel",
            "meter",
            "track_1",
        ];
        let pages: Vec<Page> = titles.iter().map(|title| create_page_with_title(title)).collect();
        let mut dense = DenseSlotMap::new();
        let keys: Vec<_> = pages.iter().map(|p| dense.insert(p)).collect();
        let input: Vec<_> = pages
            .iter()
            .enumerate()
            .map(|(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref()))
            .collect();
        let (sorted, _) = sort_pages_by_title(input);
        // Should be sorted by title
        let sorted_titles: Vec<_> = sorted
            .iter()
            .map(|key| dense.get(*key).unwrap().meta.title.as_ref().unwrap())
            .collect();
        assert_eq!(
            sorted_titles,
            vec![
                "bagel",
                "BART",
                "μ-kernel",
                "meter",
                "métro",
                "microkernel",
                "track_1",
                "track_3",
                "track_13",
                "Underground",
            ]
        );
    }

    #[test]
    fn can_sort_by_weight() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_weight(1);
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_weight(input);
        // Should be sorted by weight
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn ignore_page_with_missing_field() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];

        let (pages, unsorted) = sort_pages_by_weight(input);
        assert_eq!(pages.len(), 2);
        assert_eq!(unsorted.len(), 1);
    }

    #[test]
    fn can_find_siblings() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(1);
        let key1 = dense.insert(page1);
        let page2 = create_page_with_weight(2);
        let key2 = dense.insert(page2);
        let page3 = create_page_with_weight(3);
        let key3 = dense.insert(page3);

        let input = vec![key1, key2, key3];

        let pages = find_siblings(&input);

        assert_eq!(pages[0].1, None);
        assert_eq!(pages[0].2, Some(key2));

        assert_eq!(pages[1].1, Some(key1));
        assert_eq!(pages[1].2, Some(key3));

        assert_eq!(pages[2].1, Some(key2));
        assert_eq!(pages[2].2, None);
    }
}

@@ -1,930 +0,0 @@
use std::cmp::Ordering;
use std::collections::HashMap;

use serde_derive::Serialize;
use slotmap::DefaultKey;
use tera::{Context, Tera};

use config::{Config, Taxonomy as TaxonomyConfig};
use errors::{bail, Error, Result};
use utils::templates::{check_template_fallbacks, render_template};

use crate::content::SerializingPage;
use crate::library::Library;
use crate::sorting::sort_pages_by_date;
use utils::slugs::slugify_paths;

#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct SerializedTaxonomyItem<'a> {
    name: &'a str,
    slug: &'a str,
    path: &'a str,
    permalink: &'a str,
    pages: Vec<SerializingPage<'a>>,
}

impl<'a> SerializedTaxonomyItem<'a> {
    pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self {
        let mut pages = vec![];

        for key in &item.pages {
            let page = library.get_page_by_key(*key);
            pages.push(page.to_serialized_basic(library));
        }

        SerializedTaxonomyItem {
            name: &item.name,
            slug: &item.slug,
            path: &item.path,
            permalink: &item.permalink,
            pages,
        }
    }
}

/// A taxonomy with all its pages
#[derive(Debug, Clone)]
pub struct TaxonomyItem {
    pub name: String,
    pub slug: String,
    pub path: String,
    pub permalink: String,
    pub pages: Vec<DefaultKey>,
}

impl TaxonomyItem {
    pub fn new(
        name: &str,
        lang: &str,
        taxo_slug: &str,
        config: &Config,
        keys: Vec<DefaultKey>,
        library: &Library,
    ) -> Self {
        // Taxonomies are almost always used for blogs so we filter by dates,
        // and it's not like we can sort things across sections by anything other
        // than dates
        let data = keys
            .iter()
            .map(|k| {
                if let Some(page) = library.pages().get(*k) {
                    (k, page.meta.datetime, page.permalink.as_ref())
                } else {
                    unreachable!("Sorting got an unknown page")
                }
            })
            .collect();
        let (mut pages, ignored_pages) = sort_pages_by_date(data);
        let item_slug = slugify_paths(name, config.slugify.taxonomies);
        let path = if lang != config.default_language {
            format!("/{}/{}/{}/", lang, taxo_slug, item_slug)
        } else {
            format!("/{}/{}/", taxo_slug, item_slug)
        };
        let permalink = config.make_permalink(&path);

        // We still append pages without dates at the end
        pages.extend(ignored_pages);

        TaxonomyItem { name: name.to_string(), permalink, path, slug: item_slug, pages }
    }

    pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyItem<'a> {
        SerializedTaxonomyItem::from_item(self, library)
    }

    pub fn merge(&mut self, other: Self) {
        self.pages.extend(other.pages);
    }
}

impl PartialEq for TaxonomyItem {
    fn eq(&self, other: &Self) -> bool {
        self.permalink == other.permalink
    }
}
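
// The resulting paths, with illustrative values: for a "tags" taxonomy and a
// term named "Rust Tips" slugified to "rust-tips", `TaxonomyItem::new` builds
//
//     default language: /tags/rust-tips/
//     French ("fr"):    /fr/tags/rust-tips/
//
// and `make_permalink` prepends the site's base_url to those paths.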

#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct SerializedTaxonomy<'a> {
    kind: &'a TaxonomyConfig,
    lang: &'a str,
    permalink: &'a str,
    items: Vec<SerializedTaxonomyItem<'a>>,
}

impl<'a> SerializedTaxonomy<'a> {
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
        let items: Vec<SerializedTaxonomyItem> =
            taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
        SerializedTaxonomy {
            kind: &taxonomy.kind,
            lang: &taxonomy.lang,
            permalink: &taxonomy.permalink,
            items,
        }
    }
}

/// All different taxonomies we have and their content
#[derive(Debug, Clone, PartialEq)]
pub struct Taxonomy {
    pub kind: TaxonomyConfig,
    pub lang: String,
    pub slug: String,
    pub permalink: String,
    // this vec is sorted by slug, with ties broken by name (see `new` below)
    pub items: Vec<TaxonomyItem>,
}

impl Taxonomy {
    fn new(
        kind: TaxonomyConfig,
        lang: &str,
        config: &Config,
        items: HashMap<String, Vec<DefaultKey>>,
        library: &Library,
    ) -> Taxonomy {
        let mut sorted_items = vec![];
        let slug = slugify_paths(&kind.name, config.slugify.taxonomies);
        for (name, pages) in items {
            sorted_items.push(TaxonomyItem::new(&name, lang, &slug, config, pages, library));
        }
        sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) {
            Ordering::Less => Ordering::Less,
            Ordering::Greater => Ordering::Greater,
            Ordering::Equal => a.name.cmp(&b.name),
        });
        sorted_items.dedup_by(|a, b| {
            // The custom Eq impl checks for equal permalinks;
            // here we make sure all pages from `a` get copied to `b`
            // before dedup gets rid of it
            if a == b {
                b.merge(a.to_owned());
                true
            } else {
                false
            }
        });
        let path = if lang != config.default_language {
            format!("/{}/{}/", lang, slug)
        } else {
            format!("/{}/", slug)
        };
        let permalink = config.make_permalink(&path);

        Taxonomy { kind, slug, lang: lang.to_owned(), permalink, items: sorted_items }
    }

    pub fn len(&self) -> usize {
        self.items.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn render_term(
        &self,
        item: &TaxonomyItem,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.insert("config", &config.serialize(&self.lang));
        context.insert("lang", &self.lang);
        context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
        context.insert("taxonomy", &self.kind);
        context.insert(
            "current_url",
            &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)),
        );
        context.insert("current_path", &format!("/{}/{}/", self.kind.name, item.slug));

        // Check for a taxonomy-specific template, or use the generic one as a fallback.
        let specific_template = format!("{}/single.html", self.kind.name);
        let template = match check_template_fallbacks(&specific_template, tera, &config.theme) {
            Some(template) => template,
            None => "taxonomy_single.html",
        };

        render_template(&template, tera, context, &config.theme).map_err(|e| {
            Error::chain(format!("Failed to render single term {} page.", self.kind.name), e)
        })
    }

    pub fn render_all_terms(
        &self,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.insert("config", &config.serialize(&self.lang));
        let terms: Vec<SerializedTaxonomyItem> =
            self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
        context.insert("terms", &terms);
        context.insert("lang", &self.lang);
        context.insert("taxonomy", &self.kind);
        context.insert("current_url", &config.make_permalink(&self.kind.name));
        context.insert("current_path", &format!("/{}/", self.kind.name));

        // Check for a taxonomy-specific template, or use the generic one as a fallback.
        let specific_template = format!("{}/list.html", self.kind.name);
        let template = match check_template_fallbacks(&specific_template, tera, &config.theme) {
            Some(template) => template,
            None => "taxonomy_list.html",
        };

        render_template(&template, tera, context, &config.theme).map_err(|e| {
            Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e)
        })
    }

    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
        SerializedTaxonomy::from_taxonomy(self, library)
    }
}

pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
    let mut slugs_to_lang = HashMap::new();

    let taxonomies_def = {
        let mut m = HashMap::new();
        // the default language's taxonomies
        for t in &config.taxonomies {
            let slug = slugify_paths(&t.name, config.slugify.taxonomies);
            let key = format!("{}-{}", slug, config.default_language);
            slugs_to_lang.insert(key.clone(), config.default_language.as_str());
            m.insert(key, t);
        }

        // the other languages' taxonomies
        for (code, options) in config.other_languages() {
            for t in &options.taxonomies {
                let slug = slugify_paths(&t.name, config.slugify.taxonomies);
                let key = format!("{}-{}", slug, code);
                slugs_to_lang.insert(key.clone(), code);
                m.insert(key, t);
            }
        }
        m
    };

    let mut all_taxonomies = HashMap::new();
    for (key, page) in library.pages() {
        for (name, taxo_term) in &page.meta.taxonomies {
            let taxo_slug = slugify_paths(name, config.slugify.taxonomies);
            let taxo_key = format!("{}-{}", &taxo_slug, page.lang);
            if taxonomies_def.contains_key(&taxo_key) {
                all_taxonomies.entry(taxo_key.clone()).or_insert_with(HashMap::new);

                for term in taxo_term {
                    all_taxonomies
                        .get_mut(&taxo_key)
                        .unwrap()
                        .entry(term.to_string())
                        .or_insert_with(Vec::new)
                        .push(key);
                }
            } else {
                bail!(
                    "Page `{}` has taxonomy `{}` which is not defined in config.toml",
                    page.file.path.display(),
                    name
                );
            }
        }
    }

    let mut taxonomies = vec![];

    for (name, taxo) in all_taxonomies {
        taxonomies.push(Taxonomy::new(
            taxonomies_def[&name].clone(),
            slugs_to_lang[&name],
            config,
            taxo,
            library,
        ));
    }

    Ok(taxonomies)
}
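
// Key format used above, with illustrative values: taxonomies are looked up by
// "{slug}-{lang}", so a "tags" taxonomy defined for both the default language
// "en" and for "fr" produces the keys "tags-en" and "tags-fr", and a French
// page using `tags` only matches if "tags-fr" was defined in config.toml;
// otherwise the `bail!` branch rejects the page.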
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::content::Page;
|
||||
use crate::library::Library;
|
||||
use config::{Config, LanguageOptions, Slugify, Taxonomy as TaxonomyConfig};
|
||||
use utils::slugs::SlugifyStrategy;
|
||||
|
||||
#[test]
|
||||
fn can_make_taxonomies() {
|
||||
let mut config = Config::default();
|
||||
let mut library = Library::new(2, 0, false);
|
||||
|
||||
config.taxonomies = vec![
|
||||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
|
||||
let mut page1 = Page::default();
|
||||
let mut taxo_page1 = HashMap::new();
|
||||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
|
||||
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
|
||||
page1.meta.taxonomies = taxo_page1;
|
||||
page1.lang = config.default_language.clone();
|
||||
library.insert_page(page1);
|
||||
|
||||
let mut page2 = Page::default();
|
||||
let mut taxo_page2 = HashMap::new();
|
||||
taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]);
|
||||
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
|
||||
page2.meta.taxonomies = taxo_page2;
|
||||
page2.lang = config.default_language.clone();
|
||||
library.insert_page(page2);
|
||||
|
||||
let mut page3 = Page::default();
|
||||
let mut taxo_page3 = HashMap::new();
|
||||
taxo_page3.insert("tags".to_string(), vec!["js".to_string()]);
|
||||
taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]);
|
||||
page3.meta.taxonomies = taxo_page3;
|
||||
page3.lang = config.default_language.clone();
|
||||
library.insert_page(page3);
|
||||
|
||||
let taxonomies = find_taxonomies(&config, &library).unwrap();
|
||||
let (tags, categories, authors) = {
|
||||
let mut t = None;
|
||||
let mut c = None;
|
||||
let mut a = None;
|
||||
for x in taxonomies {
|
||||
match x.kind.name.as_ref() {
|
||||
"tags" => t = Some(x),
|
||||
"categories" => c = Some(x),
|
||||
"authors" => a = Some(x),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
(t.unwrap(), c.unwrap(), a.unwrap())
|
||||
};
|
||||
assert_eq!(tags.items.len(), 3);
|
||||
assert_eq!(categories.items.len(), 2);
|
||||
assert_eq!(authors.items.len(), 1);
|
||||
|
||||
assert_eq!(tags.items[0].name, "db");
|
||||
assert_eq!(tags.items[0].slug, "db");
|
||||
assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
|
||||
assert_eq!(tags.items[0].path, "/tags/db/");
|
||||
assert_eq!(tags.items[0].pages.len(), 1);
|
||||
|
||||
assert_eq!(tags.items[1].name, "js");
|
||||
assert_eq!(tags.items[1].slug, "js");
|
||||
assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/");
|
||||
assert_eq!(tags.items[1].pages.len(), 2);
|
||||
|
||||
assert_eq!(tags.items[2].name, "rust");
|
||||
assert_eq!(tags.items[2].slug, "rust");
|
||||
assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/");
|
||||
assert_eq!(tags.items[2].pages.len(), 2);
|
||||
|
||||
assert_eq!(categories.items[0].name, "Other");
|
||||
assert_eq!(categories.items[0].slug, "other");
|
||||
assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/");
|
||||
assert_eq!(categories.items[0].pages.len(), 1);
|
||||
|
||||
assert_eq!(categories.items[1].name, "Programming tutorials");
|
||||
assert_eq!(categories.items[1].slug, "programming-tutorials");
|
||||
assert_eq!(
|
||||
categories.items[1].permalink,
|
||||
"http://a-website.com/categories/programming-tutorials/"
|
||||
);
|
||||
assert_eq!(categories.items[1].pages.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_make_slugified_taxonomies() {
|
||||
let mut config = Config::default();
|
||||
let mut library = Library::new(2, 0, false);
|
||||
|
||||
config.taxonomies = vec![
|
||||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
|
||||
let mut page1 = Page::default();
|
||||
let mut taxo_page1 = HashMap::new();
|
||||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
|
||||
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
|
||||
page1.meta.taxonomies = taxo_page1;
|
||||
page1.lang = config.default_language.clone();
|
||||
library.insert_page(page1);
|
||||
|
||||
let mut page2 = Page::default();
|
||||
let mut taxo_page2 = HashMap::new();
|
||||
taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]);
|
||||
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
|
||||
page2.meta.taxonomies = taxo_page2;
|
||||
page2.lang = config.default_language.clone();
|
||||
library.insert_page(page2);
|
||||
|
||||
let mut page3 = Page::default();
|
||||
let mut taxo_page3 = HashMap::new();
|
||||
taxo_page3.insert("tags".to_string(), vec!["js".to_string()]);
|
||||
taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]);
|
||||
page3.meta.taxonomies = taxo_page3;
|
||||
page3.lang = config.default_language.clone();
|
||||
library.insert_page(page3);
|
||||
|
||||
let taxonomies = find_taxonomies(&config, &library).unwrap();
|
||||
let (tags, categories, authors) = {
|
||||
let mut t = None;
|
||||
let mut c = None;
|
||||
let mut a = None;
|
||||
for x in taxonomies {
|
||||
match x.kind.name.as_ref() {
|
||||
"tags" => t = Some(x),
|
||||
"categories" => c = Some(x),
|
||||
"authors" => a = Some(x),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
(t.unwrap(), c.unwrap(), a.unwrap())
|
||||
};
|
||||
assert_eq!(tags.items.len(), 3);
|
||||
assert_eq!(categories.items.len(), 2);
|
||||
assert_eq!(authors.items.len(), 1);
|
||||
|
||||
assert_eq!(tags.items[0].name, "db");
|
||||
assert_eq!(tags.items[0].slug, "db");
|
||||
assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
|
||||
assert_eq!(tags.items[0].pages.len(), 1);
|
||||
|
||||
assert_eq!(tags.items[1].name, "js");
|
||||
assert_eq!(tags.items[1].slug, "js");
|
||||
assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/");
|
||||
assert_eq!(tags.items[1].path, "/tags/js/");
|
||||
assert_eq!(tags.items[1].pages.len(), 2);
|
||||
|
||||
assert_eq!(tags.items[2].name, "rust");
|
||||
assert_eq!(tags.items[2].slug, "rust");
|
||||
assert_eq!(tags.items[2].permalink, "http://a-website.com/tags/rust/");
|
||||
assert_eq!(tags.items[2].pages.len(), 2);
|
||||
|
||||
assert_eq!(categories.items[0].name, "Other");
|
||||
assert_eq!(categories.items[0].slug, "other");
|
||||
assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/");
|
||||
assert_eq!(categories.items[0].pages.len(), 1);
|
||||
|
||||
assert_eq!(categories.items[1].name, "Programming tutorials");
|
||||
assert_eq!(categories.items[1].slug, "programming-tutorials");
|
||||
assert_eq!(
|
||||
categories.items[1].permalink,
|
||||
"http://a-website.com/categories/programming-tutorials/"
|
||||
);
|
||||
assert_eq!(categories.items[1].pages.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn errors_on_unknown_taxonomy() {
|
||||
let mut config = Config::default();
|
||||
let mut library = Library::new(2, 0, false);
|
||||
|
||||
config.taxonomies =
|
||||
vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];
|
||||
let mut page1 = Page::default();
|
||||
let mut taxo_page1 = HashMap::new();
|
||||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
|
||||
page1.meta.taxonomies = taxo_page1;
|
||||
page1.lang = config.default_language.clone();
|
||||
library.insert_page(page1);
|
||||
|
||||
let taxonomies = find_taxonomies(&config, &library);
|
||||
assert!(taxonomies.is_err());
|
||||
let err = taxonomies.unwrap_err();
|
||||
// no path as this is created by Default
|
||||
assert_eq!(
|
||||
format!("{}", err),
|
||||
"Page `` has taxonomy `tags` which is not defined in config.toml"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_make_taxonomies_in_multiple_languages() {
|
||||
let mut config = Config::default();
|
||||
config.languages.insert("fr".to_owned(), LanguageOptions::default());
|
||||
let mut library = Library::new(2, 0, true);
|
||||
|
||||
config.taxonomies = vec![
|
||||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
let french_taxo = vec![
|
||||
TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() },
|
||||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
|
||||
];
|
||||
let lang_options = config::LanguageOptions {
|
||||
taxonomies: french_taxo,
|
||||
..config::LanguageOptions::default()
|
||||
};
|
||||
config.languages.insert("fr".to_owned(), lang_options);
|
||||
|
||||
let mut page1 = Page::default();
|
||||
let mut taxo_page1 = HashMap::new();
|
||||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
|
||||
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
|
||||
page1.meta.taxonomies = taxo_page1;
|
||||
page1.lang = config.default_language.clone();
|
||||
library.insert_page(page1);
|
||||
|
||||
let mut page2 = Page::default();
|
||||
let mut taxo_page2 = HashMap::new();
|
||||
taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]);
|
||||
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
|
||||
page2.meta.taxonomies = taxo_page2;
|
||||
page2.lang = config.default_language.clone();
|
||||
library.insert_page(page2);
|
||||
|
||||
let mut page3 = Page { lang: "fr".to_string(), ..Default::default() };
|
||||
let mut taxo_page3 = HashMap::new();
|
||||
taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]);
|
||||
taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]);
|
||||
page3.meta.taxonomies = taxo_page3;
|
||||
library.insert_page(page3);
|
||||
|
||||
let taxonomies = find_taxonomies(&config, &library).unwrap();
|
||||
let (tags, categories, authors) = {
|
||||
let mut t = None;
|
||||
let mut c = None;
|
||||
let mut a = None;
|
||||
for x in taxonomies {
|
||||
match x.kind.name.as_ref() {
|
||||
"tags" => {
|
||||
if x.lang == "en" {
|
||||
t = Some(x)
|
||||
}
|
||||
}
|
||||
"categories" => c = Some(x),
|
                    "auteurs" => a = Some(x),
                    _ => unreachable!(),
                }
            }
            (t.unwrap(), c.unwrap(), a.unwrap())
        };

        assert_eq!(tags.items.len(), 2);
        assert_eq!(categories.items.len(), 2);
        assert_eq!(authors.items.len(), 1);

        assert_eq!(tags.items[0].name, "db");
        assert_eq!(tags.items[0].slug, "db");
        assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
        assert_eq!(tags.items[0].pages.len(), 1);

        assert_eq!(tags.items[1].name, "rust");
        assert_eq!(tags.items[1].slug, "rust");
        assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/");
        assert_eq!(tags.items[1].pages.len(), 2);

        assert_eq!(authors.items[0].name, "Vincent Prouillet");
        assert_eq!(authors.items[0].slug, "vincent-prouillet");
        assert_eq!(
            authors.items[0].permalink,
            "http://a-website.com/fr/auteurs/vincent-prouillet/"
        );
        assert_eq!(authors.items[0].pages.len(), 1);

        assert_eq!(categories.items[0].name, "Other");
        assert_eq!(categories.items[0].slug, "other");
        assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/");
        assert_eq!(categories.items[0].pages.len(), 1);

        assert_eq!(categories.items[1].name, "Programming tutorials");
        assert_eq!(categories.items[1].slug, "programming-tutorials");
        assert_eq!(
            categories.items[1].permalink,
            "http://a-website.com/categories/programming-tutorials/"
        );
        assert_eq!(categories.items[1].pages.len(), 1);
    }

    #[test]
    fn can_make_utf8_taxonomies() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::Safe;
        let mut library = Library::new(2, 0, true);

        let french_taxo =
            vec![TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }];
        let lang_options = config::LanguageOptions {
            taxonomies: french_taxo,
            ..config::LanguageOptions::default()
        };
        config.languages.insert("fr".to_owned(), lang_options);

        let mut page = Page { lang: "fr".to_string(), ..Default::default() };
        let mut taxo_page = HashMap::new();
        taxo_page.insert("catégories".to_string(), vec!["Écologie".to_string()]);
        page.meta.taxonomies = taxo_page;
        library.insert_page(page);

        let taxonomies = find_taxonomies(&config, &library).unwrap();
        let categories = &taxonomies[0];

        assert_eq!(categories.items.len(), 1);
        assert_eq!(categories.items[0].name, "Écologie");
        assert_eq!(categories.items[0].permalink, "http://a-website.com/fr/catégories/Écologie/");
        assert_eq!(categories.items[0].pages.len(), 1);
    }

    #[test]
    fn can_make_slugified_taxonomies_in_multiple_languages() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::On;
        let mut library = Library::new(2, 0, true);

        config.taxonomies = vec![
            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
        ];
        let french_taxo = vec![
            TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
        ];
        let lang_options = config::LanguageOptions {
            taxonomies: french_taxo,
            ..config::LanguageOptions::default()
        };
        config.languages.insert("fr".to_owned(), lang_options);
        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
        taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
        page1.meta.taxonomies = taxo_page1;
        page1.lang = config.default_language.clone();
        library.insert_page(page1);

        let mut page2 = Page::default();
        let mut taxo_page2 = HashMap::new();
        taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]);
        taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
        page2.meta.taxonomies = taxo_page2;
        page2.lang = config.default_language.clone();
        library.insert_page(page2);

        let mut page3 = Page { lang: "fr".to_string(), ..Default::default() };
        let mut taxo_page3 = HashMap::new();
        taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]);
        taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]);
        page3.meta.taxonomies = taxo_page3;
        library.insert_page(page3);

        let taxonomies = find_taxonomies(&config, &library).unwrap();
        let (tags, categories, authors) = {
            let mut t = None;
            let mut c = None;
            let mut a = None;
            for x in taxonomies {
                match x.kind.name.as_ref() {
                    "tags" => {
                        if x.lang == "en" {
                            t = Some(x)
                        }
                    }
                    "categories" => c = Some(x),
                    "auteurs" => a = Some(x),
                    _ => unreachable!(),
                }
            }
            (t.unwrap(), c.unwrap(), a.unwrap())
        };

        assert_eq!(tags.items.len(), 2);
        assert_eq!(categories.items.len(), 2);
        assert_eq!(authors.items.len(), 1);

        assert_eq!(tags.items[0].name, "db");
        assert_eq!(tags.items[0].slug, "db");
        assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
        assert_eq!(tags.items[0].pages.len(), 1);

        assert_eq!(tags.items[1].name, "rust");
        assert_eq!(tags.items[1].slug, "rust");
        assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/");
        assert_eq!(tags.items[1].pages.len(), 2);

        assert_eq!(authors.items[0].name, "Vincent Prouillet");
        assert_eq!(authors.items[0].slug, "vincent-prouillet");
        assert_eq!(
            authors.items[0].permalink,
            "http://a-website.com/fr/auteurs/vincent-prouillet/"
        );
        assert_eq!(authors.items[0].pages.len(), 1);

        assert_eq!(categories.items[0].name, "Other");
        assert_eq!(categories.items[0].slug, "other");
        assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/");
        assert_eq!(categories.items[0].pages.len(), 1);

        assert_eq!(categories.items[1].name, "Programming tutorials");
        assert_eq!(categories.items[1].slug, "programming-tutorials");
        assert_eq!(
            categories.items[1].permalink,
            "http://a-website.com/categories/programming-tutorials/"
        );
        assert_eq!(categories.items[1].pages.len(), 1);
    }

    #[test]
    fn taxonomies_are_grouped_by_permalink() {
        let mut config = Config::default();
        let mut library = Library::new(2, 0, false);

        config.taxonomies = vec![
            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
        ];

        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert(
            "test-taxonomy".to_string(),
            vec!["term one".to_string(), "term two".to_string()],
        );
        page1.meta.taxonomies = taxo_page1;
        page1.lang = config.default_language.clone();
        library.insert_page(page1);

        let mut page2 = Page::default();
        let mut taxo_page2 = HashMap::new();
        taxo_page2.insert(
            "test taxonomy".to_string(),
            vec!["Term Two".to_string(), "term-one".to_string()],
        );
        page2.meta.taxonomies = taxo_page2;
        page2.lang = config.default_language.clone();
        library.insert_page(page2);

        let mut page3 = Page::default();
        let mut taxo_page3 = HashMap::new();
        taxo_page3.insert("test-taxonomy ".to_string(), vec!["term one ".to_string()]);
        page3.meta.taxonomies = taxo_page3;
        page3.lang = config.default_language.clone();
        library.insert_page(page3);

        let mut page4 = Page::default();
        let mut taxo_page4 = HashMap::new();
        taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["Term-Two ".to_string()]);
        page4.meta.taxonomies = taxo_page4;
        page4.lang = config.default_language.clone();
        library.insert_page(page4);

        // taxonomies should all be the same
        let taxonomies = find_taxonomies(&config, &library).unwrap();
        assert_eq!(taxonomies.len(), 1);

        let tax = &taxonomies[0];

        // terms should be "term one", "term two"
        assert_eq!(tax.items.len(), 2);

        let term1 = &tax.items[0];
        let term2 = &tax.items[1];

        assert_eq!(term1.name, "term one");
        assert_eq!(term1.slug, "term-one");
        assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/term-one/");
        assert_eq!(term1.pages.len(), 3);

        assert_eq!(term2.name, "Term Two");
        assert_eq!(term2.slug, "term-two");
        assert_eq!(term2.permalink, "http://a-website.com/test-taxonomy/term-two/");
        assert_eq!(term2.pages.len(), 3);
    }

    #[test]
    fn taxonomies_with_unic_are_grouped_with_default_slugify_strategy() {
        let mut config = Config::default();
        let mut library = Library::new(2, 0, false);

        config.taxonomies = vec![
            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
        ];

        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]);
        page1.meta.taxonomies = taxo_page1;
        page1.lang = config.default_language.clone();
        library.insert_page(page1);

        let mut page2 = Page::default();
        let mut taxo_page2 = HashMap::new();
        taxo_page2.insert("test taxonomy".to_string(), vec!["École".to_string()]);
        page2.meta.taxonomies = taxo_page2;
        page2.lang = config.default_language.clone();
        library.insert_page(page2);

        let mut page3 = Page::default();
        let mut taxo_page3 = HashMap::new();
        taxo_page3.insert("test-taxonomy ".to_string(), vec!["ecole".to_string()]);
        page3.meta.taxonomies = taxo_page3;
        page3.lang = config.default_language.clone();
        library.insert_page(page3);

        let mut page4 = Page::default();
        let mut taxo_page4 = HashMap::new();
        taxo_page4.insert("Test-Taxonomy ".to_string(), vec!["école".to_string()]);
        page4.meta.taxonomies = taxo_page4;
        page4.lang = config.default_language.clone();
        library.insert_page(page4);

        // taxonomies should all be the same
        let taxonomies = find_taxonomies(&config, &library).unwrap();
        assert_eq!(taxonomies.len(), 1);

        let tax = &taxonomies[0];

        // under the default slugify strategy all of the provided terms should be the same
        assert_eq!(tax.items.len(), 1);

        let term1 = &tax.items[0];

        assert_eq!(term1.name, "Ecole");
        assert_eq!(term1.slug, "ecole");
        assert_eq!(term1.permalink, "http://a-website.com/test-taxonomy/ecole/");
        assert_eq!(term1.pages.len(), 4);
    }

    #[test]
    fn taxonomies_with_unic_are_not_grouped_with_safe_slugify_strategy() {
        let mut config = Config::default();
        config.slugify = Slugify {
            paths: SlugifyStrategy::Safe,
            taxonomies: SlugifyStrategy::Safe,
            anchors: SlugifyStrategy::Safe,
        };
        let mut library = Library::new(2, 0, false);

        config.taxonomies = vec![
            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
        ];

        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert("test-taxonomy".to_string(), vec!["Ecole".to_string()]);
        page1.meta.taxonomies = taxo_page1;
        page1.lang = config.default_language.clone();
        library.insert_page(page1);

        let mut page2 = Page::default();
        let mut taxo_page2 = HashMap::new();
        taxo_page2.insert("test-taxonomy".to_string(), vec!["École".to_string()]);
        page2.meta.taxonomies = taxo_page2;
        page2.lang = config.default_language.clone();
        library.insert_page(page2);

        let mut page3 = Page::default();
        let mut taxo_page3 = HashMap::new();
        taxo_page3.insert("test-taxonomy".to_string(), vec!["ecole".to_string()]);
        page3.meta.taxonomies = taxo_page3;
        page3.lang = config.default_language.clone();
        library.insert_page(page3);

        let mut page4 = Page::default();
        let mut taxo_page4 = HashMap::new();
        taxo_page4.insert("test-taxonomy".to_string(), vec!["école".to_string()]);
        page4.meta.taxonomies = taxo_page4;
        page4.lang = config.default_language.clone();
        library.insert_page(page4);

        // taxonomies should all be the same
        let taxonomies = find_taxonomies(&config, &library).unwrap();
        let tax = &taxonomies[0];

        // if names are different permalinks should also be different so
        // the items are still accessible
        for term1 in tax.items.iter() {
            for term2 in tax.items.iter() {
                assert!(term1.name == term2.name || term1.permalink != term2.permalink);
            }
        }

        // under the safe slugify strategy all terms should be distinct
        assert_eq!(tax.items.len(), 4);
    }
}
54
components/libs/Cargo.toml
Normal file
@ -0,0 +1,54 @@
[package]
name = "libs"
version = "0.1.0"
edition = "2021"

[dependencies]
ahash = "0.7.6"
ammonia = "3"
atty = "0.2.11"
base64 = "0.13"
csv = "1"
elasticlunr-rs = { version = "3.0.0", features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] }
filetime = "0.2"
gh-emoji = "1"
glob = "0.3"
globset = "0.4"
image = "0.24"
lexical-sort = "0.3"
minify-html = "0.9"
nom-bibtex = "0.3"
num-format = "0.4"
once_cell = "1"
percent-encoding = "2"
pulldown-cmark = { version = "0.9", default-features = false, features = ["simd"] }
quickxml_to_serde = "0.5"
rayon = "1"
regex = "1"
relative-path = "1"
reqwest = { version = "0.11", default-features = false, features = ["blocking"] }
sass-rs = "0.2"
serde_json = "1"
serde_yaml = "0.8"
sha2 = "0.10"
slug = "0.1"
svg_metadata = "0.4"
syntect = "5"
tera = { version = "1", features = ["preserve_order"] }
termcolor = "1.0.4"
time = "0.3"
toml = "0.5"
unic-langid = "0.9"
unicode-segmentation = "1.2"
url = "2"
walkdir = "2"
webp = "0.2"


[features]
# TODO: fix me, it doesn't pick up the reqwest feature if not set as default
default = ["rust-tls"]
rust-tls = ["reqwest/rustls-tls"]
native-tls = ["reqwest/default-tls"]
indexing-zh = ["elasticlunr-rs/zh"]
indexing-ja = ["elasticlunr-rs/ja"]
45
components/libs/src/lib.rs
Normal file
@ -0,0 +1,45 @@
//! This component is only there to re-export libraries used in the rest of the sub-crates
//! without having to add them to each `Cargo.toml`. This way, updating a library version only requires
//! modifying one crate instead of, e.g., updating Tera in the 5 sub-crates using it. It also means that
//! if you want to define features, it is done in a single place.
//! It doesn't work for crates exporting macros like `serde` or for dev deps, but that's fine for most.

pub use ahash;
pub use ammonia;
pub use atty;
pub use base64;
pub use csv;
pub use elasticlunr;
pub use filetime;
pub use gh_emoji;
pub use glob;
pub use globset;
pub use image;
pub use lexical_sort;
pub use minify_html;
pub use nom_bibtex;
pub use num_format;
pub use once_cell;
pub use percent_encoding;
pub use pulldown_cmark;
pub use quickxml_to_serde;
pub use rayon;
pub use regex;
pub use relative_path;
pub use reqwest;
pub use sass_rs;
pub use serde_json;
pub use serde_yaml;
pub use sha2;
pub use slug;
pub use svg_metadata;
pub use syntect;
pub use tera;
pub use termcolor;
pub use time;
pub use toml;
pub use unic_langid;
pub use unicode_segmentation;
pub use url;
pub use walkdir;
pub use webp;
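
With these re-exports in place, a sub-crate only declares a dependency on `libs` and imports everything through it, which is exactly the pattern the `link_checker` diff below switches to. A minimal sketch of a consumer (the `TERA` static is a made-up name for illustration):

// In the sub-crate's Cargo.toml: libs = { path = "../libs" }
use libs::once_cell::sync::Lazy;
use libs::tera::Tera;

// One shared Tera instance, initialized lazily on first use.
static TERA: Lazy<Tera> = Lazy::new(Tera::default);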
@ -1,24 +1,13 @@
[package]
name = "link_checker"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
lazy_static = "1"

config = { path = "../config" }
errors = { path = "../errors" }
utils = { path = "../utils" }

[dependencies.reqwest]
version = "0.11"
default-features = false
features = ["blocking"]
libs = { path = "../libs" }

[dev-dependencies]
mockito = "0.30"

[features]
rust-tls = ["reqwest/rustls-tls"]
native-tls = ["reqwest/default-tls"]
mockito = "0.31"

@ -1,13 +1,15 @@
use lazy_static::lazy_static;
use reqwest::header::{HeaderMap, ACCEPT};
use reqwest::{blocking::Client, StatusCode};

use config::LinkChecker;

use std::collections::HashMap;
use std::result;
use std::sync::{Arc, RwLock};
use utils::links::has_anchor_id;

use libs::once_cell::sync::Lazy;
use libs::reqwest::header::{HeaderMap, ACCEPT};
use libs::reqwest::{blocking::Client, StatusCode};

use config::LinkChecker;
use errors::anyhow;

use utils::anchors::has_anchor_id;

pub type Result = result::Result<StatusCode, String>;

@ -25,10 +27,9 @@ pub fn message(res: &Result) -> String {
    }
}

lazy_static! {
    // Keep history of link checks so a rebuild doesn't have to check again
    static ref LINKS: Arc<RwLock<HashMap<String, Result>>> = Arc::new(RwLock::new(HashMap::new()));
}
// Keep history of link checks so a rebuild doesn't have to check again
static LINKS: Lazy<Arc<RwLock<HashMap<String, Result>>>> =
    Lazy::new(|| Arc::new(RwLock::new(HashMap::new())));

pub fn check_url(url: &str, config: &LinkChecker) -> Result {
    {
@ -42,6 +43,7 @@ pub fn check_url(url: &str, config: &LinkChecker) -> Result {
    headers.insert(ACCEPT, "text/html".parse().unwrap());
    headers.append(ACCEPT, "*/*".parse().unwrap());

    // TODO: pass the client to the check_url, do not pass the config
    let client = Client::builder()
        .user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")))
        .build()
@ -106,11 +108,10 @@ fn check_page_for_anchor(url: &str, body: String) -> errors::Result<()> {
    let index = url.find('#').unwrap();
    let anchor = url.get(index + 1..).unwrap();


    if has_anchor_id(&body, &anchor){
    if has_anchor_id(&body, anchor) {
        Ok(())
    } else {
        Err(errors::Error::from(format!("Anchor `#{}` not found on page", anchor)))
        Err(anyhow!("Anchor `#{}` not found on page", anchor))
    }
}

@ -119,8 +120,8 @@ mod tests {
    use super::{
        check_page_for_anchor, check_url, has_anchor, is_valid, message, LinkChecker, LINKS,
    };
    use libs::reqwest::StatusCode;
    use mockito::mock;
    use reqwest::StatusCode;

    // NOTE: HTTP mock paths below are randomly generated to avoid name
    // collisions. Mocks with the same path can sometimes bleed between tests

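The `LINKS` change above is the migration pattern this PR applies across the crates: a `lazy_static!` macro block becomes a plain `static` built on `once_cell::sync::Lazy`, re-exported through `libs`. A self-contained sketch of the same pattern (`CACHE` is a made-up name):

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

use once_cell::sync::Lazy;

// Initialized on first access; no macro required.
static CACHE: Lazy<Arc<RwLock<HashMap<String, String>>>> =
    Lazy::new(|| Arc::new(RwLock::new(HashMap::new())));
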
19
components/markdown/Cargo.toml
Normal file
@ -0,0 +1,19 @@
[package]
name = "markdown"
version = "0.1.0"
edition = "2021"
include = ["src/**/*"]

[dependencies]
pest = "2"
pest_derive = "2"

errors = { path = "../errors" }
utils = { path = "../utils" }
config = { path = "../config" }
console = { path = "../console" }
libs = { path = "../libs" }

[dev-dependencies]
templates = { path = "../templates" }
insta = "1.12.0"
@ -4,9 +4,9 @@ extern crate test;
use std::collections::HashMap;

use config::Config;
use front_matter::InsertAnchor;
use rendering::{render_content, RenderContext};
use tera::Tera;
use libs::tera::Tera;
use markdown::{render_content, RenderContext};
use utils::types::InsertAnchor;

static CONTENT: &str = r#"
# Modus cognitius profanam ne duae virtutis mundi
@ -85,10 +85,10 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) {
    let mut tera = Tera::default();
    tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
    let current_page_permalink = "";
    let context = RenderContext::new(
    let mut context = RenderContext::new(
        &tera,
        &config,
        &config.default_language,
@ -96,6 +96,8 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) {
        &permalinks_ctx,
        InsertAnchor::None,
    );
    let shortcode_def = utils::templates::get_shortcodes(&tera);
    context.set_shortcode_definitions(&shortcode_def);
    b.iter(|| render_content(CONTENT, &context).unwrap());
}

@ -104,10 +106,10 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) {
    let mut tera = Tera::default();
    tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = false;
    let current_page_permalink = "";
    let context = RenderContext::new(
    let mut context = RenderContext::new(
        &tera,
        &config,
        &config.default_language,
@ -115,13 +117,16 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) {
        &permalinks_ctx,
        InsertAnchor::None,
    );
    let shortcode_def = utils::templates::get_shortcodes(&tera);
    context.set_shortcode_definitions(&shortcode_def);
    b.iter(|| render_content(CONTENT, &context).unwrap());
}

#[bench]
fn bench_render_content_no_shortcode(b: &mut test::Bencher) {
    let tera = Tera::default();
    let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, "");
    let mut config = Config::default();
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = false;
    let permalinks_ctx = HashMap::new();
    let current_page_permalink = "";
@ -141,7 +146,7 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) {
fn bench_render_content_with_emoji(b: &mut test::Bencher) {
    let tera = Tera::default();
    let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, "");
    let mut config = Config::default();
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = false;
    config.markdown.render_emoji = true;
    let permalinks_ctx = HashMap::new();
@ -1,13 +1,13 @@
use std::fmt::Write;

use config::highlighting::{SyntaxAndTheme, CLASS_STYLE};
use syntect::easy::HighlightLines;
use syntect::highlighting::{Color, Theme};
use syntect::html::{
use libs::syntect::easy::HighlightLines;
use libs::syntect::highlighting::{Color, Theme};
use libs::syntect::html::{
    line_tokens_to_classed_spans, styled_line_to_highlighted_html, ClassStyle, IncludeBackground,
};
use syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet};
use tera::escape_html;
use libs::syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet};
use libs::tera::escape_html;

/// Not public, but from syntect::html
fn write_css_color(s: &mut String, c: Color) {
@ -37,13 +37,15 @@ impl<'config> ClassHighlighter<'config> {
    /// also use of the `load_defaults_newlines` version of the syntaxes.
    pub fn highlight_line(&mut self, line: &str) -> String {
        debug_assert!(line.ends_with('\n'));
        let parsed_line = self.parse_state.parse_line(line, self.syntax_set);
        let parsed_line =
            self.parse_state.parse_line(line, self.syntax_set).expect("failed to parse line");
        let (formatted_line, delta) = line_tokens_to_classed_spans(
            line,
            parsed_line.as_slice(),
            CLASS_STYLE,
            &mut self.scope_stack,
        );
        )
        .expect("line_tokens_to_classed_spans should not fail");
        self.open_spans += delta;
        formatted_line
    }
@ -81,12 +83,14 @@ impl<'config> InlineHighlighter<'config> {
    }

    pub fn highlight_line(&mut self, line: &str) -> String {
        let regions = self.h.highlight(line, self.syntax_set);
        let regions =
            self.h.highlight_line(line, self.syntax_set).expect("failed to highlight line");
        // TODO: add a param like `IncludeBackground` for `IncludeForeground` in syntect
        let highlighted = styled_line_to_highlighted_html(
            &regions,
            IncludeBackground::IfDifferent(self.bg_color),
        );
        )
        .expect("styled_line_to_highlighted_html should not error");
        // Spans don't get nested even if the scopes generated by the syntax highlighting do,
        // so this is safe even when some internal scope happens to have the same color
        // as the default foreground color. Also note that `"`s in the original source
@ -192,7 +196,7 @@ mod tests {
    use super::*;
    use config::highlighting::resolve_syntax_and_theme;
    use config::Config;
    use syntect::util::LinesWithEndings;
    use libs::syntect::util::LinesWithEndings;

    #[test]
    fn can_highlight_with_classes() {
@ -3,7 +3,7 @@ mod highlight;

use std::ops::RangeInclusive;

use syntect::util::LinesWithEndings;
use libs::syntect::util::LinesWithEndings;

use crate::codeblock::highlight::SyntaxHighlighter;
use config::highlighting::{resolve_syntax_and_theme, HighlightSource};
@ -162,6 +162,10 @@ impl<'config> CodeBlock<'config> {

            let highlighted_line = self.highlighter.highlight_line(line);
            maybe_mark(&mut buffer, &highlighted_line);

            if self.line_numbers {
                buffer.push_str("</td></tr>");
            }
        }

        if let Some(rest) = self.highlighter.finalize() {
@ -169,7 +173,7 @@ impl<'config> CodeBlock<'config> {
        }

        if self.line_numbers {
            buffer.push_str("</tr></tbody></table>");
            buffer.push_str("</tbody></table>");
        }

        buffer
@ -2,9 +2,9 @@ use std::borrow::Cow;
use std::collections::HashMap;

use config::Config;
use front_matter::InsertAnchor;
use tera::{Context, Tera};
use libs::tera::{Context, Tera};
use utils::templates::ShortcodeDefinition;
use utils::types::InsertAnchor;

/// All the information from the zola site that is needed to render HTML from markdown
#[derive(Debug)]
@ -2,16 +2,14 @@ mod codeblock;
mod context;
mod markdown;
mod shortcode;
mod table_of_contents;

use shortcode::{extract_shortcodes, insert_md_shortcodes};

use errors::Result;

use crate::markdown::markdown_to_html;
pub use crate::markdown::Rendered;
pub use context::RenderContext;
use markdown::markdown_to_html;
pub use markdown::Rendered;
pub use table_of_contents::Heading;

pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> {
    // avoid parsing the content if needed
@ -1,13 +1,19 @@
use lazy_static::lazy_static;
use pulldown_cmark as cmark;
use std::fmt::Write;

use errors::bail;
use libs::gh_emoji::Replacer as EmojiReplacer;
use libs::once_cell::sync::Lazy;
use libs::pulldown_cmark as cmark;
use libs::tera;

use crate::context::RenderContext;
use crate::table_of_contents::{make_table_of_contents, Heading};
use errors::{Error, Result};
use front_matter::InsertAnchor;
use errors::{Context, Error, Result};
use libs::pulldown_cmark::escape::escape_html;
use libs::regex::Regex;
use utils::site::resolve_internal_link;
use utils::slugs::slugify_anchors;
use utils::vec::InsertMany;
use utils::table_of_contents::{make_table_of_contents, Heading};
use utils::types::InsertAnchor;

use self::cmark::{Event, LinkType, Options, Parser, Tag};
use crate::codeblock::{CodeBlock, FenceSettings};
@ -15,6 +21,43 @@ use crate::shortcode::{Shortcode, SHORTCODE_PLACEHOLDER};

const CONTINUE_READING: &str = "<span id=\"continue-reading\"></span>";
const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html";
static EMOJI_REPLACER: Lazy<EmojiReplacer> = Lazy::new(EmojiReplacer::new);

/// Although there exists [a list of registered URI schemes][uri-schemes], a link may use arbitrary,
/// private schemes. This regex checks if the given string starts with something that just looks
/// like a scheme, i.e., a case-insensitive identifier followed by a colon.
///
/// [uri-schemes]: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
static STARTS_WITH_SCHEMA_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[0-9A-Za-z\-]+:").unwrap());

/// Matches a <a>..</a> tag, getting the opening tag in a capture group.
/// Used only with AnchorInsert::Heading to grab it from the template
static A_HTML_TAG: Lazy<Regex> = Lazy::new(|| Regex::new(r"(<\s*a[^>]*>).*?<\s*/\s*a>").unwrap());

/// Efficiently insert multiple elements at their specified indices.
/// The elements should be sorted in ascending order by their index.
///
/// This is done in O(n) time.
fn insert_many<T>(input: &mut Vec<T>, elem_to_insert: Vec<(usize, T)>) {
    let mut inserted = vec![];
    let mut last_idx = 0;

    for (idx, elem) in elem_to_insert.into_iter() {
        let head_len = idx - last_idx;
        inserted.extend(input.splice(0..head_len, std::iter::empty()));
        inserted.push(elem);
        last_idx = idx;
    }
    let len = input.len();
    inserted.extend(input.drain(0..len));

    *input = inserted;
}
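
// To make the splice logic concrete, a worked trace (it matches the
// `insert_many_works` test at the bottom of this file):
//   insert_many(&mut vec![1, 2, 3, 4, 5], vec![(0, 0), (2, -1), (5, 6)])
//   idx 0: head_len 0, splice nothing, push 0          -> inserted = [0]
//   idx 2: splice the next 2 items (1, 2), push -1     -> inserted = [0, 1, 2, -1]
//   idx 5: splice the next 3 items (3, 4, 5), push 6   -> inserted = [0, 1, 2, -1, 3, 4, 5, 6]
//   then drain whatever remains (nothing here) and replace `input`.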

/// Colocated asset links refer to files in the same directory.
fn is_colocated_asset_link(link: &str) -> bool {
    !link.starts_with('/') && !link.starts_with('#') && !STARTS_WITH_SCHEMA_RE.is_match(link)
}
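
// The check is purely syntactic; a few illustrative cases, consistent with the
// function body and the scheme regex above:
//   is_colocated_asset_link("image.png")          -> true  (relative, no scheme)
//   is_colocated_asset_link("/static/image.png")  -> false (site-absolute path)
//   is_colocated_asset_link("#section")           -> false (fragment)
//   is_colocated_asset_link("mailto:foo@bar.tld") -> false (looks like a scheme)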

#[derive(Debug)]
pub struct Rendered {
@ -34,11 +77,39 @@ struct HeadingRef {
    end_idx: usize,
    level: u32,
    id: Option<String>,
    classes: Vec<String>,
}

impl HeadingRef {
    fn new(start: usize, level: u32) -> HeadingRef {
        HeadingRef { start_idx: start, end_idx: 0, level, id: None }
    fn new(start: usize, level: u32, anchor: Option<String>, classes: &[String]) -> HeadingRef {
        HeadingRef { start_idx: start, end_idx: 0, level, id: anchor, classes: classes.to_vec() }
    }

    fn to_html(&self, id: &str) -> String {
        let mut buffer = String::with_capacity(100);
        buffer.write_str("<h").unwrap();
        buffer.write_str(&format!("{}", self.level)).unwrap();

        buffer.write_str(" id=\"").unwrap();
        escape_html(&mut buffer, id).unwrap();
        buffer.write_str("\"").unwrap();

        if !self.classes.is_empty() {
            buffer.write_str(" class=\"").unwrap();
            let num_classes = self.classes.len();

            for (i, class) in self.classes.iter().enumerate() {
                escape_html(&mut buffer, class).unwrap();
                if i < num_classes - 1 {
                    buffer.write_str(" ").unwrap();
                }
            }

            buffer.write_str("\"").unwrap();
        }

        buffer.write_str(">").unwrap();
        buffer
    }
}

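// For reference, `to_html` emits only the opening tag; with level 2,
// id "my-heading" and classes ["bold", "another"], the writer logic
// above would produce:
//   <h2 id="my-heading" class="bold another">
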
@ -86,24 +157,35 @@ fn fix_link(
                resolved.permalink
            }
            Err(_) => {
                return Err(format!("Relative link {} not found.", link).into());
                let msg = format!(
                    "Broken relative link `{}` in {}",
                    link,
                    context.current_page_path.unwrap_or("unknown"),
                );
                match context.config.link_checker.internal_level {
                    config::LinkCheckerLevel::Error => bail!(msg),
                    config::LinkCheckerLevel::Warn => {
                        console::warn(&msg);
                        link.to_string()
                    }
                }
            }
        }
    } else {
        if is_external_link(link) {
            external_links.push(link.to_owned());
            link.to_owned()
        } else if link.starts_with("#") {
            // local anchor without the internal zola path
            if let Some(current_path) = context.current_page_path {
                internal_links.push((current_path.to_owned(), Some(link[1..].to_owned())));
                format!("{}{}", context.current_page_permalink, &link)
            } else {
                link.to_string()
            }
        } else if is_external_link(link) {
            external_links.push(link.to_owned());
            link.to_owned()
        } else if link == "#" {
            link.to_string()
        } else if let Some(stripped_link) = link.strip_prefix('#') {
            // local anchor without the internal zola path
            if let Some(current_path) = context.current_page_path {
                internal_links.push((current_path.to_owned(), Some(stripped_link.to_owned())));
                format!("{}{}", context.current_page_permalink, &link)
            } else {
                link.to_string()
            }
        } else {
            link.to_string()
        }
    };

    Ok(result)
@ -128,10 +210,15 @@ fn get_heading_refs(events: &[Event]) -> Vec<HeadingRef> {

    for (i, event) in events.iter().enumerate() {
        match event {
            Event::Start(Tag::Heading(level)) => {
                heading_refs.push(HeadingRef::new(i, *level));
            Event::Start(Tag::Heading(level, anchor, classes)) => {
                heading_refs.push(HeadingRef::new(
                    i,
                    *level as u32,
                    anchor.map(|a| a.to_owned()),
                    &classes.iter().map(|x| x.to_string()).collect::<Vec<_>>(),
                ));
            }
            Event::End(Tag::Heading(_)) => {
            Event::End(Tag::Heading(_, _, _)) => {
                heading_refs.last_mut().expect("Heading end before start?").end_idx = i;
            }
            _ => (),
@ -146,10 +233,6 @@ pub fn markdown_to_html(
    context: &RenderContext,
    html_shortcodes: Vec<Shortcode>,
) -> Result<Rendered> {
    lazy_static! {
        static ref EMOJI_REPLACER: gh_emoji::Replacer = gh_emoji::Replacer::new();
    }

    let path = context
        .tera_context
        .get("page")
@ -162,7 +245,6 @@ pub fn markdown_to_html(

    let mut code_block: Option<CodeBlock> = None;

    let mut inserted_anchors: Vec<String> = vec![];
    let mut headings: Vec<Heading> = vec![];
    let mut internal_links = Vec::new();
    let mut external_links = Vec::new();
@ -175,6 +257,7 @@ pub fn markdown_to_html(
    opts.insert(Options::ENABLE_FOOTNOTES);
    opts.insert(Options::ENABLE_STRIKETHROUGH);
    opts.insert(Options::ENABLE_TASKLISTS);
    opts.insert(Options::ENABLE_HEADING_ATTRIBUTES);
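    // ENABLE_HEADING_ATTRIBUTES makes pulldown-cmark parse heading attributes such as
    // `# Title {#custom-id .class1 .class2}` and expose them on Tag::Heading, which
    // get_heading_refs() picks up; see the `can_handle_heading_ids` test in this PR.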

    if context.config.markdown.smart_punctuation {
        opts.insert(Options::ENABLE_SMART_PUNCTUATION);
@ -238,13 +321,12 @@ pub fn markdown_to_html(
        };
    }

    let mut accumulated_block = String::new();
    for (event, mut range) in Parser::new_ext(content, opts).into_offset_iter() {
        match event {
            Event::Text(text) => {
                if let Some(ref mut code_block) = code_block {
                    let html;
                if let Some(ref mut _code_block) = code_block {
                    if contains_shortcode(text.as_ref()) {
                        let mut accumulated_block = String::new();
                        // mark the start of the code block events
                        let stack_start = events.len();
                        render_shortcodes!(true, text, range);
@ -262,13 +344,12 @@ pub fn markdown_to_html(
                        }
                    }
                }
                    html = code_block.highlight(&accumulated_block);

                        // remove all the original events from shortcode rendering
                        events.truncate(stack_start);
                    } else {
                    html = code_block.highlight(&text);
                        accumulated_block += &text;
                    }
                    events.push(Event::Html(html.into()));
                } else {
                    let text = if context.config.markdown.render_emoji {
                        EMOJI_REPLACER.replace_all(&text).to_string().into()
@ -294,10 +375,24 @@ pub fn markdown_to_html(
                events.push(Event::Html(begin.into()));
            }
            Event::End(Tag::CodeBlock(_)) => {
                if let Some(ref mut code_block) = code_block {
                    let html = code_block.highlight(&accumulated_block);
                    events.push(Event::Html(html.into()));
                    accumulated_block.clear();
                }

                // reset highlight and close the code block
                code_block = None;
                events.push(Event::Html("</code></pre>\n".into()));
            }
            Event::Start(Tag::Image(link_type, src, title)) => {
                if is_colocated_asset_link(&src) {
                    let link = format!("{}{}", context.current_page_permalink, &*src);
                    events.push(Event::Start(Tag::Image(link_type, link.into(), title)));
                } else {
                    events.push(Event::Start(Tag::Image(link_type, src, title)));
                }
            }
            Event::Start(Tag::Link(link_type, link, title)) if link.is_empty() => {
                error = Some(Error::msg("There is a link that is missing a URL"));
                events.push(Event::Start(Tag::Link(link_type, "#".into(), title)));
@ -390,45 +485,34 @@ pub fn markdown_to_html(
        })
        .collect();

    let mut heading_refs = get_heading_refs(&events);
    let heading_refs = get_heading_refs(&events);

    let mut anchors_to_insert = vec![];

    // First heading pass: look for a manually-specified IDs, e.g. `# Heading text {#hash}`
    // (This is a separate first pass so that auto IDs can avoid collisions with manual IDs.)
    for heading_ref in heading_refs.iter_mut() {
        let end_idx = heading_ref.end_idx;
        if let Event::Text(ref mut text) = events[end_idx - 1] {
            if text.as_bytes().last() == Some(&b'}') {
                if let Some(mut i) = text.find("{#") {
                    let id = text[i + 2..text.len() - 1].to_owned();
                    inserted_anchors.push(id.clone());
                    while i > 0 && text.as_bytes()[i - 1] == b' ' {
                        i -= 1;
                    }
                    heading_ref.id = Some(id);
                    *text = text[..i].to_owned().into();
                }
            }
    let mut inserted_anchors = vec![];
    for heading in &heading_refs {
        if let Some(s) = &heading.id {
            inserted_anchors.push(s.to_owned());
        }
    }

    // Second heading pass: auto-generate remaining IDs, and emit HTML
    for heading_ref in heading_refs {
    for mut heading_ref in heading_refs {
        let start_idx = heading_ref.start_idx;
        let end_idx = heading_ref.end_idx;
        let title = get_text(&events[start_idx + 1..end_idx]);
        let id = heading_ref.id.unwrap_or_else(|| {
            find_anchor(

        if heading_ref.id.is_none() {
            heading_ref.id = Some(find_anchor(
                &inserted_anchors,
                slugify_anchors(&title, context.config.slugify.anchors),
                0,
            )
        });
        inserted_anchors.push(id.clone());
            ));
        }

        // insert `id` to the tag
        let html = format!("<h{lvl} id=\"{id}\">", lvl = heading_ref.level, id = id);
        inserted_anchors.push(heading_ref.id.clone().unwrap());
        let id = inserted_anchors.last().unwrap();

        let html = heading_ref.to_html(id);
        events[start_idx] = Event::Html(html.into());

        // generate anchors and places to insert them
@ -436,7 +520,8 @@ pub fn markdown_to_html(
            let anchor_idx = match context.insert_anchor {
                InsertAnchor::Left => start_idx + 1,
                InsertAnchor::Right => end_idx,
                InsertAnchor::None => 0, // Not important
                InsertAnchor::Heading => 0, // modified later to the correct value
                InsertAnchor::None => unreachable!(),
            };
            let mut c = tera::Context::new();
            c.insert("id", &id);
@ -449,19 +534,32 @@ pub fn markdown_to_html(
                c,
                &None,
            )
            .map_err(|e| Error::chain("Failed to render anchor link template", e))?;
            anchors_to_insert.push((anchor_idx, Event::Html(anchor_link.into())));
            .context("Failed to render anchor link template")?;
            if context.insert_anchor != InsertAnchor::Heading {
                anchors_to_insert.push((anchor_idx, Event::Html(anchor_link.into())));
            } else {
                if let Some(captures) = A_HTML_TAG.captures(&anchor_link) {
                    let opening_tag = captures.get(1).map_or("", |m| m.as_str()).to_string();
                    anchors_to_insert.push((start_idx + 1, Event::Html(opening_tag.into())));
                    anchors_to_insert.push((end_idx, Event::Html("</a>".into())));
                }
            }
        }

        // record heading to make table of contents
        let permalink = format!("{}#{}", context.current_page_permalink, id);
        let h =
            Heading { level: heading_ref.level, id, permalink, title, children: Vec::new() };
        let h = Heading {
            level: heading_ref.level,
            id: id.to_owned(),
            permalink,
            title,
            children: Vec::new(),
        };
        headings.push(h);
    }

    if context.insert_anchor != InsertAnchor::None {
        events.insert_many(anchors_to_insert);
        insert_many(&mut events, anchors_to_insert);
    }

    cmark::html::push_html(&mut html, events.into_iter());
@ -483,6 +581,17 @@ pub fn markdown_to_html(
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn insert_many_works() {
        let mut v = vec![1, 2, 3, 4, 5];
        insert_many(&mut v, vec![(0, 0), (2, -1), (5, 6)]);
        assert_eq!(v, &[0, 1, 2, -1, 3, 4, 5, 6]);

        let mut v2 = vec![1, 2, 3, 4, 5];
        insert_many(&mut v2, vec![(0, 0), (2, -1)]);
        assert_eq!(v2, &[0, 1, 2, -1, 3, 4, 5]);
    }

    #[test]
    fn test_is_external_link() {
@ -1,6 +1,7 @@
use std::collections::HashMap;

use errors::{Error, Result};
use libs::tera;
use utils::templates::{ShortcodeDefinition, ShortcodeFileType};

mod parser;
@ -1,11 +1,11 @@
use std::ops::Range;

use errors::{bail, Result};
use errors::{bail, Context as ErrorContext, Result};
use libs::tera::{to_value, Context, Map, Tera, Value};
use pest::iterators::Pair;
use pest::Parser;
use pest_derive::Parser;
use std::collections::HashMap;
use tera::{to_value, Context, Map, Tera, Value};
use utils::templates::ShortcodeFileType;

pub const SHORTCODE_PLACEHOLDER: &str = "@@ZOLA_SC_PLACEHOLDER@@";
@ -43,7 +43,7 @@ impl Shortcode {
        new_context.extend(context.clone());

        let res = utils::templates::render_template(&tpl_name, tera, new_context, &None)
            .map_err(|e| errors::Error::chain(format!("Failed to render {} shortcode", name), e))?
            .with_context(|| format!("Failed to render {} shortcode", name))?
            .replace("\r\n", "\n");

        Ok(res)
@ -481,10 +481,10 @@ mod tests {
    fn can_handle_multiple_shortcodes() {
        let (_, shortcodes) = parse_for_shortcodes(
            r#"
{{ youtube(id="ub36ffWAqgQ") }}
{{ youtube(id="ub36ffWAqgQ_hey_") }}
{{ youtube(id="ub36ffWAqgQ", autoplay=true) }}
{{ vimeo(id="210073083") }}
{{ streamable(id="c0ic") }}
{{ vimeo(id="210073083#hello", n_a_me="hello") }}
{{ streamable(id="c0ic", n1=true) }}
{{ gist(url="https://gist.github.com/Keats/32d26f699dcc13ebd41b") }}"#,
        )
        .unwrap();
343
components/markdown/tests/codeblocks.rs
Normal file
@ -0,0 +1,343 @@
use config::Config;

mod common;

fn render_codeblock(content: &str, highlight_code: bool) -> String {
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = highlight_code;
    common::render_with_config(content, config).unwrap().body
}

#[test]
fn does_nothing_with_highlighting_disabled() {
    let body = render_codeblock(
        r#"
```
foo
bar
```
"#,
        false,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_hide_lines() {
    let body = render_codeblock(
        r#"
```hide_lines=2
foo
bar
baz
bat
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_single_line() {
    let body = render_codeblock(
        r#"
```hl_lines=2
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_line_range() {
    let body = render_codeblock(
        r#"
```hl_lines=2-3
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_all_lines() {
    let body = render_codeblock(
        r#"
```hl_lines=1-4
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_zero_start_same_as_one() {
    let body = render_codeblock(
        r#"
```hl_lines=0-3
foo
bar
bar
baz
```
"#,
        true,
    );
    let body2 = render_codeblock(
        r#"
```hl_lines=1-3
foo
bar
bar
baz
```
"#,
        true,
    );
    assert_eq!(body, body2);
}

#[test]
fn can_highlight_at_end() {
    let body = render_codeblock(
        r#"
```hl_lines=3-4
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_out_of_bounds() {
    let body = render_codeblock(
        r#"
```hl_lines=3-4567898765
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_ranges_overlap() {
    let body = render_codeblock(
        r#"
```hl_lines=2-3 1-2
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_weird_fence_tokens() {
    let body = render_codeblock(
        r#"
```hl_lines=2-3, hl_lines = 1 - 2
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_mix_line_ranges() {
    let body = render_codeblock(
        r#"
```hl_lines=1 3-4
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_single_line_range() {
    let body = render_codeblock(
        r#"
```hl_lines=2-2
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_reversed_range() {
    let body = render_codeblock(
        r#"
```hl_lines=3-2
foo
bar
bar
baz
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_add_line_numbers() {
    let body = render_codeblock(
        r#"
```linenos
foo
bar
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_add_line_numbers_windows_eol() {
    let body = render_codeblock("```linenos\r\nfoo\r\nbar\r\n```\r\n", true);
    insta::assert_snapshot!(body);
}

#[test]
fn can_add_line_numbers_with_lineno_start() {
    let body = render_codeblock(
        r#"
```linenos, linenostart=40
foo
bar
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_add_line_numbers_with_highlight() {
    let body = render_codeblock(
        r#"
```linenos, hl_lines=2
foo
bar
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_shortcode_in_codeblock() {
    let body = render_codeblock(
        r#"
```html,linenos
<div id="custom-attr">
{{ out_put_id(id="dQw4w9WgXcQ") }}
</div>
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_multiple_shortcodes_in_codeblock() {
    let body = render_codeblock(
        r#"
```linenos
text1
{{ out_put_id(id="first") }}
text2
{{ out_put_id(id="second") }}
text3
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_completely_mixed_codeblock() {
    let body = render_codeblock(
        r#"
```html,linenos
<a href="javascript:void(0);">{{/* before(texts="1") */}}</a>
Normally people would not write something & like <> this:
<div id="custom-attr">
An inline {{ out_put_id(id="dQw4w9WgXcQ") }} shortcode
</div>
Plain text in-between
{%/* quote(author="Vincent") */%}
A quote
{%/* end */%}
{# A Tera comment, you should see it #}
<!-- end text goes here -->
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}

#[test]
fn can_highlight_unknown_lang() {
    let body = render_codeblock(
        r#"
```rustscript
foo
bar
```
"#,
        true,
    );
    insta::assert_snapshot!(body);
}
87
components/markdown/tests/common.rs
Normal file
@ -0,0 +1,87 @@
#![allow(dead_code)]

use std::collections::HashMap;

use libs::tera::Tera;

use config::Config;
use errors::Result;
use markdown::{render_content, RenderContext, Rendered};
use templates::ZOLA_TERA;
use utils::types::InsertAnchor;

fn configurable_render(
    content: &str,
    config: Config,
    insert_anchor: InsertAnchor,
) -> Result<Rendered> {
    let mut tera = Tera::default();
    tera.extend(&ZOLA_TERA).unwrap();

    // out_put_id looks like a markdown string
    tera.add_raw_template("shortcodes/out_put_id.html", "{{id}}").unwrap();
    tera.add_raw_template(
        "shortcodes/image.html",
        "<img src='https://placekitten.com/200/300' alt='{{alt}}'></img>",
    )
    .unwrap();
    tera.add_raw_template("shortcodes/split_lines.html", r#"{{ body | split(pat="\n") }}"#)
        .unwrap();
    tera.add_raw_template("shortcodes/ex1.html", "1").unwrap();
    tera.add_raw_template("shortcodes/ex2.html", "2").unwrap();
    tera.add_raw_template("shortcodes/ex3.html", "3").unwrap();
    tera.add_raw_template("shortcodes/with_tabs.html", "<div>\n\tHello World!\n </div>")
        .unwrap();
    tera.add_raw_template(
        "shortcodes/web_component.html",
        "<bc-authorizer-example><code>{{ body | safe}}</code></bc-authorizer-example>",
    )
    .unwrap();
    tera.add_raw_template("shortcodes/render_md.html", "<div>{{ body | markdown | safe}}</div>")
        .unwrap();
    tera.add_raw_template("shortcodes/a.html", "<p>a: {{ nth }}</p>").unwrap();
    tera.add_raw_template("shortcodes/b.html", "<p>b: {{ nth }}</p>").unwrap();
    tera.add_raw_template("shortcodes/quote.html", "<quote>{{body}}</quote>").unwrap();
    tera.add_raw_template("shortcodes/pre.html", "<pre>{{body}}</pre>").unwrap();
    tera.add_raw_template("shortcodes/four_spaces.html", "    no highlight\n    or there").unwrap();
    tera.add_raw_template("shortcodes/i18n.html", "{{lang}}").unwrap();
    tera.add_raw_template(
        "shortcodes/book.md",
        "",
    )
    .unwrap();
    tera.add_raw_template("shortcodes/md_passthrough.md", "{{body}}").unwrap();

    let mut permalinks = HashMap::new();
    permalinks.insert("pages/about.md".to_owned(), "https://getzola.org/about/".to_owned());

    tera.register_filter(
        "markdown",
        templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone()),
    );
    let mut context = RenderContext::new(
        &tera,
        &config,
        &config.default_language,
        "https://www.getzola.org/test/",
        &permalinks,
        insert_anchor,
    );
    let shortcode_def = utils::templates::get_shortcodes(&tera);
    context.set_shortcode_definitions(&shortcode_def);
    context.set_current_page_path("my_page.md");

    render_content(content, &context)
}

pub fn render(content: &str) -> Result<Rendered> {
    configurable_render(content, Config::default_for_test(), InsertAnchor::None)
}

pub fn render_with_config(content: &str, config: Config) -> Result<Rendered> {
    configurable_render(content, config, InsertAnchor::None)
}

pub fn render_with_insert_anchor(content: &str, insert_anchor: InsertAnchor) -> Result<Rendered> {
    configurable_render(content, Config::default_for_test(), insert_anchor)
}
55
components/markdown/tests/links.rs
Normal file
@ -0,0 +1,55 @@
mod common;

#[test]
fn can_detect_links() {
    // no links
    let rendered = common::render("Hello World!").unwrap();
    assert_eq!(rendered.internal_links.len(), 0);
    assert_eq!(rendered.external_links.len(), 0);

    // external
    let rendered = common::render("[abc](https://google.com/)").unwrap();
    assert_eq!(rendered.internal_links.len(), 0);
    assert_eq!(rendered.external_links.len(), 1);
    assert_eq!(rendered.external_links[0], "https://google.com/");

    // internal
    let rendered = common::render("[abc](@/pages/about.md)").unwrap();
    assert_eq!(rendered.internal_links, vec![("pages/about.md".to_owned(), None)]);
    assert_eq!(rendered.external_links.len(), 0);

    // internal with anchors
    let rendered = common::render("[abc](@/pages/about.md#hello)").unwrap();
    assert_eq!(rendered.internal_links[0], ("pages/about.md".to_owned(), Some("hello".to_owned())));
    assert_eq!(rendered.external_links.len(), 0);

    // internal link referring to self
    let rendered = common::render("[abc](#hello)").unwrap();
    assert_eq!(rendered.internal_links.len(), 1);
    assert_eq!(rendered.internal_links[0], ("my_page.md".to_owned(), Some("hello".to_owned())));
    assert_eq!(rendered.external_links.len(), 0);

    // Mixed with various protocols
    let rendered = common::render(
        "
[a link](http://google.com)
[a link](http://google.fr)
Email: [foo@bar.baz](mailto:foo@bar.baz)
Email: <foo@bar.baz>",
    )
    .unwrap();
    assert_eq!(rendered.internal_links.len(), 0);
    assert_eq!(
        rendered.external_links,
        &["http://google.com".to_owned(), "http://google.fr".to_owned()]
    );

    // Not pointing to anything known so that's an error
    let res = common::render("[abc](@/def/123.md)");
    assert!(res.is_err());

    // Empty link is an error as well
    let res = common::render("[abc]()");
    assert!(res.is_err());
    assert_eq!(res.unwrap_err().to_string(), "There is a link that is missing a URL");
}
357
components/markdown/tests/markdown.rs
Normal file
@ -0,0 +1,357 @@
use std::collections::HashMap;

use libs::tera::Tera;

use config::Config;
use markdown::{render_content, RenderContext};
use templates::ZOLA_TERA;
use utils::slugs::SlugifyStrategy;
use utils::types::InsertAnchor;

mod common;

#[test]
fn can_render_basic_markdown() {
    let cases = vec![
"",
|
||||
"<h1>some html</h1>",
|
||||
];
|
||||
|
||||
let body = common::render(&cases.join("\n")).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_make_zola_internal_links() {
|
||||
let body = common::render(
|
||||
r#"
|
||||
[rel link](@/pages/about.md)
|
||||
[rel link with anchor](@/pages/about.md#cv)
|
||||
[abs link](https://getzola.org/about/)
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
.body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_handle_heading_ids() {
|
||||
let mut config = Config::default_for_test();
|
||||
|
||||
    let cases = vec![
        // https://github.com/Keats/gutenberg/issues/297
        "# [Rust](https://rust-lang.org \"Rust homepage\")",
        // and then some markdown in them
        "# `hi`",
        "# *hi*",
        "# **hi**",
        // See https://github.com/getzola/zola/issues/569
        "# text [^1] there\n[^1]: footnote",
        // Chosen slug that already exists with space
        "# Classes {#classes .bold .another}",
    ];
    let body = common::render_with_config(&cases.join("\n"), config.clone()).unwrap().body;
    insta::assert_snapshot!(body);

    // And now test without slugifying everything
    config.slugify.anchors = SlugifyStrategy::Safe;
    let body = common::render_with_config(&cases.join("\n"), config).unwrap().body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_insert_anchors() {
let cases = vec",
|
||||
"# Hello*_()",
|
||||
];
|
||||
let body =
|
||||
common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Left).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
let body =
|
||||
common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Right).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
let body =
|
||||
common::render_with_insert_anchor(&cases.join("\n"), InsertAnchor::Heading).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_customise_anchor_template() {
|
||||
let mut tera = Tera::default();
|
||||
tera.extend(&ZOLA_TERA).unwrap();
|
||||
tera.add_raw_template("anchor-link.html", " (in {{ lang }})").unwrap();
|
||||
let permalinks_ctx = HashMap::new();
|
||||
let config = Config::default_for_test();
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
&config.default_language,
|
||||
"",
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::Right,
|
||||
);
|
||||
let body = render_content("# Hello", &context).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_use_smart_punctuation() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.markdown.smart_punctuation = true;
|
||||
let body = common::render_with_config(r#"This -- is "it"..."#, config).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_use_external_links_options() {
|
||||
let mut config = Config::default_for_test();
|
||||
|
||||
// no options
|
||||
let body = common::render("<https://google.com>").unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
|
||||
// target blank
|
||||
config.markdown.external_links_target_blank = true;
|
||||
let body = common::render_with_config("<https://google.com>", config.clone()).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
|
||||
// no follow
|
||||
config.markdown.external_links_target_blank = false;
|
||||
config.markdown.external_links_no_follow = true;
|
||||
let body = common::render_with_config("<https://google.com>", config.clone()).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
|
||||
// no referrer
|
||||
config.markdown.external_links_no_follow = false;
|
||||
config.markdown.external_links_no_referrer = true;
|
||||
let body = common::render_with_config("<https://google.com>", config.clone()).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
|
||||
// all of them
|
||||
config.markdown.external_links_no_follow = true;
|
||||
config.markdown.external_links_target_blank = true;
|
||||
config.markdown.external_links_no_referrer = true;
|
||||
let body = common::render_with_config("<https://google.com>", config).unwrap().body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_render_emojis() {
|
||||
let mut config = Config::default_for_test();
|
||||
config.markdown.render_emoji = true;
|
||||
let body = common::render_with_config("Hello, World! :smile:", config).unwrap().body;
|
||||
assert_eq!(body, "<p>Hello, World! 😄</p>\n");
|
||||
}
|
||||
|
||||
// https://github.com/getzola/zola/issues/747
|
||||
// https://github.com/getzola/zola/issues/816
|
||||
#[test]
|
||||
fn custom_url_schemes_are_untouched() {
|
||||
let body = common::render(
|
||||
r#"
|
||||
[foo@bar.tld](xmpp:foo@bar.tld)
|
||||
|
||||
[(123) 456-7890](tel:+11234567890)
|
||||
|
||||
[blank page](about:blank)
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
.body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_markdown_features_integration() {
|
||||
let body = common::render(
|
||||
r#"
|
||||
<!-- Adapted from https://markdown-it.github.io/ -->
|
||||
|
||||
# h1 Heading
|
||||
|
||||
## h2 Heading
|
||||
|
||||
### h3 Heading
|
||||
|
||||
#### h4 Heading
|
||||
|
||||
##### h5 Heading
|
||||
|
||||
###### h6 Heading
|
||||
|
||||
## Horizontal Rules
|
||||
|
||||
___
|
||||
|
||||
---
|
||||
|
||||
***
|
||||
|
||||
## Emphasis
|
||||
|
||||
**This is bold text**
|
||||
|
||||
__This is bold text__
|
||||
|
||||
*This is italic text*
|
||||
|
||||
_This is italic text_
|
||||
|
||||
~~Strikethrough~~
|
||||
|
||||
|
||||
## Blockquotes
|
||||
|
||||
|
||||
> Blockquotes can also be nested...
|
||||
>> ...by using additional greater-than signs right next to each other...
|
||||
> > > ...or with spaces between arrows.
|
||||
|
||||
|
||||
## Lists
|
||||
|
||||
Unordered
|
||||
|
||||
+ Create a list by starting a line with `+`, `-`, or `*`
|
||||
+ Sub-lists are made by indenting 2 spaces:
|
||||
- Marker character change forces new list start:
|
||||
* Ac tristique libero volutpat at
|
||||
+ Facilisis in pretium nisl aliquet
|
||||
- Nulla volutpat aliquam velit
|
||||
+ Very easy!
|
||||
|
||||
Ordered
|
||||
|
||||
1. Lorem ipsum dolor sit amet
|
||||
2. Consectetur adipiscing elit
|
||||
3. Integer molestie lorem at massa
|
||||
|
||||
|
||||
1. You can use sequential numbers...
|
||||
1. ...or keep all the numbers as `1.`
|
||||
|
||||
Start numbering with offset:
|
||||
|
||||
57. foo
|
||||
1. bar
|
||||
|
||||
|
||||
## Code
|
||||
|
||||
Inline `code`
|
||||
|
||||
Indented code
|
||||
|
||||
// Some comments
|
||||
line 1 of code
|
||||
line 2 of code
|
||||
line 3 of code
|
||||
|
||||
|
||||
Block code "fences"
|
||||
|
||||
```
|
||||
Sample text here...
|
||||
```
|
||||
|
||||
Syntax highlighting
|
||||
|
||||
``` js
|
||||
var foo = function (bar) {
|
||||
return bar++;
|
||||
};
|
||||
|
||||
console.log(foo(5));
|
||||
```
|
||||
|
||||
## Shortcodes
|
||||
|
||||
## Tables
|
||||
|
||||
| Option | Description |
|
||||
| ------ | ----------- |
|
||||
| data | path to data files to supply the data that will be passed into templates. |
|
||||
| engine | engine to be used for processing templates. Handlebars is the default. |
|
||||
| ext | extension to be used for dest files. |
|
||||
|
||||
Right aligned columns
|
||||
|
||||
| Option | Description |
|
||||
| ------:| -----------:|
|
||||
| data | path to data files to supply the data that will be passed into templates. |
|
||||
| engine | engine to be used for processing templates. Handlebars is the default. |
|
||||
| ext | extension to be used for dest files. |
|
||||
|
||||
|
||||
## Links
|
||||
|
||||
[link text](http://duckduckgo.com)
|
||||
|
||||
[link with title](http://duckduckgo.com/ "Duck duck go")
|
||||
|
||||
## Images
|
||||
|
||||

|
||||

|
||||
|
||||
Like links, Images also have a footnote style syntax
|
||||
|
||||
![Alt text][id]
|
||||
|
||||
With a reference later in the document defining the URL location:
|
||||
|
||||
[id]: https://octodex.github.com/images/dojocat.jpg "The Dojocat"
|
||||
|
||||
## Smileys
|
||||
|
||||
Like :smile:, :cry:
|
||||
|
||||
### Footnotes
|
||||
|
||||
Footnote 1 link[^first].
|
||||
|
||||
Footnote 2 link[^second].
|
||||
|
||||
Duplicated footnote reference[^second].
|
||||
|
||||
[^first]: Footnote **can have markup**
|
||||
and multiple paragraphs.
|
||||
|
||||
[^second]: Footnote text.
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
.body;
|
||||
insta::assert_snapshot!(body);
|
||||
}
|
313
components/markdown/tests/shortcodes.rs
Normal file
@ -0,0 +1,313 @@
use config::Config;

mod common;

#[test]
fn can_render_simple_text_with_shortcodes() {
    let body = common::render(
        r#"
hello {{ out_put_id(id="shortcode-id") }}

{% quote() %}
A quote
{% end %}

{{ out_put_id(id="shortcode-id2") }}

{{ out_put_id(id="shortcode-id3") }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_grab_lang_in_html_shortcodes() {
    let body = common::render(
        r#"
hello in {{ i18n() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_grab_lang_in_md_shortcodes() {
    let body = common::render(
        r#"
{{ book() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_body_shortcode_and_paragraph_after() {
    let body = common::render(
        r#"
{% quote() %}
This is a quote
{% end %}

Here is another paragraph.
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_two_body_shortcode_and_paragraph_after_with_line_break_between() {
    let body = common::render(
        r#"
{% quote() %}
This is a quote
{% end %}

{% quote() %}
This is a quote
{% end %}

Here is another paragraph.
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn doesnt_render_ignored_shortcodes() {
    let body = common::render(
        r#"
{{/* youtube(id="w7Ft2ymGmfc") */}}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/Keats/gutenberg/issues/522
#[test]
fn doesnt_try_to_highlight_content_from_shortcode() {
    let body = common::render(
        r#"
{{ four_spaces() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_emit_newlines_and_whitespace_with_shortcode() {
    let body = common::render(
        r#"
{% pre() %}
Hello

Zola

!

{% end %}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_passthrough_markdown_from_shortcode() {
    let body = common::render(
        r#"
Hello

{% md_passthrough() %}
# Passing through

*to* **the** document
{% end %}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/getzola/zola/issues/1172
#[test]
fn doesnt_escape_html_shortcodes() {
    let body = common::render(
        r#"
{{ image(alt="something") }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn errors_on_unknown_shortcodes() {
    let body = common::render(
        r#"
{{ unknown() }}
    "#,
    );
    assert!(body.is_err());
}

// https://github.com/getzola/zola/issues/1172
#[test]
fn can_render_commented_out_shortcodes() {
    let body = common::render(
        r#"
<!-- {{ image(alt="something") }} -->
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn invocation_count_increments_in_shortcode() {
    let body = common::render(
        r#"
{{ a() }}
{{ b() }}
{{ a() }}
{{ b() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/getzola/zola/issues/1689
#[test]
fn html_shortcode_regression() {
    let inputs = vec![
        r#"{{ ex2(page="") }} {{ ex1(page="") }} {{ ex3(page="std") }}"#,
        r#"<p>{{ ex2(page="") }} {{ ex1(page="") }} {{ ex3(page="std") }}</p>"#, // in html
        r#"<p>\n{{ ex2(page='') }}\n</p>"#, // with newlines
        r#"<span>{{ ex2(page='') }}</span>\n**The Book** {{ ex2(page='') }}"#,
        r#"a.{{ ex2(page="") }} b.{{ ex1(page="") }} c.{{ ex3(page="std") }}"#,
    ];

    for input in inputs {
        let body = common::render(input).unwrap().body;
        insta::assert_snapshot!(body);
    }
}

#[test]
fn can_split_shortcode_body_lines() {
    let body = common::render(
        r#"
{% split_lines() %}
multi
ple
lines
{% end %}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_shortcodes_with_tabs() {
    // This can cause problems, mostly because the 4 spaces sometimes used for tabs are also used
    // to indicate code-blocks
    let body = common::render(
        r#"
{{ with_tabs() }} {{ with_tabs() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/getzola/zola/issues/1355
#[test]
fn can_render_list_with_shortcode() {
    let body = common::render(
        r#"
* a
* b
{{ with_tabs() }}
* c
{{ with_tabs() }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/getzola/zola/issues/1655
#[test]
fn shortcodes_do_not_generate_paragraphs() {
    let body = common::render(
        r#"
{% web_component() %}
some code;
more code;

other code here;
{% end %}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

#[test]
fn can_render_markdown_in_shortcodes() {
    let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
    let body = common::render_with_config(
        r#"
{% render_md() %}

```
some code;
```

{% end %}
    "#,
        config,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

// https://github.com/getzola/zola/issues/1600
#[test]
fn can_use_shortcodes_in_quotes() {
    let body = common::render(
        r#"
> test quote
> {{ image(alt="a quote") }}
    "#,
    )
    .unwrap()
    .body;
    insta::assert_snapshot!(body);
}

@ -0,0 +1,10 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 227
expression: body

---
<pre data-linenos style="background-color:#2b303b;color:#c0c5ce;"><code><table><tbody><tr><td>1</td><td><span>foo
</span></td></tr><tr><td>2</td><td><span>bar
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,9 @@
---
source: components/markdown/tests/codeblocks.rs
assertion_line: 248
expression: body
---
<pre data-linenos style="background-color:#2b303b;color:#c0c5ce;"><code><table><tbody><tr><td>1</td><td><span>foo
</span></td></tr><tr><td>2</td><td><span>bar
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,10 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 253
expression: body

---
<pre data-linenos style="background-color:#2b303b;color:#c0c5ce;"><code><table><tbody><tr><td>1</td><td><span>foo
</span></td></tr><tr><td><mark style="background-color:#65737e30;">2</mark></td><td><mark style="background-color:#65737e30;"><span>bar
</span></mark></td></tr></tbody></table></code></pre>

@ -0,0 +1,10 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 240
expression: body

---
<pre data-linenos style="background-color:#2b303b;color:#c0c5ce;"><code><table><tbody><tr><td>40</td><td><span>foo
</span></td></tr><tr><td>41</td><td><span>bar
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,11 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 36
expression: res.body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><span>baz
</span><span>bat
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 83
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><mark style="background-color:#65737e30;"><span>foo
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>baz
</span></mark></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 124
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><span>bar
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>baz
</span></mark></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 68
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>baz
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 184
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><mark style="background-color:#65737e30;"><span>foo
</span></mark><span>bar
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>baz
</span></mark></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 139
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><span>bar
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>baz
</span></mark></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 154
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><mark style="background-color:#65737e30;"><span>foo
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>baz
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 214
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>baz
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 53
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>bar
</span><span>baz
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 199
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><span>foo
</span><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>bar
</span><span>baz
</span></code></pre>

@ -0,0 +1,10 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 323
expression: body

---
<pre data-lang="rustscript" style="background-color:#2b303b;color:#c0c5ce;" class="language-rustscript "><code class="language-rustscript" data-lang="rustscript"><span>foo
</span><span>bar
</span></code></pre>

@ -0,0 +1,12 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 169
expression: body

---
<pre style="background-color:#2b303b;color:#c0c5ce;"><code><mark style="background-color:#65737e30;"><span>foo
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><mark style="background-color:#65737e30;"><span>bar
</span></mark><span>baz
</span></code></pre>

@ -0,0 +1,19 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 310
expression: body

---
<pre data-linenos data-lang="html" style="background-color:#2b303b;color:#c0c5ce;" class="language-html "><code class="language-html" data-lang="html"><table><tbody><tr><td>1</td><td><span><</span><span style="color:#bf616a;">a </span><span style="color:#d08770;">href</span><span>="</span><span style="color:#a3be8c;">javascript:void(0);</span><span>">{{ before(texts="1") }}</</span><span style="color:#bf616a;">a</span><span>>
</span></td></tr><tr><td>2</td><td><span>Normally people would not write something & like </span><span style="background-color:#bf616a;color:#2b303b;"><></span><span> this:
</span></td></tr><tr><td>3</td><td><span><</span><span style="color:#bf616a;">div </span><span style="color:#8fa1b3;">id</span><span>="</span><span style="color:#a3be8c;">custom-attr</span><span>">
</span></td></tr><tr><td>4</td><td><span>An inline dQw4w9WgXcQ shortcode
</span></td></tr><tr><td>5</td><td><span></</span><span style="color:#bf616a;">div</span><span>>
</span></td></tr><tr><td>6</td><td><span>Plain text in-between
</span></td></tr><tr><td>7</td><td><span>{% quote(author="Vincent") %}
</span></td></tr><tr><td>8</td><td><span>A quote
</span></td></tr><tr><td>9</td><td><span>{% end %}
</span></td></tr><tr><td>10</td><td><span>{# A Tera comment, you should see it #}
</span></td></tr><tr><td>11</td><td><span style="color:#65737e;"><!-- end text goes here -->
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,13 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 288
expression: body

---
<pre data-linenos style="background-color:#2b303b;color:#c0c5ce;"><code><table><tbody><tr><td>1</td><td><span>text1
</span></td></tr><tr><td>2</td><td><span>first
</span></td></tr><tr><td>3</td><td><span>text2
</span></td></tr><tr><td>4</td><td><span>second
</span></td></tr><tr><td>5</td><td><span>text3
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,11 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 272
expression: body

---
<pre data-linenos data-lang="html" style="background-color:#2b303b;color:#c0c5ce;" class="language-html "><code class="language-html" data-lang="html"><table><tbody><tr><td>1</td><td><span><</span><span style="color:#bf616a;">div </span><span style="color:#8fa1b3;">id</span><span>="</span><span style="color:#a3be8c;">custom-attr</span><span>">
</span></td></tr><tr><td>2</td><td><span>dQw4w9WgXcQ
</span></td></tr><tr><td>3</td><td><span></</span><span style="color:#bf616a;">div</span><span>>
</span></td></tr></tbody></table></code></pre>

@ -0,0 +1,10 @@
---
source: components/rendering/tests/codeblocks.rs
assertion_line: 336
expression: body

---
<pre><code>foo
bar
</code></pre>

@ -0,0 +1,131 @@
---
source: components/rendering/tests/markdown.rs
assertion_line: 358
expression: body

---
<!-- Adapted from https://markdown-it.github.io/ -->
<h1 id="h1-heading">h1 Heading</h1>
<h2 id="h2-heading">h2 Heading</h2>
<h3 id="h3-heading">h3 Heading</h3>
<h4 id="h4-heading">h4 Heading</h4>
<h5 id="h5-heading">h5 Heading</h5>
<h6 id="h6-heading">h6 Heading</h6>
<h2 id="horizontal-rules">Horizontal Rules</h2>
<hr />
<hr />
<hr />
<h2 id="emphasis">Emphasis</h2>
<p><strong>This is bold text</strong></p>
<p><strong>This is bold text</strong></p>
<p><em>This is italic text</em></p>
<p><em>This is italic text</em></p>
<p><del>Strikethrough</del></p>
<h2 id="blockquotes">Blockquotes</h2>
<blockquote>
<p>Blockquotes can also be nested...</p>
<blockquote>
<p>...by using additional greater-than signs right next to each other...</p>
<blockquote>
<p>...or with spaces between arrows.</p>
</blockquote>
</blockquote>
</blockquote>
<h2 id="lists">Lists</h2>
<p>Unordered</p>
<ul>
<li>Create a list by starting a line with <code>+</code>, <code>-</code>, or <code>*</code></li>
<li>Sub-lists are made by indenting 2 spaces:
<ul>
<li>Marker character change forces new list start:
<ul>
<li>Ac tristique libero volutpat at</li>
</ul>
<ul>
<li>Facilisis in pretium nisl aliquet</li>
</ul>
<ul>
<li>Nulla volutpat aliquam velit</li>
</ul>
</li>
</ul>
</li>
<li>Very easy!</li>
</ul>
<p>Ordered</p>
<ol>
<li>
<p>Lorem ipsum dolor sit amet</p>
</li>
<li>
<p>Consectetur adipiscing elit</p>
</li>
<li>
<p>Integer molestie lorem at massa</p>
</li>
<li>
<p>You can use sequential numbers...</p>
</li>
<li>
<p>...or keep all the numbers as <code>1.</code></p>
</li>
</ol>
<p>Start numbering with offset:</p>
<ol start="57">
<li>foo</li>
<li>bar</li>
</ol>
<h2 id="code">Code</h2>
<p>Inline <code>code</code></p>
<p>Indented code</p>
<pre><code>// Some comments
line 1 of code
line 2 of code
line 3 of code
</code></pre>
<p>Block code "fences"</p>
<pre><code>Sample text here...
</code></pre>
<p>Syntax highlighting</p>
<pre data-lang="js" class="language-js "><code class="language-js" data-lang="js">var foo = function (bar) {
  return bar++;
};

console.log(foo(5));
</code></pre>
<h2 id="shortcodes">Shortcodes</h2>
<h2 id="tables">Tables</h2>
<table><thead><tr><th>Option</th><th>Description</th></tr></thead><tbody>
<tr><td>data</td><td>path to data files to supply the data that will be passed into templates.</td></tr>
<tr><td>engine</td><td>engine to be used for processing templates. Handlebars is the default.</td></tr>
<tr><td>ext</td><td>extension to be used for dest files.</td></tr>
</tbody></table>
<p>Right aligned columns</p>
<table><thead><tr><th style="text-align: right">Option</th><th style="text-align: right">Description</th></tr></thead><tbody>
<tr><td style="text-align: right">data</td><td style="text-align: right">path to data files to supply the data that will be passed into templates.</td></tr>
<tr><td style="text-align: right">engine</td><td style="text-align: right">engine to be used for processing templates. Handlebars is the default.</td></tr>
<tr><td style="text-align: right">ext</td><td style="text-align: right">extension to be used for dest files.</td></tr>
</tbody></table>
<h2 id="links">Links</h2>
<p><a href="http://duckduckgo.com">link text</a></p>
<p><a href="http://duckduckgo.com/" title="Duck duck go">link with title</a></p>
<h2 id="images">Images</h2>
<p><img src="https://octodex.github.com/images/minion.png" alt="Minion" />
<img src="https://octodex.github.com/images/stormtroopocat.jpg" alt="Stormtroopocat" title="The Stormtroopocat" /></p>
<p>Like links, Images also have a footnote style syntax</p>
<p><img src="https://octodex.github.com/images/dojocat.jpg" alt="Alt text" title="The Dojocat" /></p>
<p>With a reference later in the document defining the URL location:</p>
<h2 id="smileys">Smileys</h2>
<p>Like :smile:, :cry:</p>
<h3 id="footnotes">Footnotes</h3>
<p>Footnote 1 link<sup class="footnote-reference"><a href="#first">1</a></sup>.</p>
<p>Footnote 2 link<sup class="footnote-reference"><a href="#second">2</a></sup>.</p>
<p>Duplicated footnote reference<sup class="footnote-reference"><a href="#second">2</a></sup>.</p>
<div class="footnote-definition" id="first"><sup class="footnote-definition-label">1</sup>
<p>Footnote <strong>can have markup</strong>
and multiple paragraphs.</p>
</div>
<div class="footnote-definition" id="second"><sup class="footnote-definition-label">2</sup>
<p>Footnote text.</p>
</div>

@ -0,0 +1,8 @@
---
source: components/rendering/tests/markdown.rs
assertion_line: 127
expression: body

---
<h1 id="hello">Hello (in en)</h1>

@ -0,0 +1,27 @@
---
source: components/rendering/tests/markdown.rs
assertion_line: 84
expression: body

---
<h1 id="Hello">Hello</h1>
<h1 id="Hello-1">Hello</h1>
<h1 id="L'écologie_et_vous">L'écologie et vous</h1>
<h1 id="hello">Hello</h1>
<h1 id="hello">Hello</h1>
<h1 id="Something_else">Hello</h1>
<h1 id="Workaround_for_literal_{#…}">Workaround for literal {#…}</h1>
<h1 id="*matic*">Auto</h1>
<h1 id=""></h1>
<h1 id="-1"></h1>
<h1 id="About"><a href="https://getzola.org/about/">About</a></h1>
<h1 id="Rust"><a href="https://rust-lang.org" title="Rust homepage">Rust</a></h1>
<h1 id="hi"><code>hi</code></h1>
<h1 id="hi-1"><em>hi</em></h1>
<h1 id="hi-2"><strong>hi</strong></h1>
<h1 id="text__there">text <sup class="footnote-reference"><a href="#1">1</a></sup> there</h1>
<div class="footnote-definition" id="1"><sup class="footnote-definition-label">1</sup>
<p>footnote</p>
<h1 id="classes" class="bold another">Classes</h1>
</div>

@ -0,0 +1,27 @@
---
source: components/rendering/tests/markdown.rs
assertion_line: 79
expression: body

---
<h1 id="hello-1">Hello</h1>
<h1 id="hello-2">Hello</h1>
<h1 id="l-ecologie-et-vous">L'écologie et vous</h1>
<h1 id="hello">Hello</h1>
<h1 id="hello">Hello</h1>
<h1 id="Something_else">Hello</h1>
<h1 id="workaround-for-literal">Workaround for literal {#…}</h1>
<h1 id="*matic*">Auto</h1>
<h1 id=""></h1>
<h1 id="-1"></h1>
<h1 id="about"><a href="https://getzola.org/about/">About</a></h1>
<h1 id="rust"><a href="https://rust-lang.org" title="Rust homepage">Rust</a></h1>
<h1 id="hi"><code>hi</code></h1>
<h1 id="hi-1"><em>hi</em></h1>
<h1 id="hi-2"><strong>hi</strong></h1>
<h1 id="text-there">text <sup class="footnote-reference"><a href="#1">1</a></sup> there</h1>
<div class="footnote-definition" id="1"><sup class="footnote-definition-label">1</sup>
<p>footnote</p>
<h1 id="classes" class="bold another">Classes</h1>
</div>
