add link_checker settings for external_level and internal_level (#1848)
* add external_level and internal_level
* remove unnecessary debug derive on LinkDef
* clarify doc comment about link check levels
* simplify link checker logging
* add missing warn prefix
* simplify link level logging, remove "Level" from LinkLevel variants
* remove link level config from test site
* switch back to using bail! from get_link_domain
* move console's deps to libs
* remove unnecessary reference
* call console::error/warn directly
* emit one error, or one warning, per link checker run
* various link checker level changes
* add docs about link checker levels
* remove accidentally committed test site
* remove completed TODO
This commit is contained in:
parent
2291c6e9c3
commit
6240ed5469
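To make the new settings concrete, here is a minimal sketch of how the `[link_checker]` keys introduced in this commit are expected to deserialize. It is not part of the commit: it mirrors the `LinkCheckerLevel` and `LinkChecker` types added in `components/config/src/config/link_checker.rs` below, and assumes only the `serde` (with derive) and `toml` crates that the config component already uses.

```rust
// Standalone sketch: local copies of the types added by this commit,
// used to show how config.toml keys map onto them and what the defaults are.
use serde::Deserialize;

#[derive(Clone, Debug, PartialEq, Eq, Deserialize)]
enum LinkCheckerLevel {
    #[serde(rename = "error")]
    Error,
    #[serde(rename = "warn")]
    Warn,
}

impl Default for LinkCheckerLevel {
    fn default() -> Self {
        Self::Error
    }
}

#[derive(Clone, Debug, Default, Deserialize)]
#[serde(default)]
struct LinkChecker {
    skip_prefixes: Vec<String>,
    skip_anchor_prefixes: Vec<String>,
    internal_level: LinkCheckerLevel,
    external_level: LinkCheckerLevel,
}

fn main() {
    // Broken internal links only warn; broken external links still fail the run.
    let raw = r#"
        internal_level = "warn"
        external_level = "error"
    "#;
    let checker: LinkChecker = toml::from_str(raw).unwrap();
    assert_eq!(checker.internal_level, LinkCheckerLevel::Warn);
    assert_eq!(checker.external_level, LinkCheckerLevel::Error);

    // Omitted keys fall back to the default level, which is "error".
    let defaults: LinkChecker = toml::from_str("").unwrap();
    assert_eq!(defaults.internal_level, LinkCheckerLevel::Error);
    assert_eq!(defaults.external_level, LinkCheckerLevel::Error);
}
```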
Cargo.lock

@@ -385,6 +385,14 @@ dependencies = [
  "utils",
 ]
 
+[[package]]
+name = "console"
+version = "0.1.0"
+dependencies = [
+ "errors",
+ "libs",
+]
+
 [[package]]
 name = "console"
 version = "0.15.0"

@@ -1244,7 +1252,7 @@ version = "1.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "689960f187c43c01650c805fb6bc6f55ab944499d86d4ffe9474ad78991d8e94"
 dependencies = [
- "console",
+ "console 0.15.0",
  "once_cell",
  "serde",
  "serde_json",

@@ -1392,6 +1400,7 @@ version = "0.1.0"
 dependencies = [
  "ahash",
  "ammonia",
+ "atty",
  "base64",
  "csv",
  "elasticlunr-rs",

@@ -1420,6 +1429,7 @@ dependencies = [
  "svg_metadata",
  "syntect",
  "tera",
+ "termcolor",
  "time",
  "toml",
  "unic-langid",

@@ -1579,6 +1589,7 @@ name = "markdown"
 version = "0.1.0"
 dependencies = [
  "config",
+ "console 0.1.0",
  "errors",
  "insta",
  "libs",

@@ -2811,6 +2822,7 @@ name = "site"
 version = "0.1.0"
 dependencies = [
  "config",
+ "console 0.1.0",
  "content",
  "errors",
  "imageproc",

@@ -3811,9 +3823,9 @@ dependencies = [
 name = "zola"
 version = "0.16.0"
 dependencies = [
- "atty",
  "clap 3.1.17",
  "clap_complete",
+ "console 0.1.0",
  "ctrlc",
  "errors",
  "hyper",

@@ -3824,7 +3836,6 @@ dependencies = [
  "pathdiff",
  "same-file",
  "site",
- "termcolor",
  "time",
  "tokio",
  "utils",
Cargo.toml

@@ -22,9 +22,7 @@ time = "0.3"
 name = "zola"
 
 [dependencies]
-atty = "0.2.11"
 clap = { version = "3", features = ["derive"] }
-termcolor = "1.0.4"
 # Below is for the serve cmd
 hyper = { version = "0.14.1", default-features = false, features = ["runtime", "server", "http2", "http1"] }
 tokio = { version = "1.0.1", default-features = false, features = ["rt", "fs", "time"] }

@@ -39,6 +37,7 @@ mime_guess = "2.0"
 
 site = { path = "components/site" }
 errors = { path = "components/errors" }
+console = { path = "components/console" }
 utils = { path = "components/utils" }
 libs = { path = "components/libs" }
components/config/src/config/link_checker.rs

@@ -1,5 +1,19 @@
 use serde::{Deserialize, Serialize};
 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub enum LinkCheckerLevel {
+    #[serde(rename = "error")]
+    Error,
+    #[serde(rename = "warn")]
+    Warn,
+}
+
+impl Default for LinkCheckerLevel {
+    fn default() -> Self {
+        Self::Error
+    }
+}
+
 #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(default)]
 pub struct LinkChecker {

@@ -7,4 +21,8 @@ pub struct LinkChecker {
     pub skip_prefixes: Vec<String>,
     /// Skip anchor checking for these URL prefixes
     pub skip_anchor_prefixes: Vec<String>,
+    /// Emit either "error" or "warn" for broken internal links (including anchor links).
+    pub internal_level: LinkCheckerLevel,
+    /// Emit either "error" or "warn" for broken external links (including anchor links).
+    pub external_level: LinkCheckerLevel,
 }
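Downstream, the level is consumed by matching on it wherever a broken link is detected; the real call sites appear later in this diff (in `markdown.rs` and `components/site/src/lib.rs`). A condensed, hypothetical sketch of that dispatch pattern, where `emit` and the `String` error type are stand-ins rather than actual Zola APIs:

```rust
// Hypothetical helper illustrating the warn-vs-error dispatch this commit
// introduces; not part of the codebase.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum LinkCheckerLevel {
    Error,
    Warn,
}

fn emit(level: LinkCheckerLevel, msg: &str) -> Result<(), String> {
    match level {
        // "error" aborts the build/check run with the collected message.
        LinkCheckerLevel::Error => Err(msg.to_string()),
        // "warn" only logs and lets the run continue.
        LinkCheckerLevel::Warn => {
            eprintln!("Warning: {}", msg);
            Ok(())
        }
    }
}

fn main() {
    assert!(emit(LinkCheckerLevel::Error, "broken link `@/missing.md`").is_err());
    assert!(emit(LinkCheckerLevel::Warn, "broken link `@/missing.md`").is_ok());
}
```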
components/config/src/lib.rs

@@ -5,8 +5,8 @@ mod theme;
 use std::path::Path;
 
 pub use crate::config::{
-    languages::LanguageOptions, link_checker::LinkChecker, search::Search, slugify::Slugify,
-    taxonomies::TaxonomyConfig, Config,
+    languages::LanguageOptions, link_checker::LinkChecker, link_checker::LinkCheckerLevel,
+    search::Search, slugify::Slugify, taxonomies::TaxonomyConfig, Config,
 };
 use errors::Result;
components/console/Cargo.toml (new file)

@@ -0,0 +1,8 @@
+[package]
+name = "console"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+errors = { path = "../errors" }
+libs = { path = "../libs" }
components/console/src/lib.rs (new file)

@@ -0,0 +1,57 @@
+use std::env;
+use std::io::Write;
+
+use libs::atty;
+use libs::once_cell::sync::Lazy;
+use libs::termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+/// Termcolor color choice.
+/// We do not rely on ColorChoice::Auto behavior
+/// as the check is already performed by has_color.
+static COLOR_CHOICE: Lazy<ColorChoice> =
+    Lazy::new(|| if has_color() { ColorChoice::Always } else { ColorChoice::Never });
+
+pub fn info(message: &str) {
+    colorize(message, ColorSpec::new().set_bold(true), StandardStream::stdout(*COLOR_CHOICE));
+}
+
+pub fn warn(message: &str) {
+    colorize(
+        &format!("{}{}", "Warning: ", message),
+        ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)),
+        StandardStream::stdout(*COLOR_CHOICE),
+    );
+}
+
+pub fn success(message: &str) {
+    colorize(
+        message,
+        ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)),
+        StandardStream::stdout(*COLOR_CHOICE),
+    );
+}
+
+pub fn error(message: &str) {
+    colorize(
+        &format!("{}{}", "Error: ", message),
+        ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)),
+        StandardStream::stderr(*COLOR_CHOICE),
+    );
+}
+
+/// Print a colorized message to stdout
+fn colorize(message: &str, color: &ColorSpec, mut stream: StandardStream) {
+    stream.set_color(color).unwrap();
+    write!(stream, "{}", message).unwrap();
+    stream.set_color(&ColorSpec::new()).unwrap();
+    writeln!(stream).unwrap();
+}
+
+/// Check whether to output colors
+fn has_color() -> bool {
+    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0"
+        && env::var("NO_COLOR").is_err();
+    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0";
+
+    force_colors || use_colors && atty::is(atty::Stream::Stdout)
+}
components/libs/Cargo.toml

@@ -6,6 +6,7 @@ edition = "2021"
 [dependencies]
 ahash = "0.7.6"
 ammonia = "3"
+atty = "0.2.11"
 base64 = "0.13"
 csv = "1"
 elasticlunr-rs = {version = "2", default-features = false, features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] }

@@ -34,6 +35,7 @@ slug = "0.1"
 svg_metadata = "0.4"
 syntect = "5"
 tera = { version = "1", features = ["preserve_order"] }
+termcolor = "1.0.4"
 time = "0.3"
 toml = "0.5"
 unic-langid = "0.9"
components/libs/src/lib.rs

@@ -6,6 +6,7 @@
 
 pub use ahash;
 pub use ammonia;
+pub use atty;
 pub use base64;
 pub use csv;
 pub use elasticlunr;

@@ -34,6 +35,7 @@ pub use slug;
 pub use svg_metadata;
 pub use syntect;
 pub use tera;
+pub use termcolor;
 pub use time;
 pub use toml;
 pub use unic_langid;
components/markdown/Cargo.toml

@@ -11,6 +11,7 @@ pest_derive = "2"
 errors = { path = "../errors" }
 utils = { path = "../utils" }
 config = { path = "../config" }
+console = { path = "../console" }
 libs = { path = "../libs" }
 
 [dev-dependencies]
components/markdown/src/markdown.rs

@@ -1,12 +1,13 @@
 use std::fmt::Write;
 
+use errors::bail;
 use libs::gh_emoji::Replacer as EmojiReplacer;
 use libs::once_cell::sync::Lazy;
 use libs::pulldown_cmark as cmark;
 use libs::tera;
 
 use crate::context::RenderContext;
-use errors::{anyhow, Context, Error, Result};
+use errors::{Context, Error, Result};
 use libs::pulldown_cmark::escape::escape_html;
 use utils::site::resolve_internal_link;
 use utils::slugs::slugify_anchors;

@@ -139,7 +140,18 @@ fn fix_link(
                 resolved.permalink
             }
             Err(_) => {
-                return Err(anyhow!("Relative link {} not found.", link));
+                let msg = format!(
+                    "Broken relative link `{}` in {}",
+                    link,
+                    context.current_page_path.unwrap_or("unknown"),
+                );
+                match context.config.link_checker.internal_level {
+                    config::LinkCheckerLevel::Error => bail!(msg),
+                    config::LinkCheckerLevel::Warn => {
+                        console::warn(&msg);
+                        link.to_string()
+                    }
+                }
             }
         }
     } else if is_external_link(link) {
components/site/Cargo.toml

@@ -10,6 +10,7 @@ serde = { version = "1.0", features = ["derive"] }
 
 errors = { path = "../errors" }
 config = { path = "../config" }
+console = { path = "../console" }
 utils = { path = "../utils" }
 templates = { path = "../templates" }
 search = { path = "../search" }
components/site/src/lib.rs

@@ -295,10 +295,45 @@ impl Site {
         tpls::register_tera_global_fns(self);
 
         // Needs to be done after rendering markdown as we only get the anchors at that point
-        link_checking::check_internal_links_with_anchors(self)?;
+        let internal_link_messages = link_checking::check_internal_links_with_anchors(self);
+
+        // log any broken internal links and error out if needed
+        if let Err(messages) = internal_link_messages {
+            let messages: Vec<String> = messages
+                .iter()
+                .enumerate()
+                .map(|(i, msg)| format!(" {}. {}", i + 1, msg))
+                .collect();
+            let msg = format!(
+                "Found {} broken internal anchor link(s)\n{}",
+                messages.len(),
+                messages.join("\n")
+            );
+            match self.config.link_checker.internal_level {
+                config::LinkCheckerLevel::Warn => console::warn(&msg),
+                config::LinkCheckerLevel::Error => return Err(anyhow!(msg.clone())),
+            }
+        }
 
+        // check external links, log the results, and error out if needed
         if self.config.is_in_check_mode() {
-            link_checking::check_external_links(self)?;
+            let external_link_messages = link_checking::check_external_links(self);
+            if let Err(messages) = external_link_messages {
+                let messages: Vec<String> = messages
+                    .iter()
+                    .enumerate()
+                    .map(|(i, msg)| format!(" {}. {}", i + 1, msg))
+                    .collect();
+                let msg = format!(
+                    "Found {} broken external link(s)\n{}",
+                    messages.len(),
+                    messages.join("\n")
+                );
+                match self.config.link_checker.external_level {
+                    config::LinkCheckerLevel::Warn => console::warn(&msg),
+                    config::LinkCheckerLevel::Error => return Err(anyhow!(msg.clone())),
+                }
+            }
         }
 
         Ok(())
components/site/src/link_checking.rs

@@ -1,9 +1,10 @@
 use core::time;
 use std::{collections::HashMap, path::PathBuf, thread};
 
+use config::LinkCheckerLevel;
 use libs::rayon::prelude::*;
 
-use crate::{anyhow, Site};
+use crate::Site;
 use errors::{bail, Result};
 use libs::rayon;
 use libs::url::Url;

@@ -11,8 +12,10 @@ use libs::url::Url;
 /// Check whether all internal links pointing to explicit anchor fragments are valid.
 ///
 /// This is very similar to `check_external_links`, although internal links checking
-/// is always performed (while external ones only conditionally in `zola check`).
-pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
+/// is always performed (while external ones only conditionally in `zola check`). If broken links
+/// are encountered, the `internal_level` setting in config.toml will determine whether they are
+/// treated as warnings or errors.
+pub fn check_internal_links_with_anchors(site: &Site) -> Result<(), Vec<String>> {
     println!("Checking all internal links with anchors.");
     let library = site.library.write().expect("Get lock for check_internal_links_with_anchors");
 

@@ -73,7 +76,7 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
     });
 
     // Format faulty entries into error messages, and collect them.
-    let errors = missing_targets
+    let messages = missing_targets
         .map(|(page_path, md_path, anchor)| {
             format!(
                 "The anchor in the link `@/{}#{}` in {} does not exist.",

@@ -85,7 +88,7 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
         .collect::<Vec<_>>();
 
     // Finally emit a summary, and return overall anchors-checking result.
-    match errors.len() {
+    match messages.len() {
         0 => {
             println!("> Successfully checked {} internal link(s) with anchors.", anchors_total);
             Ok(())

@@ -95,7 +98,7 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
                 "> Checked {} internal link(s) with anchors: {} target(s) missing.",
                 anchors_total, errors_total,
             );
-            Err(anyhow!(errors.join("\n")))
+            Err(messages)
         }
     }
 }

@@ -114,21 +117,22 @@ fn get_link_domain(link: &str) -> Result<String> {
         };
 }
 
-pub fn check_external_links(site: &Site) -> Result<()> {
+pub fn check_external_links(site: &Site) -> Result<(), Vec<String>> {
     let library = site.library.write().expect("Get lock for check_external_links");
 
     struct LinkDef {
         file_path: PathBuf,
         external_link: String,
-        domain: String,
+        domain: Result<String>,
     }
 
     impl LinkDef {
-        pub fn new(file_path: PathBuf, external_link: String, domain: String) -> Self {
+        pub fn new(file_path: PathBuf, external_link: String, domain: Result<String>) -> Self {
             Self { file_path, external_link, domain }
         }
     }
 
+    let mut messages: Vec<String> = vec![];
     let mut checked_links: Vec<LinkDef> = vec![];
     let mut skipped_link_count: u32 = 0;
 

@@ -137,7 +141,7 @@ pub fn check_external_links(site: &Site) -> Result<()> {
             if should_skip_by_prefix(&external_link, &site.config.link_checker.skip_prefixes) {
                 skipped_link_count += 1;
             } else {
-                let domain = get_link_domain(&external_link)?;
+                let domain = get_link_domain(&external_link);
                 checked_links.push(LinkDef::new(p.file.path.clone(), external_link, domain));
             }
         }

@@ -148,24 +152,45 @@ pub fn check_external_links(site: &Site) -> Result<()> {
             if should_skip_by_prefix(&external_link, &site.config.link_checker.skip_prefixes) {
                 skipped_link_count += 1;
             } else {
-                let domain = get_link_domain(&external_link)?;
+                let domain = get_link_domain(&external_link);
                 checked_links.push(LinkDef::new(s.file.path.clone(), external_link, domain));
             }
         }
     }
 
+    // separate the links with valid domains from the links with invalid domains
+    let (checked_links, invalid_url_links): (Vec<&LinkDef>, Vec<&LinkDef>) =
+        checked_links.iter().partition(|link| link.domain.is_ok());
+
     println!(
-        "Checking {} external link(s). Skipping {} external link(s).",
+        "Checking {} external link(s). Skipping {} external link(s).{}",
         checked_links.len(),
-        skipped_link_count
+        skipped_link_count,
+        if invalid_url_links.is_empty() {
+            "".to_string()
+        } else {
+            format!(" {} link(s) had unparseable URLs.", invalid_url_links.len())
+        }
     );
 
+    for err in invalid_url_links.into_iter() {
+        let msg = err.domain.as_ref().unwrap_err().to_string();
+        messages.push(msg);
+    }
+
+    // error out if we're in error mode and any external URLs couldn't be parsed
+    match site.config.link_checker.external_level {
+        LinkCheckerLevel::Error if messages.len() > 0 => return Err(messages),
+        _ => (),
+    }
+
     let mut links_by_domain: HashMap<String, Vec<&LinkDef>> = HashMap::new();
 
     for link in checked_links.iter() {
-        links_by_domain.entry(link.domain.to_string()).or_default();
+        let domain = link.domain.as_ref().unwrap();
+        links_by_domain.entry(domain.to_string()).or_default();
         // Insert content path and link under the domain key
-        links_by_domain.get_mut(&link.domain).unwrap().push(link);
+        links_by_domain.get_mut(domain).unwrap().push(link);
     }
 
     if checked_links.is_empty() {

@@ -176,62 +201,63 @@ pub fn check_external_links(site: &Site) -> Result<()> {
     // (almost) all pages simultaneously, limiting all links for a single
     // domain to one thread to avoid rate-limiting
     let threads = std::cmp::min(links_by_domain.len(), 8);
-    let pool = rayon::ThreadPoolBuilder::new().num_threads(threads).build()?;
-
-    let errors = pool.install(|| {
-        links_by_domain
-            .par_iter()
-            .map(|(_domain, links)| {
-                let mut links_to_process = links.len();
-                links
-                    .iter()
-                    .filter_map(move |link_def| {
-                        links_to_process -= 1;
-
-                        let res = link_checker::check_url(
-                            &link_def.external_link,
-                            &site.config.link_checker,
-                        );
-
-                        if links_to_process > 0 {
-                            // Prevent rate-limiting, wait before next crawl unless we're done with this domain
-                            thread::sleep(time::Duration::from_millis(500));
-                        }
-
-                        if link_checker::is_valid(&res) {
-                            None
-                        } else {
-                            Some((&link_def.file_path, &link_def.external_link, res))
-                        }
-                    })
-                    .collect::<Vec<_>>()
-            })
-            .flatten()
-            .collect::<Vec<_>>()
-    });
-
-    println!(
-        "> Checked {} external link(s): {} error(s) found.",
-        checked_links.len(),
-        errors.len()
-    );
-
-    if errors.is_empty() {
-        return Ok(());
-    }
-
-    let msg = errors
-        .into_iter()
-        .map(|(page_path, link, check_res)| {
-            format!(
-                "Dead link in {} to {}: {}",
-                page_path.to_string_lossy(),
-                link,
-                link_checker::message(&check_res)
-            )
-        })
-        .collect::<Vec<_>>()
-        .join("\n");
-
-    Err(anyhow!(msg))
+    let pool = rayon::ThreadPoolBuilder::new().num_threads(threads).build();
+
+    match pool {
+        Ok(pool) => {
+            let errors = pool.install(|| {
+                links_by_domain
+                    .par_iter()
+                    .map(|(_domain, links)| {
+                        let mut links_to_process = links.len();
+                        links
+                            .iter()
+                            .filter_map(move |link_def| {
+                                links_to_process -= 1;
+
+                                let res = link_checker::check_url(
+                                    &link_def.external_link,
+                                    &site.config.link_checker,
+                                );
+
+                                if links_to_process > 0 {
+                                    // Prevent rate-limiting, wait before next crawl unless we're done with this domain
+                                    thread::sleep(time::Duration::from_millis(500));
+                                }
+
+                                if link_checker::is_valid(&res) {
+                                    None
+                                } else {
+                                    Some((&link_def.file_path, &link_def.external_link, res))
+                                }
+                            })
+                            .collect::<Vec<_>>()
+                    })
+                    .flatten()
+                    .collect::<Vec<_>>()
+            });
+
+            println!(
+                "> Checked {} external link(s): {} error(s) found.",
+                checked_links.len(),
+                errors.len()
+            );
+
+            if errors.is_empty() {
+                return Ok(());
+            }
+
+            for (page_path, link, check_res) in errors.iter() {
+                messages.push(format!(
+                    "Broken link in {} to {}: {}",
+                    page_path.to_string_lossy(),
+                    link,
+                    link_checker::message(check_res)
+                ));
+            }
+        }
+        Err(pool_err) => messages.push(pool_err.to_string()),
+    }
+
+    Err(messages)
 }
docs/content/documentation/content/linking.md

@@ -54,3 +54,5 @@ to link to. The path to the file starts from the `content` directory.
 
 For example, linking to a file located at `content/pages/about.md` would be `[my link](@/pages/about.md)`.
 You can still link to an anchor directly; `[my link](@/pages/about.md#example)` will work as expected.
+
+By default, broken internal links are treated as errors. To treat them as warnings instead, visit the `[link_checker]` section of `config.toml` and set `internal_level = "warn"`. Note: treating broken links as warnings allows the site to be built with broken links intact, so a link such as `[my link](@/pages/whoops.md)` will be rendered to HTML as `<a href="@/pages/whoops.md">`.
docs/content/documentation/getting-started/configuration.md

@@ -130,6 +130,12 @@ skip_anchor_prefixes = [
     "https://caniuse.com/",
 ]
 
+# Treat internal link problems as either "error" or "warn", default is "error"
+internal_level = "error"
+
+# Treat external link problems as either "error" or "warn", default is "error"
+external_level = "error"
+
 # Various slugification strategies, see below for details
 # Defaults to everything being a slug
 [slugify]
src/cmd/build.rs

@@ -3,7 +3,7 @@ use std::path::Path;
 use errors::{Error, Result};
 use site::Site;
 
-use crate::console;
+use crate::messages;
 use crate::prompt::ask_bool_timeout;
 
 const BUILD_PROMPT_TIMEOUT_MILLIS: u64 = 10_000;

@@ -47,7 +47,7 @@ pub fn build(
         site.include_drafts();
     }
     site.load()?;
-    console::notify_site_size(&site);
-    console::warn_about_ignored_pages(&site);
+    messages::notify_site_size(&site);
+    messages::warn_about_ignored_pages(&site);
     site.build()
 }
src/cmd/check.rs

@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
 use errors::Result;
 use site::Site;
 
-use crate::console;
+use crate::messages;
 
 pub fn check(
     root_dir: &Path,

@@ -23,7 +23,7 @@ pub fn check(
         site.include_drafts();
     }
     site.load()?;
-    console::check_site_summary(&site);
-    console::warn_about_ignored_pages(&site);
+    messages::check_site_summary(&site);
+    messages::warn_about_ignored_pages(&site);
     Ok(())
 }
src/cmd/init.rs

@@ -4,7 +4,6 @@ use std::path::Path;
 use errors::{bail, Result};
 use utils::fs::create_file;
 
-use crate::console;
 use crate::prompt::{ask_bool, ask_url};
 
 const CONFIG: &str = r#"
src/cmd/serve.rs

@@ -49,7 +49,7 @@ use site::sass::compile_sass;
 use site::{Site, SITE_CONTENT};
 use utils::fs::copy_file;
 
-use crate::console;
+use crate::messages;
 use std::ffi::OsStr;
 
 #[derive(Debug, PartialEq)]

@@ -228,7 +228,7 @@ fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &str) {
             ))
             .unwrap();
         }
-        Err(e) => console::unravel_errors("Failed to build the site", &e),
+        Err(e) => messages::unravel_errors("Failed to build the site", &e),
     }
 }

@@ -274,8 +274,8 @@ fn create_new_site(
     } else {
         site.enable_live_reload(interface_port);
     }
-    console::notify_site_size(&site);
-    console::warn_about_ignored_pages(&site);
+    messages::notify_site_size(&site);
+    messages::warn_about_ignored_pages(&site);
     site.build()?;
     Ok((site, address))
 }

@@ -304,7 +304,7 @@ pub fn serve(
         include_drafts,
         None,
     )?;
-    console::report_elapsed_time(start);
+    messages::report_elapsed_time(start);
 
     // Stop right there if we can't bind to the address
     let bind_address: SocketAddrV4 = match address.parse() {

@@ -509,7 +509,7 @@ pub fn serve(
                     Some(s)
                 }
                 Err(e) => {
-                    console::unravel_errors("Failed to build the site", &e);
+                    messages::unravel_errors("Failed to build the site", &e);
                     None
                 }
             };

@@ -628,7 +628,7 @@ pub fn serve(
                     }
                 }
             };
-            console::report_elapsed_time(start);
+            messages::report_elapsed_time(start);
         }
         _ => {}
     }
src/console.rs (deleted, 134 lines)

@@ -1,134 +0,0 @@
-use std::io::Write;
-use std::time::Instant;
-use std::{convert::TryInto, env};
-
-use libs::once_cell::sync::Lazy;
-use libs::time::Duration;
-use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
-
-use errors::Error;
-use site::Site;
-
-/// Termcolor color choice.
-/// We do not rely on ColorChoice::Auto behavior
-/// as the check is already performed by has_color.
-static COLOR_CHOICE: Lazy<ColorChoice> =
-    Lazy::new(|| if has_color() { ColorChoice::Always } else { ColorChoice::Never });
-
-pub fn info(message: &str) {
-    colorize(message, ColorSpec::new().set_bold(true), StandardStream::stdout(*COLOR_CHOICE));
-}
-
-pub fn warn(message: &str) {
-    colorize(
-        message,
-        ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)),
-        StandardStream::stdout(*COLOR_CHOICE),
-    );
-}
-
-pub fn success(message: &str) {
-    colorize(
-        message,
-        ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)),
-        StandardStream::stdout(*COLOR_CHOICE),
-    );
-}
-
-pub fn error(message: &str) {
-    colorize(
-        message,
-        ColorSpec::new().set_bold(true).set_fg(Some(Color::Red)),
-        StandardStream::stderr(*COLOR_CHOICE),
-    );
-}
-
-/// Print a colorized message to stdout
-fn colorize(message: &str, color: &ColorSpec, mut stream: StandardStream) {
-    stream.set_color(color).unwrap();
-    write!(stream, "{}", message).unwrap();
-    stream.set_color(&ColorSpec::new()).unwrap();
-    writeln!(stream).unwrap();
-}
-
-/// Display in the console the number of pages/sections in the site
-pub fn notify_site_size(site: &Site) {
-    let library = site.library.read().unwrap();
-    println!(
-        "-> Creating {} pages ({} orphan) and {} sections",
-        library.pages.len(),
-        library.get_all_orphan_pages().len(),
-        library.sections.len() - 1, // -1 since we do not count the index as a section there
-    );
-}
-
-/// Display in the console only the number of pages/sections in the site
-pub fn check_site_summary(site: &Site) {
-    let library = site.library.read().unwrap();
-    let orphans = library.get_all_orphan_pages();
-    println!(
-        "-> Site content: {} pages ({} orphan), {} sections",
-        library.pages.len(),
-        orphans.len(),
-        library.sections.len() - 1, // -1 since we do not count the index as a section there
-    );
-
-    for orphan in orphans {
-        warn(&format!("Orphan page found: {}", orphan.path));
-    }
-}
-
-/// Display a warning in the console if there are ignored pages in the site
-pub fn warn_about_ignored_pages(site: &Site) {
-    let library = site.library.read().unwrap();
-    let ignored_pages: Vec<_> = library
-        .sections
-        .values()
-        .flat_map(|s| s.ignored_pages.iter().map(|k| library.pages[k].file.path.clone()))
-        .collect();
-
-    if !ignored_pages.is_empty() {
-        warn(&format!(
-            "{} page(s) ignored (missing date or weight in a sorted section):",
-            ignored_pages.len()
-        ));
-        for path in ignored_pages {
-            warn(&format!("- {}", path.display()));
-        }
-    }
-}
-
-/// Print the time elapsed rounded to 1 decimal
-pub fn report_elapsed_time(instant: Instant) {
-    let duration: Duration = instant.elapsed().try_into().unwrap();
-    let duration_ms = duration.whole_milliseconds() as f64;
-
-    if duration_ms < 1000.0 {
-        success(&format!("Done in {}ms.\n", duration_ms));
-    } else {
-        let duration_sec = duration_ms / 1000.0;
-        success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0)));
-    }
-}
-
-/// Display an error message and the actual error(s)
-pub fn unravel_errors(message: &str, error: &Error) {
-    if !message.is_empty() {
-        self::error(message);
-    }
-    self::error(&format!("Error: {}", error));
-    let mut cause = error.source();
-    while let Some(e) = cause {
-        self::error(&format!("Reason: {}", e));
-        cause = e.source();
-    }
-}
-
-/// Check whether to output colors
-fn has_color() -> bool {
-    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0"
-        && env::var("NO_COLOR").is_err();
-    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0";
-
-    force_colors || use_colors && atty::is(atty::Stream::Stdout)
-}
src/main.rs (14 changed lines)

@@ -9,7 +9,7 @@ use time::UtcOffset;
 
 mod cli;
 mod cmd;
-mod console;
+mod messages;
 mod prompt;
 
 fn get_config_file_path(dir: &Path, config_path: &Path) -> (PathBuf, PathBuf) {

@@ -35,7 +35,7 @@ fn main() {
     match cli.command {
         Command::Init { name, force } => {
             if let Err(e) = cmd::create_new_project(&name, force) {
-                console::unravel_errors("Failed to create the project", &e);
+                messages::unravel_errors("Failed to create the project", &e);
                 std::process::exit(1);
             }
         }

@@ -50,9 +50,9 @@ fn main() {
                 output_dir.as_deref(),
                 drafts,
             ) {
-                Ok(()) => console::report_elapsed_time(start),
+                Ok(()) => messages::report_elapsed_time(start),
                 Err(e) => {
-                    console::unravel_errors("Failed to build the site", &e);
+                    messages::unravel_errors("Failed to build the site", &e);
                     std::process::exit(1);
                 }
             }

@@ -84,7 +84,7 @@ fn main() {
                 fast,
                 UtcOffset::current_local_offset().unwrap_or(UtcOffset::UTC),
             ) {
-                console::unravel_errors("Failed to serve the site", &e);
+                messages::unravel_errors("Failed to serve the site", &e);
                 std::process::exit(1);
             }
         }

@@ -93,9 +93,9 @@ fn main() {
             let start = Instant::now();
            let (root_dir, config_file) = get_config_file_path(&cli_dir, &cli.config);
            match cmd::check(&root_dir, &config_file, None, None, drafts) {
-                Ok(()) => console::report_elapsed_time(start),
+                Ok(()) => messages::report_elapsed_time(start),
                Err(e) => {
-                    console::unravel_errors("Failed to check the site", &e);
+                    messages::unravel_errors("Failed to check the site", &e);
                    std::process::exit(1);
                }
            }
src/messages.rs (new file)

@@ -0,0 +1,79 @@
+use libs::time::Duration;
+use std::convert::TryInto;
+use std::time::Instant;
+
+use errors::Error;
+use site::Site;
+
+/// Display in the console the number of pages/sections in the site
+pub fn notify_site_size(site: &Site) {
+    let library = site.library.read().unwrap();
+    println!(
+        "-> Creating {} pages ({} orphan) and {} sections",
+        library.pages.len(),
+        library.get_all_orphan_pages().len(),
+        library.sections.len() - 1, // -1 since we do not count the index as a section there
+    );
+}
+
+/// Display in the console only the number of pages/sections in the site
+pub fn check_site_summary(site: &Site) {
+    let library = site.library.read().unwrap();
+    let orphans = library.get_all_orphan_pages();
+    println!(
+        "-> Site content: {} pages ({} orphan), {} sections",
+        library.pages.len(),
+        orphans.len(),
+        library.sections.len() - 1, // -1 since we do not count the index as a section there
+    );
+
+    for orphan in orphans {
+        console::warn(&format!("Orphan page found: {}", orphan.path));
+    }
+}
+
+/// Display a warning in the console if there are ignored pages in the site
+pub fn warn_about_ignored_pages(site: &Site) {
+    let library = site.library.read().unwrap();
+    let ignored_pages: Vec<_> = library
+        .sections
+        .values()
+        .flat_map(|s| s.ignored_pages.iter().map(|k| library.pages[k].file.path.clone()))
+        .collect();
+
+    if !ignored_pages.is_empty() {
+        console::warn(&format!(
+            "{} page(s) ignored (missing date or weight in a sorted section):",
+            ignored_pages.len()
+        ));
+        for path in ignored_pages {
+            console::warn(&format!("- {}", path.display()));
+        }
+    }
+}
+
+/// Print the time elapsed rounded to 1 decimal
+pub fn report_elapsed_time(instant: Instant) {
+    let duration: Duration = instant.elapsed().try_into().unwrap();
+    let duration_ms = duration.whole_milliseconds() as f64;
+
+    if duration_ms < 1000.0 {
+        console::success(&format!("Done in {}ms.\n", duration_ms));
+    } else {
+        let duration_sec = duration_ms / 1000.0;
+        console::success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0)));
+    }
+}
+
+/// Display an error message and the actual error(s)
+pub fn unravel_errors(message: &str, error: &Error) {
+    if !message.is_empty() {
+        console::error(message);
+    }
+    console::error(&error.to_string());
+    let mut cause = error.source();
+    while let Some(e) = cause {
+        console::error(&format!("Reason: {}", e));
+        cause = e.source();
+    }
+}
src/prompt.rs

@@ -3,7 +3,6 @@ use std::time::Duration;
 
 use libs::url::Url;
 
-use crate::console;
 use errors::{anyhow, Result};
 
 /// Wait for user input and return what they typed