Expose backlinks

This commit is contained in:
Vincent Prouillet 2022-05-08 14:10:29 +02:00
parent 413a68d8b2
commit beb93f2b0a
8 changed files with 125 additions and 40 deletions

View File

@ -7,6 +7,8 @@
- Switch to pulldown-cmark anchor rather than ours, some (very niche) edge cases are not supported anymore, you can
also specify classes on headers now
- Now outputs empty taxonomies instead of ignoring them
- Unify all sorting variable names in templates to `lower`/`higher` in order to make it easy to re-use templates and it
was becoming hard to come up with names
### Other
- Fix markup for fenced code with linenos
@ -20,6 +22,7 @@ any pages related to that taxonomy
- Serialize `transparent` field from front-matter of sections
- Use Zola Tera instance for markdown filter: this means you have access to the same Tera functions as in shortcodes
- Ignore sections with `render=false` when looking for path collisions
- Add support for backlinks
## 0.15.3 (2022-01-23)

View File

@ -8,6 +8,20 @@ use crate::sorting::sort_pages;
use crate::taxonomies::{Taxonomy, TaxonomyFound};
use crate::{Page, Section, SortBy};
// Builds an `AHashSet` from a comma-separated list of expressions,
// e.g. `set! {a, b}` — `vec![]`-style ergonomics for sets.
macro_rules! set {
    // Trailing-comma form: strip the trailing comma and recurse into the main arm.
    ($($key:expr,)+) => (set!($($key),+));
    ( $($key:expr),* ) => {
        {
            // Leading underscore silences "unused mut" for the empty `set!{}` expansion.
            let mut _set = AHashSet::new();
            $(
                _set.insert($key);
            )*
            _set
        }
    };
}
#[derive(Debug, Default)]
pub struct Library {
pub pages: AHashMap<PathBuf, Page>,
@ -15,8 +29,10 @@ pub struct Library {
// aliases -> files, so we can easily check for conflicts
pub reverse_aliases: AHashMap<String, AHashSet<PathBuf>>,
pub translations: AHashMap<PathBuf, AHashSet<PathBuf>>,
pub backlinks: AHashMap<String, AHashSet<PathBuf>>,
// A mapping of {lang -> <slug, {term -> vec<paths>}>>}
taxonomies_def: AHashMap<String, AHashMap<String, AHashMap<String, Vec<PathBuf>>>>,
// All the taxonomies from config.toml in their slugified version
// So we don't need to pass the Config when adding a page to know how to slugify and we only
// slugify once
taxo_name_to_slug: AHashMap<String, String>,
@ -44,11 +60,7 @@ impl Library {
.and_modify(|s| {
s.insert(file_path.to_path_buf());
})
.or_insert_with(|| {
let mut s = AHashSet::new();
s.insert(file_path.to_path_buf());
s
});
.or_insert_with(|| set! {file_path.to_path_buf()});
}
}
@ -105,6 +117,33 @@ impl Library {
self.sections.insert(file_path, section);
}
/// Fills a map of target -> {content mentioning it}
/// This can only be called _after_ rendering markdown as we need to have accumulated all
/// the links first
pub fn fill_backlinks(&mut self) {
self.backlinks.clear();
let mut add_backlink = |target: &str, source: &Path| {
self.backlinks
.entry(target.to_owned())
.and_modify(|s| {
s.insert(source.to_path_buf());
})
.or_insert(set! {source.to_path_buf()});
};
for (_, page) in &self.pages {
for (internal_link, _) in &page.internal_links {
add_backlink(internal_link, &page.file.path);
}
}
for (_, section) in &self.sections {
for (internal_link, _) in &section.internal_links {
add_backlink(internal_link, &section.file.path);
}
}
}
/// This is called _before_ rendering the markdown the pages/sections
pub fn find_taxonomies(&self, config: &Config) -> Vec<Taxonomy> {
let mut taxonomies = Vec::new();
@ -179,11 +218,7 @@ impl Library {
.and_modify(|trans| {
trans.insert(path.to_path_buf());
})
.or_insert({
let mut s = AHashSet::new();
s.insert(path.to_path_buf());
s
});
.or_insert(set! {path.to_path_buf()});
}
};
@ -720,4 +755,28 @@ mod tests {
// under the safe slugify strategy all terms should be distinct
assert_eq!(tax.items.len(), 4);
}
#[test]
fn can_fill_backlinks() {
    // Tiny fixture: two pages and one section cross-linking each other.
    let mut library = Library::default();

    let mut first_page = create_page("page1.md", "en", PageSort::None);
    first_page.internal_links.push(("page2.md".to_owned(), None));
    library.insert_page(first_page);

    let mut second_page = create_page("page2.md", "en", PageSort::None);
    second_page.internal_links.push(("_index.md".to_owned(), None));
    library.insert_page(second_page);

    let mut root_section = create_section("_index.md", "en", 10, false, SortBy::None);
    root_section.internal_links.push(("page1.md".to_owned(), None));
    root_section.internal_links.push(("page2.md".to_owned(), None));
    library.insert_section(root_section);

    library.fill_backlinks();

    // Every linked target gets an entry keyed by target path, containing its sources.
    assert_eq!(library.backlinks.len(), 3);
    assert_eq!(library.backlinks["page1.md"], set! {"_index.md".to_owned()});
    assert_eq!(
        library.backlinks["page2.md"],
        set! {"page1.md".to_owned(), "_index.md".to_owned()}
    );
    assert_eq!(library.backlinks["_index.md"], set! {"page2.md".to_owned()});
}
}

View File

@ -8,6 +8,12 @@ use crate::{Page, Section};
use libs::tera::{Map, Value};
use utils::table_of_contents::Heading;
/// A piece of content (page or section) that mentions another one via an internal link.
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct BackLink<'a> {
    // Permalink of the linking page/section
    pub permalink: &'a str,
    // Title of the linking page/section, if set in its front-matter
    pub title: &'a Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct TranslatedContent<'a> {
pub lang: &'a str,
@ -17,6 +23,22 @@ pub struct TranslatedContent<'a> {
pub path: &'a Path,
}
/// Collects everything in `library` that links to `relative_path`, sorted by permalink
/// so the output is deterministic.
fn find_backlinks<'a>(relative_path: &str, library: &'a Library) -> Vec<BackLink<'a>> {
    let sources = match library.backlinks.get(relative_path) {
        Some(s) => s,
        None => return Vec::new(),
    };

    // A source path resolves to a page and/or a section; emit a BackLink for each match.
    let mut found: Vec<BackLink<'a>> = sources
        .iter()
        .flat_map(|source| {
            let from_page = library
                .pages
                .get(source)
                .map(|p| BackLink { permalink: &p.permalink, title: &p.meta.title });
            let from_section = library
                .sections
                .get(source)
                .map(|s| BackLink { permalink: &s.permalink, title: &s.meta.title });
            from_page.into_iter().chain(from_section)
        })
        .collect();

    found.sort_by_key(|b| b.permalink);
    found
}
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingPage<'a> {
relative_path: &'a str,
@ -45,6 +67,7 @@ pub struct SerializingPage<'a> {
lower: Option<Box<SerializingPage<'a>>>,
higher: Option<Box<SerializingPage<'a>>>,
translations: Vec<TranslatedContent<'a>>,
backlinks: Vec<BackLink<'a>>,
}
impl<'a> SerializingPage<'a> {
@ -60,6 +83,7 @@ impl<'a> SerializingPage<'a> {
let mut lower = None;
let mut higher = None;
let mut translations = vec![];
let mut backlinks = vec![];
if let Some(lib) = library {
translations = lib.find_translations(&page.file.canonical);
@ -74,6 +98,8 @@ impl<'a> SerializingPage<'a> {
.as_ref()
.map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false)));
}
backlinks = find_backlinks(&page.file.relative, &lib);
}
Self {
@ -103,6 +129,7 @@ impl<'a> SerializingPage<'a> {
lower,
higher,
translations,
backlinks,
}
}
}
@ -127,6 +154,7 @@ pub struct SerializingSection<'a> {
pages: Vec<SerializingPage<'a>>,
subsections: Vec<&'a str>,
translations: Vec<TranslatedContent<'a>>,
backlinks: Vec<BackLink<'a>>,
}
#[derive(Debug)]
@ -145,6 +173,7 @@ impl<'a> SerializingSection<'a> {
let mut pages = Vec::with_capacity(section.pages.len());
let mut subsections = Vec::with_capacity(section.subsections.len());
let mut translations = Vec::new();
let mut backlinks = Vec::new();
match mode {
SectionSerMode::ForMarkdown => {}
@ -162,6 +191,8 @@ impl<'a> SerializingSection<'a> {
pages.push(SerializingPage::new(&lib.pages[p], Some(lib), true));
}
}
backlinks = find_backlinks(&section.file.relative, &lib);
}
}
@ -184,6 +215,7 @@ impl<'a> SerializingSection<'a> {
pages,
subsections,
translations,
backlinks,
}
}
}

View File

@ -57,7 +57,7 @@ fn configurable_render(
tera.register_filter(
"markdown",
templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone())
templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone()),
);
let mut context = RenderContext::new(
&tera,

View File

@ -288,6 +288,10 @@ impl Site {
tpls::register_early_global_fns(self)?;
self.populate_sections();
self.render_markdown()?;
{
let mut lib = self.library.write().unwrap();
lib.fill_backlinks();
}
tpls::register_tera_global_fns(self);
// Needs to be done after rendering markdown as we only get the anchors at that point

View File

@ -10,7 +10,6 @@ use libs::tera::{
};
use markdown::{render_content, RenderContext};
#[derive(Debug)]
pub struct MarkdownFilter {
config: Config,
@ -19,11 +18,7 @@ pub struct MarkdownFilter {
}
impl MarkdownFilter {
pub fn new(
config: Config,
permalinks: HashMap<String, String>,
tera: Tera,
) -> Self {
pub fn new(config: Config, permalinks: HashMap<String, String>, tera: Tera) -> Self {
Self { config, permalinks, tera }
}
}
@ -110,9 +105,9 @@ impl TeraFilter for NumFormatFilter {
#[cfg(test)]
mod tests {
use std::{collections::HashMap};
use std::collections::HashMap;
use libs::tera::{to_value, Tera, Filter};
use libs::tera::{to_value, Filter, Tera};
use super::{base64_decode, base64_encode, MarkdownFilter, NumFormatFilter};
use config::Config;
@ -146,8 +141,8 @@ mod tests {
fn markdown_filter_inline() {
let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap());
let result =
MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()).filter(
let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default())
.filter(
&to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
&args,
);
@ -160,8 +155,8 @@ mod tests {
fn markdown_filter_inline_tables() {
let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap());
let result =
MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()).filter(
let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default())
.filter(
&to_value(
&r#"
|id|author_id| timestamp_created|title |content |

View File

@ -160,13 +160,13 @@ The `sort_by` front-matter variable can have the following values:
### `date`
This will sort all pages by their `date` field, from the most recent (at the
top of the list) to the oldest (at the bottom of the list). Each page will
get `page.earlier` and `page.later` variables that contain the pages with
get `page.lower` and `page.higher` variables that contain the pages with
earlier and later dates, respectively.
### `title`
This will sort all pages by their `title` field in natural lexical order, as
defined by `natural_lexical_cmp` in the [lexical-sort] crate. Each page will
get `page.title_prev` and `page.title_next` variables that contain the pages
get `page.lower` and `page.higher` variables that contain the pages
with previous and next titles, respectively.
For example, here is a natural lexical ordering: "bachata, BART, bolero,
@ -180,7 +180,7 @@ bolero, meter, underground, μ-kernel".
### `weight`
This will sort all pages by their `weight` field, from lightest weight
(at the top of the list) to heaviest (at the bottom of the list). Each
page gets `page.lighter` and `page.heavier` variables that contain the
page gets `page.lower` and `page.higher` variables that contain the
pages with lighter and heavier weights, respectively.
### Reversed sorting
@ -190,11 +190,7 @@ pages sorted by weight will be sorted from lightest (at the top) to heaviest
(at the bottom); pages sorted by date will be sorted from oldest (at the top)
to newest (at the bottom).
`reverse` has no effect on:
* `page.later` / `page.earlier`,
* `page.title_prev` / `page.title_next`, or
* `page.heavier` / `page.lighter`.
`reverse` has no effect on `page.lower` / `page.higher`.
If the section is paginated the `paginate_reversed=true` in the front matter of the relevant section should be set instead of using the filter.

View File

@ -34,14 +34,10 @@ toc: Array<Header>,
word_count: Number;
// Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time
reading_time: Number;
// `earlier` and `later` are only populated if the section variable `sort_by` is set to `date`
// and only set when rendering the page itself
earlier: Page?;
later: Page?;
// `heavier` and `lighter` are only populated if the section variable `sort_by` is set to `weight`
// and only set when rendering the page itself
heavier: Page?;
lighter: Page?;
// earlier / lighter
lower: Page?;
// later / heavier
higher: Page?;
// Year/month/day is only set if the page has a date and month/day are 1-indexed
year: Number?;
month: Number?;