Have a list of ancestors instead of only parent section
parent f14dbcbdf4
commit 957c6bed9d
@@ -27,8 +27,8 @@ Tera function
 - RSS feed now takes all available articles by default instead of limiting to 10000
 - `templates` directory is now optional
 - Add Reason and F# syntax highlighting
-- Add `parent_section` to pages and section pointing to the relative path of the parent
-section if there is one to be used with the `get_section` Tera function
+- Add `ancestors` to pages and sections pointing to the relative path of all ancestor
+sections up to the index to be used with the `get_section` Tera function
 
 ## 0.4.2 (2018-09-03)
 
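A usage illustration (not part of this commit's diff): a template can walk the new `ancestors` list and resolve each relative path with the `get_section` Tera function, for example to build a breadcrumb trail. Minimal sketch, assuming the usual `title` and `permalink` fields on the resolved sections:

```html
{# Breadcrumb sketch: each `ancestors` entry is a relative path such as
   "posts/_index.md" that `get_section` can resolve. #}
<nav>
  {% for ancestor in page.ancestors %}
    {% set crumb = get_section(path=ancestor) %}
    <a href="{{ crumb.permalink }}">{{ crumb.title }}</a> &rsaquo;
  {% endfor %}
  {{ page.title }}
</nav>
```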
@@ -23,7 +23,7 @@ pub struct SerializingPage<'a> {
     content: &'a str,
     permalink: &'a str,
     slug: &'a str,
-    parent_section: Option<String>,
+    ancestors: Vec<String>,
     title: &'a Option<String>,
     description: &'a Option<String>,
     date: &'a Option<String>,
@@ -58,14 +58,14 @@ impl<'a> SerializingPage<'a> {
             day = Some(d.2);
         }
         let pages = library.pages();
-        let lighter = page.lighter.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
-        let heavier = page.heavier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
-        let earlier = page.earlier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
-        let later = page.later.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
-        let parent_section = page.parent_section.map(|k| library.get_section_by_key(k).file.relative.clone());
+        let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
 
         SerializingPage {
-            parent_section,
+            ancestors,
             content: &page.content,
             permalink: &page.permalink,
             slug: &page.slug,
@@ -93,7 +93,7 @@ impl<'a> SerializingPage<'a> {
     }
 
     /// Same as from_page but does not fill sibling pages
-    pub fn from_page_basic(page: &'a Page) -> Self {
+    pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self {
         let mut year = None;
         let mut month = None;
         let mut day = None;
@@ -102,9 +102,14 @@ impl<'a> SerializingPage<'a> {
             month = Some(d.1);
             day = Some(d.2);
         }
+        let ancestors = if let Some(ref lib) = library {
+            page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+        } else {
+            vec![]
+        };
 
         SerializingPage {
-            parent_section: None,
+            ancestors,
             content: &page.content,
             permalink: &page.permalink,
             slug: &page.slug,
@@ -138,8 +143,8 @@ pub struct Page {
     pub file: FileInfo,
     /// The front matter meta-data
     pub meta: PageFrontMatter,
-    /// The parent section if there is one
-    pub parent_section: Option<Key>,
+    /// The list of parent sections
+    pub ancestors: Vec<Key>,
     /// The actual content of the page, in markdown
     pub raw_content: String,
     /// All the non-md files we found next to the .md file
@@ -184,7 +189,7 @@ impl Page {
         Page {
             file: FileInfo::new_page(file_path),
             meta,
-            parent_section: None,
+            ancestors: vec![],
             raw_content: "".to_string(),
             assets: vec![],
             content: "".to_string(),
@@ -305,7 +310,7 @@ impl Page {
             anchor_insert,
         );
 
-        context.tera_context.insert("page", &SerializingPage::from_page_basic(self));
+        context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));
 
         let res = render_content(&self.raw_content, &context)
             .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;
@@ -347,8 +352,8 @@ impl Page {
         SerializingPage::from_page(self, library)
     }
 
-    pub fn to_serialized_basic(&self) -> SerializingPage {
-        SerializingPage::from_page_basic(self)
+    pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
+        SerializingPage::from_page_basic(self, Some(library))
     }
 }
 
@@ -357,7 +362,7 @@ impl Default for Page {
         Page {
             file: FileInfo::default(),
             meta: PageFrontMatter::default(),
-            parent_section: None,
+            ancestors: vec![],
             raw_content: "".to_string(),
             assets: vec![],
             content: "".to_string(),
@@ -21,7 +21,7 @@ use library::Library;
 pub struct SerializingSection<'a> {
     content: &'a str,
     permalink: &'a str,
-    parent_section: Option<String>,
+    ancestors: Vec<String>,
     title: &'a Option<String>,
     description: &'a Option<String>,
     extra: &'a HashMap<String, Value>,
@@ -48,10 +48,10 @@ impl<'a> SerializingSection<'a> {
             subsections.push(library.get_section_path_by_key(*k));
         }
 
-        let parent_section = section.parent_section.map(|k| library.get_section_by_key(k).file.relative.clone());
+        let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
 
         SerializingSection {
-            parent_section,
+            ancestors,
             content: &section.content,
             permalink: &section.permalink,
             title: &section.meta.title,
@@ -69,9 +69,15 @@ impl<'a> SerializingSection<'a> {
     }
 
     /// Same as from_section but doesn't fetch pages and sections
-    pub fn from_section_basic(section: &'a Section) -> Self {
+    pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
+        let ancestors = if let Some(ref lib) = library {
+            section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+        } else {
+            vec![]
+        };
+
         SerializingSection {
-            parent_section: None,
+            ancestors,
             content: &section.content,
             permalink: &section.permalink,
             title: &section.meta.title,
@@ -111,8 +117,8 @@ pub struct Section {
     pub pages: Vec<Key>,
     /// All pages that cannot be sorted in this section
     pub ignored_pages: Vec<Key>,
-    /// The relative path of the parent section if there is one
-    pub parent_section: Option<Key>,
+    /// The list of parent sections
+    pub ancestors: Vec<Key>,
     /// All direct subsections
     pub subsections: Vec<Key>,
     /// Toc made from the headers of the markdown file
@@ -131,7 +137,7 @@ impl Section {
         Section {
             file: FileInfo::new_section(file_path),
             meta,
-            parent_section: None,
+            ancestors: vec![],
             path: "".to_string(),
             components: vec![],
             permalink: "".to_string(),
@@ -222,7 +228,7 @@ impl Section {
             self.meta.insert_anchor_links,
         );
 
-        context.tera_context.insert("section", &SerializingSection::from_section_basic(self));
+        context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None));
 
         let res = render_content(&self.raw_content, &context)
             .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;
@@ -270,7 +276,7 @@ impl Default for Section {
         Section {
             file: FileInfo::default(),
             meta: SectionFrontMatter::default(),
-            parent_section: None,
+            ancestors: vec![],
             path: "".to_string(),
             components: vec![],
             permalink: "".to_string(),
@@ -25,7 +25,7 @@ pub struct Library {
     /// A mapping path -> key for pages so we can easily get their key
     paths_to_pages: HashMap<PathBuf, Key>,
     /// A mapping path -> key for sections so we can easily get their key
-    paths_to_sections: HashMap<PathBuf, Key>,
+    pub paths_to_sections: HashMap<PathBuf, Key>,
 }
 
 impl Library {
@@ -81,25 +81,58 @@ impl Library {
     /// Find out the direct subsections of each subsection if there are some
     /// as well as the pages for each section
     pub fn populate_sections(&mut self) {
-        let mut grandparent_paths: HashMap<PathBuf, Vec<_>> = HashMap::new();
+        let (root_path, index_path) = self.sections
+            .values()
+            .find(|s| s.is_index())
+            .map(|s| (s.file.parent.clone(), s.file.path.clone()))
+            .unwrap();
+        let root_key = self.paths_to_sections[&index_path];
+
+        // We are going to get both the ancestors and grandparents for each section in one go
+        let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
+        let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new();
 
         for section in self.sections.values_mut() {
-            if let Some(ref grand_parent) = section.file.grand_parent {
-                grandparent_paths
-                    .entry(grand_parent.to_path_buf())
-                    .or_insert_with(|| vec![])
-                    .push(section.file.path.clone());
-            }
             // Make sure the pages of a section are empty since we can call that many times on `serve`
             section.pages = vec![];
             section.ignored_pages = vec![];
+
+            if let Some(ref grand_parent) = section.file.grand_parent {
+                subsections
+                    .entry(grand_parent.join("_index.md"))
+                    .or_insert_with(|| vec![])
+                    .push(section.file.path.clone());
+            }
+
+            // Index has no ancestors, no need to go through it
+            if section.is_index() {
+                ancestors.insert(section.file.path.clone(), vec![]);
+                continue;
+            }
+
+            let mut path = root_path.clone();
+            // Index section is the first ancestor of every single section
+            let mut parents = vec![root_key.clone()];
+            for component in &section.file.components {
+                path = path.join(component);
+                // Skip itself
+                if path == section.file.parent {
+                    continue;
+                }
+                if let Some(section_key) = self.paths_to_sections.get(&path.join("_index.md")) {
+                    parents.push(*section_key);
+                }
+            }
+            ancestors.insert(section.file.path.clone(), parents);
         }
 
         for (key, page) in &mut self.pages {
            let parent_section_path = page.file.parent.join("_index.md");
             if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
                 self.sections.get_mut(*section_key).unwrap().pages.push(key);
-                page.parent_section = Some(*section_key);
+                page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
+                // Don't forget to push the actual parent
+                page.ancestors.push(*section_key);
             }
         }
 
@@ -111,22 +144,13 @@ impl Library {
             sections_weight.insert(key, section.meta.weight);
         }
 
-        for (grandparent, children) in &grandparent_paths {
-            let mut subsections = vec![];
-            let grandparent_path = grandparent.join("_index.md");
-
-            if let Some(ref mut section) = self.get_section_mut(&grandparent_path) {
-                subsections = children.iter().map(|p| sections[p]).collect();
-                subsections.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
-                section.subsections = subsections.clone();
-            }
-
-            // Only there for subsections so we must have a parent section
-            for key in &subsections {
-                if let Some(ref mut subsection) = self.sections.get_mut(*key) {
-                    subsection.parent_section = Some(sections[&grandparent_path]);
-                }
-            }
+        for section in self.sections.values_mut() {
+            if let Some(ref children) = subsections.get(&section.file.path) {
+                let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
+                children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
+                section.subsections = children;
+            }
+            section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
         }
     }
 
@@ -108,7 +108,7 @@ impl<'a> Paginator<'a> {
 
         for key in self.all_pages {
             let page = library.get_page_by_key(*key);
-            current_page.push(page.to_serialized_basic());
+            current_page.push(page.to_serialized_basic(library));
 
             if current_page.len() == self.paginate_by {
                 pages.push(current_page);
@@ -188,12 +188,12 @@ impl<'a> Paginator<'a> {
         paginator
     }
 
-    pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera) -> Result<String> {
+    pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera, library: &Library) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", &config);
         let template_name = match self.root {
             PaginationRoot::Section(s) => {
-                context.insert("section", &SerializingSection::from_section_basic(s));
+                context.insert("section", &SerializingSection::from_section_basic(s, Some(library)));
                 s.get_template_name()
             }
             PaginationRoot::Taxonomy(t) => {
@@ -26,7 +26,7 @@ impl<'a> SerializedTaxonomyItem<'a> {
 
         for key in &item.pages {
             let page = library.get_page_by_key(*key);
-            pages.push(page.to_serialized_basic());
+            pages.push(page.to_serialized_basic(library));
         }
 
         SerializedTaxonomyItem {
@@ -125,6 +125,7 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
         s.ignored_pages = prev.ignored_pages;
         s.subsections = prev.subsections;
     }
+    site.populate_sections();
 
     if site.library.get_section(&pathbuf).unwrap().meta == prev.meta {
         // Front matter didn't change, only content did
@@ -63,7 +63,7 @@ fn bench_render_paginated(b: &mut test::Bencher) {
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
     let section = site.library.sections_values()[0];
-    let paginator = Paginator::from_section(&section, site.library.pages());
+    let paginator = Paginator::from_section(&section, &site.library);
 
     b.iter(|| site.render_paginated(public, &paginator));
 }
@@ -735,7 +735,7 @@ impl Site {
         let p = pages
             .iter()
             .take(num_entries)
-            .map(|x| x.to_serialized_basic())
+            .map(|x| x.to_serialized_basic(&self.library))
             .collect::<Vec<_>>();
 
         context.insert("pages", &p);
@@ -856,7 +856,7 @@ impl Site {
             .map(|pager| {
                 let page_path = folder_path.join(&format!("{}", pager.index));
                 create_directory(&page_path)?;
-                let output = paginator.render_pager(pager, &self.config, &self.tera)?;
+                let output = paginator.render_pager(pager, &self.config, &self.tera, &self.library)?;
                 if pager.index > 1 {
                     create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
                 } else {
@@ -37,17 +37,23 @@ fn can_parse_site() {
     let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap();
     assert_eq!(index_section.subsections.len(), 3);
     assert_eq!(index_section.pages.len(), 1);
-    assert!(index_section.parent_section.is_none());
+    assert!(index_section.ancestors.is_empty());
 
     let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap();
     assert_eq!(posts_section.subsections.len(), 1);
     assert_eq!(posts_section.pages.len(), 7);
-    assert_eq!(posts_section.parent_section, Some(*site.library.get_section_key(&index_section.file.path).unwrap()));
+    assert_eq!(posts_section.ancestors, vec![*site.library.get_section_key(&index_section.file.path).unwrap()]);
 
     // Make sure we remove all the pwd + content from the sections
     let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap();
     assert_eq!(basic.file.components, vec!["posts".to_string()]);
-    assert_eq!(basic.parent_section, Some(*site.library.get_section_key(&posts_section.file.path).unwrap()));
+    assert_eq!(
+        basic.ancestors,
+        vec![
+            *site.library.get_section_key(&index_section.file.path).unwrap(),
+            *site.library.get_section_key(&posts_section.file.path).unwrap(),
+        ]
+    );
 
     let tutorials_section = site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
     assert_eq!(tutorials_section.subsections.len(), 2);
@@ -60,7 +66,14 @@ fn can_parse_site() {
     let devops_section = site.library.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")).unwrap();
     assert_eq!(devops_section.subsections.len(), 0);
     assert_eq!(devops_section.pages.len(), 2);
-    assert_eq!(devops_section.parent_section, Some(*site.library.get_section_key(&tutorials_section.file.path).unwrap()));
+    assert_eq!(
+        devops_section.ancestors,
+        vec![
+            *site.library.get_section_key(&index_section.file.path).unwrap(),
+            *site.library.get_section_key(&posts_section.file.path).unwrap(),
+            *site.library.get_section_key(&tutorials_section.file.path).unwrap(),
+        ]
+    );
 
     let prog_section = site.library.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")).unwrap();
     assert_eq!(prog_section.subsections.len(), 0);
@@ -45,8 +45,10 @@ month: Number?;
 day: Number?;
 // Paths of colocated assets, relative to the content directory
 assets: Array<String>;
-// The relative path of the parent section if existing, for use with the `get_section` Tera function
-parent_section: String?;
+// The relative paths of the parent sections until the index one, for use with the `get_section` Tera function
+// The first item is the index section and the last one is the parent section
+// This is filled after rendering the page content so it will be empty in shortcodes
+ancestors: Array<String>;
 ```
 
 ## Section variables
@@ -83,8 +85,10 @@ reading_time: Number;
 toc: Array<Header>;
 // Paths of colocated assets, relative to the content directory
 assets: Array<String>;
-// The relative path of the parent section if existing, for use with the `get_section` Tera function
-parent_section: String?;
+// The relative paths of the parent sections until the index one, for use with the `get_section` Tera function
+// The first item is the index section and the last one is the parent section
+// This is filled after rendering the section content so it will be empty in shortcodes
+ancestors: Array<String>;
 ```
 
 ## Table of contents
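Also illustrative rather than part of the commit: the docs above state that the first `ancestors` entry is the index section, the last one is the direct parent, and the list is empty for the index section itself, so a section template could link back to its parent with a guard. Sketch:

```html
{# Parent-link sketch: guard because `ancestors` is empty for the index section. #}
{% if section.ancestors %}
  {% set parent_path = section.ancestors | last %}
  {% set parent = get_section(path=parent_path) %}
  <a href="{{ parent.permalink }}">{{ parent.title }}</a>
{% endif %}
```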