Expose backlinks

parent 413a68d8b2
commit beb93f2b0a
@@ -7,6 +7,8 @@
 - Switch to pulldown-cmark anchor rather than ours, some (very niche) edge cases are not supported anymore, you can
   also specify classes on headers now
 - Now outputs empty taxonomies instead of ignoring them
+- Unify all sorting variable names in templates to `lower`/`higher`: this makes templates easier to re-use, and
+  distinct names for each sort method were becoming hard to come up with
 
 ### Other
 - Fix markup for fenced code with linenos
@@ -20,6 +22,7 @@ any pages related to that taxonomy
 - Serialize `transparent` field from front-matter of sections
 - Use Zola Tera instance for markdown filter: this means you have access to the same Tera functions as in shortcodes
 - Ignore sections with `render=false` when looking for path collisions
+- Add support for backlinks
 
 ## 0.15.3 (2022-01-23)
 
@@ -8,6 +8,20 @@ use crate::sorting::sort_pages;
 use crate::taxonomies::{Taxonomy, TaxonomyFound};
 use crate::{Page, Section, SortBy};
 
+macro_rules! set {
+    ($($key:expr,)+) => (set!($($key),+));
+
+    ( $($key:expr),* ) => {
+        {
+            let mut _set = AHashSet::new();
+            $(
+                _set.insert($key);
+            )*
+            _set
+        }
+    };
+}
+
 #[derive(Debug, Default)]
 pub struct Library {
     pub pages: AHashMap<PathBuf, Page>,
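For context, `set!` is plain construction sugar: `set! {k}` builds an `AHashSet` containing `k`, which lets the verbose `or_insert_with` blocks later in this diff collapse to one line. Per the macro body above, a call like `set! {a, b}` expands to the equivalent of:

    {
        let mut _set = AHashSet::new();
        _set.insert(a);
        _set.insert(b);
        _set
    }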
@@ -15,8 +29,10 @@ pub struct Library {
     // aliases -> files, so we can easily check for conflicts
     pub reverse_aliases: AHashMap<String, AHashSet<PathBuf>>,
     pub translations: AHashMap<PathBuf, AHashSet<PathBuf>>,
+    pub backlinks: AHashMap<String, AHashSet<PathBuf>>,
     // A mapping of {lang -> {slug -> {term -> vec<paths>}}}
     taxonomies_def: AHashMap<String, AHashMap<String, AHashMap<String, Vec<PathBuf>>>>,
+    // All the taxonomies from config.toml in their slugified version
     // So we don't need to pass the Config when adding a page to know how to slugify and we only
     // slugify once
     taxo_name_to_slug: AHashMap<String, String>,
@@ -44,11 +60,7 @@ impl Library {
                 .and_modify(|s| {
                     s.insert(file_path.to_path_buf());
                 })
-                .or_insert_with(|| {
-                    let mut s = AHashSet::new();
-                    s.insert(file_path.to_path_buf());
-                    s
-                });
+                .or_insert_with(|| set! {file_path.to_path_buf()});
         }
     }
 
@@ -105,6 +117,33 @@ impl Library {
         self.sections.insert(file_path, section);
     }
 
+    /// Fills a map of target -> {content mentioning it}
+    /// This can only be called _after_ rendering markdown as we need to have accumulated all
+    /// the links first
+    pub fn fill_backlinks(&mut self) {
+        self.backlinks.clear();
+
+        let mut add_backlink = |target: &str, source: &Path| {
+            self.backlinks
+                .entry(target.to_owned())
+                .and_modify(|s| {
+                    s.insert(source.to_path_buf());
+                })
+                .or_insert(set! {source.to_path_buf()});
+        };
+
+        for (_, page) in &self.pages {
+            for (internal_link, _) in &page.internal_links {
+                add_backlink(internal_link, &page.file.path);
+            }
+        }
+        for (_, section) in &self.sections {
+            for (internal_link, _) in &section.internal_links {
+                add_backlink(internal_link, &section.file.path);
+            }
+        }
+    }
+
     /// This is called _before_ rendering the markdown of the pages/sections
     pub fn find_taxonomies(&self, config: &Config) -> Vec<Taxonomy> {
         let mut taxonomies = Vec::new();
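The structure being built here is a small inverted index: every page and section already records the internal links it contains, and `fill_backlinks` flips that into target -> {sources}. A minimal standalone sketch of the same inversion, with hypothetical paths and `std` collections standing in for the `AHash*` aliases (it uses `or_default()` where the commit uses `and_modify`/`or_insert` with `set!`):

    use std::collections::{HashMap, HashSet};

    fn main() {
        // Hypothetical data: (source file, internal links it contains)
        let links = vec![
            ("blog/a.md", vec!["blog/b.md"]),
            ("blog/b.md", vec!["blog/a.md", "blog/c.md"]),
        ];

        // Invert into target -> {sources}, the shape `Library::backlinks` uses
        let mut backlinks: HashMap<String, HashSet<String>> = HashMap::new();
        for (source, targets) in links {
            for target in targets {
                backlinks.entry(target.to_owned()).or_default().insert(source.to_owned());
            }
        }

        assert_eq!(backlinks["blog/a.md"], HashSet::from(["blog/b.md".to_owned()]));
    }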
@@ -179,11 +218,7 @@ impl Library {
                     .and_modify(|trans| {
                         trans.insert(path.to_path_buf());
                     })
-                    .or_insert({
-                        let mut s = AHashSet::new();
-                        s.insert(path.to_path_buf());
-                        s
-                    });
+                    .or_insert(set! {path.to_path_buf()});
             }
         };
 
@@ -720,4 +755,28 @@ mod tests {
         // under the safe slugify strategy all terms should be distinct
         assert_eq!(tax.items.len(), 4);
     }
+
+    #[test]
+    fn can_fill_backlinks() {
+        let mut page1 = create_page("page1.md", "en", PageSort::None);
+        page1.internal_links.push(("page2.md".to_owned(), None));
+        let mut page2 = create_page("page2.md", "en", PageSort::None);
+        page2.internal_links.push(("_index.md".to_owned(), None));
+        let mut section1 = create_section("_index.md", "en", 10, false, SortBy::None);
+        section1.internal_links.push(("page1.md".to_owned(), None));
+        section1.internal_links.push(("page2.md".to_owned(), None));
+        let mut library = Library::default();
+        library.insert_page(page1);
+        library.insert_page(page2);
+        library.insert_section(section1);
+        library.fill_backlinks();
+
+        assert_eq!(library.backlinks.len(), 3);
+        assert_eq!(library.backlinks["page1.md"], set! {PathBuf::from("_index.md")});
+        assert_eq!(
+            library.backlinks["page2.md"],
+            set! {PathBuf::from("page1.md"), PathBuf::from("_index.md")}
+        );
+        assert_eq!(library.backlinks["_index.md"], set! {PathBuf::from("page2.md")});
+    }
 }
 
@@ -8,6 +8,12 @@ use crate::{Page, Section};
 use libs::tera::{Map, Value};
 use utils::table_of_contents::Heading;
 
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct BackLink<'a> {
+    pub permalink: &'a str,
+    pub title: &'a Option<String>,
+}
+
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct TranslatedContent<'a> {
     pub lang: &'a str,
@@ -17,6 +23,22 @@ pub struct TranslatedContent<'a> {
     pub path: &'a Path,
 }
 
+fn find_backlinks<'a>(relative_path: &str, library: &'a Library) -> Vec<BackLink<'a>> {
+    let mut backlinks = Vec::new();
+    if let Some(b) = library.backlinks.get(relative_path) {
+        for backlink in b {
+            if let Some(p) = library.pages.get(backlink) {
+                backlinks.push(BackLink { permalink: &p.permalink, title: &p.meta.title });
+            }
+            if let Some(s) = library.sections.get(backlink) {
+                backlinks.push(BackLink { permalink: &s.permalink, title: &s.meta.title });
+            }
+        }
+        backlinks.sort_by_key(|b| b.permalink);
+    }
+    backlinks
+}
+
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct SerializingPage<'a> {
     relative_path: &'a str,
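Worth noting: `find_backlinks` sorts its results by `permalink` before returning, so the backlink lists exposed below come out in a stable order across builds rather than whatever iteration order the hash set happens to yield.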
@@ -45,6 +67,7 @@ pub struct SerializingPage<'a> {
     lower: Option<Box<SerializingPage<'a>>>,
     higher: Option<Box<SerializingPage<'a>>>,
     translations: Vec<TranslatedContent<'a>>,
+    backlinks: Vec<BackLink<'a>>,
 }
 
 impl<'a> SerializingPage<'a> {
@@ -60,6 +83,7 @@ impl<'a> SerializingPage<'a> {
         let mut lower = None;
         let mut higher = None;
         let mut translations = vec![];
+        let mut backlinks = vec![];
 
         if let Some(lib) = library {
             translations = lib.find_translations(&page.file.canonical);
@@ -74,6 +98,8 @@ impl<'a> SerializingPage<'a> {
                 .as_ref()
                 .map(|p| Box::new(Self::new(&lib.pages[p], Some(lib), false)));
         }
+
+            backlinks = find_backlinks(&page.file.relative, &lib);
         }
 
         Self {
@@ -103,6 +129,7 @@ impl<'a> SerializingPage<'a> {
             lower,
             higher,
             translations,
+            backlinks,
         }
     }
 }
@@ -127,6 +154,7 @@ pub struct SerializingSection<'a> {
     pages: Vec<SerializingPage<'a>>,
     subsections: Vec<&'a str>,
     translations: Vec<TranslatedContent<'a>>,
+    backlinks: Vec<BackLink<'a>>,
 }
 
 #[derive(Debug)]
@@ -145,6 +173,7 @@ impl<'a> SerializingSection<'a> {
         let mut pages = Vec::with_capacity(section.pages.len());
         let mut subsections = Vec::with_capacity(section.subsections.len());
         let mut translations = Vec::new();
+        let mut backlinks = Vec::new();
 
         match mode {
             SectionSerMode::ForMarkdown => {}
@@ -162,6 +191,8 @@ impl<'a> SerializingSection<'a> {
                     pages.push(SerializingPage::new(&lib.pages[p], Some(lib), true));
                 }
             }
+
+            backlinks = find_backlinks(&section.file.relative, &lib);
         }
     }
 
@@ -184,6 +215,7 @@ impl<'a> SerializingSection<'a> {
             pages,
             subsections,
             translations,
+            backlinks,
         }
     }
 }
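Both serializing structs derive `Serialize`, so the new `backlinks` vectors travel into the template context alongside `translations`; in templates this should surface as `page.backlinks` and `section.backlinks`, each entry carrying a `permalink` and an optional `title`.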
@@ -57,7 +57,7 @@ fn configurable_render(
 
     tera.register_filter(
         "markdown",
-        templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone())
+        templates::filters::MarkdownFilter::new(config.clone(), permalinks.clone(), tera.clone()),
     );
     let mut context = RenderContext::new(
         &tera,
@@ -288,6 +288,10 @@ impl Site {
         tpls::register_early_global_fns(self)?;
         self.populate_sections();
         self.render_markdown()?;
+        {
+            let mut lib = self.library.write().unwrap();
+            lib.fill_backlinks();
+        }
         tpls::register_tera_global_fns(self);
 
         // Needs to be done after rendering markdown as we only get the anchors at that point
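The placement matters: `fill_backlinks` runs right after `render_markdown`, matching its doc comment (internal links are only accumulated while markdown is rendered), and before the Tera globals are registered so templates see a complete map. The extra scope block simply drops the write lock on the library before the build continues.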
@@ -10,7 +10,6 @@ use libs::tera::{
 };
 use markdown::{render_content, RenderContext};
 
-
 #[derive(Debug)]
 pub struct MarkdownFilter {
     config: Config,
@@ -19,11 +18,7 @@ pub struct MarkdownFilter {
 }
 
 impl MarkdownFilter {
-    pub fn new(
-        config: Config,
-        permalinks: HashMap<String, String>,
-        tera: Tera,
-    ) -> Self {
+    pub fn new(config: Config, permalinks: HashMap<String, String>, tera: Tera) -> Self {
         Self { config, permalinks, tera }
     }
 }
@@ -110,9 +105,9 @@ impl TeraFilter for NumFormatFilter {
 
 #[cfg(test)]
 mod tests {
-    use std::{collections::HashMap};
+    use std::collections::HashMap;
 
-    use libs::tera::{to_value, Tera, Filter};
+    use libs::tera::{to_value, Filter, Tera};
 
     use super::{base64_decode, base64_encode, MarkdownFilter, NumFormatFilter};
     use config::Config;
@@ -146,8 +141,8 @@ mod tests {
     fn markdown_filter_inline() {
         let mut args = HashMap::new();
         args.insert("inline".to_string(), to_value(true).unwrap());
-        let result =
-            MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()).filter(
+        let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default())
+            .filter(
                 &to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
                 &args,
             );
@@ -160,8 +155,8 @@ mod tests {
     fn markdown_filter_inline_tables() {
         let mut args = HashMap::new();
         args.insert("inline".to_string(), to_value(true).unwrap());
-        let result =
-            MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default()).filter(
+        let result = MarkdownFilter::new(Config::default(), HashMap::new(), Tera::default())
+            .filter(
                 &to_value(
                     &r#"
 |id|author_id| timestamp_created|title |content |
@@ -160,13 +160,13 @@ The `sort_by` front-matter variable can have the following values:
 ### `date`
 This will sort all pages by their `date` field, from the most recent (at the
 top of the list) to the oldest (at the bottom of the list). Each page will
-get `page.earlier` and `page.later` variables that contain the pages with
+get `page.lower` and `page.higher` variables that contain the pages with
 earlier and later dates, respectively.
 
 ### `title`
 This will sort all pages by their `title` field in natural lexical order, as
 defined by `natural_lexical_cmp` in the [lexical-sort] crate. Each page will
-get `page.title_prev` and `page.title_next` variables that contain the pages
+get `page.lower` and `page.higher` variables that contain the pages
 with previous and next titles, respectively.
 
 For example, here is a natural lexical ordering: "bachata, BART, bolero,
@@ -180,7 +180,7 @@ bolero, meter, underground, μ-kernel".
 ### `weight`
 This will sort all pages by their `weight` field, from lightest weight
 (at the top of the list) to heaviest (at the bottom of the list). Each
-page gets `page.lighter` and `page.heavier` variables that contain the
+page gets `page.lower` and `page.higher` variables that contain the
 pages with lighter and heavier weights, respectively.
 
 ### Reversed sorting
@@ -190,11 +190,7 @@ pages sorted by weight will be sorted from lightest (at the top) to heaviest
 (at the bottom); pages sorted by date will be sorted from oldest (at the top)
 to newest (at the bottom).
 
-`reverse` has no effect on:
-
-* `page.later` / `page.earlier`,
-* `page.title_prev` / `page.title_next`, or
-* `page.heavier` / `page.lighter`.
+`reverse` has no effect on `page.lower` / `page.higher`.
 
 If the section is paginated, set `paginate_reversed=true` in the front matter of the relevant section instead of using the filter.
 
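With the rename, the pair always follows the active sort: under `sort_by = "date"`, `page.higher` holds the page with the later date; under `title`, the next title in natural lexical order; under `weight`, the heavier page.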
@@ -34,14 +34,10 @@ toc: Array<Header>,
 word_count: Number;
 // Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time
 reading_time: Number;
-// `earlier` and `later` are only populated if the section variable `sort_by` is set to `date`
-// and only set when rendering the page itself
-earlier: Page?;
-later: Page?;
-// `heavier` and `lighter` are only populated if the section variable `sort_by` is set to `weight`
-// and only set when rendering the page itself
-heavier: Page?;
-lighter: Page?;
+// earlier / lighter
+lower: Page?;
+// later / heavier
+higher: Page?;
 // Year/month/day is only set if the page has a date and month/day are 1-indexed
 year: Number?;
 month: Number?;