Search is working now

parent ddf8970ad8
commit b0363298f5

(One file's diff is suppressed because it is too large.)
@@ -4,7 +4,7 @@ version = "0.1.0"
 authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
-elasticlunr-rs = "1"
+elasticlunr-rs = "2"
 ammonia = "1"
 lazy_static = "1"
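The hunk above is from the search crate's manifest: the only change is the bump of elasticlunr-rs from 1 to 2, presumably the release that exposes the `Language` type and `Index::with_language` constructor used in the next hunk; `ammonia` (HTML sanitizing) and `lazy_static` are untouched.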
@@ -2,15 +2,17 @@ extern crate elasticlunr;
 #[macro_use]
 extern crate lazy_static;
 extern crate ammonia;

+#[macro_use]
 extern crate errors;
 extern crate content;

 use std::collections::{HashMap, HashSet};
 use std::path::PathBuf;

-use elasticlunr::Index;
+use elasticlunr::{Index, Language};

 use content::Section;
+use errors::Result;

 pub const ELASTICLUNR_JS: &'static str = include_str!("elasticlunr.min.js");

@@ -33,16 +35,23 @@ lazy_static! {
 }


-/// Returns the generated JSON index with all the documents of the site added
+/// Returns the generated JSON index with all the documents of the site added using
+/// the language given
+/// Errors if the language given is not available in Elasticlunr
 /// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
-pub fn build_index(sections: &HashMap<PathBuf, Section>) -> String {
-    let mut index = Index::new(&["title", "body"]);
+pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> {
+    let language = match Language::from_code(lang) {
+        Some(l) => l,
+        None => { bail!("Tried to build search index for language {} which is not supported", lang); }
+    };
+
+    let mut index = Index::with_language(language, &["title", "body"]);

     for section in sections.values() {
         add_section_to_index(&mut index, section);
     }

-    index.to_json()
+    Ok(index.to_json())
 }

 fn add_section_to_index(index: &mut Index, section: &Section) {
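A minimal standalone sketch of the elasticlunr-rs 2 calls the new build_index leans on. Illustrative only: Language::from_code, Index::with_language and to_json appear in the hunk above, while add_doc and the example document are assumptions about what add_section_to_index (whose body is not shown here) ends up doing.

extern crate elasticlunr;

use elasticlunr::{Index, Language};

fn main() {
    // from_code returns None for codes elasticlunr has no support for;
    // build_index above turns that None into a bail!() error.
    let language = Language::from_code("en").expect("unsupported language code");

    // Same constructor call as in the diff: one index with a title and a body field.
    let mut index = Index::with_language(language, &["title", "body"]);

    // Assumed shape of what add_section_to_index does for each page:
    // the first argument is the document ref, the rest are the field values.
    index.add_doc("docs/installation/", &["Installation", "How to install and run the binary"]);

    // This JSON string is what ends up wrapped in `window.searchIndex = ...`.
    println!("{}", index.to_json());
}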
@@ -21,13 +21,12 @@ extern crate search;
 extern crate tempdir;

 use std::collections::HashMap;
-use std::fs::{remove_dir_all, copy};
+use std::fs::{create_dir_all, remove_dir_all, copy};
 use std::mem;
 use std::path::{Path, PathBuf};

 use glob::glob;
 use tera::{Tera, Context};
 use walkdir::WalkDir;
 use sass_rs::{Options as SassOptions, OutputStyle, compile_file};

 use errors::{Result, ResultExt};

@@ -522,10 +521,10 @@ impl Site {
     pub fn build_search_index(&self) -> Result<()> {
         // index first
         create_file(
-            &self.output_path.join("search_index.js"),
+            &self.output_path.join(&format!("search_index.{}.js", self.config.default_language)),
             &format!(
                 "window.searchIndex = {};",
-                search::build_index(&self.sections)
+                search::build_index(&self.sections, &self.config.default_language)?
            ),
        )?;
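Net effect of the hunk above: the index is now written to a per-language file (search_index.{default_language}.js instead of a fixed search_index.js) and the now-fallible build_index is propagated with `?`. For an English default language the written file would look roughly like this (illustrative shape only; the real payload is whatever elasticlunr's to_json serializes):

// search_index.en.js (illustrative)
window.searchIndex = {
    "fields": ["title", "body"],
    "pipeline": ["trimmer", "stopWordFilter", "stemmer"],
    "documentStore": {/* one entry per indexed section or page */}
};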
@@ -1,3 +1,30 @@
+.search-container {
+    display: inline-block;
+    position: relative;
+    width: 300px;
+
+    input {
+        width: 100%;
+    }
+}
+
 .search-results {
     display: none;
+    position: absolute;
+    background: white;
+    color: black;
+    padding: 1rem;
+    width: 100%;
+    box-shadow: 2px 2px 2px 0 rgba(0, 0, 0, 0.5);
+    max-height: 500px;
+    overflow: auto;
+
+    &__items {
+        list-style: none;
+    }
+
+    &__item {
+        margin-bottom: 1rem;
+        font-size: 0.9rem;
+    }
 }
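These styles pair with the markup added to the page template further down: the results dropdown is absolutely positioned under the 300px-wide search box and starts hidden (display: none); search.js switches it to block once there is a non-empty query.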
@@ -1,29 +1,141 @@
-function formatSearchResultHeader(term, count) {
-    if (count === 0) {
-        return "No search results for '" + term + "'.";
-    }
-
-    return count + " search result" + count > 1 ? "s" : "" + " for '" + term + "':";
-}
+function debounce(func, wait) {
+    var timeout;
+
+    return function () {
+        var context = this;
+        var args = arguments;
+        clearTimeout(timeout);
+
+        timeout = setTimeout(function () {
+            timeout = null;
+            func.apply(context, args);
+        }, wait);
+    };
+}

-function formatSearchResultItem(term, item) {
-    console.log(item);
+// Taken from mdbook
+// The strategy is as follows:
+// First, assign a value to each word in the document:
+//  Words that correspond to search terms (stemmer aware): 40
+//  Normal words: 2
+//  First word in a sentence: 8
+// Then use a sliding window with a constant number of words and count the
+// sum of the values of the words within the window. Then use the window that got the
+// maximum sum. If there are multiple maximas, then get the last one.
+// Enclose the terms in <b>.
+function makeTeaser(body, terms) {
+    var TERM_WEIGHT = 40;
+    var NORMAL_WORD_WEIGHT = 2;
+    var FIRST_WORD_WEIGHT = 8;
+    var TEASER_MAX_WORDS = 30;
+
+    var stemmedTerms = terms.map(function (w) {
+        return elasticlunr.stemmer(w.toLowerCase());
+    });
+    var termFound = false;
+    var index = 0;
+    var weighted = []; // contains elements of ["word", weight, index_in_document]
+
+    // split in sentences, then words
+    var sentences = body.toLowerCase().split(". ");
+
+    for (var i in sentences) {
+        var words = sentences[i].split(" ");
+        var value = FIRST_WORD_WEIGHT;
+
+        for (var j in words) {
+            var word = words[j];
+
+            if (word.length > 0) {
+                for (var k in stemmedTerms) {
+                    if (elasticlunr.stemmer(word).startsWith(stemmedTerms[k])) {
+                        value = TERM_WEIGHT;
+                        termFound = true;
+                    }
+                }
+                weighted.push([word, value, index]);
+                value = NORMAL_WORD_WEIGHT;
+            }
+
+            index += word.length;
+            index += 1; // ' ' or '.' if last word in sentence
+        }
+
+        index += 1; // because we split at a two-char boundary '. '
+    }
+
+    if (weighted.length === 0) {
+        return body;
+    }
+
+    var windowWeights = [];
+    var windowSize = Math.min(weighted.length, TEASER_MAX_WORDS);
+    // We add a window with all the weights first
+    var curSum = 0;
+    for (var i = 0; i < windowSize; i++) {
+        curSum += weighted[i][1];
+    }
+    windowWeights.push(curSum);
+
+    for (var i = 0; i < weighted.length - windowSize; i++) {
+        curSum -= weighted[i][1];
+        curSum += weighted[i + windowSize][1];
+        windowWeights.push(curSum);
+    }
+
+    // If we didn't find the term, just pick the first window
+    var maxSumIndex = 0;
+    if (termFound) {
+        var maxFound = 0;
+        // backwards
+        for (var i = windowWeights.length - 1; i >= 0; i--) {
+            if (windowWeights[i] > maxFound) {
+                maxFound = windowWeights[i];
+                maxSumIndex = i;
+            }
+        }
+    }
+
+    var teaser = [];
+    var startIndex = weighted[maxSumIndex][2];
+    for (var i = maxSumIndex; i < maxSumIndex + windowSize; i++) {
+        var word = weighted[i];
+        if (startIndex < word[2]) {
+            // missing text from index to start of `word`
+            teaser.push(body.substring(startIndex, word[2]));
+            startIndex = word[2];
+        }
+
+        // add <em/> around search terms
+        if (word[1] === TERM_WEIGHT) {
+            teaser.push("<b>");
+        }
+        startIndex = word[2] + word[0].length;
+        teaser.push(body.substring(word[2], startIndex));
+
+        if (word[1] === TERM_WEIGHT) {
+            teaser.push("</b>");
+        }
+    }
+    teaser.push("…");
+    return teaser.join("");
+}
+
+function formatSearchResultItem(item, terms) {
     return '<div class="search-results__item">'
-        + item
+        + `<a href="${item.ref}">${item.doc.title}</a>`
+        + `<div>${makeTeaser(item.doc.body, terms)}</div>`
         + '</div>';
 }

 function initSearch() {
     var $searchInput = document.getElementById("search");
     var $searchResults = document.querySelector(".search-results");
     var $searchResultsHeader = document.querySelector(".search-results__headers");
     var $searchResultsItems = document.querySelector(".search-results__items");
+    var MAX_ITEMS = 10;

     var options = {
         bool: "AND",
+        expand: true,
+        teaser_word_count: 30,
+        limit_results: 30,
         fields: {
             title: {boost: 2},
             body: {boost: 1},

@@ -32,22 +144,25 @@ function initSearch() {
     var currentTerm = "";
     var index = elasticlunr.Index.load(window.searchIndex);

-    $searchInput.addEventListener("keyup", function() {
+    $searchInput.addEventListener("keyup", debounce(function() {
         var term = $searchInput.value.trim();
-        if (!index || term === "" || term === currentTerm) {
+        if (term === currentTerm || !index) {
             return;
         }
-        $searchResults.style.display = term === "" ? "block" : "none";
+        $searchResults.style.display = term === "" ? "none" : "block";
         $searchResultsItems.innerHTML = "";
+        if (term === "") {
+            return;
+        }

         var results = index.search(term, options);
         currentTerm = term;
         $searchResultsHeader.textContent = searchResultText(term, results.length);
-        for (var i = 0; i < results.length; i++) {
+        for (var i = 0; i < Math.min(results.length, MAX_ITEMS); i++) {
             var item = document.createElement("li");
-            item.innerHTML = formatSearchResult(results[i], term);
+            item.innerHTML = formatSearchResultItem(results[i], term.split(" "));
             $searchResultsItems.appendChild(item);
         }
-    });
+    }, 150));
 }
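The makeTeaser comment above describes the whole trick: with a window of, say, three words over per-word weights [8, 2, 40, 2], the window sums are 50 and 44, so the excerpt is cut around the window that contains the matched (stemmed) term. Below is a short sketch of how the pieces fit together at runtime; it is illustrative only and assumes elasticlunr.min.js and the generated search_index file have already been loaded, as the template change below arranges.

// Illustrative: query the prebuilt index and render highlighted teasers.
// `elasticlunr` and `window.searchIndex` come from the <script> tags in the
// template; `makeTeaser` is the helper added above.
var index = elasticlunr.Index.load(window.searchIndex);

var results = index.search("deploy", {
    bool: "AND",
    expand: true,
    fields: {title: {boost: 2}, body: {boost: 1}}
});

results.slice(0, 10).forEach(function (result) {
    // Each result exposes the document ref (the page URL) and the stored
    // title/body fields, which is what the teaser is built from.
    console.log(result.ref, makeTeaser(result.doc.body, ["deploy"]));
});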
@@ -18,15 +18,17 @@
         <a class="white" href="{{ get_url(path="./documentation/_index.md") }}" class="nav-link">Docs</a>
         <a class="white" href="{{ get_url(path="./themes/_index.md") }}" class="nav-link">Themes</a>
         <a class="white" href="https://github.com/Keats/gutenberg" class="nav-link">GitHub</a>
-        <input id="search" type="search" placeholder="Search the docs">
+
+        <div class="search-container">
+            <input id="search" type="search" placeholder="Search the docs">
+
+            <div class="search-results">
+                <div class="search-results__items"></div>
+            </div>
+        </div>
     </nav>
 </header>

-<div class="search-results">
-    <h2 class="search-results__header"></h2>
-    <div class="search-results__items"></div>
-</div>
-
 <div class="content {% block extra_content_class %}{% endblock extra_content_class %}">
     {% block content %}
         <div class="hero">

@@ -101,7 +103,7 @@
     </footer>

     <script type="text/javascript" src="{{ get_url(path="elasticlunr.min.js", trailing_slash=false) }}"></script>
-    <script type="text/javascript" src="{{ get_url(path="search_index.js", trailing_slash=false) }}"></script>
+    <script type="text/javascript" src="{{ get_url(path="search_index.en.js", trailing_slash=false) }}"></script>
     <script type="text/javascript" src="{{ get_url(path="search.js", trailing_slash=false) }}"></script>
 </body>
 </html>
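The script order in the final hunk is what lets search.js stay synchronous: elasticlunr.min.js defines the elasticlunr global, the generated search_index.en.js assigns window.searchIndex (its name now matches the default_language used when the Site writes the file), and search.js can then call elasticlunr.Index.load(window.searchIndex) directly.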