add backlinks
commit 7d097ebe72 (parent 9d8f308a4d)
2 changed files with 132 additions and 44 deletions
src/main.rs | 165
@@ -6,8 +6,8 @@ use chrono::{DateTime, Local};
 use clap::{App as ClapApp, Arg};
 use linkify::LinkFinder;
 use log::{info, trace};
-use percent_encoding::utf8_percent_encode;
-use pulldown_cmark::{html, Options, Parser};
+use percent_encoding::{percent_decode_str, utf8_percent_encode};
+use pulldown_cmark::{html, Event, Options, Parser, Tag};
 use regex::{Captures, Regex};
 use std::collections::HashMap;
 use std::fs::File;
@@ -33,7 +33,7 @@ struct MutableState {
 
 #[derive(Clone, Debug)]
 struct GardenCache {
-    pages: HashMap<PathBuf, ParsedPage>,
+    pages: HashMap<String, ParsedPage>,
     files: Vec<PathBuf>,
 }
 
@@ -167,20 +167,41 @@ async fn render(
     let full_path = data.garden_dir.join(path.as_str());
 
     // Redirect to ".md" version if requested path matches a .md file without the extension
-    if !full_path.exists() && full_path.extension().is_none() {
-        let md_path = format!("{}.md", path.to_string());
-        if Path::new(&md_path).exists() {
-            return Ok(HttpResponse::Found()
-                .header(http::header::LOCATION, md_path)
-                .finish());
-        }
-    }
+    if !full_path.exists()
+        && full_path.extension().is_none()
+        && Path::new(&format!("{}.md", full_path.to_str().unwrap())).exists()
+    {
+        return Ok(HttpResponse::Found()
+            .header(http::header::LOCATION, format!("{}.md", path.to_string()))
+            .finish());
+    }
 
     if full_path.exists() && !path.ends_with(".md") {
         return Ok(NamedFile::open(full_path)?.into_response(&request)?);
     }
 
-    let page = cache.pages.get(&full_path);
+    let filename = full_path
+        .components()
+        .last()
+        .unwrap()
+        .as_os_str()
+        .to_str()
+        .unwrap();
+
+    let normalized_name = normalize_name(filename);
+
+    let mut backlinks: Vec<String> = vec![];
+    for (path, page) in cache.pages.iter() {
+        if page
+            .links
+            .iter()
+            .any(|link| normalize_name(link.as_str()) == normalized_name)
+        {
+            backlinks.push(normalize_name(path));
+        }
+    }
+
+    let page = cache.pages.get(path.as_ref());
 
     let mut context = Context::new();
     context.insert("version", VERSION);
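Aside (not part of the commit): the backlink pass added above works by scanning every cached page and keeping those whose outgoing links, once normalized, match the normalized name of the page being rendered. Below is a minimal standalone sketch of that comparison, with a simplified stand-in for normalize_name (the real helper percent-decodes and strips a trailing ".md"); the function name backlinks_for and the sample pages are made up for illustration.

    use std::collections::HashMap;

    // Simplified stand-in for the commit's normalize_name, which
    // percent-decodes and strips a trailing ".md".
    fn normalize_name(name: &str) -> String {
        name.trim_end_matches(".md").replace("%20", " ")
    }

    // Collect the normalized names of every page whose outgoing links
    // point at `target` after normalization.
    fn backlinks_for(target: &str, pages: &HashMap<String, Vec<String>>) -> Vec<String> {
        let normalized = normalize_name(target);
        pages
            .iter()
            .filter(|(_, links)| links.iter().any(|l| normalize_name(l) == normalized))
            .map(|(path, _)| normalize_name(path))
            .collect()
    }

    fn main() {
        let mut pages = HashMap::new();
        pages.insert("index.md".to_string(), vec!["Some%20Page.md".to_string()]);
        pages.insert("other.md".to_string(), vec![]);
        // "index.md" links to "Some%20Page.md", so it is a backlink of "Some Page.md".
        assert_eq!(backlinks_for("Some Page.md", &pages), vec!["index".to_string()]);
    }
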
@@ -193,14 +214,7 @@ async fn render(
         "page_title",
         &match page {
             Some(page) => page.title.clone(),
-            None => full_path
-                .components()
-                .last()
-                .unwrap()
-                .as_os_str()
-                .to_str()
-                .unwrap()
-                .to_string(),
+            None => filename.to_string(),
         },
     );
     context.insert(
@@ -213,6 +227,7 @@ async fn render(
             .map_err(ErrorInternalServerError)?,
         },
     );
+    context.insert("backlinks", &backlinks);
     context.insert(
         "mtime",
         &match page {
@@ -265,12 +280,12 @@ fn update_garden<P: AsRef<Path>>(
 
     let markdown_paths = files
         .iter()
-        .filter(|p| p.to_str().unwrap_or("").ends_with(".md"))
-        .map(|p| garden_path.join(p));
+        .filter(|p| p.to_str().unwrap_or("").ends_with(".md"));
     for path in markdown_paths {
         trace!("Loading {} into cache...", path.display());
-        let mtime = path.metadata().unwrap().modified().ok();
-        if let Some(page) = pages.get(&path) {
+        let full_path = garden_path.join(path);
+        let mtime = full_path.metadata().unwrap().modified().ok();
+        if let Some(page) = pages.get(path.to_str().unwrap()) {
             if let (Some(fs_time), Some(last_time)) = (mtime, page.timestamp) {
                 if fs_time == last_time {
                     continue;
@@ -278,34 +293,29 @@ fn update_garden<P: AsRef<Path>>(
             }
         }
 
-        let mut file = File::open(&path)?;
+        let mut file = File::open(&full_path)?;
         let mut file_string = String::new();
        file.read_to_string(&mut file_string)?;
         let markdown_source = preprocess_markdown(file_string);
-        let parser = Parser::new_ext(markdown_source.as_str(), Options::all());
-        let mut html_output = String::new();
-        html::push_html(&mut html_output, parser);
-
-        // TODO!
-        let h1_regex = Regex::new(r"<h1>([^>]+)</h1>").unwrap();
-        let title = match h1_regex.captures(&html_output) {
-            Some(h1_match) => h1_match.get(1).unwrap().as_str(),
-            _ => &path
-                .components()
-                .last()
-                .unwrap()
-                .as_os_str()
-                .to_str()
-                .unwrap_or("???"),
-        };
+        let result = GardenParser::parse(&markdown_source);
 
         pages.insert(
-            path.clone(),
+            String::from(path.to_str().unwrap()),
             ParsedPage {
                 timestamp: mtime,
-                html: html_output.clone(),
-                title: String::from(title),
-                links: vec![], // todo!,
+                html: result.html,
+                links: result.links,
+                title: match result.title {
+                    Some(title) => title,
+                    _ => String::from(
+                        path.components()
+                            .last()
+                            .unwrap()
+                            .as_os_str()
+                            .to_str()
+                            .unwrap_or("???"),
+                    ),
+                },
             },
         );
     }
@@ -315,6 +325,67 @@ fn update_garden<P: AsRef<Path>>(
     Ok(result)
 }
 
+struct GardenParser<'a> {
+    parser: Parser<'a>,
+    last_nontext_event: Option<Event<'a>>,
+    current_top_heading: u32,
+    top_heading_text: &'a mut Option<String>,
+    links: &'a mut Vec<String>,
+}
+
+struct ParseResult {
+    html: String,
+    title: Option<String>,
+    links: Vec<String>,
+}
+
+impl<'a> GardenParser<'a> {
+    fn parse<S: AsRef<str>>(text: &'a S) -> ParseResult {
+        let mut title: Option<String> = None;
+        let mut links: Vec<String> = vec![];
+
+        let parser = GardenParser {
+            parser: Parser::new_ext(text.as_ref(), Options::all()),
+            last_nontext_event: None,
+            current_top_heading: 999,
+            top_heading_text: &mut title,
+            links: &mut links,
+        };
+
+        let mut html = String::new();
+        html::push_html(&mut html, parser);
+
+        ParseResult { html, title, links }
+    }
+}
+
+impl<'a> Iterator for GardenParser<'a> {
+    type Item = Event<'a>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let event = self.parser.next();
+
+        if let Some(event) = &event {
+            if let Event::Start(Tag::Link(_, str, _)) = &event {
+                self.links.push(str.to_string());
+            }
+
+            if let Some(Event::Start(Tag::Heading(hl))) = self.last_nontext_event {
+                if hl < self.current_top_heading {
+                    self.current_top_heading = hl;
+                    if let Event::Text(str) = &event {
+                        *self.top_heading_text = Some(str.clone().into_string());
+                    }
+                }
+            }
+
+            self.last_nontext_event = Some(event.clone());
+        }
+
+        event
+    }
+}
+
 fn preprocess_markdown(string: String) -> String {
     let double_brackets = Regex::new(r"\[\[(?P<inner>[\w .]+)\]\]").unwrap();
     let finder = LinkFinder::new();
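Aside (not part of the commit): GardenParser is an iterator adapter. html::push_html pulls events through next(), which records every link destination and the text of the topmost heading as they stream past. Assuming the pulldown_cmark version this crate appears to use, where Tag::Heading carries a u32 level and Tag::Link carries the destination string, the raw event stream for a small note looks like what this sketch prints:

    use pulldown_cmark::{Event, Options, Parser, Tag};

    fn main() {
        let src = "# My Note\n\nSee [another note](Other%20Note.md).";
        for event in Parser::new_ext(src, Options::all()) {
            match event {
                // The adapter remembers this event so the following Text(..)
                // can be taken as the page title.
                Event::Start(Tag::Heading(level)) => println!("heading start (level {})", level),
                // Link destinations are what the backlink pass later normalizes and compares.
                Event::Start(Tag::Link(_, dest, _)) => println!("link -> {}", dest),
                Event::Text(text) => println!("text: {:?}", text),
                _ => {}
            }
        }
    }

For that input the parser emits a heading start, the text "My Note" (captured as the title), and a link whose destination is still percent-encoded as Other%20Note.md, which is why the backlink comparison percent-decodes before matching names.
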
@@ -351,3 +422,9 @@ fn preprocess_markdown(string: String) -> String {
 
     result_string
 }
+
+fn normalize_name(filename: &str) -> String {
+    let decoded = percent_decode_str(filename).decode_utf8_lossy();
+    let result = decoded.strip_suffix(".md");
+    String::from(result.unwrap_or(filename))
+}
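Aside (not part of the commit): normalize_name is what lets a percent-encoded wiki-link target and a plain filename compare equal; it percent-decodes and then drops a trailing ".md". A quick check of its behavior, copying the function body from the hunk above and assuming the percent-encoding crate's percent_decode_str:

    use percent_encoding::percent_decode_str;

    fn normalize_name(filename: &str) -> String {
        let decoded = percent_decode_str(filename).decode_utf8_lossy();
        let result = decoded.strip_suffix(".md");
        String::from(result.unwrap_or(filename))
    }

    fn main() {
        assert_eq!(normalize_name("My%20Note.md"), "My Note");
        assert_eq!(normalize_name("My Note"), "My Note");
        // Without a ".md" suffix, the original (still percent-encoded) name is returned as-is.
        assert_eq!(normalize_name("My%20Note"), "My%20Note");
    }
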
@@ -28,6 +28,17 @@
     <main>
       {{content | safe}}
 
+      <hr>
+
+      {% if backlinks %}
+      <h1>Linked from:</h1>
+      <ul>
+        {% for link in backlinks %}
+        <li><a href="/{{ link }}">{{link}}</a></li>
+        {% endfor %}
+      </ul>
+      {% endif %}
+
       {% if mtime %}
       <footer>
         Last modified at {{mtime}}