use actix_files::NamedFile;
use actix_web::error::ErrorInternalServerError;
use actix_web::{error, get, http, middleware, web, App, Error, HttpResponse, HttpServer};
use anyhow::anyhow;
use chrono::{DateTime, Local};
use clap::{App as ClapApp, Arg};
use linkify::LinkFinder;
use log::{info, trace};
use percent_encoding::utf8_percent_encode;
use pulldown_cmark::{html, Options, Parser};
use regex::{Captures, Regex};
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::time::SystemTime;
use std::{env, fs};
use tera::{Context, Tera};

/// Immutable, per-worker application state.
#[derive(Clone)]
struct State {
    garden_dir: PathBuf,
    index_file: Option<String>,
    title: Option<String>,
    tera: Tera,
}

/// Shared mutable state: the parsed garden, guarded by a mutex.
struct MutableState {
    garden_cache: Mutex<GardenCache>,
}

#[derive(Clone, Debug)]
struct GardenCache {
    /// Parsed Markdown pages, keyed by their full path on disk.
    pages: HashMap<PathBuf, ParsedPage>,
    /// All non-hidden files in the garden, relative to the garden root.
    files: Vec<PathBuf>,
}

impl Default for GardenCache {
    fn default() -> Self {
        GardenCache {
            pages: HashMap::new(),
            files: vec![],
        }
    }
}

#[derive(Clone, Debug)]
struct ParsedPage {
    timestamp: Option<SystemTime>,
    title: String,
    html: String,
    links: Vec<String>,
}

const VERSION: &str = env!("CARGO_PKG_VERSION");

fn main() -> anyhow::Result<()> {
    let env = env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info");
    env_logger::init_from_env(env);

    let app = ClapApp::new("gardenserver")
        .version(VERSION)
        .author("Tomáš Mládek ")
        .arg(Arg::with_name("DIRECTORY").required(true).index(1))
        .arg(
            Arg::with_name("BIND")
                .long("bind")
                .default_value("127.0.0.1:8642")
                .help("address and port to bind the Web interface on")
                .required(true),
        )
        .arg(
            Arg::with_name("INDEX_FILE")
                .takes_value(true)
                .short("i")
                .long("index")
                .help("File to be served at the root."),
        )
        .arg(
            Arg::with_name("TITLE")
                .takes_value(true)
                .short("t")
                .long("title")
                .help("Title of this digital garden."),
        );

    let matches = app.get_matches();
    let directory = Path::new(matches.value_of("DIRECTORY").unwrap());

    info!(
        "Starting GardenServer {} of {}...",
        VERSION,
        directory.display()
    );

    let tera = Tera::new("templates/**/*.html")?;
    let sys = actix::System::new("gardenserver");

    let bind: SocketAddr = matches
        .value_of("BIND")
        .unwrap()
        .parse()
        .expect("Incorrect bind format.");
    info!("Starting server at: {}", &bind);

    let mutable_state = web::Data::new(MutableState {
        garden_cache: Mutex::new(update_garden(directory, GardenCache::default())?),
    });

    let state = State {
        garden_dir: directory.to_path_buf(),
        index_file: matches.value_of("INDEX_FILE").map(|s| s.to_string()),
        title: matches.value_of("TITLE").map(|s| s.to_string()),
        tera,
    };

    // Start HTTP server
    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::default())
            .data(state.clone())
            .app_data(mutable_state.clone())
            .service(actix_files::Files::new("/static", "templates"))
            .service(render)
    })
    .bind(&bind)?
    .run();

    Ok(sys.run()?)
}

#[get("{path:.*}")]
async fn render(
    request: web::HttpRequest,
    data: web::Data<State>,
    state: web::Data<MutableState>,
    path: web::Path<String>,
) -> Result<HttpResponse, Error> {
    // Refresh the garden cache before serving anything.
    let mut cache = state.garden_cache.lock().unwrap();
    *cache = update_garden(&data.garden_dir, (*cache).clone())
        .map_err(error::ErrorInternalServerError)?;

    // Redirect to index if path is empty.
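    // If no index file was configured, fall back to the first Markdown file
    // in the garden (or, failing that, the first file of any kind).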
    if path.is_empty() {
        let location = match data.index_file.as_ref() {
            Some(index_file) => index_file.clone(),
            None => cache
                .files
                .iter()
                .filter(|f| f.to_str().unwrap().ends_with(".md"))
                .collect::<Vec<_>>()
                .first()
                .unwrap_or(&cache.files.first().unwrap())
                .display()
                .to_string(),
        };
        return Ok(HttpResponse::Found()
            .header(http::header::LOCATION, location.as_str())
            .finish());
    }

    let full_path = data.garden_dir.join(path.as_str());

    // Redirect to the ".md" version if the requested path matches a .md file
    // without the extension.
    if !full_path.exists() && full_path.extension().is_none() {
        let md_path = format!("{}.md", path.to_string());
        if data.garden_dir.join(&md_path).exists() {
            return Ok(HttpResponse::Found()
                .header(http::header::LOCATION, md_path)
                .finish());
        }
    }

    // Anything that exists but isn't a Markdown page is served directly from disk.
    if full_path.exists() && !path.ends_with(".md") {
        return Ok(NamedFile::open(full_path)?.into_response(&request)?);
    }

    let page = cache.pages.get(&full_path);

    let mut context = Context::new();
    context.insert("version", VERSION);
    context.insert(
        "garden_title",
        data.title.as_ref().unwrap_or(&"Digital Garden".to_string()),
    );
    context.insert("files", &cache.files);
    context.insert(
        "page_title",
        &match page {
            Some(page) => page.title.clone(),
            None => full_path
                .components()
                .last()
                .unwrap()
                .as_os_str()
                .to_str()
                .unwrap()
                .to_string(),
        },
    );
    context.insert(
        "content",
        &match page {
            Some(page) => page.html.clone(),
            None => data
                .tera
                .render("_not_found.html", &Context::new())
                .map_err(ErrorInternalServerError)?,
        },
    );
    context.insert(
        "mtime",
        &match page {
            Some(page) => {
                if let Some(timestamp) = page.timestamp {
                    let mtime: DateTime<Local> = timestamp.into();
                    Some(mtime.format("%c").to_string())
                } else {
                    None
                }
            }
            None => None,
        },
    );

    Ok(HttpResponse::Ok().body(
        data.tera
            .render("main.html", &context)
            .map_err(ErrorInternalServerError)?,
    ))
}

/// Scan the garden directory and (re)parse any Markdown files that have
/// changed since the last run, reusing the existing cache where possible.
fn update_garden<P: AsRef<Path>>(
    garden_path: P,
    current: GardenCache,
) -> anyhow::Result<GardenCache> {
    let garden_path = garden_path.as_ref();

    // List all non-hidden files, relative to the garden root.
    let mut files: Vec<PathBuf> = fs::read_dir(&garden_path)?
        .filter_map(|entry| {
            if let Ok(entry) = entry {
                let path = entry.path();
                if path.is_file() {
                    let stripped_path = path.strip_prefix(&garden_path).unwrap().to_path_buf();
                    if !stripped_path.to_str().unwrap().starts_with('.') {
                        return Some(stripped_path);
                    }
                }
            }
            None
        })
        .collect();
    files.sort();

    if files.is_empty() {
        return Err(anyhow!("Garden is empty."));
    }

    let mut pages = current.pages.clone();
    let markdown_paths = files
        .iter()
        .filter(|p| p.to_str().unwrap_or("").ends_with(".md"))
        .map(|p| garden_path.join(p));

    for path in markdown_paths {
        trace!("Loading {} into cache...", path.display());
        let mtime = path.metadata().unwrap().modified().ok();

        // Skip files whose cached version is still up to date.
        if let Some(page) = pages.get(&path) {
            if let (Some(fs_time), Some(last_time)) = (mtime, page.timestamp) {
                if fs_time == last_time {
                    continue;
                }
            }
        }

        let mut file = File::open(&path)?;
        let mut file_string = String::new();
        file.read_to_string(&mut file_string)?;

        let markdown_source = preprocess_markdown(file_string);
        let parser = Parser::new_ext(markdown_source.as_str(), Options::all());
        let mut html_output = String::new();
        html::push_html(&mut html_output, parser);

        // TODO!
        let h1_regex = Regex::new(r"<h1>([^>]+)</h1>").unwrap();
        let title = match h1_regex.captures(&html_output) {
            Some(h1_match) => h1_match.get(1).unwrap().as_str(),
            _ => &path
                .components()
                .last()
                .unwrap()
                .as_os_str()
                .to_str()
                .unwrap_or("???"),
        };

        pages.insert(
            path.clone(),
            ParsedPage {
                timestamp: mtime,
                html: html_output.clone(),
                title: String::from(title),
                links: vec![], // todo!
            },
        );
    }

    let result = GardenCache { files, pages };
    trace!("{:#?}", result);
    Ok(result)
}

/// Rewrite "[[wiki links]]" as Markdown links and wrap bare URLs in angle
/// brackets so that pulldown-cmark turns them into hyperlinks.
fn preprocess_markdown(string: String) -> String {
    // NOTE: the capture group name is arbitrary; the capture is only
    // referenced by index below.
    let double_brackets = Regex::new(r"\[\[(?P<link>[\w .]+)\]\]").unwrap();
    let finder = LinkFinder::new();

    let result = double_brackets
        .replace_all(string.as_str(), |caps: &Captures| {
            format!(
                "[{}]({})",
                &caps[1],
                utf8_percent_encode(&caps[1], percent_encoding::NON_ALPHANUMERIC)
            )
        })
        .to_string();

    let result_vec = Vec::from(result.as_str());
    let start_delims = vec![b'(', b'<'];
    let end_delims = vec![b')', b'>'];

    // link.end() is the first char AFTER the link!
    let links = finder.links(result.as_str()).filter(|link| {
        link.start() == 0
            || link.end() == result.len()
            || !start_delims.contains(&result_vec[link.start() - 1])
            || !end_delims.contains(&result_vec[link.end()])
    });

    let mut offset = 0;
    let mut result_string = result.to_string();
    for link in links {
        let orig = link.as_str();
        let new = format!("<{}>", orig);
        result_string.replace_range((link.start() + offset)..(link.end() + offset), new.as_str());
        offset += new.len() - orig.len();
    }

    result_string
}