thumbnails module, "text thumbnails" on backend

feat/vaults
Tomáš Mládek 2021-12-23 23:45:46 +01:00
parent 0f3dd88193
commit a608659baa
GPG Key ID: ED21612889E75EC5
8 changed files with 193 additions and 28 deletions


@@ -65,5 +65,6 @@ nonempty = "0.6.0"
tempdir = "0.3.7"
[features]
default = ["desktop"]
default = ["desktop", "thumbnails"]
desktop = ["webbrowser", "opener", "is_executable"]
thumbnails = []


@@ -1,3 +1,5 @@
use std::path::PathBuf;
use chrono::NaiveDateTime;
use serde::Serialize;
@@ -15,6 +17,18 @@ pub struct File {
pub mtime: Option<NaiveDateTime>,
}
// todo - remove, try_from the actual model, impl queryable...
#[derive(Serialize, Clone, Debug)]
pub struct OutFile {
pub id: i32,
pub hash: Hash,
pub path: PathBuf,
pub valid: bool,
pub added: NaiveDateTime,
pub size: i64,
pub mtime: Option<NaiveDateTime>,
}
#[derive(Insertable, Debug)]
#[table_name = "files"]
pub struct NewFile {

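The todo above hints at replacing the hand-rolled File → OutFile mapping with a proper conversion. A minimal sketch of what such a helper could look like, assuming File.path stays vault-relative as it is now (hypothetical, not part of this commit — the mapping currently lives inline in retrieve_file):

use std::path::Path;

impl OutFile {
    // Hypothetical helper: resolve a DB row's vault-relative path against
    // the vault root to produce the externally usable OutFile.
    pub fn from_file(f: File, vault_path: &Path) -> Self {
        OutFile {
            id: f.id,
            hash: f.hash,
            path: vault_path.join(PathBuf::from(f.path)),
            valid: f.valid,
            added: f.added,
            size: f.size,
            mtime: f.mtime,
        }
    }
}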

@@ -29,6 +29,7 @@ use log::{debug, trace};
use std::convert::TryFrom;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
#[derive(Debug)]
@@ -73,7 +74,8 @@ pub struct OpenResult {
pub struct UpEndDatabase {
pool: DbPool,
pub vault_path: PathBuf,
pub vault_path: Arc<PathBuf>,
pub db_path: Arc<PathBuf>
}
pub const UPEND_SUBDIR: &str = ".upend";
@@ -113,7 +115,8 @@ impl UpEndDatabase {
let db = UpEndDatabase {
pool,
vault_path: PathBuf::from(dirpath.as_ref()),
vault_path: Arc::new(PathBuf::from(dirpath.as_ref())),
db_path: Arc::new(upend_path)
};
let connection = db.connection().unwrap();
@@ -146,15 +149,21 @@ impl UpEndDatabase {
}
pub fn connection(&self) -> Result<UpEndConnection> {
Ok(UpEndConnection(self.pool.get()?))
Ok(UpEndConnection {
conn: self.pool.get()?,
vault_path: self.vault_path.clone(),
})
}
}
pub struct UpEndConnection(PooledConnection<ConnectionManager<SqliteConnection>>);
pub struct UpEndConnection {
conn: PooledConnection<ConnectionManager<SqliteConnection>>,
vault_path: Arc<PathBuf>,
}
impl UpEndConnection {
pub fn execute<S: AsRef<str>>(&self, query: S) -> Result<usize, diesel::result::Error> {
self.0.execute(query.as_ref())
self.conn.execute(query.as_ref())
}
pub fn transaction<T, E, F>(&self, f: F) -> Result<T, E>
@@ -162,7 +171,7 @@ impl UpEndConnection {
F: FnOnce() -> Result<T, E>,
E: From<Error>,
{
self.0.transaction(f)
self.conn.transaction(f)
}
pub fn insert_file(&self, file: models::NewFile) -> Result<usize> {
@@ -176,23 +185,36 @@ impl UpEndConnection {
Ok(diesel::insert_into(files::table)
.values(file)
.execute(&self.0)?)
.execute(&self.conn)?)
}
pub fn retrieve_file(&self, obj_hash: Hash) -> Result<Vec<models::File>> {
pub fn retrieve_file(&self, obj_hash: Hash) -> Result<Vec<models::OutFile>> {
use crate::database::inner::schema::files::dsl::*;
let matches = files
.filter(valid.eq(true))
.filter(hash.eq(obj_hash.0))
.load::<models::File>(&self.0)?;
.load::<models::File>(&self.conn)?;
let matches = matches
.into_iter()
.map(|f| models::OutFile {
id: f.id,
hash: f.hash,
path: self.vault_path.join(PathBuf::from(f.path)),
valid: f.valid,
added: f.added,
size: f.size,
mtime: f.mtime,
})
.collect();
Ok(matches)
}
pub fn retrieve_all_files(&self) -> Result<Vec<models::File>> {
use crate::database::inner::schema::files::dsl::*;
let matches = files.load::<models::File>(&self.0)?;
let matches = files.load::<models::File>(&self.conn)?;
Ok(matches)
}
@@ -202,23 +224,19 @@ impl UpEndConnection {
let matches = files
.order_by(added.desc())
.limit(count)
.load::<models::File>(&self.0)?;
.load::<models::File>(&self.conn)?;
Ok(matches)
}
pub fn file_update_mtime(
&self,
file_id: i32,
m_time: Option<NaiveDateTime>,
) -> Result<usize> {
pub fn file_update_mtime(&self, file_id: i32, m_time: Option<NaiveDateTime>) -> Result<usize> {
use crate::database::inner::schema::files::dsl::*;
debug!("Setting file ID {}'s mtime = {:?}", file_id, m_time);
Ok(diesel::update(files.filter(id.eq(file_id)))
.set(mtime.eq(m_time))
.execute(&self.0)?)
.execute(&self.conn)?)
}
pub fn file_set_valid(&self, file_id: i32, is_valid: bool) -> Result<usize> {
@@ -228,7 +246,7 @@ impl UpEndConnection {
Ok(diesel::update(files.filter(id.eq(file_id)))
.set(valid.eq(is_valid))
.execute(&self.0)?)
.execute(&self.conn)?)
}
pub fn retrieve_object(&self, object_address: Address) -> Result<Vec<Entry>> {
@@ -237,7 +255,7 @@ impl UpEndConnection {
let primary = data
.filter(entity.eq(object_address.encode()?))
.or_filter(value.eq(EntryValue::Address(object_address).to_string()?))
.load::<models::Entry>(&self.0)?;
.load::<models::Entry>(&self.conn)?;
let entries = primary
.iter()
@@ -255,7 +273,7 @@ impl UpEndConnection {
.collect::<Result<Vec<Vec<u8>>>>()?,
),
)
.load::<models::Entry>(&self.0)?;
.load::<models::Entry>(&self.conn)?;
let secondary_entries = secondary
.iter()
@@ -275,7 +293,7 @@ impl UpEndConnection {
.or_filter(entity.eq(object_address.encode()?))
.or_filter(value.eq(EntryValue::Address(object_address).to_string()?));
Ok(diesel::delete(matches).execute(&self.0)?)
Ok(diesel::delete(matches).execute(&self.conn)?)
}
pub fn query(&self, query: Query) -> Result<Vec<Entry>> {
@@ -287,7 +305,7 @@ impl UpEndConnection {
trace!("DB query: {}", debug_query(&db_query));
let matches = db_query.load::<models::Entry>(&self.0)?;
let matches = db_query.load::<models::Entry>(&self.conn)?;
let entries = matches
.iter()
@@ -307,7 +325,7 @@ impl UpEndConnection {
let result = diesel::insert_into(data::table)
.values(insert_entry)
.execute(&self.0);
.execute(&self.conn);
if let Some(error) = result.err() {
match error {

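Since every UpEndConnection now carries vault_path, retrieve_file hands back paths already joined against the vault root. A minimal usage sketch (some_hash and the surrounding error handling are illustrative, not from this commit):

// Hypothetical call site: OutFile.path is prefixed with the vault root,
// so it can be opened directly without any further joining.
let connection = upend.connection()?;
for file in connection.retrieve_file(some_hash)? {
    debug_assert!(file.path.starts_with(&*upend.vault_path));
    let _contents = std::fs::read(&file.path)?;
}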

@@ -117,8 +117,8 @@ fn _rescan_vault<D: Borrow<UpEndDatabase>>(
// Walk through the vault, find all paths
debug!("Traversing vault directory");
let absolute_dir_path = fs::canonicalize(&db.vault_path)?;
let path_entries: Vec<PathBuf> = WalkDir::new(&db.vault_path)
let absolute_dir_path = fs::canonicalize(&*db.vault_path)?;
let path_entries: Vec<PathBuf> = WalkDir::new(&*db.vault_path)
.follow_links(true)
.into_iter()
.filter_map(|e| e.ok())


@@ -16,6 +16,7 @@ use log::{info, warn};
use std::sync::{Arc, RwLock};
use crate::database::UpEndDatabase;
use crate::thumbnails::ThumbnailStore;
mod addressing;
mod database;
@@ -23,6 +24,9 @@ mod filesystem;
mod routes;
mod util;
#[cfg(feature = "thumbnails")]
mod thumbnails;
const VERSION: &str = env!("CARGO_PKG_VERSION");
fn main() -> Result<()> {
@@ -96,6 +100,15 @@ fn main() -> Result<()> {
let upend = Arc::new(open_result.db);
#[cfg(feature = "thumbnails")]
let thumbnail_store = Some(Arc::new(ThumbnailStore::new(
upend.db_path.join("thumbnails"),
upend.clone(),
)));
#[cfg(not(feature = "thumbnails"))]
let thumbnail_store = None;
let bind: SocketAddr = matches
.value_of("BIND")
.unwrap()
@@ -119,6 +132,7 @@ fn main() -> Result<()> {
}),
),
job_container: job_container.clone(),
thumbnail_store,
};
// Start HTTP server
@@ -132,6 +146,7 @@ fn main() -> Result<()> {
.data(state.clone())
.wrap(middleware::Logger::default().exclude("/api/jobs"))
.service(routes::get_raw)
.service(routes::get_thumbnail)
.service(routes::get_query)
.service(routes::get_object)
.service(routes::put_object)


@@ -3,6 +3,7 @@ use crate::database::entry::{Entry, InEntry};
use crate::database::hierarchies::{list_roots, resolve_path, UHierPath};
use crate::database::lang::Query;
use crate::database::UpEndDatabase;
use crate::thumbnails::ThumbnailStore;
use crate::util::hash::{decode, encode};
use crate::util::jobs::JobContainer;
use actix_files::NamedFile;
@@ -28,6 +29,7 @@ pub struct State {
pub upend: Arc<UpEndDatabase>,
pub vault_name: Option<String>,
pub job_container: Arc<RwLock<JobContainer>>,
pub thumbnail_store: Option<Arc<ThumbnailStore>>,
}
#[derive(Deserialize)]
@@ -85,7 +87,7 @@ pub async fn get_raw(
#[cfg(not(feature = "desktop"))]
unreachable!()
} else {
Err(error::ErrorBadRequest("Desktop features not enabled."))
Err(error::ErrorNotImplemented("Desktop features not enabled."))
}
} else {
Err(error::ErrorNotFound("NOT FOUND"))
@@ -303,7 +305,31 @@ pub async fn get_jobs(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
Ok(HttpResponse::Ok().json(json!({
"name": state.vault_name,
"location": state.upend.vault_path,
"location": &*state.upend.vault_path,
"version": VERSION
})))
}
#[get("/api/thumb/{hash}")]
pub async fn get_thumbnail(
state: web::Data<State>,
hash: web::Path<String>,
) -> Result<NamedFile, Error> {
if let Some(thumbnail_store) = &state.thumbnail_store {
let address =
Address::decode(&decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
.map_err(ErrorInternalServerError)?;
if let Address::Hash(hash) = address {
let thumbnail_path = thumbnail_store
.get(hash)
.map_err(error::ErrorInternalServerError)?;
Ok(NamedFile::open(thumbnail_path)?)
} else {
Err(ErrorBadRequest(
"Address does not refer to a thumbnailable object.",
))
}
} else {
Err(error::ErrorNotImplemented("Thumbnails not enabled."))
}
}
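For reference, a hedged sketch of how a client addresses the new route; encoded stands in for the same encoded hash string the raw-file endpoint already accepts:

// Hypothetical client-side URL construction. A non-hash Address yields
// 400 Bad Request, and with the "thumbnails" feature compiled out the
// handler answers 501 Not Implemented.
let thumb_url = format!("/api/thumb/{}", encoded);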

src/thumbnails/mod.rs (new file, 76 lines added)

@@ -0,0 +1,76 @@
use crate::util::hash::Hash;
use crate::{database::UpEndDatabase, util::hash::encode};
use anyhow::{anyhow, Result};
use std::{
collections::HashMap,
fs::File,
io::Write,
path::{Path, PathBuf},
sync::{Arc, Mutex},
};
use self::text::TextPath;
pub mod text;
pub trait Thumbnailable {
fn get_thumbnail(&self) -> Result<Vec<u8>>;
}
pub struct ThumbnailStore {
path: PathBuf,
db: Arc<UpEndDatabase>,
locks: Mutex<HashMap<Hash, Arc<Mutex<PathBuf>>>>,
}
impl ThumbnailStore {
pub fn new<P: AsRef<Path>>(path: P, db: Arc<UpEndDatabase>) -> Self {
ThumbnailStore {
path: PathBuf::from(path.as_ref()),
db,
locks: Mutex::new(HashMap::new()),
}
}
fn get_path(&self, hash: &Hash) -> Arc<Mutex<PathBuf>> {
let mut locks = self.locks.lock().unwrap();
if let Some(path) = locks.get(hash) {
path.clone()
} else {
let thumbpath = self.path.join(encode(hash));
let path = Arc::new(Mutex::new(thumbpath));
locks.insert(hash.clone(), path.clone());
path
}
}
pub fn get(&self, hash: Hash) -> Result<PathBuf> {
let path_mutex = self.get_path(&hash);
let thumbpath = path_mutex.lock().unwrap();
if thumbpath.exists() {
Ok(thumbpath.clone())
} else {
let connection = self.db.connection()?;
let files = connection.retrieve_file(hash)?;
if let Some(file) = files.get(0) {
// because tree_magic panics on nonexistent paths, poisoning our mutexes
if !file.path.exists() {
return Err(anyhow!("File doesn't exist - shouldn't happen!"));
}
let data = match tree_magic::from_filepath(&file.path) {
tm if tm.starts_with("text") => Ok(TextPath(&file.path).get_thumbnail()?),
unknown => Err(anyhow!("No capability for {:?} thumbnails.", unknown)),
}?;
std::fs::create_dir_all(&self.path)?;
let mut file = File::create(&*thumbpath)?;
file.write_all(&data)?;
Ok(thumbpath.clone())
} else {
Err(anyhow!("Object not found, or is not a file."))
}
}
}
}
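A hedged sketch of how the store is driven, mirroring what main.rs and routes::get_thumbnail above already do (the hash value is illustrative):

// Hypothetical usage: get() returns the cached thumbnail path under
// <db_path>/thumbnails/<encoded hash> if it already exists; otherwise it
// renders the thumbnail, writes it out, and returns the path. Concurrent
// requests for the same hash are serialized by the per-hash mutex in locks.
let store = ThumbnailStore::new(upend.db_path.join("thumbnails"), upend.clone());
match store.get(hash) {
    Ok(path) => println!("thumbnail available at {}", path.display()),
    Err(err) => eprintln!("could not produce a thumbnail: {}", err),
}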

src/thumbnails/text.rs (new file, 15 lines added)

@@ -0,0 +1,15 @@
use anyhow::Result;
use std::{cmp::min, convert::TryInto, fs::File, io::Read, path::Path};
use super::Thumbnailable;
pub struct TextPath<'a>(pub &'a Path);
impl<'a> Thumbnailable for TextPath<'a> {
fn get_thumbnail(&self) -> Result<Vec<u8>> {
let mut f = File::open(self.0)?;
let mut buffer = vec![0u8; min(1024, f.metadata()?.len().try_into()?)];
f.read_exact(&mut buffer)?;
Ok(buffer)
}
}
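A hedged sketch of using the text thumbnailer directly, with the Thumbnailable trait in scope (the path is illustrative):

// Hypothetical direct use: a text file's "thumbnail" is simply its first
// 1024 bytes at most, read verbatim.
let preview = TextPath(Path::new("notes/readme.txt")).get_thumbnail()?;
assert!(preview.len() <= 1024);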