diff --git a/src/database/mod.rs b/src/database/mod.rs
index 994b3b0..9b9567f 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -79,8 +79,7 @@ pub struct OpenResult {
 pub struct UpEndDatabase {
     pool: DbPool,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
-    db_path: Arc<PathBuf>,
+    vault_path: Arc<PathBuf>,
 }
 
 pub const UPEND_SUBDIR: &str = ".upend";
@@ -123,7 +122,6 @@ impl UpEndDatabase {
             pool,
             lock: Arc::new(RwLock::new(())),
             vault_path: Arc::new(dirpath.as_ref().canonicalize()?),
-            db_path: Arc::new(upend_path),
         };
 
         let connection = db.connection().unwrap();
@@ -168,7 +166,6 @@ impl UpEndDatabase {
         Ok(UpEndConnection {
             conn: self.pool.get()?,
             lock: self.lock.clone(),
-            vault_path: self.vault_path.clone(),
        })
     }
 }
@@ -176,7 +173,6 @@ impl UpEndDatabase {
 pub struct UpEndConnection {
     conn: PooledConnection<ConnectionManager<SqliteConnection>>,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
 }
 
 impl UpEndConnection {
diff --git a/src/database/stores/fs/mod.rs b/src/database/stores/fs/mod.rs
index 85284b9..9ffa94d 100644
--- a/src/database/stores/fs/mod.rs
+++ b/src/database/stores/fs/mod.rs
@@ -608,11 +608,11 @@ impl UpStore for FsStore {
             .collect())
     }
 
-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash, StoreError> {
         let file_path = blob.get_file_path();
         let hash = file_path
@@ -629,7 +629,7 @@ impl UpStore for FsStore {
                 .map_err(|e| StoreError::Unknown(e.to_string()))?,
         );
 
-        let final_name = if let Some(name_hint) = name_hint.into() {
+        let final_name = if let Some(name_hint) = name_hint {
             format!("{addr_str}_{name_hint}")
         } else {
             addr_str
@@ -639,7 +639,6 @@ impl UpStore for FsStore {
 
             fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
 
-
             self.add_file(&connection, &final_path, hash.clone())
                 .map_err(|e| StoreError::Unknown(e.to_string()))?;
         }
@@ -647,9 +646,9 @@ impl UpStore for FsStore {
         Ok(hash)
     }
 
-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        db: D,
+        db: &UpEndDatabase,
         mut job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>, StoreError> {
         let job_result = job_container.add_job("REIMPORT", "Scaning vault directory...");
diff --git a/src/database/stores/mod.rs b/src/database/stores/mod.rs
index 66ca8d4..44e36bd 100644
--- a/src/database/stores/mod.rs
+++ b/src/database/stores/mod.rs
@@ -1,22 +1,24 @@
-use std::{
-    borrow::Borrow,
-    path::{Path, PathBuf},
-};
+use std::path::{Path, PathBuf};
 
-use super::{UpEndDatabase, UpEndConnection};
+use super::{UpEndConnection, UpEndDatabase};
 use crate::util::{hash::Hash, jobs::JobContainer};
 
 pub mod fs;
 
 #[derive(Debug, Clone)]
 pub enum StoreError {
-    NotFound,
     Unknown(String),
 }
 
 impl std::fmt::Display for StoreError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "STORE ERROR")
+        write!(
+            f,
+            "STORE ERROR: {}",
+            match self {
+                StoreError::Unknown(err) => err,
+            }
+        )
     }
 }
@@ -50,15 +52,15 @@ pub enum UpdatePathOutcome {
 pub trait UpStore {
     fn retrieve(&self, hash: &Hash) -> Result<Vec<PathBuf>>;
     fn retrieve_all(&self) -> Result<Vec<PathBuf>>;
-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash>;
-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        database: D,
+        database: &UpEndDatabase,
         job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>>;
 }
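The `store` and `update` signature changes above are what make `UpStore` usable as a trait object later in this diff: a trait with generic method parameters is not object-safe, so `dyn UpStore` only becomes legal once the generics are replaced with concrete types. A minimal sketch of the rule (toy trait names, not from this codebase):

    // A generic method parameter makes a trait unusable as `dyn Trait`:
    trait Generic {
        fn store<S: Into<Option<String>>>(&self, name_hint: S);
    }
    // fn broken(s: &dyn Generic) {} // error[E0038]: `Generic` cannot be made into an object

    // Replacing the generic with a concrete type restores object safety:
    trait Concrete {
        fn store(&self, name_hint: Option<String>);
    }
    fn works(s: &dyn Concrete) {
        s.store(Some("name".to_string())); // dispatched through the vtable
    }

The trade-off lands on the call sites, which now have to spell out `Some(...)` or `None` instead of leaning on the `Into<Option<String>>` conversion.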
diff --git a/src/extractors/audio.rs b/src/extractors/audio.rs
index 62e0b95..7480802 100644
--- a/src/extractors/audio.rs
+++ b/src/extractors/audio.rs
@@ -1,13 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{
-            fs::{FsStore, FILE_MIME_KEY},
-            UpStore,
-        },
+        stores::{fs::FILE_MIME_KEY, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},
@@ -21,7 +20,7 @@ impl Extractor for ID3Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {
diff --git a/src/extractors/mod.rs b/src/extractors/mod.rs
index 72d93bf..a2b938b 100644
--- a/src/extractors/mod.rs
+++ b/src/extractors/mod.rs
@@ -1,7 +1,6 @@
 use crate::{
     addressing::Address,
-    database::stores::fs::FsStore,
-    database::{entry::Entry, UpEndConnection, UpEndDatabase},
+    database::{entry::Entry, stores::UpStore, UpEndConnection, UpEndDatabase},
     util::jobs::JobContainer,
 };
 use anyhow::Result;
@@ -26,7 +25,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<Vec<Entry>>;
 
@@ -38,7 +37,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<usize> {
         if self.is_needed(address, connection)? {
@@ -57,15 +56,14 @@ pub trait Extractor {
     }
 }
 
-pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
+pub fn extract_all<D: Borrow<UpEndDatabase>>(
     db: D,
-    store: S,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     mut job_container: JobContainer,
 ) -> Result<usize> {
     info!("Extracting metadata for all addresses.");
 
     let db = db.borrow();
-    let store = store.borrow();
     let job_handle = job_container.add_job("EXTRACT_ALL", "Extracting additional metadata...")?;
 
     let all_addresses = db.connection()?.get_all_addresses()?;
@@ -77,7 +75,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
         .par_iter()
         .map(|address| {
             let connection = db.connection()?;
-            let extract_result = extract(address, &connection, store, job_container.clone());
+            let extract_result = extract(address, &connection, store.clone(), job_container.clone());
 
             let mut cnt = count.write().unwrap();
             *cnt += 1;
@@ -104,7 +102,7 @@ pub fn extract(
     address: &Address,
     connection: &UpEndConnection,
-    store: &FsStore,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     job_container: JobContainer,
 ) -> Result<usize> {
     let mut entry_count = 0;
@@ -113,19 +111,19 @@ pub fn extract(
     #[cfg(feature = "extractors-web")]
     {
         entry_count +=
-            web::WebExtractor.insert_info(address, connection, store, job_container.clone())?;
+            web::WebExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }
 
     #[cfg(feature = "extractors-audio")]
     {
         entry_count +=
-            audio::ID3Extractor.insert_info(address, connection, store, job_container.clone())?;
+            audio::ID3Extractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }
 
     #[cfg(feature = "extractors-photo")]
     {
         entry_count +=
-            photo::ExifExtractor.insert_info(address, connection, store, job_container)?;
+            photo::ExifExtractor.insert_info(address, connection, store.clone(), job_container)?;
     }
 
     trace!("Extracting metadata for {address:?} - got {entry_count} entries.");
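Passing the store as `Arc<Box<dyn UpStore + Send + Sync>>` means each parallel extraction in `extract_all` clones a handle rather than borrowing one. A self-contained sketch of the sharing pattern (a toy `Store` trait standing in for `UpStore`):

    use std::sync::Arc;
    use std::thread;

    trait Store {
        fn describe(&self) -> String;
    }

    struct Dummy;
    impl Store for Dummy {
        fn describe(&self) -> String {
            "dummy store".into()
        }
    }

    fn main() {
        // Same shape as in the diff; the annotation makes Box<Dummy>
        // coerce to Box<dyn Store + Send + Sync> at the argument site.
        let store: Arc<Box<dyn Store + Send + Sync>> = Arc::new(Box::new(Dummy));

        let handles: Vec<_> = (0..4)
            .map(|_| {
                let store = store.clone(); // cheap: one atomic refcount bump
                thread::spawn(move || println!("{}", store.describe()))
            })
            .collect();
        for h in handles {
            h.join().unwrap();
        }
    }

The `Send + Sync` bounds on the trait object are what let the clones cross thread boundaries, which `extract_all` relies on via rayon's `par_iter`.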
diff --git a/src/extractors/photo.rs b/src/extractors/photo.rs
index d89b1a9..9b2244e 100644
--- a/src/extractors/photo.rs
+++ b/src/extractors/photo.rs
@@ -1,10 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{fs::{FILE_MIME_KEY, FsStore}, UpStore},
+        stores::{fs::{FILE_MIME_KEY}, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},
@@ -21,7 +23,7 @@ impl Extractor for ExifExtractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {
diff --git a/src/extractors/web.rs b/src/extractors/web.rs
index 20208a1..26756c1 100644
--- a/src/extractors/web.rs
+++ b/src/extractors/web.rs
@@ -1,7 +1,9 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
-    database::{entry::Entry, stores::fs::FsStore, UpEndConnection},
+    database::{entry::Entry, stores::UpStore, UpEndConnection},
     util::jobs::{JobContainer, JobState},
 };
 use anyhow::anyhow;
@@ -15,10 +17,10 @@ impl Extractor for WebExtractor {
     fn get(
         &self,
         address: &Address,
-        _: &UpEndConnection,
-        _: &FsStore,
+        _connection: &UpEndConnection,
+        _store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
-    ) -> anyhow::Result<Vec<Entry>> {
+    ) -> Result<Vec<Entry>> {
         if let Address::Url(url) = address {
             let mut job_handle =
                 job_container.add_job(None, &format!("Getting info about {url:?}"))?;
@@ -82,10 +84,12 @@ impl Extractor for WebExtractor {
 
 #[cfg(test)]
 mod test {
-    use crate::util::jobs::JobContainer;
+
+    use crate::{database::stores::fs::FsStore, util::jobs::JobContainer};
 
     use super::*;
     use anyhow::Result;
+    use std::sync::Arc;
     use tempfile::TempDir;
 
     #[test]
@@ -93,13 +97,14 @@ mod test {
         let temp_dir = TempDir::new().unwrap();
         let open_result = crate::database::UpEndDatabase::open(&temp_dir, None, true)?;
         let connection = open_result.db.connection()?;
-        let store = FsStore::from_path(&temp_dir)?;
+        let store =
+            Arc::new(Box::new(FsStore::from_path(&temp_dir)?) as Box<dyn UpStore + Send + Sync>);
         let job_container = JobContainer::new();
 
         let address = Address::Url("https://upend.dev".into());
         assert!(WebExtractor.is_needed(&address, &connection)?);
 
-        WebExtractor.insert_info(&address, &connection, &store, job_container)?;
+        WebExtractor.insert_info(&address, &connection, store, job_container)?;
 
         assert!(!WebExtractor.is_needed(&address, &connection)?);
diff --git a/src/main.rs b/src/main.rs
index 76b2460..3227824 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -11,7 +11,7 @@ use std::path::PathBuf;
 use actix_web::{middleware, App, HttpServer};
 use anyhow::Result;
 use clap::{App as ClapApp, Arg};
-use log::{debug, info, warn};
+use log::{info, warn};
 use rand::{thread_rng, Rng};
 use std::sync::Arc;
 use tracing_subscriber::filter::{EnvFilter, LevelFilter};
@@ -127,7 +127,9 @@ fn main() -> Result<()> {
         .expect("failed to open database!");
     let upend = Arc::new(open_result.db);
 
-    let store = Arc::new(FsStore::from_path(vault_path.clone()).unwrap());
+    let store =
+        Arc::new(Box::new(FsStore::from_path(vault_path.clone()).unwrap())
+            as Box<dyn UpStore + Send + Sync>);
 
     let ui_path = get_static_dir("webui");
     if ui_path.is_err() {
@@ -262,7 +264,7 @@ fn main() -> Result<()> {
         info!("Running initial update...");
         // let new = open_result.new;
         block_background::<_, _, anyhow::Error>(move || {
-            state.store.update(upend.clone(), job_container.clone());
+            let _ = state.store.update(&upend, job_container.clone());
             let _ = extractors::extract_all(upend, state.store, job_container);
             Ok(())
         })
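In `main.rs`, the binding has no type annotation, so the `as Box<dyn UpStore + Send + Sync>` cast is what performs the unsizing; once an `Arc<Box<FsStore>>` exists, there is no coercion from it to `Arc<Box<dyn UpStore + Send + Sync>>`. A sketch with stand-in types:

    use std::sync::Arc;

    trait Store {}
    struct FsStore;
    impl Store for FsStore {}

    fn main() {
        // As in the diff: erase the concrete type before wrapping in Arc.
        let erased = Arc::new(Box::new(FsStore) as Box<dyn Store + Send + Sync>);

        // Without the cast, inference yields Arc<Box<FsStore>>, and the
        // following re-assignment does not compile:
        // let concrete = Arc::new(Box::new(FsStore));
        // let erased2: Arc<Box<dyn Store + Send + Sync>> = concrete; // mismatched types

        let _ = erased;
    }

A single-indirection `Arc<dyn UpStore + Send + Sync>` would also be shareable; the `Arc<Box<...>>` form chosen here simply keeps the boxed store as the unit that gets handed around.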
#[cfg(feature = "previews")] impl PreviewStore { - pub fn new>(path: P, store: Arc) -> Self { + pub fn new>(path: P, store: Arc>) -> Self { PreviewStore { path: PathBuf::from(path.as_ref()), store, @@ -86,18 +85,18 @@ impl PreviewStore { let mime_type: Option = if mime_type.is_some() { mime_type } else { - tree_magic_mini::from_filepath(&file_path).map(|m| m.into()) + tree_magic_mini::from_filepath(file_path).map(|m| m.into()) }; let preview = match mime_type { - Some(tm) if tm.starts_with("text") => TextPath(&file_path).get_thumbnail(), + Some(tm) if tm.starts_with("text") => TextPath(file_path).get_thumbnail(), Some(tm) if tm.starts_with("video") || tm == "application/x-matroska" => { - VideoPath(&file_path).get_thumbnail() + VideoPath(file_path).get_thumbnail() } Some(tm) if tm.starts_with("audio") || tm == "application/x-riff" => { - AudioPath(&file_path).get_thumbnail() + AudioPath(file_path).get_thumbnail() } - Some(tm) if tm.starts_with("image") => ImagePath(&file_path).get_thumbnail(), + Some(tm) if tm.starts_with("image") => ImagePath(file_path).get_thumbnail(), Some(unknown) => Err(anyhow!("No capability for {:?} thumbnails.", unknown)), _ => Err(anyhow!("Unknown file type, or file doesn't exist.")), }; diff --git a/src/routes.rs b/src/routes.rs index e3512db..e2f0b4d 100644 --- a/src/routes.rs +++ b/src/routes.rs @@ -3,13 +3,12 @@ use crate::database::constants::{ADDED_ATTR, LABEL_ATTR}; use crate::database::entry::{Entry, EntryValue, InvariantEntry}; use crate::database::hierarchies::{list_roots, resolve_path, UHierPath}; use crate::database::lang::Query; -use crate::database::stores::fs::FsStore; use crate::database::stores::{Blob, UpStore}; use crate::database::UpEndDatabase; use crate::extractors::{self}; use crate::previews::PreviewStore; use crate::util::exec::block_background; -use crate::util::hash::{b58_decode, b58_encode, Hashable}; +use crate::util::hash::{b58_decode, b58_encode}; use crate::util::jobs::JobContainer; use actix_files::NamedFile; use actix_multipart::Multipart; @@ -30,7 +29,6 @@ use serde_json::json; use std::collections::HashMap; use std::convert::{TryFrom, TryInto}; use std::io::Write; -use std::path::PathBuf; use std::sync::Arc; use std::time::{SystemTime, UNIX_EPOCH}; use tempfile::NamedTempFile; @@ -42,7 +40,7 @@ use is_executable::IsExecutable; #[derive(Clone)] pub struct State { pub upend: Arc, - pub store: Arc, + pub store: Arc>, pub vault_name: Option, pub job_container: JobContainer, pub preview_store: Option>, @@ -141,7 +139,6 @@ pub async fn get_raw( } // Then, check the files - let connection = state.upend.connection().map_err(ErrorInternalServerError)?; let _hash = hash.clone(); let _store = state.store.clone(); let blobs = web::block(move || _store.retrieve(_hash.as_ref())) @@ -443,7 +440,7 @@ pub async fn put_object( let _store = state.store.clone(); block_background::<_, _, anyhow::Error>(move || { let extract_result = - extractors::extract(&_address, &connection, &_store, _job_container); + extractors::extract(&_address, &connection, _store, _job_container); if let Ok(entry_count) = extract_result { debug!("Added {entry_count} extracted entries for {_address:?}"); } else { @@ -521,7 +518,7 @@ pub async fn put_object( let connection = state.upend.connection().map_err(ErrorInternalServerError)?; block_background::<_, _, anyhow::Error>(move || { let extract_result = - extractors::extract(&_address, &connection, &_store, _job_container); + extractors::extract(&_address, &connection, _store, _job_container); if let Ok(entry_count) = 
diff --git a/src/routes.rs b/src/routes.rs
index e3512db..e2f0b4d 100644
--- a/src/routes.rs
+++ b/src/routes.rs
@@ -3,13 +3,12 @@ use crate::database::constants::{ADDED_ATTR, LABEL_ATTR};
 use crate::database::entry::{Entry, EntryValue, InvariantEntry};
 use crate::database::hierarchies::{list_roots, resolve_path, UHierPath};
 use crate::database::lang::Query;
-use crate::database::stores::fs::FsStore;
 use crate::database::stores::{Blob, UpStore};
 use crate::database::UpEndDatabase;
 use crate::extractors::{self};
 use crate::previews::PreviewStore;
 use crate::util::exec::block_background;
-use crate::util::hash::{b58_decode, b58_encode, Hashable};
+use crate::util::hash::{b58_decode, b58_encode};
 use crate::util::jobs::JobContainer;
 use actix_files::NamedFile;
 use actix_multipart::Multipart;
@@ -30,7 +29,6 @@ use serde_json::json;
 use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::io::Write;
-use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tempfile::NamedTempFile;
@@ -42,7 +40,7 @@ use is_executable::IsExecutable;
 #[derive(Clone)]
 pub struct State {
     pub upend: Arc<UpEndDatabase>,
-    pub store: Arc<FsStore>,
+    pub store: Arc<Box<dyn UpStore + Send + Sync>>,
     pub vault_name: Option<String>,
     pub job_container: JobContainer,
     pub preview_store: Option<Arc<PreviewStore>>,
@@ -141,7 +139,6 @@ pub async fn get_raw(
     }
 
     // Then, check the files
-    let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     let _hash = hash.clone();
     let _store = state.store.clone();
     let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
@@ -443,7 +440,7 @@ pub async fn put_object(
             let _store = state.store.clone();
             block_background::<_, _, anyhow::Error>(move || {
                 let extract_result =
-                    extractors::extract(&_address, &connection, &_store, _job_container);
+                    extractors::extract(&_address, &connection, _store, _job_container);
                 if let Ok(entry_count) = extract_result {
                     debug!("Added {entry_count} extracted entries for {_address:?}");
                 } else {
@@ -521,7 +518,7 @@ pub async fn put_object(
     let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
     block_background::<_, _, anyhow::Error>(move || {
         let extract_result =
-            extractors::extract(&_address, &connection, &_store, _job_container);
+            extractors::extract(&_address, &connection, _store, _job_container);
         if let Ok(entry_count) = extract_result {
             debug!("Added {entry_count} extracted entries for {_address:?}");
         } else {
@@ -696,23 +693,23 @@ pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, E
 }
 
-#[derive(Deserialize)]
-pub struct RescanRequest {
-    full: Option<bool>,
-}
+// #[derive(Deserialize)]
+// pub struct RescanRequest {
+//     full: Option<bool>,
+// }
 
 #[post("/api/refresh")]
 pub async fn api_refresh(
     req: HttpRequest,
     state: web::Data<State>,
-    web::Query(query): web::Query<RescanRequest>,
+    // web::Query(query): web::Query<RescanRequest>,
 ) -> Result<HttpResponse, Error> {
     check_auth(&req, &state)?;
 
     block_background::<_, _, anyhow::Error>(move || {
         let _ = state
             .store
-            .update(state.upend.clone(), state.job_container.clone());
+            .update(&state.upend, state.job_container.clone());
         let _ = crate::extractors::extract_all(
             state.upend.clone(),
             state.store.clone(),
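One behavioral note on the refresh path: `update` returns a `#[must_use]` `Result`, which the old bare `state.store.update(...)` statement in `main.rs` silently dropped (drawing an unused-result warning); the new `let _ =` acknowledges the drop explicitly. A caller that wants the outcomes surfaced could instead do something like the following (a sketch; the diff itself prescribes no logging policy):

    block_background::<_, _, anyhow::Error>(move || {
        match state.store.update(&state.upend, state.job_container.clone()) {
            Ok(outcomes) => log::info!("vault update finished, {} paths processed", outcomes.len()),
            Err(e) => log::warn!("vault update failed: {e}"),
        }
        Ok(())
    });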