refactor: use trait objects instead of FsStore directly
also fix most clippy lints
parent 4a988acdad
commit 5152675bad
10 changed files with 72 additions and 73 deletions
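Before the diff itself, here is a minimal, self-contained sketch of the pattern this commit moves to, using simplified stand-in types (the Hash alias, the single error variant, and the main driver are illustrative only, not the project's actual API): trait methods take concrete types such as Option<String> instead of generic parameters, which keeps the trait object-safe so the store can be shared as Arc<Box<dyn UpStore + Send + Sync>>.

// Sketch only: stand-in types, not UpEnd's real definitions.
use std::sync::Arc;

// Simplified stand-ins for the project's Hash and StoreError types.
type Hash = Vec<u8>;

#[derive(Debug)]
enum StoreError {
    Unknown(String),
}

// Object-safe trait: `store` takes Option<String> rather than a generic
// `S: Into<Option<String>>`, since generic methods cannot be called through `dyn`.
trait UpStore {
    fn store(&self, blob: Vec<u8>, name_hint: Option<String>) -> Result<Hash, StoreError>;
}

struct FsStore;

impl UpStore for FsStore {
    fn store(&self, blob: Vec<u8>, _name_hint: Option<String>) -> Result<Hash, StoreError> {
        // The real implementation hashes the blob and copies it into the vault;
        // here we just reject empty input and echo the bytes back as a fake hash.
        if blob.is_empty() {
            return Err(StoreError::Unknown("empty blob".to_string()));
        }
        Ok(blob)
    }
}

// Consumers hold the trait object instead of depending on FsStore directly.
struct State {
    store: Arc<Box<dyn UpStore + Send + Sync>>,
}

fn main() -> Result<(), StoreError> {
    let store = Arc::new(Box::new(FsStore) as Box<dyn UpStore + Send + Sync>);
    let state = State { store };
    let hash = state.store.store(b"hello".to_vec(), Some("greeting".into()))?;
    println!("stored blob, fake hash is {} bytes", hash.len());
    Ok(())
}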

@@ -79,8 +79,7 @@ pub struct OpenResult {
 pub struct UpEndDatabase {
     pool: Arc<DbPool>,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
-    db_path: Arc<PathBuf>,
+    vault_path: Arc<PathBuf>
 }
 
 pub const UPEND_SUBDIR: &str = ".upend";

@@ -123,7 +122,6 @@ impl UpEndDatabase {
             pool: Arc::new(pool),
             lock: Arc::new(RwLock::new(())),
             vault_path: Arc::new(dirpath.as_ref().canonicalize()?),
-            db_path: Arc::new(upend_path),
         };
         let connection = db.connection().unwrap();

@@ -168,7 +166,6 @@ impl UpEndDatabase {
         Ok(UpEndConnection {
             pool: self.pool.clone(),
             lock: self.lock.clone(),
-            vault_path: self.vault_path.clone(),
         })
     }
 }

@@ -176,7 +173,6 @@ impl UpEndDatabase {
 pub struct UpEndConnection {
     pool: Arc<DbPool>,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
 }
 
 impl UpEndConnection {

@@ -608,11 +608,11 @@ impl UpStore for FsStore {
             .collect())
     }
 
-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash, super::StoreError> {
         let file_path = blob.get_file_path();
         let hash = file_path

@@ -629,7 +629,7 @@ impl UpStore for FsStore {
                 .map_err(|e| StoreError::Unknown(e.to_string()))?,
             );
 
-            let final_name = if let Some(name_hint) = name_hint.into() {
+            let final_name = if let Some(name_hint) = name_hint {
                 format!("{addr_str}_{name_hint}")
             } else {
                 addr_str

@@ -639,7 +639,6 @@ impl UpStore for FsStore {
 
             fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
 
-
             self.add_file(&connection, &final_path, hash.clone())
                 .map_err(|e| StoreError::Unknown(e.to_string()))?;
         }

@@ -647,9 +646,9 @@ impl UpStore for FsStore {
         Ok(hash)
     }
 
-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        db: D,
+        db: &UpEndDatabase,
         mut job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>, StoreError> {
         let job_result = job_container.add_job("REIMPORT", "Scaning vault directory...");

@@ -1,22 +1,24 @@
-use std::{
-    borrow::Borrow,
-    path::{Path, PathBuf},
-};
+use std::path::{Path, PathBuf};
 
-use super::{UpEndDatabase, UpEndConnection};
+use super::{UpEndConnection, UpEndDatabase};
 use crate::util::{hash::Hash, jobs::JobContainer};
 
 pub mod fs;
 
 #[derive(Debug, Clone)]
 pub enum StoreError {
     NotFound,
     Unknown(String),
 }
 
 impl std::fmt::Display for StoreError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "STORE ERROR")
+        write!(
+            f,
+            "STORE ERROR: {}",
+            match self {
+                StoreError::Unknown(err) => err,
+            }
+        )
     }
 }

@@ -50,15 +52,15 @@ pub enum UpdatePathOutcome {
 pub trait UpStore {
     fn retrieve(&self, hash: &Hash) -> Result<Vec<Blob>>;
     fn retrieve_all(&self) -> Result<Vec<Blob>>;
-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash>;
-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        database: D,
+        database: &UpEndDatabase,
         job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>>;
 }

@@ -1,13 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{
-            fs::{FsStore, FILE_MIME_KEY},
-            UpStore,
-        },
+        stores::{fs::FILE_MIME_KEY, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},

@@ -21,7 +20,7 @@ impl Extractor for ID3Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {

@@ -1,7 +1,6 @@
 use crate::{
     addressing::Address,
-    database::stores::fs::FsStore,
-    database::{entry::Entry, UpEndConnection, UpEndDatabase},
+    database::{entry::Entry, stores::UpStore, UpEndConnection, UpEndDatabase},
     util::jobs::JobContainer,
 };
 use anyhow::Result;

@@ -26,7 +25,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<Vec<Entry>>;

@@ -38,7 +37,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<usize> {
         if self.is_needed(address, connection)? {

@@ -57,15 +56,14 @@ pub trait Extractor {
     }
 }
 
-pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
+pub fn extract_all<D: Borrow<UpEndDatabase>>(
     db: D,
-    store: S,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     mut job_container: JobContainer,
 ) -> Result<usize> {
     info!("Extracting metadata for all addresses.");
 
     let db = db.borrow();
-    let store = store.borrow();
     let job_handle = job_container.add_job("EXTRACT_ALL", "Extracting additional metadata...")?;
 
     let all_addresses = db.connection()?.get_all_addresses()?;

@@ -77,7 +75,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
         .par_iter()
         .map(|address| {
             let connection = db.connection()?;
-            let extract_result = extract(address, &connection, store, job_container.clone());
+            let extract_result = extract(address, &connection, store.clone(), job_container.clone());
 
             let mut cnt = count.write().unwrap();
             *cnt += 1;

@@ -104,7 +102,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
 pub fn extract(
     address: &Address,
     connection: &UpEndConnection,
-    store: &FsStore,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     job_container: JobContainer,
 ) -> Result<usize> {
     let mut entry_count = 0;

@@ -113,19 +111,19 @@ pub fn extract(
     #[cfg(feature = "extractors-web")]
     {
         entry_count +=
-            web::WebExtractor.insert_info(address, connection, store, job_container.clone())?;
+            web::WebExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }
 
     #[cfg(feature = "extractors-audio")]
     {
         entry_count +=
-            audio::ID3Extractor.insert_info(address, connection, store, job_container.clone())?;
+            audio::ID3Extractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }
 
     #[cfg(feature = "extractors-photo")]
     {
         entry_count +=
-            photo::ExifExtractor.insert_info(address, connection, store, job_container)?;
+            photo::ExifExtractor.insert_info(address, connection, store.clone(), job_container)?;
     }
 
     trace!("Extracting metadata for {address:?} - got {entry_count} entries.");

@@ -1,10 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{fs::{FILE_MIME_KEY, FsStore}, UpStore},
+        stores::{fs::{FILE_MIME_KEY}, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},

@@ -21,7 +23,7 @@ impl Extractor for ExifExtractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {

@@ -1,7 +1,9 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
-    database::{entry::Entry, stores::fs::FsStore, UpEndConnection},
+    database::{entry::Entry, stores::UpStore, UpEndConnection},
     util::jobs::{JobContainer, JobState},
 };
 use anyhow::anyhow;

@@ -15,10 +17,10 @@ impl Extractor for WebExtractor {
     fn get(
         &self,
         address: &Address,
-        _: &UpEndConnection,
-        _: &FsStore,
+        _connection: &UpEndConnection,
+        _store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
-    ) -> anyhow::Result<Vec<Entry>> {
+    ) -> Result<Vec<Entry>> {
         if let Address::Url(url) = address {
             let mut job_handle =
                 job_container.add_job(None, &format!("Getting info about {url:?}"))?;

@@ -82,10 +84,12 @@ impl Extractor for WebExtractor {
 
 #[cfg(test)]
 mod test {
-    use crate::util::jobs::JobContainer;
+    use crate::{database::stores::fs::FsStore, util::jobs::JobContainer};
 
     use super::*;
     use anyhow::Result;
+    use std::sync::Arc;
     use tempfile::TempDir;
 
     #[test]

@@ -93,13 +97,14 @@ mod test {
         let temp_dir = TempDir::new().unwrap();
         let open_result = crate::database::UpEndDatabase::open(&temp_dir, None, true)?;
         let connection = open_result.db.connection()?;
-        let store = FsStore::from_path(&temp_dir)?;
+        let store =
+            Arc::new(Box::new(FsStore::from_path(&temp_dir)?) as Box<dyn UpStore + Sync + Send>);
        let job_container = JobContainer::new();
 
         let address = Address::Url("https://upend.dev".into());
         assert!(WebExtractor.is_needed(&address, &connection)?);
 
-        WebExtractor.insert_info(&address, &connection, &store, job_container)?;
+        WebExtractor.insert_info(&address, &connection, store, job_container)?;
 
         assert!(!WebExtractor.is_needed(&address, &connection)?);

@@ -12,7 +12,7 @@ use actix_cors::Cors;
 use actix_web::{middleware, App, HttpServer};
 use anyhow::Result;
 use clap::{App as ClapApp, Arg};
-use log::{debug, info, warn};
+use log::{info, warn};
 use rand::{thread_rng, Rng};
 use std::sync::Arc;
 use tracing_subscriber::filter::{EnvFilter, LevelFilter};

@@ -128,7 +128,9 @@ fn main() -> Result<()> {
         .expect("failed to open database!");
 
     let upend = Arc::new(open_result.db);
-    let store = Arc::new(FsStore::from_path(vault_path.clone()).unwrap());
+    let store =
+        Arc::new(Box::new(FsStore::from_path(vault_path.clone()).unwrap())
+            as Box<dyn UpStore + Send + Sync>);
 
     let ui_path = get_static_dir("webui");
     if ui_path.is_err() {

@@ -271,7 +273,7 @@ fn main() -> Result<()> {
         info!("Running initial update...");
         // let new = open_result.new;
         block_background::<_, _, anyhow::Error>(move || {
-            state.store.update(upend.clone(), job_container.clone());
+            let _ = state.store.update(&upend, job_container.clone());
             let _ = extractors::extract_all(upend, state.store, job_container);
             Ok(())
         })

@@ -1,8 +1,7 @@
-use crate::database::stores::fs::FsStore;
+use crate::database::stores::UpStore;
+use crate::util::hash::b58_encode;
 use crate::util::hash::Hash;
 use crate::util::jobs::{JobContainer, JobState};
-use crate::{database::UpEndDatabase, util::hash::b58_encode};
 use anyhow::{anyhow, Result};
 use log::{debug, trace};

@@ -29,14 +28,14 @@ pub trait Previewable {
 }
 pub struct PreviewStore {
     path: PathBuf,
-    store: Arc<FsStore>,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
 
     locks: Mutex<HashMap<Hash, Arc<Mutex<PathBuf>>>>,
 }
 
 #[cfg(feature = "previews")]
 impl PreviewStore {
-    pub fn new<P: AsRef<Path>>(path: P, store: Arc<FsStore>) -> Self {
+    pub fn new<P: AsRef<Path>>(path: P, store: Arc<Box<dyn UpStore + Send + Sync>>) -> Self {
         PreviewStore {
             path: PathBuf::from(path.as_ref()),
             store,

@@ -86,18 +85,18 @@ impl PreviewStore {
             let mime_type: Option<String> = if mime_type.is_some() {
                 mime_type
             } else {
-                tree_magic_mini::from_filepath(&file_path).map(|m| m.into())
+                tree_magic_mini::from_filepath(file_path).map(|m| m.into())
             };
 
             let preview = match mime_type {
-                Some(tm) if tm.starts_with("text") => TextPath(&file_path).get_thumbnail(),
+                Some(tm) if tm.starts_with("text") => TextPath(file_path).get_thumbnail(),
                 Some(tm) if tm.starts_with("video") || tm == "application/x-matroska" => {
-                    VideoPath(&file_path).get_thumbnail()
+                    VideoPath(file_path).get_thumbnail()
                 }
                 Some(tm) if tm.starts_with("audio") || tm == "application/x-riff" => {
-                    AudioPath(&file_path).get_thumbnail()
+                    AudioPath(file_path).get_thumbnail()
                 }
-                Some(tm) if tm.starts_with("image") => ImagePath(&file_path).get_thumbnail(),
+                Some(tm) if tm.starts_with("image") => ImagePath(file_path).get_thumbnail(),
                 Some(unknown) => Err(anyhow!("No capability for {:?} thumbnails.", unknown)),
                 _ => Err(anyhow!("Unknown file type, or file doesn't exist.")),
             };

@@ -3,13 +3,12 @@ use crate::database::constants::{ADDED_ATTR, LABEL_ATTR};
 use crate::database::entry::{Entry, EntryValue, InvariantEntry};
 use crate::database::hierarchies::{list_roots, resolve_path, UHierPath};
 use crate::database::lang::Query;
-use crate::database::stores::fs::FsStore;
+use crate::database::stores::{Blob, UpStore};
 use crate::database::UpEndDatabase;
 use crate::extractors::{self};
 use crate::previews::PreviewStore;
 use crate::util::exec::block_background;
-use crate::util::hash::{b58_decode, b58_encode, Hashable};
+use crate::util::hash::{b58_decode, b58_encode};
 use crate::util::jobs::JobContainer;
 use actix_files::NamedFile;
 use actix_multipart::Multipart;

@@ -30,7 +29,6 @@ use serde_json::json;
 use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::io::Write;
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tempfile::NamedTempFile;

@@ -42,7 +40,7 @@ use is_executable::IsExecutable;
 #[derive(Clone)]
 pub struct State {
     pub upend: Arc<UpEndDatabase>,
-    pub store: Arc<FsStore>,
+    pub store: Arc<Box<dyn UpStore + Sync + Send>>,
     pub vault_name: Option<String>,
     pub job_container: JobContainer,
     pub preview_store: Option<Arc<PreviewStore>>,

@@ -130,7 +128,6 @@ pub async fn get_raw(
     if let Address::Hash(hash) = address {
         let hash = Arc::new(hash);
 
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
         let _hash = hash.clone();
         let _store = state.store.clone();
         let blobs = web::block(move || _store.retrieve(_hash.as_ref()))

@@ -441,7 +438,7 @@ pub async fn put_object(
         let _store = state.store.clone();
         block_background::<_, _, anyhow::Error>(move || {
             let extract_result =
-                extractors::extract(&_address, &connection, &_store, _job_container);
+                extractors::extract(&_address, &connection, _store, _job_container);
             if let Ok(entry_count) = extract_result {
                 debug!("Added {entry_count} extracted entries for {_address:?}");
             } else {

@@ -519,7 +516,7 @@ pub async fn put_object(
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
         block_background::<_, _, anyhow::Error>(move || {
             let extract_result =
-                extractors::extract(&_address, &connection, &_store, _job_container);
+                extractors::extract(&_address, &connection, _store, _job_container);
             if let Ok(entry_count) = extract_result {
                 debug!("Added {entry_count} extracted entries for {_address:?}");
             } else {

@@ -694,23 +691,23 @@ pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Er
     Ok(HttpResponse::Ok().json(result.as_hash().map_err(ErrorInternalServerError)?))
 }
 
-#[derive(Deserialize)]
-pub struct RescanRequest {
-    full: Option<String>,
-}
+// #[derive(Deserialize)]
+// pub struct RescanRequest {
+//     full: Option<String>,
+// }
 
 #[post("/api/refresh")]
 pub async fn api_refresh(
     req: HttpRequest,
     state: web::Data<State>,
-    web::Query(query): web::Query<RescanRequest>,
+    // web::Query(query): web::Query<RescanRequest>,
 ) -> Result<HttpResponse, Error> {
     check_auth(&req, &state)?;
 
     block_background::<_, _, anyhow::Error>(move || {
         let _ = state
             .store
-            .update(state.upend.clone(), state.job_container.clone());
+            .update(&state.upend, state.job_container.clone());
         let _ = crate::extractors::extract_all(
             state.upend.clone(),
             state.store.clone(),