refactor: use trait objects instead of FsStore directly

also fix most clippy lints
Tomáš Mládek 2022-09-13 19:16:22 +02:00
parent 4a988acdad
commit 5152675bad
10 changed files with 72 additions and 73 deletions
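
The gist of the refactor: components that previously depended on the concrete FsStore (extractors, previews, routes, main) now go through the UpStore trait as a shared trait object, Arc<Box<dyn UpStore + Send + Sync>>. A minimal sketch of the pattern, using simplified stand-in types rather than the actual UpEnd API:

    use std::sync::Arc;

    trait UpStore {
        // Object-safe: concrete parameter types, no method-level generics.
        fn store(&self, blob: Vec<u8>, name_hint: Option<String>) -> Result<String, String>;
    }

    struct FsStore;

    impl UpStore for FsStore {
        fn store(&self, blob: Vec<u8>, name_hint: Option<String>) -> Result<String, String> {
            // The real FsStore hashes the blob and copies it into the vault;
            // this stub just echoes a name back.
            Ok(name_hint.unwrap_or_else(|| format!("{} bytes", blob.len())))
        }
    }

    fn main() {
        // Callers depend on the trait, not the concrete store:
        let store: Arc<Box<dyn UpStore + Send + Sync>> = Arc::new(Box::new(FsStore));
        let _ = store.store(vec![1, 2, 3], Some("example".into()));
    }

Note the double indirection: Arc<dyn UpStore + Send + Sync> alone would also work; the commit keeps a Box inside the Arc, which costs an extra pointer hop — presumably a pragmatic choice during the refactor.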


@@ -79,8 +79,7 @@ pub struct OpenResult {
 pub struct UpEndDatabase {
     pool: Arc<DbPool>,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
-    db_path: Arc<PathBuf>,
+    vault_path: Arc<PathBuf>
 }

 pub const UPEND_SUBDIR: &str = ".upend";
@@ -123,7 +122,6 @@ impl UpEndDatabase {
             pool: Arc::new(pool),
             lock: Arc::new(RwLock::new(())),
             vault_path: Arc::new(dirpath.as_ref().canonicalize()?),
-            db_path: Arc::new(upend_path),
         };

         let connection = db.connection().unwrap();
@@ -168,7 +166,6 @@ impl UpEndDatabase {
         Ok(UpEndConnection {
             pool: self.pool.clone(),
             lock: self.lock.clone(),
-            vault_path: self.vault_path.clone(),
         })
     }
 }
@@ -176,7 +173,6 @@ impl UpEndDatabase {
 pub struct UpEndConnection {
     pool: Arc<DbPool>,
     lock: Arc<RwLock<()>>,
-    vault_path: Arc<PathBuf>,
 }

 impl UpEndConnection {


@@ -608,11 +608,11 @@ impl UpStore for FsStore
             .collect())
     }

-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash, super::StoreError> {
         let file_path = blob.get_file_path();
         let hash = file_path
@@ -629,7 +629,7 @@ impl UpStore for FsStore
                 .map_err(|e| StoreError::Unknown(e.to_string()))?,
         );

-        let final_name = if let Some(name_hint) = name_hint.into() {
+        let final_name = if let Some(name_hint) = name_hint {
             format!("{addr_str}_{name_hint}")
         } else {
             addr_str
@@ -639,7 +639,6 @@ impl UpStore for FsStore
         fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;

-
         self.add_file(&connection, &final_path, hash.clone())
             .map_err(|e| StoreError::Unknown(e.to_string()))?;
     }
@@ -647,9 +646,9 @@ impl UpStore for FsStore
         Ok(hash)
     }

-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        db: D,
+        db: &UpEndDatabase,
         mut job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>, StoreError> {
         let job_result = job_container.add_job("REIMPORT", "Scaning vault directory...");


@@ -1,22 +1,24 @@
-use std::{
-    borrow::Borrow,
-    path::{Path, PathBuf},
-};
+use std::path::{Path, PathBuf};

-use super::{UpEndDatabase, UpEndConnection};
+use super::{UpEndConnection, UpEndDatabase};
 use crate::util::{hash::Hash, jobs::JobContainer};

 pub mod fs;

 #[derive(Debug, Clone)]
 pub enum StoreError {
-    NotFound,
     Unknown(String),
 }

 impl std::fmt::Display for StoreError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "STORE ERROR")
+        write!(
+            f,
+            "STORE ERROR: {}",
+            match self {
+                StoreError::Unknown(err) => err,
+            }
+        )
     }
 }
@@ -50,15 +52,15 @@ pub enum UpdatePathOutcome {
 pub trait UpStore {
     fn retrieve(&self, hash: &Hash) -> Result<Vec<Blob>>;
     fn retrieve_all(&self) -> Result<Vec<Blob>>;
-    fn store<S: Into<Option<String>>>(
+    fn store(
         &self,
         connection: UpEndConnection,
         blob: Blob,
-        name_hint: S,
+        name_hint: Option<String>,
     ) -> Result<Hash>;
-    fn update<D: Borrow<UpEndDatabase>>(
+    fn update(
         &self,
-        database: D,
+        database: &UpEndDatabase,
         job_container: JobContainer,
     ) -> Result<Vec<UpdatePathOutcome>>;
 }
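
Context for the signature changes above (not part of the diff): a trait whose method has its own type parameter, like the old fn store<S: Into<Option<String>>>, cannot be used as dyn Trait, because generic methods cannot be dispatched through a vtable (rustc rejects such use with error E0038). Replacing the generics with concrete Option<String> and &UpEndDatabase is what makes Box<dyn UpStore> possible. A self-contained illustration:

    // Not object-safe: the method is generic over S.
    trait NotObjectSafe {
        fn store<S: Into<Option<String>>>(&self, name_hint: S);
    }
    // fn rejected(_: &dyn NotObjectSafe) {} // error[E0038]: not object safe

    // Object-safe: concrete parameter types only.
    trait ObjectSafe {
        fn store(&self, name_hint: Option<String>);
    }
    fn accepted(_: &dyn ObjectSafe) {} // compiles

The cost falls on call sites: the generic version accepted a bare String (auto-wrapped via Into), whereas callers must now write Some(name) or None explicitly.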


@@ -1,13 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{
-            fs::{FsStore, FILE_MIME_KEY},
-            UpStore,
-        },
+        stores::{fs::FILE_MIME_KEY, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},
@@ -21,7 +20,7 @@ impl Extractor for ID3Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {


@@ -1,7 +1,6 @@
 use crate::{
     addressing::Address,
-    database::stores::fs::FsStore,
-    database::{entry::Entry, UpEndConnection, UpEndDatabase},
+    database::{entry::Entry, stores::UpStore, UpEndConnection, UpEndDatabase},
     util::jobs::JobContainer,
 };
 use anyhow::Result;
@@ -26,7 +25,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<Vec<Entry>>;
@@ -38,7 +37,7 @@ pub trait Extractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         job_container: JobContainer,
     ) -> Result<usize> {
         if self.is_needed(address, connection)? {
@@ -57,15 +56,14 @@
     }
 }

-pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
+pub fn extract_all<D: Borrow<UpEndDatabase>>(
     db: D,
-    store: S,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     mut job_container: JobContainer,
 ) -> Result<usize> {
     info!("Extracting metadata for all addresses.");

     let db = db.borrow();
-    let store = store.borrow();
     let job_handle = job_container.add_job("EXTRACT_ALL", "Extracting additional metadata...")?;

     let all_addresses = db.connection()?.get_all_addresses()?;
@@ -77,7 +75,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
         .par_iter()
         .map(|address| {
             let connection = db.connection()?;
-            let extract_result = extract(address, &connection, store, job_container.clone());
+            let extract_result = extract(address, &connection, store.clone(), job_container.clone());

             let mut cnt = count.write().unwrap();
             *cnt += 1;
@@ -104,7 +102,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>, S: Borrow<FsStore>>(
 pub fn extract(
     address: &Address,
     connection: &UpEndConnection,
-    store: &FsStore,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     job_container: JobContainer,
 ) -> Result<usize> {
     let mut entry_count = 0;
@@ -113,19 +111,19 @@ pub fn extract(
     #[cfg(feature = "extractors-web")]
     {
         entry_count +=
-            web::WebExtractor.insert_info(address, connection, store, job_container.clone())?;
+            web::WebExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }

     #[cfg(feature = "extractors-audio")]
     {
         entry_count +=
-            audio::ID3Extractor.insert_info(address, connection, store, job_container.clone())?;
+            audio::ID3Extractor.insert_info(address, connection, store.clone(), job_container.clone())?;
     }

     #[cfg(feature = "extractors-photo")]
     {
         entry_count +=
-            photo::ExifExtractor.insert_info(address, connection, store, job_container)?;
+            photo::ExifExtractor.insert_info(address, connection, store.clone(), job_container)?;
     }

     trace!("Extracting metadata for {address:?} - got {entry_count} entries.");
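
Because the trait object is now taken by value, the call sites above clone it for every extractor. This is cheap: cloning an Arc bumps an atomic reference count and copies a pointer; the store behind it is never duplicated. A small sketch of the idiom, with a stand-in payload type:

    use std::sync::Arc;

    fn consume(store: Arc<Box<dyn std::fmt::Debug + Send + Sync>>) {
        println!("{store:?}");
    }

    fn main() {
        let store: Arc<Box<dyn std::fmt::Debug + Send + Sync>> = Arc::new(Box::new("vault"));
        consume(store.clone()); // refcount bump, not a deep copy
        consume(store); // the final use can move the Arc instead of cloning
    }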


@@ -1,10 +1,12 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
     database::{
         constants,
         entry::{Entry, EntryValue},
-        stores::{fs::{FILE_MIME_KEY, FsStore}, UpStore},
+        stores::{fs::{FILE_MIME_KEY}, UpStore},
         UpEndConnection,
     },
     util::jobs::{JobContainer, JobState},
@@ -21,7 +23,7 @@ impl Extractor for ExifExtractor {
         &self,
         address: &Address,
         connection: &UpEndConnection,
-        store: &FsStore,
+        store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
     ) -> Result<Vec<Entry>> {
         if let Address::Hash(hash) = address {


@@ -1,7 +1,9 @@
+use std::sync::Arc;
+
 use super::Extractor;
 use crate::{
     addressing::Address,
-    database::{entry::Entry, stores::fs::FsStore, UpEndConnection},
+    database::{entry::Entry, stores::UpStore, UpEndConnection},
     util::jobs::{JobContainer, JobState},
 };
 use anyhow::anyhow;
@@ -15,10 +17,10 @@ impl Extractor for WebExtractor {
     fn get(
         &self,
         address: &Address,
-        _: &UpEndConnection,
-        _: &FsStore,
+        _connection: &UpEndConnection,
+        _store: Arc<Box<dyn UpStore + Send + Sync>>,
         mut job_container: JobContainer,
-    ) -> anyhow::Result<Vec<Entry>> {
+    ) -> Result<Vec<Entry>> {
         if let Address::Url(url) = address {
             let mut job_handle =
                 job_container.add_job(None, &format!("Getting info about {url:?}"))?;
@@ -82,10 +84,12 @@ impl Extractor for WebExtractor {

 #[cfg(test)]
 mod test {
-    use crate::util::jobs::JobContainer;
+    use crate::{database::stores::fs::FsStore, util::jobs::JobContainer};

     use super::*;
     use anyhow::Result;
+    use std::sync::Arc;
     use tempfile::TempDir;

     #[test]
@@ -93,13 +97,14 @@ mod test {
         let temp_dir = TempDir::new().unwrap();
         let open_result = crate::database::UpEndDatabase::open(&temp_dir, None, true)?;
         let connection = open_result.db.connection()?;
-        let store = FsStore::from_path(&temp_dir)?;
+        let store =
+            Arc::new(Box::new(FsStore::from_path(&temp_dir)?) as Box<dyn UpStore + Sync + Send>);
         let job_container = JobContainer::new();

         let address = Address::Url("https://upend.dev".into());
         assert!(WebExtractor.is_needed(&address, &connection)?);

-        WebExtractor.insert_info(&address, &connection, &store, job_container)?;
+        WebExtractor.insert_info(&address, &connection, store, job_container)?;

         assert!(!WebExtractor.is_needed(&address, &connection)?);


@@ -12,7 +12,7 @@ use actix_cors::Cors;
 use actix_web::{middleware, App, HttpServer};
 use anyhow::Result;
 use clap::{App as ClapApp, Arg};
-use log::{debug, info, warn};
+use log::{info, warn};
 use rand::{thread_rng, Rng};
 use std::sync::Arc;
 use tracing_subscriber::filter::{EnvFilter, LevelFilter};
@@ -128,7 +128,9 @@ fn main() -> Result<()> {
         .expect("failed to open database!");
     let upend = Arc::new(open_result.db);

-    let store = Arc::new(FsStore::from_path(vault_path.clone()).unwrap());
+    let store =
+        Arc::new(Box::new(FsStore::from_path(vault_path.clone()).unwrap())
+            as Box<dyn UpStore + Send + Sync>);

     let ui_path = get_static_dir("webui");
     if ui_path.is_err() {
@@ -271,7 +273,7 @@ fn main() -> Result<()> {
     info!("Running initial update...");
     // let new = open_result.new;
     block_background::<_, _, anyhow::Error>(move || {
-        state.store.update(upend.clone(), job_container.clone());
+        let _ = state.store.update(&upend, job_container.clone());
         let _ = extractors::extract_all(upend, state.store, job_container);
         Ok(())
     })
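
One detail in the construction above: the `as Box<dyn UpStore + Send + Sync>` cast performs the unsizing coercion from Box<FsStore> to the trait object; without it (or a type annotation on the binding), inference would keep the concrete type and the field type in State would not match. A minimal sketch with a stand-in trait:

    use std::sync::Arc;

    trait UpStore {}
    struct FsStore;
    impl UpStore for FsStore {}

    fn main() {
        // Explicit cast, as in the commit:
        let store = Arc::new(Box::new(FsStore) as Box<dyn UpStore + Send + Sync>);

        // Equivalent: let an annotation drive the coercion.
        let store2: Arc<Box<dyn UpStore + Send + Sync>> = Arc::new(Box::new(FsStore));
        let _ = (store, store2);
    }

The new `let _ = state.store.update(...)` also matters: update returns a Result, and explicitly discarding it silences the unused_must_use warning — likely one of the lint fixes bundled into this commit.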


@@ -1,8 +1,7 @@
-use crate::database::stores::fs::FsStore;
 use crate::database::stores::UpStore;
+use crate::util::hash::b58_encode;
 use crate::util::hash::Hash;
 use crate::util::jobs::{JobContainer, JobState};
-use crate::{database::UpEndDatabase, util::hash::b58_encode};

 use anyhow::{anyhow, Result};
 use log::{debug, trace};
@@ -29,14 +28,14 @@ pub trait Previewable {
 }

 pub struct PreviewStore {
     path: PathBuf,
-    store: Arc<FsStore>,
+    store: Arc<Box<dyn UpStore + Send + Sync>>,
     locks: Mutex<HashMap<Hash, Arc<Mutex<PathBuf>>>>,
 }

 #[cfg(feature = "previews")]
 impl PreviewStore {
-    pub fn new<P: AsRef<Path>>(path: P, store: Arc<FsStore>) -> Self {
+    pub fn new<P: AsRef<Path>>(path: P, store: Arc<Box<dyn UpStore + Send + Sync>>) -> Self {
         PreviewStore {
             path: PathBuf::from(path.as_ref()),
             store,
@@ -86,18 +85,18 @@ impl PreviewStore {
                 let mime_type: Option<String> = if mime_type.is_some() {
                     mime_type
                 } else {
-                    tree_magic_mini::from_filepath(&file_path).map(|m| m.into())
+                    tree_magic_mini::from_filepath(file_path).map(|m| m.into())
                 };

                 let preview = match mime_type {
-                    Some(tm) if tm.starts_with("text") => TextPath(&file_path).get_thumbnail(),
+                    Some(tm) if tm.starts_with("text") => TextPath(file_path).get_thumbnail(),
                     Some(tm) if tm.starts_with("video") || tm == "application/x-matroska" => {
-                        VideoPath(&file_path).get_thumbnail()
+                        VideoPath(file_path).get_thumbnail()
                     }
                     Some(tm) if tm.starts_with("audio") || tm == "application/x-riff" => {
-                        AudioPath(&file_path).get_thumbnail()
+                        AudioPath(file_path).get_thumbnail()
                     }
-                    Some(tm) if tm.starts_with("image") => ImagePath(&file_path).get_thumbnail(),
+                    Some(tm) if tm.starts_with("image") => ImagePath(file_path).get_thumbnail(),
                     Some(unknown) => Err(anyhow!("No capability for {:?} thumbnails.", unknown)),
                     _ => Err(anyhow!("Unknown file type, or file doesn't exist.")),
                 };
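
The &file_path → file_path changes in this hunk are the lint-fix half of the commit (clippy::needless_borrow): file_path is presumably already a reference, so taking another & only creates a double reference that immediately deref-coerces back. A tiny reproduction:

    use std::path::Path;

    fn inspect(path: &Path) -> usize {
        path.as_os_str().len()
    }

    fn main() {
        let path: &Path = Path::new("/tmp/example"); // hypothetical path
        let _ = inspect(&path); // clippy::needless_borrow: `&&Path` coerces back to `&Path`
        let _ = inspect(path); // what the commit changes these call sites to
    }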


@@ -3,13 +3,12 @@ use crate::database::constants::{ADDED_ATTR, LABEL_ATTR};
 use crate::database::entry::{Entry, EntryValue, InvariantEntry};
 use crate::database::hierarchies::{list_roots, resolve_path, UHierPath};
 use crate::database::lang::Query;
-use crate::database::stores::fs::FsStore;
 use crate::database::stores::{Blob, UpStore};
 use crate::database::UpEndDatabase;
 use crate::extractors::{self};
 use crate::previews::PreviewStore;
 use crate::util::exec::block_background;
-use crate::util::hash::{b58_decode, b58_encode, Hashable};
+use crate::util::hash::{b58_decode, b58_encode};
 use crate::util::jobs::JobContainer;
 use actix_files::NamedFile;
 use actix_multipart::Multipart;
@@ -30,7 +29,6 @@ use serde_json::json;
 use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::io::Write;
-use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tempfile::NamedTempFile;
@@ -42,7 +40,7 @@ use is_executable::IsExecutable;
 #[derive(Clone)]
 pub struct State {
     pub upend: Arc<UpEndDatabase>,
-    pub store: Arc<FsStore>,
+    pub store: Arc<Box<dyn UpStore + Sync + Send>>,
     pub vault_name: Option<String>,
     pub job_container: JobContainer,
     pub preview_store: Option<Arc<PreviewStore>>,
@@ -130,7 +128,6 @@ pub async fn get_raw(
     if let Address::Hash(hash) = address {
         let hash = Arc::new(hash);
-        let connection = state.upend.connection().map_err(ErrorInternalServerError)?;

         let _hash = hash.clone();
         let _store = state.store.clone();
         let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
@@ -441,7 +438,7 @@ pub async fn put_object(
         let _store = state.store.clone();
         block_background::<_, _, anyhow::Error>(move || {
             let extract_result =
-                extractors::extract(&_address, &connection, &_store, _job_container);
+                extractors::extract(&_address, &connection, _store, _job_container);
             if let Ok(entry_count) = extract_result {
                 debug!("Added {entry_count} extracted entries for {_address:?}");
             } else {
@@ -519,7 +516,7 @@ pub async fn put_object(
         let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
         block_background::<_, _, anyhow::Error>(move || {
             let extract_result =
-                extractors::extract(&_address, &connection, &_store, _job_container);
+                extractors::extract(&_address, &connection, _store, _job_container);
             if let Ok(entry_count) = extract_result {
                 debug!("Added {entry_count} extracted entries for {_address:?}");
             } else {
@@ -694,23 +691,23 @@ pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Er
     Ok(HttpResponse::Ok().json(result.as_hash().map_err(ErrorInternalServerError)?))
 }

-#[derive(Deserialize)]
-pub struct RescanRequest {
-    full: Option<String>,
-}
+// #[derive(Deserialize)]
+// pub struct RescanRequest {
+//     full: Option<String>,
+// }

 #[post("/api/refresh")]
 pub async fn api_refresh(
     req: HttpRequest,
     state: web::Data<State>,
-    web::Query(query): web::Query<RescanRequest>,
+    // web::Query(query): web::Query<RescanRequest>,
 ) -> Result<HttpResponse, Error> {
     check_auth(&req, &state)?;

     block_background::<_, _, anyhow::Error>(move || {
         let _ = state
             .store
-            .update(state.upend.clone(), state.job_container.clone());
+            .update(&state.upend, state.job_container.clone());
         let _ = crate::extractors::extract_all(
             state.upend.clone(),
             state.store.clone(),