refactor - remove unnecessary async fns, add helper fn for running blocking tasks in background

also impl Error for JobInProgressError
feat/vaults
Tomáš Mládek 2022-02-28 19:49:42 +01:00
parent ce50f5ad80
commit 27cc6eb31c
10 changed files with 84 additions and 74 deletions

Cargo.lock

@@ -2884,7 +2884,6 @@ dependencies = [
  "actix-web",
  "actix_derive",
  "anyhow",
- "async-trait",
  "bs58",
  "built",
  "chrono",

Cargo.toml

@@ -69,7 +69,6 @@ image = { version = "0.23.14", optional = true }
 webp = { version = "0.2.0", optional = true }
 webpage = { version = "1.4.0", optional = true }
-async-trait = "0.1.52"

 [build-dependencies]
 built = "0.5.1"

src/extractors/mod.rs

@@ -4,45 +4,28 @@ use crate::{
     util::jobs::JobContainer,
 };
 use anyhow::Result;
-use async_trait::async_trait;
 use std::sync::{Arc, RwLock};

 #[cfg(feature = "extractors-web")]
 pub mod web;

-#[async_trait]
 pub trait Extractor {
-    async fn get(
-        &self,
-        address: Address,
-        job_container: Arc<RwLock<JobContainer>>,
-    ) -> Result<Vec<Entry>>;
+    fn get(&self, address: Address, job_container: Arc<RwLock<JobContainer>>)
+        -> Result<Vec<Entry>>;

-    async fn insert_info(
+    fn insert_info(
         &self,
         address: Address,
         connection: UpEndConnection,
         job_container: Arc<RwLock<JobContainer>>,
     ) -> Result<()> {
-        let entries = self.get(address, job_container).await?;
+        let entries = self.get(address, job_container)?;

-        Ok(actix_web::web::block::<_, _, anyhow::Error>(move || {
-            connection.transaction(|| {
-                for entry in entries {
-                    connection.insert_entry(entry)?;
-                }
-                Ok(())
-            })
-        })
-        .await?)
+        connection.transaction(|| {
+            for entry in entries {
+                connection.insert_entry(entry)?;
+            }
+            Ok(())
+        })
     }
-
-    async fn insert_info_fnf(
-        &self,
-        address: Address,
-        connection: UpEndConnection,
-        job_container: Arc<RwLock<JobContainer>>,
-    ) {
-        let _ = self.insert_info(address, connection, job_container).await;
-    }
 }
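With the async_trait layer gone, an Extractor implementation is ordinary blocking code; running it off the main executor is now the caller's concern (see block_background in src/util/exec.rs below). A minimal hypothetical implementor, only to illustrate the new trait shape (NoopExtractor is not part of this commit):

    struct NoopExtractor;

    impl Extractor for NoopExtractor {
        fn get(
            &self,
            _address: Address,
            _job_container: Arc<RwLock<JobContainer>>,
        ) -> Result<Vec<Entry>> {
            // Plain synchronous code; callers that want this off the async
            // executor wrap the call in block_background themselves.
            Ok(vec![])
        }
    }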

src/extractors/web.rs

@@ -4,17 +4,14 @@ use crate::{
     database::entry::Entry,
     util::jobs::{Job, JobContainer, State},
 };
-use actix_web::web;
 use anyhow::anyhow;
-use async_trait::async_trait;
 use std::sync::{Arc, RwLock};
 use webpage::{Webpage, WebpageOptions};

 pub struct WebExtractor;

-#[async_trait]
 impl Extractor for WebExtractor {
-    async fn get(
+    fn get(
         &self,
         address: Address,
         job_container: Arc<RwLock<JobContainer>>,

@@ -27,9 +24,7 @@ impl Extractor for WebExtractor {
                 .unwrap();

             let webpage_url = url.clone();
-            let webpage_get =
-                web::block(move || Webpage::from_url(&webpage_url, WebpageOptions::default()))
-                    .await;
+            let webpage_get = Webpage::from_url(&webpage_url, WebpageOptions::default());

             if let Ok(webpage) = webpage_get {
                 let _ = job_container

@@ -37,7 +32,7 @@ impl Extractor for WebExtractor {
                     .unwrap()
                     .update_progress(&job_id, 50.0);
-                let address = Address::Url(url.clone());
+                let address = Address::Url(url);

                 let mut entries = vec![
                     webpage.html.title.map(|html_title| Entry {
                         entity: address.clone(),

@@ -79,7 +74,7 @@ impl Extractor for WebExtractor {
                 .update_state(&job_id, State::Failed);
             Err(anyhow!("Failed for unknown reason."))
         } else {
-            Err(anyhow!("Can only extract info for URLs."))
+            Ok(vec![])
         }
     }
 }

src/filesystem.rs

@@ -51,40 +51,41 @@ fn initialize_types(connection: &UpEndConnection) -> Result<()> {
     Ok(())
 }

-pub async fn rescan_vault(
-    db: Arc<UpEndDatabase>,
+pub fn rescan_vault<D: Borrow<UpEndDatabase>>(
+    db: D,
     job_container: Arc<RwLock<JobContainer>>,
     quick_check: bool,
     disable_synchronous: bool,
-) {
-    let add_result = job_container
+) -> Result<Vec<UpdatePathOutcome>> {
+    let job_id_result = job_container
         .write()
         .unwrap()
         .add_job(Job::new("REIMPORT", "Reimporting vault..."));

-    if let Ok(job_id) = add_result {
-        let job_container_rescan = job_container.clone();
-        let result = actix_web::web::block(move || {
-            rescan_vault_inner(
+    match job_id_result {
+        Ok(job_id) => {
+            let job_container_rescan = job_container.clone();
+            let result = rescan_vault_inner(
                 db,
                 job_container_rescan,
                 job_id,
                 quick_check,
                 disable_synchronous,
-            )
-        })
-        .await;
+            );

-        if result.is_err() {
-            let err = result.err().unwrap();
-            error!("Update did not succeed! {:?}", err);
+            if let Err(err) = &result {
+                error!("Update did not succeed! {:?}", err);

-            job_container
-                .write()
-                .unwrap()
-                .update_state(&job_id, State::Failed)
-                .unwrap();
-        }
+                job_container
+                    .write()
+                    .unwrap()
+                    .update_state(&job_id, State::Failed)
+                    .unwrap();
+            }
+
+            result
+        }
+        Err(err) => Err(err.into()),
     }
 }

 struct PragmaSynchronousGuard<'a>(&'a UpEndConnection);

@@ -105,7 +106,7 @@ impl Drop for PragmaSynchronousGuard<'_> {
 type UpdatePathResult = Result<UpdatePathOutcome>;

 #[derive(Debug)]
-enum UpdatePathOutcome {
+pub enum UpdatePathOutcome {
     Added(PathBuf),
     Unchanged(PathBuf),
     Removed(PathBuf),

@@ -524,8 +525,7 @@ mod test {
         .unwrap();

         // Initial scan
-        let rescan_result =
-            rescan_vault_inner(&open_result.db, job_container.clone(), job_id, quick, true);
+        let rescan_result = rescan_vault(&open_result.db, job_container.clone(), quick, true);

         assert!(rescan_result.is_ok());
         let rescan_result = rescan_result.unwrap();
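Note the bound change: taking D: Borrow<UpEndDatabase> instead of Arc<UpEndDatabase> lets main.rs and routes.rs keep passing an owned Arc while the test above passes &open_result.db directly. A self-contained sketch of why both work, with a stand-in type for UpEndDatabase:

    use std::borrow::Borrow;
    use std::sync::Arc;

    struct Db; // stand-in for UpEndDatabase

    fn rescan<D: Borrow<Db>>(db: D) {
        let _db: &Db = db.borrow(); // uniform access to the underlying value
    }

    fn main() {
        rescan(Arc::new(Db)); // Arc<Db> implements Borrow<Db>
        rescan(&Db);          // so does a plain reference
    }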

src/main.rs

@@ -17,6 +17,7 @@ use std::sync::{Arc, RwLock};
 use crate::{
     common::{get_static_dir, PKG_VERSION},
     database::UpEndDatabase,
+    util::exec::block_background,
 };

 mod addressing;

@@ -218,9 +219,9 @@ fn main() -> Result<()> {
     if !matches.is_present("NO_INITIAL_UPDATE") {
         info!("Running initial update...");
         if open_result.new {
-            actix::spawn(filesystem::rescan_vault(upend, job_container, false, true));
+            block_background(|| filesystem::rescan_vault(upend, job_container, false, true));
         } else {
-            actix::spawn(filesystem::rescan_vault(upend, job_container, true, false));
+            block_background(|| filesystem::rescan_vault(upend, job_container, true, false));
         }
     }

src/routes.rs

@@ -7,6 +7,7 @@ use crate::database::UpEndDatabase;
 use crate::extractors::Extractor;
 use crate::filesystem::add_file;
 use crate::previews::PreviewStore;
+use crate::util::exec::block_background;
 use crate::util::hash::{b58_decode, b58_encode, Hashable};
 use crate::util::jobs::JobContainer;
 use actix_files::NamedFile;

@@ -362,13 +363,16 @@ pub async fn put_object(
         }],
         Address::Url(url) => {
             #[cfg(feature = "extractors-web")]
-            actix::spawn(
-                (crate::extractors::web::WebExtractor {}).insert_info_fnf(
-                    address.clone(),
-                    state.upend.connection().map_err(ErrorInternalServerError)?,
-                    state.job_container.clone(),
-                ),
-            );
+            {
+                let _address = address.clone();
+                block_background(move || {
+                    (crate::extractors::web::WebExtractor {}).insert_info(
+                        _address,
+                        state.upend.connection()?,
+                        state.job_container.clone(),
+                    )
+                });
+            }
             vec![Entry {
                 entity: address.clone(),
                 attribute: LABEL_ATTR.to_string(),

@@ -606,12 +610,14 @@ pub async fn api_refresh(
     state: web::Data<State>,
     web::Query(query): web::Query<RescanRequest>,
 ) -> Result<HttpResponse, Error> {
-    actix::spawn(crate::filesystem::rescan_vault(
-        state.upend.clone(),
-        state.job_container.clone(),
-        query.full.is_none(),
-        false,
-    ));
+    block_background(move || {
+        crate::filesystem::rescan_vault(
+            state.upend.clone(),
+            state.job_container.clone(),
+            query.full.is_none(),
+            false,
+        )
+    });
     Ok(HttpResponse::Ok().finish())
 }

src/util/exec.rs (new file)

@@ -0,0 +1,18 @@
+use actix_web::web;
+
+pub fn block_background<F, I, E>(f: F)
+where
+    F: FnOnce() -> Result<I, E> + Send + 'static,
+    I: Send + 'static,
+    E: Send + std::fmt::Debug + 'static,
+{
+    async fn inner<F, I, E>(f: F)
+    where
+        F: FnOnce() -> Result<I, E> + Send + 'static,
+        I: Send + 'static,
+        E: Send + std::fmt::Debug + 'static,
+    {
+        let _ = web::block(f).await;
+    }
+    actix::spawn(inner(f));
+}
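A hedged usage sketch of the new helper: it must be called inside a running actix System (it uses actix::spawn), it returns immediately, and the closure's Result is dropped, so any error handling has to happen inside the closure itself. The kick_off function and the sleep are hypothetical:

    use crate::util::exec::block_background;

    fn kick_off() {
        block_background(|| -> anyhow::Result<()> {
            // Runs on actix-web's blocking thread pool (web::block), keeping
            // the async executor responsive while this blocks.
            std::thread::sleep(std::time::Duration::from_secs(1));
            Ok(())
        });
        // Fire-and-forget: the spawned task's Result is discarded.
    }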

src/util/jobs.rs

@@ -74,6 +74,14 @@ impl Serialize for JobId {
 #[derive(Debug, Clone)]
 pub struct JobInProgessError(String);

+impl std::fmt::Display for JobInProgessError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "job of type {} is already in progress", self.0)
+    }
+}
+
+impl std::error::Error for JobInProgessError {}
+
 impl JobContainer {
     pub fn add_job(&mut self, job: Job) -> Result<JobId, JobInProgessError> {
         if let Some(job_type) = &job.job_type {
src/util/mod.rs

@@ -1,3 +1,4 @@
+pub mod exec;
 pub mod hash;
 pub mod jobs;