fix: continue with other extractors when one fails

feat/type-attributes
Tomáš Mládek 2022-10-23 10:50:14 +02:00
parent ea3fc015f5
commit 33565fdc27
2 changed files with 50 additions and 25 deletions
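
Summary of the change: extract() used to propagate the first extractor error with ?, so a single failing extractor aborted metadata extraction for the whole address. After this commit each extractor's Result is matched individually, failures are logged with error!, successful entry counts are still accumulated, and extract() becomes infallible, returning usize instead of Result<usize>. Below is a minimal, dependency-free sketch of that log-and-continue pattern; the three extractor functions are illustrative stand-ins, not UpEnd's actual extractor API.

    // Standalone sketch of the log-and-continue pattern this commit adopts.
    // The three extractor functions are stand-ins, not UpEnd code.

    fn web_extractor() -> Result<usize, String> {
        Ok(3) // pretend three entries were extracted
    }

    fn audio_extractor() -> Result<usize, String> {
        Err("no ID3 tags found".to_string()) // pretend this extractor fails
    }

    fn photo_extractor() -> Result<usize, String> {
        Ok(1)
    }

    fn main() {
        let mut entry_count = 0;

        // Mirrors the per-extractor #[cfg(...)] blocks in extract(): every
        // extractor runs, and a failure is logged instead of propagated with `?`.
        for (name, result) in [
            ("web", web_extractor()),
            ("audio", audio_extractor()),
            ("photo", photo_extractor()),
        ] {
            match result {
                Ok(count) => entry_count += count,
                Err(err) => eprintln!("extractor '{name}' failed: {err}"),
            }
        }

        // Prints 4: the audio failure no longer discards the web and photo counts.
        println!("got {entry_count} entries");
    }

Running the sketch logs the error for the audio stand-in and still reports 4 entries, which is the behaviour the commit is after: a partial failure no longer throws away the other extractors' results.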

@@ -78,7 +78,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
         .par_iter()
         .map(|address| {
             let connection = db.connection()?;
-            let extract_result = extract(address, &connection, store.clone(), job_container.clone());
+            let entry_count = extract(address, &connection, store.clone(), job_container.clone());

             let mut cnt = count.write().unwrap();
             *cnt += 1;
@@ -88,7 +88,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
                 .unwrap()
                 .update_progress(*cnt as f32 / total * 100.0)?;

-            extract_result
+            anyhow::Ok(entry_count)
         })
         .flatten()
         .sum();
@@ -107,35 +107,67 @@ pub fn extract(
     connection: &UpEndConnection,
     store: Arc<Box<dyn UpStore + Send + Sync>>,
     job_container: JobContainer,
-) -> Result<usize> {
+) -> usize {
     let mut entry_count = 0;
     trace!("Extracting metadata for {address:?}");

     #[cfg(feature = "extractors-web")]
     {
-        entry_count +=
-            web::WebExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
+        let extract_result = web::WebExtractor.insert_info(
+            address,
+            connection,
+            store.clone(),
+            job_container.clone(),
+        );
+
+        match extract_result {
+            Ok(count) => entry_count += count,
+            Err(err) => error!("{}", err),
+        }
     }

     #[cfg(feature = "extractors-audio")]
     {
-        entry_count +=
-            audio::ID3Extractor.insert_info(address, connection, store.clone(), job_container.clone())?;
+        let extract_result = audio::ID3Extractor.insert_info(
+            address,
+            connection,
+            store.clone(),
+            job_container.clone(),
+        );
+
+        match extract_result {
+            Ok(count) => entry_count += count,
+            Err(err) => error!("{}", err),
+        }
     }

     #[cfg(feature = "extractors-photo")]
     {
-        entry_count +=
-            photo::ExifExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
+        let extract_result = photo::ExifExtractor.insert_info(
+            address,
+            connection,
+            store.clone(),
+            job_container.clone(),
+        );
+
+        match extract_result {
+            Ok(count) => entry_count += count,
+            Err(err) => error!("{}", err),
+        }
     }

     #[cfg(feature = "extractors-media")]
     {
-        entry_count +=
-            media::MediaExtractor.insert_info(address, connection, store.clone(), job_container)?;
+        let extract_result =
+            media::MediaExtractor.insert_info(address, connection, store.clone(), job_container);
+
+        match extract_result {
+            Ok(count) => entry_count += count,
+            Err(err) => error!("{}", err),
+        }
     }

     trace!("Extracting metadata for {address:?} - got {entry_count} entries.");

-    Ok(entry_count)
+    entry_count
 }
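
On the calling side (the first two hunks above), extract() can no longer fail, but the closure passed to par_iter().map() still returns Result<usize> because db.connection()? can; that is why the count is wrapped as anyhow::Ok(entry_count). The trailing .flatten().sum() then skips items whose closure failed and adds up the remaining counts. A small sketch of that flatten-and-sum idiom over Results, with a plain iterator standing in for rayon's par_iter() and made-up counts:

    // Sketch of summing only the successful per-address counts; illustrative
    // values, and a plain iterator instead of rayon's par_iter().
    fn main() {
        // Each element stands for one address: either a connection/setup error,
        // or the entry count reported by the (now infallible) extract().
        let per_address: Vec<Result<usize, String>> = vec![
            Ok(2),
            Err("could not open connection".to_string()),
            Ok(5),
        ];

        // flatten() drops the Err items, sum() adds up the Ok counts,
        // mirroring the .flatten().sum() at the end of the chain in extract_all().
        let total: usize = per_address.into_iter().flatten().sum();

        assert_eq!(total, 7);
        println!("{total} entries extracted in total");
    }

Only the Ok counts contribute to the total; an address whose closure failed early is simply skipped.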

@@ -26,7 +26,7 @@ use actix_web::{
 use anyhow::{anyhow, Result};
 use futures::channel::oneshot;
 use futures_util::TryStreamExt;
-use log::{debug, info, trace, warn};
+use log::{debug, info, trace};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use std::collections::HashMap;
@@ -437,13 +437,10 @@ pub async fn put_object(
             let _job_container = state.job_container.clone();
             let _store = state.store.clone();
             block_background::<_, _, anyhow::Error>(move || {
-                let extract_result =
+                let entry_count =
                     extractors::extract(&_address, &connection, _store, _job_container);
-                if let Ok(entry_count) = extract_result {
-                    debug!("Added {entry_count} extracted entries for {_address:?}");
-                } else {
-                    warn!("Failed to add extracted entries for {_address:?}!");
-                }
+
+                debug!("Added {entry_count} extracted entries for {_address:?}");

                 Ok(())
             });
@@ -515,13 +512,9 @@ pub async fn put_object(
             let _store = state.store.clone();
             let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
             block_background::<_, _, anyhow::Error>(move || {
-                let extract_result =
+                let entry_count =
                     extractors::extract(&_address, &connection, _store, _job_container);
-                if let Ok(entry_count) = extract_result {
-                    debug!("Added {entry_count} extracted entries for {_address:?}");
-                } else {
-                    warn!("Failed to add extracted entries for {_address:?}!");
-                }
+                debug!("Added {entry_count} extracted entries for {_address:?}");

                 Ok(())
             });