fix: continue with other extractors when one fails

feat/type-attributes
Tomáš Mládek 2022-10-23 10:50:14 +02:00
parent ea3fc015f5
commit 33565fdc27
2 changed files with 50 additions and 25 deletions

View File

@@ -78,7 +78,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
.par_iter()
.map(|address| {
let connection = db.connection()?;
let extract_result = extract(address, &connection, store.clone(), job_container.clone());
let entry_count = extract(address, &connection, store.clone(), job_container.clone());
let mut cnt = count.write().unwrap();
*cnt += 1;
@@ -88,7 +88,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
.unwrap()
.update_progress(*cnt as f32 / total * 100.0)?;
extract_result
anyhow::Ok(entry_count)
})
.flatten()
.sum();
@@ -107,35 +107,67 @@ pub fn extract(
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
job_container: JobContainer,
) -> Result<usize> {
) -> usize {
let mut entry_count = 0;
trace!("Extracting metadata for {address:?}");
#[cfg(feature = "extractors-web")]
{
entry_count +=
web::WebExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
let extract_result = web::WebExtractor.insert_info(
address,
connection,
store.clone(),
job_container.clone(),
);
match extract_result {
Ok(count) => entry_count += count,
Err(err) => error!("{}", err),
}
}
#[cfg(feature = "extractors-audio")]
{
entry_count +=
audio::ID3Extractor.insert_info(address, connection, store.clone(), job_container.clone())?;
let extract_result = audio::ID3Extractor.insert_info(
address,
connection,
store.clone(),
job_container.clone(),
);
match extract_result {
Ok(count) => entry_count += count,
Err(err) => error!("{}", err),
}
}
#[cfg(feature = "extractors-photo")]
{
entry_count +=
photo::ExifExtractor.insert_info(address, connection, store.clone(), job_container.clone())?;
let extract_result = photo::ExifExtractor.insert_info(
address,
connection,
store.clone(),
job_container.clone(),
);
match extract_result {
Ok(count) => entry_count += count,
Err(err) => error!("{}", err),
}
}
#[cfg(feature = "extractors-media")]
{
entry_count +=
media::MediaExtractor.insert_info(address, connection, store.clone(), job_container)?;
let extract_result =
media::MediaExtractor.insert_info(address, connection, store.clone(), job_container);
match extract_result {
Ok(count) => entry_count += count,
Err(err) => error!("{}", err),
}
}
trace!("Extracting metadata for {address:?} - got {entry_count} entries.");
Ok(entry_count)
entry_count
}

View File

@@ -26,7 +26,7 @@ use actix_web::{
use anyhow::{anyhow, Result};
use futures::channel::oneshot;
use futures_util::TryStreamExt;
use log::{debug, info, trace, warn};
use log::{debug, info, trace};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashMap;
@@ -437,13 +437,10 @@ pub async fn put_object(
let _job_container = state.job_container.clone();
let _store = state.store.clone();
block_background::<_, _, anyhow::Error>(move || {
let extract_result =
let entry_count =
extractors::extract(&_address, &connection, _store, _job_container);
if let Ok(entry_count) = extract_result {
debug!("Added {entry_count} extracted entries for {_address:?}");
} else {
warn!("Failed to add extracted entries for {_address:?}!");
}
debug!("Added {entry_count} extracted entries for {_address:?}");
Ok(())
});
@@ -515,13 +512,9 @@ pub async fn put_object(
let _store = state.store.clone();
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
block_background::<_, _, anyhow::Error>(move || {
let extract_result =
let entry_count =
extractors::extract(&_address, &connection, _store, _job_container);
if let Ok(entry_count) = extract_result {
debug!("Added {entry_count} extracted entries for {_address:?}");
} else {
warn!("Failed to add extracted entries for {_address:?}!");
}
debug!("Added {entry_count} extracted entries for {_address:?}");
Ok(())
});