wip
ci/woodpecker/push/woodpecker Pipeline failed Details

refactor/errors
Tomáš Mládek 2023-11-08 21:08:39 +01:00
parent d8fa68f558
commit 9fbc662398
26 changed files with 245 additions and 138 deletions

25
Cargo.lock generated
View File

@ -425,12 +425,6 @@ dependencies = [
"windows-sys",
]
[[package]]
name = "anyhow"
version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
[[package]]
name = "arrayref"
version = "0.3.7"
@ -1066,6 +1060,16 @@ dependencies = [
"libc",
]
[[package]]
name = "eyre"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "fastrand"
version = "2.0.0"
@ -1491,6 +1495,12 @@ dependencies = [
"num-traits",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "1.9.3"
@ -3179,12 +3189,12 @@ dependencies = [
"actix-rt",
"actix-web",
"actix_derive",
"anyhow",
"bytes",
"chrono",
"clap",
"diesel",
"diesel_migrations",
"eyre",
"filebuffer",
"futures",
"futures-util",
@ -3232,7 +3242,6 @@ dependencies = [
name = "upend-db"
version = "0.0.2"
dependencies = [
"anyhow",
"chrono",
"diesel",
"diesel_migrations",

View File

@ -33,7 +33,6 @@ pub enum EntryValue {
Number(f64),
Address(Address),
Null,
Invalid,
}
impl Default for Entry {
@ -118,7 +117,6 @@ impl EntryValue {
EntryValue::Number(n) => ('N', n.to_string()),
EntryValue::Address(address) => ('O', address.to_string()),
EntryValue::Null => ('X', "".to_string()),
EntryValue::Invalid => return Err(UpEndError::CannotSerializeInvalid),
};
Ok(format!("{}{}", type_char, content))
@ -140,14 +138,17 @@ impl EntryValue {
}
impl std::str::FromStr for EntryValue {
type Err = std::convert::Infallible;
type Err = UpEndError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.len() < 2 {
match s.chars().next() {
Some('S') => Ok(EntryValue::String("".into())),
Some('X') => Ok(EntryValue::Null),
_ => Ok(EntryValue::Invalid),
_ => Err(UpEndError::InvalidValue(format!(
"Couldn't parse EntryValue from {}",
s
))),
}
} else {
let (type_char, content) = s.split_at(1);
@ -157,17 +158,26 @@ impl std::str::FromStr for EntryValue {
if let Ok(n) = content.parse::<f64>() {
Ok(EntryValue::Number(n))
} else {
Ok(EntryValue::Invalid)
Err(UpEndError::InvalidValue(format!(
"Couldn't parse number from {}",
content
)))
}
}
("O", content) => {
if let Ok(addr) = b58_decode(content).and_then(|v| Address::decode(&v)) {
Ok(EntryValue::Address(addr))
} else {
Ok(EntryValue::Invalid)
Err(UpEndError::InvalidValue(format!(
"Couldn't parse address from {}",
content
)))
}
}
_ => Ok(EntryValue::Invalid),
_ => Err(UpEndError::InvalidValue(format!(
"Couldn't parse EntryValue from {}",
s
))),
}
}
}
@ -186,7 +196,6 @@ impl std::fmt::Display for EntryValue {
EntryValue::String(string) => ("STRING", string.to_owned()),
EntryValue::Number(n) => ("NUMBER", n.to_string()),
EntryValue::Null => ("NULL", "NULL".to_string()),
EntryValue::Invalid => ("INVALID", "INVALID".to_string()),
};
write!(f, "{}: {}", entry_type, entry_value)
}

View File

@ -3,7 +3,7 @@ pub enum UpEndError {
HashDecodeError(String),
AddressParseError(String),
AddressComponentsDecodeError(AddressComponentsDecodeError),
CannotSerializeInvalid,
InvalidValue(String),
QueryParseError(String),
Other(String),
}
@ -31,8 +31,7 @@ impl std::fmt::Display for UpEndError {
AddressComponentsDecodeError::UrlDecodeError(err) =>
format!("Couldn't decode URL: {err}"),
},
UpEndError::CannotSerializeInvalid =>
String::from("Invalid EntryValues cannot be serialized."),
UpEndError::InvalidValue(err) => format!("Invalid EntryValue: {}", err),
UpEndError::QueryParseError(err) => format!("Error parsing query: {err}"),
UpEndError::Other(err) => format!("Unknown error: {err}"),
}

View File

@ -11,13 +11,19 @@ path = "src/main.rs"
[dependencies]
upend-base = { path = "../base" }
upend-db = { path = "../db" }
clap = { version = "4.2.4", features = ["derive", "env", "color", "string", "cargo"] }
clap = { version = "4.2.4", features = [
"derive",
"env",
"color",
"string",
"cargo",
] }
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
eyre = "0.6.8"
thiserror = "1.0"
rayon = "1.4.0"

View File

@ -1,6 +1,6 @@
use std::env::current_exe;
use anyhow::{anyhow, Result};
use eyre::{eyre, Result};
use lazy_static::lazy_static;
use shadow_rs::{is_debug, shadow};
@ -13,7 +13,7 @@ pub fn get_resource_path<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
} else {
current_exe()?
.parent()
.ok_or(anyhow!("couldn't locate resource path, binary in root"))?
.ok_or(eyre!("couldn't locate resource path, binary in root"))?
.join("../share/upend")
};
@ -21,7 +21,7 @@ pub fn get_resource_path<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
if result.exists() {
Ok(result)
} else {
Err(anyhow!("Path {result:?} doesn't exist."))
Err(eyre!("Path {result:?} doesn't exist."))
}
}
@ -39,4 +39,4 @@ lazy_static! {
pub fn get_version() -> &'static str {
option_env!("UPEND_VERSION").unwrap_or("unknown")
}
}

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use super::Extractor;
use anyhow::{anyhow, Result};
use eyre::{eyre, Result};
use lazy_static::lazy_static;
use upend_base::{
addressing::Address,
@ -112,7 +112,7 @@ impl Extractor for ID3Extractor {
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
Err(eyre!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use super::Extractor;
use anyhow::{anyhow, Result};
use eyre::{eyre, Result};
use lazy_static::lazy_static;
use upend_base::{
addressing::Address,
@ -131,7 +131,7 @@ impl Extractor for ExifExtractor {
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
Err(eyre!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])

View File

@ -1,7 +1,7 @@
use std::{process::Command, sync::Arc};
use super::Extractor;
use anyhow::{anyhow, Result};
use eyre::{eyre, Result};
use lazy_static::lazy_static;
use upend_base::{
addressing::Address,
@ -77,7 +77,7 @@ impl Extractor for MediaExtractor {
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !ffprobe_cmd.status.success() {
return Err(anyhow!(
return Err(eyre!(
"Failed to retrieve file duration: {:?}",
String::from_utf8_lossy(&ffprobe_cmd.stderr)
));
@ -111,7 +111,7 @@ impl Extractor for MediaExtractor {
let _ = job_handle.update_state(JobState::Done);
Ok(result)
} else {
Err(anyhow!("Couldn't find file for {hash:?}!"))
Err(eyre!("Couldn't find file for {hash:?}!"))
}
} else {
Ok(vec![])

View File

@ -1,4 +1,4 @@
use anyhow::Result;
use eyre::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{
borrow::Borrow,
@ -87,7 +87,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
.unwrap()
.update_progress(*cnt as f32 / total * 100.0)?;
anyhow::Ok(entry_count)
Ok(entry_count)
})
.flatten()
.sum();

View File

@ -2,8 +2,8 @@ use std::sync::Arc;
use super::Extractor;
use crate::common::REQWEST_CLIENT;
use anyhow::anyhow;
use anyhow::Result;
use eyre::eyre;
use eyre::Result;
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
@ -109,7 +109,7 @@ impl Extractor for WebExtractor {
})
.collect());
}
Err(anyhow!("Failed for unknown reason."))
Err(eyre!("Failed for unknown reason."))
} else {
Ok(vec![])
}
@ -133,7 +133,7 @@ mod test {
use url::Url;
use super::*;
use anyhow::Result;
use eyre::Result;
use std::sync::Arc;
use tempfile::TempDir;

View File

@ -4,8 +4,8 @@ extern crate upend_db;
use crate::common::{get_resource_path, REQWEST_ASYNC_CLIENT};
use crate::config::UpEndConfig;
use actix_web::HttpServer;
use anyhow::Result;
use clap::{Args, CommandFactory, FromArgMatches, Parser, Subcommand, ValueEnum};
use eyre::Result;
use filebuffer::FileBuffer;
use rand::{thread_rng, Rng};
use regex::Captures;
@ -446,7 +446,7 @@ async fn main() -> Result<()> {
if !args.no_initial_update {
if !open_result.new {
info!("Running update...");
block_background::<_, _, anyhow::Error>(move || {
block_background::<_, _, eyre::Error>(move || {
let connection = upend.connection()?;
let _ = state.store.update(
&upend,

View File

@ -1,10 +1,10 @@
use anyhow::anyhow;
use eyre::eyre;
use std::collections::HashMap;
use std::io::Read;
use std::path::Path;
use std::process::Command;
use anyhow::Result;
use eyre::Result;
use super::Previewable;
@ -27,7 +27,7 @@ impl<'a> Previewable for AudioPath<'a> {
.output()?;
if !audiowaveform_cmd.status.success() {
return Err(anyhow!(
return Err(eyre!(
"Failed to retrieve audiofile peaks: {:?}",
String::from_utf8_lossy(&audiowaveform_cmd.stderr)
));
@ -66,7 +66,7 @@ impl<'a> Previewable for AudioPath<'a> {
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !cmd_output.status.success() {
return Err(anyhow!(
return Err(eyre!(
"Failed to render thumbnail: {:?}",
String::from_utf8_lossy(&cmd_output.stderr)
));
@ -76,7 +76,7 @@ impl<'a> Previewable for AudioPath<'a> {
outfile.as_file().read_to_end(&mut buffer)?;
Ok(Some(buffer))
}
Some(_) => Err(anyhow!("type has to be one of: image, json")),
Some(_) => Err(eyre!("type has to be one of: image, json")),
}
}
}

View File

@ -1,10 +1,10 @@
#[cfg(not(feature = "previews-image"))]
use anyhow::anyhow;
use eyre::eyre;
#[cfg(feature = "previews-image")]
use image::{io::Reader as ImageReader, GenericImageView};
use std::{cmp, collections::HashMap, path::Path};
use anyhow::Result;
use eyre::Result;
use super::Previewable;
@ -66,6 +66,6 @@ impl<'a> Previewable for ImagePath<'a> {
}
#[cfg(not(feature = "previews-image"))]
Err(anyhow!("Image preview support not enabled!"))
Err(eyre!("Image preview support not enabled!"))
}
}

View File

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use eyre::{eyre, Result};
use tracing::{debug, trace};
use upend_base::hash::{b58_encode, UpMultihash};
use upend_db::jobs::{JobContainer, JobState};
@ -118,8 +118,8 @@ impl PreviewStore {
Some(tm) if tm.starts_with("image") => {
ImagePath(file_path).get_thumbnail(options)
}
Some(unknown) => Err(anyhow!("No capability for {:?} thumbnails.", unknown)),
_ => Err(anyhow!("Unknown file type, or file doesn't exist.")),
Some(unknown) => Err(eyre!("No capability for {:?} thumbnails.", unknown)),
_ => Err(eyre!("Unknown file type, or file doesn't exist.")),
};
match preview {
@ -140,7 +140,7 @@ impl PreviewStore {
Err(err) => Err(err),
}
} else {
Err(anyhow!("Object not found, or is not a file."))
Err(eyre!("Object not found, or is not a file."))
}
}
}

View File

@ -1,4 +1,4 @@
use anyhow::Result;
use eyre::Result;
use std::{collections::HashMap, convert::TryInto, fs::File, io::Read, path::Path};
use super::Previewable;

View File

@ -1,10 +1,10 @@
use anyhow::anyhow;
use eyre::eyre;
use std::collections::HashMap;
use std::io::Read;
use std::path::Path;
use std::process::Command;
use anyhow::Result;
use eyre::Result;
use super::Previewable;
@ -26,7 +26,7 @@ impl<'a> Previewable for VideoPath<'a> {
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !duration_cmd.status.success() {
return Err(anyhow!(
return Err(eyre!(
"Failed to retrieve file duration: {:?}",
String::from_utf8_lossy(&duration_cmd.stderr)
));
@ -62,7 +62,7 @@ impl<'a> Previewable for VideoPath<'a> {
debug!("Ran `{:?}`, took {}s", command, now.elapsed().as_secs_f32());
if !thumbnail_cmd.status.success() {
return Err(anyhow!(
return Err(eyre!(
"Failed to render thumbnail: {:?}",
String::from_utf8_lossy(&thumbnail_cmd.stderr)
));

View File

@ -18,7 +18,7 @@ use actix_web::{
http::header::{CacheControl, CacheDirective, DispositionType},
HttpRequest,
};
use anyhow::Result;
use eyre::Result;
use futures::channel::oneshot;
use futures_util::{StreamExt, TryStreamExt};
use serde::{Deserialize, Serialize};
@ -331,7 +331,7 @@ pub enum InAddress {
}
impl TryInto<Address> for InAddress {
type Error = anyhow::Error;
type Error = eyre::Error;
fn try_into(self) -> Result<Address, Self::Error> {
Ok(match self {
@ -413,7 +413,7 @@ pub async fn put_object(
}
PutInput::EntryList(entries) => {
web::block(move || {
connection.transaction::<_, anyhow::Error, _>(|| {
connection.transaction::<_, eyre::Error, _>(|| {
for entry in entries {
connection.insert_entry(process_inentry(entry)?)?;
}
@ -431,7 +431,7 @@ pub async fn put_object(
let _address = address.clone();
let _job_container = state.job_container.clone();
let _store = state.store.clone();
block_background::<_, _, anyhow::Error>(move || {
block_background::<_, _, eyre::Error>(move || {
let entry_count =
extractors::extract(&_address, &connection, _store, _job_container);
@ -441,7 +441,7 @@ pub async fn put_object(
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
web::block(move || {
connection.transaction::<_, anyhow::Error, _>(|| {
connection.transaction::<_, eyre::Error, _>(|| {
if connection.retrieve_object(&address)?.is_empty() {
connection.insert_entry(Entry {
entity: address.clone(),
@ -544,7 +544,7 @@ pub async fn put_blob(
let _job_container = state.job_container.clone();
let _store = state.store.clone();
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
block_background::<_, _, anyhow::Error>(move || {
block_background::<_, _, eyre::Error>(move || {
let entry_count = extractors::extract(&_address, &connection, _store, _job_container);
debug!("Added {entry_count} extracted entries for {_address:?}");
Ok(())
@ -568,7 +568,7 @@ pub async fn put_object_attribute(
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let new_address = web::block(move || {
connection.transaction::<_, anyhow::Error, _>(|| {
connection.transaction::<_, eyre::Error, _>(|| {
let existing_attr_entries =
connection.query(format!(r#"(matches @{address} "{attribute}" ?)"#).parse()?)?;
@ -628,7 +628,7 @@ pub async fn delete_object(
// let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
// connection
// .transaction::<_, anyhow::Error, _>(|| {
// .transaction::<_, eyre::Error, _>(|| {
// let address = Address::decode(&decode(address_str.into_inner())?)?;
// let _ = connection.remove_object(address)?;
@ -781,7 +781,7 @@ pub async fn api_refresh(
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
block_background::<_, _, anyhow::Error>(move || {
block_background::<_, _, eyre::Error>(move || {
let _ = state.store.update(
&state.upend,
state.job_container.clone(),
@ -899,10 +899,10 @@ pub async fn get_user_entries(state: web::Data<State>) -> Result<HttpResponse, E
#[derive(Debug)]
enum ExternalFetchError {
Status(anyhow::Error),
Status(eyre::Error),
TooLarge((usize, usize)),
UnknownSize,
Other(anyhow::Error),
Other(eyre::Error),
}
impl std::fmt::Display for ExternalFetchError {
@ -982,7 +982,7 @@ mod tests {
use std::fs::File;
use super::*;
use anyhow::Result;
use eyre::Result;
use tempfile::TempDir;
use upend_base::hash::UpMultihash;

View File

@ -17,8 +17,6 @@ log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
rayon = "1.4.0"
num_cpus = "1.13"
lazy_static = "1.4.0"

View File

@ -3,7 +3,6 @@ use std::collections::HashMap;
use super::inner::models::Entry;
use super::inner::schema::data;
use crate::inner::models;
use anyhow::Result;
use diesel::expression::grouped::Grouped;
use diesel::expression::operators::{And, Not, Or};
use diesel::sql_types::Bool;

View File

@ -1,11 +1,10 @@
use crate::inner::models;
use anyhow::{anyhow, Result};
use std::convert::TryFrom;
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
impl TryFrom<&models::Entry> for Entry {
type Error = anyhow::Error;
type Error = crate::UpEndDbError;
fn try_from(e: &models::Entry) -> Result<Self, Self::Error> {
if let Some(value_str) = &e.value_str {
@ -37,11 +36,13 @@ impl TryFrom<&models::Entry> for Entry {
}
impl TryFrom<&Entry> for models::Entry {
type Error = anyhow::Error;
type Error = crate::UpEndDbError;
fn try_from(e: &Entry) -> Result<Self, Self::Error> {
if e.attribute.is_empty() {
return Err(anyhow!("Attribute cannot be empty."));
return Err(crate::UpEndDbError::InvalidData(
"Attribute cannot be empty.".to_string(),
));
}
let base_entry = models::Entry {
identity: e.address()?.encode()?,
@ -75,7 +76,7 @@ impl TryFrom<&Entry> for models::Entry {
}
impl TryFrom<&ImmutableEntry> for models::Entry {
type Error = anyhow::Error;
type Error = crate::UpEndDbError;
fn try_from(e: &ImmutableEntry) -> Result<Self, Self::Error> {
Ok(models::Entry {

View File

@ -1,7 +1,6 @@
use std::convert::TryFrom;
use std::sync::{Arc, Mutex};
use anyhow::{anyhow, Result};
use lru::LruCache;
use tracing::trace;
use uuid::Uuid;
@ -12,17 +11,21 @@ use upend_base::constants::{ATTR_IN, HIER_ROOT_ADDR, HIER_ROOT_INVARIANT};
use upend_base::entry::Entry;
use upend_base::lang::{PatternQuery, Query, QueryComponent, QueryPart};
use crate::UpEndDbError;
use super::UpEndConnection;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct UNode(String);
impl std::str::FromStr for UNode {
type Err = anyhow::Error;
type Err = crate::UpEndDbError;
fn from_str(string: &str) -> Result<Self, Self::Err> {
if string.is_empty() {
Err(anyhow!("UNode can not be empty."))
Err(crate::UpEndDbError::InvalidData(
"UNode can not be empty.".to_string(),
))
} else {
Ok(Self(string.to_string()))
}
@ -39,7 +42,7 @@ impl std::fmt::Display for UNode {
pub struct UHierPath(pub Vec<UNode>);
impl std::str::FromStr for UHierPath {
type Err = anyhow::Error;
type Err = crate::UpEndDbError;
fn from_str(string: &str) -> Result<Self, Self::Err> {
if string.is_empty() {
@ -70,7 +73,7 @@ impl std::fmt::Display for UHierPath {
}
}
pub fn list_roots(connection: &UpEndConnection) -> Result<Vec<Address>> {
pub fn list_roots(connection: &UpEndConnection) -> crate::Result<Vec<Address>> {
Ok(connection
.query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
entity: QueryComponent::Variable(None),
@ -91,7 +94,7 @@ pub fn fetch_or_create_dir(
parent: Option<Address>,
directory: UNode,
create: bool,
) -> Result<Address> {
) -> crate::Result<Address> {
match parent.clone() {
Some(address) => trace!("FETCHING/CREATING {}/{:#}", address, directory),
None => trace!("FETCHING/CREATING /{:#}", directory),
@ -162,11 +165,14 @@ pub fn fetch_or_create_dir(
Ok(new_directory_address)
} else {
Err(anyhow!("Node {:?} does not exist.", directory.0))
Err(UpEndDbError::InvalidStateError(format!(
"Node {:?} does not exist.",
directory.0
)))
}
}
1 => Ok(valid_directories[0].clone()),
_ => Err(anyhow!(format!(
_ => Err(UpEndDbError::InvalidStateError(format!(
"Invalid database state - more than one directory matches the query {:?}/{:#}!",
parent, directory
))),
@ -177,7 +183,7 @@ pub fn resolve_path(
connection: &UpEndConnection,
path: &UHierPath,
create: bool,
) -> Result<Vec<Address>> {
) -> crate::Result<Vec<Address>> {
let mut result: Vec<Address> = vec![];
let mut path_stack = path.0.to_vec();
@ -202,7 +208,7 @@ pub fn resolve_path_cached(
path: &UHierPath,
create: bool,
cache: &Arc<Mutex<ResolveCache>>,
) -> Result<Vec<Address>> {
) -> crate::Result<Vec<Address>> {
let mut result: Vec<Address> = vec![];
let mut path_stack = path.0.to_vec();
@ -225,7 +231,7 @@ pub fn resolve_path_cached(
Ok(result)
}
pub fn initialize_hier(connection: &UpEndConnection) -> Result<()> {
pub fn initialize_hier(connection: &UpEndConnection) -> crate::Result<()> {
connection.insert_entry(Entry::try_from(&*HIER_ROOT_INVARIANT)?)?;
upend_insert_val!(connection, HIER_ROOT_ADDR, ATTR_LABEL, "Hierarchy Root")?;
Ok(())
@ -233,8 +239,6 @@ pub fn initialize_hier(connection: &UpEndConnection) -> Result<()> {
#[cfg(test)]
mod tests {
use anyhow::Result;
use crate::UpEndDatabase;
use tempfile::TempDir;

View File

@ -1,4 +1,3 @@
use anyhow::{anyhow, Result};
use serde::{Serialize, Serializer};
use std::{
collections::HashMap,
@ -31,16 +30,22 @@ pub struct JobContainerData {
}
#[derive(Debug, Clone)]
pub struct JobInProgessError(String);
impl std::fmt::Display for JobInProgessError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "job of type {} is already in progress", self.0)
}
// Errors that can arise while creating, querying, or updating background jobs.
pub enum JobError {
// A job of the same type is already in progress; the payload is the job type.
// NOTE(review): "Progess" is a misspelling of "Progress" — renaming the variant
// would break callers, so it is left as-is.
JobInProgessError(String),
// The referenced job ID is not present in the job container.
JobNotFound,
// Catch-all for unexpected failures, e.g. a poisoned lock on the container.
Unknown(String),
}
impl std::error::Error for JobInProgessError {}
/// Human-readable rendering of [`JobError`] for logs and user-facing messages.
impl std::fmt::Display for JobError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // BUG FIX: the original wrote `self.0` — a leftover from the old
            // `JobInProgessError(String)` tuple struct — which does not exist
            // on an enum; use the bound `msg` instead.
            JobError::JobInProgessError(msg) => {
                write!(f, "job of type {} is already in progress", msg)
            }
            // BUG FIX: the original match omitted this variant, making the
            // match non-exhaustive (compile error).
            JobError::JobNotFound => write!(f, "no such job"),
            JobError::Unknown(msg) => write!(f, "Unknown: {}", msg),
        }
    }
}
#[derive(Clone)]
pub struct JobContainer(Arc<RwLock<JobContainerData>>);
@ -49,7 +54,7 @@ impl JobContainer {
JobContainer(Arc::new(RwLock::new(JobContainerData::default())))
}
pub fn add_job<S, IS>(&mut self, job_type: IS, title: S) -> Result<JobHandle>
pub fn add_job<S, IS>(&mut self, job_type: IS, title: S) -> Result<JobHandle, JobError>
where
S: AsRef<str>,
IS: Into<Option<S>>,
@ -57,7 +62,9 @@ impl JobContainer {
let jobs = &mut self
.0
.write()
.map_err(|err| anyhow!("Couldn't lock job container for writing! {err:?}"))?
.map_err(|err| {
JobError::Unknown(format!("Couldn't lock job container for writing! {err:?}"))
})?
.jobs;
let job = Job {
@ -71,11 +78,7 @@ impl JobContainer {
.iter()
.any(|(_, j)| j.state == JobState::InProgress && j.job_type == job.job_type)
{
return Err(JobInProgessError(format!(
r#"Job of type "{}" currently in progress."#,
job_type
))
.into());
return Err(JobError::JobInProgessError(job_type).into());
}
}
@ -87,11 +90,13 @@ impl JobContainer {
})
}
pub fn get_jobs(&self) -> Result<HashMap<JobId, Job>> {
pub fn get_jobs(&self) -> Result<HashMap<JobId, Job>, JobError> {
let jobs = &self
.0
.read()
.map_err(|err| anyhow!("Couldn't lock job container for writing! {err:?}"))?
.map_err(|err| {
JobError::Unknown(format!("Couldn't lock job container for writing! {err:?}"))
})?
.jobs;
Ok(jobs.clone())
@ -122,11 +127,13 @@ pub struct JobHandle {
}
impl JobHandle {
pub fn update_progress(&mut self, progress: f32) -> Result<()> {
pub fn update_progress(&mut self, progress: f32) -> Result<(), JobError> {
let jobs = &mut self
.container
.write()
.map_err(|err| anyhow!("Couldn't lock job container for writing! {err:?}"))?
.map_err(|err| {
JobError::Unknown(format!("Couldn't lock job container for writing! {err:?}"))
})?
.jobs;
if let Some(job) = jobs.get_mut(&self.job_id) {
@ -136,22 +143,24 @@ impl JobHandle {
}
Ok(())
} else {
Err(anyhow!("No such job."))
Err(JobError::JobNotFound)
}
}
pub fn update_state(&mut self, state: JobState) -> Result<()> {
pub fn update_state(&mut self, state: JobState) -> Result<(), JobError> {
let jobs = &mut self
.container
.write()
.map_err(|err| anyhow!("Couldn't lock job container for writing! {err:?}"))?
.map_err(|err| {
JobError::Unknown(format!("Couldn't lock job container for writing! {err:?}"))
})?
.jobs;
if let Some(job) = jobs.get_mut(&self.job_id) {
job.state = state;
Ok(())
} else {
Err(anyhow!("No such job."))
Err(JobError::JobNotFound)
}
}
}

View File

@ -25,7 +25,6 @@ use crate::engine::execute;
use crate::inner::models;
use crate::inner::schema::data;
use crate::util::LoggerSink;
use anyhow::{anyhow, Result};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::result::{DatabaseErrorKind, Error};
@ -52,6 +51,62 @@ pub struct ConnectionOptions {
pub mutex: Arc<Mutex<()>>,
}
// Unified error type for the upend-db crate: wraps database (diesel), I/O and
// base-crate (`UpEndError`) failures alongside db-specific conditions.
#[derive(Debug)]
pub enum UpEndDbError {
// A genuine database-level failure reported by diesel.
DatabaseError(diesel::result::Error),
// Caller-supplied data failed validation (e.g. empty attribute).
InvalidData(String),
// The database contents violate an expected invariant.
InvalidStateError(String),
// The on-disk database version is newer than this build supports.
IncompatibleVersionError(String),
// An error propagated from the upend-base crate.
UpEndError(UpEndError),
IOError(std::io::Error),
// Anything that does not fit the categories above.
Unknown(String),
}
impl From<std::io::Error> for UpEndDbError {
fn from(error: std::io::Error) -> Self {
UpEndDbError::IOError(error)
}
}
// Convert diesel errors: only true `DatabaseError`s are kept typed so callers
// can still match on them (e.g. to detect unique-constraint violations);
// everything else is flattened to `Unknown` with its display text.
impl From<diesel::result::Error> for UpEndDbError {
fn from(error: diesel::result::Error) -> Self {
match error {
Error::DatabaseError(_, _) => UpEndDbError::DatabaseError(error),
// NOTE(review): this discards the typed variant (e.g. `NotFound`) —
// confirm no caller needs to distinguish those cases.
_ => UpEndDbError::Unknown(format!("{}", error)),
}
}
}
impl From<UpEndError> for UpEndDbError {
fn from(error: UpEndError) -> Self {
UpEndDbError::UpEndError(error)
}
}
impl std::fmt::Display for UpEndDbError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
UpEndDbError::DatabaseError(err) => format!("Database error: {}", err),
UpEndDbError::InvalidData(err) => format!("Invalid data: {}", err),
UpEndDbError::InvalidStateError(err) => format!("Invalid state: {}", err),
UpEndDbError::IncompatibleVersionError(err) => {
format!("Incompatible version: {}", err)
}
UpEndDbError::UpEndError(err) => format!("UpEnd error: {}", err),
UpEndDbError::Unknown(err) => format!("Unknown error: {}", err),
UpEndDbError::IOError(err) => format!("I/O error: {}", err),
}
)
}
}
// Marker impl so `UpEndDbError` composes with `Box<dyn Error>` and the `?`
// operator in error-trait-bounded contexts.
impl std::error::Error for UpEndDbError {}
pub type Result<T> = std::result::Result<T, UpEndDbError>;
impl ConnectionOptions {
pub fn apply(&self, connection: &SqliteConnection) -> QueryResult<()> {
let _lock = self.mutex.lock().unwrap();
@ -79,7 +134,10 @@ impl ConnectionOptions {
impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
for ConnectionOptions
{
fn on_acquire(&self, conn: &mut SqliteConnection) -> Result<(), diesel::r2d2::Error> {
fn on_acquire(
&self,
conn: &mut SqliteConnection,
) -> std::result::Result<(), diesel::r2d2::Error> {
self.apply(conn).map_err(diesel::r2d2::Error::QueryError)
}
}
@ -155,10 +213,16 @@ impl UpEndDatabase {
if !new {
let db_major: u64 = connection
.get_meta("VERSION")?
.ok_or(anyhow!("Database version not found!"))?
.ok_or(UpEndDbError::InvalidStateError(
"Database version not found!".to_string(),
))?
.parse()?;
if db_major > build::PKG_VERSION_MAJOR.parse().unwrap() {
return Err(anyhow!("Incompatible database! Found version "));
return Err(UpEndDbError::IncompatibleVersionError(format!(
"{} > {}",
db_major,
build::PKG_VERSION_MAJOR
)));
}
}
@ -190,9 +254,9 @@ pub struct UpEndConnection {
}
impl UpEndConnection {
pub fn transaction<T, E, F>(&self, f: F) -> Result<T, E>
pub fn transaction<T, E, F>(&self, f: F) -> Result<T>
where
F: FnOnce() -> Result<T, E>,
F: FnOnce() -> Result<T>,
E: From<Error>,
{
/*
@ -390,7 +454,7 @@ impl UpEndConnection {
Ok(num) => Ok(num),
Err(error) => match error {
Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => Ok(0),
_ => Err(anyhow!(error)),
_ => error,
},
}
}
@ -435,9 +499,12 @@ impl UpEndConnection {
let conn = self.pool.get()?;
let total_entry_count = data.count().load::<i64>(&conn)?;
let total_entry_count = total_entry_count
.first()
.ok_or(anyhow!("Couldn't get entry count"))?;
let total_entry_count =
total_entry_count
.first()
.ok_or(UpEndDbError::InvalidStateError(
"Couldn't get entry count".to_string(),
))?;
let api_entry_count = data
.filter(provenance.like("API%"))
@ -445,15 +512,20 @@ impl UpEndConnection {
.load::<i64>(&conn)?;
let api_entry_count = api_entry_count
.first()
.ok_or(anyhow!("Couldn't get API entry count"))?;
.ok_or(UpEndDbError::InvalidStateError(
"Couldn't get API entry count".to_string(),
))?;
let implicit_entry_count = data
.filter(provenance.like("%IMPLICIT%"))
.count()
.load::<i64>(&conn)?;
let implicit_entry_count = implicit_entry_count
.first()
.ok_or(anyhow!("Couldn't get API entry count"))?;
let implicit_entry_count =
implicit_entry_count
.first()
.ok_or(UpEndDbError::InvalidStateError(
"Couldn't get API entry count".to_string(),
))?;
Ok(serde_json::json!({
"entryCount": {

View File

@ -3,11 +3,12 @@ use self::db::files;
use super::{Blob, StoreError, UpStore, UpdateOptions, UpdatePathOutcome};
use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode};
use crate::jobs::{JobContainer, JobHandle};
use crate::stores::Result;
use crate::util::hash_at_path;
use crate::{
BlobMode, ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR,
BlobMode, ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UpEndDbError,
UPEND_SUBDIR,
};
use anyhow::{anyhow, Result};
use chrono::prelude::*;
use diesel::r2d2::{self, ConnectionManager, ManageConnection};
use diesel::ExpressionMethods;
@ -256,7 +257,7 @@ impl FsStore {
let normalized_path = self.normalize_path(&path)?;
let normalized_path_str = normalized_path
.to_str()
.ok_or(anyhow!("Path not valid unicode!"))?;
.ok_or(UpEndDbError::Unknown("Path not UTF-8?!".to_string()))?;
let mut file_hash: Option<UpMultihash> = None;
@ -420,7 +421,7 @@ impl FsStore {
let new_file = db::NewFile {
path: normalized_path
.to_str()
.ok_or(anyhow!("Path not UTF-8?!"))?
.ok_or(UpEndDbError::Unknown("Path not UTF-8?!".to_string()))?
.to_string(),
hash: hash.to_bytes(),
added: NaiveDateTime::from_timestamp_opt(Utc::now().timestamp(), 0).unwrap(),
@ -620,7 +621,7 @@ impl From<db::File> for Blob {
}
impl UpStore for FsStore {
fn retrieve(&self, hash: &UpMultihash) -> Result<Vec<Blob>, super::StoreError> {
fn retrieve(&self, hash: &UpMultihash) -> Result<Vec<Blob>> {
Ok(self
.retrieve_file(hash)
.map_err(|e| StoreError::Unknown(e.to_string()))?
@ -629,7 +630,7 @@ impl UpStore for FsStore {
.collect())
}
fn retrieve_all(&self) -> Result<Vec<Blob>, super::StoreError> {
fn retrieve_all(&self) -> Result<Vec<Blob>> {
Ok(self
.retrieve_all_files()
.map_err(|e| StoreError::Unknown(e.to_string()))?
@ -643,7 +644,7 @@ impl UpStore for FsStore {
connection: UpEndConnection,
blob: Blob,
name_hint: Option<String>,
) -> Result<UpMultihash, super::StoreError> {
) -> Result<UpMultihash> {
let file_path = blob.get_file_path();
let hash = hash_at_path(file_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
@ -701,7 +702,7 @@ impl UpStore for FsStore {
db: &UpEndDatabase,
mut job_container: JobContainer,
options: UpdateOptions,
) -> Result<Vec<UpdatePathOutcome>, StoreError> {
) -> Result<Vec<UpdatePathOutcome>> {
trace!(
"Running a vault update of {:?}, options = {:?}.",
self.path,

View File

@ -25,7 +25,7 @@ impl std::fmt::Display for StoreError {
impl std::error::Error for StoreError {}
type Result<T> = std::result::Result<T, StoreError>;
pub type Result<T> = std::result::Result<T, StoreError>;
pub struct Blob {
file_path: PathBuf,

View File

@ -34,7 +34,7 @@ impl std::io::Write for LoggerSink {
}
}
pub fn hash_at_path<P: AsRef<Path>>(path: P) -> anyhow::Result<UpMultihash> {
pub fn hash_at_path<P: AsRef<Path>>(path: P) -> crate::Result<UpMultihash> {
let path = path.as_ref();
trace!("Hashing {:?}...", path);
let fbuffer = FileBuffer::open(path)?;