chore: `cargo update`, fix clippy lints

feat/type-attributes
Tomáš Mládek 2023-04-24 17:43:49 +02:00
parent d98ebf8917
commit 6c0434a289
7 changed files with 822 additions and 709 deletions

Cargo.lock (generated): 1466 lines changed

File diff suppressed because it is too large.


@@ -5,7 +5,6 @@ use serde::de::Visitor;
 use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
 use std::fmt;
 use std::str::FromStr;
-use thiserror::private::DisplayAsDisplay;
 use uuid::Uuid;

 #[derive(Clone, Eq, PartialEq, Hash)]
@@ -133,7 +132,7 @@ impl std::fmt::Debug for Address {
                 Address::Attribute(_) => "Attribute",
                 Address::Url(_) => "URL",
             },
-            self.as_display()
+            self
         )
     }
 }
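
Note on the hunks above: `thiserror::private::DisplayAsDisplay` lives in thiserror's `#[doc(hidden)]` internals and is not a supported public API, so dropping it fits the lint cleanup. Because `Address` implements `Display`, the value can be handed to `write!`'s `{}` placeholder directly. A minimal sketch of the pattern; this `Address` stand-in is illustrative, not the crate's actual definition:

```rust
use std::fmt;

// Illustrative stand-in: the real Address is an enum over hashes, UUIDs,
// attributes, and URLs; all that matters here is that it implements Display.
struct Address(String);

impl fmt::Display for Address {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl fmt::Debug for Address {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Any `impl Display` value satisfies `{}` directly; no adapter needed.
        write!(f, "Address<{}>({})", "URL", self)
    }
}

fn main() {
    println!("{:?}", Address("https://example.com".into()));
}
```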


@@ -6,19 +6,19 @@ use super::inner::models::Entry;
 use super::inner::schema::data;
 use super::lang::{PatternQuery, Query, QueryComponent, QueryPart, QueryQualifier};
 use crate::database::inner::models;
-use diesel::IntoSql;
-use diesel::RunQueryDsl;
-use diesel::{ExpressionMethods, TextExpressionMethods};
 use anyhow::Result;
 use diesel::expression::grouped::Grouped;
 use diesel::expression::operators::{And, Not, Or};
 use diesel::sql_types::Bool;
 use diesel::sqlite::Sqlite;
+use diesel::IntoSql;
+use diesel::RunQueryDsl;
 use diesel::{
     r2d2::{ConnectionManager, PooledConnection},
     SqliteConnection,
 };
 use diesel::{BoxableExpression, QueryDsl};
+use diesel::{ExpressionMethods, TextExpressionMethods};

 #[derive(Debug, Clone)]
 pub struct QueryExecutionError(String);
@@ -83,14 +83,14 @@ pub fn execute(
         .collect::<Option<Vec<_>>>();

     if let Some(pattern_queries) = pattern_queries {
-        let entries = zip(pattern_queries, subquery_results)
-            .into_iter()
-            .map(|(query, results)| {
+        let entries = zip(pattern_queries, subquery_results).map(
+            |(query, results)| {
                 results
                     .into_iter()
                     .map(|e| EntryWithVars::new(&query, e))
                     .collect::<Vec<EntryWithVars>>()
-            });
+            },
+        );

         let joined = entries
             .reduce(|acc, cur| {
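
Note: this rewrite addresses clippy's `useless_conversion` lint. `std::iter::zip` already returns an `Iterator`, so the intermediate `.into_iter()` was a no-op; rustfmt then reflows the closure into `map(...)`'s argument list. A self-contained sketch with illustrative names:

```rust
use std::iter::zip;

fn main() {
    let queries = vec!["q1", "q2"];
    let results = vec![vec![1, 2, 3], vec![4]];

    // `zip(..)` is already an Iterator: chain `.map` directly instead of
    // going through the redundant `.into_iter()` that `useless_conversion` flags.
    let entries = zip(queries, results).map(|(query, rs)| (query, rs.len()));

    for (query, count) in entries {
        println!("{query}: {count} result(s)");
    }
}
```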


@@ -46,7 +46,7 @@ impl TryFrom<&models::Entry> for Entry {
                 attribute: e.attribute.clone(),
                 value: value_str.parse()?,
                 provenance: e.provenance.clone(),
-                timestamp: e.timestamp.clone(),
+                timestamp: e.timestamp,
             })
         } else if let Some(value_num) = e.value_num {
             Ok(Entry {
@@ -54,7 +54,7 @@ impl TryFrom<&models::Entry> for Entry {
                 attribute: e.attribute.clone(),
                 value: EntryValue::Number(value_num),
                 provenance: e.provenance.clone(),
-                timestamp: e.timestamp.clone(),
+                timestamp: e.timestamp,
             })
         } else {
             Ok(Entry {
@@ -62,7 +62,7 @@ impl TryFrom<&models::Entry> for Entry {
                 attribute: e.attribute.clone(),
                 value: EntryValue::Number(f64::NAN),
                 provenance: e.provenance.clone(),
-                timestamp: e.timestamp.clone(),
+                timestamp: e.timestamp,
             })
         }
     }
@@ -88,7 +88,7 @@ impl TryFrom<&Entry> for models::Entry {
             value_num: None,
             immutable: false,
             provenance: e.provenance.clone(),
-            timestamp: e.timestamp.clone(),
+            timestamp: e.timestamp,
         };

         match e.value {
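
Note: all four hunks above fix the same clippy lint, `clone_on_copy`. chrono's `NaiveDateTime` implements `Copy`, so `.clone()` merely spells out an implicit copy. In miniature, with a stand-in `Copy` type:

```rust
// Stand-in for a Copy timestamp type such as chrono's NaiveDateTime.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Timestamp(i64);

struct Row {
    timestamp: Timestamp,
}

fn main() {
    let row = Row { timestamp: Timestamp(1_682_350_000) };
    let copied = row.timestamp;         // idiomatic: a plain copy
    let cloned = row.timestamp.clone(); // same value, but clippy warns here
    assert_eq!(copied, cloned);
}
```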


@@ -28,9 +28,11 @@ impl UNode {
         Ok(Self(s))
     }
+}

-    pub fn as_ref(&self) -> &String {
-        &self.0
+impl From<UNode> for String {
+    fn from(value: UNode) -> Self {
+        value.0
     }
 }
@@ -144,7 +146,7 @@ pub fn fetch_or_create_dir(
         .query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
             entity: QueryComponent::Variable(None),
             attribute: QueryComponent::Exact(LABEL_ATTR.into()),
-            value: QueryComponent::Exact(directory.as_ref().clone().into()),
+            value: QueryComponent::Exact(String::from(directory.clone()).into()),
         })))?
         .into_iter()
         .map(|e: Entry| e.entity);
@@ -183,7 +185,7 @@ pub fn fetch_or_create_dir(
     let directory_entry = Entry {
         entity: new_directory_address.clone(),
         attribute: String::from(LABEL_ATTR),
-        value: directory.as_ref().clone().into(),
+        value: String::from(directory).into(),
         provenance: "SYSTEM FS".to_string(),
         timestamp: chrono::Utc::now().naive_utc(),
     };
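
Note: the first hunk replaces `UNode`'s inherent `as_ref` (which returned `&String` and only mimicked the `AsRef` trait by name) with a proper `From<UNode> for String` impl; the two later hunks update the call sites accordingly. A sketch of the resulting API, with `UNode` reduced to its wrapped `String`:

```rust
#[derive(Clone)]
struct UNode(String);

impl From<UNode> for String {
    fn from(value: UNode) -> Self {
        value.0
    }
}

fn main() {
    let node = UNode("my-directory".to_string());
    // Both spellings now work; each consumes the node it converts.
    let label = String::from(node.clone());
    let label2: String = node.into();
    assert_eq!(label, label2);
}
```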


@@ -274,15 +274,16 @@ impl FsStore {
         if size == 0 {
             return Ok(UpdatePathOutcome::Skipped(path));
         }

-        let mtime = metadata
+        let mtime: Option<NaiveDateTime> = metadata
             .modified()
             .map(|t| {
-                NaiveDateTime::from_timestamp(
+                NaiveDateTime::from_timestamp_opt(
                     t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
                     0,
                 )
             })
-            .ok();
+            .ok()
+            .flatten();
         // Check if the path entry for this file already exists in database
         let existing_files_read = existing_files.read().unwrap();
@@ -358,12 +359,13 @@ impl FsStore {
         let mtime = metadata
             .modified()
             .map(|t| {
-                NaiveDateTime::from_timestamp(
+                NaiveDateTime::from_timestamp_opt(
                     t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
                     0,
                 )
             })
-            .ok();
+            .ok()
+            .flatten();

         let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
         self.insert_file_with_metadata(
@@ -377,6 +379,7 @@ impl FsStore {
         )
     }

+    #[allow(clippy::too_many_arguments)]
     fn insert_file_with_metadata(
         &self,
         connection: &UpEndConnection,
@@ -393,7 +396,7 @@ impl FsStore {
                 .ok_or(anyhow!("Path not UTF-8?!"))?
                 .to_string(),
             hash: (hash.clone()).0,
-            added: NaiveDateTime::from_timestamp(Utc::now().timestamp(), 0),
+            added: NaiveDateTime::from_timestamp_opt(Utc::now().timestamp(), 0).unwrap(),
             size,
             mtime,
         };
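
Note: `NaiveDateTime::from_timestamp` was deprecated in chrono 0.4 in favour of `from_timestamp_opt`, which returns `Option<NaiveDateTime>` instead of panicking on out-of-range input. Since `metadata.modified()` yields an `io::Result`, the combination needs `.ok().flatten()` to collapse the `Option<Option<_>>` into one `Option`. The new `#[allow(clippy::too_many_arguments)]` simply silences that lint on `insert_file_with_metadata` rather than reworking its signature. A sketch of the mtime logic, assuming chrono 0.4 as a dependency (`mtime_of` is a hypothetical helper name, not from the codebase):

```rust
use chrono::NaiveDateTime;
use std::time::UNIX_EPOCH;

// Hypothetical helper mirroring the diff's logic.
fn mtime_of(metadata: &std::fs::Metadata) -> Option<NaiveDateTime> {
    metadata
        .modified()
        .map(|t| {
            // Returns None instead of panicking if the seconds are out of range.
            NaiveDateTime::from_timestamp_opt(
                t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
                0,
            )
        })
        .ok()      // io::Result<Option<_>> -> Option<Option<_>>
        .flatten() // -> Option<NaiveDateTime>
}

fn main() -> std::io::Result<()> {
    let metadata = std::fs::metadata("Cargo.toml")?;
    println!("mtime: {:?}", mtime_of(&metadata));
    Ok(())
}
```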


@@ -17,19 +17,14 @@ pub struct Job {
 pub type JobType = String;

-#[derive(Serialize, Clone, Copy, PartialEq)]
+#[derive(Default, Serialize, Clone, Copy, PartialEq)]
 pub enum JobState {
+    #[default]
     InProgress,
     Done,
     Failed,
 }

-impl Default for JobState {
-    fn default() -> Self {
-        JobState::InProgress
-    }
-}
-
 #[derive(Default)]
 pub struct JobContainerData {
     jobs: HashMap<JobId, Job>,
@@ -103,6 +98,12 @@ impl JobContainer {
     }
 }

+impl Default for JobContainer {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 #[derive(Clone, Hash, PartialEq, Eq, Copy)]
 pub struct JobId(Uuid);
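
Note: the two hunks above resolve clippy's `derivable_impls` and `new_without_default` lints. The manual `Default for JobState` becomes `#[derive(Default)]` plus a `#[default]` variant (stable since Rust 1.62), and `JobContainer`, which exposes a zero-argument `new()`, gains the `Default` impl that clippy expects. A compilable miniature; this `JobContainer` is a simplified stand-in for the real struct:

```rust
#[derive(Default, Clone, Copy, Debug, PartialEq)]
enum JobState {
    #[default]
    InProgress,
    Done,
    Failed,
}

// Simplified stand-in; the real container wraps shared job state.
struct JobContainer {
    jobs: Vec<JobState>,
}

impl JobContainer {
    fn new() -> Self {
        JobContainer { jobs: Vec::new() }
    }
}

// `new_without_default`: a no-argument `new()` should come with Default.
impl Default for JobContainer {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    assert_eq!(JobState::default(), JobState::InProgress);
    let container = JobContainer::default();
    assert!(container.jobs.is_empty());
}
```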