chore!: separate server functionality into a crate

feat/type-attributes
Tomáš Mládek 2023-04-20 16:02:41 +02:00
parent be7cc11e36
commit 7e0151fa64
26 changed files with 1048 additions and 208 deletions

764
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -9,9 +9,10 @@ license = "AGPL-3.0-or-later"
edition = "2018"
build = "build.rs"
[dependencies]
clap = "2.33.0"
[workspace]
members = ["server", "tools/upend_cli"]
[dependencies]
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
@ -36,14 +37,6 @@ diesel = { version = "1.4", features = [
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
actix = "^0.10"
actix-files = "^0.5"
actix-rt = "^2.0"
actix-web = "^3.3"
actix_derive = "^0.5"
actix-cors = "0.5"
jsonwebtoken = "8"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -51,7 +44,12 @@ lexpr = "0.2.6"
regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = ["alloc", "multihash-impl", "sha2", "identity"] }
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "0.8", features = ["v4"] }
filebuffer = "0.4.0"
@ -60,45 +58,11 @@ walkdir = "2"
rand = "0.8"
mime = "^0.3.16"
tree_magic_mini = "3.0.2"
dotenv = "0.15.0"
xdg = "^2.1"
opener = { version = "^0.5.0", optional = true }
is_executable = { version = "1.0.1", optional = true }
webbrowser = { version = "^0.5.5", optional = true }
nonempty = "0.6.0"
actix-multipart = "0.3.0"
image = { version = "0.23.14", optional = true }
webp = { version = "0.2.0", optional = true }
webpage = { version = "1.4.0", optional = true }
id3 = { version = "1.0.2", optional = true }
kamadak-exif = { version = "0.5.4", optional = true }
shadow-rs = "0.17"
[build-dependencies]
shadow-rs = "0.17"
[features]
default = [
"desktop",
"previews",
"previews-image",
"extractors-web",
"extractors-audio",
"extractors-photo",
"extractors-media",
]
desktop = ["webbrowser", "opener", "is_executable"]
previews = []
previews-image = ["image", "webp", "kamadak-exif"]
extractors-web = ["webpage"]
extractors-audio = ["id3"]
extractors-photo = ["kamadak-exif"]
extractors-media = []

View File

@ -10,7 +10,7 @@ package: all
linuxdeploy-x86_64.AppImage --appdir dist -d upend.desktop --output appimage
backend:
cargo build --release
cd server && cargo build --release
frontend: tools/upend_js/index.js
cd webui && yarn add ../tools/upend_js && yarn install --immutable && yarn build

101
server/Cargo.toml Normal file
View File

@ -0,0 +1,101 @@
# Manifest for the UpEnd server binary crate, split out of the core library.
[package]
name = "upend-server"
version = "0.1.0"
edition = "2021"

[dependencies]
# The core UpEnd library (database, addressing, utilities) at the workspace root.
upend = { path = "../" }

# CLI argument parsing.
clap = "2.33.0"

# Logging / tracing.
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }

# Error handling.
anyhow = "1.0"
thiserror = "1.0"

# Parallelism and async runtime support.
rayon = "1.4.0"
num_cpus = "1.13"
futures = "0.3.24"
futures-util = "~0.3.12"

# Lazy statics and caching.
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"

# Database (SQLite via diesel, with bundled libsqlite3).
diesel = { version = "1.4", features = [
    "sqlite",
    "r2d2",
    "chrono",
    "serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }

# Actix web server stack.
actix = "^0.10"
actix-files = "^0.5"
actix-rt = "^2.0"
actix-web = "^3.3"
actix_derive = "^0.5"
actix-cors = "0.5"

# Authentication tokens.
jsonwebtoken = "8"

# (De)serialization.
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"

regex = "1"

# Content addressing (base58 + multihash).
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
    "alloc",
    "multihash-impl",
    "sha2",
    "identity",
] }
uuid = { version = "0.8", features = ["v4"] }

# Filesystem helpers.
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"

rand = "0.8"

# MIME type detection.
mime = "^0.3.16"
tree_magic_mini = "3.0.2"

# Desktop integration (optional; see the `desktop` feature below).
opener = { version = "^0.5.0", optional = true }
is_executable = { version = "1.0.1", optional = true }
webbrowser = { version = "^0.5.5", optional = true }

nonempty = "0.6.0"
actix-multipart = "0.3.0"

# Preview generation and metadata extractors (optional; see features below).
image = { version = "0.23.14", optional = true }
webp = { version = "0.2.0", optional = true }
webpage = { version = "1.4.0", optional = true }
id3 = { version = "1.0.2", optional = true }
kamadak-exif = { version = "0.5.4", optional = true }

# Build-time metadata (git/build info), used at both build and run time.
shadow-rs = "0.17"

[build-dependencies]
shadow-rs = "0.17"

[features]
default = [
    "desktop",
    "previews",
    "previews-image",
    "extractors-web",
    "extractors-audio",
    "extractors-photo",
    "extractors-media",
]
desktop = ["webbrowser", "opener", "is_executable"]
previews = []
previews-image = ["image", "webp", "kamadak-exif"]
extractors-web = ["webpage"]
extractors-audio = ["id3"]
extractors-photo = ["kamadak-exif"]
extractors-media = []

190
server/src/addressing.rs Normal file
View File

@ -0,0 +1,190 @@
use crate::util::hash::{b58_decode, b58_encode, Hash, Hashable};
use anyhow::{anyhow, Result};
use multihash::{Code, Multihash, MultihashDigest};
use serde::de::Visitor;
use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::str::FromStr;
use thiserror::private::DisplayAsDisplay;
use uuid::Uuid;
/// An UpEnd address: either a SHA2-256 content hash, or one of several
/// identity-encoded variants (UUID, attribute name, URL) distinguished by a
/// one-byte marker inside an identity multihash (see `encode`/`decode`).
#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Address {
    Hash(Hash),
    Uuid(Uuid),
    Attribute(String),
    Url(String),
}

// multihash SHA2-256
const SHA2_256: u64 = 0x12;
// multihash identity
const IDENTITY: u64 = 0x00;
impl Address {
pub fn encode(&self) -> Result<Vec<u8>> {
let hash = match self {
Self::Hash(hash) => Multihash::wrap(SHA2_256, &hash.0).map_err(|err| anyhow!(err))?,
Self::Uuid(uuid) => {
Code::Identity.digest(&[vec![b'U'], uuid.as_bytes().to_vec()].concat())
}
Self::Attribute(attribute) => {
Code::Identity.digest(&[&[b'A'], attribute.as_bytes()].concat())
}
Self::Url(url) => Code::Identity.digest(&[&[b'X'], url.as_bytes()].concat()),
};
Ok(hash.to_bytes())
}
pub fn decode(buffer: &[u8]) -> Result<Self> {
let multihash = Multihash::from_bytes(buffer)
.map_err(|err| anyhow!("Error decoding address: {}", err))?;
match multihash.code() {
SHA2_256 => Ok(Self::Hash(Hash(multihash.digest().to_vec()))),
IDENTITY => {
let digest = multihash.digest().to_owned();
let digest_content: Vec<u8> = digest.clone().into_iter().skip(1).collect();
match digest[0] {
b'U' => Ok(Self::Uuid(uuid::Uuid::from_slice(
digest_content.as_slice(),
)?)),
b'A' => Ok(Self::Attribute(String::from_utf8(digest_content)?)),
b'X' => Ok(Self::Url(String::from_utf8(digest_content)?)),
_ => Err(anyhow!("Error decoding address: Unknown identity marker.")),
}
}
_ => Err(anyhow!(
"Error decoding address: Unknown hash function type."
)),
}
}
}
impl Serialize for Address {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(b58_encode(self.encode().map_err(ser::Error::custom)?).as_str())
}
}
struct AddressVisitor;
impl<'de> Visitor<'de> for AddressVisitor {
type Value = Address;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))?;
Address::decode(bytes.as_ref())
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))
}
}
impl<'de> Deserialize<'de> for Address {
    /// Deserialize from the base58 multihash string form via [`AddressVisitor`].
    fn deserialize<D>(deserializer: D) -> Result<Address, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_str(AddressVisitor)
    }
}
impl FromStr for Address {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Address::decode(
b58_decode(s)
.map_err(|e| anyhow!("Error deserializing address: {}", e))?
.as_ref(),
)
}
}
impl std::fmt::Display for Address {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // An encoding failure has no richer representation here than
        // `fmt::Error`, so it is mapped and propagated.
        let encoded = self.encode().map_err(|_| std::fmt::Error)?;
        f.write_str(&b58_encode(encoded))
    }
}
impl std::fmt::Debug for Address {
    /// Debug form: `Address<Variant>: <base58 string>`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Pass `self` to `write!` directly via its `Display` impl instead of
        // going through `thiserror::private::DisplayAsDisplay` — the
        // `thiserror::private` module is internal, not public API, and may
        // break on any thiserror upgrade.
        write!(
            f,
            "Address<{}>: {}",
            match self {
                Address::Hash(_) => "Hash",
                Address::Uuid(_) => "UUID",
                Address::Attribute(_) => "Attribute",
                Address::Url(_) => "URL",
            },
            self
        )
    }
}
/// Anything that can be hashed can be addressed: the default address of a
/// [`Hashable`] value is the [`Address::Hash`] of its content hash.
pub trait Addressable: Hashable {
    fn address(&self) -> Result<Address> {
        Ok(Address::Hash(self.hash()?))
    }
}
#[cfg(test)]
mod tests {
    use anyhow::Result;
    use uuid::Uuid;

    use crate::addressing::Address;
    use crate::util::hash::Hash;

    /// Encode the given address to bytes, decode it back, and assert that the
    /// roundtrip preserves equality.
    fn assert_roundtrip(addr: Address) -> Result<()> {
        let encoded = addr.encode()?;
        let decoded = Address::decode(&encoded)?;
        assert_eq!(addr, decoded);
        Ok(())
    }

    #[test]
    fn test_hash_codec() -> Result<()> {
        assert_roundtrip(Address::Hash(Hash(vec![1, 2, 3, 4, 5])))
    }

    #[test]
    fn test_uuid_codec() -> Result<()> {
        assert_roundtrip(Address::Uuid(Uuid::new_v4()))
    }

    #[test]
    fn test_attribute_codec() -> Result<()> {
        assert_roundtrip(Address::Attribute(String::from("ATTRIBUTE")))
    }

    #[test]
    fn test_url_codec() -> Result<()> {
        assert_roundtrip(Address::Url(String::from("https://upend.dev")))
    }
}

19
server/src/common.rs Normal file
View File

@ -0,0 +1,19 @@
use anyhow::{anyhow, Result};
use shadow_rs::{is_debug, shadow};
shadow!(build);
/// Resolve the path of a static asset directory shipped alongside the binary.
///
/// In debug builds the lookup base is `../../tmp/static` relative to the
/// executable (i.e. relative to `target/{debug,release}`); in release builds
/// it is the executable's own directory.
///
/// Returns an error if the executable path cannot be determined or the
/// resolved directory does not exist.
pub fn get_static_dir<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
    // Directory containing the running executable (not the process CWD).
    // `parent()` is `None` only for a root path; report it as an error
    // instead of panicking, since this function already returns `Result`.
    let exe_dir = std::env::current_exe()?
        .parent()
        .ok_or_else(|| anyhow!("Executable path has no parent directory."))?
        .to_path_buf();

    let base_path = if is_debug() {
        exe_dir.join("../../tmp/static")
    } else {
        exe_dir
    };

    let result = base_path.join(dir.as_ref());

    if result.exists() {
        Ok(result)
    } else {
        Err(anyhow!("Path {result:?} doesn't exist."))
    }
}

8
server/src/config.rs Normal file
View File

@ -0,0 +1,8 @@
/// Runtime configuration for the UpEnd server.
#[derive(Clone, Debug)]
pub struct UpEndConfig {
    // Optional display name for the vault.
    pub vault_name: Option<String>,
    // Whether desktop integration is enabled — presumably gates the
    // `desktop` feature's webbrowser/opener functionality; confirm at usage.
    pub desktop_enabled: bool,
    // Whether executables in the vault are trusted — TODO confirm semantics
    // against usage in routes/extractors.
    pub trust_executables: bool,
    // Server secret — presumably used for signing auth tokens (jsonwebtoken
    // is a dependency); confirm at usage.
    pub secret: String,
    // Optional key — NOTE(review): purpose not visible here; verify callers.
    pub key: Option<String>,
}

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use super::Extractor;
use crate::{
use upend::{
addressing::Address,
database::{
constants,

View File

@ -1,7 +1,7 @@
use std::{process::Command, sync::Arc};
use super::Extractor;
use crate::{
use upend::{
addressing::Address,
database::{
constants::LABEL_ATTR,

View File

@ -1,4 +1,4 @@
use crate::{
use upend::{
addressing::Address,
database::{entry::Entry, stores::UpStore, UpEndConnection, UpEndDatabase},
util::jobs::JobContainer,

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use super::Extractor;
use crate::{
use upend::{
addressing::Address,
database::{
constants,

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use super::Extractor;
use crate::{
use upend::{
addressing::Address,
database::{entry::Entry, stores::UpStore, UpEndConnection},
util::jobs::{JobContainer, JobState},
@ -79,7 +79,7 @@ impl Extractor for WebExtractor {
fn is_needed(
&self,
address: &Address,
connection: &crate::database::UpEndConnection,
connection: &UpEndConnection,
) -> Result<bool> {
Ok(connection
.query(

View File

@ -1,9 +1,5 @@
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
extern crate upend;
use std::net::SocketAddr;
use std::path::PathBuf;
@ -17,25 +13,24 @@ use std::sync::Arc;
use tracing::{debug, info, warn};
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
use crate::{
use upend::{
common::{build, get_static_dir},
config::UpEndConfig,
database::{
stores::{fs::FsStore, UpStore},
UpEndDatabase,
},
util::{exec::block_background, jobs::JobContainer},
util::jobs::JobContainer,
};
mod addressing;
mod common;
mod config;
mod database;
mod extractors;
mod previews;
use crate::util::exec::block_background;
mod routes;
mod util;
mod extractors;
mod previews;
fn main() -> Result<()> {
tracing_subscriber::fmt()
.with_env_filter(

View File

@ -1,7 +1,7 @@
use crate::database::stores::UpStore;
use crate::util::hash::b58_encode;
use crate::util::hash::Hash;
use crate::util::jobs::{JobContainer, JobState};
use upend::database::stores::UpStore;
use upend::util::hash::b58_encode;
use upend::util::hash::Hash;
use upend::util::jobs::{JobContainer, JobState};
use anyhow::{anyhow, Result};
use tracing::{debug, trace};

View File

@ -1,17 +1,17 @@
use crate::addressing::{Address, Addressable};
use crate::common::build;
use crate::config::UpEndConfig;
use crate::database::constants::{ADDED_ATTR, LABEL_ATTR};
use crate::database::entry::{Entry, EntryValue, InvariantEntry};
use crate::database::hierarchies::{list_roots, resolve_path, UHierPath};
use crate::database::lang::Query;
use crate::database::stores::{Blob, UpStore};
use crate::database::UpEndDatabase;
use upend::addressing::{Address, Addressable};
use upend::common::build;
use upend::config::UpEndConfig;
use upend::database::constants::{ADDED_ATTR, LABEL_ATTR};
use upend::database::entry::{Entry, EntryValue, InvariantEntry};
use upend::database::hierarchies::{list_roots, resolve_path, UHierPath};
use upend::database::lang::Query;
use upend::database::stores::{Blob, UpStore};
use upend::database::UpEndDatabase;
use crate::extractors::{self};
use crate::previews::PreviewStore;
use crate::util::exec::block_background;
use crate::util::hash::{b58_decode, b58_encode};
use crate::util::jobs::JobContainer;
use upend::util::hash::{b58_decode, b58_encode};
use upend::util::jobs;
use actix_files::NamedFile;
use actix_multipart::Multipart;
use actix_web::error::{
@ -45,7 +45,7 @@ pub struct State {
pub upend: Arc<UpEndDatabase>,
pub store: Arc<Box<dyn UpStore + Sync + Send>>,
pub config: UpEndConfig,
pub job_container: JobContainer,
pub job_container: jobs::JobContainer,
pub preview_store: Option<Arc<PreviewStore>>,
pub preview_pool: Option<Arc<rayon::ThreadPool>>,
}
@ -783,7 +783,7 @@ pub async fn get_jobs(
jobs
} else {
jobs.into_iter()
.filter(|(_, j)| matches!(j.state, crate::util::jobs::JobState::InProgress))
.filter(|(_, j)| matches!(j.state, jobs::JobState::InProgress))
.collect()
}))
}

1
server/src/util/mod.rs Normal file
View File

@ -0,0 +1 @@
//! Server-side utility modules.
pub mod exec;

View File

@ -6,9 +6,9 @@ use super::inner::models::Entry;
use super::inner::schema::data;
use super::lang::{PatternQuery, Query, QueryComponent, QueryPart, QueryQualifier};
use crate::database::inner::models;
use crate::diesel::IntoSql;
use crate::diesel::RunQueryDsl;
use crate::diesel::{ExpressionMethods, TextExpressionMethods};
use diesel::IntoSql;
use diesel::RunQueryDsl;
use diesel::{ExpressionMethods, TextExpressionMethods};
use anyhow::Result;
use diesel::expression::grouped::Grouped;
use diesel::expression::operators::{And, Not, Or};

View File

@ -1,21 +1,23 @@
#[macro_export]
macro_rules! upend_insert_val {
($db_connection:expr, $entity:expr, $attribute:expr, $value:expr) => {{
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: String::from($attribute),
value: crate::database::entry::EntryValue::String(String::from($value)),
value: upend::database::entry::EntryValue::String(String::from($value)),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
})
}};
}
#[macro_export]
macro_rules! upend_insert_addr {
($db_connection:expr, $entity:expr, $attribute:expr, $addr:expr) => {{
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: String::from($attribute),
value: crate::database::entry::EntryValue::Address($addr.clone()),
value: upend::database::entry::EntryValue::Address($addr.clone()),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
})

14
src/lib.rs Normal file
View File

@ -0,0 +1,14 @@
//! Core UpEnd library crate: database, addressing, and shared utilities,
//! consumed by the `upend-server` crate.

#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;

// Let this crate refer to itself by its external name, so that exported
// macros can use `upend::...` paths that work both internally and from
// dependent crates.
extern crate self as upend;

pub mod database;
pub mod util;

pub mod addressing;
pub mod common;
pub mod config;

View File

@ -1,5 +1,4 @@
use crate::addressing::Address;
use actix::prelude::*;
use anyhow::Result;
use diesel::backend::Backend;
use diesel::deserialize::FromSql;
@ -8,7 +7,7 @@ use diesel::{deserialize, sql_types};
use filebuffer::FileBuffer;
use multihash::Hasher;
use serde::{ser, Serialize, Serializer};
use std::path::{Path, PathBuf};
use std::path::{Path};
use tracing::trace;
#[derive(Debug, Clone, Eq, PartialEq, FromSqlRow, Hash)]
@ -46,26 +45,6 @@ pub trait Hashable {
fn hash(&self) -> Result<Hash>;
}
pub struct HasherWorker;
impl Actor for HasherWorker {
type Context = SyncContext<Self>;
}
#[derive(Message)]
#[rtype(result = "Result<Hash>")]
pub struct ComputeHash {
pub path: PathBuf,
}
impl Handler<ComputeHash> for HasherWorker {
type Result = Result<Hash>;
fn handle(&mut self, msg: ComputeHash, _: &mut Self::Context) -> Self::Result {
msg.path.as_path().hash()
}
}
impl Hashable for Path {
fn hash(self: &Path) -> Result<Hash> {
trace!("Hashing {:?}...", self);

View File

@ -1,4 +1,3 @@
pub mod exec;
pub mod hash;
pub mod jobs;