Merge branch 'develop'

feat/type-attributes
Tomáš Mládek 2023-07-06 17:44:33 +02:00
commit 6e16d7090d
83 changed files with 2245 additions and 1652 deletions

Cargo.lock generated

@ -249,7 +249,7 @@ dependencies = [
"ahash 0.7.6",
"bytes",
"bytestring",
"cfg-if",
"cfg-if 1.0.0",
"cookie",
"derive_more",
"encoding_rs",
@ -324,7 +324,7 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"getrandom 0.2.9",
"once_cell",
"version_check",
@ -418,6 +418,18 @@ version = "1.0.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
[[package]]
name = "arrayref"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545"
[[package]]
name = "arrayvec"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "askama_escape"
version = "0.10.3"
@ -460,6 +472,41 @@ version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24a6904aef64d73cf10ab17ebace7befb918b82164785cb89907993be7f83813"
[[package]]
name = "blake2b_simd"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c2f0dc9a68c6317d884f97cc36cf5a3d20ba14ce404227df55e1af708ab04bc"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "blake2s_simd"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6637f448b9e61dfadbdcbae9a885fadee1f3eaffb1f8d3c1965d3ade8bdfd44f"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "blake3"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "729b71f35bd3fa1a4c86b85d32c8b9069ea7fe14f7a53cfabb65f62d4265b888"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if 1.0.0",
"constant_time_eq",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@ -550,6 +597,12 @@ dependencies = [
"jobserver",
]
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -572,6 +625,19 @@ dependencies = [
"winapi",
]
[[package]]
name = "cid"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd94671561e36e4e7de75f753f577edafb0e7c05d6e4547229fdf7938fbcd2c3"
dependencies = [
"core2",
"multibase",
"multihash",
"serde",
"unsigned-varint",
]
[[package]]
name = "clap"
version = "4.2.4"
@ -662,6 +728,12 @@ dependencies = [
"unicode-xid",
]
[[package]]
name = "constant_time_eq"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6"
[[package]]
name = "convert_case"
version = "0.4.0"
@ -719,7 +791,7 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -728,7 +800,7 @@ version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"crossbeam-utils",
]
@ -738,7 +810,7 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"crossbeam-epoch",
"crossbeam-utils",
]
@ -750,7 +822,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695"
dependencies = [
"autocfg",
"cfg-if",
"cfg-if 1.0.0",
"crossbeam-utils",
"memoffset",
"scopeguard",
@ -762,7 +834,7 @@ version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -960,7 +1032,7 @@ version = "0.8.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -1164,7 +1236,7 @@ version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
@ -1175,9 +1247,11 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"js-sys",
"libc",
"wasi 0.11.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
@ -1442,7 +1516,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -1545,6 +1619,15 @@ dependencies = [
"mutate_once",
]
[[package]]
name = "keccak"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940"
dependencies = [
"cpufeatures",
]
[[package]]
name = "language-tags"
version = "0.3.2"
@ -1677,7 +1760,7 @@ version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
]
[[package]]
@ -1745,6 +1828,12 @@ dependencies = [
"autocfg",
]
[[package]]
name = "memory_units"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3"
[[package]]
name = "migrations_internals"
version = "1.4.1"
@ -1845,10 +1934,14 @@ version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfd8a792c1694c6da4f68db0a9d707c72bd260994da179e6030a5dcee00bb815"
dependencies = [
"blake2b_simd",
"blake2s_simd",
"blake3",
"core2",
"digest",
"multihash-derive",
"sha2",
"sha3",
"unsigned-varint",
]
@ -2027,7 +2120,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97ea2d98598bf9ada7ea6ee8a30fb74f9156b63bbe495d64ec2b87c269d2dda3"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"cfg-if 1.0.0",
"foreign-types",
"libc",
"once_cell",
@ -2086,7 +2179,7 @@ version = "0.9.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.2.16",
"smallvec",
@ -2658,7 +2751,7 @@ version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
]
@ -2669,11 +2762,21 @@ version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"cpufeatures",
"digest",
]
[[package]]
name = "sha3"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60"
dependencies = [
"digest",
"keccak",
]
[[package]]
name = "shadow-rs"
version = "0.17.1"
@ -2836,7 +2939,7 @@ version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"fastrand",
"redox_syscall 0.3.5",
"rustix",
@ -2889,7 +2992,7 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"once_cell",
]
@ -3022,7 +3125,7 @@ version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"log",
"pin-project-lite",
"tracing-attributes",
@ -3187,36 +3290,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "upend"
version = "0.0.71"
name = "upend-base"
version = "0.0.1"
dependencies = [
"anyhow",
"chrono",
"cid",
"diesel",
"diesel_migrations",
"filebuffer",
"lazy_static",
"lexpr",
"libsqlite3-sys",
"log",
"lru",
"multibase",
"multihash",
"nonempty",
"num_cpus",
"once_cell",
"rayon",
"regex",
"serde",
"serde_json",
"shadow-rs",
"tempfile",
"tracing",
"tracing-subscriber",
"tree_magic_mini",
"url",
"uuid",
"walkdir",
"wasm-bindgen",
]
[[package]]
@ -3269,7 +3359,8 @@ dependencies = [
"tracing",
"tracing-subscriber",
"tree_magic_mini",
"upend",
"upend-base",
"upend-db",
"url",
"uuid",
"walkdir",
@ -3278,6 +3369,49 @@ dependencies = [
"webpage",
]
[[package]]
name = "upend-db"
version = "0.0.1"
dependencies = [
"anyhow",
"chrono",
"diesel",
"diesel_migrations",
"filebuffer",
"lazy_static",
"lexpr",
"libsqlite3-sys",
"log",
"lru",
"multibase",
"multihash",
"nonempty",
"num_cpus",
"once_cell",
"rayon",
"regex",
"serde",
"serde_json",
"shadow-rs",
"tempfile",
"tracing",
"tracing-subscriber",
"tree_magic_mini",
"upend-base",
"url",
"uuid",
"walkdir",
]
[[package]]
name = "upend_wasm"
version = "0.1.0"
dependencies = [
"upend-base",
"wasm-bindgen",
"wee_alloc",
]
[[package]]
name = "url"
version = "2.3.1"
@ -3321,11 +3455,13 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "0.8.2"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be"
dependencies = [
"getrandom 0.2.9",
"serde",
"wasm-bindgen",
]
[[package]]
@ -3386,26 +3522,26 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.84"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.84"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.15",
"wasm-bindgen-shared",
]
@ -3415,7 +3551,7 @@ version = "0.4.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454"
dependencies = [
"cfg-if",
"cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
@ -3423,9 +3559,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.84"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@ -3433,22 +3569,22 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.84"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.15",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.84"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "web-sys"
@ -3492,6 +3628,18 @@ dependencies = [
"serde_json",
]
[[package]]
name = "wee_alloc"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb3b5a6b2bb17cb6ad44a2e68a43e8d2722c997da10e928665c72ec6c0a0b8e"
dependencies = [
"cfg-if 0.1.10",
"libc",
"memory_units",
"winapi",
]
[[package]]
name = "weezl"
version = "0.1.7"


@ -1,64 +1,2 @@
[package]
name = "upend"
description = "A user-oriented all-purpose graph database."
version = "0.0.71"
homepage = "https://upend.dev/"
repository = "https://git.thm.place/thm/upend"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
build = "build.rs"
[workspace]
members = ["cli"]
[dependencies]
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
rayon = "1.4.0"
num_cpus = "1.13"
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
"sqlite",
"r2d2",
"chrono",
"serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"
regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "0.8", features = ["v4"] }
url = { version = "2", features = ["serde"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"
tree_magic_mini = {version = "3.0.2", features=["with-gpl-data"] }
nonempty = "0.6.0"
shadow-rs = "0.17"
[build-dependencies]
shadow-rs = "0.17"
members = ["base", "db", "cli", "tools/upend_wasm"]


@ -36,8 +36,8 @@ tasks:
lint:backend:
sources:
- ./**/Cargo.toml
- ./**/*.rs
- ./**/Cargo.toml
- ./**/*.rs
cmds:
- cargo clippy --workspace
@ -64,8 +64,8 @@ tasks:
test:backend:
sources:
- ./**/Cargo.toml
- ./**/*.rs
- ./**/Cargo.toml
- ./**/*.rs
cmds:
- cargo nextest run --workspace
@ -90,14 +90,24 @@ tasks:
- yarn build
build:jslib:
deps: [build:wasmlib]
dir: tools/upend_js
sources:
- ./*.ts
- ./package.lock
cmds:
- yarn add ../upend_wasm/pkg
- yarn install --immutable
- yarn build
build:wasmlib:
dir: tools/upend_wasm
sources:
- ./**/*.rs
- Cargo.toml
cmds:
- wasm-pack build --target web
setup:frontend:
deps: [build:jslib]
dir: webui
@ -140,7 +150,6 @@ tasks:
cmds:
- task: build:jslib
- cd webui && yarn add ../tools/upend_js && cd ..
- cd webext && yarn add ../tools/upend_js && cd ..
dev:
deps: [dev:backend, dev:frontend]

base/Cargo.toml Normal file

@ -0,0 +1,43 @@
[package]
name = "upend-base"
version = "0.0.1"
homepage = "https://upend.dev/"
repository = "https://git.thm.place/thm/upend"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
[features]
diesel = []
wasm = ["wasm-bindgen", "uuid/js"]
[dependencies]
log = "0.4"
lazy_static = "1.4.0"
diesel = { version = "1.4", features = ["sqlite"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
lexpr = "0.2.6"
cid = { version = "0.10.1", features = ["serde"] }
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "1.4", features = ["v4", "serde"] }
url = { version = "2", features = ["serde"] }
nonempty = "0.6.0"
wasm-bindgen = { version = "0.2", optional = true }
shadow-rs = "0.17"
[build-dependencies]
shadow-rs = "0.17"

base/build.rs Normal file

@ -0,0 +1,3 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
}

base/src/addressing.rs Normal file

@ -0,0 +1,287 @@
use crate::error::{AddressComponentsDecodeError, UpEndError};
use crate::hash::{
b58_decode, b58_encode, AsMultihash, AsMultihashError, LargeMultihash, UpMultihash, IDENTITY,
};
use serde::de::Visitor;
use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryFrom;
use std::fmt;
use std::hash::Hash;
use std::str::FromStr;
use url::Url;
use uuid::Uuid;
#[cfg(feature = "wasm")]
use wasm_bindgen::prelude::*;
#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Address {
Hash(UpMultihash),
Uuid(Uuid),
Attribute(String),
Url(Url),
}
#[cfg_attr(feature = "wasm", wasm_bindgen(getter_with_clone, inspectable))]
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct AddressComponents {
pub t: String,
pub c: Option<String>,
}
/// multicodec RAW code
const RAW: u64 = 0x55;
/// multicodec UpEnd UUID code (reserved area)
const UP_UUID: u64 = 0x300001;
/// multicodec UpEnd Attribute code (reserved area)
const UP_ATTRIBUTE: u64 = 0x300000;
/// multicodec URL code (technically `http`)
const UP_URL: u64 = 0x01e0;
pub type UpCid = cid::CidGeneric<256>;
impl Address {
pub fn encode(&self) -> Result<Vec<u8>, UpEndError> {
let (codec, hash) = match self {
Self::Hash(hash) => (RAW, LargeMultihash::from(hash)),
Self::Uuid(uuid) => (
UP_UUID,
LargeMultihash::wrap(IDENTITY, uuid.as_bytes()).map_err(UpEndError::from_any)?,
),
Self::Attribute(attribute) => (
UP_ATTRIBUTE,
LargeMultihash::wrap(IDENTITY, attribute.as_bytes())
.map_err(UpEndError::from_any)?,
),
Self::Url(url) => (
UP_URL,
LargeMultihash::wrap(IDENTITY, url.to_string().as_bytes())
.map_err(UpEndError::from_any)?,
),
};
let cid = UpCid::new_v1(codec, hash);
Ok(cid.to_bytes())
}
pub fn decode(buffer: &[u8]) -> Result<Self, UpEndError> {
let cid = UpCid::try_from(buffer).map_err(|err| {
UpEndError::AddressParseError(format!("Error decoding address: {}", err))
})?;
if cid.codec() == RAW {
return Ok(Address::Hash(UpMultihash::from(*cid.hash())));
}
let hash = cid.hash();
if hash.code() != IDENTITY {
return Err(UpEndError::AddressParseError(format!(
"Unexpected multihash code \"{}\" for codec \"{}\"",
hash.code(),
cid.codec()
)));
}
let digest = cid.hash().digest().to_vec();
match cid.codec() {
UP_UUID => Ok(Address::Uuid(
Uuid::from_slice(digest.as_slice()).map_err(UpEndError::from_any)?,
)),
UP_ATTRIBUTE => Ok(Address::Attribute(
String::from_utf8(digest).map_err(UpEndError::from_any)?,
)),
UP_URL => Ok(Address::Url(
Url::parse(&String::from_utf8(digest).map_err(UpEndError::from_any)?)
.map_err(UpEndError::from_any)?,
)),
_ => Err(UpEndError::AddressParseError(
"Error decoding address: Unknown codec.".to_string(),
)),
}
}
pub fn as_components(&self) -> AddressComponents {
// TODO: make this automatically derive from `Address` definition
let (entity_type, entity_content) = match self {
Address::Hash(uphash) => ("Hash", Some(b58_encode(uphash.to_bytes()))),
Address::Uuid(uuid) => ("Uuid", Some(uuid.to_string())),
Address::Attribute(attribute) => ("Attribute", Some(attribute.clone())),
Address::Url(url) => ("Url", Some(url.to_string())),
};
AddressComponents {
t: entity_type.to_string(),
c: entity_content,
}
}
pub fn from_components(components: AddressComponents) -> Result<Self, UpEndError> {
// TODO: make this automatically derive from `Address` definition
let address = match components {
AddressComponents { t, c } if t == "Attribute" => {
Address::Attribute(c.ok_or(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::MissingValue,
))?)
}
AddressComponents { t, c } if t == "Url" => Address::Url(if let Some(string) = c {
Url::parse(&string).map_err(|e| {
UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::UrlDecodeError(e.to_string()),
)
})?
} else {
Err(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::MissingValue,
))?
}),
AddressComponents { t, c } if t == "Uuid" => match c {
Some(c) => c.parse()?,
None => Address::Uuid(Uuid::new_v4()),
},
AddressComponents { t, .. } => Err(UpEndError::AddressComponentsDecodeError(
AddressComponentsDecodeError::UnknownType(t),
))?,
};
Ok(address)
}
}
impl Serialize for Address {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(b58_encode(self.encode().map_err(ser::Error::custom)?).as_str())
}
}
struct AddressVisitor;
impl<'de> Visitor<'de> for AddressVisitor {
type Value = Address;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))?;
Address::decode(bytes.as_ref())
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))
}
}
impl<'de> Deserialize<'de> for Address {
fn deserialize<D>(deserializer: D) -> Result<Address, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(AddressVisitor)
}
}
impl FromStr for Address {
type Err = UpEndError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Address::decode(
b58_decode(s)
.map_err(|e| {
UpEndError::HashDecodeError(format!("Error deserializing address: {}", e))
})?
.as_ref(),
)
}
}
impl std::fmt::Display for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
b58_encode(self.encode().map_err(|_| std::fmt::Error)?)
)
}
}
impl std::fmt::Debug for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Address<{}>: {}",
match self {
Address::Hash(_) => "Hash",
Address::Uuid(_) => "UUID",
Address::Attribute(_) => "Attribute",
Address::Url(_) => "URL",
},
self
)
}
}
pub trait Addressable: AsMultihash {
fn address(&self) -> Result<Address, AsMultihashError> {
Ok(Address::Hash(self.as_multihash()?))
}
}
#[cfg(test)]
mod tests {
use url::Url;
use uuid::Uuid;
use crate::addressing::{Address, IDENTITY};
use crate::hash::{LargeMultihash, UpMultihash};
use super::UpEndError;
#[test]
fn test_hash_codec() -> Result<(), UpEndError> {
let addr = Address::Hash(UpMultihash::from(
LargeMultihash::wrap(IDENTITY, &[1, 2, 3, 4, 5]).unwrap(),
));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_uuid_codec() -> Result<(), UpEndError> {
let addr = Address::Uuid(Uuid::new_v4());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_attribute_codec() -> Result<(), UpEndError> {
let addr = Address::Attribute(String::from("ATTRIBUTE"));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_url_codec() -> Result<(), UpEndError> {
let addr = Address::Url(Url::parse("https://upend.dev/an/url/that/is/particularly/long/because/multihash/used/to/have/a/small/limit").unwrap());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
}
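A quick sketch of how the new Address API above is meant to be used (a sketch assuming the upend-base crate layout introduced in this commit; the snippet is not part of the diff): an address round-trips through its Base58 string form via the Display and FromStr impls shown above.

use upend_base::addressing::Address;
use upend_base::error::UpEndError;

fn main() -> Result<(), UpEndError> {
    // Attribute addresses wrap the attribute name with the identity multihash.
    let addr = Address::Attribute("LBL".to_string());
    let encoded = addr.to_string();           // Base58-encoded CIDv1 bytes (Display)
    let decoded: Address = encoded.parse()?;  // FromStr decodes the same bytes
    assert_eq!(addr, decoded);
    Ok(())
}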

base/src/constants.rs Normal file

@ -0,0 +1,31 @@
use crate::addressing::Address;
use crate::entry::InvariantEntry;
use crate::hash::{LargeMultihash, UpMultihash};
/// Attribute denoting (hierarchical) relation, in the "upwards" direction. For example, a file `IN` a group, an image `IN` photos, etc.
pub const ATTR_IN: &str = "IN";
/// Attribute denoting that an entry belongs to the set relating to a given (hierarchical) relation.
/// For example, a data blob may have a label entry, and to qualify that label within the context of belonging to a given hierarchical group, that label entry and the hierarchical entry will be linked with `BY`.
pub const ATTR_BY: &str = "BY";
/// Attribute denoting that an attribute belongs to a given "tagging" entity. If an entity belongs to (`IN`) a "tagging" entity, it is expected to have attributes that are `OF` that entity.
pub const ATTR_OF: &str = "OF";
/// Attribute denoting a human readable label.
pub const ATTR_LABEL: &str = "LBL";
/// Attribute denoting the date & time an entity was noted in the database.
/// (TODO: This info can be trivially derived from existing entry timestamps, while at the same time the "Introduction problem" is still open.)
pub const ATTR_ADDED: &str = "ADDED";
/// Attribute for cross-vault unambiguous referencing of non-hashable (e.g. UUID) entities.
pub const ATTR_KEY: &str = "KEY";
lazy_static! {
pub static ref HIER_ROOT_INVARIANT: InvariantEntry = InvariantEntry {
attribute: String::from(ATTR_KEY),
value: "HIER_ROOT".into(),
};
pub static ref HIER_ROOT_ADDR: Address = HIER_ROOT_INVARIANT.entity().unwrap();
pub static ref TYPE_HASH_ADDRESS: Address =
Address::Hash(UpMultihash::from(LargeMultihash::default()));
pub static ref TYPE_UUID_ADDRESS: Address = Address::Uuid(uuid::Uuid::nil());
pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute("".to_string());
pub static ref TYPE_URL_ADDRESS: Address = Address::Url(url::Url::parse("up:").unwrap());
}
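For orientation, a hedged sketch of how these constants combine with the Entry type from base/src/entry.rs (field names follow the extractor code later in this commit; the function name and the provenance string are purely illustrative, and chrono is assumed as a dependency):

use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::entry::{Entry, EntryValue};

// Attach a human-readable label to an entity.
fn label_entry(entity: Address, label: &str) -> Entry {
    Entry {
        entity,
        attribute: ATTR_LABEL.into(),
        value: EntryValue::String(label.to_string()),
        provenance: "EXAMPLE".to_string(),
        timestamp: chrono::Utc::now().naive_utc(),
    }
}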


@ -1,7 +1,6 @@
use crate::addressing::{Address, Addressable};
use crate::database::inner::models;
use crate::util::hash::{b58_decode, hash, Hash, Hashable};
use anyhow::{anyhow, Result};
use crate::error::UpEndError;
use crate::hash::{b58_decode, sha256hash, AsMultihash, AsMultihashError, UpMultihash};
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
@ -36,89 +35,8 @@ pub enum EntryValue {
Invalid,
}
impl TryFrom<&models::Entry> for Entry {
type Error = anyhow::Error;
fn try_from(e: &models::Entry) -> Result<Self, Self::Error> {
if let Some(value_str) = &e.value_str {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: value_str.parse()?,
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
} else if let Some(value_num) = e.value_num {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: EntryValue::Number(value_num),
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
} else {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: EntryValue::Number(f64::NAN),
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
}
}
}
impl TryFrom<&Entry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &Entry) -> Result<Self, Self::Error> {
if e.attribute.is_empty() {
return Err(anyhow!("Attribute cannot be empty."));
}
let base_entry = models::Entry {
identity: e.address()?.encode()?,
entity_searchable: match &e.entity {
Address::Attribute(attr) => Some(attr.clone()),
Address::Url(url) => Some(url.to_string()),
_ => None,
},
entity: e.entity.encode()?,
attribute: e.attribute.clone(),
value_str: None,
value_num: None,
immutable: false,
provenance: e.provenance.clone(),
timestamp: e.timestamp,
};
match e.value {
EntryValue::Number(n) => Ok(models::Entry {
value_str: None,
value_num: Some(n),
..base_entry
}),
_ => Ok(models::Entry {
value_str: Some(e.value.to_string()?),
value_num: None,
..base_entry
}),
}
}
}
impl TryFrom<&ImmutableEntry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &ImmutableEntry) -> Result<Self, Self::Error> {
Ok(models::Entry {
immutable: true,
..models::Entry::try_from(&e.0)?
})
}
}
impl TryFrom<&InvariantEntry> for Entry {
type Error = anyhow::Error;
type Error = UpEndError;
fn try_from(invariant: &InvariantEntry) -> Result<Self, Self::Error> {
Ok(Entry {
@ -132,11 +50,17 @@ impl TryFrom<&InvariantEntry> for Entry {
}
impl InvariantEntry {
pub fn entity(&self) -> Result<Address> {
pub fn entity(&self) -> Result<Address, UpEndError> {
let mut entity = Cursor::new(vec![0u8; 0]);
entity.write_all(self.attribute.as_bytes())?;
entity.write_all(self.value.to_string()?.as_bytes())?;
Ok(Address::Hash(hash(entity.into_inner())))
entity
.write_all(self.attribute.as_bytes())
.map_err(UpEndError::from_any)?;
entity
.write_all(self.value.to_string()?.as_bytes())
.map_err(UpEndError::from_any)?;
Ok(Address::Hash(
sha256hash(entity.into_inner()).map_err(UpEndError::from_any)?,
))
}
}
@ -146,19 +70,31 @@ impl std::fmt::Display for Entry {
}
}
impl Hashable for Entry {
fn hash(self: &Entry) -> Result<Hash> {
impl AsMultihash for Entry {
fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError> {
let mut result = Cursor::new(vec![0u8; 0]);
result.write_all(self.entity.encode()?.as_slice())?;
result.write_all(
self.entity
.encode()
.map_err(|e| AsMultihashError(e.to_string()))?
.as_slice(),
)?;
result.write_all(self.attribute.as_bytes())?;
result.write_all(self.value.to_string()?.as_bytes())?;
Ok(hash(result.get_ref()))
result.write_all(
self.value
.to_string()
.map_err(|e| AsMultihashError(e.to_string()))?
.as_bytes(),
)?;
sha256hash(result.get_ref())
}
}
impl Hashable for InvariantEntry {
fn hash(&self) -> Result<Hash> {
Entry::try_from(self)?.hash()
impl AsMultihash for InvariantEntry {
fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError> {
Entry::try_from(self)
.map_err(|e| AsMultihashError(e.to_string()))?
.as_multihash()
}
}
@ -166,13 +102,13 @@ impl Addressable for Entry {}
impl Addressable for InvariantEntry {}
impl EntryValue {
pub fn to_string(&self) -> Result<String> {
pub fn to_string(&self) -> Result<String, UpEndError> {
let (type_char, content) = match self {
EntryValue::String(value) => ('S', value.to_owned()),
EntryValue::Number(n) => ('N', n.to_string()),
EntryValue::Address(address) => ('O', address.to_string()),
EntryValue::Null => ('X', "".to_string()),
EntryValue::Invalid => return Err(anyhow!("Cannot serialize invalid value.")),
EntryValue::Invalid => return Err(UpEndError::CannotSerializeInvalid),
};
Ok(format!("{}{}", type_char, content))
@ -273,33 +209,32 @@ impl From<Address> for EntryValue {
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
#[test]
fn test_value_from_to_string() -> Result<()> {
fn test_value_from_to_string() -> Result<(), UpEndError> {
let entry = EntryValue::String("hello".to_string());
let encoded = entry.to_string()?;
let decoded = encoded.parse::<EntryValue>()?;
let decoded = encoded.parse::<EntryValue>().unwrap();
assert_eq!(entry, decoded);
let entry = EntryValue::Number(1337.93);
let encoded = entry.to_string()?;
let decoded = encoded.parse::<EntryValue>()?;
let decoded = encoded.parse::<EntryValue>().unwrap();
assert_eq!(entry, decoded);
let entry = EntryValue::Address(Address::Url(Url::parse("https://upend.dev").unwrap()));
let encoded = entry.to_string()?;
let decoded = encoded.parse::<EntryValue>()?;
let decoded = encoded.parse::<EntryValue>().unwrap();
assert_eq!(entry, decoded);
let entry = EntryValue::String("".to_string());
let encoded = entry.to_string()?;
let decoded = encoded.parse::<EntryValue>()?;
let decoded = encoded.parse::<EntryValue>().unwrap();
assert_eq!(entry, decoded);
let entry = EntryValue::Null;
let encoded = entry.to_string()?;
let decoded = encoded.parse::<EntryValue>()?;
let decoded = encoded.parse::<EntryValue>().unwrap();
assert_eq!(entry, decoded);
Ok(())

base/src/error.rs Normal file

@ -0,0 +1,49 @@
#[derive(Debug, Clone)]
pub enum UpEndError {
HashDecodeError(String),
AddressParseError(String),
AddressComponentsDecodeError(AddressComponentsDecodeError),
CannotSerializeInvalid,
QueryParseError(String),
Other(String),
}
#[derive(Debug, Clone)]
pub enum AddressComponentsDecodeError {
UnknownType(String),
UrlDecodeError(String),
MissingValue,
}
impl std::fmt::Display for UpEndError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
UpEndError::HashDecodeError(err) => format!("Could not decode hash: {err}"),
UpEndError::AddressParseError(err) => format!("Error parsing address: {err}"),
UpEndError::AddressComponentsDecodeError(cde) => match cde {
AddressComponentsDecodeError::UnknownType(t) =>
format!("Unknown type: \"{t}\""),
AddressComponentsDecodeError::MissingValue =>
String::from("Address type requires a value."),
AddressComponentsDecodeError::UrlDecodeError(err) =>
format!("Couldn't decode URL: {err}"),
},
UpEndError::CannotSerializeInvalid =>
String::from("Invalid EntryValues cannot be serialized."),
UpEndError::QueryParseError(err) => format!("Error parsing query: {err}"),
UpEndError::Other(err) => format!("Unknown error: {err}"),
}
)
}
}
impl std::error::Error for UpEndError {}
impl UpEndError {
pub fn from_any<E: std::fmt::Display>(error: E) -> Self {
UpEndError::Other(error.to_string())
}
}
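The from_any helper is the catch-all conversion the rest of this commit leans on; a minimal sketch of its intended use (the parse_port function is hypothetical):

use upend_base::error::UpEndError;

// Any error type implementing Display can be folded into UpEndError::Other.
fn parse_port(s: &str) -> Result<u16, UpEndError> {
    s.parse::<u16>().map_err(UpEndError::from_any)
}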

base/src/hash.rs Normal file

@ -0,0 +1,166 @@
use std::fmt;
use crate::{addressing::Address, error::UpEndError};
use multihash::Hasher;
use serde::{
de::{self, Visitor},
ser, Deserialize, Deserializer, Serialize, Serializer,
};
/// multihash SHA2-256 code
pub const SHA2_256: u64 = 0x12;
/// multihash identity code
pub const IDENTITY: u64 = 0x00;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "diesel", derive(diesel::FromSqlRow))]
pub struct UpMultihash(LargeMultihash);
impl UpMultihash {
pub fn to_bytes(&self) -> Vec<u8> {
self.0.to_bytes()
}
pub fn from_bytes<T: AsRef<[u8]>>(input: T) -> Result<Self, UpEndError> {
Ok(UpMultihash(
LargeMultihash::from_bytes(input.as_ref())
.map_err(|e| UpEndError::HashDecodeError(e.to_string()))?,
))
}
pub fn from_sha256<T: AsRef<[u8]>>(input: T) -> Result<Self, UpEndError> {
Ok(UpMultihash(
LargeMultihash::wrap(SHA2_256, input.as_ref()).map_err(UpEndError::from_any)?,
))
}
}
pub(crate) type LargeMultihash = multihash::MultihashGeneric<256>;
impl From<LargeMultihash> for UpMultihash {
fn from(value: LargeMultihash) -> Self {
UpMultihash(value)
}
}
impl From<&UpMultihash> for LargeMultihash {
fn from(value: &UpMultihash) -> Self {
value.0
}
}
impl Serialize for UpMultihash {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(
b58_encode(
Address::Hash(self.clone())
.encode()
.map_err(ser::Error::custom)?,
)
.as_str(),
)
}
}
struct UpMultihashVisitor;
impl<'de> Visitor<'de> for UpMultihashVisitor {
type Value = UpMultihash;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing UpMultihash: {}", e)))?;
Ok(UpMultihash(LargeMultihash::from_bytes(&bytes).map_err(
|e| de::Error::custom(format!("Error parsing UpMultihash: {}", e)),
)?))
}
}
impl<'de> Deserialize<'de> for UpMultihash {
fn deserialize<D>(deserializer: D) -> Result<UpMultihash, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(UpMultihashVisitor)
}
}
#[cfg(feature = "diesel")]
impl diesel::types::FromSql<diesel::sql_types::Binary, diesel::sqlite::Sqlite> for UpMultihash {
fn from_sql(
bytes: Option<&<diesel::sqlite::Sqlite as diesel::backend::Backend>::RawValue>,
) -> diesel::deserialize::Result<Self> {
Ok(UpMultihash(LargeMultihash::from_bytes(
diesel::not_none!(bytes).read_blob(),
)?))
}
}
pub fn sha256hash<T: AsRef<[u8]>>(input: T) -> Result<UpMultihash, AsMultihashError> {
let mut hasher = multihash::Sha2_256::default();
hasher.update(input.as_ref());
Ok(UpMultihash(
LargeMultihash::wrap(SHA2_256, hasher.finalize())
.map_err(|e| AsMultihashError(e.to_string()))?,
))
}
pub fn b58_encode<T: AsRef<[u8]>>(vec: T) -> String {
multibase::encode(multibase::Base::Base58Btc, vec.as_ref())
}
pub fn b58_decode<T: AsRef<str>>(input: T) -> Result<Vec<u8>, UpEndError> {
let input = input.as_ref();
let (_base, data) =
multibase::decode(input).map_err(|err| UpEndError::HashDecodeError(err.to_string()))?;
Ok(data)
}
#[derive(Debug, Clone)]
pub struct AsMultihashError(pub String);
impl std::fmt::Display for AsMultihashError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::error::Error for AsMultihashError {}
impl From<std::io::Error> for AsMultihashError {
fn from(err: std::io::Error) -> Self {
AsMultihashError(err.to_string())
}
}
pub trait AsMultihash {
fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError>;
}
#[cfg(test)]
mod tests {
use crate::hash::{b58_decode, b58_encode};
#[test]
fn test_encode_decode() {
let content = "Hello, World!".as_bytes();
let encoded = b58_encode(content);
let decoded = b58_decode(encoded);
assert!(decoded.is_ok());
assert_eq!(content, decoded.unwrap());
}
}
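To illustrate how AsMultihash and the Addressable trait from addressing.rs fit together, a sketch assuming the upend-base crate as laid out in this commit (the Blob type is made up for the example):

use upend_base::addressing::{Address, Addressable};
use upend_base::hash::{sha256hash, AsMultihash, AsMultihashError, UpMultihash};

struct Blob(Vec<u8>);

impl AsMultihash for Blob {
    fn as_multihash(&self) -> Result<UpMultihash, AsMultihashError> {
        sha256hash(&self.0)
    }
}

// Addressable only requires AsMultihash; address() wraps the hash in Address::Hash.
impl Addressable for Blob {}

fn main() -> Result<(), AsMultihashError> {
    let addr: Address = Blob(b"hello".to_vec()).address()?;
    println!("{}", addr); // prints the Base58-encoded CIDv1 form
    Ok(())
}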


@ -1,5 +1,6 @@
use crate::addressing::Address;
use crate::database::entry::EntryValue;
use crate::entry::EntryValue;
use crate::error::UpEndError;
use nonempty::NonEmpty;
use std::borrow::Borrow;
use std::convert::TryFrom;
@ -33,7 +34,7 @@ pub struct PatternQuery {
}
impl TryFrom<lexpr::Value> for Address {
type Error = QueryParseError;
type Error = UpEndError;
fn try_from(value: lexpr::Value) -> Result<Self, Self::Error> {
match value {
@ -41,48 +42,46 @@ impl TryFrom<lexpr::Value> for Address {
if let Some(address_str) = str.strip_prefix('@') {
address_str
.parse()
.map_err(|e: anyhow::Error| QueryParseError(e.to_string()))
.map_err(|e: UpEndError| UpEndError::QueryParseError(e.to_string()))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Incorrect address format (use @address).".into(),
))
}
}
_ => Err(QueryParseError(
_ => Err(UpEndError::QueryParseError(
"Incorrect type for address (use @address).".into(),
)),
}
}
}
impl TryFrom<lexpr::Value> for Attribute {
type Error = QueryParseError;
impl TryFrom<lexpr::Value> for EntryValue {
type Error = UpEndError;
fn try_from(value: lexpr::Value) -> Result<Self, Self::Error> {
match value {
lexpr::Value::String(str) => Ok(Attribute(str.to_string())),
_ => Err(QueryParseError(
"Can only convert to attribute from string.".into(),
lexpr::Value::Number(num) => Ok(EntryValue::Number(num.as_f64().ok_or_else(|| {
UpEndError::QueryParseError(format!("Error processing number ({num:?})."))
})?)),
lexpr::Value::Char(chr) => Ok(EntryValue::String(chr.to_string())),
lexpr::Value::String(str) => Ok(EntryValue::String(str.to_string())),
lexpr::Value::Symbol(_) => Ok(EntryValue::Address(Address::try_from(value.clone())?)),
_ => Err(UpEndError::QueryParseError(
"Value can only be a string, number or address.".into(),
)),
}
}
}
impl TryFrom<lexpr::Value> for EntryValue {
type Error = QueryParseError;
impl TryFrom<lexpr::Value> for Attribute {
type Error = UpEndError;
fn try_from(value: lexpr::Value) -> Result<Self, Self::Error> {
match value {
lexpr::Value::Number(num) => {
Ok(EntryValue::Number(num.as_f64().ok_or_else(|| {
QueryParseError(format!("Error processing number ({num:?})."))
})?))
}
lexpr::Value::Char(chr) => Ok(EntryValue::String(chr.to_string())),
lexpr::Value::String(str) => Ok(EntryValue::String(str.to_string())),
lexpr::Value::Symbol(_) => Ok(EntryValue::Address(Address::try_from(value.clone())?)),
_ => Err(QueryParseError(
"Value can only be a string, number or address.".into(),
lexpr::Value::String(str) => Ok(Attribute(str.to_string())),
_ => Err(UpEndError::QueryParseError(
"Can only convert to attribute from string.".into(),
)),
}
}
@ -115,26 +114,15 @@ pub enum Query {
MultiQuery(MultiQuery),
}
#[derive(Debug, Clone)]
pub struct QueryParseError(String);
impl std::fmt::Display for QueryParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::error::Error for QueryParseError {}
impl TryFrom<&lexpr::Value> for Query {
type Error = QueryParseError;
type Error = UpEndError;
fn try_from(expression: &lexpr::Value) -> Result<Self, Self::Error> {
fn parse_component<T: TryFrom<lexpr::Value>>(
value: &lexpr::Value,
) -> Result<QueryComponent<T>, QueryParseError>
) -> Result<QueryComponent<T>, UpEndError>
where
QueryParseError: From<<T as TryFrom<lexpr::Value>>::Error>,
UpEndError: From<<T as TryFrom<lexpr::Value>>::Error>,
{
match value {
lexpr::Value::Cons(cons) => {
@ -151,7 +139,7 @@ impl TryFrom<&lexpr::Value> for Query {
Ok(QueryComponent::In(values?))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: Inner value cannot be empty.".into(),
))
}
@ -164,21 +152,21 @@ impl TryFrom<&lexpr::Value> for Query {
if let lexpr::Value::String(str) = value {
Ok(QueryComponent::Contains(str.into_string()))
} else {
Err(QueryParseError("Malformed expression: 'Contains' argument must be a string.".into()))
Err(UpEndError::QueryParseError("Malformed expression: 'Contains' argument must be a string.".into()))
}
}
_ => Err(QueryParseError(
_ => Err(UpEndError::QueryParseError(
"Malformed expression: 'Contains' requires a single argument.".into()
)),
}
}
_ => Err(QueryParseError(format!(
_ => Err(UpEndError::QueryParseError(format!(
"Malformed expression: Unknown symbol {}",
symbol
))),
}
} else {
Err(QueryParseError(format!(
Err(UpEndError::QueryParseError(format!(
"Malformed expression: Inner value '{:?}' is not a symbol.",
value
)))
@ -211,7 +199,7 @@ impl TryFrom<&lexpr::Value> for Query {
value,
})))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: Wrong number of arguments to 'matches'."
.into(),
))
@ -225,13 +213,13 @@ impl TryFrom<&lexpr::Value> for Query {
type_name_str.to_string(),
)))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: Type must be specified as a string."
.into(),
))
}
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: Wrong number of arguments to 'type'.".into(),
))
}
@ -242,7 +230,7 @@ impl TryFrom<&lexpr::Value> for Query {
let values = sub_expressions
.iter()
.map(|value| Ok(Box::new(Query::try_from(value)?)))
.collect::<Result<Vec<Box<Query>>, QueryParseError>>()?;
.collect::<Result<Vec<Box<Query>>, UpEndError>>()?;
if let Some(queries) = NonEmpty::from_vec(values) {
Ok(Query::MultiQuery(MultiQuery {
@ -254,7 +242,7 @@ impl TryFrom<&lexpr::Value> for Query {
queries,
}))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: sub-query list cannot be empty.".into(),
))
}
@ -265,7 +253,7 @@ impl TryFrom<&lexpr::Value> for Query {
let values = sub_expressions
.iter()
.map(|value| Ok(Box::new(Query::try_from(value)?)))
.collect::<Result<Vec<Box<Query>>, QueryParseError>>()?;
.collect::<Result<Vec<Box<Query>>, UpEndError>>()?;
if values.len() == 1 {
Ok(Query::MultiQuery(MultiQuery {
@ -273,46 +261,50 @@ impl TryFrom<&lexpr::Value> for Query {
queries: NonEmpty::from_vec(values).unwrap(),
}))
} else {
Err(QueryParseError(
Err(UpEndError::QueryParseError(
"Malformed expression: NOT takes exactly one parameter.".into(),
))
}
}
_ => Err(QueryParseError(format!(
_ => Err(UpEndError::QueryParseError(format!(
"Malformed expression: Unknown symbol '{}'.",
symbol
))),
}
} else {
Err(QueryParseError(format!(
Err(UpEndError::QueryParseError(format!(
"Malformed expression: Value '{:?}' is not a symbol.",
value
)))
}
} else {
Err(QueryParseError("Malformed expression: Not a list.".into()))
Err(UpEndError::QueryParseError(
"Malformed expression: Not a list.".into(),
))
}
}
}
impl FromStr for Query {
type Err = QueryParseError;
type Err = UpEndError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let sexp = lexpr::from_str_custom(s, lexpr::parse::Options::new())
.map_err(|e| QueryParseError(format!("failed to parse s-expression: {e}")))?;
let sexp = lexpr::from_str_custom(s, lexpr::parse::Options::new()).map_err(|e| {
UpEndError::QueryParseError(format!("failed to parse s-expression: {e}"))
})?;
Query::try_from(&sexp)
}
}
#[cfg(test)]
mod test {
use crate::error::UpEndError;
use super::*;
use anyhow::Result;
use url::Url;
#[test]
fn test_matches() -> Result<()> {
fn test_matches() -> Result<(), UpEndError> {
let query = "(matches ? ? ?)".parse::<Query>()?;
assert_eq!(
query,
@ -359,7 +351,7 @@ mod test {
}
#[test]
fn test_joins() -> Result<()> {
fn test_joins() -> Result<(), UpEndError> {
let query = "(matches ?a ?b ?)".parse::<Query>()?;
assert_eq!(
query,
@ -374,7 +366,7 @@ mod test {
}
#[test]
fn test_in_parse() -> Result<()> {
fn test_in_parse() -> Result<(), UpEndError> {
let query = r#"(matches ? (in "FOO" "BAR") ?)"#.parse::<Query>()?;
assert_eq!(
query,
@ -436,7 +428,7 @@ mod test {
}
#[test]
fn test_contains() -> Result<()> {
fn test_contains() -> Result<(), UpEndError> {
let query = r#"(matches (contains "foo") ? ?)"#.parse::<Query>()?;
assert_eq!(
query,

base/src/lib.rs Normal file

@ -0,0 +1,10 @@
#[macro_use]
extern crate lazy_static;
pub mod addressing;
pub mod common;
pub mod constants;
pub mod entry;
pub mod error;
pub mod hash;
pub mod lang;
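The lang module re-homes the s-expression query parser whose diff appears above; a brief hedged sketch of parsing one of the query forms exercised by its tests (the exact export path of Query within upend_base::lang is an assumption):

use upend_base::error::UpEndError;
use upend_base::lang::Query; // assumed re-export location

fn main() -> Result<(), UpEndError> {
    // Match entries whose attribute is either "FOO" or "BAR", any entity, any value.
    let query: Query = r#"(matches ? (in "FOO" "BAR") ?)"#.parse()?;
    println!("{:?}", query);
    Ok(())
}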


@ -5,7 +5,8 @@ version = "0.1.0"
edition = "2021"
[dependencies]
upend = { path = "../" }
upend-base = { path = "../base" }
upend-db = { path = "../db" }
clap = { version = "4.2.4", features = ["derive", "env", "color"] }
log = "0.4"
@ -54,7 +55,7 @@ multihash = { version = "*", default-features = false, features = [
"sha2",
"identity",
] }
uuid = { version = "0.8", features = ["v4"] }
uuid = { version = "1.4", features = ["v4"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
@ -63,7 +64,7 @@ walkdir = "2"
rand = "0.8"
mime = "^0.3.16"
tree_magic_mini = {version = "3.0.2", features=["with-gpl-data"] }
tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }
opener = { version = "^0.5.0", optional = true }
is_executable = { version = "1.0.1", optional = true }
@ -74,7 +75,7 @@ nonempty = "0.6.0"
image = { version = "0.23.14", optional = true }
webp = { version = "0.2.0", optional = true }
webpage = { version = "1.5.0", optional = true, default-features = false}
webpage = { version = "1.5.0", optional = true, default-features = false }
id3 = { version = "1.0.2", optional = true }
kamadak-exif = { version = "0.5.4", optional = true }
@ -89,7 +90,15 @@ signal-hook = "0.3.15"
shadow-rs = "0.17"
[features]
default = ["desktop", "previews", "previews-image", "extractors-web", "extractors-audio", "extractors-photo", "extractors-media"]
default = [
"desktop",
"previews",
"previews-image",
"extractors-web",
"extractors-audio",
"extractors-photo",
"extractors-media",
]
desktop = ["webbrowser", "opener", "is_executable"]
previews = []
previews-image = ["image", "webp", "kamadak-exif"]

cli/build.rs Normal file

@ -0,0 +1,3 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
}


@ -1,190 +0,0 @@
use crate::util::hash::{b58_decode, b58_encode, Hash, Hashable};
use anyhow::{anyhow, Result};
use multihash::{Code, Multihash, MultihashDigest};
use serde::de::Visitor;
use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::str::FromStr;
use thiserror::private::DisplayAsDisplay;
use uuid::Uuid;
#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Address {
Hash(Hash),
Uuid(Uuid),
Attribute(String),
Url(String),
}
// multihash SHA2-256
const SHA2_256: u64 = 0x12;
// multihash identity
const IDENTITY: u64 = 0x00;
impl Address {
pub fn encode(&self) -> Result<Vec<u8>> {
let hash = match self {
Self::Hash(hash) => Multihash::wrap(SHA2_256, &hash.0).map_err(|err| anyhow!(err))?,
Self::Uuid(uuid) => {
Code::Identity.digest(&[vec![b'U'], uuid.as_bytes().to_vec()].concat())
}
Self::Attribute(attribute) => {
Code::Identity.digest(&[&[b'A'], attribute.as_bytes()].concat())
}
Self::Url(url) => Code::Identity.digest(&[&[b'X'], url.as_bytes()].concat()),
};
Ok(hash.to_bytes())
}
pub fn decode(buffer: &[u8]) -> Result<Self> {
let multihash = Multihash::from_bytes(buffer)
.map_err(|err| anyhow!("Error decoding address: {}", err))?;
match multihash.code() {
SHA2_256 => Ok(Self::Hash(Hash(multihash.digest().to_vec()))),
IDENTITY => {
let digest = multihash.digest().to_owned();
let digest_content: Vec<u8> = digest.clone().into_iter().skip(1).collect();
match digest[0] {
b'U' => Ok(Self::Uuid(uuid::Uuid::from_slice(
digest_content.as_slice(),
)?)),
b'A' => Ok(Self::Attribute(String::from_utf8(digest_content)?)),
b'X' => Ok(Self::Url(String::from_utf8(digest_content)?)),
_ => Err(anyhow!("Error decoding address: Unknown identity marker.")),
}
}
_ => Err(anyhow!(
"Error decoding address: Unknown hash function type."
)),
}
}
}
impl Serialize for Address {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(b58_encode(self.encode().map_err(ser::Error::custom)?).as_str())
}
}
struct AddressVisitor;
impl<'de> Visitor<'de> for AddressVisitor {
type Value = Address;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))?;
Address::decode(bytes.as_ref())
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))
}
}
impl<'de> Deserialize<'de> for Address {
fn deserialize<D>(deserializer: D) -> Result<Address, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(AddressVisitor)
}
}
impl FromStr for Address {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Address::decode(
b58_decode(s)
.map_err(|e| anyhow!("Error deserializing address: {}", e))?
.as_ref(),
)
}
}
impl std::fmt::Display for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
b58_encode(self.encode().map_err(|_| std::fmt::Error)?)
)
}
}
impl std::fmt::Debug for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Address<{}>: {}",
match self {
Address::Hash(_) => "Hash",
Address::Uuid(_) => "UUID",
Address::Attribute(_) => "Attribute",
Address::Url(_) => "URL",
},
self.as_display()
)
}
}
pub trait Addressable: Hashable {
fn address(&self) -> Result<Address> {
Ok(Address::Hash(self.hash()?))
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use uuid::Uuid;
use crate::addressing::Address;
use crate::util::hash::Hash;
#[test]
fn test_hash_codec() -> Result<()> {
let addr = Address::Hash(Hash(vec![1, 2, 3, 4, 5]));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_uuid_codec() -> Result<()> {
let addr = Address::Uuid(Uuid::new_v4());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_attribute_codec() -> Result<()> {
let addr = Address::Attribute(String::from("ATTRIBUTE"));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_url_codec() -> Result<()> {
let addr = Address::Url(String::from("https://upend.dev"));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
}


@ -1,6 +1,8 @@
use anyhow::{anyhow, Result};
use lazy_static::lazy_static;
use shadow_rs::is_debug;
use shadow_rs::{is_debug, shadow};
shadow!(build);
pub fn get_static_dir<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
let cwd = std::env::current_exe()?.parent().unwrap().to_path_buf();
@ -18,16 +20,11 @@ pub fn get_static_dir<S: AsRef<str>>(dir: S) -> Result<std::path::PathBuf> {
}
lazy_static! {
static ref APP_USER_AGENT: String = format!(
"{} / {}",
upend::common::build::PROJECT_NAME,
upend::common::build::PKG_VERSION
);
static ref APP_USER_AGENT: String = format!("upend / {}", build::PKG_VERSION);
pub static ref REQWEST_CLIENT: reqwest::blocking::Client = reqwest::blocking::Client::builder()
.user_agent(APP_USER_AGENT.as_str())
.build()
.unwrap();
pub static ref REQWEST_ASYNC_CLIENT: reqwest::Client = reqwest::Client::builder()
.user_agent(APP_USER_AGENT.as_str())
.build()


@ -1,17 +1,17 @@
use std::sync::Arc;
use super::Extractor;
use upend::{
addressing::Address,
database::{
constants,
entry::{Entry, EntryValue},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
},
util::jobs::{JobContainer, JobState},
};
use anyhow::{anyhow, Result};
use upend_base::{
addressing::Address,
constants::ATTR_LABEL,
entry::{Entry, EntryValue},
};
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
};
pub struct ID3Extractor;
@ -59,7 +59,7 @@ impl Extractor for ID3Extractor {
},
Entry {
entity: Address::Attribute(format!("ID3_{}", frame.id())),
attribute: constants::LABEL_ATTR.into(),
attribute: ATTR_LABEL.into(),
value: format!("ID3: {}", frame.name()).into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
timestamp: chrono::Utc::now().naive_utc(),


@ -1,17 +1,17 @@
use std::{process::Command, sync::Arc};
use super::Extractor;
use upend::{
addressing::Address,
database::{
constants::LABEL_ATTR,
entry::{Entry, EntryValue},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
},
util::jobs::{JobContainer, JobState},
};
use anyhow::{anyhow, Result};
use upend_base::{
addressing::Address,
constants::ATTR_LABEL,
entry::{Entry, EntryValue},
};
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
};
const DURATION_KEY: &str = "MEDIA_DURATION";
@ -87,7 +87,7 @@ impl Extractor for MediaExtractor {
return mime.starts_with("audio") || mime.starts_with("video");
}
}
if e.attribute == LABEL_ATTR {
if e.attribute == ATTR_LABEL {
if let EntryValue::String(label) = &e.value {
let label = label.to_lowercase();
return label.ends_with(".ogg")


@ -1,8 +1,3 @@
use upend::{
addressing::Address,
database::{entry::Entry, stores::UpStore, UpEndConnection, UpEndDatabase},
util::jobs::JobContainer,
};
use anyhow::Result;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::{
@ -10,6 +5,8 @@ use std::{
sync::{Arc, Mutex, RwLock},
};
use tracing::{debug, info, trace};
use upend_base::{addressing::Address, entry::Entry};
use upend_db::{jobs::JobContainer, stores::UpStore, UpEndConnection, UpEndDatabase};
#[cfg(feature = "extractors-web")]
pub mod web;


@ -1,17 +1,17 @@
use std::sync::Arc;
use super::Extractor;
use upend::{
addressing::Address,
database::{
constants,
entry::{Entry, EntryValue},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
},
util::jobs::{JobContainer, JobState},
};
use anyhow::{anyhow, Result};
use upend_base::{
addressing::Address,
constants::ATTR_LABEL,
entry::{Entry, EntryValue},
};
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
};
pub struct ExifExtractor;
@ -74,7 +74,7 @@ impl Extractor for ExifExtractor {
},
Entry {
entity: Address::Attribute(attribute),
attribute: constants::LABEL_ATTR.into(),
attribute: ATTR_LABEL.into(),
value: format!("EXIF: {}", tag_description).into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
timestamp: chrono::Utc::now().naive_utc(),


@ -4,13 +4,14 @@ use super::Extractor;
use crate::common::REQWEST_CLIENT;
use anyhow::anyhow;
use anyhow::Result;
use upend::database::constants::LABEL_ATTR;
use upend::{
addressing::Address,
database::{entry::Entry, stores::UpStore, UpEndConnection},
util::jobs::{JobContainer, JobState},
};
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::entry::Entry;
use upend_db::jobs::JobContainer;
use upend_db::jobs::JobState;
use upend_db::stores::UpStore;
use upend_db::UpEndConnection;
use webpage::HTML;
pub struct WebExtractor;
@ -43,7 +44,7 @@ impl Extractor for WebExtractor {
}),
html.title.map(|html_title| Entry {
entity: address.clone(),
attribute: LABEL_ATTR.to_string(),
attribute: ATTR_LABEL.to_string(),
value: html_title.into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
@ -61,7 +62,7 @@ impl Extractor for WebExtractor {
if attribute == "OG_TITLE" {
entries.push(Some(Entry {
entity: address.clone(),
attribute: LABEL_ATTR.to_string(),
attribute: ATTR_LABEL.to_string(),
value: value.clone().into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
@ -109,8 +110,8 @@ impl Extractor for WebExtractor {
#[cfg(test)]
mod test {
use upend::database::stores::fs::FsStore;
use upend::util::jobs::JobContainer;
use upend_db::jobs::JobContainer;
use upend_db::stores::fs::FsStore;
use url::Url;
use super::*;
@ -121,7 +122,7 @@ mod test {
#[test]
fn test_extract() -> Result<()> {
let temp_dir = TempDir::new().unwrap();
let open_result = upend::database::UpEndDatabase::open(&temp_dir, true)?;
let open_result = upend_db::UpEndDatabase::open(&temp_dir, true)?;
let connection = open_result.db.connection()?;
let store =
Arc::new(Box::new(FsStore::from_path(&temp_dir)?) as Box<dyn UpStore + Sync + Send>);


@ -1,6 +1,8 @@
#[macro_use]
extern crate upend;
extern crate upend_db;
use crate::common::{get_static_dir, REQWEST_ASYNC_CLIENT};
use crate::config::UpEndConfig;
use actix_web::HttpServer;
use anyhow::Result;
use clap::{Args, Parser, Subcommand, ValueEnum};
@ -18,23 +20,18 @@ use std::sync::Arc;
use tracing::trace;
use tracing::{debug, error, info, warn};
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
use upend::addressing::Address;
use upend::database::entry::EntryValue;
use upend::util::hash::hash;
use upend::{
common::build,
config::UpEndConfig,
database::{
stores::{fs::FsStore, UpStore},
UpEndDatabase,
},
util::jobs::JobContainer,
};
use upend_base::addressing::Address;
use upend_base::entry::EntryValue;
use upend_base::hash::{sha256hash, UpMultihash};
use upend_db::jobs::JobContainer;
use upend_db::stores::fs::FsStore;
use upend_db::stores::UpStore;
use upend_db::UpEndDatabase;
use crate::util::exec::block_background;
mod common;
mod config;
mod routes;
mod serve;
mod util;
@ -310,7 +307,7 @@ async fn main() -> Result<()> {
AddressType::File => hash_path(&input)?,
AddressType::Sha256sum => {
let digest = multibase::Base::Base16Lower.decode(input)?;
Address::Hash(upend::util::hash::Hash(digest))
Address::Hash(UpMultihash::from_sha256(digest).unwrap())
}
};
@ -320,7 +317,7 @@ async fn main() -> Result<()> {
}
}
Commands::Serve(args) => {
info!("Starting UpEnd {}...", build::PKG_VERSION);
info!("Starting UpEnd {}...", common::build::PKG_VERSION);
let term_now = Arc::new(std::sync::atomic::AtomicBool::new(false));
for sig in signal_hook::consts::TERM_SIGNALS {
@ -528,7 +525,7 @@ fn hash_path<P: AsRef<Path>>(filepath: P) -> Result<Address> {
let filepath = filepath.as_ref();
debug!("Hashing {:?}...", filepath);
let fbuffer = FileBuffer::open(filepath)?;
let digest = hash(&fbuffer);
let hash = sha256hash(&fbuffer)?;
trace!("Finished hashing {:?}...", filepath);
Ok(Address::Hash(digest))
Ok(Address::Hash(hash))
}

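The hashing helpers move from `upend::util::hash` into `upend_base::hash`: the infallible `hash()` returning the `Hash` newtype is replaced by the fallible `sha256hash()` returning an `UpMultihash`, and raw SHA-256 digests are wrapped with `UpMultihash::from_sha256`, as the `Sha256sum` branch above shows. A rough sketch of the new call paths (the function names are illustrative):

use upend_base::{
    addressing::Address,
    hash::{sha256hash, UpMultihash},
};

// Content-address arbitrary bytes; sha256hash is fallible in the new API.
fn address_for_bytes(data: &[u8]) -> anyhow::Result<Address> {
    let hash = sha256hash(data)?;
    Ok(Address::Hash(hash))
}

// Wrap an externally obtained SHA-256 digest (the unwrap mirrors the CLI above).
fn address_for_digest(digest: Vec<u8>) -> Address {
    Address::Hash(UpMultihash::from_sha256(digest).unwrap())
}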

@ -1,9 +1,8 @@
use upend::database::stores::UpStore;
use upend::util::hash::b58_encode;
use upend::util::hash::Hash;
use upend::util::jobs::{JobContainer, JobState};
use anyhow::{anyhow, Result};
use tracing::{debug, trace};
use upend_base::hash::{b58_encode, UpMultihash};
use upend_db::jobs::{JobContainer, JobState};
use upend_db::stores::UpStore;
use std::{
collections::HashMap,
@ -27,7 +26,7 @@ pub trait Previewable {
fn get_thumbnail(&self, options: HashMap<String, String>) -> Result<Option<Vec<u8>>>;
}
type HashWithOptions = (Hash, String);
type HashWithOptions = (UpMultihash, String);
pub struct PreviewStore {
path: PathBuf,
store: Arc<Box<dyn UpStore + Send + Sync>>,
@ -45,7 +44,11 @@ impl PreviewStore {
}
}
fn get_path(&self, hash: &Hash, options: &HashMap<String, String>) -> Arc<Mutex<PathBuf>> {
fn get_path(
&self,
hash: &UpMultihash,
options: &HashMap<String, String>,
) -> Arc<Mutex<PathBuf>> {
let mut locks = self.locks.lock().unwrap();
let mut options_strs = options
.iter()
@ -58,7 +61,7 @@ impl PreviewStore {
} else {
let thumbpath = self.path.join(format!(
"{}{}",
b58_encode(hash),
b58_encode(hash.to_bytes()),
if options_concat.is_empty() {
String::from("")
} else {
@ -74,7 +77,7 @@ impl PreviewStore {
pub fn get(
&self,
hash: Hash,
hash: UpMultihash,
options: HashMap<String, String>,
mut job_container: JobContainer,
) -> Result<Option<PathBuf>> {

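With `Hash` gone, the preview store keys its thumbnails by `UpMultihash`, and the on-disk name is derived from the raw multihash bytes rather than from the newtype. In isolation, the naming scheme used above amounts to:

use upend_base::hash::{b58_encode, UpMultihash};

// Base58-encode the multihash bytes, replacing the old `b58_encode(hash)`
// call that took the Hash newtype directly.
fn thumbnail_filename(hash: &UpMultihash) -> String {
    b58_encode(hash.to_bytes())
}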

@ -1,4 +1,6 @@
use crate::common::build;
use crate::common::REQWEST_CLIENT;
use crate::config::UpEndConfig;
use crate::extractors;
use crate::previews::PreviewStore;
use crate::util::exec::block_background;
@ -16,7 +18,7 @@ use actix_web::{
http::header::{CacheControl, CacheDirective, DispositionType},
HttpRequest,
};
use anyhow::{anyhow, Result};
use anyhow::Result;
use futures::channel::oneshot;
use futures_util::{StreamExt, TryStreamExt};
use serde::{Deserialize, Serialize};
@ -28,19 +30,17 @@ use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use tempfile::NamedTempFile;
use tracing::{debug, info, trace};
use upend::addressing::{Address, Addressable};
use upend::common::build;
use upend::config::UpEndConfig;
use upend::database::constants::{ADDED_ATTR, LABEL_ATTR};
use upend::database::entry::{Entry, EntryValue, InvariantEntry};
use upend::database::hierarchies::{list_roots, resolve_path, UHierPath};
use upend::database::lang::Query;
use upend::database::stores::{Blob, UpStore};
use upend::database::UpEndDatabase;
use upend::util::hash::{b58_decode, b58_encode, hash};
use upend::util::jobs;
use upend_base::addressing::AddressComponents;
use upend_base::addressing::{Address, Addressable};
use upend_base::constants::{ATTR_ADDED, ATTR_LABEL};
use upend_base::entry::{Entry, EntryValue, InvariantEntry};
use upend_base::hash::{b58_decode, b58_encode, sha256hash};
use upend_base::lang::Query;
use upend_db::hierarchies::{list_roots, resolve_path, UHierPath};
use upend_db::jobs;
use upend_db::stores::{Blob, UpStore};
use upend_db::UpEndDatabase;
use url::Url;
use uuid::Uuid;
#[cfg(feature = "desktop")]
use is_executable::IsExecutable;
@ -314,19 +314,8 @@ pub async fn get_object(
debug!("{:?}", result);
// TODO: make this automatically derive from `Address` definition
let (entity_type, entity_content) = match address {
Address::Hash(_) => ("Hash", None),
Address::Uuid(_) => ("Uuid", None),
Address::Attribute(attribute) => ("Attribute", Some(attribute)),
Address::Url(url) => ("Url", Some(url.to_string())),
};
Ok(HttpResponse::Ok().json(json!({
"entity": {
"t": entity_type,
"c": entity_content
},
"entity": address.as_components(),
"entries": result.as_hash().map_err(ErrorInternalServerError)?
})))
}
@ -344,23 +333,7 @@ impl TryInto<Address> for InAddress {
fn try_into(self) -> Result<Address, Self::Error> {
Ok(match self {
InAddress::Address(address) => address.parse()?,
InAddress::Components { t, c } => {
// I absolutely cannot handle serde magic right now
// TODO: make this automatically derive from `Address` definition
match t.as_str() {
"Attribute" => Address::Attribute(c.ok_or(anyhow!("Missing attribute."))?),
"Url" => Address::Url(if let Some(string) = c {
Url::parse(&string)?
} else {
Err(anyhow!("Missing URL."))?
}),
"Uuid" => match c {
Some(c) => c.parse()?,
None => Address::Uuid(Uuid::new_v4()),
},
_ => c.ok_or(anyhow!("Missing address."))?.parse()?,
}
}
InAddress::Components { t, c } => Address::from_components(AddressComponents { t, c })?,
})
}
}
@ -416,10 +389,10 @@ pub async fn put_object(
timestamp: chrono::Utc::now().naive_utc(),
})
} else {
Entry::try_from(&InvariantEntry {
Ok(Entry::try_from(&InvariantEntry {
attribute: in_entry.attribute,
value: in_entry.value,
})
})?)
}
};
@ -469,7 +442,7 @@ pub async fn put_object(
if connection.retrieve_object(&address)?.is_empty() {
connection.insert_entry(Entry {
entity: address.clone(),
attribute: ADDED_ATTR.to_string(),
attribute: ATTR_ADDED.to_string(),
value: EntryValue::Number(
SystemTime::now()
.duration_since(UNIX_EPOCH)
@ -558,7 +531,7 @@ pub async fn put_blob(
upend_insert_val!(
&connection,
_address,
LABEL_ATTR,
ATTR_LABEL,
_filename.unwrap_or(fallback_label)
)
})
@ -677,12 +650,20 @@ pub async fn get_address(
} else if let Some(url) = query.get("url_content") {
let url = Url::parse(url).map_err(ErrorBadRequest)?;
let (bytes, _) = web::block(|| fetch_external(url)).await??;
let hash_result = hash(&bytes);
let hash_result = sha256hash(&bytes).map_err(ErrorInternalServerError)?;
(Address::Hash(hash_result), false)
} else if let Some(type_str) = query.get("type") {
match type_str.as_str() {
"Hash" => (upend_base::constants::TYPE_HASH_ADDRESS.clone(), true),
"Uuid" => (upend_base::constants::TYPE_UUID_ADDRESS.clone(), true),
"Attribute" => (upend_base::constants::TYPE_ATTRIBUTE_ADDRESS.clone(), true),
"Url" => (upend_base::constants::TYPE_URL_ADDRESS.clone(), true),
_ => return Err(ErrorBadRequest(format!("Unknown type: {type_str}"))),
}
} else {
return Err(ErrorBadRequest(anyhow!(
"Specify one of: `attribute`, `url`, `url_content`."
)));
return Err(ErrorBadRequest(
"Specify one of: `attribute`, `url`, `url_content`, `type`.",
));
};
let mut response = HttpResponse::Ok();
@ -716,7 +697,7 @@ pub async fn get_all_attributes(state: web::Data<State>) -> Result<HttpResponse,
.unwrap_or_else(|_| vec![])
.into_iter()
.filter_map(|e| {
if e.attribute == LABEL_ATTR {
if e.attribute == ATTR_LABEL {
if let EntryValue::String(label) = e.value {
Some(label)
} else {
@ -850,7 +831,12 @@ pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
Ok(HttpResponse::Ok().json(json!({
"name": state.config.vault_name,
// "location": &*state.store.path,
"version": build::PKG_VERSION,
"version": format!(
"{} / {} / {}",
upend_base::common::build::PKG_VERSION,
upend_db::common::build::PKG_VERSION,
build::PKG_VERSION
),
"desktop": state.config.desktop_enabled
})))
}
@ -953,6 +939,7 @@ mod tests {
use super::*;
use anyhow::Result;
use tempfile::TempDir;
use upend_base::hash::UpMultihash;
#[test]
fn test_in_address() -> Result<()> {
@ -989,7 +976,15 @@ mod tests {
let info: VaultInfo = actix_web::test::call_and_read_body_json(&app, req).await;
assert_eq!(info.name, Some("TEST VAULT".to_string()));
assert_eq!(info.version, format!("{}", build::PKG_VERSION));
assert_eq!(
info.version,
format!(
"{} / {} / {}",
upend_base::common::build::PKG_VERSION,
upend_db::common::build::PKG_VERSION,
build::PKG_VERSION
)
);
assert!(!info.desktop);
}
@ -1023,7 +1018,7 @@ mod tests {
let roots: HashMap<String, Entry> =
actix_web::test::call_and_read_body_json(&app, req).await;
let mut labels = roots.values().filter(|v| v.attribute == LABEL_ATTR);
let mut labels = roots.values().filter(|v| v.attribute == ATTR_LABEL);
assert_eq!(labels.next().unwrap().value, "NATIVE".into());
assert!(labels.next().is_none());
@ -1039,10 +1034,58 @@ mod tests {
.unwrap()
.to_str()
.unwrap(),
"../../api/obj/zQmbuQbmm7z1AeZjbBw2iX1557ZoUFQ8vrMKaaw2UYrt5zG"
"../../api/obj/zb2rhkD35pyMqGBbkb9B2o1CuTXsqWxxtTfm87etbv4K8rGBz"
);
}
#[actix_web::test]
async fn test_obj_entity_info() {
let app = actix_web::test::init_service(crate::serve::get_app::<
std::path::PathBuf,
Vec<String>,
>(None, vec![], get_state()))
.await;
let digest = UpMultihash::from_sha256([1, 2, 3, 4, 5]).unwrap();
let digest_str = b58_encode(digest.to_bytes());
let address = Address::Hash(digest);
let req = actix_web::test::TestRequest::get()
.uri(&format!("/api/obj/{}", address))
.to_request();
let result: serde_json::Value = actix_web::test::call_and_read_body_json(&app, req).await;
assert_eq!(result["entity"]["t"], "Hash");
assert_eq!(result["entity"]["c"], digest_str);
let address = Address::Attribute("TEST".to_string());
let req = actix_web::test::TestRequest::get()
.uri(&format!("/api/obj/{}", address))
.to_request();
let result: serde_json::Value = actix_web::test::call_and_read_body_json(&app, req).await;
assert_eq!(result["entity"]["t"], "Attribute");
assert_eq!(result["entity"]["c"], "TEST");
let address = Address::Url("https://upend.dev/".parse().unwrap());
let req = actix_web::test::TestRequest::get()
.uri(&format!("/api/obj/{}", address))
.to_request();
let result: serde_json::Value = actix_web::test::call_and_read_body_json(&app, req).await;
assert_eq!(result["entity"]["t"], "Url");
assert_eq!(result["entity"]["c"], "https://upend.dev/");
let uuid = uuid::Uuid::new_v4();
let address = Address::Uuid(uuid);
let req = actix_web::test::TestRequest::get()
.uri(&format!("/api/obj/{}", address))
.to_request();
let result: serde_json::Value = actix_web::test::call_and_read_body_json(&app, req).await;
assert_eq!(result["entity"]["t"], "Uuid");
assert_eq!(result["entity"]["c"], uuid.to_string());
}
fn get_state() -> State {
// Prepare temporary filesystem structure
let temp_dir = TempDir::new().unwrap();
@ -1063,7 +1106,7 @@ mod tests {
let upend = Arc::new(open_result.db);
let store = Arc::new(Box::new(
upend::database::stores::fs::FsStore::from_path(temp_dir.path()).unwrap(),
upend_db::stores::fs::FsStore::from_path(temp_dir.path()).unwrap(),
) as Box<dyn UpStore + Send + Sync>);
let job_container = jobs::JobContainer::new();

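The ad-hoc `{"t": ..., "c": ...}` handling in `get_object` and `InAddress` is replaced by `AddressComponents` plus the `as_components`/`from_components` pair on `Address` from `upend_base`. A minimal round-trip sketch, mirroring the new tests above (`from_components` is fallible):

use upend_base::addressing::{Address, AddressComponents};

fn components_roundtrip() -> anyhow::Result<()> {
    let address = Address::Attribute("TEST".to_string());

    // Split into a type tag and optional content, e.g. t = "Attribute", c = Some("TEST").
    let components: AddressComponents = address.clone().as_components();

    // Rebuild the address from the same components.
    let rebuilt = Address::from_components(components)?;
    assert_eq!(address, rebuilt);
    Ok(())
}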

@ -41,7 +41,7 @@ where
.wrap(cors)
.wrap(
actix_web::middleware::DefaultHeaders::new()
.add(("UPEND-VERSION", upend::common::build::PKG_VERSION)),
.add(("UPEND-VERSION", crate::common::build::PKG_VERSION)),
)
.app_data(actix_web::web::PayloadConfig::new(4_294_967_296))
.app_data(actix_web::web::Data::new(state))

db/Cargo.toml (new file, 61 lines)

@ -0,0 +1,61 @@
[package]
name = "upend-db"
version = "0.0.1"
homepage = "https://upend.dev/"
repository = "https://git.thm.place/thm/upend"
authors = ["Tomáš Mládek <t@mldk.cz>"]
license = "AGPL-3.0-or-later"
edition = "2018"
[dependencies]
upend-base = { path = "../base", features = ["diesel"] }
log = "0.4"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
anyhow = "1.0"
rayon = "1.4.0"
num_cpus = "1.13"
lazy_static = "1.4.0"
once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
"sqlite",
"r2d2",
"chrono",
"serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lexpr = "0.2.6"
regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "1.4", features = ["v4"] }
url = { version = "2", features = ["serde"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"
tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }
nonempty = "0.6.0"
shadow-rs = "0.17"
[build-dependencies]
shadow-rs = "0.17"

db/build.rs (new file, 3 lines)

@ -0,0 +1,3 @@
fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
}

db/src/common.rs (new file, 3 lines)

@ -0,0 +1,3 @@
use shadow_rs::shadow;
shadow!(build);

db/src/constants.rs (new file, 14 lines)

@ -0,0 +1,14 @@
use crate::addressing::Address;
use crate::entry::InvariantEntry;
lazy_static! {
pub static ref HIER_ROOT_INVARIANT: InvariantEntry = InvariantEntry {
attribute: String::from(ATTR_KEY),
value: "HIER_ROOT".into(),
};
pub static ref HIER_ROOT_ADDR: Address = HIER_ROOT_INVARIANT.entity().unwrap();
pub static ref TYPE_HASH_ADDRESS: Address = Address::Hash(crate::util::hash::Hash(vec![]));
pub static ref TYPE_UUID_ADDRESS: Address = Address::Uuid(uuid::Uuid::nil());
pub static ref TYPE_ATTRIBUTE_ADDRESS: Address = Address::Attribute("".to_string());
pub static ref TYPE_URL_ADDRESS: Address = Address::Url(url::Url::parse("up:").unwrap());
}


@ -1,11 +1,9 @@
use std::collections::HashMap;
use std::iter::zip;
use super::entry::EntryValue;
use super::inner::models::Entry;
use super::inner::schema::data;
use super::lang::{PatternQuery, Query, QueryComponent, QueryPart, QueryQualifier};
use crate::database::inner::models;
use crate::inner::models;
use anyhow::Result;
use diesel::expression::grouped::Grouped;
use diesel::expression::operators::{And, Not, Or};
@ -19,6 +17,8 @@ use diesel::{
};
use diesel::{BoxableExpression, QueryDsl};
use diesel::{ExpressionMethods, TextExpressionMethods};
use upend_base::entry::EntryValue;
use upend_base::lang::{PatternQuery, Query, QueryComponent, QueryPart, QueryQualifier};
#[derive(Debug, Clone)]
pub struct QueryExecutionError(String);
@ -35,7 +35,7 @@ pub fn execute(
connection: &PooledConnection<ConnectionManager<SqliteConnection>>,
query: Query,
) -> Result<Vec<Entry>, QueryExecutionError> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
if let Some(predicates) = to_sqlite_predicates(query.clone())? {
let db_query = data.filter(predicates);
@ -135,7 +135,7 @@ impl EntryWithVars {
if let QueryComponent::Variable(Some(var_name)) = &query.entity {
vars.insert(
var_name.clone(),
crate::util::hash::b58_encode(&entry.entity),
upend_base::hash::b58_encode(&entry.entity),
);
}

db/src/entry.rs (new file, 86 lines)

@ -0,0 +1,86 @@
use crate::inner::models;
use anyhow::{anyhow, Result};
use std::convert::TryFrom;
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
impl TryFrom<&models::Entry> for Entry {
type Error = anyhow::Error;
fn try_from(e: &models::Entry) -> Result<Self, Self::Error> {
if let Some(value_str) = &e.value_str {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: value_str.parse()?,
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
} else if let Some(value_num) = e.value_num {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: EntryValue::Number(value_num),
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
} else {
Ok(Entry {
entity: Address::decode(&e.entity)?,
attribute: e.attribute.clone(),
value: EntryValue::Number(f64::NAN),
provenance: e.provenance.clone(),
timestamp: e.timestamp,
})
}
}
}
impl TryFrom<&Entry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &Entry) -> Result<Self, Self::Error> {
if e.attribute.is_empty() {
return Err(anyhow!("Attribute cannot be empty."));
}
let base_entry = models::Entry {
identity: e.address()?.encode()?,
entity_searchable: match &e.entity {
Address::Attribute(attr) => Some(attr.clone()),
Address::Url(url) => Some(url.to_string()),
_ => None,
},
entity: e.entity.encode()?,
attribute: e.attribute.clone(),
value_str: None,
value_num: None,
immutable: false,
provenance: e.provenance.clone(),
timestamp: e.timestamp,
};
match e.value {
EntryValue::Number(n) => Ok(models::Entry {
value_str: None,
value_num: Some(n),
..base_entry
}),
_ => Ok(models::Entry {
value_str: Some(e.value.to_string()?),
value_num: None,
..base_entry
}),
}
}
}
impl TryFrom<&ImmutableEntry> for models::Entry {
type Error = anyhow::Error;
fn try_from(e: &ImmutableEntry) -> Result<Self, Self::Error> {
Ok(models::Entry {
immutable: true,
..models::Entry::try_from(&e.0)?
})
}
}


@ -6,12 +6,11 @@ use lru::LruCache;
use tracing::trace;
use uuid::Uuid;
use crate::addressing::{Address, Addressable};
use crate::database::constants::{
HIER_ADDR, HIER_HAS_ATTR, HIER_INVARIANT, IS_OF_TYPE_ATTR, LABEL_ATTR, TYPE_ADDR, TYPE_HAS_ATTR,
};
use crate::database::entry::{Entry, EntryValue};
use crate::database::lang::{PatternQuery, Query, QueryComponent, QueryPart};
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::constants::{ATTR_IN, HIER_ROOT_ADDR, HIER_ROOT_INVARIANT};
use upend_base::entry::Entry;
use upend_base::lang::{PatternQuery, Query, QueryComponent, QueryPart};
use super::UpEndConnection;
@ -77,47 +76,14 @@ impl std::fmt::Display for UHierPath {
}
}
trait PointerEntries {
fn extract_pointers(&self) -> Vec<(Address, Address)>;
}
impl PointerEntries for Vec<Entry> {
fn extract_pointers(&self) -> Vec<(Address, Address)> {
self.iter()
.filter_map(|e| {
if let EntryValue::Address(address) = &e.value {
Some((e.address().unwrap(), address.clone()))
} else {
None
}
})
.collect()
}
}
pub fn list_roots(connection: &UpEndConnection) -> Result<Vec<Address>> {
let all_directories: Vec<Entry> =
connection.query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
entity: QueryComponent::Variable(None),
attribute: QueryComponent::Exact(IS_OF_TYPE_ATTR.into()),
value: QueryComponent::Exact(HIER_ADDR.clone().into()),
})))?;
// TODO: this is horrible
let directories_with_parents: Vec<Address> = connection
Ok(connection
.query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
entity: QueryComponent::Variable(None),
attribute: QueryComponent::Exact(HIER_HAS_ATTR.into()),
value: QueryComponent::Variable(None),
attribute: QueryComponent::Exact(ATTR_IN.into()),
value: QueryComponent::Exact((*HIER_ROOT_ADDR).clone().into()),
})))?
.extract_pointers()
.into_iter()
.map(|(_, val)| val)
.collect();
Ok(all_directories
.into_iter()
.filter(|entry| !directories_with_parents.contains(&entry.entity))
.map(|e| e.entity)
.collect())
}
@ -145,7 +111,7 @@ pub fn fetch_or_create_dir(
let matching_directories = connection
.query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
entity: QueryComponent::Variable(None),
attribute: QueryComponent::Exact(LABEL_ATTR.into()),
attribute: QueryComponent::Exact(ATTR_LABEL.into()),
value: QueryComponent::Exact(String::from(directory.clone()).into()),
})))?
.into_iter()
@ -154,13 +120,12 @@ pub fn fetch_or_create_dir(
let parent_has: Vec<Address> = match parent.clone() {
Some(parent) => connection
.query(Query::SingleQuery(QueryPart::Matches(PatternQuery {
entity: QueryComponent::Exact(parent),
attribute: QueryComponent::Exact(HIER_HAS_ATTR.into()),
value: QueryComponent::Variable(None),
entity: QueryComponent::Variable(None),
attribute: QueryComponent::Exact(ATTR_IN.into()),
value: QueryComponent::Exact(parent.into()),
})))?
.extract_pointers()
.into_iter()
.map(|(_, val)| val)
.map(|e| e.entity)
.collect(),
None => list_roots(connection)?,
};
@ -173,34 +138,33 @@ pub fn fetch_or_create_dir(
0 => {
if create {
let new_directory_address = Address::Uuid(Uuid::new_v4());
let type_entry = Entry {
entity: new_directory_address.clone(),
attribute: String::from(IS_OF_TYPE_ATTR),
value: HIER_ADDR.clone().into(),
provenance: "SYSTEM FS".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(type_entry)?;
let directory_entry = Entry {
entity: new_directory_address.clone(),
attribute: String::from(LABEL_ATTR),
attribute: String::from(ATTR_LABEL),
value: String::from(directory).into(),
provenance: "SYSTEM FS".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(directory_entry)?;
if let Some(parent) = parent {
let has_entry = Entry {
entity: parent,
attribute: String::from(HIER_HAS_ATTR),
value: new_directory_address.clone().into(),
connection.insert_entry(if let Some(parent) = parent {
Entry {
entity: new_directory_address.clone(),
attribute: String::from(ATTR_IN),
value: parent.into(),
provenance: "SYSTEM FS".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(has_entry)?;
}
}
} else {
Entry {
entity: new_directory_address.clone(),
attribute: String::from(ATTR_IN),
value: HIER_ROOT_ADDR.clone().into(),
provenance: "SYSTEM FS".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
}
})?;
Ok(new_directory_address)
} else {
@ -269,10 +233,8 @@ pub fn resolve_path_cached(
}
pub fn initialize_hier(connection: &UpEndConnection) -> Result<()> {
connection.insert_entry(Entry::try_from(&*HIER_INVARIANT)?)?;
upend_insert_addr!(connection, HIER_ADDR, IS_OF_TYPE_ATTR, TYPE_ADDR)?;
upend_insert_val!(connection, HIER_ADDR, TYPE_HAS_ATTR, HIER_HAS_ATTR)?;
upend_insert_val!(connection, HIER_ADDR, LABEL_ATTR, "Group")?;
connection.insert_entry(Entry::try_from(&*HIER_ROOT_INVARIANT)?)?;
upend_insert_val!(connection, HIER_ROOT_ADDR, ATTR_LABEL, "Hierarchy Root")?;
Ok(())
}
@ -280,7 +242,7 @@ pub fn initialize_hier(connection: &UpEndConnection) -> Result<()> {
mod tests {
use anyhow::Result;
use crate::database::UpEndDatabase;
use crate::UpEndDatabase;
use tempfile::TempDir;
use super::*;

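The hierarchy model is inverted in this hunk: instead of a parent asserting `HAS` towards its children (and groups carrying an `IS`/`HIER` type entry), each node now points upwards with a single `IN` entry, and root groups are simply those `IN` the `HIER_ROOT` invariant. A sketch of the entries written when a group is created — the attribute usage and provenance follow the code above, the helper shape and group name are illustrative:

use upend_base::{
    addressing::Address,
    constants::{ATTR_IN, ATTR_LABEL, HIER_ROOT_ADDR},
    entry::Entry,
};

fn group_entries(group: Address, parent: Option<Address>, name: &str) -> (Entry, Entry) {
    let now = chrono::Utc::now().naive_utc();
    let label = Entry {
        entity: group.clone(),
        attribute: String::from(ATTR_LABEL),
        value: name.into(),
        provenance: "SYSTEM FS".to_string(),
        timestamp: now,
    };
    // Upwards link: into the parent if there is one, otherwise into the hierarchy root.
    let placement = Entry {
        entity: group,
        attribute: String::from(ATTR_IN),
        value: match parent {
            Some(parent) => parent.into(),
            None => HIER_ROOT_ADDR.clone().into(),
        },
        provenance: "SYSTEM FS".to_string(),
        timestamp: now,
    };
    (label, placement)
}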

@ -20,7 +20,4 @@ table! {
}
}
allow_tables_to_appear_in_same_query!(
data,
meta,
);
allow_tables_to_appear_in_same_query!(data, meta,);


@ -1,28 +1,29 @@
#![macro_use]
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
#[macro_use]
mod macros;
pub mod stores;
pub mod constants;
pub mod common;
pub mod engine;
pub mod entry;
pub mod hierarchies;
pub mod inner;
pub mod lang;
pub mod jobs;
pub mod stores;
mod inner;
mod util;
use crate::addressing::{Address, Addressable};
use crate::common::build;
use crate::database::constants::{
IS_OF_TYPE_ATTR, LABEL_ATTR, TYPE_ADDR, TYPE_HAS_ATTR, TYPE_INVARIANT, TYPE_TYPE_VAL,
};
use crate::database::engine::execute;
use crate::database::entry::{Entry, EntryValue, ImmutableEntry};
use crate::database::inner::models;
use crate::database::inner::schema::data;
use crate::database::lang::Query;
use crate::util::hash::Hash;
use crate::engine::execute;
use crate::inner::models;
use crate::inner::schema::data;
use crate::util::LoggerSink;
use anyhow::{anyhow, Result};
use diesel::prelude::*;
@ -37,6 +38,11 @@ use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};
use std::time::Duration;
use tracing::{debug, error, trace, warn};
use upend_base::addressing::{Address, Addressable};
use upend_base::entry::{Entry, EntryValue, ImmutableEntry};
use upend_base::error::UpEndError;
use upend_base::hash::UpMultihash;
use upend_base::lang::Query;
#[derive(Debug)]
pub struct ConnectionOptions {
@ -161,13 +167,6 @@ impl UpEndDatabase {
},
)?;
trace!("Initializing types...");
connection.insert_entry(Entry::try_from(&*TYPE_INVARIANT)?)?;
upend_insert_addr!(connection, TYPE_ADDR, IS_OF_TYPE_ATTR, TYPE_ADDR)?;
upend_insert_val!(connection, TYPE_ADDR, TYPE_HAS_ATTR, TYPE_HAS_ATTR)?;
upend_insert_val!(connection, TYPE_ADDR, TYPE_HAS_ATTR, TYPE_TYPE_VAL)?;
upend_insert_val!(connection, TYPE_ADDR, LABEL_ATTR, "UpEnd Type")?;
initialize_hier(&connection)?;
Ok(OpenResult { db, new })
@ -203,7 +202,7 @@ impl UpEndConnection {
}
pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<String> {
use crate::database::inner::schema::meta::dsl;
use crate::inner::schema::meta::dsl;
let key = key.as_ref();
debug!("Querying META:{key}");
@ -219,8 +218,8 @@ impl UpEndConnection {
.map(|mv| mv.value.clone())
}
pub fn retrieve_entry(&self, hash: &Hash) -> Result<Option<Entry>> {
use crate::database::inner::schema::data::dsl::*;
pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result<Option<Entry>> {
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -241,7 +240,7 @@ impl UpEndConnection {
}
pub fn retrieve_object(&self, object_address: &Address) -> Result<Vec<Entry>> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -264,7 +263,7 @@ impl UpEndConnection {
.map(|e| e.address())
.filter_map(Result::ok)
.map(|addr| addr.encode())
.collect::<Result<Vec<Vec<u8>>>>()?,
.collect::<Result<Vec<Vec<u8>>, UpEndError>>()?,
),
)
.load::<models::Entry>(&conn)?;
@ -278,7 +277,7 @@ impl UpEndConnection {
}
pub fn remove_object(&self, object_address: Address) -> Result<usize> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
debug!("Deleting {}!", object_address);
@ -313,7 +312,7 @@ impl UpEndConnection {
debug!("Inserting: {}", entry);
let db_entry = models::Entry::try_from(&entry)?;
self.insert_model_entry(db_entry)?;
entry.address()
Ok(entry.address()?)
}
pub fn insert_entry_immutable(&self, entry: Entry) -> Result<Address> {
@ -343,7 +342,7 @@ impl UpEndConnection {
// #[deprecated]
pub fn get_all_addresses(&self) -> Result<Vec<Address>> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -361,7 +360,7 @@ impl UpEndConnection {
// #[deprecated]
pub fn get_all_attributes(&self) -> Result<Vec<String>> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -376,7 +375,7 @@ impl UpEndConnection {
}
pub fn get_stats(&self) -> Result<serde_json::Value> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -412,7 +411,7 @@ impl UpEndConnection {
#[deprecated]
pub fn get_explicit_entries(&self) -> Result<Vec<Entry>> {
use crate::database::inner::schema::data::dsl::*;
use crate::inner::schema::data::dsl::*;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
@ -433,6 +432,8 @@ impl UpEndConnection {
#[cfg(test)]
mod test {
use upend_base::constants::ATTR_LABEL;
use super::*;
use tempfile::TempDir;
@ -441,18 +442,18 @@ mod test {
let tempdir = TempDir::new().unwrap();
let result = UpEndDatabase::open(&tempdir, false);
assert!(result.is_ok());
assert!(result.unwrap().new);
let result = result.unwrap();
assert!(result.new);
// Not new
let result = UpEndDatabase::open(&tempdir, false);
assert!(result.is_ok());
assert!(!result.unwrap().new);
let result = result.unwrap();
assert!(!result.new);
// reinitialize true, new again
let result = UpEndDatabase::open(&tempdir, true);
assert!(result.is_ok());
assert!(result.unwrap().new);
let result = result.unwrap();
assert!(result.new);
}
#[test]
@ -464,7 +465,7 @@ mod test {
let connection = db.connection().unwrap();
let random_entity = Address::Uuid(uuid::Uuid::new_v4());
upend_insert_val!(connection, random_entity, LABEL_ATTR, "FOOBAR").unwrap();
upend_insert_val!(connection, random_entity, ATTR_LABEL, "FOOBAR").unwrap();
upend_insert_val!(connection, random_entity, "FLAVOUR", "STRANGE").unwrap();
let query = format!(r#"(matches @{random_entity} ? ?)"#)
@ -474,7 +475,7 @@ mod test {
assert_eq!(result.len(), 2);
let other_entity = Address::Uuid(uuid::Uuid::new_v4());
upend_insert_val!(connection, random_entity, LABEL_ATTR, "BAZQUX").unwrap();
upend_insert_val!(connection, random_entity, ATTR_LABEL, "BAZQUX").unwrap();
upend_insert_val!(connection, random_entity, "CHARGE", "POSITIVE").unwrap();
let query = format!(r#"(matches (in @{random_entity} @{other_entity}) ? ?)"#)
@ -487,13 +488,13 @@ mod test {
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let query = format!(r#"(matches ? "{LABEL_ATTR}" (in "FOOBAR" "BAZQUX"))"#)
let query = format!(r#"(matches ? "{ATTR_LABEL}" (in "FOOBAR" "BAZQUX"))"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
assert_eq!(result.len(), 2);
let query = format!(r#"(matches ? "{LABEL_ATTR}" (contains "OOBA"))"#)
let query = format!(r#"(matches ? "{ATTR_LABEL}" (contains "OOBA"))"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
@ -506,7 +507,7 @@ mod test {
assert_eq!(result.len(), 2);
let query =
format!(r#"(and (matches ? ? (contains "OOBA")) (matches ? "{LABEL_ATTR}" ?) )"#)
format!(r#"(and (matches ? ? (contains "OOBA")) (matches ? "{ATTR_LABEL}" ?) )"#)
.parse()
.unwrap();
let result = connection.query(query).unwrap();
@ -518,7 +519,7 @@ mod test {
(matches ? ? (contains "OOBA"))
(matches ? (contains "HARGE") ?)
)
(not (matches ? "{LABEL_ATTR}" ?))
(not (matches ? "{ATTR_LABEL}" ?))
)"#
)
.parse()
@ -529,7 +530,7 @@ mod test {
let query = format!(
r#"(join
(matches ?a "FLAVOUR" ?)
(matches ?a "{LABEL_ATTR}" "FOOBAR")
(matches ?a "{ATTR_LABEL}" "FOOBAR")
)"#
)
.parse()


@ -4,7 +4,7 @@ macro_rules! upend_insert_val {
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: String::from($attribute),
value: upend::database::entry::EntryValue::String(String::from($value)),
value: upend_base::entry::EntryValue::String(String::from($value)),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
})
@ -17,7 +17,7 @@ macro_rules! upend_insert_addr {
$db_connection.insert_entry(Entry {
entity: $entity.clone(),
attribute: String::from($attribute),
value: upend::database::entry::EntryValue::Address($addr.clone()),
value: upend_base::entry::EntryValue::Address($addr.clone()),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
})

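The insertion macros now spell out the `upend_base::entry::EntryValue` path, so callers only need `Entry` and the relevant constants in scope. Typical usage inside `upend_db` (where the macros are available crate-wide via `#[macro_use]`) mirrors the tests elsewhere in this change; the `SEE_ALSO` attribute is illustrative:

use upend_base::{addressing::Address, constants::ATTR_LABEL, entry::Entry};

fn add_example_entries(connection: &crate::UpEndConnection) -> anyhow::Result<()> {
    let entity = Address::Uuid(uuid::Uuid::new_v4());

    // Expands to connection.insert_entry(Entry { value: EntryValue::String(..), .. }).
    upend_insert_val!(connection, entity, ATTR_LABEL, "FOOBAR")?;

    // Address-valued variant, expanding to EntryValue::Address(..).
    upend_insert_addr!(connection, entity, "SEE_ALSO", entity)?;

    Ok(())
}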

@ -1,9 +1,9 @@
use std::path::PathBuf;
use crate::util::hash::Hash;
use chrono::NaiveDateTime;
use diesel::Queryable;
use serde::Serialize;
use upend_base::hash::UpMultihash;
table! {
files (id) {
@ -20,7 +20,7 @@ table! {
#[derive(Queryable, Serialize, Clone, Debug)]
pub struct File {
pub id: i32,
pub hash: Hash,
pub hash: UpMultihash,
pub path: String,
pub valid: bool,
pub added: NaiveDateTime,
@ -32,7 +32,7 @@ pub struct File {
#[derive(Serialize, Clone, Debug)]
pub struct OutFile {
pub id: i32,
pub hash: Hash,
pub hash: UpMultihash,
pub path: PathBuf,
pub valid: bool,
pub added: NaiveDateTime,
@ -50,8 +50,8 @@ pub struct NewFile {
pub mtime: Option<NaiveDateTime>,
}
impl From<File> for crate::addressing::Address {
impl From<File> for upend_base::addressing::Address {
fn from(file: File) -> Self {
crate::addressing::Address::Hash(file.hash)
upend_base::addressing::Address::Hash(file.hash)
}
}


@ -1,20 +1,10 @@
use self::db::files;
use super::{Blob, StoreError, UpStore, UpdatePathOutcome};
use crate::addressing::Address;
use crate::database::constants::{
ADDED_ATTR, HIER_HAS_ATTR, IS_OF_TYPE_ATTR, LABEL_ATTR, TYPE_ADDR, TYPE_BASE_ATTR,
TYPE_HAS_ATTR,
};
use crate::database::entry::{Entry, InvariantEntry};
use crate::database::hierarchies::{
resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode,
};
use crate::database::{
ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR,
};
use crate::util::hash::{b58_encode, Hash, Hashable};
use crate::util::jobs::{JobContainer, JobHandle};
use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode};
use crate::jobs::{JobContainer, JobHandle};
use crate::util::hash_at_path;
use crate::{ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR};
use anyhow::{anyhow, Error, Result};
use chrono::prelude::*;
use diesel::r2d2::{self, ConnectionManager, ManageConnection};
@ -24,30 +14,24 @@ use lru::LruCache;
use rayon::prelude::*;
use serde_json::json;
use std::borrow::Borrow;
use std::convert::{TryFrom, TryInto};
use std::convert::TryInto;
use std::path::PathBuf;
use std::path::{Component, Path};
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
use std::{fs, iter};
use tracing::{debug, error, info, trace, warn};
use upend_base::addressing::Address;
use upend_base::constants::{ATTR_ADDED, ATTR_BY, ATTR_IN, ATTR_LABEL, ATTR_OF, TYPE_HASH_ADDRESS};
use upend_base::entry::Entry;
use upend_base::hash::{b58_encode, UpMultihash};
use walkdir::WalkDir;
mod db;
const BLOB_TYPE: &str = "BLOB";
const ALIAS_KEY: &str = "ALIAS";
pub const FILE_MIME_KEY: &str = "FILE_MIME";
const FILE_SIZE_KEY: &str = "FILE_SIZE";
lazy_static! {
static ref BLOB_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
attribute: String::from(TYPE_BASE_ATTR),
value: BLOB_TYPE.into(),
};
static ref BLOB_TYPE_ADDR: Address = BLOB_TYPE_INVARIANT.entity().unwrap();
}
pub struct FsStore {
path: PathBuf,
pool: r2d2::Pool<ConnectionManager<SqliteConnection>>,
@ -118,11 +102,18 @@ impl FsStore {
// Initialize types, etc...
debug!("Initializing DB types.");
upconnection.insert_entry(Entry::try_from(&*BLOB_TYPE_INVARIANT)?)?;
upend_insert_addr!(upconnection, BLOB_TYPE_ADDR, IS_OF_TYPE_ATTR, TYPE_ADDR)?;
upend_insert_val!(upconnection, BLOB_TYPE_ADDR, TYPE_HAS_ATTR, FILE_SIZE_KEY)?;
upend_insert_val!(upconnection, BLOB_TYPE_ADDR, TYPE_HAS_ATTR, FILE_MIME_KEY)?;
upend_insert_val!(upconnection, BLOB_TYPE_ADDR, LABEL_ATTR, "Data Blob")?;
upend_insert_addr!(
upconnection,
Address::Attribute(FILE_SIZE_KEY.to_string()),
ATTR_OF,
TYPE_HASH_ADDRESS
)?;
upend_insert_addr!(
upconnection,
Address::Attribute(FILE_MIME_KEY.to_string()),
ATTR_OF,
TYPE_HASH_ADDRESS
)?;
// Walk through the vault, find all paths
debug!("Traversing vault directory");
@ -261,7 +252,7 @@ impl FsStore {
.to_str()
.ok_or(anyhow!("Path not valid unicode!"))?;
let mut file_hash: Option<Hash> = None;
let mut file_hash: Option<UpMultihash> = None;
// Get size & mtime for quick comparison
let metadata = fs::metadata(&path)?;
@ -298,7 +289,7 @@ impl FsStore {
let mut same_hash = false;
if !quick_check || !same_mtime {
file_hash = Some(path.hash()?);
file_hash = Some(hash_at_path(&path)?);
same_hash = file_hash.as_ref().unwrap() == &existing_file.hash;
}
@ -331,7 +322,7 @@ impl FsStore {
// If not, add it!
if file_hash.is_none() {
file_hash = Some(path.hash()?);
file_hash = Some(hash_at_path(&path)?);
}
let mime_type = tree_magic_mini::from_filepath(&path).map(|s| s.to_string());
@ -355,7 +346,7 @@ impl FsStore {
&self,
connection: &UpEndConnection,
path: &Path,
hash: Hash,
hash: UpMultihash,
name_hint: Option<String>,
) -> Result<Address> {
let normalized_path = self.normalize_path(path)?;
@ -390,7 +381,7 @@ impl FsStore {
&self,
connection: &UpEndConnection,
normalized_path: &Path,
hash: Hash,
hash: UpMultihash,
name: Option<String>,
size: i64,
mtime: Option<NaiveDateTime>,
@ -402,7 +393,7 @@ impl FsStore {
.to_str()
.ok_or(anyhow!("Path not UTF-8?!"))?
.to_string(),
hash: (hash.clone()).0,
hash: hash.to_bytes(),
added: NaiveDateTime::from_timestamp_opt(Utc::now().timestamp(), 0).unwrap(),
size,
mtime,
@ -411,14 +402,6 @@ impl FsStore {
let blob_address = Address::Hash(hash);
// Metadata
let type_entry = Entry {
entity: blob_address.clone(),
attribute: String::from(IS_OF_TYPE_ATTR),
value: BLOB_TYPE_ADDR.clone().into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
let size_entry = Entry {
entity: blob_address.clone(),
attribute: FILE_SIZE_KEY.to_string(),
@ -437,7 +420,7 @@ impl FsStore {
let added_entry = Entry {
entity: blob_address.clone(),
attribute: ADDED_ATTR.to_string(),
attribute: ATTR_ADDED.to_string(),
value: (SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
@ -467,7 +450,6 @@ impl FsStore {
// Insert all
let file_count = self.insert_file(new_file)?;
connection.insert_entry_immutable(type_entry)?;
connection.insert_entry_immutable(size_entry)?;
if file_count == 1 {
connection.insert_entry_immutable(added_entry)?;
@ -477,9 +459,9 @@ impl FsStore {
}
let dir_has_entry = Entry {
entity: parent_dir.clone(),
attribute: HIER_HAS_ATTR.to_string(),
value: blob_address.clone().into(),
entity: blob_address.clone(),
attribute: ATTR_IN.to_string(),
value: parent_dir.clone().into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
@ -487,7 +469,7 @@ impl FsStore {
let label_entry = Entry {
entity: blob_address.clone(),
attribute: LABEL_ATTR.to_string(),
attribute: ATTR_LABEL.to_string(),
value: name
.unwrap_or_else(|| filename.as_os_str().to_string_lossy().to_string())
.into(),
@ -498,7 +480,7 @@ impl FsStore {
let alias_entry = Entry {
entity: dir_has_entry_addr,
attribute: ALIAS_KEY.to_string(),
attribute: ATTR_BY.to_string(),
value: label_entry_addr.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
@ -512,7 +494,7 @@ impl FsStore {
debug!(
"Inserting {} ({})...",
&file.path,
Address::Hash(Hash((file.hash).clone()))
Address::Hash(UpMultihash::from_bytes(&file.hash)?)
);
let _lock = self.lock.write().unwrap();
@ -531,7 +513,7 @@ impl FsStore {
.unwrap())
}
fn retrieve_file(&self, obj_hash: &Hash) -> Result<Vec<db::OutFile>> {
fn retrieve_file(&self, obj_hash: &UpMultihash) -> Result<Vec<db::OutFile>> {
use self::db::files::dsl::*;
let _lock = self.lock.read().unwrap();
@ -539,7 +521,7 @@ impl FsStore {
let matches = files
.filter(valid.eq(true))
.filter(hash.eq(&obj_hash.0))
.filter(hash.eq(&obj_hash.to_bytes()))
.load::<db::File>(&conn)?;
let matches = matches
@ -615,7 +597,7 @@ impl From<db::File> for Blob {
}
impl UpStore for FsStore {
fn retrieve(&self, hash: &crate::util::hash::Hash) -> Result<Vec<Blob>, super::StoreError> {
fn retrieve(&self, hash: &UpMultihash) -> Result<Vec<Blob>, super::StoreError> {
Ok(self
.retrieve_file(hash)
.map_err(|e| StoreError::Unknown(e.to_string()))?
@ -638,11 +620,9 @@ impl UpStore for FsStore {
connection: UpEndConnection,
blob: Blob,
name_hint: Option<String>,
) -> Result<Hash, super::StoreError> {
) -> Result<UpMultihash, super::StoreError> {
let file_path = blob.get_file_path();
let hash = file_path
.hash()
.map_err(|e| StoreError::Unknown(e.to_string()))?;
let hash = hash_at_path(file_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
let existing_files = self.retrieve(&hash)?;
@ -745,8 +725,8 @@ impl UpStore for FsStore {
#[cfg(test)]
mod test {
use crate::database::UpEndDatabase;
use crate::util::jobs::JobContainer;
use crate::jobs::JobContainer;
use crate::UpEndDatabase;
use super::*;
use std::fs::File;

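Two related changes meet in the store above: blobs no longer receive a `BLOB` type entry (the `IS`/`TYPE_HAS` vocabulary is dropped), and the store's attributes are instead registered against the address-type sentinels from `upend_base::constants` — `FILE_SIZE` and `FILE_MIME` are declared `OF` `TYPE_HASH_ADDRESS`, i.e. applicable to any content-addressed entity. Expanded out of the macro, that registration is roughly:

use upend_base::{
    addressing::Address,
    constants::{ATTR_OF, TYPE_HASH_ADDRESS},
    entry::Entry,
};

// Declare that an attribute applies to hash-addressed (content-addressed) entities.
fn register_attribute(attribute_name: &str) -> Entry {
    Entry {
        // The attribute itself is an entity, addressed as Address::Attribute(..).
        entity: Address::Attribute(attribute_name.to_string()),
        attribute: String::from(ATTR_OF),
        value: TYPE_HASH_ADDRESS.clone().into(),
        provenance: "SYSTEM INIT".to_string(),
        timestamp: chrono::Utc::now().naive_utc(),
    }
}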

@ -1,7 +1,8 @@
use std::path::{Path, PathBuf};
use super::{UpEndConnection, UpEndDatabase};
use crate::util::{hash::Hash, jobs::JobContainer};
use crate::jobs::JobContainer;
use upend_base::hash::UpMultihash;
pub mod fs;
@ -52,14 +53,14 @@ pub enum UpdatePathOutcome {
}
pub trait UpStore {
fn retrieve(&self, hash: &Hash) -> Result<Vec<Blob>>;
fn retrieve(&self, hash: &UpMultihash) -> Result<Vec<Blob>>;
fn retrieve_all(&self) -> Result<Vec<Blob>>;
fn store(
&self,
connection: UpEndConnection,
blob: Blob,
name_hint: Option<String>,
) -> Result<Hash>;
) -> Result<UpMultihash>;
fn update(
&self,
database: &UpEndDatabase,


@ -1,7 +1,8 @@
pub mod hash;
pub mod jobs;
use std::path::Path;
use tracing::debug;
use filebuffer::FileBuffer;
use tracing::{debug, trace};
use upend_base::hash::UpMultihash;
#[derive(Default)]
pub struct LoggerSink {
@ -32,3 +33,11 @@ impl std::io::Write for LoggerSink {
Ok(())
}
}
pub fn hash_at_path<P: AsRef<Path>>(path: P) -> anyhow::Result<UpMultihash> {
let path = path.as_ref();
trace!("Hashing {:?}...", path);
let fbuffer = FileBuffer::open(path)?;
trace!("Finished hashing {:?}...", path);
Ok(upend_base::hash::sha256hash(&fbuffer)?)
}

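`hash_at_path` replaces the former `Hashable` impl on `Path` (`path.hash()`): it memory-maps the file with `filebuffer` and hashes it into an `UpMultihash` via `sha256hash`. Within `upend_db` (the `util` module is crate-private), usage mirrors the store code above:

use crate::util::hash_at_path;
use upend_base::addressing::Address;

fn address_for_file(path: &std::path::Path) -> anyhow::Result<Address> {
    let hash = hash_at_path(path)?;
    Ok(Address::Hash(hash))
}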

@ -1,199 +0,0 @@
use crate::util::hash::{b58_decode, b58_encode, Hash, Hashable};
use anyhow::{anyhow, Result};
use serde::de::Visitor;
use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::str::FromStr;
use url::Url;
use uuid::Uuid;
#[derive(Clone, Eq, PartialEq, Hash)]
pub enum Address {
Hash(Hash),
Uuid(Uuid),
Attribute(String),
Url(Url),
}
// multihash SHA2-256
const SHA2_256: u64 = 0x12;
// multihash identity
const IDENTITY: u64 = 0x00;
type LargeMultihash = multihash::MultihashGeneric<256>;
impl Address {
pub fn encode(&self) -> Result<Vec<u8>> {
let hash = match self {
Self::Hash(hash) => {
LargeMultihash::wrap(SHA2_256, &hash.0).map_err(|err| anyhow!(err))?
}
Self::Uuid(uuid) => {
LargeMultihash::wrap(IDENTITY, &[vec![b'U'], uuid.as_bytes().to_vec()].concat())
.map_err(|err| anyhow!(err))?
}
Self::Attribute(attribute) => {
LargeMultihash::wrap(IDENTITY, &[&[b'A'], attribute.as_bytes()].concat())
.map_err(|err| anyhow!(err))?
}
Self::Url(url) => {
LargeMultihash::wrap(IDENTITY, &[&[b'X'], url.to_string().as_bytes()].concat())
.map_err(|err| anyhow!(err))?
}
};
Ok(hash.to_bytes())
}
pub fn decode(buffer: &[u8]) -> Result<Self> {
let multihash = LargeMultihash::from_bytes(buffer)
.map_err(|err| anyhow!("Error decoding address: {}", err))?;
match multihash.code() {
SHA2_256 => Ok(Self::Hash(Hash(multihash.digest().to_vec()))),
IDENTITY => {
let digest = multihash.digest().to_owned();
let digest_content: Vec<u8> = digest.clone().into_iter().skip(1).collect();
match digest[0] {
b'U' => Ok(Self::Uuid(uuid::Uuid::from_slice(
digest_content.as_slice(),
)?)),
b'A' => Ok(Self::Attribute(String::from_utf8(digest_content)?)),
b'X' => Ok(Self::Url(Url::parse(&String::from_utf8(digest_content)?)?)),
_ => Err(anyhow!("Error decoding address: Unknown identity marker.")),
}
}
_ => Err(anyhow!(
"Error decoding address: Unknown hash function type."
)),
}
}
}
impl Serialize for Address {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(b58_encode(self.encode().map_err(ser::Error::custom)?).as_str())
}
}
struct AddressVisitor;
impl<'de> Visitor<'de> for AddressVisitor {
type Value = Address;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a valid UpEnd address (hash/UUID) as a multi-hashed string")
}
fn visit_str<E>(self, str: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let bytes = b58_decode(str)
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))?;
Address::decode(bytes.as_ref())
.map_err(|e| de::Error::custom(format!("Error deserializing address: {}", e)))
}
}
impl<'de> Deserialize<'de> for Address {
fn deserialize<D>(deserializer: D) -> Result<Address, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(AddressVisitor)
}
}
impl FromStr for Address {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Address::decode(
b58_decode(s)
.map_err(|e| anyhow!("Error deserializing address: {}", e))?
.as_ref(),
)
}
}
impl std::fmt::Display for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
b58_encode(self.encode().map_err(|_| std::fmt::Error)?)
)
}
}
impl std::fmt::Debug for Address {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Address<{}>: {}",
match self {
Address::Hash(_) => "Hash",
Address::Uuid(_) => "UUID",
Address::Attribute(_) => "Attribute",
Address::Url(_) => "URL",
},
self
)
}
}
pub trait Addressable: Hashable {
fn address(&self) -> Result<Address> {
Ok(Address::Hash(self.hash()?))
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use url::Url;
use uuid::Uuid;
use crate::addressing::Address;
use crate::util::hash::Hash;
#[test]
fn test_hash_codec() -> Result<()> {
let addr = Address::Hash(Hash(vec![1, 2, 3, 4, 5]));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_uuid_codec() -> Result<()> {
let addr = Address::Uuid(Uuid::new_v4());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_attribute_codec() -> Result<()> {
let addr = Address::Attribute(String::from("ATTRIBUTE"));
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
#[test]
fn test_url_codec() -> Result<()> {
let addr = Address::Url(Url::parse("https://upend.dev/an/url/that/is/particularly/long/because/multihash/used/to/have/a/small/limit").unwrap());
let encoded = addr.encode()?;
let decoded = Address::decode(&encoded)?;
assert_eq!(addr, decoded);
Ok(())
}
}


@ -1,8 +0,0 @@
#[derive(Clone, Debug)]
pub struct UpEndConfig {
pub vault_name: Option<String>,
pub desktop_enabled: bool,
pub trust_executables: bool,
pub secret: String,
pub key: Option<String>,
}


@ -1,28 +0,0 @@
use crate::addressing::Address;
use crate::database::entry::InvariantEntry;
pub const TYPE_TYPE_VAL: &str = "TYPE";
pub const TYPE_BASE_ATTR: &str = "TYPE";
pub const TYPE_HAS_ATTR: &str = "TYPE_HAS";
// pub const TYPE_ATTR_REQUIRED: &str = "TYPE_ATTR_REQUIRED";
pub const IS_OF_TYPE_ATTR: &str = "IS";
pub const HIER_TYPE_VAL: &str = "HIER";
pub const HIER_HAS_ATTR: &str = "HAS";
pub const LABEL_ATTR: &str = "LBL";
pub const ADDED_ATTR: &str = "ADDED";
lazy_static! {
pub static ref TYPE_INVARIANT: InvariantEntry = InvariantEntry {
attribute: String::from(TYPE_BASE_ATTR),
value: TYPE_TYPE_VAL.into(),
};
pub static ref TYPE_ADDR: Address = TYPE_INVARIANT.entity().unwrap();
pub static ref HIER_INVARIANT: InvariantEntry = InvariantEntry {
attribute: String::from(TYPE_BASE_ATTR),
value: HIER_TYPE_VAL.into(),
};
pub static ref HIER_ADDR: Address = HIER_INVARIANT.entity().unwrap();
}


@ -1,14 +0,0 @@
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
extern crate self as upend;
pub mod database;
pub mod util;
pub mod addressing;
pub mod common;
pub mod config;


@ -1,88 +0,0 @@
use crate::addressing::Address;
use anyhow::Result;
use diesel::backend::Backend;
use diesel::deserialize::FromSql;
use diesel::sqlite::Sqlite;
use diesel::{deserialize, sql_types};
use filebuffer::FileBuffer;
use multihash::Hasher;
use serde::{ser, Serialize, Serializer};
use std::path::{Path};
use tracing::trace;
#[derive(Debug, Clone, Eq, PartialEq, FromSqlRow, Hash)]
pub struct Hash(pub Vec<u8>);
impl AsRef<[u8]> for Hash {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl FromSql<sql_types::Binary, Sqlite> for Hash {
fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
Ok(Hash(Vec::from(not_none!(bytes).read_blob())))
}
}
impl Serialize for Hash {
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
where
S: Serializer,
{
serializer.serialize_str(
b58_encode(
Address::Hash(self.clone())
.encode()
.map_err(ser::Error::custom)?,
)
.as_str(),
)
}
}
pub trait Hashable {
fn hash(&self) -> Result<Hash>;
}
impl Hashable for Path {
fn hash(self: &Path) -> Result<Hash> {
trace!("Hashing {:?}...", self);
let fbuffer = FileBuffer::open(self)?;
trace!("Finished hashing {:?}...", self);
Ok(hash(&fbuffer))
}
}
pub fn hash<T: AsRef<[u8]>>(input: T) -> Hash {
let mut hasher = multihash::Sha2_256::default();
hasher.update(input.as_ref());
Hash(Vec::from(hasher.finalize()))
}
pub fn b58_encode<T: AsRef<[u8]>>(vec: T) -> String {
multibase::encode(multibase::Base::Base58Btc, vec.as_ref())
}
pub fn b58_decode<T: AsRef<str>>(input: T) -> Result<Vec<u8>> {
let input = input.as_ref();
let (_base, data) = multibase::decode(input)?;
Ok(data)
}
#[cfg(test)]
mod tests {
use crate::util::hash::{b58_decode, b58_encode};
#[test]
fn test_encode_decode() {
let content = "Hello, World!".as_bytes();
let encoded = b58_encode(content);
let decoded = b58_decode(encoded);
assert!(decoded.is_ok());
assert_eq!(content, decoded.unwrap());
}
}


@ -1,5 +1,6 @@
*.js
node_modules
*.wasm
.pnp.*
.yarn/*


@ -1,6 +1,7 @@
import LRU from "lru-cache";
import { UpListing, UpObject } from ".";
import type {
ADDRESS_TYPE,
Address,
AttributeListingResult,
EntityListing,
@ -12,23 +13,42 @@ import type {
StoreInfo,
VaultInfo,
} from "./types";
import init_wasm from "upend_wasm";
import {
AddressComponents,
AddressTypeConstants,
addr_to_components,
components_to_addr,
} from "upend_wasm";
import debug from "debug";
const dbg = debug("upend:api");
export { AddressComponents };
export class UpEndApi {
private instanceUrl = "";
private wasmPath: string | undefined;
private wasmInitialized = false;
private addressTypeConstants: AddressTypeConstants | undefined = undefined;
private queryOnceLRU = new LRU<string, UpListing>({ max: 128 });
private inFlightRequests: { [key: string]: Promise<UpListing> | null } = {};
constructor(instanceUrl = "") {
constructor(instanceUrl = "", wasmPath?: string) {
this.setInstanceUrl(instanceUrl);
if (wasmPath) {
this.setWasmPath(wasmPath);
}
}
public setInstanceUrl(apiUrl: string) {
this.instanceUrl = apiUrl.replace(/\/+$/g, "");
}
public setWasmPath(wasmPath: string) {
this.wasmPath = wasmPath;
}
public get apiUrl() {
return this.instanceUrl + "/api";
}
@ -186,24 +206,67 @@ export class UpEndApi {
}
public async getAddress(
input: { attribute: string } | { url: string } | { urlContent: string }
input:
| { attribute: string }
| { url: string }
| { urlContent: string }
| ADDRESS_TYPE
): Promise<string> {
let response;
if ("attribute" in input) {
response = await fetch(
`${this.apiUrl}/address?attribute=${input.attribute}`
);
} else if ("url" in input) {
response = await fetch(`${this.apiUrl}/address?url=${input.url}`);
} else if ("urlContent" in input) {
response = await fetch(
`${this.apiUrl}/address?url_content=${input.urlContent}`
);
if (typeof input === "string") {
try {
if (!this.addressTypeConstants) {
await this.initWasm();
this.addressTypeConstants = new AddressTypeConstants();
}
return this.addressTypeConstants[input];
} catch (err) {
console.warn(err);
}
response = await fetch(`${this.apiUrl}/address?type=${input}`);
} else {
throw new Error("Input cannot be empty.");
if ("attribute" in input) {
response = await fetch(
`${this.apiUrl}/address?attribute=${input.attribute}`
);
} else if ("url" in input) {
response = await fetch(`${this.apiUrl}/address?url=${input.url}`);
} else if ("urlContent" in input) {
response = await fetch(
`${this.apiUrl}/address?url_content=${input.urlContent}`
);
} else {
throw new Error("Input cannot be empty.");
}
}
const result = await response.json();
dbg("Address for %o = %s", input, result);
return result;
}
public async addressToComponents(
address: string
): Promise<AddressComponents> {
await this.initWasm();
return addr_to_components(address);
}
public async componentsToAddress(
components: AddressComponents
): Promise<string> {
await this.initWasm();
return components_to_addr(components);
}
private async initWasm(): Promise<void> {
if (!this.wasmInitialized) {
if (!this.wasmPath) {
throw new Error(
"Path to WASM file not specified, cannot initialize WASM extensions."
);
}
await init_wasm(this.wasmPath);
this.wasmInitialized = true;
}
}
}


@ -1,5 +1,31 @@
export const BLOB_TYPE_ADDR =
"zQmWvjFhyhxpCxZy89VzyEe1mXqk8cX9X5b3xewbM3bf7CB";
/**
 * Attribute denoting a (hierarchical) relation, in the "upwards" direction. For example, a file `IN` a group, an image `IN` photos, etc.
*/
export const ATTR_IN = "IN";
export const GROUP_TYPE_ADDR =
"zQmZ523exNAbYdsMsoxfNcwTjxkUWqCX6GZjVqJgCoG54Wv";
/**
 * Attribute denoting that an entry qualifies a given (hierarchical) relation.
 * For example, a data blob may have a label entry; to scope that label to the blob's membership in a particular group, the label entry and the group's `IN` entry are linked with `BY`.
*/
export const ATTR_BY = "BY";
/**
* Attribute denoting that an attribute belongs to a given "tagging" entity. If an entity belongs to (`IN`) a "tagging" entity, it is expected to have attributes that are `OF` that entity.
*/
export const ATTR_OF = "OF";
/**
* Attribute denoting a human readable label.
*/
export const ATTR_LABEL = "LBL";
/**
* Attribute denoting the date & time an entity was noted in the database.
* (TODO: This info can be trivially derived from existing entry timestamps, while at the same time the "Introduction problem" is still open.)
*/
export const ATTR_ADDED = "ADDED";
/**
* Attribute for cross-vault unambiguous referencing of non-hashable (e.g. UUID) entities.
*/
export const ATTR_KEY = "KEY";

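In terms of the Rust `Entry` type used throughout this change, the `BY` qualification links the address of an `IN` (placement) entry to the address of the label entry it scopes, as the file store does for freshly added blobs. A sketch, assuming a placement entry and a label entry already exist:

use upend_base::{addressing::Addressable, constants::ATTR_BY, entry::Entry};

// Given `placement` (blob IN group) and `label` (blob LBL "name"),
// record that the label applies by virtue of that particular placement.
fn by_entry(placement: &Entry, label: &Entry) -> anyhow::Result<Entry> {
    Ok(Entry {
        entity: placement.address()?,
        attribute: ATTR_BY.to_string(),
        value: label.address()?.into(),
        provenance: "SYSTEM INIT".to_string(),
        timestamp: chrono::Utc::now().naive_utc(),
    })
}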

@ -74,6 +74,15 @@ export class UpObject {
result[entry.attribute].push(entry);
});
this.backlinks.forEach((entry) => {
const attribute = `~${entry.attribute}`;
if (!result[attribute]) {
result[attribute] = [];
}
result[attribute].push(entry);
});
return result;
}


@ -3,7 +3,7 @@
"version": "0.0.1",
"description": "Client library to interact with the UpEnd system.",
"scripts": {
"build": "tsc --build --listFiles --verbose",
"build": "tsc --build --listFiles --verbose && cp -v node_modules/upend_wasm/*.wasm .",
"test": "ava",
"lint": "eslint ."
},
@ -20,6 +20,7 @@
"packageManager": "yarn@3.1.1",
"dependencies": {
"debug": "^4.3.4",
"lru-cache": "^7.0.0"
"lru-cache": "^7.0.0",
"upend_wasm": "../upend_wasm/pkg"
}
}


@ -1,4 +1,5 @@
export type Address = string;
export type ADDRESS_TYPE = "Hash" | "Uuid" | "Attribute" | "Url";
export type VALUE_TYPE = "Address" | "String" | "Number" | "Invalid";
/**


@ -3632,9 +3632,17 @@ __metadata:
eslint: ^8.7.0
lru-cache: ^7.0.0
typescript: ^4.4.4
upend_wasm: ../upend_wasm/pkg
languageName: unknown
linkType: soft
"upend_wasm@file:../upend_wasm/pkg::locator=upend%40workspace%3A.":
version: 0.1.0
resolution: "upend_wasm@file:../upend_wasm/pkg#../upend_wasm/pkg::hash=e06eb3&locator=upend%40workspace%3A."
checksum: 77a4c5daa46c42340ac1d1121c60aba6c0d6aad9bc6b0462dea799b2fbc052770dcfb34eff00198e0ca6c7b20207b954f8c2ca18679d14750030a5ca1b96b8a9
languageName: node
linkType: hard
"uri-js@npm:^4.2.2":
version: 4.4.1
resolution: "uri-js@npm:4.4.1"

tools/upend_wasm/Cargo.toml (new file, 12 lines)

@ -0,0 +1,12 @@
[package]
name = "upend_wasm"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
wasm-bindgen = "0.2.87"
upend-base = { path = "../../base", features = ["wasm"] }
wee_alloc = "0.4.5"

tools/upend_wasm/src/lib.rs (new file, 76 lines)

@ -0,0 +1,76 @@
use upend_base::{
addressing::{Address, AddressComponents},
constants,
error::UpEndError,
};
use wasm_bindgen::prelude::*;
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
#[derive(Debug, Clone)]
pub struct WasmError(String);
impl std::fmt::Display for WasmError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::error::Error for WasmError {}
#[allow(clippy::from_over_into)]
impl Into<JsValue> for WasmError {
fn into(self) -> JsValue {
JsValue::from_str(&self.0)
}
}
#[wasm_bindgen]
pub fn addr_to_components(address: String) -> Result<AddressComponents, WasmError> {
let address: Address = address
.parse()
.map_err(|e: UpEndError| WasmError(e.to_string()))?;
Ok(address.as_components())
}
#[wasm_bindgen]
pub fn components_to_addr(components: AddressComponents) -> Result<String, WasmError> {
let address =
Address::from_components(components).map_err(|e: UpEndError| WasmError(e.to_string()))?;
Ok(address.to_string())
}
#[wasm_bindgen]
pub struct AddressTypeConstants {}
#[wasm_bindgen]
#[allow(non_snake_case, clippy::new_without_default)]
impl AddressTypeConstants {
#[wasm_bindgen(constructor)]
pub fn new() -> Self {
AddressTypeConstants {}
}
#[wasm_bindgen(getter)]
pub fn Hash(&self) -> String {
constants::TYPE_HASH_ADDRESS.to_string()
}
#[wasm_bindgen(getter)]
pub fn Uuid(&self) -> String {
constants::TYPE_UUID_ADDRESS.to_string()
}
#[wasm_bindgen(getter)]
pub fn Attribute(&self) -> String {
constants::TYPE_ATTRIBUTE_ADDRESS.to_string()
}
#[wasm_bindgen(getter)]
pub fn Url(&self) -> String {
constants::TYPE_URL_ADDRESS.to_string()
}
}
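A hedged sketch of calling these bindings from TypeScript. The function and getter names follow the Rust exports above; the flat import and the absence of an explicit init step are assumptions about how wasm-pack packages `upend_wasm` for the bundler, and the address literal is simply the one that appears earlier in this diff:

import {
  addr_to_components,
  components_to_addr,
  AddressTypeConstants,
} from "upend_wasm";

// Round-trip an address through its components.
const components = addr_to_components("zQmZ523exNAbYdsMsoxfNcwTjxkUWqCX6GZjVqJgCoG54Wv");
const roundTripped = components_to_addr(components);

// The four type addresses are exposed as getters.
const types = new AddressTypeConstants();
console.log(types.Hash, types.Uuid, types.Attribute, types.Url);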


@ -4364,7 +4364,19 @@ update-notifier@6.0.2:
upend@../tools/upend_js:
version "0.0.1"
dependencies:
debug "^4.3.4"
lru-cache "^7.0.0"
upend_wasm "file:../../../.cache/yarn/v6/npm-upend-0.0.1-97749447-daec-4450-84bd-3a9efd2d337b-1687955082927/node_modules/upend_wasm/pkg"
"upend@file:../tools/upend_js":
version "0.0.1"
dependencies:
debug "^4.3.4"
lru-cache "^7.0.0"
upend_wasm "file:../../../.cache/yarn/v6/npm-upend-0.0.1-b03bbb87-3597-4b59-9114-fd6071909861-1687955082756/node_modules/upend_wasm/pkg"
"upend_wasm@file:../tools/upend_wasm/pkg":
version "0.1.0"
uri-js@^4.2.2:
version "4.4.1"


@ -1,148 +0,0 @@
<script lang="ts">
import UpLink from "./display/UpLink.svelte";
import type { Component, UpType, Widget } from "../lib/types";
import EntryList from "./widgets/EntryList.svelte";
import type { UpEntry } from "upend";
import Icon from "./utils/Icon.svelte";
import IconButton from "./utils/IconButton.svelte";
import { createEventDispatcher } from "svelte";
const dispatch = createEventDispatcher();
import { i18n } from "../i18n";
export let entries: UpEntry[];
export let type: UpType | undefined = undefined;
export let widgets: Widget[] | undefined = undefined;
export let title: string | undefined = undefined;
export let editable = false;
export let reverse = false;
export let initialWidget: string | undefined = undefined;
export let highlighted = false;
let currentWidget: string | undefined;
function switchWidget(widget: string) {
currentWidget = widget;
dispatch("widgetSwitched", currentWidget);
}
let availableWidgets: Widget[] = [];
$: {
availableWidgets = [
{
name: "entrylist",
icon: "table",
components: [
{
component: EntryList,
},
],
},
];
if (type?.widgetInfo.length > 0) {
availableWidgets = [...type.widgetInfo, ...availableWidgets];
}
if (widgets?.length) {
availableWidgets = [...widgets, ...availableWidgets];
}
if (availableWidgets.map((w) => w.name).includes(initialWidget)) {
currentWidget = initialWidget;
} else {
currentWidget = availableWidgets[0].name;
}
}
let components: Component[] = [];
$: {
components = availableWidgets.find(
(w) => w.name === currentWidget
).components;
}
</script>
<section class="attribute-view labelborder" class:highlighted>
<header>
<h3>
{#if !title && type?.address}
<UpLink to={{ entity: type.address }}>
{#if type.icon}
<div class="icon">
<Icon name={type.icon} />
</div>
{/if}
{#if type.name != "HIER"}
{type.label || type.name || "???"}
{:else}
{$i18n.t("Members")}
{/if}
</UpLink>
{:else}
{title || ""}
{/if}
</h3>
{#if currentWidget && (availableWidgets.length > 1 || editable)}
<div class="views">
{#each availableWidgets as widget (widget.name)}
<IconButton
name={widget.icon || "question-diamond"}
active={widget.name === currentWidget}
--active-color="var(--foreground)"
on:click={() => switchWidget(widget.name)}
/>
{/each}
</div>
{/if}
</header>
<div class="content">
{#if !reverse}
{#each components as component}
<svelte:component
this={component.component}
{...(typeof component.props === "function"
? component.props(entries)
: component.props) || {}}
{entries}
{editable}
{type}
on:change
/>
{/each}
{:else}
<!-- shut up svelte check -->
<svelte:component
this={EntryList}
columns="entity, attribute"
{entries}
/>
{/if}
</div>
</section>
<style scoped lang="scss">
@use "./util";
section h3 {
transition: text-shadow 0.2s;
}
section.highlighted h3 {
text-shadow: #cb4b16 0 0 0.5em;
}
.icon {
display: inline-block;
font-size: 1.25em;
margin-top: -0.3em;
position: relative;
bottom: -2px;
}
.views {
display: flex;
right: 1ex;
font-size: 18px;
}
</style>


@ -0,0 +1,139 @@
<script lang="ts" context="module">
export interface WidgetComponent {
component: ComponentType;
props: { [key: string]: unknown };
}
export interface Widget {
name: string;
icon?: string;
components: (entries: UpEntry[]) => Array<WidgetComponent>;
}
</script>
<script lang="ts">
import EntryList from "./widgets/EntryList.svelte";
import type { UpEntry } from "upend";
import Icon from "./utils/Icon.svelte";
import IconButton from "./utils/IconButton.svelte";
import { createEventDispatcher, type ComponentType } from "svelte";
const dispatch = createEventDispatcher();
export let entries: UpEntry[];
export let widgets: Widget[] | undefined = undefined;
export let initialWidget: string | undefined = undefined;
export let title: string | undefined = undefined;
export let icon: string | undefined = undefined;
export let highlighted = false;
export let editable = false;
let currentWidget: string | undefined;
function switchWidget(widget: string) {
currentWidget = widget;
dispatch("widgetSwitched", currentWidget);
}
let availableWidgets: Widget[] = [];
$: {
availableWidgets = [
{
name: "Entry List",
icon: "table",
components: (entries) => [
{
component: EntryList,
props: { entries, columns: "entity, attribute, value" },
},
],
},
];
if (widgets?.length) {
availableWidgets = [...widgets, ...availableWidgets];
}
if (availableWidgets.map((w) => w.name).includes(initialWidget)) {
currentWidget = initialWidget;
} else {
currentWidget = availableWidgets[0].name;
}
}
let components: WidgetComponent[] = [];
$: {
components = availableWidgets
.find((w) => w.name === currentWidget)
.components(entries);
}
</script>
<section class="entry-view labelborder" class:highlighted>
<header>
<h3>
{#if icon}
<div class="icon">
<Icon name={icon} />
</div>
{/if}
{title || ""}
</h3>
{#if currentWidget && (availableWidgets.length > 1 || editable)}
<div class="views">
{#each availableWidgets as widget (widget.name)}
<IconButton
name={widget.icon || "cube"}
title={widget.name}
active={widget.name === currentWidget}
--active-color="var(--foreground)"
on:click={() => switchWidget(widget.name)}>{widget.name}</IconButton
>
{/each}
</div>
{/if}
</header>
<div class="content">
{#each components as component}
<svelte:component
this={component.component}
{...component.props || {}}
{editable}
on:change
/>
{/each}
</div>
</section>
<style scoped lang="scss">
@use "./util";
section.entry-view {
header {
margin-bottom: 0.5rem;
}
.icon {
display: inline-block;
font-size: 1.25em;
margin-top: -0.3em;
position: relative;
bottom: -2px;
}
h3 {
transition: text-shadow 0.2s;
}
&.highlighted h3 {
text-shadow: #cb4b16 0 0 0.5em;
}
}
.views {
display: flex;
right: 1ex;
transform: translateY(-25%);
font-size: 18px;
}
</style>
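For reference, a sketch of defining an extra `Widget` against the module-context interface above; `Gallery` and its `entities`/`thumbnails` props are taken from the other widget definitions in this diff:

import type { Widget } from "../components/EntryView.svelte";
import Gallery from "../components/widgets/Gallery.svelte";

const galleryWidget: Widget = {
  name: "Gallery",
  icon: "image",
  // Each widget maps the entries it receives onto components plus their props.
  components: (entries) => [
    {
      component: Gallery,
      props: {
        entities: entries.map((e) => e.entity),
        thumbnails: true,
      },
    },
  ],
};

It would then be passed in via the `widgets` prop, e.g. `<EntryView {entries} widgets={[galleryWidget]} />`.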


@ -1,25 +1,25 @@
<script lang="ts">
import AttributeView from "./AttributeView.svelte";
import { query, useEntity } from "../lib/entity";
import EntryView, { type Widget } from "./EntryView.svelte";
import { useEntity } from "../lib/entity";
import UpObject from "./display/UpObject.svelte";
import { UpType } from "../lib/types";
import { createEventDispatcher, setContext } from "svelte";
import { writable } from "svelte/store";
import { derived, writable, type Readable } from "svelte/store";
import type { UpEntry } from "upend";
import Spinner from "./utils/Spinner.svelte";
import NotesEditor from "./utils/NotesEditor.svelte";
import type { AttributeChange } from "../types/base";
import Selector from "./utils/Selector.svelte";
import type { IValue } from "upend/types";
import type { ADDRESS_TYPE, EntityInfo, IValue } from "upend/types";
import IconButton from "./utils/IconButton.svelte";
import type { BrowseContext } from "../util/browse";
import { Link, useParams } from "svelte-navigator";
import { GROUP_TYPE_ADDR } from "upend/constants";
import Icon from "./utils/Icon.svelte";
import BlobViewer from "./display/BlobViewer.svelte";
import { i18n } from "../i18n";
import EntryList from "./widgets/EntryList.svelte";
import api from "../lib/api";
import Gallery from "./widgets/Gallery.svelte";
import { ATTR_IN, ATTR_LABEL, ATTR_KEY, ATTR_OF } from "upend/constants";
const dispatch = createEventDispatcher();
const params = useParams();
@ -45,34 +45,92 @@
$: ({ entity, entityInfo, error, revalidate } = useEntity(address));
$: allTypeAddresses = ($entity?.attr["IS"] || []).map((attr) => attr.value.c);
$: allTypes = derived(
entityInfo,
($entityInfo, set) => {
getAllTypes($entityInfo).then((allTypes) => {
set(allTypes);
});
},
{}
) as Readable<{
[key: string]: {
labels: string[];
attributes: string[];
};
}>;
$: allTypeEntries = query(
`(matches (in ${allTypeAddresses.map((addr) => `@${addr}`).join(" ")}) ? ?)`
).result;
async function getAllTypes(entityInfo: EntityInfo) {
const allTypes = {};
let allTypes: { [key: string]: UpType } = {};
$: {
allTypes = {};
($allTypeEntries?.entries || []).forEach((entry) => {
if (allTypes[entry.entity] === undefined) {
allTypes[entry.entity] = new UpType(entry.entity);
}
if (!entityInfo) {
return {};
}
switch (entry.attribute) {
case "TYPE":
allTypes[entry.entity].name = String(entry.value.c);
break;
case "LBL":
allTypes[entry.entity].label = String(entry.value.c);
break;
case "TYPE_HAS":
allTypes[entry.entity].attributes.push(String(entry.value.c));
break;
}
});
const typeAddresses: string[] = [
await api.getAddress(entityInfo.t),
...$entity.attr[ATTR_IN].map((e) => e.value.c as string),
];
const typeAddressesIn = typeAddresses.map((addr) => `@${addr}`).join(" ");
allTypes = allTypes;
const labelsQuery = await api.query(
`(matches (in ${typeAddressesIn}) "${ATTR_LABEL}" ?)`
);
await Promise.all(
typeAddresses.map(async (address) => {
let labels = labelsQuery.getObject(address).identify();
let typeLabel: string | undefined;
await Promise.all(
(["Hash", "Uuid", "Attribute", "Url"] as ADDRESS_TYPE[]).map(
async (t) => {
if ((await api.getAddress(t)) == address) {
labels.push(`[${t}]`);
}
}
)
);
if (typeLabel) {
labels.unshift(typeLabel);
}
if (!labels.length) {
labels.push(address);
}
allTypes[address] = {
labels,
attributes: [],
};
})
);
const attributes = await api.query(
`(matches ? "${ATTR_OF}" (in ${typeAddressesIn}))`
);
await Promise.all(
typeAddresses.map(async (address) => {
allTypes[address].attributes = (
await Promise.all(
(attributes.getObject(address).attr[`~${ATTR_OF}`] || []).map(
async (e) => {
try {
const { t, c } = await api.addressToComponents(e.entity);
if (t == "Attribute") {
return c;
}
} catch (err) {
console.error(err);
return false;
}
}
)
)
).filter(Boolean);
})
);
return allTypes;
}
let typedAttributes = {} as { [key: string]: UpEntry[] };
@ -83,7 +141,7 @@
untypedAttributes = [];
($entity?.attributes || []).forEach((entry) => {
const entryTypes = Object.entries(allTypes).filter(([_, t]) =>
const entryTypes = Object.entries($allTypes || {}).filter(([_, t]) =>
t.attributes.includes(entry.attribute)
);
if (entryTypes.length > 0) {
@ -105,8 +163,9 @@
$: filteredUntypedAttributes = untypedAttributes.filter(
(entry) =>
![
"IS",
"LBL",
ATTR_LABEL,
ATTR_IN,
ATTR_KEY,
"NOTE",
"LAST_VISITED",
"NUM_VISITED",
@ -122,13 +181,14 @@
(editable
? $entity?.backlinks
: $entity?.backlinks.filter(
(entry) => !["HAS"].includes(entry.attribute)
(entry) => ![ATTR_IN].includes(entry.attribute)
)) || [];
$: groups = ($entity?.backlinks || [])
.filter((e) => e.attribute === "HAS")
.map((e) => [e.address, e.entity])
.sort(); // TODO
$: groups = ($entity?.attr[ATTR_IN] || []).map((e) => [
e.value.c as string,
e.address,
]);
$: tagged = $entity?.attr[`~${ATTR_IN}`] || [];
let attributesUsed: UpEntry[] = [];
$: {
@ -175,19 +235,11 @@
}
await api.putEntry([
{
entity: String(groupToAdd.c),
attribute: "HAS",
entity: address,
attribute: ATTR_IN,
value: {
t: "Address",
c: address,
},
},
{
entity: String(groupToAdd.c),
attribute: "IS",
value: {
t: "Address",
c: GROUP_TYPE_ADDR,
c: String(groupToAdd.c),
},
},
]);
@ -219,6 +271,66 @@
);
}
const attributeWidgets: Widget[] = [
{
name: "List",
icon: "list-check",
components: (entries) => [
{
component: EntryList,
props: {
entries,
columns: "attribute, value",
},
},
],
},
{
name: "Gallery",
icon: "image",
components: (entries) => [
{
component: Gallery,
props: {
entities: entries
.filter((e) => e.value.t == "Address")
.map((e) => e.value.c),
thumbnails: true,
},
},
],
},
];
const taggedWidgets: Widget[] = [
{
name: "List",
icon: "list-check",
components: (entries) => [
{
component: Gallery,
props: {
entities: entries.map((e) => e.entity),
thumbnails: false,
},
},
],
},
{
name: "Gallery",
icon: "image",
components: (entries) => [
{
component: Gallery,
props: {
entities: entries.map((e) => e.entity),
thumbnails: true,
},
},
],
},
];
$: entity.subscribe(async (object) => {
if (object && object.listing.entries.length) {
await api.putEntityAttribute(
@ -267,29 +379,21 @@
</div>
<NotesEditor {address} {editable} on:change={onChange} />
{#if !$error}
{#if Object.keys(allTypes).length || groups.length}
<section class="tags labelborder">
<header><h3>{$i18n.t("Tags")}</h3></header>
{#if groups.length}
<section class="groups labelborder">
<header><h3>{$i18n.t("Groups")}</h3></header>
<div class="content">
{#each Object.values(allTypes) as type}
{#each groups as [groupAddress, groupEntryAddress]}
<div
class="tag type"
on:mouseenter={() => (highlightedType = type.address)}
class="tag"
on:mouseenter={() => (highlightedType = groupAddress)}
on:mouseleave={() => (highlightedType = undefined)}
>
<UpObject address={type.address} link />
{#if editable}
<IconButton name="x-circle" />
{/if}
</div>
{/each}
{#each groups as [entryAddress, address]}
<div class="tag group">
<UpObject {address} link />
<UpObject address={groupAddress} link />
{#if editable}
<IconButton
name="x-circle"
on:click={() => removeGroup(entryAddress)}
on:click={() => removeGroup(groupEntryAddress)}
/>
{/if}
</div>
@ -308,70 +412,73 @@
</div>
</section>
{/if}
{#if Boolean($allTypeEntries)}
<div class="attributes">
{#each Object.entries(typedAttributes) as [typeAddr, entries] (typeAddr)}
<AttributeView
{entries}
type={allTypes[typeAddr]}
{editable}
on:change={onChange}
initialWidget={String($entity.get("LAST_ATTRIBUTE_WIDGET"))}
on:widgetSwitched={onAttributeWidgetSwitch}
highlighted={highlightedType == typeAddr}
/>
{/each}
<div class="attributes">
{#each Object.entries(typedAttributes) as [typeAddr, entries] (typeAddr)}
<EntryView
{entries}
{editable}
widgets={attributeWidgets}
on:change={onChange}
highlighted={highlightedType == typeAddr}
title={$allTypes[typeAddr].labels.join(" | ")}
/>
{/each}
{#if currentUntypedAttributes.length > 0 || editable}
<AttributeView
title={$i18n.t("Attributes")}
{editable}
entries={currentUntypedAttributes}
on:change={onChange}
/>
{/if}
{#if currentBacklinks.length > 0}
<AttributeView
title={`${$i18n.t("Referred to")} (${
$entity.backlinks.length
})`}
entries={currentBacklinks}
reverse
on:change={onChange}
/>
{/if}
{#if $entityInfo?.t === "Attribute"}
<div class="buttons">
<div class="button">
<Link to="/surface?x={$entityInfo.c}">
{$i18n.t("Surface view")}
</Link>
</div>
</div>
<section class="labelborder">
<header>
<h3>{$i18n.t("Used")} ({attributesUsed.length})</h3>
</header>
<EntryList
columns="entity,value"
columnWidths={["auto", "33%"]}
entries={attributesUsed}
orderByValue
/>
</section>
{/if}
</div>
{#if editable}
<div class="button" on:click={deleteObject}>
<Icon name="trash" />
</div>
{#if currentUntypedAttributes.length > 0 || editable}
<EntryView
title={$i18n.t("Attributes")}
{editable}
widgets={attributeWidgets}
entries={currentUntypedAttributes}
on:change={onChange}
/>
{/if}
{:else}
<Spinner centered />
{#if currentBacklinks.length > 0}
<EntryView
title={`${$i18n.t("Referred to")} (${currentBacklinks.length})`}
entries={currentBacklinks}
on:change={onChange}
/>
{/if}
{#if tagged.length > 0}
<EntryView
title={`${$i18n.t("Members")}`}
widgets={taggedWidgets}
entries={tagged}
on:change={onChange}
/>
{/if}
{#if $entityInfo?.t === "Attribute"}
<div class="buttons">
<div class="button">
<Link to="/surface?x={$entityInfo.c}">
{$i18n.t("Surface view")}
</Link>
</div>
</div>
<section class="labelborder">
<header>
<h3>{$i18n.t("Used")} ({attributesUsed.length})</h3>
</header>
<EntryList
columns="entity,value"
columnWidths={["auto", "33%"]}
entries={attributesUsed}
orderByValue
/>
</section>
{/if}
</div>
{#if editable}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="button" on:click={deleteObject}>
<Icon name="trash" />
</div>
{/if}
{:else}
<div class="error">
@ -425,7 +532,7 @@
min-height: 0;
}
.tags {
.groups {
margin: 0.25rem 0;
.content {
display: flex;

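The type-resolution logic above is built on `api.query` and the parenthesised matches syntax; a minimal isolated sketch of the same pattern, assuming the query and result methods used in the diff (the address argument is a placeholder):

import { ATTR_LABEL, ATTR_OF } from "upend/constants";
import api from "../lib/api";

async function describeType(typeAddress: string) {
  const addr = `@${typeAddress}`;
  // Labels of the type entity itself.
  const labelQuery = await api.query(`(matches (in ${addr}) "${ATTR_LABEL}" ?)`);
  // Entities declared to be attributes OF this type.
  const attributeQuery = await api.query(`(matches ? "${ATTR_OF}" (in ${addr}))`);
  return {
    labels: labelQuery.getObject(typeAddress).identify(),
    attributeEntities: attributeQuery.entries.map((e) => e.entity),
  };
}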

@ -9,6 +9,8 @@
import { createEventDispatcher } from "svelte";
import { getTypes } from "../../util/mediatypes";
import { formatDuration } from "../../util/fragments/time";
import { concurrentImage } from "../imageQueue";
import { ATTR_IN } from "upend/constants";
const dispatch = createEventDispatcher();
export let address: string;
@ -36,12 +38,16 @@
let failedChildren: string[] = [];
let loadedChildren: string[] = [];
$: groupChildren = ($entity?.attr["HAS"] || [])
.map((e) => String(e.value.c))
$: groupChildren = $entity?.backlinks
.filter((e) => e.attribute === ATTR_IN)
.map((e) => String(e.entity))
.filter(
(addr) =>
!failedChildren
.slice(0, ($entity?.attr["HAS"] || []).length - 4)
.slice(
0,
$entity?.backlinks.filter((e) => e.attribute === ATTR_IN).length - 4
)
.includes(addr)
)
.slice(0, 4);
@ -94,8 +100,8 @@
/>
{:else if types.web}
<img
src={String($entity?.get("OG_IMAGE"))}
alt="OpenGraph image for {$entityInfo?.t == 'Url' && $entityInfo?.c}"
use:concurrentImage={String($entity?.get("OG_IMAGE"))}
on:load={() => (loaded = address)}
on:error={() => (handled = false)}
/>
@ -108,9 +114,8 @@
{:else if types.audio}
<div class="audiopreview image">
<img
src="{api.apiUrl}/thumb/{address}?mime=audio"
alt="Thumbnail for {address}..."
loading="lazy"
use:concurrentImage={`${api.apiUrl}/thumb/${address}?mime=audio`}
on:load={() => (loaded = address)}
on:error={() => (handled = false)}
/>
@ -129,11 +134,10 @@
{:else}
<div class="image" class:loaded={loaded == address || !handled}>
<img
src="{api.apiUrl}/{types.mimeType?.includes('svg+xml')
? 'raw'
: 'thumb'}/{address}?size=512&quality=75"
alt="Thumbnail for {address}..."
loading="lazy"
use:concurrentImage={`${api.apiUrl}/${
types.mimeType?.includes("svg+xml") ? "raw" : "thumb"
}/${address}?size=512&quality=75`}
on:load={() => (loaded = address)}
on:error={() => (handled = false)}
/>


@ -1,6 +1,6 @@
<script lang="ts">
import { createEventDispatcher, getContext } from "svelte";
import { BLOB_TYPE_ADDR } from "upend/constants";
import HashBadge from "./HashBadge.svelte";
import Ellipsis from "../utils/Ellipsis.svelte";
import UpLink from "./UpLink.svelte";
@ -10,7 +10,6 @@
import { vaultInfo } from "../../util/info";
import type { BrowseContext } from "../../util/browse";
import type { UpObject } from "upend";
import { i18n } from "../../i18n";
import type { EntityInfo } from "upend/types";
import { useEntity } from "../../lib/entity";
import api from "../../lib/api";
@ -20,14 +19,14 @@
export let labels: string[] | undefined = undefined;
export let link = false;
export let banner = false;
export let resolve = !(labels || []).length || banner;
let entity: Readable<UpObject> = readable(undefined);
let entityInfo: Readable<EntityInfo> = readable(undefined);
$: if (labels === undefined || banner)
({ entity, entityInfo } = useEntity(address));
$: if (resolve) ({ entity, entityInfo } = useEntity(address));
// isFile
$: isFile = $entity?.get("IS") === BLOB_TYPE_ADDR;
$: isFile = $entityInfo?.t == "Hash";
// Identification
let inferredIds: string[] = [];


@ -11,6 +11,7 @@
import UpObject from "../../display/UpObject.svelte";
import Spinner from "../../utils/Spinner.svelte";
import { i18n } from "../../../i18n";
import { ATTR_LABEL } from "upend/constants";
export let address: string;
export let detail: boolean;
@ -51,7 +52,7 @@
color: annotation.get("COLOR") || DEFAULT_ANNOTATION_COLOR,
attributes: {
"upend-address": annotation.address,
label: annotation.get("LBL"),
label: annotation.get(ATTR_LABEL),
},
data: (annotation.attr["NOTE"] || [])[0]?.value,
...fragment,
@ -92,7 +93,7 @@
} as any); // incorrect types, `update()` does take `attributes`
}
await api.putEntityAttribute(entity, "LBL", {
await api.putEntityAttribute(entity, ATTR_LABEL, {
t: "String",
c: region.attributes["label"],
});


@ -6,6 +6,7 @@
import IconButton from "../../utils/IconButton.svelte";
import Spinner from "../../utils/Spinner.svelte";
import UpObject from "../UpObject.svelte";
import { ATTR_LABEL } from "upend/constants";
export let address: string;
export let editable: boolean;
@ -58,7 +59,7 @@
if (annotation.get("W3C_FRAGMENT_SELECTOR")) {
anno.addAnnotation({
type: "Annotation",
body: annotation.attr["LBL"].map((e) => {
body: annotation.attr[ATTR_LABEL].map((e) => {
return {
type: "TextualBody",
value: String(e.value.c),
@ -134,7 +135,7 @@
...annotation.body.map((body) => {
return {
entity: uuid,
attribute: "LBL",
attribute: ATTR_LABEL,
value: {
t: "String",
c: body.value,
@ -146,9 +147,9 @@
anno.on("updateAnnotation", async (annotation) => {
const annotationObject = await api.fetchEntity(annotation.id);
await Promise.all(
annotationObject.attr["LBL"]
.concat(annotationObject.attr["W3C_FRAGMENT_SELECTOR"])
.map(async (e) => api.deleteEntry(e.address))
annotationObject.attr[ATTR_LABEL].concat(
annotationObject.attr["W3C_FRAGMENT_SELECTOR"]
).map(async (e) => api.deleteEntry(e.address))
);
await api.putEntry([
{
@ -162,7 +163,7 @@
...annotation.body.map((body) => {
return {
entity: annotation.id,
attribute: "LBL",
attribute: ATTR_LABEL,
value: {
t: "String",
c: body.value,


@ -0,0 +1,123 @@
import debug from "debug";
const dbg = debug("upend:imageQueue");
class ImageQueue {
concurrency: number;
queue: {
element: HTMLElement;
id: string;
callback: () => Promise<void>;
check?: () => boolean;
}[] = [];
active = 0;
constructor(concurrency: number) {
this.concurrency = concurrency;
}
public add(
element: HTMLImageElement,
id: string,
callback: () => Promise<void>,
order?: () => number,
check?: () => boolean
) {
this.queue = this.queue.filter((e) => e.element !== element);
this.queue.push({ element, id, callback, check });
this.update();
}
private update() {
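// Re-prioritise: load images nearest the top-left of the viewport first.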
this.queue.sort((a, b) => {
const aBox = a.element.getBoundingClientRect();
const bBox = b.element.getBoundingClientRect();
const topDifference = aBox.top - bBox.top;
if (topDifference !== 0) {
return topDifference;
} else {
return aBox.left - bBox.left;
}
});
dbg(
"Active: %d, Queue: %O",
this.active,
this.queue.map((e) => [e.element, e.id])
);
if (this.active >= this.concurrency) {
return;
}
if (!this.queue.length) {
return;
}
const visibleIdx = this.queue.findIndex((e) => (e.check ? e.check() : true));
// findIndex() yields -1 when nothing is visible; fall back to the head of the queue.
const nextIdx = visibleIdx >= 0 ? visibleIdx : 0;
const next = this.queue.splice(nextIdx, 1)[0];
dbg(`Getting ${next.id}...`);
this.active += 1;
next
.callback()
.then(() => {
dbg(`Loaded ${next.id}`);
})
.catch(() => {
dbg(`Failed to load ${next.id}...`);
})
.finally(() => {
this.active -= 1;
this.update();
});
}
}
const imageQueue = new ImageQueue(1);
export function concurrentImage(element: HTMLImageElement, src: string) {
const bbox = element.getBoundingClientRect();
let visible =
bbox.top >= 0 &&
bbox.left >= 0 &&
bbox.bottom <= window.innerHeight &&
bbox.right <= window.innerWidth;
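// Keep `visible` updated so the queue can prefer images that are currently on screen.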
const observer = new IntersectionObserver((entries) => {
visible = entries.some((e) => e.isIntersecting);
});
observer.observe(element);
function queueSelf() {
const loadSelf = () => {
return new Promise<void>((resolve, reject) => {
if (element.src === src) {
resolve();
return;
}
element.addEventListener("load", () => {
resolve();
});
element.addEventListener("error", () => {
reject();
});
element.src = src;
});
};
imageQueue.add(
element,
src,
loadSelf,
() => element.getBoundingClientRect().top,
() => visible
);
}
queueSelf();
return {
update(_src: string) {
queueSelf();
},
destroy() {
observer.disconnect();
},
};
}
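For reference, a sketch of consuming the action; the template form mirrors the `use:concurrentImage` usages in the thumbnail component diff above, and the direct call just spells out what Svelte does with an action (the relative import path is assumed):

import { concurrentImage } from "../imageQueue";

// Template usage:
//   <img use:concurrentImage={thumbnailUrl} alt="thumbnail" />
// A Svelte action is a plain function, so the equivalent direct call is:
function attach(img: HTMLImageElement, thumbnailUrl: string) {
  const action = concurrentImage(img, thumbnailUrl);
  return () => action.destroy();
}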


@ -10,6 +10,7 @@
const dispatch = createEventDispatcher();
import { matchSorter } from "match-sorter";
import api from "../../lib/api";
import { ATTR_LABEL } from "upend/constants";
const MAX_OPTIONS = 25;
@ -150,7 +151,7 @@
}
const validOptions = searchResult.entries
.filter((e) => e.attribute === "LBL")
.filter((e) => e.attribute === ATTR_LABEL)
.filter((e) => !exactHits.includes(e.entity));
const sortedOptions = matchSorter(validOptions, inputValue, {
@ -197,7 +198,7 @@
entity: { t: "Attribute", c: option.attribute.name },
});
// Second, label it.
await api.putEntityAttribute(address, "LBL", {
await api.putEntityAttribute(address, ATTR_LABEL, {
t: "String",
c: option.labelToCreate,
});


@ -1,6 +1,6 @@
<script lang="ts">
import filesize from "filesize";
import { format, formatRelative, fromUnixTime, parseISO } from "date-fns";
import { formatRelative, fromUnixTime, parseISO } from "date-fns";
import Ellipsis from "../utils/Ellipsis.svelte";
import UpObject from "../display/UpObject.svelte";
import { createEventDispatcher } from "svelte";
@ -17,21 +17,21 @@
import { formatDuration } from "../../util/fragments/time";
import { i18n } from "../../i18n";
import UpLink from "../display/UpLink.svelte";
import type { UpType } from "src/lib/types";
import { ATTR_ADDED, ATTR_LABEL } from "upend/constants";
const dispatch = createEventDispatcher();
export let columns: string | undefined = undefined;
export let header = true;
export let orderByValue = false;
export let columnWidths: string[] = [];
export let columnWidths: string[] | undefined = undefined;
export let entries: UpEntry[];
export let type: UpType | undefined = undefined;
export let editable = false;
export let attributeOptions: string[] | undefined = undefined;
// Display
$: displayColumns = (columns || "attribute, value")
$: displayColumns = (columns || "entity, attribute, value")
.split(",")
.map((c) => c.trim());
@ -88,7 +88,9 @@
const addressesString = addresses.map((addr) => `@${addr}`).join(" ");
labelListing = query(`(matches (in ${addressesString}) "LBL" ? )`).result;
labelListing = query(
`(matches (in ${addressesString}) "${ATTR_LABEL}" ? )`
).result;
}
// Sorting
@ -160,7 +162,7 @@
switch (attribute) {
case "FILE_SIZE":
return filesize(parseInt(String(value), 10), { base: 2 });
case "ADDED":
case ATTR_ADDED:
case "LAST_VISITED":
return formatRelative(
fromUnixTime(parseInt(String(value), 10)),
@ -182,7 +184,7 @@
<col class="action-col" />
{/if}
{#each displayColumns as column, idx}
{#if columnWidths.length}
{#if columnWidths?.length}
<col
class="{column}-col"
style="width: {columnWidths[idx] || 'unset'}"
@ -300,7 +302,7 @@
<Selector
type="attribute"
bind:attribute={newEntryAttribute}
attributeOptions={type?.attributes}
attributeOptions={attributeOptions || []}
/>
</td>
{/if}


@ -5,6 +5,7 @@
import { query } from "../../lib/entity";
import UpObject from "../display/UpObject.svelte";
import UpObjectCard from "../display/UpObjectCard.svelte";
import { ATTR_LABEL } from "upend/constants";
export let entities: Address[];
export let thumbnails = true;
@ -49,7 +50,9 @@
const addressesString = addresses.map((addr) => `@${addr}`).join(" ");
labelListing = query(`(matches (in ${addressesString}) "LBL" ? )`).result;
labelListing = query(
`(matches (in ${addressesString}) "${ATTR_LABEL}" ? )`
).result;
}
function sortEntities() {


@ -5,7 +5,7 @@
"ADDED": "Added at",
"LAST_VISITED": "Last visited at",
"NUM_VISITED": "Times visited",
"LBL": "Label",
"ATTR_LABEL": "Label",
"IS": "Type",
"TYPE": "Type ID",
"MEDIA_DURATION": "Duration"


@ -1,3 +1,4 @@
import { UpEndApi } from "upend/api";
import wasmURL from "upend/upend_wasm_bg.wasm?url";
export default new UpEndApi("/");
export default new UpEndApi("/", wasmURL);


@ -1,108 +0,0 @@
import type { UpEntry } from "upend";
import Gallery from "../components/widgets/Gallery.svelte";
export class UpType {
address: string | undefined;
name: string | null = null;
label: string | null = null;
attributes: string[] = [];
constructor(address?: string) {
this.address = address;
}
public get icon(): string | undefined {
return this.name ? TYPE_ICONS[this.name] : undefined;
}
public get widgetInfo(): Widget[] {
return TYPE_WIDGETS[this.name] || [];
}
}
export const UNTYPED = new UpType();
export interface Component {
component: any; // TODO
props?:
| { [key: string]: unknown }
| ((entries: UpEntry[]) => { [key: string]: unknown });
}
export interface Widget {
name: string;
icon?: string;
components: Array<Component>;
}
const TYPE_ICONS: { [key: string]: string } = {
BLOB: "package",
HIER: "folder",
};
const TYPE_WIDGETS: { [key: string]: Widget[] } = {
HIER: [
{
name: "hierarchical-listing",
icon: "folder",
components: [
{
component: Gallery,
props: (entries) => {
return {
thumbnails: false,
entities: entries
.filter((e) => e.attribute == "HAS")
.map((e) => String(e.value.c)),
};
},
},
],
},
{
name: "hierarchical-listing-gallery",
icon: "image",
components: [
{
component: Gallery,
props: (entries) => {
return {
thumbnails: true,
entities: entries
.filter((e) => e.attribute == "HAS")
.map((e) => String(e.value.c)),
};
},
},
],
},
],
KSX_TRACK_MOODS: [
{
name: "ksx-track-compass",
icon: "plus-square",
components: [
// {
// name: "Compass",
// id: "compass_tint_energy",
// props: {
// xAttrName: "KSX_TINT",
// yAttrName: "KSX_ENERGY",
// xLabel: "Lightsoft // Heavydark",
// yLabel: "Chill // Extreme",
// },
// },
// {
// name: "Compass",
// id: "compass_seriousness_materials",
// props: {
// xAttrName: "KSX_SERIOUSNESS",
// yAttrName: "KSX_MATERIALS",
// xLabel: "Dionysia // Apollonia",
// yLabel: "Natural // Reinforced",
// },
// },
],
},
],
};


@ -1,6 +1,6 @@
import type { EntityInfo } from "upend/types";
import type { UpObject } from "upend";
import { GROUP_TYPE_ADDR } from "upend/constants";
import { ATTR_IN } from "upend/constants";
export function getTypes(entity: UpObject, entityInfo: EntityInfo) {
const mimeType = String(entity?.get("FILE_MIME"));
@ -23,7 +23,7 @@ export function getTypes(entity: UpObject, entityInfo: EntityInfo) {
const web = entityInfo?.t == "Url";
const fragment = Boolean(entity?.get("ANNOTATES"));
const group = entity?.get("IS") == GROUP_TYPE_ADDR;
const group = entity?.backlinks.some((e) => e.attribute == ATTR_IN);
return {
mimeType,


@ -1,6 +1,7 @@
import type { PutInput } from "upend/types";
import { query as queryFn } from "../lib/entity";
import api from "../lib/api";
import { ATTR_LABEL } from "upend/constants";
export function baseSearch(query: string) {
return queryFn(
@ -24,8 +25,9 @@ export async function createLabelled(label: string) {
},
};
} else {
// TODO - don't create invariants, create UUIDs instead, maybe with keys?
body = {
attribute: "LBL",
attribute: ATTR_LABEL,
value: {
t: "String",
c: label,


@ -1,10 +1,10 @@
<script lang="ts">
import EntryList from "../components/widgets/EntryList.svelte";
import Gallery from "../components/widgets/Gallery.svelte";
import type { Widget } from "src/lib/types";
import type { Widget } from "../components/EntryView.svelte";
import { Link } from "svelte-navigator";
import { UpListing } from "upend";
import AttributeView from "../components/AttributeView.svelte";
import EntryView from "../components/EntryView.svelte";
import UpObjectCard from "../components/display/UpObjectCard.svelte";
import Spinner from "../components/utils/Spinner.svelte";
import api from "../lib/api";
@ -12,12 +12,13 @@
import { vaultInfo } from "../util/info";
import { updateTitle } from "../util/title";
import { i18n } from "../i18n";
import { ATTR_ADDED, ATTR_LABEL } from "upend/constants";
const roots = (async () => {
const data = await api.fetchRoots();
const listing = new UpListing(data);
return Object.values(listing.objects)
.filter((obj) => Boolean(obj.attr["LBL"]))
.filter((obj) => Boolean(obj.attr[ATTR_LABEL]))
.map((obj) => [obj.address, obj.identify().join(" | ")])
.sort(([_, i1], [__, i2]) => i1.localeCompare(i2));
})();
@ -34,7 +35,7 @@
.sort((a, b) => (b.value.c as number) - (a.value.c as number))
.slice(0, 25);
const { result: latestQuery } = query(`(matches ? "ADDED" ?)`);
const { result: latestQuery } = query(`(matches ? "${ATTR_ADDED}" ?)`);
$: latest = ($latestQuery?.entries || [])
.filter((e) => e.value.t == "Number")
.sort((a, b) => (b.value.c as number) - (a.value.c as number))
@ -42,9 +43,9 @@
const shortWidgets: Widget[] = [
{
name: "list-table",
name: "List",
icon: "list-ul",
components: [
components: (entries) => [
{
component: EntryList,
props: {
@ -52,21 +53,20 @@
columnWidths: ["6em"],
orderByValue: true,
header: false,
entries,
},
},
],
},
{
name: "gallery-view",
name: "Gallery",
icon: "image",
components: [
components: (entries) => [
{
component: Gallery,
props: (entries) => {
return {
entities: entries.map((e) => e.entity),
sort: false,
};
props: {
entities: entries.map((e) => e.entity),
sort: false,
},
},
],
@ -75,9 +75,9 @@
const longWidgets: Widget[] = [
{
name: "list-table",
name: "List",
icon: "list-ul",
components: [
components: (entries) => [
{
component: EntryList,
props: {
@ -85,21 +85,20 @@
columnWidths: ["13em"],
orderByValue: true,
header: false,
entries,
},
},
],
},
{
name: "gallery-view",
name: "Gallery",
icon: "image",
components: [
components: (entries) => [
{
component: Gallery,
props: (entries) => {
return {
entities: entries.map((e) => e.entity),
sort: false,
};
props: {
entities: entries.map((e) => e.entity),
sort: false,
},
},
],
@ -138,7 +137,7 @@
{#if $frequentQuery == undefined}
<Spinner centered />
{:else}
<AttributeView
<EntryView
--current-background="var(--background)"
entries={frequent}
widgets={shortWidgets}
@ -152,7 +151,7 @@
{#if $recentQuery == undefined}
<Spinner centered />
{:else}
<AttributeView
<EntryView
--current-background="var(--background)"
entries={recent}
widgets={longWidgets}
@ -168,7 +167,7 @@
{#if $latestQuery == undefined}
<Spinner centered />
{:else}
<AttributeView
<EntryView
--current-background="var(--background)"
entries={latest}
widgets={longWidgets}


@ -9,11 +9,11 @@
import { baseSearch, createLabelled } from "../util/search";
import { updateTitle } from "../util/title";
import { query as queryFn } from "../lib/entity";
import AttributeView from "../components/AttributeView.svelte";
import EntryView, { type Widget } from "../components/EntryView.svelte";
import api from "../lib/api";
import Gallery from "../components/widgets/Gallery.svelte";
import type { Widget } from "src/lib/types";
import { matchSorter } from "match-sorter";
import { ATTR_LABEL } from "upend/constants";
const navigate = useNavigate();
export let query: string;
@ -36,7 +36,9 @@
exactHits = [];
}
$: objects = ($result?.entries || []).filter((e) => e.attribute === "LBL");
$: objects = ($result?.entries || []).filter(
(e) => e.attribute === ATTR_LABEL
);
$: sortedObjects = matchSorter(objects, debouncedQuery, {
keys: ["value.c"],
});
@ -44,15 +46,15 @@
let exactHits: string[] = [];
$: {
const addressesString = objects.map((e) => `@${e.entity}`).join(" ");
api.query(`(matches (in ${addressesString}) "LBL" ? )`).then(
(labelListing) => {
api
.query(`(matches (in ${addressesString}) "${ATTR_LABEL}" ? )`)
.then((labelListing) => {
exactHits = labelListing.entries
.filter(
(e) => String(e.value.c).toLowerCase() === query.toLowerCase()
)
.map((e) => e.entity);
}
);
});
}
async function create() {
@ -64,33 +66,29 @@
const searchWidgets: Widget[] = [
{
name: "list-table",
name: "List",
icon: "list-ul",
components: [
components: (entries) => [
{
component: Gallery,
props: (entries) => {
return {
entities: entries.map((e) => e.entity),
sort: false,
thumbnails: false,
};
props: {
entities: entries.map((e) => e.entity),
sort: false,
thumbnails: false,
},
},
],
},
{
name: "gallery-view",
name: "Gallery",
icon: "image",
components: [
components: (entries) => [
{
component: Gallery,
props: (entries) => {
return {
entities: entries.map((e) => e.entity),
sort: false,
thumbnails: true,
};
props: {
entities: entries.map((e) => e.entity),
sort: false,
thumbnails: true,
},
},
],
@ -123,7 +121,7 @@
<section class="objects">
{#if sortedObjects.length}
<h2>Objects</h2>
<AttributeView
<EntryView
--current-background="var(--background)"
entries={sortedObjects}
widgets={searchWidgets}


@ -31,7 +31,7 @@
<tbody>
<tr class:invalid={!blob.paths[0].valid}>
<td class="hash"
><UpObject link address={blob.hash} labels={[]} /></td
><UpObject link address={blob.hash} resolve={false} /></td
>
<td class="size">{filesize(blob.size)}</td>
<td class="path">{blob.paths[0].path}</td>

webui/src/vite.d.ts

@ -0,0 +1 @@
/// <reference types="vite/client" />


@ -12585,11 +12585,19 @@ __metadata:
"upend@file:../tools/upend_js::locator=upend-kestrel%40workspace%3A.":
version: 0.0.1
resolution: "upend@file:../tools/upend_js#../tools/upend_js::hash=5b38a7&locator=upend-kestrel%40workspace%3A."
resolution: "upend@file:../tools/upend_js#../tools/upend_js::hash=399d67&locator=upend-kestrel%40workspace%3A."
dependencies:
debug: ^4.3.4
lru-cache: ^7.0.0
checksum: 8f6fb87103908e4f16fb314da7b00f7e0656b3e088fb874b8a7c2d99c603575a3a5bcfe543603a7033216ea1e6a626628fe2a6a51776d794372fa684e2ce47c8
upend_wasm: ../upend_wasm/pkg
checksum: 49d5d59bc42deb38839668a31ab19fc8c8f578d8bad647655afa9a7ac920c68d71f2a4a5720464bc941a7345482af30c9b48f6cd631a66ccbb64d6a425dcecb6
languageName: node
linkType: hard
"upend_wasm@file:../upend_wasm/pkg::locator=upend%40file%3A..%2Ftools%2Fupend_js%23..%2Ftools%2Fupend_js%3A%3Ahash%3D399d67%26locator%3Dupend-kestrel%2540workspace%253A.":
version: 0.1.0
resolution: "upend_wasm@file:../upend_wasm/pkg#../upend_wasm/pkg::hash=9027d5&locator=upend%40file%3A..%2Ftools%2Fupend_js%23..%2Ftools%2Fupend_js%3A%3Ahash%3D399d67%26locator%3Dupend-kestrel%2540workspace%253A."
checksum: 8fd6644ea451f19720cb3a66cd947a98cf78256e7cbe88c53238a53e38738dee8f04d7dc4d1422c2218397174027625534e857033c1ac8ca38a49e82c693d1b9
languageName: node
linkType: hard