Compare commits

...

4 Commits

Author SHA1 Message Date
Tomáš Mládek 6ae20f9171 feat: add `user` to every Entry
ci/woodpecker/push/woodpecker: pipeline failed
(very ugly, lots of clones)
2024-04-02 21:08:39 +02:00
Tomáš Mládek 5771f32736 feat: add user management
- no more static keys, full register/login/logout flow
- add API error type
- refactor API to centralize request calls
- minor refactors re: vault options
- CSS refactor (buttons don't require classes, input styling)
2024-04-02 20:57:53 +02:00
Tomáš Mládek b480cf2e64 feat(backend): users with passwords 2024-04-02 20:57:53 +02:00
Tomáš Mládek 07c76423ac style(webui): contain COVERs in UpObject headers
ci/woodpecker/push/woodpecker: pipeline failed
2024-04-02 20:53:56 +02:00
41 changed files with 1031 additions and 238 deletions

View File

@ -1,6 +1,6 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="dev backend" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror" />
<option name="command" value="run -- serve ./example_vault --clean --no-browser --reinitialize --rescan-mode mirror --secret upend" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs />
<option name="emulateTerminal" value="true" />

Cargo.lock generated
View File

@ -487,6 +487,18 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
[[package]]
name = "argon2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
dependencies = [
"base64ct",
"blake2",
"cpufeatures",
"password-hash",
]
[[package]]
name = "arrayref"
version = "0.3.7"
@ -555,6 +567,12 @@ version = "0.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "414dcefbc63d77c526a76b3afcf6fbb9b5e2791c19c3aa2297733208750c6e53"
[[package]]
name = "base64ct"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -567,6 +585,15 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
[[package]]
name = "blake2"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
dependencies = [
"digest",
]
[[package]]
name = "blake2b_simd"
version = "1.0.1"
@ -863,9 +890,9 @@ dependencies = [
[[package]]
name = "cpufeatures"
version = "0.2.9"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
dependencies = [
"libc",
]
@ -1110,6 +1137,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
"subtle",
]
[[package]]
@ -1745,9 +1773,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.147"
version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "libsqlite3-sys"
@ -2254,6 +2282,17 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "944553dd59c802559559161f9816429058b869003836120e262e8caec061b7ae"
[[package]]
name = "password-hash"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
dependencies = [
"base64ct",
"rand_core",
"subtle",
]
[[package]]
name = "paste"
version = "1.0.14"
@ -2914,6 +2953,12 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "subtle"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "1.0.109"
@ -3379,6 +3424,7 @@ name = "upend-db"
version = "0.0.2"
dependencies = [
"anyhow",
"argon2",
"chrono",
"diesel",
"diesel_migrations",
@ -3394,6 +3440,7 @@ dependencies = [
"nonempty",
"num_cpus",
"once_cell",
"password-hash",
"rayon",
"regex",
"serde",

View File

@ -49,6 +49,7 @@ pub struct Entry {
pub attribute: Attribute,
pub value: EntryValue,
pub provenance: String,
pub user: Option<String>,
pub timestamp: NaiveDateTime,
}
@ -81,6 +82,7 @@ impl TryFrom<&InvariantEntry> for Entry {
attribute: invariant.attribute.clone(),
value: invariant.value.clone(),
provenance: "INVARIANT".to_string(),
user: None,
timestamp: NaiveDateTime::from_timestamp_opt(0, 0).unwrap(),
})
}
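This is the core change of the commit: every `Entry` now carries an optional `user`. A minimal sketch (not part of the diff) of constructing an entry with the new field, assuming the field types shown above and the `upend_base` module paths used elsewhere in this comparison:

```rust
// Sketch only: an Entry with the newly added optional `user` field.
// Field types and module paths are assumed from the surrounding diff.
use chrono::Utc;
use upend_base::addressing::Address;
use upend_base::entry::{Entry, EntryValue};

fn example_entry(entity: Address) -> Entry {
    Entry {
        entity,
        attribute: "HTML_TITLE".parse().unwrap(),
        value: EntryValue::String("Hello".to_string()),
        provenance: "API".to_string(),
        // New: the user responsible for this entry; None for system-generated entries.
        user: Some("alice".to_string()),
        timestamp: Utc::now().naive_utc(),
    }
}
```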

View File

@ -4,5 +4,4 @@ pub struct UpEndConfig {
pub desktop_enabled: bool,
pub trust_executables: bool,
pub secret: String,
pub key: Option<String>,
}

View File

@ -13,7 +13,7 @@ use upend_db::stores::Blob;
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
BlobMode, UpEndConnection,
BlobMode, OperationContext, UpEndConnection,
};
lazy_static! {
@ -26,6 +26,7 @@ lazy_static! {
attribute: ATTR_LABEL.parse().unwrap(),
value: "ID3".into(),
provenance: "INVARIANT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
};
}
@ -39,6 +40,7 @@ impl Extractor for ID3Extractor {
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
@ -72,14 +74,16 @@ impl Extractor for ID3Extractor {
"TYER" | "TBPM" => EntryValue::guess_from(text),
_ => text.clone().into(),
},
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
Entry {
entity: Address::Attribute(format!("ID3_{}", frame.id()).parse()?),
attribute: ATTR_LABEL.parse().unwrap(),
value: format!("ID3: {}", frame.name()).into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
@ -97,12 +101,14 @@ impl Extractor for ID3Extractor {
Blob::from_filepath(&tmp_path),
None,
Some(BlobMode::StoreOnly),
context.clone(),
)?;
result.push(Entry {
entity: address.clone(),
attribute: "ID3_PICTURE".parse()?,
value: EntryValue::Address(Address::Hash(hash)),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
});
has_pictures = true;
@ -112,7 +118,8 @@ impl Extractor for ID3Extractor {
entity: Address::Attribute("ID3_PICTURE".parse()?),
attribute: ATTR_LABEL.parse().unwrap(),
value: "ID3 Embedded Image".into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
}
@ -126,7 +133,8 @@ impl Extractor for ID3Extractor {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
.collect::<Vec<Entry>>(),
@ -138,7 +146,8 @@ impl Extractor for ID3Extractor {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(ID3_TYPE_INVARIANT.entity().unwrap()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);

View File

@ -12,7 +12,7 @@ use upend_base::{
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
OperationContext, UpEndConnection,
};
pub struct ExifExtractor;
@ -31,6 +31,7 @@ lazy_static! {
value: "EXIF".into(),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None
};
}
@ -41,6 +42,7 @@ impl Extractor for ExifExtractor {
_connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
@ -86,14 +88,16 @@ impl Extractor for ExifExtractor {
EntryValue::guess_from(format!("{}", field.display_value()))
}
},
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
Entry {
entity: Address::Attribute(attribute),
attribute: ATTR_LABEL.parse().unwrap(),
value: format!("EXIF: {}", tag_description).into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);
@ -109,7 +113,8 @@ impl Extractor for ExifExtractor {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
})
.collect::<Vec<Entry>>(),
@ -123,7 +128,8 @@ impl Extractor for ExifExtractor {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(EXIF_TYPE_INVARIANT.entity().unwrap()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
]);

View File

@ -12,7 +12,7 @@ use upend_base::{
use upend_db::{
jobs::{JobContainer, JobState},
stores::{fs::FILE_MIME_KEY, UpStore},
UpEndConnection,
OperationContext, UpEndConnection,
};
const DURATION_KEY: &str = "MEDIA_DURATION";
@ -28,6 +28,7 @@ lazy_static! {
value: "Multimedia".into(),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None,
};
pub static ref DURATION_OF_MEDIA: Entry = Entry {
entity: Address::Attribute(DURATION_KEY.parse().unwrap()),
@ -35,6 +36,7 @@ lazy_static! {
value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
provenance: "INVARIANT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: None,
};
}
@ -47,6 +49,7 @@ impl Extractor for MediaExtractor {
_connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
@ -95,7 +98,8 @@ impl Extractor for MediaExtractor {
entity: address.clone(),
attribute: DURATION_KEY.parse().unwrap(),
value: EntryValue::Number(duration),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
(&MEDIA_TYPE_INVARIANT as &InvariantEntry)
@ -107,7 +111,8 @@ impl Extractor for MediaExtractor {
entity: address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: EntryValue::Address(MEDIA_TYPE_INVARIANT.entity().unwrap()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
];

View File

@ -6,7 +6,9 @@ use std::{
};
use tracing::{debug, info, trace};
use upend_base::{addressing::Address, entry::Entry};
use upend_db::{jobs::JobContainer, stores::UpStore, UpEndConnection, UpEndDatabase};
use upend_db::{
jobs::JobContainer, stores::UpStore, OperationContext, UpEndConnection, UpEndDatabase,
};
#[cfg(feature = "extractors-web")]
pub mod web;
@ -27,6 +29,7 @@ pub trait Extractor {
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>>;
fn is_needed(&self, _address: &Address, _connection: &UpEndConnection) -> Result<bool> {
@ -39,9 +42,10 @@ pub trait Extractor {
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
job_container: JobContainer,
context: OperationContext,
) -> Result<usize> {
if self.is_needed(address, connection)? {
let entries = self.get(address, connection, store, job_container)?;
let entries = self.get(address, connection, store, job_container, context)?;
trace!("For \"{address}\", got: {entries:?}");
connection.transaction(|| {
@ -62,6 +66,7 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
db: D,
store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<usize> {
info!("Extracting metadata for all addresses.");
@ -77,7 +82,13 @@ pub fn extract_all<D: Borrow<UpEndDatabase>>(
.par_iter()
.map(|address| {
let connection = db.connection()?;
let entry_count = extract(address, &connection, store.clone(), job_container.clone());
let entry_count = extract(
address,
&connection,
store.clone(),
job_container.clone(),
context.clone(),
);
let mut cnt = count.write().unwrap();
*cnt += 1;
@ -107,6 +118,7 @@ pub fn extract(
connection: &UpEndConnection,
store: Arc<Box<dyn UpStore + Send + Sync>>,
job_container: JobContainer,
context: OperationContext,
) -> usize {
let mut entry_count = 0;
trace!("Extracting metadata for {address:?}");
@ -118,6 +130,7 @@ pub fn extract(
connection,
store.clone(),
job_container.clone(),
context.clone(),
);
match extract_result {
@ -133,6 +146,7 @@ pub fn extract(
connection,
store.clone(),
job_container.clone(),
context.clone(),
);
match extract_result {
@ -148,6 +162,7 @@ pub fn extract(
connection,
store.clone(),
job_container.clone(),
context.clone(),
);
match extract_result {
@ -158,8 +173,13 @@ pub fn extract(
#[cfg(feature = "extractors-media")]
{
let extract_result =
media::MediaExtractor.insert_info(address, connection, store.clone(), job_container);
let extract_result = media::MediaExtractor.insert_info(
address,
connection,
store.clone(),
job_container,
context.clone(),
);
match extract_result {
Ok(count) => entry_count += count,
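Every extractor entry point now threads an `OperationContext` through to the entries it produces. A hedged sketch of calling `extract_all` with an explicit context (assuming the call site sits in the server crate, where the `extractors` module is defined); `OperationContext::default()` yields `user: None` and provenance `"SYSTEM"`:

```rust
// Sketch only: passing the new OperationContext into extract_all.
// Assumes this lives in the upend server crate next to the `extractors` module.
use std::sync::Arc;
use upend_db::jobs::JobContainer;
use upend_db::stores::UpStore;
use upend_db::{OperationContext, UpEndDatabase};

fn run_extraction(
    db: &UpEndDatabase,
    store: Arc<Box<dyn UpStore + Send + Sync>>,
    job_container: JobContainer,
) -> anyhow::Result<usize> {
    // Attribute the resulting entries to a specific user and provenance.
    let context = OperationContext {
        user: Some("alice".to_string()),
        provenance: "API".to_string(),
    };
    crate::extractors::extract_all(db, store, job_container, context)
}
```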

View File

@ -14,7 +14,7 @@ use upend_base::entry::EntryValue;
use upend_db::jobs::JobContainer;
use upend_db::jobs::JobState;
use upend_db::stores::UpStore;
use upend_db::UpEndConnection;
use upend_db::{OperationContext, UpEndConnection};
use webpage::HTML;
pub struct WebExtractor;
@ -26,6 +26,7 @@ impl Extractor for WebExtractor {
_connection: &UpEndConnection,
_store: Arc<Box<dyn UpStore + Send + Sync>>,
mut job_container: JobContainer,
context: OperationContext,
) -> Result<Vec<Entry>> {
if let Address::Url(url) = address {
let mut job_handle =
@ -42,21 +43,24 @@ impl Extractor for WebExtractor {
entity: address.clone(),
attribute: "HTML_TITLE".parse().unwrap(),
value: html_title.clone().into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
html.title.map(|html_title| Entry {
entity: address.clone(),
attribute: ATTR_LABEL.parse().unwrap(),
value: html_title.into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
html.description.map(|html_desc| Entry {
entity: address.clone(),
attribute: "HTML_DESCRIPTION".parse().unwrap(),
value: html_desc.into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}),
];
@ -67,7 +71,8 @@ impl Extractor for WebExtractor {
entity: address.clone(),
attribute: ATTR_LABEL.parse()?,
value: value.clone().into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}));
}
@ -76,7 +81,8 @@ impl Extractor for WebExtractor {
entity: address.clone(),
attribute: attribute.parse()?,
value: value.into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}));
}
@ -85,7 +91,8 @@ impl Extractor for WebExtractor {
entity: address.clone(),
attribute: "OG_IMAGE".parse()?,
value: image.url.into(),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}))
}
@ -101,7 +108,8 @@ impl Extractor for WebExtractor {
entity: Address::Attribute(e.attribute.clone()),
attribute: ATTR_OF.parse().unwrap(),
value: EntryValue::Address(TYPE_URL_ADDRESS.clone()),
provenance: "SYSTEM EXTRACTOR".to_string(),
provenance: context.provenance.clone() + "EXTRACTOR",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
},
e,
@ -149,7 +157,13 @@ mod test {
let address = Address::Url(Url::parse("https://upend.dev").unwrap());
assert!(WebExtractor.is_needed(&address, &connection)?);
WebExtractor.insert_info(&address, &connection, store, job_container)?;
WebExtractor.insert_info(
&address,
&connection,
store,
job_container,
OperationContext::default(),
)?;
assert!(!WebExtractor.is_needed(&address, &connection)?);

View File

@ -16,7 +16,7 @@ use std::collections::HashMap;
use std::net::SocketAddr;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use tracing::trace;
use tracing::{debug, error, info, warn};
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
@ -26,7 +26,7 @@ use upend_base::hash::{sha256hash, UpMultihash};
use upend_db::jobs::JobContainer;
use upend_db::stores::fs::FsStore;
use upend_db::stores::UpStore;
use upend_db::{BlobMode, UpEndDatabase};
use upend_db::{BlobMode, OperationContext, UpEndDatabase};
use crate::util::exec::block_background;
@ -80,7 +80,7 @@ enum Commands {
entity: String,
/// The attribute of the entry.
attribute: String,
/// The value; its type will be heurestically determined.
/// The value; its type will be heuristically determined.
value: String,
/// Output format
#[arg(short, long, default_value = "tsv")]
@ -172,10 +172,6 @@ struct ServeArgs {
#[arg(long, env = "UPEND_SECRET")]
secret: Option<String>,
/// Authentication key users must supply.
#[arg(long, env = "UPEND_KEY")]
key: Option<String>,
/// Allowed host/domain name the API can serve.
#[arg(long, env = "UPEND_ALLOW_HOST")]
allow_host: Vec<String>,
@ -415,9 +411,9 @@ async fn main() -> Result<()> {
})),
desktop_enabled: !args.no_desktop,
trust_executables: args.trust_executables,
key: args.key,
secret,
},
public: Arc::new(Mutex::new(upend.connection()?.get_users()?.is_empty())),
};
// Start HTTP server
@ -471,8 +467,14 @@ async fn main() -> Result<()> {
initial: false,
tree_mode,
},
OperationContext::default(),
);
let _ = extractors::extract_all(
upend,
state.store,
job_container,
OperationContext::default(),
);
let _ = extractors::extract_all(upend, state.store, job_container);
Ok(())
});
}

View File

@ -26,7 +26,7 @@ use serde_json::json;
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::io::Write;
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use std::time::{SystemTime, UNIX_EPOCH};
use tempfile::NamedTempFile;
use tracing::{debug, info, trace};
@ -41,6 +41,7 @@ use upend_db::jobs;
use upend_db::stores::UpdateOptions;
use upend_db::stores::{Blob, UpStore};
use upend_db::BlobMode;
use upend_db::OperationContext;
use upend_db::UpEndDatabase;
use upend_db::VaultOptions;
use url::Url;
@ -57,69 +58,136 @@ pub struct State {
pub job_container: jobs::JobContainer,
pub preview_store: Option<Arc<PreviewStore>>,
pub preview_thread_pool: Option<Arc<rayon::ThreadPool>>,
pub public: Arc<Mutex<bool>>,
}
#[derive(Debug, Serialize, Deserialize)]
struct JwtClaims {
user: String,
exp: usize,
}
#[derive(Deserialize)]
pub struct LoginRequest {
key: String,
pub struct UserPayload {
username: String,
password: String,
}
#[derive(Deserialize)]
pub struct LoginQueryParams {
via: Option<String>,
}
#[post("/api/auth/login")]
pub async fn login(
state: web::Data<State>,
payload: web::Json<LoginRequest>,
payload: web::Json<UserPayload>,
query: web::Query<LoginQueryParams>,
) -> Result<HttpResponse, Error> {
if state.config.key.is_none() || Some(&payload.key) == state.config.key.as_ref() {
let claims = JwtClaims {
exp: (SystemTime::now()
.duration_since(UNIX_EPOCH)
.map_err(ErrorInternalServerError)?
.as_secs()
+ 7 * 24 * 60 * 60) as usize,
};
let conn = state.upend.connection().map_err(ErrorInternalServerError)?;
let token = jsonwebtoken::encode(
&jsonwebtoken::Header::default(),
&claims,
&jsonwebtoken::EncodingKey::from_secret(state.config.secret.as_ref()),
)
.map_err(ErrorInternalServerError)?;
Ok(HttpResponse::Ok().json(json!({ "token": token })))
} else {
Err(ErrorUnauthorized("Incorrect token."))
match conn.authenticate_user(&payload.username, &payload.password) {
Ok(()) => {
let token = create_token(&payload.username, &state.config.secret)?;
match query.via.as_deref() {
Some("cookie") => Ok(HttpResponse::NoContent()
.append_header((http::header::SET_COOKIE, format!("key={}; Path=/", token)))
.finish()),
_ => Ok(HttpResponse::Ok().json(json!({ "key": token }))),
}
}
Err(e) => Err(ErrorUnauthorized(e)),
}
}
fn check_auth(req: &HttpRequest, state: &State) -> Result<(), actix_web::Error> {
if let Some(key) = &state.config.key {
if let Some(auth_header) = req.headers().get("Authorization") {
let auth_header = auth_header.to_str().map_err(|err| {
ErrorBadRequest(format!("Invalid value in Authorization header: {err:?}"))
})?;
#[post("/api/auth/logout")]
pub async fn logout() -> Result<HttpResponse, Error> {
Ok(HttpResponse::NoContent()
.append_header((http::header::SET_COOKIE, "key=; Path=/; Max-Age=0"))
.finish())
}
let token = jsonwebtoken::decode::<JwtClaims>(
auth_header,
&jsonwebtoken::DecodingKey::from_secret(key.as_ref()),
&jsonwebtoken::Validation::default(),
);
#[post("/api/auth/register")]
pub async fn register(
req: HttpRequest,
state: web::Data<State>,
payload: web::Json<UserPayload>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
token
.map(|_| ())
.map_err(|err| ErrorUnauthorized(format!("Invalid token: {err:?}")))
} else {
Err(ErrorUnauthorized("Authorization required."))
let conn = state.upend.connection().map_err(ErrorInternalServerError)?;
match conn.set_user(&payload.username, &payload.password) {
Ok(_) => {
*state.public.lock().unwrap() = false;
let token = create_token(&payload.username, &state.config.secret)?;
Ok(HttpResponse::Ok().json(json!({ "token": token })))
}
Err(e) => Err(ErrorInternalServerError(e)),
}
}
#[get("/api/auth/whoami")]
pub async fn whoami(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
let user = check_auth(&req, &state)?;
Ok(HttpResponse::Ok().json(json!({ "user": user })))
}
fn check_auth(req: &HttpRequest, state: &State) -> Result<Option<String>, actix_web::Error> {
if *state.public.lock().unwrap() {
return Ok(None);
}
let key = if let Some(value) = req.headers().get("Authorization") {
let value = value.to_str().map_err(|err| {
ErrorBadRequest(format!("Invalid value in Authorization header: {err:?}"))
})?;
if !value.starts_with("Bearer ") {
return Err(ErrorUnauthorized("Invalid token type."));
}
Some(value.trim_start_matches("Bearer ").to_string())
} else if let Ok(cookies) = req.cookies() {
cookies
.iter()
.find(|c| c.name() == "key")
.map(|cookie| cookie.value().to_string())
} else {
None
};
if let Some(key) = key {
let token = jsonwebtoken::decode::<JwtClaims>(
&key,
&jsonwebtoken::DecodingKey::from_secret(state.config.secret.as_ref()),
&jsonwebtoken::Validation::default(),
);
match token {
Ok(token) => Ok(Some(token.claims.user)),
Err(err) => Err(ErrorUnauthorized(format!("Invalid token: {err:?}"))),
}
} else {
Ok(())
Err(ErrorUnauthorized("Authorization required."))
}
}
fn create_token(username: &str, secret: &str) -> Result<String, Error> {
let claims = JwtClaims {
user: username.to_string(),
exp: (SystemTime::now()
.duration_since(UNIX_EPOCH)
.map_err(ErrorInternalServerError)?
.as_secs()
+ 7 * 24 * 60 * 60) as usize,
};
jsonwebtoken::encode(
&jsonwebtoken::Header::default(),
&claims,
&jsonwebtoken::EncodingKey::from_secret(secret.as_ref()),
)
.map_err(ErrorInternalServerError)
}
#[derive(Deserialize)]
pub struct RawRequest {
native: Option<String>,
@ -128,10 +196,13 @@ pub struct RawRequest {
#[get("/api/raw/{hash}")]
pub async fn get_raw(
req: HttpRequest,
state: web::Data<State>,
web::Query(query): web::Query<RawRequest>,
hash: web::Path<String>,
) -> Result<impl Responder, Error> {
check_auth(&req, &state)?;
let address =
Address::decode(&b58_decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
.map_err(ErrorInternalServerError)?;
@ -218,9 +289,12 @@ pub async fn get_raw(
#[head("/api/raw/{hash}")]
pub async fn head_raw(
req: HttpRequest,
state: web::Data<State>,
hash: web::Path<String>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let address =
Address::decode(&b58_decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
.map_err(ErrorInternalServerError)?;
@ -254,10 +328,13 @@ pub async fn head_raw(
#[get("/api/thumb/{hash}")]
pub async fn get_thumbnail(
req: HttpRequest,
state: web::Data<State>,
hash: web::Path<String>,
web::Query(query): web::Query<HashMap<String, String>>,
) -> Result<Either<NamedFile, HttpResponse>, Error> {
check_auth(&req, &state)?;
#[cfg(feature = "previews")]
if let Some(preview_store) = &state.preview_store {
let hash = hash.into_inner();
@ -299,7 +376,13 @@ pub async fn get_thumbnail(
}
#[post("/api/query")]
pub async fn get_query(state: web::Data<State>, query: String) -> Result<HttpResponse, Error> {
pub async fn get_query(
req: HttpRequest,
state: web::Data<State>,
query: String,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let in_query: Query = query.parse().map_err(ErrorBadRequest)?;
@ -341,9 +424,12 @@ impl EntriesAsHash for Vec<Entry> {
#[get("/api/obj/{address_str}")]
pub async fn get_object(
req: HttpRequest,
state: web::Data<State>,
address: web::Path<Address>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let address = address.into_inner();
@ -406,7 +492,7 @@ pub async fn put_object(
payload: web::Json<PutInput>,
web::Query(query): web::Query<UpdateQuery>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let user = check_auth(&req, &state)?;
let (entry_address, entity_address) = {
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
@ -415,6 +501,7 @@ pub async fn put_object(
debug!("PUTting {in_entry:?}");
let provenance = query.provenance.clone();
let _user = user.clone();
let process_inentry = move |in_entry: InEntry| -> Result<Entry> {
if let Some(entity) = in_entry.entity {
Ok(Entry {
@ -428,6 +515,7 @@ pub async fn put_object(
.trim()
.to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: _user.clone(),
})
} else {
Ok(Entry::try_from(&InvariantEntry {
@ -469,15 +557,25 @@ pub async fn put_object(
let _address = address.clone();
let _job_container = state.job_container.clone();
let _store = state.store.clone();
let _user = user.clone();
block_background::<_, _, anyhow::Error>(move || {
let entry_count =
extractors::extract(&_address, &connection, _store, _job_container);
let entry_count = extractors::extract(
&_address,
&connection,
_store,
_job_container,
OperationContext {
user: _user,
provenance: "API".to_string(),
},
);
debug!("Added {entry_count} extracted entries for {_address:?}");
Ok(())
});
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let _user = user.clone();
web::block(move || {
connection.transaction::<_, anyhow::Error, _>(|| {
if connection.retrieve_object(&address)?.is_empty() {
@ -496,6 +594,7 @@ pub async fn put_object(
})
.trim()
.to_string(),
user: _user,
timestamp: chrono::Utc::now().naive_utc(),
})?;
}
@ -518,7 +617,7 @@ pub async fn put_blob(
state: web::Data<State>,
mut payload: Multipart,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let user = check_auth(&req, &state)?;
if let Some(mut field) = payload.try_next().await? {
let mut file = NamedTempFile::new()?;
@ -557,6 +656,7 @@ pub async fn put_blob(
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let _store = state.store.clone();
let _filename = filename.clone();
let _user = user.clone();
let hash = web::block(move || {
let options = connection.get_vault_options()?;
_store
@ -565,6 +665,10 @@ pub async fn put_blob(
Blob::from_filepath(file.path()),
_filename,
options.blob_mode,
OperationContext {
user: _user,
provenance: "API".to_string(),
},
)
.map_err(anyhow::Error::from)
})
@ -590,8 +694,18 @@ pub async fn put_blob(
let _job_container = state.job_container.clone();
let _store = state.store.clone();
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let _user = user.clone();
block_background::<_, _, anyhow::Error>(move || {
let entry_count = extractors::extract(&_address, &connection, _store, _job_container);
let entry_count = extractors::extract(
&_address,
&connection,
_store,
_job_container,
OperationContext {
user: _user,
provenance: "API".to_string(),
},
);
debug!("Added {entry_count} extracted entries for {_address:?}");
Ok(())
});
@ -609,7 +723,7 @@ pub async fn put_object_attribute(
value: web::Json<EntryValue>,
web::Query(query): web::Query<UpdateQuery>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let user = check_auth(&req, &state)?;
let (address, attribute) = path.into_inner();
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
@ -632,6 +746,7 @@ pub async fn put_object_attribute(
})
.trim()
.to_string(),
user: user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
};
@ -736,7 +851,12 @@ pub async fn get_address(
}
#[get("/api/all/attributes")]
pub async fn get_all_attributes(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn get_all_attributes(
req: HttpRequest,
state: web::Data<State>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let attributes = web::block(move || connection.get_all_attributes())
.await?
@ -779,6 +899,8 @@ pub async fn list_hier(
path: web::Path<String>,
req: HttpRequest,
) -> Result<HttpResponse, Error> {
let user = check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
if path.is_empty() {
Ok(HttpResponse::MovedPermanently()
@ -789,9 +911,19 @@ pub async fn list_hier(
trace!(r#"Listing path "{}""#, upath);
let create = !req.method().is_safe();
let path = web::block(move || resolve_path(&connection, &upath, create))
.await?
.map_err(ErrorNotFound)?;
let path = web::block(move || {
resolve_path(
&connection,
&upath,
create,
OperationContext {
user,
provenance: "API".to_string(),
},
)
})
.await?
.map_err(ErrorNotFound)?;
match path.last() {
Some(addr) => Ok(HttpResponse::Found()
.append_header((http::header::LOCATION, format!("../../api/obj/{}", addr)))
@ -802,7 +934,11 @@ pub async fn list_hier(
}
#[get("/api/hier_roots")]
pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn list_hier_roots(
req: HttpRequest,
state: web::Data<State>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let result = web::block(move || {
@ -830,7 +966,7 @@ pub async fn api_refresh(
state: web::Data<State>,
web::Query(query): web::Query<RescanRequest>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let user = check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
@ -847,11 +983,19 @@ pub async fn api_refresh(
.unwrap_or_default(),
),
},
OperationContext {
user: user.clone(),
provenance: "API".to_string(),
},
);
let _ = crate::extractors::extract_all(
state.upend.clone(),
state.store.clone(),
state.job_container.clone(),
OperationContext {
user: user.clone(),
provenance: "API".to_string(),
},
);
Ok(())
});
@ -859,13 +1003,15 @@ pub async fn api_refresh(
}
#[get("/api/stats/vault")]
pub async fn vault_stats(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn vault_stats(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
Ok(HttpResponse::Ok().json(connection.get_stats().map_err(ErrorInternalServerError)?))
}
#[get("/api/stats/store")]
pub async fn store_stats(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn store_stats(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
Ok(HttpResponse::Ok().json(json!({
"main": state.store.stats().map_err(ErrorInternalServerError)?
})))
@ -878,9 +1024,11 @@ pub struct JobsRequest {
#[get("/api/jobs")]
pub async fn get_jobs(
req: HttpRequest,
state: web::Data<State>,
web::Query(query): web::Query<JobsRequest>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let jobs = state
.job_container
.get_jobs()
@ -907,12 +1055,14 @@ pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
upend_db::common::build::PKG_VERSION,
build::PKG_VERSION
),
"desktop": state.config.desktop_enabled
"desktop": state.config.desktop_enabled,
"public": *state.public.lock().unwrap(),
})))
}
#[get("/api/options")]
pub async fn get_options(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn get_options(req: HttpRequest, state: web::Data<State>) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
Ok(HttpResponse::Ok().json(
connection
@ -940,7 +1090,11 @@ pub async fn put_options(
}
#[get("/api/migration/user-entries")]
pub async fn get_user_entries(state: web::Data<State>) -> Result<HttpResponse, Error> {
pub async fn get_user_entries(
req: HttpRequest,
state: web::Data<State>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let result = web::block(move || connection.get_explicit_entries())
@ -1215,6 +1369,7 @@ mod tests {
initial: true,
tree_mode: upend_db::BlobMode::default(),
},
OperationContext::default(),
)
.unwrap();
@ -1226,11 +1381,11 @@ mod tests {
desktop_enabled: false,
trust_executables: false,
secret: "secret".to_string(),
key: None,
},
job_container,
preview_store: None,
preview_thread_pool: None,
public: Arc::new(Mutex::new(true)),
}
}
}
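The new auth endpoints (`/api/auth/login`, `/api/auth/register`, `/api/auth/logout`, `/api/auth/whoami`) issue and check JWTs via `create_token` and `check_auth`, accepting the token either as a `Bearer` header or a `key` cookie. A standalone sketch of the token round-trip with the `jsonwebtoken` crate, mirroring the `JwtClaims` shape and one-week expiry above (the secret value here is a placeholder):

```rust
// Standalone sketch of the JWT round-trip behind create_token / check_auth.
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use std::time::{SystemTime, UNIX_EPOCH};

#[derive(Debug, Serialize, Deserialize)]
struct JwtClaims {
    user: String,
    exp: usize,
}

fn token_roundtrip() -> Result<(), jsonwebtoken::errors::Error> {
    let secret = "upend"; // placeholder; the server reads this from --secret / UPEND_SECRET
    let claims = JwtClaims {
        user: "alice".to_string(),
        // One week from now, as in create_token.
        exp: (SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs()
            + 7 * 24 * 60 * 60) as usize,
    };
    let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret.as_bytes()))?;
    let decoded = decode::<JwtClaims>(
        &token,
        &DecodingKey::from_secret(secret.as_bytes()),
        &Validation::default(),
    )?;
    assert_eq!(decoded.claims.user, "alice");
    Ok(())
}
```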

View File

@ -46,6 +46,9 @@ where
.app_data(actix_web::web::Data::new(state))
.wrap(actix_web::middleware::Logger::default().exclude("/api/jobs"))
.service(routes::login)
.service(routes::register)
.service(routes::logout)
.service(routes::whoami)
.service(routes::get_raw)
.service(routes::head_raw)
.service(routes::get_thumbnail)

View File

@ -26,13 +26,16 @@ once_cell = "1.7.2"
lru = "0.7.0"
diesel = { version = "1.4", features = [
"sqlite",
"r2d2",
"chrono",
"serde_json",
"sqlite",
"r2d2",
"chrono",
"serde_json",
] }
diesel_migrations = "1.4"
libsqlite3-sys = { version = "^0", features = ["bundled"] }
password-hash = "0.5.0"
argon2 = "0.5.3"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
@ -42,10 +45,10 @@ regex = "1"
multibase = "0.9"
multihash = { version = "*", default-features = false, features = [
"alloc",
"multihash-impl",
"sha2",
"identity",
"alloc",
"multihash-impl",
"sha2",
"identity",
] }
uuid = { version = "1.4", features = ["v4"] }
url = { version = "2", features = ["serde"] }

View File

@ -0,0 +1 @@
DROP TABLE users;

View File

@ -0,0 +1,7 @@
CREATE TABLE users
(
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
username VARCHAR NOT NULL,
password VARCHAR NOT NULL,
UNIQUE (username)
);

View File

@ -0,0 +1,2 @@
ALTER TABLE data
DROP COLUMN user;

View File

@ -0,0 +1,2 @@
ALTER TABLE data
ADD COLUMN user VARCHAR;

View File

@ -14,6 +14,7 @@ impl TryFrom<&models::Entry> for Entry {
attribute: e.attribute.parse()?,
value: value_str.parse().unwrap(),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
} else if let Some(value_num) = e.value_num {
@ -22,6 +23,7 @@ impl TryFrom<&models::Entry> for Entry {
attribute: e.attribute.parse()?,
value: EntryValue::Number(value_num),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
} else {
@ -30,6 +32,7 @@ impl TryFrom<&models::Entry> for Entry {
attribute: e.attribute.parse()?,
value: EntryValue::Number(f64::NAN),
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
})
}
@ -53,6 +56,7 @@ impl TryFrom<&Entry> for models::Entry {
value_num: None,
immutable: false,
provenance: e.provenance.clone(),
user: e.user.clone(),
timestamp: e.timestamp,
};

View File

@ -6,6 +6,7 @@ use lru::LruCache;
use tracing::trace;
use uuid::Uuid;
use crate::OperationContext;
use upend_base::addressing::Address;
use upend_base::constants::ATTR_LABEL;
use upend_base::constants::{ATTR_IN, HIER_ROOT_ADDR, HIER_ROOT_INVARIANT};
@ -91,6 +92,7 @@ pub fn fetch_or_create_dir(
parent: Option<Address>,
directory: UNode,
create: bool,
context: OperationContext,
) -> Result<Address> {
match parent.clone() {
Some(address) => trace!("FETCHING/CREATING {}/{:#}", address, directory),
@ -137,7 +139,8 @@ pub fn fetch_or_create_dir(
entity: new_directory_address.clone(),
attribute: ATTR_LABEL.parse().unwrap(),
value: directory.to_string().into(),
provenance: "SYSTEM FS".to_string(),
provenance: context.provenance.clone() + "HIER",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(directory_entry)?;
@ -147,7 +150,8 @@ pub fn fetch_or_create_dir(
entity: new_directory_address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: parent.into(),
provenance: "SYSTEM FS".to_string(),
provenance: context.provenance.clone() + "HIER",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}
} else {
@ -155,7 +159,8 @@ pub fn fetch_or_create_dir(
entity: new_directory_address.clone(),
attribute: ATTR_IN.parse().unwrap(),
value: HIER_ROOT_ADDR.clone().into(),
provenance: "SYSTEM FS".to_string(),
provenance: context.provenance.clone() + "HIER",
user: context.user.clone(),
timestamp: chrono::Utc::now().naive_utc(),
}
})?;
@ -177,6 +182,7 @@ pub fn resolve_path(
connection: &UpEndConnection,
path: &UHierPath,
create: bool,
context: OperationContext,
) -> Result<Vec<Address>> {
let mut result: Vec<Address> = vec![];
let mut path_stack = path.0.to_vec();
@ -188,6 +194,7 @@ pub fn resolve_path(
result.last().cloned(),
path_stack.pop().unwrap(),
create,
context.clone(),
)?;
result.push(dir_address);
}
@ -201,6 +208,7 @@ pub fn resolve_path_cached(
connection: &UpEndConnection,
path: &UHierPath,
create: bool,
context: OperationContext,
cache: &Arc<Mutex<ResolveCache>>,
) -> Result<Vec<Address>> {
let mut result: Vec<Address> = vec![];
@ -216,7 +224,7 @@ pub fn resolve_path_cached(
result.push(address.clone());
} else {
drop(cache_lock);
let address = fetch_or_create_dir(connection, parent, node, create)?;
let address = fetch_or_create_dir(connection, parent, node, create, context.clone())?;
result.push(address.clone());
cache.lock().unwrap().put(key, address);
}
@ -286,11 +294,23 @@ mod tests {
let open_result = UpEndDatabase::open(&temp_dir, true).unwrap();
let connection = open_result.db.connection().unwrap();
let foo_result = fetch_or_create_dir(&connection, None, UNode("foo".to_string()), true);
let foo_result = fetch_or_create_dir(
&connection,
None,
UNode("foo".to_string()),
true,
OperationContext::default(),
);
assert!(foo_result.is_ok());
let foo_result = foo_result.unwrap();
let bar_result = fetch_or_create_dir(&connection, None, UNode("bar".to_string()), true);
let bar_result = fetch_or_create_dir(
&connection,
None,
UNode("bar".to_string()),
true,
OperationContext::default(),
);
assert!(bar_result.is_ok());
let bar_result = bar_result.unwrap();
@ -299,6 +319,7 @@ mod tests {
Some(bar_result.clone()),
UNode("baz".to_string()),
true,
OperationContext::default(),
);
assert!(baz_result.is_ok());
let baz_result = baz_result.unwrap();
@ -306,7 +327,12 @@ mod tests {
let roots = list_roots(&connection);
assert_eq!(roots.unwrap(), [foo_result, bar_result.clone()]);
let resolve_result = resolve_path(&connection, &"bar/baz".parse().unwrap(), false);
let resolve_result = resolve_path(
&connection,
&"bar/baz".parse().unwrap(),
false,
OperationContext::default(),
);
assert!(resolve_result.is_ok());
assert_eq!(
@ -314,10 +340,20 @@ mod tests {
vec![bar_result.clone(), baz_result.clone()]
);
let resolve_result = resolve_path(&connection, &"bar/baz/bax".parse().unwrap(), false);
let resolve_result = resolve_path(
&connection,
&"bar/baz/bax".parse().unwrap(),
false,
OperationContext::default(),
);
assert!(resolve_result.is_err());
let resolve_result = resolve_path(&connection, &"bar/baz/bax".parse().unwrap(), true);
let resolve_result = resolve_path(
&connection,
&"bar/baz/bax".parse().unwrap(),
true,
OperationContext::default(),
);
assert!(resolve_result.is_ok());
let bax_result = fetch_or_create_dir(
@ -325,6 +361,7 @@ mod tests {
Some(baz_result.clone()),
UNode("bax".to_string()),
false,
OperationContext::default(),
);
assert!(bax_result.is_ok());
let bax_result = bax_result.unwrap();

View File

@ -1,4 +1,4 @@
use super::schema::{data, meta};
use super::schema::{data, meta, users};
use chrono::NaiveDateTime;
use serde::Serialize;
@ -13,6 +13,7 @@ pub struct Entry {
pub value_num: Option<f64>,
pub immutable: bool,
pub provenance: String,
pub user: Option<String>,
pub timestamp: NaiveDateTime,
}
@ -23,3 +24,11 @@ pub struct MetaValue {
pub key: String,
pub value: String,
}
#[derive(Queryable, Insertable, Serialize, Clone, Debug)]
#[table_name = "users"]
pub struct UserValue {
pub id: i32,
pub username: String,
pub password: String,
}

View File

@ -8,6 +8,7 @@ table! {
value_num -> Nullable<Double>,
immutable -> Bool,
provenance -> Text,
user -> Nullable<Text>,
timestamp -> Timestamp,
}
}
@ -20,4 +21,10 @@ table! {
}
}
allow_tables_to_appear_in_same_query!(data, meta,);
table! {
users (id) {
id -> Integer,
username -> Text,
password -> Text,
}
}

View File

@ -26,6 +26,7 @@ use crate::inner::models;
use crate::inner::schema::data;
use crate::util::LoggerSink;
use anyhow::{anyhow, Result};
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::result::{DatabaseErrorKind, Error};
@ -116,7 +117,7 @@ pub const DATABASE_FILENAME: &str = "upend.sqlite3";
impl UpEndDatabase {
pub fn open<P: AsRef<Path>>(dirpath: P, reinitialize: bool) -> Result<OpenResult> {
embed_migrations!("./migrations/upend/");
embed_migrations!("./migrations/upend");
let upend_path = dirpath.as_ref().join(UPEND_SUBDIR);
@ -273,6 +274,55 @@ impl UpEndConnection {
Ok(VaultOptions { blob_mode })
}
pub fn get_users(&self) -> Result<Vec<String>> {
use crate::inner::schema::users::dsl;
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
let result = dsl::users.select(dsl::username).load::<String>(&conn)?;
Ok(result)
}
pub fn set_user(&self, username: &str, password: &str) -> Result<bool> {
use crate::inner::schema::users::dsl;
let salt = password_hash::SaltString::generate(&mut password_hash::rand_core::OsRng);
let argon2 = Argon2::default();
let hashed_password = argon2
.hash_password(password.as_ref(), &salt)
.map_err(|e| anyhow!(e))?
.to_string();
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
let result = diesel::replace_into(dsl::users)
.values((
dsl::username.eq(username),
dsl::password.eq(hashed_password),
))
.execute(&conn)?;
Ok(result > 0)
}
pub fn authenticate_user(&self, username: &str, password: &str) -> Result<()> {
use crate::inner::schema::users::dsl;
let conn = self.pool.get()?;
let user_result = dsl::users
.filter(dsl::username.eq(username))
.load::<models::UserValue>(&conn)?;
let user = user_result.first().ok_or(anyhow!("User not found"))?;
let parsed_hash = PasswordHash::new(&user.password).map_err(|e| anyhow!(e))?;
let argon2 = Argon2::default();
argon2
.verify_password(password.as_ref(), &parsed_hash)
.map_err(|e| anyhow!(e))
}
pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result<Option<Entry>> {
use crate::inner::schema::data::dsl::*;
@ -602,6 +652,22 @@ mod test {
assert_eq!(result[0].entity, edge_entity);
assert_eq!(result[0].value, EntryValue::Address(random_entity));
}
#[test]
fn test_users() {
let tempdir = TempDir::new().unwrap();
let result = UpEndDatabase::open(&tempdir, false).unwrap();
let db = result.db;
let connection = db.connection().unwrap();
assert!(connection.authenticate_user("thm", "hunter2").is_err());
connection.set_user("thm", "hunter2").unwrap();
connection.authenticate_user("thm", "hunter2").unwrap();
assert!(connection.authenticate_user("thm", "password").is_err());
connection.set_user("thm", "password").unwrap();
connection.authenticate_user("thm", "password").unwrap();
}
}
#[derive(Debug, Serialize, Deserialize)]
@ -637,3 +703,18 @@ impl std::str::FromStr for BlobMode {
}
}
}
#[derive(Debug, Clone)]
pub struct OperationContext {
pub user: Option<String>,
pub provenance: String,
}
impl Default for OperationContext {
fn default() -> Self {
Self {
user: None,
provenance: "SYSTEM".to_string(),
}
}
}
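Here `set_user` stores an Argon2 hash in the new `users` table and `authenticate_user` verifies against it, while `OperationContext` (defaulting to `user: None`, provenance `"SYSTEM"`) is what callers now pass down so writes can be attributed. An isolated sketch of the hash/verify step with the `argon2` and `password-hash` crates, mirroring the code above:

```rust
// Isolated sketch of the scheme used by set_user / authenticate_user:
// Argon2 with a random salt, stored as a self-describing PHC string.
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use password_hash::{rand_core::OsRng, SaltString};

fn hash_and_verify(password: &str) -> anyhow::Result<()> {
    let argon2 = Argon2::default();
    let salt = SaltString::generate(&mut OsRng);

    // This string is what ends up in users.password.
    let stored = argon2
        .hash_password(password.as_bytes(), &salt)
        .map_err(|e| anyhow::anyhow!(e))?
        .to_string();

    // On login: parse the stored hash and verify the supplied password.
    let parsed = PasswordHash::new(&stored).map_err(|e| anyhow::anyhow!(e))?;
    argon2
        .verify_password(password.as_bytes(), &parsed)
        .map_err(|e| anyhow::anyhow!(e))
}
```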

View File

@ -6,6 +6,7 @@ macro_rules! upend_insert_val {
attribute: $attribute.parse().unwrap(),
value: upend_base::entry::EntryValue::String(String::from($value)),
provenance: "SYSTEM INIT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
})
}};
@ -19,6 +20,7 @@ macro_rules! upend_insert_addr {
attribute: $attribute.parse().unwrap(),
value: upend_base::entry::EntryValue::Address($addr.clone()),
provenance: "SYSTEM INIT".to_string(),
user: None,
timestamp: chrono::Utc::now().naive_utc(),
})
}};

View File

@ -5,7 +5,8 @@ use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierP
use crate::jobs::{JobContainer, JobHandle};
use crate::util::hash_at_path;
use crate::{
BlobMode, ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR,
BlobMode, ConnectionOptions, LoggingHandler, OperationContext, UpEndConnection, UpEndDatabase,
UPEND_SUBDIR,
};
use anyhow::{anyhow, Result};
use chrono::prelude::*;
@ -95,6 +96,7 @@ impl FsStore {
db: D,
job_handle: JobHandle,
options: UpdateOptions,
context: OperationContext,
) -> Result<Vec<UpdatePathOutcome>> {
let start = Instant::now();
info!("Vault rescan started.");
@ -153,6 +155,7 @@ impl FsStore {
&existing_files,
&resolve_cache,
quick_check,
context.clone(),
);
let mut cnt = count.write().unwrap();
@ -249,6 +252,7 @@ impl FsStore {
existing_files: &Arc<RwLock<Vec<db::File>>>,
resolve_cache: &Arc<Mutex<ResolveCache>>,
quick_check: bool,
context: OperationContext,
) -> Result<UpdatePathOutcome> {
trace!("Processing: {:?}", path);
@ -366,6 +370,7 @@ impl FsStore {
size,
mtime,
Some(resolve_cache),
context,
)
.map(|_| {
info!("Added: {:?}", path);
@ -422,6 +427,7 @@ impl FsStore {
size: i64,
mtime: Option<NaiveDateTime>,
resolve_cache: Option<&Arc<Mutex<ResolveCache>>>,
context: OperationContext,
) -> Result<Address> {
let normalized_path = self.normalize_path(path)?;
let new_file = db::NewFile {
@ -444,6 +450,7 @@ impl FsStore {
value: (size as f64).into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
};
let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
@ -453,6 +460,7 @@ impl FsStore {
value: mime_type.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
});
let added_entry = Entry {
@ -465,6 +473,7 @@ impl FsStore {
.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
};
let components = normalized_path.components().collect::<Vec<Component>>();
@ -488,13 +497,16 @@ impl FsStore {
.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
};
let label_entry_addr = connection.insert_entry(label_entry)?;
if let Some(upath) = upath {
let resolved_path = match resolve_cache {
Some(cache) => resolve_path_cached(connection, &upath, true, cache)?,
None => resolve_path(connection, &upath, true)?,
Some(cache) => {
resolve_path_cached(connection, &upath, true, context.clone(), cache)?
}
None => resolve_path(connection, &upath, true, context.clone())?,
};
let parent_dir = resolved_path.last().unwrap();
@ -504,6 +516,7 @@ impl FsStore {
value: parent_dir.clone().into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
};
let dir_has_entry_addr = connection.insert_entry(dir_has_entry)?;
@ -513,6 +526,7 @@ impl FsStore {
value: label_entry_addr.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
user: context.user.clone(),
};
connection.insert_entry(alias_entry)?;
}
@ -651,6 +665,7 @@ impl UpStore for FsStore {
blob: Blob,
name_hint: Option<String>,
blob_mode: Option<BlobMode>,
context: OperationContext,
) -> Result<UpMultihash, super::StoreError> {
let file_path = blob.get_file_path();
let hash = hash_at_path(file_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
@ -704,6 +719,7 @@ impl UpStore for FsStore {
size,
mtime,
None,
context,
)
.map_err(|e| StoreError::Unknown(e.to_string()))?;
}
@ -716,6 +732,7 @@ impl UpStore for FsStore {
db: &UpEndDatabase,
mut job_container: JobContainer,
options: UpdateOptions,
context: OperationContext,
) -> Result<Vec<UpdatePathOutcome>, StoreError> {
trace!(
"Running a vault update of {:?}, options = {:?}.",
@ -726,7 +743,7 @@ impl UpStore for FsStore {
match job_result {
Ok(job_handle) => {
let result = self.rescan_vault(db, job_handle, options);
let result = self.rescan_vault(db, job_handle, options, context);
if let Err(err) = &result {
error!("Update did not succeed! {:?}", err);
@ -836,6 +853,7 @@ mod test {
initial: true,
tree_mode: BlobMode::default(),
},
OperationContext::default(),
);
assert!(rescan_result.is_ok());
}
@ -882,6 +900,7 @@ mod test {
initial: quick,
tree_mode: BlobMode::default(),
},
OperationContext::default(),
);
assert!(rescan_result.is_ok());
@ -902,6 +921,7 @@ mod test {
initial: quick,
tree_mode: BlobMode::default(),
},
OperationContext::default(),
);
assert!(rescan_result.is_ok());
@ -925,6 +945,7 @@ mod test {
initial: quick,
tree_mode: BlobMode::default(),
},
OperationContext::default(),
);
assert!(rescan_result.is_ok());
@ -977,6 +998,7 @@ mod test {
initial: quick,
tree_mode: BlobMode::default(),
},
OperationContext::default(),
);
assert!(rescan_result.is_ok());
@ -1079,6 +1101,7 @@ mod test {
initial: true,
tree_mode,
},
OperationContext::default(),
)
.unwrap();
@ -1089,7 +1112,7 @@ mod test {
paths.iter().for_each(|path| {
let upath: UHierPath = path.parse().unwrap();
assert!(
resolve_path(&connection, &upath, false).is_ok(),
resolve_path(&connection, &upath, false, OperationContext::default()).is_ok(),
"Failed: {}",
upath
);

View File

@ -1,6 +1,7 @@
use std::path::{Path, PathBuf};
use super::{UpEndConnection, UpEndDatabase};
use crate::OperationContext;
use crate::{jobs::JobContainer, BlobMode};
use upend_base::hash::UpMultihash;
@ -61,12 +62,14 @@ pub trait UpStore {
blob: Blob,
name_hint: Option<String>,
blob_mode: Option<BlobMode>,
context: OperationContext,
) -> Result<UpMultihash>;
fn update(
&self,
database: &UpEndDatabase,
job_container: JobContainer,
options: UpdateOptions,
context: OperationContext,
) -> Result<Vec<UpdatePathOutcome>>;
fn stats(&self) -> Result<serde_json::Value>;
}

View File

@ -1,6 +1,6 @@
{
"name": "@upnd/upend",
"version": "0.4.1",
"version": "0.5.0",
"description": "Client library to interact with the UpEnd system.",
"main": "dist/index.js",
"types": "dist/index.d.ts",

View File

@ -22,6 +22,11 @@ const dbg = debug("upend:api");
export type { AddressComponents };
export type UpendApiError = {
kind: "Unauthorized" | "HttpError" | "FetchError" | "Unknown";
error?: Error;
};
export class UpEndApi {
private instanceUrl = "";
private readonly wasmExtensions: UpEndWasmExtensions | undefined = undefined;
@ -29,15 +34,21 @@ export class UpEndApi {
private queryOnceLRU = new LRU<string, UpListing>({ max: 128 });
private inFlightRequests: { [key: string]: Promise<UpListing> | null } = {};
private key: string | undefined;
private readonly onError: ((error: UpendApiError) => void) | undefined;
constructor(config: {
instanceUrl?: string;
wasmExtensions?: UpEndWasmExtensions;
timeout?: number;
authKey?: string;
onError?: (error: UpendApiError) => void;
}) {
this.setInstanceUrl(config.instanceUrl || "http://localhost:8093");
this.wasmExtensions = config.wasmExtensions;
this.timeout = config.timeout || 30_000;
this.key = config.authKey;
this.onError = config.onError;
}
public setInstanceUrl(apiUrl: string) {
@ -53,10 +64,10 @@ export class UpEndApi {
options?: ApiFetchOptions,
): Promise<UpObject> {
dbg("Fetching Entity %s", address);
const signal = this.getAbortSignal(options);
const entityFetch = await fetch(`${this.apiUrl}/obj/${address}`, {
signal,
});
const entityFetch = await this.fetch(
`${this.apiUrl}/obj/${address}`,
options,
);
const entityResult = (await entityFetch.json()) as EntityListing;
const entityListing = new UpListing(entityResult.entries);
return entityListing.getObject(address);
@ -64,8 +75,7 @@ export class UpEndApi {
public async fetchEntry(address: string, options?: ApiFetchOptions) {
dbg("Fetching entry %s", address);
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/raw/${address}`, { signal });
const response = await this.fetch(`${this.apiUrl}/raw/${address}`, options);
const data = await response.json();
const listing = new UpListing({ address: data });
return listing.entries[0];
@ -82,12 +92,10 @@ export class UpEndApi {
if (!this.inFlightRequests[queryStr]) {
dbg(`Querying: ${query}`);
this.inFlightRequests[queryStr] = new Promise((resolve, reject) => {
const signal = this.getAbortSignal(options);
fetch(`${this.apiUrl}/query`, {
this.fetch(`${this.apiUrl}/query`, options, {
method: "POST",
body: queryStr,
keepalive: true,
signal,
})
.then(async (response) => {
if (!response.ok) {
@ -117,12 +125,10 @@ export class UpEndApi {
options?: ApiFetchOptions,
): Promise<PutResult> {
dbg("Putting %O", input);
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/obj`, {
method: "PUT",
const response = await this.fetch(`${this.apiUrl}/obj`, options, {
headers: { "Content-Type": "application/json" },
method: "PUT",
body: JSON.stringify(input),
signal,
});
return await response.json();
@ -141,12 +147,10 @@ export class UpEndApi {
url += `?provenance=${provenance}`;
}
const signal = this.getAbortSignal(options);
const response = await fetch(url, {
const response = await this.fetch(url, options, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(value),
signal,
});
return await response.json();
@ -203,10 +207,9 @@ export class UpEndApi {
xhr.send(formData);
});
} else {
const response = await fetch(`${this.apiUrl}/blob`, {
const response = await this.fetch(`${this.apiUrl}/blob`, options, {
method: "PUT",
body: formData,
signal,
});
if (!response.ok) {
@ -222,8 +225,9 @@ export class UpEndApi {
options?: ApiFetchOptions,
): Promise<void> {
dbg("Deleting entry %s", address);
const signal = this.getAbortSignal(options);
await fetch(`${this.apiUrl}/obj/${address}`, { method: "DELETE", signal });
await this.fetch(`${this.apiUrl}/obj/${address}`, options, {
method: "DELETE",
});
}
public getRaw(address: Address, preview = false) {
@ -236,26 +240,24 @@ export class UpEndApi {
options?: ApiFetchOptions,
) {
dbg("Getting %s raw (preview = %s)", address, preview);
const signal = this.getAbortSignal(options);
return await fetch(this.getRaw(address, preview), { signal });
return await this.fetch(this.getRaw(address, preview), options);
}
public async refreshVault(options?: ApiFetchOptions) {
dbg("Triggering vault refresh");
const signal = this.getAbortSignal(options);
return await fetch(`${this.apiUrl}/refresh`, { method: "POST", signal });
return await this.fetch(`${this.apiUrl}/refresh`, options, {
method: "POST",
});
}
public async nativeOpen(address: Address, options?: ApiFetchOptions) {
dbg("Opening %s natively", address);
const signal = this.getAbortSignal(options);
return fetch(`${this.apiUrl}/raw/${address}?native=1`, { signal });
return this.fetch(`${this.apiUrl}/raw/${address}?native=1`, options);
}
public async fetchRoots(options?: ApiFetchOptions): Promise<ListingResult> {
dbg("Fetching hierarchical roots...");
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/hier_roots`, { signal });
const response = await this.fetch(`${this.apiUrl}/hier_roots`, options);
const roots = await response.json();
dbg("Hierarchical roots: %O", roots);
return roots;
@ -263,8 +265,7 @@ export class UpEndApi {
public async fetchJobs(options?: ApiFetchOptions): Promise<IJob[]> {
// dbg("Fetching jobs...");
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/jobs`, { signal });
const response = await this.fetch(`${this.apiUrl}/jobs`, options);
return await response.json();
}
@ -272,8 +273,7 @@ export class UpEndApi {
options?: ApiFetchOptions,
): Promise<AttributeListingResult> {
dbg("Fetching all attributes...");
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/all/attributes`, { signal });
const response = await this.fetch(`${this.apiUrl}/all/attributes`, options);
const result = await response.json();
dbg("All attributes: %O", result);
return await result;
@ -281,19 +281,25 @@ export class UpEndApi {
public async fetchInfo(options?: ApiFetchOptions): Promise<VaultInfo> {
dbg("Fetching vault info...");
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/info`, { signal });
const response = await this.fetch(`${this.apiUrl}/info`, options);
const result = await response.json();
dbg("Vault info: %O", result);
return result;
}
public async fetchOptions(options?: ApiFetchOptions): Promise<VaultOptions> {
dbg("Fetching vault options...");
const response = await this.fetch(`${this.apiUrl}/options`, options);
const result = await response.json();
dbg("Vault options: %O", result);
return result;
}
public async fetchStoreInfo(
options?: ApiFetchOptions,
): Promise<{ [key: string]: StoreInfo }> {
dbg("Fetching store info...");
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/stats/store`, { signal });
const response = await this.fetch(`${this.apiUrl}/stats/store`, options);
const result = await response.json();
dbg("Store info: %O");
return await result;
@ -309,16 +315,15 @@ export class UpEndApi {
await this.wasmExtensions.init();
return this.wasmExtensions.AddressTypeConstants[input];
}
const signal = this.getAbortSignal(options);
response = await fetch(`${this.apiUrl}/address?type=${input}`, {
signal,
});
response = await this.fetch(
`${this.apiUrl}/address?type=${input}`,
options,
);
} else {
if ("urlContent" in input) {
const signal = this.getAbortSignal(options);
response = await fetch(
response = await this.fetch(
`${this.apiUrl}/address?url_content=${input.urlContent}`,
{ signal },
options,
);
} else {
throw new Error("Input cannot be empty.");
@ -352,8 +357,7 @@ export class UpEndApi {
public async getVaultOptions(
options?: ApiFetchOptions,
): Promise<VaultOptions> {
const signal = this.getAbortSignal(options);
const response = await fetch(`${this.apiUrl}/options`, { signal });
const response = await this.fetch(`${this.apiUrl}/options`, options);
return await response.json();
}
@ -369,12 +373,10 @@ export class UpEndApi {
payload["blob_mode"] = blob_mode;
}
const signal = this.getAbortSignal(apiOptions);
const response = await fetch(`${this.apiUrl}/options`, {
const response = await this.fetch(`${this.apiUrl}/options`, apiOptions, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
signal,
});
if (!response.ok) {
@ -382,6 +384,80 @@ export class UpEndApi {
}
}
public async authenticate(
credentials: {
username: string;
password: string;
},
mode: "key",
options?: ApiFetchOptions,
): Promise<{ key: string }>;
public async authenticate(
credentials: {
username: string;
password: string;
},
mode?: "cookie",
options?: ApiFetchOptions,
): Promise<void>;
public async authenticate(
credentials: {
username: string;
password: string;
},
mode: "key" | "cookie" | undefined,
options?: ApiFetchOptions,
): Promise<{ key: string } | void> {
const via = mode || "cookie";
const response = await this.fetch(
`${this.apiUrl}/auth/login?via=${via}`,
options,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(credentials),
},
);
if (!response.ok) {
throw Error(await response.text());
}
if (mode === "key") {
const data = await response.json();
if (!data.key) {
throw Error("No key returned from server.");
}
this.key = data.key;
return { key: data.key };
}
}
public async authStatus(
options?: ApiFetchOptions,
): Promise<{ user: string } | undefined> {
const response = await this.fetch(`${this.apiUrl}/auth/whoami`, options);
return await response.json();
}
public async resetAuth(mode: "key"): Promise<void>;
public async resetAuth(
mode?: "cookie",
options?: ApiFetchOptions,
): Promise<void>;
public async resetAuth(
mode?: "key" | "cookie",
options?: ApiFetchOptions,
): Promise<void> {
if (mode === "key") {
this.key = undefined;
} else {
await this.fetch(`${this.apiUrl}/auth/logout`, options, {
method: "POST",
});
}
}
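
A minimal usage sketch of the two login flows above (not part of the diff): the default "cookie" mode resolves to nothing and relies on the server-set session cookie, while "key" mode stores the returned key on the client, which the private fetch wrapper then sends as a Bearer header. The URL and credentials are illustrative.

import { UpEndApi } from "@upnd/upend";

const client = new UpEndApi({ instanceUrl: "http://localhost:8093" });

// Cookie mode (default): the server establishes a session cookie.
await client.authenticate({ username: "alice", password: "hunter2" });

// Key mode: the returned key is kept on the client and attached to
// later requests as an `Authorization: Bearer ...` header.
const { key } = await client.authenticate(
  { username: "alice", password: "hunter2" },
  "key",
);

// Resolves to e.g. { user: "alice" } for the current session.
const status = await client.authStatus();

// "key" just drops the stored key; cookie mode POSTs to the auth/logout endpoint.
await client.resetAuth("key");
await client.resetAuth();
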
private getAbortSignal(options: ApiFetchOptions | undefined) {
const controller = options?.abortController || new AbortController();
const timeout = options?.timeout || this.timeout;
@ -390,6 +466,51 @@ export class UpEndApi {
}
return controller.signal;
}
private async fetch(
url: string,
options: ApiFetchOptions | undefined,
requestInit?: RequestInit & { headers?: Record<string, string> },
): Promise<Response> {
const signal = this.getAbortSignal(options);
const headers = requestInit?.headers || {};
if (this.key) {
headers["Authorization"] = `Bearer ${this.key}`;
}
let result: Response;
let error: UpendApiError | undefined;
try {
result = await fetch(url, {
...requestInit,
signal,
headers,
});
if (!result.ok) {
if (result.status === 401) {
error = { kind: "Unauthorized" };
} else {
error = {
kind: "HttpError",
error: new Error(
`HTTP Error ${result.status}: ${result.statusText}`,
),
};
}
}
} catch (e) {
error = { kind: "FetchError", error: e as Error };
}
if (error) {
if (this.onError) {
this.onError(error);
}
throw error;
}
return result!;
}
}
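
Every request now flows through the private fetch helper above, which injects the stored key, maps failures onto UpendApiError values ("Unauthorized", "HttpError", or "FetchError"), notifies the optional onError callback, and rethrows. A hedged sketch of wiring that callback; the handling inside it is only an example of what a caller might do.

import { UpEndApi } from "@upnd/upend";

const client = new UpEndApi({
  instanceUrl: "http://localhost:8093",
  onError: (error) => {
    if (error.kind === "Unauthorized") {
      // e.g. surface a login prompt; the original call still rejects below.
      console.warn("Not authenticated.");
    } else {
      console.error("API request failed:", error);
    }
  },
});

try {
  await client.fetchRoots();
} catch (e) {
  // `e` is the same UpendApiError object that was passed to onError.
  console.debug("request failed", e);
}
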
export interface ApiFetchOptions {
@ -398,6 +519,7 @@ export interface ApiFetchOptions {
}
export type VaultBlobMode = "Flat" | "Mirror" | "Incoming";
export interface VaultOptions {
blob_mode: VaultBlobMode;
}
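
ApiFetchOptions lets a caller pass its own AbortController and a per-request timeout (falling back to the client-wide default of 30 000 ms set in the constructor), while VaultOptions currently carries only blob_mode. A small sketch reusing the `client` from the examples above; the one-second timeout and the set-up check are illustrative choices.

const controller = new AbortController();

// getVaultOptions() respects the per-request abort signal and timeout.
const pending = client.getVaultOptions({
  abortController: controller,
  timeout: 1_000, // overrides the default 30_000 ms for this call only
});

// The caller may cancel early, e.g. when the surrounding view unmounts:
// controller.abort();

const vaultOptions = await pending;
if (!vaultOptions.blob_mode) {
  // The web UI treats a missing blob_mode as "vault not yet set up".
  console.info("blob_mode is not configured yet");
}
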

View File

@ -121,6 +121,7 @@ export class UpEntry extends UpObject implements IEntry {
attribute: string;
value: IValue;
provenance: string;
user: string;
timestamp: string;
constructor(address: string, entry: IEntry, listing: UpListing) {
@ -130,6 +131,7 @@ export class UpEntry extends UpObject implements IEntry {
this.attribute = entry.attribute;
this.value = entry.value;
this.provenance = entry.provenance;
this.user = entry.user;
this.timestamp = entry.timestamp;
}

View File

@ -18,6 +18,8 @@ export interface IEntry {
value: IValue;
/** The origin or provenance of the data entry (e.g. SYSTEM or USER API...) */
provenance: string;
/** The user who created the data entry. */
user: string;
/** The timestamp when the data entry was created in RFC 3339 format. */
timestamp: string;
}
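
For illustration, an entry carrying the new `user` field might look as follows. The attribute, value, and timestamp are made-up examples, the `{ t, c }` value shape follows its usage elsewhere in this diff, and the import path is assumed to match the one used for VaultInfo.

import type { IEntry } from "@upnd/upend/types";

// Only fields visible in this diff are filled in; the rest are omitted.
const example: Partial<IEntry> = {
  attribute: "LBL",
  value: { t: "String", c: "holiday-photo.jpg" },
  provenance: "API",
  user: "alice",
  timestamp: "2024-04-02T19:08:39Z",
};
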
@ -97,6 +99,7 @@ export interface VaultInfo {
location: string;
version: string;
desktop: boolean;
public: boolean;
}
export interface StoreInfo {

View File

@ -1,6 +1,25 @@
import { UpEndApi } from '@upnd/upend';
import { UpEndWasmExtensionsWeb } from '@upnd/upend/wasm/web';
import wasmURL from '@upnd/wasm-web/upend_wasm_bg.wasm?url';
import { type StartStopNotifier, writable, type Writable } from 'svelte/store';
const wasm = new UpEndWasmExtensionsWeb(wasmURL);
export default new UpEndApi({ instanceUrl: '/', wasmExtensions: wasm });
const api = new UpEndApi({ instanceUrl: '/', wasmExtensions: wasm });
export default api;
export const currentUser: Writable<string | undefined> = writable(
undefined as string | undefined,
((set) => {
api.authStatus().then((result) => set(result?.user));
}) as StartStopNotifier<string | undefined>
);
export async function login(credentials: { username: string; password: string }) {
await api.authenticate(credentials);
window.location.reload();
}
export async function logout() {
await api.resetAuth();
window.location.reload();
}
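
The currentUser store above is filled in lazily from authStatus(), and both login and logout reload the page on success so every store re-initialises under the new session. A sketch of consuming them from component code; the handler names are illustrative.

import { currentUser, login, logout } from "$lib/api";

// Fires once authStatus() resolves (and again after the post-login reload);
// call unsubscribe() on teardown.
const unsubscribe = currentUser.subscribe((user) => {
  console.log(user ? `Logged in as ${user}` : "not logged in");
});

async function onLoginSubmit(username: string, password: string) {
  // Reloads the page on success; rejects with an UpendApiError otherwise.
  await login({ username, password });
}

async function onLogoutClick() {
  await logout(); // also triggers a reload
}
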

View File

@ -11,7 +11,6 @@
export let address: string | undefined = undefined;
export let index: number;
export let only: boolean;
export let background: string | undefined = undefined;
export let forceDetail = false;
let shifted = false;
let key = Math.random();
@ -64,35 +63,13 @@
window.addEventListener('mouseup', onMouseUp);
}
let resultBackground = background;
let imageBackground: string | undefined = undefined;
$: {
if (background?.startsWith('url(')) {
imageBackground = background;
resultBackground = 'transparent';
} else {
resultBackground = background;
imageBackground = undefined;
}
resultBackground ||= 'var(--background-lighter)';
}
function reload() {
key = Math.random();
}
</script>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
class="browse-column"
class:detail
style="--background: {resultBackground}"
class:image-background={Boolean(imageBackground)}
on:mousemove={(ev) => (shifted = ev.shiftKey)}
>
{#if imageBackground}
<div class="background" style="background-image: {imageBackground}" />
{/if}
<div class="browse-column" class:detail on:mousemove={(ev) => (shifted = ev.shiftKey)}>
<div class="view" style="--width: {width}px">
<header>
{#if address}
@ -165,7 +142,7 @@
display: flex;
flex-direction: column;
background: var(--background);
background: var(--background-lighter);
color: var(--foreground-lighter);
border: 1px solid var(--foreground-lightest);
border-radius: 0.5em;

View File

@ -27,7 +27,6 @@
const dispatch = createEventDispatcher<{
resolved: string[];
close: void;
background: string | undefined;
}>();
export let address: string;
@ -393,19 +392,6 @@
);
}
});
$: {
const cover = $entity?.attr['COVER']?.[0];
if (!cover) {
dispatch('background', undefined);
} else {
switch (cover.value.t) {
case 'Address':
dispatch('background', `url('${api.getRaw(cover.value.c)}')`);
break;
}
}
}
</script>
<div
@ -527,13 +513,13 @@
<h2>{$i18n.t('Attributes')}</h2>
<EntryList
entries={$entity?.attributes || []}
columns={detail ? 'timestamp, provenance, attribute, value' : 'attribute, value'}
columns={detail ? 'timestamp, user, provenance, attribute, value' : 'attribute, value'}
on:change={onChange}
/>
<h2>{$i18n.t('Backlinks')}</h2>
<EntryList
entries={$entity?.backlinks || []}
columns={detail ? 'timestamp, provenance, entity, attribute' : 'entity, attribute'}
columns={detail ? 'timestamp, user, provenance, entity, attribute' : 'entity, attribute'}
on:change={onChange}
/>
</div>

View File

@ -0,0 +1,90 @@
<script lang="ts">
import { i18n } from '$lib/i18n';
import Icon from '$lib/components/utils/Icon.svelte';
import { login } from '$lib/api';
let username = '';
let password = '';
let error: string | undefined;
let authenticating = false;
async function submit() {
error = undefined;
try {
authenticating = true;
await login({ username, password });
} catch (e) {
error = (e as object).toString();
} finally {
authenticating = false;
}
}
</script>
<div class="modal-container">
<div class="modal" class:authenticating>
<h2>
<Icon name="lock" />
{$i18n.t('Authorization required')}
</h2>
<form on:submit|preventDefault={submit}>
<input placeholder={$i18n.t('Username')} type="text" bind:value={username} required />
<input placeholder={$i18n.t('Password')} type="password" bind:value={password} required />
<button type="submit"> <Icon plain name="log-in" /> {$i18n.t('Login')}</button>
</form>
{#if error}
<div class="error">{error}</div>
{/if}
</div>
</div>
<style lang="scss">
@use '$lib/styles/colors';
.modal-container {
position: fixed;
left: 0;
top: 0;
width: 100vw;
height: 100vh;
background: rgba(0, 0, 0, 0.66);
color: var(--foreground);
z-index: 9;
}
.modal {
position: fixed;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
background: var(--background);
color: var(--foreground);
border-radius: 5px;
border: 1px solid var(--foreground);
padding: 2rem;
display: flex;
flex-direction: column;
gap: 1rem;
&.authenticating {
filter: brightness(0.66);
pointer-events: none;
}
}
h2 {
text-align: center;
margin: 0 0 1rem 0;
}
form {
display: contents;
}
.error {
color: colors.$red;
text-align: center;
}
</style>

View File

@ -152,6 +152,22 @@
dispatch('change', { type: 'upsert', attribute: ATTR_LABEL, value: ev.detail });
}
}
let background: string | undefined;
$: background = $entity?.get('COVER')?.toString();
let resultBackground = background;
let imageBackground: string | undefined = undefined;
$: {
if (background) {
imageBackground = `url(${api.getRaw(background)})`;
resultBackground = 'transparent';
} else {
resultBackground = background;
imageBackground = undefined;
}
resultBackground ||= 'var(--background-lighter)';
}
</script>
<div
@ -160,6 +176,8 @@
class:right-active={address == $addresses[$index + 1]}
class:selected={select && $selected.includes(address)}
class:plain
style="--background: {resultBackground}"
class:image-background={Boolean(imageBackground)}
>
<div
class="address"
@ -167,6 +185,10 @@
class:banner
class:show-type={$entityInfo?.t === 'Url' && !addressIds.length}
>
{#if imageBackground}
<div class="image-gradient"></div>
<div class="image-background" style="background-image: {imageBackground}"></div>
{/if}
<HashBadge {address} />
<div class="label" class:resolving title={displayLabel}>
<Editable
@ -264,6 +286,7 @@
}
.address {
position: relative;
flex-grow: 1;
min-width: 0;
@ -275,7 +298,7 @@
font-family: var(--monospace-font);
line-break: anywhere;
background: var(--background-lighter);
background: var(--background);
border: 0.1em solid var(--foreground-lighter);
border-radius: 0.2em;
@ -336,6 +359,29 @@
&.banner .secondary {
display: unset;
}
.image-gradient {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: linear-gradient(90deg, rgba(0, 0, 0, 0.66) 16%, var(--background) 66%);
z-index: -1;
}
.image-background {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
z-index: -2;
background-size: cover;
background-position: center;
filter: brightness(0.8);
}
}
.label {

View File

@ -2,13 +2,16 @@
import { addEmitter } from '../AddModal.svelte';
import Icon from '../utils/Icon.svelte';
import { jobsEmitter } from './Jobs.svelte';
import api from '$lib/api';
import api, { currentUser, logout } from '$lib/api';
import Selector, { type SelectorValue } from '../utils/Selector.svelte';
import { i18n } from '$lib/i18n';
import { goto } from '$app/navigation';
import { onMount } from 'svelte';
import { vaultInfo } from '$lib/util/info';
import { slide } from 'svelte/transition';
let selector: Selector;
let userDropdown = false;
let lastSearched: SelectorValue[] = [];
@ -57,6 +60,7 @@
}
let fileInput: HTMLInputElement;
function onFileChange() {
if (fileInput.files?.length) {
addEmitter.emit('files', Array.from(fileInput.files));
@ -73,6 +77,12 @@
}
</script>
<svelte:body
on:click={() => {
userDropdown = false;
}}
/>
<div class="header">
<h1>
<a href="/">
@ -91,13 +101,31 @@
<Icon name="search" slot="prefix" />
</Selector>
</div>
<button class="button" on:click={() => addEmitter.emit('choose')}>
<button on:click={() => addEmitter.emit('choose')}>
<Icon name="upload" />
<input type="file" multiple bind:this={fileInput} on:change={onFileChange} />
</button>
<button class="button" on:click={() => rescan()} title="Rescan vault">
<button on:click={() => rescan()} title="Rescan vault">
<Icon name="refresh" />
</button>
<button
class="user"
disabled={$vaultInfo?.public}
on:click|stopPropagation={() => (userDropdown = true)}
>
<Icon name="user" />
</button>
{#if userDropdown}
<!-- svelte-ignore a11y-no-static-element-interactions a11y-click-events-have-key-events -->
<div class="user-dropdown" transition:slide on:click|stopPropagation={() => {}}>
<div class="user">
<Icon plain name="user" />
{$currentUser || '???'}
</div>
<hr />
<button on:click={() => logout()}> <Icon name="log-out" />{$i18n.t('Log out')}</button>
</div>
{/if}
</div>
<style lang="scss">
@ -141,6 +169,18 @@
}
}
.user-dropdown {
background: var(--background);
border-radius: 4px;
border: 1px solid var(--foreground);
padding: 0.5em;
position: absolute;
top: 3.5rem;
right: 0.5rem;
box-shadow: 0 0 0.5rem rgba(0, 0, 0, 0.5);
z-index: 99;
}
@media screen and (max-width: 600px) {
.name {
display: none;

View File

@ -166,7 +166,12 @@
}}
/>
<div class="icon">
<IconButton name="trash" color="#dc322f" on:click={() => removeEntity(entity)} />
<IconButton
plain
name="trash"
color="#dc322f"
on:click={() => removeEntity(entity)}
/>
</div>
{:else}
<div class="object">
@ -181,7 +186,12 @@
/>
</div>
<div class="icon">
<IconButton name="trash" color="#dc322f" on:click={() => removeEntity(entity)} />
<IconButton
plain
name="trash"
color="#dc322f"
on:click={() => removeEntity(entity)}
/>
</div>
{/if}
{:else}
@ -300,6 +310,12 @@
}
}
.icon {
display: flex;
align-items: center;
margin-left: 0.25em;
}
.add {
display: flex;
flex-direction: column;

View File

@ -34,6 +34,7 @@
const TIMESTAMP_COL = 'timestamp';
const PROVENANCE_COL = 'provenance';
const USER_COL = 'user';
const ENTITY_COL = 'entity';
const ATTR_COL = 'attribute';
const VALUE_COL = 'value';
@ -188,6 +189,7 @@
const COLUMN_LABELS: { [key: string]: string } = {
timestamp: $i18n.t('Added at'),
provenance: $i18n.t('Provenance'),
user: $i18n.t('User'),
entity: $i18n.t('Entity'),
attribute: $i18n.t('Attribute'),
value: $i18n.t('Value')
@ -243,6 +245,16 @@
</div>
{:else if column == PROVENANCE_COL}
<div class="cell">{entry.provenance}</div>
{:else if column == USER_COL}
<div class="cell">
{#if entry.user}
{entry.user}
{:else}
<div class="unset">
{$i18n.t('unset')}
</div>
{/if}
</div>
{:else if column == ENTITY_COL}
<div class="cell entity mark-entity">
<UpObject

View File

@ -22,6 +22,7 @@ select {
font-size: 2em;
}
button,
.button {
border: 1px solid var(--foreground);
border-radius: 4px;
@ -52,6 +53,23 @@ select {
}
}
input[type='text'],
input[type='password'] {
padding: 0.25em;
border: 1px solid var(--foreground-lighter);
border-radius: 4px;
background: var(--background);
color: var(--foreground);
transition: box-shadow 0.25s;
&:focus {
box-shadow: -1px -1px 2px 2px var(--primary);
outline: none;
}
}
.mark-entity::first-letter,
.mark-entity *::first-letter {
color: color.scale(color.mix(colors.$base1, colors.$red), $saturation: -33%);

View File

@ -1,9 +1,22 @@
import api from '$lib/api';
import { readable } from 'svelte/store';
import { readable, type Readable } from 'svelte/store';
import type { VaultInfo } from '@upnd/upend/types';
import type { VaultOptions } from '@upnd/upend/api';
export const vaultInfo = readable(undefined as VaultInfo | undefined, (set) => {
api.fetchInfo().then(async (info: VaultInfo) => {
set(info);
});
});
export const vaultInfo: Readable<VaultInfo | undefined> = readable(
undefined as VaultInfo | undefined,
(set) => {
api.fetchInfo().then(async (info: VaultInfo) => {
set(info);
});
}
);
export const vaultOptions: Readable<VaultOptions | undefined> = readable(
undefined as VaultOptions | undefined,
(set) => {
api.fetchOptions().then(async (options: VaultOptions) => {
set(options);
});
}
);

View File

@ -4,6 +4,17 @@
import Footer from '$lib/components/layout/Footer.svelte';
import DropPasteHandler from '$lib/components/DropPasteHandler.svelte';
import AddModal from '$lib/components/AddModal.svelte';
import { onMount } from 'svelte';
import { goto } from '$app/navigation';
import { vaultInfo, vaultOptions } from '$lib/util/info';
import LoginModal from '$lib/components/LoginModal.svelte';
import { currentUser } from '$lib/api';
onMount(() => {
if ($vaultOptions && !$vaultOptions.blob_mode) {
goto('/setup');
}
});
</script>
<Header />
@ -13,4 +24,7 @@
<Footer />
<AddModal />
{#if $vaultInfo && !$vaultInfo.public && !$currentUser}
<LoginModal />
{/if}
<DropPasteHandler />>

View File

@ -157,14 +157,6 @@
}
];
fetch('/api/options')
.then((res) => res.json())
.then((options) => {
if (!options.blob_mode) {
goto('/setup');
}
});
$: updateTitle($vaultInfo?.name || $i18n.t('Home') || 'Home');
</script>