Merge branch 'develop'

refactor/errors
Tomáš Mládek 2023-11-05 16:39:14 +01:00
commit dea40124f9
11 changed files with 679 additions and 137 deletions

Cargo.lock

@@ -812,6 +812,20 @@ dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "crossbeam"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-epoch",
"crossbeam-queue",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.8"
@@ -846,6 +860,16 @@ dependencies = [
"scopeguard",
]
[[package]]
name = "crossbeam-queue"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.16"
@@ -1555,6 +1579,16 @@ dependencies = [
"simple_asn1",
]
[[package]]
name = "jwalk"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56"
dependencies = [
"crossbeam",
"rayon",
]
[[package]]
name = "kamadak-exif"
version = "0.5.5"
@@ -3203,6 +3237,7 @@ dependencies = [
"diesel",
"diesel_migrations",
"filebuffer",
"jwalk",
"lazy_static",
"lexpr",
"libsqlite3-sys",
@@ -3225,7 +3260,6 @@ dependencies = [
"upend-base",
"url",
"uuid",
"walkdir",
]
[[package]]


@@ -444,13 +444,25 @@ async fn main() -> Result<()> {
};
if !args.no_initial_update {
info!("Running initial update...");
let initial = open_result.new;
block_background::<_, _, anyhow::Error>(move || {
let _ = state.store.update(&upend, job_container.clone(), initial);
let _ = extractors::extract_all(upend, state.store, job_container);
Ok(())
});
if !open_result.new {
info!("Running update...");
block_background::<_, _, anyhow::Error>(move || {
let connection = upend.connection()?;
let _ = state.store.update(
&upend,
job_container.clone(),
upend_db::stores::UpdateOptions {
initial: false,
tree_mode: connection
.get_vault_options()?
.blob_mode
.unwrap_or_default(),
},
);
let _ = extractors::extract_all(upend, state.store, job_container);
Ok(())
});
}
}
#[cfg(feature = "desktop")]


@@ -38,8 +38,11 @@ use upend_base::hash::{b58_decode, b58_encode, sha256hash};
use upend_base::lang::Query;
use upend_db::hierarchies::{list_roots, resolve_path, UHierPath};
use upend_db::jobs;
use upend_db::stores::UpdateOptions;
use upend_db::stores::{Blob, UpStore};
use upend_db::BlobMode;
use upend_db::UpEndDatabase;
use upend_db::VaultOptions;
use url::Url;
#[cfg(feature = "desktop")]
@@ -762,23 +765,36 @@ pub async fn list_hier_roots(state: web::Data<State>) -> Result<HttpResponse, Er
Ok(HttpResponse::Ok().json(result.as_hash().map_err(ErrorInternalServerError)?))
}
// #[derive(Deserialize)]
// pub struct RescanRequest {
// full: Option<String>,
// }
#[derive(Deserialize)]
pub struct RescanRequest {
initial: Option<bool>,
tree_mode: Option<BlobMode>,
}
#[post("/api/refresh")]
pub async fn api_refresh(
req: HttpRequest,
state: web::Data<State>,
// web::Query(query): web::Query<RescanRequest>,
web::Query(query): web::Query<RescanRequest>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
block_background::<_, _, anyhow::Error>(move || {
let _ = state
.store
.update(&state.upend, state.job_container.clone(), false);
let _ = state.store.update(
&state.upend,
state.job_container.clone(),
UpdateOptions {
initial: query.initial.unwrap_or(false),
tree_mode: query.tree_mode.unwrap_or(
connection
.get_vault_options()?
.blob_mode
.unwrap_or_default(),
),
},
);
let _ = crate::extractors::extract_all(
state.upend.clone(),
state.store.clone(),
@@ -842,6 +858,34 @@ pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
})))
}
#[get("/api/options")]
pub async fn get_options(state: web::Data<State>) -> Result<HttpResponse, Error> {
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
Ok(HttpResponse::Ok().json(
connection
.get_vault_options()
.map_err(ErrorInternalServerError)?,
))
}
#[put("/api/options")]
pub async fn put_options(
req: HttpRequest,
state: web::Data<State>,
payload: web::Json<VaultOptions>,
) -> Result<HttpResponse, Error> {
check_auth(&req, &state)?;
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
let options = payload.into_inner();
web::block(move || connection.set_vault_options(options))
.await
.map_err(ErrorInternalServerError)?
.map_err(ErrorInternalServerError)?;
Ok(HttpResponse::Ok().finish())
}
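// NB, not part of this diff: a sketch of how a client might call the new
// endpoints. The paths come from the handlers above; the body and query
// shapes are assumptions based on the serde derives.
//
//   GET  /api/options              -> 200 {"blob_mode": null}   (fresh vault)
//   PUT  /api/options              <- {"blob_mode": "Mirror"}   (auth required)
//   POST /api/refresh?initial=true -> 200; the rescan continues in the background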
#[get("/api/migration/user-entries")]
pub async fn get_user_entries(state: web::Data<State>) -> Result<HttpResponse, Error> {
let connection = state.upend.connection().map_err(ErrorInternalServerError)?;
@@ -1017,7 +1061,12 @@ mod tests {
.uri("/api/hier/NATIVE/hello-world.txt")
.to_request();
let result = actix_web::test::call_service(&app, req).await;
assert_eq!(result.status(), http::StatusCode::FOUND);
assert_eq!(
result.status(),
http::StatusCode::FOUND,
"expected redirect, got {:}",
result.status()
);
assert_eq!(
result
.headers()
@@ -1101,7 +1150,18 @@
) as Box<dyn UpStore + Send + Sync>);
let job_container = jobs::JobContainer::new();
store.update(&upend, job_container.clone(), true).unwrap();
let outcome = store
.update(
&upend,
job_container.clone(),
UpdateOptions {
initial: true,
tree_mode: upend_db::BlobMode::default(),
},
)
.unwrap();
println!("Outcome: {:?}", outcome);
State {
upend,


@@ -64,6 +64,8 @@ where
.service(routes::store_stats)
.service(routes::get_jobs)
.service(routes::get_info)
.service(routes::get_options)
.service(routes::put_options)
.service(routes::get_user_entries);
if let Some(ui_path) = ui_path {


@@ -52,7 +52,7 @@ url = { version = "2", features = ["serde"] }
filebuffer = "0.4.0"
tempfile = "^3.2.0"
walkdir = "2"
jwalk = "0.8.1"
tree_magic_mini = { version = "3.0.2", features = ["with-gpl-data"] }


@@ -31,6 +31,7 @@ use diesel::r2d2::{self, ConnectionManager};
use diesel::result::{DatabaseErrorKind, Error};
use diesel::sqlite::SqliteConnection;
use hierarchies::initialize_hier;
use serde::{Deserialize, Serialize};
use shadow_rs::is_release;
use std::convert::TryFrom;
use std::fs;
@@ -152,7 +153,10 @@ impl UpEndDatabase {
let connection = db.connection().unwrap();
if !new {
let db_major: u64 = connection.get_meta("VERSION")?.parse()?;
let db_major: u64 = connection
.get_meta("VERSION")?
.ok_or(anyhow!("Database version not found!"))?
.parse()?;
if db_major > build::PKG_VERSION_MAJOR.parse().unwrap() {
return Err(anyhow!("Incompatible database! Found version "));
}
@@ -201,7 +205,7 @@ impl UpEndConnection {
f()
}
pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<String> {
pub fn get_meta<S: AsRef<str>>(&self, key: S) -> Result<Option<String>> {
use crate::inner::schema::meta::dsl;
let key = key.as_ref();
@@ -210,12 +214,63 @@ impl UpEndConnection {
let _lock = self.lock.read().unwrap();
let conn = self.pool.get()?;
dsl::meta
let result = dsl::meta
.filter(dsl::key.eq(key))
.load::<models::MetaValue>(&conn)?
.first()
.ok_or(anyhow!(r#"No META "{key}" value found."#))
.map(|mv| mv.value.clone())
.load::<models::MetaValue>(&conn)?;
let result = result.first();
Ok(result.map(|v| v.value.clone()))
}
pub fn set_meta<S: AsRef<str>, T: AsRef<str>>(&self, key: S, value: T) -> Result<()> {
use crate::inner::schema::meta::dsl;
let key = key.as_ref();
let value = value.as_ref();
trace!("Setting META:{key} to {value}");
let _lock = self.lock.write().unwrap();
let conn = self.pool.get()?;
diesel::replace_into(dsl::meta)
.values((dsl::key.eq(key), dsl::value.eq(value)))
.execute(&conn)?;
Ok(())
}
pub fn set_vault_options(&self, options: VaultOptions) -> Result<()> {
if let Some(blob_mode) = options.blob_mode {
let tree_mode = match blob_mode {
BlobMode::Flat => "FLAT".to_string(),
BlobMode::Mirror => "MIRROR".to_string(),
BlobMode::Incoming(None) => "INCOMING".to_string(),
BlobMode::Incoming(Some(group)) => format!("INCOMING:{}", group),
BlobMode::StoreOnly => "STORE_ONLY".to_string(),
};
self.set_meta("VAULT_BLOB_MODE", tree_mode)?;
}
Ok(())
}
pub fn get_vault_options(&self) -> Result<VaultOptions> {
let blob_mode = match self.get_meta("VAULT_BLOB_MODE")? {
Some(mode) => match mode.as_str() {
"FLAT" => Some(BlobMode::Flat),
"MIRROR" => Some(BlobMode::Mirror),
"INCOMING" => Some(BlobMode::Incoming(None)),
"STORE_ONLY" => Some(BlobMode::StoreOnly),
mode if mode.starts_with("INCOMING:") => {
Some(BlobMode::Incoming(Some(mode[9..].to_string())))
}
_ => {
warn!("Unknown vault tree mode: {}", mode);
None
}
},
None => None,
};
Ok(VaultOptions { blob_mode })
}
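// NB, not part of this diff: the META string encodings these two methods
// round-trip, as read off the match arms above:
//
//   BlobMode::Flat                    <-> "FLAT"
//   BlobMode::Mirror                  <-> "MIRROR"
//   BlobMode::Incoming(None)          <-> "INCOMING"
//   BlobMode::Incoming(Some("inbox")) <-> "INCOMING:inbox"
//   BlobMode::StoreOnly               <-> "STORE_ONLY"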
pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result<Option<Entry>> {
@@ -432,7 +487,7 @@ impl UpEndConnection {
#[cfg(test)]
mod test {
use upend_base::constants::{ATTR_LABEL, ATTR_IN};
use upend_base::constants::{ATTR_IN, ATTR_LABEL};
use super::*;
use tempfile::TempDir;
@@ -545,3 +600,22 @@ mod test {
assert_eq!(result[0].value, EntryValue::Address(random_entity));
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VaultOptions {
pub blob_mode: Option<BlobMode>,
}
/// Specifies how to store new blobs
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub enum BlobMode {
#[default]
/// Mirror the original tree
Mirror,
/// Use only the last level of the tree as a group
Flat,
/// Place all files in a single group
Incoming(Option<String>),
/// Only store files, don't place them anywhere
StoreOnly,
}
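For API clients, the derives above imply the following JSON shapes — a sketch only, assuming serde's default externally tagged enum representation:

// Sketch, not part of this diff: JSON shapes implied by the derives above,
// assuming serde's default externally tagged enum representation.
fn vault_options_json_shapes() -> Result<(), serde_json::Error> {
    // Unit variants serialize as bare strings:
    let _mirror: VaultOptions = serde_json::from_str(r#"{ "blob_mode": "Mirror" }"#)?;
    // The newtype variant carries its optional group name:
    let _incoming: VaultOptions =
        serde_json::from_str(r#"{ "blob_mode": { "Incoming": "inbox" } }"#)?;
    // An unset option round-trips as null:
    assert_eq!(
        serde_json::to_string(&VaultOptions { blob_mode: None })?,
        r#"{"blob_mode":null}"#
    );
    Ok(())
}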


@@ -1,15 +1,18 @@
use self::db::files;
use super::{Blob, StoreError, UpStore, UpdatePathOutcome};
use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath};
use super::{Blob, StoreError, UpStore, UpdateOptions, UpdatePathOutcome};
use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode};
use crate::jobs::{JobContainer, JobHandle};
use crate::util::hash_at_path;
use crate::{ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR};
use anyhow::{anyhow, Error, Result};
use crate::{
BlobMode, ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR,
};
use anyhow::{anyhow, Result};
use chrono::prelude::*;
use diesel::r2d2::{self, ConnectionManager, ManageConnection};
use diesel::ExpressionMethods;
use diesel::{Connection, QueryDsl, RunQueryDsl, SqliteConnection};
use jwalk::WalkDir;
use lru::LruCache;
use rayon::prelude::*;
use serde_json::json;
@@ -17,6 +20,7 @@ use std::borrow::Borrow;
use std::convert::TryInto;
use std::path::PathBuf;
use std::path::{Component, Path};
use std::str::FromStr;
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
use std::{fs, iter};
@@ -25,7 +29,6 @@ use upend_base::addressing::Address;
use upend_base::constants::{ATTR_ADDED, ATTR_BY, ATTR_IN, ATTR_LABEL, ATTR_OF, TYPE_HASH_ADDRESS};
use upend_base::entry::Entry;
use upend_base::hash::{b58_encode, UpMultihash};
use walkdir::WalkDir;
mod db;
@@ -91,12 +94,13 @@ impl FsStore {
&self,
db: D,
job_handle: JobHandle,
quick_check: bool,
_disable_synchronous: bool,
options: UpdateOptions,
) -> Result<Vec<UpdatePathOutcome>> {
let start = Instant::now();
info!("Vault rescan started.");
let quick_check = options.initial;
let db = db.borrow();
let upconnection = db.connection()?;
@@ -118,11 +122,11 @@
// Walk through the vault, find all paths
trace!("Traversing vault directory");
let absolute_dir_path = fs::canonicalize(&*self.path)?;
let path_entries: Vec<PathBuf> = WalkDir::new(&*self.path)
let paths: Vec<PathBuf> = WalkDir::new(&*self.path)
.follow_links(true)
.into_iter()
.filter_map(|e| e.ok())
.filter_map(|e| fs::canonicalize(e.into_path()).ok())
.filter_map(|e| fs::canonicalize(e.path()).ok())
.filter(|e| e.is_file())
.filter(|e| !e.starts_with(absolute_dir_path.join(UPEND_SUBDIR)))
.collect();
@@ -132,17 +136,20 @@
// Actual processing
let count = RwLock::new(0_usize);
let resolve_cache = Arc::new(Mutex::new(LruCache::new(256)));
let total = path_entries.len() as f32;
let resolve_cache: Arc<Mutex<LruCache<(Option<Address>, UNode), Address>>> =
Arc::new(Mutex::new(LruCache::new(256)));
let total = paths.len() as f32;
let shared_job_handle = Arc::new(Mutex::new(job_handle));
let path_outcomes: Vec<UpdatePathOutcome> = path_entries
let path_outcomes: Vec<UpdatePathOutcome> = paths
.into_par_iter()
.map(|path| {
let result = self.process_directory_entry(
db,
&resolve_cache,
path.clone(),
options.tree_mode.clone(),
&existing_files,
&resolve_cache,
quick_check,
);
@@ -169,11 +176,7 @@ impl FsStore {
let existing_files = existing_files.read().unwrap();
let cleanup_results = existing_files.iter().filter(|f| f.valid).map(|file| {
let trans_result = upconnection.transaction::<_, Error, _>(|| {
self.file_set_valid(file.id, false)?;
upconnection.remove_object(Address::from(file.clone()))?;
Ok(())
});
let trans_result = self.file_set_valid(file.id, false);
match trans_result {
Ok(_) => {
@@ -237,9 +240,10 @@
fn process_directory_entry<D: Borrow<UpEndDatabase>>(
&self,
db: D,
resolve_cache: &Arc<Mutex<ResolveCache>>,
path: PathBuf,
mode: BlobMode,
existing_files: &Arc<RwLock<Vec<db::File>>>,
resolve_cache: &Arc<Mutex<ResolveCache>>,
quick_check: bool,
) -> Result<UpdatePathOutcome> {
trace!("Processing: {:?}", path);
@@ -320,20 +324,38 @@
drop(existing_files_read);
}
// If not, add it!
// If not, hash it.
if file_hash.is_none() {
file_hash = Some(hash_at_path(&path)?);
}
let mime_type = tree_magic_mini::from_filepath(&path).map(|s| s.to_string());
let file_hash = file_hash.unwrap();
let connection: UpEndConnection = db.borrow().connection()?;
let file_is_known = !connection
.query(
format!(
"(matches @{} \"{}\" ?)",
Address::Hash(file_hash.clone()),
ATTR_IN
)
.parse()?,
)?
.is_empty();
let upath = if !file_is_known {
self.path_to_upath(&path, mode)?
} else {
None
};
self.insert_file_with_metadata(
&db.borrow().connection()?,
&normalized_path,
file_hash.unwrap(),
&path,
upath,
file_hash,
None,
size,
mtime,
mime_type,
Some(resolve_cache),
)
.map(|_| {
@@ -342,52 +364,57 @@
})
}
fn add_file(
&self,
connection: &UpEndConnection,
path: &Path,
hash: UpMultihash,
name_hint: Option<String>,
) -> Result<Address> {
let normalized_path = self.normalize_path(path)?;
let metadata = fs::metadata(path)?;
let size = metadata.len() as i64;
let mtime = metadata
.modified()
.map(|t| {
NaiveDateTime::from_timestamp_opt(
t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
0,
)
})
.ok()
.flatten();
let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
fn path_to_upath(&self, path: &Path, mode: BlobMode) -> Result<Option<UHierPath>> {
match mode {
BlobMode::Flat => {
let normalized_path = self.normalize_path(path).unwrap();
let dirname = normalized_path.parent().and_then(|p| p.components().last());
self.insert_file_with_metadata(
connection,
&normalized_path,
hash,
name_hint,
size,
mtime,
mime_type,
None,
)
let upath = UHierPath(if let Some(dirname) = dirname {
vec![
"NATIVE".parse().unwrap(),
UNode::from_str(&dirname.as_os_str().to_string_lossy()).unwrap(),
]
} else {
vec!["NATIVE".parse().unwrap()]
});
Ok(Some(upath))
}
BlobMode::Mirror => {
let normalized_path = self.normalize_path(path).unwrap();
let path = normalized_path.parent().unwrap();
let upath =
iter::once("NATIVE".parse().unwrap())
.chain(path.iter().map(|component| {
UNode::from_str(&component.to_string_lossy()).unwrap()
}))
.collect::<Vec<UNode>>();
Ok(Some(UHierPath(upath)))
}
BlobMode::Incoming(group) => {
let upath = UHierPath(vec![group.unwrap_or("INCOMING".to_string()).parse()?]);
Ok(Some(upath))
}
BlobMode::StoreOnly => Ok(None),
}
}
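// NB, not part of this diff: expected placements for a file at
// `<vault>/photos/2023/img.jpg` under each mode, per the arms above:
//
//   Flat                    -> NATIVE/2023           (last directory level only)
//   Mirror                  -> NATIVE/photos/2023    (full directory tree)
//   Incoming(None)          -> INCOMING
//   Incoming(Some("inbox")) -> inbox
//   StoreOnly               -> no hierarchy placement (returns None)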
#[allow(clippy::too_many_arguments)]
fn insert_file_with_metadata(
&self,
connection: &UpEndConnection,
normalized_path: &Path,
path: &Path,
upath: Option<UHierPath>,
hash: UpMultihash,
name: Option<String>,
size: i64,
mtime: Option<NaiveDateTime>,
mime_type: Option<String>,
resolve_cache: Option<&Arc<Mutex<ResolveCache>>>,
) -> Result<Address> {
let normalized_path = self.normalize_path(path)?;
let new_file = db::NewFile {
path: normalized_path
.to_str()
@@ -410,6 +437,7 @@ impl FsStore {
timestamp: chrono::Utc::now().naive_utc(),
};
let mime_type = tree_magic_mini::from_filepath(path).map(|s| s.to_string());
let mime_entry = mime_type.map(|mime_type| Entry {
entity: blob_address.clone(),
attribute: FILE_MIME_KEY.to_string(),
@@ -430,27 +458,10 @@ impl FsStore {
timestamp: chrono::Utc::now().naive_utc(),
};
// Add the appropriate entries w/r/t virtual filesystem location
let components = normalized_path.components().collect::<Vec<Component>>();
let (filename, dir_path) = components.split_last().unwrap();
let filename = components.last().unwrap();
let upath = UHierPath(
iter::once("NATIVE".parse().unwrap())
.chain(
dir_path
.iter()
.map(|component| component.as_os_str().to_string_lossy().parse().unwrap()),
)
.collect(),
);
let resolved_path = match resolve_cache {
Some(cache) => resolve_path_cached(connection, &upath, true, cache)?,
None => resolve_path(connection, &upath, true)?,
};
let parent_dir = resolved_path.last().unwrap();
// Insert all
let file_count = self.insert_file(new_file)?;
let file_count = self.insert_file_record(new_file)?;
connection.insert_entry_immutable(size_entry)?;
if file_count == 1 {
@@ -460,15 +471,6 @@ impl FsStore {
connection.insert_entry(mime_entry)?;
}
let dir_has_entry = Entry {
entity: blob_address.clone(),
attribute: ATTR_IN.to_string(),
value: parent_dir.clone().into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
let dir_has_entry_addr = connection.insert_entry(dir_has_entry)?;
let label_entry = Entry {
entity: blob_address.clone(),
attribute: ATTR_LABEL.to_string(),
@@ -480,19 +482,36 @@ };
};
let label_entry_addr = connection.insert_entry(label_entry)?;
let alias_entry = Entry {
entity: dir_has_entry_addr,
attribute: ATTR_BY.to_string(),
value: label_entry_addr.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(alias_entry)?;
if let Some(upath) = upath {
let resolved_path = match resolve_cache {
Some(cache) => resolve_path_cached(connection, &upath, true, cache)?,
None => resolve_path(connection, &upath, true)?,
};
let parent_dir = resolved_path.last().unwrap();
let dir_has_entry = Entry {
entity: blob_address.clone(),
attribute: ATTR_IN.to_string(),
value: parent_dir.clone().into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
let dir_has_entry_addr = connection.insert_entry(dir_has_entry)?;
let alias_entry = Entry {
entity: dir_has_entry_addr,
attribute: ATTR_BY.to_string(),
value: label_entry_addr.into(),
provenance: "SYSTEM INIT".to_string(),
timestamp: chrono::Utc::now().naive_utc(),
};
connection.insert_entry(alias_entry)?;
}
Ok(blob_address)
}
pub fn insert_file(&self, file: db::NewFile) -> Result<u32> {
fn insert_file_record(&self, file: db::NewFile) -> Result<u32> {
trace!(
"Inserting {} ({})...",
&file.path,
@@ -643,11 +662,33 @@ impl UpStore for FsStore {
};
let final_path = self.path.join(final_name);
fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
self.add_file(&connection, &final_path, hash.clone(), name_hint)
.map_err(|e| StoreError::Unknown(e.to_string()))?;
let metadata =
fs::metadata(&final_path).map_err(|e| StoreError::Unknown(e.to_string()))?;
let size = metadata.len() as i64;
let mtime = metadata
.modified()
.map(|t| {
NaiveDateTime::from_timestamp_opt(
t.duration_since(UNIX_EPOCH).unwrap().as_secs() as i64,
0,
)
})
.ok()
.flatten();
self.insert_file_with_metadata(
&connection,
&final_path,
None,
hash.clone(),
name_hint,
size,
mtime,
None,
)
.map_err(|e| StoreError::Unknown(e.to_string()))?;
}
Ok(hash)
@@ -657,18 +698,18 @@
&self,
db: &UpEndDatabase,
mut job_container: JobContainer,
initial: bool,
options: UpdateOptions,
) -> Result<Vec<UpdatePathOutcome>, StoreError> {
trace!(
"Running a vault update of {:?}, initial = {}.",
"Running a vault update of {:?}, options = {:?}.",
self.path,
initial
options
);
let job_result = job_container.add_job("REIMPORT", "Scanning vault directory...");
match job_result {
Ok(job_handle) => {
let result = self.rescan_vault(db, job_handle, !initial, initial);
let result = self.rescan_vault(db, job_handle, options);
if let Err(err) = &result {
error!("Update did not succeed! {:?}", err);
@@ -771,7 +812,14 @@ mod test {
let job_container = JobContainer::new();
// Store scan
let rescan_result = store.update(&open_result.db, job_container, false);
let rescan_result = store.update(
&open_result.db,
job_container,
UpdateOptions {
initial: true,
tree_mode: BlobMode::default(),
},
);
assert!(rescan_result.is_ok());
}
@@ -810,7 +858,14 @@ mod test {
// Initial scan
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
let rescan_result = store.rescan_vault(&open_result.db, job, quick, true);
let rescan_result = store.rescan_vault(
&open_result.db,
job,
UpdateOptions {
initial: quick,
tree_mode: BlobMode::default(),
},
);
assert!(rescan_result.is_ok());
let rescan_result = rescan_result.unwrap();
@@ -823,7 +878,14 @@ mod test {
// Modification-less rescan
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
let rescan_result = store.rescan_vault(&open_result.db, job, quick, false);
let rescan_result = store.rescan_vault(
&open_result.db,
job,
UpdateOptions {
initial: quick,
tree_mode: BlobMode::default(),
},
);
assert!(rescan_result.is_ok());
let rescan_result = rescan_result.unwrap();
@@ -839,7 +901,14 @@ mod test {
std::fs::remove_file(temp_dir_path.join("hello-world.txt")).unwrap();
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
let rescan_result = store.rescan_vault(&open_result.db, job, quick, false);
let rescan_result = store.rescan_vault(
&open_result.db,
job,
UpdateOptions {
initial: quick,
tree_mode: BlobMode::default(),
},
);
assert!(rescan_result.is_ok());
let rescan_result = rescan_result.unwrap();
@@ -866,4 +935,139 @@
.count()
);
}
/// Prepares a temporary filesystem structure for testing and returns the
/// database connection along with the temporary directory.
/// The structure is as follows:
/// ```text
/// NATIVE
/// ├── nested_directory
/// │   ├── nested_two
/// │   │   ├── nested_three
/// │   │   │   └── foo.txt
/// │   │   └── nested_four
/// │   │       └── baz.txt
/// │   └── nested_three
/// │       └── bar.txt
/// └── in_root.txt
/// ```
fn _prepare_hier_vault(tree_mode: BlobMode) -> (UpEndConnection, TempDir) {
// Prepare temporary filesystem structure
let temp_dir = TempDir::new().unwrap();
let temp_dir_path = temp_dir.path().canonicalize().unwrap();
let nested_directory_path = temp_dir_path.join("nested_directory");
fs::create_dir(&nested_directory_path).unwrap();
let nested_two_path = nested_directory_path.join("nested_two");
fs::create_dir(&nested_two_path).unwrap();
let nested_three_first_path = nested_directory_path.join("nested_three");
fs::create_dir(&nested_three_first_path).unwrap();
let nested_three_second_path = nested_two_path.join("nested_three");
fs::create_dir(&nested_three_second_path).unwrap();
let nested_four_path = nested_two_path.join("nested_four");
fs::create_dir(&nested_four_path).unwrap();
let file_path = nested_three_second_path.join("foo.txt");
let mut tmp_file = File::create(file_path).unwrap();
writeln!(tmp_file, "Hello, World! I'm foo, and deep.").unwrap();
let file_path = nested_three_first_path.join("bar.txt");
let mut tmp_file = File::create(file_path).unwrap();
writeln!(tmp_file, "Hello, World! I'm bar, and shallower.").unwrap();
let file_path = nested_four_path.join("baz.txt");
let mut tmp_file = File::create(file_path).unwrap();
writeln!(tmp_file, "Hello, World! I'm baz.").unwrap();
let file_path = temp_dir_path.join("in_root.txt");
let mut tmp_file = File::create(file_path).unwrap();
writeln!(tmp_file, "Hello, World! I'm in root.").unwrap();
// Initialize database
let open_result = UpEndDatabase::open(&temp_dir, true).unwrap();
let store = FsStore::from_path(&temp_dir).unwrap();
let mut job_container = JobContainer::new();
// Initial scan
let job = job_container.add_job("RESCAN", "TEST JOB").unwrap();
store
.rescan_vault(
&open_result.db,
job,
UpdateOptions {
initial: true,
tree_mode,
},
)
.unwrap();
(open_result.db.connection().unwrap(), temp_dir)
}
fn assert_paths(paths: Vec<&str>, connection: &UpEndConnection) {
paths.iter().for_each(|path| {
let upath: UHierPath = path.parse().unwrap();
assert!(
resolve_path(&connection, &upath, false).is_ok(),
"Failed: {}",
upath
);
});
}
fn test_initial_scan(mode: BlobMode, expected_paths: Vec<&str>) {
let (connection, _vault_dir) = _prepare_hier_vault(mode);
assert_paths(expected_paths, &connection);
}
#[test]
fn test_mirror_mode() {
test_initial_scan(
BlobMode::Mirror,
vec![
"NATIVE",
"NATIVE/nested_directory/nested_two/nested_three/foo.txt",
"NATIVE/nested_directory/nested_two/nested_four/baz.txt",
"NATIVE/nested_directory/nested_three/bar.txt",
"NATIVE/in_root.txt",
],
);
}
#[test]
fn test_flat_mode() {
test_initial_scan(
BlobMode::Flat,
vec![
"NATIVE",
"NATIVE/nested_three/foo.txt",
"NATIVE/nested_four/baz.txt",
"NATIVE/nested_three/bar.txt",
"NATIVE/in_root.txt",
],
);
}
#[test]
fn test_incoming_mode() {
test_initial_scan(
BlobMode::Incoming(None),
vec![
"INCOMING/foo.txt",
"INCOMING/baz.txt",
"INCOMING/bar.txt",
"INCOMING/in_root.txt",
],
);
test_initial_scan(
BlobMode::Incoming(Some("new files".to_string())),
vec![
"new files/foo.txt",
"new files/baz.txt",
"new files/bar.txt",
"new files/in_root.txt",
],
);
}
}


@@ -1,7 +1,7 @@
use std::path::{Path, PathBuf};
use super::{UpEndConnection, UpEndDatabase};
use crate::jobs::JobContainer;
use crate::{jobs::JobContainer, BlobMode};
use upend_base::hash::UpMultihash;
pub mod fs;
@@ -65,7 +65,13 @@ pub trait UpStore {
&self,
database: &UpEndDatabase,
job_container: JobContainer,
initial: bool,
options: UpdateOptions,
) -> Result<Vec<UpdatePathOutcome>>;
fn stats(&self) -> Result<serde_json::Value>;
}
#[derive(Debug, Clone)]
pub struct UpdateOptions {
pub initial: bool,
pub tree_mode: BlobMode,
}
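A typical full rescan through the new signature then looks like this — a sketch mirroring the startup and test call sites, where `store`, `db`, and `job_container` are stand-ins for real instances:

// Sketch, not part of this diff: a full rescan through the new
// UpStore::update signature, mirroring the startup and test call sites.
// `store`, `db`, and `job_container` are stand-ins for real instances.
let outcome = store.update(
    &db,
    job_container.clone(),
    UpdateOptions {
        initial: true,                  // first scan of a fresh vault
        tree_mode: BlobMode::default(), // Mirror, per the #[default] attribute
    },
)?;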


@@ -1,5 +1,5 @@
<script lang="ts">
import { Router, Route, createHistory } from "svelte-navigator";
import { Router, Route, createHistory, navigate } from "svelte-navigator";
import createHashSource from "./util/history";
import Header from "./components/layout/Header.svelte";
import Footer from "./components/layout/Footer.svelte";
@@ -10,6 +10,7 @@
import AddModal from "./components/AddModal.svelte";
import Store from "./views/Store.svelte";
import Surface from "./views/Surface.svelte";
import Setup from "./views/Setup.svelte";
import "./styles/main.scss";
@@ -39,6 +40,10 @@
<Store />
</Route>
<Route path="/setup">
<Setup />
</Route>
<Footer />
<AddModal />


@@ -2,7 +2,7 @@
import EntryList from "../components/widgets/EntryList.svelte";
import EntityList from "../components/widgets/EntityList.svelte";
import type { Widget } from "../components/EntryView.svelte";
import { Link } from "svelte-navigator";
import { Link, useNavigate } from "svelte-navigator";
import { UpListing } from "@upnd/upend";
import EntryView from "../components/EntryView.svelte";
import UpObject from "../components/display/UpObject.svelte";
@@ -19,6 +19,7 @@
ATTR_LABEL,
HIER_ROOT_ADDR,
} from "@upnd/upend/constants";
const navigate = useNavigate();
const roots = (async () => {
const data = await api.fetchRoots();
@@ -152,6 +153,14 @@
},
];
fetch("/api/options")
.then((res) => res.json())
.then((options) => {
if (!options.blob_mode) {
navigate("/setup");
}
});
updateTitle("Home");
</script>
@@ -356,6 +365,6 @@
.version {
text-decoration: none;
opacity: .66;
opacity: 0.66;
}
</style>


@@ -0,0 +1,136 @@
<script lang="ts">
import { updateTitle } from "../util/title";
import IconButton from "../components/utils/IconButton.svelte";
import { i18n } from "../i18n";
import { useNavigate } from "svelte-navigator";
import api from "../lib/api";
const navigate = useNavigate();
let mode: "Flat" | "DepthFirst" | "Mirror" | "Incoming" = undefined;
async function submitOptions() {
const blob_mode = {};
blob_mode[mode] = null;
const optionResponse = await fetch("/api/options", {
method: "PUT",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ blob_mode }),
});
if (!optionResponse.ok) {
throw new Error("Failed to set options");
}
await api.refreshVault();
navigate("/");
}
updateTitle("Initial Setup");
</script>
<main>
<h1>{$i18n.t("Vault Setup")}</h1>
<section class="tree-mode">
<h2>Scan mode</h2>
<div class="icons">
<div class="option">
<IconButton
name="copy-alt"
outline
on:click={() => (mode = "Mirror")}
active={mode === "Mirror"}
>
Mirror
</IconButton>
<p>
{$i18n.t(
"Groups are nested reflecting the original file directory structure.",
)}
</p>
</div>
<div class="option">
<IconButton
name="checkbox-minus"
outline
on:click={() => (mode = "Flat")}
active={mode === "Flat"}
>
Flat
</IconButton>
<p>
{$i18n.t(
"All groups are created as direct descendants of the root group.",
)}
</p>
</div>
<div class="option">
<IconButton
name="download"
outline
on:click={() => (mode = "Incoming")}
active={mode === "Incoming"}
>
Incoming
</IconButton>
<p>
{$i18n.t("New files are added to a group called 'Incoming'.")}
</p>
</div>
</div>
</section>
<section class="icons">
<IconButton
name="log-in"
outline
disabled={!mode}
on:click={() => submitOptions()}
>
{$i18n.t("Confirm and start scan")}
</IconButton>
</section>
</main>
<style lang="scss">
main {
border-radius: 1rem;
border: 1px solid var(--foreground);
background: var(--background-lighter);
margin: 4rem;
padding: 4rem;
display: flex;
flex-direction: column;
align-items: center;
gap: 4rem;
}
h1,
h2 {
text-align: center;
margin: 0;
}
.tree-mode .icons {
margin-top: 2rem;
display: flex;
gap: 2rem;
justify-content: center;
}
.icons {
font-size: 3rem;
}
.option {
flex-basis: 33%;
display: flex;
flex-direction: column;
align-items: center;
}
p {
font-size: initial;
text-align: center;
}
</style>