From 65936efe3851968da7f76943743fc70c8c75bcf8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Ml=C3=A1dek?= Date: Tue, 24 Oct 2023 22:14:12 +0200 Subject: [PATCH] feat(db): add new vault scan modes (flat, depthfirst) --- cli/src/main.rs | 26 ++- cli/src/routes.rs | 76 +++++++-- cli/src/serve.rs | 2 + db/src/lib.rs | 78 ++++++++- db/src/stores/fs/mod.rs | 304 ++++++++++++++++++++++++++++++++--- db/src/stores/mod.rs | 10 +- webui/src/App.svelte | 7 +- webui/src/views/Home.svelte | 13 +- webui/src/views/Setup.svelte | 135 ++++++++++++++++ 9 files changed, 596 insertions(+), 55 deletions(-) create mode 100644 webui/src/views/Setup.svelte diff --git a/cli/src/main.rs b/cli/src/main.rs index 6b9e3a0..b2ef389 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -444,13 +444,25 @@ async fn main() -> Result<()> { }; if !args.no_initial_update { - info!("Running initial update..."); - let initial = open_result.new; - block_background::<_, _, anyhow::Error>(move || { - let _ = state.store.update(&upend, job_container.clone(), initial); - let _ = extractors::extract_all(upend, state.store, job_container); - Ok(()) - }); + if !open_result.new { + info!("Running update..."); + block_background::<_, _, anyhow::Error>(move || { + let connection = upend.connection()?; + let _ = state.store.update( + &upend, + job_container.clone(), + upend_db::stores::UpdateOptions { + initial: false, + tree_mode: connection + .get_vault_options()? 
+ .tree_mode + .unwrap_or_default(), + }, + ); + let _ = extractors::extract_all(upend, state.store, job_container); + Ok(()) + }); + } } #[cfg(feature = "desktop")] diff --git a/cli/src/routes.rs b/cli/src/routes.rs index 20a4c51..92fb55a 100644 --- a/cli/src/routes.rs +++ b/cli/src/routes.rs @@ -38,8 +38,10 @@ use upend_base::hash::{b58_decode, b58_encode, sha256hash}; use upend_base::lang::Query; use upend_db::hierarchies::{list_roots, resolve_path, UHierPath}; use upend_db::jobs; +use upend_db::stores::UpdateOptions; use upend_db::stores::{Blob, UpStore}; use upend_db::UpEndDatabase; +use upend_db::VaultOptions; use url::Url; #[cfg(feature = "desktop")] @@ -762,23 +764,33 @@ pub async fn list_hier_roots(state: web::Data) -> Result, -// } +#[derive(Deserialize)] +pub struct RescanRequest { + initial: Option, +} #[post("/api/refresh")] pub async fn api_refresh( req: HttpRequest, state: web::Data, - // web::Query(query): web::Query, + web::Query(query): web::Query, ) -> Result { check_auth(&req, &state)?; + let connection = state.upend.connection().map_err(ErrorInternalServerError)?; + block_background::<_, _, anyhow::Error>(move || { - let _ = state - .store - .update(&state.upend, state.job_container.clone(), false); + let _ = state.store.update( + &state.upend, + state.job_container.clone(), + UpdateOptions { + initial: query.initial.unwrap_or(false), + tree_mode: connection + .get_vault_options()? 
+ .tree_mode + .unwrap_or_default(), + }, + ); let _ = crate::extractors::extract_all( state.upend.clone(), state.store.clone(), @@ -842,6 +854,34 @@ pub async fn get_info(state: web::Data) -> Result { }))) } +#[get("/api/options")] +pub async fn get_options(state: web::Data) -> Result { + let connection = state.upend.connection().map_err(ErrorInternalServerError)?; + Ok(HttpResponse::Ok().json( + connection + .get_vault_options() + .map_err(ErrorInternalServerError)?, + )) +} + +#[put("/api/options")] +pub async fn put_options( + req: HttpRequest, + state: web::Data, + payload: web::Json, +) -> Result { + check_auth(&req, &state)?; + + let connection = state.upend.connection().map_err(ErrorInternalServerError)?; + let options = payload.into_inner(); + web::block(move || connection.set_vault_options(options)) + .await + .map_err(ErrorInternalServerError)? + .map_err(ErrorInternalServerError)?; + + Ok(HttpResponse::Ok().finish()) +} + #[get("/api/migration/user-entries")] pub async fn get_user_entries(state: web::Data) -> Result { let connection = state.upend.connection().map_err(ErrorInternalServerError)?; @@ -1017,7 +1057,12 @@ mod tests { .uri("/api/hier/NATIVE/hello-world.txt") .to_request(); let result = actix_web::test::call_service(&app, req).await; - assert_eq!(result.status(), http::StatusCode::FOUND); + assert_eq!( + result.status(), + http::StatusCode::FOUND, + "expected redirect, got {:}", + result.status() + ); assert_eq!( result .headers() @@ -1101,7 +1146,18 @@ mod tests { ) as Box); let job_container = jobs::JobContainer::new(); - store.update(&upend, job_container.clone(), true).unwrap(); + let outcome = store + .update( + &upend, + job_container.clone(), + UpdateOptions { + initial: true, + tree_mode: upend_db::VaultTreeMode::default(), + }, + ) + .unwrap(); + + println!("Outcome: {:?}", outcome); State { upend, diff --git a/cli/src/serve.rs b/cli/src/serve.rs index 3f22e4a..db74e5b 100644 --- a/cli/src/serve.rs +++ b/cli/src/serve.rs @@ -64,6 
+64,8 @@ where .service(routes::store_stats) .service(routes::get_jobs) .service(routes::get_info) + .service(routes::get_options) + .service(routes::put_options) .service(routes::get_user_entries); if let Some(ui_path) = ui_path { diff --git a/db/src/lib.rs b/db/src/lib.rs index 07b2d2f..49deece 100644 --- a/db/src/lib.rs +++ b/db/src/lib.rs @@ -31,6 +31,7 @@ use diesel::r2d2::{self, ConnectionManager}; use diesel::result::{DatabaseErrorKind, Error}; use diesel::sqlite::SqliteConnection; use hierarchies::initialize_hier; +use serde::{Deserialize, Serialize}; use shadow_rs::is_release; use std::convert::TryFrom; use std::fs; @@ -152,7 +153,10 @@ impl UpEndDatabase { let connection = db.connection().unwrap(); if !new { - let db_major: u64 = connection.get_meta("VERSION")?.parse()?; + let db_major: u64 = connection + .get_meta("VERSION")? + .ok_or(anyhow!("Database version not found!"))? + .parse()?; if db_major > build::PKG_VERSION_MAJOR.parse().unwrap() { return Err(anyhow!("Incompatible database! Found version ")); } @@ -201,7 +205,7 @@ impl UpEndConnection { f() } - pub fn get_meta>(&self, key: S) -> Result { + pub fn get_meta>(&self, key: S) -> Result> { use crate::inner::schema::meta::dsl; let key = key.as_ref(); @@ -210,12 +214,57 @@ impl UpEndConnection { let _lock = self.lock.read().unwrap(); let conn = self.pool.get()?; - dsl::meta + let result = dsl::meta .filter(dsl::key.eq(key)) - .load::(&conn)? 
- .first() - .ok_or(anyhow!(r#"No META "{key}" value found."#)) - .map(|mv| mv.value.clone()) + .load::(&conn)?; + let result = result.first(); + Ok(result.map(|v| v.value.clone())) + } + + pub fn set_meta, T: AsRef>(&self, key: S, value: T) -> Result<()> { + use crate::inner::schema::meta::dsl; + let key = key.as_ref(); + let value = value.as_ref(); + + trace!("Setting META:{key} to {value}"); + + let _lock = self.lock.write().unwrap(); + let conn = self.pool.get()?; + + diesel::replace_into(dsl::meta) + .values((dsl::key.eq(key), dsl::value.eq(value))) + .execute(&conn)?; + + Ok(()) + } + + pub fn set_vault_options(&self, options: VaultOptions) -> Result<()> { + if let Some(tree_mode) = options.tree_mode { + let tree_mode = match tree_mode { + VaultTreeMode::Flat => "FLAT", + VaultTreeMode::DepthFirst => "DEPTH_FIRST", + VaultTreeMode::Mirror => "MIRROR", + }; + self.set_meta("VAULT_TREE_MODE", tree_mode)?; + } + Ok(()) + } + + pub fn get_vault_options(&self) -> Result { + let tree_mode = match self.get_meta("VAULT_TREE_MODE")? 
{ + Some(mode) => match mode.as_str() { + "FLAT" => Some(VaultTreeMode::Flat), + "DEPTH_FIRST" => Some(VaultTreeMode::DepthFirst), + "MIRROR" => Some(VaultTreeMode::Mirror), + _ => { + warn!("Unknown vault tree mode: {}", mode); + None + } + }, + None => None, + }; + + Ok(VaultOptions { tree_mode }) } pub fn retrieve_entry(&self, hash: &UpMultihash) -> Result> { @@ -432,7 +481,7 @@ impl UpEndConnection { #[cfg(test)] mod test { - use upend_base::constants::{ATTR_LABEL, ATTR_IN}; + use upend_base::constants::{ATTR_IN, ATTR_LABEL}; use super::*; use tempfile::TempDir; @@ -545,3 +594,16 @@ mod test { assert_eq!(result[0].value, EntryValue::Address(random_entity)); } } + +#[derive(Debug, Serialize, Deserialize)] +pub struct VaultOptions { + pub tree_mode: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub enum VaultTreeMode { + Flat, + DepthFirst, + #[default] + Mirror, +} diff --git a/db/src/stores/fs/mod.rs b/db/src/stores/fs/mod.rs index 5639c7a..43384ad 100644 --- a/db/src/stores/fs/mod.rs +++ b/db/src/stores/fs/mod.rs @@ -1,10 +1,12 @@ use self::db::files; -use super::{Blob, StoreError, UpStore, UpdatePathOutcome}; +use super::{Blob, StoreError, UpStore, UpdateOptions, UpdatePathOutcome}; use crate::hierarchies::{resolve_path, resolve_path_cached, ResolveCache, UHierPath, UNode}; use crate::jobs::{JobContainer, JobHandle}; use crate::util::hash_at_path; -use crate::{ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, UPEND_SUBDIR}; +use crate::{ + ConnectionOptions, LoggingHandler, UpEndConnection, UpEndDatabase, VaultTreeMode, UPEND_SUBDIR, +}; use anyhow::{anyhow, Error, Result}; use chrono::prelude::*; use diesel::r2d2::{self, ConnectionManager, ManageConnection}; @@ -14,6 +16,7 @@ use lru::LruCache; use rayon::prelude::*; use serde_json::json; use std::borrow::Borrow; +use std::collections::HashMap; use std::convert::TryInto; use std::path::PathBuf; use std::path::{Component, Path}; @@ -91,12 +94,13 @@ impl FsStore { 
&self, db: D, job_handle: JobHandle, - quick_check: bool, - _disable_synchronous: bool, + options: UpdateOptions, ) -> Result> { let start = Instant::now(); info!("Vault rescan started."); + let quick_check = options.initial; + let db = db.borrow(); let upconnection = db.connection()?; @@ -118,7 +122,7 @@ impl FsStore { // Walk through the vault, find all paths trace!("Traversing vault directory"); let absolute_dir_path = fs::canonicalize(&*self.path)?; - let path_entries: Vec = WalkDir::new(&*self.path) + let pathbufs: Vec = WalkDir::new(&*self.path) .follow_links(true) .into_iter() .filter_map(|e| e.ok()) @@ -127,21 +131,102 @@ impl FsStore { .filter(|e| !e.starts_with(absolute_dir_path.join(UPEND_SUBDIR))) .collect(); + let mut upaths: HashMap = HashMap::new(); + match options.tree_mode { + VaultTreeMode::Flat => { + for pb in &pathbufs { + let normalized_path = self.normalize_path(pb).unwrap(); + let dirname = normalized_path.parent().and_then(|p| p.components().last()); + + let upath = UHierPath(if let Some(dirname) = dirname { + vec![ + UNode::new("NATIVE").unwrap(), + UNode::new(dirname.as_os_str().to_string_lossy().to_string()).unwrap(), + ] + } else { + vec![UNode::new("NATIVE").unwrap()] + }); + + upaths.insert(pb.clone(), upath); + } + } + VaultTreeMode::DepthFirst => { + let mut shallowest: HashMap = HashMap::new(); + for path in &pathbufs { + let normalized_path = self.normalize_path(path).unwrap(); + let dirname = normalized_path.parent().and_then(|p| p.components().last()); + if let Some(dirname) = dirname { + let dirname = dirname.as_os_str().to_string_lossy().to_string(); + if let Some(existing_path) = shallowest.get_mut(&dirname) { + if existing_path.components().count() > path.components().count() { + *existing_path = path.clone(); + } + } else { + shallowest.insert(dirname, path.clone()); + } + } + } + for path in &pathbufs { + let normalized_path = self.normalize_path(path).unwrap(); + let dirname = normalized_path.parent().and_then(|p| 
p.components().last()); + if let Some(dirname) = dirname { + let dirname = dirname.as_os_str().to_string_lossy().to_string(); + let shallowest_path = shallowest.get(&dirname).unwrap(); + let upath = + iter::once(UNode::new("NATIVE").unwrap()) + .chain(self.normalize_path(shallowest_path).unwrap().parent().unwrap().iter().map( + |component| { + UNode::new(component.to_string_lossy().to_string()).unwrap() + }, + )) + .collect::>(); + upaths.insert(path.clone(), UHierPath(upath)); + } else { + upaths.insert(path.clone(), UHierPath(vec![UNode::new("NATIVE").unwrap()])); + } + } + } + VaultTreeMode::Mirror => { + for pb in &pathbufs { + let normalized_path = self.normalize_path(&pb).unwrap(); + let path = normalized_path.parent().unwrap(); + + let upath = iter::once(UNode::new("NATIVE").unwrap()) + .chain(path.iter().map(|component| { + UNode::new(component.to_string_lossy().to_string()).unwrap() + })) + .collect::>(); + + upaths.insert(pb.clone(), UHierPath(upath)); + } + } + }; + + let path_entries = pathbufs + .into_iter() + .map(|pb| { + let upath = upaths.remove(&pb).unwrap(); + (pb, upath) + }) + .collect::>(); + // Prepare for processing let existing_files = Arc::new(RwLock::new(self.retrieve_all_files()?)); // Actual processing let count = RwLock::new(0_usize); - let resolve_cache = Arc::new(Mutex::new(LruCache::new(256))); + let resolve_cache: Arc, UNode), Address>>> = + Arc::new(Mutex::new(LruCache::new(256))); let total = path_entries.len() as f32; let shared_job_handle = Arc::new(Mutex::new(job_handle)); let path_outcomes: Vec = path_entries .into_par_iter() - .map(|path| { + .map(|(path, upath)| { let result = self.process_directory_entry( db, &resolve_cache, path.clone(), + upath, &existing_files, quick_check, ); @@ -239,6 +324,7 @@ impl FsStore { db: D, resolve_cache: &Arc>, path: PathBuf, + upath: UHierPath, existing_files: &Arc>>, quick_check: bool, ) -> Result { @@ -329,6 +415,7 @@ impl FsStore { self.insert_file_with_metadata( 
&db.borrow().connection()?, &normalized_path, + upath, file_hash.unwrap(), None, size, @@ -346,6 +433,7 @@ impl FsStore { &self, connection: &UpEndConnection, path: &Path, + upath: UHierPath, hash: UpMultihash, name_hint: Option, ) -> Result
{ @@ -367,6 +455,7 @@ impl FsStore { self.insert_file_with_metadata( connection, &normalized_path, + upath, hash, name_hint, size, @@ -381,6 +470,7 @@ impl FsStore { &self, connection: &UpEndConnection, normalized_path: &Path, + upath: UHierPath, hash: UpMultihash, name: Option, size: i64, @@ -432,15 +522,8 @@ impl FsStore { // Add the appropriate entries w/r/t virtual filesystem location let components = normalized_path.components().collect::>(); - let (filename, dir_path) = components.split_last().unwrap(); + let filename = components.last().unwrap(); - let upath = UHierPath( - iter::once(UNode::new("NATIVE").unwrap()) - .chain(dir_path.iter().map(|component| { - UNode::new(component.as_os_str().to_string_lossy().to_string()).unwrap() - })) - .collect(), - ); let resolved_path = match resolve_cache { Some(cache) => resolve_path_cached(connection, &upath, true, cache)?, None => resolve_path(connection, &upath, true)?, @@ -644,8 +727,17 @@ impl UpStore for FsStore { fs::copy(file_path, &final_path).map_err(|e| StoreError::Unknown(e.to_string()))?; - self.add_file(&connection, &final_path, hash.clone(), name_hint) - .map_err(|e| StoreError::Unknown(e.to_string()))?; + self.add_file( + &connection, + &final_path, + UHierPath(vec![ + UNode::new("NATIVE").unwrap(), + UNode::new("INCOMING").unwrap(), + ]), + hash.clone(), + name_hint, + ) + .map_err(|e| StoreError::Unknown(e.to_string()))?; } Ok(hash) @@ -655,18 +747,18 @@ impl UpStore for FsStore { &self, db: &UpEndDatabase, mut job_container: JobContainer, - initial: bool, + options: UpdateOptions, ) -> Result, StoreError> { trace!( - "Running a vault update of {:?}, initial = {}.", + "Running a vault update of {:?}, options = {:?}.", self.path, - initial + options ); let job_result = job_container.add_job("REIMPORT", "Scaning vault directory..."); match job_result { Ok(job_handle) => { - let result = self.rescan_vault(db, job_handle, !initial, initial); + let result = self.rescan_vault(db, job_handle, options); if 
let Err(err) = &result { error!("Update did not succeed! {:?}", err); @@ -769,7 +861,14 @@ mod test { let job_container = JobContainer::new(); // Store scan - let rescan_result = store.update(&open_result.db, job_container, false); + let rescan_result = store.update( + &open_result.db, + job_container, + UpdateOptions { + initial: true, + tree_mode: VaultTreeMode::default(), + }, + ); assert!(rescan_result.is_ok()); } @@ -808,7 +907,14 @@ mod test { // Initial scan let job = job_container.add_job("RESCAN", "TEST JOB").unwrap(); - let rescan_result = store.rescan_vault(&open_result.db, job, quick, true); + let rescan_result = store.rescan_vault( + &open_result.db, + job, + UpdateOptions { + initial: quick, + tree_mode: VaultTreeMode::default(), + }, + ); assert!(rescan_result.is_ok()); let rescan_result = rescan_result.unwrap(); @@ -821,7 +927,14 @@ mod test { // Modification-less rescan let job = job_container.add_job("RESCAN", "TEST JOB").unwrap(); - let rescan_result = store.rescan_vault(&open_result.db, job, quick, false); + let rescan_result = store.rescan_vault( + &open_result.db, + job, + UpdateOptions { + initial: quick, + tree_mode: VaultTreeMode::default(), + }, + ); assert!(rescan_result.is_ok()); let rescan_result = rescan_result.unwrap(); @@ -837,7 +950,14 @@ mod test { std::fs::remove_file(temp_dir_path.join("hello-world.txt")).unwrap(); let job = job_container.add_job("RESCAN", "TEST JOB").unwrap(); - let rescan_result = store.rescan_vault(&open_result.db, job, quick, false); + let rescan_result = store.rescan_vault( + &open_result.db, + job, + UpdateOptions { + initial: quick, + tree_mode: VaultTreeMode::default(), + }, + ); assert!(rescan_result.is_ok()); let rescan_result = rescan_result.unwrap(); @@ -864,4 +984,138 @@ mod test { .count() ); } + + fn _prepare_hier_vault(tree_mode: VaultTreeMode) -> UpEndConnection { + // Prepare temporary filesystem structure + let temp_dir = TempDir::new().unwrap(); + let temp_dir_path = 
temp_dir.path().canonicalize().unwrap(); + + let file_path = temp_dir_path + .join("foo") + .join("bar") + .join("baz") + .join("baz.txt"); + std::fs::create_dir_all(file_path.parent().unwrap()).unwrap(); + let mut tmp_file = File::create(&file_path).unwrap(); + writeln!(tmp_file, "Hello, world!").unwrap(); + + let file_path = temp_dir_path.join("foo").join("baz").join("qux.txt"); + std::fs::create_dir_all(file_path.parent().unwrap()).unwrap(); + let mut tmp_file = File::create(&file_path).unwrap(); + writeln!(tmp_file, "Hello, world 2!").unwrap(); + + let file_path = temp_dir_path.join("zot.txt"); + std::fs::create_dir_all(file_path.parent().unwrap()).unwrap(); + let mut tmp_file = File::create(&file_path).unwrap(); + writeln!(tmp_file, "Hello, world 3!").unwrap(); + + // Initialize database + let open_result = UpEndDatabase::open(&temp_dir, true).unwrap(); + let store = FsStore::from_path(&temp_dir).unwrap(); + let mut job_container = JobContainer::new(); + + // Initial scan + let job = job_container.add_job("RESCAN", "TEST JOB").unwrap(); + store + .rescan_vault( + &open_result.db, + job, + UpdateOptions { + initial: true, + tree_mode, + }, + ) + .unwrap(); + + open_result.db.connection().unwrap() + } + + #[test] + fn test_mirror_mode() { + let connection = _prepare_hier_vault(VaultTreeMode::Mirror); + + let native_path = UHierPath(vec![UNode::new("NATIVE".to_string()).unwrap()]); + assert!(resolve_path(&connection, &native_path, false).is_ok(), "Failed: NATIVE"); + + let first_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("foo".to_string()).unwrap(), + UNode::new("bar".to_string()).unwrap(), + UNode::new("baz".to_string()).unwrap(), + UNode::new("baz.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &first_path, false).is_ok(), "Failed: `foo/bar/baz/baz.txt`"); + + let second_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("foo".to_string()).unwrap(), + 
UNode::new("baz".to_string()).unwrap(), + UNode::new("qux.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &second_path, false).is_ok(), "Failed: `foo/baz/qux.txt`"); + + let third_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("zot.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &third_path, false).is_ok(), "Failed: `zot.txt`"); + } + + #[test] + fn test_flat_mode() { + let connection = _prepare_hier_vault(VaultTreeMode::Flat); + + let native_path = UHierPath(vec![UNode::new("NATIVE".to_string()).unwrap()]); + assert!(resolve_path(&connection, &native_path, false).is_ok(), "Failed: NATIVE"); + + let first_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("baz".to_string()).unwrap(), + UNode::new("baz.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &first_path, false).is_ok(), "Failed: `baz/baz.txt`"); + + let second_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("baz".to_string()).unwrap(), + UNode::new("qux.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &second_path, false).is_ok(), "Failed: `baz/qux.txt`"); + + let third_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("zot.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &third_path, false).is_ok(), "Failed: `zot.txt`"); + } + + #[test] + fn test_depth_mode() { + let connection = _prepare_hier_vault(VaultTreeMode::DepthFirst); + + let native_path = UHierPath(vec![UNode::new("NATIVE".to_string()).unwrap()]); + assert!(resolve_path(&connection, &native_path, false).is_ok(), "Failed: NATIVE"); + + let first_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("foo".to_string()).unwrap(), + UNode::new("baz".to_string()).unwrap(), + UNode::new("baz.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &first_path, false).is_ok(), 
"Failed: `foo/baz/baz.txt`"); + + let second_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("foo".to_string()).unwrap(), + UNode::new("baz".to_string()).unwrap(), + UNode::new("qux.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &second_path, false).is_ok(), "Failed: `foo/baz/qux.txt`"); + + let third_path = UHierPath(vec![ + UNode::new("NATIVE".to_string()).unwrap(), + UNode::new("zot.txt".to_string()).unwrap(), + ]); + assert!(resolve_path(&connection, &third_path, false).is_ok(), "Failed: `zot.txt`"); + } } diff --git a/db/src/stores/mod.rs b/db/src/stores/mod.rs index 87a36cd..497014c 100644 --- a/db/src/stores/mod.rs +++ b/db/src/stores/mod.rs @@ -1,7 +1,7 @@ use std::path::{Path, PathBuf}; use super::{UpEndConnection, UpEndDatabase}; -use crate::jobs::JobContainer; +use crate::{jobs::JobContainer, VaultTreeMode}; use upend_base::hash::UpMultihash; pub mod fs; @@ -65,7 +65,13 @@ pub trait UpStore { &self, database: &UpEndDatabase, job_container: JobContainer, - initial: bool, + options: UpdateOptions, ) -> Result>; fn stats(&self) -> Result; } + +#[derive(Debug, Clone)] +pub struct UpdateOptions { + pub initial: bool, + pub tree_mode: VaultTreeMode, +} diff --git a/webui/src/App.svelte b/webui/src/App.svelte index b2c51e8..96180b1 100644 --- a/webui/src/App.svelte +++ b/webui/src/App.svelte @@ -1,5 +1,5 @@ @@ -356,6 +365,6 @@ .version { text-decoration: none; - opacity: .66; + opacity: 0.66; } diff --git a/webui/src/views/Setup.svelte b/webui/src/views/Setup.svelte new file mode 100644 index 0000000..8c2ddb6 --- /dev/null +++ b/webui/src/views/Setup.svelte @@ -0,0 +1,135 @@ + + +
+

{$i18n.t("Vault Setup")}

+
+

Tree mode

+
+
+ (mode = "Flat")} + active={mode === "Flat"} + > + Flat + +

+ {$i18n.t( + "All groups are created as direct descendants of the root group.", + )} +

+
+
+ (mode = "DepthFirst")} + active={mode === "DepthFirst"} + > + Depth-First + +

 + {$i18n.t( + "Groups sharing a name are merged together, with files placed under the shallowest occurrence of their group.", + )} +

+
+
+ (mode = "Mirror")} + active={mode === "Mirror"} + > + Mirror + +

+ {$i18n.t( + "Groups are nested reflecting the original file directory structure.", + )} +

+
+
+
+
+ submitOptions()} + > + {$i18n.t("Confirm and start scan")} + +
+
+ +