From da4d7e6758a391d5f1be4b91371e7c2378f5bb9f Mon Sep 17 00:00:00 2001
From: Tomáš Mládek
Date: Thu, 25 Mar 2021 21:29:49 +0100
Subject: [PATCH] attach mime entry directly to file hash; add mtime & size

---
 src/filesystem.rs | 43 +++++++++++++++++++++++++++++++++----------
 1 file changed, 33 insertions(+), 10 deletions(-)

diff --git a/src/filesystem.rs b/src/filesystem.rs
index 6f8c285..f57e188 100644
--- a/src/filesystem.rs
+++ b/src/filesystem.rs
@@ -39,6 +39,8 @@ const FILE_TYPE: &str = "FS_FILE";
 const FILE_IDENTITY_KEY: &str = "FILE_IS";
 const FILENAME_KEY: &str = "FILE_NAME";
 const FILE_MIME_KEY: &str = "FILE_MIME";
+const FILE_MTIME_KEY: &str = "FILE_MTIME";
+const FILE_SIZE_KEY: &str = "FILE_SIZE";
 lazy_static! {
     static ref FILE_TYPE_INVARIANT: InvariantEntry = InvariantEntry {
         attribute: String::from(TYPE_IS_ATTR),
@@ -507,12 +509,14 @@ fn _process_directory_entry<P: AsRef<Path>>(
 ) -> UpdatePathResult {
     info!("Processing: {:?}", path);
 
+    // Prepare the data
     let db_pool = Arc::clone(&db_pool);
     let existing_files = Arc::clone(&existing_files);
 
     let normalized_path = path.strip_prefix(&directory_path)?;
     let normalized_path_str = normalized_path.to_str().expect("path not valid unicode?!");
 
+    // Get size & mtime for quick comparison
     let metadata = fs::metadata(&path)?;
     let size = metadata.len() as i64;
     if size < 0 {
@@ -525,7 +529,9 @@ fn _process_directory_entry<P: AsRef<Path>>(
         })
         .ok();
 
+    // Check if the path entry for this file already exists in database
     {
+        // Only grab existing_files for the duration of this block
         let mut existing_files = existing_files.write().unwrap();
 
         let maybe_existing_file = existing_files
@@ -536,15 +542,15 @@ fn _process_directory_entry<P: AsRef<Path>>(
         if let Some((idx, existing_file)) = maybe_existing_file {
             if size == existing_file.size && mtime == existing_file.mtime {
                 if !existing_file.valid {
-                    let _ =
-                        file_set_valid(&db_pool.write().unwrap().get()?, existing_file.id, true)?;
+                    file_set_valid(&db_pool.write().unwrap().get()?, existing_file.id, true)?;
                 }
-                existing_files.remove(idx);
+                existing_files.swap_remove(idx);
 
                 return Ok(UpdatePathOutcome::Unchanged(path));
             }
         }
     }
 
+    // If not, add it!
     let digest = path.hash()?;
     let new_file = models::NewFile {
@@ -557,6 +563,30 @@ fn _process_directory_entry<P: AsRef<Path>>(
 
     insert_file(&db_pool.write().unwrap().get()?, new_file)?;
 
+    // Insert metadata
+    let size_entry = Entry {
+        entity: Address::Hash(digest.clone()),
+        attribute: FILE_SIZE_KEY.to_string(),
+        value: EntryValue::Value(Value::from(size)),
+    };
+    insert_entry(&db_pool.write().unwrap().get()?, size_entry)?;
+
+    if let Some(mtime) = mtime {
+        let mtime_entry = Entry {
+            entity: Address::Hash(digest.clone()),
+            attribute: FILE_MTIME_KEY.to_string(),
+            value: EntryValue::Value(Value::from(mtime.timestamp())),
+        };
+        insert_entry(&db_pool.write().unwrap().get()?, mtime_entry)?;
+    }
+    let mime_entry = Entry {
+        entity: Address::Hash(digest.clone()),
+        attribute: FILE_MIME_KEY.to_string(),
+        value: EntryValue::Value(Value::String(tree_magic::from_filepath(&path))),
+    };
+    insert_entry(&db_pool.write().unwrap().get()?, mime_entry)?;
+
+    // Finally, add the appropriate entries w/r/t virtual filesystem location
     let components = normalized_path.components().collect::<Vec<_>>();
     let (filename, dir_path) = components.split_last().unwrap();
 
@@ -599,13 +629,6 @@ fn _process_directory_entry<P: AsRef<Path>>(
     };
     insert_entry(&connection, identity_entry)?;
 
-    let mime_entry = Entry {
-        entity: file_address.clone(),
-        attribute: FILE_MIME_KEY.to_string(),
-        value: EntryValue::Value(Value::String(tree_magic::from_filepath(&path))),
-    };
-    insert_entry(&connection, mime_entry)?;
-
     let dir_has_entry = Entry {
         entity: parent_dir.clone(),
         attribute: DIR_HAS_KEY.to_string(),