fix bug adding duplicate Files

initialize digest in _process_directory_entry via once_cell
feat/vaults
Tomáš Mládek 2021-05-28 23:36:00 +02:00
parent b565e141ac
commit 4468262f50
3 changed files with 12 additions and 2 deletions
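
The change leans on once_cell's unsync::Lazy: the closure handed to Lazy::new does not run until the value is first dereferenced, and the result is cached afterwards, so the file hash is only computed when the cheaper checks below do not settle the question. A minimal sketch of that pattern, assuming a hypothetical cheap_hash helper in place of the project's path.hash():

// Minimal sketch (not the project's code) of the once_cell pattern used here.
use std::fs;
use std::io::Read;
use std::path::Path;

use once_cell::unsync::Lazy;

// Hypothetical stand-in for the real content hash; it just sums the bytes so
// the example stays self-contained.
fn cheap_hash(path: &Path) -> std::io::Result<u64> {
    let mut buf = Vec::new();
    fs::File::open(path)?.read_to_end(&mut buf)?;
    Ok(buf.iter().map(|&b| b as u64).sum())
}

fn main() -> std::io::Result<()> {
    let path = Path::new("Cargo.toml");

    // Nothing is hashed yet; the closure runs on first dereference only.
    let digest = Lazy::new(|| cheap_hash(path));

    let metadata = fs::metadata(path)?;
    if metadata.len() == 0 {
        // Fast path: the cheap metadata check sufficed, the hash never ran.
        println!("empty file, no hashing needed");
    } else {
        // Slow path: dereferencing forces the computation (and caches it).
        match &*digest {
            Ok(h) => println!("digest: {}", h),
            Err(e) => eprintln!("hashing failed: {}", e),
        }
    }
    Ok(())
}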

Cargo.lock (generated)

@@ -2361,6 +2361,7 @@ dependencies = [
  "libsqlite3-sys",
  "log",
  "nonempty",
+ "once_cell",
  "rayon",
  "serde",
  "serde_json",

Cargo.toml

@@ -18,6 +18,7 @@ thiserror = "1.0"
 rayon = "1.4.0"
 futures-util = "~0.3.12"
 lazy_static = "1.4.0"
+once_cell = "1.7.2"
 diesel = { version = "1.4", features = ["sqlite", "r2d2", "chrono", "serde_json"] }
 diesel_migrations = "1.4"


@@ -14,6 +14,7 @@ use chrono::prelude::*;
 use diesel::sqlite::Sqlite;
 use diesel::Connection;
 use log::{error, info, trace, warn};
+use once_cell::unsync::Lazy;
 use rayon::prelude::*;
 use serde_json::Value;
 use std::convert::TryFrom;
@@ -518,6 +519,8 @@ fn _process_directory_entry<P: AsRef<Path>>(
     let normalized_path = path.strip_prefix(&directory_path)?;
     let normalized_path_str = normalized_path.to_str().expect("path not valid unicode?!");
+    let digest = Lazy::new(|| path.hash());
     // Get size & mtime for quick comparison
     let metadata = fs::metadata(&path)?;
     let size = metadata.len() as i64;
@@ -542,7 +545,9 @@ fn _process_directory_entry<P: AsRef<Path>>(
         .find(|(_, file)| file.path == normalized_path_str);
     if let Some((idx, existing_file)) = maybe_existing_file {
-        if size == existing_file.size && mtime == existing_file.mtime {
+        if (size == existing_file.size && mtime == existing_file.mtime)
+            || ((*digest).is_ok() && &existing_file.hash == (*digest).as_ref().unwrap())
+        {
             if !existing_file.valid {
                 file_set_valid(&db_pool.write().unwrap().get()?, existing_file.id, true)?;
             }
@@ -553,7 +558,10 @@ fn _process_directory_entry<P: AsRef<Path>>(
     }
     // If not, add it!
-    let digest = path.hash()?;
+    if let Err(err) = &*digest {
+        return Err(anyhow!(format!("Error hashing: {}", err)));
+    }
+    let digest = (*digest).as_ref().unwrap().clone();
     let new_file = models::NewFile {
         path: normalized_path_str.to_string(),
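
Read together, the last two hunks treat a directory entry as matching an existing File row if its size and mtime are unchanged, or, failing that, if its lazily computed content hash equals the stored one; only when neither holds does the code fall through to inserting a new row, bailing out early if hashing itself failed. A self-contained sketch of that decision, with StoredFile and the byte-vector digest as hypothetical stand-ins for the project's models::File and hash type:

// Sketch only; StoredFile and the Vec<u8> digest are hypothetical stand-ins.
struct StoredFile {
    size: i64,
    mtime: i64,
    hash: Vec<u8>,
}

// Mirrors the condition added in the diff: cheap size/mtime comparison first,
// falling back to the content hash, which is only usable if hashing succeeded
// (the Result mirrors path.hash() being fallible).
fn matches_existing(
    existing: &StoredFile,
    size: i64,
    mtime: i64,
    digest: &Result<Vec<u8>, String>,
) -> bool {
    (size == existing.size && mtime == existing.mtime)
        || digest.as_ref().map_or(false, |hash| *hash == existing.hash)
}

fn main() {
    let stored = StoredFile {
        size: 3,
        mtime: 1_622_238_960,
        hash: vec![0xab, 0xcd],
    };
    // mtime changed but the content hash still matches: not re-added.
    assert!(matches_existing(&stored, 3, 1_622_238_999, &Ok(vec![0xab, 0xcd])));
    // Neither check passes: the entry would be added as a new File.
    assert!(!matches_existing(&stored, 5, 1_622_238_999, &Err("io error".into())));
    println!("ok");
}

Because || short-circuits, the hash comparison in the real code never forces the Lazy when size and mtime already match, so unchanged files are still skipped without any hashing.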