Update stored mtime when the hash is unchanged, to avoid re-hashing the file next time
This commit is contained in:
parent
cb832b6789
commit
b49315e22b
2 changed files with 32 additions and 10 deletions
|
@ -18,6 +18,7 @@ use crate::database::lang::Query;
|
|||
use crate::util::hash::{Hash, Hashable};
|
||||
use crate::util::LoggerSink;
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::NaiveDateTime;
|
||||
use diesel::debug_query;
|
||||
use diesel::prelude::*;
|
||||
use diesel::r2d2::{self, ConnectionManager};
|
||||
|
@ -83,6 +84,20 @@ pub fn get_latest_files<C: Connection<Backend = Sqlite>>(
|
|||
Ok(matches)
|
||||
}
|
||||
|
||||
pub fn file_update_mtime<C: Connection<Backend = Sqlite>>(
|
||||
connection: &C,
|
||||
file_id: i32,
|
||||
m_time: Option<NaiveDateTime>,
|
||||
) -> Result<usize> {
|
||||
use crate::database::inner::schema::files::dsl::*;
|
||||
|
||||
debug!("Setting file ID {}'s mtime = {:?}", file_id, m_time);
|
||||
|
||||
Ok(diesel::update(files.filter(id.eq(file_id)))
|
||||
.set(mtime.eq(m_time))
|
||||
.execute(connection)?)
|
||||
}
|
||||
|
||||
pub fn file_set_valid<C: Connection<Backend = Sqlite>>(
|
||||
connection: &C,
|
||||
file_id: i32,
|
||||
|
|
|
@ -12,7 +12,8 @@ use crate::database::entry::{Entry, EntryValue, InvariantEntry};
|
|||
use crate::database::hierarchies::{resolve_path_cached, ResolveCache, UNode, UPath};
|
||||
use crate::database::inner::models;
|
||||
use crate::database::{
|
||||
file_set_valid, insert_entry, insert_file, retrieve_all_files, DbPool, UPEND_SUBDIR,
|
||||
file_set_valid, file_update_mtime, insert_entry, insert_file, retrieve_all_files, DbPool,
|
||||
UPEND_SUBDIR,
|
||||
};
|
||||
use crate::util::hash::{Hash, Hashable};
|
||||
use crate::util::jobs::{Job, JobContainer, JobId, State};
|
||||
|
@ -254,18 +255,24 @@ fn _process_directory_entry<P: AsRef<Path>>(
|
|||
let existing_file = existing_file.clone();
|
||||
drop(existing_files_read);
|
||||
|
||||
let mut same = size == existing_file.size && mtime == existing_file.mtime;
|
||||
if !same {
|
||||
file_hash = Some(path.hash()?);
|
||||
same = file_hash.as_ref().unwrap() == &existing_file.hash;
|
||||
}
|
||||
if size == existing_file.size {
|
||||
let same_mtime = mtime.is_some() && mtime == existing_file.mtime;
|
||||
let mut same_hash = false;
|
||||
|
||||
if same {
|
||||
if !existing_file.valid {
|
||||
file_set_valid(connection, existing_file.id, true)?;
|
||||
if !same_mtime {
|
||||
file_hash = Some(path.hash()?);
|
||||
same_hash = file_hash.as_ref().unwrap() == &existing_file.hash;
|
||||
}
|
||||
|
||||
{
|
||||
if same_mtime || same_hash {
|
||||
if mtime != existing_file.mtime {
|
||||
file_update_mtime(connection, existing_file.id, mtime)?;
|
||||
}
|
||||
|
||||
if !existing_file.valid {
|
||||
file_set_valid(connection, existing_file.id, true)?;
|
||||
}
|
||||
|
||||
let mut existing_files_write = existing_files.write().unwrap();
|
||||
let maybe_existing_file = existing_files_write
|
||||
.iter()
|
||||
|
|
Loading…
Reference in a new issue