refactor: clippy fixes
ci/woodpecker/push/woodpecker Pipeline failed Details

refactor/sveltekit
Tomáš Mládek 2024-01-17 23:48:48 +01:00
parent 8043e25008
commit e6d7328b29
7 changed files with 31 additions and 33 deletions

View File

@@ -42,7 +42,7 @@ impl Extractor for ID3Extractor {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
-if let Some(file) = files.get(0) {
+if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,
@@ -90,7 +90,7 @@ impl Extractor for ID3Extractor {
let tmp_dir = tempfile::tempdir()?;
let tmp_path = tmp_dir.path().join(format!("img-{}", idx));
let mut file = std::fs::File::create(&tmp_path)?;
-file.write_all(&*picture.data)?;
+file.write_all(&picture.data)?;
let hash = store.store(
connection,
Blob::from_filepath(&tmp_path),

View File

@@ -43,7 +43,7 @@ impl Extractor for ExifExtractor {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
-if let Some(file) = files.get(0) {
+if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,

View File

@@ -48,7 +48,7 @@ impl Extractor for MediaExtractor {
if let Address::Hash(hash) = address {
let files = store.retrieve(hash)?;
-if let Some(file) = files.get(0) {
+if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,

View File

@@ -447,32 +447,30 @@ async fn main() -> Result<()> {
}
};
-    if !args.no_initial_update {
-        if !open_result.new || args.rescan_mode.is_some() {
-            info!("Running update...");
-            block_background::<_, _, anyhow::Error>(move || {
-                let connection: upend_db::UpEndConnection = upend.connection()?;
+    if !args.no_initial_update && (!open_result.new || args.rescan_mode.is_some()) {
+        info!("Running update...");
+        block_background::<_, _, anyhow::Error>(move || {
+            let connection: upend_db::UpEndConnection = upend.connection()?;
-                let tree_mode = args.rescan_mode.unwrap_or_else(|| {
-                    connection
-                        .get_vault_options()
-                        .unwrap()
-                        .blob_mode
-                        .unwrap_or_default()
-                });
-                let _ = state.store.update(
-                    &upend,
-                    job_container.clone(),
-                    upend_db::stores::UpdateOptions {
-                        initial: false,
-                        tree_mode,
-                    },
-                );
-                let _ = extractors::extract_all(upend, state.store, job_container);
-                Ok(())
+            let tree_mode = args.rescan_mode.unwrap_or_else(|| {
+                connection
+                    .get_vault_options()
+                    .unwrap()
+                    .blob_mode
+                    .unwrap_or_default()
+            });
-        }
+            let _ = state.store.update(
+                &upend,
+                job_container.clone(),
+                upend_db::stores::UpdateOptions {
+                    initial: false,
+                    tree_mode,
+                },
+            );
+            let _ = extractors::extract_all(upend, state.store, job_container);
+            Ok(())
         });
     }
#[cfg(feature = "desktop")]

View File

@@ -90,7 +90,7 @@ impl PreviewStore {
} else {
trace!("Calculating preview for {hash:?}...");
let files = self.store.retrieve(&hash)?;
-if let Some(file) = files.get(0) {
+if let Some(file) = files.first() {
let file_path = file.get_file_path();
let mut job_handle = job_container.add_job(
None,

View File

@@ -142,7 +142,7 @@ pub async fn get_raw(
let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
.await?
.map_err(ErrorInternalServerError)?;
-if let Some(blob) = blobs.get(0) {
+if let Some(blob) = blobs.first() {
let file_path = blob.get_file_path();
if query.native.is_none() {
@@ -231,11 +231,11 @@ pub async fn head_raw(
let blobs = web::block(move || _store.retrieve(_hash.as_ref()))
.await?
.map_err(ErrorInternalServerError)?;
-if let Some(blob) = blobs.get(0) {
+if let Some(blob) = blobs.first() {
let file_path = blob.get_file_path();
let mut response = HttpResponse::NoContent();
-if let Some(mime_type) = tree_magic_mini::from_filepath(&file_path) {
+if let Some(mime_type) = tree_magic_mini::from_filepath(file_path) {
if let Ok(mime) = mime_type.parse::<mime::Mime>() {
return Ok(response.content_type(mime).finish());
}

View File

@@ -285,7 +285,7 @@ impl UpEndConnection {
match entry.len() {
0 => Ok(None),
-1 => Ok(Some(Entry::try_from(entry.get(0).unwrap())?)),
+1 => Ok(Some(Entry::try_from(entry.first().unwrap())?)),
_ => {
unreachable!(
"Multiple entries returned with the same hash - this should be impossible!"