chore: log -> tracing

feat/type-attributes
Tomáš Mládek 2022-10-23 15:59:10 +02:00
parent 4a2eaf3c33
commit 6394a70030
10 changed files with 11 additions and 11 deletions
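This commit swaps the log crate's macros for the tracing crate's across the tree. The two macro families take the same call forms, so each file only needs its "use log::..." import replaced with the matching "use tracing::..." import. A minimal before/after sketch of a call site (illustrative only, not code from this repository):

// Illustrative sketch: tracing's event macros accept the same arguments as
// log's, so the migration is essentially an import swap.
use tracing::{debug, info, trace, warn};

fn scan(path: &str) {
    trace!("scanning {}", path); // previously: log::trace!("scanning {}", path);
    debug!(%path, "starting");   // tracing additionally supports structured fields
    info!("scan finished");
    warn!("no entries found");
}

fn main() {
    scan("/tmp");
}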

View File

@@ -2,8 +2,8 @@ use std::convert::TryFrom;
 use std::sync::{Arc, Mutex};
 use anyhow::{anyhow, Result};
-use log::trace;
 use lru::LruCache;
+use tracing::trace;
 use uuid::Uuid;
 use crate::addressing::{Address, Addressable};

View File

@@ -1,5 +1,5 @@
-use serde::Serialize;
 use super::schema::{data, meta};
+use serde::Serialize;

 #[derive(Queryable, Insertable, Serialize, Debug)]
 #[table_name = "data"]

View File

@@ -18,7 +18,6 @@ use chrono::prelude::*;
 use diesel::r2d2::{self, ConnectionManager, ManageConnection};
 use diesel::ExpressionMethods;
 use diesel::{Connection, QueryDsl, RunQueryDsl, SqliteConnection};
-use log::{debug, error, info, trace, warn};
 use lru::LruCache;
 use rayon::prelude::*;
 use serde_json::json;
@@ -29,6 +28,7 @@ use std::path::{Component, Path};
 use std::sync::{Arc, Mutex, RwLock};
 use std::time::{Instant, SystemTime, UNIX_EPOCH};
 use std::{fs, iter};
+use tracing::{debug, error, info, trace, warn};
 use walkdir::WalkDir;

 mod db;
@@ -131,7 +131,7 @@ impl FsStore {
         Ok(FsStore { path, pool, lock })
     }

-    #[tracing::instrument(name="FS store rescan", skip_all)]
+    #[tracing::instrument(name = "FS store rescan", skip_all)]
     fn rescan_vault<D: Borrow<UpEndDatabase>>(
         &self,
         db: D,

View File

@@ -59,7 +59,7 @@ pub trait Extractor {
     }
 }

-#[tracing::instrument(name="Extract all metadata", skip_all)]
+#[tracing::instrument(name = "Extract all metadata", skip_all)]
 pub fn extract_all<D: Borrow<UpEndDatabase>>(
     db: D,
     store: Arc<Box<dyn UpStore + Send + Sync>>,
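The only change in this hunk and in the rescan_vault hunk above is formatting of the tracing::instrument attribute (spaces around the equals sign). For context, a standalone sketch of what the attribute does, using a stand-in type rather than the project's real signatures: it wraps the function in a span whose name is overridden by the name argument, and skip_all keeps the function's arguments out of the span's recorded fields.

use tracing::instrument;

struct Db; // stand-in type for this sketch; not the project's UpEndDatabase

// The span is named "FS store rescan" instead of the function's own name,
// and skip_all omits the arguments from the span's fields.
#[instrument(name = "FS store rescan", skip_all)]
fn rescan_vault(_db: &Db) {
    // events emitted here are recorded inside the named span
    tracing::trace!("rescan running");
}

fn main() {
    rescan_vault(&Db);
}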

View File

@@ -12,9 +12,9 @@ use actix_cors::Cors;
 use actix_web::{middleware, App, HttpServer};
 use anyhow::Result;
 use clap::{App as ClapApp, Arg};
-use log::{debug, info, warn};
 use rand::{thread_rng, Rng};
 use std::sync::Arc;
+use tracing::{debug, info, warn};
 use tracing_subscriber::filter::{EnvFilter, LevelFilter};

 use crate::{
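The context line importing EnvFilter and LevelFilter suggests this binary already installs a tracing subscriber in main. A typical wiring for those two imports, as a guess rather than this project's actual setup (needs tracing-subscriber's env-filter feature):

use tracing_subscriber::filter::{EnvFilter, LevelFilter};

fn main() {
    // Honour RUST_LOG when set, otherwise default to INFO.
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::builder()
                .with_default_directive(LevelFilter::INFO.into())
                .from_env_lossy(),
        )
        .init();

    tracing::info!("tracing subscriber installed");
}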

View File

@@ -3,7 +3,7 @@ use crate::util::hash::b58_encode;
 use crate::util::hash::Hash;
 use crate::util::jobs::{JobContainer, JobState};
 use anyhow::{anyhow, Result};
-use log::{debug, trace};
+use tracing::{debug, trace};
 use std::{
     collections::HashMap,

View File

@@ -26,7 +26,6 @@ use actix_web::{
 use anyhow::{anyhow, Result};
 use futures::channel::oneshot;
 use futures_util::TryStreamExt;
-use log::{debug, info, trace};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use std::collections::HashMap;
@@ -35,6 +34,7 @@ use std::io::Write;
 use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tempfile::NamedTempFile;
+use tracing::{debug, info, trace};
 use uuid::Uuid;

 #[cfg(feature = "desktop")]

View File

@@ -6,10 +6,10 @@ use diesel::deserialize::FromSql;
 use diesel::sqlite::Sqlite;
 use diesel::{deserialize, sql_types};
 use filebuffer::FileBuffer;
-use log::trace;
 use multihash::Hasher;
 use serde::{ser, Serialize, Serializer};
 use std::path::{Path, PathBuf};
+use tracing::trace;

 #[derive(Debug, Clone, Eq, PartialEq, FromSqlRow, Hash)]
 pub struct Hash(pub Vec<u8>);

View File

@@ -1,10 +1,10 @@
 use anyhow::{anyhow, Result};
-use log::warn;
 use serde::{Serialize, Serializer};
 use std::{
     collections::HashMap,
     sync::{Arc, RwLock},
 };
+use tracing::warn;
 use uuid::Uuid;

 #[derive(Default, Serialize, Clone)]

View File

@@ -2,7 +2,7 @@ pub mod exec;
 pub mod hash;
 pub mod jobs;

-use log::debug;
+use tracing::debug;

 #[derive(Default)]
 pub struct LoggerSink {