fix: restore store stats functionality somewhat

This commit is contained in:
Tomáš Mládek 2022-09-16 16:26:52 +02:00
parent 7ce7615b3a
commit 5704be7975
5 changed files with 98 additions and 87 deletions

View file

@ -21,6 +21,7 @@ use diesel::{Connection, QueryDsl, RunQueryDsl, SqliteConnection};
use log::{debug, error, info, warn}; use log::{debug, error, info, warn};
use lru::LruCache; use lru::LruCache;
use rayon::prelude::*; use rayon::prelude::*;
use serde_json::json;
use std::borrow::Borrow; use std::borrow::Borrow;
use std::convert::{TryFrom, TryInto}; use std::convert::{TryFrom, TryInto};
use std::path::PathBuf; use std::path::PathBuf;
@ -693,6 +694,50 @@ impl UpStore for FsStore {
Err(err) => Err(StoreError::Unknown(err.to_string())), Err(err) => Err(StoreError::Unknown(err.to_string())),
} }
} }
fn stats(&self) -> std::result::Result<serde_json::Value, StoreError> {
    // Report per-blob storage statistics: every known file row, grouped by
    // content hash so each distinct blob appears once with all of its paths,
    // plus overall totals (blob count and deduplicated byte size).
    let files = self
        .retrieve_all_files()
        .map_err(|e| StoreError::Unknown(e.to_string()))?;

    let mut files_by_hash: std::collections::HashMap<_, Vec<_>> =
        std::collections::HashMap::new();
    for file in &files {
        // entry() does a single lookup instead of contains_key + insert
        // + get_mut (clippy: map_entry).
        files_by_hash.entry(&file.hash).or_default().push(file);
    }

    // Within each blob, list valid paths first: `false` sorts before
    // `true`, so the key is the negated validity flag.
    for paths in files_by_hash.values_mut() {
        paths.sort_unstable_by_key(|f| !f.valid);
    }

    let mut blobs = files_by_hash
        .iter()
        .map(|(hash, files)| {
            json!({
                "hash": hash,
                // All entries in the group share a hash, so the first
                // entry's size stands for the blob's size.
                "size": files[0].size,
                "paths": files.iter().map(|f| json!({
                    "added": f.added,
                    "valid": f.valid,
                    "path": f.path
                })).collect::<serde_json::Value>()
            })
        })
        .collect::<Vec<serde_json::Value>>();

    // Largest blobs first. Sorting by Reverse(size) replaces the previous
    // sort + reverse pair, and unwrap_or(0) removes the panic path that
    // `as_u64().unwrap()` had if a size were ever missing/non-numeric.
    blobs.sort_unstable_by_key(|b| std::cmp::Reverse(b["size"].as_u64().unwrap_or(0)));

    Ok(json!({
        "totals": {
            "count": files_by_hash.len(),
            // Sum one size per hash group: deduplicated on-disk total.
            "size": files_by_hash.values().map(|f| f[0].size as u64).sum::<u64>()
        },
        "blobs": blobs
    }))
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -49,6 +49,7 @@ pub enum UpdatePathOutcome {
Removed(PathBuf), Removed(PathBuf),
Failed(PathBuf, StoreError), Failed(PathBuf, StoreError),
} }
pub trait UpStore { pub trait UpStore {
fn retrieve(&self, hash: &Hash) -> Result<Vec<Blob>>; fn retrieve(&self, hash: &Hash) -> Result<Vec<Blob>>;
fn retrieve_all(&self) -> Result<Vec<Blob>>; fn retrieve_all(&self) -> Result<Vec<Blob>>;
@ -64,4 +65,5 @@ pub trait UpStore {
job_container: JobContainer, job_container: JobContainer,
initial: bool, initial: bool,
) -> Result<Vec<UpdatePathOutcome>>; ) -> Result<Vec<UpdatePathOutcome>>;
fn stats(&self) -> Result<serde_json::Value>;
} }

View file

@ -720,48 +720,9 @@ pub async fn api_refresh(
#[get("/api/store")] #[get("/api/store")]
pub async fn store_info(state: web::Data<State>) -> Result<HttpResponse, Error> { pub async fn store_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
// let connection = state.upend.connection().map_err(ErrorInternalServerError)?; Ok(HttpResponse::Ok().json(json!({
// let files = web::block(move || connection.retrieve_all_files()) "main": state.store.stats().map_err(ErrorInternalServerError)?
// .await })))
// .map_err(ErrorInternalServerError)?;
// let mut files_by_hash = HashMap::new();
// for file in &files {
// if !files_by_hash.contains_key(&file.hash) {
// files_by_hash.insert(&file.hash, vec![]);
// }
// files_by_hash.get_mut(&file.hash).unwrap().push(file);
// }
// for paths in files_by_hash.values_mut() {
// paths.sort_unstable_by_key(|f| !f.valid);
// }
// let mut blobs = files_by_hash
// .iter()
// .map(|(hash, files)| {
// json!({
// "hash": hash,
// "size": files[0].size,
// "paths": files.iter().map(|f| json!({
// "added": f.added,
// "valid": f.valid,
// "path": f.path
// })).collect::<serde_json::Value>()
// })
// })
// .collect::<Vec<serde_json::Value>>();
// blobs.sort_unstable_by_key(|f| f["size"].as_u64().unwrap());
// blobs.reverse();
// Ok(HttpResponse::Ok().json(json!({
// "totals": {
// "count": files_by_hash.len(),
// "size": files_by_hash.iter().map(|(_, f)| f[0].size as u64).sum::<u64>()
// },
// "blobs": blobs
// })))
todo!();
} }
#[derive(Deserialize)] #[derive(Deserialize)]

View file

@ -134,7 +134,7 @@ export async function fetchInfo(): Promise<VaultInfo> {
return await response.json(); return await response.json();
} }
export async function fetchStoreInfo(): Promise<StoreInfo> { export async function fetchStoreInfo(): Promise<{ [key: string]: StoreInfo }> {
const response = await fetch(`${API_URL}/store`); const response = await fetch(`${API_URL}/store`);
return await response.json(); return await response.json();
} }

View file

@ -5,14 +5,16 @@
import Spinner from "../components/utils/Spinner.svelte"; import Spinner from "../components/utils/Spinner.svelte";
import { fetchStoreInfo } from "../lib/api"; import { fetchStoreInfo } from "../lib/api";
const store = fetchStoreInfo(); const stores = fetchStoreInfo();
</script> </script>
<div class="store"> <div class="store">
<h1>Store</h1> <h1>Stores</h1>
{#await store} {#await stores}
<Spinner /> <Spinner />
{:then store} {:then stores}
{#each Object.entries(stores) as [key, store] (key)}
<h2>{key}</h2>
<div class="totals"> <div class="totals">
<strong>{store.totals.count}</strong> blobs, <strong>{store.totals.count}</strong> blobs,
<strong>{filesize(store.totals.size)}</strong> <strong>{filesize(store.totals.size)}</strong>
@ -60,6 +62,7 @@
</tbody> </tbody>
{/each} {/each}
</table> </table>
{/each}
{:catch error} {:catch error}
<div class="error"> <div class="error">
{error} {error}