add latest_files endpoint
parent
61a470dfe3
commit
8751e57433
|
@ -153,6 +153,20 @@ pub fn retrieve_all_files<C: Connection<Backend = Sqlite>>(
|
|||
Ok(matches)
|
||||
}
|
||||
|
||||
pub fn get_latest_files<C: Connection<Backend = Sqlite>>(
|
||||
connection: &C,
|
||||
count: i64,
|
||||
) -> Result<Vec<models::File>> {
|
||||
use crate::schema::files::dsl::*;
|
||||
|
||||
let matches = files
|
||||
.order_by(added.desc())
|
||||
.limit(count)
|
||||
.load::<models::File>(connection)?;
|
||||
|
||||
Ok(matches)
|
||||
}
|
||||
|
||||
pub fn retrieve_file<C: Connection<Backend = Sqlite>>(
|
||||
connection: &C,
|
||||
obj_hash: Hash,
|
||||
|
|
21
src/hash.rs
21
src/hash.rs
|
@ -1,3 +1,4 @@
|
|||
use crate::addressing::Address;
|
||||
use actix::prelude::*;
|
||||
use anyhow::{anyhow, Result};
|
||||
use diesel::backend::Backend;
|
||||
|
@ -5,11 +6,11 @@ use diesel::deserialize::FromSql;
|
|||
use diesel::sqlite::Sqlite;
|
||||
use diesel::{deserialize, sql_types};
|
||||
use filebuffer::FileBuffer;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{ser, Serialize, Serializer};
|
||||
use std::path::PathBuf;
|
||||
use tiny_keccak::{Hasher, KangarooTwelve};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, FromSqlRow, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, PartialEq, FromSqlRow)]
|
||||
pub struct Hash(pub Vec<u8>);
|
||||
|
||||
impl AsRef<[u8]> for Hash {
|
||||
|
@ -24,6 +25,22 @@ impl FromSql<sql_types::Binary, Sqlite> for Hash {
|
|||
}
|
||||
}
|
||||
|
||||
impl Serialize for Hash {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(
|
||||
encode(
|
||||
Address::Hash(self.clone())
|
||||
.encode()
|
||||
.map_err(ser::Error::custom)?,
|
||||
)
|
||||
.as_str(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Types that can produce a content [`Hash`] of themselves.
pub trait Hashable {
    /// Compute this value's hash; may fail (e.g. if reading the
    /// underlying data fails — implementations return `Result`).
    fn hash(&self) -> Result<Hash>;
}
|
||||
|
|
|
@ -95,6 +95,7 @@ fn main() -> Result<()> {
|
|||
.service(routes::delete_object)
|
||||
.service(routes::api_refresh)
|
||||
.service(routes::list_hier)
|
||||
.service(routes::latest_files)
|
||||
.service(routes::get_jobs)
|
||||
.service(
|
||||
actix_files::Files::new(
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use crate::addressing::Address;
|
||||
use crate::database::{
|
||||
insert_entry, query, remove_object, retrieve_file, retrieve_object, DbPool, Entry, Query,
|
||||
get_latest_files, insert_entry, query, remove_object, retrieve_file, retrieve_object, DbPool,
|
||||
Entry, Query,
|
||||
};
|
||||
use crate::filesystem::{list_directory, UPath};
|
||||
use crate::hash::{decode, encode};
|
||||
|
@ -176,6 +177,13 @@ pub async fn api_refresh(state: web::Data<State>) -> Result<HttpResponse, Error>
|
|||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
#[get("/api/files/latest")]
|
||||
pub async fn latest_files(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
||||
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
||||
let files = get_latest_files(&connection, 100).map_err(ErrorInternalServerError)?;
|
||||
Ok(HttpResponse::Ok().json(&files))
|
||||
}
|
||||
|
||||
#[get("/api/jobs")]
|
||||
pub async fn get_jobs(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
||||
let jobs = state.job_container.read().unwrap().get_jobs();
|
||||
|
|
Loading…
Reference in New Issue