//! HTTP route handlers for the vault API server.
use crate::addressing::{Address, Addressable};
use crate::database::entry::{Entry, InEntry};
use crate::database::hierarchies::{list_node, UPath};
use crate::database::lang::Query;
use crate::database::{
    get_latest_files, insert_entry, query, remove_object, retrieve_file, retrieve_object, DbPool,
};
use crate::util::hash::{decode, encode};
use crate::util::jobs::JobContainer;
use actix_files::NamedFile;
use actix_web::error::{ErrorBadRequest, ErrorInternalServerError, ErrorNotFound};
use actix_web::{delete, error, get, post, put, web, Error, HttpResponse};
use anyhow::Result;
use futures_util::StreamExt;
use log::{debug, trace};
use serde::Deserialize;
use serde_json::json;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};
|
|
|
/// Crate version string, reported to clients by the `/api/info` endpoint.
const VERSION: &str = env!("CARGO_PKG_VERSION");
|
|
|
|
/// Shared application state handed to every actix-web handler via `web::Data`.
#[derive(Clone)]
pub struct State {
    // Human-readable vault name returned by `/api/info`; may be unset.
    pub vault_name: Option<String>,
    // Vault root directory on disk; raw file paths are joined onto it.
    pub directory: PathBuf,
    // Database connection pool; handlers call `.get()` per request.
    pub db_pool: DbPool,
    // Background-job registry shared across handlers (read by `/api/jobs`,
    // written by the rescan task spawned from `/api/refresh`).
    pub job_container: Arc<RwLock<JobContainer>>,
}
|
|
|
|
#[get("/api/raw/{hash}")]
|
|
pub async fn get_raw(state: web::Data<State>, hash: web::Path<String>) -> Result<NamedFile, Error> {
|
|
let address = Address::decode(&decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
|
|
.map_err(ErrorInternalServerError)?;
|
|
if let Address::Hash(hash) = address {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let response = retrieve_file(&connection, hash);
|
|
|
|
debug!("{:?}", response);
|
|
|
|
match response {
|
|
Ok(result) => match result.get(0) {
|
|
Some(file) => Ok(NamedFile::open(state.directory.join(&file.path))?),
|
|
None => Err(error::ErrorNotFound("NOT FOUND")),
|
|
},
|
|
Err(e) => Err(error::ErrorInternalServerError(e)),
|
|
}
|
|
} else {
|
|
Err(ErrorBadRequest("Address does not refer to a file."))
|
|
}
|
|
}
|
|
|
|
/// Query-string parameters accepted by `GET /api/obj`.
#[derive(Deserialize)]
pub struct QueryRequest {
    // S-expression query text; parsed with `lexpr` in `get_query`.
    query: String,
}
|
|
|
|
#[get("/api/obj")]
|
|
pub async fn get_query(
|
|
state: web::Data<State>,
|
|
web::Query(info): web::Query<QueryRequest>,
|
|
) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
let sexp = lexpr::from_str(info.query.as_str()).map_err(ErrorBadRequest)?;
|
|
let in_query = Query::try_from(&sexp).map_err(ErrorBadRequest)?;
|
|
let entries = query(&connection, in_query).map_err(ErrorInternalServerError)?;
|
|
let mut result: HashMap<String, Entry> = HashMap::new();
|
|
for entry in entries {
|
|
result.insert(
|
|
encode(
|
|
entry
|
|
.address()
|
|
.map_err(ErrorInternalServerError)?
|
|
.encode()
|
|
.map_err(ErrorInternalServerError)?,
|
|
),
|
|
entry,
|
|
);
|
|
}
|
|
|
|
Ok(HttpResponse::Ok().json(&result))
|
|
}
|
|
|
|
#[get("/api/obj/{address_str}")]
|
|
pub async fn get_object(
|
|
state: web::Data<State>,
|
|
address_str: web::Path<String>,
|
|
) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let response: Result<Vec<Entry>> = retrieve_object(
|
|
&connection,
|
|
Address::decode(&decode(address_str.into_inner()).map_err(ErrorBadRequest)?)
|
|
.map_err(ErrorBadRequest)?,
|
|
);
|
|
|
|
debug!("{:?}", response);
|
|
|
|
let mut result: HashMap<String, Entry> = HashMap::new();
|
|
for entry in response.map_err(error::ErrorInternalServerError)? {
|
|
result.insert(
|
|
encode(
|
|
entry
|
|
.address()
|
|
.map_err(ErrorInternalServerError)?
|
|
.encode()
|
|
.map_err(ErrorInternalServerError)?,
|
|
),
|
|
entry,
|
|
);
|
|
}
|
|
Ok(HttpResponse::Ok().json(result))
|
|
}
|
|
|
|
/// Maximum accepted request-body size for `PUT /api/obj`, in bytes (~1 MB).
const MAX_SIZE: usize = 1_000_000;
|
|
|
|
#[put("/api/obj")]
|
|
pub async fn put_object(
|
|
state: web::Data<State>,
|
|
mut payload: web::Payload,
|
|
) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
let mut body = web::BytesMut::new();
|
|
while let Some(chunk) = payload.next().await {
|
|
let chunk = chunk?;
|
|
// limit max size of in-memory payload
|
|
if (body.len() + chunk.len()) > MAX_SIZE {
|
|
return Err(error::ErrorBadRequest("overflow."));
|
|
}
|
|
body.extend_from_slice(&chunk);
|
|
}
|
|
|
|
let in_entry = serde_json::from_slice::<InEntry>(&body).map_err(ErrorBadRequest)?;
|
|
let entry = Entry::try_from(in_entry).map_err(ErrorInternalServerError)?;
|
|
|
|
let result_address =
|
|
insert_entry(&connection, entry.clone()).map_err(ErrorInternalServerError)?;
|
|
|
|
Ok(HttpResponse::Ok().json(
|
|
[(
|
|
encode(result_address.encode().map_err(ErrorInternalServerError)?),
|
|
entry,
|
|
)]
|
|
.iter()
|
|
.cloned()
|
|
.collect::<HashMap<String, Entry>>(),
|
|
))
|
|
}
|
|
|
|
#[delete("/api/obj/{address_str}")]
|
|
pub async fn delete_object(
|
|
state: web::Data<State>,
|
|
address_str: web::Path<String>,
|
|
) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let _ = remove_object(
|
|
&connection,
|
|
Address::decode(&decode(address_str.into_inner()).map_err(ErrorBadRequest)?)
|
|
.map_err(ErrorInternalServerError)?,
|
|
)
|
|
.map_err(ErrorInternalServerError)?;
|
|
|
|
Ok(HttpResponse::Ok().finish())
|
|
}
|
|
|
|
#[get("/api/hier/{path:.*}")]
|
|
pub async fn list_hier(
|
|
state: web::Data<State>,
|
|
path: web::Path<String>,
|
|
) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let upath: UPath = path.into_inner().parse().map_err(ErrorBadRequest)?;
|
|
trace!("Listing path \"{}\"", upath);
|
|
let entries: Vec<Entry> = list_node(&connection, &upath)
|
|
.await
|
|
.map_err(ErrorNotFound)?; // todo: 500 if actual error occurs
|
|
|
|
Ok(HttpResponse::Ok().json(entries))
|
|
}
|
|
|
|
#[post("/api/refresh")]
|
|
pub async fn api_refresh(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
|
let _pool = state.db_pool.clone();
|
|
let _directory = state.directory.clone();
|
|
actix::spawn(crate::filesystem::rescan_vault(
|
|
_pool,
|
|
_directory,
|
|
state.job_container.clone(),
|
|
));
|
|
Ok(HttpResponse::Ok().finish())
|
|
}
|
|
|
|
#[get("/api/files/{hash}")]
|
|
pub async fn get_file(
|
|
state: web::Data<State>,
|
|
hash: web::Path<String>,
|
|
) -> Result<HttpResponse, Error> {
|
|
let address = Address::decode(&decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
|
|
.map_err(ErrorInternalServerError)?;
|
|
|
|
if let Address::Hash(hash) = address {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let response = retrieve_file(&connection, hash).map_err(ErrorInternalServerError)?;
|
|
|
|
Ok(HttpResponse::Ok().json(response))
|
|
} else {
|
|
Err(ErrorBadRequest("Address does not refer to a file."))
|
|
}
|
|
}
|
|
|
|
#[get("/api/files/latest")]
|
|
pub async fn latest_files(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
let files = get_latest_files(&connection, 100).map_err(ErrorInternalServerError)?;
|
|
Ok(HttpResponse::Ok().json(&files))
|
|
}
|
|
|
|
#[get("/api/jobs")]
|
|
pub async fn get_jobs(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
|
let jobs = state.job_container.read().unwrap().get_jobs();
|
|
Ok(HttpResponse::Ok().json(&jobs))
|
|
}
|
|
|
|
#[get("/api/info")]
|
|
pub async fn get_info(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
|
Ok(HttpResponse::Ok().json(json!({
|
|
"name": state.vault_name,
|
|
"location": state.directory,
|
|
"version": VERSION
|
|
})))
|
|
}
|