2020-09-13 13:20:35 +02:00
|
|
|
use crate::addressing::Address;
|
2021-02-19 21:58:35 +01:00
|
|
|
use crate::database::{
|
|
|
|
insert_entry, query, remove_object, retrieve_file, retrieve_object, DbPool, Entry, Query,
|
|
|
|
};
|
2021-02-20 12:12:48 +01:00
|
|
|
use crate::filesystem::{list_directory, UPath};
|
2021-02-19 20:27:30 +01:00
|
|
|
use crate::hash::{decode, encode, Hashable};
|
2021-02-20 17:36:19 +01:00
|
|
|
use crate::jobs::JobContainer;
|
2020-08-27 01:07:25 +02:00
|
|
|
use actix_files::NamedFile;
|
2020-09-13 20:10:18 +02:00
|
|
|
use actix_web::error::{ErrorBadRequest, ErrorInternalServerError, ErrorNotFound};
|
2021-02-19 21:58:35 +01:00
|
|
|
use actix_web::{delete, error, get, post, put, web, Error, HttpResponse};
|
2020-09-13 13:20:35 +02:00
|
|
|
use anyhow::Result;
|
2021-02-19 21:58:35 +01:00
|
|
|
use futures_util::StreamExt;
|
2020-09-12 14:27:45 +02:00
|
|
|
use log::debug;
|
2020-08-27 01:29:44 +02:00
|
|
|
use serde::Deserialize;
|
2020-09-13 13:20:35 +02:00
|
|
|
use std::collections::HashMap;
|
2021-02-20 12:39:03 +01:00
|
|
|
use std::convert::TryFrom;
|
2020-09-13 13:20:35 +02:00
|
|
|
use std::path::PathBuf;
|
2021-02-20 17:36:19 +01:00
|
|
|
use std::sync::{Arc, RwLock};
|
2020-09-12 14:27:45 +02:00
|
|
|
|
2020-08-30 22:11:32 +02:00
|
|
|
/// Shared application state injected into every handler via `web::Data<State>`.
#[derive(Clone)]
pub struct State {
    /// Root directory on disk; file paths returned by the database are
    /// resolved relative to it (see `get_raw`).
    pub directory: PathBuf,
    /// Connection pool for the backing database.
    pub db_pool: DbPool,
    /// Container tracking background jobs, shared across workers behind a
    /// read-write lock (read-mostly: handlers read, the reimport task writes).
    pub job_container: Arc<RwLock<JobContainer>>,
}
|
|
|
|
|
2020-09-25 02:45:17 +02:00
|
|
|
#[get("/api/raw/{hash}")]
|
2020-08-27 01:07:25 +02:00
|
|
|
pub async fn get_raw(state: web::Data<State>, hash: web::Path<String>) -> Result<NamedFile, Error> {
|
2020-09-13 14:28:58 +02:00
|
|
|
let address = Address::decode(&decode(hash.into_inner()).map_err(ErrorInternalServerError)?)
|
|
|
|
.map_err(ErrorInternalServerError)?;
|
|
|
|
if let Address::Hash(hash) = address {
|
2020-09-15 19:26:47 +02:00
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
2020-09-22 00:41:59 +02:00
|
|
|
let response = retrieve_file(&connection, hash);
|
2020-08-27 01:07:25 +02:00
|
|
|
|
2020-09-13 14:28:58 +02:00
|
|
|
debug!("{:?}", response);
|
2020-09-12 14:27:45 +02:00
|
|
|
|
2020-09-13 14:28:58 +02:00
|
|
|
match response {
|
|
|
|
Ok(result) => match result {
|
2020-09-13 20:43:45 +02:00
|
|
|
Some(path) => Ok(NamedFile::open(state.directory.join(path))?),
|
2020-09-13 14:28:58 +02:00
|
|
|
None => Err(error::ErrorNotFound("NOT FOUND")),
|
|
|
|
},
|
|
|
|
Err(e) => Err(error::ErrorInternalServerError(e)),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Err(ErrorBadRequest("Address does not refer to a file."))
|
2020-08-27 01:07:25 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-20 12:12:48 +01:00
|
|
|
/// Query-string parameters accepted by `get_query` (`GET /api/obj?query=...`).
#[derive(Deserialize)]
pub struct QueryRequest {
    /// Raw S-expression query text; parsed with `lexpr` and converted into a
    /// database `Query` via `TryFrom`.
    query: String,
}
|
|
|
|
|
|
|
|
#[get("/api/obj")]
|
|
|
|
pub async fn get_query(
|
|
|
|
state: web::Data<State>,
|
|
|
|
web::Query(info): web::Query<QueryRequest>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
|
2021-02-21 10:38:31 +01:00
|
|
|
let sexp = lexpr::from_str(info.query.as_str()).map_err(ErrorBadRequest)?;
|
|
|
|
let in_query = Query::try_from(&sexp).map_err(ErrorBadRequest)?;
|
2021-02-20 12:12:48 +01:00
|
|
|
let result = query(&connection, in_query).map_err(ErrorInternalServerError)?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().json(result))
|
|
|
|
}
|
|
|
|
|
2020-09-29 00:55:09 +02:00
|
|
|
#[get("/api/obj/{address_str}")]
|
2020-09-13 13:20:35 +02:00
|
|
|
pub async fn get_object(
|
|
|
|
state: web::Data<State>,
|
|
|
|
address_str: web::Path<String>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
2020-09-15 19:26:47 +02:00
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
let response: Result<Vec<Entry>> = retrieve_object(
|
|
|
|
&connection,
|
2020-09-25 02:45:17 +02:00
|
|
|
Address::decode(&decode(address_str.into_inner()).map_err(ErrorBadRequest)?)
|
2020-09-13 13:20:35 +02:00
|
|
|
.map_err(ErrorInternalServerError)?,
|
2020-09-15 19:26:47 +02:00
|
|
|
);
|
2020-09-13 13:20:35 +02:00
|
|
|
|
|
|
|
debug!("{:?}", response);
|
|
|
|
|
2021-02-19 21:45:33 +01:00
|
|
|
let mut result: HashMap<String, Entry> = HashMap::new();
|
2020-09-13 14:28:58 +02:00
|
|
|
for entry in response.map_err(error::ErrorInternalServerError)? {
|
2021-02-19 20:27:30 +01:00
|
|
|
result.insert(
|
|
|
|
encode(entry.hash().map_err(ErrorInternalServerError)?),
|
2021-02-19 21:45:33 +01:00
|
|
|
entry,
|
2021-02-19 20:27:30 +01:00
|
|
|
);
|
2020-09-13 13:20:35 +02:00
|
|
|
}
|
|
|
|
Ok(HttpResponse::Ok().json(result))
|
|
|
|
}
|
|
|
|
|
2021-02-19 21:58:35 +01:00
|
|
|
const MAX_SIZE: usize = 1_000_000;
|
|
|
|
|
|
|
|
#[put("/api/obj")]
|
|
|
|
pub async fn put_object(
|
|
|
|
state: web::Data<State>,
|
|
|
|
mut payload: web::Payload,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
|
|
|
|
let mut body = web::BytesMut::new();
|
|
|
|
while let Some(chunk) = payload.next().await {
|
|
|
|
let chunk = chunk?;
|
|
|
|
// limit max size of in-memory payload
|
|
|
|
if (body.len() + chunk.len()) > MAX_SIZE {
|
|
|
|
return Err(error::ErrorBadRequest("overflow."));
|
|
|
|
}
|
|
|
|
body.extend_from_slice(&chunk);
|
|
|
|
}
|
|
|
|
|
|
|
|
let entry = serde_json::from_slice::<Entry>(&body)?;
|
|
|
|
|
2021-02-19 22:18:31 +01:00
|
|
|
let result_address = insert_entry(&connection, entry).map_err(ErrorInternalServerError)?;
|
2021-02-19 21:58:35 +01:00
|
|
|
|
2021-02-19 22:18:31 +01:00
|
|
|
Ok(HttpResponse::Ok().json(result_address))
|
2021-02-19 21:58:35 +01:00
|
|
|
}
|
|
|
|
|
2020-09-29 00:55:09 +02:00
|
|
|
#[delete("/api/obj/{address_str}")]
|
|
|
|
pub async fn delete_object(
|
|
|
|
state: web::Data<State>,
|
|
|
|
address_str: web::Path<String>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
|
|
|
let _ = remove_object(
|
|
|
|
&connection,
|
|
|
|
Address::decode(&decode(address_str.into_inner()).map_err(ErrorBadRequest)?)
|
|
|
|
.map_err(ErrorInternalServerError)?,
|
|
|
|
)
|
|
|
|
.map_err(ErrorInternalServerError)?;
|
|
|
|
|
|
|
|
Ok(HttpResponse::Ok().finish())
|
|
|
|
}
|
|
|
|
|
2020-09-25 02:45:17 +02:00
|
|
|
#[get("/api/hier/{path:.*}")]
|
2020-09-13 20:10:18 +02:00
|
|
|
pub async fn list_hier(
|
|
|
|
state: web::Data<State>,
|
|
|
|
path: web::Path<String>,
|
|
|
|
) -> Result<HttpResponse, Error> {
|
2020-09-15 19:26:47 +02:00
|
|
|
let connection = state.db_pool.get().map_err(ErrorInternalServerError)?;
|
2020-09-13 20:10:18 +02:00
|
|
|
let upath: UPath = path.into_inner().parse().map_err(ErrorBadRequest)?;
|
2020-09-15 19:26:47 +02:00
|
|
|
let entries: Vec<Entry> = list_directory(&connection, &upath)
|
2020-09-13 20:10:18 +02:00
|
|
|
.await
|
|
|
|
.map_err(ErrorNotFound)?; // todo: 500 if actual error occurs
|
|
|
|
|
2021-02-19 21:45:33 +01:00
|
|
|
Ok(HttpResponse::Ok().json(entries))
|
2020-09-13 20:10:18 +02:00
|
|
|
}
|
|
|
|
|
2020-08-27 01:07:25 +02:00
|
|
|
#[post("/api/refresh")]
|
|
|
|
pub async fn api_refresh(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
2020-09-20 19:28:44 +02:00
|
|
|
let _pool = state.db_pool.clone();
|
|
|
|
let _directory = state.directory.clone();
|
2021-02-20 17:36:19 +01:00
|
|
|
actix::spawn(crate::filesystem::reimport_directory(
|
|
|
|
_pool,
|
|
|
|
_directory,
|
|
|
|
state.job_container.clone(),
|
|
|
|
));
|
2020-08-27 01:07:25 +02:00
|
|
|
Ok(HttpResponse::Ok().finish())
|
|
|
|
}
|
2021-02-20 17:36:19 +01:00
|
|
|
|
|
|
|
#[get("/api/jobs")]
|
|
|
|
pub async fn get_jobs(state: web::Data<State>) -> Result<HttpResponse, Error> {
|
|
|
|
let jobs = state.job_container.read().unwrap().get_jobs();
|
|
|
|
Ok(HttpResponse::Ok().json(&jobs))
|
|
|
|
}
|