diff --git a/Cargo.lock b/Cargo.lock
index 9c72870..a439597 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -572,6 +572,7 @@ dependencies = [
  "diesel_derives 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libsqlite3-sys 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "r2d2 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1528,6 +1529,24 @@ dependencies = [
  "unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "thiserror"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thiserror-impl 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "thread_local"
 version = "1.0.1"
@@ -1711,6 +1730,11 @@ name = "unicode-xid"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "unsigned-varint"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "upend"
 version = "0.1.0"
@@ -1731,7 +1755,11 @@ dependencies = [
  "filebuffer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thiserror 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)",
  "tiny-keccak 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unsigned-varint 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "webbrowser 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "xdg 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1747,6 +1775,14 @@ dependencies = [
  "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "uuid"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "v_escape"
 version = "0.7.4"
@@ -2111,6 +2147,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum synstructure 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701"
 "checksum termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f"
 "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+"checksum thiserror 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)" = "7dfdd070ccd8ccb78f4ad66bf1982dc37f620ef696c6b5028fe2ed83dd3d0d08"
+"checksum thiserror-impl 1.0.20 (registry+https://github.com/rust-lang/crates.io-index)" = "bd80fc12f73063ac132ac92aceea36734f04a1d93c1240c6944e23a3b8841793"
 "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
 "checksum threadpool 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
 "checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
@@ -2130,7 +2168,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
 "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
 "checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+"checksum unsigned-varint 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a98e44fc6af1e18c3a06666d829b4fd8d2714fb2dbffe8ab99d5dc7ea6baa628"
 "checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb"
+"checksum uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
 "checksum v_escape 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "660b101c07b5d0863deb9e7fb3138777e858d6d2a79f9e6049a27d1cc77c6da6"
 "checksum v_escape_derive 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c2ca2a14bc3fc5b64d188b087a7d3a927df87b152e941ccfbc66672e20c467ae"
 "checksum v_htmlescape 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e33e939c0d8cf047514fb6ba7d5aac78bc56677a6938b2ee67000b91f2e97e41"
diff --git a/Cargo.toml b/Cargo.toml
index 8d7d706..c9464f3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,8 +13,9 @@
 env_logger = "0.7.1"
 log = "0.4"
 anyhow = "1.0"
+thiserror = "1.0"
 
-diesel = {version = "1.4", features = ["sqlite", "r2d2", "chrono"]}
+diesel = {version = "1.4", features = ["sqlite", "r2d2", "chrono", "serde_json"]}
 diesel_migrations = "1.4"
 
 actix = "0.9.0"
@@ -25,10 +26,13 @@ actix_derive = "0.3.2"
 
 chrono = {version = "0.4", features = ["serde"]}
 serde = {version = "1.0", features = ["derive"]}
+serde_json = "1.0"
 
 bs58 = "0.3.1"
 filebuffer = "0.4.0"
 tiny-keccak = {version = "2.0", features = ["k12"]}
+unsigned-varint = "0.5.0"
+uuid = {version = "0.8", features = ["v4"]}
 
 walkdir = "2"
 dotenv = "0.15.0"
diff --git a/migrations/upend/00_initial_structure/up.sql b/migrations/upend/00_initial_structure/up.sql
index c3bf5ac..b4f0a96 100644
--- a/migrations/upend/00_initial_structure/up.sql
+++ b/migrations/upend/00_initial_structure/up.sql
@@ -1,3 +1,11 @@
+CREATE TABLE meta (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    key VARCHAR NOT NULL,
+    value VARCHAR NOT NULL
+);
+
+INSERT INTO meta (key, value) VALUES ('version', '0.1.0');
+
 CREATE TABLE files (
     id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
     hash VARCHAR NOT NULL,
diff --git a/src/addressing.rs b/src/addressing.rs
new file mode 100644
index 0000000..5768edf
--- /dev/null
+++ b/src/addressing.rs
@@ -0,0 +1,98 @@
+use std::convert::TryInto;
+use std::io::prelude::*;
+use std::io::Cursor;
+
+use anyhow::{anyhow, Result};
+use unsigned_varint::encode;
+use uuid::Uuid;
+
+use crate::hash::{encode, Hash};
+
+#[derive(Debug)]
+pub enum Address {
+    Hash(Hash),
+    UUID(Uuid),
+}
+
+// multihash KangarooTwelve
+const KANGAROO_TWELVE: u128 = 0x1d01;
+
+// multihash identity
+const IDENTITY: u128 = 0x00;
+
+impl Address {
+    pub fn encode(&self) -> Result<Vec<u8>> {
+        let (hash_func_type, digest) = match self {
+            Self::Hash(hash) => (KANGAROO_TWELVE, hash.0.clone()),
+            Self::UUID(uuid) => (IDENTITY, uuid.as_bytes().to_vec()),
+        };
+
+        let mut result = Cursor::new(vec![0u8; 0]);
+        result.write(encode::u128(hash_func_type, &mut encode::u128_buffer()))?;
+        result.write(encode::usize(digest.len(), &mut encode::usize_buffer()))?;
+        result.write(digest.as_slice())?;
+
+        Ok(result.get_ref().clone())
+    }
+
+    pub fn decode(buffer: &Vec<u8>) -> Result<Self> {
+        let (hash_func_type, rest) =
+            unsigned_varint::decode::u128(buffer).map_err(|_| anyhow!("varint decode error"))?;
+        let (digest_len, rest) =
+            unsigned_varint::decode::usize(rest).map_err(|_| anyhow!("varint decode error"))?;
+        let digest = rest;
+        if digest_len != digest.len() {
+            Err(anyhow!(
+                "Actual digest length does not match declared digest length."
+            ))
+        } else {
+            match hash_func_type {
+                KANGAROO_TWELVE => Ok(Self::Hash(Hash(Vec::from(digest)))),
+                IDENTITY => Ok(Self::UUID(uuid::Uuid::from_bytes(
+                    TryInto::<[u8; 16]>::try_into(digest).unwrap(),
+                ))),
+                _ => Err(anyhow!("Unknown hash function type.")),
+            }
+        }
+    }
+}
+
+impl std::fmt::Display for Address {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", encode(self.encode().map_err(|_| std::fmt::Error)?))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use uuid::Uuid;
+
+    use crate::addressing::Address;
+    use crate::hash::Hash;
+
+    #[test]
+    fn test_hash_codec() {
+        let addr = Address::Hash(Hash(vec![1, 2, 3, 4, 5]));
+
+        let encoded = addr.encode();
+        assert!(encoded.is_ok());
+
+        let decoded = Address::decode(&encoded.unwrap());
+        assert!(decoded.is_ok());
+
+        assert_eq!(format!("{}", addr), format!("{}", decoded.unwrap()));
+    }
+
+    #[test]
+    fn test_uuid_codec() {
+        let addr = Address::UUID(Uuid::new_v4());
+
+        let encoded = addr.encode();
+        assert!(encoded.is_ok());
+
+        let decoded = Address::decode(&encoded.unwrap());
+        assert!(decoded.is_ok());
+
+        assert_eq!(format!("{}", addr), format!("{}", decoded.unwrap()));
+    }
+}
diff --git a/src/database.rs b/src/database.rs
index d1da8d9..ecae17f 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -1,13 +1,83 @@
-use crate::models::NewFile;
+use std::io::{Cursor, Write};
+use std::path::{Path, PathBuf};
+use std::time::Duration;
+
 use actix::prelude::*;
 use actix_derive::Message;
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use diesel::prelude::*;
 use diesel::r2d2::{self, ConnectionManager};
 use diesel::sqlite::SqliteConnection;
 use log::debug;
-use std::path::{Path, PathBuf};
-use std::time::Duration;
+
+use crate::addressing::Address;
+use crate::hash::{decode, encode, hash, Hash, Hashable};
+use crate::models;
+
+#[derive(Debug)]
+pub struct Entry {
+    identity: Hash,
+    target: Address,
+    key: String,
+    value: EntryValue,
+}
+
+#[derive(Debug)]
+pub struct InnerEntry {
+    target: Address,
+    key: String,
+    value: EntryValue,
+}
+
+#[derive(Debug)]
+pub enum EntryValue {
+    Value(serde_json::Value),
+    Address(Address),
+    Invalid,
+}
+
+impl EntryValue {
+    fn to_str(&self) -> Result<String> {
+        let (type_char, content) = match self {
+            EntryValue::Value(value) => ('J', serde_json::to_string(value)?),
+            EntryValue::Address(address) => ('O', encode(address.encode()?)),
+            EntryValue::Invalid => return Err(anyhow!("Cannot serialize invalid Entity value.")),
+        };
+
+        Ok(format!("{}{}", type_char, content))
+    }
+}
+
+impl std::str::FromStr for EntryValue {
+    type Err = std::convert::Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        if s.len() < 2 {
+            Ok(EntryValue::Invalid)
+        } else {
+            let (type_char, content) = s.split_at(1);
+            match (type_char, content) {
+                ("J", content) => {
+                    let value = serde_json::from_str(content);
+                    if value.is_ok() {
+                        Ok(EntryValue::Value(value.unwrap()))
+                    } else {
+                        Ok(EntryValue::Invalid)
+                    }
+                }
+                ("O", content) => {
+                    let addr = decode(content).and_then(|v| Address::decode(&v));
+                    if addr.is_ok() {
+                        Ok(EntryValue::Address(addr.unwrap()))
+                    } else {
+                        Ok(EntryValue::Invalid)
+                    }
+                }
+                _ => Ok(EntryValue::Invalid),
+            }
+        }
+    }
+}
 
 pub type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
 
@@ -20,7 +90,7 @@ impl Actor for DbExecutor {
 #[derive(Message)]
 #[rtype(result = "Result<usize>")]
 pub struct InsertFile {
-    pub file: NewFile,
+    pub file: models::NewFile,
 }
 
 #[derive(Message)]
@@ -30,11 +100,23 @@ pub struct RetrieveByHash {
     pub hash: String,
 }
 
 #[derive(Message)]
-#[rtype(result = "Result<Vec<File>>")]
+#[rtype(result = "Result<Vec<models::File>>")]
 pub struct LookupByFilename {
     pub query: String,
 }
 
+#[derive(Message)]
+#[rtype(result = "Result<Vec<Entry>>")]
+pub struct RetrieveEntries {
+    pub hash: Vec<u8>,
+}
+
+#[derive(Message)]
+#[rtype(result = "Result<usize>")]
+pub struct InsertEntry {
+    pub entry: InnerEntry,
+}
+
 impl Handler<InsertFile> for DbExecutor {
     type Result = Result<usize>;
@@ -55,7 +137,6 @@ impl Handler<RetrieveByHash> for DbExecutor {
     type Result = Result<Option<String>>;
 
     fn handle(&mut self, msg: RetrieveByHash, _: &mut Self::Context) -> Self::Result {
-        use crate::models::File;
         use crate::schema::files::dsl::*;
 
         let connection = &self.0.get()?;
@@ -63,17 +144,16 @@
         let matches = files
             .filter(hash.eq(msg.hash))
             .filter(valid.eq(true))
-            .load::<File>(connection)?;
+            .load::<models::File>(connection)?;
 
         Ok(matches.get(0).map(|f| f.path.clone()))
     }
 }
 
 impl Handler<LookupByFilename> for DbExecutor {
-    type Result = Result<Vec<File>>;
+    type Result = Result<Vec<models::File>>;
 
     fn handle(&mut self, msg: LookupByFilename, _: &mut Self::Context) -> Self::Result {
-        use crate::models::File;
         use crate::schema::files::dsl::*;
 
         let connection = &self.0.get()?;
@@ -81,12 +161,73 @@
         let matches = files
             .filter(path.like(format!("%{}%", msg.query)))
             .filter(valid.eq(true))
-            .load::<File>(connection)?;
+            .load::<models::File>(connection)?;
 
         Ok(matches)
     }
 }
 
+impl Handler<RetrieveEntries> for DbExecutor {
+    type Result = Result<Vec<Entry>>;
+
+    fn handle(&mut self, msg: RetrieveEntries, _: &mut Self::Context) -> Self::Result {
+        use crate::schema::data::dsl::*;
+
+        let connection = &self.0.get()?;
+
+        let matches = data
+            .filter(target.eq(msg.hash))
+            .load::<models::Entry>(connection)?;
+        let entries = matches
+            .into_iter()
+            .map(|e| -> Result<Entry> {
+                Ok(Entry {
+                    identity: Hash(e.identity),
+                    target: Address::decode(&e.target)?,
+                    key: e.key,
+                    value: e.value.parse().unwrap(),
+                })
+            })
+            .filter_map(Result::ok)
+            .collect();
+
+        Ok(entries)
+    }
+}
+
+impl Handler<InsertEntry> for DbExecutor {
+    type Result = Result<usize>;
+
+    fn handle(&mut self, msg: InsertEntry, _: &mut Self::Context) -> Self::Result {
+        use crate::schema::data;
+
+        let connection = &self.0.get()?;
+
+        debug!("Inserting {:?}...", msg.entry);
+
+        let insert_entry = models::Entry {
+            identity: msg.entry.hash()?.0,
+            target: msg.entry.target.encode()?,
+            key: msg.entry.key,
+            value: msg.entry.value.to_str()?,
+        };
+
+        Ok(diesel::insert_into(data::table)
+            .values(insert_entry)
+            .execute(connection)?)
+    }
+}
+
+impl Hashable for InnerEntry {
+    fn hash(self: &InnerEntry) -> Result<Hash> {
+        let mut result = Cursor::new(vec![0u8; 0]);
+        result.write(self.target.encode()?.as_slice())?;
+        result.write(self.key.as_bytes())?;
+        result.write(self.value.to_str()?.as_bytes())?;
+        Ok(hash(result.get_ref()))
+    }
+}
+
 #[derive(Debug)]
 pub struct ConnectionOptions {
     pub enable_foreign_keys: bool,
@@ -172,19 +313,3 @@ pub fn open_upend<P: AsRef<Path>>(dirpath: P) -> Result<OpenResult> {
 
     Ok(OpenResult { pool, new })
 }
-
-// extern crate xdg;
-
-// pub fn open_config() -> Result<SqliteConnection> {
-//     embed_migrations!("./migrations/config/");
-
-//     let dirpath = xdg::BaseDirectories::with_prefix("upend")
-//         .unwrap()
-//         .place_config_file("config.sqlite3")
-//         .expect("Could not create config file?!");
-
-//     let database_url = dirpath.join("base.sqlite3");
-//     let connection = SqliteConnection::establish(database_url.to_str().unwrap())?;
-//     embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
-//     Ok(connection)
-// }
diff --git a/src/dataops.rs b/src/filesystem.rs
similarity index 51%
rename from src/dataops.rs
rename to src/filesystem.rs
index 1aa317f..46e277a 100644
--- a/src/dataops.rs
+++ b/src/filesystem.rs
@@ -1,41 +1,14 @@
-use crate::models::NewFile;
-use anyhow::Result;
-use filebuffer::FileBuffer;
-use log::{info, warn};
 use std::fs;
-use std::io;
 use std::path::{Path, PathBuf};
-use tiny_keccak::{Hasher, KangarooTwelve};
-use walkdir::WalkDir;
 
 use actix::prelude::*;
+use anyhow::Result;
 use chrono::prelude::*;
-// use rayon::prelude::*;
+use log::{info, warn};
+use walkdir::WalkDir;
 
-// pub struct VaultUpdater(
-//     pub Addr<DbExecutor>,
-//     pub Addr<HasherWorker>,
-// );
-
-// impl Actor for VaultUpdater {
-//     type Context = Context<Self>;
-// }
-
-// struct UpdateDirectory<'a> {
-//     path: &'a Path,
-// }
-
-// impl Message for UpdateDirectory<'_> {
-//     type Result = Result<(), Box<dyn std::error::Error>>;
-// }
-
-// impl Handler<UpdateDirectory<'_>> for VaultUpdater {
-//     type Result = Result<(), Box<dyn std::error::Error>>;
-
-//     fn handle(&mut self, msg: UpdateDirectory, _: &mut Self::Context) -> Self::Result {
-//         update_directory(msg.path, &self.0, &self.1).await
-//     }
-// }
+use crate::hash::{encode, ComputeHash, Hash, HasherWorker};
+use crate::models::NewFile;
 
 pub async fn update_directory<T: AsRef<Path>>(
     directory: T,
@@ -59,7 +32,7 @@
             path: entry.path().to_path_buf(),
         };
 
-        let digest = hasher_worker.send(msg).await;
+        let digest: Result<Result<Hash>, MailboxError> = hasher_worker.send(msg).await;
 
         let new_file = NewFile {
             path: entry
@@ -67,7 +40,7 @@
                 .to_str()
                 .expect("path not valid unicode?!")
                 .to_string(),
-            hash: digest.unwrap().unwrap(),
+            hash: encode(digest??.0),
             size,
             created: NaiveDateTime::from_timestamp(Utc::now().timestamp(), 0),
         };
@@ -91,34 +64,3 @@
         warn!("Update did not succeed!");
     }
 }
-
-pub struct HasherWorker;
-
-impl Actor for HasherWorker {
-    type Context = SyncContext<Self>;
-}
-
-#[derive(Message)]
-#[rtype(result = "Result<String>")]
-struct ComputeHash {
-    path: PathBuf,
-}
-
-impl Handler<ComputeHash> for HasherWorker {
-    type Result = Result<String>;
-
-    fn handle(&mut self, msg: ComputeHash, _: &mut Self::Context) -> Self::Result {
-        compute_digest(msg.path)
-    }
-}
-
-pub fn compute_digest<P: AsRef<Path>>(filepath: P) -> Result<String> {
-    let fbuffer = FileBuffer::open(&filepath)?;
-    let mut k12 = KangarooTwelve::new(b"");
-    k12.update(&fbuffer);
-
-    let mut result = [0u8; 256 / 8];
-    k12.finalize(&mut result);
-
-    Ok(bs58::encode(&result).into_string())
-}
diff --git a/src/hash.rs b/src/hash.rs
new file mode 100644
index 0000000..bf39c95
--- /dev/null
+++ b/src/hash.rs
@@ -0,0 +1,83 @@
+use std::path::{Path, PathBuf};
+
+use actix::prelude::*;
+use anyhow::{anyhow, Result};
+use bs58;
+use filebuffer::FileBuffer;
+use tiny_keccak::{Hasher, KangarooTwelve};
+
+#[derive(Debug)]
+pub struct Hash(pub Vec<u8>);
+
+// impl Hash {
+//     pub fn encode(&self) -> String {
+//         encode(self.0.clone()).into_string()
+//     }
+//
+//     pub fn decode<T: AsRef<str>>(string: T) -> Result<Hash> {
+//         Ok(Hash(decode(string.as_ref())?))
+//     }
+// }
+
+// impl std::fmt::Display for Hash {
+//     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+//         write!(f, "{}", self.encode())
+//     }
+// }
+
+pub trait Hashable {
+    fn hash(&self) -> Result<Hash>;
+}
+
+pub struct HasherWorker;
+
+impl Actor for HasherWorker {
+    type Context = SyncContext<Self>;
+}
+
+#[derive(Message)]
+#[rtype(result = "Result<Hash>")]
+pub struct ComputeHash {
+    pub path: PathBuf,
+}
+
+impl Handler<ComputeHash> for HasherWorker {
+    type Result = Result<Hash>;
+
+    fn handle(&mut self, msg: ComputeHash, _: &mut Self::Context) -> Self::Result {
+        msg.path.hash()
+    }
+}
+
+impl Hashable for Path {
+    fn hash(self: &Path) -> Result<Hash> {
+        let fbuffer = FileBuffer::open(self)?;
+        Ok(hash(&fbuffer))
+    }
+}
+
+pub fn hash<T: AsRef<[u8]>>(input: T) -> Hash {
+    let mut k12 = KangarooTwelve::new(b"");
+    k12.update(input.as_ref());
+
+    let mut result = [0u8; 256 / 8];
+    k12.finalize(&mut result);
+    Hash(Vec::from(result))
+}
+
+pub fn encode<T: AsRef<[u8]>>(vec: T) -> String {
+    // multibase base58
+    format!("z{}", bs58::encode(vec).into_string())
+}
+
+pub fn decode<T: AsRef<str>>(string: T) -> Result<Vec<u8>> {
+    let string = string.as_ref();
+    let (base, data) = string.split_at(1);
+
+    // multibase base58
+    if base != "z" {
+        Err(anyhow!("data not base58 encoded, bailing"))
+    } else {
+        Ok(bs58::decode(data).into_vec()?)
+    }
+}
diff --git a/src/main.rs b/src/main.rs
index 89852d2..cd67d4a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,20 +1,21 @@
 #[macro_use]
 extern crate diesel;
-
 #[macro_use]
 extern crate diesel_migrations;
 
+use std::net::SocketAddr;
+use std::path::PathBuf;
+use std::{env, fs};
+
 use actix::prelude::*;
 use actix_web::{middleware, App, HttpServer};
 use clap::{App as ClapApp, Arg};
 use log::{info, warn};
-use std::env;
-use std::fs;
-use std::net::SocketAddr;
-use std::path::PathBuf;
 
+mod addressing;
 mod database;
-mod dataops;
+mod filesystem;
+mod hash;
 mod models;
 mod routes;
 mod schema;
@@ -54,7 +55,7 @@ fn main() -> std::io::Result<()> {
     let pool = open_result.pool;
 
     let db_addr = SyncArbiter::start(3, move || database::DbExecutor(pool.clone()));
-    let hash_addr = SyncArbiter::start(4, || dataops::HasherWorker);
+    let hash_addr = SyncArbiter::start(4, || hash::HasherWorker);
 
     let bind: SocketAddr = matches
         .value_of("BIND")
@@ -90,7 +91,9 @@
 
     if open_result.new {
         info!("The vault has been just created, running initial update...");
-        actix::spawn(dataops::update_directory_bg(vault_path, db_addr, hash_addr));
+        actix::spawn(filesystem::update_directory_bg(
+            vault_path, db_addr, hash_addr,
+        ));
     }
 
     // TODO REMOVE
diff --git a/src/models.rs b/src/models.rs
index 28dce4d..7029421 100644
--- a/src/models.rs
+++ b/src/models.rs
@@ -1,8 +1,9 @@
-use super::schema::files;
 use chrono::NaiveDateTime;
 use serde::Serialize;
 
-#[derive(Queryable, Serialize)]
+use super::schema::{data, files};
+
+#[derive(Queryable, Serialize, Debug)]
 pub struct File {
     pub id: i32,
     pub hash: String,
@@ -20,3 +21,12 @@ pub struct NewFile {
     pub size: i64,
     pub created: NaiveDateTime,
 }
+
+#[derive(Queryable, Insertable, Serialize, Debug)]
+#[table_name = "data"]
+pub struct Entry {
+    pub identity: Vec<u8>,
+    pub target: Vec<u8>,
+    pub key: String,
+    pub value: String,
+}
diff --git a/src/routes.rs b/src/routes.rs
index 9fe95d1..e8517a6 100644
--- a/src/routes.rs
+++ b/src/routes.rs
@@ -1,14 +1,15 @@
+use std::path::PathBuf;
+
 use actix::prelude::*;
 use actix_files::NamedFile;
 use actix_web::{error, get, post, web, Error, HttpResponse};
 use serde::Deserialize;
-use std::path::PathBuf;
 
 #[derive(Clone)]
 pub struct State {
     pub directory: PathBuf,
     pub db: Addr<crate::database::DbExecutor>,
-    pub hasher: Addr<crate::dataops::HasherWorker>,
+    pub hasher: Addr<crate::hash::HasherWorker>,
 }
 
 #[get("/raw/{hash}")]
@@ -49,7 +50,7 @@ pub async fn get_lookup(
 
 #[post("/api/refresh")]
 pub async fn api_refresh(state: web::Data<State>) -> Result<HttpResponse, Error> {
-    crate::dataops::update_directory(&state.directory, &state.db, &state.hasher)
+    crate::filesystem::update_directory(&state.directory, &state.db, &state.hasher)
         .await
         .map_err(|_| error::ErrorInternalServerError("UPDATE ERROR"))?;
 
diff --git a/src/schema.rs b/src/schema.rs
index 8637e4d..d6d1afc 100644
--- a/src/schema.rs
+++ b/src/schema.rs
@@ -18,7 +18,16 @@ table! {
     }
 }
 
+table! {
+    meta (id) {
+        id -> Nullable<Integer>,
+        key -> Text,
+        value -> Text,
+    }
+}
+
 allow_tables_to_appear_in_same_query!(
     data,
     files,
+    meta,
 );