diff --git a/src/morethantext/cache.rs b/src/morethantext/cache.rs
new file mode 100644
index 0000000..493b1f0
--- /dev/null
+++ b/src/morethantext/cache.rs
@@ -0,0 +1,202 @@
+use rand::{distributions::Alphanumeric, Rng, thread_rng};
+use super::{DBError, Store, SessionData};
+use std::{
+    cell::Cell,
+    time::{Duration, Instant},
+};
+
+enum DataType {
+    DBMap(Store),
+}
+
+impl DataType {
+    fn new(data_type: &str) -> Result<Self, DBError> {
+        match data_type {
+            "store" => Ok(DataType::DBMap(Store::new())),
+            _ => Err(DBError::new("invalid data type")),
+        }
+    }
+}
+
+impl SessionData for DataType {
+    fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
+        match self {
+            DataType::DBMap(dbs) => dbs.add(key, value, data),
+        }
+    }
+
+    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
+        match self {
+            DataType::DBMap(dbs) => dbs.eq(key, value),
+        }
+    }
+
+    fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
+        match self {
+            DataType::DBMap(dbs) => dbs.list(keys),
+        }
+    }
+}
+
+struct Entry {
+    data: DataType,
+    last_used: Cell<Instant>,
+}
+
+impl Entry {
+    fn new(data_type: &str) -> Result<Self, DBError> {
+        let data = match DataType::new(data_type) {
+            Ok(item) => item,
+            Err(err) => return Err(err),
+        };
+        Ok(Self {
+            data: data,
+            last_used: Cell::new(Instant::now()),
+        })
+    }
+
+    fn elapsed(&self) -> Duration {
+        self.last_used.get().elapsed()
+    }
+}
+
+impl SessionData for Entry {
+    fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
+        self.last_used.set(Instant::now());
+        self.data.add(key, value, data)
+    }
+
+    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
+        self.last_used.set(Instant::now());
+        self.data.eq(key, value)
+    }
+
+    fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
+        self.last_used.set(Instant::now());
+        self.data.list(keys)
+    }
+}
+
+struct Cache;
+
+impl Cache {
+    async fn new(dir: &str) -> Self {
+        Self
+    }
+}
+
+#[cfg(test)]
+mod datatype {
+    use super::*;
+
+    #[test]
+    fn invalid_cache_type() -> Result<(), DBError> {
+        match DataType::new("dkhgdl") {
+            Ok(_) => Err(DBError::new("invalid data type should raise an error")),
+            Err(err) => {
+                assert_eq!(err.to_string(), "invalid data type");
+                Ok(())
+            }
+        }
+    }
+
+    #[test]
+    fn create_storage() {
+        let dbs = DataType::new("store").unwrap();
+        let expected: Vec<String> = Vec::new();
+        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), expected);
+    }
+
+    #[test]
+    fn update_storage() {
+        let mut dbs = DataType::new("store").unwrap();
+        let name = "new_database";
+        let id = "someid";
+        dbs.add("database", name, id).unwrap();
+        assert_eq!(dbs.eq("database", name).unwrap(), [id].to_vec());
+        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), [name].to_vec());
+    }
+}
+
+#[cfg(test)]
+mod entry {
+    use super::*;
+
+    #[test]
+    fn invalid_cache_type() -> Result<(), DBError> {
+        match Entry::new("uydg") {
+            Ok(_) => Err(DBError::new("invalid data type should raise an error")),
+            Err(err) => {
+                assert_eq!(err.to_string(), "invalid data type");
+                Ok(())
+            }
+        }
+    }
+
+    #[test]
+    fn create_storage() {
+        let entry = Entry::new("store").unwrap();
+        assert!(
+            Duration::from_secs(1) > entry.elapsed(),
+            "Entry last used should be now."
+        );
+        let expected: Vec<String> = Vec::new();
+        assert_eq!(entry.list(["database"].to_vec()).unwrap(), expected);
+    }
+
+    #[test]
+    fn add_database_to_storage() {
+        let mut entry = Entry::new("store").unwrap();
+        entry
+            .last_used
+            .set(Instant::now() - Duration::from_secs(500));
+        let name = "roger";
+        let id = "cormick";
+        entry.add("database", name, id).unwrap();
+        assert!(
+            Duration::from_secs(1) > entry.elapsed(),
+            "Entry last used should be now."
+        );
+        assert_eq!(entry.eq("database", name).unwrap(), [id].to_vec());
+    }
+
+    #[test]
+    fn get_database_from_storage() {
+        let mut entry = Entry::new("store").unwrap();
+        let name = "jessica";
+        entry.add("database", name, "rabbit").unwrap();
+        entry
+            .last_used
+            .set(Instant::now() - Duration::from_secs(500));
+        entry.eq("database", name).unwrap();
+        assert!(
+            Duration::from_secs(1) > entry.elapsed(),
+            "Entry last used should be now."
+        );
+    }
+
+    #[test]
+    fn list_database_from_storage() {
+        let entry = Entry::new("store").unwrap();
+        entry
+            .last_used
+            .set(Instant::now() - Duration::from_secs(500));
+        entry.list(["database"].to_vec()).unwrap();
+        assert!(
+            Duration::from_secs(1) > entry.elapsed(),
+            "Entry last used should be now."
+        );
+    }
+}
+
+#[cfg(test)]
+mod cache {
+    use super::*;
+    use tempfile::tempdir;
+
+    #[async_std::test]
+    async fn create() {
+        let dir = tempdir().unwrap();
+        Cache::new(dir.path().to_str().unwrap()).await;
+    }
+}
diff --git a/src/morethantext/mod.rs b/src/morethantext/mod.rs
index 30c11c7..e4f856e 100644
--- a/src/morethantext/mod.rs
+++ b/src/morethantext/mod.rs
@@ -1,4 +1,5 @@
-mod databases;
+mod cache;
+mod store;
 pub mod error;
 
 use async_std::{
@@ -7,7 +8,7 @@ use async_std::{
     sync::{Arc, Mutex},
     task::{sleep, spawn},
 };
-use databases::Databases;
+use store::Store;
 use error::DBError;
 use rand::{distributions::Alphanumeric, thread_rng, Rng};
 use std::{
@@ -26,14 +27,14 @@ trait FileData {
 
 trait SessionData {
     fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError>;
-    fn get(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
+    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
     fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError>;
 }
 
 #[derive(Clone)]
 pub enum CacheType {
     Raw(String),
-    DBMap(Databases),
+    DBMap(Store),
     TableMap,
 }
 
@@ -81,7 +82,7 @@ impl CacheType {
                 }
                 Ok(CacheType::Raw(str::from_utf8(&output).unwrap().to_string()))
             }
-            "DBMap" => Ok(CacheType::DBMap(Databases::new())),
+            "DBMap" => Ok(CacheType::DBMap(Store::new())),
             "TableMap" => Ok(CacheType::TableMap),
             _ => Err(DBError::new("data corruption")),
         }
@@ -164,7 +165,7 @@ impl MoreThanText {
             id = str::from_utf8(&holder).unwrap().to_string();
         } else {
             id = output
-                .add_entry(CacheType::DBMap(Databases::new()))
+                .add_entry(CacheType::DBMap(Store::new()))
                 .await
                 .unwrap();
             write(entry_file, id.as_bytes()).await.unwrap();
@@ -195,6 +196,26 @@ impl MoreThanText {
         filename.into_os_string().into_string().unwrap()
     }
 
+    fn new_id(&self) -> String {
+        thread_rng().sample_iter(&Alphanumeric).take(64).collect()
+    }
+
+    async fn add(&self, feature: &str, key: &str, value: &str) -> Self {
+        let mut ids: Vec<String> = Vec::new();
+        for id in self.session.clone().into_iter() {
+            let holder = self.get_entry(&id).await.unwrap();
+            //holder.add(feature, key, value);
+        }
+        let mut output = self.clone();
+        output.session.clear();
+        output.session.push(value.to_string());
+        output
+    }
+
+    async fn list(&self, feature: Vec<&str>) -> Result<Vec<String>, DBError> {
+        Ok(Vec::new())
+    }
+
     async fn add_entry(&self, entry: CacheType) -> Result<String, DBError> {
         let mut id: String = "".to_string();
         let mut dup = true;
@@ -364,7 +385,35 @@ mod init {
 }
 
 #[cfg(test)]
-mod cache {
+mod data {
+    use super::*;
+    use setup::MTT;
+
+    #[async_std::test]
+    async fn ids_are_random() {
+        let mtt = MTT::new().await;
+        let id1 = mtt.db.new_id();
+        let id2 = mtt.db.new_id();
+        assert_ne!(id1, id2, "Ids should be random");
+    }
+
+    #[async_std::test]
+    async fn add_database() {
+        let mtt = MTT::new().await;
+        let name = "fred";
+        let id = "*gsdfg";
+        let output = mtt.db.add("database", name, id).await;
+        assert_eq!(output.session, [id], "should update session info.");
+        assert_eq!(
+            mtt.db.list(["database"].to_vec()).await.unwrap(),
+            [name],
+            "Should list the databases."
+        );
+    }
+}
+
+#[cfg(test)]
+mod cache_test {
     use super::*;
     use async_std::fs::read;
     use setup::MTT;
@@ -724,13 +773,13 @@ mod enum_ctype {
 
     #[test]
     fn get_dbmap_type() {
-        let holder = CacheType::DBMap(Databases::new());
+        let holder = CacheType::DBMap(Store::new());
         assert_eq!(holder.entry_type(), "DBMap");
     }
 
     #[test]
     fn get_new_databases_bytes() {
-        let holder = CacheType::DBMap(Databases::new());
+        let holder = CacheType::DBMap(Store::new());
         let mut expected = "DBMap".as_bytes().to_vec();
         expected.push(0);
         let output = holder.to_bytes();
diff --git a/src/morethantext/databases.rs b/src/morethantext/store.rs
similarity index 89%
rename from src/morethantext/databases.rs
rename to src/morethantext/store.rs
index 8fd4b36..b233718 100644
--- a/src/morethantext/databases.rs
+++ b/src/morethantext/store.rs
@@ -2,11 +2,11 @@ use super::{DBError, FileData, SessionData};
 use std::{collections::HashMap, slice, str};
 
 #[derive(Clone)]
-pub struct Databases {
+pub struct Store {
     db_map: HashMap<String, String>,
 }
 
-impl Databases {
+impl Store {
     pub fn new() -> Self {
         Self {
             db_map: HashMap::new(),
@@ -22,7 +22,7 @@
     }
 }
 
-impl FileData for Databases {
+impl FileData for Store {
     fn to_bytes(&self) -> Vec<u8> {
         let mut output = Vec::new();
         for (name, id) in self.db_map.iter() {
@@ -35,7 +35,7 @@
     }
 
     fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
-        let mut output = Databases::new();
+        let mut output = Store::new();
         let mut name: Vec<u8> = Vec::new();
         let mut id: Vec<u8> = Vec::new();
         let mut get_id = false;
@@ -80,7 +80,7 @@
     }
 }
 
-impl SessionData for Databases {
+impl SessionData for Store {
     fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
         match Self::test_key(key) {
             Ok(_) => (),
@@ -96,7 +96,7 @@
         Ok(output)
     }
 
-    fn get(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
+    fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
         match Self::test_key(key) {
             Ok(_) => (),
             Err(err) => return Err(err),
@@ -129,7 +129,7 @@
 
     #[test]
     fn to_bytes_new() {
-        let dbs = Databases::new();
+        let dbs = Store::new();
         let expected: Vec<u8> = Vec::new();
         let output = dbs.to_bytes();
         assert_eq!(output, expected);
     }
 
     #[test]
     fn to_bytes_with_database() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let name = "something";
         let id = "id";
         dbs.add("database", name, id).unwrap();
@@ -152,13 +152,13 @@
 
     #[test]
     fn from_bytes() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         dbs.add("database", "one", "1").unwrap();
         dbs.add("database", "two", "2").unwrap();
         dbs.add("database", "three", "3").unwrap();
         let data = dbs.to_bytes();
         let mut feed = data.iter();
-        let output = Databases::from_bytes(&mut feed).unwrap();
+        let output = Store::from_bytes(&mut feed).unwrap();
         assert_eq!(output.db_map, dbs.db_map);
     }
 
@@ -166,7 +166,7 @@
     fn from_bytes_incomplete_name() {
         let data = "notName".as_bytes();
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => assert_eq!(err.to_string(), "file corruption"),
         }
@@ -178,7 +178,7 @@
         data.push(0);
         data.append(&mut "nope".as_bytes().to_vec());
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => assert_eq!(err.to_string(), "file corruption"),
         }
@@ -196,7 +196,7 @@
         data.append(&mut "second".as_bytes().to_vec());
         data.push(0);
         let mut feed = data.iter();
-        match Databases::from_bytes(&mut feed) {
+        match Store::from_bytes(&mut feed) {
             Ok(_) => assert!(false, "This should have failed."),
             Err(err) => {
                 assert_eq!(err.to_string(), "file corruption");
@@ -219,18 +219,18 @@
 
     #[test]
     fn add_new() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let key = "database";
         let value = "marvin";
         let data = "123456";
         assert_eq!(dbs.add(key, value, data).unwrap(), [data]);
-        let output = dbs.get(key, value).unwrap();
+        let output = dbs.eq(key, value).unwrap();
         assert_eq!(output, [data]);
     }
 
     #[test]
     fn add_bad_key() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let key = "sdgfjksg";
         match dbs.add(key, "fred", "barney") {
             Ok(_) => assert!(false, "Bad keys should produce an error."),
@@ -240,9 +240,9 @@
 
     #[test]
     fn get_bad_key() {
-        let dbs = Databases::new();
+        let dbs = Store::new();
         let key = "bvdfgert";
-        match dbs.get(key, "fred") {
+        match dbs.eq(key, "fred") {
             Ok(_) => assert!(false, "Bad keys should produce an error."),
             Err(_) => (),
         }
@@ -250,7 +250,7 @@
 
     #[test]
     fn unique_names() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let value = "wilma";
         dbs.add("database", value, "something").unwrap();
         match dbs.add("database", value, "overwrite") {
@@ -264,14 +264,14 @@
 
     #[test]
     fn get_missing() {
-        let dbs = Databases::new();
-        let output = dbs.get("database", "melvin").unwrap();
+        let dbs = Store::new();
+        let output = dbs.eq("database", "melvin").unwrap();
         assert_eq!(output, Vec::<String>::new());
     }
 
     #[test]
     fn list_bad_keys() {
-        let dbs = Databases::new();
+        let dbs = Store::new();
         let key = "sdfgren";
         let keys = [key];
         match dbs.list(keys.to_vec()) {
@@ -282,7 +282,7 @@
 
     #[test]
     fn list_is_sorted() {
-        let mut dbs = Databases::new();
+        let mut dbs = Store::new();
         let mut data = ["fred", "barney", "wilma", "betty", "pebbles", "bambam"];
         for db in data {
             dbs.add("database", db, db).unwrap();