Compare commits
58 Commits
894fcbcd02
...
master
SHA1
2e73bddbab
800cad7ca3
95b2763442
460ca4b5a7
45522660bb
829b7242bd
49b0eaf2ec
e49d6f5e46
ea7dec2f4e
4dad6f7a05
de93ee1f2c
c70c006abd
e73fdbff75
05d445c58b
933d48a47c
c26089caed
d90dc3b9fc
a23b5d467e
c8b93d9922
ecfc8fdf90
dd1c45ddbe
30ea8d978c
914e7a8146
646e0597ca
50a4431316
9f368b0b95
69acfefc0c
0f8670f686
90ab5809b1
41afc07bfa
9faa96d307
30c9562542
448de012d3
bb23397eb0
da26693242
572abbeda8
e2d32f4a8c
659a2758bb
685ddfe32d
6d61af5136
4b0af019c8
ee45375085
c75bea3913
40aae3ef5a
1f36848450
d4d42a599e
3de5c2857b
abfa7114df
418ba26da9
e125d79a6c
a3d8feb37a
c477a92945
8fada737ac
691ab6a27e
0ffb30d322
c0e4a5fad7
ef48fde890
9b0154064f
.gitea/workflows/build.yaml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
---
name: MoreThanText build
run-name: ${{ gitea.actor }} runner
on: push

jobs:
  Build-MoreThanText:
    runs-on: ubuntu-latest
    steps:
      - run: cargo test
      - run: cargo build --release
Pipfile (2 changed lines)
@@ -11,4 +11,4 @@ black = "*"
pylint = "*"

[requires]
python_version = "3.9"
python_version = "3"
@@ -7,7 +7,7 @@ use tide::{
mod morethantext;
mod settings;

use morethantext::MoreThanText;
use morethantext::{start_db, MoreThanText};
use settings::Settings;

#[async_std::main]
@@ -20,7 +20,7 @@ async fn main() -> tide::Result<()> {
}

async fn app_setup(data_dir: &str) -> tide::Server<MoreThanText> {
    let db = MoreThanText::new(data_dir).await.unwrap();
    let db = start_db(data_dir).await.unwrap();
    let mut app = tide::with_state(db);
    app.at("/").get(home);
    app.with(
@@ -1,168 +1,336 @@
|
||||
use super::{DBError, SessionData, Store};
|
||||
use async_std::{fs::write, path::Path};
|
||||
use super::{Database, ErrorCode, FromCache, MTTError, Store, ToCache, ENTRY};
|
||||
use async_std::{channel::Receiver, path::PathBuf};
|
||||
use rand::{distributions::Alphanumeric, thread_rng, Rng};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
time::{Duration, Instant},
|
||||
collections::{HashMap, VecDeque},
|
||||
iter::Iterator,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
enum DataType {
|
||||
DBMap(Store),
|
||||
struct IDGenerator {
|
||||
ids: Option<VecDeque<String>>,
|
||||
}
|
||||
|
||||
impl DataType {
|
||||
fn new(data_type: &str) -> Result<Self, DBError> {
|
||||
match data_type {
|
||||
"store" => Ok(DataType::DBMap(Store::new())),
|
||||
_ => Err(DBError::new("invalid data type")),
|
||||
impl IDGenerator {
|
||||
fn new() -> Self {
|
||||
Self { ids: None }
|
||||
}
|
||||
|
||||
fn with_ids<T, D>(ids: T) -> Self
|
||||
where
|
||||
T: Into<Vec<D>>,
|
||||
D: Into<String>,
|
||||
{
|
||||
let id_list = ids.into();
|
||||
let mut data = VecDeque::new();
|
||||
for id in id_list {
|
||||
data.push_back(id.into());
|
||||
}
|
||||
Self { ids: Some(data) }
|
||||
}
|
||||
}
|
||||
|
||||
impl SessionData for DataType {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.add(key, value, data),
|
||||
impl Iterator for IDGenerator {
|
||||
type Item = String;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match &self.ids {
|
||||
Some(id_list) => {
|
||||
let mut ids = id_list.clone();
|
||||
let output = ids.pop_front();
|
||||
self.ids = Some(ids);
|
||||
output
|
||||
}
|
||||
None => Some(thread_rng().sample_iter(&Alphanumeric).take(64).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.eq(key, value),
|
||||
}
|
||||
}
|
||||
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.list(keys),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Entry {
|
||||
data: DataType,
|
||||
filename: String,
|
||||
last_used: Cell<Instant>,
|
||||
}
|
||||
|
||||
impl Entry {
|
||||
async fn new(filename: String, data: DataType) -> Result<Self, DBError> {
|
||||
if Path::new(&filename).exists().await {
|
||||
return Err(DBError::new("entry already exists"));
|
||||
}
|
||||
Ok(Self {
|
||||
data: data,
|
||||
filename: filename,
|
||||
last_used: Cell::new(Instant::now()),
|
||||
})
|
||||
}
|
||||
|
||||
async fn get(&self) -> Result<DataType, DBError> {
|
||||
Ok(self.data.clone())
|
||||
}
|
||||
}
|
||||
|
||||
struct Cache;
|
||||
|
||||
impl Cache {
|
||||
async fn new(dir: &str) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatype_sesssion {
|
||||
mod genid {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn invalid_cache_type() -> Result<(), DBError> {
|
||||
match DataType::new("dkhgdl") {
|
||||
Ok(_) => Err(DBError::new("invalid data type should raise an error")),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "invalid data type");
|
||||
Ok(())
|
||||
fn unique_ids() {
|
||||
let mut gen = IDGenerator::new();
|
||||
let mut output: Vec<String> = Vec::new();
|
||||
for _ in 0..10 {
|
||||
let id = gen.next().unwrap();
|
||||
assert!(!output.contains(&id), "{} found in {:?}", id, output);
|
||||
output.push(id);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn controlled_ids() {
|
||||
let ids = ["one", "two", "three"];
|
||||
let mut gen = IDGenerator::with_ids(ids.clone());
|
||||
for id in ids {
|
||||
assert_eq!(id, gen.next().unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Cache {
|
||||
data: HashMap<String, FromCache>,
|
||||
ids: IDGenerator,
|
||||
}
|
||||
|
||||
impl Cache {
|
||||
pub async fn new<P>(_dir: P) -> Self
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let mut data = HashMap::new();
|
||||
data.insert(ENTRY.to_string(), FromCache::Str(Store::new()));
|
||||
Self {
|
||||
data: data,
|
||||
ids: IDGenerator::new(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn with_ids<P, T, D>(dir: P, ids: T) -> Self
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
T: Into<Vec<D>>,
|
||||
D: Into<String>,
|
||||
{
|
||||
let mut output = Self::new(dir).await;
|
||||
output.ids = IDGenerator::with_ids(ids);
|
||||
output
|
||||
}
|
||||
|
||||
fn next_id(&mut self) -> String {
|
||||
let mut id: String;
|
||||
loop {
|
||||
id = self.ids.next().unwrap();
|
||||
match self.get(&id) {
|
||||
FromCache::Error(_) => break,
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
id
|
||||
}
|
||||
|
||||
pub async fn listen(&mut self, listener: Receiver<ToCache>) {
|
||||
loop {
|
||||
match listener.recv().await.unwrap() {
|
||||
ToCache::Get(data) => {
|
||||
data.result.send(self.get(data.data)).await.unwrap();
|
||||
}
|
||||
ToCache::Commit(data) => {
|
||||
data.result.send(self.commit(data.data)).await.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_storage() {
|
||||
let dbs = DataType::new("store").unwrap();
|
||||
let expected: Vec<String> = Vec::new();
|
||||
assert_eq!(dbs.list(["database"].to_vec()).unwrap(), expected);
|
||||
pub fn get<S>(&self, id: S) -> FromCache
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let idd = id.into();
|
||||
match self.data.get(&idd) {
|
||||
Some(data) => data.clone(),
|
||||
None => FromCache::Error(MTTError::from_code(ErrorCode::IDNotFound(idd))),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn update_storage() {
|
||||
let mut dbs = DataType::new("store").unwrap();
|
||||
let name = "new_database";
|
||||
let id = "someid";
|
||||
dbs.add("database", name, id).unwrap();
|
||||
assert_eq!(dbs.eq("database", name).unwrap(), [id].to_vec());
|
||||
assert_eq!(dbs.list(["database"].to_vec()).unwrap(), [name].to_vec());
|
||||
pub fn commit(&mut self, data: Store) -> FromCache {
|
||||
let entry_data = self.data.get(ENTRY).unwrap();
|
||||
let mut store = match entry_data {
|
||||
FromCache::Str(ep) => ep.clone(),
|
||||
_ => {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
for name in data.list() {
|
||||
let id = self.next_id();
|
||||
match store.add_by_id(name, &id) {
|
||||
Ok(_) => {
|
||||
self.data.insert(id, FromCache::DB(Database::new()));
|
||||
}
|
||||
Err(err) => return FromCache::Error(err),
|
||||
}
|
||||
}
|
||||
self.data
|
||||
.insert(ENTRY.to_string(), FromCache::Str(store))
|
||||
.unwrap();
|
||||
FromCache::Ok
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatype_file {
|
||||
use super::*;
|
||||
|
||||
// Test file data traits here.
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod entry {
|
||||
mod engine {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
async fn get_entry() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mut data = DataType::new("store").unwrap();
|
||||
data.add("database", "roger", "moore").unwrap();
|
||||
let filepath = dir.path().join("wiliam");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
let output = item.get().await.unwrap();
|
||||
let cache = Cache::new(dir.path()).await;
|
||||
let expected: Vec<String> = Vec::new();
|
||||
let result = cache.get(ENTRY);
|
||||
match result {
|
||||
FromCache::Str(store) => assert_eq!(store.list(), expected),
|
||||
_ => assert!(false, "{:?} should be FromCache::Str", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_bad_entry() -> Result<(), MTTError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let cache = Cache::new(dir.path()).await;
|
||||
let ids = ["bad1", "bad2"];
|
||||
for id in ids {
|
||||
let output = cache.get(id);
|
||||
match output {
|
||||
FromCache::Error(err) => match err.code {
|
||||
ErrorCode::IDNotFound(_) => {
|
||||
assert!(
|
||||
err.to_string().contains(id),
|
||||
"Had error: {}, Did not contain: {}",
|
||||
err.to_string(),
|
||||
id
|
||||
);
|
||||
}
|
||||
_ => return Err(MTTError::new(format!("{:?} is not IDNotFound", err.code))),
|
||||
},
|
||||
_ => {
|
||||
return Err(MTTError::new(format!(
|
||||
"{:?} is not FromCache::Error",
|
||||
output
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn commit_database() {
|
||||
// remove this one for the one below, maybe.
|
||||
let dir = tempdir().unwrap();
|
||||
let mut cache = Cache::new(dir.path()).await;
|
||||
let mut store = Store::new();
|
||||
let db = "garfield";
|
||||
store.add(db).unwrap();
|
||||
cache.commit(store.clone());
|
||||
let output = cache.get(ENTRY);
|
||||
match output {
|
||||
FromCache::Str(result) => assert_eq!(result.list(), store.list()),
|
||||
_ => assert!(false, "{:?} is not FromCache::Str", output),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_database_entry() {
|
||||
let id = "an_id";
|
||||
let name = "garfield";
|
||||
let dir = tempdir().unwrap();
|
||||
let mut cache = Cache::with_ids(dir.path(), [id]).await;
|
||||
let mut store = Store::new();
|
||||
store.add(name).unwrap();
|
||||
cache.commit(store.clone());
|
||||
let db_out = cache.get(id);
|
||||
match db_out {
|
||||
FromCache::DB(_) => (),
|
||||
_ => assert!(
|
||||
false,
|
||||
"{:?} is not FromCache::DB -- cache is {:?}",
|
||||
db_out, cache.data
|
||||
),
|
||||
}
|
||||
let store_out = cache.get(ENTRY);
|
||||
match store_out {
|
||||
FromCache::Str(updated_store) => match updated_store.get(name) {
|
||||
Some(output) => {
|
||||
assert_eq!(output.id, Some(id.to_string()));
|
||||
assert!(output.data.is_none(), "Should have removed the database.");
|
||||
}
|
||||
None => assert!(true, "Store should have stored the database."),
|
||||
},
|
||||
_ => assert!(
|
||||
false,
|
||||
"{:?} is not FromCache::Str -- cache is {:?}",
|
||||
db_out, cache.data
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn ids_are_not_overwritten() {
|
||||
let ids = ["first", "first", "second"];
|
||||
let names = ["barney", "fred"];
|
||||
let dir = tempdir().unwrap();
|
||||
let mut cache = Cache::with_ids(dir.path(), ids).await;
|
||||
let mut store1 = Store::new();
|
||||
store1.add(names[0]).unwrap();
|
||||
let mut store2 = Store::new();
|
||||
store2.add(names[1]).unwrap();
|
||||
cache.commit(store1);
|
||||
cache.commit(store2);
|
||||
assert_eq!(
|
||||
data.list(["database"].to_vec()).unwrap(),
|
||||
output.list(["database"].to_vec()).unwrap()
|
||||
cache.data.len(),
|
||||
3,
|
||||
"cache.data had the following entries {:?}",
|
||||
cache.data.keys()
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn no_over_writes() -> Result<(), DBError> {
|
||||
async fn no_duplicate_ids() {
|
||||
let ids = ["one", "two"];
|
||||
let dir = tempdir().unwrap();
|
||||
let id = "wicked";
|
||||
let file = dir.path().join(id);
|
||||
let filename = file.to_str().unwrap();
|
||||
write(&file, b"previous").await.unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
match Entry::new(filename.to_string(), data).await {
|
||||
Ok(_) => {
|
||||
return Err(DBError::new(
|
||||
"Should produce an error for an existing Entry",
|
||||
))
|
||||
}
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "entry already exists");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
let mut cache = Cache::with_ids(dir.path(), ids).await;
|
||||
cache
|
||||
.data
|
||||
.insert(ids[0].to_string(), FromCache::DB(Database::new()));
|
||||
assert_eq!(cache.next_id(), ids[1]);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cache {
|
||||
use super::*;
|
||||
mod messages {
|
||||
use super::{
|
||||
super::{start_db, ToCacheMsg},
|
||||
*,
|
||||
};
|
||||
use async_std::channel::unbounded;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
async fn get_the_store() {
|
||||
let dir = tempdir().unwrap();
|
||||
Cache::new(dir.path().to_str().unwrap()).await;
|
||||
let mtt = start_db(dir.path()).await.unwrap();
|
||||
let in_s = mtt.to_cache.clone();
|
||||
let (out_s, out_r) = unbounded();
|
||||
let msg = ToCacheMsg {
|
||||
data: ENTRY.to_string(),
|
||||
result: out_s,
|
||||
};
|
||||
in_s.send(ToCache::Get(msg)).await.unwrap();
|
||||
let result = out_r.recv().await.unwrap();
|
||||
match result {
|
||||
FromCache::Str(_) => (),
|
||||
_ => assert!(false, "{:?} is not FromCache::Str", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_bad_id() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mtt = start_db(dir.path()).await.unwrap();
|
||||
let in_s = mtt.to_cache.clone();
|
||||
let (out_s, out_r) = unbounded();
|
||||
let msg = ToCacheMsg {
|
||||
data: "bad_id!".to_string(),
|
||||
result: out_s,
|
||||
};
|
||||
in_s.send(ToCache::Get(msg)).await.unwrap();
|
||||
let output = out_r.recv().await.unwrap();
|
||||
match output {
|
||||
FromCache::Error(_) => (),
|
||||
_ => assert!(false, "{:?} is not FromCache::Error", output),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
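Aside (not part of the diff): a minimal sketch of how the reworked Cache above appears to be driven, pieced together only from the signatures visible in this hunk (Cache::new, commit, get, the ENTRY key and the FromCache answers). The morethantext import path, the tempfile dev-dependency and the async-std runtime attribute are assumptions, not something this compare shows.

use morethantext::{Cache, FromCache, Store, ENTRY}; // assumed re-exports
use tempfile::tempdir;                              // assumed dev-dependency

#[async_std::main]
async fn main() {
    let dir = tempdir().unwrap(); // throw-away data directory
    let mut cache = Cache::new(dir.path()).await;

    // Stage one database name in a Store and commit it to the cache.
    let mut store = Store::new();
    store.add("garfield").unwrap();
    cache.commit(store);

    // The entry point should now list the committed database.
    match cache.get(ENTRY) {
        FromCache::Str(entry) => println!("databases: {:?}", entry.list()),
        other => eprintln!("unexpected cache answer: {:?}", other),
    }
}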
src/morethantext/database-old.rs (Normal file, 36 lines)
@@ -0,0 +1,36 @@
use super::{DBError, FileData, SessionData};
use std::slice;

#[derive(Clone)]
pub struct Database;

impl Database {
    pub fn new() -> Self {
        Self
    }
}

impl FileData<Self> for Database {
    fn to_bytes(&self) -> Vec<u8> {
        let output = Vec::new();
        output
    }

    fn from_bytes(_data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
        Ok(Self {})
    }
}

impl SessionData for Database {
    fn add(&mut self, _key: &str, _value: &str, _data: &str) -> Result<Vec<String>, DBError> {
        Ok(Vec::new())
    }

    fn eq(&self, _key: &str, _value: &str) -> Result<Vec<String>, DBError> {
        Ok(Vec::new())
    }

    fn list(&self, _keys: Vec<&str>) -> Result<Vec<String>, DBError> {
        Ok(Vec::new())
    }
}
src/morethantext/database.rs (Normal file, 171 lines)
@@ -0,0 +1,171 @@
use super::{Data, ErrorCode, MTTError, Table};
use std::collections::HashMap;

#[derive(Clone, Debug)]
pub struct Database {
    data: HashMap<String, Data<Table>>,
}

impl Database {
    pub fn new() -> Self {
        Self {
            data: HashMap::new(),
        }
    }

    pub fn add<S>(&mut self, name: S) -> Result<(), MTTError>
    where
        S: Into<String>,
    {
        let db_name = name.into();
        match self.get(&db_name) {
            Some(_) => Err(MTTError::from_code(ErrorCode::DuplicateTable(db_name))),
            None => {
                self.data.insert(db_name, Data::from_data(Table::new()));
                Ok(())
            }
        }
    }

    pub fn add_by_id<S, D>(&mut self, name: S, id: D) -> Result<(), MTTError>
    where
        S: Into<String>,
        D: Into<String>,
    {
        let db_name = name.into();
        match self.get(&db_name) {
            Some(_) => Err(MTTError::from_code(ErrorCode::DuplicateTable(db_name))),
            None => {
                self.data.insert(db_name, Data::from_id(id.into()));
                Ok(())
            }
        }
    }

    pub fn get(&self, name: &str) -> Option<&Data<Table>> {
        self.data.get(name)
    }

    pub fn list(&self) -> Vec<String> {
        let mut names = Vec::new();
        for name in self.data.keys() {
            names.push(name.to_string());
        }
        names.sort();
        names
    }
}

#[cfg(test)]
mod databases {
    use super::*;

    #[test]
    fn create_new() {
        let db = Database::new();
        let expected: Vec<String> = Vec::new();
        assert_eq!(db.list(), expected);
    }

    #[test]
    fn add_db_by_str() {
        let mut db = Database::new();
        let name = "Melvin";
        db.add(name).unwrap();
        let output = db.get(name);
        assert!(output.is_some(), "Get returned none.");
    }

    #[test]
    fn add_db_by_string() {
        let mut db = Database::new();
        let name = "Marvin";
        db.add(name.to_string()).unwrap();
        let output = db.get(name);
        assert!(output.is_some(), "Get returned none.");
    }

    #[test]
    fn fail_on_duplicates() -> Result<(), MTTError> {
        let mut db = Database::new();
        let name = "Mickie";
        db.add(name).unwrap();
        match db.add(name) {
            Ok(_) => Err(MTTError::new("duplicates should error")),
            Err(err) => match err.code {
                ErrorCode::DuplicateTable(db_name) => {
                    assert_eq!(db_name, name);
                    Ok(())
                }
                _ => Err(MTTError::new(format!("{:?} is not DuplicateTable", err))),
            },
        }
    }

    #[test]
    fn add_using_cache_id() {
        let mut db = Database::new();
        let name = "fred";
        let id = "12345";
        db.add_by_id(name, id).unwrap();
        let output = db.get(name).unwrap();
        assert!(output.data.is_none(), "there should be no data");
        assert_eq!(output.id, Some(id.to_string()));
    }

    #[test]
    fn add_by_cache_id_name_string() {
        let mut db = Database::new();
        let name = "barney";
        let id = "67890";
        db.add_by_id(name.to_string(), id).unwrap();
        let output = db.get(name).unwrap();
        assert!(output.data.is_none(), "there should be no data");
        assert_eq!(output.id, Some(id.to_string()));
    }

    #[test]
    fn no_duplicate_databases_for_add_by_id() {
        let mut db = Database::new();
        let name = "betty";
        db.add_by_id(name, "fghij").unwrap();
        match db.add_by_id(name, "klmno") {
            Ok(_) => assert!(false, "Duplicates should error."),
            Err(err) => match err.code {
                ErrorCode::DuplicateTable(db_name) => assert_eq!(db_name, name),
                _ => assert!(false, "{:?} is not DuplicateTable", err),
            },
        }
    }

    #[test]
    fn add_by_cache_id_string() {
        let mut db = Database::new();
        let name = "wilma";
        let id = "abcdef";
        db.add_by_id(name, id.to_string()).unwrap();
        let output = db.get(name).unwrap();
        assert!(output.data.is_none(), "there should be no data");
        assert_eq!(output.id, Some(id.to_string()));
    }

    #[test]
    fn get_bad_database() -> Result<(), MTTError> {
        let db = Database::new();
        match db.get("missing") {
            Some(_) => Err(MTTError::new("Should have returned None.")),
            None => Ok(()),
        }
    }

    #[test]
    fn get_list() {
        let mut db = Database::new();
        let mut ids = ["one", "two", "three", "four", "five"];
        for name in ids {
            db.add(name.to_string()).unwrap();
        }
        ids.sort();
        assert_eq!(db.list(), ids);
    }
}
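Aside (not part of the diff): a short usage sketch of the new Database type, mirroring what its own tests exercise. The import path and the visibility of the Data fields outside the module are assumptions.

use morethantext::database::Database; // assumed path

fn main() {
    let mut db = Database::new();
    db.add("customers").unwrap();              // entry backed by data
    db.add_by_id("orders", "abc123").unwrap(); // entry known only by a cache id

    // Duplicate names come back as ErrorCode::DuplicateTable.
    assert!(db.add("customers").is_err());

    // list() returns the names sorted alphabetically.
    assert_eq!(db.list(), vec!["customers".to_string(), "orders".to_string()]);

    // An entry added by id carries the id but no materialised data yet.
    let entry = db.get("orders").unwrap();
    assert_eq!(entry.id, Some("abc123".to_string()));
    assert!(entry.data.is_none());
}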
src/morethantext/entry.rs (Normal file, 381 lines)
@@ -0,0 +1,381 @@
|
||||
use super::{DBError, DataType, ErrorCode, FileData, SessionData};
|
||||
use async_std::{
|
||||
fs::{read, remove_file, write},
|
||||
path::PathBuf,
|
||||
};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
pub struct Entry {
|
||||
data: DataType,
|
||||
filename: PathBuf,
|
||||
last_used: Cell<Instant>,
|
||||
}
|
||||
|
||||
impl Entry {
|
||||
pub async fn new<P>(filename: P, data: DataType) -> Result<Self, DBError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let pathbuf = filename.into();
|
||||
if pathbuf.as_path().exists().await {
|
||||
return Err(DBError::from_code(ErrorCode::EntryExists(pathbuf)));
|
||||
} else {
|
||||
match write(&pathbuf, data.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::from_code(ErrorCode::EntryWriteFailure(pathbuf));
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
data: data,
|
||||
filename: pathbuf,
|
||||
last_used: Cell::new(Instant::now()),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get<P>(filename: P) -> Result<Self, DBError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let pathbuf = filename.into();
|
||||
let content = match read(&pathbuf).await {
|
||||
Ok(text) => text,
|
||||
Err(err) => {
|
||||
let mut error = DBError::from_code(ErrorCode::EntryReadFailure(pathbuf));
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
let data = match DataType::from_bytes(&mut content.iter()) {
|
||||
Ok(raw) => raw,
|
||||
Err(err) => {
|
||||
let mut error = DBError::from_code(ErrorCode::EntryReadFailure(pathbuf));
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
Ok(Self {
|
||||
data: data,
|
||||
filename: pathbuf,
|
||||
last_used: Cell::new(Instant::now()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn elapsed(&self) -> Duration {
|
||||
self.last_used.get().elapsed()
|
||||
}
|
||||
|
||||
pub fn data(&self) -> DataType {
|
||||
self.last_used.set(Instant::now());
|
||||
self.data.clone()
|
||||
}
|
||||
|
||||
async fn update(&mut self, data: DataType) -> Result<(), DBError> {
|
||||
self.last_used.set(Instant::now());
|
||||
match write(&self.filename, data.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error =
|
||||
DBError::from_code(ErrorCode::EntryWriteFailure(self.filename.clone()));
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
self.data = data;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove(&self) -> Result<(), DBError> {
|
||||
match remove_file(&self.filename).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
let mut error =
|
||||
DBError::from_code(ErrorCode::EntryDeleteFailure(self.filename.clone()));
|
||||
error.add_source(err);
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod entry {
|
||||
use super::*;
|
||||
use std::error::Error;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_elapsed_time() {
|
||||
let dir = tempdir().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let filepath = dir.path().join("count");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let item = Entry::new(filename.to_string(), data).await.unwrap();
|
||||
assert!(
|
||||
Duration::from_secs(1) > item.elapsed(),
|
||||
"last_used should have been now."
|
||||
);
|
||||
item.last_used
|
||||
.set(Instant::now() - Duration::from_secs(500));
|
||||
assert!(
|
||||
Duration::from_secs(499) < item.elapsed(),
|
||||
"The duration should have increased."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mut data = DataType::new("store").unwrap();
|
||||
data.add("database", "roger", "moore").unwrap();
|
||||
let filepath = dir.path().join("wiliam");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(
|
||||
Duration::from_secs(1) > item.elapsed(),
|
||||
"last_used should have been now."
|
||||
);
|
||||
let output = item.data();
|
||||
assert_eq!(
|
||||
data.list(["database"].to_vec()).unwrap(),
|
||||
output.list(["database"].to_vec()).unwrap()
|
||||
);
|
||||
assert!(filepath.is_file(), "Should have created the entry file.");
|
||||
let content = read(&filepath).await.unwrap();
|
||||
assert_eq!(content, data.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_errors_on_bad_files() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let filepath = dir.path().join("bad").join("path");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
match Entry::new(filename.to_string(), data).await {
|
||||
Ok(_) => Err(DBError::new("bad file names should raise an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryWriteFailure(_) => {
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
assert!(err
|
||||
.source()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("could not write to file"));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_does_not_over_writes() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let id = "wicked";
|
||||
let file = dir.path().join(id);
|
||||
let filename = file.to_str().unwrap();
|
||||
write(&file, b"previous").await.unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
match Entry::new(filename.to_string(), data).await {
|
||||
Ok(_) => {
|
||||
return Err(DBError::new(
|
||||
"Should produce an error for an existing Entry",
|
||||
))
|
||||
}
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryExists(_) => Ok(()),
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_updates_last_used() {
|
||||
let dir = tempdir().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let filepath = dir.path().join("holder");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let item = Entry::new(filename.to_string(), data).await.unwrap();
|
||||
item.last_used
|
||||
.set(Instant::now() - Duration::from_secs(300));
|
||||
item.data();
|
||||
assert!(
|
||||
Duration::from_secs(1) > item.elapsed(),
|
||||
"last_used should have been reset."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_entry() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mut data = DataType::new("store").unwrap();
|
||||
let filepath = dir.path().join("changing");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let mut item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
item.last_used
|
||||
.set(Instant::now() - Duration::from_secs(500));
|
||||
data.add("database", "new", "stuff").unwrap();
|
||||
item.update(data.clone()).await.unwrap();
|
||||
assert!(
|
||||
Duration::from_secs(1) > item.elapsed(),
|
||||
"last_used should have been reset."
|
||||
);
|
||||
let output = item.data();
|
||||
assert_eq!(
|
||||
data.list(["database"].to_vec()).unwrap(),
|
||||
output.list(["database"].to_vec()).unwrap()
|
||||
);
|
||||
let content = read(&filepath).await.unwrap();
|
||||
assert_eq!(content, data.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_write_errors() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let filepath = dir.path().join("changing");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let mut item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
drop(dir);
|
||||
match item.update(data).await {
|
||||
Ok(_) => Err(DBError::new("file writes should return an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryWriteFailure(_) => {
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
assert!(err
|
||||
.source()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("could not write to file"));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mut data = DataType::new("store").unwrap();
|
||||
data.add("database", "something_old", "3.14159").unwrap();
|
||||
let filepath = dir.path().join("existing");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
let output = Entry::get(filename).await.unwrap();
|
||||
assert_eq!(
|
||||
output.data().list(["database"].to_vec()).unwrap(),
|
||||
data.list(["database"].to_vec()).unwrap()
|
||||
);
|
||||
assert_eq!(output.filename.to_str().unwrap(), filename);
|
||||
assert!(
|
||||
Duration::from_secs(1) > item.elapsed(),
|
||||
"last_used should have been reset."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_file_missing() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let filepath = dir.path().join("justnotthere");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
match Entry::get(filename).await {
|
||||
Ok(_) => Err(DBError::new("should have returned an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryReadFailure(_) => {
|
||||
assert!(err.source().is_some(), "Error should have a source.");
|
||||
assert!(
|
||||
err.source()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("could not read file"),
|
||||
"Source Error Message: {}",
|
||||
err.source().unwrap().to_string()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_corrupt_file() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let filepath = dir.path().join("garbage");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
write(&filepath, b"jhsdfghlsdf").await.unwrap();
|
||||
match Entry::get(filename).await {
|
||||
Ok(_) => Err(DBError::new("should have returned an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryReadFailure(_) => {
|
||||
assert!(err.source().is_some(), "Error should have a source.");
|
||||
assert!(
|
||||
err.source().unwrap().to_string().contains("corrupt file"),
|
||||
"Source Error Message: {}",
|
||||
err.source().unwrap().to_string()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn delete() {
|
||||
let dir = tempdir().unwrap();
|
||||
let filepath = dir.path().join("byebye");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
item.remove().await.unwrap();
|
||||
assert!(!filepath.exists(), "Entry file should be removed.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn delete_bad_file() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let filepath = dir.path().join("itsnotthere");
|
||||
let filename = filepath.to_str().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
let item = Entry::new(filename.to_string(), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
remove_file(filename).await.unwrap();
|
||||
match item.remove().await {
|
||||
Ok(_) => Err(DBError::new("should have produced an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryDeleteFailure(_) => {
|
||||
assert!(err.source().is_some(), "Error should have a source.");
|
||||
assert!(
|
||||
err.source()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("could not remove file"),
|
||||
"Source Error Message: {}",
|
||||
err.source().unwrap().to_string()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
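Aside (not part of the diff): a sketch of the Entry lifecycle defined above, namely create a file-backed entry, reload it, and read its data. The import paths and item visibility are assumptions; the calls mirror the module's own tests.

use morethantext::{entry::Entry, DataType, SessionData}; // assumed paths/visibility
use tempfile::tempdir;

#[async_std::main]
async fn main() {
    let dir = tempdir().unwrap();
    let path = dir.path().join("example-entry");

    // Build an in-memory store and persist it as a brand-new entry file.
    let mut data = DataType::new("store").unwrap();
    data.add("database", "crm", "id-001").unwrap();
    let entry = Entry::new(path.to_str().unwrap().to_string(), data)
        .await
        .unwrap();
    println!("created {:?} ago", entry.elapsed());

    // Reload the same file and list the databases it recorded.
    let reloaded = Entry::get(path.to_str().unwrap()).await.unwrap();
    println!("databases: {:?}", reloaded.data().list(vec!["database"]).unwrap());
}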
@@ -1,90 +1,135 @@
|
||||
use std::{error::Error, fmt};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DBError {
|
||||
msg: String,
|
||||
src: Option<Box<dyn Error + 'static>>,
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ErrorCode {
|
||||
// General
|
||||
Undefined(String),
|
||||
// Cache
|
||||
IDNotFound(String),
|
||||
// Store
|
||||
DuplicateDatabase(String),
|
||||
// Database
|
||||
DuplicateTable(String),
|
||||
}
|
||||
|
||||
impl DBError {
|
||||
impl fmt::Display for ErrorCode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ErrorCode::Undefined(msg) => write!(f, "{}", msg),
|
||||
ErrorCode::IDNotFound(id) => write!(f, "ID '{}' not found", id),
|
||||
ErrorCode::DuplicateDatabase(name) => write!(f, "database '{}' already exists", name),
|
||||
ErrorCode::DuplicateTable(name) => write!(f, "table '{}' already exists", name),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod errorcodes {
|
||||
use super::*;
|
||||
|
||||
const ITEMS: [&str; 2] = ["one", "two"];
|
||||
|
||||
#[test]
|
||||
fn undefined_display() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::Undefined(item.to_string());
|
||||
assert_eq!(err.to_string(), item);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cache_missing() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::IDNotFound(item.to_string());
|
||||
assert_eq!(err.to_string(), format!("ID '{}' not found", item));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duplicate_database() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::DuplicateDatabase(item.to_string());
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("database '{}' already exists", item)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn duplicate_table() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::DuplicateTable(item.to_string());
|
||||
assert_eq!(err.to_string(), format!("table '{}' already exists", item));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MTTError {
|
||||
pub code: ErrorCode,
|
||||
}
|
||||
|
||||
impl MTTError {
|
||||
pub fn new<S>(msg: S) -> Self
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let text = msg.into();
|
||||
Self {
|
||||
msg: msg.into(),
|
||||
src: None,
|
||||
code: ErrorCode::Undefined(text),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_source<E>(&mut self, src: E)
|
||||
where
|
||||
E: Error + 'static,
|
||||
{
|
||||
self.src = Some(Box::new(src));
|
||||
pub fn from_code(code: ErrorCode) -> Self {
|
||||
Self { code: code }
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for DBError {
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
match &self.src {
|
||||
Some(err) => Some(err.as_ref()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Error for MTTError {}
|
||||
|
||||
impl fmt::Display for DBError {
|
||||
impl fmt::Display for MTTError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.msg)
|
||||
write!(f, "{}", self.code)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod create {
|
||||
mod errors {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn with_str() {
|
||||
let msg = "something happened";
|
||||
let err = DBError::new(msg);
|
||||
assert!(
|
||||
err.to_string() == msg,
|
||||
"Got: {} -- Want: {}",
|
||||
err.to_string(),
|
||||
msg
|
||||
);
|
||||
assert!(
|
||||
err.source().is_none(),
|
||||
"Error should initialize with no source."
|
||||
);
|
||||
fn create_with_str() {
|
||||
let msgs = ["one", "two"];
|
||||
for msg in msgs {
|
||||
let err = MTTError::new(msg);
|
||||
assert_eq!(err.to_string(), msg);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with_string() {
|
||||
let msg = "it went boom".to_string();
|
||||
let err = DBError::new(msg.clone());
|
||||
assert!(
|
||||
err.to_string() == msg,
|
||||
"Got: {} -- Want: {}",
|
||||
err.to_string(),
|
||||
msg
|
||||
);
|
||||
assert!(
|
||||
err.source().is_none(),
|
||||
"Error should initialize with no source."
|
||||
);
|
||||
fn create_with_string() {
|
||||
let msg = "three";
|
||||
let err = MTTError::new(msg.to_string());
|
||||
assert_eq!(err.to_string(), msg);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with_source() {
|
||||
let msg = "but this caused the problem";
|
||||
let mut par = DBError::new("parent error");
|
||||
let src = DBError::new(msg);
|
||||
par.add_source(src);
|
||||
let output = par.source();
|
||||
assert!(output.is_some(), "Should return source.");
|
||||
let source = output.unwrap();
|
||||
assert!(source.to_string() == msg);
|
||||
fn create_from_code() {
|
||||
let code = ErrorCode::Undefined("oops".to_string());
|
||||
let err = MTTError::from_code(code);
|
||||
match err.code {
|
||||
ErrorCode::Undefined(_) => (),
|
||||
_ => assert!(false, "{:?} is not undefined", err.code),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_missing_id_from_code() {
|
||||
let code = ErrorCode::IDNotFound("123".to_string());
|
||||
let err = MTTError::from_code(code);
|
||||
match err.code {
|
||||
ErrorCode::IDNotFound(_) => (),
|
||||
_ => assert!(false, "{:?} is not undefined", err.code),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
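Aside (not part of the diff): a small sketch of the MTTError/ErrorCode pairing this hunk introduces, showing the two constructors and a match on the structured code. The import path is an assumption.

use morethantext::error::{ErrorCode, MTTError}; // assumed path

fn main() {
    // Free-form message: stored as ErrorCode::Undefined.
    let plain = MTTError::new("something went sideways");
    assert_eq!(plain.to_string(), "something went sideways");

    // Structured code: the Display output comes from the ErrorCode variant.
    let missing = MTTError::from_code(ErrorCode::IDNotFound("abc123".to_string()));
    assert_eq!(missing.to_string(), "ID 'abc123' not found");

    // Callers can branch on the code instead of parsing the message.
    match missing.code {
        ErrorCode::IDNotFound(id) => println!("cache miss for {}", id),
        other => println!("unexpected error: {}", other),
    }
}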
src/morethantext/fieldtype/mod.rs (Normal file, 89 lines)
@@ -0,0 +1,89 @@
mod static_string;

use crate::morethantext::error::MTTError;
use static_string::StaticString;
use std::fmt;

pub enum FieldType {
    StaticString(StaticString),
}

impl FieldType {
    fn new(ftype: &str, data: &str) -> Result<Self, MTTError> {
        let field = match ftype {
            "StaticString" => StaticString::new(data),
            _ => Err(MTTError::new(format!(
                "field type {} does not exist",
                ftype
            ))),
        };
        match field {
            Ok(fld) => Ok(fld.into()),
            Err(e) => Err(e),
        }
    }
}

impl fmt::Display for FieldType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            FieldType::StaticString(data) => write!(f, "{}", data),
        }
    }
}

impl From<StaticString> for FieldType {
    fn from(data: StaticString) -> Self {
        FieldType::StaticString(data)
    }
}

#[cfg(test)]
mod converstion {
    use super::*;

    #[test]
    fn from_static_string() {
        let data = "a static string";
        let field = StaticString::new(data).unwrap();
        let ftype: FieldType = field.into();
        assert!(
            ftype.to_string() == data,
            "\n\nGot: {}\nWant: {}",
            ftype.to_string(),
            data
        );
    }

    #[test]
    fn bad_field_type() -> Result<(), String> {
        let field_type = "dragon";
        let err_msg = format!("field type {} does not exist", field_type);
        match FieldType::new(field_type, "marmalade") {
            Ok(_) => Err("Should have returned an error.".to_string()),
            Err(err) => {
                if err.to_string() == err_msg {
                    Ok(())
                } else {
                    Err(format!(
                        "Error message is incorrect: Got: '{}' Want: '{}'",
                        err.to_string(),
                        err_msg
                    ))
                }
            }
        }
    }

    #[test]
    fn new_static_string() {
        let data = "This is a test.";
        let field = FieldType::new("StaticString", data).unwrap();
        assert!(
            field.to_string() == data,
            "\n\nGot: {}\nWant: {}\n\n",
            field.to_string(),
            data
        );
    }
}
src/morethantext/fieldtype/static_string.rs (Normal file, 50 lines)
@@ -0,0 +1,50 @@
use crate::morethantext::error::MTTError;
use std::fmt;

pub struct StaticString {
    data: String,
}

impl StaticString {
    pub fn new<S>(name: S) -> Result<Self, MTTError>
    where
        S: Into<String>,
    {
        Ok(Self { data: name.into() })
    }
}

impl fmt::Display for StaticString {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", &self.data)
    }
}

#[cfg(test)]
mod creation {
    use super::*;

    #[test]
    fn new_accepts_str() {
        let data = "some data";
        let field = StaticString::new(data).unwrap();
        assert!(
            field.to_string() == data,
            "\n\nGot: {}\nWant: {}",
            field.to_string(),
            data
        );
    }

    #[test]
    fn new_accepts_string() {
        let data = "actual string";
        let field = StaticString::new(data.to_string()).unwrap();
        assert!(
            field.to_string() == data,
            "\n\nGot: {}\nWant: {}",
            field.to_string(),
            data
        );
    }
}
src/morethantext/graphql.rs (Normal file, 179 lines)
@@ -0,0 +1,179 @@
use async_graphql::{Context, EmptySubscription, Error, Object, Result, Schema};
use async_std::sync::RwLock;
use serde_json;

mod database;

#[derive(Clone)]
struct Table {
    name: String,
}

impl Table {
    async fn new(name: String) -> Self {
        Self { name: name }
    }
}

#[Object]
impl Table {
    async fn name(&self) -> String {
        self.name.to_string()
    }

    async fn describe(&self) -> Vec<u64> {
        Vec::new()
    }
}

struct Query;

#[Object]
impl Query {
    async fn table(&self, ctx: &Context<'_>, name: String) -> Result<Option<Table>> {
        let tbls = ctx
            .data::<RwLock<Vec<Table>>>()
            .unwrap()
            .read()
            .await
            .to_vec();
        match tbls.binary_search_by(|t| t.name.cmp(&name)) {
            Ok(idx) => Ok(Some(tbls[idx].clone())),
            Err(_) => Ok(None),
        }
    }

    async fn tables(&self, ctx: &Context<'_>) -> Vec<Table> {
        ctx.data::<RwLock<Vec<Table>>>()
            .unwrap()
            .read()
            .await
            .to_vec()
    }
}

struct Mutation;

#[Object]
impl Mutation {
    async fn create_table(&self, ctx: &Context<'_>, name: String) -> Result<Option<Table>> {
        let mut tables = ctx.data::<RwLock<Vec<Table>>>().unwrap().write().await;
        match tables.binary_search_by(|t| t.name.cmp(&name)) {
            Ok(_) => Err(Error::new(format!("Table {} already exists.", &name))),
            Err(_) => {
                let output = Table::new(name).await;
                tables.push(output.clone());
                tables.sort_by_key(|k| k.name.clone());
                Ok(Some(output))
            }
        }
    }
}

#[derive(Clone)]
pub struct MoreThanText {
    schema: Schema<Query, Mutation, EmptySubscription>,
}

impl MoreThanText {
    pub fn new() -> Self {
        let tables: Vec<Table> = Vec::new();
        Self {
            schema: Schema::build(Query, Mutation, EmptySubscription)
                .data(RwLock::new(tables))
                .finish(),
        }
    }

    pub async fn execute(&self, qry: &str) -> String {
        let res = self.schema.execute(qry).await;
        serde_json::to_string(&res).unwrap()
    }
}

#[cfg(test)]
mod support {
    use super::*;

    pub fn compare(db: &MoreThanText, output: &str, expected: &str) {
        assert!(
            output == expected,
            "\n\n{}\nGot: {}\nWant: {}\n\n",
            db.schema.sdl(),
            output,
            expected
        );
    }
}

#[cfg(test)]
mod queries {
    use super::*;

    #[async_std::test]
    async fn list_table() {
        let db = MoreThanText::new();
        db.execute(r#"mutation {createTable(name: "wilma"){name}}"#)
            .await;
        db.execute(r#"mutation {createTable(name: "betty"){name}}"#)
            .await;
        let output = db.execute(r#"{table(name: "wilma"){name}}"#).await;
        let expected = r#"{"data":{"table":{"name":"wilma"}}}"#;
        support::compare(&db, &output, &expected);
    }

    #[async_std::test]
    async fn list_no_table() {
        let db = MoreThanText::new();
        let output = db.execute(r#"{table(name: "slade"){name}}"#).await;
        let expected = r#"{"data":{"table":null}}"#;
        support::compare(&db, &output, &expected);
    }

    #[async_std::test]
    async fn list_tables() {
        let db = MoreThanText::new();
        db.execute(r#"mutation {createTable(name: "fred"){name}}"#)
            .await;
        db.execute(r#"mutation {createTable(name: "barney"){name}}"#)
            .await;
        let output = db.execute(r#"{tables{name}}"#).await;
        let expected = r#"{"data":{"tables":[{"name":"barney"},{"name":"fred"}]}}"#;
        support::compare(&db, &output, &expected);
    }

    #[async_std::test]
    async fn empty_table_description() {
        let db = MoreThanText::new();
        let output = db
            .execute(r#"mutation {createTable(name: "pebbles"){name describe}}"#)
            .await;
        let expected = r#"{"data":{"createTable":{"name":"pebbles","describe":[]}}}"#;
        support::compare(&db, &output, &expected);
    }
}

#[cfg(test)]
mod mutations {
    use super::*;

    #[async_std::test]
    async fn add_table() {
        let db = MoreThanText::new();
        let output = db
            .execute(r#"mutation {createTable(name: "william"){name}}"#)
            .await;
        let expected = r#"{"data":{"createTable":{"name":"william"}}}"#;
        support::compare(&db, &output, &expected);
    }

    #[async_std::test]
    async fn cannot_add_duplicate_table() {
        let db = MoreThanText::new();
        let qry = r#"mutation {createTable(name: "gadzoo"){name}}"#;
        db.execute(&qry).await;
        let output = db.execute(qry).await;
        let expected = r#"{"data":null,"errors":[{"message":"Table gadzoo already exists.","locations":[{"line":1,"column":11}],"path":["createTable"]}]}"#;
        support::compare(&db, &output, &expected);
    }
}
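Aside (not part of the diff): a sketch of driving the GraphQL layer directly, mirroring the tests above. It assumes this MoreThanText is reachable from outside the graphql module and that an async-std runtime is available.

use morethantext::graphql::MoreThanText; // assumed path and visibility

#[async_std::main]
async fn main() {
    let db = MoreThanText::new();

    // Mutations and queries both go through execute(), which returns raw JSON.
    db.execute(r#"mutation {createTable(name: "invoices"){name}}"#)
        .await;
    let json = db.execute(r#"{tables{name describe}}"#).await;
    // Expected shape, per the tests above:
    // {"data":{"tables":[{"name":"invoices","describe":[]}]}}
    println!("{}", json);
}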
src/morethantext/mod-2.rs (Normal file, 402 lines)
@@ -0,0 +1,402 @@
|
||||
mod database;
|
||||
mod entry;
|
||||
mod error;
|
||||
mod store;
|
||||
|
||||
use async_std::path::PathBuf;
|
||||
use database::Database;
|
||||
use entry::Entry;
|
||||
use error::{DBError, ErrorCode};
|
||||
use rand::{distributions::Alphanumeric, thread_rng, Rng};
|
||||
use std::{slice, str};
|
||||
use store::Store;
|
||||
|
||||
const ENTRY: &str = "EntryPoint";
|
||||
|
||||
trait ID {
|
||||
fn next(&self) -> String;
|
||||
}
|
||||
|
||||
trait FileData<F> {
|
||||
fn to_bytes(&self) -> Vec<u8>;
|
||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<F, DBError>;
|
||||
}
|
||||
|
||||
trait SessionData {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError>;
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum DataType {
|
||||
DBMap(Store),
|
||||
TableMap(Database),
|
||||
}
|
||||
|
||||
impl DataType {
|
||||
fn new(data_type: &str) -> Result<Self, DBError> {
|
||||
match data_type {
|
||||
"store" => Ok(DataType::DBMap(Store::new())),
|
||||
"database" => Ok(DataType::TableMap(Database::new())),
|
||||
_ => Err(DBError::from_code(ErrorCode::DataTypeIncorrect(
|
||||
data_type.to_string(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SessionData for DataType {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.add(key, value, data),
|
||||
DataType::TableMap(_) => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.eq(key, value),
|
||||
DataType::TableMap(_) => todo!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
match self {
|
||||
DataType::DBMap(dbs) => dbs.list(keys),
|
||||
DataType::TableMap(_) => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileData<Self> for DataType {
|
||||
fn to_bytes(&self) -> Vec<u8> {
|
||||
let mut output = Vec::new();
|
||||
match self {
|
||||
DataType::DBMap(_) => output.append(&mut "DBMap".as_bytes().to_vec()),
|
||||
DataType::TableMap(_) => output.append(&mut "TableMap".as_bytes().to_vec()),
|
||||
}
|
||||
output.push(0);
|
||||
match self {
|
||||
DataType::DBMap(store) => output.append(&mut store.to_bytes()),
|
||||
DataType::TableMap(_) => (),
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
||||
let mut header: Vec<u8> = Vec::new();
|
||||
loop {
|
||||
let letter = match data.next() {
|
||||
Some(a) => a.clone(),
|
||||
None => 0,
|
||||
};
|
||||
if letter == 0 {
|
||||
break;
|
||||
} else {
|
||||
header.push(letter);
|
||||
}
|
||||
}
|
||||
let header = match str::from_utf8(&header) {
|
||||
Ok(item) => item,
|
||||
Err(_) => return Err(DBError::from_code(ErrorCode::CorruptFile)),
|
||||
};
|
||||
match header {
|
||||
"DBMap" => match Store::from_bytes(data) {
|
||||
Ok(store) => Ok(DataType::DBMap(store)),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
"TableMap" => Ok(DataType::new("database").unwrap()),
|
||||
_ => Err(DBError::from_code(ErrorCode::CorruptFile)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
next_id: &dyn Fn() -> String,
|
||||
session: Vec<String>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new<P>(dir: P) -> Result<Self, DBError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let pathbuf = dir.into();
|
||||
let entry = pathbuf.as_path().join(ENTRY);
|
||||
match Entry::get(entry.clone()).await {
|
||||
Ok(_) => (),
|
||||
Err(_) => {
|
||||
let store = DataType::new("store").unwrap();
|
||||
match Entry::new(entry, store).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::from_code(ErrorCode::CacheReadWrite);
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
next_id: fn id() -> String { thread_rng().sample_iter(&Alphanumeric).take(64).collect() },
|
||||
session: [ENTRY.to_string()].to_vec(),
|
||||
})
|
||||
}
|
||||
|
||||
fn set_session(&mut self, sess: Vec<String>) {
|
||||
self.session = sess;
|
||||
}
|
||||
|
||||
async fn new_entry(&self, _name: &str) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatype {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn bad_data_type() -> Result<(), DBError> {
|
||||
let dt = "bufcuss";
|
||||
match DataType::new(dt) {
|
||||
Ok(_) => Err(DBError::new("should have produced an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::DataTypeIncorrect(value) => {
|
||||
assert_eq!(value, dt, "Incorrect input value");
|
||||
Ok(())
|
||||
}
|
||||
_ => {
|
||||
let mut error = DBError::new("incorrect error");
|
||||
error.add_source(err);
|
||||
Err(error)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_store() -> Result<(), DBError> {
|
||||
match DataType::new("store") {
|
||||
Ok(dt) => match dt {
|
||||
DataType::DBMap(_) => Ok(()),
|
||||
_ => Err(DBError::new("incorrect data type")),
|
||||
},
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_database() -> Result<(), DBError> {
|
||||
match DataType::new("database") {
|
||||
Ok(dt) => match dt {
|
||||
DataType::TableMap(_) => Ok(()),
|
||||
_ => Err(DBError::new("incorrect data type")),
|
||||
},
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatype_sesssion {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn update_storage() {
|
||||
let mut dbs = DataType::new("store").unwrap();
|
||||
let name = "new_database";
|
||||
let id = "someid";
|
||||
dbs.add("database", name, id).unwrap();
|
||||
assert_eq!(dbs.eq("database", name).unwrap(), [id].to_vec());
|
||||
assert_eq!(dbs.list(["database"].to_vec()).unwrap(), [name].to_vec());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatype_file {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn new_store_bytes() {
|
||||
let dbs = DataType::new("store").unwrap();
|
||||
let mut expected = "DBMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
assert_eq!(dbs.to_bytes(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn store_bytes_with_info() {
|
||||
let name = "title";
|
||||
let id = "king";
|
||||
let mut store = Store::new();
|
||||
let mut dt_store = DataType::new("store").unwrap();
|
||||
let mut expected = dt_store.to_bytes();
|
||||
store.add("database", name, id).unwrap();
|
||||
expected.append(&mut store.to_bytes());
|
||||
dt_store.add("database", name, id).unwrap();
|
||||
assert_eq!(dt_store.to_bytes(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_empty_store() {
|
||||
let dt_store = DataType::new("store").unwrap();
|
||||
let data = dt_store.to_bytes();
|
||||
let mut feed = data.iter();
|
||||
let output = DataType::from_bytes(&mut feed).unwrap();
|
||||
assert_eq!(
|
||||
dt_store.list(["database"].to_vec()).unwrap(),
|
||||
output.list(["database"].to_vec()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_store_info() {
|
||||
let mut dt_store = DataType::new("store").unwrap();
|
||||
dt_store.add("database", "raven", "beastboy").unwrap();
|
||||
let data = dt_store.to_bytes();
|
||||
let mut feed = data.iter();
|
||||
let output = DataType::from_bytes(&mut feed).unwrap();
|
||||
assert_eq!(
|
||||
dt_store.list(["database"].to_vec()).unwrap(),
|
||||
output.list(["database"].to_vec()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_database_bytes() {
|
||||
let db = DataType::new("database").unwrap();
|
||||
let mut expected = "TableMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
assert_eq!(db.to_bytes(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_empty_database() {
|
||||
let dt = DataType::new("database").unwrap();
|
||||
let data = dt.to_bytes();
|
||||
let mut feed = data.iter();
|
||||
match DataType::from_bytes(&mut feed).unwrap() {
|
||||
DataType::TableMap(_) => (),
|
||||
_ => assert!(false, "Incorrect data type"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_bad_header() -> Result<(), DBError> {
|
||||
let data = "sdghsdl".as_bytes().to_vec();
|
||||
let mut feed = data.iter();
|
||||
match DataType::from_bytes(&mut feed) {
|
||||
Ok(_) => Err(DBError::new("should have raised an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CorruptFile => Ok(()),
|
||||
_ => Err(DBError::new("incorrect error")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_bad_store() -> Result<(), DBError> {
|
||||
let mut data = "DBMap".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
data.append(&mut "sdfgs".as_bytes().to_vec());
|
||||
let mut feed = data.iter();
|
||||
match DataType::from_bytes(&mut feed) {
|
||||
Ok(_) => Err(DBError::new("should have raised an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CorruptFile => Ok(()),
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod db {
|
||||
use super::*;
|
||||
use async_std::fs::write;
|
||||
use std::error::Error;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mtt = MoreThanText::new(dir.path()).await.unwrap();
|
||||
let epoint = dir.path().join(ENTRY);
|
||||
assert!(
|
||||
epoint.is_file(),
|
||||
"{} did not get created.",
|
||||
epoint.display()
|
||||
);
|
||||
let entry = Entry::get(epoint.to_str().unwrap()).await.unwrap();
|
||||
assert_eq!(
|
||||
entry.data().list(["database"].to_vec()).unwrap(),
|
||||
Vec::<String>::new()
|
||||
);
|
||||
let sess = [ENTRY];
|
||||
assert_eq!(mtt.session, sess);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn entry_failure() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let path = dir.path().join("bad").join("path");
|
||||
match MoreThanText::new(path).await {
|
||||
Ok(_) => Err(DBError::new("Should have produced an error.")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CacheReadWrite => {
|
||||
assert!(err.source().is_some(), "Error should have a source.");
|
||||
assert!(
|
||||
err.source().unwrap().to_string().contains("write failure"),
|
||||
"Source Error Message: {}",
|
||||
err.source().unwrap().to_string()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn existing_entry_point() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let data = DataType::new("store").unwrap();
|
||||
Entry::new(dir.path().join(ENTRY), data.clone())
|
||||
.await
|
||||
.unwrap();
|
||||
match MoreThanText::new(dir.path()).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn corrupt_entry_point() -> Result<(), DBError> {
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join(ENTRY);
|
||||
write(file, b"Really bad data.").await.unwrap();
|
||||
match MoreThanText::new(dir.path()).await {
|
||||
Ok(_) => Err(DBError::new("should have errored")),
|
||||
Err(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn set_session() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mut mtt = MoreThanText::new(dir.path()).await.unwrap();
|
||||
let sess = ["different".to_string()];
|
||||
mtt.set_session(sess.to_vec());
|
||||
assert_eq!(mtt.session, sess);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_a_database() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mtt = MoreThanText::new(dir.path()).await.unwrap();
|
||||
mtt.new_entry("wilbur").await;
|
||||
}
|
||||
}
|
640
src/morethantext/mod-3.rs
Normal file
640
src/morethantext/mod-3.rs
Normal file
@ -0,0 +1,640 @@
|
||||
use async_std::{
|
||||
channel::{unbounded, Receiver, Sender},
|
||||
path::PathBuf,
|
||||
task::spawn,
|
||||
};
|
||||
use std::{collections::HashMap, error::Error, fmt};
|
||||
|
||||
const ENTRY: &str = "EntryPoint";
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ErrorCode {
|
||||
// General
|
||||
Undefined(String),
|
||||
// Cache
|
||||
EntryNotFound(String),
|
||||
InvalidCommitData,
|
||||
// Store
|
||||
DatabaseAlreadyExists(String),
|
||||
}
|
||||
|
||||
impl fmt::Display for ErrorCode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ErrorCode::Undefined(msg) => write!(f, "{}", msg),
|
||||
ErrorCode::EntryNotFound(id) => write!(f, "entry '{}' was not found", id),
|
||||
ErrorCode::InvalidCommitData => write!(f, "commit data was not a database store"),
|
||||
ErrorCode::DatabaseAlreadyExists(name) => {
|
||||
write!(f, "database '{}' already exists", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod errorcodes {
|
||||
use super::*;
|
||||
|
||||
const ITEMS: [&str; 2] = ["one", "two"];
|
||||
|
||||
#[test]
|
||||
fn undefined_display() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::Undefined(item.to_string());
|
||||
assert_eq!(err.to_string(), item);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bad_entry() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::EntryNotFound(item.to_string());
|
||||
assert_eq!(err.to_string(), format!("entry '{}' was not found", item));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_commit_data() {
|
||||
let err = ErrorCode::InvalidCommitData;
|
||||
assert_eq!(err.to_string(), "commit data was not a database store");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn database_already_exists() {
|
||||
for item in ITEMS {
|
||||
let err = ErrorCode::DatabaseAlreadyExists(item.to_string());
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("database '{}' already exists", item)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MTTError {
|
||||
code: ErrorCode,
|
||||
}
|
||||
|
||||
impl MTTError {
|
||||
fn new<S>(msg: S) -> Self
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let text = msg.into();
|
||||
Self {
|
||||
code: ErrorCode::Undefined(text),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_code(code: ErrorCode) -> Self {
|
||||
Self { code: code }
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for MTTError {}
|
||||
|
||||
impl fmt::Display for MTTError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.code)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod errors {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn create_with_str() {
|
||||
let msgs = ["one", "two"];
|
||||
for msg in msgs {
|
||||
let err = MTTError::new(msg);
|
||||
assert_eq!(err.to_string(), msg);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_with_string() {
|
||||
let msg = "three";
|
||||
let err = MTTError::new(msg.to_string());
|
||||
assert_eq!(err.to_string(), msg);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_from_code() {
|
||||
let code = ErrorCode::Undefined("oops".to_string());
|
||||
let err = MTTError::from_code(code);
|
||||
match err.code {
|
||||
ErrorCode::Undefined(_) => (),
|
||||
_ => assert!(false, "{:?} is not undefined", err.code),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_missing_entry() {
|
||||
let code = ErrorCode::EntryNotFound("an_id".to_string());
|
||||
let err = MTTError::from_code(code);
|
||||
match err.code {
|
||||
ErrorCode::EntryNotFound(_) => (),
|
||||
_ => assert!(false, "{:?} is not EntryNotFound", err.code),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct Storage<D> {
|
||||
id: Option<String>,
|
||||
data: Option<D>,
|
||||
// delete: bool,
|
||||
}
|
||||
|
||||
impl Storage<DataType> {
    fn from_id<S>(id: S) -> Self
    where
        S: Into<String>,
    {
        Self {
            id: Some(id.into()),
            data: None,
        }
    }

    fn from_datatype(dt: DataType) -> Self {
        Self {
            id: None,
            data: Some(dt),
        }
    }
}
|
||||
|
||||
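// Illustrative note (not part of the diff): `Storage` appears to model an entry
// that is either known only by its id (not yet loaded) or already held in memory
// as data. A minimal sketch of the two constructors, assuming the impl above:
//
//     let by_id = Storage::from_id("some_id");
//     assert!(by_id.data.is_none());
//
//     let in_memory = Storage::from_datatype(DataType::new("database"));
//     assert!(in_memory.id.is_none());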
#[cfg(test)]
|
||||
mod storage {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn from_id_with_str() {
|
||||
let ids = ["first", "second"];
|
||||
for id in ids {
|
||||
let output = Storage::from_id(id);
|
||||
assert_eq!(output.id, Some(id.to_string()));
|
||||
assert!(
|
||||
output.data.is_none(),
|
||||
"The storage data should have been Non."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_id_with_string() {
|
||||
let id = "my_id".to_string();
|
||||
let output = Storage::from_id(id.clone());
|
||||
assert_eq!(output.id, Some(id));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_store() {
|
||||
let output = Storage::from_datatype(DataType::new("store"));
|
||||
assert!(output.id.is_none(), "id should be None.");
|
||||
assert!(output.data.is_some(), "There should be data");
|
||||
let result = output.data.unwrap();
|
||||
match result {
|
||||
DataType::DBMap(_) => (),
|
||||
_ => assert!(false, "{:?} should have been DataType::DBMap.", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_database() {
|
||||
let output = Storage::from_datatype(DataType::new("database"));
|
||||
let result = output.data.unwrap();
|
||||
match result {
|
||||
DataType::TableMap(_) => (),
|
||||
_ => assert!(false, "{:?} should have been DataType::TableMap.", result),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct Store {
|
||||
data: HashMap<String, Storage<DataType>>,
|
||||
}
|
||||
|
||||
impl Store {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
data: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_new<S>(&mut self, name: S) -> Result<(), MTTError>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let dbname = name.into();
|
||||
match self.get(&dbname) {
|
||||
Some(_) => Err(MTTError::from_code(ErrorCode::DatabaseAlreadyExists(
|
||||
dbname,
|
||||
))),
|
||||
None => {
|
||||
self.data
|
||||
.insert(dbname, Storage::from_datatype(DataType::new("database")));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, name: &str) -> Option<&Storage<DataType>> {
|
||||
self.data.get(name)
|
||||
}
|
||||
}
|
||||
|
||||
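// Illustrative sketch (not part of the diff): the only failure mode of
// `Store::add_new` is a duplicate database name.
//
//     let mut store = Store::new();
//     store.add_new("accounts").unwrap();          // creates the database entry
//     match store.add_new("accounts") {
//         Err(err) => { /* err.code is ErrorCode::DatabaseAlreadyExists("accounts") */ }
//         Ok(_) => unreachable!(),
//     }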
#[cfg(test)]
|
||||
mod stores {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn get_no_database() -> Result<(), MTTError> {
|
||||
let store = Store::new();
|
||||
match store.get("missing_name") {
|
||||
Some(_) => Err(MTTError::new("should have returned None")),
|
||||
None => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_database_str() {
|
||||
let mut store = Store::new();
|
||||
let names = ["first", "second"];
|
||||
for name in names {
|
||||
store.add_new(name).unwrap();
|
||||
let output = store.get(name).unwrap();
|
||||
assert!(output.data.is_some(), "There should be a data type.");
|
||||
match output.data.clone().unwrap() {
|
||||
DataType::TableMap(_) => (),
|
||||
_ => assert!(
|
||||
false,
|
||||
"{:?} should have been DataType::TableMap.",
|
||||
output.data
|
||||
),
|
||||
}
|
||||
assert!(output.id.is_none(), "Should not have an id.");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_database_string() {
|
||||
let mut store = Store::new();
|
||||
let name = "third".to_string();
|
||||
store.add_new(name.clone()).unwrap();
|
||||
let output = store.get(&name).unwrap();
|
||||
match output.data.clone().unwrap() {
|
||||
DataType::TableMap(_) => (),
|
||||
_ => assert!(
|
||||
false,
|
||||
"{:?} should have been DataType::TableMap.",
|
||||
output.data
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicate_database_names() -> Result<(), MTTError> {
|
||||
let mut store = Store::new();
|
||||
let name = "duplicate";
|
||||
store.add_new(name).unwrap();
|
||||
match store.add_new(name) {
|
||||
Ok(_) => Err(MTTError::new("should have been an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::DatabaseAlreadyExists(dbname) => {
|
||||
assert_eq!(dbname, name);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(MTTError::new(format!(
|
||||
"{:?} should have been DatabaseAlreadyExists.",
|
||||
err.code
|
||||
))),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct Database;
|
||||
|
||||
#[cfg(test)]
|
||||
mod databases {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn create() {
|
||||
Database::new();
|
||||
}
|
||||
}
|
||||
|
||||
impl Database {
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum DataType {
|
||||
DBMap(Store),
|
||||
TableMap(Database),
|
||||
}
|
||||
|
||||
impl DataType {
|
||||
fn new(dtype: &str) -> DataType {
|
||||
match dtype {
|
||||
"store" => Self::DBMap(Store::new()),
|
||||
"database" => Self::TableMap(Database::new()),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod datatypes {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn create_store() {
|
||||
let dtype = DataType::new("store");
|
||||
match dtype {
|
||||
DataType::DBMap(_) => (),
|
||||
_ => assert!(false, "{:?} is not the correct data type", dtype),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_database() {
|
||||
let dtype = DataType::new("database");
|
||||
match dtype {
|
||||
DataType::TableMap(_) => (),
|
||||
_ => assert!(false, "{:?} is not the correct data type", dtype),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum FromCache {
|
||||
Ok,
|
||||
Data(HashMap<String, DataType>),
|
||||
Error(MTTError),
|
||||
}
|
||||
|
||||
struct CacheQuery {
|
||||
ids: Vec<String>,
|
||||
reply: Sender<FromCache>,
|
||||
}
|
||||
|
||||
struct CacheCommit {
|
||||
reply: Sender<FromCache>,
|
||||
data: DataType,
|
||||
}
|
||||
|
||||
impl CacheCommit {
|
||||
fn new(data: DataType, channel: Sender<FromCache>) -> Result<Self, MTTError> {
|
||||
match data {
|
||||
DataType::DBMap(_) => (),
|
||||
_ => return Err(MTTError::from_code(ErrorCode::InvalidCommitData)),
|
||||
}
|
||||
Ok(Self {
|
||||
data: data,
|
||||
reply: channel,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod commits {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn create() -> Result<(), MTTError> {
|
||||
let (s, _) = unbounded();
|
||||
match CacheCommit::new(DataType::new("store"), s) {
|
||||
Ok(output) => match output.data {
|
||||
DataType::DBMap(_) => Ok(()),
|
||||
_ => Err(MTTError::new(format!(
|
||||
"{:?} should have been DBMap",
|
||||
output.data
|
||||
))),
|
||||
},
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bad_data_type() -> Result<(), MTTError> {
|
||||
let (s, _) = unbounded();
|
||||
match CacheCommit::new(DataType::new("database"), s) {
|
||||
Ok(_) => Err(MTTError::new("CacheCommit::new did not return an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::InvalidCommitData => Ok(()),
|
||||
_ => Err(MTTError::new(format!(
|
||||
"{:?} is not the correct error",
|
||||
err.code
|
||||
))),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ToCache {
|
||||
Query(CacheQuery),
|
||||
Commit(CacheCommit),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
session: Vec<String>,
|
||||
cache: Sender<Vec<String>>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
async fn new(cache: Sender<Vec<String>>) -> Result<Self, MTTError> {
|
||||
Ok(Self {
|
||||
session: [ENTRY.to_string()].to_vec(),
|
||||
cache: cache,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod mtt {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
let (s, _) = unbounded();
|
||||
let mtt = MoreThanText::new(s).await.unwrap();
|
||||
assert_eq!(mtt.session, [ENTRY]);
|
||||
}
|
||||
}
|
||||
|
||||
struct Cache;
|
||||
|
||||
impl Cache {
|
||||
async fn new<P>(_dir: P) -> Result<Self, MTTError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
Ok(Self {})
|
||||
}
|
||||
|
||||
async fn query(&self, qry: &Vec<String>) -> Result<HashMap<String, DataType>, MTTError> {
|
||||
let mut output = HashMap::new();
|
||||
for id in qry {
|
||||
if id == ENTRY {
|
||||
output.insert(ENTRY.to_string(), DataType::new("store"));
|
||||
} else {
|
||||
return Err(MTTError::from_code(ErrorCode::EntryNotFound(
|
||||
id.to_string(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
async fn commit(&self) -> Result<(), MTTError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn start(&self, listener: Receiver<ToCache>) {
|
||||
loop {
|
||||
match listener.recv().await.unwrap() {
|
||||
ToCache::Query(qry) => match self.query(&qry.ids).await {
|
||||
Ok(data) => qry.reply.send(FromCache::Data(data)).await.unwrap(),
|
||||
Err(error) => qry.reply.send(FromCache::Error(error)).await.unwrap(),
|
||||
},
|
||||
ToCache::Commit(commit) => match self.commit().await {
|
||||
Ok(_) => commit.reply.send(FromCache::Ok).await.unwrap(),
|
||||
Err(error) => commit.reply.send(FromCache::Error(error)).await.unwrap(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
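// Illustrative sketch (not part of the diff): `Cache::start` is meant to run on
// its own task and be driven over a channel. Channel and variable names below
// (`to_cache`, `reply_tx`, the "./data" path) are hypothetical.
//
//     let (to_cache, from_clients) = unbounded();
//     spawn(async move {
//         let cache = Cache::new("./data").await.unwrap();
//         cache.start(from_clients).await;
//     });
//
//     let (reply_tx, reply_rx) = unbounded();
//     to_cache
//         .send(ToCache::Query(CacheQuery {
//             ids: vec![ENTRY.to_string()],
//             reply: reply_tx,
//         }))
//         .await
//         .unwrap();
//     match reply_rx.recv().await.unwrap() {
//         FromCache::Data(map) => { /* map[ENTRY] is the entry-point store */ }
//         FromCache::Error(_err) => { /* an id was not found */ }
//         FromCache::Ok => {}
//     }
//
// The `start_cache` and `send_request` helpers in the tests below follow the
// same pattern.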
#[cfg(test)]
|
||||
mod caches {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
async fn start_cache<P>(dir: P) -> Sender<ToCache>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let (s, r) = unbounded();
|
||||
let datadir = dir.into();
|
||||
spawn(async move {
|
||||
let cache = Cache::new(datadir).await.unwrap();
|
||||
cache.start(r).await;
|
||||
});
|
||||
s
|
||||
}
|
||||
|
||||
async fn send_request(data: Vec<&str>, channel: Sender<ToCache>) -> FromCache {
|
||||
let mut ids = Vec::new();
|
||||
for id in data.iter() {
|
||||
ids.push(id.to_string());
|
||||
}
|
||||
let (s, r) = unbounded();
|
||||
let msg = ToCache::Query(CacheQuery { ids: ids, reply: s });
|
||||
channel.send(msg).await.unwrap();
|
||||
r.recv().await.unwrap()
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create() {
|
||||
let dir = tempdir().unwrap();
|
||||
let s_cache = start_cache(dir.path()).await;
|
||||
let result = send_request(vec![ENTRY], s_cache).await;
|
||||
match result {
|
||||
FromCache::Data(data) => match data.get(ENTRY) {
|
||||
Some(output) => match output {
|
||||
DataType::DBMap(_) => (),
|
||||
_ => assert!(false, "{:?} is not a database store.", output),
|
||||
},
|
||||
None => assert!(false, "Should contain entry point."),
|
||||
},
|
||||
_ => assert!(false, "{:?} should have been a store.", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn bad_entry() {
|
||||
let dir = tempdir().unwrap();
|
||||
let s_cache = start_cache(dir.path()).await;
|
||||
let result = send_request(vec!["bad_id"], s_cache).await;
|
||||
match result {
|
||||
FromCache::Error(_) => (),
|
||||
_ => assert!(false, "{:?} should have been an error.", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn empty_commit() {
|
||||
let dir = tempdir().unwrap();
|
||||
let s_cache = start_cache(dir.path()).await;
|
||||
let (s, r) = unbounded();
|
||||
let msg = ToCache::Commit(CacheCommit::new(DataType::new("store"), s).unwrap());
|
||||
s_cache.send(msg).await.unwrap();
|
||||
let result = r.recv().await.unwrap();
|
||||
match result {
|
||||
FromCache::Ok => (),
|
||||
_ => assert!(false, "{:?} should have been an Ok.", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_store() {
|
||||
let dir = tempdir().unwrap();
|
||||
let cache = Cache::new(dir.path()).await.unwrap();
|
||||
let output = cache.query(&[ENTRY.to_string()].to_vec()).await.unwrap();
|
||||
let result = output.get(ENTRY).unwrap();
|
||||
match result {
|
||||
DataType::DBMap(_) => (),
|
||||
_ => assert!(false, "{:?} should have been a DBMap.", result),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn bad_get() {
|
||||
let dir = tempdir().unwrap();
|
||||
let cache = Cache::new(dir.path()).await.unwrap();
|
||||
let bad_id = "really_bad_id";
|
||||
match cache.query(&[bad_id.to_string()].to_vec()).await {
|
||||
Ok(_) => assert!(false, "Should have produced an error."),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::EntryNotFound(id) => assert_eq!(id, bad_id),
|
||||
_ => assert!(false, "{:?} should have been EntryNotFound.", err.code),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn start_db<P>(_dir: P) -> Result<MoreThanText, MTTError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let (s, r) = unbounded();
|
||||
spawn(async move {
|
||||
loop {
|
||||
r.recv().await.unwrap();
|
||||
}
|
||||
});
|
||||
Ok(MoreThanText::new(s).await.unwrap())
|
||||
}
|
||||
|
||||
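// Illustrative note (not part of the diff): in this intermediate version
// `start_db` only spawns a stub loop that drains the channel; the returned
// `MoreThanText` handle carries the entry-point session but does not yet talk
// to a `Cache`. A caller would still just do, for example (path hypothetical):
//
//     let mtt = start_db("./some/data/dir").await.unwrap();
//     assert_eq!(mtt.session, [ENTRY]);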
#[cfg(test)]
|
||||
mod db_start_up {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn initial_session() {
|
||||
let dir = tempdir().unwrap();
|
||||
let mtt = start_db(dir.path()).await.unwrap();
|
||||
assert_eq!(mtt.session, [ENTRY]);
|
||||
}
|
||||
}
|
823
src/morethantext/mod-old.rs
Normal file
823
src/morethantext/mod-old.rs
Normal file
@ -0,0 +1,823 @@
|
||||
mod cache;
|
||||
mod database;
|
||||
pub mod error;
|
||||
mod store;
|
||||
|
||||
use async_std::{
|
||||
fs::{create_dir, read, remove_file, write},
|
||||
path::Path,
|
||||
sync::{Arc, Mutex},
|
||||
task::{sleep, spawn},
|
||||
};
|
||||
use database::Database;
|
||||
use error::{DBError, ErrorCode};
|
||||
use rand::{distributions::Alphanumeric, thread_rng, Rng};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
fmt, slice, str,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use store::Store;
|
||||
|
||||
const DATA: &str = "data";
|
||||
const ENTRY: &str = "databases";
|
||||
|
||||
trait FileData<F> {
|
||||
fn to_bytes(&self) -> Vec<u8>;
|
||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<F, DBError>;
|
||||
}
|
||||
|
||||
trait SessionData {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError>;
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum CacheType {
|
||||
Raw(String),
|
||||
DBMap(Store),
|
||||
TableMap,
|
||||
}
|
||||
|
||||
impl CacheType {
|
||||
pub fn entry_type(&self) -> String {
|
||||
match self {
|
||||
CacheType::Raw(_) => "Raw".to_string(),
|
||||
CacheType::DBMap(_) => "DBMap".to_string(),
|
||||
CacheType::TableMap => "TableMap".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_bytes(&self) -> Vec<u8> {
|
||||
let mut output = self.entry_type().into_bytes();
|
||||
output.push(0);
|
||||
match self {
|
||||
CacheType::Raw(s) => output.append(&mut s.as_bytes().to_vec()),
|
||||
CacheType::DBMap(_) => (),
|
||||
CacheType::TableMap => (),
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
pub fn from_bytes(data: Vec<u8>) -> Result<CacheType, DBError> {
|
||||
let mut data_iter = data.iter();
|
||||
let mut letter: u8;
|
||||
match data_iter.next() {
|
||||
Some(item) => letter = *item,
|
||||
None => return Err(DBError::new("empty file")),
|
||||
}
|
||||
let mut header: Vec<u8> = Vec::new();
|
||||
while letter != 0 {
|
||||
header.push(letter.clone());
|
||||
match data_iter.next() {
|
||||
Some(item) => letter = *item,
|
||||
None => return Err(DBError::new("incomplete file")),
|
||||
}
|
||||
}
|
||||
let header = str::from_utf8(&header).unwrap().to_string();
|
||||
match header.as_str() {
|
||||
"Raw" => {
|
||||
let mut output: Vec<u8> = Vec::new();
|
||||
for letter in data_iter {
|
||||
output.push(letter.clone());
|
||||
}
|
||||
Ok(CacheType::Raw(str::from_utf8(&output).unwrap().to_string()))
|
||||
}
|
||||
"DBMap" => Ok(CacheType::DBMap(Store::new())),
|
||||
"TableMap" => Ok(CacheType::TableMap),
|
||||
_ => Err(DBError::new("data corruption")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
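// Illustrative note (not part of the diff): `to_bytes` writes the variant name,
// a 0 byte as separator, then any payload, and `from_bytes` parses that layout
// back. Examples, as implied by the code above:
//
//     CacheType::Raw("abc".to_string()).to_bytes();  // b"Raw\0abc"
//     CacheType::DBMap(Store::new()).to_bytes();     // b"DBMap\0" (store body not yet serialized)
//     CacheType::TableMap.to_bytes();                // b"TableMap\0"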
impl fmt::Display for CacheType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
CacheType::Raw(s) => write!(f, "{}", s),
|
||||
CacheType::DBMap(_) => todo!(),
|
||||
CacheType::TableMap => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct CacheEntry {
|
||||
data: CacheType,
|
||||
last_used: Instant,
|
||||
}
|
||||
|
||||
impl CacheEntry {
|
||||
fn new(data: CacheType) -> Self {
|
||||
Self {
|
||||
data: data,
|
||||
last_used: Instant::now(),
|
||||
}
|
||||
}
|
||||
|
||||
fn elapsed(&self) -> Duration {
|
||||
self.last_used.elapsed()
|
||||
}
|
||||
|
||||
fn touch(&mut self) {
|
||||
self.last_used = Instant::now();
|
||||
}
|
||||
|
||||
fn update(&mut self, data: CacheType) {
|
||||
self.data = data;
|
||||
self.touch();
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for CacheEntry {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.data)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
cache: Arc<Mutex<HashMap<String, CacheEntry>>>,
|
||||
dir: String,
|
||||
session: Vec<String>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new(dir: &str) -> Result<Self, DBError> {
|
||||
let data_dir = Path::new(dir).join(DATA);
|
||||
if !data_dir.is_dir().await {
|
||||
match create_dir(&data_dir).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("failed to create data directory");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut output = Self {
|
||||
cache: Arc::new(Mutex::new(HashMap::new())),
|
||||
dir: data_dir.to_str().unwrap().to_string(),
|
||||
session: Vec::new(),
|
||||
};
|
||||
let entry_file = Path::new(dir).join(ENTRY);
|
||||
let id: String;
|
||||
if entry_file.is_file().await {
|
||||
let holder = read(entry_file).await.unwrap();
|
||||
id = str::from_utf8(&holder).unwrap().to_string();
|
||||
} else {
|
||||
id = output
|
||||
.add_entry(CacheType::DBMap(Store::new()))
|
||||
.await
|
||||
.unwrap();
|
||||
write(entry_file, id.as_bytes()).await.unwrap();
|
||||
}
|
||||
output.session.push(id);
|
||||
let looper = output.cache.clone();
|
||||
spawn(async move {
|
||||
let hold_time = Duration::from_secs(300);
|
||||
loop {
|
||||
sleep(Duration::from_secs(1)).await;
|
||||
let mut ids: Vec<String> = Vec::new();
|
||||
let mut cache = looper.lock().await;
|
||||
for (id, entry) in cache.iter() {
|
||||
if entry.elapsed() > hold_time {
|
||||
ids.push(id.to_string());
|
||||
}
|
||||
}
|
||||
for id in ids.iter() {
|
||||
cache.remove(id);
|
||||
}
|
||||
}
|
||||
});
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn filename(&self, id: &str) -> String {
|
||||
let filename = Path::new(&self.dir).join(&id);
|
||||
filename.into_os_string().into_string().unwrap()
|
||||
}
|
||||
|
||||
fn new_id(&self) -> String {
|
||||
thread_rng().sample_iter(&Alphanumeric).take(64).collect()
|
||||
}
|
||||
|
||||
async fn add(&self, feature: &str, key: &str, value: &str) -> Self {
|
||||
let mut ids: Vec<String> = Vec::new();
|
||||
for id in self.session.clone().into_iter() {
|
||||
let holder = self.get_entry(&id).await.unwrap();
|
||||
//holder.add(feature, key, value);
|
||||
}
|
||||
let mut output = self.clone();
|
||||
output.session.clear();
|
||||
output.session.push(value.to_string());
|
||||
output
|
||||
}
|
||||
|
||||
async fn list(&self, feature: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn add_entry(&self, entry: CacheType) -> Result<String, DBError> {
|
||||
let mut id: String = "".to_string();
|
||||
let mut dup = true;
|
||||
while dup {
|
||||
id = thread_rng().sample_iter(&Alphanumeric).take(32).collect();
|
||||
dup = Path::new(&self.dir).join(&id).as_path().exists().await;
|
||||
}
|
||||
match write(Path::new(&self.filename(&id)), entry.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data write");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
let mut cache = self.cache.lock().await;
|
||||
let data = CacheEntry::new(entry);
|
||||
cache.insert(id.clone(), data);
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
async fn get_entry(&self, id: &str) -> Result<CacheEntry, DBError> {
|
||||
let mut cache = self.cache.lock().await;
|
||||
match cache.get_mut(id) {
|
||||
Some(entry) => {
|
||||
entry.touch();
|
||||
Ok(entry.clone())
|
||||
}
|
||||
None => match read(Path::new(&self.filename(id))).await {
|
||||
Ok(content) => {
|
||||
let data = CacheEntry::new(CacheType::from_bytes(content).unwrap());
|
||||
cache.insert(id.to_string(), data.clone());
|
||||
Ok(data)
|
||||
}
|
||||
Err(_) => Err(DBError::new("cache entry not found")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_entry(&self, id: &str, entry: CacheType) -> Result<(), DBError> {
|
||||
match self.get_entry(id).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
match write(Path::new(&self.filename(id)), entry.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data write");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
let mut cache = self.cache.lock().await;
|
||||
let data = CacheEntry::new(entry);
|
||||
cache.insert(id.to_string(), data);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_entry(&self, id: &str) -> Result<(), DBError> {
|
||||
let mut cache = self.cache.lock().await;
|
||||
cache.remove(id);
|
||||
match remove_file(Path::new(&self.filename(id))).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data delete");
|
||||
error.add_source(err);
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
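// Illustrative sketch (not part of the diff): the methods above give each cache
// entry a simple lifecycle, while the background task spawned in `new` evicts
// in-memory copies untouched for roughly five minutes and leaves the on-disk
// file in place. Roughly:
//
//     let id = db.add_entry(CacheType::Raw("hello".to_string())).await?; // write file + cache
//     let entry = db.get_entry(&id).await?;                              // cache hit, or reload from disk
//     db.update_entry(&id, CacheType::Raw("hi".to_string())).await?;     // rewrite file + cache
//     db.delete_entry(&id).await?;                                       // drop from cache and disk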
#[cfg(test)]
|
||||
mod setup {
|
||||
use super::*;
|
||||
use async_std::fs::remove_dir_all;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
|
||||
pub struct MTT {
|
||||
pub db: MoreThanText,
|
||||
pub dir: TempDir,
|
||||
}
|
||||
|
||||
impl MTT {
|
||||
pub async fn new() -> Self {
|
||||
let dir = tempdir().unwrap();
|
||||
let db = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
Self { db: db, dir: dir }
|
||||
}
|
||||
|
||||
pub async fn create_io_error(&self) {
|
||||
remove_dir_all(self.dir.path().join(DATA)).await.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod init {
|
||||
use super::*;
|
||||
use std::error::Error;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_data_dir() {
|
||||
let dir = tempdir().unwrap();
|
||||
MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let data_dir = dir.path().join(DATA);
|
||||
assert!(data_dir.is_dir(), "Did not create the data directory.");
|
||||
dir.close().unwrap();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn existing_data_dir() {
|
||||
let dir = tempdir().unwrap();
|
||||
let data_dir = dir.path().join(DATA);
|
||||
create_dir(data_dir).await.unwrap();
|
||||
MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
dir.close().unwrap();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn bad_data_dir() {
|
||||
let msg = "could not create directory";
|
||||
match MoreThanText::new("kljsdgfhslkfrh").await {
|
||||
Ok(_) => assert!(false, "This test should fail to create a data directory"),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "failed to create data directory");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn creates_entry_point() {
|
||||
let dir = tempdir().unwrap();
|
||||
let db = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let entry = dir.path().join(ENTRY);
|
||||
assert!(entry.is_file(), "Did not create entry point file.");
|
||||
let data = read(entry).await.unwrap();
|
||||
let id = str::from_utf8(&data).unwrap();
|
||||
let cache = db.get_entry(&id).await.unwrap();
|
||||
assert_eq!(cache.data.entry_type(), "DBMap");
|
||||
assert_eq!(db.session, [id]);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn use_existing_entry_point() {
|
||||
let dir = tempdir().unwrap();
|
||||
let db1 = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let db2 = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db1.session, db2.session, "Did not read existing entry.");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod data {
|
||||
use super::*;
|
||||
use setup::MTT;
|
||||
|
||||
#[async_std::test]
|
||||
async fn ids_are_random() {
|
||||
let mtt = MTT::new().await;
|
||||
let id1 = mtt.db.new_id();
|
||||
let id2 = mtt.db.new_id();
|
||||
assert_ne!(id1, id2, "Ids should be random");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_database() {
|
||||
let mtt = MTT::new().await;
|
||||
let name = "fred";
|
||||
let id = "*gsdfg";
|
||||
let output = mtt.db.add("database", name, id).await;
|
||||
assert_eq!(output.session, [id], "should update session info.");
|
||||
/*
|
||||
assert_eq!(
|
||||
mtt.db.list(["database"].to_vec()).await.unwrap(),
|
||||
[name],
|
||||
"Should list the databases."
|
||||
);
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cache_test {
|
||||
use super::*;
|
||||
use async_std::fs::read;
|
||||
use setup::MTT;
|
||||
use std::error::Error;
|
||||
|
||||
#[async_std::test]
|
||||
async fn entry_ids_are_random() {
|
||||
let mtt = MTT::new().await;
|
||||
let data1 = CacheType::Raw("one".to_string());
|
||||
let data2 = CacheType::Raw("two".to_string());
|
||||
let id1 = mtt.db.add_entry(data1).await.unwrap();
|
||||
let id2 = mtt.db.add_entry(data2).await.unwrap();
|
||||
assert_ne!(id1, id2, "Ids should be unique.")
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn store_cache() {
|
||||
let mtt = MTT::new().await;
|
||||
let data = "something";
|
||||
let expected = CacheType::Raw(data.to_string());
|
||||
let id = mtt.db.add_entry(expected.clone()).await.unwrap();
|
||||
let output = mtt.db.get_entry(&id).await.unwrap();
|
||||
assert_eq!(output.to_string(), data);
|
||||
let dfile = mtt.dir.path().join(DATA).join(&id);
|
||||
assert!(dfile.is_file(), "Cache file should exist.");
|
||||
let content = read(dfile).await.unwrap();
|
||||
assert_eq!(content, expected.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_entry_updates_time() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "something";
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("old".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(200),
|
||||
};
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
cache.insert(id.to_string(), holder);
|
||||
drop(cache);
|
||||
mtt.db.get_entry(&id).await.unwrap();
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get(id).unwrap();
|
||||
let held = entry.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_from_disk() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "someid";
|
||||
let data = CacheType::Raw("stored".to_string());
|
||||
write(mtt.dir.path().join(DATA).join(id), data.to_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
let output = mtt.db.get_entry(id).await.unwrap();
|
||||
assert_eq!(output.to_string(), data.to_string());
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let stored = cache.get(id);
|
||||
assert!(stored.is_some(), "Did not store entry in the cache.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn store_bad_file() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not write to file";
|
||||
mtt.create_io_error().await;
|
||||
match mtt.db.add_entry(CacheType::Raw("fail".to_string())).await {
|
||||
Ok(_) => assert!(false, "This test should fail."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data write");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_bad_id() {
|
||||
let mtt = MTT::new().await;
|
||||
match mtt.db.get_entry(&"Not Valid").await {
|
||||
Ok(_) => assert!(false, "Should have raised an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), "cache entry not found"),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_cache_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "updateable";
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("elder".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(500),
|
||||
};
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
cache.insert(id.to_string(), holder);
|
||||
drop(cache);
|
||||
let expected = "different";
|
||||
let expect = CacheType::Raw(expected.to_string());
|
||||
mtt.db.update_entry(id, expect.clone()).await.unwrap();
|
||||
let output = mtt.db.get_entry(id).await.unwrap();
|
||||
assert_eq!(output.to_string(), expected);
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get(id).unwrap();
|
||||
let held = entry.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
drop(cache);
|
||||
let content = read(mtt.dir.path().join(DATA).join(id)).await.unwrap();
|
||||
assert_eq!(content, expect.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_bad_id() {
|
||||
let mtt = MTT::new().await;
|
||||
match mtt
|
||||
.db
|
||||
.update_entry("wilma", CacheType::Raw("wrong".to_string()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => assert!(false, "Bad id should raise an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), "cache entry not found"),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_bad_file() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not write to file";
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("fleeting".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
mtt.create_io_error().await;
|
||||
match mtt
|
||||
.db
|
||||
.update_entry(&id, CacheType::Raw("failure".to_string()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => assert!(false, "This should produce a write failure."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data write");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("delete".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
mtt.db.delete_entry(&id).await.unwrap();
|
||||
match mtt.db.get_entry(&id).await {
|
||||
Ok(_) => assert!(false, "Entry should be removed from cache."),
|
||||
Err(_) => (),
|
||||
};
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_missing_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not remove file";
|
||||
match mtt.db.delete_entry("missing").await {
|
||||
Ok(_) => assert!(false, "This should produce a delete failure."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data delete");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_older() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("removed".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get_mut(&id).unwrap();
|
||||
entry.last_used = Instant::now() - Duration::from_secs(1000);
|
||||
drop(cache);
|
||||
sleep(Duration::from_secs(2)).await;
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let output = cache.get(&id);
|
||||
assert!(output.is_none(), "The entry should not be in memory.");
|
||||
drop(cache);
|
||||
let filename = mtt.db.filename(&id);
|
||||
let fpath = Path::new(&filename);
|
||||
assert!(
|
||||
fpath.is_file().await,
|
||||
"The stored version should still exist."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn keep_newer() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("keep".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
sleep(Duration::from_secs(2)).await;
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let output = cache.get(&id);
|
||||
assert!(output.is_some(), "The entry should be in memory.");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cache_entry {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn init() {
|
||||
let text = "new entry";
|
||||
let holder = CacheEntry::new(CacheType::Raw(text.to_string()));
|
||||
assert_eq!(holder.to_string(), text);
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn older() {
|
||||
let secs = 800;
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("older".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(secs),
|
||||
};
|
||||
let held = holder.elapsed() - Duration::from_secs(secs);
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"{:?} should be close to {}s",
|
||||
holder.elapsed(),
|
||||
secs
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accessed() {
|
||||
let mut holder = CacheEntry {
|
||||
data: CacheType::Raw("older".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(700),
|
||||
};
|
||||
holder.touch();
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn updated() {
|
||||
let text = "new data";
|
||||
let mut holder = CacheEntry {
|
||||
data: CacheType::Raw("old data".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(900),
|
||||
};
|
||||
holder.update(CacheType::Raw(text.to_string()));
|
||||
assert_eq!(holder.to_string(), text);
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod enum_ctype {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn bad_file_header() {
|
||||
let mut data: Vec<u8> = Vec::new();
|
||||
let mut ctype = "jlksdfg".as_bytes().to_vec();
|
||||
let mut cdata = "ghjk".as_bytes().to_vec();
|
||||
data.append(&mut ctype);
|
||||
data.push(0);
|
||||
data.append(&mut cdata);
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "data corruption"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn incomplete_file() {
|
||||
let mut data: Vec<u8> = Vec::new();
|
||||
let mut ctype = "uoisfde".as_bytes().to_vec();
|
||||
data.append(&mut ctype);
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "incomplete file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_file() {
|
||||
let data: Vec<u8> = Vec::new();
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "empty file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_raw_type() {
|
||||
let holder = CacheType::Raw("nothing important".to_string());
|
||||
assert_eq!(holder.entry_type(), "Raw");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_raw_bytes() {
|
||||
let data = "addams";
|
||||
let holder = CacheType::Raw(data.to_string());
|
||||
let mut expected = holder.entry_type().into_bytes();
|
||||
expected.push(0);
|
||||
expected.append(&mut data.as_bytes().to_vec());
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_raw_bytes() {
|
||||
let holder = CacheType::Raw("stored item".to_string());
|
||||
let data = holder.to_bytes();
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.to_string(), holder.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_dbmap_type() {
|
||||
let holder = CacheType::DBMap(Store::new());
|
||||
assert_eq!(holder.entry_type(), "DBMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_new_databases_bytes() {
|
||||
let holder = CacheType::DBMap(Store::new());
|
||||
let mut expected = "DBMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_new_databases_bytes() {
|
||||
let mut data = "DBMap".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.entry_type(), "DBMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_tablemap_type() {
|
||||
let holder = CacheType::TableMap;
|
||||
assert_eq!(holder.entry_type(), "TableMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_new_database_bytes() {
|
||||
let holder = CacheType::TableMap;
|
||||
let mut expected = "TableMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_new_database_bytes() {
|
||||
let mut data = "TableMap".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.entry_type(), "TableMap");
|
||||
}
|
||||
}
|
@ -1,821 +1,186 @@
|
||||
mod cache;
|
||||
pub mod error;
|
||||
mod database;
|
||||
mod error;
|
||||
mod store;
|
||||
mod table;
|
||||
|
||||
use async_std::{
|
||||
fs::{create_dir, read, remove_file, write},
|
||||
path::Path,
|
||||
sync::{Arc, Mutex},
|
||||
task::{sleep, spawn},
|
||||
};
|
||||
use error::DBError;
|
||||
use rand::{distributions::Alphanumeric, thread_rng, Rng};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
fmt, slice, str,
|
||||
time::{Duration, Instant},
|
||||
channel::{unbounded, Sender},
|
||||
path::PathBuf,
|
||||
task::spawn,
|
||||
};
|
||||
use cache::Cache;
|
||||
use database::Database;
|
||||
use error::{ErrorCode, MTTError};
|
||||
use store::Store;
|
||||
use table::Table;
|
||||
|
||||
const DATA: &str = "data";
|
||||
const ENTRY: &str = "databases";
|
||||
const ENTRY: &str = "EntryPoint";
|
||||
|
||||
trait FileData<F> {
|
||||
fn to_bytes(&self) -> Vec<u8>;
|
||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<F, DBError>;
|
||||
#[derive(Debug)]
|
||||
pub struct ToCacheMsg<D> {
|
||||
data: D,
|
||||
result: Sender<FromCache>,
|
||||
}
|
||||
|
||||
trait SessionData {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError>;
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError>;
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError>;
|
||||
#[derive(Debug)]
|
||||
pub enum ToCache {
|
||||
Get(ToCacheMsg<String>),
|
||||
Commit(ToCacheMsg<Store>),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum CacheType {
|
||||
Raw(String),
|
||||
DBMap(Store),
|
||||
TableMap,
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum FromCache {
|
||||
Ok,
|
||||
Str(Store),
|
||||
DB(Database),
|
||||
Error(MTTError),
|
||||
}
|
||||
|
||||
impl CacheType {
|
||||
pub fn entry_type(&self) -> String {
|
||||
match self {
|
||||
CacheType::Raw(_) => "Raw".to_string(),
|
||||
CacheType::DBMap(_) => "DBMap".to_string(),
|
||||
CacheType::TableMap => "TableMap".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_bytes(&self) -> Vec<u8> {
|
||||
let mut output = self.entry_type().into_bytes();
|
||||
output.push(0);
|
||||
match self {
|
||||
CacheType::Raw(s) => output.append(&mut s.as_bytes().to_vec()),
|
||||
CacheType::DBMap(_) => (),
|
||||
CacheType::TableMap => (),
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
pub fn from_bytes(data: Vec<u8>) -> Result<CacheType, DBError> {
|
||||
let mut data_iter = data.iter();
|
||||
let mut letter: u8;
|
||||
match data_iter.next() {
|
||||
Some(item) => letter = *item,
|
||||
None => return Err(DBError::new("empty file")),
|
||||
}
|
||||
let mut header: Vec<u8> = Vec::new();
|
||||
while letter != 0 {
|
||||
header.push(letter.clone());
|
||||
match data_iter.next() {
|
||||
Some(item) => letter = *item,
|
||||
None => return Err(DBError::new("incomplete file")),
|
||||
}
|
||||
}
|
||||
let header = str::from_utf8(&header).unwrap().to_string();
|
||||
match header.as_str() {
|
||||
"Raw" => {
|
||||
let mut output: Vec<u8> = Vec::new();
|
||||
for letter in data_iter {
|
||||
output.push(letter.clone());
|
||||
}
|
||||
Ok(CacheType::Raw(str::from_utf8(&output).unwrap().to_string()))
|
||||
}
|
||||
"DBMap" => Ok(CacheType::DBMap(Store::new())),
|
||||
"TableMap" => Ok(CacheType::TableMap),
|
||||
_ => Err(DBError::new("data corruption")),
|
||||
}
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Data<D> {
|
||||
id: Option<String>,
|
||||
data: Option<D>,
|
||||
}
|
||||
|
||||
impl fmt::Display for CacheType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
CacheType::Raw(s) => write!(f, "{}", s),
|
||||
CacheType::DBMap(_) => todo!(),
|
||||
CacheType::TableMap => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct CacheEntry {
|
||||
data: CacheType,
|
||||
last_used: Instant,
|
||||
}
|
||||
|
||||
impl CacheEntry {
|
||||
fn new(data: CacheType) -> Self {
|
||||
impl<D> Data<D> {
|
||||
fn from_id<S>(id: S) -> Self
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
Self {
|
||||
data: data,
|
||||
last_used: Instant::now(),
|
||||
id: Some(id.into()),
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn elapsed(&self) -> Duration {
|
||||
self.last_used.elapsed()
|
||||
}
|
||||
|
||||
fn touch(&mut self) {
|
||||
self.last_used = Instant::now();
|
||||
}
|
||||
|
||||
fn update(&mut self, data: CacheType) {
|
||||
self.data = data;
|
||||
self.touch();
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for CacheEntry {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.data)
|
||||
fn from_data(data: D) -> Self {
|
||||
Self {
|
||||
id: None,
|
||||
data: Some(data),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
cache: Arc<Mutex<HashMap<String, CacheEntry>>>,
|
||||
dir: String,
|
||||
session: Vec<String>,
|
||||
to_cache: Sender<ToCache>,
|
||||
entry: Data<Store>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new(dir: &str) -> Result<Self, DBError> {
|
||||
let data_dir = Path::new(dir).join(DATA);
|
||||
if !data_dir.is_dir().await {
|
||||
match create_dir(&data_dir).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("failed to create data directory");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
fn new(to_cache: Sender<ToCache>) -> Self {
|
||||
Self {
|
||||
to_cache: to_cache,
|
||||
entry: Data::from_id(ENTRY),
|
||||
}
|
||||
let mut output = Self {
|
||||
cache: Arc::new(Mutex::new(HashMap::new())),
|
||||
dir: data_dir.to_str().unwrap().to_string(),
|
||||
session: Vec::new(),
|
||||
}
|
||||
|
||||
async fn session(&self) -> Result<Store, MTTError> {
|
||||
let (s, r) = unbounded();
|
||||
let msg = ToCacheMsg {
|
||||
data: ENTRY.to_string(),
|
||||
result: s,
|
||||
};
|
||||
let entry_file = Path::new(dir).join(ENTRY);
|
||||
let id: String;
|
||||
if entry_file.is_file().await {
|
||||
let holder = read(entry_file).await.unwrap();
|
||||
id = str::from_utf8(&holder).unwrap().to_string();
|
||||
} else {
|
||||
id = output
|
||||
.add_entry(CacheType::DBMap(Store::new()))
|
||||
.await
|
||||
.unwrap();
|
||||
write(entry_file, id.as_bytes()).await.unwrap();
|
||||
self.to_cache.send(ToCache::Get(msg)).await.unwrap();
|
||||
match r.recv().await.unwrap() {
|
||||
FromCache::Str(store) => Ok(store),
|
||||
FromCache::Error(err) => Err(err),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
output.session.push(id);
|
||||
let looper = output.cache.clone();
|
||||
spawn(async move {
|
||||
let hold_time = Duration::from_secs(300);
|
||||
loop {
|
||||
sleep(Duration::from_secs(1)).await;
|
||||
let mut ids: Vec<String> = Vec::new();
|
||||
let mut cache = looper.lock().await;
|
||||
for (id, entry) in cache.iter() {
|
||||
if entry.elapsed() > hold_time {
|
||||
ids.push(id.to_string());
|
||||
}
|
||||
}
|
||||
for id in ids.iter() {
|
||||
cache.remove(id);
|
||||
}
|
||||
}
|
||||
});
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn filename(&self, id: &str) -> String {
|
||||
let filename = Path::new(&self.dir).join(&id);
|
||||
filename.into_os_string().into_string().unwrap()
|
||||
}
|
||||
|
||||
fn new_id(&self) -> String {
|
||||
thread_rng().sample_iter(&Alphanumeric).take(64).collect()
|
||||
}
|
||||
|
||||
async fn add(&self, feature: &str, key: &str, value: &str) -> Self {
|
||||
let mut ids: Vec<String> = Vec::new();
|
||||
for id in self.session.clone().into_iter() {
|
||||
let holder = self.get_entry(&id).await.unwrap();
|
||||
//holder.add(feature, key, value);
|
||||
}
|
||||
let mut output = self.clone();
|
||||
output.session.clear();
|
||||
output.session.push(value.to_string());
|
||||
output
|
||||
}
|
||||
|
||||
async fn list(&self, feature: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn add_entry(&self, entry: CacheType) -> Result<String, DBError> {
|
||||
let mut id: String = "".to_string();
|
||||
let mut dup = true;
|
||||
while dup {
|
||||
id = thread_rng().sample_iter(&Alphanumeric).take(32).collect();
|
||||
dup = Path::new(&self.dir).join(&id).as_path().exists().await;
|
||||
}
|
||||
match write(Path::new(&self.filename(&id)), entry.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data write");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
async fn commit(&self, store: Store) -> Result<(), MTTError> {
|
||||
let (s, r) = unbounded();
|
||||
let msg = ToCacheMsg {
|
||||
data: store,
|
||||
result: s,
|
||||
};
|
||||
let mut cache = self.cache.lock().await;
|
||||
let data = CacheEntry::new(entry);
|
||||
cache.insert(id.clone(), data);
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
async fn get_entry(&self, id: &str) -> Result<CacheEntry, DBError> {
|
||||
let mut cache = self.cache.lock().await;
|
||||
match cache.get_mut(id) {
|
||||
Some(entry) => {
|
||||
entry.touch();
|
||||
Ok(entry.clone())
|
||||
}
|
||||
None => match read(Path::new(&self.filename(id))).await {
|
||||
Ok(content) => {
|
||||
let data = CacheEntry::new(CacheType::from_bytes(content).unwrap());
|
||||
cache.insert(id.to_string(), data.clone());
|
||||
Ok(data)
|
||||
}
|
||||
Err(_) => Err(DBError::new("cache entry not found")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_entry(&self, id: &str, entry: CacheType) -> Result<(), DBError> {
|
||||
match self.get_entry(id).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
match write(Path::new(&self.filename(id)), entry.to_bytes()).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data write");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
let mut cache = self.cache.lock().await;
|
||||
let data = CacheEntry::new(entry);
|
||||
cache.insert(id.to_string(), data);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_entry(&self, id: &str) -> Result<(), DBError> {
|
||||
let mut cache = self.cache.lock().await;
|
||||
cache.remove(id);
|
||||
match remove_file(Path::new(&self.filename(id))).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("data delete");
|
||||
error.add_source(err);
|
||||
Err(error)
|
||||
}
|
||||
self.to_cache.send(ToCache::Commit(msg)).await.unwrap();
|
||||
match r.recv().await.unwrap() {
|
||||
FromCache::Ok => Ok(()),
|
||||
FromCache::Error(err) => Err(err),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod setup {
|
||||
mod mtt {
|
||||
use super::*;
|
||||
use async_std::fs::remove_dir_all;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
|
||||
pub struct MTT {
|
||||
pub db: MoreThanText,
|
||||
pub dir: TempDir,
|
||||
}
|
||||
|
||||
impl MTT {
|
||||
pub async fn new() -> Self {
|
||||
let dir = tempdir().unwrap();
|
||||
let db = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
Self { db: db, dir: dir }
|
||||
}
|
||||
|
||||
pub async fn create_io_error(&self) {
|
||||
remove_dir_all(self.dir.path().join(DATA)).await.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod init {
|
||||
use super::*;
|
||||
use std::error::Error;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_data_dir() {
|
||||
async fn create_new() {
|
||||
let dir = tempdir().unwrap();
|
||||
MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let data_dir = dir.path().join(DATA);
|
||||
assert!(data_dir.is_dir(), "Did not create the data directory.");
|
||||
dir.close().unwrap();
|
||||
let mtt = start_db(dir.path()).await.unwrap();
|
||||
assert_eq!(mtt.entry.id, Some(ENTRY.to_string()));
|
||||
assert!(mtt.entry.data.is_none());
|
||||
let store = mtt.session().await.unwrap();
|
||||
let expected: Vec<String> = Vec::new();
|
||||
assert_eq!(store.list(), expected);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn existing_data_dir() {
|
||||
async fn commit_db() {
|
||||
let dir = tempdir().unwrap();
|
||||
let data_dir = dir.path().join(DATA);
|
||||
create_dir(data_dir).await.unwrap();
|
||||
MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
dir.close().unwrap();
|
||||
let db = "fred";
|
||||
let mtt = start_db(dir.path()).await.unwrap();
|
||||
let mut store = mtt.session().await.unwrap();
|
||||
store.add(db).unwrap();
|
||||
mtt.commit(store).await.unwrap();
|
||||
let store2 = mtt.session().await.unwrap();
|
||||
assert_eq!(store2.list(), [db]);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn bad_data_dir() {
|
||||
let msg = "could not create directory";
|
||||
match MoreThanText::new("kljsdgfhslkfrh").await {
|
||||
Ok(_) => assert!(false, "This test should fail to create a data directory"),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "failed to create data directory");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn creates_entry_point() {
|
||||
async fn commit_from_multiple_sources() {
|
||||
let dir = tempdir().unwrap();
|
||||
let db = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let entry = dir.path().join(ENTRY);
|
||||
assert!(entry.is_file(), "Did not create entry point file.");
|
||||
let data = read(entry).await.unwrap();
|
||||
let id = str::from_utf8(&data).unwrap();
|
||||
let cache = db.get_entry(&id).await.unwrap();
|
||||
assert_eq!(cache.data.entry_type(), "DBMap");
|
||||
assert_eq!(db.session, [id]);
|
||||
let mtt1 = start_db(dir.path()).await.unwrap();
|
||||
let mtt2 = mtt1.clone();
|
||||
let db1 = "first";
|
||||
let db2 = "second";
|
||||
let mut store1 = mtt1.session().await.unwrap();
|
||||
let mut store2 = mtt2.session().await.unwrap();
|
||||
store1.add(db1).unwrap();
|
||||
store2.add(db2).unwrap();
|
||||
mtt1.commit(store1).await.unwrap();
|
||||
mtt2.commit(store2).await.unwrap();
|
||||
let output = mtt1.session().await.unwrap();
|
||||
assert_eq!(output.list(), [db1, db2]);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn use_existing_entry_point() {
|
||||
async fn fail_on_duplicates() {
|
||||
let dir = tempdir().unwrap();
|
||||
let db1 = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
let db2 = MoreThanText::new(dir.path().to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db1.session, db2.session, "Did not read existing entry.");
|
||||
let mtt1 = start_db(dir.path()).await.unwrap();
|
||||
let mtt2 = mtt1.clone();
|
||||
let name = "unique_only";
|
||||
let mut store1 = mtt1.session().await.unwrap();
|
||||
let mut store2 = mtt2.session().await.unwrap();
|
||||
store1.add(name).unwrap();
|
||||
store2.add(name).unwrap();
|
||||
mtt1.commit(store1).await.unwrap();
|
||||
let output = mtt2.commit(store2).await;
|
||||
match output {
|
||||
Ok(_) => assert!(false, "Should have returned an error"),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::DuplicateDatabase(_) => (),
|
||||
_ => assert!(false, "{:?} is not ErrorCode::DuplicateDatabase", err.code),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod data {
|
||||
use super::*;
|
||||
use setup::MTT;
|
||||
|
||||
#[async_std::test]
|
||||
async fn ids_are_random() {
|
||||
let mtt = MTT::new().await;
|
||||
let id1 = mtt.db.new_id();
|
||||
let id2 = mtt.db.new_id();
|
||||
assert_ne!(id1, id2, "Ids should be random");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_database() {
|
||||
let mtt = MTT::new().await;
|
||||
let name = "fred";
|
||||
let id = "*gsdfg";
|
||||
let output = mtt.db.add("database", name, id).await;
|
||||
assert_eq!(output.session, [id], "should update session info.");
|
||||
/*
|
||||
assert_eq!(
|
||||
mtt.db.list(["database"].to_vec()).await.unwrap(),
|
||||
[name],
|
||||
"Should list the databases."
|
||||
);
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cache_test {
|
||||
use super::*;
|
||||
use async_std::fs::read;
|
||||
use setup::MTT;
|
||||
use std::error::Error;
|
||||
|
||||
#[async_std::test]
|
||||
async fn entry_ids_are_random() {
|
||||
let mtt = MTT::new().await;
|
||||
let data1 = CacheType::Raw("one".to_string());
|
||||
let data2 = CacheType::Raw("two".to_string());
|
||||
let id1 = mtt.db.add_entry(data1).await.unwrap();
|
||||
let id2 = mtt.db.add_entry(data2).await.unwrap();
|
||||
assert_ne!(id1, id2, "Ids should be unique.")
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn store_cache() {
|
||||
let mtt = MTT::new().await;
|
||||
let data = "something";
|
||||
let expected = CacheType::Raw(data.to_string());
|
||||
let id = mtt.db.add_entry(expected.clone()).await.unwrap();
|
||||
let output = mtt.db.get_entry(&id).await.unwrap();
|
||||
assert_eq!(output.to_string(), data);
|
||||
let dfile = mtt.dir.path().join(DATA).join(&id);
|
||||
assert!(dfile.is_file(), "Cache file should exist.");
|
||||
let content = read(dfile).await.unwrap();
|
||||
assert_eq!(content, expected.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_entry_updates_time() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "something";
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("old".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(200),
|
||||
};
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
cache.insert(id.to_string(), holder);
|
||||
drop(cache);
|
||||
mtt.db.get_entry(&id).await.unwrap();
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get(id).unwrap();
|
||||
let held = entry.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_from_disk() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "someid";
|
||||
let data = CacheType::Raw("stored".to_string());
|
||||
write(mtt.dir.path().join(DATA).join(id), data.to_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
let output = mtt.db.get_entry(id).await.unwrap();
|
||||
assert_eq!(output.to_string(), data.to_string());
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let stored = cache.get(id);
|
||||
assert!(stored.is_some(), "Did not store entry in the cache.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn store_bad_file() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not write to file";
|
||||
mtt.create_io_error().await;
|
||||
match mtt.db.add_entry(CacheType::Raw("fail".to_string())).await {
|
||||
Ok(_) => assert!(false, "This test should fail."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data write");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn retrieve_bad_id() {
|
||||
let mtt = MTT::new().await;
|
||||
match mtt.db.get_entry(&"Not Valid").await {
|
||||
Ok(_) => assert!(false, "Should have raised an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), "cache entry not found"),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_cache_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = "updateable";
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("elder".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(500),
|
||||
};
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
cache.insert(id.to_string(), holder);
|
||||
drop(cache);
|
||||
let expected = "different";
|
||||
let expect = CacheType::Raw(expected.to_string());
|
||||
mtt.db.update_entry(id, expect.clone()).await.unwrap();
|
||||
let output = mtt.db.get_entry(id).await.unwrap();
|
||||
assert_eq!(output.to_string(), expected);
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get(id).unwrap();
|
||||
let held = entry.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
drop(cache);
|
||||
let content = read(mtt.dir.path().join(DATA).join(id)).await.unwrap();
|
||||
assert_eq!(content, expect.to_bytes());
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_bad_id() {
|
||||
let mtt = MTT::new().await;
|
||||
match mtt
|
||||
.db
|
||||
.update_entry("wilma", CacheType::Raw("wrong".to_string()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => assert!(false, "Bad id should raise an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), "cache entry not found"),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_bad_file() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not write to file";
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("fleeting".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
mtt.create_io_error().await;
|
||||
match mtt
|
||||
.db
|
||||
.update_entry(&id, CacheType::Raw("failure".to_string()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => assert!(false, "This should produce a write failure."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data write");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("delete".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
mtt.db.delete_entry(&id).await.unwrap();
|
||||
match mtt.db.get_entry(&id).await {
|
||||
Ok(_) => assert!(false, "Entry should be removed from cache."),
|
||||
Err(_) => (),
|
||||
};
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_missing_entry() {
|
||||
let mtt = MTT::new().await;
|
||||
let msg = "could not remove file";
|
||||
match mtt.db.delete_entry("missing").await {
|
||||
Ok(_) => assert!(false, "This should produce a write failure."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "data delete");
|
||||
assert!(err.source().is_some(), "Must include the source error.");
|
||||
let err_msg = err.source().unwrap().to_string();
|
||||
assert!(err_msg.contains(msg), "'{}' not in '{}'", msg, err_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn remove_older() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("removed".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
let mut cache = mtt.db.cache.lock().await;
|
||||
let entry = cache.get_mut(&id).unwrap();
|
||||
entry.last_used = Instant::now() - Duration::from_secs(1000);
|
||||
drop(cache);
|
||||
sleep(Duration::from_secs(2)).await;
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let output = cache.get(&id);
|
||||
assert!(output.is_none(), "The entry shoould not be in memory.");
|
||||
drop(cache);
|
||||
let filename = mtt.db.filename(&id);
|
||||
let fpath = Path::new(&filename);
|
||||
assert!(
|
||||
fpath.is_file().await,
|
||||
"The stored version should still exist."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn keep_newer() {
|
||||
let mtt = MTT::new().await;
|
||||
let id = mtt
|
||||
.db
|
||||
.add_entry(CacheType::Raw("keep".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
sleep(Duration::from_secs(2)).await;
|
||||
let cache = mtt.db.cache.lock().await;
|
||||
let output = cache.get(&id);
|
||||
assert!(output.is_some(), "The entry shoould be in memory.");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cache_entry {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn init() {
|
||||
let text = "new entry";
|
||||
let holder = CacheEntry::new(CacheType::Raw(text.to_string()));
|
||||
assert_eq!(holder.to_string(), text);
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn older() {
|
||||
let secs = 800;
|
||||
let holder = CacheEntry {
|
||||
data: CacheType::Raw("older".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(secs),
|
||||
};
|
||||
let held = holder.elapsed() - Duration::from_secs(secs);
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"{:?} should be close to {}s",
|
||||
holder.elapsed(),
|
||||
secs
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accessed() {
|
||||
let mut holder = CacheEntry {
|
||||
data: CacheType::Raw("older".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(700),
|
||||
};
|
||||
holder.touch();
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn updated() {
|
||||
let text = "new data";
|
||||
let mut holder = CacheEntry {
|
||||
data: CacheType::Raw("old data".to_string()),
|
||||
last_used: Instant::now() - Duration::from_secs(900),
|
||||
};
|
||||
holder.update(CacheType::Raw(text.to_string()));
|
||||
assert_eq!(holder.to_string(), text);
|
||||
let held = holder.elapsed();
|
||||
assert!(
|
||||
Duration::from_secs(1) > held,
|
||||
"Duration was {:?}, should have been close to 0s.",
|
||||
held
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod enum_ctype {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn bad_file_header() {
|
||||
let mut data: Vec<u8> = Vec::new();
|
||||
let mut ctype = "jlksdfg".as_bytes().to_vec();
|
||||
let mut cdata = "ghjk".as_bytes().to_vec();
|
||||
data.append(&mut ctype);
|
||||
data.push(0);
|
||||
data.append(&mut cdata);
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "data corruption"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn incomplete_file() {
|
||||
let mut data: Vec<u8> = Vec::new();
|
||||
let mut ctype = "uoisfde".as_bytes().to_vec();
|
||||
data.append(&mut ctype);
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "incomplete file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_file() {
|
||||
let data: Vec<u8> = Vec::new();
|
||||
match CacheType::from_bytes(data) {
|
||||
Ok(_) => assert!(false, "This should fail."),
|
||||
Err(err) => assert_eq!(err.to_string(), "empty file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_raw_type() {
|
||||
let holder = CacheType::Raw("nothing important".to_string());
|
||||
assert_eq!(holder.entry_type(), "Raw");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_raw_bytes() {
|
||||
let data = "addams";
|
||||
let holder = CacheType::Raw(data.to_string());
|
||||
let mut expected = holder.entry_type().into_bytes();
|
||||
expected.push(0);
|
||||
expected.append(&mut data.as_bytes().to_vec());
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_raw_bytes() {
|
||||
let holder = CacheType::Raw("stored item".to_string());
|
||||
let data = holder.to_bytes();
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.to_string(), holder.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_dbmap_type() {
|
||||
let holder = CacheType::DBMap(Store::new());
|
||||
assert_eq!(holder.entry_type(), "DBMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_new_databases_bytes() {
|
||||
let holder = CacheType::DBMap(Store::new());
|
||||
let mut expected = "DBMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_new_databases_bytes() {
|
||||
let mut data = "DBMap".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.entry_type(), "DBMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_tablemap_type() {
|
||||
let holder = CacheType::TableMap;
|
||||
assert_eq!(holder.entry_type(), "TableMap");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_new_database_bytes() {
|
||||
let holder = CacheType::TableMap;
|
||||
let mut expected = "TableMap".as_bytes().to_vec();
|
||||
expected.push(0);
|
||||
let output = holder.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_new_database_bytes() {
|
||||
let mut data = "TableMap".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
let output = CacheType::from_bytes(data).unwrap();
|
||||
assert_eq!(output.entry_type(), "TableMap");
|
||||
}
|
||||
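/// Spawn the cache task on a background async-std task and return a
/// `MoreThanText` handle that talks to it over the channel sender.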
pub async fn start_db<P>(dir: P) -> Result<MoreThanText, MTTError>
|
||||
where
|
||||
P: Into<PathBuf>,
|
||||
{
|
||||
let path = dir.into();
|
||||
let (s, r) = unbounded();
|
||||
spawn(async move {
|
||||
let mut cache = Cache::new(path).await;
|
||||
cache.listen(r).await;
|
||||
});
|
||||
Ok(MoreThanText::new(s))
|
||||
}
|
||||
|
6
src/morethantext/mttsql.pest
Normal file
@ -0,0 +1,6 @@
|
||||
char = _{ ASCII_ALPHANUMERIC | "_" }
|
||||
whitespace = _{" " | "\t" | "\r" | "\n"}
|
||||
|
||||
name = {char+}
|
||||
command = {"create database" ~ whitespace+ ~ name ~ ";"}
|
||||
script = {command+}
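// Example script accepted by this grammar (illustrative):
//   create database customers;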
|
398
src/morethantext/old-mod.rs
Normal file
@ -0,0 +1,398 @@
|
||||
/*
|
||||
use async_std::sync::{Arc, RwLock};
|
||||
use std::{collections::HashMap, error::Error, fmt, str::FromStr};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DBError {
|
||||
detail: String,
|
||||
source: Option<Box<DBError>>,
|
||||
}
|
||||
|
||||
impl DBError {
|
||||
fn new(detail: String) -> Self {
|
||||
Self {
|
||||
detail: detail.to_string(),
|
||||
source: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for DBError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", &self.detail)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for DBError {
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
match &self.source {
|
||||
Some(err) => Some(err),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum FieldType {
|
||||
Table,
|
||||
}
|
||||
|
||||
impl fmt::Display for FieldType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
FieldType::Table => write!(f, "table"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for FieldType {
|
||||
type Err = DBError;
|
||||
|
||||
fn from_str(input: &str) -> Result<FieldType, Self::Err> {
|
||||
match input {
|
||||
"table" => Ok(FieldType::Table),
|
||||
_ => Err(DBError::new(format!("field type {} does not exist", input))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Table {
|
||||
fields: Arc<RwLock<HashMap<String, FieldType>>>,
|
||||
}
|
||||
|
||||
impl Table {
|
||||
pub async fn new() -> Self {
|
||||
Self {
|
||||
fields: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update_field(&self, name: &str, ftype: &str) -> Result<(), Box<dyn Error>> {
|
||||
let ftype = match FieldType::from_str(ftype) {
|
||||
Ok(field) => field,
|
||||
Err(err) => {
|
||||
let mut error = DBError::new(format!("failed to add field {}", name));
|
||||
error.source = Some(Box::new(err));
|
||||
return Err(Box::new(error));
|
||||
}
|
||||
};
|
||||
let mut fmap = self.fields.write().await;
|
||||
match fmap.get(name) {
|
||||
Some(_) => Err(Box::new(DBError::new(format!(
|
||||
"field {} already exists",
|
||||
name
|
||||
)))),
|
||||
None => {
|
||||
fmap.insert(name.to_string(), ftype);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn fields(&self) -> HashMap<String, FieldType> {
|
||||
let fmap = self.fields.read().await;
|
||||
fmap.clone()
|
||||
}
|
||||
}
|
||||
*/
|
||||
use async_std::sync::{Arc, RwLock};
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub mod error;
|
||||
mod fieldtype;
|
||||
|
||||
use error::MTTError;
|
||||
use fieldtype::FieldType;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText;
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
pub async fn add_table(&self, name: &str) -> Table {
|
||||
Table::new()
|
||||
}
|
||||
|
||||
pub async fn get_table(&self, name: &str) -> Table {
|
||||
Table::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq)]
|
||||
struct FieldDef;
|
||||
|
||||
pub struct Table {
|
||||
fields: Arc<RwLock<HashMap<String, FieldDef>>>,
|
||||
}
|
||||
|
||||
impl Table {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
fields: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
async fn add_field(&self, name: &str) {
|
||||
let mut field_defs = self.fields.write().await;
|
||||
field_defs.insert(name.to_string(), FieldDef {});
|
||||
}
|
||||
|
||||
async fn get_field(&self, name: &str) -> Option<FieldDef> {
|
||||
let field_defs = self.fields.read().await;
|
||||
match field_defs.get(name) {
|
||||
Some(def) => Some(def.clone()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
async fn new_record(&self) -> Record {
|
||||
Record::new()
|
||||
}
|
||||
}
|
||||
|
||||
struct Record {
|
||||
data: Arc<RwLock<HashMap<String, FieldType>>>,
|
||||
}
|
||||
|
||||
impl Record {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
data: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
async fn update_field(&self, name: String, data: FieldType) {
|
||||
let mut map = self.data.write().await;
|
||||
map.insert(name, data);
|
||||
}
|
||||
|
||||
async fn get_field(&self, name: &str) -> Option<FieldType> {
|
||||
let map = self.data.read().await;
|
||||
match map.get(name) {
|
||||
Some(field) => Some(field.clone()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod databases {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_database() {
|
||||
MoreThanText::new().await;
|
||||
}
|
||||
|
||||
async fn add_table() {
|
||||
let db = MoreThanText::new().await;
|
||||
let name = "table";
|
||||
db.add_table(name).await;
|
||||
db.get_table(name).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tables {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn new_table() {
|
||||
Table::new();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_field_definition() {
|
||||
let tbl = Table::new();
|
||||
let name = "field";
|
||||
let expected = FieldDef {};
|
||||
tbl.add_field(name).await;
|
||||
let output = tbl.get_field(name).await.unwrap();
|
||||
assert!(output == expected, "Did not return a field definition.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn missing_field_definition() {
|
||||
let tbl = Table::new();
|
||||
let output = tbl.get_field("missing").await;
|
||||
assert!(
|
||||
output == None,
|
||||
"Should return None if field does not exist."
|
||||
);
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_empty_record() {
|
||||
let tbl = Table::new();
|
||||
tbl.new_record().await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod records {
|
||||
use super::*;
|
||||
|
||||
/*
|
||||
#[async_std::test]
|
||||
async fn update_fields() {
|
||||
let rec = Record::new();
|
||||
let name = "elephant";
|
||||
let data = "";
|
||||
let sstr = StaticString::new();
|
||||
rec.update_field(name.to_string(), sstr).await;
|
||||
let output = rec.get_field(name).await.unwrap();
|
||||
assert!(
|
||||
output.to_string() == data,
|
||||
"\n\nGot: {}\nWant: {}\n\n",
|
||||
output.to_string(),
|
||||
data
|
||||
)
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn empty_field() {
|
||||
let rec = Record::new();
|
||||
let name = "mull";
|
||||
let output = rec.get_field(name).await;
|
||||
assert!(output == None, "Should return an option.");
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
/*
|
||||
#[cfg(test)]
|
||||
mod tables {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_table() {
|
||||
Table::new().await;
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn update_field() {
|
||||
let table = Table::new().await;
|
||||
let mut expected: HashMap<String, FieldType> = HashMap::new();
|
||||
expected.insert("stan".to_string(), FieldType::Table);
|
||||
expected.insert("lee".to_string(), FieldType::Table);
|
||||
table.update_field("stan", "table").await.unwrap();
|
||||
table.update_field("lee", "table").await.unwrap();
|
||||
let output = table.fields().await;
|
||||
assert!(output == expected, "Table did not get the fields added.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_bad_field() -> Result<(), String> {
|
||||
let table = Table::new().await;
|
||||
let name = "failure";
|
||||
let bad_type = "ljksdbtt";
|
||||
let expected = format!("failed to add field {}", name);
|
||||
let source = format!("field type {} does not exist", bad_type);
|
||||
match table.update_field(name, bad_type).await {
|
||||
Ok(_) => Err("A bad field type should not return successfully".to_string()),
|
||||
Err(err) => {
|
||||
if format!("{}", err) != expected {
|
||||
Err(format!("Got: '{}' - Want: '{}'", err, expected))
|
||||
} else if format!("{}", err.source().unwrap()) != source {
|
||||
Err(format!(
|
||||
"Got: '{}' - Want: '{}'",
|
||||
err.source().unwrap(),
|
||||
source
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_duplicate_field() -> Result<(), String> {
|
||||
let table = Table::new().await;
|
||||
let name = "twice";
|
||||
let expected = format!("field {} already exists", name);
|
||||
table.update_field(name, "table").await.unwrap();
|
||||
match table.update_field(name, "table").await {
|
||||
Ok(_) => Err(format!("Cannot have two fields with named '{}'", name)),
|
||||
Err(err) => {
|
||||
if format!("{}", err) == expected {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Got: '{}' - Want: '{}'", err, expected))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod databases {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_database() {
|
||||
MoreThanText::new().await;
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_table() {
|
||||
let db = MoreThanText::new().await;
|
||||
db.add_table("fred".to_string()).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod fieldtypes {
|
||||
use super::*;
|
||||
|
||||
fn get_field_map() -> HashMap<String, FieldType> {
|
||||
let mut fields: HashMap<String, FieldType> = HashMap::new();
|
||||
fields.insert("table".to_string(), FieldType::Table);
|
||||
return fields;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn convert_to_string() {
|
||||
for (key, value) in get_field_map().iter() {
|
||||
assert!(
|
||||
key == &value.to_string(),
|
||||
"\n\nGot: {}\nWant: {}\n\n",
|
||||
value.to_string(),
|
||||
key
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn convert_from_string() {
|
||||
for (key, value) in get_field_map().iter() {
|
||||
assert!(
|
||||
&FieldType::from_str(key).unwrap() == value,
|
||||
"\n\nDid not return a FieldType::{}",
|
||||
key
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn convert_from_string_error() -> Result<(), String> {
|
||||
let ftype = "lkjsdfh";
|
||||
let expected = format!("field type {} does not exist", ftype);
|
||||
match FieldType::from_str(ftype) {
|
||||
Ok(_) => Err(format!("Found field type {}", ftype)),
|
||||
Err(err) => {
|
||||
if format!("{}", err) == expected {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Got: '{}' - Want: '{}'", err, expected))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
120
src/morethantext/old-mod2.rs
Normal file
@ -0,0 +1,120 @@
|
||||
pub mod error;
|
||||
pub mod fieldtype;
|
||||
|
||||
use async_std::sync::{Arc, RwLock};
|
||||
use error::MTTError;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
tables: Arc<RwLock<HashMap<String, Table>>>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new() -> Self {
|
||||
Self {
|
||||
tables: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn new_table<S>(&self, tname: S) -> Result<Table, MTTError>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let mut tables = self.tables.write().await;
|
||||
let name = tname.into();
|
||||
match tables.get(&name) {
|
||||
Some(_) => Err(MTTError::new(format!("table {} already exists", name))),
|
||||
None => {
|
||||
let table = Table::new().await;
|
||||
tables.insert(name, table.clone());
|
||||
Ok(table)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_table(&self, name: &str) -> Option<Table> {
|
||||
let tables = self.tables.read().await;
|
||||
match tables.get(name) {
|
||||
Some(tbl) => Some(tbl.clone()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Table;
|
||||
|
||||
impl Table {
|
||||
pub async fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
async fn new_column(&self, _name: &str, _type: &str) {}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod database {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_table_with_str() {
|
||||
let db = MoreThanText::new().await;
|
||||
db.new_table("william").await.unwrap();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_table_with_string() {
|
||||
let db = MoreThanText::new().await;
|
||||
db.new_table("marvin".to_string()).await.unwrap();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn table_names_are_unique() -> Result<(), String> {
|
||||
let db = MoreThanText::new().await;
|
||||
let name = "alexandar";
|
||||
let msg = format!("table {} already exists", name);
|
||||
db.new_table(name).await.unwrap();
|
||||
match db.new_table(name).await {
|
||||
Ok(_) => Err("Duplicate table names are not allowed.".to_string()),
|
||||
Err(err) => {
|
||||
if err.to_string() == msg {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!(
|
||||
"Error message is incorrect: Got: '{}' Want: '{}'",
|
||||
err.to_string(),
|
||||
msg
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_nonexistent_table() {
|
||||
let db = MoreThanText::new().await;
|
||||
let table = db.get_table("missing").await;
|
||||
assert!(table.is_none(), "There should be no table.");
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn get_a_table() {
|
||||
let db = MoreThanText::new().await;
|
||||
let name = "here";
|
||||
db.new_table(name).await.unwrap();
|
||||
let table = db.get_table(name).await;
|
||||
assert!(table.is_some(), "Table should be found.");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod table {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn add_column() {
|
||||
let tbl = Table::new().await;
|
||||
tbl.new_column("fred", "StaticString").await;
|
||||
}
|
||||
}
|
153
src/morethantext/old-mod3.rs
Normal file
@ -0,0 +1,153 @@
|
||||
pub mod error;
|
||||
|
||||
use async_std::sync::{Arc, RwLock};
|
||||
use error::DBError;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MoreThanText {
|
||||
databases: Arc<RwLock<HashMap<String, Database>>>,
|
||||
}
|
||||
|
||||
impl MoreThanText {
|
||||
pub async fn new() -> Self {
|
||||
Self {
|
||||
databases: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_database(&self, name: &str) -> Result<(), DBError> {
|
||||
let mut databases = self.databases.write().await;
|
||||
match databases.get(name) {
|
||||
Some(_) => Err(DBError::new("duplicate database name")),
|
||||
None => {
|
||||
let db = Database::new().await;
|
||||
databases.insert(name.to_string(), db);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn use_database(&self, name: &str) -> Result<Database, DBError> {
|
||||
let databases = self.databases.read().await;
|
||||
match databases.get(name) {
|
||||
Some(db) => Ok(db.clone()),
|
||||
None => Err(DBError::new("database name not found")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Database;
|
||||
|
||||
impl Database {
|
||||
async fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
async fn add_table(&self, _name: &str) {}
|
||||
}
|
||||
|
||||
struct Table;
|
||||
|
||||
impl Table {
|
||||
async fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod engine_functions {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_database() {
|
||||
let mtt = MoreThanText::new().await;
|
||||
mtt.create_database("smith").await.unwrap();
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn database_names_must_be_unique() -> Result<(), DBError> {
|
||||
let mtt = MoreThanText::new().await;
|
||||
let msg = "duplicate database name";
|
||||
mtt.create_database("john").await.unwrap();
|
||||
match mtt.create_database("john").await {
|
||||
Ok(_) => Err(DBError::new("Duplicate names should cause error")),
|
||||
Err(err) => {
|
||||
if err.to_string() == msg {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(DBError::new(format!(
|
||||
"incorrect err message: got: '{}' want: '{}'",
|
||||
err.to_string(),
|
||||
msg
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn use_database() -> Result<(), DBError> {
|
||||
let mtt = MoreThanText::new().await;
|
||||
let dbname = "Johnson";
|
||||
mtt.create_database(dbname).await.unwrap();
|
||||
mtt.use_database(dbname).await.unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn use_missing_database() -> Result<(), DBError> {
|
||||
let error = "database name not found";
|
||||
let mtt = MoreThanText::new().await;
|
||||
match mtt.use_database("ssmith").await {
|
||||
Ok(_) => Err(DBError::new("Should raise database missing error")),
|
||||
Err(err) => {
|
||||
if err.to_string() == error {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(DBError::new(format!(
|
||||
"Incorrect error message: Got '{}' Want '{}'",
|
||||
err.to_string(),
|
||||
error
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn create_get_table() {
|
||||
let db = "thedatabase";
|
||||
let mtt = MoreThanText::new().await;
|
||||
mtt.create_database(db).await.unwrap();
|
||||
let dbase = mtt.use_database(db).await.unwrap();
|
||||
dbase.add_table("melvin").await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod database_functions {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_database() {
|
||||
Database::new().await;
|
||||
}
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_table() {
|
||||
let db = Database::new().await;
|
||||
db.add_table("fred").await;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod table_functions {
|
||||
use super::*;
|
||||
|
||||
#[async_std::test]
|
||||
async fn new_table() {
|
||||
Table::new().await;
|
||||
}
|
||||
}
|
306
src/morethantext/store-old.rs
Normal file
@ -0,0 +1,306 @@
|
||||
use super::{DBError, ErrorCode, FileData, SessionData};
|
||||
use std::{collections::HashMap, slice, str};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Store {
|
||||
db_map: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl Store {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
db_map: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn test_key(key: &str) -> Result<(), DBError> {
|
||||
match key {
|
||||
"database" => (),
|
||||
_ => return Err(DBError::new(format!("databases do not have a {}", key))),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FileData<Self> for Store {
|
||||
fn to_bytes(&self) -> Vec<u8> {
|
||||
let mut output = Vec::new();
|
||||
for (name, id) in self.db_map.iter() {
|
||||
output.append(&mut name.as_bytes().to_vec());
|
||||
output.push(0);
|
||||
output.append(&mut id.as_bytes().to_vec());
|
||||
output.push(0);
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
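// Parses the `name\0id\0` pairs written by `to_bytes`; an incomplete
// name/id pair marks the file as corrupt.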
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
||||
let mut output = Store::new();
|
||||
let mut name: Vec<u8> = Vec::new();
|
||||
let mut id: Vec<u8> = Vec::new();
|
||||
let mut get_id = false;
|
||||
let mut letter: u8;
|
||||
loop {
|
||||
match data.next() {
|
||||
Some(a) => letter = a.clone(),
|
||||
None => {
|
||||
if !name.is_empty() {
|
||||
return Err(DBError::from_code(ErrorCode::CorruptFile));
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if letter == 0 {
|
||||
if get_id {
|
||||
let name_holder = match str::from_utf8(&name) {
|
||||
Ok(item) => item,
|
||||
Err(_) => return Err(DBError::from_code(ErrorCode::CorruptFile)),
|
||||
};
|
||||
let id_holder = match str::from_utf8(&id) {
|
||||
Ok(item) => item,
|
||||
Err(_) => return Err(DBError::from_code(ErrorCode::CorruptFile)),
|
||||
};
|
||||
match output.add("database", name_holder, id_holder) {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::from_code(ErrorCode::CorruptFile);
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
name.clear();
|
||||
id.clear();
|
||||
}
|
||||
get_id = !get_id;
|
||||
} else {
|
||||
if get_id {
|
||||
id.push(letter);
|
||||
} else {
|
||||
name.push(letter);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
impl SessionData for Store {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
match self.db_map.get(value) {
|
||||
Some(_) => return Err(DBError::new(format!("database {} already exists", value))),
|
||||
None => (),
|
||||
}
|
||||
self.db_map.insert(value.to_string(), data.to_string());
|
||||
let mut output = Vec::new();
|
||||
output.push(data.to_string());
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
let mut output = Vec::new();
|
||||
match self.db_map.get(value) {
|
||||
Some(data) => output.push(data.to_string()),
|
||||
None => (),
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
for key in keys {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
}
|
||||
let mut names: Vec<String> = self.db_map.clone().into_keys().collect();
|
||||
names.sort();
|
||||
Ok(names)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod file_data {
|
||||
use super::*;
|
||||
use std::error::Error;
|
||||
|
||||
#[test]
|
||||
fn to_bytes_new() {
|
||||
let dbs = Store::new();
|
||||
let expected: Vec<u8> = Vec::new();
|
||||
let output = dbs.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_bytes_with_database() {
|
||||
let mut dbs = Store::new();
|
||||
let name = "something";
|
||||
let id = "id";
|
||||
dbs.add("database", name, id).unwrap();
|
||||
let mut expected: Vec<u8> = Vec::new();
|
||||
expected.append(&mut name.as_bytes().to_vec());
|
||||
expected.push(0);
|
||||
expected.append(&mut id.as_bytes().to_vec());
|
||||
expected.push(0);
|
||||
let output = dbs.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes() {
|
||||
let mut dbs = Store::new();
|
||||
dbs.add("database", "one", "1").unwrap();
|
||||
dbs.add("database", "two", "2").unwrap();
|
||||
dbs.add("database", "three", "3").unwrap();
|
||||
let data = dbs.to_bytes();
|
||||
let mut feed = data.iter();
|
||||
let output = Store::from_bytes(&mut feed).unwrap();
|
||||
assert_eq!(output.db_map, dbs.db_map);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_incomplete_name() -> Result<(), DBError> {
|
||||
let data = "notName".as_bytes();
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => Err(DBError::new("should have produced an errpr")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CorruptFile => Ok(()),
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_incomplete_id() -> Result<(), DBError> {
|
||||
let mut data = "proper".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
data.append(&mut "nope".as_bytes().to_vec());
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => Err(DBError::new("should have produced an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CorruptFile => Ok(()),
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_handles_error() -> Result<(), DBError> {
|
||||
let name = "duplicate";
|
||||
let mut data = name.as_bytes().to_vec();
|
||||
data.push(0);
|
||||
data.append(&mut "first".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
data.append(&mut "duplicate".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
data.append(&mut "second".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => Err(DBError::new("should have returned an error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::CorruptFile => {
|
||||
assert!(
|
||||
err.source().is_some(),
|
||||
"Should state file corruption cause."
|
||||
);
|
||||
assert_eq!(
|
||||
err.source().unwrap().to_string(),
|
||||
format!("database {} already exists", name)
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(DBError::new("incorrect error code")),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod session_data {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn add_new() {
|
||||
let mut dbs = Store::new();
|
||||
let key = "database";
|
||||
let value = "marvin";
|
||||
let data = "123456";
|
||||
assert_eq!(dbs.add(key, value, data).unwrap(), [data]);
|
||||
let output = dbs.eq(key, value).unwrap();
|
||||
assert_eq!(output, [data]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_bad_key() {
|
||||
let mut dbs = Store::new();
|
||||
let key = "sdgfjksg";
|
||||
match dbs.add(key, "fred", "barney") {
|
||||
Ok(_) => assert!(false, "Bad keys should produce an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), format!("databases do not have a {}", key)),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_bad_key() {
|
||||
let dbs = Store::new();
|
||||
let key = "bvdfgert";
|
||||
match dbs.eq(key, "fred") {
|
||||
Ok(_) => assert!(false, "Bad keys should produce an error."),
|
||||
Err(_) => (),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unique_names() {
|
||||
let mut dbs = Store::new();
|
||||
let value = "wilma";
|
||||
dbs.add("database", value, "something").unwrap();
|
||||
match dbs.add("database", value, "overwrite") {
|
||||
Ok(_) => assert!(false, "Duplicate names should produce an error."),
|
||||
Err(err) => assert_eq!(
|
||||
err.to_string(),
|
||||
format!("database {} already exists", value)
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_missing() {
|
||||
let dbs = Store::new();
|
||||
let output = dbs.eq("database", "melvin").unwrap();
|
||||
assert_eq!(output, Vec::<String>::new());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_bad_keys() {
|
||||
let dbs = Store::new();
|
||||
let key = "sdfgren";
|
||||
let keys = [key];
|
||||
match dbs.list(keys.to_vec()) {
|
||||
Ok(_) => assert!(false, "Bad keys should error."),
|
||||
Err(err) => assert_eq!(err.to_string(), format!("databases do not have a {}", key)),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_is_sorted() {
|
||||
let mut dbs = Store::new();
|
||||
let mut data = ["fred", "barney", "wilma", "betty", "pebbles", "bambam"];
|
||||
for db in data {
|
||||
dbs.add("database", db, db).unwrap();
|
||||
}
|
||||
data.sort();
|
||||
assert_eq!(dbs.list(["database"].to_vec()).unwrap(), data);
|
||||
}
|
||||
}
|
@ -1,293 +1,171 @@
|
||||
use super::{DBError, FileData, SessionData};
|
||||
use std::{collections::HashMap, slice, str};
|
||||
use super::{Data, Database, ErrorCode, MTTError};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Store {
|
||||
db_map: HashMap<String, String>,
|
||||
data: HashMap<String, Data<Database>>,
|
||||
}
|
||||
|
||||
impl Store {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
db_map: HashMap::new(),
|
||||
data: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn test_key(key: &str) -> Result<(), DBError> {
|
||||
match key {
|
||||
"database" => (),
|
||||
_ => return Err(DBError::new(format!("databases do not have a {}", key))),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FileData<Self> for Store {
|
||||
fn to_bytes(&self) -> Vec<u8> {
|
||||
let mut output = Vec::new();
|
||||
for (name, id) in self.db_map.iter() {
|
||||
output.append(&mut name.as_bytes().to_vec());
|
||||
output.push(0);
|
||||
output.append(&mut id.as_bytes().to_vec());
|
||||
output.push(0);
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
||||
let mut output = Store::new();
|
||||
let mut name: Vec<u8> = Vec::new();
|
||||
let mut id: Vec<u8> = Vec::new();
|
||||
let mut get_id = false;
|
||||
let mut letter: u8;
|
||||
loop {
|
||||
match data.next() {
|
||||
Some(a) => letter = a.clone(),
|
||||
None => {
|
||||
if !name.is_empty() {
|
||||
return Err(DBError::new("file corruption"));
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if letter == 0 {
|
||||
if get_id {
|
||||
match output.add(
|
||||
"database",
|
||||
str::from_utf8(&name).unwrap(),
|
||||
str::from_utf8(&id).unwrap(),
|
||||
) {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
let mut error = DBError::new("file corruption");
|
||||
error.add_source(err);
|
||||
return Err(error);
|
||||
}
|
||||
};
|
||||
name.clear();
|
||||
id.clear();
|
||||
}
|
||||
get_id = !get_id;
|
||||
} else {
|
||||
if get_id {
|
||||
id.push(letter);
|
||||
} else {
|
||||
name.push(letter);
|
||||
}
|
||||
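/// Register a new, empty database under `name`; a duplicate name is rejected
/// with `ErrorCode::DuplicateDatabase`.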
pub fn add<S>(&mut self, name: S) -> Result<(), MTTError>
|
||||
where
|
||||
S: Into<String>,
|
||||
{
|
||||
let db_name = name.into();
|
||||
match self.get(&db_name) {
|
||||
Some(_) => Err(MTTError::from_code(ErrorCode::DuplicateDatabase(db_name))),
|
||||
None => {
|
||||
self.data.insert(db_name, Data::from_data(Database::new()));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
impl SessionData for Store {
|
||||
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
match self.db_map.get(value) {
|
||||
Some(_) => return Err(DBError::new(format!("database {} already exists", value))),
|
||||
None => (),
|
||||
}
|
||||
self.db_map.insert(value.to_string(), data.to_string());
|
||||
let mut output = Vec::new();
|
||||
output.push(data.to_string());
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
let mut output = Vec::new();
|
||||
match self.db_map.get(value) {
|
||||
Some(data) => output.push(data.to_string()),
|
||||
None => (),
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
|
||||
for key in keys {
|
||||
match Self::test_key(key) {
|
||||
Ok(_) => (),
|
||||
Err(err) => return Err(err),
|
||||
pub fn add_by_id<S, D>(&mut self, name: S, id: D) -> Result<(), MTTError>
|
||||
where
|
||||
S: Into<String>,
|
||||
D: Into<String>,
|
||||
{
|
||||
let db_name = name.into();
|
||||
match self.get(&db_name) {
|
||||
Some(_) => Err(MTTError::from_code(ErrorCode::DuplicateDatabase(db_name))),
|
||||
None => {
|
||||
self.data.insert(db_name, Data::from_id(id.into()));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
let mut names: Vec<String> = self.db_map.clone().into_keys().collect();
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Option<&Data<Database>> {
|
||||
self.data.get(name)
|
||||
}
|
||||
|
||||
pub fn list(&self) -> Vec<String> {
|
||||
let mut names = Vec::new();
|
||||
for name in self.data.keys() {
|
||||
names.push(name.to_string());
|
||||
}
|
||||
names.sort();
|
||||
Ok(names)
|
||||
names
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod file_data {
|
||||
use super::*;
|
||||
use std::error::Error;
|
||||
|
||||
#[test]
|
||||
fn to_bytes_new() {
|
||||
let dbs = Store::new();
|
||||
let expected: Vec<u8> = Vec::new();
|
||||
let output = dbs.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_bytes_with_database() {
|
||||
let mut dbs = Store::new();
|
||||
let name = "something";
|
||||
let id = "id";
|
||||
dbs.add("database", name, id).unwrap();
|
||||
let mut expected: Vec<u8> = Vec::new();
|
||||
expected.append(&mut name.as_bytes().to_vec());
|
||||
expected.push(0);
|
||||
expected.append(&mut id.as_bytes().to_vec());
|
||||
expected.push(0);
|
||||
let output = dbs.to_bytes();
|
||||
assert_eq!(output, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes() {
|
||||
let mut dbs = Store::new();
|
||||
dbs.add("database", "one", "1").unwrap();
|
||||
dbs.add("database", "two", "2").unwrap();
|
||||
dbs.add("database", "three", "3").unwrap();
|
||||
let data = dbs.to_bytes();
|
||||
let mut feed = data.iter();
|
||||
let output = Store::from_bytes(&mut feed).unwrap();
|
||||
assert_eq!(output.db_map, dbs.db_map);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_incomplete_name() {
|
||||
let data = "notName".as_bytes();
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => assert!(false, "This should have failed."),
|
||||
Err(err) => assert_eq!(err.to_string(), "file corruption"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_incomplete_id() {
|
||||
let mut data = "proper".as_bytes().to_vec();
|
||||
data.push(0);
|
||||
data.append(&mut "nope".as_bytes().to_vec());
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => assert!(false, "This should have failed."),
|
||||
Err(err) => assert_eq!(err.to_string(), "file corruption"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_bytes_handles_error() {
|
||||
let name = "duplicate";
|
||||
let mut data = name.as_bytes().to_vec();
|
||||
data.push(0);
|
||||
data.append(&mut "first".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
data.append(&mut "duplicate".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
data.append(&mut "second".as_bytes().to_vec());
|
||||
data.push(0);
|
||||
let mut feed = data.iter();
|
||||
match Store::from_bytes(&mut feed) {
|
||||
Ok(_) => assert!(false, "This should have failed."),
|
||||
Err(err) => {
|
||||
assert_eq!(err.to_string(), "file corruption");
|
||||
assert!(
|
||||
err.source().is_some(),
|
||||
"Should state file corruption cause."
|
||||
);
|
||||
assert_eq!(
|
||||
err.source().unwrap().to_string(),
|
||||
format!("database {} already exists", name)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod session_data {
|
||||
mod storage {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn add_new() {
|
||||
let mut dbs = Store::new();
|
||||
let key = "database";
|
||||
let value = "marvin";
|
||||
let data = "123456";
|
||||
assert_eq!(dbs.add(key, value, data).unwrap(), [data]);
|
||||
let output = dbs.eq(key, value).unwrap();
|
||||
assert_eq!(output, [data]);
|
||||
fn create_new() {
|
||||
let store = Store::new();
|
||||
let expected: Vec<String> = Vec::new();
|
||||
assert_eq!(store.list(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_bad_key() {
|
||||
let mut dbs = Store::new();
|
||||
let key = "sdgfjksg";
|
||||
match dbs.add(key, "fred", "barney") {
|
||||
Ok(_) => assert!(false, "Bad keys should produce an error."),
|
||||
Err(err) => assert_eq!(err.to_string(), format!("databases do not have a {}", key)),
|
||||
fn add_db_by_str() {
|
||||
let mut store = Store::new();
|
||||
let name = "Melvin";
|
||||
store.add(name).unwrap();
|
||||
let output = store.get(name);
|
||||
assert!(output.is_some(), "Get returned none.");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_db_by_string() {
|
||||
let mut store = Store::new();
|
||||
let name = "Marvin";
|
||||
store.add(name.to_string()).unwrap();
|
||||
let output = store.get(name);
|
||||
assert!(output.is_some(), "Get returned none.");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fail_on_duplicates() -> Result<(), MTTError> {
|
||||
let mut store = Store::new();
|
||||
let name = "Mickie";
|
||||
store.add(name).unwrap();
|
||||
match store.add(name) {
|
||||
Ok(_) => Err(MTTError::new("duplicates should error")),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::DuplicateDatabase(db_name) => {
|
||||
assert_eq!(db_name, name);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(MTTError::new(format!("{:?} is not DuplicateDatabase", err))),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_bad_key() {
|
||||
let dbs = Store::new();
|
||||
let key = "bvdfgert";
|
||||
match dbs.eq(key, "fred") {
|
||||
Ok(_) => assert!(false, "Bad keys should produce an error."),
|
||||
Err(_) => (),
|
||||
fn add_using_cache_id() {
|
||||
let mut store = Store::new();
|
||||
let name = "fred";
|
||||
let id = "12345";
|
||||
store.add_by_id(name, id).unwrap();
|
||||
let output = store.get(name).unwrap();
|
||||
assert!(output.data.is_none(), "there should be no data");
|
||||
assert_eq!(output.id, Some(id.to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_by_cache_id_name_string() {
|
||||
let mut store = Store::new();
|
||||
let name = "barney";
|
||||
let id = "67890";
|
||||
store.add_by_id(name.to_string(), id).unwrap();
|
||||
let output = store.get(name).unwrap();
|
||||
assert!(output.data.is_none(), "there should be no data");
|
||||
assert_eq!(output.id, Some(id.to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_duplicate_databases_for_add_by_id() {
|
||||
let mut store = Store::new();
|
||||
let name = "betty";
|
||||
store.add_by_id(name, "fghij").unwrap();
|
||||
match store.add_by_id(name, "klmno") {
|
||||
Ok(_) => assert!(false, "Duplicates should error."),
|
||||
Err(err) => match err.code {
|
||||
ErrorCode::DuplicateDatabase(db_name) => assert_eq!(db_name, name),
|
||||
_ => assert!(false, "{:?} is not DuplicateDatabase", err),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unique_names() {
|
||||
let mut dbs = Store::new();
|
||||
let value = "wilma";
|
||||
dbs.add("database", value, "something").unwrap();
|
||||
match dbs.add("database", value, "overwrite") {
|
||||
Ok(_) => assert!(false, "Duplicate names should produce an error."),
|
||||
Err(err) => assert_eq!(
|
||||
err.to_string(),
|
||||
format!("database {} already exists", value)
|
||||
),
|
||||
fn add_by_cache_id_string() {
|
||||
let mut store = Store::new();
|
||||
let name = "wilma";
|
||||
let id = "abcdef";
|
||||
store.add_by_id(name, id.to_string()).unwrap();
|
||||
let output = store.get(name).unwrap();
|
||||
assert!(output.data.is_none(), "there should be no data");
|
||||
assert_eq!(output.id, Some(id.to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_bad_database() -> Result<(), MTTError> {
|
||||
let store = Store::new();
|
||||
match store.get("missing") {
|
||||
Some(_) => Err(MTTError::new("Should have returned None.")),
|
||||
None => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_missing() {
|
||||
let dbs = Store::new();
|
||||
let output = dbs.eq("database", "melvin").unwrap();
|
||||
assert_eq!(output, Vec::<String>::new());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_bad_keys() {
|
||||
let dbs = Store::new();
|
||||
let key = "sdfgren";
|
||||
let keys = [key];
|
||||
match dbs.list(keys.to_vec()) {
|
||||
Ok(_) => assert!(false, "Bad keys should error."),
|
||||
Err(err) => assert_eq!(err.to_string(), format!("databases do not have a {}", key)),
|
||||
fn get_list() {
|
||||
let mut store = Store::new();
|
||||
let mut ids = ["one", "two", "three", "four", "five"];
|
||||
for name in ids {
|
||||
store.add(name.to_string()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_is_sorted() {
|
||||
let mut dbs = Store::new();
|
||||
let mut data = ["fred", "barney", "wilma", "betty", "pebbles", "bambam"];
|
||||
for db in data {
|
||||
dbs.add("database", db, db).unwrap();
|
||||
}
|
||||
data.sort();
|
||||
assert_eq!(dbs.list(["database"].to_vec()).unwrap(), data);
|
||||
ids.sort();
|
||||
assert_eq!(store.list(), ids);
|
||||
}
|
||||
}
|
||||
|
18
src/morethantext/table.rs
Normal file
@ -0,0 +1,18 @@
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Table;
|
||||
|
||||
impl Table {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tables {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn create_new() {
|
||||
Table::new();
|
||||
}
|
||||
}
|