//! Cached session data: `DataType` payloads that serialize to and from
//! disk-backed entries.
use super::{DBError, FileData, SessionData, Store};

use async_std::{fs::write, path::Path};
use rand::{distributions::Alphanumeric, thread_rng, Rng};

use std::{
    cell::Cell,
    slice,
    str,
    time::{Duration, Instant},
};
|
|
|
|
/// The concrete kinds of payload a cache entry can hold.
#[derive(Clone)]
enum DataType {
    /// A map-style store (see `Store` in the parent module).
    DBMap(Store),
}
|
|
|
|
impl DataType {
|
|
fn new(data_type: &str) -> Result<Self, DBError> {
|
|
match data_type {
|
|
"store" => Ok(DataType::DBMap(Store::new())),
|
|
_ => Err(DBError::new("invalid data type")),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl SessionData for DataType {
|
|
fn add(&mut self, key: &str, value: &str, data: &str) -> Result<Vec<String>, DBError> {
|
|
match self {
|
|
DataType::DBMap(dbs) => dbs.add(key, value, data),
|
|
}
|
|
}
|
|
|
|
fn eq(&self, key: &str, value: &str) -> Result<Vec<String>, DBError> {
|
|
match self {
|
|
DataType::DBMap(dbs) => dbs.eq(key, value),
|
|
}
|
|
}
|
|
|
|
fn list(&self, keys: Vec<&str>) -> Result<Vec<String>, DBError> {
|
|
match self {
|
|
DataType::DBMap(dbs) => dbs.list(keys),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl FileData<Self> for DataType {
|
|
fn to_bytes(&self) -> Vec<u8> {
|
|
let mut output = Vec::new();
|
|
match self {
|
|
DataType::DBMap(store) => {
|
|
output.append(&mut "DBMap".as_bytes().to_vec());
|
|
output.push(0);
|
|
output.append(&mut store.to_bytes());
|
|
},
|
|
}
|
|
output
|
|
}
|
|
|
|
fn from_bytes(data: &mut slice::Iter<u8>) -> Result<Self, DBError> {
|
|
let mut header: Vec<u8> = Vec::new();
|
|
loop {
|
|
let letter = match data.next() {
|
|
Some(a) => a.clone(),
|
|
None => 0,
|
|
};
|
|
if letter == 0 {
|
|
break;
|
|
} else {
|
|
header.push(letter);
|
|
}
|
|
}
|
|
let header = match str::from_utf8(&header) {
|
|
Ok(item) => item,
|
|
Err(_) => return Err(DBError::new("file corruption")),
|
|
};
|
|
match header {
|
|
"DBMap" => {
|
|
match Store::from_bytes(data) {
|
|
Ok(store) => Ok(DataType::DBMap(store)),
|
|
Err(err) => Err(err),
|
|
}
|
|
},
|
|
_ => Err(DBError::new("file corruption")),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// A single cached item: in-memory data plus the file it persists to.
struct Entry {
    // The payload held in memory.
    data: DataType,
    // Path of the backing file on disk.
    filename: String,
    // Time of last access; `Cell` allows updating through `&self`.
    // NOTE(review): initialized at creation but never refreshed in the code
    // visible here — presumably intended for eviction; confirm usage.
    last_used: Cell<Instant>,
}
|
|
|
|
impl Entry {
|
|
async fn new(filename: String, data: DataType) -> Result<Self, DBError> {
|
|
if Path::new(&filename).exists().await {
|
|
return Err(DBError::new("entry already exists"));
|
|
}
|
|
Ok(Self {
|
|
data: data,
|
|
filename: filename,
|
|
last_used: Cell::new(Instant::now()),
|
|
})
|
|
}
|
|
|
|
async fn get(&self) -> Result<DataType, DBError> {
|
|
Ok(self.data.clone())
|
|
}
|
|
}
|
|
|
|
/// Placeholder for the file-backed cache; implementation pending.
struct Cache;

impl Cache {
    /// Create a cache rooted at the given directory.
    ///
    /// Stub: the directory is not used yet. The parameter is named `_dir`
    /// to silence the unused-variable warning until the implementation
    /// lands; the signature is unchanged for callers.
    async fn new(_dir: &str) -> Self {
        Self
    }
}
|
|
|
|
#[cfg(test)]
mod datatype_session {
    // Fixed module-name typo: `datatype_sesssion` -> `datatype_session`
    // (test-only module, no external callers).
    use super::*;

    /// An unknown type tag must be rejected with "invalid data type".
    #[test]
    fn invalid_cache_type() -> Result<(), DBError> {
        match DataType::new("dkhgdl") {
            Ok(_) => Err(DBError::new("invalid data type should raise an error")),
            Err(err) => {
                assert_eq!(err.to_string(), "invalid data type");
                Ok(())
            }
        }
    }

    /// A freshly created store lists no databases.
    #[test]
    fn create_storage() {
        let dbs = DataType::new("store").unwrap();
        let expected: Vec<String> = Vec::new();
        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), expected);
    }

    /// Added entries are visible through both `eq` and `list`.
    #[test]
    fn update_storage() {
        let mut dbs = DataType::new("store").unwrap();
        let name = "new_database";
        let id = "someid";
        dbs.add("database", name, id).unwrap();
        assert_eq!(dbs.eq("database", name).unwrap(), [id].to_vec());
        assert_eq!(dbs.list(["database"].to_vec()).unwrap(), [name].to_vec());
    }
}
|
|
|
|
#[cfg(test)]
mod datatype_file {
    use super::*;

    /// An empty store serializes to just the header plus terminator.
    #[test]
    fn new_store_bytes() {
        let dbs = DataType::new("store").unwrap();
        let mut expected = "DBMap".as_bytes().to_vec();
        expected.push(0);
        assert_eq!(dbs.to_bytes(), expected);
    }

    /// The bytes after the header match `Store`'s own serialization.
    #[test]
    fn store_bytes_with_info() {
        let name = "title";
        let id = "king";
        let mut store = Store::new();
        let mut dt_store = DataType::new("store").unwrap();
        let mut expected = dt_store.to_bytes();
        // Fixed: both `add` results were silently dropped (`#[must_use]`
        // Result); a failed add would have skewed the assertion below.
        store.add("database", name, id).unwrap();
        expected.append(&mut store.to_bytes());
        dt_store.add("database", name, id).unwrap();
        assert_eq!(dt_store.to_bytes(), expected);
    }

    /// Round-trip an empty store through `to_bytes` / `from_bytes`.
    #[test]
    fn read_empty_store() {
        let dt_store = DataType::new("store").unwrap();
        let data = dt_store.to_bytes();
        let mut feed = data.iter();
        let output = DataType::from_bytes(&mut feed).unwrap();
        assert_eq!(dt_store.list(["database"].to_vec()).unwrap(), output.list(["database"].to_vec()).unwrap());
    }

    /// Round-trip a store holding one record.
    #[test]
    fn read_store_info() {
        let mut dt_store = DataType::new("store").unwrap();
        dt_store.add("database", "raven", "beastboy").unwrap();
        let data = dt_store.to_bytes();
        let mut feed = data.iter();
        let output = DataType::from_bytes(&mut feed).unwrap();
        assert_eq!(dt_store.list(["database"].to_vec()).unwrap(), output.list(["database"].to_vec()).unwrap());
    }

    /// A header that matches no known tag is reported as file corruption.
    #[test]
    fn read_bad_header() -> Result<(), DBError> {
        let data = "sdghsdl".as_bytes().to_vec();
        let mut feed = data.iter();
        match DataType::from_bytes(&mut feed) {
            Ok(_) => Err(DBError::new("should have raised an error")),
            Err(err) => {
                assert_eq!(err.to_string(), "file corruption");
                Ok(())
            },
        }
    }

    /// A valid header followed by a garbage payload is file corruption.
    #[test]
    fn read_bad_store() -> Result<(), DBError> {
        let mut data = "DBMap".as_bytes().to_vec();
        data.push(0);
        data.append(&mut "sdfgs".as_bytes().to_vec());
        let mut feed = data.iter();
        match DataType::from_bytes(&mut feed) {
            Ok(_) => Err(DBError::new("should have raised an error")),
            Err(err) => {
                assert_eq!(err.to_string(), "file corruption");
                Ok(())
            },
        }
    }
}
|
|
|
|
#[cfg(test)]
mod entry {
    use super::*;
    use tempfile::tempdir;

    /// A new entry hands back data equivalent to what it was given.
    #[async_std::test]
    async fn create() {
        let tmp = tempdir().unwrap();
        let mut payload = DataType::new("store").unwrap();
        payload.add("database", "roger", "moore").unwrap();
        let path = tmp.path().join("wiliam");
        let entry = Entry::new(path.to_str().unwrap().to_string(), payload.clone())
            .await
            .unwrap();
        let fetched = entry.get().await.unwrap();
        assert_eq!(
            payload.list(["database"].to_vec()).unwrap(),
            fetched.list(["database"].to_vec()).unwrap()
        );
    }

    /// Creating an entry over an existing file must fail, not overwrite.
    #[async_std::test]
    async fn no_over_writes() -> Result<(), DBError> {
        let tmp = tempdir().unwrap();
        let target = tmp.path().join("wicked");
        write(&target, b"previous").await.unwrap();
        let payload = DataType::new("store").unwrap();
        match Entry::new(target.to_str().unwrap().to_string(), payload).await {
            Err(err) => {
                assert_eq!(err.to_string(), "entry already exists");
                Ok(())
            }
            Ok(_) => Err(DBError::new(
                "Should produce an error for an existing Entry",
            )),
        }
    }
}
|
|
|
|
#[cfg(test)]
mod cache {
    use super::*;
    use tempfile::tempdir;

    /// Constructing a cache over a fresh temp dir completes without panic.
    #[async_std::test]
    async fn create() {
        let tmp = tempdir().unwrap();
        let root = tmp.path().to_str().unwrap();
        Cache::new(root).await;
    }
}
|