Added cache initialization.

Jeff Baskin 2023-03-14 11:32:37 -04:00
parent c477a92945
commit a3d8feb37a
3 changed files with 307 additions and 102 deletions
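Before the per-file diffs, a condensed, self-contained sketch of the error-code pattern this commit introduces: DBError now carries a public ErrorCode instead of a bare message string, DBError::from_code builds an error from a code, and callers match on err.code rather than comparing err.to_string(). The enum below is trimmed to two variants, and the main demo and the exact body of the Error::source impl are illustrative assumptions, not code from this repository:

use std::{error::Error, fmt};

// Trimmed to two variants for illustration; the commit's enum also covers
// corrupt files, entry read/write/delete failures, and the cache.
#[derive(Debug)]
pub enum ErrorCode {
    Undefined(String),
    DataTypeIncorrect(String),
}

impl fmt::Display for ErrorCode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ErrorCode::Undefined(msg) => write!(f, "{}", msg),
            ErrorCode::DataTypeIncorrect(dtype) => write!(f, "data type '{}' is not valid", dtype),
        }
    }
}

#[derive(Debug)]
pub struct DBError {
    pub code: ErrorCode,
    src: Option<Box<dyn Error + 'static>>,
}

impl DBError {
    pub fn from_code(code: ErrorCode) -> Self {
        Self { code, src: None }
    }
}

impl fmt::Display for DBError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display now delegates to the structured code.
        write!(f, "{}", self.code)
    }
}

impl Error for DBError {
    // Assumed shape of the crate's existing source() implementation.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        self.src.as_deref()
    }
}

fn main() {
    let err = DBError::from_code(ErrorCode::DataTypeIncorrect("blob".to_string()));
    println!("display: {}", err);
    // Callers branch on the kind of failure instead of comparing message strings.
    match err.code {
        ErrorCode::DataTypeIncorrect(_) => println!("matched DataTypeIncorrect"),
        _ => println!("unexpected error code"),
    }
}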

View File

@@ -1,4 +1,4 @@
-use super::{DBError, FileData, SessionData, Store};
+use super::{DBError, ErrorCode, FileData, SessionData, Store};
 use async_std::{
     fs::{read, remove_file, write},
     path::{Path, PathBuf},
@@ -21,7 +21,9 @@ impl DataType {
     fn new(data_type: &str) -> Result<Self, DBError> {
         match data_type {
             "store" => Ok(DataType::DBMap(Store::new())),
-            _ => Err(DBError::new("invalid data type")),
+            _ => Err(DBError::from_code(ErrorCode::DataTypeIncorrect(
+                data_type.to_string(),
+            ))),
         }
     }
 }
@@ -74,14 +76,14 @@ impl FileData<Self> for DataType {
         }
         let header = match str::from_utf8(&header) {
             Ok(item) => item,
-            Err(_) => return Err(DBError::new("file corruption")),
+            Err(_) => return Err(DBError::from_code(ErrorCode::CorruptFile)),
         };
         match header {
             "DBMap" => match Store::from_bytes(data) {
                 Ok(store) => Ok(DataType::DBMap(store)),
                 Err(err) => Err(err),
             },
-            _ => Err(DBError::new("file corruption")),
+            _ => Err(DBError::from_code(ErrorCode::CorruptFile)),
         }
     }
 }
@@ -99,13 +101,12 @@ impl Entry {
     {
         let pathbuf = filename.into();
         if pathbuf.as_path().exists().await {
-            // if Path::new(&filename).exists().await {
-            return Err(DBError::new("entry already exists"));
+            return Err(DBError::from_code(ErrorCode::EntryExists(pathbuf)));
         } else {
             match write(&pathbuf, data.to_bytes()).await {
                 Ok(_) => (),
                 Err(err) => {
-                    let mut error = DBError::new("failed to write");
+                    let mut error = DBError::from_code(ErrorCode::EntryWriteFailure(pathbuf));
                     error.add_source(err);
                     return Err(error);
                 }
@@ -126,7 +127,7 @@ impl Entry {
         let content = match read(&pathbuf).await {
             Ok(text) => text,
             Err(err) => {
-                let mut error = DBError::new("read error");
+                let mut error = DBError::from_code(ErrorCode::EntryReadFailure(pathbuf));
                 error.add_source(err);
                 return Err(error);
             }
@@ -134,7 +135,7 @@ impl Entry {
         let data = match DataType::from_bytes(&mut content.iter()) {
             Ok(raw) => raw,
             Err(err) => {
-                let mut error = DBError::new("read error");
+                let mut error = DBError::from_code(ErrorCode::EntryReadFailure(pathbuf));
                 error.add_source(err);
                 return Err(error);
             }
@@ -160,7 +161,7 @@ impl Entry {
         match write(&self.filename, data.to_bytes()).await {
             Ok(_) => (),
             Err(err) => {
-                let mut error = DBError::new("write error");
+                let mut error = DBError::from_code(ErrorCode::EntryWriteFailure(self.filename.clone()));
                 error.add_source(err);
                 return Err(error);
             }
@@ -173,7 +174,7 @@ impl Entry {
         match remove_file(&self.filename).await {
             Ok(_) => Ok(()),
             Err(err) => {
-                let mut error = DBError::new("cannot remove");
+                let mut error = DBError::from_code(ErrorCode::EntryDeleteFailure(self.filename.clone()));
                 error.add_source(err);
                 Err(error)
             }
@@ -190,14 +191,19 @@ impl Cache {
     {
         let pathbuf = dir.into();
         let entry = pathbuf.as_path().join(ENTRY);
+        match Entry::get(entry.clone()).await {
+            Ok(_) => Ok(Self {}),
+            Err(_) => {
                 let store = DataType::new("store").unwrap();
                 match Entry::new(entry, store).await {
                     Ok(_) => Ok(Self {}),
                     Err(err) => {
-                let mut error = DBError::new("initialization failure");
+                        let mut error = DBError::from_code(ErrorCode::CacheReadWrite);
                         error.add_source(err);
                         Err(error)
+                    },
                 }
+            },
         }
     }
 }
@@ -210,10 +216,10 @@ mod datatype_sesssion {
     fn invalid_cache_type() -> Result<(), DBError> {
         match DataType::new("dkhgdl") {
             Ok(_) => Err(DBError::new("invalid data type should raise an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "invalid data type");
-                Ok(())
-            }
+            Err(err) => match err.code {
+                ErrorCode::DataTypeIncorrect(_) => Ok(()),
+                _ => Err(DBError::new("Invalid error code")),
+            },
         }
     }

@@ -291,10 +297,10 @@ mod datatype_file {
         let mut feed = data.iter();
         match DataType::from_bytes(&mut feed) {
             Ok(_) => Err(DBError::new("should have raised an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "file corruption");
-                Ok(())
-            }
+            Err(err) => match err.code {
+                ErrorCode::CorruptFile => Ok(()),
+                _ => Err(DBError::new("incorrect error")),
+            },
         }
     }

@@ -371,8 +377,8 @@ mod entry {
         let filename = filepath.to_str().unwrap();
         match Entry::new(filename.to_string(), data).await {
             Ok(_) => Err(DBError::new("bad file names should raise an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "failed to write");
+            Err(err) => match err.code {
+                ErrorCode::EntryWriteFailure(_) => {
                     assert!(err.source().is_some(), "Must include the source error.");
                     assert!(err
                         .source()
@@ -381,6 +387,8 @@ mod entry {
                         .contains("could not write to file"));
                     Ok(())
                 }
+                _ => Err(DBError::new("incorrect error code")),
+            },
         }
     }

@@ -398,10 +406,10 @@ mod entry {
                     "Should produce an error for an existing Entry",
                 ))
             }
-            Err(err) => {
-                assert_eq!(err.to_string(), "entry already exists");
-                Ok(())
-            }
+            Err(err) => match err.code {
+                ErrorCode::EntryExists(_) => Ok(()),
+                _ => Err(DBError::new("incorrect error code")),
+            },
         }
     }

@@ -459,8 +467,8 @@ mod entry {
         drop(dir);
         match item.update(data).await {
             Ok(_) => Err(DBError::new("file writes should return an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "write error");
+            Err(err) => match err.code {
+                ErrorCode::EntryWriteFailure(_) => {
                     assert!(err.source().is_some(), "Must include the source error.");
                     assert!(err
                         .source()
@@ -468,6 +476,8 @@ mod entry {
                         .to_string()
                         .contains("could not write to file"));
                     Ok(())
+                },
+                _ => Err(DBError::new("incorrect error code")),
             }
         }
     }
@@ -501,8 +511,8 @@ mod entry {
         let filename = filepath.to_str().unwrap();
         match Entry::get(filename).await {
             Ok(_) => Err(DBError::new("should have returned an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "read error");
+            Err(err) => match err.code {
+                ErrorCode::EntryReadFailure(_) => {
                     assert!(err.source().is_some(), "Error should have a source.");
                     assert!(
                         err.source()
@@ -514,6 +524,8 @@ mod entry {
                     );
                     Ok(())
                 }
+                _ => Err(DBError::new("incorrect error code")),
+            }
         }
     }

@@ -525,18 +537,17 @@ mod entry {
         write(&filepath, b"jhsdfghlsdf").await.unwrap();
         match Entry::get(filename).await {
             Ok(_) => Err(DBError::new("should have returned an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "read error");
+            Err(err) => match err.code {
+                ErrorCode::EntryReadFailure(_) => {
                     assert!(err.source().is_some(), "Error should have a source.");
                     assert!(
-                    err.source()
-                        .unwrap()
-                        .to_string()
-                        .contains("file corruption"),
+                        err.source().unwrap().to_string().contains("corrupt file"),
                         "Source Error Message: {}",
                         err.source().unwrap().to_string()
                     );
                     Ok(())
+                },
+                _ => Err(DBError::new("incorrect error code")),
             }
         }
     }
@@ -566,8 +577,8 @@ mod entry {
         remove_file(filename).await.unwrap();
         match item.remove().await {
             Ok(_) => Err(DBError::new("should have produced an error")),
-            Err(err) => {
-                assert_eq!(err.to_string(), "cannot remove");
+            Err(err) => match err.code {
+                ErrorCode::EntryDeleteFailure(_) => {
                     assert!(err.source().is_some(), "Error should have a source.");
                     assert!(
                         err.source()
@@ -578,6 +589,8 @@ mod entry {
                         err.source().unwrap().to_string()
                     );
                     Ok(())
+                },
+                _ => Err(DBError::new("incorrect error code")),
             }
         }
     }
@@ -607,23 +620,46 @@ mod cache {
     }

     #[async_std::test]
-    async fn entry_failure() {
+    async fn entry_failure() -> Result<(), DBError> {
         let dir = tempdir().unwrap();
         let path = dir.path().join("bad").join("path");
         match Cache::new(path).await {
-            Ok(_) => assert!(false, "Should have produced an error."),
+            Ok(_) => Err(DBError::new("Should have produced an error.")),
             Err(err) => {
-                assert_eq!(err.to_string(), "initialization failure");
+                match err.code {
+                    ErrorCode::CacheReadWrite => {
                         assert!(err.source().is_some(), "Error should have a source.");
                         assert!(
-                    err.source()
-                        .unwrap()
-                        .to_string()
-                        .contains("failed to write"),
+                            err.source().unwrap().to_string().contains("write failure"),
                             "Source Error Message: {}",
                             err.source().unwrap().to_string()
                         );
+                        Ok(())
+                    },
+                    _ => Err(DBError::new("incorrect error code")),
                 }
             }
         }
     }
+
+    #[async_std::test]
+    async fn existing_entry_point() {
+        let dir = tempdir().unwrap();
+        let data = DataType::new("store").unwrap();
+        Entry::new(dir.path().join(ENTRY), data.clone())
+            .await
+            .unwrap();
+        let cache = Cache::new(dir.path()).await.unwrap();
+    }
+
+    #[async_std::test]
+    async fn corrupt_enty_point() -> Result<(), DBError> {
+        let dir = tempdir().unwrap();
+        let file = dir.path().join(ENTRY);
+        write(file, b"Really bad data.").await.unwrap();
+        match Cache::new(dir.path()).await {
+            Ok(_) => Err(DBError::new("should have errored")),
+            Err(_) => Ok(()),
+        }
+    }
+}
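The behavior behind the commit title lives in the Cache::new hunk above: instead of always creating the entry point, initialization now tries Entry::get first and only writes a fresh store when that lookup fails. A minimal synchronous sketch of the same get-or-create flow, using std::fs stand-ins rather than the project's async Entry and DataType types (the ENTRY name is reused from the diff; init_cache and everything else here is illustrative):

use std::{fs, io, path::Path};

const ENTRY: &str = "entry_point";

// Mirrors the new get-or-create flow: reuse an existing entry point,
// otherwise persist a fresh, empty store. Any read failure falls through
// to creation, just as Cache::new treats an Entry::get error.
fn init_cache(dir: &Path) -> io::Result<Vec<u8>> {
    let entry = dir.join(ENTRY);
    match fs::read(&entry) {
        Ok(bytes) => Ok(bytes),
        Err(_) => {
            let fresh: Vec<u8> = Vec::new();
            fs::write(&entry, &fresh)?;
            Ok(fresh)
        }
    }
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir();
    // First call creates the entry point; the second call reuses it.
    init_cache(&dir)?;
    init_cache(&dir)?;
    println!("cache entry point ready at {}", dir.join(ENTRY).display());
    Ok(())
}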

View File

@@ -1,8 +1,56 @@
+use async_std::path::PathBuf;
 use std::{error::Error, fmt};

+#[derive(Debug)]
+pub enum ErrorCode {
+    Undefined(String),
+    // Read Write Errors
+    CorruptFile,
+    // Data Type Errors
+    DataTypeIncorrect(String),
+    // Entry Errors
+    EntryExists(PathBuf),
+    EntryWriteFailure(PathBuf),
+    EntryReadFailure(PathBuf),
+    EntryDeleteFailure(PathBuf),
+    // Cache
+    CacheReadWrite,
+}
+
+impl fmt::Display for ErrorCode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ErrorCode::Undefined(msg) => write!(f, "{}", msg),
+            ErrorCode::DataTypeIncorrect(dtype) => write!(f, "data type '{}' is not valid", dtype),
+            ErrorCode::CorruptFile => write!(f, "corrupt file"),
+            ErrorCode::EntryExists(path) => write!(
+                f,
+                "entry '{}' already exists",
+                path.file_name().unwrap().to_str().unwrap()
+            ),
+            ErrorCode::EntryWriteFailure(path) => write!(
+                f,
+                "entry '{}' write failure",
+                path.file_name().unwrap().to_str().unwrap()
+            ),
+            ErrorCode::EntryReadFailure(path) => write!(
+                f,
+                "entry '{}' read failure",
+                path.file_name().unwrap().to_str().unwrap()
+            ),
+            ErrorCode::EntryDeleteFailure(path) => write!(
+                f,
+                "entry '{}' delete failure",
+                path.file_name().unwrap().to_str().unwrap()
+            ),
+            ErrorCode::CacheReadWrite => write!(f, "cache read write"),
+        }
+    }
+}
+
 #[derive(Debug)]
 pub struct DBError {
-    msg: String,
+    pub code: ErrorCode,
     src: Option<Box<dyn Error + 'static>>,
 }

@@ -12,7 +60,14 @@ impl DBError {
         S: Into<String>,
     {
         Self {
-            msg: msg.into(),
+            code: ErrorCode::Undefined(msg.into()),
+            src: None,
+        }
+    }
+
+    pub fn from_code(code: ErrorCode) -> Self {
+        Self {
+            code: code,
             src: None,
         }
     }
@@ -36,12 +91,12 @@ impl Error for DBError {

 impl fmt::Display for DBError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.msg)
+        write!(f, "{}", self.code)
     }
 }

 #[cfg(test)]
-mod create {
+mod errors {
     use super::*;

     #[test]
@@ -76,6 +131,15 @@ mod create {
         );
     }

+    #[test]
+    fn using_error_code() {
+        let msg = "utter failure";
+        let code = ErrorCode::Undefined(msg.to_string());
+        let err = DBError::from_code(code);
+        assert_eq!(err.to_string(), msg);
+        assert!(err.source().is_none(), "Should be no source");
+    }
+
     #[test]
     fn with_source() {
         let msg = "but this caused the problem";
@@ -88,3 +152,108 @@ mod create {
         assert!(source.to_string() == msg);
     }
 }
+
+#[cfg(test)]
+mod codes {
+    use super::*;
+    use async_std::path::PathBuf;
+
+    const items: [&str; 2] = ["first", "second"];
+
+    fn create_path_buffer() -> Vec<PathBuf> {
+        let mut output = Vec::new();
+        for item in items {
+            let mut path = PathBuf::new();
+            path.push("thepath");
+            path.push(item);
+            output.push(path);
+        }
+        output
+    }
+
+    #[test]
+    fn undefined_display() {
+        for item in items {
+            let err = ErrorCode::Undefined(item.to_string());
+            assert_eq!(err.to_string(), item);
+        }
+    }
+
+    #[test]
+    fn incorrect_data_type() {
+        for item in items {
+            let err = ErrorCode::DataTypeIncorrect(item.to_string());
+            assert_eq!(
+                err.to_string(),
+                format!("data type '{}' is not valid", item)
+            );
+        }
+    }
+
+    #[test]
+    fn corrupt_file() {
+        assert_eq!(ErrorCode::CorruptFile.to_string(), "corrupt file");
+    }
+
+    #[test]
+    fn entry_exists() {
+        for path in create_path_buffer() {
+            let err = ErrorCode::EntryExists(path.clone());
+            assert_eq!(
+                err.to_string(),
+                format!(
+                    "entry '{}' already exists",
+                    path.file_name().unwrap().to_str().unwrap()
+                )
+            );
+        }
+    }
+
+    #[test]
+    fn entry_write_failure() {
+        for path in create_path_buffer() {
+            let err = ErrorCode::EntryWriteFailure(path.clone());
+            assert_eq!(
+                err.to_string(),
+                format!(
+                    "entry '{}' write failure",
+                    path.file_name().unwrap().to_str().unwrap()
+                )
+            );
+        }
+    }
+
+    #[test]
+    fn entry_read_failure() {
+        for path in create_path_buffer() {
+            let err = ErrorCode::EntryReadFailure(path.clone());
+            assert_eq!(
+                err.to_string(),
+                format!(
+                    "entry '{}' read failure",
+                    path.file_name().unwrap().to_str().unwrap()
+                )
+            );
+        }
+    }
+
+    #[test]
+    fn entry_delete_failure() {
+        for path in create_path_buffer() {
+            let err = ErrorCode::EntryDeleteFailure(path.clone());
+            assert_eq!(
+                err.to_string(),
+                format!(
+                    "entry '{}' delete failure",
+                    path.file_name().unwrap().to_str().unwrap()
+                )
+            );
+        }
+    }
+
+    #[test]
+    fn cache_read_write_failure() {
+        let err = ErrorCode::CacheReadWrite;
+        assert_eq!(err.to_string(), "cache read write");
+    }
+}

View File

@@ -8,7 +8,7 @@ use async_std::{
     sync::{Arc, Mutex},
     task::{sleep, spawn},
 };
-use error::DBError;
+use error::{DBError, ErrorCode};
 use rand::{distributions::Alphanumeric, thread_rng, Rng};
 use std::{
     collections::HashMap,