Add IO error variant and blob writer function
This commit is contained in:
parent
e05de8284d
commit
799d9de001
|
@ -1,5 +1,7 @@
|
|||
//! Custom error type representing all possible error variants for peach-web.
|
||||
|
||||
use std::io::Error as IoError;
|
||||
|
||||
use golgi::GolgiError;
|
||||
use peach_lib::error::PeachError;
|
||||
use peach_lib::{serde_json, serde_yaml};
|
||||
|
@ -12,6 +14,7 @@ pub enum PeachWebError {
|
|||
FailedToRegisterDynDomain(String),
|
||||
Golgi(GolgiError),
|
||||
HomeDir,
|
||||
Io(IoError),
|
||||
Json(JsonError),
|
||||
OsString,
|
||||
PeachLib { source: PeachError, msg: String },
|
||||
|
@ -24,6 +27,7 @@ impl std::error::Error for PeachWebError {
|
|||
PeachWebError::FailedToRegisterDynDomain(_) => None,
|
||||
PeachWebError::Golgi(ref source) => Some(source),
|
||||
PeachWebError::HomeDir => None,
|
||||
PeachWebError::Io(ref source) => Some(source),
|
||||
PeachWebError::Json(ref source) => Some(source),
|
||||
PeachWebError::OsString => None,
|
||||
PeachWebError::PeachLib { ref source, .. } => Some(source),
|
||||
|
@ -43,6 +47,7 @@ impl std::fmt::Display for PeachWebError {
|
|||
f,
|
||||
"Filesystem error: failed to determine home directory path"
|
||||
),
|
||||
PeachWebError::Io(ref source) => write!(f, "IO error: {}", source),
|
||||
PeachWebError::Json(ref source) => write!(f, "Serde JSON error: {}", source),
|
||||
PeachWebError::OsString => write!(
|
||||
f,
|
||||
|
@ -60,6 +65,12 @@ impl From<GolgiError> for PeachWebError {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<IoError> for PeachWebError {
|
||||
fn from(err: IoError) -> PeachWebError {
|
||||
PeachWebError::Io(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<JsonError> for PeachWebError {
|
||||
fn from(err: JsonError) -> PeachWebError {
|
||||
PeachWebError::Json(err)
|
||||
|
|
|
@ -1,11 +1,19 @@
|
|||
pub mod monitor;
|
||||
|
||||
use std::io::prelude::*;
|
||||
use std::{fs, fs::File};
|
||||
|
||||
use dirs;
|
||||
use golgi::blobs;
|
||||
use log::info;
|
||||
use peach_lib::sbot::SbotConfig;
|
||||
use rocket::response::{Redirect, Responder};
|
||||
use rocket::serde::Serialize;
|
||||
use rocket::{
|
||||
fs::TempFile,
|
||||
response::{Redirect, Responder},
|
||||
serde::Serialize,
|
||||
};
|
||||
use rocket_dyn_templates::Template;
|
||||
use temporary::Directory;
|
||||
|
||||
use crate::{error::PeachWebError, THEME};
|
||||
|
||||
|
@ -32,6 +40,41 @@ pub fn get_go_ssb_path() -> Result<String, PeachWebError> {
|
|||
Ok(go_ssb_path)
|
||||
}
|
||||
|
||||
// take the path to a file, add it to the blobstore and return the blob id
|
||||
pub async fn write_blob_to_store(file: &mut TempFile<'_>) -> Result<String, PeachWebError> {
|
||||
// create temporary directory and path
|
||||
let temp_dir = Directory::new("blob")?;
|
||||
// we performed a `file.name().is_some()` check before calling `write_blob_to_store`
|
||||
// so it should be safe to do a simple unwrap here
|
||||
let filename = file.name().expect("retrieving filename from uploaded file");
|
||||
let temp_path = temp_dir.join(filename);
|
||||
|
||||
// write file to temporary path
|
||||
file.persist_to(&temp_path).await?;
|
||||
|
||||
// open the file and read it into a buffer
|
||||
let mut file = File::open(&temp_path)?;
|
||||
let mut buffer = Vec::new();
|
||||
file.read_to_end(&mut buffer)?;
|
||||
|
||||
// hash the bytes representing the file
|
||||
let (hex_hash, blob_id) = blobs::hash_blob(&buffer)?;
|
||||
|
||||
// define the blobstore path and blob filename
|
||||
let (blob_dir, blob_filename) = hex_hash.split_at(2);
|
||||
let go_ssb_path = get_go_ssb_path()?;
|
||||
let blobstore_sub_dir = format!("{}/blobs/sha256/{}", go_ssb_path, blob_dir);
|
||||
|
||||
// create the blobstore sub-directory
|
||||
fs::create_dir_all(&blobstore_sub_dir)?;
|
||||
|
||||
// copy the file to the blobstore
|
||||
let blob_path = format!("{}/{}", blobstore_sub_dir, blob_filename);
|
||||
fs::copy(temp_path, blob_path)?;
|
||||
|
||||
Ok(blob_id)
|
||||
}
|
||||
|
||||
// THEME FUNCTIONS
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
|
|
Loading…
Reference in New Issue