DAR-Encryption

Paul Zinselmeyer 2023-10-18 16:14:44 +02:00
parent 3ec0d51003
commit ac084a9288
13 changed files with 1638 additions and 955 deletions

1 .gitignore vendored

@@ -1,3 +1,4 @@
/target
/data
/.env
/result

1371 Cargo.lock generated

File diff suppressed because it is too large.

Cargo.toml

@@ -6,18 +6,23 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
tokio = { version = "1.27.0", features = ["full"] }
tokio-util = { version="0.7"}
tokio = { version = "1.33", features = ["full"] }
tokio-util = { version="0.7", features = ["io"]}
futures-util = "0.3"
axum = {version="0.6", features=["macros", "headers"]}
serde = "1.0"
serde_cbor = "0.11"
toml = "0.8"
render = { git="https://github.com/render-rs/render.rs" }
thiserror = "1.0.40"
rand = "0.8.5"
thiserror = "1.0"
rand = "0.8"
dotenvy = "0.15"
markdown = "0.3.0"
markdown = "0.3"
axum_oidc = {git="https://git2.zettoit.eu/pfz4/axum_oidc"}
rust-s3 = { version="0.33.0", features=["tokio-rustls-tls", "tags"], default_features=false }
log = "0.4.18"
pretty_env_logger = "0.5.0"
log = "0.4"
env_logger = "0.10"
chacha20 = "0.9"
sha3 = "0.10"
hex = "0.4"
bytes = "1.5"
pin-project-lite = "0.2"
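
The new `chacha20`, `sha3`, `hex`, `bytes`, and `pin-project-lite` dependencies back the at-rest encryption added in this commit. As a minimal sketch of the `chacha20` 0.9 stream-cipher API that the new code relies on (the key and nonce values here are placeholders, not anything the service actually uses):

```rust
use chacha20::{
    cipher::{KeyIvInit, StreamCipher},
    ChaCha20,
};

fn main() {
    let key = [0x42u8; 32]; // placeholder 256-bit key
    let nonce = [0x24u8; 12]; // placeholder 96-bit nonce
    let mut data = *b"some plaintext";

    // encrypt in place by XORing the data with the ChaCha20 keystream
    let mut cipher = ChaCha20::new(&key.into(), &nonce.into());
    cipher.apply_keystream(&mut data);

    // decryption is the same operation with a fresh cipher at the same keystream position
    let mut cipher = ChaCha20::new(&key.into(), &nonce.into());
    cipher.apply_keystream(&mut data);
    assert_eq!(&data, b"some plaintext");
}
```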

Dockerfile

@@ -1,4 +0,0 @@
FROM debian:stable-slim as final
WORKDIR /app
COPY ./target/release/bin ./bin
CMD ["/app/bin"]

129 flake.lock Normal file

@@ -0,0 +1,129 @@
{
"nodes": {
"crane": {
"inputs": {
"flake-compat": "flake-compat",
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": [
"rust-overlay"
]
},
"locked": {
"lastModified": 1697513493,
"narHash": "sha256-Kjidf29+ahcsQE7DICxI4g4tjMSY76BfhKFANnkQhk0=",
"owner": "ipetkov",
"repo": "crane",
"rev": "eb5034b6ee36d523bf1d326ab990811ac2ceb870",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1696267196,
"narHash": "sha256-AAQ/2sD+0D18bb8hKuEEVpHUYD1GmO2Uh/taFamn6XQ=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "4f910c9827911b1ec2bf26b5a062cd09f8d89f85",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1697059129,
"narHash": "sha256-9NJcFF9CEYPvHJ5ckE8kvINvI84SZZ87PvqMbH6pro0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "5e4c2ada4fcd54b99d56d7bd62f384511a7e2593",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"crane": "crane",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": [
"flake-utils"
],
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1697508761,
"narHash": "sha256-QKWiXUlnke+EiJw3pek1l7xyJ4YsxYXZeQJt/YLgjvA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "6f74c92caaf2541641b50ec623676430101d1fd4",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

69 flake.nix Normal file

@@ -0,0 +1,69 @@
{
description = "bin service";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs = {
nixpkgs.follows = "nixpkgs";
flake-utils.follows = "flake-utils";
};
};
crane = {
url = "github:ipetkov/crane";
inputs = {
nixpkgs.follows = "nixpkgs";
flake-utils.follows = "flake-utils";
rust-overlay.follows = "rust-overlay";
};
};
};
outputs = { self, nixpkgs, flake-utils, rust-overlay, crane}:
flake-utils.lib.eachDefaultSystem (system:
let
overlays = [ (import rust-overlay) ];
pkgs = import nixpkgs {
inherit system overlays;
};
rustToolchain = pkgs.rust-bin.stable.latest.default;
markdownFilter = path: _type: builtins.match ".*md$" path != null;
markdownOrCargo = path: type: (markdownFilter path type) || (craneLib.filterCargoSources path type);
craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain;
src = pkgs.lib.cleanSourceWith {
src = craneLib.path ./.;
filter = markdownOrCargo;
};
nativeBuildInputs = with pkgs; [ rustToolchain pkg-config ];
buildInputs = with pkgs; [ ];
commonArgs = {
inherit src buildInputs nativeBuildInputs;
};
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
bin = craneLib.buildPackage (commonArgs // {
inherit cargoArtifacts;
});
in
with pkgs;
{
packages = {
inherit bin;
default = bin;
};
devShells.default = mkShell {
inputsFrom = [ bin ];
};
hydraJobs."build" = bin;
}
);
}
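
Assuming a flake-enabled Nix installation, the service can then be built with `$ nix build` (or `$ nix build .#bin`, since `default` points at `bin`), and a development shell with the same build inputs entered via `$ nix develop`.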

45 src/error.rs Normal file

@@ -0,0 +1,45 @@
use axum::{http::StatusCode, response::IntoResponse};
use log::error;
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("io error: {:?}", 0)]
Io(#[from] std::io::Error),
#[error("time error: {:?}", 0)]
Time(#[from] std::time::SystemTimeError),
#[error("metadata error: {:?}", 0)]
MetadataDe(#[from] toml::de::Error),
#[error("metadata error: {:?}", 0)]
MetadataSer(#[from] toml::ser::Error),
#[error("phrase is not valid")]
PhraseInvalid,
#[error("bin could not be found")]
BinNotFound,
#[error("file exists")]
DataFileExists,
#[error("hex error: {:?}", 0)]
Hex(#[from] hex::FromHexError),
#[error("could not parse ttl")]
ParseTtl,
#[error("encryption error")]
ChaCha,
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
match self {
Self::PhraseInvalid => (StatusCode::BAD_REQUEST, "phrase is not valid"),
Self::BinNotFound => (StatusCode::NOT_FOUND, "bin does not exist"),
Self::DataFileExists => (StatusCode::CONFLICT, "bin already has data"),
Self::ParseTtl => (StatusCode::BAD_REQUEST, "invalid ttl class"),
_ => {
error!("{:?}", self);
(StatusCode::INTERNAL_SERVER_ERROR, "internal server error")
}
}
.into_response()
}
}

108 src/garbage_collector.rs Normal file

@@ -0,0 +1,108 @@
use std::{
collections::BinaryHeap,
sync::Arc,
time::{Duration, SystemTime, UNIX_EPOCH},
};
use log::{debug, info, warn};
use tokio::{fs, sync::Mutex};
use crate::{metadata::Metadata, util::Id};
#[derive(Clone)]
pub(crate) struct GarbageCollector {
heap: Arc<Mutex<BinaryHeap<GarbageCollectorItem>>>,
path: String,
}
impl GarbageCollector {
pub(crate) async fn new(path: String) -> Self {
let mut heap = BinaryHeap::new();
let mut dir = fs::read_dir(&path).await.expect("readable data dir");
while let Ok(Some(entry)) = dir.next_entry().await {
let file_name = entry.file_name();
let file_name = file_name.to_string_lossy();
if let Some(id) = file_name.strip_suffix(".toml") {
if let Ok(metadata) = Metadata::from_file(&format!("{}/{}.toml", path, id)).await {
heap.push(GarbageCollectorItem {
expires_at: metadata.expires_at,
id: Id::from_str(id),
})
}
}
}
Self {
heap: Arc::new(Mutex::new(heap)),
path,
}
}
pub(crate) async fn schedule(&self, id: &Id, expires_at: u64) {
self.heap.lock().await.push(GarbageCollectorItem {
id: id.clone(),
expires_at,
})
}
pub(crate) async fn run(&self) {
let mut heap = self.heap.lock().await;
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("time after EPOCH")
.as_secs();
// skip if first item is not old enough to be deleted
if let Some(true) = heap.peek().map(|x| x.expires_at > now) {
return;
}
if let Some(item) = heap.pop() {
if let Ok(metadata) =
Metadata::from_file(&format!("{}/{}.toml", self.path, item.id)).await
{
// check the metadata to confirm the item is really ready for deletion (filling a
// bin with data reschedules its expiry, so the heap entry may be stale)
if metadata.expires_at <= now {
debug!("deleting bin {}", item.id);
let res_meta =
fs::remove_file(&format!("{}/{}.toml", self.path, item.id)).await;
let res_data = fs::remove_file(&format!("{}/{}.dat", self.path, item.id)).await;
if res_meta.is_err() || res_data.is_err() {
warn!("failed to delete bin {} for gc", item.id);
}
}
} else {
info!("cant open metadata file for bin {} for gc", item.id);
}
}
}
pub(crate) fn spawn(&self) {
let gc = self.clone();
tokio::spawn(async move {
loop {
tokio::time::sleep(Duration::from_secs(10)).await;
gc.run().await;
}
});
}
}
#[derive(PartialEq, Eq)]
struct GarbageCollectorItem {
id: Id,
expires_at: u64,
}
impl PartialOrd for GarbageCollectorItem {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for GarbageCollectorItem {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.expires_at.cmp(&other.expires_at).reverse()
}
}
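
The collector stores pending deletions in a `BinaryHeap`, which is a max-heap; reversing `Ord::cmp` on `expires_at` makes `peek()`/`pop()` yield the bin that expires soonest. A self-contained sketch of that ordering trick (the `Item` type below is illustrative, not the commit's struct):

```rust
use std::cmp::Ordering;
use std::collections::BinaryHeap;

#[derive(PartialEq, Eq)]
struct Item {
    expires_at: u64,
}

impl PartialOrd for Item {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Item {
    fn cmp(&self, other: &Self) -> Ordering {
        // reversed: the smallest expires_at compares as the "greatest" element
        self.expires_at.cmp(&other.expires_at).reverse()
    }
}

fn main() {
    let mut heap = BinaryHeap::new();
    heap.push(Item { expires_at: 300 });
    heap.push(Item { expires_at: 100 });
    heap.push(Item { expires_at: 200 });

    // pop() now returns the soonest expiry first
    assert_eq!(heap.pop().map(|i| i.expires_at), Some(100));
    assert_eq!(heap.pop().map(|i| i.expires_at), Some(200));
    assert_eq!(heap.pop().map(|i| i.expires_at), Some(300));
}
```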

src/item_explanation.md

@@ -2,9 +2,7 @@
An empty bin was created for you. The first HTTP POST or PUT request can upload data.
All following requests can only read the uploaded data.
To use the built-in link-shortener functionality, POST or PUT the URL with Content-Type: text/x-uri or Content-Type: text/uri-list.
To change the default expiration date, you can use the `?ttl=` parameter. The following expiry classes are defined: <lifecycle_classes>.
To change the default expiration date, you can use the `?ttl=<seconds_to_live>` parameter.
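For example (a sketch; the file name and the one-hour ttl below are illustrative):
`$ curl -H "Content-Type: application/octet-stream" --data-binary @file.bin "<bin_url>?ttl=3600"`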
## Upload a link
`$ curl -H "Content-Type: text/x-uri" --data "https://example.com" <bin_url>`
@@ -27,4 +25,3 @@ $ curl <bin_url> | gpg -d - | tar -xzf
## Accessing the data
After uploading data you can retrieve it by requesting <bin_url>, optionally with a file extension that suits the data you uploaded.
If the bin is a link you will get redirected.

src/main.rs

@@ -1,13 +1,13 @@
#![deny(clippy::unwrap_used)]
use std::{
collections::HashSet,
borrow::Borrow,
env,
io::ErrorKind,
task::{Context, Poll},
time::Duration,
str::FromStr,
time::{SystemTime, UNIX_EPOCH},
};
use axum::{
body::{Bytes, StreamBody},
body::StreamBody,
debug_handler,
extract::{BodyStream, FromRef, Path, Query, State},
headers::ContentType,
@@ -16,62 +16,49 @@ use axum::{
routing::get,
Router, TypedHeader,
};
use axum_oidc::{ClaimsExtractor, EmptyAdditionalClaims, Key, OidcApplication};
use futures_util::{Stream, StreamExt, TryStreamExt};
use log::{info, warn};
use rand::{distributions::Alphanumeric, Rng};
use axum_oidc::oidc::{self, EmptyAdditionalClaims, OidcApplication, OidcExtractor};
use chacha20::{
cipher::{KeyIvInit, StreamCipher},
ChaCha20,
};
use futures_util::StreamExt;
use garbage_collector::GarbageCollector;
use log::debug;
use render::{html, raw};
use s3::{creds::Credentials, error::S3Error, request::ResponseDataStream, Bucket};
use serde::Deserialize;
use tokio_util::io::StreamReader;
use sha3::{Digest, Sha3_256};
use tokio::{
fs::File,
io::{AsyncWriteExt, BufReader, BufWriter},
};
use util::{IdSalt, KeySalt};
// RFC 7230 section 3.1.1
// It is RECOMMENDED that all HTTP senders and recipients
// support, at a minimum, request-line lengths of 8000 octets.
const HTTP_URL_MAXLENGTH: i64 = 8000;
use crate::{
error::Error,
metadata::Metadata,
util::{Id, Key, Nonce, Phrase},
web_util::DecryptingStream,
};
mod error;
mod garbage_collector;
mod metadata;
mod util;
mod web_util;
// support RFC 2483 with text/uri-list and the unofficial text/x-uri mimetype
const HTTP_URL_MIMETYPES: [&str; 2] = ["text/x-uri", "text/uri-list"];
/// length of the "phrase" that is used to access the bin (https://example.com/<phrase>)
const PHRASE_LENGTH: usize = 16;
/// length of the salts that are used to generate the key for chacha and id for the files
const SALT_LENGTH: usize = 256 - PHRASE_LENGTH;
#[derive(Debug, thiserror::Error)]
enum Error {
#[error("s3 error: {:?}", 1)]
S3(#[from] S3Error),
#[error("url is invalid utf8")]
UrlUtf8Invalid,
#[error("item could not be found")]
ItemNotFound,
#[error("file exists")]
DataFileExists,
#[error("could not parse ttl")]
ParseTtl,
}
type HandlerResult<T> = Result<T, Error>;
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
match self {
Self::S3(e) => {
warn!("S3 Error: {:?}", e);
(StatusCode::INTERNAL_SERVER_ERROR, "s3 error")
}
Self::UrlUtf8Invalid => (StatusCode::INTERNAL_SERVER_ERROR, "url is not valid utf8"),
Self::ItemNotFound => (StatusCode::NOT_FOUND, "bin could not be found"),
Self::DataFileExists => (StatusCode::CONFLICT, "bin already has data"),
Self::ParseTtl => (StatusCode::BAD_REQUEST, "invalid ttl class"),
}
.into_response()
}
}
#[derive(Clone)]
pub struct AppState {
application_base: String,
oidc_application: OidcApplication<EmptyAdditionalClaims>,
bucket: Bucket,
lifecycle_classes: HashSet<String>,
default_lifecycle_class: String,
data: String,
key_salt: KeySalt,
id_salt: IdSalt,
garbage_collector: GarbageCollector,
}
impl FromRef<AppState> for OidcApplication<EmptyAdditionalClaims> {
@@ -80,10 +67,16 @@ impl FromRef<AppState> for OidcApplication<EmptyAdditionalClaims> {
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct ExpiryHeapItem {
pub expires_at: u64,
pub id: String,
}
#[tokio::main]
async fn main() {
dotenvy::dotenv().ok();
pretty_env_logger::init();
env_logger::init();
let application_base = env::var("APPLICATION_BASE").expect("APPLICATION_BASE env var");
let issuer = env::var("ISSUER").expect("ISSUER env var");
@@ -96,204 +89,167 @@ async fn main() {
.collect::<Vec<_>>();
let oidc_application = OidcApplication::<EmptyAdditionalClaims>::create(
application_base.parse().unwrap(),
application_base
.parse()
.expect("valid APPLICATION_BASE url"),
issuer.to_string(),
client_id.to_string(),
client_secret.to_owned(),
scopes.clone(),
Key::generate(),
oidc::Key::generate(),
)
.await
.unwrap();
.expect("Oidc Authentication Client");
let bucket = Bucket::new(
&env::var("S3_BUCKET").expect("S3_BUCKET env var"),
env::var("S3_REGION")
.expect("S3_REGION env var")
.parse()
.unwrap(),
Credentials::new(
Some(&env::var("S3_ACCESS_KEY").expect("S3_ACCESS_KEY env var")),
Some(&env::var("S3_SECRET_KEY").expect("S3_SECRET_KEY env var")),
None,
None,
None,
)
.unwrap(),
)
.unwrap()
.with_path_style()
.with_request_timeout(Duration::from_secs(60 * 60 * 6));
let lifecycle_classes = std::env::var("LIFECYCLE_CLASSES")
.expect("LIFECYCLE_CLASSES env var")
.split(',')
.map(|x| x.to_string())
.collect::<HashSet<String>>();
let default_lifecycle_class =
std::env::var("DEFAULT_LIFECYCLE_CLASS").expect("DEFAULT_LIFECYCLE_CLASS env var");
if !lifecycle_classes.contains(&default_lifecycle_class) {
panic!("DEFAULT_LIFECYCLE_CLASS must be an element of LIFECYCLE_CLASSES");
}
let garbage_collector = GarbageCollector::new("./data".to_string()).await;
garbage_collector.spawn();
let state: AppState = AppState {
application_base,
oidc_application,
bucket,
lifecycle_classes,
default_lifecycle_class,
data: "./data".to_string(),
key_salt: KeySalt::from_str(&env::var("KEY_SALT").expect("KEY_SALT env var"))
.expect("KEY SALT valid hex"),
id_salt: IdSalt::from_str(&env::var("ID_SALT").expect("ID_SALT env var"))
.expect("ID_SALT valid hex"),
garbage_collector,
};
// if the two salts were identical, the bin id and the bin key would also be identical,
// which would make the encryption useless
assert_ne!(state.key_salt.raw(), state.id_salt.raw());
let app = Router::new()
.route("/", get(get_index))
.route("/:id", get(get_item).post(post_item).put(post_item))
.with_state(state);
axum::Server::bind(&"[::]:3000".parse().unwrap())
axum::Server::bind(&"[::]:8080".parse().expect("valid listen address"))
.serve(app.into_make_service())
.await
.unwrap();
.expect("Axum Server");
}
async fn get_index(
State(app_state): State<AppState>,
ClaimsExtractor(claims): ClaimsExtractor<EmptyAdditionalClaims>,
OidcExtractor { claims, .. }: OidcExtractor<EmptyAdditionalClaims>,
) -> Result<impl IntoResponse, Error> {
let subject = claims.subject().to_string();
//generate id
let id = rand::thread_rng()
.sample_iter(Alphanumeric)
.take(8)
.map(char::from)
.collect::<String>();
//generate phrase and derive id from it
let phrase = Phrase::random();
let id = Id::from_phrase(&phrase, &app_state.id_salt);
app_state.bucket.put_object(&id, &[]).await?;
app_state
.bucket
.put_object_tagging(
&id,
&[
("ttl".to_string(), app_state.default_lifecycle_class.clone()),
("subject".to_string(), subject),
],
)
.await?;
let nonce = Nonce::random();
info!("created bin {id}");
let expires_at = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() + 24 * 3600;
let metadata_path = format!("{}/{}.toml", app_state.data, id);
let metadata = Metadata {
subject,
nonce: nonce.to_hex(),
etag: None,
size: None,
content_type: None,
expires_at,
};
metadata.to_file(&metadata_path).await?;
app_state.garbage_collector.schedule(&id, expires_at).await;
debug!("created bin {id}");
Ok(Redirect::temporary(&format!(
"{}{}",
app_state.application_base, id
app_state.application_base, phrase
)))
}
#[derive(Deserialize)]
pub struct PostQuery {
ttl: Option<String>,
ttl: Option<u64>,
}
async fn post_item(
Path(id): Path<String>,
Path(phrase): Path<String>,
Query(params): Query<PostQuery>,
State(app_state): State<AppState>,
content_type: Option<TypedHeader<ContentType>>,
stream: BodyStream,
mut stream: BodyStream,
) -> HandlerResult<impl IntoResponse> {
let id = sanitize_id(id);
let phrase = Phrase::from_str(&phrase)?;
let id = Id::from_phrase(&phrase, &app_state.id_salt);
let metadata = app_state.bucket.head_object(&id).await?.0;
let metadata_path = format!("{}/{}.toml", app_state.data, id);
let mut metadata = Metadata::from_file(&metadata_path).await?;
if metadata.e_tag.is_none() {
return Err(Error::ItemNotFound);
}
if let Some(content_length) = metadata.content_length {
if content_length > 0 {
return Err(Error::DataFileExists);
}
let path = format!("{}/{}.dat", app_state.data, id);
let path = std::path::Path::new(&path);
if !path.exists() {
let key = Key::from_phrase(&phrase, &app_state.key_salt);
let nonce = Nonce::from_hex(&metadata.nonce)?;
let mut cipher = ChaCha20::new(key.borrow(), nonce.borrow());
let file = File::create(&path).await?;
let mut writer = BufWriter::new(file);
let mut etag_hasher = Sha3_256::new();
let mut size = 0;
while let Some(chunk) = stream.next().await {
let mut buf = chunk.unwrap_or_default().to_vec();
etag_hasher.update(&buf);
size += buf.len() as u64;
cipher.apply_keystream(&mut buf);
writer.write_all(&buf).await?;
}
let ttl = if let Some(ttl) = &params.ttl {
if !app_state.lifecycle_classes.contains(ttl) {
return Err(Error::ParseTtl);
}
ttl
} else {
&app_state.default_lifecycle_class
writer.flush().await?;
metadata.etag = Some(hex::encode(etag_hasher.finalize()));
metadata.size = Some(size);
metadata.content_type = match content_type {
Some(content_type) => Some(content_type.to_string()),
None => Some("application/octet-stream".to_string()),
};
metadata.expires_at = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs()
+ params.ttl.unwrap_or(24 * 3600);
metadata.to_file(&metadata_path).await?;
let tags = app_state.bucket.get_object_tagging(&id).await?.0;
let mut reader =
StreamReader::new(stream.map_err(|e| std::io::Error::new(ErrorKind::Other, e.to_string())));
let status_code = match content_type {
Some(content_type) => {
app_state
.bucket
.put_object_stream_with_content_type(&mut reader, &id, &content_type.to_string())
.await
.garbage_collector
.schedule(&id, metadata.expires_at)
.await;
debug!("bin {id} got filled");
Ok((StatusCode::OK, "ok\n"))
} else {
Err(Error::DataFileExists)
}
None => app_state.bucket.put_object_stream(&mut reader, &id).await,
}?;
let status_code = StatusCode::from_u16(status_code).unwrap();
let subject = tags
.iter()
.find(|x| x.key() == "subject")
.map(|x| x.value())
.unwrap_or_default();
app_state
.bucket
.put_object_tagging(
&id,
&[
("ttl".to_string(), ttl.to_string()),
("subject".to_string(), subject),
],
)
.await?;
info!("bin {id} is now read only");
Ok((
status_code,
format!(
"{}\n",
status_code
.canonical_reason()
.unwrap_or(&status_code.to_string())
),
))
}
#[debug_handler]
async fn get_item(
Path(id): Path<String>,
Path(phrase): Path<String>,
State(app_state): State<AppState>,
) -> HandlerResult<impl IntoResponse> {
let id = sanitize_id(id);
let phrase = Phrase::from_str(&phrase)?;
let id = Id::from_phrase(&phrase, &app_state.id_salt);
let metadata = app_state.bucket.head_object(&id).await?.0;
let metadata = Metadata::from_file(&format!("{}/{}.toml", app_state.data, id)).await?;
if metadata.e_tag.is_none() {
return Err(Error::ItemNotFound);
}
if metadata.content_length.is_none() || metadata.content_length == Some(0) {
let body = include_str!("item_explanation.md")
.replace(
let path = format!("{}/{}.dat", app_state.data, id);
let key = Key::from_phrase(&phrase, &app_state.key_salt);
let nonce = Nonce::from_hex(&metadata.nonce)?;
if std::fs::metadata(&path).is_err() {
let body = include_str!("item_explanation.md").replace(
"<bin_url>",
&format!("{}{}", app_state.application_base, id),
)
.replace(
"<lifecycle_classes>",
&app_state
.lifecycle_classes
.iter()
.map(|x| x.to_string())
.reduce(|acc, e| acc + ", " + e.as_str())
.unwrap_or_default(),
&format!("{}{}", app_state.application_base, phrase),
);
let body = markdown::to_html(&body);
let body = html! {
<html>
@@ -309,70 +265,32 @@ async fn get_item(
</html>
};
Ok((StatusCode::ACCEPTED, Html(body)).into_response())
} else if let Some(content_length) = metadata.content_length {
if HTTP_URL_MIMETYPES.contains(&metadata.content_type.as_deref().unwrap_or(""))
&& content_length <= HTTP_URL_MAXLENGTH
{
let file = app_state.bucket.get_object(&id).await?;
let url = String::from_utf8(file.to_vec()).map_err(|_| Error::UrlUtf8Invalid)?;
// Use the first line that doesn't start with a # to be compliant with RFC2483.
let url = url.lines().find(|x| !x.starts_with('#')).unwrap_or("");
Ok(Redirect::temporary(url).into_response())
} else {
let file_stream = app_state.bucket.get_object_stream(&id).await.unwrap();
//TODO(pfz4): Maybe add link handling
let file = File::open(&path).await?;
let reader = BufReader::new(file);
let body = StreamBody::new(ResponseStream(std::sync::Mutex::new(file_stream)));
let body = StreamBody::new(DecryptingStream::new(reader, id, &metadata, &key, &nonce));
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_LENGTH,
metadata.content_length.unwrap().into(),
metadata.size.unwrap_or_default().into(),
);
if let Some(content_type) = metadata.content_type {
headers.insert(header::CONTENT_TYPE, content_type.parse().unwrap());
if let Some(content_type) = metadata.content_type.and_then(|x| x.parse().ok()) {
headers.insert(header::CONTENT_TYPE, content_type);
}
if let Some(etag) = metadata.e_tag {
headers.insert(header::ETAG, etag.parse().unwrap());
if let Some(etag) = metadata.etag.clone().and_then(|x| x.parse().ok()) {
headers.insert(header::ETAG, etag);
}
if let Some(content_encoding) = metadata.content_encoding {
headers.insert(header::CONTENT_ENCODING, content_encoding.parse().unwrap());
}
if let Some(content_language) = metadata.content_language {
headers.insert(header::CONTENT_LANGUAGE, content_language.parse().unwrap());
}
if let Some(cache_control) = metadata.cache_control {
headers.insert(header::CACHE_CONTROL, cache_control.parse().unwrap());
if let Some(digest) = metadata
.etag
.and_then(|x| format!("sha3-256={x}").parse().ok())
{
headers.insert("Digest", digest);
}
Ok((StatusCode::OK, headers, body).into_response())
}
} else {
// logically should not happen
panic!("logic contradiction");
}
}
fn sanitize_id(id: String) -> String {
id.chars()
.take_while(|c| *c != '.')
.filter(|c| c.is_ascii_alphanumeric())
.collect()
}
struct ResponseStream(std::sync::Mutex<ResponseDataStream>);
impl Stream for ResponseStream {
type Item = Result<Bytes, Error>;
fn poll_next(self: std::pin::Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.0
.lock()
.unwrap()
.bytes()
.poll_next_unpin(cx)
.map(|x| x.map(Ok))
}
}
unsafe impl Send for ResponseStream {}

33 src/metadata.rs Normal file

@@ -0,0 +1,33 @@
use std::{io::ErrorKind, time::Instant};
use serde::{Deserialize, Serialize};
use tokio::{fs::File, io::AsyncWriteExt};
use crate::Error;
#[derive(Deserialize, Serialize, Debug)]
pub struct Metadata {
pub subject: String,
pub nonce: String,
pub etag: Option<String>,
pub size: Option<u64>,
pub content_type: Option<String>,
pub expires_at: u64, // seconds since UNIX_EPOCH
}
impl Metadata {
pub async fn from_file(path: &str) -> Result<Self, Error> {
let metadata = match tokio::fs::read_to_string(path).await {
Ok(x) => Ok(x),
Err(err) if err.kind() == ErrorKind::NotFound => Err(Error::BinNotFound),
Err(x) => Err(x.into()),
}?;
Ok(toml::from_str::<Self>(&metadata)?)
}
pub async fn to_file(&self, path: &str) -> Result<(), Error> {
let data = toml::to_string(&self)?;
let mut file = File::create(path).await?;
file.write_all(data.as_ref()).await?;
Ok(())
}
}
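
As an illustration of the on-disk format, here is a small standalone sketch that serializes a `Metadata`-shaped struct with the `toml` crate (assuming `serde` with the `derive` feature; all field values below are made up):

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Metadata {
    subject: String,
    nonce: String,
    etag: Option<String>,
    size: Option<u64>,
    content_type: Option<String>,
    expires_at: u64, // seconds since UNIX_EPOCH
}

fn main() -> Result<(), toml::ser::Error> {
    let metadata = Metadata {
        subject: "example-user".to_string(),
        nonce: "000102030405060708090a0b".to_string(),
        etag: Some("aabbcc".to_string()),
        size: Some(11),
        content_type: Some("text/plain".to_string()),
        expires_at: 1_700_000_000,
    };
    // prints key/value lines such as `subject = "example-user"` and `expires_at = 1700000000`
    println!("{}", toml::to_string(&metadata)?);
    Ok(())
}
```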

238 src/util.rs Normal file

@@ -0,0 +1,238 @@
use std::{borrow::Borrow, fmt::Display, str::FromStr};
use chacha20::cipher::{generic_array::GenericArray, ArrayLength};
use rand::{distributions, Rng};
use sha3::{Digest, Sha3_256};
use crate::{error::Error, PHRASE_LENGTH, SALT_LENGTH};
#[derive(Debug, PartialEq)]
pub(crate) struct Phrase(String);
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct Id(String);
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct IdSalt(Vec<u8>);
#[derive(Debug, PartialEq)]
pub(crate) struct Key(Vec<u8>);
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct KeySalt(Vec<u8>);
#[derive(Debug, PartialEq)]
pub(crate) struct Nonce(Vec<u8>);
impl Phrase {
pub(crate) fn random() -> Self {
let phrase = rand::thread_rng()
.sample_iter(distributions::Alphanumeric)
.take(PHRASE_LENGTH)
.map(char::from)
.collect::<String>();
Self(phrase)
}
}
impl FromStr for Phrase {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.chars().any(|x| !x.is_ascii_alphanumeric()) {
Err(Error::PhraseInvalid)
} else {
Ok(Self(s.to_string()))
}
}
}
impl Display for Phrase {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Id {
pub(crate) fn from_phrase(phrase: &Phrase, salt: &IdSalt) -> Self {
let mut hasher = Sha3_256::new();
hasher.update(&phrase.0);
hasher.update(&salt.0);
let id = hex::encode(hasher.finalize());
Self(id)
}
pub(crate) fn from_str(s: &str) -> Self {
Self(s.to_string())
}
pub(crate) fn raw(&self) -> &str {
&self.0
}
}
impl Display for Id {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl IdSalt {
pub(crate) fn random() -> Self {
let salt = rand::thread_rng()
.sample_iter(distributions::Standard)
.take(SALT_LENGTH)
.collect();
Self(salt)
}
pub(crate) fn raw(&self) -> &[u8] {
&self.0
}
}
impl FromStr for IdSalt {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(hex::decode(s)?))
}
}
impl Key {
pub(crate) fn from_phrase(phrase: &Phrase, salt: &KeySalt) -> Self {
let mut hasher = Sha3_256::new();
hasher.update(&phrase.0);
hasher.update(&salt.0);
Self(hasher.finalize().to_vec())
}
}
impl<L: ArrayLength<u8>> Borrow<GenericArray<u8, L>> for Key {
fn borrow(&self) -> &GenericArray<u8, L> {
GenericArray::<u8, L>::from_slice(&self.0)
}
}
impl KeySalt {
pub(crate) fn random() -> Self {
let salt = rand::thread_rng()
.sample_iter(distributions::Standard)
.take(SALT_LENGTH)
.collect();
Self(salt)
}
pub(crate) fn raw(&self) -> &[u8] {
&self.0
}
}
impl FromStr for KeySalt {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(hex::decode(s)?))
}
}
impl Nonce {
pub(crate) fn random() -> Self {
// generate a 12 byte / 96 bit nonce for chacha20 as defined in rfc7539
let nonce = rand::thread_rng()
.sample_iter(distributions::Standard)
.take(12)
.collect();
Self(nonce)
}
pub(crate) fn from_hex(hex_value: &str) -> Result<Self, Error> {
Ok(Self(hex::decode(hex_value)?))
}
pub(crate) fn to_hex(&self) -> String {
hex::encode(&self.0)
}
}
impl<L: ArrayLength<u8>> Borrow<GenericArray<u8, L>> for Nonce {
fn borrow(&self) -> &GenericArray<u8, L> {
GenericArray::from_slice(&self.0)
}
}
#[cfg(test)]
mod test {
use crate::{
util::{Id, IdSalt, Key, KeySalt, Nonce, Phrase},
PHRASE_LENGTH, SALT_LENGTH,
};
#[test]
fn phrase() {
assert_eq!(PHRASE_LENGTH, Phrase::random().0.len());
assert_ne!(Phrase::random(), Phrase::random());
}
#[test]
fn id() {
let phrase = Phrase::random();
let salt = IdSalt::random();
let phrase2 = Phrase::random();
let salt2 = IdSalt::random();
assert_eq!(
Id::from_phrase(&phrase, &salt),
Id::from_phrase(&phrase, &salt)
);
assert_ne!(
Id::from_phrase(&phrase, &salt),
Id::from_phrase(&phrase, &salt2)
);
assert_ne!(
Id::from_phrase(&phrase, &salt),
Id::from_phrase(&phrase2, &salt)
);
}
#[test]
fn key() {
let phrase = Phrase::random();
let salt = KeySalt::random();
let phrase2 = Phrase::random();
let salt2 = KeySalt::random();
assert_eq!(
Key::from_phrase(&phrase, &salt),
Key::from_phrase(&phrase, &salt)
);
assert_ne!(
Key::from_phrase(&phrase, &salt),
Key::from_phrase(&phrase, &salt2)
);
assert_ne!(
Key::from_phrase(&phrase, &salt),
Key::from_phrase(&phrase2, &salt)
);
}
#[test]
#[allow(clippy::unwrap_used)]
fn key_id_collision() {
let phrase = Phrase::random();
let id_salt = IdSalt::random();
let key_salt = KeySalt::random();
assert_ne!(
hex::decode(Id::from_phrase(&phrase, &id_salt).0).unwrap(),
Key::from_phrase(&phrase, &key_salt).0
);
}
#[test]
fn id_salt() {
assert_eq!(SALT_LENGTH, IdSalt::random().0.len());
assert_ne!(IdSalt::random(), IdSalt::random());
}
#[test]
fn key_salt() {
assert_eq!(SALT_LENGTH, KeySalt::random().0.len());
assert_ne!(KeySalt::random(), KeySalt::random());
}
#[test]
fn nonce() {
assert_eq!(12, Nonce::random().0.len());
assert_ne!(Nonce::random(), Nonce::random());
}
}

133 src/web_util.rs Normal file

@@ -0,0 +1,133 @@
use std::{
borrow::Borrow,
pin::Pin,
task::{Context, Poll},
};
use bytes::{Bytes, BytesMut};
use chacha20::{
cipher::{KeyIvInit, StreamCipher},
ChaCha20,
};
use futures_util::Stream;
use log::{debug, warn};
use pin_project_lite::pin_project;
use sha3::{Digest, Sha3_256};
use tokio::io::AsyncRead;
use tokio_util::io::poll_read_buf;
use crate::{
metadata::Metadata,
util::{Id, Key, Nonce},
};
pin_project! {
pub(crate) struct DecryptingStream<R> {
#[pin]
reader: Option<R>,
buf: BytesMut,
// chunk size
capacity: usize,
// chacha20 cipher
cipher: ChaCha20,
// hasher to verify file integrity
hasher: Sha3_256,
// hash to verify against
target_hash: String,
// id of the file for logging purposes
id: Id,
// total file size
size: u64,
// current position of the "reading head"
progress: u64
}
}
impl<R: AsyncRead> DecryptingStream<R> {
pub(crate) fn new(reader: R, id: Id, metadata: &Metadata, key: &Key, nonce: &Nonce) -> Self {
let cipher = ChaCha20::new(key.borrow(), nonce.borrow());
Self {
reader: Some(reader),
buf: BytesMut::new(),
capacity: 1 << 22, // 4 MiB
cipher,
hasher: Sha3_256::new(),
target_hash: metadata.etag.clone().unwrap_or_default(),
id,
size: metadata.size.unwrap_or_default(),
progress: 0,
}
}
}
impl<R: AsyncRead> Stream for DecryptingStream<R> {
type Item = std::io::Result<Bytes>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut this = self.as_mut().project();
let reader = match this.reader.as_pin_mut() {
Some(r) => r,
None => return Poll::Ready(None),
};
if this.buf.capacity() == 0 {
this.buf.reserve(*this.capacity);
}
match poll_read_buf(reader, cx, &mut this.buf) {
Poll::Pending => Poll::Pending,
Poll::Ready(Err(err)) => {
debug!("failed to send bin {}", this.id);
self.project().reader.set(None);
Poll::Ready(Some(Err(err)))
}
Poll::Ready(Ok(0)) => {
if self.progress_check() == DecryptingStreamProgress::Failed {
// The hash is invalid, the file has been tampered with. Close reader and stream causing the download to fail
self.project().reader.set(None);
return Poll::Ready(None);
};
self.project().reader.set(None);
Poll::Ready(None)
}
Poll::Ready(Ok(n)) => {
let mut chunk = this.buf.split();
// decrypt the chunk using chacha
this.cipher.apply_keystream(&mut chunk);
// update the sha3 hasher
this.hasher.update(&chunk);
// track progress
*this.progress += n as u64;
if self.progress_check() == DecryptingStreamProgress::Failed {
// The hash is invalid, the file has been tampered with. Close reader and stream causing the download to fail
warn!("bin {} is corrupted! transmission failed", self.id);
self.project().reader.set(None);
return Poll::Ready(None);
};
Poll::Ready(Some(Ok(chunk.freeze())))
}
}
}
}
impl<R: AsyncRead> DecryptingStream<R> {
/// checks if the hash is correct when the last byte has been read
fn progress_check(&self) -> DecryptingStreamProgress {
if self.progress >= self.size {
let hash = hex::encode(self.hasher.clone().finalize());
if hash != self.target_hash {
DecryptingStreamProgress::Failed
} else {
DecryptingStreamProgress::Finished
}
} else {
DecryptingStreamProgress::Running
}
}
}
#[derive(PartialEq, Eq)]
enum DecryptingStreamProgress {
Finished,
Failed,
Running,
}