Compare commits
10 Commits: 300c0385e8 ... 577f7f23f8

Author   | SHA1       | Date
---------|------------|-----
Zynh0722 | 577f7f23f8 |
Zynh0722 | 290041e367 |
Zynh0722 | 6d939efbbf |
Zynh0722 | 6ad91ff7e5 |
Zynh0722 | 6e332d4b3c |
Zynh0722 | e6c996d7d8 |
Zynh0722 | b93a3950d0 |
Zynh0722 | 2f52fe4ced |
Zynh0722 | 107c38d533 |
Zynh0722 | 33ad2f14a7 |
File diff suppressed because it is too large

@@ -5,15 +5,21 @@ edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
+
 [dependencies]
 async-bincode = { version = "0.7.0", features = ["tokio"] }
+async-trait = "0.1.72"
 async_zip = { version = "0.0.13", features = ["deflate", "tokio", "tokio-fs", "async-compression"] }
-axum = { version = "0.6.12", features = ["multipart", "http2", "headers"] }
+axum = { version = "0.6.12", features = ["multipart", "http2", "headers", "macros", "original-uri"] }
 bincode = "1.3.3"
 chrono = { version = "0.4.24", features = ["serde"] }
 futures = "0.3.28"
 headers = "0.3.8"
+leptos = { version = "0.4.6", features = ["ssr", "nightly", "tracing", "default-tls"] }
+leptos_meta = { version = "0.4.6", features = ["ssr"] }
+leptos_router = { version = "0.4.6", features = ["ssr"] }
 rand = { version = "0.8.5", features = ["small_rng"] }
+reqwest = { version = "0.11.18", features = ["json", "native-tls", "blocking"] }
 sanitize-filename-reader-friendly = "2.2.1"
 serde = { version = "1.0.160", features = ["serde_derive", "derive"] }
 serde_derive = "1.0.160"

@@ -16,6 +16,7 @@ body {
     padding: 1.5em;
     border-radius: 1em;
     border: 1px solid #25283d;
+    list-style: none;
 }
 
 .return-button {

@@ -10,7 +10,7 @@ body {
     margin: 0;
 }
 
-.main-form {
+.column-container {
     display: flex;
     flex-direction: column;
     justify-content: center;
@@ -25,6 +25,7 @@ body {
     justify-content: center;
 }
 
+
 .cat-img {
     width: 250px;
     height: 250px;

Binary file not shown (image added; size: 15 KiB).

@@ -1,54 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-
-<head>
-    <title>NyaZoom</title>
-    <meta charset="UTF-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-    <link href="css/main.css" rel="stylesheet">
-    <script>
-        fetch("https://catfact.ninja/fact")
-            .then(data => data.json())
-            .then(data => {
-                document.getElementById("cat-fact").innerHTML = data.fact;
-            });
-    </script>
-    <script>
-        document.addEventListener("DOMContentLoaded", () => {
-            let inputs = document.querySelectorAll('input#file');
-            Array.prototype.forEach.call(inputs, function (input) {
-                let label = input.nextElementSibling;
-                let labelVal = label.innerHTML;
-                input.addEventListener('change', function (e) {
-                    let fileName = '';
-
-                    if (this.files?.length > 1) {
-                        fileName = this.getAttribute('data-multiple-caption')?.replace('{count}', this.files.length);
-                    } else {
-                        fileName = e.target.value.split('\\').pop();
-                    }
-
-                    label.innerHTML = fileName || labelVal;
-                });
-            });
-        }, false);
-    </script>
-</head>
-
-<body>
-    <h1>NyaZoom<sup>2</sup></h1>
-    <div class="form-wrapper">
-        <form action="/upload" method="post" enctype="multipart/form-data" class="main-form">
-            <div class="cat-img-wrapper">
-                <img class="cat-img" src="https://cataas.com/cat?width=250&height=250" />
-            </div>
-            <input type="file" id="file" name="file" data-multiple-caption="{count} files selected" multiple />
-            <label for="file">Select Files</label>
-
-            <input type="submit" value="Get Link~">
-            <p id="cat-fact" />
-        </form>
-    </div>
-</body>
-
-</html>

@@ -0,0 +1,5 @@
+fetch("https://catfact.ninja/fact")
+    .then(data => data.json())
+    .then(data => {
+        document.getElementById("cat-fact").innerHTML = data.fact;
+    });

@@ -0,0 +1,18 @@
+document.addEventListener("DOMContentLoaded", () => {
+    let inputs = document.querySelectorAll('input#file');
+    Array.prototype.forEach.call(inputs, function(input) {
+        let label = input.nextElementSibling;
+        let labelVal = label.innerHTML;
+        input.addEventListener('change', function(e) {
+            let fileName = '';
+
+            if (this.files?.length > 1) {
+                fileName = this.getAttribute('data-multiple-caption')?.replace('{count}', this.files.length);
+            } else {
+                fileName = e.target.value.split('\\').pop();
+            }
+
+            label.innerHTML = fileName || labelVal;
+        });
+    });
+}, false);

@@ -0,0 +1,21 @@
+document.addEventListener("DOMContentLoaded", () => {
+    const params = new Proxy(new URLSearchParams(window.location.search), {
+        get: (searchParams, prop) => searchParams.get(prop),
+    });
+
+
+    if (params.link !== null) {
+        let link = `${window.location.origin}/download/${params.link}`;
+
+        let link_el = document.getElementById("link");
+
+        link_el.href = link;
+        link_el.innerHTML = link;
+    }
+});
+
+function clipboard() {
+    let copyText = document.getElementById("link");
+
+    navigator.clipboard?.writeText(copyText.href).then(() => alert("Copied: " + copyText.href));
+}

@@ -0,0 +1,3 @@
+htmx.on('#form', 'htmx:xhr:progress', function(evt) {
+    htmx.find('#progress').setAttribute('value', evt.detail.loaded / evt.detail.total * 100)
+});

@@ -0,0 +1,40 @@
+use crate::state::AppState;
+
+use super::error;
+
+use serde::Serialize;
+use tokio::io::AsyncReadExt;
+
+use std::io;
+
+use std::collections::HashMap;
+
+pub async fn write_to_cache<T, Y>(records: &HashMap<T, Y>) -> io::Result<()>
+where
+    T: Serialize,
+    Y: Serialize,
+{
+    let mut records_cache = tokio::fs::File::create(".cache/data").await.unwrap();
+
+    let mut buf: Vec<u8> = Vec::with_capacity(200);
+    bincode::serialize_into(&mut buf, records).map_err(|err| error::io_other(&err.to_string()))?;
+
+    let bytes_written = tokio::io::copy(&mut buf.as_slice(), &mut records_cache).await?;
+
+    tracing::debug!("state cache size: {}", bytes_written);
+
+    Ok(())
+}
+
+pub async fn fetch_cache() -> AppState {
+    let records = if let Ok(file) = tokio::fs::File::open(".cache/data").await.as_mut() {
+        let mut buf: Vec<u8> = Vec::with_capacity(200);
+        file.read_to_end(&mut buf).await.unwrap();
+
+        bincode::deserialize_from(&mut buf.as_slice()).unwrap()
+    } else {
+        HashMap::new()
+    };
+
+    AppState::new(records)
+}

src/main.rs (338 changed lines)

@@ -1,48 +1,45 @@
-use async_zip::tokio::write::ZipFileWriter;
-use async_zip::{Compression, ZipEntryBuilder};
+use async_zip::{tokio::write::ZipFileWriter, Compression, ZipEntryBuilder};
 
-use axum::body::StreamBody;
-use axum::extract::{ConnectInfo, State};
-use axum::http::{Request, StatusCode};
-use axum::middleware::{self, Next};
-use axum::response::{IntoResponse, Response};
-use axum::routing::{get, post};
-use axum::TypedHeader;
 use axum::{
-    extract::{DefaultBodyLimit, Multipart},
-    response::Redirect,
-    Router,
+    body::StreamBody,
+    extract::{ConnectInfo, DefaultBodyLimit, Multipart, State},
+    http::{Request, Response, StatusCode},
+    middleware::{self, Next},
+    response::{Html, IntoResponse, Redirect},
+    routing::{get, post},
+    Json, Router, TypedHeader,
 };
 
 use futures::TryStreamExt;
 
-use headers::{Header, HeaderName, HeaderValue};
-use rand::distributions::{Alphanumeric, DistString};
-use rand::rngs::SmallRng;
-use rand::SeedableRng;
+use headers::HeaderMap;
+use leptos::IntoView;
+use nyazoom_headers::ForwardedFor;
 
 use sanitize_filename_reader_friendly::sanitize;
 
-use serde::Serialize;
+use std::{io, net::SocketAddr, path::Path, time::Duration};
 
-use tokio::io::AsyncReadExt;
-use tokio_util::compat::FuturesAsyncWriteCompatExt;
-
-use std::collections::HashMap;
-use std::io;
-use std::net::SocketAddr;
-use std::path::Path;
-
-use tokio_util::io::{ReaderStream, StreamReader};
+use tokio_util::{
+    compat::FuturesAsyncWriteCompatExt,
+    io::{ReaderStream, StreamReader},
+};
 
 use tower_http::{limit::RequestBodyLimitLayer, services::ServeDir, trace::TraceLayer};
 
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
 
+mod cache;
+mod nyazoom_headers;
 mod state;
+mod util;
+mod views;
 
 use state::{AppState, UploadRecord};
 
+use crate::state::AsyncRemoveRecord;
+use crate::views::{DownloadLinkPage, HtmxPage, LinkView, Welcome};
+
 pub mod error {
     use std::io::{Error, ErrorKind};
 
@@ -63,20 +60,45 @@ async fn main() -> io::Result<()> {
         .init();
 
     // uses create_dir_all to create both .cache and serve inside it in one go
-    make_dir(".cache/serve").await?;
+    util::make_dir(".cache/serve").await?;
 
-    let state = fetch_cache().await;
+    let state = cache::fetch_cache().await;
+
+    // Spawn a repeating task that will clean files periodically
+    tokio::spawn({
+        let state = state.clone();
+        async move {
+            loop {
+                tokio::time::sleep(Duration::from_secs(15 * 60)).await;
+                tracing::info!("Cleaning Sweep!");
+
+                let mut records = state.records.lock().await;
+
+                for (key, record) in records.clone().into_iter() {
+                    if !record.can_be_downloaded() {
+                        tracing::info!("culling: {:?}", record);
+                        records.remove_record(&key).await.unwrap();
+                    }
+                }
+            }
+        }
+    });
 
     // Router Setup
     let app = Router::new()
+        .route("/", get(welcome))
         .route("/upload", post(upload_to_zip))
+        .route("/records", get(records))
+        .route("/records/links", get(records_links))
         .route("/download/:id", get(download))
+        .route("/link/:id", get(link).delete(link_delete))
+        .route("/link/:id/remaining", get(remaining))
         .layer(DefaultBodyLimit::disable())
        .layer(RequestBodyLimitLayer::new(
             10 * 1024 * 1024 * 1024, // 10GiB
         ))
         .with_state(state)
-        .nest_service("/", ServeDir::new("dist"))
+        .fallback_service(ServeDir::new("dist"))
         .layer(TraceLayer::new_for_http())
         .layer(middleware::from_fn(log_source));
 
@@ -91,23 +113,110 @@ async fn main() -> io::Result<()> {
     Ok(())
 }
 
-// async fn log_source<B>(
-//     ConnectInfo(addr): ConnectInfo<SocketAddr>,
-//     req: Request<B>,
-//     next: Next<B>,
-// ) -> Response {
-//     tracing::info!("{}", addr);
-//
-//     next.run(req).await
-// }
+async fn remaining(
+    State(state): State<AppState>,
+    axum::extract::Path(id): axum::extract::Path<String>,
+) -> impl IntoResponse {
+    let records = state.records.lock().await;
+    if let Some(record) = records.get(&id) {
+        let downloads_remaining = record.downloads_remaining();
+        let plural = if downloads_remaining > 1 { "s" } else { "" };
+        let out = format!(
+            "You have {} download{} remaining!",
+            downloads_remaining, plural
+        );
+        Html(out)
+    } else {
+        Html("?".to_string())
+    }
+}
+
+async fn welcome() -> impl IntoResponse {
+    let cat_fact = views::get_cat_fact().await;
+    Html(leptos::ssr::render_to_string(move |cx| {
+        leptos::view! { cx, <Welcome fact=cat_fact /> }
+    }))
+}
+
+async fn records(State(state): State<AppState>) -> impl IntoResponse {
+    Json(state.records.lock().await.clone())
+}
+
+// This function is to remain ugly until that time in which I properly hide
+// this behind some kind of authentication
+async fn records_links(State(state): State<AppState>) -> impl IntoResponse {
+    let records = state.records.lock().await.clone();
+    Html(leptos::ssr::render_to_string(move |cx| {
+        leptos::view! { cx,
+            <HtmxPage>
+                <div class="form-wrapper">
+                    <div class="column-container">
+                        <ul>
+                            {records.keys().map(|key| leptos::view! { cx,
+                                <li class="link-wrapper">
+                                    <a href="/link/{key}">{key}</a>
+                                    <button style="margin-left: 1em;"
+                                        hx-target="closest .link-wrapper"
+                                        hx-swap="outerHTML"
+                                        hx-delete="/link/{key}">X</button>
+                                </li>
+                            })
+                            .collect::<Vec<_>>()}
+                        </ul>
+                    </div>
+                </div>
+            </HtmxPage>
+        }
+    }))
+}
+
+async fn link(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    State(mut state): State<AppState>,
+) -> Result<Html<String>, Redirect> {
+    {
+        let mut records = state.records.lock().await;
+
+        if let Some(record) = records
+            .get_mut(&id)
+            .filter(|record| record.can_be_downloaded())
+        {
+            if record.can_be_downloaded() {
+                return Ok(Html(leptos::ssr::render_to_string({
+                    let record = record.clone();
+                    |cx| {
+                        leptos::view! { cx, <DownloadLinkPage id=id record=record /> }
+                    }
+                })));
+            }
+        }
+    }
+
+    // TODO: This....
+    state.remove_record(&id).await.unwrap();
+
+    Err(Redirect::to(&format!("/404.html")))
+}
+
+async fn link_delete(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    State(mut state): State<AppState>,
+) -> Result<Html<String>, (StatusCode, String)> {
+    state
+        .remove_record(&id)
+        .await
+        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+
+    Ok(Html("".to_string()))
+}
+
 async fn log_source<B>(
     ConnectInfo(addr): ConnectInfo<SocketAddr>,
-    TypedHeader(ForwardedFor(forwarded_for)): TypedHeader<ForwardedFor>,
+    forwarded_for: Option<TypedHeader<ForwardedFor>>,
     req: Request<B>,
     next: Next<B>,
-) -> Response {
-    tracing::info!("{} : {}", addr, forwarded_for);
+) -> impl IntoResponse {
+    tracing::info!("{} : {:?}", addr, forwarded_for);
 
     next.run(req).await
 }
 
@@ -115,10 +224,10 @@ async fn log_source<B>(
 async fn upload_to_zip(
     State(state): State<AppState>,
     mut body: Multipart,
-) -> Result<Redirect, (StatusCode, String)> {
+) -> Result<Response<String>, (StatusCode, String)> {
     tracing::debug!("{:?}", *state.records.lock().await);
 
-    let cache_name = get_random_name(10);
+    let cache_name = util::get_random_name(10);
 
     let archive_path = Path::new(".cache/serve").join(&format!("{}.zip", &cache_name));
 
@@ -160,142 +269,61 @@ async fn upload_to_zip(
     }
 
     let mut records = state.records.lock().await;
-    records.insert(cache_name.clone(), UploadRecord::new(archive_path));
+    let record = UploadRecord::new(archive_path);
+    records.insert(cache_name.clone(), record.clone());
 
-    write_to_cache(&records)
+    cache::write_to_cache(&records)
         .await
         .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
 
     writer.close().await.unwrap();
 
-    Ok(Redirect::to(&format!("/link.html?link={}", cache_name)))
+    let id = cache_name;
+    let response = Response::builder()
+        .status(200)
+        .header("Content-Type", "text/html")
+        .header("HX-Push-Url", format!("/link/{}", &id))
+        .body(leptos::ssr::render_to_string(|cx| {
+            leptos::view! { cx, <LinkView id record /> }
+        }))
+        .unwrap();
+
+    Ok(response)
 }
 
 async fn download(
     axum::extract::Path(id): axum::extract::Path<String>,
+    headers: HeaderMap,
     State(state): State<AppState>,
 ) -> Result<axum::response::Response, (StatusCode, String)> {
-    let mut records = state.records.lock().await;
+    {
+        let mut records = state.records.lock().await;
+        if headers.get("hx-request").is_some() {
+            return Ok(axum::http::Response::builder()
+                .header("HX-Redirect", format!("/download/{id}"))
+                .status(204)
+                .body("".to_owned())
+                .unwrap()
+                .into_response());
+        }
 
-    if let Some(record) = records.get_mut(&id) {
-        if record.can_be_downloaded() {
+        if let Some(record) = records
+            .get_mut(&id)
+            .filter(|record| record.can_be_downloaded())
+        {
             record.downloads += 1;
 
             let file = tokio::fs::File::open(&record.file).await.unwrap();
 
-            return Ok(axum::http::Response::builder()
+            return Ok(axum::response::Response::builder()
                 .header("Content-Type", "application/zip")
                 .body(StreamBody::new(ReaderStream::new(file)))
                 .unwrap()
                 .into_response());
         } else {
-            let _ = tokio::fs::remove_file(&record.file);
-            records.remove(&id);
-            write_to_cache(&records).await.unwrap();
+            records.remove_record(&id).await.unwrap()
         }
     }
 
     Ok(Redirect::to("/404.html").into_response())
 }
-
-#[inline]
-async fn make_dir<T>(name: T) -> io::Result<()>
-where
-    T: AsRef<Path>,
-{
-    tokio::fs::create_dir_all(name)
-        .await
-        .or_else(|err| match err.kind() {
-            io::ErrorKind::AlreadyExists => Ok(()),
-            _ => Err(err),
-        })
-}
-
-#[inline]
-fn get_random_name(len: usize) -> String {
-    let mut rng = SmallRng::from_entropy();
-
-    Alphanumeric.sample_string(&mut rng, len)
-}
-
-async fn write_to_cache<T, Y>(records: &HashMap<T, Y>) -> io::Result<()>
-where
-    T: Serialize,
-    Y: Serialize,
-{
-    let mut records_cache = tokio::fs::File::create(".cache/data").await.unwrap();
-
-    let mut buf: Vec<u8> = Vec::with_capacity(200);
-    bincode::serialize_into(&mut buf, &*records)
-        .map_err(|err| error::io_other(&err.to_string()))?;
-
-    let bytes_written = tokio::io::copy(&mut buf.as_slice(), &mut records_cache).await?;
-
-    tracing::debug!("state cache size: {}", bytes_written);
-
-    Ok(())
-}
-
-async fn fetch_cache() -> AppState {
-    let records = if let Ok(file) = tokio::fs::File::open(".cache/data").await.as_mut() {
-        let mut buf: Vec<u8> = Vec::with_capacity(200);
-        file.read_to_end(&mut buf).await.unwrap();
-
-        bincode::deserialize_from(&mut buf.as_slice()).unwrap()
-    } else {
-        HashMap::new()
-    };
-
-    AppState::new(records)
-}
-
-#[allow(dead_code)]
-static UNITS: [&str; 6] = ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB"];
-
-// This function is actually rather interesting to me, I understand that rust is
-// very powerful, and its very safe, but i find it rather amusing that the [] operator
-// doesn't check bounds, meaning it can panic at runtime. Usually rust is very
-// very careful about possible panics
-//
-// although this function shouldn't be able to panic at runtime due to known bounds
-// being listened to
-#[inline]
-fn _bytes_to_human_readable(bytes: u64) -> String {
-    let mut running = bytes as f64;
-    let mut count = 0;
-    while running > 1024.0 && count <= 6 {
-        running /= 1024.0;
-        count += 1;
-    }
-
-    format!("{:.2} {}", running, UNITS[count - 1])
-}
-
-struct ForwardedFor(String);
-
-static FF_TEXT: &str = "x-forwarded-for";
-static FF_NAME: HeaderName = HeaderName::from_static(FF_TEXT);
-
-impl Header for ForwardedFor {
-    fn name() -> &'static HeaderName {
-        &FF_NAME
-    }
-
-    fn decode<'i, I>(values: &mut I) -> Result<Self, headers::Error>
-    where
-        Self: Sized,
-        I: Iterator<Item = &'i headers::HeaderValue>,
-    {
-        let value = values
-            .next()
-            .ok_or_else(headers::Error::invalid)?
-            .to_str()
-            .map_err(|_| headers::Error::invalid())?
-            .to_owned();
-
-        Ok(ForwardedFor(value))
-    }
-
-    fn encode<E: Extend<headers::HeaderValue>>(&self, values: &mut E) {
-        values.extend(std::iter::once(HeaderValue::from_str(&self.0).unwrap()));
-    }
-}

@@ -0,0 +1,33 @@
+use headers::{self, Header, HeaderName, HeaderValue};
+
+#[derive(Debug)]
+pub struct ForwardedFor(String);
+
+pub static FF_TEXT: &str = "x-forwarded-for";
+
+pub static FF_NAME: HeaderName = HeaderName::from_static(FF_TEXT);
+
+impl Header for ForwardedFor {
+    fn name() -> &'static HeaderName {
+        &FF_NAME
+    }
+
+    fn decode<'i, I>(values: &mut I) -> Result<Self, headers::Error>
+    where
+        Self: Sized,
+        I: Iterator<Item = &'i headers::HeaderValue>,
+    {
+        let value = values
+            .next()
+            .ok_or_else(headers::Error::invalid)?
+            .to_str()
+            .map_err(|_| headers::Error::invalid())?
+            .to_owned();
+
+        Ok(ForwardedFor(value))
+    }
+
+    fn encode<E: Extend<headers::HeaderValue>>(&self, values: &mut E) {
+        values.extend(std::iter::once(HeaderValue::from_str(&self.0).unwrap()));
+    }
+}

src/state.rs (52 changed lines)

@@ -1,15 +1,19 @@
 use std::{
-    collections::HashMap,
+    collections::{hash_map::Entry, HashMap},
+    io::ErrorKind,
     path::{Path, PathBuf},
     sync::Arc,
 };
 
-use chrono::{DateTime, Utc};
+use async_trait::async_trait;
+use chrono::{DateTime, Duration, Utc};
 use serde::{Deserialize, Serialize};
 use tokio::sync::Mutex;
 
+use crate::cache;
+
 #[allow(dead_code)]
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct UploadRecord {
     pub uploaded: DateTime<Utc>,
     pub file: PathBuf,
|
@ -26,7 +30,13 @@ impl UploadRecord {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn can_be_downloaded(&self) -> bool {
|
pub fn can_be_downloaded(&self) -> bool {
|
||||||
self.downloads < self.max_downloads
|
let dur_since_upload = Utc::now().signed_duration_since(self.uploaded);
|
||||||
|
|
||||||
|
dur_since_upload < Duration::days(3) && self.downloads < self.max_downloads
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn downloads_remaining(&self) -> u8 {
|
||||||
|
self.max_downloads - self.downloads
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -36,7 +46,7 @@ impl Default for UploadRecord {
             uploaded: Utc::now(),
             file: Path::new("").to_owned(),
             downloads: 0,
-            max_downloads: 1,
+            max_downloads: 5,
         }
     }
 }
@@ -53,3 +63,35 @@ impl AppState {
         }
     }
 }
+
+#[async_trait]
+pub trait AsyncRemoveRecord {
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error>;
+}
+
+#[async_trait]
+impl AsyncRemoveRecord for AppState {
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error> {
+        let mut records = self.records.lock().await;
+        records.remove_record(id).await
+    }
+}
+
+#[async_trait]
+impl AsyncRemoveRecord for HashMap<String, UploadRecord> {
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error> {
+        match self.entry(id.clone()) {
+            Entry::Occupied(entry) => {
+                tokio::fs::remove_file(&entry.get().file).await?;
+                entry.remove_entry();
+                cache::write_to_cache(&self).await?;
+
+                Ok(())
+            }
+            Entry::Vacant(_) => Err(std::io::Error::new(
+                ErrorKind::Other,
+                "No UploadRecord Found",
+            )),
+        }
+    }
+}

@@ -0,0 +1,49 @@
+use rand::{
+    distributions::{Alphanumeric, DistString},
+    rngs::SmallRng,
+    SeedableRng,
+};
+
+use std::{io, path::Path};
+
+#[inline]
+pub async fn make_dir<T>(name: T) -> io::Result<()>
+where
+    T: AsRef<Path>,
+{
+    tokio::fs::create_dir_all(name)
+        .await
+        .or_else(|err| match err.kind() {
+            io::ErrorKind::AlreadyExists => Ok(()),
+            _ => Err(err),
+        })
+}
+
+#[inline]
+pub fn get_random_name(len: usize) -> String {
+    let mut rng = SmallRng::from_entropy();
+
+    Alphanumeric.sample_string(&mut rng, len)
+}
+
+#[allow(dead_code)]
+pub static UNITS: [&str; 6] = ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB"];
+
+// This function is actually rather interesting to me, I understand that rust is
+// very powerful, and its very safe, but i find it rather amusing that the [] operator
+// doesn't check bounds, meaning it can panic at runtime. Usually rust is very
+// very careful about possible panics
+//
+// although this function shouldn't be able to panic at runtime due to known bounds
+// being listened to
+#[inline]
+pub fn _bytes_to_human_readable(bytes: u64) -> String {
+    let mut running = bytes as f64;
+    let mut count = 0;
+    while running > 1024.0 && count <= 6 {
+        running /= 1024.0;
+        count += 1;
+    }
+
+    format!("{:.2} {}", running, UNITS[count - 1])
+}

@@ -0,0 +1,106 @@
+use futures::TryFutureExt;
+use leptos::{component, view, Children, IntoView, Scope};
+use serde::Deserialize;
+
+use crate::state::UploadRecord;
+
+#[derive(Debug, Deserialize)]
+pub struct CatFact {
+    pub fact: String,
+}
+
+pub async fn get_cat_fact() -> String {
+    reqwest::get("https://catfact.ninja/fact")
+        .and_then(|res| res.json())
+        .map_ok(|cf: CatFact| cf.fact)
+        .await
+        .unwrap_or_else(|_| String::from("The cat fact goddess has failed me :<"))
+}
+
+// {https://api.thecatapi.com/v1/images/search?size=small&format=src}
+// {https://cataas.com/cat?width=250&height=250}
+#[component]
+pub fn Welcome(cx: Scope, fact: String) -> impl IntoView {
+    view! { cx,
+        <HtmxPage>
+            <div class="form-wrapper">
+                <WelcomeView fact />
+            </div>
+        </HtmxPage>
+    }
+}
+
+#[component]
+pub fn WelcomeView(cx: Scope, fact: String) -> impl IntoView {
+    view! {
+        cx,
+        <form id="form" hx-swap="outerHTML" hx-post="/upload" hx-encoding="multipart/form-data" class="column-container">
+            <div class="cat-img-wrapper">
+                <img class="cat-img" src="https://api.thecatapi.com/v1/images/search?size=small&format=src" />
+            </div>
+            <input type="file" id="file" name="file" data-multiple-caption="{{count}} files selected" multiple />
+            <label for="file">Select Files</label>
+
+            <input type="submit" value="Get Link~" />
+            <p id="cat-fact">{fact}</p>
+            <progress id="progress" class="htmx-indicator" value="0" max="100"></progress>
+        </form>
+        <script src="/scripts/loading_progress.js" />
+    }
+}
+
+// <link href="../dist/css/link.css" rel="stylesheet" />
+// #TODO: Handle pushing cleaner
+#[component]
+pub fn DownloadLinkPage(cx: Scope, id: String, record: UploadRecord) -> impl IntoView {
+    view! { cx,
+        <HtmxPage>
+            <div class="form-wrapper">
+                <LinkView id record />
+            </div>
+        </HtmxPage>
+    }
+}
+
+#[component]
+pub fn HtmxPage(cx: Scope, children: Children) -> impl IntoView {
+    view! { cx,
+        <head>
+            <title>Nyazoom</title>
+            <meta charset="UTF-8" />
+            <meta name="viewport" content="width=device-width, initial-scale=1" />
+            <link href="/css/main.css" rel="stylesheet" />
+            <link href="/css/link.css" rel="stylesheet" />
+            <script src="/scripts/file_label.js" />
+            <script src="/scripts/link.js" />
+            <script src="https://unpkg.com/htmx.org@1.9.4" integrity="sha384-zUfuhFKKZCbHTY6aRR46gxiqszMk5tcHjsVFxnUo8VMus4kHGVdIYVbOYYNlKmHV" crossorigin="anonymous"></script>
+        </head>
+
+        <body>
+            <h1>NyaZoom<sup>2</sup></h1>
+            {children(cx)}
+        </body>
+    }
+}
+
+#[component]
+pub fn LinkView(cx: Scope, id: String, record: UploadRecord) -> impl IntoView {
+    let downloads_remaining = record.max_downloads - record.downloads;
+    let plural = if downloads_remaining > 1 { "s" } else { "" };
+    view! {
+        cx,
+        <div class="column-container">
+            <div class="link-wrapper">
+                <a id="link" href="/download/{id}">Download Now!</a>
+            </div>
+
+            <div class="link-wrapper" hx-get="/link/{id}/remaining" hx-trigger="click from:#link delay:0.2s, every 10s" >
+                You have {record.downloads_remaining()} download{plural} remaining!
+            </div>
+            <button class="return-button" onclick="clipboard()">Copy to Clipboard</button>
+
+
+            <a href="/" class="return-button">Return to home</a>
+        </div>
+    }
+}