Compare commits

1 commit

Author | SHA1 | Date
---|---|---
NullCube | 95e6a837f3 |
@@ -1,3 +1,4 @@
 /target
 .DS_Store
 /.cache
+.direnv

@@ -1 +0,0 @@
-edition = "2021"
File diff suppressed because it is too large

Cargo.toml (38 lines changed)
@@ -8,42 +8,24 @@ edition = "2021"

 [dependencies]
 async-bincode = { version = "0.7.0", features = ["tokio"] }
-async-trait = "0.1.81"
-async_zip = { version = "0.0.17", features = [
-    "deflate",
-    "tokio",
-    "tokio-fs",
-    "async-compression",
-] }
-http-body-util = "0.1.2"
-axum = { version = "0.7.5", features = [
-    "multipart",
-    "http2",
-    "macros",
-    "original-uri",
-] }
-axum-extra = { version = "0.9.0", features = ["typed-header"] }
+async-trait = "0.1.72"
+async_zip = { version = "0.0.13", features = ["deflate", "tokio", "tokio-fs", "async-compression"] }
+axum = { version = "0.6.12", features = ["multipart", "http2", "headers", "macros", "original-uri"] }
 bincode = "1.3.3"
 chrono = { version = "0.4.24", features = ["serde"] }
 futures = "0.3.28"
-headers = "0.4.0"
-leptos = { version = "0.6.14", features = [
-    "ssr",
-    "nightly",
-    "tracing",
-    "default-tls",
-    "experimental-islands",
-] }
-leptos_meta = { version = "0.6.14", features = ["ssr"] }
-leptos_router = { version = "0.6.14", features = ["ssr"] }
+headers = "0.3.8"
+leptos = { version = "0.4.6", features = ["ssr", "nightly", "tracing", "default-tls"] }
+leptos_meta = { version = "0.4.6", features = ["ssr"] }
+leptos_router = { version = "0.4.6", features = ["ssr"] }
 rand = { version = "0.8.5", features = ["small_rng"] }
-reqwest = { version = "0.12.7", features = ["json", "native-tls", "blocking"] }
+reqwest = { version = "0.11.18", features = ["json", "native-tls", "blocking"] }
 sanitize-filename-reader-friendly = "2.2.1"
 serde = { version = "1.0.160", features = ["serde_derive", "derive"] }
 serde_derive = "1.0.160"
 tokio = { version = "1.27.0", features = ["full"] }
 tokio-util = { version = "0.7.7", features = ["io"] }
-tower = { version = "0.5.0", features = ["util"] }
-tower-http = { version = "0.5.0", features = ["fs", "trace", "limit"] }
+tower = { version = "0.4.13", features = ["util"] }
+tower-http = { version = "0.4.0", features = ["fs", "trace", "limit"] }
 tracing = "0.1.37"
 tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+    <title></title>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <link href="css/link.css" rel="stylesheet">
+
+    <script>
+        document.addEventListener("DOMContentLoaded", () => {
+            const params = new Proxy(new URLSearchParams(window.location.search), {
+                get: (searchParams, prop) => searchParams.get(prop),
+            });
+
+
+            if (params.link !== null) {
+                let link = `${window.location.origin}/download/${params.link}`;
+
+                let link_el = document.getElementById("link");
+
+                link_el.href = link;
+                link_el.innerHTML = link;
+            }
+        });
+
+        function clipboard() {
+            let copyText = document.getElementById("link");
+
+            navigator.clipboard.writeText(copyText.href).then(() => alert("Copied: " + copyText.href));
+        }
+    </script>
+</head>
+
+<body>
+    <div class="link-wrapper">
+        <a id="link" href=""></a>
+    </div>
+
+    <button class="return-button" onclick="clipboard()">Copy to Clipboard</button>
+
+    <a href="/" class="return-button">Return to home</a>
+</body>
+
+</html>

@@ -0,0 +1,5 @@
+fetch("https://catfact.ninja/fact")
+    .then(data => data.json())
+    .then(data => {
+        document.getElementById("cat-fact").innerHTML = data.fact;
+    });
@@ -1,3 +1,19 @@
+document.addEventListener("DOMContentLoaded", () => {
+    const params = new Proxy(new URLSearchParams(window.location.search), {
+        get: (searchParams, prop) => searchParams.get(prop),
+    });
+
+
+    if (params.link !== null) {
+        let link = `${window.location.origin}/download/${params.link}`;
+
+        let link_el = document.getElementById("link");
+
+        link_el.href = link;
+        link_el.innerHTML = link;
+    }
+});
+
 function clipboard() {
     let copyText = document.getElementById("link");

@@ -1,2 +0,0 @@
-[attr_values]
-class = "Tailwind" # "Tailwind" is the only attribute value formatter available for now

@@ -1,2 +0,0 @@
-[rustfmt]
-overrideCommand = ["leptosfmt", "--stdin", "--rustfmt"]
@@ -1,5 +1,7 @@
 use crate::state::AppState;

+use super::error;
+
 use serde::Serialize;
 use tokio::io::AsyncReadExt;

@@ -15,7 +17,7 @@ where
     let mut records_cache = tokio::fs::File::create(".cache/data").await.unwrap();

     let mut buf: Vec<u8> = Vec::with_capacity(200);
-    bincode::serialize_into(&mut buf, records).map_err(io::Error::other)?;
+    bincode::serialize_into(&mut buf, records).map_err(|err| error::io_other(&err.to_string()))?;

     let bytes_written = tokio::io::copy(&mut buf.as_slice(), &mut records_cache).await?;

src/main.rs (324 lines changed)
@@ -1,83 +1,329 @@
-use axum::{
-    extract::{ConnectInfo, Request},
-    middleware::{self, Next},
-    response::{Html, IntoResponse},
-    routing::get,
-    Router,
-};
-use axum_extra::TypedHeader;
-use tower_http::{services::ServeDir, trace::TraceLayer};
+use async_zip::{tokio::write::ZipFileWriter, Compression, ZipEntryBuilder};

-use std::{io, net::SocketAddr};
+use axum::{
+    body::StreamBody,
+    extract::{ConnectInfo, DefaultBodyLimit, Multipart, State},
+    http::{Request, Response, StatusCode},
+    middleware::{self, Next},
+    response::{Html, IntoResponse, Redirect},
+    routing::{get, post},
+    Json, Router, TypedHeader,
+};
+
+use futures::TryStreamExt;
+
+use headers::HeaderMap;
+use leptos::IntoView;
+use nyazoom_headers::ForwardedFor;
+
+use sanitize_filename_reader_friendly::sanitize;
+
+use std::{io, net::SocketAddr, path::Path, time::Duration};
+
+use tokio_util::{
+    compat::FuturesAsyncWriteCompatExt,
+    io::{ReaderStream, StreamReader},
+};
+
+use tower_http::{limit::RequestBodyLimitLayer, services::ServeDir, trace::TraceLayer};
+
+use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

 mod cache;
-mod router;
+mod nyazoom_headers;
 mod state;
 mod util;
 mod views;

-use util::{headers::ForwardedFor, logging, ssr, sweeper};
+use state::{AppState, UploadRecord};

-use router::*;
-use state::*;
-use views::*;
+use crate::state::AsyncRemoveRecord;
+use crate::views::{DownloadLinkPage, HtmxPage, LinkView, Welcome};
+
+pub mod error {
+    use std::io::{Error, ErrorKind};
+
+    pub fn io_other(s: &str) -> Error {
+        Error::new(ErrorKind::Other, s)
+    }
+}

 #[tokio::main]
 async fn main() -> io::Result<()> {
-    logging::init_tracing();
+    // Set up logging
+    tracing_subscriber::registry()
+        .with(
+            tracing_subscriber::EnvFilter::try_from_default_env()
+                .unwrap_or_else(|_| "nyazoom=debug,tower_http=debug".into()),
+        )
+        .with(tracing_subscriber::fmt::layer())
+        .init();

     // uses create_dir_all to create both .cache and serve inside it in one go
     util::make_dir(".cache/serve").await?;

     let state = cache::fetch_cache().await;

-    sweeper::spawn(state.clone());
+    // Spawn a repeating task that will clean files periodically
+    tokio::spawn({
+        let state = state.clone();
+        async move {
+            loop {
+                tokio::time::sleep(Duration::from_secs(15 * 60)).await;
+                tracing::info!("Cleaning Sweep!");
+
+                let mut records = state.records.lock().await;
+
+                for (key, record) in records.clone().into_iter() {
+                    if !record.can_be_downloaded() {
+                        tracing::info!("culling: {:?}", record);
+                        records.remove_record(&key).await.unwrap();
+                    }
+                }
+            }
+        }
+    });

     // Router Setup
     let app = Router::new()
         .route("/", get(welcome))
-        .nest("/download", get_download_router())
-        .nest("/upload", get_upload_router())
-        .nest("/records", get_records_router())
-        .nest("/link", get_link_router())
+        .route("/upload", post(upload_to_zip))
+        .route("/records", get(records))
+        .route("/records/links", get(records_links))
+        .route("/download/:id", get(download))
+        .route("/link/:id", get(link).delete(link_delete))
+        .route("/link/:id/remaining", get(remaining))
+        .layer(DefaultBodyLimit::disable())
+        .layer(RequestBodyLimitLayer::new(
+            10 * 1024 * 1024 * 1024, // 10GiB
+        ))
         .with_state(state)
         .fallback_service(ServeDir::new("dist"))
         .layer(TraceLayer::new_for_http())
         .layer(middleware::from_fn(log_source));

-    serve(app).await;
+    // Server creation
+    let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
+    tracing::debug!("listening on http://{}/", addr);
+    axum::Server::bind(&addr)
+        .serve(app.into_make_service_with_connect_info::<SocketAddr>())
+        .await
+        .unwrap();

     Ok(())
 }

+async fn remaining(
+    State(state): State<AppState>,
+    axum::extract::Path(id): axum::extract::Path<String>,
+) -> impl IntoResponse {
+    let records = state.records.lock().await;
+    if let Some(record) = records.get(&id) {
+        let downloads_remaining = record.downloads_remaining();
+        let plural = if downloads_remaining > 1 { "s" } else { "" };
+        let out = format!(
+            "You have {} download{} remaining!",
+            downloads_remaining, plural
+        );
+        Html(out)
+    } else {
+        Html("?".to_string())
+    }
+}
+
 async fn welcome() -> impl IntoResponse {
-    let fact = views::get_cat_fact().await;
-    Html(ssr::render(move || {
-        leptos::view! { <WelcomePage fact /> }
+    let cat_fact = views::get_cat_fact().await;
+    Html(leptos::ssr::render_to_string(move |cx| {
+        leptos::view! { cx, <Welcome fact=cat_fact /> }
     }))
 }

-async fn log_source(
+async fn records(State(state): State<AppState>) -> impl IntoResponse {
+    Json(state.records.lock().await.clone())
+}
+
+// This function is to remain ugly until that time in which I properly hide
+// this behind some kind of authentication
+async fn records_links(State(state): State<AppState>) -> impl IntoResponse {
+    let records = state.records.lock().await.clone();
+    Html(leptos::ssr::render_to_string(move |cx| {
+        leptos::view! { cx,
+            <HtmxPage>
+                <div class="form-wrapper">
+                    <div class="column-container">
+                        <ul>
+                            {records.keys().map(|key| leptos::view! { cx,
+                                <li class="link-wrapper">
+                                    <a href="/link/{key}">{key}</a>
+                                    <button style="margin-left: 1em;"
+                                        hx-target="closest .link-wrapper"
+                                        hx-swap="outerHTML"
+                                        hx-delete="/link/{key}">X</button>
+                                </li>
+                            })
+                            .collect::<Vec<_>>()}
+                        </ul>
+                    </div>
+                </div>
+            </HtmxPage>
+        }
+    }))
+}
+
+async fn link(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    State(mut state): State<AppState>,
+) -> Result<Html<String>, Redirect> {
+    {
+        let mut records = state.records.lock().await;
+
+        if let Some(record) = records
+            .get_mut(&id)
+            .filter(|record| record.can_be_downloaded())
+        {
+            if record.can_be_downloaded() {
+                return Ok(Html(leptos::ssr::render_to_string({
+                    let record = record.clone();
+                    |cx| {
+                        leptos::view! { cx, <DownloadLinkPage id=id record=record /> }
+                    }
+                })));
+            }
+        }
+    }
+
+    // TODO: This....
+    state.remove_record(&id).await.unwrap();
+
+    Err(Redirect::to(&format!("/404.html")))
+}
+
+async fn link_delete(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    State(mut state): State<AppState>,
+) -> Result<Html<String>, (StatusCode, String)> {
+    state
+        .remove_record(&id)
+        .await
+        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+
+    Ok(Html("".to_string()))
+}
+
+async fn log_source<B>(
     ConnectInfo(addr): ConnectInfo<SocketAddr>,
     forwarded_for: Option<TypedHeader<ForwardedFor>>,
-    req: Request,
-    next: Next,
+    req: Request<B>,
+    next: Next<B>,
 ) -> impl IntoResponse {
     tracing::info!("{} : {:?}", addr, forwarded_for);

     next.run(req).await
 }

-async fn serve(app: Router) {
-    // // Server creation
-    let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
-    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
-    axum::serve(
-        listener,
-        app.into_make_service_with_connect_info::<SocketAddr>(),
-    )
-    .await
-    .unwrap();
+async fn upload_to_zip(
+    State(state): State<AppState>,
+    mut body: Multipart,
+) -> Result<Response<String>, (StatusCode, String)> {
+    tracing::debug!("{:?}", *state.records.lock().await);

-    tracing::debug!("listening on http://{}/", addr);
+    let cache_name = util::get_random_name(10);
+
+    let archive_path = Path::new(".cache/serve").join(&format!("{}.zip", &cache_name));
+
+    tracing::debug!("Zipping: {:?}", &archive_path);
+
+    let mut archive = tokio::fs::File::create(&archive_path)
+        .await
+        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+    let mut writer = ZipFileWriter::new(&mut archive);
+
+    while let Some(field) = body.next_field().await.unwrap() {
+        let file_name = match field.file_name() {
+            Some(file_name) => sanitize(file_name),
+            _ => continue,
+        };
+
+        tracing::debug!("Downloading to Zip: {file_name:?}");
+
+        let stream = field;
+        let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
+        let mut body_reader = StreamReader::new(body_with_io_error);
+
+        let builder = ZipEntryBuilder::new(file_name, Compression::Deflate);
+        let mut entry_writer = writer
+            .write_entry_stream(builder)
+            .await
+            .unwrap()
+            .compat_write();
+
+        tokio::io::copy(&mut body_reader, &mut entry_writer)
+            .await
+            .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+
+        entry_writer
+            .into_inner()
+            .close()
+            .await
+            .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+    }
+
+    let mut records = state.records.lock().await;
+    let record = UploadRecord::new(archive_path);
+    records.insert(cache_name.clone(), record.clone());
+
+    cache::write_to_cache(&records)
+        .await
+        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
+
+    writer.close().await.unwrap();
+
+    let id = cache_name;
+    let response = Response::builder()
+        .status(200)
+        .header("Content-Type", "text/html")
+        .header("HX-Push-Url", format!("/link/{}", &id))
+        .body(leptos::ssr::render_to_string(|cx| {
+            leptos::view! { cx, <LinkView id record /> }
+        }))
+        .unwrap();
+
+    Ok(response)
+}
+
+async fn download(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    headers: HeaderMap,
+    State(state): State<AppState>,
+) -> Result<axum::response::Response, (StatusCode, String)> {
+    {
+        let mut records = state.records.lock().await;
+        if headers.get("hx-request").is_some() {
+            return Ok(axum::http::Response::builder()
+                .header("HX-Redirect", format!("/download/{id}"))
+                .status(204)
+                .body("".to_owned())
+                .unwrap()
+                .into_response());
+        }
+
+        if let Some(record) = records
+            .get_mut(&id)
+            .filter(|record| record.can_be_downloaded())
+        {
+            record.downloads += 1;
+
+            let file = tokio::fs::File::open(&record.file).await.unwrap();
+
+            return Ok(axum::response::Response::builder()
+                .header("Content-Type", "application/zip")
+                .body(StreamBody::new(ReaderStream::new(file)))
+                .unwrap()
+                .into_response());
+        } else {
+            records.remove_record(&id).await.unwrap()
+        }
+    }
+
+    Ok(Redirect::to("/404.html").into_response())
 }
@@ -1,41 +0,0 @@
-use axum::{
-    body::Body,
-    extract::State,
-    response::{IntoResponse, Redirect},
-    routing::get,
-    Router,
-};
-use reqwest::StatusCode;
-use tokio_util::io::ReaderStream;
-
-use crate::{AppState, AsyncRemoveRecord};
-
-pub fn get_download_router() -> Router<AppState> {
-    Router::new().route("/:id", get(download))
-}
-
-async fn download(
-    axum::extract::Path(id): axum::extract::Path<String>,
-    State(state): State<AppState>,
-) -> Result<axum::response::Response, (StatusCode, String)> {
-    {
-        let mut records = state.records.lock().await;
-        if let Some(record) = records
-            .get_mut(&id)
-            .filter(|record| record.can_be_downloaded())
-        {
-            record.downloads += 1;
-
-            let file = tokio::fs::File::open(&record.file).await.unwrap();
-
-            return Ok(axum::response::Response::builder()
-                .header("Content-Type", "application/zip")
-                .body(Body::from_stream(ReaderStream::new(file)))
-                .unwrap());
-        } else {
-            records.remove_record(&id).await.unwrap()
-        }
-    }
-
-    Ok(Redirect::to("/404.html").into_response())
-}
@@ -1,74 +0,0 @@
-use axum::{
-    extract::State,
-    response::{Html, IntoResponse, Redirect},
-    routing::get,
-    Router,
-};
-use reqwest::StatusCode;
-
-use crate::{util::ssr, AppState, AsyncRemoveRecord, DownloadLinkPage};
-
-pub fn get_link_router() -> Router<AppState> {
-    // Link pages
-    Router::new()
-        .route("/:id", get(link).delete(link_delete))
-        .route("/:id/remaining", get(remaining))
-}
-
-async fn link(
-    axum::extract::Path(id): axum::extract::Path<String>,
-    State(mut state): State<AppState>,
-) -> Result<Html<String>, Redirect> {
-    {
-        let mut records = state.records.lock().await;
-
-        if let Some(record) = records
-            .get_mut(&id)
-            .filter(|record| record.can_be_downloaded())
-        {
-            return Ok(Html(ssr::render({
-                let record = record.clone();
-                || leptos::view! { <DownloadLinkPage id record /> }
-            })));
-        }
-    }
-
-    // TODO: This....
-    state.remove_record(&id).await.unwrap();
-
-    Err(Redirect::to("/404.html"))
-}
-
-async fn link_delete(
-    axum::extract::Path(id): axum::extract::Path<String>,
-    State(mut state): State<AppState>,
-) -> Result<Html<String>, (StatusCode, String)> {
-    state
-        .remove_record(&id)
-        .await
-        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
-
-    Ok(Html("".to_string()))
-}
-
-#[inline]
-pub fn get_remaining_text(downloads_remaining: u8) -> String {
-    let plural = if downloads_remaining > 1 { "s" } else { "" };
-    format!(
-        "You have {} download{} remaining!",
-        downloads_remaining, plural
-    )
-}
-
-async fn remaining(
-    State(state): State<AppState>,
-    axum::extract::Path(id): axum::extract::Path<String>,
-) -> impl IntoResponse {
-    let records = state.records.lock().await;
-    if let Some(record) = records.get(&id) {
-        let downloads_remaining = record.downloads_remaining();
-        Html(get_remaining_text(downloads_remaining))
-    } else {
-        Html("?".to_string())
-    }
-}
@@ -1,9 +0,0 @@
-pub mod download;
-pub mod link;
-pub mod records;
-pub mod upload;
-
-pub use download::get_download_router;
-pub use link::get_link_router;
-pub use records::get_records_router;
-pub use upload::get_upload_router;
@@ -1,57 +0,0 @@
-use axum::{
-    extract::State,
-    response::{Html, IntoResponse},
-    routing::get,
-    Json, Router,
-};
-use leptos::CollectView;
-
-use crate::{util::ssr, AppState, HtmxPage};
-
-pub fn get_records_router() -> Router<AppState> {
-    // Records views
-    Router::new()
-        .route("/", get(records))
-        .route("/links", get(records_links))
-}
-
-pub(crate) async fn records(State(state): State<AppState>) -> impl IntoResponse {
-    Json(state.records.lock().await.clone())
-}
-
-// This function is to remain ugly until that time in which I properly hide
-// this behind some kind of authentication
-pub async fn records_links(State(state): State<AppState>) -> impl IntoResponse {
-    let records = state.records.lock().await.clone();
-
-    let records_list_view = records
-        .keys()
-        .map(|key| {
-            leptos::view! {
-                <li class="link-wrapper">
-                    <a href="/link/{key}">{key}</a>
-                    <button
-                        style="margin-left: 1em;"
-                        hx-target="closest .link-wrapper"
-                        hx-swap="outerHTML"
-                        hx-delete="/link/{key}"
-                    >
-                        "X"
-                    </button>
-                </li>
-            }
-        })
-        .collect_view();
-
-    Html(ssr::render(move || {
-        leptos::view! {
-            <HtmxPage>
-                <div class="form-wrapper">
-                    <div class="column-container">
-                        <ul>{records_list_view}</ul>
-                    </div>
-                </div>
-            </HtmxPage>
-        }
-    }))
-}
@@ -1,99 +0,0 @@
-use std::path::Path;
-
-use async_zip::{base::write::ZipFileWriter, Compression, ZipEntryBuilder};
-use axum::{
-    extract::{DefaultBodyLimit, Multipart, State},
-    http::Response,
-    routing::post,
-    Router,
-};
-use futures::TryStreamExt;
-use reqwest::StatusCode;
-use sanitize_filename_reader_friendly::sanitize;
-use tokio::io;
-use tokio_util::{compat::FuturesAsyncWriteCompatExt, io::StreamReader};
-use tower_http::limit::RequestBodyLimitLayer;
-
-use crate::{
-    cache,
-    util::{self, ssr},
-    AppState, LinkView, UploadRecord,
-};
-
-pub fn get_upload_router() -> Router<AppState> {
-    // Upload needs a subrouter to increase the body limit
-    Router::new()
-        .route("/", post(upload_to_zip))
-        .layer(DefaultBodyLimit::disable())
-        .layer(RequestBodyLimitLayer::new(
-            10 * 1024 * 1024 * 1024, // 10GiB
-        ))
-}
-
-async fn upload_to_zip(
-    State(state): State<AppState>,
-    mut body: Multipart,
-) -> Result<Response<String>, (StatusCode, String)> {
-    tracing::debug!("{:?}", *state.records.lock().await);
-
-    let cache_name = util::get_random_name(10);
-
-    let archive_path = Path::new(".cache/serve").join(format!("{}.zip", &cache_name));
-
-    tracing::debug!("Zipping: {:?}", &archive_path);
-
-    let mut archive = tokio::fs::File::create(&archive_path)
-        .await
-        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
-    let mut writer = ZipFileWriter::with_tokio(&mut archive);
-
-    while let Some(field) = body.next_field().await.unwrap() {
-        let file_name = match field.file_name() {
-            Some(file_name) => sanitize(file_name),
-            _ => continue,
-        };
-
-        tracing::debug!("Downloading to Zip: {file_name:?}");
-
-        let stream = field;
-        let body_with_io_error = stream.map_err(io::Error::other);
-        let mut body_reader = StreamReader::new(body_with_io_error);
-
-        let builder = ZipEntryBuilder::new(file_name.into(), Compression::Deflate);
-        let mut entry_writer = writer
-            .write_entry_stream(builder)
-            .await
-            .unwrap()
-            .compat_write();
-
-        tokio::io::copy(&mut body_reader, &mut entry_writer)
-            .await
-            .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
-
-        entry_writer
-            .into_inner()
-            .close()
-            .await
-            .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
-    }
-
-    let mut records = state.records.lock().await;
-    let record = UploadRecord::new(archive_path);
-    records.insert(cache_name.clone(), record.clone());
-
-    cache::write_to_cache(&records)
-        .await
-        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))?;
-
-    writer.close().await.unwrap();
-
-    let id = cache_name;
-    let response = Response::builder()
-        .status(200)
-        .header("Content-Type", "text/html")
-        .header("HX-Push-Url", format!("/link/{}", &id))
-        .body(ssr::render(|| leptos::view! { <LinkView id record /> }))
-        .unwrap();
-
-    Ok(response)
-}
src/state.rs (10 lines changed)
@@ -66,12 +66,12 @@ impl AppState {

 #[async_trait]
 pub trait AsyncRemoveRecord {
-    async fn remove_record(&mut self, id: &str) -> Result<(), std::io::Error>;
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error>;
 }

 #[async_trait]
 impl AsyncRemoveRecord for AppState {
-    async fn remove_record(&mut self, id: &str) -> Result<(), std::io::Error> {
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error> {
         let mut records = self.records.lock().await;
         records.remove_record(id).await
     }

@@ -79,12 +79,12 @@ impl AsyncRemoveRecord for AppState {

 #[async_trait]
 impl AsyncRemoveRecord for HashMap<String, UploadRecord> {
-    async fn remove_record(&mut self, id: &str) -> Result<(), std::io::Error> {
-        match self.entry(id.to_string()) {
+    async fn remove_record(&mut self, id: &String) -> Result<(), std::io::Error> {
+        match self.entry(id.clone()) {
             Entry::Occupied(entry) => {
                 tokio::fs::remove_file(&entry.get().file).await?;
                 entry.remove_entry();
-                cache::write_to_cache(self).await?;
+                cache::write_to_cache(&self).await?;

                 Ok(())
             }
@@ -1,8 +1,3 @@
-pub mod headers;
-pub mod logging;
-pub mod ssr;
-pub mod sweeper;
-
 use rand::{
     distributions::{Alphanumeric, DistString},
     rngs::SmallRng,
@@ -1,12 +0,0 @@
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-
-pub fn init_tracing() {
-    // Set up logging
-    tracing_subscriber::registry()
-        .with(
-            tracing_subscriber::EnvFilter::try_from_default_env()
-                .unwrap_or_else(|_| "nyazoom=debug,tower_http=debug".into()),
-        )
-        .with(tracing_subscriber::fmt::layer())
-        .init();
-}
@@ -1,7 +0,0 @@
-pub fn render<F, N>(f: F) -> String
-where
-    F: FnOnce() -> N + 'static,
-    N: leptos::IntoView,
-{
-    leptos::ssr::render_to_string(f).to_string()
-}
@@ -1,22 +0,0 @@
-use std::time::Duration;
-
-use crate::state::{AppState, AsyncRemoveRecord};
-
-/// Spawn a repeating task that will clean files periodically
-pub fn spawn(state: AppState) {
-    tokio::spawn(async move {
-        loop {
-            tokio::time::sleep(Duration::from_secs(15 * 60)).await;
-            tracing::info!("Cleaning Sweep!");
-
-            let mut records = state.records.lock().await;
-
-            for (key, record) in records.clone().into_iter() {
-                if !record.can_be_downloaded() {
-                    tracing::info!("culling: {:?}", record);
-                    records.remove_record(&key).await.unwrap();
-                }
-            }
-        }
-    });
-}
@@ -0,0 +1,106 @@
+use futures::TryFutureExt;
+use leptos::{component, view, Children, IntoView, Scope};
+use serde::Deserialize;
+
+use crate::state::UploadRecord;
+
+#[derive(Debug, Deserialize)]
+pub struct CatFact {
+    pub fact: String,
+}
+
+pub async fn get_cat_fact() -> String {
+    reqwest::get("https://catfact.ninja/fact")
+        .and_then(|res| res.json())
+        .map_ok(|cf: CatFact| cf.fact)
+        .await
+        .unwrap_or_else(|_| String::from("The cat fact goddess has failed me :<"))
+}
+
+// {https://api.thecatapi.com/v1/images/search?size=small&format=src}
+// {https://cataas.com/cat?width=250&height=250}
+#[component]
+pub fn Welcome(cx: Scope, fact: String) -> impl IntoView {
+    view! { cx,
+        <HtmxPage>
+            <div class="form-wrapper">
+                <WelcomeView fact />
+            </div>
+        </HtmxPage>
+    }
+}
+
+#[component]
+pub fn WelcomeView(cx: Scope, fact: String) -> impl IntoView {
+    view! {
+        cx,
+        <form id="form" hx-swap="outerHTML" hx-post="/upload" hx-encoding="multipart/form-data" class="column-container">
+            <div class="cat-img-wrapper">
+                <img class="cat-img" src="https://api.thecatapi.com/v1/images/search?size=small&format=src" />
+            </div>
+            <input type="file" id="file" name="file" data-multiple-caption="{{count}} files selected" multiple />
+            <label for="file">Select Files</label>
+
+            <input type="submit" value="Get Link~" />
+            <p id="cat-fact">{fact}</p>
+            <progress id="progress" class="htmx-indicator" value="0" max="100"></progress>
+        </form>
+        <script src="/scripts/loading_progress.js" />
+    }
+}
+
+// <link href="../dist/css/link.css" rel="stylesheet" />
+// #TODO: Handle pushing cleaner
+#[component]
+pub fn DownloadLinkPage(cx: Scope, id: String, record: UploadRecord) -> impl IntoView {
+    view! { cx,
+        <HtmxPage>
+            <div class="form-wrapper">
+                <LinkView id record />
+            </div>
+        </HtmxPage>
+    }
+}
+
+#[component]
+pub fn HtmxPage(cx: Scope, children: Children) -> impl IntoView {
+    view! { cx,
+        <head>
+            <title>Nyazoom</title>
+            <meta charset="UTF-8" />
+            <meta name="viewport" content="width=device-width, initial-scale=1" />
+            <link href="/css/main.css" rel="stylesheet" />
+            <link href="/css/link.css" rel="stylesheet" />
+            <script src="/scripts/file_label.js" />
+            <script src="/scripts/link.js" />
+            <script src="https://unpkg.com/htmx.org@1.9.4" integrity="sha384-zUfuhFKKZCbHTY6aRR46gxiqszMk5tcHjsVFxnUo8VMus4kHGVdIYVbOYYNlKmHV" crossorigin="anonymous"></script>
+        </head>
+
+        <body>
+            <h1>NyaZoom<sup>2</sup></h1>
+            {children(cx)}
+        </body>
+    }
+}
+
+#[component]
+pub fn LinkView(cx: Scope, id: String, record: UploadRecord) -> impl IntoView {
+    let downloads_remaining = record.max_downloads - record.downloads;
+    let plural = if downloads_remaining > 1 { "s" } else { "" };
+    view! {
+        cx,
+        <div class="column-container">
+            <div class="link-wrapper">
+                <a id="link" href="/download/{id}">Download Now!</a>
+            </div>
+
+            <div class="link-wrapper" hx-get="/link/{id}/remaining" hx-trigger="click from:#link delay:0.2s, every 10s" >
+                You have {record.downloads_remaining()} download{plural} remaining!
+            </div>
+            <button class="return-button" onclick="clipboard()">Copy to Clipboard</button>
+
+
+            <a href="/" class="return-button">Return to home</a>
+        </div>
+    }
+}
@@ -1,26 +0,0 @@
-use leptos::{component, view, Children, IntoView};
-
-#[component]
-pub fn HtmxPage(children: Children) -> impl IntoView {
-    view! {
-        <head>
-            <title>Nyazoom</title>
-            <meta charset="UTF-8" />
-            <meta name="viewport" content="width=device-width, initial-scale=1" />
-            <link href="/css/main.css" rel="stylesheet" />
-            <link href="/css/link.css" rel="stylesheet" />
-            <script src="/scripts/file_label.js" />
-            <script src="/scripts/link.js" />
-            <script
-                src="https://unpkg.com/htmx.org@2.0.2"
-                integrity="sha384-Y7hw+L/jvKeWIRRkqWYfPcvVxHzVzn5REgzbawhxAuQGwX1XWe70vji+VSeHOThJ"
-                crossorigin="anonymous"
-            ></script>
-        </head>
-
-        <body>
-            <h1>"NyaZoom"<sup>"2"</sup></h1>
-            {children()}
-        </body>
-    }
-}
@@ -1,47 +0,0 @@
-use leptos::{component, view, IntoView};
-
-use crate::link::get_remaining_text;
-use crate::state::UploadRecord;
-use crate::HtmxPage;
-
-// <link href="../dist/css/link.css" rel="stylesheet" />
-// #TODO: Handle pushing cleaner
-#[component]
-pub fn DownloadLinkPage(id: String, record: UploadRecord) -> impl IntoView {
-    view! {
-        <HtmxPage>
-            <div class="form-wrapper">
-                <LinkView id record />
-            </div>
-        </HtmxPage>
-    }
-}
-
-#[component]
-pub fn LinkView(id: String, record: UploadRecord) -> impl IntoView {
-    let downloads_remaining = record.max_downloads - record.downloads;
-    view! {
-        <div class="column-container">
-            <div class="link-wrapper">
-                <a id="link" href="/download/{id}">
-                    "Download Now!"
-                </a>
-            </div>
-
-            <div
-                class="link-wrapper"
-                hx-get="/link/{id}/remaining"
-                hx-trigger="click from:#link delay:0.2s, every 10s"
-            >
-                {get_remaining_text(downloads_remaining)}
-            </div>
-            <button class="return-button" onclick="clipboard()">
-                Copy to Clipboard
-            </button>
-
-            <a href="/" class="return-button">
-                "Return to home"
-            </a>
-        </div>
-    }
-}
@@ -1,23 +0,0 @@
-use futures::TryFutureExt;
-use serde::Deserialize;
-
-pub mod base_page;
-pub mod links;
-pub mod welcome;
-
-pub use base_page::*;
-pub use links::*;
-pub use welcome::*;
-
-#[derive(Debug, Deserialize)]
-pub struct CatFact {
-    pub fact: String,
-}
-
-pub async fn get_cat_fact() -> String {
-    reqwest::get("https://catfact.ninja/fact")
-        .and_then(|res| res.json())
-        .map_ok(|cf: CatFact| cf.fact)
-        .await
-        .unwrap_or_else(|_| String::from("The cat fact goddess has failed me :<"))
-}
@@ -1,49 +0,0 @@
-use leptos::{component, view, IntoView};
-
-use crate::HtmxPage;
-
-// {https://api.thecatapi.com/v1/images/search?size=small&format=src}
-// {https://cataas.com/cat?width=250&height=250}
-#[component]
-pub fn WelcomePage(fact: String) -> impl IntoView {
-    view! {
-        <HtmxPage>
-            <div class="form-wrapper">
-                <WelcomeView fact />
-            </div>
-        </HtmxPage>
-    }
-}
-
-#[component]
-pub fn WelcomeView(fact: String) -> impl IntoView {
-    view! {
-        <form
-            id="form"
-            hx-swap="outerHTML"
-            hx-post="/upload"
-            hx-encoding="multipart/form-data"
-            class="column-container"
-        >
-            <div class="cat-img-wrapper">
-                <img
-                    class="cat-img"
-                    src="https://api.thecatapi.com/v1/images/search?size=small&format=src"
-                />
-            </div>
-            <input
-                type="file"
-                id="file"
-                name="file"
-                data-multiple-caption="{{count}} files selected"
-                multiple
-            />
-            <label for="file">"Select Files"</label>
-
-            <input type="submit" value="Get Link~" />
-            <p id="cat-fact">{fact}</p>
-            <progress id="progress" class="htmx-indicator" value="0" max="100"></progress>
-        </form>
-        <script src="/scripts/loading_progress.js" />
-    }
-}