From b93a3950d04913c3230abfc08ba2bd1c09c56fa3 Mon Sep 17 00:00:00 2001
From: Zynh0722
Date: Thu, 27 Jul 2023 08:40:01 -0700
Subject: [PATCH] prehtmx refactoring

---
 Cargo.toml           |  2 +-
 dist/scripts/link.js | 21 ++++++++++++++
 src/main.rs          | 68 ++++++++++++++++++++++++++++++++++++++------
 src/state.rs         | 11 ++++---
 src/views.rs         | 62 ++++++++++++++++++++++++++++++++--------
 5 files changed, 138 insertions(+), 26 deletions(-)
 create mode 100644 dist/scripts/link.js

diff --git a/Cargo.toml b/Cargo.toml
index ce2f259..6c68063 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -8,7 +8,7 @@ edition = "2021"
 [dependencies]
 async-bincode = { version = "0.7.0", features = ["tokio"] }
 async_zip = { version = "0.0.13", features = ["deflate", "tokio", "tokio-fs", "async-compression"] }
-axum = { version = "0.6.12", features = ["multipart", "http2", "headers", "macros"] }
+axum = { version = "0.6.12", features = ["multipart", "http2", "headers", "macros", "original-uri"] }
 bincode = "1.3.3"
 chrono = { version = "0.4.24", features = ["serde"] }
 futures = "0.3.28"
diff --git a/dist/scripts/link.js b/dist/scripts/link.js
new file mode 100644
index 0000000..17a8e5e
--- /dev/null
+++ b/dist/scripts/link.js
@@ -0,0 +1,21 @@
+document.addEventListener("DOMContentLoaded", () => {
+    const params = new Proxy(new URLSearchParams(window.location.search), {
+        get: (searchParams, prop) => searchParams.get(prop),
+    });
+
+
+    if (params.link !== null) {
+        let link = `${window.location.origin}/download/${params.link}`;
+
+        let link_el = document.getElementById("link");
+
+        link_el.href = link;
+        link_el.innerHTML = link;
+    }
+});
+
+function clipboard() {
+    let copyText = document.getElementById("link");
+
+    navigator.clipboard?.writeText(copyText.href).then(() => alert("Copied: " + copyText.href));
+}
diff --git a/src/main.rs b/src/main.rs
index 1545b82..206ab3a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -7,16 +7,17 @@ use axum::{
     middleware::{self, Next},
     response::{Html, IntoResponse, Redirect, Response},
     routing::{get, post},
-    Router, TypedHeader,
+    Json, Router, TypedHeader,
 };
 
 use futures::TryStreamExt;
 
+use leptos::IntoView;
+
 use nyazoom_headers::ForwardedFor;
 
 use sanitize_filename_reader_friendly::sanitize;
 
-use std::{io, net::SocketAddr, path::Path};
+use std::{io, net::SocketAddr, path::Path, time::Duration};
 
 use tokio_util::{
     compat::FuturesAsyncWriteCompatExt,
@@ -35,7 +36,7 @@ mod views;
 
 use state::{AppState, UploadRecord};
 
-use crate::views::Welcome;
+use crate::views::{DownloadLink, Welcome};
 
 pub mod error {
     use std::io::{Error, ErrorKind};
@@ -56,7 +57,13 @@ async fn main() -> io::Result<()> {
         .with(tracing_subscriber::fmt::layer())
         .init();
 
-    // tracing::info!("{}", get_cat_fact().await);
+    // Spawn a repeating task that will clean files periodically
+    tokio::spawn(async {
+        loop {
+            tracing::info!("Cleaning Sweep!");
+            tokio::time::sleep(Duration::from_secs(15 * 60)).await
+        }
+    });
 
     // uses create_dir_all to create both .cache and serve inside it in one go
     util::make_dir(".cache/serve").await?;
@@ -67,13 +74,16 @@ async fn main() -> io::Result<()> {
     let app = Router::new()
         .route("/", get(welcome))
         .route("/upload", post(upload_to_zip))
+        .route("/records", get(records))
+        .route("/records/links", get(records_links))
         .route("/download/:id", get(download))
+        .route("/link/:id", get(link))
         .layer(DefaultBodyLimit::disable())
         .layer(RequestBodyLimitLayer::new(
             10 * 1024 * 1024 * 1024, // 10GiB
         ))
         .with_state(state)
-        .nest_service("/dist", ServeDir::new("dist"))
+        .fallback_service(ServeDir::new("dist"))
         .layer(TraceLayer::new_for_http())
         .layer(middleware::from_fn(log_source));
 
@@ -95,6 +105,49 @@ async fn welcome() -> impl IntoResponse {
     }))
 }
 
+async fn records(State(state): State<AppState>) -> impl IntoResponse {
+    Json(state.records.lock().await.clone())
+}
+
+async fn records_links(State(state): State<AppState>) -> impl IntoResponse {
+    let records = state.records.lock().await.clone();
+    Html(leptos::ssr::render_to_string(move |cx| {
+        leptos::view! { cx,
+            <ul>
+                {records
+                    .iter()
+                    .map(|(key, _)|
+                        leptos::view! { cx, <li>{key}</li> })
+                    .collect::<Vec<_>>()}
+            </ul>
+        }
+    }))
+}
+
+async fn link(
+    axum::extract::Path(id): axum::extract::Path<String>,
+    State(state): State<AppState>,
+) -> Result<Html<String>, Redirect> {
+    let mut records = state.records.lock().await;
+
+    if let Some(record) = records.get_mut(&id) {
+        if record.can_be_downloaded() {
+            return Ok(Html(leptos::ssr::render_to_string({
+                let record = record.clone();
+                |cx| {
+                    leptos::view! { cx, <DownloadLink record /> }
+                }
+            })));
+        } else {
+            let _ = tokio::fs::remove_file(&record.file).await;
+            records.remove(&id);
+            cache::write_to_cache(&records).await.unwrap();
+        }
+    }
+
+    Err(Redirect::to(&format!("/404.html")))
+}
+
 async fn log_source(
     ConnectInfo(addr): ConnectInfo<SocketAddr>,
     forwarded_for: Option<TypedHeader<ForwardedFor>>,
@@ -162,10 +215,7 @@ async fn upload_to_zip(
 
     writer.close().await.unwrap();
 
-    Ok(Redirect::to(&format!(
-        "/dist/link.html?link={}",
-        cache_name
-    )))
+    Ok(Redirect::to(&format!("/link/{}", cache_name)))
 }
 
 async fn download(
diff --git a/src/state.rs b/src/state.rs
index 513e9a0..5cff331 100644
--- a/src/state.rs
+++ b/src/state.rs
@@ -4,12 +4,12 @@ use std::{
     sync::Arc,
 };
 
-use chrono::{DateTime, Utc};
+use chrono::{DateTime, Duration, Utc};
 use serde::{Deserialize, Serialize};
 use tokio::sync::Mutex;
 
 #[allow(dead_code)]
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct UploadRecord {
     pub uploaded: DateTime<Utc>,
     pub file: PathBuf,
@@ -26,7 +26,10 @@ impl UploadRecord {
     }
 
     pub fn can_be_downloaded(&self) -> bool {
-        self.downloads < self.max_downloads
+        let now = Utc::now();
+        let dur_since_upload = now.signed_duration_since(self.uploaded);
+
+        dur_since_upload < Duration::days(3) && self.downloads < self.max_downloads
     }
 }
 
@@ -36,7 +39,7 @@ impl Default for UploadRecord {
             uploaded: Utc::now(),
             file: Path::new("").to_owned(),
             downloads: 0,
-            max_downloads: 1,
+            max_downloads: 5,
         }
     }
 }
diff --git a/src/views.rs b/src/views.rs
index 2e3b9c6..c512288 100644
--- a/src/views.rs
+++ b/src/views.rs
@@ -1,7 +1,9 @@
 use futures::TryFutureExt;
-use leptos::IntoView;
+use leptos::{Children, IntoView};
 use serde::Deserialize;
 
+use crate::state::UploadRecord;
+
 #[derive(Debug, Deserialize)]
 pub struct CatFact {
     pub fact: String,
@@ -15,32 +17,68 @@ pub async fn get_cat_fact() -> String {
         .unwrap_or_else(|_| String::from("The cat fact goddess has failed me :<"))
 }
 
+// {https://api.thecatapi.com/v1/images/search?size=small&format=src}
+// {https://cataas.com/cat?width=250&height=250}
 #[leptos::component]
 pub fn Welcome(cx: leptos::Scope, fact: String) -> impl IntoView {
     leptos::view! { cx,
-
-        NyaZoom
-
-
-
-