Compare commits

2ff0d37313 ... 88bdfdfc87

9 commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 88bdfdfc87 |  |
|  | 47bc1b6016 |  |
|  | 856369e370 |  |
|  | d55a906dde |  |
|  | 5c662ba716 |  |
|  | 7338b3c31f |  |
|  | a3112d96ae |  |
|  | 9a664a4771 |  |
|  | 56e9cc7af3 |  |

9 changed files with 37 additions and 3 deletions
2  .gitignore  vendored

@@ -3,3 +3,5 @@
 /.cache
 /node_modules
 testing.db
+testing.db-shm
+testing.db-wal
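The two new entries are SQLite's write-ahead-log and shared-memory sidecar files; they appear next to `testing.db` once the database runs in WAL journal mode, which sqlx enables by default for SQLite. A minimal sketch of requesting WAL explicitly, assuming sqlx's `SqliteConnectOptions` (the function name is illustrative):

```rust
use std::str::FromStr;

use sqlx::sqlite::{SqliteConnectOptions, SqliteJournalMode};

// Illustrative only: builds connect options that explicitly select WAL mode,
// the journal mode that creates the testing.db-wal / testing.db-shm files.
fn wal_options() -> Result<SqliteConnectOptions, sqlx::Error> {
    Ok(SqliteConnectOptions::from_str("sqlite:testing.db")?
        .journal_mode(SqliteJournalMode::Wal))
}
```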
@@ -1,6 +1,10 @@
 [sqlfluff]
 dialect = sqlite
 
+[sqlfluff:rules:capitalisation.keywords]
+# Keywords
+capitalisation_policy = upper
+
 [sqlfluff:layout:type:column_constraint_segment]
 spacing_before = align
 align_within = create_table_statement
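With the added rule block, `sqlfluff fix` would rewrite lowercase keywords (e.g. `select` or `create table`) to upper case, matching the capitalisation already used in the migration below.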
@@ -1,7 +1,7 @@
 -- Add up migration script here
 CREATE TABLE IF NOT EXISTS records (
-    id INTEGER PRIMARY KEY,
-    uploaded TEXT NOT NULL,
+    id INTEGER NOT NULL PRIMARY KEY,
+    uploaded TEXT NOT NULL DEFAULT (datetime('now')),
     file_path TEXT NOT NULL,
     downloads INTEGER NOT NULL DEFAULT 0,
     max_downloads INTEGER NOT NULL
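Two effects of this change worth noting: in SQLite an `INTEGER PRIMARY KEY` column is still treated as nullable unless `NOT NULL` is spelled out, so sqlx's compile-time macros would typically surface `id` as `Option<i64>` without it, and `DEFAULT (datetime('now'))` lets inserts such as `new.sql` omit `uploaded` entirely. A hypothetical sketch of the row shape this table maps to (the struct name and derive are assumptions, not part of the change):

```rust
// Hypothetical Rust-side shape of a `records` row after this migration.
#[derive(Debug)]
struct RecordRow {
    id: i64,          // NOT NULL PRIMARY KEY, so i64 rather than Option<i64>
    uploaded: String, // TEXT, filled by DEFAULT (datetime('now')) when omitted
    file_path: String,
    downloads: i64,   // INTEGER NOT NULL DEFAULT 0
    max_downloads: i64,
}
```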
1  queries/records/count_records.sql  Normal file

@@ -0,0 +1 @@
+SELECT COUNT(*) FROM records;
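A hedged usage sketch, assuming the query runs against the pool added in this change; the runtime `query_scalar` call stands in for whichever sqlx macro the project actually uses:

```rust
use sqlx::SqlitePool;

// Illustrative only: fetches the single COUNT(*) value as an i64.
async fn count_records(pool: &SqlitePool) -> Result<i64, sqlx::Error> {
    sqlx::query_scalar("SELECT COUNT(*) FROM records;")
        .fetch_one(pool)
        .await
}
```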
3  queries/records/increment_download.sql  Normal file

@@ -0,0 +1,3 @@
+UPDATE record
+SET downloads = downloads + 1
+WHERE id = ?
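Note that the statement targets `record` while the migration above creates `records`. A hedged usage sketch with the table name spelled as in the migration (runtime `query` used for illustration; the `?` becomes a bound id):

```rust
use sqlx::SqlitePool;

// Illustrative only: bumps the download counter for one record by id.
async fn increment_download(pool: &SqlitePool, id: i64) -> Result<(), sqlx::Error> {
    sqlx::query("UPDATE records SET downloads = downloads + 1 WHERE id = ?")
        .bind(id)
        .execute(pool)
        .await?;
    Ok(())
}
```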
2  queries/records/new.sql  Normal file

@@ -0,0 +1,2 @@
+INSERT INTO records (file_path, max_downloads)
+VALUES (?, ?);
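This file is consumed further down by `sqlx::query_file!` in the upload handler, which binds the two placeholders positionally: `file_path` first, then `max_downloads`.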
@@ -12,7 +12,9 @@ mkShell rec {
     # Additional darwin specific inputs can be set here
     pkgs.darwin.apple_sdk.frameworks.SystemConfiguration
   ];
-  buildInputs = additionalBuildInputs;
+  buildInputs = [
+    sqlite
+  ] ++ additionalBuildInputs;
   LD_LIBRARY_PATH = lib.makeLibraryPath buildInputs;
   DATABASE_URL = "sqlite:testing.db";
 }
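Adding `sqlite` to `buildInputs` makes libsqlite3 visible through the `LD_LIBRARY_PATH` computed below it, and `DATABASE_URL` is what sqlx's compile-time macros (`query_file!` and friends) read when they check the queries against `testing.db` during the build.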
@@ -75,6 +75,19 @@ async fn upload_to_zip(
 
     let record = UploadRecord::new(archive_path);
     let mut records = state.records.lock().await;
+
+    if let Some(mut conn) = state.pool.try_acquire() {
+        let path = record.file.clone().into_os_string().into_string().unwrap();
+
+        let id = sqlx::query_file!("queries/records/new.sql", path, 5)
+            .execute(&mut *conn)
+            .await
+            .unwrap()
+            .last_insert_rowid();
+
+        // TODO: Looks like I actually gotta store cache_name lmfao
+    };
+
     records.insert(cache_name.clone(), record.clone());
 
     let records_cache = records.clone();
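A hedged alternative sketch of the same insert: sqlx query macros can execute directly against the pool, which avoids silently skipping the insert when `try_acquire()` returns `None` and propagates errors instead of `unwrap()`. The function name and the `max_downloads` parameter (standing in for the hard-coded `5`) are illustrative:

```rust
use sqlx::SqlitePool;

// Illustrative only: runs queries/records/new.sql against the pool and
// returns the rowid of the inserted record, propagating errors instead of
// unwrapping. Like query_file!, it needs DATABASE_URL at compile time.
async fn insert_record(
    pool: &SqlitePool,
    path: String,
    max_downloads: i64,
) -> Result<i64, sqlx::Error> {
    let id = sqlx::query_file!("queries/records/new.sql", path, max_downloads)
        .execute(pool)
        .await?
        .last_insert_rowid();
    Ok(id)
}
```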
@@ -2,12 +2,14 @@ use std::{
     collections::{hash_map::Entry, HashMap},
     io::ErrorKind,
     path::{Path, PathBuf},
+    str::FromStr,
     sync::Arc,
 };
 
 use async_trait::async_trait;
 use chrono::{DateTime, Duration, Utc};
 use serde::{Deserialize, Serialize};
+use sqlx::{sqlite::SqliteConnectOptions, SqlitePool};
 use tokio::sync::Mutex;
 
 use crate::cache;

@@ -54,12 +56,17 @@ impl Default for UploadRecord {
 #[derive(Clone)]
 pub struct AppState {
     pub records: Arc<Mutex<HashMap<String, UploadRecord>>>,
+    pub pool: SqlitePool,
 }
 
 impl AppState {
     pub fn new(records: HashMap<String, UploadRecord>) -> Self {
         Self {
             records: Arc::new(Mutex::new(records)),
+            pool: SqlitePool::connect_lazy_with(
+                SqliteConnectOptions::from_str("sqlite:testing.db")
+                    .expect("Invalid Database String"),
+            ),
         }
     }
 }
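`connect_lazy_with` builds the pool without opening a connection, which is what lets `AppState::new` stay a synchronous function; the first real connection is made on first use. An eager variant, shown only as a sketch with the same URL, would fail up front instead:

```rust
use sqlx::SqlitePool;

// Illustrative only: connects eagerly so a bad path or URL fails here
// rather than on the first query.
async fn eager_pool() -> Result<SqlitePool, sqlx::Error> {
    SqlitePool::connect("sqlite:testing.db").await
}
```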