forked from neri/datatrash
fix deletion race condition
parent 7403abbe99
commit 5fb444216e
@@ -6,10 +6,11 @@ use sqlx::{postgres::PgPool, Cursor, Row};
 
 pub(crate) async fn delete_old_files(receiver: Receiver<()>, db: PgPool, files_dir: PathBuf) {
     loop {
         wait_for_file_expiry(&receiver, &db).await;
-        let mut cursor = sqlx::query("SELECT file_id FROM files WHERE files.valid_till < $1")
-            .bind(Local::now().naive_local())
-            .fetch(&db);
+        let now = Local::now().naive_local();
+        let mut cursor = sqlx::query("SELECT file_id FROM files WHERE files.valid_till < $1")
+            .bind(now)
+            .fetch(&db);
         while let Some(row) = cursor.next().await.expect("could not load expired files") {
             let file_id: String = row.get("file_id");
             let mut path = files_dir.clone();
@@ -19,8 +20,9 @@ pub(crate) async fn delete_old_files(receiver: Receiver<()>, db: PgPool, files_d
                 fs::remove_file(&path).await.expect("could not delete file");
             }
         }
+
         sqlx::query("DELETE FROM files WHERE valid_till < $1")
-            .bind(Local::now().naive_local())
+            .bind(now)
             .execute(&db)
             .await
             .expect("could not delete expired files from database");
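The race this hunk fixes appears to be a timing gap between the two queries: previously each one evaluated Local::now() on its own, so a row whose valid_till fell between the SELECT cutoff and the later DELETE cutoff was removed from the database without its file ever being removed from disk. The sketch below illustrates that pattern; it is not code from this repository, assumes only the chrono crate, and stands in for the two sqlx queries with comments.

// Sketch only (not code from this repository): why the expiry cutoff is captured once.
// Assumes just the `chrono` crate; the sqlx SELECT/DELETE are stood in for by comments.
use chrono::Local;

fn main() {
    // Before the fix, each query evaluated `Local::now()` itself:
    let select_cutoff = Local::now().naive_local(); // bound into: SELECT file_id ... WHERE valid_till < $1
    // ...files returned by the SELECT are removed from disk here...
    let delete_cutoff = Local::now().naive_local(); // bound into: DELETE FROM files WHERE valid_till < $1
    assert!(select_cutoff <= delete_cutoff);
    // A row whose valid_till lies between the two cutoffs is matched by the DELETE
    // but was never returned by the SELECT, so its row disappears from the database
    // while its file is left behind on disk.

    // After the fix, a single timestamp is bound into both queries, so the set of
    // rows deleted from the database is exactly the set whose files were deleted.
    let now = Local::now().naive_local();
    let _select_cutoff = now;
    let _delete_cutoff = now;
}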
src/main.rs (13 changed lines)

@@ -11,7 +11,12 @@ use actix_web::{
     web::{self, Bytes},
     App, Error, FromRequest, HttpRequest, HttpResponse, HttpServer,
 };
-use async_std::{fs, path::PathBuf, sync::Sender, task};
+use async_std::{
+    fs,
+    path::PathBuf,
+    sync::{channel, Sender},
+    task,
+};
 use file_kind::FileKind;
 use sqlx::{
     postgres::{PgPool, PgRow},
@@ -155,8 +160,6 @@ async fn main() -> std::io::Result<()> {
 
     let pool: PgPool = setup_db().await;
 
-    log::info!("omnomnom");
-
     let config = Config {
         server_url: env::var("SERVER_URL").unwrap_or_else(|_| "http://localhost:8000".to_owned()),
         files_dir: PathBuf::from(env::var("FILES_DIR").unwrap_or_else(|_| "./files".to_owned())),
@@ -166,7 +169,9 @@ async fn main() -> std::io::Result<()> {
         .await
         .expect("could not create directory for storing files");
 
-    let (send, recv) = async_std::sync::channel::<()>(1);
+    log::info!("omnomnom");
+
+    let (send, recv) = channel(1);
     task::spawn(deleter::delete_old_files(
         recv,
         pool.clone(),