forked from neri/datatrash
improve error handling
This commit is contained in:
parent 19ae7ff673
commit 4efea5e221
src/main.rs (35 changed lines)

@@ -36,7 +36,7 @@ async fn index() -> Result<NamedFile, Error> {
 async fn upload(
     payload: Multipart,
     db: web::Data<PgPool>,
-    sender: web::Data<Sender<()>>,
+    expiry_watch_sender: web::Data<Sender<()>>,
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Error> {
     let file_id = format!("{:x?}", rand::random::<u32>());
@@ -48,22 +48,35 @@ async fn upload(
         Ok(data) => data,
         Err(err) => {
             if filename.exists().await {
-                fs::remove_file(filename)
-                    .await
-                    .map_err(|_| error::ErrorInternalServerError("could not remove file"))?;
+                fs::remove_file(filename).await.map_err(|_| {
+                    error::ErrorInternalServerError(
+                        "could not parse multipart; could not remove file",
+                    )
+                })?;
             }
             return Err(err);
         }
     };
 
-    sqlx::query("INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)")
+    let db_insert = sqlx::query(
+        "INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)",
+    )
     .bind(&file_id)
-    .bind(original_name.as_ref().unwrap_or_else(|| &file_id))
+    .bind(original_name.as_ref().unwrap_or(&file_id))
     .bind(valid_till.naive_local())
     .bind(kind.to_string())
     .execute(db.as_ref())
-    .await
-    .map_err(|_| error::ErrorInternalServerError("could not insert file into database"))?;
+    .await;
+    if db_insert.is_err() {
+        fs::remove_file(filename).await.map_err(|_| {
+            error::ErrorInternalServerError(
+                "could not insert file into database; could not remove file",
+            )
+        })?;
+        return Err(error::ErrorInternalServerError(
+            "could not insert file into database",
+        ));
+    }
 
     log::info!(
         "create new file {} (valid_till: {}, kind: {})",
@@ -72,7 +85,7 @@ async fn upload(
         kind
     );
 
-    sender.send(()).await;
+    expiry_watch_sender.send(()).await;
 
     let redirect = if kind == FileKind::BINARY && original_name.is_some() {
         format!("/upload/{}/{}", file_id, original_name.unwrap())
@@ -219,7 +232,7 @@ async fn main() -> std::io::Result<()> {
     ));
 
     let db = web::Data::new(pool);
-    let sender = web::Data::new(sender);
+    let expiry_watch_sender = web::Data::new(sender);
     let upload_max_bytes: usize = env::var("UPLOAD_MAX_BYTES")
         .ok()
         .and_then(|variable| variable.parse().ok())
@@ -231,7 +244,7 @@ async fn main() -> std::io::Result<()> {
         App::new()
             .wrap(middleware::Logger::default())
             .app_data(db.clone())
-            .app_data(sender.clone())
+            .app_data(expiry_watch_sender.clone())
            .app_data(Bytes::configure(|cfg| cfg.limit(upload_max_bytes)))
            .data(config.clone())
            .service(web::resource("/").route(web::get().to(index)))
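
The core of the change in upload above: instead of bubbling a failed INSERT straight up with map_err(...)?, the query result is bound to db_insert so the already-written file can be deleted before the error is returned, and the multipart-error branch gains a more specific message. A minimal sketch of that cleanup-on-failure pattern, using synchronous std::fs and hypothetical store_file/insert_row helpers in place of the actix/sqlx calls from the diff:

use std::fs;
use std::io::Write;
use std::path::Path;

// Hypothetical stand-ins for the multipart write and the sqlx insert in the diff.
fn store_file(path: &Path, data: &[u8]) -> std::io::Result<()> {
    fs::File::create(path)?.write_all(data)
}

fn insert_row(_file_id: &str) -> Result<(), String> {
    // Simulate the database rejecting the insert.
    Err("duplicate key".to_owned())
}

fn upload(path: &Path, file_id: &str, data: &[u8]) -> Result<(), String> {
    store_file(path, data).map_err(|e| e.to_string())?;

    // Capture the insert result instead of returning early with `?`,
    // so the partially stored upload can be cleaned up on failure.
    let db_insert = insert_row(file_id);
    if db_insert.is_err() {
        fs::remove_file(path)
            .map_err(|_| "could not insert file into database; could not remove file".to_owned())?;
        return Err("could not insert file into database".to_owned());
    }
    Ok(())
}

fn main() {
    let path = Path::new("/tmp/example-upload");
    println!("upload result: {:?}", upload(path, "abc123", b"hello"));
    // The stored file was removed again because the simulated insert failed.
    println!("file left behind: {}", path.exists());
}

When the cleanup itself fails, the combined message from the diff ("could not insert file into database; could not remove file") keeps the original cause visible instead of reporting only the removal error.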

@@ -79,10 +79,10 @@ pub(crate) async fn parse_multipart(
 fn get_field_name(field: &Field) -> Result<String, error::Error> {
     Ok(field
         .content_disposition()
-        .ok_or_else(|| error::ParseError::Incomplete)?
+        .ok_or(error::ParseError::Incomplete)?
         .get_name()
         .map(|s| s.to_owned())
-        .ok_or_else(|| error::ParseError::Incomplete)?)
+        .ok_or(error::ParseError::Incomplete)?)
 }
 
 async fn parse_string(name: &str, field: actix_multipart::Field) -> Result<String, error::Error> {
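
The last hunk (and the unwrap_or_else → unwrap_or change in upload) trades the closure form for the eager one. For a unit-like value such as error::ParseError::Incomplete the two are effectively equivalent, since there is nothing expensive to defer; ok_or_else only pays off when building the fallback allocates or computes something. A small stand-alone illustration (this ParseError is a local stand-in, not the actix type):

#[derive(Debug)]
enum ParseError {
    Incomplete,
}

// Eager: the error value is constructed up front. Fine here, because the
// unit variant `ParseError::Incomplete` costs nothing to build.
fn field_name_eager(name: Option<&str>) -> Result<String, ParseError> {
    name.map(|s| s.to_owned()).ok_or(ParseError::Incomplete)
}

// Lazy: the closure runs only if `name` is `None`. Worth it when the
// fallback is expensive (formatting, allocation, a lookup, ...).
fn field_name_lazy(name: Option<&str>) -> Result<String, ParseError> {
    name.map(|s| s.to_owned())
        .ok_or_else(|| ParseError::Incomplete)
}

fn main() {
    assert_eq!(field_name_eager(Some("file")).unwrap(), "file");
    assert!(field_name_eager(None).is_err());
    assert!(field_name_lazy(None).is_err());
    println!("ok");
}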