forked from neri/datatrash
refactor upload requirements check into separate method
parent 937f0c439c
commit 30d059b7af
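For context, a minimal self-contained sketch of the validity check that this commit moves into the new check_requirements helper. It is an illustration only: the constant and the chrono arithmetic are taken from the diff below, while the helper name validity_check and the plain String error type are stand-ins for the actix-web plumbing.

// Simplified sketch of the extracted validity check (illustration only).
// The real check_requirements in this commit also validates the filename
// length and the no-auth limits from config::Config.
use chrono::{DateTime, Duration, Local};

const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;

// Hypothetical helper mirroring the valid_seconds check in the diff below;
// errors are Strings here instead of actix_web::error::Error.
fn validity_check(valid_till: DateTime<Local>, now: DateTime<Local>) -> Result<(), String> {
    let valid_seconds = (valid_till - now).num_seconds();
    if valid_seconds > MAX_UPLOAD_SECONDS {
        return Err(format!(
            "maximum allowed validity is {} seconds, but you specified {} seconds",
            MAX_UPLOAD_SECONDS, valid_seconds
        ));
    }
    Ok(())
}

fn main() {
    let now = Local::now();
    // A 30-minute upload window is within the 31-day maximum.
    assert!(validity_check(now + Duration::seconds(30 * 60), now).is_ok());
    // One second past the maximum is rejected.
    assert!(validity_check(now + Duration::seconds(MAX_UPLOAD_SECONDS + 1), now).is_err());
}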
@@ -5,7 +5,7 @@ use async_std::{fs::File, path::Path, prelude::*};
 use chrono::{prelude::*, Duration};
 use futures::{StreamExt, TryStreamExt};
 
-const MAX_UPLOAD_SECONDS: u64 = 31 * 24 * 60 * 60;
+const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;
 const DEFAULT_UPLOAD_SECONDS: u64 = 30 * 60;
 
 pub(crate) struct UploadConfig {
@@ -64,53 +64,60 @@ pub(crate) async fn parse_multipart(
 
     let original_name = original_name.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
     let kind = kind.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
 
-    if original_name.len() > 255 {
-        return Err(error::ErrorBadRequest("filename is too long"));
-    }
     let validated_keep_for: u64 = if let Some(keep_for) = keep_for {
-        let seconds = keep_for.parse().map_err(|e| {
-            error::ErrorBadRequest(format!("field keep_for is not a number: {}", e))
-        })?;
-        if seconds > MAX_UPLOAD_SECONDS {
-            return Err(error::ErrorBadRequest(format!(
-                "maximum allowed validity is {} seconds, but you specified {} seconds",
-                MAX_UPLOAD_SECONDS, seconds
-            )));
-        }
-        seconds
+        keep_for
+            .parse()
+            .map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))?
     } else {
         DEFAULT_UPLOAD_SECONDS
     };
     let valid_duration = Duration::seconds(validated_keep_for as i64);
-    let valid_till = Local::now() + valid_duration;
+    let now = Local::now();
+    let valid_till = now + valid_duration;
 
-    check_auth_requirements(size, valid_duration, password, config)?;
-
-    Ok(UploadConfig {
+    let upload_config = UploadConfig {
         original_name,
         valid_till,
         kind,
         delete_on_download,
-    })
+    };
+
+    check_requirements(&upload_config, size, password, now, config)?;
+
+    Ok(upload_config)
 }
 
-fn check_auth_requirements(
+fn check_requirements(
+    upload_config: &UploadConfig,
     size: u64,
-    validated_keep_for: Duration,
     password: Option<String>,
+    now: DateTime<Local>,
     config: &config::Config,
 ) -> Result<(), error::Error> {
+    if upload_config.original_name.len() > 255 {
+        return Err(error::ErrorBadRequest("filename is too long"));
+    }
+
+    let valid_seconds = (upload_config.valid_till - now).num_seconds();
+    if valid_seconds > MAX_UPLOAD_SECONDS {
+        return Err(error::ErrorBadRequest(format!(
+            "maximum allowed validity is {} seconds, but you specified {} seconds",
+            MAX_UPLOAD_SECONDS, valid_seconds
+        )));
+    }
+
     if let Some(no_auth_limits) = &config.no_auth_limits {
-        let requires_auth = validated_keep_for > no_auth_limits.max_time
-            || validated_keep_for > no_auth_limits.large_file_max_time
+        let requires_auth = valid_seconds > no_auth_limits.max_time.num_seconds()
+            || valid_seconds > no_auth_limits.large_file_max_time.num_seconds()
                 && size > no_auth_limits.large_file_size;
         // hIGh sECUriTy paSsWoRD CHEck
         if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
             return Err(error::ErrorBadRequest(
                 "upload requires authentication, but authentication was incorrect",
             ));
         }
     }
 
     Ok(())
 }
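A note on the requires_auth expression above: in Rust, && binds more tightly than ||, so the size threshold only constrains the large_file_max_time branch. The sketch below illustrates that behaviour with stand-in types; NoAuthLimits here holds plain seconds and bytes (the real struct in the crate's config module is richer), and the limit values in main are invented for the example.

// Illustration of the no-auth decision in check_requirements above.
// NoAuthLimits is a stand-in with only the fields the diff uses.
struct NoAuthLimits {
    max_time_seconds: i64,
    large_file_max_time_seconds: i64,
    large_file_size: u64,
}

fn requires_auth(valid_seconds: i64, size: u64, limits: &NoAuthLimits) -> bool {
    // && binds tighter than ||: the size check only applies to the second branch.
    valid_seconds > limits.max_time_seconds
        || valid_seconds > limits.large_file_max_time_seconds
            && size > limits.large_file_size
}

fn main() {
    let limits = NoAuthLimits {
        max_time_seconds: 60 * 60,           // example: 1 hour without auth
        large_file_max_time_seconds: 5 * 60, // example: 5 minutes for large files
        large_file_size: 8 * 1024 * 1024,    // example: 8 MiB
    };
    // Small file kept for 30 minutes: allowed without auth.
    assert!(!requires_auth(30 * 60, 1024, &limits));
    // Large file kept for 30 minutes: exceeds the large-file window, auth required.
    assert!(requires_auth(30 * 60, 16 * 1024 * 1024, &limits));
    // Any file kept for 2 hours: exceeds max_time, auth required.
    assert!(requires_auth(2 * 60 * 60, 1024, &limits));
}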
@@ -176,13 +183,12 @@ async fn write_to_file(
 }
 
 fn get_original_filename(field: &actix_multipart::Field) -> Option<String> {
-    field.content_disposition().and_then(|content_disposition| {
-        content_disposition
+    field
+        .content_disposition()?
         .parameters
         .into_iter()
         .find_map(|param| match param {
             DispositionParam::Filename(filename) => Some(filename),
             _ => None,
         })
-    })
 }
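The last hunk drops the and_then closure in favour of the ? operator, which works because get_original_filename returns Option<String>: applying ? to a None short-circuits the function and returns None. The sketch below shows the same pattern with mock types; Field, Disposition, and Param are invented stand-ins for the actix-multipart types.

// Stand-in types illustrating the ?-on-Option pattern (not the actix-multipart API).
#[derive(Clone)]
struct Disposition {
    parameters: Vec<Param>,
}

#[derive(Clone)]
enum Param {
    Filename(String),
    Other,
}

struct Field {
    disposition: Option<Disposition>,
}

impl Field {
    fn content_disposition(&self) -> Option<Disposition> {
        self.disposition.clone()
    }
}

fn get_original_filename(field: &Field) -> Option<String> {
    field
        .content_disposition()? // returns None early if no disposition is present
        .parameters
        .into_iter()
        .find_map(|param| match param {
            Param::Filename(filename) => Some(filename),
            Param::Other => None,
        })
}

fn main() {
    let field = Field {
        disposition: Some(Disposition {
            parameters: vec![Param::Other, Param::Filename("report.pdf".to_string())],
        }),
    };
    assert_eq!(get_original_filename(&field), Some("report.pdf".to_string()));

    let empty = Field { disposition: None };
    assert_eq!(get_original_filename(&empty), None);
}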