forked from neri/datatrash
finish it i guess
parent fdd418ec3e
commit 827baf8eec
Cargo.lock
@@ -76,7 +76,6 @@ dependencies = [
 "derive_more",
 "either",
 "encoding_rs",
-"failure",
 "flate2",
 "futures-channel",
 "futures-core",
Cargo.toml
@@ -7,7 +7,7 @@ edition = "2018"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = "2.0.0"
+actix-web = { version = "2.0.0", default-features = false, features = [ "compress" ] }
 sqlx = { version = "0.3.5", default-features = false, features = [ "runtime-async-std", "macros", "postgres", "chrono" ] }
 actix-rt = "1.1.1"
 env_logger = "0.7.1"
src/deleter.rs (new file)
@@ -0,0 +1,45 @@
+use async_std::{fs, path::PathBuf, sync::Receiver, task};
+use chrono::{prelude::*, Duration};
+use futures::future::FutureExt;
+use sqlx::postgres::PgPool;
+
+pub(crate) async fn delete_old_files(receiver: Receiver<()>, db: PgPool) {
+    loop {
+        wait_for_file_expiry(&receiver, &db).await;
+        let now = Local::now().naive_local();
+        let expired_files =
+            sqlx::query!("SELECT file_id FROM files WHERE files.valid_till < $1", now)
+                .fetch_all(&db)
+                .await
+                .unwrap();
+        for expired_file in expired_files {
+            let path = PathBuf::from(&format!("files/{}", expired_file.file_id));
+            if path.exists().await {
+                log::info!("delete file {}", expired_file.file_id);
+                fs::remove_file(&path).await.expect("could not delete file");
+            }
+        }
+        sqlx::query!("DELETE FROM files WHERE valid_till < $1", now)
+            .execute(&db)
+            .await
+            .expect("could not delete expired files from database");
+    }
+}
+
+async fn wait_for_file_expiry(receiver: &Receiver<()>, db: &PgPool) {
+    let row = sqlx::query!("SELECT MIN(valid_till) as min from files")
+        .fetch_one(db)
+        .await
+        .expect("could not fetch expiring file from database");
+    let next_timeout = match row.min {
+        Some(min) => min.signed_duration_since(Local::now().naive_local()),
+        None => Duration::days(1),
+    };
+    let positive_timeout = next_timeout
+        .to_std()
+        .unwrap_or_else(|_| std::time::Duration::from_secs(0));
+    futures::select! {
+        _ = task::sleep(positive_timeout).fuse() => {}
+        _ = receiver.recv().fuse() => {}
+    }
+}
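The timeout handling above is plain chrono arithmetic: the signed duration until the earliest valid_till is clamped to a non-negative std duration, so already expired rows trigger an immediate cleanup pass instead of an error. A standalone sketch of just that calculation (not part of the commit; it only assumes the chrono 0.4 API already used in this file):

use chrono::{Duration, Local};

fn main() {
    // Pretend the earliest expiry (MIN(valid_till)) lies 90 seconds in the past.
    let min_valid_till = Local::now().naive_local() - Duration::seconds(90);

    // Same clamping as wait_for_file_expiry: a negative duration becomes 0,
    // so the deleter wakes up right away.
    let next_timeout = min_valid_till.signed_duration_since(Local::now().naive_local());
    let positive_timeout = next_timeout
        .to_std()
        .unwrap_or_else(|_| std::time::Duration::from_secs(0));

    assert_eq!(positive_timeout, std::time::Duration::from_secs(0));
    println!("sleeping for {:?}", positive_timeout);
}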
src/file_kind.rs (new file)
@@ -0,0 +1,28 @@
+use std::{fmt::Display, str::FromStr};
+
+#[derive(Debug)]
+pub(crate) enum FileKind {
+    TEXT,
+    BINARY,
+}
+
+impl Display for FileKind {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            FileKind::TEXT => write!(f, "text"),
+            FileKind::BINARY => write!(f, "binary"),
+        }
+    }
+}
+
+impl FromStr for FileKind {
+    type Err = String;
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s.to_lowercase().as_str() {
+            "text" => Ok(FileKind::TEXT),
+            "binary" => Ok(FileKind::BINARY),
+            _ => Err(format!("unknown kind {}", s)),
+        }
+    }
+}
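FileKind round-trips through its string form: Display writes "text" or "binary" (the value stored in the kind column), and FromStr parses it back case-insensitively. A small test sketch that could sit at the bottom of src/file_kind.rs (hypothetical, not part of the commit):

#[cfg(test)]
mod tests {
    use super::FileKind;

    #[test]
    fn kind_round_trips_through_strings() {
        // Parsing is case-insensitive, Display is always lowercase.
        let kind: FileKind = "Text".parse().expect("known kind");
        assert_eq!(kind.to_string(), "text");
        assert_eq!(FileKind::BINARY.to_string(), "binary");
        // Anything else is rejected with an error message.
        assert!("video".parse::<FileKind>().is_err());
    }
}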
src/main.rs (179 changed lines)
@@ -1,16 +1,24 @@
+mod deleter;
+mod file_kind;
 mod multipart;

-use actix_files::Files;
+use actix_files::{Files, NamedFile};
 use actix_multipart::Multipart;
-use actix_web::{error, middleware, web, App, Error, HttpResponse, HttpServer};
-use async_std::prelude::*;
-use chrono::{prelude::*, Duration};
-use futures::{StreamExt, TryStreamExt};
+use actix_web::{
+    error,
+    http::header::{ContentDisposition, DispositionParam, DispositionType},
+    middleware,
+    web::{self, Bytes},
+    App, Error, FromRequest, HttpRequest, HttpResponse, HttpServer,
+};
+use async_std::{fs, path::PathBuf, sync::Sender, task};
+use file_kind::FileKind;
 use sqlx::postgres::PgPool;
 use std::env;

-const INDEX_HTML: &str = include_str!("../static/index.html");
-const UPLOAD_HTML: &str = include_str!("../static/upload.html");
+const INDEX_HTML: &str = include_str!("../template/index.html");
+const UPLOAD_HTML: &str = include_str!("../template/upload.html");
+const VIEW_HTML: &str = include_str!("../template/view.html");

 async fn index() -> Result<HttpResponse, Error> {
     Ok(HttpResponse::Ok()
@@ -18,105 +26,140 @@ async fn index() -> Result<HttpResponse, Error> {
         .body(INDEX_HTML))
 }

-async fn upload(mut payload: Multipart, db: web::Data<PgPool>) -> Result<HttpResponse, Error> {
-    let id = format!("{:x?}", rand::random::<u32>());
-    let filename = format!("files/{}", id);
-    let mut timeout: Option<String> = None;
-    let mut kind: Option<String> = None;
+async fn upload(
+    payload: Multipart,
+    db: web::Data<PgPool>,
+    sender: web::Data<Sender<()>>,
+) -> Result<HttpResponse, Error> {
+    let file_id = format!("{:x?}", rand::random::<u32>());
+    let filename = PathBuf::from(format!("files/{}", file_id));

-    while let Ok(Some(mut field)) = payload.try_next().await {
-        let name = multipart::get_field_name(&field)?;
-        match name.as_str() {
-            "validity_secs" => {
-                timeout = multipart::read_string(field)
-                    .await
-                    .map(Some)
-                    .map_err(error::ErrorInternalServerError)?;
-            }
-            "kind" => {
-                kind = multipart::read_string(field)
-                    .await
-                    .map(Some)
-                    .map_err(error::ErrorInternalServerError)?;
-            }
-            "content" => {
-                let mut file = async_std::fs::File::create(&filename)
-                    .await
-                    .map_err(error::ErrorInternalServerError)?;
-
-                while let Some(chunk) = field.next().await {
-                    let data = chunk.unwrap();
-                    file = file.write_all(&data).await.map(|_| file)?;
-                }
-            }
-            _ => {}
+    let (original_name, valid_till, kind) =
+        match multipart::parse_multipart(payload, &file_id, &filename).await {
+            Ok(data) => data,
+            Err(err) => {
+                if filename.exists().await {
+                    fs::remove_file(filename)
+                        .await
+                        .map_err(|_| error::ErrorInternalServerError("could not remove file"))?;
+                }
+                return Err(err);
+            }
         };
-    }

-    println!("timeout = {:?}, kind = {:?}", timeout, kind);
-
-    if timeout == None || kind == None {
-        async_std::fs::remove_file(&filename)
-            .await
-            .expect("could not delete file");
-        return Ok(HttpResponse::BadRequest().body("timeout or kind not specified"));
-    }
-
-    let validity_secs = timeout
-        .unwrap()
-        .parse::<i64>()
-        .expect("could not parse validity as int");
-    let valid_till = Local::now() + Duration::seconds(validity_secs);
-    let kind = kind.unwrap();
-
-    sqlx::query("INSERT INTO Files (valid_till, kind) VALUES ($1, $2)")
-        .bind(valid_till)
-        .bind(kind)
-        .execute(db.as_ref())
-        .await
-        .expect("could not insert");
+    sqlx::query!(
+        "INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)",
+        file_id,
+        original_name.unwrap_or_else(|| file_id.clone()),
+        valid_till.naive_local(),
+        kind.to_string()
+    )
+    .execute(db.as_ref())
+    .await
+    .map_err(|_| error::ErrorInternalServerError("could not insert file into database"))?;
+
+    log::info!(
+        "create new file {} (valid_till: {}, kind: {})",
+        file_id,
+        valid_till,
+        kind
+    );
+
+    sender.send(()).await;

     Ok(HttpResponse::Found()
-        .header("location", format!("/upload/{}", id))
+        .header("location", format!("/upload/{}", file_id))
         .finish())
 }

 async fn uploaded(id: web::Path<String>) -> Result<HttpResponse, Error> {
-    let upload_html = UPLOAD_HTML.replace("{id}", &*id);
+    let upload_html = UPLOAD_HTML.replace("{id}", id.as_ref());
     Ok(HttpResponse::Ok()
         .content_type("text/html")
         .body(upload_html))
 }

+async fn download(
+    req: HttpRequest,
+    id: web::Path<String>,
+    db: web::Data<PgPool>,
+) -> Result<HttpResponse, Error> {
+    let row = sqlx::query!(
+        "SELECT file_id, file_name, kind from files WHERE file_id = $1",
+        *id
+    )
+    .fetch_one(db.as_ref())
+    .await
+    .map_err(|_| error::ErrorNotFound("could not find file"))?;
+    let path: PathBuf = PathBuf::from(format!("files/{}", row.file_id));
+
+    if row.kind == FileKind::TEXT.to_string() {
+        let content = fs::read_to_string(path).await?;
+        let view_html = VIEW_HTML.replace("{text}", &content);
+        let response = HttpResponse::Ok().content_type("text/html").body(view_html);
+        Ok(response)
+    } else {
+        let file = NamedFile::open(path)?.set_content_disposition(ContentDisposition {
+            disposition: DispositionType::Attachment,
+            parameters: vec![DispositionParam::Filename(row.file_name)],
+        });
+        file.into_response(&req)
+    }
+}
+
+async fn setup_db() -> PgPool {
+    let pool = PgPool::builder()
+        .max_size(5)
+        .build(&env::var("DATABASE_URL").expect("DATABASE_URL environement variable not set"))
+        .await
+        .expect("could not create db pool");
+
+    sqlx::query!(
+        "
+        CREATE TABLE IF NOT EXISTS files (
+            id serial,
+            file_id varchar(255) not null,
+            file_name varchar(255) not null,
+            valid_till timestamp not null,
+            kind varchar(255) not null,
+            primary key (id)
+        )
+        "
+    )
+    .execute(&pool)
+    .await
+    .expect("could not create table Files");
+
+    pool
+}
+
 #[actix_rt::main]
 async fn main() -> std::io::Result<()> {
     std::env::set_var("RUST_LOG", "warn,datatrash=info,actix_web=info");
     std::env::set_var("DATABASE_URL", "postgresql://localhost");
     env_logger::init();

-    let pool: PgPool = PgPool::builder()
-        .max_size(5) // maximum number of connections in the pool
-        .build(&env::var("DATABASE_URL").expect("DATABASE_URL environement variable not set"))
-        .await
-        .expect("could not create db pool");
-    sqlx::query!("CREATE TABLE IF NOT EXISTS Files ( id serial, valid_till timestamp, kind varchar(255), primary key (id) )")
-        .execute(&pool)
-        .await
-        .expect("could not create table Files");
+    let pool: PgPool = setup_db().await;

     log::info!("omnomnom");

+    let (send, recv) = async_std::sync::channel::<()>(1);
+    task::spawn(deleter::delete_old_files(recv, pool.clone()));
+
     let db = web::Data::new(pool);
+    let send = web::Data::new(send);

     HttpServer::new(move || {
         App::new()
             .wrap(middleware::Logger::default())
             .app_data(db.clone())
+            .app_data(send.clone())
+            .app_data(Bytes::configure(|cfg| cfg.limit(8_388_608)))
             .service(web::resource("/").route(web::get().to(index)))
             .service(web::resource("/upload").route(web::post().to(upload)))
             .service(web::resource("/upload/{id}").route(web::get().to(uploaded)))
+            .service(web::resource("/file/{id}").route(web::get().to(download)))
             .service(Files::new("/static", "static").disable_content_disposition())
-            .service(Files::new("/file", "files"))
     })
     .bind("0.0.0.0:8000")?
     .run()
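One detail worth noting in upload(): the public id is a random u32 rendered as lowercase hex by the {:x?} debug formatter, and the file is stored under files/<id>. A standalone sketch of that naming scheme (not part of the commit; it assumes the rand crate that main.rs already uses):

use std::path::PathBuf;

fn main() {
    // Produces an id like "9f3a07c4"; the same id appears in /upload/{id} and /file/{id}.
    let file_id = format!("{:x?}", rand::random::<u32>());
    let path = PathBuf::from(format!("files/{}", file_id));
    println!("file_id = {}, stored at {}", file_id, path.display());
}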
src/multipart.rs (119 changed lines)
@@ -1,21 +1,118 @@
-use actix_multipart::Field;
-use futures::StreamExt;
+use crate::file_kind::FileKind;
+use actix_multipart::{Field, Multipart};
+use actix_web::{error, http::header::DispositionParam};
+use async_std::{fs, fs::File, path::Path, prelude::*};
+use chrono::{prelude::*, Duration};
+use futures::{StreamExt, TryStreamExt};

-pub fn get_field_name(field: &Field) -> Result<String, actix_web::error::ParseError> {
-    field
+pub(crate) async fn parse_multipart(
+    mut payload: Multipart,
+    file_id: &str,
+    filename: &Path,
+) -> Result<(Option<String>, DateTime<Local>, FileKind), error::Error> {
+    let mut original_name: Option<String> = None;
+    let mut timeout: Option<String> = None;
+    let mut kind: Option<FileKind> = None;
+
+    while let Ok(Some(field)) = payload.try_next().await {
+        let name = get_field_name(&field)?;
+        let name = name.as_str();
+        match name {
+            "validity_secs" => {
+                timeout = Some(parse_string(name, field).await?);
+            }
+            "content" => {
+                let file_original_name = get_original_filename(&field);
+                if file_original_name == None || file_original_name.as_deref() == Some("") {
+                    continue;
+                }
+                println!("got content");
+                original_name = file_original_name;
+                kind = Some(FileKind::BINARY);
+                let mut file = fs::File::create(&filename)
+                    .await
+                    .map_err(|_| error::ErrorInternalServerError("could not create file"))?;
+                write_to_file(&mut file, field)
+                    .await
+                    .map_err(|_| error::ErrorInternalServerError("could not write file"))?;
+            }
+            "text_content" => {
+                if original_name.is_some() {
+                    continue;
+                }
+                println!("got text content");
+                original_name = Some(format!("{}.txt", file_id));
+                kind = Some(FileKind::TEXT);
+                let mut file = fs::File::create(&filename)
+                    .await
+                    .map_err(|_| error::ErrorInternalServerError("could not create file"))?;
+                write_to_file(&mut file, field)
+                    .await
+                    .map_err(|_| error::ErrorInternalServerError("could not write file"))?;
+            }
+            _ => {}
+        };
+    }
+
+    if let Some(original_name) = &original_name {
+        if original_name.len() > 255 {
+            return Err(error::ErrorBadRequest("filename is too long"));
+        }
+    }
+
+    let validity_secs = timeout
+        .ok_or_else(|| error::ErrorBadRequest("field validity_secs not set"))?
+        .parse()
+        .map_err(|e| {
+            error::ErrorBadRequest(format!("field validity_secs is not a number: {}", e))
+        })?;
+    let valid_till = Local::now() + Duration::seconds(validity_secs);
+    let kind = kind.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
+    Ok((original_name, valid_till, kind))
+}
+
+fn get_field_name(field: &Field) -> Result<String, error::Error> {
+    Ok(field
         .content_disposition()
-        .ok_or_else(|| actix_web::error::ParseError::Incomplete)?
+        .ok_or_else(|| error::ParseError::Incomplete)?
         .get_name()
         .map(|s| s.to_owned())
-        .ok_or_else(|| actix_web::error::ParseError::Incomplete)
+        .ok_or_else(|| error::ParseError::Incomplete)?)
 }

-pub async fn read_string(
-    mut field: actix_multipart::Field,
-) -> Result<String, std::string::FromUtf8Error> {
+async fn parse_string(name: &str, field: actix_multipart::Field) -> Result<String, error::Error> {
+    let data = read_content(field).await?;
+    String::from_utf8(data)
+        .map_err(|_| error::ErrorBadRequest(format!("could not parse field {} as utf-8", name)))
+}
+
+async fn read_content(mut field: actix_multipart::Field) -> Result<Vec<u8>, error::Error> {
     let mut data = Vec::new();
     while let Some(chunk) = field.next().await {
-        data.extend(chunk.unwrap());
+        data.extend(chunk.map_err(error::ErrorBadRequest)?);
     }
-    String::from_utf8(data)
+    Ok(data)
+}
+
+async fn write_to_file(
+    file: &mut File,
+    mut field: actix_multipart::Field,
+) -> Result<(), error::Error> {
+    while let Some(chunk) = field.next().await {
+        file.write_all(chunk.map_err(error::ErrorBadRequest)?.as_ref())
+            .await?;
+    }
+    Ok(())
+}
+
+fn get_original_filename(field: &actix_multipart::Field) -> Option<String> {
+    field.content_disposition().and_then(|content_disposition| {
+        content_disposition
+            .parameters
+            .into_iter()
+            .find_map(|param| match param {
+                DispositionParam::Filename(filename) => Some(filename),
+                _ => None,
+            })
+    })
 }
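The validity handling at the end of parse_multipart is plain chrono arithmetic: the form's validity_secs string is parsed as i64 seconds and added to the current time, and that timestamp is what later lands in files.valid_till. A standalone sketch (not part of the commit; it assumes the same chrono 0.4 API used above):

use chrono::{Duration, Local};

fn main() {
    // "3600" is what the validity_secs form field sends (see template/index.html below).
    let validity_secs: i64 = "3600".parse().expect("validity_secs is not a number");
    let valid_till = Local::now() + Duration::seconds(validity_secs);

    // upload() stores the naive local timestamp in the database.
    println!("valid_till = {}", valid_till.naive_local());
}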
static/index.css
@@ -8,3 +8,31 @@ main {
   max-width: 1200px;
   margin: 0 auto;
 }
+
+h1 > a,
+h1 > a:visited {
+  color: #dddddd;
+}
+
+a {
+  color: cornflowerblue;
+}
+
+a:visited {
+  color: mediumorchid;
+}
+
+input,
+select,
+textarea {
+  background-color: #222222;
+  color: #dddddd;
+  padding: 0.5rem;
+  border: 2px solid #dddddd;
+  border-radius: 5px;
+  margin-bottom: 1rem;
+}
+
+input[type='submit'] {
+  background-color: green;
+}
static/index.html (deleted)
@@ -1,18 +0,0 @@
-<!DOCTYPE html>
-<html>
-  <head>
-    <title>datatrash</title>
-    <link href="/static/index.css" rel="stylesheet" />
-  </head>
-  <body>
-    <main>
-      <h1>datatrash</h1>
-      <form action="/upload" method="POST" enctype="multipart/form-data">
-        <input type="file" name="content" />
-        <input type="text" name="validity_secs" />
-        <input type="text" name="kind" />
-        <input type="submit" value="Hochladen" />
-      </form>
-    </main>
-  </body>
-</html>
template/index.html (new file)
@@ -0,0 +1,35 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>datatrash</title>
+    <meta charset="utf-8" />
+    <link href="/static/index.css" rel="stylesheet" />
+  </head>
+  <body>
+    <main>
+      <h1>datatrash</h1>
+      <form action="/upload" method="POST" enctype="multipart/form-data">
+        <input type="file" name="content" />
+        <br />
+        <textarea
+          name="text_content"
+          placeholder="Oder asciitrash"
+          rows="20"
+          cols="120"
+        ></textarea>
+        <br />
+        Gültig für
+        <select name="validity_secs">
+          <option value="1800">30 minuten</option>
+          <option value="3600">60 minuten</option>
+          <option value="43200">12 stunden</option>
+          <option value="86400">24 stunden</option>
+          <option value="604800">1 woche</option>
+          <option value="2678400">1 monat</option>
+        </select>
+        <br />
+        <input type="submit" value="Hochladen" />
+      </form>
+    </main>
+  </body>
+</html>
template/upload.html
@@ -2,11 +2,12 @@
 <html>
   <head>
     <title>datatrash</title>
+    <meta charset="utf-8" />
     <link href="/static/index.css" rel="stylesheet" />
   </head>
   <body>
     <main>
-      <h1>datatrash</h1>
+      <h1><a href="/">datatrash</a></h1>
       <p>
         Uploaded
         <a href="http://localhost:8000/file/{id}">
template/view.html (new file)
@@ -0,0 +1,14 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>datatrash</title>
+    <meta charset="utf-8" />
+    <link href="/static/index.css" rel="stylesheet" />
+  </head>
+  <body>
+    <main>
+      <h1><a href="/">datatrash</a></h1>
+      <textarea rows="20" cols="120" readonly>{text}</textarea>
+    </main>
+  </body>
+</html>