Initial commit
commit fdd418ec3e
.gitignore
@@ -0,0 +1,2 @@
/target
/files
Cargo.lock
File diff suppressed because it is too large
Cargo.toml
@@ -0,0 +1,21 @@
[package]
name = "datatrash"
version = "0.1.0"
authors = ["neri"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
actix-web = "2.0.0"
sqlx = { version = "0.3.5", default-features = false, features = [ "runtime-async-std", "macros", "postgres", "chrono" ] }
actix-rt = "1.1.1"
env_logger = "0.7.1"
log = "0.4.8"
actix-files = "0.2.2"
async-std = "1.6.2"
actix-multipart = "0.2.0"
futures = "0.3.5"
mime = "0.3.16"
rand = "0.7.3"
chrono = "0.4.13"
src/main.rs
@@ -0,0 +1,124 @@
mod multipart;

use actix_files::Files;
use actix_multipart::Multipart;
use actix_web::{error, middleware, web, App, Error, HttpResponse, HttpServer};
use async_std::prelude::*;
use chrono::{prelude::*, Duration};
use futures::{StreamExt, TryStreamExt};
use sqlx::postgres::PgPool;
use std::env;

const INDEX_HTML: &str = include_str!("../static/index.html");
const UPLOAD_HTML: &str = include_str!("../static/upload.html");

async fn index() -> Result<HttpResponse, Error> {
    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .body(INDEX_HTML))
}

async fn upload(mut payload: Multipart, db: web::Data<PgPool>) -> Result<HttpResponse, Error> {
    let id = format!("{:x?}", rand::random::<u32>());
    let filename = format!("files/{}", id);
    let mut timeout: Option<String> = None;
    let mut kind: Option<String> = None;

    while let Ok(Some(mut field)) = payload.try_next().await {
        let name = multipart::get_field_name(&field)?;
        match name.as_str() {
            "validity_secs" => {
                timeout = multipart::read_string(field)
                    .await
                    .map(Some)
                    .map_err(error::ErrorInternalServerError)?;
            }
            "kind" => {
                kind = multipart::read_string(field)
                    .await
                    .map(Some)
                    .map_err(error::ErrorInternalServerError)?;
            }
            "content" => {
                let mut file = async_std::fs::File::create(&filename)
                    .await
                    .map_err(error::ErrorInternalServerError)?;

                while let Some(chunk) = field.next().await {
                    let data = chunk.unwrap();
                    file = file.write_all(&data).await.map(|_| file)?;
                }
            }
            _ => {}
        };
    }

    println!("timeout = {:?}, kind = {:?}", timeout, kind);

    if timeout.is_none() || kind.is_none() {
        async_std::fs::remove_file(&filename)
            .await
            .expect("could not delete file");
        return Ok(HttpResponse::BadRequest().body("timeout or kind not specified"));
    }

    let validity_secs = timeout
        .unwrap()
        .parse::<i64>()
        .expect("could not parse validity as int");
    let valid_till = Local::now() + Duration::seconds(validity_secs);
    let kind = kind.unwrap();

    sqlx::query("INSERT INTO Files (valid_till, kind) VALUES ($1, $2)")
        .bind(valid_till)
        .bind(kind)
        .execute(db.as_ref())
        .await
        .expect("could not insert");

    Ok(HttpResponse::Found()
        .header("location", format!("/upload/{}", id))
        .finish())
}

async fn uploaded(id: web::Path<String>) -> Result<HttpResponse, Error> {
    let upload_html = UPLOAD_HTML.replace("{id}", &*id);
    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .body(upload_html))
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    std::env::set_var("RUST_LOG", "warn,datatrash=info,actix_web=info");
    std::env::set_var("DATABASE_URL", "postgresql://localhost");
    env_logger::init();

    let pool: PgPool = PgPool::builder()
        .max_size(5) // maximum number of connections in the pool
        .build(&env::var("DATABASE_URL").expect("DATABASE_URL environment variable not set"))
        .await
        .expect("could not create db pool");
    sqlx::query!("CREATE TABLE IF NOT EXISTS Files ( id serial, valid_till timestamp, kind varchar(255), primary key (id) )")
        .execute(&pool)
        .await
        .expect("could not create table Files");

    log::info!("omnomnom");

    let db = web::Data::new(pool);

    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::default())
            .app_data(db.clone())
            .service(web::resource("/").route(web::get().to(index)))
            .service(web::resource("/upload").route(web::post().to(upload)))
            .service(web::resource("/upload/{id}").route(web::get().to(uploaded)))
            .service(Files::new("/static", "static").disable_content_disposition())
            .service(Files::new("/file", "files"))
    })
    .bind("0.0.0.0:8000")?
    .run()
    .await
}
src/multipart.rs
@@ -0,0 +1,21 @@
use actix_multipart::Field;
use futures::StreamExt;

pub fn get_field_name(field: &Field) -> Result<String, actix_web::error::ParseError> {
    field
        .content_disposition()
        .ok_or_else(|| actix_web::error::ParseError::Incomplete)?
        .get_name()
        .map(|s| s.to_owned())
        .ok_or_else(|| actix_web::error::ParseError::Incomplete)
}

pub async fn read_string(
    mut field: actix_multipart::Field,
) -> Result<String, std::string::FromUtf8Error> {
    let mut data = Vec::new();
    while let Some(chunk) = field.next().await {
        data.extend(chunk.unwrap());
    }
    String::from_utf8(data)
}
static/index.css
@@ -0,0 +1,10 @@
body {
    background-color: #222222;
}

main {
    color: #dddddd;
    font-family: sans-serif;
    max-width: 1200px;
    margin: 0 auto;
}
static/index.html
@@ -0,0 +1,18 @@
<!DOCTYPE html>
<html>
  <head>
    <title>datatrash</title>
    <link href="/static/index.css" rel="stylesheet" />
  </head>
  <body>
    <main>
      <h1>datatrash</h1>
      <form action="/upload" method="POST" enctype="multipart/form-data">
        <input type="file" name="content" />
        <input type="text" name="validity_secs" />
        <input type="text" name="kind" />
        <input type="submit" value="Upload" />
      </form>
    </main>
  </body>
</html>
static/upload.html
@@ -0,0 +1,18 @@
<!DOCTYPE html>
<html>
  <head>
    <title>datatrash</title>
    <link href="/static/index.css" rel="stylesheet" />
  </head>
  <body>
    <main>
      <h1>datatrash</h1>
      <p>
        Uploaded
        <a href="http://localhost:8000/file/{id}">
          http://localhost:8000/file/{id}
        </a>
      </p>
    </main>
  </body>
</html>