remove compile time postgres dependency, add docker build
parent deb99942d3
commit e110378ba6
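The core of the change is visible in the src diffs further down: sqlx's compile-time-checked `query!` macros (which need a live database and `DATABASE_URL` at build time) are replaced with runtime-bound `sqlx::query(...).bind(...)` calls. Below is a minimal sketch of the pattern the commit moves to, using only calls that appear in the diff (the sqlx 0.3-era `query`/`bind`/`fetch` API with `Cursor` and `Row`); the function name is made up.

```rust
// Sketch, not part of the commit: runtime-bound query instead of `sqlx::query!`.
// The macro validates SQL against a live Postgres at build time; this form defers
// everything to runtime, so no database is needed to compile the binary.
use chrono::NaiveDateTime;
use sqlx::{postgres::PgPool, Cursor, Row};

async fn expired_file_ids(db: &PgPool, now: NaiveDateTime) -> Vec<String> {
    // Parameters go through `.bind`, rows come back through a cursor and are
    // read by column name with `Row::get` instead of a macro-generated struct.
    let mut cursor = sqlx::query("SELECT file_id FROM files WHERE valid_till < $1")
        .bind(now)
        .fetch(db);
    let mut ids = Vec::new();
    while let Some(row) = cursor.next().await.expect("could not load expired files") {
        let file_id: String = row.get("file_id");
        ids.push(file_id);
    }
    ids
}
```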
Cargo.lock

@@ -688,6 +688,7 @@ dependencies = [
 "futures",
 "log",
 "mime",
 "openssl-sys",
 "rand",
 "sqlx",
]

@@ -1454,6 +1455,15 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"

[[package]]
name = "openssl-src"
version = "111.10.1+1.1.1g"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "375f12316ddf0762f7cf1e2890a0a857954b96851b47b5af7fc06940c9e12f83"
dependencies = [
 "cc",
]

[[package]]
name = "openssl-sys"
version = "0.9.58"

@@ -1463,6 +1473,7 @@ dependencies = [
 "autocfg",
 "cc",
 "libc",
 "openssl-src",
 "pkg-config",
 "vcpkg",
]

Cargo.toml

@@ -19,3 +19,7 @@ futures = "0.3.5"
mime = "0.3.16"
rand = "0.7.3"
chrono = "0.4.13"
openssl-sys = "*"

[features]
vendored = ["openssl-sys/vendored"]
Dockerfile

@@ -1,30 +1,31 @@
FROM postgres as builder
FROM ekidd/rust-musl-builder as build

ENV POSTGRES_USER "datatrash"
ENV POSTGRES_PASSWORD "secure"
USER rust
WORKDIR /home/rust/src/
RUN USER=rust cargo new datatrash

RUN apt-get update
RUN apt-get install --yes curl build-essential
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o rustup-install.sh
RUN sh rustup-install.sh -y
ENV PATH="/root/.cargo/bin:${PATH}"
RUN rustup target add x86_64-unknown-linux-musl
WORKDIR /home/rust/src/datatrash
COPY --chown=rust Cargo.toml Cargo.lock ./
RUN cargo build --release --features vendored

ENV USER rust
WORKDIR /
RUN cargo new --bin datatrash
WORKDIR /datatrash
COPY Cargo.lock Cargo.lock
COPY Cargo.toml Cargo.toml
RUN cargo build --release --target=x86_64-unknown-linux-musl --features vendored
RUN rm src/*.rs
COPY --chown=rust src ./src
COPY --chown=rust static ./static
COPY --chown=rust template ./template
COPY --chown=rust init-db.sql ./init-db.sql
RUN touch src/main.rs
RUN cargo install --path . --features vendored

COPY src src
ENV DATABASE_URL "postgresql://datatrash:secure@localhost"
RUN rm target/release/deps/datatrash*
RUN cargo build --release --target=x86_64-unknown-linux-musl --features vendored
RUN strip target/release/horrible
FROM alpine

FROM SCRATCH
COPY --from=builder /datatrash/target/release/datatrash /datatrash
ENTRYPOINT ["/datatrash"]
ENV DATABASE_URL "postresql://localhost"
ENV SERVER_URL "http://localhost:8000"
ENV FILES_DIR "./files"
ENV UPLOAD_MAX_BYTES "8388608"
ENV BIND_ADDRESS "0.0.0.0:8000"
ENV RUST_BACKTRACE "1"

COPY --from=build /home/rust/.cargo/bin/datatrash .
COPY static ./static
RUN mkdir ./files
EXPOSE 8000
ENTRYPOINT ["./datatrash"]
README.md

@@ -6,16 +6,16 @@ A file and text uploading service with configurable time limit

## compiling

Compiling is a little strange.
The SQL-statements are checked for correctness at compile-time, unfortunately this means that the
database needs to be running at compile-time too.
```sh
docker build -t datatrash .
docker cp datatrash:/home/rust/.cargo/bin/datatrash datatrash
```

To get set up:
or, to just run it in docker

- Start a postgresql somewhere
- Set its connection url in the `.env` file
- Run the `init-db.sql` script in the database (`cat init-db.sql | psql`)
- Build the project `cargo build --release`
```sh
docker-compose up -d --build
```

## running & config

@@ -27,6 +27,4 @@ To get set up:
| UPLOAD_MAX_BYTES | 8388608 (8MiB) |
| BIND_ADDRESS | 0.0.0.0:8000 |

Other things are not configurable yet.

- The maximum filename length is 255
The maximum filename length is 255
docker-compose.yml (new file)

@@ -0,0 +1,15 @@
version: "3.3"
services:
  datatrash:
    build: .
    environment:
      DATABASE_URL: 'postgresql://admin:secure@postgres'
    ports:
      - '8000:8000'
  postgres:
    image: postgres
    environment:
      POSTGRES_USER: admin
      POSTGRES_PASSWORD: secure
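The compose file injects `DATABASE_URL` into the app container. A hypothetical sketch of how a service like this reads that variable at startup; the real `setup_db()` in src/main.rs is only partly visible in this diff, and `PgPool::new` is an assumption about the sqlx 0.3-era constructor.

```rust
// Hypothetical sketch (not from the commit): connect using the DATABASE_URL
// that docker-compose sets on the container.
use sqlx::postgres::PgPool;
use std::env;

async fn connect() -> PgPool {
    // Read the connection string from the environment at runtime.
    let url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    // Assumed sqlx 0.3-era constructor; builds a connection pool to Postgres.
    PgPool::new(&url).await.expect("could not create db pool")
}
```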
@@ -1,26 +1,26 @@
use async_std::{fs, path::PathBuf, sync::Receiver, task};
use chrono::{prelude::*, Duration};
use futures::future::FutureExt;
use sqlx::postgres::PgPool;
use sqlx::{postgres::PgPool, Cursor, Row};

pub(crate) async fn delete_old_files(receiver: Receiver<()>, db: PgPool, files_dir: PathBuf) {
    loop {
        wait_for_file_expiry(&receiver, &db).await;
        let now = Local::now().naive_local();
        let expired_files =
            sqlx::query!("SELECT file_id FROM files WHERE files.valid_till < $1", now)
                .fetch_all(&db)
                .await
                .unwrap();
        for expired_file in expired_files {
        let mut cursor = sqlx::query("SELECT file_id FROM files WHERE files.valid_till < $1")
            .bind(Local::now().naive_local())
            .fetch(&db);

        while let Some(row) = cursor.next().await.expect("could not load expired files") {
            let file_id: String = row.get("file_id");
            let mut path = files_dir.clone();
            path.push(&expired_file.file_id);
            path.push(&file_id);
            if path.exists().await {
                log::info!("delete file {}", expired_file.file_id);
                log::info!("delete file {}", file_id);
                fs::remove_file(&path).await.expect("could not delete file");
            }
        }
        sqlx::query!("DELETE FROM files WHERE valid_till < $1", now)
        sqlx::query("DELETE FROM files WHERE valid_till < $1")
            .bind(Local::now().naive_local())
            .execute(&db)
            .await
            .expect("could not delete expired files from database");
@@ -28,12 +28,15 @@ pub(crate) async fn delete_old_files(receiver: Receiver<()>, db: PgPool, files_d
}

async fn wait_for_file_expiry(receiver: &Receiver<()>, db: &PgPool) {
    let row = sqlx::query!("SELECT MIN(valid_till) as min from files")
        .fetch_one(db)
    let mut cursor = sqlx::query("SELECT MIN(valid_till) as min from files").fetch(db);
    let row = cursor
        .next()
        .await
        .expect("could not fetch expiring file from database");
    let next_timeout = match row.min {
        Some(min) => min.signed_duration_since(Local::now().naive_local()),
        .expect("could not fetch expiring files from database")
        .expect("postgres min did not return any row");
    let valid_till: Option<NaiveDateTime> = row.get("min");
    let next_timeout = match valid_till {
        Some(valid_till) => valid_till.signed_duration_since(Local::now().naive_local()),
        None => Duration::days(1),
    };
    let positive_timeout = next_timeout
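For context on the double `expect` and the `Option` above: `MIN(valid_till)` over an empty table is SQL NULL, which the runtime API surfaces as `Option<NaiveDateTime>`. A small sketch of that fallback logic; the helper name is made up.

```rust
// Sketch (hypothetical helper) of the nullable-aggregate handling used by the
// new wait_for_file_expiry.
use chrono::{Duration, Local, NaiveDateTime};

fn next_timeout(valid_till: Option<NaiveDateTime>) -> Duration {
    match valid_till {
        // Soonest expiry known: sleep until then (may already be in the past).
        Some(t) => t.signed_duration_since(Local::now().naive_local()),
        // No files at all: re-check in a day, unless woken earlier via the channel.
        None => Duration::days(1),
    }
}
```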
src/main.rs

@@ -13,7 +13,10 @@ use actix_web::{
};
use async_std::{fs, path::PathBuf, sync::Sender, task};
use file_kind::FileKind;
use sqlx::postgres::PgPool;
use sqlx::{
    postgres::{PgPool, PgRow},
    Cursor, Row,
};
use std::env;

const INDEX_HTML: &str = include_str!("../template/index.html");
@@ -49,13 +52,11 @@ async fn upload(
        }
    };

    sqlx::query!(
        "INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)",
        file_id,
        original_name.unwrap_or_else(|| file_id.clone()),
        valid_till.naive_local(),
        kind.to_string()
    )
    sqlx::query("INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)")
        .bind(&file_id)
        .bind(original_name.unwrap_or_else(|| file_id.clone()))
        .bind(valid_till.naive_local())
        .bind(kind.to_string())
        .execute(db.as_ref())
        .await
        .map_err(|_| error::ErrorInternalServerError("could not insert file into database"))?;
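With the macro gone, nothing checks at build time that the bind list matches the statement; values map to `$1`..`$4` purely by the order of the `.bind` calls. A sketch of the same insert as a standalone helper (hypothetical name, same calls as the diff):

```rust
// Sketch: positional parameters are filled strictly in bind order.
use chrono::NaiveDateTime;
use sqlx::postgres::PgPool;

async fn insert_file(
    db: &PgPool,
    file_id: String,
    file_name: String,
    valid_till: NaiveDateTime,
    kind: String,
) -> Result<(), sqlx::Error> {
    sqlx::query("INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)")
        .bind(file_id)    // $1
        .bind(file_name)  // $2
        .bind(valid_till) // $3
        .bind(kind)       // $4
        .execute(db)
        .await?;
    Ok(())
}
```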
@@ -89,17 +90,21 @@ async fn download(
    db: web::Data<PgPool>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Error> {
    let row = sqlx::query!(
        "SELECT file_id, file_name, kind from files WHERE file_id = $1",
        *id
    )
    .fetch_one(db.as_ref())
    let mut cursor = sqlx::query("SELECT file_id, file_name, kind from files WHERE file_id = $1")
        .bind(id.as_ref())
        .fetch(db.as_ref());
    let row: PgRow = cursor
        .next()
        .await
        .map_err(|_| error::ErrorNotFound("could not find file"))?;
        .map_err(|_| error::ErrorInternalServerError("could not run select statement"))?
        .ok_or_else(|| error::ErrorNotFound("could not find file"))?;
    let file_id: String = row.get("file_id");
    let file_name: String = row.get("file_name");
    let kind: String = row.get("kind");
    let mut path = config.files_dir.clone();
    path.push(&row.file_id);
    path.push(&file_id);

    if row.kind == FileKind::TEXT.to_string() {
    if kind == FileKind::TEXT.to_string() {
        let content = fs::read_to_string(path).await?;
        let view_html = VIEW_HTML.replace("{text}", &content);
        let response = HttpResponse::Ok().content_type("text/html").body(view_html);
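A side effect of the cursor API used above: a failed statement and a missing row arrive separately as `Result<Option<Row>>`, so they can map to different HTTP errors instead of both becoming a 404. A sketch with a hypothetical helper:

```rust
// Sketch (hypothetical helper): split "query failed" from "no row matched".
use actix_web::error;
use sqlx::{postgres::PgPool, Cursor, Row};

async fn load_file_row(db: &PgPool, id: &str) -> Result<(String, String, String), actix_web::Error> {
    let mut cursor = sqlx::query("SELECT file_id, file_name, kind from files WHERE file_id = $1")
        .bind(id)
        .fetch(db);
    let row = cursor
        .next()
        .await
        // Err(_): the statement itself could not be run.
        .map_err(|_| error::ErrorInternalServerError("could not run select statement"))?
        // Ok(None): the statement ran, but no row matched the id.
        .ok_or_else(|| error::ErrorNotFound("could not find file"))?;
    // Columns are read by name and converted to concrete Rust types at runtime.
    Ok((row.get("file_id"), row.get("file_name"), row.get("kind")))
}
```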
@@ -107,7 +112,7 @@ async fn download(
    } else {
        let file = NamedFile::open(path)?.set_content_disposition(ContentDisposition {
            disposition: DispositionType::Attachment,
            parameters: vec![DispositionParam::Filename(row.file_name)],
            parameters: vec![DispositionParam::Filename(file_name)],
        });
        file.into_response(&req)
    }
@@ -120,7 +125,7 @@ async fn setup_db() -> PgPool {
        .await
        .expect("could not create db pool");

    sqlx::query_file!("./init-db.sql")
    sqlx::query(include_str!("../init-db.sql"))
        .execute(&pool)
        .await
        .expect("could not create table Files");
template/index.html

@@ -20,6 +20,7 @@
<br />
Gültig für
<select name="validity_secs">
  <option value="10">10 sekunden</option>
  <option value="1800">30 minuten</option>
  <option value="3600">60 minuten</option>
  <option value="43200">12 stunden</option>