Compare commits
No commits in common. "8fc3d79abb01fb286b89b51eed488b04273a2b10" and "0b5f369fa07020c3612f8937d50998e6f40c032e" have entirely different histories.
8fc3d79abb ... 0b5f369fa0
20 changed files with 265 additions and 574 deletions
Cargo.lock — 123 changed lines (generated)
@@ -76,20 +76,6 @@ dependencies = [
 "libc",
]

[[package]]
name = "async-compression"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "942c7cd7ae39e91bde4820d74132e9862e62c2f386c3aa90ccf55949f5bad63a"
dependencies = [
 "brotli",
 "flate2",
 "futures-core",
 "memchr",
 "pin-project-lite",
 "tokio",
]

[[package]]
name = "async-trait"
version = "0.1.64"

@@ -476,6 +462,19 @@ dependencies = [
 "cfg-if",
]

[[package]]
name = "env_logger"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
dependencies = [
 "humantime",
 "is-terminal",
 "log",
 "regex",
 "termcolor",
]

[[package]]
name = "errno"
version = "0.2.8"

@@ -747,6 +746,12 @@ dependencies = [
 "libc",
]

[[package]]
name = "hermit-abi"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"

[[package]]
name = "hex"
version = "0.4.3"

@@ -814,6 +819,12 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"

[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"

[[package]]
name = "hyper"
version = "0.14.24"

@@ -921,6 +932,18 @@ dependencies = [
 "windows-sys 0.45.0",
]

[[package]]
name = "is-terminal"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857"
dependencies = [
 "hermit-abi 0.3.1",
 "io-lifetimes",
 "rustix",
 "windows-sys 0.45.0",
]

[[package]]
name = "itoa"
version = "1.0.5"

@@ -1077,16 +1100,6 @@ dependencies = [
 "memchr",
]

[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
 "overload",
 "winapi",
]

[[package]]
name = "num-integer"
version = "0.1.45"

@@ -1112,7 +1125,7 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
dependencies = [
 "hermit-abi",
 "hermit-abi 0.2.6",
 "libc",
]

@@ -1128,12 +1141,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"

[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"

[[package]]
name = "parking_lot"
version = "0.11.2"

@@ -1624,15 +1631,6 @@ dependencies = [
 "digest",
]

[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
 "lazy_static",
]

[[package]]
name = "similar"
version = "2.2.1"

@@ -1713,9 +1711,9 @@ dependencies = [
name = "talon"
version = "0.1.0"
dependencies = [
 "async-compression",
 "brotli",
 "compressible",
 "env_logger",
 "flate2",
 "hex",
 "hex-literal",

@@ -1740,7 +1738,6 @@ dependencies = [
 "time",
 "tokio",
 "toml",
 "tracing-subscriber",
 "zip",
]

@@ -1809,16 +1806,6 @@ dependencies = [
 "syn",
]

[[package]]
name = "thread_local"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
dependencies = [
 "cfg-if",
 "once_cell",
]

[[package]]
name = "time"
version = "0.3.20"

@@ -1987,32 +1974,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
dependencies = [
 "once_cell",
 "valuable",
]

[[package]]
name = "tracing-log"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
dependencies = [
 "lazy_static",
 "log",
 "tracing-core",
]

[[package]]
name = "tracing-subscriber"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"
dependencies = [
 "nu-ansi-term",
 "sharded-slab",
 "smallvec",
 "thread_local",
 "tracing-core",
 "tracing-log",
]

[[package]]

@@ -2073,12 +2034,6 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc7ed8ba44ca06be78ea1ad2c3682a43349126c8818054231ee6f4748012aed2"

[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"

[[package]]
name = "version_check"
version = "0.9.4"
Cargo.toml

@@ -38,12 +38,7 @@ compressible = "0.2.0"
regex = "1.7.1"
log = "0.4.17"
httpdate = "1.0.2"
tracing-subscriber = "0.3.16"
async-compression = { version = "0.3.15", features = [
 "tokio",
 "gzip",
 "brotli",
] }
env_logger = "0.10.0"

[dev-dependencies]
rstest = "0.16.0"
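One side of this compare pulls in async-compression with the tokio, gzip and brotli features to compress HTML responses on the fly (see src/storage.rs below). A minimal standalone sketch of that adapter under the same feature flags; the input bytes here are made up:

```rust
use async_compression::{tokio::bufread::GzipEncoder, Level};
use tokio::io::{AsyncReadExt, BufReader};

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let input: &[u8] = b"hello hello hello hello";
    // The bufread encoder wraps any AsyncBufRead and yields compressed bytes.
    let mut encoder = GzipEncoder::with_quality(BufReader::new(input), Level::Precise(6));
    let mut compressed = Vec::new();
    encoder.read_to_end(&mut compressed).await?;
    assert!(!compressed.is_empty());
    Ok(())
}
```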
src/api.rs — 174 changed lines
@@ -1,4 +1,4 @@
use std::{collections::BTreeMap, io::Cursor};
use std::io::Cursor;

use poem::{
error::{Error, ResponseError},

@@ -8,18 +8,12 @@ use poem::{
};
use poem_openapi::{
auth::ApiKey,
param::{Path, Query},
param::Path,
payload::{Binary, Json},
OpenApi, SecurityScheme,
};

use crate::{
config::{Access, KeyCfg},
db,
model::*,
oai::DynParams,
util, Talon,
};
use crate::{config::KeyCfg, db, model::*, oai::DynParams, util, Talon};

pub struct TalonApi;

@@ -38,10 +32,10 @@ async fn api_key_checker(req: &Request, api_key: ApiKey) -> Option<KeyCfg> {
}

impl ApiKeyAuthorization {
fn check_subdomain(&self, subdomain: &str, access: Access) -> Result<()> {
fn check_subdomain(&self, subdomain: &str) -> Result<()> {
if subdomain.is_empty() {
Err(ApiError::InvalidSubdomain.into())
} else if !self.0.domains.matches_domain(subdomain) || !self.0.allows(access) {
} else if !self.0.domains.matches_domain(subdomain) {
Err(ApiError::NoAccess.into())
} else {
Ok(())

@@ -55,21 +49,18 @@ enum ApiError {
InvalidSubdomain,
#[error("you do not have access to this subdomain")]
NoAccess,
#[error("invalid fallback: {0}")]
InvalidFallback(String),
}

impl ResponseError for ApiError {
fn status(&self) -> StatusCode {
match self {
ApiError::InvalidSubdomain | ApiError::InvalidFallback(_) => StatusCode::BAD_REQUEST,
ApiError::InvalidSubdomain => StatusCode::BAD_REQUEST,
ApiError::NoAccess => StatusCode::UNAUTHORIZED,
}
}
}

#[OpenApi]
#[allow(clippy::too_many_arguments)]
impl TalonApi {
/// Get a website
#[oai(path = "/website/:subdomain", method = "get")]

@@ -94,7 +85,7 @@ impl TalonApi {
subdomain: Path<String>,
website: Json<WebsiteNew>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Modify)?;
auth.check_subdomain(&subdomain)?;
if subdomain.as_str() == talon.cfg.server.internal_subdomain
|| !util::validate_subdomain(&subdomain)
{

@@ -114,7 +105,7 @@ impl TalonApi {
subdomain: Path<String>,
website: Json<WebsiteUpdate>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Modify)?;
auth.check_subdomain(&subdomain)?;

talon.db.update_website(&subdomain, website.0.into())?;
Ok(())

@@ -128,66 +119,19 @@ impl TalonApi {
talon: Data<&Talon>,
subdomain: Path<String>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Modify)?;
auth.check_subdomain(&subdomain)?;

talon.db.delete_website(&subdomain, true)?;
Ok(())
}

/// Get all publicly listed websites
///
/// Returns all publicly listed websites (visibility != `hidden`)
#[oai(path = "/websites", method = "get")]
async fn websites_get(
&self,
talon: Data<&Talon>,
/// Minimum visibility of the websites
#[oai(default)]
visibility: Query<Visibility>,
) -> Result<Json<Vec<Website>>> {
talon
.db
.get_websites()
.map(|r| r.map(Website::from))
.filter(|ws| match ws {
Ok(ws) => ws.visibility != Visibility::Hidden && ws.visibility <= visibility.0,
Err(_) => true,
})
.collect::<Result<Vec<_>, _>>()
.map(Json)
.map_err(Error::from)
}

/// Get all websites
///
/// Returns all websites from Talon's database (including hidden ones, if the current user
/// has access to them). This endpoint requires authentication (use the `/websites` endpoint
/// for unauthenticated users).
#[oai(path = "/websitesAll", method = "get")]
async fn websites_get_all(
&self,
auth: ApiKeyAuthorization,
talon: Data<&Talon>,
/// Minimum visibility of the websites
#[oai(default)]
visibility: Query<Visibility>,
) -> Result<Json<Vec<Website>>> {
#[oai(path = "/websites", method = "get")]
async fn websites_get(&self, talon: Data<&Talon>) -> Result<Json<Vec<Website>>> {
talon
.db
.get_websites()
.map(|r| r.map(Website::from))
.filter(|ws| match ws {
Ok(ws) => {
if ws.visibility == Visibility::Hidden
&& auth.check_subdomain(&ws.subdomain, Access::Read).is_err()
{
false
} else {
ws.visibility <= visibility.0
}
}
Err(_) => true,
})
.collect::<Result<Vec<_>, _>>()
.map(Json)
.map_err(Error::from)
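The `ws.visibility <= visibility.0` filter above only works because `Visibility` derives `PartialOrd`/`Ord` (see the src/model.rs hunk further down), so the declaration order of the variants doubles as a visibility ranking. A standalone sketch of that idea, with the variant set abridged to the two names visible in this diff:

```rust
// Deriving Ord on a C-like enum orders variants by declaration order,
// which turns a "minimum visibility" check into a plain comparison.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Visibility {
    Featured, // listed first, compares lowest
    Hidden,
}

fn main() {
    assert!(Visibility::Featured < Visibility::Hidden);
    assert!(Visibility::Featured <= Visibility::Featured);
}
```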
@@ -234,52 +178,12 @@ impl TalonApi {
subdomain: Path<String>,
id: Path<u32>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Modify)?;
auth.check_subdomain(&subdomain)?;

talon.db.delete_version(&subdomain, *id, true)?;
Ok(())
}

fn insert_version(
talon: &Talon,
subdomain: &str,
id: u32,
fallback: Option<String>,
spa: bool,
mut version_data: BTreeMap<String, String>,
) -> Result<()> {
version_data.remove("fallback");
version_data.remove("spa");

// Validate fallback path
if let Some(fallback) = &fallback {
if let Err(e) = talon.storage.get_file(id, fallback, &Default::default()) {
// Remove the uploaded files of the bad version
let _ = talon.db.delete_version(subdomain, id, false);
return Err(ApiError::InvalidFallback(e.to_string()).into());
}
}

talon.db.insert_version(
subdomain,
id,
&db::model::Version {
data: version_data,
fallback,
spa,
..Default::default()
},
)?;
talon.db.update_website(
subdomain,
db::model::WebsiteUpdate {
latest_version: Some(Some(id)),
..Default::default()
},
)?;
Ok(())
}

/// Upload a new version (.zip archive)
#[oai(path = "/website/:subdomain/uploadZip", method = "post")]
async fn version_upload_zip(
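On the side that has `insert_version`, the upload endpoints take the archive body plus `fallback` and `spa` query parameters. A hedged sketch of what a client call could look like, derived only from the `#[oai]` attributes above; the base URL, header name and API key are made up, and reqwest is not a dependency of this project:

```rust
// Hypothetical client for the uploadZip endpoint shown above.
async fn upload_zip(zip_bytes: Vec<u8>) -> Result<(), reqwest::Error> {
    reqwest::Client::new()
        .post("http://localhost:3000/api/website/blog/uploadZip?fallback=404.html&spa=true")
        .header("X-API-Key", "my-secret-key") // header name is an assumption
        .body(zip_bytes)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
```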
@@ -287,13 +191,6 @@ impl TalonApi {
auth: ApiKeyAuthorization,
talon: Data<&Talon>,
subdomain: Path<String>,
/// Fallback page
///
/// The fallback page gets returned when the requested page does not exist
fallback: Query<Option<String>>,
/// SPA mode (return fallback page with OK status)
#[oai(default)]
spa: Query<bool>,
/// Associated version data
///
/// This is an arbitrary string map that can hold build information and other stuff

@@ -302,12 +199,28 @@ impl TalonApi {
/// zip archive with the website files
data: Binary<Vec<u8>>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Upload)?;
auth.check_subdomain(&subdomain)?;
let vid = talon.db.new_version_id()?;
talon
.storage
.insert_zip_archive(Cursor::new(data.as_slice()), vid)?;
Self::insert_version(&talon, &subdomain, vid, fallback.0, spa.0, version_data.0)

talon.db.insert_version(
&subdomain,
vid,
&db::model::Version {
data: version_data.0,
..Default::default()
},
)?;
talon.db.update_website(
&subdomain,
db::model::WebsiteUpdate {
latest_version: Some(Some(vid)),
..Default::default()
},
)?;
Ok(())
}

/// Upload a new version (.tar.gz archive)

@@ -317,13 +230,6 @@ impl TalonApi {
auth: ApiKeyAuthorization,
talon: Data<&Talon>,
subdomain: Path<String>,
/// Fallback page
///
/// The fallback page gets returned when the requested page does not exist
fallback: Query<Option<String>>,
/// SPA mode (return fallback page with OK status)
#[oai(default)]
spa: Query<bool>,
/// Associated version data
///
/// This is an arbitrary string map that can hold build information and other stuff

@@ -332,9 +238,25 @@ impl TalonApi {
/// tar.gz archive with the website files
data: Binary<Vec<u8>>,
) -> Result<()> {
auth.check_subdomain(&subdomain, Access::Upload)?;
auth.check_subdomain(&subdomain)?;
let vid = talon.db.new_version_id()?;
talon.storage.insert_tgz_archive(data.as_slice(), vid)?;
Self::insert_version(&talon, &subdomain, vid, fallback.0, spa.0, version_data.0)

talon.db.insert_version(
&subdomain,
vid,
&db::model::Version {
data: version_data.0,
..Default::default()
},
)?;
talon.db.update_website(
&subdomain,
db::model::WebsiteUpdate {
latest_version: Some(Some(vid)),
..Default::default()
},
)?;
Ok(())
}
}
src/config.rs

@@ -130,8 +130,6 @@ impl CompressionCfg {
pub struct KeyCfg {
#[serde(skip_serializing_if = "Domains::is_none")]
pub domains: Domains,
pub upload: bool,
pub modify: bool,
}

#[derive(Debug, Default, Clone, Serialize, Deserialize)]

@@ -143,16 +141,6 @@ pub enum Domains {
Multiple(Vec<String>),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Access {
/// Don't modify anything
Read,
/// Upload a new website version
Upload,
/// Create, update or delete websites
Modify,
}

impl Domains {
fn is_none(&self) -> bool {
matches!(self, Domains::None)

@@ -187,16 +175,6 @@ impl Domains {
}
}

impl KeyCfg {
pub fn allows(&self, access: Access) -> bool {
match access {
Access::Read => true,
Access::Upload => self.upload,
Access::Modify => self.modify,
}
}
}

#[cfg(test)]
mod tests {
use super::*;
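The Access/allows pair gives each API key a tiered permission level on top of the domain match. A self-contained sketch of how the check composes, using only the types visible in this hunk (field set trimmed to the two permission flags):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Access { Read, Upload, Modify }

#[derive(Default)]
struct KeyCfg { upload: bool, modify: bool }

impl KeyCfg {
    fn allows(&self, access: Access) -> bool {
        match access {
            Access::Read => true,          // any matching key may read
            Access::Upload => self.upload, // gated by the key's upload flag
            Access::Modify => self.modify, // gated by the key's modify flag
        }
    }
}

fn main() {
    let key = KeyCfg { upload: true, modify: false };
    assert!(key.allows(Access::Read));
    assert!(key.allows(Access::Upload));
    assert!(!key.allows(Access::Modify));
}
```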
src/db/model.rs

@@ -66,14 +66,7 @@ pub struct Version {
///
/// This is an arbitrary string map that can hold build information and other stuff
/// and will be displayed in the site info dialog.
#[serde(default)]
pub data: BTreeMap<String, String>,
/// Path of the fallback page which is returned if the requested path was not found
#[serde(default)]
pub fallback: Option<String>,
/// SPA mode (return the fallback page with OK status)
#[serde(default)]
pub spa: bool,
}

impl Default for Version {

@@ -81,8 +74,6 @@ impl Default for Version {
Self {
created_at: OffsetDateTime::now_utc(),
data: Default::default(),
fallback: Default::default(),
spa: Default::default(),
}
}
}
src/main.rs

@@ -2,11 +2,6 @@ use talon::{Result, Talon};

#[tokio::main]
async fn main() -> Result<()> {
if std::env::var_os("RUST_LOG").is_none() {
std::env::set_var("RUST_LOG", "info");
}
tracing_subscriber::fmt::init();

let talon = Talon::new("tmp")?;
talon.launch().await
}
src/model.rs

@@ -75,10 +75,7 @@ pub struct Version {
pub data: BTreeMap<String, String>,
}

#[derive(
Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Enum, Serialize, Deserialize,
)]
#[oai(rename_all = "snake_case")]
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Enum, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Visibility {
Featured,

@@ -88,7 +85,6 @@ pub enum Visibility {
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Enum, Serialize, Deserialize)]
#[oai(rename_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum SourceIcon {
Link,
src/oai.rs

@@ -15,11 +15,11 @@ impl<'a> ApiExtractor<'a> for DynParams {
const TYPE: ApiExtractorType = ApiExtractorType::Parameter;
const PARAM_IS_REQUIRED: bool = false;

type ParamType = Self;
type ParamRawType = BTreeMap<String, String>;
type ParamType = BTreeMap<String, String>;
type ParamRawType = Self::ParamType;

fn register(registry: &mut Registry) {
Self::ParamRawType::register(registry);
Self::ParamType::register(registry);
}

fn param_in() -> Option<MetaParamIn> {

@@ -27,7 +27,7 @@ impl<'a> ApiExtractor<'a> for DynParams {
}

fn param_schema_ref() -> Option<MetaSchemaRef> {
Some(Self::ParamRawType::schema_ref())
Some(Self::ParamType::schema_ref())
}

fn param_raw_type(&self) -> Option<&Self::ParamRawType> {
src/page.rs — 33 changed lines

@@ -6,7 +6,7 @@ use poem::{
IntoResponse, Request, Response, Result,
};

use crate::{storage::StorageError, Talon};
use crate::Talon;

#[derive(thiserror::Error, Debug)]
pub enum PageError {

@@ -23,7 +23,7 @@ impl ResponseError for PageError {
}

#[handler]
pub async fn page(request: &Request, talon: Data<&Talon>) -> Result<Response> {
pub fn page(request: &Request, talon: Data<&Talon>) -> Result<Response> {
let host = request
.header(header::HOST)
.ok_or(PageError::InvalidSubdomain)?;

@@ -35,32 +35,13 @@ pub async fn page(request: &Request, talon: Data<&Talon>) -> Result<Response> {
};

let ws = talon.db.get_website(subdomain)?;
let vid = ws.latest_version.ok_or(PageError::NoVersion)?;
let (file, ok) =
match talon
.storage
.get_file(vid, request.original_uri().path(), request.headers())
{
Ok(file) => (file, true),
Err(StorageError::NotFound(f)) => {
let version = talon.db.get_version(subdomain, vid)?;
if let Some(fallback) = &version.fallback {
(
talon.storage.get_file(vid, fallback, request.headers())?,
version.spa,
)
} else {
return Err(StorageError::NotFound(f).into());
}
}
Err(e) => return Err(e.into()),
};
let version = ws.latest_version.ok_or(PageError::NoVersion)?;
let file = talon
.storage
.get_file(version, request.original_uri().path(), request.headers())?;

Ok(match file.rd_path {
Some(rd_path) => Redirect::moved_permanent(rd_path).into_response(),
None => file
.to_response(request.headers(), ok)
.await?
.into_response(),
None => file.to_response(request.headers())?.into_response(),
})
}
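`page()` resolves which website to serve from the request's Host header before looking up the latest version. The extraction logic itself is not part of this hunk; a hypothetical sketch of the idea, with a made-up helper name:

```rust
// Hypothetical: derive the website subdomain from a Host header, given the
// configured root domain. Talon's real routing lives elsewhere in the crate.
fn subdomain_from_host<'a>(host: &'a str, root_domain: &str) -> Option<&'a str> {
    let host = host.split(':').next()?; // drop an optional port
    host.strip_suffix(root_domain)?.strip_suffix('.') // "blog.example.com" -> "blog"
}

fn main() {
    assert_eq!(subdomain_from_host("blog.example.com", "example.com"), Some("blog"));
    assert_eq!(subdomain_from_host("example.com:80", "example.com"), None);
}
```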
@@ -70,8 +70,7 @@ impl Talon {
.at(
"/api/spec",
poem::endpoint::make_sync(move |_| spec.clone()),
)
.with(poem::middleware::Cors::new());
);

let internal_domain = format!(
"{}.{}",

@@ -83,7 +82,6 @@ impl Talon {
.at(&internal_domain, route_internal)
.at(&site_domains, page)
.at(&self.i.cfg.server.root_domain, page)
.with(poem::middleware::Tracing)
.data(self.clone());

Server::new(TcpListener::bind(&self.i.cfg.server.address))
src/storage.rs — 342 changed lines

@@ -1,6 +1,7 @@
use std::{
borrow::Cow,
fs::{self, File},
collections::BTreeMap,
fs,
io::{BufReader, Read, Seek, SeekFrom},
ops::Bound,
path::{Path, PathBuf},

@@ -33,7 +34,7 @@ pub struct Storage {
cfg: Config,
}

#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum CompressionAlg {
#[default]
None,

@@ -123,34 +124,32 @@ impl Storage {
let hash = util::hash_file(file_path)?;
let stored_file = self.file_path_mkdir(&hash)?;

if !stored_file.is_file() {
fs::copy(file_path, &stored_file)?;
fs::copy(file_path, &stored_file)?;

if self.cfg.compression.enabled()
&& mime_guess::from_path(file_path)
.first()
.map(|t| Self::is_compressible(t.essence_str()))
.unwrap_or_default()
{
if self.cfg.compression.gzip_en {
let mut encoder = GzEncoder::new(
fs::File::create(stored_file.with_extension("gz"))?,
flate2::Compression::new(self.cfg.compression.gzip_level.into()),
);
let mut input = BufReader::new(fs::File::open(&stored_file)?);
std::io::copy(&mut input, &mut encoder)?;
}
if self.cfg.compression.enabled()
&& mime_guess::from_path(file_path)
.first()
.map(|t| compressible::is_compressible(t.essence_str()))
.unwrap_or_default()
{
if self.cfg.compression.gzip_en {
let mut encoder = GzEncoder::new(
fs::File::create(stored_file.with_extension("gz"))?,
flate2::Compression::new(self.cfg.compression.gzip_level.into()),
);
let mut input = BufReader::new(fs::File::open(&stored_file)?);
std::io::copy(&mut input, &mut encoder)?;
}

if self.cfg.compression.brotli_en {
let mut encoder = brotli::CompressorWriter::new(
fs::File::create(stored_file.with_extension("br"))?,
4096,
self.cfg.compression.brotli_level.into(),
20,
);
let mut input = BufReader::new(fs::File::open(&stored_file)?);
std::io::copy(&mut input, &mut encoder)?;
}
if self.cfg.compression.brotli_en {
let mut encoder = brotli::CompressorWriter::new(
fs::File::create(stored_file.with_extension("br"))?,
4096,
self.cfg.compression.brotli_level.into(),
20,
);
let mut input = BufReader::new(fs::File::open(&stored_file)?);
std::io::copy(&mut input, &mut encoder)?;
}
}

@@ -237,8 +236,6 @@ impl Storage {
self.insert_dir(import_path, version)
}

/// Get the path of a file with the given hash while creating the subdirectory
/// if necessary
fn file_path_mkdir(&self, hash: &[u8]) -> Result<PathBuf> {
let hash_str = hash.encode_hex::<String>();

@@ -249,57 +246,34 @@ impl Storage {
Ok(subdir.join(&hash_str))
}

/// Get the path of a file with the given hash
fn file_path(&self, hash: &[u8]) -> PathBuf {
let hash_str = hash.encode_hex::<String>();
let subdir = self.path.join(&hash_str[..2]);
subdir.join(&hash_str)
}

/// Get all available compression algorithms for a stored file
fn file_compressions(&self, hash: &[u8], is_compressible: bool) -> Vec<CompressionAlg> {
let mut res = Vec::new();
fn files_compressed(&self, hash: &[u8]) -> BTreeMap<CompressionAlg, PathBuf> {
let path = self.file_path(hash);
let mut res = BTreeMap::new();

if is_compressible {
if self.cfg.compression.gzip_en {
let path_gz = path.with_extension("gz");
if path_gz.is_file() {
res.push(CompressionAlg::Gzip);
}
if self.cfg.compression.gzip_en {
let path_gz = path.with_extension("gz");
if path_gz.is_file() {
res.insert(CompressionAlg::Gzip, path_gz);
}
if self.cfg.compression.brotli_en {
let path_br = path.with_extension("br");
if path_br.is_file() {
res.push(CompressionAlg::Brotli);
}
}
if self.cfg.compression.brotli_en {
let path_br = path.with_extension("br");
if path_br.is_file() {
res.insert(CompressionAlg::Brotli, path_br);
}
}
if path.is_file() {
res.push(CompressionAlg::None);
res.insert(CompressionAlg::None, path);
}

res
}

/// Get the file path of a compressed file
fn file_path_compressed(&self, hash: &[u8], alg: CompressionAlg) -> PathBuf {
let path = self.file_path(hash);
match alg {
CompressionAlg::None => path,
CompressionAlg::Gzip => path.with_extension("gz"),
CompressionAlg::Brotli => path.with_extension("br"),
}
}

/// Check if a file with the given mime type should be compressed
///
/// HTML files should not be compressed, since they need to be injected with the
/// UI code
fn is_compressible(mime: &str) -> bool {
mime != "text/html" && compressible::is_compressible(mime)
}

/// Get a file using the raw site path and the website version
///
/// HTTP headers are used to determine if the compressed version of a file should be returned.
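One side of the diff replaces the Vec-returning `file_compressions` with `files_compressed`, which maps each available `CompressionAlg` to its on-disk path. Because the enum derives `Ord`, "best available variant" becomes a max over the map keys. A standalone sketch (paths fabricated):

```rust
use std::{collections::BTreeMap, path::PathBuf};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum CompressionAlg { None, Gzip, Brotli }

fn main() {
    let mut files: BTreeMap<CompressionAlg, PathBuf> = BTreeMap::new();
    files.insert(CompressionAlg::None, PathBuf::from("ab/abcd1234"));
    files.insert(CompressionAlg::Gzip, PathBuf::from("ab/abcd1234.gz"));

    // Ord on the key makes the preferred variant a simple max over the map.
    let (best, path) = files.iter().max_by_key(|(alg, _)| **alg).unwrap();
    assert_eq!(*best, CompressionAlg::Gzip);
    assert_eq!(path, &PathBuf::from("ab/abcd1234.gz"));
}
```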
@@ -341,19 +315,14 @@ impl Storage {

let mime = util::site_path_mime(&new_path);

let algorithms = self.file_compressions(
&hash,
mime.as_ref()
.map(|m| Self::is_compressible(m.essence_str()))
.unwrap_or_default(),
);
let alg = util::parse_accept_encoding(headers, &algorithms);
let files = self.files_compressed(&hash);
let file = util::parse_accept_encoding(headers, &files);

match alg {
Some(alg) => Ok(GotFile {
match file {
Some((compression, file)) => Ok(GotFile {
hash: hash.encode_hex(),
file_path: self.file_path_compressed(&hash, alg),
encoding: alg.encoding(),
file_path: file.to_owned(),
encoding: compression.encoding(),
mime,
rd_path,
}),

@@ -369,169 +338,104 @@ impl GotFile {
/// Convert the retrieved file to an HTTP response
///
/// Adapted from: <https://github.com/poem-web/poem/blob/049215cf02c5d4b1ab76f290b4708f3142d6d61b/poem/src/web/static_file.rs#L175>
pub async fn to_response(
pub fn to_response(
self,
headers: &HeaderMap,
ok: bool,
) -> std::result::Result<Response, StaticFileError> {
let mut file = File::open(&self.file_path)?;
let path = self.file_path;
let mut file = std::fs::File::open(path)?;
let metadata = file.metadata()?;

// content length
let mut content_length = metadata.len();

// etag and last modified
let etag = headers::ETag::from_str(&format!("\"{}\"", self.hash)).unwrap();
let mut last_modified_str = String::new();

if ok {
// handle if-match and if-(un)modified queries
let if_match = headers.typed_get::<headers::IfMatch>();
let if_unmodified_since = headers.typed_get::<headers::IfUnmodifiedSince>();
let if_none_match = headers.typed_get::<headers::IfNoneMatch>();
let if_modified_since = headers.typed_get::<headers::IfModifiedSince>();
// extract headers
let if_match = headers.typed_get::<headers::IfMatch>();
let if_unmodified_since = headers.typed_get::<headers::IfUnmodifiedSince>();
let if_none_match = headers.typed_get::<headers::IfNoneMatch>();
let if_modified_since = headers.typed_get::<headers::IfModifiedSince>();
let range = headers.typed_get::<headers::Range>();

if let Ok(modified) = metadata.modified() {
if let Some(if_match) = if_match {
if !if_match.precondition_passes(&etag) {
return Err(StaticFileError::PreconditionFailed);
}
if let Ok(modified) = metadata.modified() {
let etag = headers::ETag::from_str(&format!("\"{}\"", self.hash)).unwrap();

if let Some(if_match) = if_match {
if !if_match.precondition_passes(&etag) {
return Err(StaticFileError::PreconditionFailed);
}

if let Some(if_unmodified_since) = if_unmodified_since {
if !if_unmodified_since.precondition_passes(modified) {
return Err(StaticFileError::PreconditionFailed);
}
}

if let Some(if_non_match) = if_none_match {
if !if_non_match.precondition_passes(&etag) {
return Ok(StatusCode::NOT_MODIFIED.into());
}
} else if let Some(if_modified_since) = if_modified_since {
if !if_modified_since.is_modified(modified) {
return Ok(StatusCode::NOT_MODIFIED.into());
}
}

last_modified_str = HttpDate::from(modified).to_string();
}

if let Some(if_unmodified_since) = if_unmodified_since {
if !if_unmodified_since.precondition_passes(modified) {
return Err(StaticFileError::PreconditionFailed);
}
}

if let Some(if_non_match) = if_none_match {
if !if_non_match.precondition_passes(&etag) {
return Ok(StatusCode::NOT_MODIFIED.into());
}
} else if let Some(if_modified_since) = if_modified_since {
if !if_modified_since.is_modified(modified) {
return Ok(StatusCode::NOT_MODIFIED.into());
}
}

last_modified_str = HttpDate::from(modified).to_string();
}

if self
.mime
.as_ref()
.map(|m| m.essence_str() == "text/html")
.unwrap_or_default()
{
// Inject UI code into HTML
let to_inject = "<!-- Hello World -->\n";
let mut content_range = None;

let mut html = String::with_capacity(metadata.len() as usize);
tokio::fs::File::from_std(file)
.read_to_string(&mut html)
.await?;

if let Some(ctag_pos) = html.rfind("</html>") {
html.insert_str(ctag_pos, to_inject);
}

// Compress response if possible
let alg = util::parse_accept_encoding(
headers,
&[
CompressionAlg::Brotli,
CompressionAlg::Gzip,
CompressionAlg::None,
],
)
.unwrap_or_default();
let body = match alg {
CompressionAlg::None => Body::from(html),
CompressionAlg::Gzip => {
let enc = async_compression::tokio::bufread::GzipEncoder::with_quality(
tokio::io::BufReader::new(Body::from(html).into_async_read()),
async_compression::Level::Precise(6),
);
Body::from_async_read(enc)
}
CompressionAlg::Brotli => {
let enc = async_compression::tokio::bufread::BrotliEncoder::with_quality(
tokio::io::BufReader::new(Body::from(html).into_async_read()),
async_compression::Level::Precise(7),
);
Body::from_async_read(enc)
}
let body = if let Some((start, end)) = range.and_then(|range| range.iter().next()) {
let start = match start {
Bound::Included(n) => n,
Bound::Excluded(n) => n + 1,
Bound::Unbounded => 0,
};

// Build response
let mut response = Response::builder()
.header(header::CONTENT_TYPE, "text/html")
.typed_header(etag);

if let Some(encoding) = alg.encoding() {
response = response.header(header::CONTENT_ENCODING, encoding)
}
if !last_modified_str.is_empty() {
response = response.header(header::LAST_MODIFIED, last_modified_str);
let end = match end {
Bound::Included(n) => n + 1,
Bound::Excluded(n) => n,
Bound::Unbounded => metadata.len(),
};
if end < start || end > metadata.len() {
return Err(StaticFileError::RangeNotSatisfiable {
size: metadata.len(),
});
}

Ok(response.body(body))
if start != 0 || end != metadata.len() {
content_range = Some((start..end, metadata.len()));
}

content_length = end - start;
file.seek(SeekFrom::Start(start))?;
Body::from_async_read(tokio::fs::File::from_std(file).take(end - start))
} else {
// Handle range requests
let range = headers.typed_get::<headers::Range>().filter(|_| ok);
let size = metadata.len();
let mut content_length = size;
let mut content_range = None;
Body::from_async_read(tokio::fs::File::from_std(file))
};

let body = if let Some((start, end)) = range.and_then(|range| range.iter().next()) {
let start = match start {
Bound::Included(n) => n,
Bound::Excluded(n) => n + 1,
Bound::Unbounded => 0,
};
let end = match end {
Bound::Included(n) => n + 1,
Bound::Excluded(n) => n,
Bound::Unbounded => size,
};
if end < start || end > size {
return Err(StaticFileError::RangeNotSatisfiable { size });
}
let mut response = Response::builder()
.header(header::ACCEPT_RANGES, "bytes")
.header(header::CONTENT_LENGTH, content_length)
.header(header::ETAG, self.hash);

if start != 0 || end != size {
content_range = Some((start..end, size));
}

content_length = end - start;
file.seek(SeekFrom::Start(start))?;
Body::from_async_read(tokio::fs::File::from_std(file).take(end - start))
} else {
Body::from_async_read(tokio::fs::File::from_std(file))
};

// Build response
let mut response = Response::builder().header(header::CONTENT_LENGTH, content_length);

if ok {
response = response
.typed_header(etag)
.header(header::ACCEPT_RANGES, "bytes");
} else {
response = response.status(StatusCode::NOT_FOUND);
}
if !last_modified_str.is_empty() {
response = response.header(header::LAST_MODIFIED, last_modified_str);
}
if let Some(encoding) = self.encoding {
response = response.header(header::CONTENT_ENCODING, encoding);
}
if let Some(mime) = &self.mime {
response = response.header(header::CONTENT_TYPE, mime.essence_str());
}
if let Some((range, size)) = content_range {
response = response
.status(StatusCode::PARTIAL_CONTENT)
.typed_header(headers::ContentRange::bytes(range, size).unwrap());
}
Ok(response.body(body))
if !last_modified_str.is_empty() {
response = response.header(header::LAST_MODIFIED, last_modified_str);
}
if let Some(encoding) = self.encoding {
response = response.header(header::CONTENT_ENCODING, encoding);
}
if let Some(mime) = self.mime {
response = response.header(header::CONTENT_TYPE, mime.essence_str());
}
if let Some((range, size)) = content_range {
response = response
.status(StatusCode::PARTIAL_CONTENT)
.typed_header(headers::ContentRange::bytes(range, size).unwrap());
}
Ok(response.body(body))
}
}
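Both versions of `to_response` translate the typed Range header's `Bound` pair into byte offsets before seeking into the file. A standalone sketch of that conversion, using the same inclusive/exclusive rules and the same satisfiability check:

```rust
use std::ops::Bound;

// Convert an HTTP Range bound pair into (start, end) byte offsets, where
// `end` is exclusive; returns None when the range is not satisfiable.
fn to_offsets(start: Bound<u64>, end: Bound<u64>, size: u64) -> Option<(u64, u64)> {
    let start = match start {
        Bound::Included(n) => n,
        Bound::Excluded(n) => n + 1,
        Bound::Unbounded => 0,
    };
    let end = match end {
        Bound::Included(n) => n + 1, // inclusive header value -> exclusive offset
        Bound::Excluded(n) => n,
        Bound::Unbounded => size,
    };
    (start <= end && end <= size).then_some((start, end))
}

fn main() {
    // "bytes=0-499" arrives as (Included(0), Included(499)): the first 500 bytes.
    assert_eq!(to_offsets(Bound::Included(0), Bound::Included(499), 1000), Some((0, 500)));
    assert_eq!(to_offsets(Bound::Included(600), Bound::Unbounded, 500), None);
}
```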
src/util.rs — 53 changed lines

@@ -1,4 +1,4 @@
use std::{fs::File, path::Path, str::FromStr};
use std::{collections::BTreeMap, fs::File, path::Path, str::FromStr};

use mime_guess::Mime;
use poem::http::{header, HeaderMap};

@@ -53,14 +53,14 @@ impl FromStr for ContentCoding {
}
}

/// Parse Accept-Encoding header and return the preferred algorithm
/// Parse Accept-Encoding header and return the compressed file with the preferred algorithm
///
/// Source: <https://github.com/poem-web/poem/blob/049215cf02c5d4b1ab76f290b4708f3142d6d61b/poem/src/middleware/compression.rs#L36>
pub fn parse_accept_encoding(
pub fn parse_accept_encoding<'a, T>(
headers: &HeaderMap,
enabled_algorithms: &[CompressionAlg],
) -> Option<CompressionAlg> {
if enabled_algorithms.is_empty() {
files: &'a BTreeMap<CompressionAlg, T>,
) -> Option<(CompressionAlg, &'a T)> {
if files.is_empty() {
return None;
}

@@ -75,20 +75,23 @@ pub fn parse_accept_encoding(
None => (v, 1000),
};
let coding: ContentCoding = e.parse().ok()?;
let alg = match coding {
ContentCoding::Brotli => Some(CompressionAlg::Brotli)
.filter(|_| enabled_algorithms.contains(&CompressionAlg::Brotli)),
ContentCoding::Gzip => Some(CompressionAlg::Gzip)
.filter(|_| enabled_algorithms.contains(&CompressionAlg::Gzip)),
ContentCoding::Star => enabled_algorithms.iter().max().copied(),
let alg_file = match coding {
ContentCoding::Brotli => {
(CompressionAlg::Brotli, files.get(&CompressionAlg::Brotli)?)
}
ContentCoding::Gzip => (CompressionAlg::Gzip, files.get(&CompressionAlg::Gzip)?),
ContentCoding::Star => {
files.iter().max_by_key(|(a, _)| *a).map(|(a, f)| (*a, f))?
}
};
alg.map(|alg| (alg, q))
Some((alg_file, q))
})
.max_by_key(|(a, q)| (*q, *a))
.map(|(a, _)| a)
.max_by_key(|((a, _), q)| (*q, *a))
.map(|(x, _)| x)
.or_else(|| {
Some(CompressionAlg::None)
.filter(|_| enabled_algorithms.contains(&CompressionAlg::None))
files
.get(&CompressionAlg::None)
.map(|f| (CompressionAlg::None, f))
})
}
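In both variants the winner is chosen by ranking candidates on `(q, algorithm)`, where a missing `q=` parameter defaults to 1000 (q-values scaled by 1000 to stay in integers). A tiny standalone illustration of that ordering:

```rust
fn main() {
    // "gzip;q=0.8, br" -> gzip at 800, brotli at the default 1000, so brotli wins.
    // "gzip, br;q=0.5" would pick gzip instead.
    let candidates = [("gzip", 800u32), ("br", 1000u32)];
    let best = candidates.iter().max_by_key(|(_, q)| *q).unwrap();
    assert_eq!(best.0, "br");
}
```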
@@ -159,15 +162,13 @@ mod tests {
let mut headers = HeaderMap::new();
headers.insert(header::ACCEPT_ENCODING, accept.parse().unwrap());

let compression = parse_accept_encoding(
&headers,
&[
CompressionAlg::Gzip,
CompressionAlg::Brotli,
CompressionAlg::None,
],
)
.unwrap();
let mut files = BTreeMap::new();
files.insert(CompressionAlg::None, 0);
files.insert(CompressionAlg::Gzip, 1);
files.insert(CompressionAlg::Brotli, 2);

let (compression, file) = parse_accept_encoding(&headers, &files).unwrap();
assert_eq!(compression, expect);
assert_eq!(file, files.get(&compression).unwrap());
}
}
tests/fixtures/mod.rs — 4 changed lines (vendored)

@@ -105,7 +105,6 @@ fn insert_websites(db: &Db) {
&Version {
created_at: datetime!(2023-02-18 16:30 +0),
data: v1_data,
..Default::default()
},
)
.unwrap();

@@ -123,7 +122,6 @@ fn insert_websites(db: &Db) {
&Version {
created_at: datetime!(2023-02-18 16:52 +0),
data: v2_data,
..Default::default()
},
)
.unwrap();

@@ -135,7 +133,6 @@ fn insert_websites(db: &Db) {
&Version {
created_at: datetime!(2023-02-18 16:30 +0),
data: BTreeMap::new(),
..Default::default()
},
)
.unwrap();

@@ -146,7 +143,6 @@ fn insert_websites(db: &Db) {
&Version {
created_at: datetime!(2023-02-20 18:30 +0),
data: BTreeMap::new(),
..Default::default()
},
)
.unwrap();
@@ -18,21 +18,15 @@ ConfigInner(
keys: {
"04e99561e3824f387a217d141d2a3b46375de6864afbedf9c9a2cc102bc946a4": KeyCfg(
domains: "/^talon-\\d+/",
upload: false,
modify: false,
),
"21bdac19ffd22870d561b1d55b35eddd9029497107edb7b926aa3e7856bb409b": KeyCfg(
domains: [
"spotify-gender-ex",
"rustypipe",
],
upload: false,
modify: false,
),
"c32ff286c8ac1c3102625badf38ffd251ae0c4a56079d8ba490f320af63f1f47": KeyCfg(
domains: "*",
upload: false,
modify: false,
),
},
)

@@ -16,22 +16,15 @@ ConfigInner(
brotli_level: 7,
),
keys: {
"04e99561e3824f387a217d141d2a3b46375de6864afbedf9c9a2cc102bc946a4": KeyCfg(
upload: false,
modify: false,
),
"04e99561e3824f387a217d141d2a3b46375de6864afbedf9c9a2cc102bc946a4": KeyCfg(),
"21bdac19ffd22870d561b1d55b35eddd9029497107edb7b926aa3e7856bb409b": KeyCfg(
domains: [
"spotify-gender-ex",
"rustypipe",
],
upload: false,
modify: false,
),
"c32ff286c8ac1c3102625badf38ffd251ae0c4a56079d8ba490f320af63f1f47": KeyCfg(
domains: "*",
upload: false,
modify: false,
),
},
)
@@ -4,8 +4,8 @@ expression: data
---
{"type":"website","key":"rustypipe","value":{"name":"RustyPipe","created_at":[2023,51,18,30,0,0,0,0,0],"latest_version":4,"color":7943647,"visibility":"featured","source_url":"https://code.thetadev.de/ThetaDev/rustypipe","source_icon":"gitea"}}
{"type":"website","key":"spotify-gender-ex","value":{"name":"Spotify-Gender-Ex","created_at":[2023,49,16,30,0,0,0,0,0],"latest_version":3,"color":1947988,"visibility":"featured","source_url":"https://github.com/Theta-Dev/Spotify-Gender-Ex","source_icon":"github"}}
{"type":"version","key":"rustypipe:4","value":{"created_at":[2023,51,18,30,0,0,0,0,0],"data":{},"fallback":null,"spa":false}}
{"type":"version","key":"spotify-gender-ex:3","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{},"fallback":null,"spa":false}}
{"type":"version","key":"rustypipe:4","value":{"created_at":[2023,51,18,30,0,0,0,0,0],"data":{}}}
{"type":"version","key":"spotify-gender-ex:3","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{}}}
{"type":"file","key":"3:gex_style.css","value":"fc825b409a49724af8f5b3c4ad15e175e68095ea746237a7b46152d3f383f541"}
{"type":"file","key":"3:index.html","value":"6c5d37546616519e8973be51515b8a90898b4675f7b6d01f2d891edb686408a2"}
{"type":"file","key":"4:index.html","value":"94a67cf13d752a9c1875ad999eb2be5a1b0f9746c66bca2631820b8186028811"}
@@ -5,10 +5,10 @@ expression: data
{"type":"website","key":"-","value":{"name":"ThetaDev","created_at":[2023,49,16,30,0,0,0,0,0],"latest_version":2,"color":2068974,"visibility":"featured","source_url":null,"source_icon":null}}
{"type":"website","key":"rustypipe","value":{"name":"RustyPipe","created_at":[2023,51,18,30,0,0,0,0,0],"latest_version":4,"color":7943647,"visibility":"featured","source_url":"https://code.thetadev.de/ThetaDev/rustypipe","source_icon":"gitea"}}
{"type":"website","key":"spotify-gender-ex","value":{"name":"Spotify-Gender-Ex","created_at":[2023,49,16,30,0,0,0,0,0],"latest_version":3,"color":1947988,"visibility":"featured","source_url":"https://github.com/Theta-Dev/Spotify-Gender-Ex","source_icon":"github"}}
{"type":"version","key":"-:1","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{"Deployed by":"https://github.com/Theta-Dev/Talon/actions/runs/1352014628","Version":"v0.1.0"},"fallback":null,"spa":false}}
{"type":"version","key":"-:2","value":{"created_at":[2023,49,16,52,0,0,0,0,0],"data":{"Deployed by":"https://github.com/Theta-Dev/Talon/actions/runs/1354755231","Version":"v0.1.1"},"fallback":null,"spa":false}}
{"type":"version","key":"rustypipe:4","value":{"created_at":[2023,51,18,30,0,0,0,0,0],"data":{},"fallback":null,"spa":false}}
{"type":"version","key":"spotify-gender-ex:3","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{},"fallback":null,"spa":false}}
{"type":"version","key":"-:1","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{"Deployed by":"https://github.com/Theta-Dev/Talon/actions/runs/1352014628","Version":"v0.1.0"}}}
{"type":"version","key":"-:2","value":{"created_at":[2023,49,16,52,0,0,0,0,0],"data":{"Deployed by":"https://github.com/Theta-Dev/Talon/actions/runs/1354755231","Version":"v0.1.1"}}}
{"type":"version","key":"rustypipe:4","value":{"created_at":[2023,51,18,30,0,0,0,0,0],"data":{}}}
{"type":"version","key":"spotify-gender-ex:3","value":{"created_at":[2023,49,16,30,0,0,0,0,0],"data":{}}}
{"type":"file","key":"1:index.html","value":"3b5f6bad5376897435def176d0fe77e5b9b4f0deafc7491fc27262650744ad68"}
{"type":"file","key":"1:style.css","value":"356f131c825fbf604797c7e9c85352549d81db8af91fee834016d075110af026"}
{"type":"file","key":"2:assets/image.jpg","value":"901d291a47a8a9b55c06f84e5e5f82fd2dcee65cac1406d6e878b805d45c1e93"}
@@ -1,5 +1,5 @@
---
source: tests/tests.rs
source: src/db/mod.rs
expression: version
---
Version(

@@ -8,6 +8,4 @@ Version(
"Deployed by": "https://github.com/Theta-Dev/Talon/actions/runs/1352014628",
"Version": "v0.1.0",
},
fallback: None,
spa: false,
)
@@ -1,5 +1,5 @@
---
source: tests/tests.rs
source: src/db/mod.rs
expression: versions
---
[

@@ -9,8 +9,6 @@ expression: versions
"Deployed by": "https://github.com/Theta-Dev/Talon/actions/runs/1352014628",
"Version": "v0.1.0",
},
fallback: None,
spa: false,
)),
(2, Version(
created_at: (2023, 49, 16, 52, 0, 0, 0, 0, 0),

@@ -18,7 +16,5 @@ expression: versions
"Deployed by": "https://github.com/Theta-Dev/Talon/actions/runs/1354755231",
"Version": "v0.1.1",
},
fallback: None,
spa: false,
)),
]
tests/tests.rs

@@ -283,19 +283,17 @@ mod storage {
// Images should not be compressed
let expect = &hash_str
!= "901d291a47a8a9b55c06f84e5e5f82fd2dcee65cac1406d6e878b805d45c1e93"
&& &hash_str != "9f7e7971b4bfdb75429e534dea461ed90340886925078cda252cada9aa0e25f7"
&& &hash_str != "a44816e6c3b650bdf88e6532659ba07ef187c2113ae311da9709e056aec8eadb";
&& &hash_str != "9f7e7971b4bfdb75429e534dea461ed90340886925078cda252cada9aa0e25f7";
assert_eq!(path_compressed.is_file(), expect)
}
}

#[rstest]
#[case::index("br", VERSION_1_2, "", false, "text/html", None)]
#[case::nocmp("", VERSION_1_2, "assets/style.css", true, "text/css", None)]
#[case::gzip("gzip", VERSION_1_2, "assets/style.css", true, "text/css", None)]
#[case::br("br", VERSION_1_2, "assets/style.css", true, "text/css", None)]
#[case::nocmp("", VERSION_1_2, "", true, "text/html", None)]
#[case::gzip("gzip", VERSION_1_2, "", true, "text/html", None)]
#[case::br("br", VERSION_1_2, "", true, "text/html", None)]
#[case::image("br", VERSION_1_2, "assets/image.jpg", false, "image/jpeg", None)]
#[case::subdir("br", VERSION_3_1, "page2", false, "text/html", Some("/page2/"))]
#[case::subdir("br", VERSION_3_1, "page2", true, "text/html", Some("/page2/"))]
fn get_file(
store: StorageTest,
#[case] encoding: &str,