mirror of
https://github.com/dani-garcia/vaultwarden.git
synced 2025-06-21 02:40:08 +00:00
Update Rust, Crates and Web-Vault (#5860)
- Updated web-vault to v2025.5.0 - Updated Rust to v1.87.0 - Updated all the crates - Replaced yubico with yubico_ng - Fixed several new (nightly) clippy lints Signed-off-by: BlackDex <black.dex@gmail.com>
This commit is contained in:
parent
ad8484a2d5
commit
73f2441d1a
30 changed files with 418 additions and 480 deletions
617
Cargo.lock
generated
617
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
30
Cargo.toml
30
Cargo.toml
|
@ -6,7 +6,7 @@ name = "vaultwarden"
|
||||||
version = "1.0.0"
|
version = "1.0.0"
|
||||||
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
|
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.84.0"
|
rust-version = "1.85.0"
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
repository = "https://github.com/dani-garcia/vaultwarden"
|
repository = "https://github.com/dani-garcia/vaultwarden"
|
||||||
|
@ -72,14 +72,14 @@ dashmap = "6.1.0"
|
||||||
|
|
||||||
# Async futures
|
# Async futures
|
||||||
futures = "0.3.31"
|
futures = "0.3.31"
|
||||||
tokio = { version = "1.44.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
tokio = { version = "1.45.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||||
|
|
||||||
# A generic serialization/deserialization framework
|
# A generic serialization/deserialization framework
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
serde_json = "1.0.140"
|
serde_json = "1.0.140"
|
||||||
|
|
||||||
# A safe, extensible ORM and Query builder
|
# A safe, extensible ORM and Query builder
|
||||||
diesel = { version = "2.2.9", features = ["chrono", "r2d2", "numeric"] }
|
diesel = { version = "2.2.10", features = ["chrono", "r2d2", "numeric"] }
|
||||||
diesel_migrations = "2.2.0"
|
diesel_migrations = "2.2.0"
|
||||||
diesel_logger = { version = "0.4.0", optional = true }
|
diesel_logger = { version = "0.4.0", optional = true }
|
||||||
|
|
||||||
|
@ -87,10 +87,10 @@ derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref"
|
||||||
diesel-derive-newtype = "2.1.2"
|
diesel-derive-newtype = "2.1.2"
|
||||||
|
|
||||||
# Bundled/Static SQLite
|
# Bundled/Static SQLite
|
||||||
libsqlite3-sys = { version = "0.32.0", features = ["bundled"], optional = true }
|
libsqlite3-sys = { version = "0.33.0", features = ["bundled"], optional = true }
|
||||||
|
|
||||||
# Crypto-related libraries
|
# Crypto-related libraries
|
||||||
rand = "0.9.0"
|
rand = "0.9.1"
|
||||||
ring = "0.17.14"
|
ring = "0.17.14"
|
||||||
subtle = "2.6.1"
|
subtle = "2.6.1"
|
||||||
|
|
||||||
|
@ -98,15 +98,15 @@ subtle = "2.6.1"
|
||||||
uuid = { version = "1.16.0", features = ["v4"] }
|
uuid = { version = "1.16.0", features = ["v4"] }
|
||||||
|
|
||||||
# Date and time libraries
|
# Date and time libraries
|
||||||
chrono = { version = "0.4.40", features = ["clock", "serde"], default-features = false }
|
chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
|
||||||
chrono-tz = "0.10.3"
|
chrono-tz = "0.10.3"
|
||||||
time = "0.3.41"
|
time = "0.3.41"
|
||||||
|
|
||||||
# Job scheduler
|
# Job scheduler
|
||||||
job_scheduler_ng = "2.0.5"
|
job_scheduler_ng = "2.2.0"
|
||||||
|
|
||||||
# Data encoding library Hex/Base32/Base64
|
# Data encoding library Hex/Base32/Base64
|
||||||
data-encoding = "2.8.0"
|
data-encoding = "2.9.0"
|
||||||
|
|
||||||
# JWT library
|
# JWT library
|
||||||
jsonwebtoken = "9.3.1"
|
jsonwebtoken = "9.3.1"
|
||||||
|
@ -115,7 +115,7 @@ jsonwebtoken = "9.3.1"
|
||||||
totp-lite = "2.0.1"
|
totp-lite = "2.0.1"
|
||||||
|
|
||||||
# Yubico Library
|
# Yubico Library
|
||||||
yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false }
|
yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
|
||||||
|
|
||||||
# WebAuthn libraries
|
# WebAuthn libraries
|
||||||
webauthn-rs = "0.3.2"
|
webauthn-rs = "0.3.2"
|
||||||
|
@ -124,7 +124,7 @@ webauthn-rs = "0.3.2"
|
||||||
url = "2.5.4"
|
url = "2.5.4"
|
||||||
|
|
||||||
# Email libraries
|
# Email libraries
|
||||||
lettre = { version = "0.11.15", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
lettre = { version = "0.11.16", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||||
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
|
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
|
||||||
email_address = "0.2.9"
|
email_address = "0.2.9"
|
||||||
|
|
||||||
|
@ -133,7 +133,7 @@ handlebars = { version = "6.3.2", features = ["dir_source"] }
|
||||||
|
|
||||||
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
||||||
reqwest = { version = "0.12.15", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
|
reqwest = { version = "0.12.15", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
|
||||||
hickory-resolver = "0.25.1"
|
hickory-resolver = "0.25.2"
|
||||||
|
|
||||||
# Favicon extraction libraries
|
# Favicon extraction libraries
|
||||||
html5gum = "0.7.0"
|
html5gum = "0.7.0"
|
||||||
|
@ -171,17 +171,13 @@ which = "7.0.3"
|
||||||
argon2 = "0.5.3"
|
argon2 = "0.5.3"
|
||||||
|
|
||||||
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
|
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
|
||||||
rpassword = "7.3.1"
|
rpassword = "7.4.0"
|
||||||
|
|
||||||
# Loading a dynamic CSS Stylesheet
|
# Loading a dynamic CSS Stylesheet
|
||||||
grass_compiler = { version = "0.13.4", default-features = false }
|
grass_compiler = { version = "0.13.4", default-features = false }
|
||||||
|
|
||||||
[patch.crates-io]
|
|
||||||
# Patch yubico to remove duplicate crates of older versions
|
|
||||||
yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" }
|
|
||||||
|
|
||||||
# Strip debuginfo from the release builds
|
# Strip debuginfo from the release builds
|
||||||
# The symbols are the provide better panic traces
|
# The debug symbols are to provide better panic traces
|
||||||
# Also enable fat LTO and use 1 codegen unit for optimizations
|
# Also enable fat LTO and use 1 codegen unit for optimizations
|
||||||
[profile.release]
|
[profile.release]
|
||||||
strip = "debuginfo"
|
strip = "debuginfo"
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
---
|
---
|
||||||
vault_version: "v2025.3.1"
|
vault_version: "v2025.5.0"
|
||||||
vault_image_digest: "sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd"
|
vault_image_digest: "sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e"
|
||||||
# Cross Compile Docker Helper Scripts v1.6.1
|
# Cross Compile Docker Helper Scripts v1.6.1
|
||||||
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
|
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
|
||||||
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
|
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
|
||||||
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
|
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
|
||||||
rust_version: 1.86.0 # Rust version to be used
|
rust_version: 1.87.0 # Rust version to be used
|
||||||
debian_version: bookworm # Debian release name to be used
|
debian_version: bookworm # Debian release name to be used
|
||||||
alpine_version: "3.21" # Alpine version to be used
|
alpine_version: "3.21" # Alpine version to be used
|
||||||
# For which platforms/architectures will we try to build images
|
# For which platforms/architectures will we try to build images
|
||||||
|
|
|
@ -19,23 +19,23 @@
|
||||||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||||
# click the tag name to view the digest of the image it currently points to.
|
# click the tag name to view the digest of the image it currently points to.
|
||||||
# - From the command line:
|
# - From the command line:
|
||||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.3.1
|
# $ docker pull docker.io/vaultwarden/web-vault:v2025.5.0
|
||||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.3.1
|
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.5.0
|
||||||
# [docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd]
|
# [docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e]
|
||||||
#
|
#
|
||||||
# - Conversely, to get the tag name from the digest:
|
# - Conversely, to get the tag name from the digest:
|
||||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd
|
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e
|
||||||
# [docker.io/vaultwarden/web-vault:v2025.3.1]
|
# [docker.io/vaultwarden/web-vault:v2025.5.0]
|
||||||
#
|
#
|
||||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd AS vault
|
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e AS vault
|
||||||
|
|
||||||
########################## ALPINE BUILD IMAGES ##########################
|
########################## ALPINE BUILD IMAGES ##########################
|
||||||
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
|
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
|
||||||
## And for Alpine we define all build images here, they will only be loaded when actually used
|
## And for Alpine we define all build images here, they will only be loaded when actually used
|
||||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.86.0 AS build_amd64
|
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.87.0 AS build_amd64
|
||||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.86.0 AS build_arm64
|
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.87.0 AS build_arm64
|
||||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.86.0 AS build_armv7
|
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.87.0 AS build_armv7
|
||||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.86.0 AS build_armv6
|
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.87.0 AS build_armv6
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# hadolint ignore=DL3006
|
# hadolint ignore=DL3006
|
||||||
|
|
|
@ -19,15 +19,15 @@
|
||||||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||||
# click the tag name to view the digest of the image it currently points to.
|
# click the tag name to view the digest of the image it currently points to.
|
||||||
# - From the command line:
|
# - From the command line:
|
||||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.3.1
|
# $ docker pull docker.io/vaultwarden/web-vault:v2025.5.0
|
||||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.3.1
|
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.5.0
|
||||||
# [docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd]
|
# [docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e]
|
||||||
#
|
#
|
||||||
# - Conversely, to get the tag name from the digest:
|
# - Conversely, to get the tag name from the digest:
|
||||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd
|
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e
|
||||||
# [docker.io/vaultwarden/web-vault:v2025.3.1]
|
# [docker.io/vaultwarden/web-vault:v2025.5.0]
|
||||||
#
|
#
|
||||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd AS vault
|
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e AS vault
|
||||||
|
|
||||||
########################## Cross Compile Docker Helper Scripts ##########################
|
########################## Cross Compile Docker Helper Scripts ##########################
|
||||||
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
|
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
|
||||||
|
@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# hadolint ignore=DL3006
|
# hadolint ignore=DL3006
|
||||||
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.86.0-slim-bookworm AS build
|
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.87.0-slim-bookworm AS build
|
||||||
COPY --from=xx / /
|
COPY --from=xx / /
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG TARGETVARIANT
|
ARG TARGETVARIANT
|
||||||
|
|
|
@ -10,7 +10,7 @@ proc-macro = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
quote = "1.0.40"
|
quote = "1.0.40"
|
||||||
syn = "2.0.100"
|
syn = "2.0.101"
|
||||||
|
|
||||||
[lints]
|
[lints]
|
||||||
workspace = true
|
workspace = true
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
[toolchain]
|
[toolchain]
|
||||||
channel = "1.86.0"
|
channel = "1.87.0"
|
||||||
components = [ "rustfmt", "clippy" ]
|
components = [ "rustfmt", "clippy" ]
|
||||||
profile = "minimal"
|
profile = "minimal"
|
||||||
|
|
|
@ -102,7 +102,7 @@ const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000";
|
||||||
pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000";
|
pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000";
|
||||||
|
|
||||||
fn admin_path() -> String {
|
fn admin_path() -> String {
|
||||||
format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
|
format!("{}{ADMIN_PATH}", CONFIG.domain_path())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -206,7 +206,7 @@ fn post_admin_login(
|
||||||
|
|
||||||
cookies.add(cookie);
|
cookies.add(cookie);
|
||||||
if let Some(redirect) = redirect {
|
if let Some(redirect) = redirect {
|
||||||
Ok(Redirect::to(format!("{}{}", admin_path(), redirect)))
|
Ok(Redirect::to(format!("{}{redirect}", admin_path())))
|
||||||
} else {
|
} else {
|
||||||
Err(AdminResponse::Ok(render_admin_page()))
|
Err(AdminResponse::Ok(render_admin_page()))
|
||||||
}
|
}
|
||||||
|
@ -427,7 +427,7 @@ async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt:
|
||||||
for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
|
for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
|
||||||
match unregister_push_device(device.push_uuid).await {
|
match unregister_push_device(device.push_uuid).await {
|
||||||
Ok(r) => r,
|
Ok(r) => r,
|
||||||
Err(e) => error!("Unable to unregister devices from Bitwarden server: {}", e),
|
Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -301,11 +301,11 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
if CONFIG.signups_verify() && !email_verified {
|
if CONFIG.signups_verify() && !email_verified {
|
||||||
if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await {
|
if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await {
|
||||||
error!("Error sending welcome email: {:#?}", e);
|
error!("Error sending welcome email: {e:#?}");
|
||||||
}
|
}
|
||||||
user.last_verifying_at = Some(user.created_at);
|
user.last_verifying_at = Some(user.created_at);
|
||||||
} else if let Err(e) = mail::send_welcome(&user.email).await {
|
} else if let Err(e) = mail::send_welcome(&user.email).await {
|
||||||
error!("Error sending welcome email: {:#?}", e);
|
error!("Error sending welcome email: {e:#?}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
|
if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
|
||||||
|
@ -788,10 +788,10 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn
|
||||||
|
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
if let Err(e) = mail::send_change_email(&data.new_email, &token).await {
|
if let Err(e) = mail::send_change_email(&data.new_email, &token).await {
|
||||||
error!("Error sending change-email email: {:#?}", e);
|
error!("Error sending change-email email: {e:#?}");
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token);
|
debug!("Email change request for user ({}) to email ({}) with token ({token})", user.uuid, data.new_email);
|
||||||
}
|
}
|
||||||
|
|
||||||
user.email_new = Some(data.new_email);
|
user.email_new = Some(data.new_email);
|
||||||
|
@ -873,7 +873,7 @@ async fn post_verify_email(headers: Headers) -> EmptyResult {
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
||||||
error!("Error sending verify_email email: {:#?}", e);
|
error!("Error sending verify_email email: {e:#?}");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -904,7 +904,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbC
|
||||||
user.last_verifying_at = None;
|
user.last_verifying_at = None;
|
||||||
user.login_verify_count = 0;
|
user.login_verify_count = 0;
|
||||||
if let Err(e) = user.save(&mut conn).await {
|
if let Err(e) = user.save(&mut conn).await {
|
||||||
error!("Error saving email verification: {:#?}", e);
|
error!("Error saving email verification: {e:#?}");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -923,7 +923,7 @@ async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) ->
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
|
if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
|
||||||
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
|
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
|
||||||
error!("Error sending delete account email: {:#?}", e);
|
error!("Error sending delete account email: {e:#?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -1202,7 +1202,7 @@ async fn put_device_token(
|
||||||
if device.is_registered() {
|
if device.is_registered() {
|
||||||
// check if the new token is the same as the registered token
|
// check if the new token is the same as the registered token
|
||||||
if device.push_token.is_some() && device.push_token.unwrap() == token.clone() {
|
if device.push_token.is_some() && device.push_token.unwrap() == token.clone() {
|
||||||
debug!("Device {} is already registered and token is the same", device_id);
|
debug!("Device {device_id} is already registered and token is the same");
|
||||||
return Ok(());
|
return Ok(());
|
||||||
} else {
|
} else {
|
||||||
// Try to unregister already registered device
|
// Try to unregister already registered device
|
||||||
|
|
|
@ -381,7 +381,7 @@ pub async fn update_cipher_from_data(
|
||||||
if let Some(dt) = data.last_known_revision_date {
|
if let Some(dt) = data.last_known_revision_date {
|
||||||
match NaiveDateTime::parse_from_str(&dt, "%+") {
|
match NaiveDateTime::parse_from_str(&dt, "%+") {
|
||||||
// ISO 8601 format
|
// ISO 8601 format
|
||||||
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
|
Err(err) => warn!("Error parsing LastKnownRevisionDate '{dt}': {err}"),
|
||||||
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
|
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
|
||||||
err!("The client copy of this cipher is out of date. Resync the client and try again.")
|
err!("The client copy of this cipher is out of date. Resync the client and try again.")
|
||||||
}
|
}
|
||||||
|
@ -1105,7 +1105,7 @@ async fn post_attachment_v2(
|
||||||
Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
|
Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
|
||||||
attachment.save(&mut conn).await.expect("Error saving attachment");
|
attachment.save(&mut conn).await.expect("Error saving attachment");
|
||||||
|
|
||||||
let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
|
let url = format!("/ciphers/{}/attachment/{attachment_id}", cipher.uuid);
|
||||||
let response_key = match data.admin_request {
|
let response_key = match data.admin_request {
|
||||||
Some(b) if b => "cipherMiniResponse",
|
Some(b) if b => "cipherMiniResponse",
|
||||||
_ => "cipherResponse",
|
_ => "cipherResponse",
|
||||||
|
|
|
@ -227,7 +227,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
|
||||||
let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
|
let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
|
||||||
None => {
|
None => {
|
||||||
if !CONFIG.invitations_allowed() {
|
if !CONFIG.invitations_allowed() {
|
||||||
err!(format!("Grantee user does not exist: {}", &email))
|
err!(format!("Grantee user does not exist: {email}"))
|
||||||
}
|
}
|
||||||
|
|
||||||
if !CONFIG.is_email_domain_allowed(&email) {
|
if !CONFIG.is_email_domain_allowed(&email) {
|
||||||
|
|
|
@ -338,7 +338,7 @@ async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbC
|
||||||
Ok(Json(json!({
|
Ok(Json(json!({
|
||||||
"fileUploadType": 0, // 0 == Direct | 1 == Azure
|
"fileUploadType": 0, // 0 == Direct | 1 == Azure
|
||||||
"object": "send-fileUpload",
|
"object": "send-fileUpload",
|
||||||
"url": format!("/sends/{}/file/{}", send.uuid, file_id),
|
"url": format!("/sends/{}/file/{file_id}", send.uuid),
|
||||||
"sendResponse": send.to_json()
|
"sendResponse": send.to_json()
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
|
@ -556,7 +556,7 @@ async fn post_access_file(
|
||||||
Ok(Json(json!({
|
Ok(Json(json!({
|
||||||
"object": "send-fileDownload",
|
"object": "send-fileDownload",
|
||||||
"id": file_id,
|
"id": file_id,
|
||||||
"url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
|
"url": format!("{}/api/sends/{send_id}/{file_id}?t={token}", &host.host)
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -148,7 +148,7 @@ pub async fn validate_totp_code(
|
||||||
if generated == totp_code && time_step > twofactor.last_used {
|
if generated == totp_code && time_step > twofactor.last_used {
|
||||||
// If the step does not equals 0 the time is drifted either server or client side.
|
// If the step does not equals 0 the time is drifted either server or client side.
|
||||||
if step != 0 {
|
if step != 0 {
|
||||||
warn!("TOTP Time drift detected. The step offset is {}", step);
|
warn!("TOTP Time drift detected. The step offset is {step}");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save the last used time step so only totp time steps higher then this one are allowed.
|
// Save the last used time step so only totp time steps higher then this one are allowed.
|
||||||
|
@ -157,7 +157,7 @@ pub async fn validate_totp_code(
|
||||||
twofactor.save(conn).await?;
|
twofactor.save(conn).await?;
|
||||||
return Ok(());
|
return Ok(());
|
||||||
} else if generated == totp_code && time_step <= twofactor.last_used {
|
} else if generated == totp_code && time_step <= twofactor.last_used {
|
||||||
warn!("This TOTP or a TOTP code within {} steps back or forward has already been used!", steps);
|
warn!("This TOTP or a TOTP code within {steps} steps back or forward has already been used!");
|
||||||
err!(
|
err!(
|
||||||
format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip),
|
format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
|
|
|
@ -202,7 +202,7 @@ async fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData)
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
// https://duo.com/docs/authapi#api-details
|
// https://duo.com/docs/authapi#api-details
|
||||||
let url = format!("https://{}{}", &data.host, path);
|
let url = format!("https://{}{path}", &data.host);
|
||||||
let date = Utc::now().to_rfc2822();
|
let date = Utc::now().to_rfc2822();
|
||||||
let username = &data.ik;
|
let username = &data.ik;
|
||||||
let fields = [&date, method, &data.host, path, params];
|
let fields = [&date, method, &data.host, path, params];
|
||||||
|
@ -274,9 +274,9 @@ pub async fn generate_duo_signature(email: &str, conn: &mut DbConn) -> ApiResult
|
||||||
|
|
||||||
fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64) -> String {
|
fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64) -> String {
|
||||||
let val = format!("{email}|{ikey}|{expire}");
|
let val = format!("{email}|{ikey}|{expire}");
|
||||||
let cookie = format!("{}|{}", prefix, BASE64.encode(val.as_bytes()));
|
let cookie = format!("{prefix}|{}", BASE64.encode(val.as_bytes()));
|
||||||
|
|
||||||
format!("{}|{}", cookie, crypto::hmac_sign(key, &cookie))
|
format!("{cookie}|{}", crypto::hmac_sign(key, &cookie))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
|
pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
|
||||||
|
|
|
@ -182,7 +182,7 @@ impl DuoClient {
|
||||||
HealthCheckResponse::HealthFail {
|
HealthCheckResponse::HealthFail {
|
||||||
message,
|
message,
|
||||||
message_detail,
|
message_detail,
|
||||||
} => err!(format!("Duo health check FAIL response, msg: {}, detail: {}", message, message_detail)),
|
} => err!(format!("Duo health check FAIL response, msg: {message}, detail: {message_detail}")),
|
||||||
};
|
};
|
||||||
|
|
||||||
if health_stat != "OK" {
|
if health_stat != "OK" {
|
||||||
|
@ -275,7 +275,7 @@ impl DuoClient {
|
||||||
|
|
||||||
let status_code = res.status();
|
let status_code = res.status();
|
||||||
if status_code != StatusCode::OK {
|
if status_code != StatusCode::OK {
|
||||||
err!(format!("Failure response from Duo: {}", status_code))
|
err!(format!("Failure response from Duo: {status_code}"))
|
||||||
}
|
}
|
||||||
|
|
||||||
let response: IdTokenResponse = match res.json::<IdTokenResponse>().await {
|
let response: IdTokenResponse = match res.json::<IdTokenResponse>().await {
|
||||||
|
@ -478,7 +478,7 @@ pub async fn validate_duo_login(
|
||||||
Err(e) => return Err(e),
|
Err(e) => return Err(e),
|
||||||
};
|
};
|
||||||
|
|
||||||
let d: Digest = digest(&SHA512_256, format!("{}{}", ctx.nonce, device_identifier).as_bytes());
|
let d: Digest = digest(&SHA512_256, format!("{}{device_identifier}", ctx.nonce).as_bytes());
|
||||||
let hash: String = HEXLOWER.encode(d.as_ref());
|
let hash: String = HEXLOWER.encode(d.as_ref());
|
||||||
|
|
||||||
match client.exchange_authz_code_for_result(code, email, hash.as_str()).await {
|
match client.exchange_authz_code_for_result(code, email, hash.as_str()).await {
|
||||||
|
|
|
@ -210,7 +210,7 @@ pub async fn validate_email_code_str(
|
||||||
.map_res("Two factor not found")?;
|
.map_res("Two factor not found")?;
|
||||||
let Some(issued_token) = &email_data.last_token else {
|
let Some(issued_token) = &email_data.last_token else {
|
||||||
err!(
|
err!(
|
||||||
format!("No token available! IP: {}", ip),
|
format!("No token available! IP: {ip}"),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn2fa
|
event: EventType::UserFailedLogIn2fa
|
||||||
}
|
}
|
||||||
|
@ -226,7 +226,7 @@ pub async fn validate_email_code_str(
|
||||||
twofactor.save(conn).await?;
|
twofactor.save(conn).await?;
|
||||||
|
|
||||||
err!(
|
err!(
|
||||||
format!("Token is invalid! IP: {}", ip),
|
format!("Token is invalid! IP: {ip}"),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn2fa
|
event: EventType::UserFailedLogIn2fa
|
||||||
}
|
}
|
||||||
|
@ -329,7 +329,7 @@ pub fn obscure_email(email: &str) -> String {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
format!("{}@{}", new_name, &domain)
|
format!("{new_name}@{domain}")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn find_and_activate_email_2fa(user_id: &UserId, conn: &mut DbConn) -> EmptyResult {
|
pub async fn find_and_activate_email_2fa(user_id: &UserId, conn: &mut DbConn) -> EmptyResult {
|
||||||
|
|
|
@ -69,12 +69,12 @@ static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+
|
||||||
#[get("/<domain>/icon.png")]
|
#[get("/<domain>/icon.png")]
|
||||||
fn icon_external(domain: &str) -> Option<Redirect> {
|
fn icon_external(domain: &str) -> Option<Redirect> {
|
||||||
if !is_valid_domain(domain) {
|
if !is_valid_domain(domain) {
|
||||||
warn!("Invalid domain: {}", domain);
|
warn!("Invalid domain: {domain}");
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
if should_block_address(domain) {
|
if should_block_address(domain) {
|
||||||
warn!("Blocked address: {}", domain);
|
warn!("Blocked address: {domain}");
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -96,7 +96,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
|
||||||
const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png");
|
const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png");
|
||||||
|
|
||||||
if !is_valid_domain(domain) {
|
if !is_valid_domain(domain) {
|
||||||
warn!("Invalid domain: {}", domain);
|
warn!("Invalid domain: {domain}");
|
||||||
return Cached::ttl(
|
return Cached::ttl(
|
||||||
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
||||||
CONFIG.icon_cache_negttl(),
|
CONFIG.icon_cache_negttl(),
|
||||||
|
@ -105,7 +105,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
|
||||||
}
|
}
|
||||||
|
|
||||||
if should_block_address(domain) {
|
if should_block_address(domain) {
|
||||||
warn!("Blocked address: {}", domain);
|
warn!("Blocked address: {domain}");
|
||||||
return Cached::ttl(
|
return Cached::ttl(
|
||||||
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
||||||
CONFIG.icon_cache_negttl(),
|
CONFIG.icon_cache_negttl(),
|
||||||
|
@ -130,7 +130,7 @@ fn is_valid_domain(domain: &str) -> bool {
|
||||||
|
|
||||||
// If parsing the domain fails using Url, it will not work with reqwest.
|
// If parsing the domain fails using Url, it will not work with reqwest.
|
||||||
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) {
|
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) {
|
||||||
debug!("Domain parse error: '{}' - {:?}", domain, parse_error);
|
debug!("Domain parse error: '{domain}' - {parse_error:?}");
|
||||||
return false;
|
return false;
|
||||||
} else if domain.is_empty()
|
} else if domain.is_empty()
|
||||||
|| domain.contains("..")
|
|| domain.contains("..")
|
||||||
|
@ -139,18 +139,17 @@ fn is_valid_domain(domain: &str) -> bool {
|
||||||
|| domain.ends_with('-')
|
|| domain.ends_with('-')
|
||||||
{
|
{
|
||||||
debug!(
|
debug!(
|
||||||
"Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'",
|
"Domain validation error: '{domain}' is either empty, contains '..', starts with an '.', starts or ends with a '-'"
|
||||||
domain
|
|
||||||
);
|
);
|
||||||
return false;
|
return false;
|
||||||
} else if domain.len() > 255 {
|
} else if domain.len() > 255 {
|
||||||
debug!("Domain validation error: '{}' exceeds 255 characters", domain);
|
debug!("Domain validation error: '{domain}' exceeds 255 characters");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
for c in domain.chars() {
|
for c in domain.chars() {
|
||||||
if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) {
|
if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) {
|
||||||
debug!("Domain validation error: '{}' contains an invalid character '{}'", domain, c);
|
debug!("Domain validation error: '{domain}' contains an invalid character '{c}'");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -159,7 +158,7 @@ fn is_valid_domain(domain: &str) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
||||||
let path = format!("{}/{}.png", CONFIG.icon_cache_folder(), domain);
|
let path = format!("{}/{domain}.png", CONFIG.icon_cache_folder());
|
||||||
|
|
||||||
// Check for expiration of negatively cached copy
|
// Check for expiration of negatively cached copy
|
||||||
if icon_is_negcached(&path).await {
|
if icon_is_negcached(&path).await {
|
||||||
|
@ -167,10 +166,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(icon) = get_cached_icon(&path).await {
|
if let Some(icon) = get_cached_icon(&path).await {
|
||||||
let icon_type = match get_icon_type(&icon) {
|
let icon_type = get_icon_type(&icon).unwrap_or("x-icon");
|
||||||
Some(x) => x,
|
|
||||||
_ => "x-icon",
|
|
||||||
};
|
|
||||||
return Some((icon, icon_type.to_string()));
|
return Some((icon, icon_type.to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -192,7 +188,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
warn!("Unable to download icon: {:?}", e);
|
warn!("Unable to download icon: {e:?}");
|
||||||
let miss_indicator = path + ".miss";
|
let miss_indicator = path + ".miss";
|
||||||
save_icon(&miss_indicator, &[]).await;
|
save_icon(&miss_indicator, &[]).await;
|
||||||
None
|
None
|
||||||
|
@ -234,7 +230,7 @@ async fn icon_is_negcached(path: &str) -> bool {
|
||||||
// No longer negatively cached, drop the marker
|
// No longer negatively cached, drop the marker
|
||||||
Ok(true) => {
|
Ok(true) => {
|
||||||
if let Err(e) = remove_file(&miss_indicator).await {
|
if let Err(e) = remove_file(&miss_indicator).await {
|
||||||
error!("Could not remove negative cache indicator for icon {:?}: {:?}", path, e);
|
error!("Could not remove negative cache indicator for icon {path:?}: {e:?}");
|
||||||
}
|
}
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
@ -534,10 +530,10 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
|
||||||
// Check if the icon type is allowed, else try an icon from the list.
|
// Check if the icon type is allowed, else try an icon from the list.
|
||||||
icon_type = get_icon_type(&body);
|
icon_type = get_icon_type(&body);
|
||||||
if icon_type.is_none() {
|
if icon_type.is_none() {
|
||||||
debug!("Icon from {} data:image uri, is not a valid image type", domain);
|
debug!("Icon from {domain} data:image uri, is not a valid image type");
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
info!("Extracted icon from data:image uri for {}", domain);
|
info!("Extracted icon from data:image uri for {domain}");
|
||||||
buffer = body.freeze();
|
buffer = body.freeze();
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -577,7 +573,7 @@ async fn save_icon(path: &str, icon: &[u8]) {
|
||||||
create_dir_all(&CONFIG.icon_cache_folder()).await.expect("Error creating icon cache folder");
|
create_dir_all(&CONFIG.icon_cache_folder()).await.expect("Error creating icon cache folder");
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
warn!("Unable to save icon: {:?}", e);
|
warn!("Unable to save icon: {e:?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -158,7 +158,7 @@ async fn _password_login(
|
||||||
// Get the user
|
// Get the user
|
||||||
let username = data.username.as_ref().unwrap().trim();
|
let username = data.username.as_ref().unwrap().trim();
|
||||||
let Some(mut user) = User::find_by_mail(username, conn).await else {
|
let Some(mut user) = User::find_by_mail(username, conn).await else {
|
||||||
err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
|
err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {username}.", ip.ip))
|
||||||
};
|
};
|
||||||
|
|
||||||
// Set the user_id here to be passed back used for event logging.
|
// Set the user_id here to be passed back used for event logging.
|
||||||
|
@ -168,7 +168,7 @@ async fn _password_login(
|
||||||
if !user.enabled {
|
if !user.enabled {
|
||||||
err!(
|
err!(
|
||||||
"This user has been disabled",
|
"This user has been disabled",
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
format!("IP: {}. Username: {username}.", ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn
|
event: EventType::UserFailedLogIn
|
||||||
}
|
}
|
||||||
|
@ -182,7 +182,7 @@ async fn _password_login(
|
||||||
let Some(auth_request) = AuthRequest::find_by_uuid_and_user(auth_request_id, &user.uuid, conn).await else {
|
let Some(auth_request) = AuthRequest::find_by_uuid_and_user(auth_request_id, &user.uuid, conn).await else {
|
||||||
err!(
|
err!(
|
||||||
"Auth request not found. Try again.",
|
"Auth request not found. Try again.",
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
format!("IP: {}. Username: {username}.", ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn,
|
event: EventType::UserFailedLogIn,
|
||||||
}
|
}
|
||||||
|
@ -200,7 +200,7 @@ async fn _password_login(
|
||||||
{
|
{
|
||||||
err!(
|
err!(
|
||||||
"Username or access code is incorrect. Try again",
|
"Username or access code is incorrect. Try again",
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
format!("IP: {}. Username: {username}.", ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn,
|
event: EventType::UserFailedLogIn,
|
||||||
}
|
}
|
||||||
|
@ -209,7 +209,7 @@ async fn _password_login(
|
||||||
} else if !user.check_valid_password(password) {
|
} else if !user.check_valid_password(password) {
|
||||||
err!(
|
err!(
|
||||||
"Username or password is incorrect. Try again",
|
"Username or password is incorrect. Try again",
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
format!("IP: {}. Username: {username}.", ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn,
|
event: EventType::UserFailedLogIn,
|
||||||
}
|
}
|
||||||
|
@ -222,7 +222,7 @@ async fn _password_login(
|
||||||
user.set_password(password, None, false, None);
|
user.set_password(password, None, false, None);
|
||||||
|
|
||||||
if let Err(e) = user.save(conn).await {
|
if let Err(e) = user.save(conn).await {
|
||||||
error!("Error updating user: {:#?}", e);
|
error!("Error updating user: {e:#?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -241,11 +241,11 @@ async fn _password_login(
|
||||||
user.login_verify_count += 1;
|
user.login_verify_count += 1;
|
||||||
|
|
||||||
if let Err(e) = user.save(conn).await {
|
if let Err(e) = user.save(conn).await {
|
||||||
error!("Error updating user: {:#?}", e);
|
error!("Error updating user: {e:#?}");
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
||||||
error!("Error auto-sending email verification email: {:#?}", e);
|
error!("Error auto-sending email verification email: {e:#?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -253,7 +253,7 @@ async fn _password_login(
|
||||||
// We still want the login to fail until they actually verified the email address
|
// We still want the login to fail until they actually verified the email address
|
||||||
err!(
|
err!(
|
||||||
"Please verify your email before trying again.",
|
"Please verify your email before trying again.",
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
format!("IP: {}. Username: {username}.", ip.ip),
|
||||||
ErrorEvent {
|
ErrorEvent {
|
||||||
event: EventType::UserFailedLogIn
|
event: EventType::UserFailedLogIn
|
||||||
}
|
}
|
||||||
|
@ -266,7 +266,7 @@ async fn _password_login(
|
||||||
|
|
||||||
if CONFIG.mail_enabled() && new_device {
|
if CONFIG.mail_enabled() && new_device {
|
||||||
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
|
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
|
||||||
error!("Error sending new device email: {:#?}", e);
|
error!("Error sending new device email: {e:#?}");
|
||||||
|
|
||||||
if CONFIG.require_device_email() {
|
if CONFIG.require_device_email() {
|
||||||
err!(
|
err!(
|
||||||
|
@ -352,7 +352,7 @@ async fn _password_login(
|
||||||
result["TwoFactorToken"] = Value::String(token);
|
result["TwoFactorToken"] = Value::String(token);
|
||||||
}
|
}
|
||||||
|
|
||||||
info!("User {} logged in successfully. IP: {}", username, ip.ip);
|
info!("User {username} logged in successfully. IP: {}", ip.ip);
|
||||||
Ok(Json(result))
|
Ok(Json(result))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -420,7 +420,7 @@ async fn _user_api_key_login(
|
||||||
if CONFIG.mail_enabled() && new_device {
|
if CONFIG.mail_enabled() && new_device {
|
||||||
let now = Utc::now().naive_utc();
|
let now = Utc::now().naive_utc();
|
||||||
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
|
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
|
||||||
error!("Error sending new device email: {:#?}", e);
|
error!("Error sending new device email: {e:#?}");
|
||||||
|
|
||||||
if CONFIG.require_device_email() {
|
if CONFIG.require_device_email() {
|
||||||
err!(
|
err!(
|
||||||
|
|
|
@ -257,7 +257,7 @@ async fn send_to_push_relay(notification_data: Value) {
|
||||||
let auth_push_token = match get_auth_push_token().await {
|
let auth_push_token = match get_auth_push_token().await {
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
debug!("Could not get the auth push token: {}", e);
|
debug!("Could not get the auth push token: {e}");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -267,7 +267,7 @@ async fn send_to_push_relay(notification_data: Value) {
|
||||||
let req = match make_http_request(Method::POST, &(CONFIG.push_relay_uri() + "/push/send")) {
|
let req = match make_http_request(Method::POST, &(CONFIG.push_relay_uri() + "/push/send")) {
|
||||||
Ok(r) => r,
|
Ok(r) => r,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
error!("An error occurred while sending a send update to the push relay: {}", e);
|
error!("An error occurred while sending a send update to the push relay: {e}");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -280,7 +280,7 @@ async fn send_to_push_relay(notification_data: Value) {
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
error!("An error occurred while sending a send update to the push relay: {}", e);
|
error!("An error occurred while sending a send update to the push relay: {e}");
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -289,7 +289,7 @@ pub fn generate_organization_api_key_login_claims(
|
||||||
exp: (time_now + TimeDelta::try_hours(1).unwrap()).timestamp(),
|
exp: (time_now + TimeDelta::try_hours(1).unwrap()).timestamp(),
|
||||||
iss: JWT_ORG_API_KEY_ISSUER.to_string(),
|
iss: JWT_ORG_API_KEY_ISSUER.to_string(),
|
||||||
sub: org_api_key_uuid,
|
sub: org_api_key_uuid,
|
||||||
client_id: format!("organization.{}", org_id),
|
client_id: format!("organization.{org_id}"),
|
||||||
client_sub: org_id,
|
client_sub: org_id,
|
||||||
scope: vec!["api.organization".into()],
|
scope: vec!["api.organization".into()],
|
||||||
}
|
}
|
||||||
|
@ -547,7 +547,7 @@ impl<'r> FromRequest<'r> for Headers {
|
||||||
let mut user = user;
|
let mut user = user;
|
||||||
user.reset_stamp_exception();
|
user.reset_stamp_exception();
|
||||||
if let Err(e) = user.save(&mut conn).await {
|
if let Err(e) = user.save(&mut conn).await {
|
||||||
error!("Error updating user: {:#?}", e);
|
error!("Error updating user: {e:#?}");
|
||||||
}
|
}
|
||||||
err_handler!("Stamp exception is expired")
|
err_handler!("Stamp exception is expired")
|
||||||
} else if !stamp_exception.routes.contains(¤t_route.to_string()) {
|
} else if !stamp_exception.routes.contains(¤t_route.to_string()) {
|
||||||
|
@ -911,7 +911,7 @@ impl<'r> FromRequest<'r> for ClientIp {
|
||||||
None => ip,
|
None => ip,
|
||||||
}
|
}
|
||||||
.parse()
|
.parse()
|
||||||
.map_err(|_| warn!("'{}' header is malformed: {}", CONFIG.ip_header(), ip))
|
.map_err(|_| warn!("'{}' header is malformed: {ip}", CONFIG.ip_header()))
|
||||||
.ok()
|
.ok()
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -375,19 +375,19 @@ make_config! {
|
||||||
/// Data folder |> Main data folder
|
/// Data folder |> Main data folder
|
||||||
data_folder: String, false, def, "data".to_string();
|
data_folder: String, false, def, "data".to_string();
|
||||||
/// Database URL
|
/// Database URL
|
||||||
database_url: String, false, auto, |c| format!("{}/{}", c.data_folder, "db.sqlite3");
|
database_url: String, false, auto, |c| format!("{}/db.sqlite3", c.data_folder);
|
||||||
/// Icon cache folder
|
/// Icon cache folder
|
||||||
icon_cache_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "icon_cache");
|
icon_cache_folder: String, false, auto, |c| format!("{}/icon_cache", c.data_folder);
|
||||||
/// Attachments folder
|
/// Attachments folder
|
||||||
attachments_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "attachments");
|
attachments_folder: String, false, auto, |c| format!("{}/attachments", c.data_folder);
|
||||||
/// Sends folder
|
/// Sends folder
|
||||||
sends_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "sends");
|
sends_folder: String, false, auto, |c| format!("{}/sends", c.data_folder);
|
||||||
/// Temp folder |> Used for storing temporary file uploads
|
/// Temp folder |> Used for storing temporary file uploads
|
||||||
tmp_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "tmp");
|
tmp_folder: String, false, auto, |c| format!("{}/tmp", c.data_folder);
|
||||||
/// Templates folder
|
/// Templates folder
|
||||||
templates_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "templates");
|
templates_folder: String, false, auto, |c| format!("{}/templates", c.data_folder);
|
||||||
/// Session JWT key
|
/// Session JWT key
|
||||||
rsa_key_filename: String, false, auto, |c| format!("{}/{}", c.data_folder, "rsa_key");
|
rsa_key_filename: String, false, auto, |c| format!("{}/rsa_key", c.data_folder);
|
||||||
/// Web vault folder
|
/// Web vault folder
|
||||||
web_vault_folder: String, false, def, "web-vault/".to_string();
|
web_vault_folder: String, false, def, "web-vault/".to_string();
|
||||||
},
|
},
|
||||||
|
@ -1215,7 +1215,7 @@ impl Config {
|
||||||
pub fn is_email_domain_allowed(&self, email: &str) -> bool {
|
pub fn is_email_domain_allowed(&self, email: &str) -> bool {
|
||||||
let e: Vec<&str> = email.rsplitn(2, '@').collect();
|
let e: Vec<&str> = email.rsplitn(2, '@').collect();
|
||||||
if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
|
if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
|
||||||
warn!("Failed to parse email address '{}'", email);
|
warn!("Failed to parse email address '{email}'");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
let email_domain = e[0].to_lowercase();
|
let email_domain = e[0].to_lowercase();
|
||||||
|
|
|
@ -46,7 +46,7 @@ impl Attachment {
|
||||||
|
|
||||||
pub fn get_url(&self, host: &str) -> String {
|
pub fn get_url(&self, host: &str) -> String {
|
||||||
let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
|
let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
|
||||||
format!("{}/attachments/{}/{}?token={}", host, self.cipher_uuid, self.id, token)
|
format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn to_json(&self, host: &str) -> Value {
|
pub fn to_json(&self, host: &str) -> Value {
|
||||||
|
@ -117,7 +117,7 @@ impl Attachment {
|
||||||
// upstream caller has already cleaned up the file as part of
|
// upstream caller has already cleaned up the file as part of
|
||||||
// its own error handling.
|
// its own error handling.
|
||||||
Err(e) if e.kind() == ErrorKind::NotFound => {
|
Err(e) if e.kind() == ErrorKind::NotFound => {
|
||||||
debug!("File '{}' already deleted.", file_path);
|
debug!("File '{file_path}' already deleted.");
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
Err(e) => Err(e.into()),
|
Err(e) => Err(e.into()),
|
||||||
|
|
|
@ -85,7 +85,7 @@ impl Cipher {
|
||||||
let mut validation_errors = serde_json::Map::new();
|
let mut validation_errors = serde_json::Map::new();
|
||||||
let max_note_size = CONFIG._max_note_size();
|
let max_note_size = CONFIG._max_note_size();
|
||||||
let max_note_size_msg =
|
let max_note_size_msg =
|
||||||
format!("The field Notes exceeds the maximum encrypted value length of {} characters.", &max_note_size);
|
format!("The field Notes exceeds the maximum encrypted value length of {max_note_size} characters.");
|
||||||
for (index, cipher) in cipher_data.iter().enumerate() {
|
for (index, cipher) in cipher_data.iter().enumerate() {
|
||||||
// Validate the note size and if it is exceeded return a warning
|
// Validate the note size and if it is exceeded return a warning
|
||||||
if let Some(note) = &cipher.notes {
|
if let Some(note) = &cipher.notes {
|
||||||
|
|
|
@ -298,7 +298,7 @@ impl Group {
|
||||||
|
|
||||||
pub async fn update_revision(uuid: &GroupId, conn: &mut DbConn) {
|
pub async fn update_revision(uuid: &GroupId, conn: &mut DbConn) {
|
||||||
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
||||||
warn!("Failed to update revision for {}: {:#?}", uuid, e);
|
warn!("Failed to update revision for {uuid}: {e:#?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -334,7 +334,7 @@ impl User {
|
||||||
|
|
||||||
pub async fn update_uuid_revision(uuid: &UserId, conn: &mut DbConn) {
|
pub async fn update_uuid_revision(uuid: &UserId, conn: &mut DbConn) {
|
||||||
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
||||||
warn!("Failed to update revision for {}: {:#?}", uuid, e);
|
warn!("Failed to update revision for {uuid}: {e:#?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -214,7 +214,7 @@ impl Responder<'_, 'static> for Error {
|
||||||
match self.error {
|
match self.error {
|
||||||
ErrorKind::Empty(_) => {} // Don't print the error in this situation
|
ErrorKind::Empty(_) => {} // Don't print the error in this situation
|
||||||
ErrorKind::Simple(_) => {} // Don't print the error in this situation
|
ErrorKind::Simple(_) => {} // Don't print the error in this situation
|
||||||
_ => error!(target: "error", "{:#?}", self),
|
_ => error!(target: "error", "{self:#?}"),
|
||||||
};
|
};
|
||||||
|
|
||||||
let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);
|
let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);
|
||||||
|
|
24
src/mail.rs
24
src/mail.rs
|
@ -85,7 +85,7 @@ fn smtp_transport() -> AsyncSmtpTransport<Tokio1Executor> {
|
||||||
smtp_client.authentication(selected_mechanisms)
|
smtp_client.authentication(selected_mechanisms)
|
||||||
} else {
|
} else {
|
||||||
// Only show a warning, and return without setting an actual authentication mechanism
|
// Only show a warning, and return without setting an actual authentication mechanism
|
||||||
warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism);
|
warn!("No valid SMTP Auth mechanism found for '{mechanism}', using default values");
|
||||||
smtp_client
|
smtp_client
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -213,7 +213,7 @@ pub async fn send_register_verify_email(email: &str, token: &str) -> EmptyResult
|
||||||
"email/register_verify_email",
|
"email/register_verify_email",
|
||||||
json!({
|
json!({
|
||||||
// `url.Url` would place the anchor `#` after the query parameters
|
// `url.Url` would place the anchor `#` after the query parameters
|
||||||
"url": format!("{}/#/finish-signup/?{}", CONFIG.domain(), query_string),
|
"url": format!("{}/#/finish-signup/?{query_string}", CONFIG.domain()),
|
||||||
"img_src": CONFIG._smtp_img_src(),
|
"img_src": CONFIG._smtp_img_src(),
|
||||||
"email": email,
|
"email": email,
|
||||||
}),
|
}),
|
||||||
|
@ -314,7 +314,7 @@ pub async fn send_invite(
|
||||||
"email/send_org_invite",
|
"email/send_org_invite",
|
||||||
json!({
|
json!({
|
||||||
// `url.Url` would place the anchor `#` after the query parameters
|
// `url.Url` would place the anchor `#` after the query parameters
|
||||||
"url": format!("{}/#/accept-organization/?{}", CONFIG.domain(), query_string),
|
"url": format!("{}/#/accept-organization/?{query_string}", CONFIG.domain()),
|
||||||
"img_src": CONFIG._smtp_img_src(),
|
"img_src": CONFIG._smtp_img_src(),
|
||||||
"org_name": org_name,
|
"org_name": org_name,
|
||||||
}),
|
}),
|
||||||
|
@@ -615,13 +615,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
             // Match some common errors and make them more user friendly
             Err(e) => {
                 if e.is_client() {
-                    debug!("Sendmail client error: {:?}", e);
+                    debug!("Sendmail client error: {e:?}");
                     err!(format!("Sendmail client error: {e}"));
                 } else if e.is_response() {
-                    debug!("Sendmail response error: {:?}", e);
+                    debug!("Sendmail response error: {e:?}");
                     err!(format!("Sendmail response error: {e}"));
                 } else {
-                    debug!("Sendmail error: {:?}", e);
+                    debug!("Sendmail error: {e:?}");
                     err!(format!("Sendmail error: {e}"));
                 }
             }
@@ -632,13 +632,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
             // Match some common errors and make them more user friendly
             Err(e) => {
                 if e.is_client() {
-                    debug!("SMTP client error: {:#?}", e);
+                    debug!("SMTP client error: {e:#?}");
                     err!(format!("SMTP client error: {e}"));
                 } else if e.is_transient() {
-                    debug!("SMTP 4xx error: {:#?}", e);
+                    debug!("SMTP 4xx error: {e:#?}");
                     err!(format!("SMTP 4xx error: {e}"));
                 } else if e.is_permanent() {
-                    debug!("SMTP 5xx error: {:#?}", e);
+                    debug!("SMTP 5xx error: {e:#?}");
                     let mut msg = e.to_string();
                     // Add a special check for 535 to add a more descriptive message
                     if msg.contains("(535)") {
@@ -646,13 +646,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
                     }
                     err!(format!("SMTP 5xx error: {msg}"));
                 } else if e.is_timeout() {
-                    debug!("SMTP timeout error: {:#?}", e);
+                    debug!("SMTP timeout error: {e:#?}");
                     err!(format!("SMTP timeout error: {e}"));
                 } else if e.is_tls() {
-                    debug!("SMTP encryption error: {:#?}", e);
+                    debug!("SMTP encryption error: {e:#?}");
                     err!(format!("SMTP encryption error: {e}"));
                 } else {
-                    debug!("SMTP error: {:#?}", e);
+                    debug!("SMTP error: {e:#?}");
                     err!(format!("SMTP error: {e}"));
                 }
             }
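The two SMTP hunks above only inline the arguments; the error classification itself is unchanged. As a hedged sketch (not the project's code; the is_* method names are the ones visible in the hunks), the same branching could be factored into a helper that maps a lettre SMTP error to a user-facing message:

// Hypothetical helper for illustration; vaultwarden keeps this logic inline.
fn smtp_error_message(e: &lettre::transport::smtp::Error) -> String {
    if e.is_client() {
        format!("SMTP client error: {e}")
    } else if e.is_transient() {
        format!("SMTP 4xx error: {e}")
    } else if e.is_permanent() {
        format!("SMTP 5xx error: {e}")
    } else if e.is_timeout() {
        format!("SMTP timeout error: {e}")
    } else if e.is_tls() {
        format!("SMTP encryption error: {e}")
    } else {
        format!("SMTP error: {e}")
    }
}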
src/main.rs | 13
@@ -430,10 +430,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
             }
             None => error!(
                 target: "panic",
-                "thread '{}' panicked at '{}'\n{:}",
-                thread,
-                msg,
-                backtrace
+                "thread '{thread}' panicked at '{msg}'\n{backtrace:}"
             ),
         }
     }));
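For context, the panic hook whose error! call is collapsed above logs the thread name, the panic message, and a backtrace. A standard-library-only sketch of such a hook, written in the same inlined-argument style (an illustration, not the project's exact code):

use std::backtrace::Backtrace;
use std::panic;

fn main() {
    panic::set_hook(Box::new(|info| {
        // Thread name, panic payload and a captured backtrace, logged with
        // the identifiers inlined into the format string.
        let thread = std::thread::current().name().unwrap_or("<unnamed>").to_string();
        let msg = if let Some(s) = info.payload().downcast_ref::<&str>() {
            s.to_string()
        } else if let Some(s) = info.payload().downcast_ref::<String>() {
            s.clone()
        } else {
            "Box<dyn Any>".to_string()
        };
        let backtrace = Backtrace::force_capture();
        eprintln!("thread '{thread}' panicked at '{msg}'\n{backtrace}");
    }));

    panic!("demo panic");
}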
@@ -453,7 +450,7 @@ fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
     match syslog::unix(syslog_fmt) {
         Ok(sl) => logger.chain(sl),
         Err(e) => {
-            error!("Unable to connect to syslog: {:?}", e);
+            error!("Unable to connect to syslog: {e:?}");
             logger
         }
     }
@@ -469,7 +466,7 @@ async fn check_data_folder() {
     let data_folder = &CONFIG.data_folder();
     let path = Path::new(data_folder);
     if !path.exists() {
-        error!("Data folder '{}' doesn't exist.", data_folder);
+        error!("Data folder '{data_folder}' doesn't exist.");
         if is_running_in_container() {
             error!("Verify that your data volume is mounted at the correct location.");
         } else {
@@ -478,7 +475,7 @@ async fn check_data_folder() {
         exit(1);
     }
     if !path.is_dir() {
-        error!("Data folder '{}' is not a directory.", data_folder);
+        error!("Data folder '{data_folder}' is not a directory.");
         exit(1);
     }

@@ -552,7 +549,7 @@ async fn create_db_pool() -> db::DbPool {
     match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()).await {
         Ok(p) => p,
         Err(e) => {
-            error!("Error creating database pool: {:?}", e);
+            error!("Error creating database pool: {e:?}");
             exit(1);
         }
     }
@@ -61,7 +61,7 @@ app-organization-plans > form > bit-section:nth-child(2) {
 }

 /* Hide Collection Management Form */
-app-org-account form.ng-untouched:nth-child(6) {
+app-org-account form.ng-untouched:nth-child(5) {
     @extend %vw-hide;
 }

src/util.rs | 16
@@ -268,8 +268,8 @@ impl Fairing for BetterLogging {
         } else {
             "http"
         };
-        let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
-        info!(target: "start", "Rocket has launched from {}", addr);
+        let addr = format!("{scheme}://{}:{}", &config.address, &config.port);
+        info!(target: "start", "Rocket has launched from {addr}");
     }

     async fn on_request(&self, request: &mut Request<'_>, _data: &mut Data<'_>) {
@@ -283,8 +283,8 @@ impl Fairing for BetterLogging {
         let uri_subpath = uri_path_str.strip_prefix(&CONFIG.domain_path()).unwrap_or(&uri_path_str);
         if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
             match uri.query() {
-                Some(q) => info!(target: "request", "{} {}?{}", method, uri_path_str, &q[..q.len().min(30)]),
-                None => info!(target: "request", "{} {}", method, uri_path_str),
+                Some(q) => info!(target: "request", "{method} {uri_path_str}?{}", &q[..q.len().min(30)]),
+                None => info!(target: "request", "{method} {uri_path_str}"),
             };
         }
     }
@@ -299,9 +299,9 @@ impl Fairing for BetterLogging {
         if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
             let status = response.status();
             if let Some(ref route) = request.route() {
-                info!(target: "response", "{} => {}", route, status)
+                info!(target: "response", "{route} => {status}")
             } else {
-                info!(target: "response", "{}", status)
+                info!(target: "response", "{status}")
             }
         }
     }
@@ -326,7 +326,7 @@ pub fn get_display_size(size: i64) -> String {
         }
     }

-    format!("{:.2} {}", size, UNITS[unit_counter])
+    format!("{size:.2} {}", UNITS[unit_counter])
 }

 pub fn get_uuid() -> String {
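The get_display_size hunk mixes a captured identifier carrying a precision specifier ({size:.2}) with a remaining positional argument in the same format string. A small standalone sketch of that combination (the 1024 step and unit table here are assumptions for illustration):

const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

fn display_size(size: i64) -> String {
    let mut size = size as f64;
    let mut unit_counter = 0;
    while size >= 1024.0 && unit_counter < UNITS.len() - 1 {
        size /= 1024.0;
        unit_counter += 1;
    }
    // Captured identifier with a precision specifier, plus one positional argument
    format!("{size:.2} {}", UNITS[unit_counter])
}

fn main() {
    // 1.5 MiB worth of bytes renders as "1.50 MB" with this table
    println!("{}", display_size(1_572_864));
}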
@@ -699,7 +699,7 @@ where
                     return Err(e);
                 }

-                warn!("Can't connect to database, retrying: {:?}", e);
+                warn!("Can't connect to database, retrying: {e:?}");

                 sleep(Duration::from_millis(1_000)).await;
             }
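The last hunk sits inside a bounded retry loop around the database connection. A self-contained sketch of that shape (the generic signature and names here are assumptions, not the project's exact retry_db), assuming tokio's time feature for the sleep:

use std::time::Duration;
use tokio::time::sleep;

// Try an async fallible operation up to `max_tries` times, logging the error
// with an inlined `{e:?}` and sleeping one second between attempts.
async fn retry_async<F, Fut, T, E>(mut func: F, max_tries: u32) -> Result<T, E>
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Result<T, E>>,
    E: std::fmt::Debug,
{
    let mut tries = 0;
    loop {
        match func().await {
            Ok(value) => return Ok(value),
            Err(e) => {
                tries += 1;
                if tries >= max_tries {
                    return Err(e);
                }
                eprintln!("Can't connect to database, retrying: {e:?}");
                sleep(Duration::from_millis(1_000)).await;
            }
        }
    }
}

A caller would pass a closure returning a future, e.g. retry_async(|| async { pool.get().await }, 15).await, where pool and the retry count stand in for the configured equivalents.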