mirror of
https://github.com/dani-garcia/vaultwarden.git
synced 2025-08-05 10:39:07 +00:00
Release v1.34.1
-----BEGIN PGP SIGNATURE----- iQIzBAABCAAdFiEEubehCDcydr88BAb5/Ip9FMPNVDoFAmg02z4ACgkQ/Ip9FMPN VDoMvA/9GKOgrCDQUS1MaMtxvnpG4shy8lokPcvRNWc1X2fRl4j9jMmIgbmaTA7O OZVCbqEVGnyEw0cFGKMg9161G+AIBOfmbkdJ+VGOTjhTItB1IG8mW0b17HiEKoSM ixpF7M9YDZMd5BGNzALPIb2v2ig6i59oOOHIPUrGaRv6yQacOZ16ahqK9qohTxsv fLwyLVPDc/8RX9dwMbLOspGtCeoF58uTjefqQGn7mbc9AyeVftEKWlyDrYWXXXce zZx9xXjVZwmOEB7HxoMZ7onhxzUArDcruK7jIrIM7K6i7e+stjssc/2/X0ajCj4i 7od7/4entHGoz6PAoldoAZ3eO8Y1tL22kfwqAPDbr78q0w+NkVQ1K9RC5qMMLmqC GpHtFfZjiIYFu+suOX+XQVacSXfbDpknm1av5oXNLQVlE91AQZ0ln/s6oEMVZqZY aPZWgBZsjJ80mayyuT3CLsRXaqPSY9J60RKMW12wUyzLXW8dD2n6biZa7bRXo52O 87q8QODM8prJWlQjmn+qzlBSRPdaDbbxczDpjcK12h6izmQfpFv4YUWEdOcJeMAw m7MNLkI63AbF1GGHvZHFCKrWb9CFl4n14VWf8qD/PFW3AuytntLTZNbGwCuV1puU 55EMwOjqFqBjQcBM9NdUd/KUbWMJFdJQDfhyufbd7tmers3EGZU= =gIvx -----END PGP SIGNATURE----- Merge tag '1.34.1' into sso-support Release v1.34.1
This commit is contained in:
commit
a89249b817
59 changed files with 1694 additions and 1198 deletions
|
@ -348,20 +348,17 @@
|
|||
## Client Settings
|
||||
## Enable experimental feature flags for clients.
|
||||
## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3".
|
||||
## Note that clients cache the /api/config endpoint for about 1 hour and it could take some time before they are enabled or disabled!
|
||||
##
|
||||
## The following flags are available:
|
||||
## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials.
|
||||
## - "autofill-v2": Use the new autofill implementation.
|
||||
## - "browser-fileless-import": Directly import credentials from other providers without a file.
|
||||
## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension)
|
||||
## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor.
|
||||
## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension.
|
||||
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
|
||||
## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
|
||||
## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
|
||||
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.2.0)
|
||||
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.2.0)
|
||||
## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0)
|
||||
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
|
||||
## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
|
||||
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
|
||||
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
|
||||
## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0)
|
||||
# EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
|
||||
|
||||
## Require new device emails. When a user logs in an email is required to be sent.
|
||||
|
|
750
Cargo.lock
generated
750
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
32
Cargo.toml
32
Cargo.toml
|
@ -6,7 +6,7 @@ name = "vaultwarden"
|
|||
version = "1.0.0"
|
||||
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.84.0"
|
||||
rust-version = "1.85.0"
|
||||
resolver = "2"
|
||||
|
||||
repository = "https://github.com/dani-garcia/vaultwarden"
|
||||
|
@ -76,14 +76,14 @@ dashmap = "6.1.0"
|
|||
|
||||
# Async futures
|
||||
futures = "0.3.31"
|
||||
tokio = { version = "1.44.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||
tokio = { version = "1.45.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||
|
||||
# A generic serialization/deserialization framework
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
|
||||
# A safe, extensible ORM and Query builder
|
||||
diesel = { version = "2.2.9", features = ["chrono", "r2d2", "numeric"] }
|
||||
diesel = { version = "2.2.10", features = ["chrono", "r2d2", "numeric"] }
|
||||
diesel_migrations = "2.2.0"
|
||||
diesel_logger = { version = "0.4.0", optional = true }
|
||||
|
||||
|
@ -91,26 +91,26 @@ derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref"
|
|||
diesel-derive-newtype = "2.1.2"
|
||||
|
||||
# Bundled/Static SQLite
|
||||
libsqlite3-sys = { version = "0.32.0", features = ["bundled"], optional = true }
|
||||
libsqlite3-sys = { version = "0.33.0", features = ["bundled"], optional = true }
|
||||
|
||||
# Crypto-related libraries
|
||||
rand = "0.9.0"
|
||||
rand = "0.9.1"
|
||||
ring = "0.17.14"
|
||||
subtle = "2.6.1"
|
||||
|
||||
# UUID generation
|
||||
uuid = { version = "1.16.0", features = ["v4"] }
|
||||
uuid = { version = "1.17.0", features = ["v4"] }
|
||||
|
||||
# Date and time libraries
|
||||
chrono = { version = "0.4.40", features = ["clock", "serde"], default-features = false }
|
||||
chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
|
||||
chrono-tz = "0.10.3"
|
||||
time = "0.3.41"
|
||||
|
||||
# Job scheduler
|
||||
job_scheduler_ng = "2.0.5"
|
||||
job_scheduler_ng = "2.2.0"
|
||||
|
||||
# Data encoding library Hex/Base32/Base64
|
||||
data-encoding = "2.8.0"
|
||||
data-encoding = "2.9.0"
|
||||
|
||||
# JWT library
|
||||
jsonwebtoken = "9.3.1"
|
||||
|
@ -119,7 +119,7 @@ jsonwebtoken = "9.3.1"
|
|||
totp-lite = "2.0.1"
|
||||
|
||||
# Yubico Library
|
||||
yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false }
|
||||
yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
|
||||
|
||||
# WebAuthn libraries
|
||||
webauthn-rs = "0.3.2"
|
||||
|
@ -128,7 +128,7 @@ webauthn-rs = "0.3.2"
|
|||
url = "2.5.4"
|
||||
|
||||
# Email libraries
|
||||
lettre = { version = "0.11.15", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||
lettre = { version = "0.11.16", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
|
||||
email_address = "0.2.9"
|
||||
|
||||
|
@ -137,7 +137,7 @@ handlebars = { version = "6.3.2", features = ["dir_source"] }
|
|||
|
||||
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
||||
reqwest = { version = "0.12.15", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
|
||||
hickory-resolver = "0.25.1"
|
||||
hickory-resolver = "0.25.2"
|
||||
|
||||
# Favicon extraction libraries
|
||||
html5gum = "0.7.0"
|
||||
|
@ -179,17 +179,13 @@ which = "7.0.3"
|
|||
argon2 = "0.5.3"
|
||||
|
||||
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
|
||||
rpassword = "7.3.1"
|
||||
rpassword = "7.4.0"
|
||||
|
||||
# Loading a dynamic CSS Stylesheet
|
||||
grass_compiler = { version = "0.13.4", default-features = false }
|
||||
|
||||
[patch.crates-io]
|
||||
# Patch yubico to remove duplicate crates of older versions
|
||||
yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" }
|
||||
|
||||
# Strip debuginfo from the release builds
|
||||
# The symbols are the provide better panic traces
|
||||
# The debug symbols are to provide better panic traces
|
||||
# Also enable fat LTO and use 1 codegen unit for optimizations
|
||||
[profile.release]
|
||||
strip = "debuginfo"
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
---
|
||||
vault_version: "v2025.3.1"
|
||||
vault_image_digest: "sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd"
|
||||
vault_version: "v2025.5.0"
|
||||
vault_image_digest: "sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e"
|
||||
# Cross Compile Docker Helper Scripts v1.6.1
|
||||
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
|
||||
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
|
||||
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
|
||||
rust_version: 1.86.0 # Rust version to be used
|
||||
rust_version: 1.87.0 # Rust version to be used
|
||||
debian_version: bookworm # Debian release name to be used
|
||||
alpine_version: "3.21" # Alpine version to be used
|
||||
# For which platforms/architectures will we try to build images
|
||||
|
|
|
@ -19,23 +19,23 @@
|
|||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||
# click the tag name to view the digest of the image it currently points to.
|
||||
# - From the command line:
|
||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.3.1
|
||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.3.1
|
||||
# [docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd]
|
||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.5.0
|
||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.5.0
|
||||
# [docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e]
|
||||
#
|
||||
# - Conversely, to get the tag name from the digest:
|
||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd
|
||||
# [docker.io/vaultwarden/web-vault:v2025.3.1]
|
||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e
|
||||
# [docker.io/vaultwarden/web-vault:v2025.5.0]
|
||||
#
|
||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd AS vault
|
||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e AS vault
|
||||
|
||||
########################## ALPINE BUILD IMAGES ##########################
|
||||
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
|
||||
## And for Alpine we define all build images here, they will only be loaded when actually used
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.86.0 AS build_amd64
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.86.0 AS build_arm64
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.86.0 AS build_armv7
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.86.0 AS build_armv6
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.87.0 AS build_amd64
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.87.0 AS build_arm64
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.87.0 AS build_armv7
|
||||
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.87.0 AS build_armv6
|
||||
|
||||
########################## BUILD IMAGE ##########################
|
||||
# hadolint ignore=DL3006
|
||||
|
|
|
@ -19,15 +19,15 @@
|
|||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||
# click the tag name to view the digest of the image it currently points to.
|
||||
# - From the command line:
|
||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.3.1
|
||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.3.1
|
||||
# [docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd]
|
||||
# $ docker pull docker.io/vaultwarden/web-vault:v2025.5.0
|
||||
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.5.0
|
||||
# [docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e]
|
||||
#
|
||||
# - Conversely, to get the tag name from the digest:
|
||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd
|
||||
# [docker.io/vaultwarden/web-vault:v2025.3.1]
|
||||
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e
|
||||
# [docker.io/vaultwarden/web-vault:v2025.5.0]
|
||||
#
|
||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:5b11739052c26dc3c2135b28dc5b072bc607f870a3e81fbbcc72e0cd1f124bcd AS vault
|
||||
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:a0a377b810e66a4ebf1416f732d2be06f3262bf5a5238695af88d3ec6871cc0e AS vault
|
||||
|
||||
########################## Cross Compile Docker Helper Scripts ##########################
|
||||
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
|
||||
|
@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd
|
|||
|
||||
########################## BUILD IMAGE ##########################
|
||||
# hadolint ignore=DL3006
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.86.0-slim-bookworm AS build
|
||||
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.87.0-slim-bookworm AS build
|
||||
COPY --from=xx / /
|
||||
ARG TARGETARCH
|
||||
ARG TARGETVARIANT
|
||||
|
|
|
@ -10,7 +10,7 @@ proc-macro = true
|
|||
|
||||
[dependencies]
|
||||
quote = "1.0.40"
|
||||
syn = "2.0.100"
|
||||
syn = "2.0.101"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
[toolchain]
|
||||
channel = "1.86.0"
|
||||
channel = "1.87.0"
|
||||
components = [ "rustfmt", "clippy" ]
|
||||
profile = "minimal"
|
||||
|
|
|
@ -103,7 +103,7 @@ const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000";
|
|||
pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000";
|
||||
|
||||
fn admin_path() -> String {
|
||||
format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
|
||||
format!("{}{ADMIN_PATH}", CONFIG.domain_path())
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@ -207,7 +207,7 @@ fn post_admin_login(
|
|||
|
||||
cookies.add(cookie);
|
||||
if let Some(redirect) = redirect {
|
||||
Ok(Redirect::to(format!("{}{}", admin_path(), redirect)))
|
||||
Ok(Redirect::to(format!("{}{redirect}", admin_path())))
|
||||
} else {
|
||||
Err(AdminResponse::Ok(render_admin_page()))
|
||||
}
|
||||
|
@ -448,13 +448,13 @@ async fn delete_sso_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -
|
|||
async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
|
||||
nt.send_logout(&user, None).await;
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
|
||||
match unregister_push_device(device.push_uuid).await {
|
||||
match unregister_push_device(&device.push_uuid).await {
|
||||
Ok(r) => r,
|
||||
Err(e) => error!("Unable to unregister devices from Bitwarden server: {}", e),
|
||||
Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@ -474,7 +474,7 @@ async fn disable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt:
|
|||
|
||||
let save_result = user.save(&mut conn).await;
|
||||
|
||||
nt.send_logout(&user, None).await;
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -618,20 +618,14 @@ struct GitCommit {
|
|||
sha: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct TimeApi {
|
||||
year: u16,
|
||||
month: u8,
|
||||
day: u8,
|
||||
hour: u8,
|
||||
minute: u8,
|
||||
seconds: u8,
|
||||
}
|
||||
|
||||
async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
|
||||
Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.json::<T>().await?)
|
||||
}
|
||||
|
||||
async fn get_text_api(url: &str) -> Result<String, Error> {
|
||||
Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.text().await?)
|
||||
}
|
||||
|
||||
async fn has_http_access() -> bool {
|
||||
let Ok(req) = make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") else {
|
||||
return false;
|
||||
|
@ -643,9 +637,10 @@ async fn has_http_access() -> bool {
|
|||
}
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already.
|
||||
/// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit.
|
||||
#[cached(time = 300, sync_writes = "default")]
|
||||
/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already
|
||||
/// It will cache this function for 600 seconds (10 minutes) which should prevent the exhaustion of the rate limit
|
||||
/// Any cache will be lost if Vaultwarden is restarted
|
||||
#[cached(time = 600, sync_writes = "default")]
|
||||
async fn get_release_info(has_http_access: bool, running_within_container: bool) -> (String, String, String) {
|
||||
// If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway.
|
||||
if has_http_access {
|
||||
|
@ -663,7 +658,7 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool)
|
|||
}
|
||||
_ => "-".to_string(),
|
||||
},
|
||||
// Do not fetch the web-vault version when running within a container.
|
||||
// Do not fetch the web-vault version when running within a container
|
||||
// The web-vault version is embedded within the container it self, and should not be updated manually
|
||||
if running_within_container {
|
||||
"-".to_string()
|
||||
|
@ -685,17 +680,18 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool)
|
|||
|
||||
async fn get_ntp_time(has_http_access: bool) -> String {
|
||||
if has_http_access {
|
||||
if let Ok(ntp_time) = get_json_api::<TimeApi>("https://www.timeapi.io/api/Time/current/zone?timeZone=UTC").await
|
||||
{
|
||||
return format!(
|
||||
"{year}-{month:02}-{day:02} {hour:02}:{minute:02}:{seconds:02} UTC",
|
||||
year = ntp_time.year,
|
||||
month = ntp_time.month,
|
||||
day = ntp_time.day,
|
||||
hour = ntp_time.hour,
|
||||
minute = ntp_time.minute,
|
||||
seconds = ntp_time.seconds
|
||||
);
|
||||
if let Ok(cf_trace) = get_text_api("https://cloudflare.com/cdn-cgi/trace").await {
|
||||
for line in cf_trace.lines() {
|
||||
if let Some((key, value)) = line.split_once('=') {
|
||||
if key == "ts" {
|
||||
let ts = value.split_once('.').map_or(value, |(s, _)| s);
|
||||
if let Ok(dt) = chrono::DateTime::parse_from_str(ts, "%s") {
|
||||
return dt.format("%Y-%m-%d %H:%M:%S UTC").to_string();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
String::from("Unable to fetch NTP time.")
|
||||
|
@ -728,6 +724,16 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||
// Get current running versions
|
||||
let web_vault_version = get_web_vault_version();
|
||||
|
||||
// Check if the running version is newer than the latest stable released version
|
||||
let web_vault_pre_release = if let Ok(web_ver_match) = semver::VersionReq::parse(&format!(">{latest_web_build}")) {
|
||||
web_ver_match.matches(
|
||||
&semver::Version::parse(&web_vault_version).unwrap_or_else(|_| semver::Version::parse("2025.1.1").unwrap()),
|
||||
)
|
||||
} else {
|
||||
error!("Unable to parse latest_web_build: '{latest_web_build}'");
|
||||
false
|
||||
};
|
||||
|
||||
let diagnostics_json = json!({
|
||||
"dns_resolved": dns_resolved,
|
||||
"current_release": VERSION,
|
||||
|
@ -736,6 +742,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||
"web_vault_enabled": &CONFIG.web_vault_enabled(),
|
||||
"web_vault_version": web_vault_version,
|
||||
"latest_web_build": latest_web_build,
|
||||
"web_vault_pre_release": web_vault_pre_release,
|
||||
"running_within_container": running_within_container,
|
||||
"container_base_image": if running_within_container { container_base_image() } else { "Not applicable" },
|
||||
"has_http_access": has_http_access,
|
||||
|
@ -751,6 +758,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||
"overrides": &CONFIG.get_overrides().join(", "),
|
||||
"host_arch": env::consts::ARCH,
|
||||
"host_os": env::consts::OS,
|
||||
"tz_env": env::var("TZ").unwrap_or_default(),
|
||||
"server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
|
||||
"server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference
|
||||
"ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference
|
||||
|
|
|
@ -143,9 +143,8 @@ async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &mut DbCon
|
|||
if CONFIG.email_2fa_enforce_on_verified_invite() {
|
||||
return true;
|
||||
}
|
||||
if member_id.is_some() {
|
||||
return OrgPolicy::is_enabled_for_member(&member_id.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn)
|
||||
.await;
|
||||
if let Some(member_id) = member_id {
|
||||
return OrgPolicy::is_enabled_for_member(&member_id, OrgPolicyType::TwoFactorAuthentication, conn).await;
|
||||
}
|
||||
false
|
||||
}
|
||||
|
@ -313,11 +312,11 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
|
|||
if CONFIG.mail_enabled() {
|
||||
if CONFIG.signups_verify() && !email_verified {
|
||||
if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await {
|
||||
error!("Error sending welcome email: {:#?}", e);
|
||||
error!("Error sending welcome email: {e:#?}");
|
||||
}
|
||||
user.last_verifying_at = Some(user.created_at);
|
||||
} else if let Err(e) = mail::send_welcome(&user.email).await {
|
||||
error!("Error sending welcome email: {:#?}", e);
|
||||
error!("Error sending welcome email: {e:#?}");
|
||||
}
|
||||
|
||||
if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
|
||||
|
@ -420,7 +419,6 @@ async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
|
|||
#[serde(rename_all = "camelCase")]
|
||||
struct ProfileData {
|
||||
// culture: String, // Ignored, always use en-US
|
||||
// masterPasswordHint: Option<String>, // Ignored, has been moved to ChangePassData
|
||||
name: String,
|
||||
}
|
||||
|
||||
|
@ -546,7 +544,7 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D
|
|||
// Prevent logging out the client where the user requested this endpoint from.
|
||||
// If you do logout the user it will causes issues at the client side.
|
||||
// Adding the device uuid will prevent this.
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -606,7 +604,7 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn,
|
|||
user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
|
||||
let save_result = user.save(&mut conn).await;
|
||||
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -818,7 +816,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||
// Prevent logging out the client where the user requested this endpoint from.
|
||||
// If you do logout the user it will causes issues at the client side.
|
||||
// Adding the device uuid will prevent this.
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -834,7 +832,7 @@ async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn:
|
|||
user.reset_security_stamp();
|
||||
let save_result = user.save(&mut conn).await;
|
||||
|
||||
nt.send_logout(&user, None).await;
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -860,6 +858,11 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn
|
|||
}
|
||||
|
||||
if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
|
||||
if CONFIG.mail_enabled() {
|
||||
if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
|
||||
error!("Error sending change-email-existing email: {e:#?}");
|
||||
}
|
||||
}
|
||||
err!("Email already in use");
|
||||
}
|
||||
|
||||
|
@ -871,10 +874,10 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn
|
|||
|
||||
if CONFIG.mail_enabled() {
|
||||
if let Err(e) = mail::send_change_email(&data.new_email, &token).await {
|
||||
error!("Error sending change-email email: {:#?}", e);
|
||||
error!("Error sending change-email email: {e:#?}");
|
||||
}
|
||||
} else {
|
||||
debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token);
|
||||
debug!("Email change request for user ({}) to email ({}) with token ({token})", user.uuid, data.new_email);
|
||||
}
|
||||
|
||||
user.email_new = Some(data.new_email);
|
||||
|
@ -942,7 +945,7 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbC
|
|||
|
||||
let save_result = user.save(&mut conn).await;
|
||||
|
||||
nt.send_logout(&user, None).await;
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
@ -956,7 +959,7 @@ async fn post_verify_email(headers: Headers) -> EmptyResult {
|
|||
}
|
||||
|
||||
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
||||
error!("Error sending verify_email email: {:#?}", e);
|
||||
error!("Error sending verify_email email: {e:#?}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -987,7 +990,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbC
|
|||
user.last_verifying_at = None;
|
||||
user.login_verify_count = 0;
|
||||
if let Err(e) = user.save(&mut conn).await {
|
||||
error!("Error saving email verification: {:#?}", e);
|
||||
error!("Error saving email verification: {e:#?}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -1006,7 +1009,7 @@ async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) ->
|
|||
if CONFIG.mail_enabled() {
|
||||
if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
|
||||
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
|
||||
error!("Error sending delete account email: {:#?}", e);
|
||||
error!("Error sending delete account email: {e:#?}");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -1140,7 +1143,7 @@ pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value
|
|||
}))
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Auth/Models/Request/Accounts/SecretVerificationRequestModel.cs
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct SecretVerificationRequest {
|
||||
|
@ -1154,7 +1157,7 @@ pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) ->
|
|||
user.set_password(pwd_hash, None, false, None);
|
||||
|
||||
if let Err(e) = user.save(conn).await {
|
||||
error!("Error updating user: {:#?}", e);
|
||||
error!("Error updating user: {e:#?}");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -1300,19 +1303,14 @@ async fn put_device_token(
|
|||
err!(format!("Error: device {device_id} should be present before a token can be assigned"))
|
||||
};
|
||||
|
||||
// if the device already has been registered
|
||||
if device.is_registered() {
|
||||
// check if the new token is the same as the registered token
|
||||
if device.push_token.is_some() && device.push_token.unwrap() == token.clone() {
|
||||
debug!("Device {} is already registered and token is the same", device_id);
|
||||
// Check if the new token is the same as the registered token
|
||||
// Although upstream seems to always register a device on login, we do not.
|
||||
// Unless this causes issues, lets keep it this way, else we might need to also register on every login.
|
||||
if device.push_token.as_ref() == Some(&token) {
|
||||
debug!("Device {device_id} for user {} is already registered and token is identical", headers.user.uuid);
|
||||
return Ok(());
|
||||
} else {
|
||||
// Try to unregister already registered device
|
||||
unregister_push_device(device.push_uuid).await.ok();
|
||||
}
|
||||
// clear the push_uuid
|
||||
device.push_uuid = None;
|
||||
}
|
||||
|
||||
device.push_token = Some(token);
|
||||
if let Err(e) = device.save(&mut conn).await {
|
||||
err!(format!("An error occurred while trying to save the device push token: {e}"));
|
||||
|
@ -1326,16 +1324,19 @@ async fn put_device_token(
|
|||
#[put("/devices/identifier/<device_id>/clear-token")]
|
||||
async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult {
|
||||
// This only clears push token
|
||||
// https://github.com/bitwarden/core/blob/master/src/Api/Controllers/DevicesController.cs#L109
|
||||
// https://github.com/bitwarden/core/blob/master/src/Core/Services/Implementations/DeviceService.cs#L37
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Controllers/DevicesController.cs#L215
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Services/Implementations/DeviceService.cs#L37
|
||||
// This is somehow not implemented in any app, added it in case it is required
|
||||
// 2025: Also, it looks like it only clears the first found device upstream, which is probably faulty.
|
||||
// This because currently multiple accounts could be on the same device/app and that would cause issues.
|
||||
// Vaultwarden removes the push-token for all devices, but this probably means we should also unregister all these devices.
|
||||
if !CONFIG.push_enabled() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await {
|
||||
Device::clear_push_token_by_uuid(&device_id, &mut conn).await?;
|
||||
unregister_push_device(device.push_uuid).await?;
|
||||
unregister_push_device(&device.push_uuid).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -1373,10 +1374,10 @@ async fn post_auth_request(
|
|||
};
|
||||
|
||||
// Validate device uuid and type
|
||||
match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
|
||||
Some(device) if device.atype == client_headers.device_type => {}
|
||||
let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
|
||||
Some(device) if device.atype == client_headers.device_type => device,
|
||||
_ => err!("AuthRequest doesn't exist", "Device verification failed"),
|
||||
}
|
||||
};
|
||||
|
||||
let mut auth_request = AuthRequest::new(
|
||||
user.uuid.clone(),
|
||||
|
@ -1388,7 +1389,7 @@ async fn post_auth_request(
|
|||
);
|
||||
auth_request.save(&mut conn).await?;
|
||||
|
||||
nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
|
||||
nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &mut conn).await;
|
||||
|
||||
log_user_event(
|
||||
EventType::UserRequestedDeviceApproval as i32,
|
||||
|
@ -1463,6 +1464,10 @@ async fn put_auth_request(
|
|||
err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
|
||||
};
|
||||
|
||||
if headers.device.uuid != data.device_identifier {
|
||||
err!("AuthRequest doesn't exist", "Device verification failed")
|
||||
}
|
||||
|
||||
if auth_request.approved.is_some() {
|
||||
err!("An authentication request with the same device already exists")
|
||||
}
|
||||
|
@ -1479,7 +1484,7 @@ async fn put_auth_request(
|
|||
auth_request.save(&mut conn).await?;
|
||||
|
||||
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
|
||||
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
|
||||
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &mut conn).await;
|
||||
|
||||
log_user_event(
|
||||
EventType::OrganizationUserApprovedAuthRequest as i32,
|
||||
|
|
|
@ -381,7 +381,7 @@ pub async fn update_cipher_from_data(
|
|||
if let Some(dt) = data.last_known_revision_date {
|
||||
match NaiveDateTime::parse_from_str(&dt, "%+") {
|
||||
// ISO 8601 format
|
||||
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
|
||||
Err(err) => warn!("Error parsing LastKnownRevisionDate '{dt}': {err}"),
|
||||
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
|
||||
err!("The client copy of this cipher is out of date. Resync the client and try again.")
|
||||
}
|
||||
|
@ -535,7 +535,7 @@ pub async fn update_cipher_from_data(
|
|||
ut,
|
||||
cipher,
|
||||
&cipher.update_users_revision(conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
shared_to_collections,
|
||||
conn,
|
||||
)
|
||||
|
@ -612,7 +612,7 @@ async fn post_ciphers_import(
|
|||
|
||||
let mut user = headers.user;
|
||||
user.update_revision(&mut conn).await?;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -808,7 +808,7 @@ async fn post_collections_update(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
Some(Vec::from_iter(posted_collections)),
|
||||
&mut conn,
|
||||
)
|
||||
|
@ -885,7 +885,7 @@ async fn post_collections_admin(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
Some(Vec::from_iter(posted_collections)),
|
||||
&mut conn,
|
||||
)
|
||||
|
@ -1105,7 +1105,7 @@ async fn post_attachment_v2(
|
|||
Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
|
||||
attachment.save(&mut conn).await.expect("Error saving attachment");
|
||||
|
||||
let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
|
||||
let url = format!("/ciphers/{}/attachment/{attachment_id}", cipher.uuid);
|
||||
let response_key = match data.admin_request {
|
||||
Some(b) if b => "cipherMiniResponse",
|
||||
_ => "cipherResponse",
|
||||
|
@ -1281,7 +1281,7 @@ async fn save_attachment(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
None,
|
||||
&mut conn,
|
||||
)
|
||||
|
@ -1581,8 +1581,8 @@ async fn move_cipher_selected(
|
|||
nt.send_cipher_update(
|
||||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&[user_id.clone()],
|
||||
&headers.device.uuid,
|
||||
std::slice::from_ref(&user_id),
|
||||
&headers.device,
|
||||
None,
|
||||
&mut conn,
|
||||
)
|
||||
|
@ -1629,7 +1629,7 @@ async fn delete_all(
|
|||
Some(member) => {
|
||||
if member.atype == MembershipType::Owner {
|
||||
Cipher::delete_all_by_organization(&org_data.org_id, &mut conn).await?;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;
|
||||
|
||||
log_event(
|
||||
EventType::OrganizationPurgedVault as i32,
|
||||
|
@ -1662,7 +1662,7 @@ async fn delete_all(
|
|||
}
|
||||
|
||||
user.update_revision(&mut conn).await?;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1691,7 +1691,7 @@ async fn _delete_cipher_by_uuid(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
None,
|
||||
conn,
|
||||
)
|
||||
|
@ -1702,7 +1702,7 @@ async fn _delete_cipher_by_uuid(
|
|||
UpdateType::SyncCipherDelete,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
None,
|
||||
conn,
|
||||
)
|
||||
|
@ -1767,7 +1767,7 @@ async fn _restore_cipher_by_uuid(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
None,
|
||||
conn,
|
||||
)
|
||||
|
@ -1841,7 +1841,7 @@ async fn _delete_cipher_attachment_by_id(
|
|||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&cipher.update_users_revision(conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
None,
|
||||
conn,
|
||||
)
|
||||
|
|
|
@ -227,7 +227,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
|
|||
let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
|
||||
None => {
|
||||
if !CONFIG.invitations_allowed() {
|
||||
err!(format!("Grantee user does not exist: {}", &email))
|
||||
err!(format!("Grantee user does not exist: {email}"))
|
||||
}
|
||||
|
||||
if !CONFIG.is_email_domain_allowed(&email) {
|
||||
|
|
|
@ -29,7 +29,7 @@ struct EventRange {
|
|||
continuation_token: Option<String>,
|
||||
}
|
||||
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ecf69d9cabce732cf2c57976dd9afa5728578fb/src/Api/Controllers/EventsController.cs#LL84C35-L84C41
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/EventsController.cs#L87
|
||||
#[get("/organizations/<org_id>/events?<data..>")]
|
||||
async fn get_org_events(
|
||||
org_id: OrganizationId,
|
||||
|
@ -169,8 +169,8 @@ struct EventCollection {
|
|||
}
|
||||
|
||||
// Upstream:
|
||||
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs
|
||||
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Events/Controllers/CollectController.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs
|
||||
#[post("/collect", format = "application/json", data = "<data>")]
|
||||
async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult {
|
||||
if !CONFIG.org_events_enabled() {
|
||||
|
|
|
@ -45,7 +45,7 @@ async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn
|
|||
let mut folder = Folder::new(headers.user.uuid, data.name);
|
||||
|
||||
folder.save(&mut conn).await?;
|
||||
nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await;
|
||||
nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device, &mut conn).await;
|
||||
|
||||
Ok(Json(folder.to_json()))
|
||||
}
|
||||
|
@ -78,7 +78,7 @@ async fn put_folder(
|
|||
folder.name = data.name;
|
||||
|
||||
folder.save(&mut conn).await?;
|
||||
nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await;
|
||||
nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device, &mut conn).await;
|
||||
|
||||
Ok(Json(folder.to_json()))
|
||||
}
|
||||
|
@ -97,6 +97,6 @@ async fn delete_folder(folder_id: FolderId, headers: Headers, mut conn: DbConn,
|
|||
// Delete the actual folder entry
|
||||
folder.delete(&mut conn).await?;
|
||||
|
||||
nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device.uuid, &mut conn).await;
|
||||
nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device, &mut conn).await;
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -125,7 +125,7 @@ async fn post_eq_domains(
|
|||
|
||||
user.save(&mut conn).await?;
|
||||
|
||||
nt.send_user_update(UpdateType::SyncSettings, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &mut conn).await;
|
||||
|
||||
Ok(Json(json!({})))
|
||||
}
|
||||
|
@ -200,12 +200,14 @@ fn get_api_webauthn(_headers: Headers) -> Json<Value> {
|
|||
#[get("/config")]
|
||||
fn config() -> Json<Value> {
|
||||
let domain = crate::CONFIG.domain();
|
||||
// Official available feature flags can be found here:
|
||||
// Server (v2025.5.0): https://github.com/bitwarden/server/blob/4a7db112a0952c6df8bacf36c317e9c4e58c3651/src/Core/Constants.cs#L102
|
||||
// Client (v2025.5.0): https://github.com/bitwarden/clients/blob/9df8a3cc50ed45f52513e62c23fcc8a4b745f078/libs/common/src/enums/feature-flag.enum.ts#L10
|
||||
// Android (v2025.4.0): https://github.com/bitwarden/android/blob/bee09de972c3870de0d54a0067996be473ec55c7/app/src/main/java/com/x8bit/bitwarden/data/platform/manager/model/FlagKey.kt#L27
|
||||
// iOS (v2025.4.0): https://github.com/bitwarden/ios/blob/956e05db67344c912e3a1b8cb2609165d67da1c9/BitwardenShared/Core/Platform/Models/Enum/FeatureFlag.swift#L7
|
||||
let mut feature_states =
|
||||
parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags());
|
||||
// Force the new key rotation feature
|
||||
feature_states.insert("key-rotation-improvements".to_string(), true);
|
||||
feature_states.insert("flexible-collections-v-1".to_string(), false);
|
||||
|
||||
feature_states.insert("duo-redirect".to_string(), true);
|
||||
feature_states.insert("email-verification".to_string(), true);
|
||||
feature_states.insert("unauth-ui-refresh".to_string(), true);
|
||||
|
||||
|
@ -215,7 +217,7 @@ fn config() -> Json<Value> {
|
|||
// We should make sure that we keep this updated when we support the new server features
|
||||
// Version history:
|
||||
// - Individual cipher key encryption: 2024.2.0
|
||||
"version": "2025.1.0",
|
||||
"version": "2025.4.0",
|
||||
"gitHash": option_env!("GIT_REV"),
|
||||
"server": {
|
||||
"name": "Vaultwarden",
|
||||
|
@ -230,6 +232,12 @@ fn config() -> Json<Value> {
|
|||
"identity": format!("{domain}/identity"),
|
||||
"notifications": format!("{domain}/notifications"),
|
||||
"sso": "",
|
||||
"cloudRegion": null,
|
||||
},
|
||||
// Bitwarden uses this for the self-hosted servers to indicate the default push technology
|
||||
"push": {
|
||||
"pushTechnology": 0,
|
||||
"vapidPublicKey": null
|
||||
},
|
||||
"featureStates": feature_states,
|
||||
"object": "config",
|
||||
|
|
|
@ -10,10 +10,7 @@ use crate::{
|
|||
core::{accept_org_invite, log_event, two_factor, CipherSyncData, CipherSyncType},
|
||||
EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType,
|
||||
},
|
||||
auth::{
|
||||
decode_invite, AdminHeaders, ClientVersion, Headers, ManagerHeaders, ManagerHeadersLoose, OrgMemberHeaders,
|
||||
OwnerHeaders,
|
||||
},
|
||||
auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OrgMemberHeaders, OwnerHeaders},
|
||||
db::{models::*, DbConn},
|
||||
mail,
|
||||
util::{convert_json_key_lcase_first, get_uuid, NumberOrString},
|
||||
|
@ -408,6 +405,21 @@ async fn get_org_collections_details(
|
|||
|| (CONFIG.org_groups_enabled()
|
||||
&& GroupUser::has_full_access_by_member(&org_id, &member.uuid, &mut conn).await);
|
||||
|
||||
// Get all admins, owners and managers who can manage/access all
|
||||
// Those are currently not listed in the col_users but need to be listed too.
|
||||
let manage_all_members: Vec<Value> = Membership::find_confirmed_and_manage_all_by_org(&org_id, &mut conn)
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|member| {
|
||||
json!({
|
||||
"id": member.uuid,
|
||||
"readOnly": false,
|
||||
"hidePasswords": false,
|
||||
"manage": true,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
for col in Collection::find_by_organization(&org_id, &mut conn).await {
|
||||
// check whether the current user has access to the given collection
|
||||
let assigned = has_full_access_to_org
|
||||
|
@ -416,7 +428,7 @@ async fn get_org_collections_details(
|
|||
&& GroupUser::has_access_to_collection_by_member(&col.uuid, &member.uuid, &mut conn).await);
|
||||
|
||||
// get the users assigned directly to the given collection
|
||||
let users: Vec<Value> = col_users
|
||||
let mut users: Vec<Value> = col_users
|
||||
.iter()
|
||||
.filter(|collection_member| collection_member.collection_uuid == col.uuid)
|
||||
.map(|collection_member| {
|
||||
|
@ -425,6 +437,7 @@ async fn get_org_collections_details(
|
|||
)
|
||||
})
|
||||
.collect();
|
||||
users.extend_from_slice(&manage_all_members);
|
||||
|
||||
// get the group details for the given collection
|
||||
let groups: Vec<Value> = if CONFIG.org_groups_enabled() {
|
||||
|
@ -715,6 +728,9 @@ async fn _delete_organization_collection(
|
|||
headers: &ManagerHeaders,
|
||||
conn: &mut DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let Some(collection) = Collection::find_by_uuid_and_org(col_id, org_id, conn).await else {
|
||||
err!("Collection not found", "Collection does not exist or does not belong to this organization")
|
||||
};
|
||||
|
@ -927,7 +943,7 @@ struct OrgIdData {
|
|||
|
||||
#[get("/ciphers/organization-details?<data..>")]
|
||||
async fn get_org_details(data: OrgIdData, headers: OrgMemberHeaders, mut conn: DbConn) -> JsonResult {
|
||||
if data.organization_id != headers.org_id {
|
||||
if data.organization_id != headers.membership.org_uuid {
|
||||
err_code!("Resource not found.", "Organization id's do not match", rocket::http::Status::NotFound.code);
|
||||
}
|
||||
|
||||
|
@ -1239,6 +1255,9 @@ async fn reinvite_member(
|
|||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
_reinvite_member(&org_id, &member_id, &headers.user.email, &mut conn).await
|
||||
}
|
||||
|
||||
|
@ -1424,6 +1443,9 @@ async fn _confirm_invite(
|
|||
conn: &mut DbConn,
|
||||
nt: &Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if key.is_empty() || member_id.is_empty() {
|
||||
err!("Key or UserId is not set, unable to process request");
|
||||
}
|
||||
|
@ -1487,7 +1509,7 @@ async fn _confirm_invite(
|
|||
let save_result = member_to_confirm.save(conn).await;
|
||||
|
||||
if let Some(user) = User::find_by_uuid(&member_to_confirm.user_uuid, conn).await {
|
||||
nt.send_user_update(UpdateType::SyncOrgKeys, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await;
|
||||
}
|
||||
|
||||
save_result
|
||||
|
@ -1746,6 +1768,9 @@ async fn _delete_member(
|
|||
conn: &mut DbConn,
|
||||
nt: &Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let Some(member_to_delete) = Membership::find_by_uuid_and_org(member_id, org_id, conn).await else {
|
||||
err!("User to delete isn't member of the organization")
|
||||
};
|
||||
|
@ -1774,7 +1799,7 @@ async fn _delete_member(
|
|||
.await;
|
||||
|
||||
if let Some(user) = User::find_by_uuid(&member_to_delete.user_uuid, conn).await {
|
||||
nt.send_user_update(UpdateType::SyncOrgKeys, &user).await;
|
||||
nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await;
|
||||
}
|
||||
|
||||
member_to_delete.delete(conn).await
|
||||
|
@ -1840,16 +1865,20 @@ struct RelationsData {
|
|||
value: usize,
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/ImportCiphersController.cs#L62
|
||||
#[post("/ciphers/import-organization?<query..>", data = "<data>")]
|
||||
async fn post_org_import(
|
||||
query: OrgIdData,
|
||||
data: Json<ImportData>,
|
||||
headers: AdminHeaders,
|
||||
headers: OrgMemberHeaders,
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
let data: ImportData = data.into_inner();
|
||||
let org_id = query.organization_id;
|
||||
if org_id != headers.membership.org_uuid {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let data: ImportData = data.into_inner();
|
||||
|
||||
// Validate the import before continuing
|
||||
// Bitwarden does not process the import if there is one item invalid.
|
||||
|
@ -1862,8 +1891,20 @@ async fn post_org_import(
|
|||
let mut collections: Vec<CollectionId> = Vec::with_capacity(data.collections.len());
|
||||
for col in data.collections {
|
||||
let collection_uuid = if existing_collections.contains(&col.id) {
|
||||
col.id.unwrap()
|
||||
let col_id = col.id.unwrap();
|
||||
// When not an Owner or Admin, check if the member is allowed to access the collection.
|
||||
if headers.membership.atype < MembershipType::Admin
|
||||
&& !Collection::can_access_collection(&headers.membership, &col_id, &mut conn).await
|
||||
{
|
||||
err!(Compact, "The current user isn't allowed to manage this collection")
|
||||
}
|
||||
col_id
|
||||
} else {
|
||||
// We do not allow users or managers which can not manage all collections to create new collections
|
||||
// If there is any collection other than an existing import collection, abort the import.
|
||||
if headers.membership.atype <= MembershipType::Manager && !headers.membership.has_full_access() {
|
||||
err!(Compact, "The current user isn't allowed to create new collections")
|
||||
}
|
||||
let new_collection = Collection::new(org_id.clone(), col.name, col.external_id);
|
||||
new_collection.save(&mut conn).await?;
|
||||
new_collection.uuid
|
||||
|
@ -1886,7 +1927,17 @@ async fn post_org_import(
|
|||
// Always clear folder_id's via an organization import
|
||||
cipher_data.folder_id = None;
|
||||
let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
|
||||
update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok();
|
||||
update_cipher_from_data(
|
||||
&mut cipher,
|
||||
cipher_data,
|
||||
&headers,
|
||||
Some(collections.clone()),
|
||||
&mut conn,
|
||||
&nt,
|
||||
UpdateType::None,
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
ciphers.push(cipher.uuid);
|
||||
}
|
||||
|
||||
|
@ -1917,12 +1968,6 @@ struct BulkCollectionsData {
|
|||
async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
|
||||
let data: BulkCollectionsData = data.into_inner();
|
||||
|
||||
// This feature does not seem to be active on all the clients
|
||||
// To prevent future issues, add a check to block a call when this is set to true
|
||||
if data.remove_collections {
|
||||
err!("Bulk removing of collections is not yet implemented")
|
||||
}
|
||||
|
||||
// Get all the collection available to the user in one query
|
||||
// Also filter based upon the provided collections
|
||||
let user_collections: HashMap<CollectionId, Collection> =
|
||||
|
@ -1951,10 +1996,18 @@ async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers
|
|||
// Do not abort the operation just ignore it, it could be a cipher was just deleted for example
|
||||
if let Some(cipher) = Cipher::find_by_uuid_and_org(cipher_id, &data.organization_id, &mut conn).await {
|
||||
if cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
|
||||
// When selecting a specific collection from the left filter list, and use the bulk option, you can remove an item from that collection
|
||||
// In these cases the client will call this endpoint twice, once for adding the new collections and a second for deleting.
|
||||
if data.remove_collections {
|
||||
for collection in &data.collection_ids {
|
||||
CollectionCipher::delete(&cipher.uuid, collection, &mut conn).await?;
|
||||
}
|
||||
} else {
|
||||
for collection in &data.collection_ids {
|
||||
CollectionCipher::save(&cipher.uuid, collection, &mut conn).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -2448,6 +2501,9 @@ async fn _revoke_member(
|
|||
headers: &AdminHeaders,
|
||||
conn: &mut DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
match Membership::find_by_uuid_and_org(member_id, org_id, conn).await {
|
||||
Some(mut member) if member.status > MembershipStatus::Revoked as i32 => {
|
||||
if member.user_uuid == headers.user.uuid {
|
||||
|
@ -2555,6 +2611,9 @@ async fn _restore_member(
|
|||
headers: &AdminHeaders,
|
||||
conn: &mut DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
match Membership::find_by_uuid_and_org(member_id, org_id, conn).await {
|
||||
Some(mut member) if member.status < MembershipStatus::Accepted as i32 => {
|
||||
if member.user_uuid == headers.user.uuid {
|
||||
|
@ -2602,19 +2661,28 @@ async fn _restore_member(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[get("/organizations/<org_id>/groups")]
|
||||
async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult {
|
||||
async fn get_groups_data(
|
||||
details: bool,
|
||||
org_id: OrganizationId,
|
||||
headers: ManagerHeadersLoose,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != headers.membership.org_uuid {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let groups: Vec<Value> = if CONFIG.org_groups_enabled() {
|
||||
// Group::find_by_organization(&org_id, &mut conn).await.iter().map(Group::to_json).collect::<Value>()
|
||||
let groups = Group::find_by_organization(&org_id, &mut conn).await;
|
||||
let mut groups_json = Vec::with_capacity(groups.len());
|
||||
|
||||
if details {
|
||||
for g in groups {
|
||||
groups_json.push(g.to_json_details(&mut conn).await)
|
||||
}
|
||||
} else {
|
||||
for g in groups {
|
||||
groups_json.push(g.to_json())
|
||||
}
|
||||
}
|
||||
groups_json
|
||||
} else {
|
||||
// The Bitwarden clients seem to call this API regardless of whether groups are enabled,
|
||||
|
@ -2629,9 +2697,14 @@ async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, mut co
|
|||
})))
|
||||
}
|
||||
|
||||
#[get("/organizations/<org_id>/groups")]
|
||||
async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, conn: DbConn) -> JsonResult {
|
||||
get_groups_data(false, org_id, headers, conn).await
|
||||
}
|
||||
|
||||
#[get("/organizations/<org_id>/groups/details", rank = 1)]
|
||||
async fn get_groups_details(org_id: OrganizationId, headers: ManagerHeadersLoose, conn: DbConn) -> JsonResult {
|
||||
get_groups(org_id, headers, conn).await
|
||||
get_groups_data(true, org_id, headers, conn).await
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
@ -2693,6 +2766,9 @@ async fn post_groups(
|
|||
data: Json<GroupRequest>,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if !CONFIG.org_groups_enabled() {
|
||||
err!("Group support is disabled");
|
||||
}
|
||||
|
@ -2722,6 +2798,9 @@ async fn put_group(
|
|||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if !CONFIG.org_groups_enabled() {
|
||||
err!("Group support is disabled");
|
||||
}
|
||||
|
@ -2786,7 +2865,8 @@ async fn add_update_group(
|
|||
"organizationId": group.organizations_uuid,
|
||||
"name": group.name,
|
||||
"accessAll": group.access_all,
|
||||
"externalId": group.external_id
|
||||
"externalId": group.external_id,
|
||||
"object": "group"
|
||||
})))
|
||||
}
|
||||
|
||||
|
@ -2837,6 +2917,9 @@ async fn _delete_group(
|
|||
headers: &AdminHeaders,
|
||||
conn: &mut DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if !CONFIG.org_groups_enabled() {
|
||||
err!("Group support is disabled");
|
||||
}
|
||||
|
@ -2866,6 +2949,9 @@ async fn bulk_delete_groups(
|
|||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if !CONFIG.org_groups_enabled() {
|
||||
err!("Group support is disabled");
|
||||
}
|
||||
|
@ -2929,6 +3015,9 @@ async fn put_group_members(
|
|||
data: Json<Vec<MembershipId>>,
|
||||
mut conn: DbConn,
|
||||
) -> EmptyResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
if !CONFIG.org_groups_enabled() {
|
||||
err!("Group support is disabled");
|
||||
}
|
||||
|
@ -3113,7 +3202,7 @@ async fn get_organization_public_key(
|
|||
headers: OrgMemberHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
if org_id != headers.membership.org_uuid {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else {
|
||||
|
@ -3127,7 +3216,7 @@ async fn get_organization_public_key(
|
|||
}
|
||||
|
||||
// Obsolete - Renamed to public-key (2023.8), left for backwards compatibility with older clients
|
||||
// https://github.com/bitwarden/server/blob/25dc0c9178e3e3584074bbef0d4be827b7c89415/src/Api/AdminConsole/Controllers/OrganizationsController.cs#L463-L468
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/OrganizationsController.cs#L487-L492
|
||||
#[get("/organizations/<org_id>/keys")]
|
||||
async fn get_organization_keys(org_id: OrganizationId, headers: OrgMemberHeaders, conn: DbConn) -> JsonResult {
|
||||
get_organization_public_key(org_id, headers, conn).await
|
||||
|
@ -3178,7 +3267,7 @@ async fn put_reset_password(
|
|||
user.set_password(reset_request.new_master_password_hash.as_str(), Some(reset_request.key), true, None);
|
||||
user.save(&mut conn).await?;
|
||||
|
||||
nt.send_logout(&user, None).await;
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
|
||||
log_event(
|
||||
EventType::OrganizationUserAdminResetPassword as i32,
|
||||
|
@ -3218,16 +3307,16 @@ async fn get_reset_password_details(
|
|||
|
||||
check_reset_password_applicable_and_permissions(&org_id, &member_id, &headers, &mut conn).await?;
|
||||
|
||||
// https://github.com/bitwarden/server/blob/3b50ccb9f804efaacdc46bed5b60e5b28eddefcf/src/Api/Models/Response/Organizations/OrganizationUserResponseModel.cs#L111
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Models/Response/Organizations/OrganizationUserResponseModel.cs#L190
|
||||
Ok(Json(json!({
|
||||
"object": "organizationUserResetPasswordDetails",
|
||||
"organizationUserId": member_id,
|
||||
"kdf": user.client_kdf_type,
|
||||
"kdfIterations": user.client_kdf_iter,
|
||||
"kdfMemory": user.client_kdf_memory,
|
||||
"kdfParallelism": user.client_kdf_parallelism,
|
||||
"resetPasswordKey": member.reset_password_key,
|
||||
"encryptedPrivateKey": org.private_key,
|
||||
|
||||
})))
|
||||
}
|
||||
|
||||
|
@ -3312,58 +3401,23 @@ async fn put_reset_password_enrollment(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
// This is a new function active since the v2022.9.x clients.
|
||||
// It combines the previous two calls done before.
|
||||
// We call those two functions here and combine them ourselves.
|
||||
//
|
||||
// NOTE: It seems clients can't handle uppercase-first keys!!
|
||||
// We need to convert all keys so that their first character is lowercase.
// Otherwise the export will just be an empty JSON file.
// We currently only support exports by members with Admin or Owner status.
|
||||
// Vaultwarden does not yet support exporting only managed collections!
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/OrganizationExportController.cs#L52
|
||||
#[get("/organizations/<org_id>/export")]
|
||||
async fn get_org_export(
|
||||
org_id: OrganizationId,
|
||||
headers: AdminHeaders,
|
||||
client_version: Option<ClientVersion>,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
async fn get_org_export(org_id: OrganizationId, headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
// Since version v2023.1.0 the format of the export is different.
// Also, this endpoint was created since v2022.9.0.
// Therefore, we will check for any version smaller than v2023.1.0 and return a different response.
// If we can't determine the version, we will use the latest default v2023.1.0 and higher.
// https://github.com/bitwarden/server/blob/9ca93381ce416454734418c3a9f99ab49747f1b6/src/Api/Controllers/OrganizationExportController.cs#L44
let use_list_response_model = if let Some(client_version) = client_version {
    let ver_match = semver::VersionReq::parse("<2023.1.0").unwrap();
    ver_match.matches(&client_version.0)
} else {
    false
};
|
||||
// Also both main keys here need to be lowercase, else the export will fail.
|
||||
if use_list_response_model {
|
||||
// Backwards compatible pre v2023.1.0 response
|
||||
Ok(Json(json!({
|
||||
"collections": {
|
||||
"data": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
|
||||
"object": "list",
|
||||
"continuationToken": null,
|
||||
},
|
||||
"ciphers": {
|
||||
"data": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
|
||||
"object": "list",
|
||||
"continuationToken": null,
|
||||
}
|
||||
})))
|
||||
} else {
|
||||
// v2023.1.0 and newer response
|
||||
Ok(Json(json!({
|
||||
"collections": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
|
||||
"ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
|
||||
})))
|
||||
}
|
||||
}
|
||||
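For context, here is a minimal standalone sketch of the client-version gating used in the export handler above, assuming the `semver` crate that the code already relies on; the helper name and the sample version strings are hypothetical and are not part of this commit.

use semver::{Version, VersionReq};

// Returns true when the reported client version is older than 2023.1.0 and the
// legacy "list" response shape should be used; unknown versions get the new format.
fn uses_legacy_export_format(client_version: Option<&str>) -> bool {
    let legacy = VersionReq::parse("<2023.1.0").expect("static requirement is valid");
    match client_version.and_then(|v| Version::parse(v).ok()) {
        Some(v) => legacy.matches(&v),
        None => false,
    }
}

fn main() {
    assert!(uses_legacy_export_format(Some("2022.10.1")));
    assert!(!uses_legacy_export_format(Some("2023.1.0")));
    assert!(!uses_legacy_export_format(None));
}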
|
||||
async fn _api_key(
|
||||
org_id: &OrganizationId,
|
||||
|
@ -3372,6 +3426,9 @@ async fn _api_key(
|
|||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != &headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
let data: PasswordOrOtpData = data.into_inner();
|
||||
let user = headers.user;
|
||||
|
||||
|
|
|
@ -46,7 +46,7 @@ struct OrgImportData {
|
|||
#[post("/public/organization/import", data = "<data>")]
|
||||
async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
|
||||
// Most of the logic for this function can be found here
|
||||
// https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/OrganizationService.cs#L1203
|
||||
|
||||
let org_id = token.0;
|
||||
let data = data.into_inner();
|
||||
|
|
|
@ -2,6 +2,7 @@ use std::path::Path;
|
|||
|
||||
use chrono::{DateTime, TimeDelta, Utc};
|
||||
use num_traits::ToPrimitive;
|
||||
use once_cell::sync::Lazy;
|
||||
use rocket::form::Form;
|
||||
use rocket::fs::NamedFile;
|
||||
use rocket::fs::TempFile;
|
||||
|
@ -17,6 +18,21 @@ use crate::{
|
|||
};
|
||||
|
||||
const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
|
||||
static ANON_PUSH_DEVICE: Lazy<Device> = Lazy::new(|| {
    let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z");
    Device {
        uuid: String::from("00000000-0000-0000-0000-000000000000").into(),
        created_at: dt,
        updated_at: dt,
        user_uuid: String::from("00000000-0000-0000-0000-000000000000").into(),
        name: String::new(),
        atype: 14, // 14 == Unknown Browser
        push_uuid: Some(String::from("00000000-0000-0000-0000-000000000000").into()),
        push_token: None,
        refresh_token: String::new(),
        twofactor_remember: None,
    }
});

// The max file size allowed by Bitwarden clients, plus an extra 5% to avoid issues
const SIZE_525_MB: i64 = 550_502_400;
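As a side note, a minimal sketch of the `once_cell::sync::Lazy` pattern that ANON_PUSH_DEVICE relies on above, shown with a hypothetical placeholder type rather than the real `Device` struct.

use once_cell::sync::Lazy;

struct Placeholder {
    id: String,
}

// The closure runs exactly once, on first access, even when reached from multiple threads.
static ANON_PLACEHOLDER: Lazy<Placeholder> = Lazy::new(|| Placeholder {
    id: String::from("00000000-0000-0000-0000-000000000000"),
});

fn main() {
    println!("{}", ANON_PLACEHOLDER.id);
}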
|
@ -182,7 +198,7 @@ async fn post_send(data: Json<SendData>, headers: Headers, mut conn: DbConn, nt:
|
|||
UpdateType::SyncSendCreate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -204,6 +220,8 @@ struct UploadDataV2<'f> {
|
|||
// @deprecated Mar 25 2021: This method has been deprecated in favor of direct uploads (v2).
|
||||
// This method still exists to support older clients, probably need to remove it sometime.
|
||||
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L164-L167
|
||||
// 2025: This endpoint doesn't seem to exist anymore in the latest version
|
||||
// See: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/SendsController.cs
|
||||
#[post("/sends/file", format = "multipart/form-data", data = "<data>")]
|
||||
async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
|
||||
enforce_disable_send_policy(&headers, &mut conn).await?;
|
||||
|
@ -272,7 +290,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
|
|||
UpdateType::SyncSendCreate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -280,7 +298,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
|
|||
Ok(Json(send.to_json()))
|
||||
}
|
||||
|
||||
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/SendsController.cs#L165
|
||||
#[post("/sends/file/v2", data = "<data>")]
|
||||
async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
enforce_disable_send_policy(&headers, &mut conn).await?;
|
||||
|
@ -338,7 +356,7 @@ async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbC
|
|||
Ok(Json(json!({
|
||||
"fileUploadType": 0, // 0 == Direct | 1 == Azure
|
||||
"object": "send-fileUpload",
|
||||
"url": format!("/sends/{}/file/{}", send.uuid, file_id),
|
||||
"url": format!("/sends/{}/file/{file_id}", send.uuid),
|
||||
"sendResponse": send.to_json()
|
||||
})))
|
||||
}
|
||||
|
@ -351,7 +369,7 @@ pub struct SendFileData {
|
|||
fileName: String,
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/66f95d1c443490b653e5a15d32977e2f5a3f9e32/src/Api/Tools/Controllers/SendsController.cs#L250
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/SendsController.cs#L195
|
||||
#[post("/sends/<send_id>/file/<file_id>", format = "multipart/form-data", data = "<data>")]
|
||||
async fn post_send_file_v2_data(
|
||||
send_id: SendId,
|
||||
|
@ -424,7 +442,7 @@ async fn post_send_file_v2_data(
|
|||
UpdateType::SyncSendCreate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -489,7 +507,7 @@ async fn post_access(
|
|||
UpdateType::SyncSendUpdate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&String::from("00000000-0000-0000-0000-000000000000").into(),
|
||||
&ANON_PUSH_DEVICE,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -546,7 +564,7 @@ async fn post_access_file(
|
|||
UpdateType::SyncSendUpdate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&String::from("00000000-0000-0000-0000-000000000000").into(),
|
||||
&ANON_PUSH_DEVICE,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -556,7 +574,7 @@ async fn post_access_file(
|
|||
Ok(Json(json!({
|
||||
"object": "send-fileDownload",
|
||||
"id": file_id,
|
||||
"url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
|
||||
"url": format!("{}/api/sends/{send_id}/{file_id}?t={token}", &host.host)
|
||||
})))
|
||||
}
|
||||
|
||||
|
@ -645,7 +663,7 @@ pub async fn update_send_from_data(
|
|||
|
||||
send.save(conn).await?;
|
||||
if ut != UpdateType::None {
|
||||
nt.send_send_update(ut, send, &send.update_users_revision(conn).await, &headers.device.uuid, conn).await;
|
||||
nt.send_send_update(ut, send, &send.update_users_revision(conn).await, &headers.device, conn).await;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -661,7 +679,7 @@ async fn delete_send(send_id: SendId, headers: Headers, mut conn: DbConn, nt: No
|
|||
UpdateType::SyncSendDelete,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
@ -683,7 +701,7 @@ async fn put_remove_password(send_id: SendId, headers: Headers, mut conn: DbConn
|
|||
UpdateType::SyncSendUpdate,
|
||||
&send,
|
||||
&send.update_users_revision(&mut conn).await,
|
||||
&headers.device.uuid,
|
||||
&headers.device,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
|
|
|
@ -34,6 +34,10 @@ async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers,
|
|||
_ => (false, crypto::encode_random_bytes::<20>(BASE32)),
|
||||
};
|
||||
|
||||
// Upstream seems to also return `userVerificationToken`, but it doesn't seem to be used at all.
// It should help prevent TOTP disclosure if someone keeps their vault unlocked.
// Since it doesn't seem to be used, and also does not cause any issues, let's leave it out of the response.
|
||||
// See: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Auth/Controllers/TwoFactorController.cs#L94
|
||||
Ok(Json(json!({
|
||||
"enabled": enabled,
|
||||
"key": key,
|
||||
|
@ -148,7 +152,7 @@ pub async fn validate_totp_code(
|
|||
if generated == totp_code && time_step > twofactor.last_used {
|
||||
// If the step does not equal 0, the time has drifted on either the server or client side.
|
||||
if step != 0 {
|
||||
warn!("TOTP Time drift detected. The step offset is {}", step);
|
||||
warn!("TOTP Time drift detected. The step offset is {step}");
|
||||
}
|
||||
|
||||
// Save the last used time step so only TOTP time steps higher than this one are allowed.
|
||||
|
@ -157,7 +161,7 @@ pub async fn validate_totp_code(
|
|||
twofactor.save(conn).await?;
|
||||
return Ok(());
|
||||
} else if generated == totp_code && time_step <= twofactor.last_used {
|
||||
warn!("This TOTP or a TOTP code within {} steps back or forward has already been used!", steps);
|
||||
warn!("This TOTP or a TOTP code within {steps} steps back or forward has already been used!");
|
||||
err!(
|
||||
format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip),
|
||||
ErrorEvent {
|
||||
|
|
|
@ -118,6 +118,9 @@ async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbCo
|
|||
} else {
|
||||
json!({
|
||||
"enabled": enabled,
|
||||
"host": null,
|
||||
"clientSecret": null,
|
||||
"clientId": null,
|
||||
"object": "twoFactorDuo"
|
||||
})
|
||||
};
|
||||
|
@ -202,7 +205,7 @@ async fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData)
|
|||
use std::str::FromStr;
|
||||
|
||||
// https://duo.com/docs/authapi#api-details
|
||||
let url = format!("https://{}{}", &data.host, path);
|
||||
let url = format!("https://{}{path}", &data.host);
|
||||
let date = Utc::now().to_rfc2822();
|
||||
let username = &data.ik;
|
||||
let fields = [&date, method, &data.host, path, params];
|
||||
|
@ -274,9 +277,9 @@ pub async fn generate_duo_signature(email: &str, conn: &mut DbConn) -> ApiResult
|
|||
|
||||
fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64) -> String {
|
||||
let val = format!("{email}|{ikey}|{expire}");
|
||||
let cookie = format!("{}|{}", prefix, BASE64.encode(val.as_bytes()));
|
||||
let cookie = format!("{prefix}|{}", BASE64.encode(val.as_bytes()));
|
||||
|
||||
format!("{}|{}", cookie, crypto::hmac_sign(key, &cookie))
|
||||
format!("{cookie}|{}", crypto::hmac_sign(key, &cookie))
|
||||
}
|
||||
|
||||
pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
|
||||
|
|
|
@ -21,7 +21,7 @@ use url::Url;
|
|||
|
||||
// The location on this service that Duo should redirect users to. For us, this is a bridge
|
||||
// built in to the Bitwarden clients.
|
||||
// See: https://github.com/bitwarden/clients/blob/main/apps/web/src/connectors/duo-redirect.ts
|
||||
// See: https://github.com/bitwarden/clients/blob/5fb46df3415aefced0b52f2db86c873962255448/apps/web/src/connectors/duo-redirect.ts
|
||||
const DUO_REDIRECT_LOCATION: &str = "duo-redirect-connector.html";
|
||||
|
||||
// Number of seconds that a JWT we generate for Duo should be valid for.
|
||||
|
@ -182,7 +182,7 @@ impl DuoClient {
|
|||
HealthCheckResponse::HealthFail {
|
||||
message,
|
||||
message_detail,
|
||||
} => err!(format!("Duo health check FAIL response, msg: {}, detail: {}", message, message_detail)),
|
||||
} => err!(format!("Duo health check FAIL response, msg: {message}, detail: {message_detail}")),
|
||||
};
|
||||
|
||||
if health_stat != "OK" {
|
||||
|
@ -275,7 +275,7 @@ impl DuoClient {
|
|||
|
||||
let status_code = res.status();
|
||||
if status_code != StatusCode::OK {
|
||||
err!(format!("Failure response from Duo: {}", status_code))
|
||||
err!(format!("Failure response from Duo: {status_code}"))
|
||||
}
|
||||
|
||||
let response: IdTokenResponse = match res.json::<IdTokenResponse>().await {
|
||||
|
@ -478,7 +478,7 @@ pub async fn validate_duo_login(
|
|||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
let d: Digest = digest(&SHA512_256, format!("{}{}", ctx.nonce, device_identifier).as_bytes());
|
||||
let d: Digest = digest(&SHA512_256, format!("{}{device_identifier}", ctx.nonce).as_bytes());
|
||||
let hash: String = HEXLOWER.encode(d.as_ref());
|
||||
|
||||
match client.exchange_authz_code_for_result(code, email, hash.as_str()).await {
|
||||
|
|
|
@ -197,14 +197,20 @@ async fn email(data: Json<EmailData>, headers: Headers, mut conn: DbConn) -> Jso
|
|||
}
|
||||
|
||||
/// Validate the email code when used as TwoFactor token mechanism
|
||||
pub async fn validate_email_code_str(user_id: &UserId, token: &str, data: &str, conn: &mut DbConn) -> EmptyResult {
|
||||
pub async fn validate_email_code_str(
|
||||
user_id: &UserId,
|
||||
token: &str,
|
||||
data: &str,
|
||||
ip: &std::net::IpAddr,
|
||||
conn: &mut DbConn,
|
||||
) -> EmptyResult {
|
||||
let mut email_data = EmailTokenData::from_json(data)?;
|
||||
let mut twofactor = TwoFactor::find_by_user_and_type(user_id, TwoFactorType::Email as i32, conn)
|
||||
.await
|
||||
.map_res("Two factor not found")?;
|
||||
let Some(issued_token) = &email_data.last_token else {
|
||||
err!(
|
||||
"No token available",
|
||||
format!("No token available! IP: {ip}"),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn2fa
|
||||
}
|
||||
|
@ -220,7 +226,7 @@ pub async fn validate_email_code_str(user_id: &UserId, token: &str, data: &str,
|
|||
twofactor.save(conn).await?;
|
||||
|
||||
err!(
|
||||
"Token is invalid",
|
||||
format!("Token is invalid! IP: {ip}"),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn2fa
|
||||
}
|
||||
|
@ -323,7 +329,7 @@ pub fn obscure_email(email: &str) -> String {
|
|||
}
|
||||
};
|
||||
|
||||
format!("{}@{}", new_name, &domain)
|
||||
format!("{new_name}@{domain}")
|
||||
}
|
||||
|
||||
pub async fn find_and_activate_email_2fa(user_id: &UserId, conn: &mut DbConn) -> EmptyResult {
|
||||
|
|
|
@ -69,12 +69,12 @@ static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+
|
|||
#[get("/<domain>/icon.png")]
|
||||
fn icon_external(domain: &str) -> Option<Redirect> {
|
||||
if !is_valid_domain(domain) {
|
||||
warn!("Invalid domain: {}", domain);
|
||||
warn!("Invalid domain: {domain}");
|
||||
return None;
|
||||
}
|
||||
|
||||
if should_block_address(domain) {
|
||||
warn!("Blocked address: {}", domain);
|
||||
warn!("Blocked address: {domain}");
|
||||
return None;
|
||||
}
|
||||
|
||||
|
@ -96,7 +96,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
|
|||
const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png");
|
||||
|
||||
if !is_valid_domain(domain) {
|
||||
warn!("Invalid domain: {}", domain);
|
||||
warn!("Invalid domain: {domain}");
|
||||
return Cached::ttl(
|
||||
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
||||
CONFIG.icon_cache_negttl(),
|
||||
|
@ -105,7 +105,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
|
|||
}
|
||||
|
||||
if should_block_address(domain) {
|
||||
warn!("Blocked address: {}", domain);
|
||||
warn!("Blocked address: {domain}");
|
||||
return Cached::ttl(
|
||||
(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
|
||||
CONFIG.icon_cache_negttl(),
|
||||
|
@ -130,7 +130,7 @@ fn is_valid_domain(domain: &str) -> bool {
|
|||
|
||||
// If parsing the domain fails using Url, it will not work with reqwest.
|
||||
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) {
|
||||
debug!("Domain parse error: '{}' - {:?}", domain, parse_error);
|
||||
debug!("Domain parse error: '{domain}' - {parse_error:?}");
|
||||
return false;
|
||||
} else if domain.is_empty()
|
||||
|| domain.contains("..")
|
||||
|
@ -139,18 +139,17 @@ fn is_valid_domain(domain: &str) -> bool {
|
|||
|| domain.ends_with('-')
|
||||
{
|
||||
debug!(
|
||||
"Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'",
|
||||
domain
|
||||
"Domain validation error: '{domain}' is either empty, contains '..', starts with an '.', starts or ends with a '-'"
|
||||
);
|
||||
return false;
|
||||
} else if domain.len() > 255 {
|
||||
debug!("Domain validation error: '{}' exceeds 255 characters", domain);
|
||||
debug!("Domain validation error: '{domain}' exceeds 255 characters");
|
||||
return false;
|
||||
}
|
||||
|
||||
for c in domain.chars() {
|
||||
if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) {
|
||||
debug!("Domain validation error: '{}' contains an invalid character '{}'", domain, c);
|
||||
debug!("Domain validation error: '{domain}' contains an invalid character '{c}'");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -159,7 +158,7 @@ fn is_valid_domain(domain: &str) -> bool {
|
|||
}
|
||||
|
||||
async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
||||
let path = format!("{}/{}.png", CONFIG.icon_cache_folder(), domain);
|
||||
let path = format!("{}/{domain}.png", CONFIG.icon_cache_folder());
|
||||
|
||||
// Check for expiration of negatively cached copy
|
||||
if icon_is_negcached(&path).await {
|
||||
|
@ -167,10 +166,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
|||
}
|
||||
|
||||
if let Some(icon) = get_cached_icon(&path).await {
|
||||
let icon_type = match get_icon_type(&icon) {
|
||||
Some(x) => x,
|
||||
_ => "x-icon",
|
||||
};
|
||||
let icon_type = get_icon_type(&icon).unwrap_or("x-icon");
|
||||
return Some((icon, icon_type.to_string()));
|
||||
}
|
||||
|
||||
|
@ -192,7 +188,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
|||
return None;
|
||||
}
|
||||
|
||||
warn!("Unable to download icon: {:?}", e);
|
||||
warn!("Unable to download icon: {e:?}");
|
||||
let miss_indicator = path + ".miss";
|
||||
save_icon(&miss_indicator, &[]).await;
|
||||
None
|
||||
|
@ -234,7 +230,7 @@ async fn icon_is_negcached(path: &str) -> bool {
|
|||
// No longer negatively cached, drop the marker
|
||||
Ok(true) => {
|
||||
if let Err(e) = remove_file(&miss_indicator).await {
|
||||
error!("Could not remove negative cache indicator for icon {:?}: {:?}", path, e);
|
||||
error!("Could not remove negative cache indicator for icon {path:?}: {e:?}");
|
||||
}
|
||||
false
|
||||
}
|
||||
|
@ -534,10 +530,10 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
|
|||
// Check if the icon type is allowed, else try an icon from the list.
|
||||
icon_type = get_icon_type(&body);
|
||||
if icon_type.is_none() {
|
||||
debug!("Icon from {} data:image uri, is not a valid image type", domain);
|
||||
debug!("Icon from {domain} data:image uri, is not a valid image type");
|
||||
continue;
|
||||
}
|
||||
info!("Extracted icon from data:image uri for {}", domain);
|
||||
info!("Extracted icon from data:image uri for {domain}");
|
||||
buffer = body.freeze();
|
||||
break;
|
||||
}
|
||||
|
@ -577,7 +573,7 @@ async fn save_icon(path: &str, icon: &[u8]) {
|
|||
create_dir_all(&CONFIG.icon_cache_folder()).await.expect("Error creating icon cache folder");
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Unable to save icon: {:?}", e);
|
||||
warn!("Unable to save icon: {e:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ use crate::{
|
|||
ApiResult, EmptyResult, JsonResult,
|
||||
},
|
||||
auth,
|
||||
auth::{AuthMethod, ClientHeaders, ClientIp},
|
||||
auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion},
|
||||
db::{models::*, DbConn},
|
||||
error::MapResult,
|
||||
mail, sso,
|
||||
|
@ -45,7 +45,12 @@ pub fn routes() -> Vec<Route> {
|
|||
}
|
||||
|
||||
#[post("/connect/token", data = "<data>")]
|
||||
async fn login(data: Form<ConnectData>, client_header: ClientHeaders, mut conn: DbConn) -> JsonResult {
|
||||
async fn login(
|
||||
data: Form<ConnectData>,
|
||||
client_header: ClientHeaders,
|
||||
client_version: Option<ClientVersion>,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
let data: ConnectData = data.into_inner();
|
||||
|
||||
let mut user_id: Option<UserId> = None;
|
||||
|
@ -66,7 +71,7 @@ async fn login(data: Form<ConnectData>, client_header: ClientHeaders, mut conn:
|
|||
_check_is_some(&data.device_name, "device_name cannot be blank")?;
|
||||
_check_is_some(&data.device_type, "device_type cannot be blank")?;
|
||||
|
||||
_password_login(data, &mut user_id, &mut conn, &client_header.ip).await
|
||||
_password_login(data, &mut user_id, &mut conn, &client_header.ip, &client_version).await
|
||||
}
|
||||
"client_credentials" => {
|
||||
_check_is_some(&data.client_id, "client_id cannot be blank")?;
|
||||
|
@ -87,7 +92,7 @@ async fn login(data: Form<ConnectData>, client_header: ClientHeaders, mut conn:
|
|||
_check_is_some(&data.device_name, "device_name cannot be blank")?;
|
||||
_check_is_some(&data.device_type, "device_type cannot be blank")?;
|
||||
|
||||
_sso_login(data, &mut user_id, &mut conn, &client_header.ip).await
|
||||
_sso_login(data, &mut user_id, &mut conn, &client_header.ip, &client_version).await
|
||||
}
|
||||
"authorization_code" => err!("SSO sign-in is not available"),
|
||||
t => err!("Invalid type", t),
|
||||
|
@ -137,7 +142,7 @@ async fn _refresh_login(data: ConnectData, conn: &mut DbConn, ip: &ClientIp) ->
|
|||
// See: https://github.com/dani-garcia/vaultwarden/issues/4156
|
||||
// ---
|
||||
// let members = Membership::find_confirmed_by_user(&user.uuid, conn).await;
|
||||
match auth::refresh_tokens(ip, &refresh_token, conn).await {
|
||||
match auth::refresh_tokens(ip, &refresh_token, data.client_id, conn).await {
|
||||
Err(err) => {
|
||||
err_code!(format!("Unable to refresh login credentials: {}", err.message()), Status::Unauthorized.code)
|
||||
}
|
||||
|
@ -159,7 +164,13 @@ async fn _refresh_login(data: ConnectData, conn: &mut DbConn, ip: &ClientIp) ->
|
|||
}
|
||||
|
||||
// After exchanging the code we need to check first if 2FA is needed before continuing
|
||||
async fn _sso_login(data: ConnectData, user_id: &mut Option<UserId>, conn: &mut DbConn, ip: &ClientIp) -> JsonResult {
|
||||
async fn _sso_login(
|
||||
data: ConnectData,
|
||||
user_id: &mut Option<UserId>,
|
||||
conn: &mut DbConn,
|
||||
ip: &ClientIp,
|
||||
client_version: &Option<ClientVersion>,
|
||||
) -> JsonResult {
|
||||
AuthMethod::Sso.check_scope(data.scope.as_ref())?;
|
||||
|
||||
// Ratelimit the login
|
||||
|
@ -257,7 +268,7 @@ async fn _sso_login(data: ConnectData, user_id: &mut Option<UserId>, conn: &mut
|
|||
}
|
||||
Some((mut user, sso_user)) => {
|
||||
let (mut device, new_device) = get_device(&data, conn, &user).await?;
|
||||
let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?;
|
||||
let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, client_version, conn).await?;
|
||||
|
||||
if user.private_key.is_none() {
|
||||
// User was invited; a stub was created
|
||||
|
@ -297,8 +308,9 @@ async fn _sso_login(data: ConnectData, user_id: &mut Option<UserId>, conn: &mut
|
|||
let auth_tokens = sso::create_auth_tokens(
|
||||
&device,
|
||||
&user,
|
||||
data.client_id,
|
||||
auth_user.refresh_token,
|
||||
&auth_user.access_token,
|
||||
auth_user.access_token,
|
||||
auth_user.expires_in,
|
||||
)?;
|
||||
|
||||
|
@ -310,6 +322,7 @@ async fn _password_login(
|
|||
user_id: &mut Option<UserId>,
|
||||
conn: &mut DbConn,
|
||||
ip: &ClientIp,
|
||||
client_version: &Option<ClientVersion>,
|
||||
) -> JsonResult {
|
||||
// Validate scope
|
||||
AuthMethod::Password.check_scope(data.scope.as_ref())?;
|
||||
|
@ -320,7 +333,7 @@ async fn _password_login(
|
|||
// Get the user
|
||||
let username = data.username.as_ref().unwrap().trim();
|
||||
let Some(mut user) = User::find_by_mail(username, conn).await else {
|
||||
err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
|
||||
err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {username}.", ip.ip))
|
||||
};
|
||||
|
||||
// Set the user_id here to be passed back used for event logging.
|
||||
|
@ -330,7 +343,7 @@ async fn _password_login(
|
|||
if !user.enabled {
|
||||
err!(
|
||||
"This user has been disabled",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
format!("IP: {}. Username: {username}.", ip.ip),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn
|
||||
}
|
||||
|
@ -344,7 +357,7 @@ async fn _password_login(
|
|||
let Some(auth_request) = AuthRequest::find_by_uuid_and_user(auth_request_id, &user.uuid, conn).await else {
|
||||
err!(
|
||||
"Auth request not found. Try again.",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
format!("IP: {}. Username: {username}.", ip.ip),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn,
|
||||
}
|
||||
|
@ -362,7 +375,7 @@ async fn _password_login(
|
|||
{
|
||||
err!(
|
||||
"Username or access code is incorrect. Try again",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
format!("IP: {}. Username: {username}.", ip.ip),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn,
|
||||
}
|
||||
|
@ -371,7 +384,7 @@ async fn _password_login(
|
|||
} else if !user.check_valid_password(password) {
|
||||
err!(
|
||||
"Username or password is incorrect. Try again",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
format!("IP: {}. Username: {username}.", ip.ip),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn,
|
||||
}
|
||||
|
@ -398,11 +411,11 @@ async fn _password_login(
|
|||
user.login_verify_count += 1;
|
||||
|
||||
if let Err(e) = user.save(conn).await {
|
||||
error!("Error updating user: {:#?}", e);
|
||||
error!("Error updating user: {e:#?}");
|
||||
}
|
||||
|
||||
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
|
||||
error!("Error auto-sending email verification email: {:#?}", e);
|
||||
error!("Error auto-sending email verification email: {e:#?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -410,7 +423,7 @@ async fn _password_login(
|
|||
// We still want the login to fail until they actually verified the email address
|
||||
err!(
|
||||
"Please verify your email before trying again.",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
format!("IP: {}. Username: {username}.", ip.ip),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn
|
||||
}
|
||||
|
@ -419,9 +432,9 @@ async fn _password_login(
|
|||
|
||||
let (mut device, new_device) = get_device(&data, conn, &user).await?;
|
||||
|
||||
let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?;
|
||||
let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, client_version, conn).await?;
|
||||
|
||||
let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password);
|
||||
let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password, data.client_id);
|
||||
|
||||
authenticated_response(&user, &mut device, new_device, auth_tokens, twofactor_token, &now, conn, ip).await
|
||||
}
|
||||
|
@ -439,7 +452,7 @@ async fn authenticated_response(
|
|||
) -> JsonResult {
|
||||
if CONFIG.mail_enabled() && new_device {
|
||||
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, device).await {
|
||||
error!("Error sending new device email: {:#?}", e);
|
||||
error!("Error sending new device email: {e:#?}");
|
||||
|
||||
if CONFIG.require_device_email() {
|
||||
err!(
|
||||
|
@ -490,7 +503,7 @@ async fn authenticated_response(
|
|||
result["TwoFactorToken"] = Value::String(token);
|
||||
}
|
||||
|
||||
info!("User {} logged in successfully. IP: {}", user.email, ip.ip);
|
||||
info!("User {} logged in successfully. IP: {}", &user.name, ip.ip);
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
|
@ -558,7 +571,7 @@ async fn _user_api_key_login(
|
|||
if CONFIG.mail_enabled() && new_device {
|
||||
let now = Utc::now().naive_utc();
|
||||
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
|
||||
error!("Error sending new device email: {:#?}", e);
|
||||
error!("Error sending new device email: {e:#?}");
|
||||
|
||||
if CONFIG.require_device_email() {
|
||||
err!(
|
||||
|
@ -577,7 +590,7 @@ async fn _user_api_key_login(
|
|||
// See: https://github.com/dani-garcia/vaultwarden/issues/4156
|
||||
// ---
|
||||
// let orgs = Membership::find_confirmed_by_user(&user.uuid, conn).await;
|
||||
let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey);
|
||||
let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey, data.client_id);
|
||||
|
||||
// Save to update `device.updated_at` to track usage
|
||||
device.save(conn).await?;
|
||||
|
@ -621,7 +634,7 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: &
|
|||
err!("Incorrect client_secret", format!("IP: {}. Organization: {}.", ip.ip, org_api_key.org_uuid))
|
||||
}
|
||||
|
||||
let claim = auth::generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid);
|
||||
let claim = generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid);
|
||||
let access_token = auth::encode_jwt(&claim);
|
||||
|
||||
Ok(Json(json!({
|
||||
|
@ -659,6 +672,7 @@ async fn twofactor_auth(
|
|||
data: &ConnectData,
|
||||
device: &mut Device,
|
||||
ip: &ClientIp,
|
||||
client_version: &Option<ClientVersion>,
|
||||
conn: &mut DbConn,
|
||||
) -> ApiResult<Option<String>> {
|
||||
let twofactors = TwoFactor::find_by_user(&user.uuid, conn).await;
|
||||
|
@ -676,7 +690,12 @@ async fn twofactor_auth(
|
|||
|
||||
let twofactor_code = match data.two_factor_token {
|
||||
Some(ref code) => code,
|
||||
None => err_json!(_json_err_twofactor(&twofactor_ids, &user.uuid, data, conn).await?, "2FA token not provided"),
|
||||
None => {
|
||||
err_json!(
|
||||
_json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, conn).await?,
|
||||
"2FA token not provided"
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled);
|
||||
|
@ -712,7 +731,7 @@ async fn twofactor_auth(
|
|||
}
|
||||
}
|
||||
Some(TwoFactorType::Email) => {
|
||||
email::validate_email_code_str(&user.uuid, twofactor_code, &selected_data?, conn).await?
|
||||
email::validate_email_code_str(&user.uuid, twofactor_code, &selected_data?, &ip.ip, conn).await?
|
||||
}
|
||||
|
||||
Some(TwoFactorType::Remember) => {
|
||||
|
@ -722,7 +741,7 @@ async fn twofactor_auth(
|
|||
}
|
||||
_ => {
|
||||
err_json!(
|
||||
_json_err_twofactor(&twofactor_ids, &user.uuid, data, conn).await?,
|
||||
_json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, conn).await?,
|
||||
"2FA Remember token not provided"
|
||||
)
|
||||
}
|
||||
|
@ -755,6 +774,7 @@ async fn _json_err_twofactor(
|
|||
providers: &[i32],
|
||||
user_id: &UserId,
|
||||
data: &ConnectData,
|
||||
client_version: &Option<ClientVersion>,
|
||||
conn: &mut DbConn,
|
||||
) -> ApiResult<Value> {
|
||||
let mut result = json!({
|
||||
|
@ -827,8 +847,16 @@ async fn _json_err_twofactor(
|
|||
err!("No twofactor email registered")
|
||||
};
|
||||
|
||||
// Send email immediately if email is the only 2FA option
|
||||
if providers.len() == 1 {
|
||||
// Starting with version 2025.5.0 the client will call `/api/two-factor/send-email-login`.
|
||||
let disabled_send = if let Some(cv) = client_version {
|
||||
let ver_match = semver::VersionReq::parse(">=2025.5.0").unwrap();
|
||||
ver_match.matches(&cv.0)
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
// Send email immediately if email is the only 2FA option.
|
||||
if providers.len() == 1 && !disabled_send {
|
||||
email::send_token(user_id, conn).await?
|
||||
}
|
||||
|
||||
|
|
|
@ -85,7 +85,7 @@ pub struct MasterPasswordPolicy {
|
|||
enforce_on_login: bool,
|
||||
}
|
||||
|
||||
// Fetch all valid Master Password Policies and merge them into one with all true's and larges numbers as one policy
|
||||
// Fetch all valid Master Password Policies and merge them into one with all trues and largest numbers as one policy
|
||||
async fn master_password_policy(user: &User, conn: &DbConn) -> Value {
|
||||
let master_password_policies: Vec<MasterPasswordPolicy> =
|
||||
OrgPolicy::find_accepted_and_confirmed_by_user_and_active_policy(
|
||||
|
@ -116,6 +116,6 @@ async fn master_password_policy(user: &User, conn: &DbConn) -> Value {
|
|||
json!({})
|
||||
};
|
||||
|
||||
mpp_json["object"] = json!("masterPasswordPolicy");
|
||||
mpp_json["Object"] = json!("masterPasswordPolicy");
|
||||
mpp_json
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ use rocket_ws::{Message, WebSocket};
|
|||
use crate::{
|
||||
auth::{ClientIp, WsAccessTokenHeader},
|
||||
db::{
|
||||
models::{AuthRequestId, Cipher, CollectionId, DeviceId, Folder, Send as DbSend, User, UserId},
|
||||
models::{AuthRequestId, Cipher, CollectionId, Device, DeviceId, Folder, PushId, Send as DbSend, User, UserId},
|
||||
DbConn,
|
||||
},
|
||||
Error, CONFIG,
|
||||
|
@ -339,7 +339,7 @@ impl WebSocketUsers {
|
|||
}
|
||||
|
||||
// NOTE: The last modified date needs to be updated before calling these methods
|
||||
pub async fn send_user_update(&self, ut: UpdateType, user: &User) {
|
||||
pub async fn send_user_update(&self, ut: UpdateType, user: &User, push_uuid: &Option<PushId>, conn: &mut DbConn) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
if *NOTIFICATIONS_DISABLED {
|
||||
return;
|
||||
|
@ -355,11 +355,11 @@ impl WebSocketUsers {
|
|||
}
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_user_update(ut, user);
|
||||
push_user_update(ut, user, push_uuid, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_logout(&self, user: &User, acting_device_id: Option<DeviceId>) {
|
||||
pub async fn send_logout(&self, user: &User, acting_device_id: Option<DeviceId>, conn: &mut DbConn) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
if *NOTIFICATIONS_DISABLED {
|
||||
return;
|
||||
|
@ -375,17 +375,11 @@ impl WebSocketUsers {
|
|||
}
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_logout(user, acting_device_id.clone());
|
||||
push_logout(user, acting_device_id.clone(), conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_folder_update(
|
||||
&self,
|
||||
ut: UpdateType,
|
||||
folder: &Folder,
|
||||
acting_device_id: &DeviceId,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
pub async fn send_folder_update(&self, ut: UpdateType, folder: &Folder, device: &Device, conn: &mut DbConn) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
if *NOTIFICATIONS_DISABLED {
|
||||
return;
|
||||
|
@ -397,7 +391,7 @@ impl WebSocketUsers {
|
|||
("RevisionDate".into(), serialize_date(folder.updated_at)),
|
||||
],
|
||||
ut,
|
||||
Some(acting_device_id.clone()),
|
||||
Some(device.uuid.clone()),
|
||||
);
|
||||
|
||||
if CONFIG.enable_websocket() {
|
||||
|
@ -405,7 +399,7 @@ impl WebSocketUsers {
|
|||
}
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_folder_update(ut, folder, acting_device_id, conn).await;
|
||||
push_folder_update(ut, folder, device, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -414,7 +408,7 @@ impl WebSocketUsers {
|
|||
ut: UpdateType,
|
||||
cipher: &Cipher,
|
||||
user_ids: &[UserId],
|
||||
acting_device_id: &DeviceId,
|
||||
device: &Device,
|
||||
collection_uuids: Option<Vec<CollectionId>>,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
|
@ -444,7 +438,7 @@ impl WebSocketUsers {
|
|||
("RevisionDate".into(), revision_date),
|
||||
],
|
||||
ut,
|
||||
Some(acting_device_id.clone()),
|
||||
Some(device.uuid.clone()), // Acting device id (unique device/app uuid)
|
||||
);
|
||||
|
||||
if CONFIG.enable_websocket() {
|
||||
|
@ -454,7 +448,7 @@ impl WebSocketUsers {
|
|||
}
|
||||
|
||||
if CONFIG.push_enabled() && user_ids.len() == 1 {
|
||||
push_cipher_update(ut, cipher, acting_device_id, conn).await;
|
||||
push_cipher_update(ut, cipher, device, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -463,7 +457,7 @@ impl WebSocketUsers {
|
|||
ut: UpdateType,
|
||||
send: &DbSend,
|
||||
user_ids: &[UserId],
|
||||
acting_device_id: &DeviceId,
|
||||
device: &Device,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
|
@ -488,7 +482,7 @@ impl WebSocketUsers {
|
|||
}
|
||||
}
|
||||
if CONFIG.push_enabled() && user_ids.len() == 1 {
|
||||
push_send_update(ut, send, acting_device_id, conn).await;
|
||||
push_send_update(ut, send, device, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -496,7 +490,7 @@ impl WebSocketUsers {
|
|||
&self,
|
||||
user_id: &UserId,
|
||||
auth_request_uuid: &str,
|
||||
acting_device_id: &DeviceId,
|
||||
device: &Device,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
|
@ -506,14 +500,14 @@ impl WebSocketUsers {
|
|||
let data = create_update(
|
||||
vec![("Id".into(), auth_request_uuid.to_owned().into()), ("UserId".into(), user_id.to_string().into())],
|
||||
UpdateType::AuthRequest,
|
||||
Some(acting_device_id.clone()),
|
||||
Some(device.uuid.clone()),
|
||||
);
|
||||
if CONFIG.enable_websocket() {
|
||||
self.send_update(user_id, &data).await;
|
||||
}
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_auth_request(user_id.clone(), auth_request_uuid.to_owned(), conn).await;
|
||||
push_auth_request(user_id, auth_request_uuid, device, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -521,7 +515,7 @@ impl WebSocketUsers {
|
|||
&self,
|
||||
user_id: &UserId,
|
||||
auth_request_id: &AuthRequestId,
|
||||
approving_device_id: &DeviceId,
|
||||
device: &Device,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
// Skip any processing if both WebSockets and Push are not active
|
||||
|
@ -531,14 +525,14 @@ impl WebSocketUsers {
|
|||
let data = create_update(
|
||||
vec![("Id".into(), auth_request_id.to_string().into()), ("UserId".into(), user_id.to_string().into())],
|
||||
UpdateType::AuthRequestResponse,
|
||||
Some(approving_device_id.clone()),
|
||||
Some(device.uuid.clone()),
|
||||
);
|
||||
if CONFIG.enable_websocket() {
|
||||
self.send_update(user_id, &data).await;
|
||||
}
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_auth_response(user_id, auth_request_id, approving_device_id, conn).await;
|
||||
push_auth_response(user_id, auth_request_id, device, conn).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
204  src/api/push.rs
|
@ -7,9 +7,9 @@ use tokio::sync::RwLock;
|
|||
|
||||
use crate::{
|
||||
api::{ApiResult, EmptyResult, UpdateType},
|
||||
db::models::{AuthRequestId, Cipher, Device, DeviceId, Folder, Send, User, UserId},
|
||||
db::models::{AuthRequestId, Cipher, Device, DeviceId, Folder, PushId, Send, User, UserId},
|
||||
http_client::make_http_request,
|
||||
util::format_date,
|
||||
util::{format_date, get_uuid},
|
||||
CONFIG,
|
||||
};
|
||||
|
||||
|
@ -28,20 +28,20 @@ struct LocalAuthPushToken {
|
|||
valid_until: Instant,
|
||||
}
|
||||
|
||||
async fn get_auth_push_token() -> ApiResult<String> {
|
||||
static PUSH_TOKEN: Lazy<RwLock<LocalAuthPushToken>> = Lazy::new(|| {
|
||||
async fn get_auth_api_token() -> ApiResult<String> {
|
||||
static API_TOKEN: Lazy<RwLock<LocalAuthPushToken>> = Lazy::new(|| {
|
||||
RwLock::new(LocalAuthPushToken {
|
||||
access_token: String::new(),
|
||||
valid_until: Instant::now(),
|
||||
})
|
||||
});
|
||||
let push_token = PUSH_TOKEN.read().await;
|
||||
let api_token = API_TOKEN.read().await;
|
||||
|
||||
if push_token.valid_until.saturating_duration_since(Instant::now()).as_secs() > 0 {
|
||||
if api_token.valid_until.saturating_duration_since(Instant::now()).as_secs() > 0 {
|
||||
debug!("Auth Push token still valid, no need for a new one");
|
||||
return Ok(push_token.access_token.clone());
|
||||
return Ok(api_token.access_token.clone());
|
||||
}
|
||||
drop(push_token); // Drop the read lock now
|
||||
drop(api_token); // Drop the read lock now
|
||||
|
||||
let installation_id = CONFIG.push_installation_id();
|
||||
let client_id = format!("installation.{installation_id}");
|
||||
|
@ -68,44 +68,48 @@ async fn get_auth_push_token() -> ApiResult<String> {
|
|||
Err(e) => err!(format!("Unexpected push token received from bitwarden server: {e}")),
|
||||
};
|
||||
|
||||
let mut push_token = PUSH_TOKEN.write().await;
|
||||
push_token.valid_until = Instant::now()
|
||||
let mut api_token = API_TOKEN.write().await;
|
||||
api_token.valid_until = Instant::now()
|
||||
.checked_add(Duration::new((json_pushtoken.expires_in / 2) as u64, 0)) // Token valid for half the specified time
|
||||
.unwrap();
|
||||
|
||||
push_token.access_token = json_pushtoken.access_token;
|
||||
api_token.access_token = json_pushtoken.access_token;
|
||||
|
||||
debug!("Token still valid for {}", push_token.valid_until.saturating_duration_since(Instant::now()).as_secs());
|
||||
Ok(push_token.access_token.clone())
|
||||
debug!("Token still valid for {}", api_token.valid_until.saturating_duration_since(Instant::now()).as_secs());
|
||||
Ok(api_token.access_token.clone())
|
||||
}
|
||||
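For orientation, here is a reduced sketch of the token-caching pattern that `get_auth_api_token` follows above (check the cache under a read lock, refresh under a write lock, keep the token for half its lifetime). All names and the hard-coded lifetime below are hypothetical, and the real code fetches the token from the Bitwarden push relay instead of synthesizing one.

use std::time::{Duration, Instant};

use once_cell::sync::Lazy;
use tokio::sync::RwLock;

struct CachedToken {
    access_token: String,
    valid_until: Instant,
}

static TOKEN: Lazy<RwLock<CachedToken>> = Lazy::new(|| {
    RwLock::new(CachedToken {
        access_token: String::new(),
        valid_until: Instant::now(),
    })
});

async fn get_token() -> String {
    {
        let token = TOKEN.read().await;
        if token.valid_until.saturating_duration_since(Instant::now()).as_secs() > 0 {
            return token.access_token.clone();
        }
    } // drop the read lock before taking the write lock

    let mut token = TOKEN.write().await;
    // A real implementation would request a fresh token here.
    token.access_token = String::from("fresh-token");
    token.valid_until = Instant::now() + Duration::from_secs(3600 / 2); // cache for half the lifetime
    token.access_token.clone()
}

// Requires tokio with the "rt" and "macros" features enabled.
#[tokio::main]
async fn main() {
    println!("{}", get_token().await);
}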
|
||||
pub async fn register_push_device(device: &mut Device, conn: &mut crate::db::DbConn) -> EmptyResult {
|
||||
if !CONFIG.push_enabled() || !device.is_push_device() || device.is_registered() {
|
||||
if !CONFIG.push_enabled() || !device.is_push_device() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if device.push_token.is_none() {
|
||||
warn!("Skipping the registration of the device {} because the push_token field is empty.", device.uuid);
|
||||
warn!("To get rid of this message you need to clear the app data and reconnect the device.");
|
||||
warn!("Skipping the registration of the device {:?} because the push_token field is empty.", device.uuid);
|
||||
warn!("To get rid of this message you need to logout, clear the app data and login again on the device.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
debug!("Registering Device {}", device.uuid);
|
||||
debug!("Registering Device {:?}", device.push_uuid);
|
||||
|
||||
// generate a random push_uuid so we know the device is registered
|
||||
device.push_uuid = Some(uuid::Uuid::new_v4().to_string());
|
||||
// Generate a random push_uuid if it doesn't already have one
|
||||
if device.push_uuid.is_none() {
|
||||
device.push_uuid = Some(PushId(get_uuid()));
|
||||
}
|
||||
|
||||
// Needed to register a device for push to Bitwarden:
|
||||
let data = json!({
|
||||
"deviceId": device.push_uuid, // Unique UUID per user/device
|
||||
"pushToken": device.push_token,
|
||||
"userId": device.user_uuid,
|
||||
"deviceId": device.push_uuid,
|
||||
"identifier": device.uuid,
|
||||
"type": device.atype,
|
||||
"pushToken": device.push_token
|
||||
"identifier": device.uuid, // Unique UUID of the device/app, determined by the device/app it self currently registering
|
||||
// "organizationIds:" [] // TODO: This is not yet implemented by Vaultwarden!
|
||||
"installationId": CONFIG.push_installation_id(),
|
||||
});
|
||||
|
||||
let auth_push_token = get_auth_push_token().await?;
|
||||
let auth_header = format!("Bearer {}", &auth_push_token);
|
||||
let auth_api_token = get_auth_api_token().await?;
|
||||
let auth_header = format!("Bearer {auth_api_token}");
|
||||
|
||||
if let Err(e) = make_http_request(Method::POST, &(CONFIG.push_relay_uri() + "/push/register"))?
|
||||
.header(CONTENT_TYPE, "application/json")
|
||||
|
@ -126,15 +130,18 @@ pub async fn register_push_device(device: &mut Device, conn: &mut crate::db::DbC
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn unregister_push_device(push_id: Option<String>) -> EmptyResult {
|
||||
pub async fn unregister_push_device(push_id: &Option<PushId>) -> EmptyResult {
|
||||
if !CONFIG.push_enabled() || push_id.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
let auth_push_token = get_auth_push_token().await?;
|
||||
let auth_api_token = get_auth_api_token().await?;
|
||||
|
||||
let auth_header = format!("Bearer {}", &auth_push_token);
|
||||
let auth_header = format!("Bearer {auth_api_token}");
|
||||
|
||||
match make_http_request(Method::DELETE, &(CONFIG.push_relay_uri() + "/push/" + &push_id.unwrap()))?
|
||||
match make_http_request(
|
||||
Method::POST,
|
||||
&format!("{}/push/delete/{}", CONFIG.push_relay_uri(), push_id.as_ref().unwrap()),
|
||||
)?
|
||||
.header(AUTHORIZATION, auth_header)
|
||||
.send()
|
||||
.await
|
||||
|
@ -145,12 +152,7 @@ pub async fn unregister_push_device(push_id: Option<String>) -> EmptyResult {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn push_cipher_update(
|
||||
ut: UpdateType,
|
||||
cipher: &Cipher,
|
||||
acting_device_id: &DeviceId,
|
||||
conn: &mut crate::db::DbConn,
|
||||
) {
|
||||
pub async fn push_cipher_update(ut: UpdateType, cipher: &Cipher, device: &Device, conn: &mut crate::db::DbConn) {
|
||||
// We shouldn't send a push notification on cipher update if the cipher belongs to an organization, as this isn't implemented in the upstream server either.
|
||||
if cipher.organization_uuid.is_some() {
|
||||
return;
|
||||
|
@ -163,24 +165,28 @@ pub async fn push_cipher_update(
|
|||
if Device::check_user_has_push_device(user_id, conn).await {
|
||||
send_to_push_relay(json!({
|
||||
"userId": user_id,
|
||||
"organizationId": (),
|
||||
"deviceId": acting_device_id,
|
||||
"identifier": acting_device_id,
|
||||
"organizationId": null,
|
||||
"deviceId": device.push_uuid, // Should be the records unique uuid of the acting device (unique uuid per user/device)
|
||||
"identifier": device.uuid, // Should be the acting device id (aka uuid per device/app)
|
||||
"type": ut as i32,
|
||||
"payload": {
|
||||
"Id": cipher.uuid,
|
||||
"UserId": cipher.user_uuid,
|
||||
"OrganizationId": (),
|
||||
"RevisionDate": format_date(&cipher.updated_at)
|
||||
}
|
||||
"id": cipher.uuid,
|
||||
"userId": cipher.user_uuid,
|
||||
"organizationId": null,
|
||||
"collectionIds": null,
|
||||
"revisionDate": format_date(&cipher.updated_at)
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
}))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_logout(user: &User, acting_device_id: Option<DeviceId>) {
|
||||
pub async fn push_logout(user: &User, acting_device_id: Option<DeviceId>, conn: &mut crate::db::DbConn) {
|
||||
let acting_device_id: Value = acting_device_id.map(|v| v.to_string().into()).unwrap_or_else(|| Value::Null);
|
||||
|
||||
if Device::check_user_has_push_device(&user.uuid, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user.uuid,
|
||||
"organizationId": (),
|
||||
|
@ -188,62 +194,68 @@ pub fn push_logout(user: &User, acting_device_id: Option<DeviceId>) {
|
|||
"identifier": acting_device_id,
|
||||
"type": UpdateType::LogOut as i32,
|
||||
"payload": {
|
||||
"UserId": user.uuid,
|
||||
"Date": format_date(&user.updated_at)
|
||||
}
|
||||
"userId": user.uuid,
|
||||
"date": format_date(&user.updated_at)
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_user_update(ut: UpdateType, user: &User) {
|
||||
pub async fn push_user_update(ut: UpdateType, user: &User, push_uuid: &Option<PushId>, conn: &mut crate::db::DbConn) {
|
||||
if Device::check_user_has_push_device(&user.uuid, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user.uuid,
|
||||
"organizationId": (),
|
||||
"deviceId": (),
|
||||
"identifier": (),
|
||||
"organizationId": null,
|
||||
"deviceId": push_uuid,
|
||||
"identifier": null,
|
||||
"type": ut as i32,
|
||||
"payload": {
|
||||
"UserId": user.uuid,
|
||||
"Date": format_date(&user.updated_at)
|
||||
}
|
||||
"userId": user.uuid,
|
||||
"date": format_date(&user.updated_at)
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn push_folder_update(
|
||||
ut: UpdateType,
|
||||
folder: &Folder,
|
||||
acting_device_id: &DeviceId,
|
||||
conn: &mut crate::db::DbConn,
|
||||
) {
|
||||
pub async fn push_folder_update(ut: UpdateType, folder: &Folder, device: &Device, conn: &mut crate::db::DbConn) {
|
||||
if Device::check_user_has_push_device(&folder.user_uuid, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": folder.user_uuid,
|
||||
"organizationId": (),
|
||||
"deviceId": acting_device_id,
|
||||
"identifier": acting_device_id,
|
||||
"organizationId": null,
|
||||
"deviceId": device.push_uuid, // Should be the records unique uuid of the acting device (unique uuid per user/device)
|
||||
"identifier": device.uuid, // Should be the acting device id (aka uuid per device/app)
|
||||
"type": ut as i32,
|
||||
"payload": {
|
||||
"Id": folder.uuid,
|
||||
"UserId": folder.user_uuid,
|
||||
"RevisionDate": format_date(&folder.updated_at)
|
||||
}
|
||||
"id": folder.uuid,
|
||||
"userId": folder.user_uuid,
|
||||
"revisionDate": format_date(&folder.updated_at)
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn push_send_update(ut: UpdateType, send: &Send, acting_device_id: &DeviceId, conn: &mut crate::db::DbConn) {
|
||||
pub async fn push_send_update(ut: UpdateType, send: &Send, device: &Device, conn: &mut crate::db::DbConn) {
|
||||
if let Some(s) = &send.user_uuid {
|
||||
if Device::check_user_has_push_device(s, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": send.user_uuid,
|
||||
"organizationId": (),
|
||||
"deviceId": acting_device_id,
|
||||
"identifier": acting_device_id,
|
||||
"organizationId": null,
|
||||
"deviceId": device.push_uuid, // Should be the records unique uuid of the acting device (unique uuid per user/device)
|
||||
"identifier": device.uuid, // Should be the acting device id (aka uuid per device/app)
|
||||
"type": ut as i32,
|
||||
"payload": {
|
||||
"Id": send.uuid,
|
||||
"UserId": send.user_uuid,
|
||||
"RevisionDate": format_date(&send.revision_date)
|
||||
}
|
||||
"id": send.uuid,
|
||||
"userId": send.user_uuid,
|
||||
"revisionDate": format_date(&send.revision_date)
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
@ -254,20 +266,20 @@ async fn send_to_push_relay(notification_data: Value) {
|
|||
return;
|
||||
}
|
||||
|
||||
let auth_push_token = match get_auth_push_token().await {
|
||||
let auth_api_token = match get_auth_api_token().await {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
debug!("Could not get the auth push token: {}", e);
|
||||
debug!("Could not get the auth push token: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let auth_header = format!("Bearer {}", &auth_push_token);
|
||||
let auth_header = format!("Bearer {auth_api_token}");
|
||||
|
||||
let req = match make_http_request(Method::POST, &(CONFIG.push_relay_uri() + "/push/send")) {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
error!("An error occurred while sending a send update to the push relay: {}", e);
|
||||
error!("An error occurred while sending a send update to the push relay: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
@ -280,22 +292,24 @@ async fn send_to_push_relay(notification_data: Value) {
|
|||
.send()
|
||||
.await
|
||||
{
|
||||
error!("An error occurred while sending a send update to the push relay: {}", e);
|
||||
error!("An error occurred while sending a send update to the push relay: {e}");
|
||||
};
|
||||
}
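A pattern that recurs throughout this diff is the switch from positional `format!`/`error!` arguments to inline (captured) format arguments. Both forms produce identical output; the inline form is simply harder to get out of sync with its argument list. A tiny sketch with made-up values:

```rust
fn main() {
    let e = "connection reset by peer";
    let ip = "198.51.100.7";

    // Positional interpolation, as in the removed lines.
    let before = format!("An error occurred while sending a send update to the push relay: {}", e);
    // Inline (captured) format arguments, as in the added lines.
    let after = format!("An error occurred while sending a send update to the push relay: {e}");
    assert_eq!(before, after);

    // The same capture works for Debug and pretty-Debug specifiers.
    println!("'{ip}' header is malformed: {e:?}");
}
```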
|
||||
|
||||
pub async fn push_auth_request(user_id: UserId, auth_request_id: String, conn: &mut crate::db::DbConn) {
|
||||
if Device::check_user_has_push_device(&user_id, conn).await {
|
||||
pub async fn push_auth_request(user_id: &UserId, auth_request_id: &str, device: &Device, conn: &mut crate::db::DbConn) {
|
||||
if Device::check_user_has_push_device(user_id, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user_id,
|
||||
"organizationId": (),
|
||||
"deviceId": null,
|
||||
"identifier": null,
|
||||
"organizationId": null,
|
||||
"deviceId": device.push_uuid, // Should be the records unique uuid of the acting device (unique uuid per user/device)
|
||||
"identifier": device.uuid, // Should be the acting device id (aka uuid per device/app)
|
||||
"type": UpdateType::AuthRequest as i32,
|
||||
"payload": {
|
||||
"Id": auth_request_id,
|
||||
"UserId": user_id,
|
||||
}
|
||||
"userId": user_id,
|
||||
"id": auth_request_id,
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
@ -303,20 +317,22 @@ pub async fn push_auth_request(user_id: UserId, auth_request_id: String, conn: &
|
|||
pub async fn push_auth_response(
|
||||
user_id: &UserId,
|
||||
auth_request_id: &AuthRequestId,
|
||||
approving_device_id: &DeviceId,
|
||||
device: &Device,
|
||||
conn: &mut crate::db::DbConn,
|
||||
) {
|
||||
if Device::check_user_has_push_device(user_id, conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user_id,
|
||||
"organizationId": (),
|
||||
"deviceId": approving_device_id,
|
||||
"identifier": approving_device_id,
|
||||
"organizationId": null,
|
||||
"deviceId": device.push_uuid, // Should be the records unique uuid of the acting device (unique uuid per user/device)
|
||||
"identifier": device.uuid, // Should be the acting device id (aka uuid per device/app)
|
||||
"type": UpdateType::AuthRequestResponse as i32,
|
||||
"payload": {
|
||||
"Id": auth_request_id,
|
||||
"UserId": user_id,
|
||||
}
|
||||
"userId": user_id,
|
||||
"id": auth_request_id,
|
||||
},
|
||||
"clientType": null,
|
||||
"installationId": null
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,10 +59,10 @@ fn vaultwarden_css() -> Cached<Css<String>> {
|
|||
"load_user_scss": true,
|
||||
"mail_enabled": CONFIG.mail_enabled(),
|
||||
"sends_allowed": CONFIG.sends_allowed(),
|
||||
"signup_disabled": CONFIG.is_signup_disabled(),
|
||||
"signup_disabled": !CONFIG.signups_allowed() && CONFIG.signups_domains_whitelist().is_empty(),
|
||||
"sso_disabled": !CONFIG.sso_enabled(),
|
||||
"sso_only": CONFIG.sso_enabled() && CONFIG.sso_only(),
|
||||
"yubico_enabled": CONFIG._enable_yubico() && (CONFIG.yubico_client_id().is_some() == CONFIG.yubico_secret_key().is_some()),
|
||||
"yubico_enabled": CONFIG._enable_yubico() && CONFIG.yubico_client_id().is_some() && CONFIG.yubico_secret_key().is_some(),
|
||||
});
|
||||
|
||||
let scss = match CONFIG.render_template("scss/vaultwarden.scss", &css_options) {
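The `yubico_enabled` change above replaces `is_some() == is_some()` with `is_some() && is_some()`, which matters because equality is also true when both values are missing. A quick check of the boolean difference (standalone functions, not the real CONFIG accessors):

```rust
// With `==`, two missing values compare equal and the feature is wrongly
// reported as enabled; `&&` requires both credentials to be present.
fn enabled_old(client_id: Option<&str>, secret_key: Option<&str>) -> bool {
    client_id.is_some() == secret_key.is_some()
}

fn enabled_new(client_id: Option<&str>, secret_key: Option<&str>) -> bool {
    client_id.is_some() && secret_key.is_some()
}

fn main() {
    assert!(enabled_old(None, None));  // old: "enabled" with nothing configured
    assert!(!enabled_new(None, None)); // new: correctly disabled
    assert_eq!(enabled_old(Some("id"), Some("key")), enabled_new(Some("id"), Some("key")));
}
```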
|
||||
|
|
75
src/auth.rs
|
@ -193,6 +193,11 @@ pub struct LoginJwtClaims {
|
|||
pub sstamp: String,
|
||||
// device uuid
|
||||
pub device: DeviceId,
|
||||
// what kind of device, like FirefoxBrowser or Android derived from DeviceType
|
||||
pub devicetype: String,
|
||||
// the type of client_id, like web, cli, desktop, browser or mobile
|
||||
pub client_id: String,
|
||||
|
||||
// [ "api", "offline_access" ]
|
||||
pub scope: Vec<String>,
|
||||
// [ "Application" ]
|
||||
|
@ -200,7 +205,15 @@ pub struct LoginJwtClaims {
|
|||
}
|
||||
|
||||
impl LoginJwtClaims {
|
||||
pub fn new(device: &Device, user: &User, nbf: i64, exp: i64, scope: Vec<String>, now: DateTime<Utc>) -> Self {
|
||||
pub fn new(
|
||||
device: &Device,
|
||||
user: &User,
|
||||
nbf: i64,
|
||||
exp: i64,
|
||||
scope: Vec<String>,
|
||||
client_id: Option<String>,
|
||||
now: DateTime<Utc>,
|
||||
) -> Self {
|
||||
// ---
|
||||
// Disabled these keys to be added to the JWT since they could cause the JWT to get too large
|
||||
// Also, these key/value pairs are not used anywhere by either Vaultwarden or Bitwarden clients
|
||||
|
@ -240,12 +253,14 @@ impl LoginJwtClaims {
|
|||
// orgmanager,
|
||||
sstamp: user.security_stamp.clone(),
|
||||
device: device.uuid.clone(),
|
||||
devicetype: DeviceType::from_i32(device.atype).to_string(),
|
||||
client_id: client_id.unwrap_or("undefined".to_string()),
|
||||
scope,
|
||||
amr: vec!["Application".into()],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default(device: &Device, user: &User, auth_method: &AuthMethod) -> Self {
|
||||
pub fn default(device: &Device, user: &User, auth_method: &AuthMethod, client_id: Option<String>) -> Self {
|
||||
let time_now = Utc::now();
|
||||
Self::new(
|
||||
device,
|
||||
|
@ -253,6 +268,7 @@ impl LoginJwtClaims {
|
|||
time_now.timestamp(),
|
||||
(time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(),
|
||||
auth_method.scope_vec(),
|
||||
client_id,
|
||||
time_now,
|
||||
)
|
||||
}
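The claims constructor now threads an optional `client_id` through and records `"undefined"` when none is supplied. A trimmed-down, hypothetical claims struct showing just that fallback (field set reduced for illustration):

```rust
#[derive(Debug)]
struct LoginClaims {
    device: String,
    devicetype: String,
    client_id: String,
    scope: Vec<String>,
}

fn new_claims(device_uuid: &str, devicetype: &str, client_id: Option<String>, scope: Vec<String>) -> LoginClaims {
    LoginClaims {
        device: device_uuid.to_string(),
        devicetype: devicetype.to_string(),
        // An absent client_id becomes the literal "undefined" in the issued claims.
        client_id: client_id.unwrap_or_else(|| "undefined".to_string()),
        scope,
    }
}

fn main() {
    let web = new_claims("dev-1", "ChromeBrowser", Some("web".into()), vec!["api".into(), "offline_access".into()]);
    let unknown = new_claims("dev-2", "Android", None, vec!["api".into()]);
    println!("{web:?}");
    println!("{unknown:?}");
}
```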
|
||||
|
@ -368,7 +384,7 @@ pub fn generate_organization_api_key_login_claims(
|
|||
exp: (time_now + TimeDelta::try_hours(1).unwrap()).timestamp(),
|
||||
iss: JWT_ORG_API_KEY_ISSUER.to_string(),
|
||||
sub: org_api_key_uuid,
|
||||
client_id: format!("organization.{}", org_id),
|
||||
client_id: format!("organization.{org_id}"),
|
||||
client_sub: org_id,
|
||||
scope: vec!["api.organization".into()],
|
||||
}
|
||||
|
@ -626,7 +642,7 @@ impl<'r> FromRequest<'r> for Headers {
|
|||
let mut user = user;
|
||||
user.reset_stamp_exception();
|
||||
if let Err(e) = user.save(&mut conn).await {
|
||||
error!("Error updating user: {:#?}", e);
|
||||
error!("Error updating user: {e:#?}");
|
||||
}
|
||||
err_handler!("Stamp exception is expired")
|
||||
} else if !stamp_exception.routes.contains(&current_route.to_string()) {
|
||||
|
@ -768,17 +784,6 @@ impl<'r> FromRequest<'r> for AdminHeaders {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<AdminHeaders> for Headers {
|
||||
fn from(h: AdminHeaders) -> Headers {
|
||||
Headers {
|
||||
host: h.host,
|
||||
device: h.device,
|
||||
user: h.user,
|
||||
ip: h.ip,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// col_id is usually the fourth path param ("/organizations/<org_id>/collections/<col_id>"),
|
||||
// but there could be cases where it is a query value.
|
||||
// First check the path, if this is not a valid uuid, try the query values.
|
||||
|
@ -948,8 +953,10 @@ impl<'r> FromRequest<'r> for OwnerHeaders {
|
|||
|
||||
pub struct OrgMemberHeaders {
|
||||
pub host: String,
|
||||
pub device: Device,
|
||||
pub user: User,
|
||||
pub org_id: OrganizationId,
|
||||
pub membership: Membership,
|
||||
pub ip: ClientIp,
|
||||
}
|
||||
|
||||
#[rocket::async_trait]
|
||||
|
@ -961,8 +968,10 @@ impl<'r> FromRequest<'r> for OrgMemberHeaders {
|
|||
if headers.is_member() {
|
||||
Outcome::Success(Self {
|
||||
host: headers.host,
|
||||
device: headers.device,
|
||||
user: headers.user,
|
||||
org_id: headers.membership.org_uuid,
|
||||
membership: headers.membership,
|
||||
ip: headers.ip,
|
||||
})
|
||||
} else {
|
||||
err_handler!("You need to be a Member of the Organization to call this endpoint")
|
||||
|
@ -970,6 +979,17 @@ impl<'r> FromRequest<'r> for OrgMemberHeaders {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<OrgMemberHeaders> for Headers {
|
||||
fn from(h: OrgMemberHeaders) -> Headers {
|
||||
Headers {
|
||||
host: h.host,
|
||||
device: h.device,
|
||||
user: h.user,
|
||||
ip: h.ip,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Client IP address detection
|
||||
//
|
||||
|
@ -990,7 +1010,7 @@ impl<'r> FromRequest<'r> for ClientIp {
|
|||
None => ip,
|
||||
}
|
||||
.parse()
|
||||
.map_err(|_| warn!("'{}' header is malformed: {}", CONFIG.ip_header(), ip))
|
||||
.map_err(|_| warn!("'{}' header is malformed: {ip}", CONFIG.ip_header()))
|
||||
.ok()
|
||||
})
|
||||
} else {
|
||||
|
@ -1157,10 +1177,10 @@ impl AuthTokens {
|
|||
}
|
||||
|
||||
// Create refresh_token and access_token with default validity
|
||||
pub fn new(device: &Device, user: &User, sub: AuthMethod) -> Self {
|
||||
pub fn new(device: &Device, user: &User, sub: AuthMethod, client_id: Option<String>) -> Self {
|
||||
let time_now = Utc::now();
|
||||
|
||||
let access_claims = LoginJwtClaims::default(device, user, &sub);
|
||||
let access_claims = LoginJwtClaims::default(device, user, &sub, client_id);
|
||||
|
||||
let validity = if DeviceType::is_mobile(&device.atype) {
|
||||
*MOBILE_REFRESH_VALIDITY
|
||||
|
@ -1184,7 +1204,12 @@ impl AuthTokens {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn refresh_tokens(ip: &ClientIp, refresh_token: &str, conn: &mut DbConn) -> ApiResult<(Device, AuthTokens)> {
|
||||
pub async fn refresh_tokens(
|
||||
ip: &ClientIp,
|
||||
refresh_token: &str,
|
||||
client_id: Option<String>,
|
||||
conn: &mut DbConn,
|
||||
) -> ApiResult<(Device, AuthTokens)> {
|
||||
let refresh_claims = match decode_refresh(refresh_token) {
|
||||
Err(err) => {
|
||||
debug!("Failed to decode {} refresh_token: {refresh_token}", ip.ip);
|
||||
|
@ -1209,12 +1234,14 @@ pub async fn refresh_tokens(ip: &ClientIp, refresh_token: &str, conn: &mut DbCon
|
|||
|
||||
let auth_tokens = match refresh_claims.sub {
|
||||
AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => {
|
||||
AuthTokens::new(&device, &user, refresh_claims.sub)
|
||||
AuthTokens::new(&device, &user, refresh_claims.sub, client_id)
|
||||
}
|
||||
AuthMethod::Sso if CONFIG.sso_enabled() => {
|
||||
sso::exchange_refresh_token(&device, &user, client_id, refresh_claims).await?
|
||||
}
|
||||
AuthMethod::Sso if CONFIG.sso_enabled() => sso::exchange_refresh_token(&device, &user, &refresh_claims).await?,
|
||||
AuthMethod::Sso => err!("SSO is now disabled, Login again using email and master password"),
|
||||
AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, Login again"),
|
||||
AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub),
|
||||
AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub, client_id),
|
||||
_ => err!("Invalid auth method, cannot refresh token"),
|
||||
};
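The refresh flow above dispatches on the auth method stored in the refresh claims, now also forwarding the optional `client_id`. A rough restatement with plain `Result`s instead of the project's `ApiResult`/`err!` machinery; the SSO session-only branch is omitted and the messages are illustrative:

```rust
#[derive(Debug)]
enum AuthMethod {
    Password,
    Sso,
}

fn pick_refresh_path(
    sub: AuthMethod,
    sso_enabled: bool,
    sso_only: bool,
    client_id: Option<String>,
) -> Result<String, &'static str> {
    match sub {
        AuthMethod::Sso if !sso_enabled => Err("SSO is now disabled, log in again using email and master password"),
        AuthMethod::Sso => Ok(format!("exchange SSO refresh token (client_id={client_id:?})")),
        AuthMethod::Password if sso_enabled && sso_only => Err("SSO is now required, log in again"),
        AuthMethod::Password => Ok(format!("issue new password-auth tokens (client_id={client_id:?})")),
    }
}

fn main() {
    println!("{:?}", pick_refresh_path(AuthMethod::Password, false, false, Some("browser".into())));
    println!("{:?}", pick_refresh_path(AuthMethod::Sso, false, false, None));
}
```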
|
||||
|
||||
|
|
|
@ -375,19 +375,19 @@ make_config! {
|
|||
/// Data folder |> Main data folder
|
||||
data_folder: String, false, def, "data".to_string();
|
||||
/// Database URL
|
||||
database_url: String, false, auto, |c| format!("{}/{}", c.data_folder, "db.sqlite3");
|
||||
database_url: String, false, auto, |c| format!("{}/db.sqlite3", c.data_folder);
|
||||
/// Icon cache folder
|
||||
icon_cache_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "icon_cache");
|
||||
icon_cache_folder: String, false, auto, |c| format!("{}/icon_cache", c.data_folder);
|
||||
/// Attachments folder
|
||||
attachments_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "attachments");
|
||||
attachments_folder: String, false, auto, |c| format!("{}/attachments", c.data_folder);
|
||||
/// Sends folder
|
||||
sends_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "sends");
|
||||
sends_folder: String, false, auto, |c| format!("{}/sends", c.data_folder);
|
||||
/// Temp folder |> Used for storing temporary file uploads
|
||||
tmp_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "tmp");
|
||||
tmp_folder: String, false, auto, |c| format!("{}/tmp", c.data_folder);
|
||||
/// Templates folder
|
||||
templates_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "templates");
|
||||
templates_folder: String, false, auto, |c| format!("{}/templates", c.data_folder);
|
||||
/// Session JWT key
|
||||
rsa_key_filename: String, false, auto, |c| format!("{}/{}", c.data_folder, "rsa_key");
|
||||
rsa_key_filename: String, false, auto, |c| format!("{}/rsa_key", c.data_folder);
|
||||
/// Web vault folder
|
||||
web_vault_folder: String, false, def, "web-vault/".to_string();
|
||||
},
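The folder settings above keep the same derived defaults, just written as a single `format!` per path. A stand-in showing how each path composes from `data_folder` (the real project wires this up through its `make_config!` macro; the struct here is only illustrative):

```rust
struct Folders {
    data_folder: String,
}

impl Folders {
    fn database_url(&self) -> String { format!("{}/db.sqlite3", self.data_folder) }
    fn icon_cache_folder(&self) -> String { format!("{}/icon_cache", self.data_folder) }
    fn attachments_folder(&self) -> String { format!("{}/attachments", self.data_folder) }
    fn sends_folder(&self) -> String { format!("{}/sends", self.data_folder) }
}

fn main() {
    let f = Folders { data_folder: "data".to_string() };
    assert_eq!(f.database_url(), "data/db.sqlite3");
    assert_eq!(f.icon_cache_folder(), "data/icon_cache");
    assert_eq!(f.attachments_folder(), "data/attachments");
    println!("{}", f.sends_folder());
}
```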
|
||||
|
@ -582,7 +582,7 @@ make_config! {
|
|||
authenticator_disable_time_drift: bool, true, def, false;
|
||||
|
||||
/// Customize the enabled feature flags on the clients |> This is a comma separated list of feature flags to enable.
|
||||
experimental_client_feature_flags: String, false, def, "fido2-vault-credentials".to_string();
|
||||
experimental_client_feature_flags: String, false, def, String::new();
|
||||
|
||||
/// Require new device emails |> When a user logs in an email is required to be sent.
|
||||
/// If sending the email fails the login attempt will fail.
|
||||
|
@ -872,20 +872,25 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
|
|||
}
|
||||
}
|
||||
|
||||
// TODO: deal with deprecated flags so they can be removed from this list, cf. #4263
|
||||
// Server (v2025.5.0): https://github.com/bitwarden/server/blob/4a7db112a0952c6df8bacf36c317e9c4e58c3651/src/Core/Constants.cs#L102
|
||||
// Client (v2025.5.0): https://github.com/bitwarden/clients/blob/9df8a3cc50ed45f52513e62c23fcc8a4b745f078/libs/common/src/enums/feature-flag.enum.ts#L10
|
||||
// Android (v2025.4.0): https://github.com/bitwarden/android/blob/bee09de972c3870de0d54a0067996be473ec55c7/app/src/main/java/com/x8bit/bitwarden/data/platform/manager/model/FlagKey.kt#L27
|
||||
// iOS (v2025.4.0): https://github.com/bitwarden/ios/blob/956e05db67344c912e3a1b8cb2609165d67da1c9/BitwardenShared/Core/Platform/Models/Enum/FeatureFlag.swift#L7
|
||||
//
|
||||
// NOTE: Move deprecated flags to the utils::parse_experimental_client_feature_flags() DEPRECATED_FLAGS const!
|
||||
const KNOWN_FLAGS: &[&str] = &[
|
||||
"autofill-overlay",
|
||||
"autofill-v2",
|
||||
"browser-fileless-import",
|
||||
"extension-refresh",
|
||||
"fido2-vault-credentials",
|
||||
// Autofill Team
|
||||
"inline-menu-positioning-improvements",
|
||||
"ssh-key-vault-item",
|
||||
"inline-menu-totp",
|
||||
"ssh-agent",
|
||||
// Key Management Team
|
||||
"ssh-key-vault-item",
|
||||
// Tools
|
||||
"export-attachments",
|
||||
// Mobile Team
|
||||
"anon-addy-self-host-alias",
|
||||
"simple-login-self-host-alias",
|
||||
"mutual-tls",
|
||||
"export-attachments",
|
||||
];
|
||||
let configured_flags = parse_experimental_client_feature_flags(&cfg.experimental_client_feature_flags);
|
||||
let invalid_flags: Vec<_> = configured_flags.keys().filter(|flag| !KNOWN_FLAGS.contains(&flag.as_str())).collect();
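The validation above parses the configured comma-separated flag list and reports anything not in `KNOWN_FLAGS`. A sketch of that step; the real parser lives in `util.rs`, so the `parse_flags` helper below is a hypothetical stand-in and the flag list is shortened:

```rust
use std::collections::HashMap;

const KNOWN_FLAGS: &[&str] = &["autofill-overlay", "ssh-agent", "export-attachments", "mutual-tls"];

// Hypothetical stand-in for parse_experimental_client_feature_flags().
fn parse_flags(raw: &str) -> HashMap<String, bool> {
    raw.split(',')
        .map(str::trim)
        .filter(|f| !f.is_empty())
        .map(|f| (f.to_string(), true))
        .collect()
}

fn main() {
    let configured = parse_flags("ssh-agent, export-attachments, not-a-real-flag");
    let invalid: Vec<&String> = configured.keys().filter(|flag| !KNOWN_FLAGS.contains(&flag.as_str())).collect();
    if !invalid.is_empty() {
        eprintln!("unrecognized experimental client feature flags: {invalid:?}");
    }
}
```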
|
||||
|
@ -1326,7 +1331,7 @@ impl Config {
|
|||
pub fn is_email_domain_allowed(&self, email: &str) -> bool {
|
||||
let e: Vec<&str> = email.rsplitn(2, '@').collect();
|
||||
if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
|
||||
warn!("Failed to parse email address '{}'", email);
|
||||
warn!("Failed to parse email address '{email}'");
|
||||
return false;
|
||||
}
|
||||
let email_domain = e[0].to_lowercase();
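The domain check above relies on `rsplitn(2, '@')` yielding the domain first and the local part second, so `e[0]` is the piece that gets lowercased. A small standalone sketch of that parsing, returning `Option` instead of logging:

```rust
fn email_domain(email: &str) -> Option<String> {
    // rsplitn splits from the right, so the domain comes out first.
    let e: Vec<&str> = email.rsplitn(2, '@').collect();
    if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
        return None;
    }
    Some(e[0].to_lowercase())
}

fn main() {
    assert_eq!(email_domain("user@Example.COM"), Some("example.com".to_string()));
    assert_eq!(email_domain("not-an-email"), None);
    // The last '@' wins, so unusual local parts still parse.
    assert_eq!(email_domain("weird@local@domain.org"), Some("domain.org".to_string()));
}
```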
|
||||
|
@ -1494,6 +1499,7 @@ where
|
|||
reg!("email/email_footer_text");
|
||||
|
||||
reg!("email/admin_reset_password", ".html");
|
||||
reg!("email/change_email_existing", ".html");
|
||||
reg!("email/change_email", ".html");
|
||||
reg!("email/delete_account", ".html");
|
||||
reg!("email/emergency_access_invite_accepted", ".html");
|
||||
|
|
|
@ -46,7 +46,7 @@ impl Attachment {
|
|||
|
||||
pub fn get_url(&self, host: &str) -> String {
|
||||
let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
|
||||
format!("{}/attachments/{}/{}?token={}", host, self.cipher_uuid, self.id, token)
|
||||
format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id)
|
||||
}
|
||||
|
||||
pub fn to_json(&self, host: &str) -> Value {
|
||||
|
@ -117,7 +117,7 @@ impl Attachment {
|
|||
// upstream caller has already cleaned up the file as part of
|
||||
// its own error handling.
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => {
|
||||
debug!("File '{}' already deleted.", file_path);
|
||||
debug!("File '{file_path}' already deleted.");
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => Err(e.into()),
|
||||
|
|
|
@ -16,7 +16,7 @@ db_object! {
|
|||
pub organization_uuid: Option<OrganizationId>,
|
||||
|
||||
pub request_device_identifier: DeviceId,
|
||||
pub device_type: i32, // https://github.com/bitwarden/server/blob/master/src/Core/Enums/DeviceType.cs
|
||||
pub device_type: i32, // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Enums/DeviceType.cs
|
||||
|
||||
pub request_ip: String,
|
||||
pub response_device_id: Option<DeviceId>,
|
||||
|
|
|
@ -85,7 +85,7 @@ impl Cipher {
|
|||
let mut validation_errors = serde_json::Map::new();
|
||||
let max_note_size = CONFIG._max_note_size();
|
||||
let max_note_size_msg =
|
||||
format!("The field Notes exceeds the maximum encrypted value length of {} characters.", &max_note_size);
|
||||
format!("The field Notes exceeds the maximum encrypted value length of {max_note_size} characters.");
|
||||
for (index, cipher) in cipher_data.iter().enumerate() {
|
||||
// Validate the note size and if it is exceeded return a warning
|
||||
if let Some(note) = &cipher.notes {
|
||||
|
@ -318,7 +318,7 @@ impl Cipher {
|
|||
// supports the "cipherDetails" type, though it seems like the
|
||||
// Bitwarden clients will ignore extra fields.
|
||||
//
|
||||
// Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs
|
||||
// Ref: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Vault/Models/Response/CipherResponseModel.cs#L14
|
||||
let mut json_object = json!({
|
||||
"object": "cipherDetails",
|
||||
"id": self.uuid,
|
||||
|
|
|
@ -5,8 +5,11 @@ use derive_more::{Display, From};
|
|||
use serde_json::Value;
|
||||
|
||||
use super::{AuthRequest, UserId};
|
||||
use crate::{crypto, util::format_date};
|
||||
use macros::IdFromParam;
|
||||
use crate::{
|
||||
crypto,
|
||||
util::{format_date, get_uuid},
|
||||
};
|
||||
use macros::{IdFromParam, UuidFromParam};
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
|
@ -21,8 +24,8 @@ db_object! {
|
|||
pub user_uuid: UserId,
|
||||
|
||||
pub name: String,
|
||||
pub atype: i32, // https://github.com/bitwarden/server/blob/dcc199bcce4aa2d5621f6fab80f1b49d8b143418/src/Core/Enums/DeviceType.cs
|
||||
pub push_uuid: Option<String>,
|
||||
pub atype: i32, // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Enums/DeviceType.cs
|
||||
pub push_uuid: Option<PushId>,
|
||||
pub push_token: Option<String>,
|
||||
|
||||
pub refresh_token: String,
|
||||
|
@ -44,7 +47,7 @@ impl Device {
|
|||
name,
|
||||
atype,
|
||||
|
||||
push_uuid: None,
|
||||
push_uuid: Some(PushId(get_uuid())),
|
||||
push_token: None,
|
||||
refresh_token: crypto::encode_random_bytes::<64>(BASE64URL),
|
||||
twofactor_remember: None,
|
||||
|
@ -56,7 +59,7 @@ impl Device {
|
|||
"id": self.uuid,
|
||||
"name": self.name,
|
||||
"type": self.atype,
|
||||
"identifier": self.push_uuid,
|
||||
"identifier": self.uuid,
|
||||
"creationDate": format_date(&self.created_at),
|
||||
"isTrusted": false,
|
||||
"object":"device"
|
||||
|
@ -78,10 +81,6 @@ impl Device {
|
|||
matches!(DeviceType::from_i32(self.atype), DeviceType::Android | DeviceType::Ios)
|
||||
}
|
||||
|
||||
pub fn is_registered(&self) -> bool {
|
||||
self.push_uuid.is_some()
|
||||
}
|
||||
|
||||
pub fn is_cli(&self) -> bool {
|
||||
matches!(DeviceType::from_i32(self.atype), DeviceType::WindowsCLI | DeviceType::MacOsCLI | DeviceType::LinuxCLI)
|
||||
}
|
||||
|
@ -102,10 +101,12 @@ impl DeviceWithAuthRequest {
|
|||
"id": self.device.uuid,
|
||||
"name": self.device.name,
|
||||
"type": self.device.atype,
|
||||
"identifier": self.device.push_uuid,
|
||||
"identifier": self.device.uuid,
|
||||
"creationDate": format_date(&self.device.created_at),
|
||||
"devicePendingAuthRequest": auth_request,
|
||||
"isTrusted": false,
|
||||
"encryptedPublicKey": null,
|
||||
"encryptedUserKey": null,
|
||||
"object": "device",
|
||||
})
|
||||
}
|
||||
|
@ -345,3 +346,6 @@ impl DeviceType {
|
|||
Clone, Debug, DieselNewType, Display, From, FromForm, Hash, PartialEq, Eq, Serialize, Deserialize, IdFromParam,
|
||||
)]
|
||||
pub struct DeviceId(String);
|
||||
|
||||
#[derive(Clone, Debug, DieselNewType, Display, From, FromForm, Serialize, Deserialize, UuidFromParam)]
|
||||
pub struct PushId(pub String);
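The hunks above introduce the `PushId` newtype and generate a `push_uuid` at device creation instead of starting with `None`. A sketch of that change; it assumes the `uuid` crate (v4 feature) purely for illustration, and the `Device` struct is reduced to the relevant fields:

```rust
use uuid::Uuid;

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PushId(pub String);

fn get_uuid() -> String {
    Uuid::new_v4().to_string()
}

struct Device {
    uuid: String,
    push_uuid: Option<PushId>,
}

impl Device {
    fn new(uuid: String) -> Self {
        Self {
            uuid,
            // previously `push_uuid: None`
            push_uuid: Some(PushId(get_uuid())),
        }
    }
}

fn main() {
    let d = Device::new("device-uuid".to_string());
    println!("device {} has push id {:?}", d.uuid, d.push_uuid);
}
```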
|
||||
|
|
|
@ -78,6 +78,7 @@ impl EmergencyAccess {
|
|||
"grantorId": grantor_user.uuid,
|
||||
"email": grantor_user.email,
|
||||
"name": grantor_user.name,
|
||||
"avatarColor": grantor_user.avatar_color,
|
||||
"object": "emergencyAccessGrantorDetails",
|
||||
})
|
||||
}
|
||||
|
@ -106,6 +107,7 @@ impl EmergencyAccess {
|
|||
"granteeId": grantee_user.uuid,
|
||||
"email": grantee_user.email,
|
||||
"name": grantee_user.name,
|
||||
"avatarColor": grantee_user.avatar_color,
|
||||
"object": "emergencyAccessGranteeDetails",
|
||||
}))
|
||||
}
|
||||
|
|
|
@ -8,9 +8,9 @@ use crate::{api::EmptyResult, db::DbConn, error::MapResult, CONFIG};
|
|||
// https://bitwarden.com/help/event-logs/
|
||||
|
||||
db_object! {
|
||||
// Upstream: https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
|
||||
// Upstream: https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Api/Models/Public/Response/EventResponseModel.cs
|
||||
// Upstream SQL: https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Sql/dbo/Tables/Event.sql
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Public/Models/Response/EventResponseModel.cs
|
||||
// Upstream SQL: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Sql/dbo/Tables/Event.sql
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[diesel(table_name = event)]
|
||||
#[diesel(treat_none_as_null = true)]
|
||||
|
@ -25,7 +25,7 @@ db_object! {
|
|||
pub group_uuid: Option<GroupId>,
|
||||
pub org_user_uuid: Option<MembershipId>,
|
||||
pub act_user_uuid: Option<UserId>,
|
||||
// Upstream enum: https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Enums/DeviceType.cs
|
||||
// Upstream enum: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Enums/DeviceType.cs
|
||||
pub device_type: Option<i32>,
|
||||
pub ip_address: Option<String>,
|
||||
pub event_date: NaiveDateTime,
|
||||
|
@ -36,7 +36,7 @@ db_object! {
|
|||
}
|
||||
}
|
||||
|
||||
// Upstream enum: https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Enums/EventType.cs
|
||||
// Upstream enum: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Enums/EventType.cs
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum EventType {
|
||||
// User
|
||||
|
@ -72,7 +72,6 @@ pub enum EventType {
|
|||
CipherSoftDeleted = 1115,
|
||||
CipherRestored = 1116,
|
||||
CipherClientToggledCardNumberVisible = 1117,
|
||||
CipherClientToggledTOTPSeedVisible = 1118,
|
||||
|
||||
// Collection
|
||||
CollectionCreated = 1300,
|
||||
|
@ -88,7 +87,7 @@ pub enum EventType {
|
|||
OrganizationUserInvited = 1500,
|
||||
OrganizationUserConfirmed = 1501,
|
||||
OrganizationUserUpdated = 1502,
|
||||
OrganizationUserRemoved = 1503,
|
||||
OrganizationUserRemoved = 1503, // Organization user data was deleted
|
||||
OrganizationUserUpdatedGroups = 1504,
|
||||
OrganizationUserUnlinkedSso = 1505, // Not supported
|
||||
OrganizationUserResetPasswordEnroll = 1506,
|
||||
|
@ -100,8 +99,8 @@ pub enum EventType {
|
|||
OrganizationUserRestored = 1512,
|
||||
OrganizationUserApprovedAuthRequest = 1513,
|
||||
OrganizationUserRejectedAuthRequest = 1514,
|
||||
OrganizationUserDeleted = 1515,
|
||||
OrganizationUserLeft = 1516,
|
||||
OrganizationUserDeleted = 1515, // Both user and organization user data were deleted
|
||||
OrganizationUserLeft = 1516, // User voluntarily left the organization
|
||||
|
||||
// Organization
|
||||
OrganizationUpdated = 1600,
|
||||
|
@ -188,7 +187,7 @@ impl Event {
|
|||
}
|
||||
|
||||
/// Database methods
|
||||
/// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
|
||||
/// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs
|
||||
impl Event {
|
||||
pub const PAGE_SIZE: i64 = 30;
|
||||
|
||||
|
|
|
@ -68,16 +68,11 @@ impl Group {
|
|||
}
|
||||
|
||||
pub fn to_json(&self) -> Value {
|
||||
use crate::util::format_date;
|
||||
|
||||
json!({
|
||||
"id": self.uuid,
|
||||
"organizationId": self.organizations_uuid,
|
||||
"name": self.name,
|
||||
"accessAll": self.access_all,
|
||||
"externalId": self.external_id,
|
||||
"creationDate": format_date(&self.creation_date),
|
||||
"revisionDate": format_date(&self.revision_date),
|
||||
"object": "group"
|
||||
})
|
||||
}
|
||||
|
@ -298,7 +293,7 @@ impl Group {
|
|||
|
||||
pub async fn update_revision(uuid: &GroupId, conn: &mut DbConn) {
|
||||
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
||||
warn!("Failed to update revision for {}: {:#?}", uuid, e);
|
||||
warn!("Failed to update revision for {uuid}: {e:#?}");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ pub use self::attachment::{Attachment, AttachmentId};
|
|||
pub use self::auth_request::{AuthRequest, AuthRequestId};
|
||||
pub use self::cipher::{Cipher, CipherId, RepromptType};
|
||||
pub use self::collection::{Collection, CollectionCipher, CollectionId, CollectionUser};
|
||||
pub use self::device::{Device, DeviceId, DeviceType};
|
||||
pub use self::device::{Device, DeviceId, DeviceType, PushId};
|
||||
pub use self::emergency_access::{EmergencyAccess, EmergencyAccessId, EmergencyAccessStatus, EmergencyAccessType};
|
||||
pub use self::event::{Event, EventType};
|
||||
pub use self::favorite::Favorite;
|
||||
|
|
|
@ -21,7 +21,7 @@ db_object! {
|
|||
}
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/abfdf6f5cb0f1f1504dbaaaa0e04ce9cb60faf19/src/Core/AdminConsole/Enums/PolicyType.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Enums/PolicyType.cs
|
||||
#[derive(Copy, Clone, Eq, PartialEq, num_derive::FromPrimitive)]
|
||||
pub enum OrgPolicyType {
|
||||
TwoFactorAuthentication = 0,
|
||||
|
@ -41,7 +41,7 @@ pub enum OrgPolicyType {
|
|||
RemoveUnlockWithPin = 14,
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs#L5
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SendOptionsPolicyData {
|
||||
|
@ -49,7 +49,7 @@ pub struct SendOptionsPolicyData {
|
|||
pub disable_hide_email: bool,
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResetPasswordDataModel {
|
||||
|
@ -82,14 +82,25 @@ impl OrgPolicy {
|
|||
}
|
||||
|
||||
pub fn to_json(&self) -> Value {
|
||||
json!({
|
||||
let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
|
||||
let mut policy = json!({
|
||||
"id": self.uuid,
|
||||
"organizationId": self.org_uuid,
|
||||
"type": self.atype,
|
||||
"data": serde_json::from_str(&self.data).unwrap_or(Value::Null),
|
||||
"data": data_json,
|
||||
"enabled": self.enabled,
|
||||
"object": "policy",
|
||||
})
|
||||
});
|
||||
|
||||
// Upstream adds this key/value
|
||||
// Allow enabling Single Org policy when the organization has claimed domains.
|
||||
// See: (https://github.com/bitwarden/server/pull/5565)
|
||||
// We return the same to prevent possible issues
|
||||
if self.atype == 8i32 {
|
||||
policy["canToggleState"] = json!(true);
|
||||
}
|
||||
|
||||
policy
|
||||
}
|
||||
}
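The policy serialization above now builds the JSON object first and then adds `canToggleState` for the Single Organization policy type (`8`). A self-contained sketch of that flow with `serde_json`; the function signature is simplified and not the real `OrgPolicy::to_json`:

```rust
use serde_json::{json, Value};

fn policy_to_json(atype: i32, data: &str, enabled: bool) -> Value {
    // Stored policy data is a JSON string; fall back to null if it does not parse.
    let data_json: Value = serde_json::from_str(data).unwrap_or(Value::Null);
    let mut policy = json!({
        "type": atype,
        "data": data_json,
        "enabled": enabled,
        "object": "policy",
    });
    // Mirror upstream's extra key for the Single Org policy (type 8).
    if atype == 8 {
        policy["canToggleState"] = json!(true);
    }
    policy
}

fn main() {
    println!("{}", policy_to_json(8, "{\"autoEnrollEnabled\":true}", true));
    println!("{}", policy_to_json(0, "not json", false)); // falls back to null data
}
```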
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ db_object! {
|
|||
}
|
||||
}
|
||||
|
||||
// https://github.com/bitwarden/server/blob/b86a04cef9f1e1b82cf18e49fc94e017c641130c/src/Core/Enums/OrganizationUserStatusType.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Enums/OrganizationUserStatusType.cs
|
||||
#[derive(PartialEq)]
|
||||
pub enum MembershipStatus {
|
||||
Revoked = -1,
|
||||
|
@ -179,7 +179,7 @@ impl Organization {
|
|||
public_key,
|
||||
}
|
||||
}
|
||||
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/Organizations/OrganizationResponseModel.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Models/Response/Organizations/OrganizationResponseModel.cs
|
||||
pub fn to_json(&self) -> Value {
|
||||
json!({
|
||||
"id": self.uuid,
|
||||
|
@ -205,7 +205,6 @@ impl Organization {
|
|||
"useResetPassword": CONFIG.mail_enabled(),
|
||||
"allowAdminAccessToAllCollectionItems": true,
|
||||
"limitCollectionCreation": true,
|
||||
"limitCollectionCreationDeletion": true,
|
||||
"limitCollectionDeletion": true,
|
||||
|
||||
"businessName": self.name,
|
||||
|
@ -452,7 +451,7 @@ impl Membership {
|
|||
"manageScim": false // Not supported (Not AGPLv3 Licensed)
|
||||
});
|
||||
|
||||
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Models/Response/ProfileOrganizationResponseModel.cs
|
||||
json!({
|
||||
"id": self.org_uuid,
|
||||
"identifier": null, // Not supported
|
||||
|
@ -479,6 +478,8 @@ impl Membership {
|
|||
"usePasswordManager": true,
|
||||
"useCustomPermissions": true,
|
||||
"useActivateAutofillPolicy": false,
|
||||
"useAdminSponsoredFamilies": false,
|
||||
"useRiskInsights": false, // Not supported (Not AGPLv3 Licensed)
|
||||
|
||||
"organizationUserId": self.uuid,
|
||||
"providerId": null,
|
||||
|
@ -486,7 +487,6 @@ impl Membership {
|
|||
"providerType": null,
|
||||
"familySponsorshipFriendlyName": null,
|
||||
"familySponsorshipAvailable": false,
|
||||
"planProductType": 3,
|
||||
"productTierType": 3, // Enterprise tier
|
||||
"keyConnectorEnabled": false,
|
||||
"keyConnectorUrl": null,
|
||||
|
@ -495,10 +495,11 @@ impl Membership {
|
|||
"familySponsorshipToDelete": null,
|
||||
"accessSecretsManager": false,
|
||||
"limitCollectionCreation": self.atype < MembershipType::Manager, // If less then a manager return true, to limit collection creations
|
||||
"limitCollectionCreationDeletion": true,
|
||||
"limitCollectionDeletion": true,
|
||||
"limitItemDeletion": false,
|
||||
"allowAdminAccessToAllCollectionItems": true,
|
||||
"userIsManagedByOrganization": false, // Means not managed via the Members UI, like SSO
|
||||
"userIsClaimedByOrganization": false, // The new key instead of the obsolete userIsManagedByOrganization
|
||||
|
||||
"permissions": permissions,
|
||||
|
||||
|
@ -644,6 +645,8 @@ impl Membership {
|
|||
"permissions": permissions,
|
||||
|
||||
"ssoBound": false, // Not supported
|
||||
"managedByOrganization": false, // This key is obsolete replaced by claimedByOrganization
|
||||
"claimedByOrganization": false, // Means not managed via the Members UI, like SSO
|
||||
"usesKeyConnector": false, // Not supported
|
||||
"accessSecretsManager": false, // Not supported (Not AGPLv3 Licensed)
|
||||
|
||||
|
@ -904,6 +907,21 @@ impl Membership {
|
|||
}}
|
||||
}
|
||||
|
||||
// Get all users which are either owner or admin, or a manager which can manage/access all
|
||||
pub async fn find_confirmed_and_manage_all_by_org(org_uuid: &OrganizationId, conn: &mut DbConn) -> Vec<Self> {
|
||||
db_run! { conn: {
|
||||
users_organizations::table
|
||||
.filter(users_organizations::org_uuid.eq(org_uuid))
|
||||
.filter(users_organizations::status.eq(MembershipStatus::Confirmed as i32))
|
||||
.filter(
|
||||
users_organizations::atype.eq_any(vec![MembershipType::Owner as i32, MembershipType::Admin as i32])
|
||||
.or(users_organizations::atype.eq(MembershipType::Manager as i32).and(users_organizations::access_all.eq(true)))
|
||||
)
|
||||
.load::<MembershipDb>(conn)
|
||||
.unwrap_or_default().from_db()
|
||||
}}
|
||||
}
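The new query above selects confirmed members who can manage everything: Owners, Admins, or Managers with `access_all`. A plain-Rust restatement of that filter, without Diesel; the variant set and field names are illustrative:

```rust
#[derive(Clone, Copy, PartialEq)]
enum MembershipType {
    Owner,
    Admin,
    Manager,
    User,
}

struct Membership {
    atype: MembershipType,
    confirmed: bool,
    access_all: bool,
}

fn can_manage_all(m: &Membership) -> bool {
    m.confirmed
        && (matches!(m.atype, MembershipType::Owner | MembershipType::Admin)
            || (m.atype == MembershipType::Manager && m.access_all))
}

fn main() {
    let members = [
        Membership { atype: MembershipType::Owner, confirmed: true, access_all: false },
        Membership { atype: MembershipType::Manager, confirmed: true, access_all: false },
        Membership { atype: MembershipType::Manager, confirmed: true, access_all: true },
        Membership { atype: MembershipType::User, confirmed: true, access_all: false },
    ];
    let flags: Vec<bool> = members.iter().map(can_manage_all).collect();
    println!("{flags:?}"); // [true, false, true, false]
}
```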
|
||||
|
||||
pub async fn count_by_org(org_uuid: &OrganizationId, conn: &mut DbConn) -> i64 {
|
||||
db_run! { conn: {
|
||||
users_organizations::table
|
||||
|
|
|
@ -258,7 +258,6 @@ impl User {
|
|||
"emailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(),
|
||||
"premium": true,
|
||||
"premiumFromOrganization": false,
|
||||
"masterPasswordHint": self.password_hint,
|
||||
"culture": "en-US",
|
||||
"twoFactorEnabled": twofactor_enabled,
|
||||
"key": self.akey,
|
||||
|
@ -343,7 +342,7 @@ impl User {
|
|||
|
||||
pub async fn update_uuid_revision(uuid: &UserId, conn: &mut DbConn) {
|
||||
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
|
||||
warn!("Failed to update revision for {}: {:#?}", uuid, e);
|
||||
warn!("Failed to update revision for {uuid}: {e:#?}");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
30
src/error.rs
|
@ -59,6 +59,8 @@ use yubico::yubicoerror::YubicoError as YubiErr;
|
|||
#[derive(Serialize)]
|
||||
pub struct Empty {}
|
||||
|
||||
pub struct Compact {}
|
||||
|
||||
// Error struct
|
||||
// Contains a String error message, meant for the user and an enum variant, with an error of different types.
|
||||
//
|
||||
|
@ -69,6 +71,7 @@ make_error! {
|
|||
Empty(Empty): _no_source, _serialize,
|
||||
// Used to represent err! calls
|
||||
Simple(String): _no_source, _api_error,
|
||||
Compact(Compact): _no_source, _api_error_small,
|
||||
|
||||
// Used in our custom http client to handle non-global IPs and blocked domains
|
||||
CustomHttpClient(CustomHttpClientError): _has_source, _api_error,
|
||||
|
@ -132,6 +135,12 @@ impl Error {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_kind(mut self, kind: ErrorKind) -> Self {
|
||||
self.error = kind;
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub const fn with_code(mut self, code: u16) -> Self {
|
||||
self.error_code = code;
|
||||
|
@ -204,6 +213,18 @@ fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
|
|||
_serialize(&json, "")
|
||||
}
|
||||
|
||||
fn _api_error_small(_: &impl std::any::Any, msg: &str) -> String {
|
||||
let json = json!({
|
||||
"message": msg,
|
||||
"validationErrors": null,
|
||||
"exceptionMessage": null,
|
||||
"exceptionStackTrace": null,
|
||||
"innerExceptionMessage": null,
|
||||
"object": "error"
|
||||
});
|
||||
_serialize(&json, "")
|
||||
}
|
||||
|
||||
//
|
||||
// Rocket responder impl
|
||||
//
|
||||
|
@ -216,9 +237,8 @@ use rocket::response::{self, Responder, Response};
|
|||
impl Responder<'_, 'static> for Error {
|
||||
fn respond_to(self, _: &Request<'_>) -> response::Result<'static> {
|
||||
match self.error {
|
||||
ErrorKind::Empty(_) => {} // Don't print the error in this situation
|
||||
ErrorKind::Simple(_) => {} // Don't print the error in this situation
|
||||
_ => error!(target: "error", "{:#?}", self),
|
||||
ErrorKind::Empty(_) | ErrorKind::Simple(_) | ErrorKind::Compact(_) => {} // Don't print the error in this situation
|
||||
_ => error!(target: "error", "{self:#?}"),
|
||||
};
|
||||
|
||||
let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);
|
||||
|
@ -232,6 +252,10 @@ impl Responder<'_, 'static> for Error {
|
|||
//
|
||||
#[macro_export]
|
||||
macro_rules! err {
|
||||
($kind:ident, $msg:expr) => {{
|
||||
error!("{}", $msg);
|
||||
return Err($crate::error::Error::new($msg, $msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
|
||||
}};
|
||||
($msg:expr) => {{
|
||||
error!("{}", $msg);
|
||||
return Err($crate::error::Error::new($msg, $msg));
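The new `($kind:ident, $msg:expr)` arm of `err!` lets callers tag an error with a kind such as `Compact` while keeping the plain `err!("msg")` form working. A minimal sketch of that macro shape; `Error` and `ErrorKind` here are simplified stand-ins and the logging call is dropped:

```rust
#[derive(Debug)]
enum ErrorKind {
    Simple(String),
    Compact(String),
}

#[derive(Debug)]
struct Error {
    message: String,
    kind: ErrorKind,
}

impl Error {
    fn new(msg: &str) -> Self {
        Self { message: msg.to_string(), kind: ErrorKind::Simple(msg.to_string()) }
    }
    fn with_kind(mut self, kind: ErrorKind) -> Self {
        self.kind = kind;
        self
    }
}

macro_rules! err {
    // Tagged form, mirroring the new arm above.
    ($kind:ident, $msg:expr) => {{
        return Err(Error::new($msg).with_kind(ErrorKind::$kind($msg.to_string())));
    }};
    // Plain form, unchanged behavior.
    ($msg:expr) => {{
        return Err(Error::new($msg));
    }};
}

fn fails_compact() -> Result<(), Error> { err!(Compact, "not found") }
fn fails_simple() -> Result<(), Error> { err!("bad request") }

fn main() {
    println!("{:?}", fails_compact().unwrap_err());
    println!("{:?}", fails_simple().unwrap_err());
}
```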
|
||||
|
|
38
src/mail.rs
|
@ -85,7 +85,7 @@ fn smtp_transport() -> AsyncSmtpTransport<Tokio1Executor> {
|
|||
smtp_client.authentication(selected_mechanisms)
|
||||
} else {
|
||||
// Only show a warning, and return without setting an actual authentication mechanism
|
||||
warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism);
|
||||
warn!("No valid SMTP Auth mechanism found for '{mechanism}', using default values");
|
||||
smtp_client
|
||||
}
|
||||
}
|
||||
|
@ -213,7 +213,7 @@ pub async fn send_register_verify_email(email: &str, token: &str) -> EmptyResult
|
|||
"email/register_verify_email",
|
||||
json!({
|
||||
// `url.Url` would place the anchor `#` after the query parameters
|
||||
"url": format!("{}/#/finish-signup/?{}", CONFIG.domain(), query_string),
|
||||
"url": format!("{}/#/finish-signup/?{query_string}", CONFIG.domain()),
|
||||
"img_src": CONFIG._smtp_img_src(),
|
||||
"email": email,
|
||||
}),
|
||||
|
@ -318,7 +318,7 @@ pub async fn send_invite(
|
|||
"email/send_org_invite",
|
||||
json!({
|
||||
// `url.Url` would place the anchor `#` after the query parameters
|
||||
"url": format!("{}/#/accept-organization/?{}", CONFIG.domain(), query_string),
|
||||
"url": format!("{}/#/accept-organization/?{query_string}", CONFIG.domain()),
|
||||
"img_src": CONFIG._smtp_img_src(),
|
||||
"org_name": org_name,
|
||||
}),
|
||||
|
@ -574,6 +574,20 @@ pub async fn send_change_email(address: &str, token: &str) -> EmptyResult {
|
|||
send_email(address, &subject, body_html, body_text).await
|
||||
}
|
||||
|
||||
pub async fn send_change_email_existing(address: &str, acting_address: &str) -> EmptyResult {
|
||||
let (subject, body_html, body_text) = get_text(
|
||||
"email/change_email_existing",
|
||||
json!({
|
||||
"url": CONFIG.domain(),
|
||||
"img_src": CONFIG._smtp_img_src(),
|
||||
"existing_address": address,
|
||||
"acting_address": acting_address,
|
||||
}),
|
||||
)?;
|
||||
|
||||
send_email(address, &subject, body_html, body_text).await
|
||||
}
|
||||
|
||||
pub async fn send_sso_change_email(address: &str) -> EmptyResult {
|
||||
let (subject, body_html, body_text) = get_text(
|
||||
"email/sso_change_email",
|
||||
|
@ -631,13 +645,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
|
|||
// Match some common errors and make them more user friendly
|
||||
Err(e) => {
|
||||
if e.is_client() {
|
||||
debug!("Sendmail client error: {:?}", e);
|
||||
debug!("Sendmail client error: {e:?}");
|
||||
err!(format!("Sendmail client error: {e}"));
|
||||
} else if e.is_response() {
|
||||
debug!("Sendmail response error: {:?}", e);
|
||||
debug!("Sendmail response error: {e:?}");
|
||||
err!(format!("Sendmail response error: {e}"));
|
||||
} else {
|
||||
debug!("Sendmail error: {:?}", e);
|
||||
debug!("Sendmail error: {e:?}");
|
||||
err!(format!("Sendmail error: {e}"));
|
||||
}
|
||||
}
|
||||
|
@ -648,13 +662,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
|
|||
// Match some common errors and make them more user friendly
|
||||
Err(e) => {
|
||||
if e.is_client() {
|
||||
debug!("SMTP client error: {:#?}", e);
|
||||
debug!("SMTP client error: {e:#?}");
|
||||
err!(format!("SMTP client error: {e}"));
|
||||
} else if e.is_transient() {
|
||||
debug!("SMTP 4xx error: {:#?}", e);
|
||||
debug!("SMTP 4xx error: {e:#?}");
|
||||
err!(format!("SMTP 4xx error: {e}"));
|
||||
} else if e.is_permanent() {
|
||||
debug!("SMTP 5xx error: {:#?}", e);
|
||||
debug!("SMTP 5xx error: {e:#?}");
|
||||
let mut msg = e.to_string();
|
||||
// Add a special check for 535 to add a more descriptive message
|
||||
if msg.contains("(535)") {
|
||||
|
@ -662,13 +676,13 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
|
|||
}
|
||||
err!(format!("SMTP 5xx error: {msg}"));
|
||||
} else if e.is_timeout() {
|
||||
debug!("SMTP timeout error: {:#?}", e);
|
||||
debug!("SMTP timeout error: {e:#?}");
|
||||
err!(format!("SMTP timeout error: {e}"));
|
||||
} else if e.is_tls() {
|
||||
debug!("SMTP encryption error: {:#?}", e);
|
||||
debug!("SMTP encryption error: {e:#?}");
|
||||
err!(format!("SMTP encryption error: {e}"));
|
||||
} else {
|
||||
debug!("SMTP error: {:#?}", e);
|
||||
debug!("SMTP error: {e:#?}");
|
||||
err!(format!("SMTP error: {e}"));
|
||||
}
|
||||
}
|
||||
|
|
13
src/main.rs
|
@ -431,10 +431,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
|
|||
}
|
||||
None => error!(
|
||||
target: "panic",
|
||||
"thread '{}' panicked at '{}'\n{:}",
|
||||
thread,
|
||||
msg,
|
||||
backtrace
|
||||
"thread '{thread}' panicked at '{msg}'\n{backtrace:}"
|
||||
),
|
||||
}
|
||||
}));
|
||||
|
@ -454,7 +451,7 @@ fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
|
|||
match syslog::unix(syslog_fmt) {
|
||||
Ok(sl) => logger.chain(sl),
|
||||
Err(e) => {
|
||||
error!("Unable to connect to syslog: {:?}", e);
|
||||
error!("Unable to connect to syslog: {e:?}");
|
||||
logger
|
||||
}
|
||||
}
|
||||
|
@ -470,7 +467,7 @@ async fn check_data_folder() {
|
|||
let data_folder = &CONFIG.data_folder();
|
||||
let path = Path::new(data_folder);
|
||||
if !path.exists() {
|
||||
error!("Data folder '{}' doesn't exist.", data_folder);
|
||||
error!("Data folder '{data_folder}' doesn't exist.");
|
||||
if is_running_in_container() {
|
||||
error!("Verify that your data volume is mounted at the correct location.");
|
||||
} else {
|
||||
|
@ -479,7 +476,7 @@ async fn check_data_folder() {
|
|||
exit(1);
|
||||
}
|
||||
if !path.is_dir() {
|
||||
error!("Data folder '{}' is not a directory.", data_folder);
|
||||
error!("Data folder '{data_folder}' is not a directory.");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
|
@ -553,7 +550,7 @@ async fn create_db_pool() -> db::DbPool {
|
|||
match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()).await {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
error!("Error creating database pool: {:?}", e);
|
||||
error!("Error creating database pool: {e:?}");
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
|
60
src/sso.rs
|
@ -316,14 +316,14 @@ pub async fn authorize_url(
|
|||
let verifier = if CONFIG.sso_pkce() {
|
||||
let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
|
||||
auth_req = auth_req.set_pkce_challenge(pkce_challenge);
|
||||
Some(pkce_verifier.secret().to_string())
|
||||
Some(pkce_verifier.into_secret())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (auth_url, _, nonce) = auth_req.url();
|
||||
|
||||
let sso_nonce = SsoNonce::new(state, nonce.secret().to_string(), verifier, redirect_uri);
|
||||
let sso_nonce = SsoNonce::new(state, nonce.secret().clone(), verifier, redirect_uri);
|
||||
sso_nonce.save(&mut conn).await?;
|
||||
|
||||
Ok(auth_url)
|
||||
|
@ -448,7 +448,7 @@ pub async fn exchange_code(wrapped_code: &str, conn: &mut DbConn) -> ApiResult<U
|
|||
if CONFIG.sso_debug_tokens() {
|
||||
debug!("Id token: {}", id_token.to_string());
|
||||
debug!("Access token: {}", token_response.access_token().secret());
|
||||
debug!("Refresh token: {:?}", token_response.refresh_token().map(|t| t.secret().to_string()));
|
||||
debug!("Refresh token: {:?}", token_response.refresh_token().map(|t| t.secret()));
|
||||
debug!("Expiration time: {:?}", token_response.expires_in());
|
||||
}
|
||||
|
||||
|
@ -473,7 +473,7 @@ pub async fn exchange_code(wrapped_code: &str, conn: &mut DbConn) -> ApiResult<U
|
|||
|
||||
let user_name = user_info.preferred_username().map(|un| un.to_string());
|
||||
|
||||
let refresh_token = token_response.refresh_token().map(|t| t.secret().to_string());
|
||||
let refresh_token = token_response.refresh_token().map(|t| t.secret());
|
||||
if refresh_token.is_none() && CONFIG.sso_scopes_vec().contains(&"offline_access".to_string()) {
|
||||
error!("Scope offline_access is present but response contain no refresh_token");
|
||||
}
|
||||
|
@ -481,8 +481,8 @@ pub async fn exchange_code(wrapped_code: &str, conn: &mut DbConn) -> ApiResult<U
|
|||
let identifier = OIDCIdentifier::new(id_claims.issuer(), id_claims.subject());
|
||||
|
||||
let authenticated_user = AuthenticatedUser {
|
||||
refresh_token,
|
||||
access_token: token_response.access_token().secret().to_string(),
|
||||
refresh_token: refresh_token.cloned(),
|
||||
access_token: token_response.access_token().secret().clone(),
|
||||
expires_in: token_response.expires_in(),
|
||||
identifier: identifier.clone(),
|
||||
email: email.clone(),
|
||||
|
@ -523,24 +523,26 @@ pub async fn redeem(state: &OIDCState, conn: &mut DbConn) -> ApiResult<Authentic
|
|||
pub fn create_auth_tokens(
|
||||
device: &Device,
|
||||
user: &User,
|
||||
client_id: Option<String>,
|
||||
refresh_token: Option<String>,
|
||||
access_token: &str,
|
||||
access_token: String,
|
||||
expires_in: Option<Duration>,
|
||||
) -> ApiResult<AuthTokens> {
|
||||
if !CONFIG.sso_auth_only_not_session() {
|
||||
let now = Utc::now();
|
||||
|
||||
let (ap_nbf, ap_exp) = match (decode_token_claims("access_token", access_token), expires_in) {
|
||||
let (ap_nbf, ap_exp) = match (decode_token_claims("access_token", &access_token), expires_in) {
|
||||
(Ok(ap), _) => (ap.nbf(), ap.exp),
|
||||
(Err(_), Some(exp)) => (now.timestamp(), (now + exp).timestamp()),
|
||||
_ => err!("Non jwt access_token and empty expires_in"),
|
||||
};
|
||||
|
||||
let access_claims = auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, AuthMethod::Sso.scope_vec(), now);
|
||||
let access_claims =
|
||||
auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, AuthMethod::Sso.scope_vec(), client_id, now);
|
||||
|
||||
_create_auth_tokens(device, refresh_token, access_claims, access_token)
|
||||
} else {
|
||||
Ok(AuthTokens::new(device, user, AuthMethod::Sso))
|
||||
Ok(AuthTokens::new(device, user, AuthMethod::Sso, client_id))
|
||||
}
|
||||
}
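Part of this SSO hunk changes `access_token` from `&str` to an owned `String`, so the token can later be moved into the token wrapper instead of being re-allocated with `to_string()`. A small sketch of that ownership flow; the enum mirrors the `TokenWrapper` used further down but is otherwise standalone:

```rust
enum TokenWrapper {
    Access(String),
    Refresh(String),
}

fn wrap_token(refresh_token: Option<String>, access_token: String) -> TokenWrapper {
    match refresh_token {
        Some(rt) => TokenWrapper::Refresh(rt),      // moved, no extra copy
        None => TokenWrapper::Access(access_token), // moved, no extra copy
    }
}

fn main() {
    let wrapped = wrap_token(None, "eyJhbGciOiJSUzI1NiJ9.example".to_string());
    match wrapped {
        TokenWrapper::Access(t) => println!("kept access token ({} bytes)", t.len()),
        TokenWrapper::Refresh(t) => println!("kept refresh token ({} bytes)", t.len()),
    }
}
```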
|
||||
|
||||
|
@ -548,24 +550,24 @@ fn _create_auth_tokens(
|
|||
device: &Device,
|
||||
refresh_token: Option<String>,
|
||||
access_claims: auth::LoginJwtClaims,
|
||||
access_token: &str,
|
||||
access_token: String,
|
||||
) -> ApiResult<AuthTokens> {
|
||||
let (nbf, exp, token) = if let Some(rt) = refresh_token.as_ref() {
|
||||
match decode_token_claims("refresh_token", rt) {
|
||||
let (nbf, exp, token) = if let Some(rt) = refresh_token {
|
||||
match decode_token_claims("refresh_token", &rt) {
|
||||
Err(_) => {
|
||||
let time_now = Utc::now();
|
||||
let exp = (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp();
|
||||
debug!("Non jwt refresh_token (expiration set to {})", exp);
|
||||
(time_now.timestamp(), exp, TokenWrapper::Refresh(rt.to_string()))
|
||||
(time_now.timestamp(), exp, TokenWrapper::Refresh(rt))
|
||||
}
|
||||
Ok(refresh_payload) => {
|
||||
debug!("Refresh_payload: {:?}", refresh_payload);
|
||||
(refresh_payload.nbf(), refresh_payload.exp, TokenWrapper::Refresh(rt.to_string()))
|
||||
(refresh_payload.nbf(), refresh_payload.exp, TokenWrapper::Refresh(rt))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
debug!("No refresh_token present");
|
||||
(access_claims.nbf, access_claims.exp, TokenWrapper::Access(access_token.to_string()))
|
||||
(access_claims.nbf, access_claims.exp, TokenWrapper::Access(access_token))
|
||||
};
|
||||
|
||||
let refresh_claims = auth::RefreshJwtClaims {
|
||||
|
@ -589,11 +591,13 @@ fn _create_auth_tokens(
|
|||
pub async fn exchange_refresh_token(
|
||||
device: &Device,
|
||||
user: &User,
|
||||
refresh_claims: &auth::RefreshJwtClaims,
|
||||
client_id: Option<String>,
|
||||
refresh_claims: auth::RefreshJwtClaims,
|
||||
) -> ApiResult<AuthTokens> {
|
||||
match &refresh_claims.token {
|
||||
let exp = refresh_claims.exp;
|
||||
match refresh_claims.token {
|
||||
Some(TokenWrapper::Refresh(refresh_token)) => {
|
||||
let rt = RefreshToken::new(refresh_token.to_string());
|
||||
let rt = RefreshToken::new(refresh_token);
|
||||
|
||||
let client = Client::cached().await?;
|
||||
|
||||
|
@ -604,16 +608,15 @@ pub async fn exchange_refresh_token(
|
|||
};
|
||||
|
||||
// Use new refresh_token if returned
|
||||
let rolled_refresh_token = token_response
|
||||
.refresh_token()
|
||||
.map(|token| token.secret().to_string())
|
||||
.unwrap_or(refresh_token.to_string());
|
||||
let rolled_refresh_token =
|
||||
token_response.refresh_token().map(|token| token.secret()).unwrap_or(rt.secret());
|
||||
|
||||
create_auth_tokens(
|
||||
device,
|
||||
user,
|
||||
Some(rolled_refresh_token),
|
||||
token_response.access_token().secret(),
|
||||
client_id,
|
||||
Some(rolled_refresh_token.clone()),
|
||||
token_response.access_token().secret().clone(),
|
||||
token_response.expires_in(),
|
||||
)
|
||||
}
|
||||
|
@ -621,12 +624,12 @@ pub async fn exchange_refresh_token(
|
|||
let now = Utc::now();
|
||||
let exp_limit = (now + *BW_EXPIRATION).timestamp();
|
||||
|
||||
if refresh_claims.exp < exp_limit {
|
||||
if exp < exp_limit {
|
||||
err_silent!("Access token is close to expiration but we have no refresh token")
|
||||
}
|
||||
|
||||
let client = Client::cached().await?;
|
||||
match client.user_info(AccessToken::new(access_token.to_string())).await {
|
||||
match client.user_info(AccessToken::new(access_token.clone())).await {
|
||||
Err(err) => {
|
||||
err_silent!(format!("Failed to retrieve user info, token has probably been invalidated: {err}"))
|
||||
}
|
||||
|
@ -635,8 +638,9 @@ pub async fn exchange_refresh_token(
|
|||
device,
|
||||
user,
|
||||
now.timestamp(),
|
||||
refresh_claims.exp,
|
||||
exp,
|
||||
AuthMethod::Sso.scope_vec(),
|
||||
client_id,
|
||||
now,
|
||||
);
|
||||
_create_auth_tokens(device, None, access_claims, access_token)
|
||||
|
|
17
src/static/scripts/admin_diagnostics.js
vendored
|
@ -29,7 +29,7 @@ function isValidIp(ip) {
|
|||
return ipv4Regex.test(ip) || ipv6Regex.test(ip);
|
||||
}
|
||||
|
||||
function checkVersions(platform, installed, latest, commit=null) {
|
||||
function checkVersions(platform, installed, latest, commit=null, pre_release=false) {
|
||||
if (installed === "-" || latest === "-") {
|
||||
document.getElementById(`${platform}-failed`).classList.remove("d-none");
|
||||
return;
|
||||
|
@ -37,10 +37,12 @@ function checkVersions(platform, installed, latest, commit=null) {
|
|||
|
||||
// Only check basic versions, no commit revisions
|
||||
if (commit === null || installed.indexOf("-") === -1) {
|
||||
if (installed !== latest) {
|
||||
document.getElementById(`${platform}-warning`).classList.remove("d-none");
|
||||
} else {
|
||||
if (platform === "web" && pre_release === true) {
|
||||
document.getElementById(`${platform}-prerelease`).classList.remove("d-none");
|
||||
} else if (installed == latest) {
|
||||
document.getElementById(`${platform}-success`).classList.remove("d-none");
|
||||
} else {
|
||||
document.getElementById(`${platform}-warning`).classList.remove("d-none");
|
||||
}
|
||||
} else {
|
||||
// Check if this is a branched version.
|
||||
|
@ -86,7 +88,7 @@ async function generateSupportString(event, dj) {
|
|||
supportString += `* Running within a container: ${dj.running_within_container} (Base: ${dj.container_base_image})\n`;
|
||||
supportString += `* Database type: ${dj.db_type}\n`;
|
||||
supportString += `* Database version: ${dj.db_version}\n`;
|
||||
supportString += `* Environment settings overridden!: ${dj.overrides !== ""}\n`;
|
||||
supportString += `* Uses config.json: ${dj.overrides !== ""}\n`;
|
||||
supportString += `* Uses a reverse proxy: ${dj.ip_header_exists}\n`;
|
||||
if (dj.ip_header_exists) {
|
||||
supportString += `* IP Header check: ${dj.ip_header_match} (${dj.ip_header_name})\n`;
|
||||
|
@ -94,6 +96,9 @@ async function generateSupportString(event, dj) {
|
|||
supportString += `* Internet access: ${dj.has_http_access}\n`;
|
||||
supportString += `* Internet access via a proxy: ${dj.uses_proxy}\n`;
|
||||
supportString += `* DNS Check: ${dnsCheck}\n`;
|
||||
if (dj.tz_env !== "") {
|
||||
supportString += `* TZ environment: ${dj.tz_env}\n`;
|
||||
}
|
||||
supportString += `* Browser/Server Time Check: ${timeCheck}\n`;
|
||||
supportString += `* Server/NTP Time Check: ${ntpTimeCheck}\n`;
|
||||
supportString += `* Domain Configuration Check: ${domainCheck}\n`;
|
||||
|
@ -206,7 +211,7 @@ function initVersionCheck(dj) {
|
|||
if (!dj.running_within_container) {
|
||||
const webInstalled = dj.web_vault_version;
|
||||
const webLatest = dj.latest_web_build;
|
||||
checkVersions("web", webInstalled, webLatest);
|
||||
checkVersions("web", webInstalled, webLatest, null, dj.web_vault_pre_release);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
14
src/static/scripts/bootstrap.bundle.js
vendored
|
@ -1,5 +1,5 @@
|
|||
/*!
|
||||
* Bootstrap v5.3.4 (https://getbootstrap.com/)
|
||||
* Bootstrap v5.3.6 (https://getbootstrap.com/)
|
||||
* Copyright 2011-2025 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
|
||||
*/
|
||||
|
@ -647,7 +647,7 @@
|
|||
* Constants
|
||||
*/
|
||||
|
||||
const VERSION = '5.3.4';
|
||||
const VERSION = '5.3.6';
|
||||
|
||||
/**
|
||||
* Class definition
|
||||
|
@ -673,6 +673,8 @@
|
|||
this[propertyName] = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Private
|
||||
_queueCallback(callback, element, isAnimated = true) {
|
||||
executeAfterTransition(callback, element, isAnimated);
|
||||
}
|
||||
|
@ -1604,11 +1606,11 @@
|
|||
this._element.style[dimension] = '';
|
||||
this._queueCallback(complete, this._element, true);
|
||||
}
|
||||
|
||||
// Private
|
||||
_isShown(element = this._element) {
|
||||
return element.classList.contains(CLASS_NAME_SHOW$7);
|
||||
}
|
||||
|
||||
// Private
|
||||
_configAfterMerge(config) {
|
||||
config.toggle = Boolean(config.toggle); // Coerce string values
|
||||
config.parent = getElement(config.parent);
|
||||
|
@ -3688,6 +3690,9 @@
|
|||
this._element.setAttribute('aria-expanded', 'false');
|
||||
Manipulator.removeDataAttribute(this._menu, 'popper');
|
||||
EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);
|
||||
|
||||
// Explicitly return focus to the trigger element
|
||||
this._element.focus();
|
||||
}
|
||||
_getConfig(config) {
|
||||
config = super._getConfig(config);
|
||||
|
@ -6209,7 +6214,6 @@
|
|||
}
|
||||
|
||||
// Private
|
||||
|
||||
_maybeScheduleHide() {
|
||||
if (!this._config.autohide) {
|
||||
return;
|
||||
|
|
45  src/static/scripts/bootstrap.css (vendored)
|
@ -1,6 +1,6 @@
|
|||
@charset "UTF-8";
|
||||
/*!
|
||||
* Bootstrap v5.3.4 (https://getbootstrap.com/)
|
||||
* Bootstrap v5.3.6 (https://getbootstrap.com/)
|
||||
* Copyright 2011-2025 The Bootstrap Authors
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
|
||||
*/
|
||||
|
@ -2156,10 +2156,6 @@ progress {
|
|||
display: block;
|
||||
padding: 0;
|
||||
}
|
||||
.form-control::-moz-placeholder {
|
||||
color: var(--bs-secondary-color);
|
||||
opacity: 1;
|
||||
}
|
||||
.form-control::placeholder {
|
||||
color: var(--bs-secondary-color);
|
||||
opacity: 1;
|
||||
|
@ -2629,17 +2625,10 @@ textarea.form-control-lg {
|
|||
.form-floating > .form-control-plaintext {
|
||||
padding: 1rem 0.75rem;
|
||||
}
|
||||
.form-floating > .form-control::-moz-placeholder, .form-floating > .form-control-plaintext::-moz-placeholder {
|
||||
color: transparent;
|
||||
}
|
||||
.form-floating > .form-control::placeholder,
|
||||
.form-floating > .form-control-plaintext::placeholder {
|
||||
color: transparent;
|
||||
}
|
||||
.form-floating > .form-control:not(:-moz-placeholder), .form-floating > .form-control-plaintext:not(:-moz-placeholder) {
|
||||
padding-top: 1.625rem;
|
||||
padding-bottom: 0.625rem;
|
||||
}
|
||||
.form-floating > .form-control:focus, .form-floating > .form-control:not(:placeholder-shown),
|
||||
.form-floating > .form-control-plaintext:focus,
|
||||
.form-floating > .form-control-plaintext:not(:placeholder-shown) {
|
||||
|
@ -2656,9 +2645,6 @@ textarea.form-control-lg {
|
|||
padding-bottom: 0.625rem;
|
||||
padding-left: 0.75rem;
|
||||
}
|
||||
.form-floating > .form-control:not(:-moz-placeholder) ~ label {
|
||||
transform: scale(0.85) translateY(-0.5rem) translateX(0.15rem);
|
||||
}
|
||||
.form-floating > .form-control:focus ~ label,
|
||||
.form-floating > .form-control:not(:placeholder-shown) ~ label,
|
||||
.form-floating > .form-control-plaintext ~ label,
|
||||
|
@ -2668,15 +2654,6 @@ textarea.form-control-lg {
|
|||
.form-floating > .form-control:-webkit-autofill ~ label {
|
||||
transform: scale(0.85) translateY(-0.5rem) translateX(0.15rem);
|
||||
}
|
||||
.form-floating > textarea:not(:-moz-placeholder) ~ label::after {
|
||||
position: absolute;
|
||||
inset: 1rem 0.375rem;
|
||||
z-index: -1;
|
||||
height: 1.5em;
|
||||
content: "";
|
||||
background-color: var(--bs-body-bg);
|
||||
border-radius: var(--bs-border-radius);
|
||||
}
|
||||
.form-floating > textarea:focus ~ label::after,
|
||||
.form-floating > textarea:not(:placeholder-shown) ~ label::after {
|
||||
position: absolute;
|
||||
|
@ -4540,24 +4517,24 @@ textarea.form-control-lg {
|
|||
border-top-right-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
.card-group > .card:not(:last-child) .card-img-top,
|
||||
.card-group > .card:not(:last-child) .card-header {
|
||||
.card-group > .card:not(:last-child) > .card-img-top,
|
||||
.card-group > .card:not(:last-child) > .card-header {
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
.card-group > .card:not(:last-child) .card-img-bottom,
|
||||
.card-group > .card:not(:last-child) .card-footer {
|
||||
.card-group > .card:not(:last-child) > .card-img-bottom,
|
||||
.card-group > .card:not(:last-child) > .card-footer {
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
.card-group > .card:not(:first-child) {
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
.card-group > .card:not(:first-child) .card-img-top,
|
||||
.card-group > .card:not(:first-child) .card-header {
|
||||
.card-group > .card:not(:first-child) > .card-img-top,
|
||||
.card-group > .card:not(:first-child) > .card-header {
|
||||
border-top-left-radius: 0;
|
||||
}
|
||||
.card-group > .card:not(:first-child) .card-img-bottom,
|
||||
.card-group > .card:not(:first-child) .card-footer {
|
||||
.card-group > .card:not(:first-child) > .card-img-bottom,
|
||||
.card-group > .card:not(:first-child) > .card-footer {
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
}
|
||||
|
@ -7179,6 +7156,10 @@ textarea.form-control-lg {
|
|||
.visually-hidden-focusable:not(:focus):not(:focus-within):not(caption) {
|
||||
position: absolute !important;
|
||||
}
|
||||
.visually-hidden *,
|
||||
.visually-hidden-focusable:not(:focus):not(:focus-within) * {
|
||||
overflow: hidden !important;
|
||||
}
|
||||
|
||||
.stretched-link::after {
|
||||
position: absolute;
|
||||
|
|
121  src/static/scripts/datatables.css (vendored)
|
@ -4,10 +4,10 @@
|
|||
*
|
||||
* To rebuild or modify this file with the latest versions of the included
|
||||
* software please visit:
|
||||
* https://datatables.net/download/#bs5/dt-2.2.2
|
||||
* https://datatables.net/download/#bs5/dt-2.3.1
|
||||
*
|
||||
* Included libraries:
|
||||
* DataTables 2.2.2
|
||||
* DataTables 2.3.1
|
||||
*/
|
||||
|
||||
:root {
|
||||
|
@ -104,24 +104,14 @@ table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order:after {
|
|||
content: "\25BC";
|
||||
content: "\25BC"/"";
|
||||
}
|
||||
table.dataTable thead > tr > th.dt-orderable-asc, table.dataTable thead > tr > th.dt-orderable-desc, table.dataTable thead > tr > th.dt-ordering-asc, table.dataTable thead > tr > th.dt-ordering-desc,
|
||||
table.dataTable thead > tr > td.dt-orderable-asc,
|
||||
table.dataTable thead > tr > td.dt-orderable-desc,
|
||||
table.dataTable thead > tr > td.dt-ordering-asc,
|
||||
table.dataTable thead > tr > td.dt-ordering-desc {
|
||||
position: relative;
|
||||
padding-right: 30px;
|
||||
}
|
||||
table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order,
|
||||
table.dataTable thead > tr > td.dt-orderable-asc span.dt-column-order,
|
||||
table.dataTable thead > tr > td.dt-orderable-desc span.dt-column-order,
|
||||
table.dataTable thead > tr > td.dt-ordering-asc span.dt-column-order,
|
||||
table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order {
|
||||
position: absolute;
|
||||
right: 12px;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
position: relative;
|
||||
width: 12px;
|
||||
height: 20px;
|
||||
}
|
||||
table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:after,
|
||||
table.dataTable thead > tr > td.dt-orderable-asc span.dt-column-order:before,
|
||||
|
@ -163,6 +153,40 @@ table.dataTable thead > tr > td:active {
|
|||
outline: none;
|
||||
}
|
||||
|
||||
table.dataTable thead > tr > th div.dt-column-header,
|
||||
table.dataTable thead > tr > th div.dt-column-footer,
|
||||
table.dataTable thead > tr > td div.dt-column-header,
|
||||
table.dataTable thead > tr > td div.dt-column-footer,
|
||||
table.dataTable tfoot > tr > th div.dt-column-header,
|
||||
table.dataTable tfoot > tr > th div.dt-column-footer,
|
||||
table.dataTable tfoot > tr > td div.dt-column-header,
|
||||
table.dataTable tfoot > tr > td div.dt-column-footer {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
table.dataTable thead > tr > th div.dt-column-header span.dt-column-title,
|
||||
table.dataTable thead > tr > th div.dt-column-footer span.dt-column-title,
|
||||
table.dataTable thead > tr > td div.dt-column-header span.dt-column-title,
|
||||
table.dataTable thead > tr > td div.dt-column-footer span.dt-column-title,
|
||||
table.dataTable tfoot > tr > th div.dt-column-header span.dt-column-title,
|
||||
table.dataTable tfoot > tr > th div.dt-column-footer span.dt-column-title,
|
||||
table.dataTable tfoot > tr > td div.dt-column-header span.dt-column-title,
|
||||
table.dataTable tfoot > tr > td div.dt-column-footer span.dt-column-title {
|
||||
flex-grow: 1;
|
||||
}
|
||||
table.dataTable thead > tr > th div.dt-column-header span.dt-column-title:empty,
|
||||
table.dataTable thead > tr > th div.dt-column-footer span.dt-column-title:empty,
|
||||
table.dataTable thead > tr > td div.dt-column-header span.dt-column-title:empty,
|
||||
table.dataTable thead > tr > td div.dt-column-footer span.dt-column-title:empty,
|
||||
table.dataTable tfoot > tr > th div.dt-column-header span.dt-column-title:empty,
|
||||
table.dataTable tfoot > tr > th div.dt-column-footer span.dt-column-title:empty,
|
||||
table.dataTable tfoot > tr > td div.dt-column-header span.dt-column-title:empty,
|
||||
table.dataTable tfoot > tr > td div.dt-column-footer span.dt-column-title:empty {
|
||||
display: none;
|
||||
}
|
||||
|
||||
div.dt-scroll-body > table.dataTable > thead > tr > th,
|
||||
div.dt-scroll-body > table.dataTable > thead > tr > td {
|
||||
overflow: hidden;
|
||||
|
@ -258,10 +282,25 @@ table.dataTable td.dt-type-numeric,
|
|||
table.dataTable td.dt-type-date {
|
||||
text-align: right;
|
||||
}
|
||||
table.dataTable th.dt-type-numeric div.dt-column-header,
|
||||
table.dataTable th.dt-type-numeric div.dt-column-footer, table.dataTable th.dt-type-date div.dt-column-header,
|
||||
table.dataTable th.dt-type-date div.dt-column-footer,
|
||||
table.dataTable td.dt-type-numeric div.dt-column-header,
|
||||
table.dataTable td.dt-type-numeric div.dt-column-footer,
|
||||
table.dataTable td.dt-type-date div.dt-column-header,
|
||||
table.dataTable td.dt-type-date div.dt-column-footer {
|
||||
flex-direction: row-reverse;
|
||||
}
|
||||
table.dataTable th.dt-left,
|
||||
table.dataTable td.dt-left {
|
||||
text-align: left;
|
||||
}
|
||||
table.dataTable th.dt-left div.dt-column-header,
|
||||
table.dataTable th.dt-left div.dt-column-footer,
|
||||
table.dataTable td.dt-left div.dt-column-header,
|
||||
table.dataTable td.dt-left div.dt-column-footer {
|
||||
flex-direction: row;
|
||||
}
|
||||
table.dataTable th.dt-center,
|
||||
table.dataTable td.dt-center {
|
||||
text-align: center;
|
||||
|
@ -270,10 +309,22 @@ table.dataTable th.dt-right,
|
|||
table.dataTable td.dt-right {
|
||||
text-align: right;
|
||||
}
|
||||
table.dataTable th.dt-right div.dt-column-header,
|
||||
table.dataTable th.dt-right div.dt-column-footer,
|
||||
table.dataTable td.dt-right div.dt-column-header,
|
||||
table.dataTable td.dt-right div.dt-column-footer {
|
||||
flex-direction: row-reverse;
|
||||
}
|
||||
table.dataTable th.dt-justify,
|
||||
table.dataTable td.dt-justify {
|
||||
text-align: justify;
|
||||
}
|
||||
table.dataTable th.dt-justify div.dt-column-header,
|
||||
table.dataTable th.dt-justify div.dt-column-footer,
|
||||
table.dataTable td.dt-justify div.dt-column-header,
|
||||
table.dataTable td.dt-justify div.dt-column-footer {
|
||||
flex-direction: row;
|
||||
}
|
||||
table.dataTable th.dt-nowrap,
|
||||
table.dataTable td.dt-nowrap {
|
||||
white-space: nowrap;
|
||||
|
@ -295,6 +346,16 @@ table.dataTable tfoot th.dt-head-left,
|
|||
table.dataTable tfoot td.dt-head-left {
|
||||
text-align: left;
|
||||
}
|
||||
table.dataTable thead th.dt-head-left div.dt-column-header,
|
||||
table.dataTable thead th.dt-head-left div.dt-column-footer,
|
||||
table.dataTable thead td.dt-head-left div.dt-column-header,
|
||||
table.dataTable thead td.dt-head-left div.dt-column-footer,
|
||||
table.dataTable tfoot th.dt-head-left div.dt-column-header,
|
||||
table.dataTable tfoot th.dt-head-left div.dt-column-footer,
|
||||
table.dataTable tfoot td.dt-head-left div.dt-column-header,
|
||||
table.dataTable tfoot td.dt-head-left div.dt-column-footer {
|
||||
flex-direction: row;
|
||||
}
|
||||
table.dataTable thead th.dt-head-center,
|
||||
table.dataTable thead td.dt-head-center,
|
||||
table.dataTable tfoot th.dt-head-center,
|
||||
|
@ -307,12 +368,32 @@ table.dataTable tfoot th.dt-head-right,
|
|||
table.dataTable tfoot td.dt-head-right {
|
||||
text-align: right;
|
||||
}
|
||||
table.dataTable thead th.dt-head-right div.dt-column-header,
|
||||
table.dataTable thead th.dt-head-right div.dt-column-footer,
|
||||
table.dataTable thead td.dt-head-right div.dt-column-header,
|
||||
table.dataTable thead td.dt-head-right div.dt-column-footer,
|
||||
table.dataTable tfoot th.dt-head-right div.dt-column-header,
|
||||
table.dataTable tfoot th.dt-head-right div.dt-column-footer,
|
||||
table.dataTable tfoot td.dt-head-right div.dt-column-header,
|
||||
table.dataTable tfoot td.dt-head-right div.dt-column-footer {
|
||||
flex-direction: row-reverse;
|
||||
}
|
||||
table.dataTable thead th.dt-head-justify,
|
||||
table.dataTable thead td.dt-head-justify,
|
||||
table.dataTable tfoot th.dt-head-justify,
|
||||
table.dataTable tfoot td.dt-head-justify {
|
||||
text-align: justify;
|
||||
}
|
||||
table.dataTable thead th.dt-head-justify div.dt-column-header,
|
||||
table.dataTable thead th.dt-head-justify div.dt-column-footer,
|
||||
table.dataTable thead td.dt-head-justify div.dt-column-header,
|
||||
table.dataTable thead td.dt-head-justify div.dt-column-footer,
|
||||
table.dataTable tfoot th.dt-head-justify div.dt-column-header,
|
||||
table.dataTable tfoot th.dt-head-justify div.dt-column-footer,
|
||||
table.dataTable tfoot td.dt-head-justify div.dt-column-header,
|
||||
table.dataTable tfoot td.dt-head-justify div.dt-column-footer {
|
||||
flex-direction: row;
|
||||
}
|
||||
table.dataTable thead th.dt-head-nowrap,
|
||||
table.dataTable thead td.dt-head-nowrap,
|
||||
table.dataTable tfoot th.dt-head-nowrap,
|
||||
|
@ -410,6 +491,9 @@ div.dt-container div.dt-layout-table > div {
|
|||
margin-left: 0;
|
||||
}
|
||||
}
|
||||
div.dt-container {
|
||||
position: relative;
|
||||
}
|
||||
div.dt-container div.dt-length label {
|
||||
font-weight: normal;
|
||||
text-align: left;
|
||||
|
@ -498,14 +582,19 @@ table.dataTable.table-sm > thead > tr td.dt-orderable-asc,
|
|||
table.dataTable.table-sm > thead > tr td.dt-orderable-desc,
|
||||
table.dataTable.table-sm > thead > tr td.dt-ordering-asc,
|
||||
table.dataTable.table-sm > thead > tr td.dt-ordering-desc {
|
||||
padding-right: 20px;
|
||||
padding-right: 0.25rem;
|
||||
}
|
||||
table.dataTable.table-sm > thead > tr th.dt-orderable-asc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-orderable-desc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-ordering-asc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-ordering-desc span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-orderable-asc span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-orderable-desc span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-ordering-asc span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-ordering-desc span.dt-column-order {
|
||||
right: 5px;
|
||||
right: 0.25rem;
|
||||
}
|
||||
table.dataTable.table-sm > thead > tr th.dt-type-date span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-type-numeric span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-type-date span.dt-column-order,
|
||||
table.dataTable.table-sm > thead > tr td.dt-type-numeric span.dt-column-order {
|
||||
left: 0.25rem;
|
||||
}
|
||||
|
||||
div.dt-scroll-head table.table-bordered {
|
||||
|
|
390  src/static/scripts/datatables.js (vendored)
|
@ -4,13 +4,13 @@
|
|||
*
|
||||
* To rebuild or modify this file with the latest versions of the included
|
||||
* software please visit:
|
||||
* https://datatables.net/download/#bs5/dt-2.2.2
|
||||
* https://datatables.net/download/#bs5/dt-2.3.1
|
||||
*
|
||||
* Included libraries:
|
||||
* DataTables 2.2.2
|
||||
* DataTables 2.3.1
|
||||
*/
|
||||
|
||||
/*! DataTables 2.2.2
|
||||
/*! DataTables 2.3.1
|
||||
* © SpryMedia Ltd - datatables.net/license
|
||||
*/
|
||||
|
||||
|
@ -101,15 +101,19 @@
|
|||
var defaults = DataTable.defaults;
|
||||
var $this = $(this);
|
||||
|
||||
|
||||
/* Sanity check */
|
||||
// Sanity check
|
||||
if ( this.nodeName.toLowerCase() != 'table' )
|
||||
{
|
||||
_fnLog( null, 0, 'Non-table node initialisation ('+this.nodeName+')', 2 );
|
||||
return;
|
||||
}
|
||||
|
||||
$(this).trigger( 'options.dt', oInit );
|
||||
// Special case for options
|
||||
if (oInit.on && oInit.on.options) {
|
||||
_fnListener($this, 'options', oInit.on.options);
|
||||
}
|
||||
|
||||
$this.trigger( 'options.dt', oInit );
|
||||
|
||||
/* Backwards compatibility for the defaults */
|
||||
_fnCompatOpts( defaults );
|
||||
|
@ -248,6 +252,9 @@
|
|||
"caption",
|
||||
"layout",
|
||||
"orderDescReverse",
|
||||
"orderIndicators",
|
||||
"orderHandler",
|
||||
"titleRow",
|
||||
"typeDetect",
|
||||
[ "iCookieDuration", "iStateDuration" ], // backwards compat
|
||||
[ "oSearch", "oPreviousSearch" ],
|
||||
|
@ -276,6 +283,13 @@
|
|||
|
||||
oSettings.rowIdFn = _fnGetObjectDataFn( oInit.rowId );
|
||||
|
||||
// Add event listeners
|
||||
if (oInit.on) {
|
||||
Object.keys(oInit.on).forEach(function (key) {
|
||||
_fnListener($this, key, oInit.on[key]);
|
||||
});
|
||||
}
|
||||
|
||||
/* Browser support detection */
|
||||
_fnBrowserDetect( oSettings );
|
||||
|
||||
|
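The new `on` initialisation option handled above is wired through _fnListener, which binds each entry as `<name>.dt` and accepts either a single function or an array of functions; `options` is special-cased so it is attached before the `options.dt` event fires. A hypothetical initialisation (the `#myTable` selector is an assumption):

new DataTable('#myTable', {
    on: {
        // attached before options.dt is triggered, so oInit can still be adjusted
        options: function (e, opts) { /* tweak opts before initialisation */ },
        draw: function () { console.log('table drawn'); },
        // an array registers several listeners for the same event
        order: [
            function () { console.log('order changed'); },
            function () { console.log('second listener'); }
        ]
    }
});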
@ -336,7 +350,7 @@
|
|||
/* HTML5 attribute detection - build an mData object automatically if the
|
||||
* attributes are found
|
||||
*/
|
||||
var rowOne = $this.children('tbody').find('tr').eq(0);
|
||||
var rowOne = $this.children('tbody').find('tr:first-child').eq(0);
|
||||
|
||||
if ( rowOne.length ) {
|
||||
var a = function ( cell, name ) {
|
||||
|
@ -494,6 +508,13 @@
|
|||
* @namespace
|
||||
*/
|
||||
DataTable.ext = _ext = {
|
||||
/**
|
||||
* DataTables build type (expanded by the download builder)
|
||||
*
|
||||
* @type string
|
||||
*/
|
||||
builder: "bs5/dt-2.3.1",
|
||||
|
||||
/**
|
||||
* Buttons. For use with the Buttons extension for DataTables. This is
|
||||
* defined here so other extensions can define buttons regardless of load
|
||||
|
@ -505,6 +526,14 @@
|
|||
buttons: {},
|
||||
|
||||
|
||||
/**
|
||||
* ColumnControl buttons and content
|
||||
*
|
||||
* @type object
|
||||
*/
|
||||
ccContent: {},
|
||||
|
||||
|
||||
/**
|
||||
* Element class names
|
||||
*
|
||||
|
@ -514,14 +543,6 @@
|
|||
classes: {},
|
||||
|
||||
|
||||
/**
|
||||
* DataTables build type (expanded by the download builder)
|
||||
*
|
||||
* @type string
|
||||
*/
|
||||
builder: "bs5/dt-2.2.2",
|
||||
|
||||
|
||||
/**
|
||||
* Error reporting.
|
||||
*
|
||||
|
@ -1887,6 +1908,26 @@
|
|||
init.scrollX = init.scrollX ? '100%' : '';
|
||||
}
|
||||
|
||||
// Objects for ordering
|
||||
if ( typeof init.bSort === 'object' ) {
|
||||
init.orderIndicators = init.bSort.indicators !== undefined ? init.bSort.indicators : true;
|
||||
init.orderHandler = init.bSort.handler !== undefined ? init.bSort.handler : true;
|
||||
init.bSort = true;
|
||||
}
|
||||
else if (init.bSort === false) {
|
||||
init.orderIndicators = false;
|
||||
init.orderHandler = false;
|
||||
}
|
||||
else if (init.bSort === true) {
|
||||
init.orderIndicators = true;
|
||||
init.orderHandler = true;
|
||||
}
|
||||
|
||||
// Which cells are the title cells?
|
||||
if (typeof init.bSortCellsTop === 'boolean') {
|
||||
init.titleRow = init.bSortCellsTop;
|
||||
}
|
||||
|
||||
// Column search objects are in an array, so it needs to be converted
|
||||
// element by element
|
||||
var searchCols = init.aoSearchCols;
|
||||
|
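Two of the options being compat-mapped above are new public settings: bSort (the camelCase `ordering` option) may now be an object with indicators/handler switches, and titleRow replaces the legacy bSortCellsTop/orderCellsTop boolean. A hypothetical initialisation showing both (selector assumed):

new DataTable('#example', {
    // ordering maps to bSort internally and may now be an object
    ordering: {
        indicators: false,   // hide the sort arrows in the header
        handler: true        // but keep the click-to-order listener
    },
    // titleRow supersedes orderCellsTop:
    //   true -> top header row, false -> bottom row,
    //   0, 1, ... -> explicit row index, null -> automatic detection
    titleRow: 0
});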
@ -3264,7 +3305,7 @@
|
|||
* @param {*} settings DataTables settings
|
||||
* @param {*} source Source layout array
|
||||
* @param {*} incColumns What columns should be included
|
||||
* @returns Layout array
|
||||
* @returns Layout array in column index order
|
||||
*/
|
||||
function _fnHeaderLayout( settings, source, incColumns )
|
||||
{
|
||||
|
@ -3548,7 +3589,9 @@
|
|||
|
||||
_fnDraw( settings );
|
||||
|
||||
settings.api.one('draw', function () {
|
||||
settings._drawHold = false;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
@ -3560,10 +3603,9 @@
|
|||
var zero = oLang.sZeroRecords;
|
||||
var dataSrc = _fnDataSource( settings );
|
||||
|
||||
if (
|
||||
(settings.iDraw < 1 && dataSrc === 'ssp') ||
|
||||
(settings.iDraw <= 1 && dataSrc === 'ajax')
|
||||
) {
|
||||
// Make use of the fact that settings.json is only set once the initial data has
|
||||
// been loaded. Show loading when that isn't the case
|
||||
if ((dataSrc === 'ssp' || dataSrc === 'ajax') && ! settings.json) {
|
||||
zero = oLang.sLoadingRecords;
|
||||
}
|
||||
else if ( oLang.sEmptyTable && settings.fnRecordsTotal() === 0 )
|
||||
|
@ -3933,6 +3975,7 @@
|
|||
var rows = $(thead).children('tr');
|
||||
var row, cell;
|
||||
var i, k, l, iLen, shifted, column, colspan, rowspan;
|
||||
var titleRow = settings.titleRow;
|
||||
var isHeader = thead && thead.nodeName.toLowerCase() === 'thead';
|
||||
var layout = [];
|
||||
var unique;
|
||||
|
@ -3961,6 +4004,7 @@
|
|||
cell.nodeName.toUpperCase() == 'TH'
|
||||
) {
|
||||
var cols = [];
|
||||
var jqCell = $(cell);
|
||||
|
||||
// Get the col and rowspan attributes from the DOM and sanitise them
|
||||
colspan = cell.getAttribute('colspan') * 1;
|
||||
|
@ -3981,7 +4025,7 @@
|
|||
if ( write ) {
|
||||
if (unique) {
|
||||
// Allow column options to be set from HTML attributes
|
||||
_fnColumnOptions( settings, shifted, $(cell).data() );
|
||||
_fnColumnOptions( settings, shifted, jqCell.data() );
|
||||
|
||||
// Get the width for the column. This can be defined from the
|
||||
// width attribute, style attribute or `columns.width` option
|
||||
|
@ -3998,8 +4042,15 @@
|
|||
// Column title handling - can be user set, or read from the DOM
|
||||
// This happens before the render, so the original is still in place
|
||||
if ( columnDef.sTitle !== null && ! columnDef.autoTitle ) {
|
||||
if (
|
||||
(titleRow === true && i === 0) || // top row
|
||||
(titleRow === false && i === rows.length -1) || // bottom row
|
||||
(titleRow === i) || // specific row
|
||||
(titleRow === null)
|
||||
) {
|
||||
cell.innerHTML = columnDef.sTitle;
|
||||
}
|
||||
}
|
||||
|
||||
if (! columnDef.sTitle && unique) {
|
||||
columnDef.sTitle = _stripHtml(cell.innerHTML);
|
||||
|
@ -4016,12 +4067,12 @@
|
|||
// Fall back to the aria-label attribute on the table header if no ariaTitle is
|
||||
// provided.
|
||||
if (! columnDef.ariaTitle) {
|
||||
columnDef.ariaTitle = $(cell).attr("aria-label") || columnDef.sTitle;
|
||||
columnDef.ariaTitle = jqCell.attr("aria-label") || columnDef.sTitle;
|
||||
}
|
||||
|
||||
// Column specific class names
|
||||
if ( columnDef.className ) {
|
||||
$(cell).addClass( columnDef.className );
|
||||
jqCell.addClass( columnDef.className );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4033,11 +4084,28 @@
|
|||
.appendTo(cell);
|
||||
}
|
||||
|
||||
if ( isHeader && $('span.dt-column-order', cell).length === 0) {
|
||||
if (
|
||||
settings.orderIndicators &&
|
||||
isHeader &&
|
||||
jqCell.filter(':not([data-dt-order=disable])').length !== 0 &&
|
||||
jqCell.parent(':not([data-dt-order=disable])').length !== 0 &&
|
||||
$('span.dt-column-order', cell).length === 0
|
||||
) {
|
||||
$('<span>')
|
||||
.addClass('dt-column-order')
|
||||
.appendTo(cell);
|
||||
}
|
||||
|
||||
// We need to wrap the elements in the header in another element to use flexbox
|
||||
// layout for those elements
|
||||
var headerFooter = isHeader ? 'header' : 'footer';
|
||||
|
||||
if ( $('span.dt-column-' + headerFooter, cell).length === 0) {
|
||||
$('<div>')
|
||||
.addClass('dt-column-' + headerFooter)
|
||||
.append(cell.childNodes)
|
||||
.appendTo(cell);
|
||||
}
|
||||
}
|
||||
|
||||
// If there is col / rowspan, copy the information into the layout grid
|
||||
|
@ -4188,6 +4256,11 @@
|
|||
// Allow plug-ins and external processes to modify the data
|
||||
_fnCallbackFire( oSettings, null, 'preXhr', [oSettings, data, baseAjax], true );
|
||||
|
||||
// Custom Ajax option to submit the parameters as a JSON string
|
||||
if (baseAjax.submitAs === 'json' && typeof data === 'object') {
|
||||
baseAjax.data = JSON.stringify(data);
|
||||
}
|
||||
|
||||
if ( typeof ajax === 'function' )
|
||||
{
|
||||
// Is a function - let the caller define what needs to be done
|
||||
|
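The submitAs handling above is a new ajax sub-option: when it is set to 'json' and the request data is still an object after the preXhr callback, the parameters are serialised with JSON.stringify before the request is made. A hypothetical server-side setup (URL is an assumption):

new DataTable('#example', {
    serverSide: true,
    ajax: {
        url: '/api/rows',    // assumed endpoint
        type: 'POST',
        submitAs: 'json'     // send draw/order/search parameters as a JSON body
    }
});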
@ -5688,17 +5761,22 @@
|
|||
function _fnSortInit( settings ) {
|
||||
var target = settings.nTHead;
|
||||
var headerRows = target.querySelectorAll('tr');
|
||||
var legacyTop = settings.bSortCellsTop;
|
||||
var titleRow = settings.titleRow;
|
||||
var notSelector = ':not([data-dt-order="disable"]):not([data-dt-order="icon-only"])';
|
||||
|
||||
// Legacy support for `orderCellsTop`
|
||||
if (legacyTop === true) {
|
||||
if (titleRow === true) {
|
||||
target = headerRows[0];
|
||||
}
|
||||
else if (legacyTop === false) {
|
||||
else if (titleRow === false) {
|
||||
target = headerRows[ headerRows.length - 1 ];
|
||||
}
|
||||
else if (titleRow !== null) {
|
||||
target = headerRows[titleRow];
|
||||
}
|
||||
// else - all rows
|
||||
|
||||
if (settings.orderHandler) {
|
||||
_fnSortAttachListener(
|
||||
settings,
|
||||
target,
|
||||
|
@ -5706,6 +5784,7 @@
|
|||
? 'tr'+notSelector+' th'+notSelector+', tr'+notSelector+' td'+notSelector
|
||||
: 'th'+notSelector+', td'+notSelector
|
||||
);
|
||||
}
|
||||
|
||||
// Need to resolve the user input array into our internal structure
|
||||
var order = [];
|
||||
|
@ -5720,6 +5799,8 @@
|
|||
var run = false;
|
||||
var columns = column === undefined
|
||||
? _fnColumnsFromHeader( e.target )
|
||||
: Array.isArray(column)
|
||||
? column
|
||||
: [column];
|
||||
|
||||
if ( columns.length ) {
|
||||
|
@ -6343,16 +6424,19 @@
|
|||
|
||||
// A column name was stored and should be used for restore
|
||||
if (typeof col[0] === 'string') {
|
||||
// Find the name from the current list of column names
|
||||
var idx = currentNames.indexOf(col[0]);
|
||||
|
||||
// Find the name from the current list of column names, or fallback to index 0
|
||||
set[0] = idx >= 0
|
||||
? idx
|
||||
: 0;
|
||||
if (idx < 0) {
|
||||
// If the column was not found ignore it and continue
|
||||
return;
|
||||
}
|
||||
|
||||
set[0] = idx;
|
||||
}
|
||||
else if (set[0] >= columns.length) {
|
||||
// If a column name, but it is out of bounds, set to 0
|
||||
set[0] = 0;
|
||||
// If the column index is out of bounds ignore it and continue
|
||||
return;
|
||||
}
|
||||
|
||||
settings.aaSorting.push(set);
|
||||
|
@ -6765,6 +6849,23 @@
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add one or more listeners to the table
|
||||
*
|
||||
* @param {*} that JQ for the table
|
||||
* @param {*} name Event name
|
||||
* @param {*} src Listener(s)
|
||||
*/
|
||||
function _fnListener(that, name, src) {
|
||||
if (!Array.isArray(src)) {
|
||||
src = [src];
|
||||
}
|
||||
|
||||
for (i=0 ; i<src.length ; i++) {
|
||||
that.on(name + '.dt', src[i]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
|
@ -7421,12 +7522,24 @@
|
|||
['footer', 'aoFooter'],
|
||||
].forEach(function (item) {
|
||||
_api_register( 'table().' + item[0] + '.structure()' , function (selector) {
|
||||
var indexes = this.columns(selector).indexes().flatten();
|
||||
var indexes = this.columns(selector).indexes().flatten().toArray();
|
||||
var ctx = this.context[0];
|
||||
var structure = _fnHeaderLayout(ctx, ctx[item[1]], indexes);
|
||||
|
||||
return _fnHeaderLayout(ctx, ctx[item[1]], indexes);
|
||||
// The structure is in column index order - but from this method we want the return to be
|
||||
// in the columns() selector API order. In order to do that we need to map from one form
|
||||
// to the other
|
||||
var orderedIndexes = indexes.slice().sort(function (a, b) {
|
||||
return a - b;
|
||||
});
|
||||
|
||||
return structure.map(function (row) {
|
||||
return indexes.map(function (colIdx) {
|
||||
return row[orderedIndexes.indexOf(colIdx)];
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
})
|
||||
|
||||
|
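With the change above, table().header.structure() (and the matching footer variant) returns its rows in the order of the column selector rather than plain column-index order; a brief sketch, assuming `table` is an existing DataTable API instance with at least three columns:

var struct = table.table().header.structure([2, 0]);
// struct[0][0] now describes the header cell of column 2 and
// struct[0][1] the cell of column 0, matching the selector order.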
||||
_api_registerPlural( 'tables().containers()', 'table().container()' , function () {
|
||||
|
@ -7775,7 +7888,7 @@
|
|||
{
|
||||
var
|
||||
out = [], res,
|
||||
a, i, ien, j, jen,
|
||||
i, ien,
|
||||
selectorType = typeof selector;
|
||||
|
||||
// Can't just check for isArray here, as an API or jQuery instance might be
|
||||
|
@ -7785,13 +7898,7 @@
|
|||
}
|
||||
|
||||
for ( i=0, ien=selector.length ; i<ien ; i++ ) {
|
||||
// Only split on simple strings - complex expressions will be jQuery selectors
|
||||
a = selector[i] && selector[i].split && ! selector[i].match(/[[(:]/) ?
|
||||
selector[i].split(',') :
|
||||
[ selector[i] ];
|
||||
|
||||
for ( j=0, jen=a.length ; j<jen ; j++ ) {
|
||||
res = selectFn( typeof a[j] === 'string' ? (a[j]).trim() : a[j] );
|
||||
res = selectFn( typeof selector[i] === 'string' ? selector[i].trim() : selector[i] );
|
||||
|
||||
// Remove empty items
|
||||
res = res.filter( function (item) {
|
||||
|
@ -7802,7 +7909,6 @@
|
|||
out = out.concat( res );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// selector extensions
|
||||
var ext = _ext.selector[ type ];
|
||||
|
@ -7829,6 +7935,7 @@
|
|||
}
|
||||
|
||||
return $.extend( {
|
||||
columnOrder: 'implied',
|
||||
search: 'none',
|
||||
order: 'current',
|
||||
page: 'all'
|
||||
|
@ -8590,23 +8697,60 @@
|
|||
|
||||
var __column_header = function ( settings, column, row ) {
|
||||
var header = settings.aoHeader;
|
||||
var target = row !== undefined
|
||||
? row
|
||||
: settings.bSortCellsTop // legacy support
|
||||
? 0
|
||||
: header.length - 1;
|
||||
var titleRow = settings.titleRow;
|
||||
var target = null;
|
||||
|
||||
if (row !== undefined) {
|
||||
target = row;
|
||||
}
|
||||
else if (titleRow === true) { // legacy orderCellsTop support
|
||||
target = 0;
|
||||
}
|
||||
else if (titleRow === false) {
|
||||
target = header.length - 1;
|
||||
}
|
||||
else if (titleRow !== null) {
|
||||
target = titleRow;
|
||||
}
|
||||
else {
|
||||
// Automatic - find the _last_ unique cell from the top that is not empty (last for
|
||||
// backwards compatibility)
|
||||
for (var i=0 ; i<header.length ; i++) {
|
||||
if (header[i][column].unique && $('span.dt-column-title', header[i][column].cell).text()) {
|
||||
target = i;
|
||||
}
|
||||
}
|
||||
|
||||
if (target === null) {
|
||||
target = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return header[target][column].cell;
|
||||
};
|
||||
|
||||
var __column_header_cells = function (header) {
|
||||
var out = [];
|
||||
|
||||
for (var i=0 ; i<header.length ; i++) {
|
||||
for (var j=0 ; j<header[i].length ; j++) {
|
||||
var cell = header[i][j].cell;
|
||||
|
||||
if (!out.includes(cell)) {
|
||||
out.push(cell);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
var __column_selector = function ( settings, selector, opts )
|
||||
{
|
||||
var
|
||||
columns = settings.aoColumns,
|
||||
names = _pluck( columns, 'sName' ),
|
||||
titles = _pluck( columns, 'sTitle' ),
|
||||
cells = DataTable.util.get('[].[].cell')(settings.aoHeader),
|
||||
nodes = _unique( _flatten([], cells) );
|
||||
names, titles,
|
||||
nodes = __column_header_cells(settings.aoHeader);
|
||||
|
||||
var run = function ( s ) {
|
||||
var selInt = _intVal( s );
|
||||
|
@ -8678,12 +8822,21 @@
|
|||
} );
|
||||
|
||||
case 'name':
|
||||
// Don't get names, unless needed, and only get once if it is
|
||||
if (!names) {
|
||||
names = _pluck( columns, 'sName' );
|
||||
}
|
||||
|
||||
// match by name. `names` is column index complete and in order
|
||||
return names.map( function (name, i) {
|
||||
return name === match[1] ? i : null;
|
||||
} );
|
||||
|
||||
case 'title':
|
||||
if (!titles) {
|
||||
titles = _pluck( columns, 'sTitle' );
|
||||
}
|
||||
|
||||
// match by column title
|
||||
return titles.map( function (title, i) {
|
||||
return title === match[1] ? i : null;
|
||||
|
@ -8722,7 +8875,11 @@
|
|||
[];
|
||||
};
|
||||
|
||||
return _selector_run( 'column', selector, run, settings, opts );
|
||||
var selected = _selector_run( 'column', selector, run, settings, opts );
|
||||
|
||||
return opts.columnOrder && opts.columnOrder === 'index'
|
||||
? selected.sort(function (a, b) { return a - b; })
|
||||
: selected; // implied
|
||||
};
|
||||
|
||||
|
||||
|
@ -8846,6 +9003,12 @@
|
|||
}, 1 );
|
||||
} );
|
||||
|
||||
_api_registerPlural( 'columns().names()', 'column().name()', function () {
|
||||
return this.iterator( 'column', function ( settings, column ) {
|
||||
return settings.aoColumns[column].sName;
|
||||
}, 1 );
|
||||
} );
|
||||
|
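columns().names() / column().name() registered above are new read-only accessors for the name assigned in the column definitions; a hypothetical use, assuming `table` is a DataTable API instance whose columns were initialised with a `name` option:

var allNames = table.columns().names().toArray();   // e.g. ['email', 'created']
var first    = table.column(0).name();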
||||
_api_registerPlural( 'columns().nodes()', 'column().nodes()', function () {
|
||||
return this.iterator( 'column-rows', function ( settings, column, i, j, rows ) {
|
||||
return _pluck_order( settings.aoData, rows, 'anCells', column ) ;
|
||||
|
@ -9272,7 +9435,10 @@
|
|||
// otherwise a 2D array was passed in
|
||||
|
||||
return this.iterator( 'table', function ( settings ) {
|
||||
settings.aaSorting = Array.isArray(order) ? order.slice() : order;
|
||||
var resolved = [];
|
||||
_fnSortResolve(settings, resolved, order);
|
||||
|
||||
settings.aaSorting = resolved;
|
||||
} );
|
||||
} );
|
||||
|
||||
|
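order() now routes its input through _fnSortResolve, the same resolver used when restoring a saved state, so entries are validated against the current columns and can reference a column by name. A speculative example, assuming a column defined with name: 'email':

table.order([['email', 'desc']]).draw();
// Entries naming a column that no longer exists are silently dropped
// instead of falling back to column 0.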
@ -9398,7 +9564,7 @@
|
|||
var fixed = settings.searchFixed;
|
||||
|
||||
if (! name) {
|
||||
return Object.keys(fixed)
|
||||
return Object.keys(fixed);
|
||||
}
|
||||
else if (search === undefined) {
|
||||
return fixed[name];
|
||||
|
@ -9465,10 +9631,10 @@
|
|||
var fixed = settings.aoColumns[colIdx].searchFixed;
|
||||
|
||||
if (! name) {
|
||||
return Object.keys(fixed)
|
||||
return Object.keys(fixed);
|
||||
}
|
||||
else if (search === undefined) {
|
||||
return fixed[name];
|
||||
return fixed[name] || null;
|
||||
}
|
||||
else if (search === null) {
|
||||
delete fixed[name];
|
||||
|
@ -9920,14 +10086,9 @@
|
|||
jqTable.append( tfoot );
|
||||
}
|
||||
|
||||
// Clean up the header
|
||||
$(thead).find('span.dt-column-order').remove();
|
||||
$(thead).find('span.dt-column-title').each(function () {
|
||||
var title = $(this).html();
|
||||
$(this).parent().append(title);
|
||||
$(this).remove();
|
||||
});
|
||||
|
||||
// Clean up the header / footer
|
||||
cleanHeader(thead, 'header');
|
||||
cleanHeader(tfoot, 'footer');
|
||||
settings.colgroup.remove();
|
||||
|
||||
settings.aaSorting = [];
|
||||
|
@ -9949,7 +10110,6 @@
|
|||
orderClasses.isDesc
|
||||
)
|
||||
.css('width', '')
|
||||
.removeAttr('data-dt-column')
|
||||
.removeAttr('aria-sort');
|
||||
|
||||
// Add the TR elements back into the table in their original order
|
||||
|
@ -10030,6 +10190,19 @@
|
|||
: resolved;
|
||||
} );
|
||||
|
||||
// Needed for header and footer, so pulled into its own function
|
||||
function cleanHeader(node, className) {
|
||||
$(node).find('span.dt-column-order').remove();
|
||||
$(node).find('span.dt-column-title').each(function () {
|
||||
var title = $(this).html();
|
||||
$(this).parent().parent().append(title);
|
||||
$(this).remove();
|
||||
});
|
||||
$(node).find('div.dt-column-' + className).remove();
|
||||
|
||||
$('th, td', node).removeAttr('data-dt-column');
|
||||
}
|
||||
|
||||
/**
|
||||
* Version string for plug-ins to check compatibility. Allowed format is
|
||||
* `a.b.c-d` where: a:int, b:int, c:int, d:string(dev|beta|alpha). `d` is used
|
||||
|
@ -10038,7 +10211,7 @@
|
|||
* @type string
|
||||
* @default Version number
|
||||
*/
|
||||
DataTable.version = "2.2.2";
|
||||
DataTable.version = "2.3.1";
|
||||
|
||||
/**
|
||||
* Private data store, containing all of the settings objects that are
|
||||
|
@ -10645,6 +10818,10 @@
|
|||
"bSortCellsTop": null,
|
||||
|
||||
|
||||
/** Specify which row is the title row in the header. Replacement for bSortCellsTop */
|
||||
titleRow: null,
|
||||
|
||||
|
||||
/**
|
||||
* Enable or disable the addition of the classes `sorting\_1`, `sorting\_2` and
|
||||
* `sorting\_3` to the columns which are currently being sorted on. This is
|
||||
|
@ -10922,6 +11099,13 @@
|
|||
1: "entry"
|
||||
},
|
||||
|
||||
/**
|
||||
* Page length options
|
||||
*/
|
||||
lengthLabels: {
|
||||
'-1': 'All'
|
||||
},
|
||||
|
||||
/**
|
||||
* This string is shown in preference to `zeroRecords` when the table is
|
||||
* empty of data (regardless of filtering). Note that this is an optional
|
||||
|
@ -11192,7 +11376,10 @@
|
|||
/**
|
||||
* For server-side processing - use the data from the DOM for the first draw
|
||||
*/
|
||||
iDeferLoading: null
|
||||
iDeferLoading: null,
|
||||
|
||||
/** Event listeners */
|
||||
on: null
|
||||
};
|
||||
|
||||
_fnHungarianMap( DataTable.defaults );
|
||||
|
@ -12019,10 +12206,7 @@
|
|||
|
||||
/**
|
||||
* Indicate that if multiple rows are in the header and there is more than
|
||||
* one unique cell per column, if the top one (true) or bottom one (false)
|
||||
* should be used for sorting / title by DataTables.
|
||||
* Note that this parameter will be set by the initialisation routine. To
|
||||
* set a default use {@link DataTable.defaults}.
|
||||
* one unique cell per column. Replaced by titleRow
|
||||
*/
|
||||
"bSortCellsTop": null,
|
||||
|
||||
|
@ -12147,7 +12331,19 @@
|
|||
resizeObserver: null,
|
||||
|
||||
/** Keep a record of the last size of the container, so we can skip duplicates */
|
||||
containerWidth: -1
|
||||
containerWidth: -1,
|
||||
|
||||
/** Reverse the initial order of the data set on desc ordering */
|
||||
orderDescReverse: null,
|
||||
|
||||
/** Show / hide ordering indicators in headers */
|
||||
orderIndicators: true,
|
||||
|
||||
/** Default ordering listener */
|
||||
orderHandler: true,
|
||||
|
||||
/** Title row indicator */
|
||||
titleRow: null
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -12977,7 +13173,7 @@
|
|||
cell.addClass(classes.order.none);
|
||||
}
|
||||
|
||||
var legacyTop = settings.bSortCellsTop;
|
||||
var titleRow = settings.titleRow;
|
||||
var headerRows = cell.closest('thead').find('tr');
|
||||
var rowIdx = cell.parent().index();
|
||||
|
||||
|
@ -12987,11 +13183,10 @@
|
|||
cell.attr('data-dt-order') === 'disable' ||
|
||||
cell.parent().attr('data-dt-order') === 'disable' ||
|
||||
|
||||
// Legacy support for `orderCellsTop`. If it is set, then cells
|
||||
// which are not in the top or bottom row of the header (depending
|
||||
// on the value) do not get the sorting classes applied to them
|
||||
(legacyTop === true && rowIdx !== 0) ||
|
||||
(legacyTop === false && rowIdx !== headerRows.length - 1)
|
||||
// titleRow support, for defining a specific row in the header
|
||||
(titleRow === true && rowIdx !== 0) ||
|
||||
(titleRow === false && rowIdx !== headerRows.length - 1) ||
|
||||
(typeof titleRow === 'number' && rowIdx !== titleRow)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
@ -13001,7 +13196,7 @@
|
|||
// `DT` namespace will allow the event to be removed automatically
|
||||
// on destroy, while the `dt` namespaced event is the one we are
|
||||
// listening for
|
||||
$(settings.nTable).on( 'order.dt.DT column-visibility.dt.DT', function ( e, ctx ) {
|
||||
$(settings.nTable).on( 'order.dt.DT column-visibility.dt.DT', function ( e, ctx, column ) {
|
||||
if ( settings !== ctx ) { // need to check this this is the host
|
||||
return; // table, not a nested one
|
||||
}
|
||||
|
@ -13012,6 +13207,16 @@
|
|||
return;
|
||||
}
|
||||
|
||||
var orderedColumns = _pluck(sorting, 'col');
|
||||
|
||||
// This handler is only needed on column visibility if the column is part of the
|
||||
// ordering. If it isn't, then we can bail out to save performance. It could be a
|
||||
// separate event handler, but this is a balance between code reuse / size and performance
|
||||
// console.log(e, e.name, column, orderedColumns, orderedColumns.includes(column))
|
||||
if (e.type === 'column-visibility' && ! orderedColumns.includes(column)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var i;
|
||||
var orderClasses = classes.order;
|
||||
var columns = ctx.api.columns( cell );
|
||||
|
@ -13020,8 +13225,8 @@
|
|||
var ariaType = '';
|
||||
var indexes = columns.indexes();
|
||||
var sortDirs = columns.orderable(true).flatten();
|
||||
var orderedColumns = _pluck(sorting, 'col');
|
||||
var tabIndex = settings.iTabIndex;
|
||||
var canOrder = ctx.orderHandler && orderable;
|
||||
|
||||
cell
|
||||
.removeClass(
|
||||
|
@ -13029,8 +13234,8 @@
|
|||
orderClasses.isDesc
|
||||
)
|
||||
.toggleClass( orderClasses.none, ! orderable )
|
||||
.toggleClass( orderClasses.canAsc, orderable && sortDirs.includes('asc') )
|
||||
.toggleClass( orderClasses.canDesc, orderable && sortDirs.includes('desc') );
|
||||
.toggleClass( orderClasses.canAsc, canOrder && sortDirs.includes('asc') )
|
||||
.toggleClass( orderClasses.canDesc, canOrder && sortDirs.includes('desc') );
|
||||
|
||||
// Determine if all of the columns that this cell covers are included in the
|
||||
// current ordering
|
||||
|
@ -13789,12 +13994,17 @@
|
|||
} );
|
||||
|
||||
for ( i=0 ; i<lengths.length ; i++ ) {
|
||||
select[0][ i ] = new Option(
|
||||
typeof language[i] === 'number' ?
|
||||
// Attempt to look up the length from the i18n options
|
||||
var label = settings.api.i18n('lengthLabels.' + lengths[i], null);
|
||||
|
||||
if (label === null) {
|
||||
// If not present, fallback to old style
|
||||
label = typeof language[i] === 'number' ?
|
||||
settings.fnFormatNumber( language[i] ) :
|
||||
language[i],
|
||||
lengths[i]
|
||||
);
|
||||
language[i];
|
||||
}
|
||||
|
||||
select[0][ i ] = new Option(label, lengths[i]);
|
||||
}
|
||||
|
||||
// add for and id to label and input
|
||||
|
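The new lengthLabels language strings introduced earlier (defaulting to { '-1': 'All' }) feed the page-length select built above; entries without a label fall back to the old number formatting. A hypothetical configuration (selector assumed):

new DataTable('#example', {
    lengthMenu: [10, 25, 100, -1],
    language: {
        lengthLabels: {
            '-1': 'Everything'   // overrides the default 'All' for the show-all entry
        }
    }
});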
|
|
@ -24,6 +24,7 @@
|
|||
<dt class="col-sm-5">Web Installed
|
||||
<span class="badge bg-success d-none" id="web-success" title="Latest version is installed.">Ok</span>
|
||||
<span class="badge bg-warning text-dark d-none" id="web-warning" title="There seems to be an update available.">Update</span>
|
||||
<span class="badge bg-info text-dark d-none" id="web-prerelease" title="You seem to be using a pre-release version.">Pre-Release</span>
|
||||
</dt>
|
||||
<dd class="col-sm-7">
|
||||
<span id="web-installed">{{page_data.web_vault_version}}</span>
|
||||
|
@ -68,10 +69,14 @@
|
|||
<span class="d-block"><b>No</b></span>
|
||||
{{/unless}}
|
||||
</dd>
|
||||
<dt class="col-sm-5">Environment settings overridden</dt>
|
||||
<dt class="col-sm-5">Uses config.json
|
||||
{{#if page_data.overrides}}
|
||||
<span class="badge bg-info text-dark" title="Environment variables are overwritten by a config.json.">Note</span>
|
||||
{{/if}}
|
||||
</dt>
|
||||
<dd class="col-sm-7">
|
||||
{{#if page_data.overrides}}
|
||||
<span class="d-block" title="The following settings are overridden: {{page_data.overrides}}"><b>Yes</b></span>
|
||||
<abbr class="d-block" title="The following settings are overridden: {{page_data.overrides}}"><b>Yes</b></abbr>
|
||||
{{/if}}
|
||||
{{#unless page_data.overrides}}
|
||||
<span class="d-block"><b>No</b></span>
|
||||
|
@ -154,7 +159,11 @@
|
|||
<dd class="col-sm-7">
|
||||
<span id="dns-resolved">{{page_data.dns_resolved}}</span>
|
||||
</dd>
|
||||
<dt class="col-sm-5">Date & Time (Local)</dt>
|
||||
<dt class="col-sm-5">Date & Time (Local)
|
||||
{{#if page_data.tz_env}}
|
||||
<span class="badge bg-success" title="Configured TZ environment variable">{{page_data.tz_env}}</span>
|
||||
{{/if}}
|
||||
</dt>
|
||||
<dd class="col-sm-7">
|
||||
<span><b>Server:</b> {{page_data.server_time_local}}</span>
|
||||
</dd>
|
||||
|
|
|
@ -43,7 +43,7 @@
|
|||
<span class="d-block"><strong>Groups:</strong> {{group_count}}</span>
|
||||
<span class="d-block"><strong>Events:</strong> {{event_count}}</span>
|
||||
</td>
|
||||
<td class="text-end px-0 small">
|
||||
<td class="text-end px-1 small">
|
||||
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-delete-organization data-vw-org-uuid="{{id}}" data-vw-org-name="{{name}}" data-vw-billing-email="{{billingEmail}}">Delete Organization</button><br>
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
@ -68,7 +68,7 @@
|
|||
{{/each}}
|
||||
</div>
|
||||
</td>
|
||||
<td class="text-end px-0 small">
|
||||
<td class="text-end px-1 small">
|
||||
<span data-vw-user-uuid="{{id}}" data-vw-user-email="{{email}}">
|
||||
{{#if twoFactorEnabled}}
|
||||
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-remove2fa>Remove all 2FA</button><br>
|
||||
|
|
6  src/static/templates/email/change_email_existing.hbs (new file)
|
@ -0,0 +1,6 @@
|
|||
Your Email Change
|
||||
<!---------------->
|
||||
A user ({{ acting_address }}) recently tried to change their account to use this email address ({{ existing_address }}). An account already exists with this email ({{ existing_address }}).
|
||||
|
||||
If you did not try to change an email address, contact your administrator.
|
||||
{{> email/email_footer_text }}
|
16  src/static/templates/email/change_email_existing.html.hbs (new file)
|
@ -0,0 +1,16 @@
|
|||
Your Email Change
|
||||
<!---------------->
|
||||
{{> email/email_header }}
|
||||
<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
|
||||
A user ({{ acting_address }}) recently tried to change their account to use this email address ({{ existing_address }}). An account already exists with this email ({{ existing_address }}).
|
||||
</td>
|
||||
</tr>
|
||||
<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
|
||||
If you did not try to change an email address, contact your administrator.
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
{{> email/email_footer }}
|
|
@ -1,7 +1,7 @@
|
|||
Removed from {{{org_name}}}
|
||||
Your access to {{{org_name}}} has been revoked.
|
||||
<!---------------->
|
||||
You have been removed from organization *{{org_name}}* because your account does not have Two-step Login enabled.
|
||||
Your user account has been removed from the *{{org_name}}* organization because you do not have two-step login configured.
|
||||
Before you can re-join this organization you need to set up two-step login on your user account.
|
||||
|
||||
|
||||
You can enable Two-step Login in your account settings.
|
||||
You can enable two-step login in your account settings.
|
||||
{{> email/email_footer_text }}
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
Removed from {{{org_name}}}
|
||||
Your access to {{{org_name}}} has been revoked.
|
||||
<!---------------->
|
||||
{{> email/email_header }}
|
||||
<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
|
||||
You have been removed from organization <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{org_name}}</b> because your account does not have Two-step Login enabled.
|
||||
Your user account has been removed from the <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{org_name}}</b> organization because you do not have two-step login configured.<br>
|
||||
Before you can re-join this organization you need to set up two-step login on your user account.
|
||||
</td>
|
||||
</tr>
|
||||
<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
|
||||
<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
|
||||
You can enable Two-step Login in your account settings.
|
||||
You can enable two-step login in your account settings.
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
|
|
@ -35,29 +35,13 @@ a[href$="/settings/two-factor"] {
|
|||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
/* Hide Business Owned checkbox */
|
||||
app-org-info > form:nth-child(1) > div:nth-child(3) {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
/* Hide the `This account is owned by a business` checkbox and label */
|
||||
#ownedBusiness,
|
||||
label[for^="ownedBusiness"] {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
/* Hide Business Name */
|
||||
app-org-account form div bit-form-field.tw-block:nth-child(3) {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
/* Hide organization plans */
|
||||
app-organization-plans > form > bit-section:nth-child(2) {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
/* Hide Collection Management Form */
|
||||
app-org-account form.ng-untouched:nth-child(6) {
|
||||
app-org-account form.ng-untouched:nth-child(5) {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
|
||||
|
@ -94,15 +78,19 @@ bit-nav-logo bit-nav-item .bwi-shield {
|
|||
{{#if signup_disabled}}
|
||||
/* From web vault 2025.1.2 and onwards, the signup button is hidden
|
||||
when signups are disabled as the web vault checks the /api/config endpoint.
|
||||
Note that the clients tend to aggressively cache this endpoint, so it might
|
||||
Note that the clients tend to cache this endpoint for about 1 hour, so it might
|
||||
take a while for the change to take effect. To avoid the button appearing
|
||||
when it shouldn't, we'll keep this style in place for a couple of versions */
|
||||
{{#if webver "<2025.3.0"}}
|
||||
/* Hide the register link on the login screen */
|
||||
{{#if (webver "<2025.3.0")}}
|
||||
app-login form div + div + div + div + hr,
|
||||
app-login form div + div + div + div + hr + p {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
{{else}}
|
||||
app-root a[routerlink="/signup"] {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
{{/if}}
|
||||
{{/if}}
|
||||
|
||||
|
@ -122,14 +110,14 @@ app-root form.ng-untouched button.\!tw-text-primary-600:nth-child(4) {
|
|||
|
||||
{{#unless mail_enabled}}
|
||||
/* Hide `Email` 2FA if mail is not enabled */
|
||||
app-two-factor-setup ul.list-group.list-group-2fa li.list-group-item:nth-child(1) {
|
||||
.providers-2fa-1 {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
{{/unless}}
|
||||
|
||||
{{#unless yubico_enabled}}
|
||||
/* Hide `YubiKey OTP security key` 2FA if it is not enabled */
|
||||
app-two-factor-setup ul.list-group.list-group-2fa li.list-group-item:nth-child(4) {
|
||||
.providers-2fa-3 {
|
||||
@extend %vw-hide;
|
||||
}
|
||||
{{/unless}}
|
||||
|
|
33  src/util.rs
|
@ -271,8 +271,8 @@ impl Fairing for BetterLogging {
|
|||
} else {
|
||||
"http"
|
||||
};
|
||||
let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
|
||||
info!(target: "start", "Rocket has launched from {}", addr);
|
||||
let addr = format!("{scheme}://{}:{}", &config.address, &config.port);
|
||||
info!(target: "start", "Rocket has launched from {addr}");
|
||||
}
|
||||
|
||||
async fn on_request(&self, request: &mut Request<'_>, _data: &mut Data<'_>) {
|
||||
|
@ -286,8 +286,8 @@ impl Fairing for BetterLogging {
|
|||
let uri_subpath = uri_path_str.strip_prefix(&CONFIG.domain_path()).unwrap_or(&uri_path_str);
|
||||
if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
|
||||
match uri.query() {
|
||||
Some(q) => info!(target: "request", "{} {}?{}", method, uri_path_str, &q[..q.len().min(30)]),
|
||||
None => info!(target: "request", "{} {}", method, uri_path_str),
|
||||
Some(q) => info!(target: "request", "{method} {uri_path_str}?{}", &q[..q.len().min(30)]),
|
||||
None => info!(target: "request", "{method} {uri_path_str}"),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@ -302,9 +302,9 @@ impl Fairing for BetterLogging {
|
|||
if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
|
||||
let status = response.status();
|
||||
if let Some(ref route) = request.route() {
|
||||
info!(target: "response", "{} => {}", route, status)
|
||||
info!(target: "response", "{route} => {status}")
|
||||
} else {
|
||||
info!(target: "response", "{}", status)
|
||||
info!(target: "response", "{status}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -329,7 +329,7 @@ pub fn get_display_size(size: i64) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
format!("{:.2} {}", size, UNITS[unit_counter])
|
||||
format!("{size:.2} {}", UNITS[unit_counter])
|
||||
}
|
||||
|
||||
pub fn get_uuid() -> String {
|
||||
|
@ -702,7 +702,7 @@ where
|
|||
return Err(e);
|
||||
}
|
||||
|
||||
warn!("Can't connect to database, retrying: {:?}", e);
|
||||
warn!("Can't connect to database, retrying: {e:?}");
|
||||
|
||||
sleep(Duration::from_millis(1_000)).await;
|
||||
}
|
||||
|
@ -755,9 +755,20 @@ pub fn convert_json_key_lcase_first(src_json: Value) -> Value {
|
|||
|
||||
/// Parses the experimental client feature flags string into a HashMap.
|
||||
pub fn parse_experimental_client_feature_flags(experimental_client_feature_flags: &str) -> HashMap<String, bool> {
|
||||
let feature_states = experimental_client_feature_flags.split(',').map(|f| (f.trim().to_owned(), true)).collect();
|
||||
|
||||
feature_states
|
||||
// These flags could still be configured, but are deprecated and not used anymore
|
||||
// To prevent old installations from starting filter these out and not error out
|
||||
const DEPRECATED_FLAGS: &[&str] =
|
||||
&["autofill-overlay", "autofill-v2", "browser-fileless-import", "extension-refresh", "fido2-vault-credentials"];
|
||||
experimental_client_feature_flags
|
||||
.split(',')
|
||||
.filter_map(|f| {
|
||||
let flag = f.trim();
|
||||
if !flag.is_empty() && !DEPRECATED_FLAGS.contains(&flag) {
|
||||
return Some((flag.to_owned(), true));
|
||||
}
|
||||
None
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// TODO: This is extracted from IpAddr::is_global, which is unstable:
|
||||
|
|