mirror of
https://github.com/dani-garcia/vaultwarden.git
synced 2025-05-30 23:43:56 +00:00
Improve sync speed and updated dep. versions
Improved sync speed by resolving the N+1 query issues. Solves #1402 and solves #1453. With this change there is just one query done to retrieve all the important data, and matching is done in-code/in-memory. With a very large database the sync time went down about 3 times. Also updated misc crates and GitHub Actions versions.
This commit is contained in:
parent
3abf173d89
commit
3ca85028ea
38 changed files with 6084 additions and 5835 deletions
|
@ -101,30 +101,36 @@ struct SyncData {
|
|||
async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> {
|
||||
let user_json = headers.user.to_json(&conn).await;
|
||||
|
||||
let folders = Folder::find_by_user(&headers.user.uuid, &conn).await;
|
||||
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
|
||||
// Get all ciphers which are visible by the user
|
||||
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await;
|
||||
|
||||
let collections_json = stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await)
|
||||
let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
|
||||
|
||||
// Lets generate the ciphers_json using all the gathered info
|
||||
let ciphers_json: Vec<Value> = stream::iter(ciphers)
|
||||
.then(|c| async {
|
||||
let c = c; // Move out this single variable
|
||||
c.to_json_details(&headers.user.uuid, &conn).await
|
||||
c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
|
||||
})
|
||||
.collect::<Vec<Value>>()
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
let policies = OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn);
|
||||
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
|
||||
|
||||
let ciphers_json = stream::iter(Cipher::find_by_user_visible(&headers.user.uuid, &conn).await)
|
||||
let collections_json: Vec<Value> = stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await)
|
||||
.then(|c| async {
|
||||
let c = c; // Move out this single variable
|
||||
c.to_json(&headers.host, &headers.user.uuid, &conn).await
|
||||
c.to_json_details(&headers.user.uuid, Some(&cipher_sync_data), &conn).await
|
||||
})
|
||||
.collect::<Vec<Value>>()
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
let sends = Send::find_by_user(&headers.user.uuid, &conn);
|
||||
let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();
|
||||
let folders_json: Vec<Value> =
|
||||
Folder::find_by_user(&headers.user.uuid, &conn).await.iter().map(Folder::to_json).collect();
|
||||
|
||||
let sends_json: Vec<Value> =
|
||||
Send::find_by_user(&headers.user.uuid, &conn).await.iter().map(Send::to_json).collect();
|
||||
|
||||
let policies_json: Vec<Value> =
|
||||
OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn).await.iter().map(OrgPolicy::to_json).collect();
|
||||
|
||||
let domains_json = if data.exclude_domains {
|
||||
Value::Null
|
||||
|
@ -147,10 +153,13 @@ async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> {
|
|||
|
||||
#[get("/ciphers")]
|
||||
async fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
|
||||
let ciphers_json = stream::iter(Cipher::find_by_user_visible(&headers.user.uuid, &conn).await)
|
||||
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await;
|
||||
let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
|
||||
|
||||
let ciphers_json = stream::iter(ciphers)
|
||||
.then(|c| async {
|
||||
let c = c; // Move out this single variable
|
||||
c.to_json(&headers.host, &headers.user.uuid, &conn).await
|
||||
c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
|
||||
})
|
||||
.collect::<Vec<Value>>()
|
||||
.await;
|
||||
|
@ -173,7 +182,7 @@ async fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult
|
|||
err!("Cipher is not owned by user")
|
||||
}
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
|
||||
}
|
||||
|
||||
#[get("/ciphers/<uuid>/admin")]
|
||||
|
@ -303,7 +312,7 @@ async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbCo
|
|||
let mut cipher = Cipher::new(data.Type, data.Name.clone());
|
||||
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate).await?;
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
|
||||
}
|
||||
|
||||
/// Enforces the personal ownership policy on user-owned ciphers, if applicable.
|
||||
|
@ -582,7 +591,7 @@ async fn put_cipher(
|
|||
|
||||
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate).await?;
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
@ -797,7 +806,7 @@ async fn share_cipher_by_uuid(
|
|||
)
|
||||
.await?;
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await))
|
||||
}
|
||||
|
||||
/// v2 API for downloading an attachment. This just redirects the client to
|
||||
|
@ -866,7 +875,7 @@ async fn post_attachment_v2(
|
|||
"AttachmentId": attachment_id,
|
||||
"Url": url,
|
||||
"FileUploadType": FileUploadType::Direct as i32,
|
||||
response_key: cipher.to_json(&headers.host, &headers.user.uuid, &conn).await,
|
||||
response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await,
|
||||
})))
|
||||
}
|
||||
|
||||
|
@ -1035,7 +1044,7 @@ async fn post_attachment(
|
|||
|
||||
let (cipher, conn) = save_attachment(attachment, uuid, data, &headers, conn, nt).await?;
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
|
||||
}
|
||||
|
||||
#[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")]
|
||||
|
@ -1399,7 +1408,7 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, n
|
|||
cipher.save(conn).await?;
|
||||
|
||||
nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await);
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await))
|
||||
}
|
||||
|
||||
async fn _restore_multiple_ciphers(
|
||||
|
@ -1463,3 +1472,66 @@ async fn _delete_cipher_attachment_by_id(
|
|||
nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// This will hold all the necessary data to improve a full sync of all the ciphers
|
||||
/// It can be used during the `Cipher::to_json()` call.
|
||||
/// It will prevent the so called N+1 SQL issue by running just a few queries which will hold all the data needed.
|
||||
/// This will not improve the speed of a single cipher.to_json() call that much, so better not to use it for those calls.
|
||||
pub struct CipherSyncData {
|
||||
pub cipher_attachments: HashMap<String, Vec<Attachment>>,
|
||||
pub cipher_folders: HashMap<String, String>,
|
||||
pub cipher_favorites: HashSet<String>,
|
||||
pub cipher_collections: HashMap<String, Vec<String>>,
|
||||
pub user_organizations: HashMap<String, UserOrganization>,
|
||||
pub user_collections: HashMap<String, CollectionUser>,
|
||||
}
|
||||
|
||||
impl CipherSyncData {
|
||||
pub async fn new(user_uuid: &str, ciphers: &Vec<Cipher>, conn: &DbConn) -> Self {
|
||||
// Generate a list of Cipher UUID's to be used during a query filter with an eq_any.
|
||||
let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.to_string()).collect::<Vec<String>>().await;
|
||||
|
||||
// Generate a list of Cipher UUID's containing a Vec with one or more Attachment records
|
||||
let mut cipher_attachments: HashMap<String, Vec<Attachment>> = HashMap::new();
|
||||
for attachment in Attachment::find_all_by_ciphers(&cipher_uuids, conn).await {
|
||||
cipher_attachments.entry(attachment.cipher_uuid.to_string()).or_default().push(attachment);
|
||||
}
|
||||
|
||||
// Generate a HashMap with the Cipher UUID as key and the Folder UUID as value
|
||||
let cipher_folders: HashMap<String, String> =
|
||||
stream::iter(FolderCipher::find_by_user(user_uuid, conn).await).collect().await;
|
||||
|
||||
// Generate a HashSet of all the Cipher UUID's which are marked as favorite
|
||||
let cipher_favorites: HashSet<String> =
|
||||
stream::iter(Favorite::get_all_cipher_uuid_by_user(user_uuid, conn).await).collect().await;
|
||||
|
||||
// Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's
|
||||
let mut cipher_collections: HashMap<String, Vec<String>> = HashMap::new();
|
||||
for (cipher, collection) in Cipher::get_collections_with_cipher_by_user(user_uuid, conn).await {
|
||||
cipher_collections.entry(cipher).or_default().push(collection);
|
||||
}
|
||||
|
||||
// Generate a HashMap with the Organization UUID as key and the UserOrganization record
|
||||
let user_organizations: HashMap<String, UserOrganization> =
|
||||
stream::iter(UserOrganization::find_by_user(user_uuid, conn).await)
|
||||
.map(|uo| (uo.org_uuid.to_string(), uo))
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
// Generate a HashMap with the User_Collections UUID as key and the CollectionUser record
|
||||
let user_collections: HashMap<String, CollectionUser> =
|
||||
stream::iter(CollectionUser::find_by_user(user_uuid, conn).await)
|
||||
.map(|uc| (uc.collection_uuid.to_string(), uc))
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
Self {
|
||||
cipher_attachments,
|
||||
cipher_folders,
|
||||
cipher_favorites,
|
||||
cipher_collections,
|
||||
user_organizations,
|
||||
user_collections,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use serde_json::Value;
|
|||
use std::borrow::Borrow;
|
||||
|
||||
use crate::{
|
||||
api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString},
|
||||
api::{core::CipherSyncData, EmptyResult, JsonResult, JsonUpcase, NumberOrString},
|
||||
auth::{decode_emergency_access_invite, Headers},
|
||||
db::{models::*, DbConn, DbPool},
|
||||
mail, CONFIG,
|
||||
|
@ -595,10 +595,13 @@ async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn)
|
|||
err!("Emergency access not valid.")
|
||||
}
|
||||
|
||||
let ciphers_json = stream::iter(Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await)
|
||||
let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
|
||||
let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, &conn).await;
|
||||
|
||||
let ciphers_json = stream::iter(ciphers)
|
||||
.then(|c| async {
|
||||
let c = c; // Move out this single variable
|
||||
c.to_json(&host, &emergency_access.grantor_uuid, &conn).await
|
||||
c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &conn).await
|
||||
})
|
||||
.collect::<Vec<Value>>()
|
||||
.await;
|
||||
|
|
|
@ -7,6 +7,7 @@ mod sends;
|
|||
pub mod two_factor;
|
||||
|
||||
pub use ciphers::purge_trashed_ciphers;
|
||||
pub use ciphers::CipherSyncData;
|
||||
pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
|
||||
pub use sends::purge_sends;
|
||||
pub use two_factor::send_incomplete_2fa_notifications;
|
||||
|
|
|
@ -4,7 +4,10 @@ use rocket::Route;
|
|||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
|
||||
api::{
|
||||
core::CipherSyncData, EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData,
|
||||
UpdateType,
|
||||
},
|
||||
auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
|
||||
db::{models::*, DbConn},
|
||||
mail, CONFIG,
|
||||
|
@ -483,10 +486,13 @@ struct OrgIdData {
|
|||
|
||||
#[get("/ciphers/organization-details?<data..>")]
|
||||
async fn get_org_details(data: OrgIdData, headers: Headers, conn: DbConn) -> Json<Value> {
|
||||
let ciphers_json = stream::iter(Cipher::find_by_org(&data.organization_id, &conn).await)
|
||||
let ciphers = Cipher::find_by_org(&data.organization_id, &conn).await;
|
||||
let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
|
||||
|
||||
let ciphers_json = stream::iter(ciphers)
|
||||
.then(|c| async {
|
||||
let c = c; // Move out this single variable
|
||||
c.to_json(&headers.host, &headers.user.uuid, &conn).await
|
||||
c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
|
||||
})
|
||||
.collect::<Vec<Value>>()
|
||||
.await;
|
||||
|
|
|
@ -206,16 +206,16 @@ macro_rules! db_run {
|
|||
// Different code for each db
|
||||
( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{
|
||||
#[allow(unused)] use diesel::prelude::*;
|
||||
#[allow(unused)] use crate::db::FromDb;
|
||||
#[allow(unused)] use $crate::db::FromDb;
|
||||
|
||||
let conn = $conn.conn.clone();
|
||||
let mut conn = conn.lock_owned().await;
|
||||
match conn.as_mut().expect("internal invariant broken: self.connection is Some") {
|
||||
$($(
|
||||
#[cfg($db)]
|
||||
crate::db::DbConnInner::$db($conn) => {
|
||||
$crate::db::DbConnInner::$db($conn) => {
|
||||
paste::paste! {
|
||||
#[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *};
|
||||
#[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *};
|
||||
#[allow(unused)] use [<__ $db _model>]::*;
|
||||
}
|
||||
|
||||
|
@ -227,16 +227,16 @@ macro_rules! db_run {
|
|||
|
||||
( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{
|
||||
#[allow(unused)] use diesel::prelude::*;
|
||||
#[allow(unused)] use crate::db::FromDb;
|
||||
#[allow(unused)] use $crate::db::FromDb;
|
||||
|
||||
let conn = $conn.conn.clone();
|
||||
let mut conn = conn.lock_owned().await;
|
||||
match conn.as_mut().expect("internal invariant broken: self.connection is Some") {
|
||||
$($(
|
||||
#[cfg($db)]
|
||||
crate::db::DbConnInner::$db($conn) => {
|
||||
$crate::db::DbConnInner::$db($conn) => {
|
||||
paste::paste! {
|
||||
#[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *};
|
||||
#[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *};
|
||||
// @ RAW: #[allow(unused)] use [<__ $db _model>]::*;
|
||||
}
|
||||
|
||||
|
@ -297,7 +297,7 @@ macro_rules! db_object {
|
|||
paste::paste! {
|
||||
#[allow(unused)] use super::*;
|
||||
#[allow(unused)] use diesel::prelude::*;
|
||||
#[allow(unused)] use crate::db::[<__ $db _schema>]::*;
|
||||
#[allow(unused)] use $crate::db::[<__ $db _schema>]::*;
|
||||
|
||||
$( #[$attr] )*
|
||||
pub struct [<$name Db>] { $(
|
||||
|
@ -309,7 +309,7 @@ macro_rules! db_object {
|
|||
#[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } }
|
||||
}
|
||||
|
||||
impl crate::db::FromDb for [<$name Db>] {
|
||||
impl $crate::db::FromDb for [<$name Db>] {
|
||||
type Output = super::$name;
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
#[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } }
|
||||
|
|
|
@ -2,14 +2,12 @@ use std::io::ErrorKind;
|
|||
|
||||
use serde_json::Value;
|
||||
|
||||
use super::Cipher;
|
||||
use crate::CONFIG;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "attachments"]
|
||||
#[changeset_options(treat_none_as_null="true")]
|
||||
#[belongs_to(super::Cipher, foreign_key = "cipher_uuid")]
|
||||
#[primary_key(id)]
|
||||
pub struct Attachment {
|
||||
pub id: String,
|
||||
|
@ -188,4 +186,15 @@ impl Attachment {
|
|||
.unwrap_or(0)
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn find_all_by_ciphers(cipher_uuids: &Vec<String>, conn: &DbConn) -> Vec<Self> {
|
||||
db_run! { conn: {
|
||||
attachments::table
|
||||
.filter(attachments::cipher_uuid.eq_any(cipher_uuids))
|
||||
.select(attachments::all_columns)
|
||||
.load::<AttachmentDb>(conn)
|
||||
.expect("Error loading attachments")
|
||||
.from_db()
|
||||
}}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,19 +1,17 @@
|
|||
use crate::CONFIG;
|
||||
use chrono::{Duration, NaiveDateTime, Utc};
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::CONFIG;
|
||||
use super::{Attachment, CollectionCipher, Favorite, FolderCipher, User, UserOrgStatus, UserOrgType, UserOrganization};
|
||||
|
||||
use super::{
|
||||
Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType,
|
||||
UserOrganization,
|
||||
};
|
||||
use crate::api::core::CipherSyncData;
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "ciphers"]
|
||||
#[changeset_options(treat_none_as_null="true")]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[belongs_to(Organization, foreign_key = "organization_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct Cipher {
|
||||
pub uuid: String,
|
||||
|
@ -82,22 +80,32 @@ use crate::error::MapResult;
|
|||
|
||||
/// Database methods
|
||||
impl Cipher {
|
||||
pub async fn to_json(&self, host: &str, user_uuid: &str, conn: &DbConn) -> Value {
|
||||
pub async fn to_json(
|
||||
&self,
|
||||
host: &str,
|
||||
user_uuid: &str,
|
||||
cipher_sync_data: Option<&CipherSyncData>,
|
||||
conn: &DbConn,
|
||||
) -> Value {
|
||||
use crate::util::format_date;
|
||||
|
||||
let attachments = Attachment::find_by_cipher(&self.uuid, conn).await;
|
||||
// When there are no attachments use null instead of an empty array
|
||||
let attachments_json = if attachments.is_empty() {
|
||||
Value::Null
|
||||
let mut attachments_json: Value = Value::Null;
|
||||
if let Some(cipher_sync_data) = cipher_sync_data {
|
||||
if let Some(attachments) = cipher_sync_data.cipher_attachments.get(&self.uuid) {
|
||||
attachments_json = attachments.iter().map(|c| c.to_json(host)).collect();
|
||||
}
|
||||
} else {
|
||||
attachments.iter().map(|c| c.to_json(host)).collect()
|
||||
};
|
||||
let attachments = Attachment::find_by_cipher(&self.uuid, conn).await;
|
||||
if !attachments.is_empty() {
|
||||
attachments_json = attachments.iter().map(|c| c.to_json(host)).collect()
|
||||
}
|
||||
}
|
||||
|
||||
let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
|
||||
let password_history_json =
|
||||
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
|
||||
|
||||
let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, conn).await {
|
||||
let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, cipher_sync_data, conn).await {
|
||||
Some((ro, hp)) => (ro, hp),
|
||||
None => {
|
||||
error!("Cipher ownership assertion failure");
|
||||
|
@ -109,7 +117,7 @@ impl Cipher {
|
|||
// If not passing an empty object, mobile clients will crash.
|
||||
let mut type_data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({}));
|
||||
|
||||
// NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream
|
||||
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
|
||||
// Set the first element of the Uris array as Uri, this is needed several (mobile) clients.
|
||||
if self.atype == 1 {
|
||||
if type_data_json["Uris"].is_array() {
|
||||
|
@ -124,13 +132,23 @@ impl Cipher {
|
|||
// Clone the type_data and add some default value.
|
||||
let mut data_json = type_data_json.clone();
|
||||
|
||||
// NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream
|
||||
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
|
||||
// data_json should always contain the following keys with every atype
|
||||
data_json["Fields"] = json!(fields_json);
|
||||
data_json["Name"] = json!(self.name);
|
||||
data_json["Notes"] = json!(self.notes);
|
||||
data_json["PasswordHistory"] = json!(password_history_json);
|
||||
|
||||
let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data {
|
||||
if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
|
||||
Cow::from(cipher_collections)
|
||||
} else {
|
||||
Cow::from(Vec::with_capacity(0))
|
||||
}
|
||||
} else {
|
||||
Cow::from(self.get_collections(user_uuid, conn).await)
|
||||
};
|
||||
|
||||
// There are three types of cipher response models in upstream
|
||||
// Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order
|
||||
// of increasing level of detail). vaultwarden currently only
|
||||
|
@ -144,8 +162,8 @@ impl Cipher {
|
|||
"Type": self.atype,
|
||||
"RevisionDate": format_date(&self.updated_at),
|
||||
"DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
|
||||
"FolderId": self.get_folder_uuid(user_uuid, conn).await,
|
||||
"Favorite": self.is_favorite(user_uuid, conn).await,
|
||||
"FolderId": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string() ) } else { self.get_folder_uuid(user_uuid, conn).await },
|
||||
"Favorite": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_favorites.contains(&self.uuid) } else { self.is_favorite(user_uuid, conn).await },
|
||||
"Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
|
||||
"OrganizationId": self.organization_uuid,
|
||||
"Attachments": attachments_json,
|
||||
|
@ -154,7 +172,7 @@ impl Cipher {
|
|||
"OrganizationUseTotp": true,
|
||||
|
||||
// This field is specific to the cipherDetails type.
|
||||
"CollectionIds": self.get_collections(user_uuid, conn).await,
|
||||
"CollectionIds": collection_ids,
|
||||
|
||||
"Name": self.name,
|
||||
"Notes": self.notes,
|
||||
|
@ -318,13 +336,21 @@ impl Cipher {
|
|||
}
|
||||
|
||||
/// Returns whether this cipher is owned by an org in which the user has full access.
|
||||
pub async fn is_in_full_access_org(&self, user_uuid: &str, conn: &DbConn) -> bool {
|
||||
pub async fn is_in_full_access_org(
|
||||
&self,
|
||||
user_uuid: &str,
|
||||
cipher_sync_data: Option<&CipherSyncData>,
|
||||
conn: &DbConn,
|
||||
) -> bool {
|
||||
if let Some(ref org_uuid) = self.organization_uuid {
|
||||
if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await {
|
||||
if let Some(cipher_sync_data) = cipher_sync_data {
|
||||
if let Some(cached_user_org) = cipher_sync_data.user_organizations.get(org_uuid) {
|
||||
return cached_user_org.has_full_access();
|
||||
}
|
||||
} else if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await {
|
||||
return user_org.has_full_access();
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
|
@ -333,18 +359,62 @@ impl Cipher {
|
|||
/// not in any collection the user has access to. Otherwise, the user has
|
||||
/// access to this cipher, and Some(read_only, hide_passwords) represents
|
||||
/// the access restrictions.
|
||||
pub async fn get_access_restrictions(&self, user_uuid: &str, conn: &DbConn) -> Option<(bool, bool)> {
|
||||
pub async fn get_access_restrictions(
|
||||
&self,
|
||||
user_uuid: &str,
|
||||
cipher_sync_data: Option<&CipherSyncData>,
|
||||
conn: &DbConn,
|
||||
) -> Option<(bool, bool)> {
|
||||
// Check whether this cipher is directly owned by the user, or is in
|
||||
// a collection that the user has full access to. If so, there are no
|
||||
// access restrictions.
|
||||
if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, conn).await {
|
||||
if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, cipher_sync_data, conn).await {
|
||||
return Some((false, false));
|
||||
}
|
||||
|
||||
let rows = if let Some(cipher_sync_data) = cipher_sync_data {
|
||||
let mut rows: Vec<(bool, bool)> = Vec::new();
|
||||
if let Some(collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
|
||||
for collection in collections {
|
||||
if let Some(uc) = cipher_sync_data.user_collections.get(collection) {
|
||||
rows.push((uc.read_only, uc.hide_passwords));
|
||||
}
|
||||
}
|
||||
}
|
||||
rows
|
||||
} else {
|
||||
self.get_collections_access_flags(user_uuid, conn).await
|
||||
};
|
||||
|
||||
if rows.is_empty() {
|
||||
// This cipher isn't in any collections accessible to the user.
|
||||
return None;
|
||||
}
|
||||
|
||||
// A cipher can be in multiple collections with inconsistent access flags.
|
||||
// For example, a cipher could be in one collection where the user has
|
||||
// read-only access, but also in another collection where the user has
|
||||
// read/write access. For a flag to be in effect for a cipher, upstream
|
||||
// requires all collections the cipher is in to have that flag set.
|
||||
// Therefore, we do a boolean AND of all values in each of the `read_only`
|
||||
// and `hide_passwords` columns. This could ideally be done as part of the
|
||||
// query, but Diesel doesn't support a min() or bool_and() function on
|
||||
// booleans and this behavior isn't portable anyway.
|
||||
let mut read_only = true;
|
||||
let mut hide_passwords = true;
|
||||
for (ro, hp) in rows.iter() {
|
||||
read_only &= ro;
|
||||
hide_passwords &= hp;
|
||||
}
|
||||
|
||||
Some((read_only, hide_passwords))
|
||||
}
|
||||
|
||||
pub async fn get_collections_access_flags(&self, user_uuid: &str, conn: &DbConn) -> Vec<(bool, bool)> {
|
||||
db_run! {conn: {
|
||||
// Check whether this cipher is in any collections accessible to the
|
||||
// user. If so, retrieve the access flags for each collection.
|
||||
let rows = ciphers::table
|
||||
ciphers::table
|
||||
.filter(ciphers::uuid.eq(&self.uuid))
|
||||
.inner_join(ciphers_collections::table.on(
|
||||
ciphers::uuid.eq(ciphers_collections::cipher_uuid)))
|
||||
|
@ -353,42 +423,19 @@ impl Cipher {
|
|||
.and(users_collections::user_uuid.eq(user_uuid))))
|
||||
.select((users_collections::read_only, users_collections::hide_passwords))
|
||||
.load::<(bool, bool)>(conn)
|
||||
.expect("Error getting access restrictions");
|
||||
|
||||
if rows.is_empty() {
|
||||
// This cipher isn't in any collections accessible to the user.
|
||||
return None;
|
||||
}
|
||||
|
||||
// A cipher can be in multiple collections with inconsistent access flags.
|
||||
// For example, a cipher could be in one collection where the user has
|
||||
// read-only access, but also in another collection where the user has
|
||||
// read/write access. For a flag to be in effect for a cipher, upstream
|
||||
// requires all collections the cipher is in to have that flag set.
|
||||
// Therefore, we do a boolean AND of all values in each of the `read_only`
|
||||
// and `hide_passwords` columns. This could ideally be done as part of the
|
||||
// query, but Diesel doesn't support a min() or bool_and() function on
|
||||
// booleans and this behavior isn't portable anyway.
|
||||
let mut read_only = true;
|
||||
let mut hide_passwords = true;
|
||||
for (ro, hp) in rows.iter() {
|
||||
read_only &= ro;
|
||||
hide_passwords &= hp;
|
||||
}
|
||||
|
||||
Some((read_only, hide_passwords))
|
||||
.expect("Error getting access restrictions")
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
|
||||
match self.get_access_restrictions(user_uuid, conn).await {
|
||||
match self.get_access_restrictions(user_uuid, None, conn).await {
|
||||
Some((read_only, _hide_passwords)) => !read_only,
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
|
||||
self.get_access_restrictions(user_uuid, conn).await.is_some()
|
||||
self.get_access_restrictions(user_uuid, None, conn).await.is_some()
|
||||
}
|
||||
|
||||
// Returns whether this cipher is a favorite of the specified user.
|
||||
|
@ -563,4 +610,32 @@ impl Cipher {
|
|||
.load::<String>(conn).unwrap_or_default()
|
||||
}}
|
||||
}
|
||||
|
||||
/// Return a Vec with (cipher_uuid, collection_uuid)
|
||||
/// This is used during a full sync so we only need one query for all collections accessible.
|
||||
pub async fn get_collections_with_cipher_by_user(user_id: &str, conn: &DbConn) -> Vec<(String, String)> {
|
||||
db_run! {conn: {
|
||||
ciphers_collections::table
|
||||
.inner_join(collections::table.on(
|
||||
collections::uuid.eq(ciphers_collections::collection_uuid)
|
||||
))
|
||||
.inner_join(users_organizations::table.on(
|
||||
users_organizations::org_uuid.eq(collections::org_uuid).and(
|
||||
users_organizations::user_uuid.eq(user_id)
|
||||
)
|
||||
))
|
||||
.left_join(users_collections::table.on(
|
||||
users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and(
|
||||
users_collections::user_uuid.eq(user_id)
|
||||
)
|
||||
))
|
||||
.filter(users_collections::user_uuid.eq(user_id).or( // User has access to collection
|
||||
users_organizations::access_all.eq(true).or( // User has access all
|
||||
users_organizations::atype.le(UserOrgType::Admin as i32) // User is admin or owner
|
||||
)
|
||||
))
|
||||
.select(ciphers_collections::all_columns)
|
||||
.load::<(String, String)>(conn).unwrap_or_default()
|
||||
}}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
use serde_json::Value;
|
||||
|
||||
use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization};
|
||||
use super::{User, UserOrgStatus, UserOrgType, UserOrganization};
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "collections"]
|
||||
#[belongs_to(Organization, foreign_key = "org_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct Collection {
|
||||
pub uuid: String,
|
||||
|
@ -13,10 +12,8 @@ db_object! {
|
|||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations)]
|
||||
#[derive(Identifiable, Queryable, Insertable)]
|
||||
#[table_name = "users_collections"]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[belongs_to(Collection, foreign_key = "collection_uuid")]
|
||||
#[primary_key(user_uuid, collection_uuid)]
|
||||
pub struct CollectionUser {
|
||||
pub user_uuid: String,
|
||||
|
@ -25,10 +22,8 @@ db_object! {
|
|||
pub hide_passwords: bool,
|
||||
}
|
||||
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations)]
|
||||
#[derive(Identifiable, Queryable, Insertable)]
|
||||
#[table_name = "ciphers_collections"]
|
||||
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
|
||||
#[belongs_to(Collection, foreign_key = "collection_uuid")]
|
||||
#[primary_key(cipher_uuid, collection_uuid)]
|
||||
pub struct CollectionCipher {
|
||||
pub cipher_uuid: String,
|
||||
|
@ -57,11 +52,32 @@ impl Collection {
|
|||
})
|
||||
}
|
||||
|
||||
pub async fn to_json_details(&self, user_uuid: &str, conn: &DbConn) -> Value {
|
||||
pub async fn to_json_details(
|
||||
&self,
|
||||
user_uuid: &str,
|
||||
cipher_sync_data: Option<&crate::api::core::CipherSyncData>,
|
||||
conn: &DbConn,
|
||||
) -> Value {
|
||||
let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data {
|
||||
match cipher_sync_data.user_organizations.get(&self.org_uuid) {
|
||||
Some(uo) if uo.has_full_access() => (false, false),
|
||||
Some(_) => {
|
||||
if let Some(uc) = cipher_sync_data.user_collections.get(&self.uuid) {
|
||||
(uc.read_only, uc.hide_passwords)
|
||||
} else {
|
||||
(false, false)
|
||||
}
|
||||
}
|
||||
_ => (true, true),
|
||||
}
|
||||
} else {
|
||||
(!self.is_writable_by_user(user_uuid, conn).await, self.hide_passwords_for_user(user_uuid, conn).await)
|
||||
};
|
||||
|
||||
let mut json_object = self.to_json();
|
||||
json_object["Object"] = json!("collectionDetails");
|
||||
json_object["ReadOnly"] = json!(!self.is_writable_by_user(user_uuid, conn).await);
|
||||
json_object["HidePasswords"] = json!(self.hide_passwords_for_user(user_uuid, conn).await);
|
||||
json_object["ReadOnly"] = json!(read_only);
|
||||
json_object["HidePasswords"] = json!(hide_passwords);
|
||||
json_object
|
||||
}
|
||||
}
|
||||
|
@ -374,6 +390,17 @@ impl CollectionUser {
|
|||
}}
|
||||
}
|
||||
|
||||
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
|
||||
db_run! { conn: {
|
||||
users_collections::table
|
||||
.filter(users_collections::user_uuid.eq(user_uuid))
|
||||
.select(users_collections::all_columns)
|
||||
.load::<CollectionUserDb>(conn)
|
||||
.expect("Error loading users_collections")
|
||||
.from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
|
||||
for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() {
|
||||
User::update_uuid_revision(&collection.user_uuid, conn).await;
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
use chrono::{NaiveDateTime, Utc};
|
||||
|
||||
use super::User;
|
||||
use crate::CONFIG;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "devices"]
|
||||
#[changeset_options(treat_none_as_null="true")]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[primary_key(uuid, user_uuid)]
|
||||
pub struct Device {
|
||||
pub uuid: String,
|
||||
|
|
|
@ -4,10 +4,9 @@ use serde_json::Value;
|
|||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "emergency_access"]
|
||||
#[changeset_options(treat_none_as_null="true")]
|
||||
#[belongs_to(User, foreign_key = "grantor_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct EmergencyAccess {
|
||||
pub uuid: String,
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
use super::{Cipher, User};
|
||||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations)]
|
||||
#[derive(Identifiable, Queryable, Insertable)]
|
||||
#[table_name = "favorites"]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
|
||||
#[primary_key(user_uuid, cipher_uuid)]
|
||||
pub struct Favorite {
|
||||
pub user_uuid: String,
|
||||
|
@ -80,4 +78,16 @@ impl Favorite {
|
|||
.map_res("Error removing favorites by user")
|
||||
}}
|
||||
}
|
||||
|
||||
/// Return a vec with (cipher_uuid) this will only contain favorite flagged ciphers
|
||||
/// This is used during a full sync so we only need one query for all favorite cipher matches.
|
||||
pub async fn get_all_cipher_uuid_by_user(user_uuid: &str, conn: &DbConn) -> Vec<String> {
|
||||
db_run! { conn: {
|
||||
favorites::table
|
||||
.filter(favorites::user_uuid.eq(user_uuid))
|
||||
.select(favorites::cipher_uuid)
|
||||
.load::<String>(conn)
|
||||
.unwrap_or_default()
|
||||
}}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
use chrono::{NaiveDateTime, Utc};
|
||||
use serde_json::Value;
|
||||
|
||||
use super::{Cipher, User};
|
||||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "folders"]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct Folder {
|
||||
pub uuid: String,
|
||||
|
@ -16,10 +15,8 @@ db_object! {
|
|||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations)]
|
||||
#[derive(Identifiable, Queryable, Insertable)]
|
||||
#[table_name = "folders_ciphers"]
|
||||
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
|
||||
#[belongs_to(Folder, foreign_key = "folder_uuid")]
|
||||
#[primary_key(cipher_uuid, folder_uuid)]
|
||||
pub struct FolderCipher {
|
||||
pub cipher_uuid: String,
|
||||
|
@ -215,4 +212,17 @@ impl FolderCipher {
|
|||
.from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
/// Return a vec with (cipher_uuid, folder_uuid)
|
||||
/// This is used during a full sync so we only need one query for all folder matches.
|
||||
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<(String, String)> {
|
||||
db_run! { conn: {
|
||||
folders_ciphers::table
|
||||
.inner_join(folders::table)
|
||||
.filter(folders::user_uuid.eq(user_uuid))
|
||||
.select(folders_ciphers::all_columns)
|
||||
.load::<(String, String)>(conn)
|
||||
.unwrap_or_default()
|
||||
}}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,12 +6,11 @@ use crate::db::DbConn;
|
|||
use crate::error::MapResult;
|
||||
use crate::util::UpCase;
|
||||
|
||||
use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
|
||||
use super::{UserOrgStatus, UserOrgType, UserOrganization};
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "org_policies"]
|
||||
#[belongs_to(Organization, foreign_key = "org_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct OrgPolicy {
|
||||
pub uuid: String,
|
||||
|
|
|
@ -547,6 +547,15 @@ impl UserOrganization {
|
|||
}}
|
||||
}
|
||||
|
||||
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
|
||||
db_run! { conn: {
|
||||
users_organizations::table
|
||||
.filter(users_organizations::user_uuid.eq(user_uuid))
|
||||
.load::<UserOrganizationDb>(conn)
|
||||
.expect("Error loading user organizations").from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec<Self> {
|
||||
db_run! { conn: {
|
||||
users_organizations::table
|
||||
|
|
|
@ -1,14 +1,12 @@
|
|||
use chrono::{NaiveDateTime, Utc};
|
||||
use serde_json::Value;
|
||||
|
||||
use super::{Organization, User};
|
||||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "sends"]
|
||||
#[changeset_options(treat_none_as_null="true")]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[belongs_to(Organization, foreign_key = "organization_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct Send {
|
||||
pub uuid: String,
|
||||
|
|
|
@ -2,12 +2,9 @@ use serde_json::Value;
|
|||
|
||||
use crate::{api::EmptyResult, db::DbConn, error::MapResult};
|
||||
|
||||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "twofactor"]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[primary_key(uuid)]
|
||||
pub struct TwoFactor {
|
||||
pub uuid: String,
|
||||
|
|
|
@ -2,12 +2,9 @@ use chrono::{NaiveDateTime, Utc};
|
|||
|
||||
use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONFIG};
|
||||
|
||||
use super::User;
|
||||
|
||||
db_object! {
|
||||
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
|
||||
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
|
||||
#[table_name = "twofactor_incomplete"]
|
||||
#[belongs_to(User, foreign_key = "user_uuid")]
|
||||
#[primary_key(user_uuid, device_uuid)]
|
||||
pub struct TwoFactorIncomplete {
|
||||
pub user_uuid: String,
|
||||
|
|
16
src/error.rs
16
src/error.rs
|
@ -214,20 +214,20 @@ impl<'r> Responder<'r, 'static> for Error {
|
|||
macro_rules! err {
|
||||
($msg:expr) => {{
|
||||
error!("{}", $msg);
|
||||
return Err(crate::error::Error::new($msg, $msg));
|
||||
return Err($crate::error::Error::new($msg, $msg));
|
||||
}};
|
||||
($usr_msg:expr, $log_value:expr) => {{
|
||||
error!("{}. {}", $usr_msg, $log_value);
|
||||
return Err(crate::error::Error::new($usr_msg, $log_value));
|
||||
return Err($crate::error::Error::new($usr_msg, $log_value));
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! err_silent {
|
||||
($msg:expr) => {{
|
||||
return Err(crate::error::Error::new($msg, $msg));
|
||||
return Err($crate::error::Error::new($msg, $msg));
|
||||
}};
|
||||
($usr_msg:expr, $log_value:expr) => {{
|
||||
return Err(crate::error::Error::new($usr_msg, $log_value));
|
||||
return Err($crate::error::Error::new($usr_msg, $log_value));
|
||||
}};
|
||||
}
|
||||
|
||||
|
@ -235,11 +235,11 @@ macro_rules! err_silent {
|
|||
macro_rules! err_code {
|
||||
($msg:expr, $err_code: expr) => {{
|
||||
error!("{}", $msg);
|
||||
return Err(crate::error::Error::new($msg, $msg).with_code($err_code));
|
||||
return Err($crate::error::Error::new($msg, $msg).with_code($err_code));
|
||||
}};
|
||||
($usr_msg:expr, $log_value:expr, $err_code: expr) => {{
|
||||
error!("{}. {}", $usr_msg, $log_value);
|
||||
return Err(crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
|
||||
return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
|
||||
}};
|
||||
}
|
||||
|
||||
|
@ -247,11 +247,11 @@ macro_rules! err_code {
|
|||
macro_rules! err_discard {
|
||||
($msg:expr, $data:expr) => {{
|
||||
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
|
||||
return Err(crate::error::Error::new($msg, $msg));
|
||||
return Err($crate::error::Error::new($msg, $msg));
|
||||
}};
|
||||
($usr_msg:expr, $log_value:expr, $data:expr) => {{
|
||||
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
|
||||
return Err(crate::error::Error::new($usr_msg, $log_value));
|
||||
return Err($crate::error::Error::new($usr_msg, $log_value));
|
||||
}};
|
||||
}
|
||||
|
||||
|
|
|
@ -377,12 +377,13 @@ async fn schedule_jobs(pool: db::DbPool) {
|
|||
return;
|
||||
}
|
||||
|
||||
let runtime = tokio::runtime::Handle::current();
|
||||
let runtime = tokio::runtime::Runtime::new().unwrap();
|
||||
|
||||
thread::Builder::new()
|
||||
.name("job-scheduler".to_string())
|
||||
.spawn(move || {
|
||||
use job_scheduler::{Job, JobScheduler};
|
||||
let _runtime_guard = runtime.enter();
|
||||
|
||||
let mut sched = JobScheduler::new();
|
||||
|
||||
|
|
216
src/static/scripts/bootstrap-native.js
vendored
216
src/static/scripts/bootstrap-native.js
vendored
|
@ -1,5 +1,5 @@
|
|||
/*!
|
||||
* Native JavaScript for Bootstrap v4.1.0 (https://thednp.github.io/bootstrap.native/)
|
||||
* Native JavaScript for Bootstrap v4.1.2 (https://thednp.github.io/bootstrap.native/)
|
||||
* Copyright 2015-2022 © dnp_theme
|
||||
* Licensed under MIT (https://github.com/thednp/bootstrap.native/blob/master/LICENSE)
|
||||
*/
|
||||
|
@ -545,7 +545,7 @@
|
|||
return normalOps;
|
||||
}
|
||||
|
||||
var version = "4.1.0";
|
||||
var version = "4.1.2";
|
||||
|
||||
const Version = version;
|
||||
|
||||
|
@ -2814,6 +2814,29 @@
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This is a shortie for `document.createElement` method
|
||||
* which allows you to create a new `HTMLElement` for a given `tagName`
|
||||
* or based on an object with specific non-readonly attributes:
|
||||
* `id`, `className`, `textContent`, `style`, etc.
|
||||
* @see https://developer.mozilla.org/en-US/docs/Web/API/Document/createElement
|
||||
*
|
||||
* @param {Record<string, string> | string} param `tagName` or object
|
||||
* @return {HTMLElement | Element} a new `HTMLElement` or `Element`
|
||||
*/
|
||||
function createElement(param) {
|
||||
if (typeof param === 'string') {
|
||||
return getDocument().createElement(param);
|
||||
}
|
||||
|
||||
const { tagName } = param;
|
||||
const attr = { ...param };
|
||||
const newElement = createElement(tagName);
|
||||
delete attr.tagName;
|
||||
ObjectAssign(newElement, attr);
|
||||
return newElement;
|
||||
}
|
||||
|
||||
/** @type {string} */
|
||||
const offcanvasString = 'offcanvas';
|
||||
|
||||
|
@ -2824,7 +2847,7 @@
|
|||
const offcanvasActiveSelector = `.${offcanvasString}.${showClass}`;
|
||||
|
||||
// any document would suffice
|
||||
const overlay = getDocument().createElement('div');
|
||||
const overlay = createElement('div');
|
||||
|
||||
/**
|
||||
* Returns the current active modal / offcancas element.
|
||||
|
@ -2863,8 +2886,10 @@
|
|||
* Shows the overlay to the user.
|
||||
*/
|
||||
function showOverlay() {
|
||||
addClass(overlay, showClass);
|
||||
reflow(overlay);
|
||||
if (!hasClass(overlay, showClass)) {
|
||||
addClass(overlay, showClass);
|
||||
reflow(overlay);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -2949,7 +2974,7 @@
|
|||
|
||||
if (!modalOverflow && scrollbarWidth) {
|
||||
const pad = isRTL(element) ? 'paddingLeft' : 'paddingRight';
|
||||
// @ts-ignore
|
||||
// @ts-ignore -- cannot use `setElementStyle`
|
||||
element.style[pad] = `${scrollbarWidth}px`;
|
||||
}
|
||||
setScrollbar(element, (modalOverflow || clientHeight !== scrollHeight));
|
||||
|
@ -2989,15 +3014,16 @@
|
|||
* @param {Modal} self the `Modal` instance
|
||||
*/
|
||||
function afterModalHide(self) {
|
||||
const { triggers, element } = self;
|
||||
const { triggers, element, relatedTarget } = self;
|
||||
removeOverlay(element);
|
||||
// @ts-ignore
|
||||
element.style.paddingRight = '';
|
||||
setElementStyle(element, { paddingRight: '' });
|
||||
toggleModalDismiss(self);
|
||||
|
||||
if (triggers.length) {
|
||||
const visibleTrigger = triggers.find((x) => isVisible(x));
|
||||
if (visibleTrigger) focus(visibleTrigger);
|
||||
}
|
||||
const focusElement = showModalEvent.relatedTarget || triggers.find(isVisible);
|
||||
if (focusElement) focus(focusElement);
|
||||
|
||||
hiddenModalEvent.relatedTarget = relatedTarget;
|
||||
dispatchEvent(element, hiddenModalEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -3019,12 +3045,11 @@
|
|||
*/
|
||||
function beforeModalShow(self) {
|
||||
const { element, hasFade } = self;
|
||||
// @ts-ignore
|
||||
element.style.display = 'block';
|
||||
setElementStyle(element, { display: 'block' });
|
||||
|
||||
setModalScrollbar(self);
|
||||
if (!getCurrentOpen(element)) {
|
||||
getDocumentBody(element).style.overflow = 'hidden';
|
||||
setElementStyle(getDocumentBody(element), { overflow: 'hidden' });
|
||||
}
|
||||
|
||||
addClass(element, showClass);
|
||||
|
@ -3042,11 +3067,10 @@
|
|||
*/
|
||||
function beforeModalHide(self, force) {
|
||||
const {
|
||||
element, options, relatedTarget, hasFade,
|
||||
element, options, hasFade,
|
||||
} = self;
|
||||
|
||||
// @ts-ignore
|
||||
element.style.display = '';
|
||||
setElementStyle(element, { display: '' });
|
||||
|
||||
// force can also be the transitionEvent object, we wanna make sure it's not
|
||||
// call is not forced and overlay is visible
|
||||
|
@ -3057,11 +3081,6 @@
|
|||
} else {
|
||||
afterModalHide(self);
|
||||
}
|
||||
|
||||
toggleModalDismiss(self);
|
||||
|
||||
hiddenModalEvent.relatedTarget = relatedTarget;
|
||||
dispatchEvent(element, hiddenModalEvent);
|
||||
}
|
||||
|
||||
// MODAL EVENT HANDLERS
|
||||
|
@ -3243,14 +3262,15 @@
|
|||
}
|
||||
|
||||
if (backdrop) {
|
||||
if (!currentOpen && !hasClass(overlay, showClass)) {
|
||||
if (!container.contains(overlay)) {
|
||||
appendOverlay(container, hasFade, true);
|
||||
} else {
|
||||
toggleOverlayType(true);
|
||||
}
|
||||
|
||||
overlayDelay = getElementTransitionDuration(overlay);
|
||||
|
||||
if (!hasClass(overlay, showClass)) showOverlay();
|
||||
showOverlay();
|
||||
setTimeout(() => beforeModalShow(self), overlayDelay);
|
||||
} else {
|
||||
beforeModalShow(self);
|
||||
|
@ -3398,13 +3418,12 @@
|
|||
|
||||
if (!options.scroll) {
|
||||
setOffCanvasScrollbar(self);
|
||||
getDocumentBody(element).style.overflow = 'hidden';
|
||||
setElementStyle(getDocumentBody(element), { overflow: 'hidden' });
|
||||
}
|
||||
|
||||
addClass(element, offcanvasTogglingClass);
|
||||
addClass(element, showClass);
|
||||
// @ts-ignore
|
||||
element.style.visibility = 'visible';
|
||||
setElementStyle(element, { visibility: 'visible' });
|
||||
|
||||
emulateTransitionEnd(element, () => showOffcanvasComplete(self));
|
||||
}
|
||||
|
@ -3509,17 +3528,13 @@
|
|||
* @param {Offcanvas} self the `Offcanvas` instance
|
||||
*/
|
||||
function showOffcanvasComplete(self) {
|
||||
const { element, triggers } = self;
|
||||
const { element } = self;
|
||||
removeClass(element, offcanvasTogglingClass);
|
||||
|
||||
removeAttribute(element, ariaHidden);
|
||||
setAttribute(element, ariaModal, 'true');
|
||||
setAttribute(element, 'role', 'dialog');
|
||||
|
||||
if (triggers.length) {
|
||||
triggers.forEach((btn) => setAttribute(btn, ariaExpanded, 'true'));
|
||||
}
|
||||
|
||||
dispatchEvent(element, shownOffcanvasEvent);
|
||||
|
||||
toggleOffCanvasDismiss(self, true);
|
||||
|
@ -3537,14 +3552,10 @@
|
|||
setAttribute(element, ariaHidden, 'true');
|
||||
removeAttribute(element, ariaModal);
|
||||
removeAttribute(element, 'role');
|
||||
// @ts-ignore
|
||||
element.style.visibility = '';
|
||||
setElementStyle(element, { visibility: '' });
|
||||
|
||||
if (triggers.length) {
|
||||
triggers.forEach((btn) => setAttribute(btn, ariaExpanded, 'false'));
|
||||
const visibleTrigger = triggers.find((x) => isVisible(x));
|
||||
if (visibleTrigger) focus(visibleTrigger);
|
||||
}
|
||||
const visibleTrigger = showOffcanvasEvent.relatedTarget || triggers.find((x) => isVisible(x));
|
||||
if (visibleTrigger) focus(visibleTrigger);
|
||||
|
||||
removeOverlay(element);
|
||||
|
||||
|
@ -3634,13 +3645,14 @@
|
|||
}
|
||||
|
||||
if (options.backdrop) {
|
||||
if (!currentOpen) {
|
||||
if (!container.contains(overlay)) {
|
||||
appendOverlay(container, true);
|
||||
} else {
|
||||
toggleOverlayType();
|
||||
}
|
||||
|
||||
overlayDelay = getElementTransitionDuration(overlay);
|
||||
if (!hasClass(overlay, showClass)) showOverlay();
|
||||
showOverlay();
|
||||
|
||||
setTimeout(() => beforeOffcanvasShow(self), overlayDelay);
|
||||
} else {
|
||||
|
@ -4055,7 +4067,8 @@
|
|||
*/
|
||||
const mousehoverEvent = 'hover';
|
||||
|
||||
let elementUID = 1;
|
||||
let elementUID = 0;
|
||||
let elementMapUID = 0;
|
||||
const elementIDMap = new Map();
|
||||
|
||||
/**
|
||||
|
@ -4066,27 +4079,25 @@
|
|||
* @returns {number} an existing or new unique ID
|
||||
*/
|
||||
function getUID(element, key) {
|
||||
elementUID += 1;
|
||||
let elMap = elementIDMap.get(element);
|
||||
let result = elementUID;
|
||||
let result = key ? elementUID : elementMapUID;
|
||||
|
||||
if (key && key.length) {
|
||||
if (elMap) {
|
||||
const elMapId = elMap.get(key);
|
||||
if (!Number.isNaN(elMapId)) {
|
||||
result = elMapId;
|
||||
} else {
|
||||
elMap.set(key, result);
|
||||
}
|
||||
} else {
|
||||
elementIDMap.set(element, new Map());
|
||||
elMap = elementIDMap.get(element);
|
||||
elMap.set(key, result);
|
||||
if (key) {
|
||||
const elID = getUID(element);
|
||||
const elMap = elementIDMap.get(elID) || new Map();
|
||||
if (!elementIDMap.has(elID)) {
|
||||
elementIDMap.set(elID, elMap);
|
||||
}
|
||||
} else if (!Number.isNaN(elMap)) {
|
||||
result = elMap;
|
||||
if (!elMap.has(key)) {
|
||||
elMap.set(key, result);
|
||||
elementUID += 1;
|
||||
} else result = elMap.get(key);
|
||||
} else {
|
||||
elementIDMap.set(element, result);
|
||||
const elkey = element.id || element;
|
||||
|
||||
if (!elementIDMap.has(elkey)) {
|
||||
elementIDMap.set(elkey, result);
|
||||
elementMapUID += 1;
|
||||
} else result = elementIDMap.get(elkey);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -5098,6 +5109,8 @@
|
|||
const hiddenTabEvent = OriginalEvent(`hidden.bs.${tabString}`);
|
||||
|
||||
/**
|
||||
* Stores the current active tab and its content
|
||||
* for a given `.nav` element.
|
||||
* @type {Map<(HTMLElement | Element), any>}
|
||||
*/
|
||||
const tabPrivate = new Map();
|
||||
|
@ -5111,7 +5124,7 @@
|
|||
function triggerTabEnd(self) {
|
||||
const { tabContent, nav } = self;
|
||||
|
||||
if (tabContent) {
|
||||
if (tabContent && hasClass(tabContent, collapsingClass)) {
|
||||
// @ts-ignore
|
||||
tabContent.style.height = '';
|
||||
removeClass(tabContent, collapsingClass);
|
||||
|
@ -5125,11 +5138,13 @@
|
|||
* @param {Tab} self the `Tab` instance
|
||||
*/
|
||||
function triggerTabShow(self) {
|
||||
const { element, tabContent, nav } = self;
|
||||
const { currentHeight, nextHeight } = tabPrivate.get(element);
|
||||
const {
|
||||
element, tabContent, content: nextContent, nav,
|
||||
} = self;
|
||||
const { tab } = nav && tabPrivate.get(nav);
|
||||
|
||||
if (tabContent) { // height animation
|
||||
if (tabContent && hasClass(nextContent, fadeClass)) { // height animation
|
||||
const { currentHeight, nextHeight } = tabPrivate.get(element);
|
||||
if (currentHeight === nextHeight) {
|
||||
triggerTabEnd(self);
|
||||
} else {
|
||||
|
@ -5141,6 +5156,7 @@
|
|||
}, 50);
|
||||
}
|
||||
} else if (nav) Timer.clear(nav);
|
||||
|
||||
shownTabEvent.relatedTarget = tab;
|
||||
dispatchEvent(element, shownTabEvent);
|
||||
}
|
||||
|
@ -5156,9 +5172,11 @@
|
|||
const { tab, content } = nav && tabPrivate.get(nav);
|
||||
let currentHeight = 0;
|
||||
|
||||
if (tabContent) {
|
||||
[content, nextContent].forEach((c) => addClass(c, 'overflow-hidden'));
|
||||
currentHeight = content.scrollHeight;
|
||||
if (tabContent && hasClass(nextContent, fadeClass)) {
|
||||
[content, nextContent].forEach((c) => {
|
||||
addClass(c, 'overflow-hidden');
|
||||
});
|
||||
currentHeight = content.scrollHeight || 0;
|
||||
}
|
||||
|
||||
// update relatedTarget and dispatch event
|
||||
|
@ -5170,7 +5188,7 @@
|
|||
addClass(nextContent, activeClass);
|
||||
removeClass(content, activeClass);
|
||||
|
||||
if (tabContent) {
|
||||
if (tabContent && hasClass(nextContent, fadeClass)) {
|
||||
const nextHeight = nextContent.scrollHeight;
|
||||
tabPrivate.set(element, { currentHeight, nextHeight });
|
||||
|
||||
|
@ -5178,7 +5196,9 @@
|
|||
// @ts-ignore -- height animation
|
||||
tabContent.style.height = `${currentHeight}px`;
|
||||
reflow(tabContent);
|
||||
[content, nextContent].forEach((c) => removeClass(c, 'overflow-hidden'));
|
||||
[content, nextContent].forEach((c) => {
|
||||
removeClass(c, 'overflow-hidden');
|
||||
});
|
||||
}
|
||||
|
||||
if (nextContent && hasClass(nextContent, fadeClass)) {
|
||||
|
@ -5187,8 +5207,11 @@
|
|||
emulateTransitionEnd(nextContent, () => {
|
||||
triggerTabShow(self);
|
||||
});
|
||||
}, 17);
|
||||
} else { triggerTabShow(self); }
|
||||
}, 1);
|
||||
} else {
|
||||
addClass(nextContent, showClass);
|
||||
triggerTabShow(self);
|
||||
}
|
||||
|
||||
dispatchEvent(tab, hiddenTabEvent);
|
||||
}
|
||||
|
@ -5217,6 +5240,16 @@
|
|||
return { tab, content };
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a parent dropdown.
|
||||
* @param {HTMLElement | Element} element the `Tab` element
|
||||
* @returns {(HTMLElement | Element)?} the parent dropdown
|
||||
*/
|
||||
function getParentDropdown(element) {
|
||||
const dropdown = closest(element, `.${dropdownMenuClasses.join(',.')}`);
|
||||
return dropdown ? querySelector(`.${dropdownMenuClasses[0]}-toggle`, dropdown) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggles on/off the `click` event listener.
|
||||
* @param {Tab} self the `Tab` instance
|
||||
|
@ -5273,7 +5306,22 @@
|
|||
|
||||
// event targets
|
||||
/** @type {(HTMLElement | Element)?} */
|
||||
self.dropdown = nav && querySelector(`.${dropdownMenuClasses[0]}-toggle`, nav);
|
||||
self.dropdown = getParentDropdown(element);
|
||||
|
||||
// show first Tab instance of none is shown
|
||||
// suggested on #432
|
||||
const { tab } = getActiveTab(self);
|
||||
if (nav && !tab) {
|
||||
const firstTab = querySelector(tabSelector, nav);
|
||||
const firstTabContent = firstTab && getTargetElement(firstTab);
|
||||
|
||||
if (firstTabContent) {
|
||||
addClass(firstTab, activeClass);
|
||||
addClass(firstTabContent, showClass);
|
||||
addClass(firstTabContent, activeClass);
|
||||
setAttribute(element, ariaSelected, 'true');
|
||||
}
|
||||
}
|
||||
|
||||
// add event listener
|
||||
toggleTabHandler(self, true);
|
||||
|
@ -5301,20 +5349,24 @@
|
|||
|
||||
// update relatedTarget and dispatch
|
||||
hideTabEvent.relatedTarget = element;
|
||||
|
||||
dispatchEvent(tab, hideTabEvent);
|
||||
if (hideTabEvent.defaultPrevented) return;
|
||||
|
||||
if (nav) Timer.set(nav, () => {}, 17);
|
||||
removeClass(tab, activeClass);
|
||||
setAttribute(tab, ariaSelected, 'false');
|
||||
addClass(element, activeClass);
|
||||
setAttribute(element, ariaSelected, 'true');
|
||||
|
||||
if (dropdown) {
|
||||
// @ts-ignore
|
||||
if (!hasClass(element.parentNode, dropdownMenuClass)) {
|
||||
if (hasClass(dropdown, activeClass)) removeClass(dropdown, activeClass);
|
||||
} else if (!hasClass(dropdown, activeClass)) addClass(dropdown, activeClass);
|
||||
const activeDropdown = getParentDropdown(tab);
|
||||
if (activeDropdown && hasClass(activeDropdown, activeClass)) {
|
||||
removeClass(activeDropdown, activeClass);
|
||||
}
|
||||
|
||||
if (nav) {
|
||||
Timer.set(nav, () => {
|
||||
removeClass(tab, activeClass);
|
||||
setAttribute(tab, ariaSelected, 'false');
|
||||
if (dropdown && !hasClass(dropdown, activeClass)) addClass(dropdown, activeClass);
|
||||
}, 1);
|
||||
}
|
||||
|
||||
if (hasClass(content, fadeClass)) {
|
||||
|
|
4
src/static/scripts/datatables.css
vendored
4
src/static/scripts/datatables.css
vendored
|
@ -4,10 +4,10 @@
|
|||
*
|
||||
* To rebuild or modify this file with the latest versions of the included
|
||||
* software please visit:
|
||||
* https://datatables.net/download/#bs5/dt-1.11.4
|
||||
* https://datatables.net/download/#bs5/dt-1.11.5
|
||||
*
|
||||
* Included libraries:
|
||||
* DataTables 1.11.4
|
||||
* DataTables 1.11.5
|
||||
*/
|
||||
|
||||
@charset "UTF-8";
|
||||
|
|
10899
src/static/scripts/datatables.js
vendored
10899
src/static/scripts/datatables.js
vendored
File diff suppressed because it is too large
Load diff
Loading…
Add table
Add a link
Reference in a new issue