From 30e768613b3345405f118cbad2580107a8811af7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Garc=C3=ADa?=
Date: Sun, 30 Dec 2018 23:34:31 +0100
Subject: [PATCH] Start using rustfmt and some style changes to make some lines shorter

---
 rustfmt.toml                  |   1 +
 src/api/core/accounts.rs      |  22 +-
 src/api/core/ciphers.rs       | 455 ++++++++++++++++++++++------------
 src/api/core/folders.rs       |  33 ++-
 src/api/core/mod.rs           |   7 +-
 src/api/core/organizations.rs | 372 +++++++++++++++++----------
 src/api/core/two_factor.rs    |  78 ++----
 src/api/identity.rs           |  14 +-
 src/api/mod.rs                |  18 +-
 src/api/notifications.rs      |  59 ++---
 src/auth.rs                   |  49 ++--
 src/crypto.rs                 |  16 +-
 src/db/mod.rs                 |  12 +-
 src/db/models/attachment.rs   |  14 +-
 src/db/models/cipher.rs       | 126 ++++++----
 src/db/models/collection.rs   | 169 +++++++------
 src/db/models/device.rs       |  31 ++-
 src/db/models/folder.rs       |  62 ++---
 src/db/models/mod.rs          |   8 +-
 src/db/models/organization.rs |  99 ++++----
 src/db/models/two_factor.rs   |  32 ++-
 src/db/models/user.rs         | 106 ++++----
 src/error.rs                  |   8 +-
 src/mail.rs                   |  45 ++--
 src/main.rs                   | 157 +++++++-----
 src/util.rs                   |  87 ++++---
 26 files changed, 1177 insertions(+), 903 deletions(-)
 create mode 100644 rustfmt.toml

diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 00000000..75306517
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1 @@
+max_width = 120
diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs
index 514eb875..556d15e5 100644
--- a/src/api/core/accounts.rs
+++ b/src/api/core/accounts.rs
@@ -3,13 +3,13 @@ use rocket_contrib::json::Json;
 use crate::db::models::*;
 use crate::db::DbConn;
 
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData, UpdateType, WebSocketUsers};
-use crate::auth::{Headers, decode_invite_jwt, InviteJWTClaims};
+use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
+use crate::auth::{decode_invite_jwt, Headers, InviteJWTClaims};
 use crate::mail;
 
 use crate::CONFIG;
 
-use rocket::{Route, State};
+use rocket::Route;
 
 pub fn routes() -> Vec<Route> {
     routes![
@@ -74,9 +74,9 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult {
         } else {
             let token = match &data.Token {
                 Some(token) => token,
-                None => err!("No valid invite token")
+                None => err!("No valid invite token"),
             };
-
+
             let claims: InviteJWTClaims = decode_invite_jwt(&token)?;
             if &claims.email == &data.Email {
                 user
@@ -257,7 +257,7 @@ struct KeyData {
 }
 
 #[post("/accounts/key", data = "<data>")]
-fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult {
+fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
     let data: KeyData = data.into_inner().data;
 
     if !headers.user.check_valid_password(&data.MasterPasswordHash) {
@@ -294,7 +294,15 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, ws:
             err!("The cipher is not owned by the user")
         }
 
-        update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherUpdate)?
+        update_cipher_from_data(
+            &mut saved_cipher,
+            cipher_data,
+            &headers,
+            false,
+            &conn,
+            &nt,
+            UpdateType::CipherUpdate,
+        )?
} // Update user data diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index c3edf2e1..8adf4688 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -1,8 +1,8 @@ -use std::collections::{HashSet, HashMap}; +use std::collections::{HashMap, HashSet}; use std::path::Path; use rocket::http::ContentType; -use rocket::{request::Form, Data, Route, State}; +use rocket::{request::Form, Data, Route}; use rocket_contrib::json::Json; use serde_json::Value; @@ -17,7 +17,7 @@ use crate::db::DbConn; use crate::crypto; -use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, PasswordData, UpdateType, WebSocketUsers}; +use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType}; use crate::auth::Headers; use crate::CONFIG; @@ -56,7 +56,6 @@ pub fn routes() -> Vec { delete_all, move_cipher_selected, move_cipher_selected_put, - post_collections_update, post_collections_admin, put_collections_admin, @@ -80,9 +79,16 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> JsonResult { let collections_json: Vec = collections.iter().map(|c| c.to_json()).collect(); let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn); - let ciphers_json: Vec = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); + let ciphers_json: Vec = ciphers + .iter() + .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) + .collect(); - let domains_json = if data.exclude_domains { Value::Null } else { api::core::get_eq_domains(headers).unwrap().into_inner() }; + let domains_json = if data.exclude_domains { + Value::Null + } else { + api::core::get_eq_domains(headers).unwrap().into_inner() + }; Ok(Json(json!({ "Profile": user_json, @@ -98,7 +104,10 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> JsonResult { fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult { let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn); - let ciphers_json: Vec = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); + let ciphers_json: Vec = ciphers + .iter() + .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) + .collect(); Ok(Json(json!({ "Data": ciphers_json, @@ -111,7 +120,7 @@ fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult { fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { let cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) { @@ -166,7 +175,7 @@ pub struct CipherData { // These are used during key rotation #[serde(rename = "Attachments")] _Attachments: Option, // Unused, contains map of {id: filename} - Attachments2: Option> + Attachments2: Option>, } #[derive(Deserialize, Debug)] @@ -177,42 +186,53 @@ pub struct Attachments2Data { } #[post("/ciphers/admin", data = "")] -fn post_ciphers_admin(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn post_ciphers_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { let data: ShareCipherData = data.into_inner().data; let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.save(&conn)?; - share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &ws) + share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt) } #[post("/ciphers/create", data = "")] -fn post_ciphers_create(data: 
JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - post_ciphers_admin(data, headers, conn, ws) +fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { + post_ciphers_admin(data, headers, conn, nt) } #[post("/ciphers", data = "")] -fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { let data: CipherData = data.into_inner().data; let mut cipher = Cipher::new(data.Type, data.Name.clone()); - update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate)?; + update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate)?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) } -pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &Headers, shared_to_collection: bool, conn: &DbConn, ws: &State, ut: UpdateType) -> EmptyResult { +pub fn update_cipher_from_data( + cipher: &mut Cipher, + data: CipherData, + headers: &Headers, + shared_to_collection: bool, + conn: &DbConn, + nt: &Notify, + ut: UpdateType, +) -> EmptyResult { if let Some(org_id) = data.OrganizationId { match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { None => err!("You don't have permission to add item to organization"), - Some(org_user) => if shared_to_collection - || org_user.has_full_access() - || cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { - cipher.organization_uuid = Some(org_id); - cipher.user_uuid = None; - } else { - err!("You don't have permission to add cipher directly to organization") + Some(org_user) => { + if shared_to_collection + || org_user.has_full_access() + || cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) + { + cipher.organization_uuid = Some(org_id); + cipher.user_uuid = None; + } else { + err!("You don't have permission to add cipher directly to organization") + } } } } else { @@ -226,7 +246,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: & err!("Folder is not owned by user") } } - None => err!("Folder doesn't exist") + None => err!("Folder doesn't exist"), } } @@ -235,7 +255,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: & for (id, attachment) in attachments { let mut saved_att = match Attachment::find_by_id(&id, &conn) { Some(att) => att, - None => err!("Attachment doesn't exist") + None => err!("Attachment doesn't exist"), }; if saved_att.cipher_uuid != cipher.uuid { @@ -254,12 +274,12 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: & 2 => data.SecureNote, 3 => data.Card, 4 => data.Identity, - _ => err!("Invalid type") + _ => err!("Invalid type"), }; let mut type_data = match type_data_opt { Some(data) => data, - None => err!("Data missing") + None => err!("Data missing"), }; // TODO: ******* Backwards compat start ********** @@ -280,7 +300,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: & cipher.save(&conn)?; - ws.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn)); + nt.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn)); cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn) } @@ -304,9 +324,8 @@ struct RelationsData { Value: usize, } - #[post("/ciphers/import", data = "")] -fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: 
DbConn, ws: State) -> EmptyResult { +fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { let data: ImportData = data.into_inner().data; // Read and create the folders @@ -327,49 +346,67 @@ fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: DbC // Read and create the ciphers for (index, cipher_data) in data.Ciphers.into_iter().enumerate() { - let folder_uuid = relations_map.get(&index) - .map(|i| folders[*i].uuid.clone()); + let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone()); let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate)?; - - cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn).ok(); + update_cipher_from_data( + &mut cipher, + cipher_data, + &headers, + false, + &conn, + &nt, + UpdateType::CipherCreate, + )?; + + cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn)?; } let mut user = headers.user; user.update_revision(&conn) } - #[put("/ciphers//admin", data = "")] -fn put_cipher_admin(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - put_cipher(uuid, data, headers, conn, ws) +fn put_cipher_admin( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> JsonResult { + put_cipher(uuid, data, headers, conn, nt) } #[post("/ciphers//admin", data = "")] -fn post_cipher_admin(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - post_cipher(uuid, data, headers, conn, ws) +fn post_cipher_admin( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> JsonResult { + post_cipher(uuid, data, headers, conn, nt) } #[post("/ciphers/", data = "")] -fn post_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - put_cipher(uuid, data, headers, conn, ws) +fn post_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { + put_cipher(uuid, data, headers, conn, nt) } #[put("/ciphers/", data = "")] -fn put_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn put_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { let data: CipherData = data.into_inner().data; let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { err!("Cipher is not write accessible") } - update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &ws, UpdateType::SyncCipherUpdate)?; + update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) } @@ -381,22 +418,37 @@ struct CollectionsAdminData { } #[post("/ciphers//collections", data = "")] -fn post_collections_update(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +fn post_collections_update( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, +) -> EmptyResult { post_collections_admin(uuid, data, headers, conn) } #[put("/ciphers//collections-admin", data = "")] -fn put_collections_admin(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn) -> 
EmptyResult { +fn put_collections_admin( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, +) -> EmptyResult { post_collections_admin(uuid, data, headers, conn) } #[post("/ciphers//collections-admin", data = "")] -fn post_collections_admin(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +fn post_collections_admin( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, +) -> EmptyResult { let data: CollectionsAdminData = data.into_inner().data; let cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { @@ -404,16 +456,22 @@ fn post_collections_admin(uuid: String, data: JsonUpcase, } let posted_collections: HashSet = data.CollectionIds.iter().cloned().collect(); - let current_collections: HashSet = cipher.get_collections(&headers.user.uuid ,&conn).iter().cloned().collect(); + let current_collections: HashSet = cipher + .get_collections(&headers.user.uuid, &conn) + .iter() + .cloned() + .collect(); for collection in posted_collections.symmetric_difference(¤t_collections) { match Collection::find_by_uuid(&collection, &conn) { None => err!("Invalid collection ID provided"), Some(collection) => { if collection.is_writable_by_user(&headers.user.uuid, &conn) { - if posted_collections.contains(&collection.uuid) { // Add to collection + if posted_collections.contains(&collection.uuid) { + // Add to collection CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?; - } else { // Remove from collection + } else { + // Remove from collection CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn)?; } } else { @@ -434,28 +492,45 @@ struct ShareCipherData { } #[post("/ciphers//share", data = "")] -fn post_cipher_share(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn post_cipher_share( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> JsonResult { let data: ShareCipherData = data.into_inner().data; - share_cipher_by_uuid(&uuid, data, &headers, &conn, &ws) + share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt) } #[put("/ciphers//share", data = "")] -fn put_cipher_share(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn put_cipher_share( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> JsonResult { let data: ShareCipherData = data.into_inner().data; - share_cipher_by_uuid(&uuid, data, &headers, &conn, &ws) + share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt) } #[derive(Deserialize)] #[allow(non_snake_case)] struct ShareSelectedCipherData { Ciphers: Vec, - CollectionIds: Vec + CollectionIds: Vec, } #[put("/ciphers/share", data = "")] -fn put_cipher_share_seleted(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn put_cipher_share_seleted( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { let mut data: ShareSelectedCipherData = data.into_inner().data; let mut cipher_ids: Vec = Vec::new(); @@ -470,7 +545,7 @@ fn put_cipher_share_seleted(data: JsonUpcase, headers: for cipher in data.Ciphers.iter() { match cipher.Id { Some(ref id) => cipher_ids.push(id.to_string()), - None => err!("Request missing ids field") + None => err!("Request missing ids field"), }; } @@ -483,20 +558,25 @@ fn 
put_cipher_share_seleted(data: JsonUpcase, headers: while let Some(cipher) = data.Ciphers.pop() { let mut shared_cipher_data = ShareCipherData { Cipher: cipher, - CollectionIds: data.CollectionIds.clone() + CollectionIds: data.CollectionIds.clone(), }; match shared_cipher_data.Cipher.Id.take() { - Some(id) => share_cipher_by_uuid(&id, shared_cipher_data , &headers, &conn, &ws)?, - None => err!("Request missing ids field") - + Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &conn, &nt)?, + None => err!("Request missing ids field"), }; } Ok(()) } -fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, conn: &DbConn, ws: &State) -> JsonResult { +fn share_cipher_by_uuid( + uuid: &str, + data: ShareCipherData, + headers: &Headers, + conn: &DbConn, + nt: &Notify, +) -> JsonResult { let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => { if cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { @@ -504,8 +584,8 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co } else { err!("Cipher is not write accessible") } - }, - None => err!("Cipher doesn't exist") + } + None => err!("Cipher doesn't exist"), }; match data.Cipher.OrganizationId.clone() { @@ -525,7 +605,15 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co } } } - update_cipher_from_data(&mut cipher, data.Cipher, &headers, shared_to_collection, &conn, &ws, UpdateType::SyncCipherUpdate)?; + update_cipher_from_data( + &mut cipher, + data.Cipher, + &headers, + shared_to_collection, + &conn, + &nt, + UpdateType::CipherUpdate, + )?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) } @@ -536,7 +624,7 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn) -> JsonResult { let cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { @@ -551,117 +639,152 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers let mut attachment_key = None; - Multipart::with_body(data.open(), boundary).foreach_entry(|mut field| { - match field.headers.name.as_str() { - "key" => { - use std::io::Read; - let mut key_buffer = String::new(); - if field.data.read_to_string(&mut key_buffer).is_ok() { - attachment_key = Some(key_buffer); - } - }, - "data" => { - // This is provided by the client, don't trust it - let name = field.headers.filename.expect("No filename provided"); - - let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); - let path = base_path.join(&file_name); - - let size = match field.data.save() - .memory_threshold(0) - .size_limit(None) - .with_path(path) { - SaveResult::Full(SavedData::File(_, size)) => size as i32, - SaveResult::Full(other) => { - error!("Attachment is not a file: {:?}", other); - return; - }, - SaveResult::Partial(_, reason) => { - error!("Partial result: {:?}", reason); - return; - }, - SaveResult::Error(e) => { - error!("Error: {:?}", e); - return; + Multipart::with_body(data.open(), boundary) + .foreach_entry(|mut field| { + match field.headers.name.as_str() { + "key" => { + use std::io::Read; + let mut key_buffer = String::new(); + if field.data.read_to_string(&mut key_buffer).is_ok() { + attachment_key = Some(key_buffer); } - }; + } + "data" 
=> { + // This is provided by the client, don't trust it + let name = field.headers.filename.expect("No filename provided"); + + let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); + let path = base_path.join(&file_name); + + let size = match field.data.save().memory_threshold(0).size_limit(None).with_path(path) { + SaveResult::Full(SavedData::File(_, size)) => size as i32, + SaveResult::Full(other) => { + error!("Attachment is not a file: {:?}", other); + return; + } + SaveResult::Partial(_, reason) => { + error!("Partial result: {:?}", reason); + return; + } + SaveResult::Error(e) => { + error!("Error: {:?}", e); + return; + } + }; - let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size); - attachment.key = attachment_key.clone(); - attachment.save(&conn).expect("Error saving attachment"); - }, - _ => error!("Invalid multipart name") - } - }).expect("Error processing multipart data"); + let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size); + attachment.key = attachment_key.clone(); + attachment.save(&conn).expect("Error saving attachment"); + } + _ => error!("Invalid multipart name"), + } + }) + .expect("Error processing multipart data"); Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) } #[post("/ciphers//attachment-admin", format = "multipart/form-data", data = "")] -fn post_attachment_admin(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn) -> JsonResult { +fn post_attachment_admin( + uuid: String, + data: Data, + content_type: &ContentType, + headers: Headers, + conn: DbConn, +) -> JsonResult { post_attachment(uuid, data, content_type, headers, conn) } -#[post("/ciphers//attachment//share", format = "multipart/form-data", data = "")] -fn post_attachment_share(uuid: String, attachment_id: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws)?; +#[post( + "/ciphers//attachment//share", + format = "multipart/form-data", + data = "" +)] +fn post_attachment_share( + uuid: String, + attachment_id: String, + data: Data, + content_type: &ContentType, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> JsonResult { + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt)?; post_attachment(uuid, data, content_type, headers, conn) } #[post("/ciphers//attachment//delete-admin")] -fn delete_attachment_post_admin(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - delete_attachment(uuid, attachment_id, headers, conn, ws) +fn delete_attachment_post_admin( + uuid: String, + attachment_id: String, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { + delete_attachment(uuid, attachment_id, headers, conn, nt) } #[post("/ciphers//attachment//delete")] -fn delete_attachment_post(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - delete_attachment(uuid, attachment_id, headers, conn, ws) +fn delete_attachment_post( + uuid: String, + attachment_id: String, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { + delete_attachment(uuid, attachment_id, headers, conn, nt) } #[delete("/ciphers//attachment/")] -fn delete_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws) +fn 
delete_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt) } #[delete("/ciphers//attachment//admin")] -fn delete_attachment_admin(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws) +fn delete_attachment_admin( + uuid: String, + attachment_id: String, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt) } #[post("/ciphers//delete")] -fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) +fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt) } #[post("/ciphers//delete-admin")] -fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) +fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt) } #[delete("/ciphers/")] -fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) +fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt) } #[delete("/ciphers//admin")] -fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) +fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt) } #[delete("/ciphers", data = "")] -fn delete_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn delete_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { let data: Value = data.into_inner().data; let uuids = match data.get("Ids") { Some(ids) => match ids.as_array() { Some(ids) => ids.iter().filter_map(Value::as_str), - None => err!("Posted ids field is not an array") + None => err!("Posted ids field is not an array"), }, - None => err!("Request missing ids field") + None => err!("Request missing ids field"), }; for uuid in uuids { - if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, &ws) { + if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, &nt) { return error; }; } @@ -670,46 +793,42 @@ fn delete_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbCon } #[post("/ciphers/delete", data = "")] -fn delete_cipher_selected_post(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - delete_cipher_selected(data, headers, conn, ws) +fn delete_cipher_selected_post(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + delete_cipher_selected(data, headers, conn, nt) } #[post("/ciphers/move", data = "")] -fn move_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn move_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { let data = data.into_inner().data; let 
folder_id = match data.get("FolderId") { - Some(folder_id) => { - match folder_id.as_str() { - Some(folder_id) => { - match Folder::find_by_uuid(folder_id, &conn) { - Some(folder) => { - if folder.user_uuid != headers.user.uuid { - err!("Folder is not owned by user") - } - Some(folder.uuid) - } - None => err!("Folder doesn't exist") + Some(folder_id) => match folder_id.as_str() { + Some(folder_id) => match Folder::find_by_uuid(folder_id, &conn) { + Some(folder) => { + if folder.user_uuid != headers.user.uuid { + err!("Folder is not owned by user") } + Some(folder.uuid) } - None => err!("Folder id provided in wrong format") - } - } - None => None + None => err!("Folder doesn't exist"), + }, + None => err!("Folder id provided in wrong format"), + }, + None => None, }; let uuids = match data.get("Ids") { Some(ids) => match ids.as_array() { Some(ids) => ids.iter().filter_map(Value::as_str), - None => err!("Posted ids field is not an array") + None => err!("Posted ids field is not an array"), }, - None => err!("Request missing ids field") + None => err!("Request missing ids field"), }; for uuid in uuids { let mut cipher = match Cipher::find_by_uuid(uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) { @@ -720,19 +839,19 @@ fn move_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, cipher.move_to_folder(folder_id.clone(), &headers.user.uuid, &conn)?; cipher.save(&conn)?; - ws.send_cipher_update(UpdateType::SyncCipherUpdate, &cipher, &cipher.update_users_revision(&conn)); + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn)); } Ok(()) } #[put("/ciphers/move", data = "")] -fn move_cipher_selected_put(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - move_cipher_selected(data, headers, conn, ws) +fn move_cipher_selected_put(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + move_cipher_selected(data, headers, conn, nt) } #[post("/ciphers/purge", data = "")] -fn delete_all(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn delete_all(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { let data: PasswordData = data.into_inner().data; let password_hash = data.MasterPasswordHash; @@ -745,19 +864,19 @@ fn delete_all(data: JsonUpcase, headers: Headers, conn: DbConn, ws // Delete ciphers and their attachments for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) { cipher.delete(&conn)?; - ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn)); + nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn)); } // Delete folders for f in Folder::find_by_user(&user.uuid, &conn) { f.delete(&conn)?; - ws.send_folder_update(UpdateType::SyncFolderCreate, &f); + nt.send_folder_update(UpdateType::FolderCreate, &f); } Ok(()) } -fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, ws: &State) -> EmptyResult { +fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult { let cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), @@ -768,14 +887,20 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, ws: &Sta } cipher.delete(&conn)?; - ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, 
&cipher.update_users_revision(&conn)); + nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn)); Ok(()) } -fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &Headers, conn: &DbConn, ws: &State) -> EmptyResult { +fn _delete_cipher_attachment_by_id( + uuid: &str, + attachment_id: &str, + headers: &Headers, + conn: &DbConn, + nt: &Notify, +) -> EmptyResult { let attachment = match Attachment::find_by_id(&attachment_id, &conn) { Some(attachment) => attachment, - None => err!("Attachment doesn't exist") + None => err!("Attachment doesn't exist"), }; if attachment.cipher_uuid != uuid { @@ -784,7 +909,7 @@ fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &He let cipher = match Cipher::find_by_uuid(&uuid, &conn) { Some(cipher) => cipher, - None => err!("Cipher doesn't exist") + None => err!("Cipher doesn't exist"), }; if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { @@ -793,6 +918,6 @@ fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &He // Delete attachment attachment.delete(&conn)?; - ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn)); + nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn)); Ok(()) } diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs index 4585d530..d7b42705 100644 --- a/src/api/core/folders.rs +++ b/src/api/core/folders.rs @@ -1,11 +1,10 @@ -use rocket::State; use rocket_contrib::json::Json; use serde_json::Value; -use crate::db::DbConn; use crate::db::models::*; +use crate::db::DbConn; -use crate::api::{JsonResult, EmptyResult, JsonUpcase, WebSocketUsers, UpdateType}; +use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}; use crate::auth::Headers; use rocket::Route; @@ -39,7 +38,7 @@ fn get_folders(headers: Headers, conn: DbConn) -> JsonResult { fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { let folder = match Folder::find_by_uuid(&uuid, &conn) { Some(folder) => folder, - _ => err!("Invalid folder") + _ => err!("Invalid folder"), }; if folder.user_uuid != headers.user.uuid { @@ -53,33 +52,33 @@ fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { #[allow(non_snake_case)] pub struct FolderData { - pub Name: String + pub Name: String, } #[post("/folders", data = "")] -fn post_folders(data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn post_folders(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { let data: FolderData = data.into_inner().data; let mut folder = Folder::new(headers.user.uuid.clone(), data.Name); folder.save(&conn)?; - ws.send_folder_update(UpdateType::SyncFolderCreate, &folder); + nt.send_folder_update(UpdateType::FolderCreate, &folder); Ok(Json(folder.to_json())) } #[post("/folders/", data = "")] -fn post_folder(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { - put_folder(uuid, data, headers, conn, ws) +fn post_folder(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { + put_folder(uuid, data, headers, conn, nt) } #[put("/folders/", data = "")] -fn put_folder(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> JsonResult { +fn put_folder(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { let data: FolderData = data.into_inner().data; let mut folder = 
match Folder::find_by_uuid(&uuid, &conn) { Some(folder) => folder, - _ => err!("Invalid folder") + _ => err!("Invalid folder"), }; if folder.user_uuid != headers.user.uuid { @@ -89,21 +88,21 @@ fn put_folder(uuid: String, data: JsonUpcase, headers: Headers, conn folder.name = data.Name; folder.save(&conn)?; - ws.send_folder_update(UpdateType::SyncFolderUpdate, &folder); + nt.send_folder_update(UpdateType::FolderUpdate, &folder); Ok(Json(folder.to_json())) } #[post("/folders//delete")] -fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { - delete_folder(uuid, headers, conn, ws) +fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { + delete_folder(uuid, headers, conn, nt) } #[delete("/folders/")] -fn delete_folder(uuid: String, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { let folder = match Folder::find_by_uuid(&uuid, &conn) { Some(folder) => folder, - _ => err!("Invalid folder") + _ => err!("Invalid folder"), }; if folder.user_uuid != headers.user.uuid { @@ -113,6 +112,6 @@ fn delete_folder(uuid: String, headers: Headers, conn: DbConn, ws: State Vec { let mut mod_routes = routes![ clear_device_token, put_device_token, - get_eq_domains, post_eq_domains, put_eq_domains, @@ -25,9 +24,9 @@ pub fn routes() -> Vec { routes } -/// -/// Move this somewhere else -/// +// +// Move this somewhere else +// use rocket::Route; use rocket_contrib::json::Json; diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 9794cc91..b1148431 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -1,14 +1,13 @@ -use rocket::State; use rocket::request::Form; use rocket_contrib::json::Json; use serde_json::Value; -use crate::CONFIG; -use crate::db::DbConn; use crate::db::models::*; +use crate::db::DbConn; +use crate::CONFIG; -use crate::api::{PasswordData, JsonResult, EmptyResult, NumberOrString, JsonUpcase, WebSocketUsers, UpdateType}; -use crate::auth::{Headers, AdminHeaders, OwnerHeaders, encode_jwt, decode_invite_jwt, InviteJWTClaims, JWT_ISSUER}; +use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType}; +use crate::auth::{decode_invite_jwt, encode_jwt, AdminHeaders, Headers, InviteJWTClaims, OwnerHeaders, JWT_ISSUER}; use crate::mail; @@ -53,7 +52,6 @@ pub fn routes() -> Vec { ] } - #[derive(Deserialize)] #[allow(non_snake_case)] struct OrgData { @@ -83,10 +81,8 @@ fn create_organization(headers: Headers, data: JsonUpcase, conn: DbConn let data: OrgData = data.into_inner().data; let mut org = Organization::new(data.Name, data.BillingEmail); - let mut user_org = UserOrganization::new( - headers.user.uuid.clone(), org.uuid.clone()); - let mut collection = Collection::new( - org.uuid.clone(), data.CollectionName); + let mut user_org = UserOrganization::new(headers.user.uuid.clone(), org.uuid.clone()); + let mut collection = Collection::new(org.uuid.clone(), data.CollectionName); user_org.key = data.Key; user_org.access_all = true; @@ -101,7 +97,12 @@ fn create_organization(headers: Headers, data: JsonUpcase, conn: DbConn } #[delete("/organizations/", data = "")] -fn delete_organization(org_id: String, data: JsonUpcase, headers: OwnerHeaders, conn: DbConn) -> EmptyResult { +fn delete_organization( + org_id: String, + data: JsonUpcase, + headers: OwnerHeaders, + conn: DbConn, +) -> EmptyResult { let data: PasswordData = 
data.into_inner().data; let password_hash = data.MasterPasswordHash; @@ -111,12 +112,17 @@ fn delete_organization(org_id: String, data: JsonUpcase, headers: match Organization::find_by_uuid(&org_id, &conn) { None => err!("Organization not found"), - Some(org) => org.delete(&conn) + Some(org) => org.delete(&conn), } } #[post("/organizations//delete", data = "")] -fn post_delete_organization(org_id: String, data: JsonUpcase, headers: OwnerHeaders, conn: DbConn) -> EmptyResult { +fn post_delete_organization( + org_id: String, + data: JsonUpcase, + headers: OwnerHeaders, + conn: DbConn, +) -> EmptyResult { delete_organization(org_id, data, headers, conn) } @@ -126,15 +132,14 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe None => err!("User not part of organization"), Some(user_org) => { if user_org.type_ == UserOrgType::Owner { - let num_owners = UserOrganization::find_by_org_and_type( - &org_id, UserOrgType::Owner as i32, &conn) - .len(); + let num_owners = + UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len(); if num_owners <= 1 { err!("The last owner can't leave") } } - + user_org.delete(&conn) } } @@ -144,22 +149,32 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> JsonResult { match Organization::find_by_uuid(&org_id, &conn) { Some(organization) => Ok(Json(organization.to_json())), - None => err!("Can't find organization details") + None => err!("Can't find organization details"), } } #[put("/organizations/", data = "")] -fn put_organization(org_id: String, headers: OwnerHeaders, data: JsonUpcase, conn: DbConn) -> JsonResult { +fn put_organization( + org_id: String, + headers: OwnerHeaders, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { post_organization(org_id, headers, data, conn) } #[post("/organizations/", data = "")] -fn post_organization(org_id: String, _headers: OwnerHeaders, data: JsonUpcase, conn: DbConn) -> JsonResult { +fn post_organization( + org_id: String, + _headers: OwnerHeaders, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { let data: OrganizationUpdateData = data.into_inner().data; let mut org = match Organization::find_by_uuid(&org_id, &conn) { Some(organization) => organization, - None => err!("Can't find organization details") + None => err!("Can't find organization details"), }; org.name = data.Name; @@ -172,7 +187,6 @@ fn post_organization(org_id: String, _headers: OwnerHeaders, data: JsonUpcase JsonResult { - Ok(Json(json!({ "Data": Collection::find_by_user_uuid(&headers.user.uuid, &conn) @@ -198,12 +212,17 @@ fn get_org_collections(org_id: String, _headers: AdminHeaders, conn: DbConn) -> } #[post("/organizations//collections", data = "")] -fn post_organization_collections(org_id: String, _headers: AdminHeaders, data: JsonUpcase, conn: DbConn) -> JsonResult { +fn post_organization_collections( + org_id: String, + _headers: AdminHeaders, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { let data: NewCollectionData = data.into_inner().data; let org = match Organization::find_by_uuid(&org_id, &conn) { Some(organization) => organization, - None => err!("Can't find organization details") + None => err!("Can't find organization details"), }; let mut collection = Collection::new(org.uuid.clone(), data.Name); @@ -213,22 +232,34 @@ fn post_organization_collections(org_id: String, _headers: AdminHeaders, data: J } #[put("/organizations//collections/", data = "")] -fn 
put_organization_collection_update(org_id: String, col_id: String, headers: AdminHeaders, data: JsonUpcase, conn: DbConn) -> JsonResult { +fn put_organization_collection_update( + org_id: String, + col_id: String, + headers: AdminHeaders, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { post_organization_collection_update(org_id, col_id, headers, data, conn) } #[post("/organizations//collections/", data = "")] -fn post_organization_collection_update(org_id: String, col_id: String, _headers: AdminHeaders, data: JsonUpcase, conn: DbConn) -> JsonResult { +fn post_organization_collection_update( + org_id: String, + col_id: String, + _headers: AdminHeaders, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { let data: NewCollectionData = data.into_inner().data; let org = match Organization::find_by_uuid(&org_id, &conn) { Some(organization) => organization, - None => err!("Can't find organization details") + None => err!("Can't find organization details"), }; let mut collection = match Collection::find_by_uuid(&col_id, &conn) { Some(collection) => collection, - None => err!("Collection not found") + None => err!("Collection not found"), }; if collection.org_uuid != org.uuid { @@ -241,15 +272,22 @@ fn post_organization_collection_update(org_id: String, col_id: String, _headers: Ok(Json(collection.to_json())) } - #[delete("/organizations//collections//user/")] -fn delete_organization_collection_user(org_id: String, col_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult { +fn delete_organization_collection_user( + org_id: String, + col_id: String, + org_user_id: String, + _headers: AdminHeaders, + conn: DbConn, +) -> EmptyResult { let collection = match Collection::find_by_uuid(&col_id, &conn) { None => err!("Collection not found"), - Some(collection) => if collection.org_uuid == org_id { - collection - } else { - err!("Collection and Organization id do not match") + Some(collection) => { + if collection.org_uuid == org_id { + collection + } else { + err!("Collection and Organization id do not match") + } } }; @@ -258,16 +296,20 @@ fn delete_organization_collection_user(org_id: String, col_id: String, org_user_ Some(user_org) => { match CollectionUser::find_by_collection_and_user(&collection.uuid, &user_org.user_uuid, &conn) { None => err!("User not assigned to collection"), - Some(col_user) => { - col_user.delete(&conn) - } + Some(col_user) => col_user.delete(&conn), } } } } #[post("/organizations//collections//delete-user/")] -fn post_organization_collection_delete_user(org_id: String, col_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { +fn post_organization_collection_delete_user( + org_id: String, + col_id: String, + org_user_id: String, + headers: AdminHeaders, + conn: DbConn, +) -> EmptyResult { delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn) } @@ -275,10 +317,12 @@ fn post_organization_collection_delete_user(org_id: String, col_id: String, org_ fn delete_organization_collection(org_id: String, col_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult { match Collection::find_by_uuid(&col_id, &conn) { None => err!("Collection not found"), - Some(collection) => if collection.org_uuid == org_id { - collection.delete(&conn) - } else { - err!("Collection and Organization id do not match") + Some(collection) => { + if collection.org_uuid == org_id { + collection.delete(&conn) + } else { + err!("Collection and Organization id do not match") + } } } } @@ -291,7 +335,13 @@ 
struct DeleteCollectionData { } #[post("/organizations//collections//delete", data = "<_data>")] -fn post_organization_collection_delete(org_id: String, col_id: String, headers: AdminHeaders, _data: JsonUpcase, conn: DbConn) -> EmptyResult { +fn post_organization_collection_delete( + org_id: String, + col_id: String, + headers: AdminHeaders, + _data: JsonUpcase, + conn: DbConn, +) -> EmptyResult { delete_organization_collection(org_id, col_id, headers, conn) } @@ -314,16 +364,18 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders, // Get org and collection, check that collection is from org let collection = match Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn) { None => err!("Collection not found in Organization"), - Some(collection) => collection + Some(collection) => collection, }; // Get the users from collection let user_list: Vec = CollectionUser::find_by_collection(&collection.uuid, &conn) - .iter().map(|col_user| { - UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn) - .unwrap() - .to_json_collection_user_details(col_user.read_only, &conn) - }).collect(); + .iter() + .map(|col_user| { + UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn) + .unwrap() + .to_json_collection_user_details(col_user.read_only, &conn) + }) + .collect(); Ok(Json(json!({ "Data": user_list, @@ -335,13 +387,16 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders, #[derive(FromForm)] struct OrgIdData { #[form(field = "organizationId")] - organization_id: String + organization_id: String, } #[get("/ciphers/organization-details?")] fn get_org_details(data: Form, headers: Headers, conn: DbConn) -> JsonResult { let ciphers = Cipher::find_by_org(&data.organization_id, &conn); - let ciphers_json: Vec = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); + let ciphers_json: Vec = ciphers + .iter() + .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) + .collect(); Ok(Json(json!({ "Data": ciphers_json, @@ -367,7 +422,7 @@ where D: Deserializer<'de>, { // Deserialize null to empty Vec - Deserialize::deserialize(deserializer).or(Ok(vec![])) + Deserialize::deserialize(deserializer).or(Ok(vec![])) } #[derive(Deserialize)] @@ -393,11 +448,10 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade let new_type = match UserOrgType::from_str(&data.Type.into_string()) { Some(new_type) => new_type as i32, - None => err!("Invalid type") + None => err!("Invalid type"), }; - if new_type != UserOrgType::User && - headers.org_user_type != UserOrgType::Owner { + if new_type != UserOrgType::User && headers.org_user_type != UserOrgType::Owner { err!("Only Owners can invite Managers, Admins or Owners") } @@ -407,23 +461,26 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade None => UserOrgStatus::Accepted as i32, // Automatically mark user as accepted if no email invites }; let user = match User::find_by_mail(&email, &conn) { - None => if CONFIG.invitations_allowed { // Invite user if that's enabled - let mut invitation = Invitation::new(email.clone()); - invitation.save(&conn)?; - let mut user = User::new(email.clone()); - user.save(&conn)?; - user_org_status = UserOrgStatus::Invited as i32; - user - - } else { - err!(format!("User email does not exist: {}", email)) - }, - Some(user) => if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).is_some() { - err!(format!("User already in organization: {}", email)) - } else { - 
user + None => { + if CONFIG.invitations_allowed { + // Invite user if that's enabled + let mut invitation = Invitation::new(email.clone()); + invitation.save(&conn)?; + let mut user = User::new(email.clone()); + user.save(&conn)?; + user_org_status = UserOrgStatus::Invited as i32; + user + } else { + err!(format!("User email does not exist: {}", email)) + } + } + Some(user) => { + if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).is_some() { + err!(format!("User already in organization: {}", email)) + } else { + user + } } - }; let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); @@ -449,9 +506,14 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade if let Some(ref mail_config) = CONFIG.mail { let org_name = match Organization::find_by_uuid(&org_id, &conn) { Some(org) => org.name, - None => err!("Error looking up organization") + None => err!("Error looking up organization"), }; - let claims = generate_invite_claims(user.uuid.to_string(), user.email.clone(), org_id.clone(), Some(new_user.uuid.clone())); + let claims = generate_invite_claims( + user.uuid.to_string(), + user.email.clone(), + org_id.clone(), + Some(new_user.uuid.clone()), + ); let invite_token = encode_jwt(&claims); mail::send_invite(&email, &org_id, &new_user.uuid, &invite_token, &org_name, mail_config)?; } @@ -481,20 +543,32 @@ fn reinvite_user(org_id: String, user_org: String, _headers: AdminHeaders, conn: }; if Invitation::find_by_mail(&user.email, &conn).is_none() { - err!("No invitation found for user to resend. Try inviting them first.") + err!("No invitation found for user to resend. Try inviting them first.") } let org_name = match Organization::find_by_uuid(&org_id, &conn) { Some(org) => org.name, - None => err!("Error looking up organization.") + None => err!("Error looking up organization."), }; - let claims = generate_invite_claims(user.uuid.to_string(), user.email.clone(), org_id.clone(), Some(user_org.uuid.clone())); + let claims = generate_invite_claims( + user.uuid.to_string(), + user.email.clone(), + org_id.clone(), + Some(user_org.uuid.clone()), + ); let invite_token = encode_jwt(&claims); if let Some(ref mail_config) = CONFIG.mail { - mail::send_invite(&user.email, &org_id, &user_org.uuid, &invite_token, &org_name, mail_config)?; + mail::send_invite( + &user.email, + &org_id, + &user_org.uuid, + &invite_token, + &org_name, + mail_config, + )?; } - + Ok(()) } @@ -519,7 +593,7 @@ fn generate_invite_claims(uuid: String, email: String, org_id: String, org_user_ #[post("/organizations/<_org_id>/users/<_org_user_id>/accept", data = "")] fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase, conn: DbConn) -> EmptyResult { -// The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead + // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead let data: AcceptData = data.into_inner().data; let token = &data.Token; let claims: InviteJWTClaims = decode_invite_jwt(&token)?; @@ -529,35 +603,39 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase user_org, - None => err!("Error accepting the invitation") - }; + let mut user_org = + match UserOrganization::find_by_uuid_and_org(&claims.user_org_id.unwrap(), &claims.org_id, &conn) { + Some(user_org) => user_org, + None => err!("Error accepting the invitation"), + }; user_org.status = UserOrgStatus::Accepted as i32; if user_org.save(&conn).is_err() { err!("Failed to accept 
user to organization") } } - }, - None => { - err!("Invited user not found") - }, + } + None => err!("Invited user not found"), } Ok(()) } #[post("/organizations//users//confirm", data = "")] -fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> EmptyResult { +fn confirm_invite( + org_id: String, + org_user_id: String, + data: JsonUpcase, + headers: AdminHeaders, + conn: DbConn, +) -> EmptyResult { let data = data.into_inner().data; let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { Some(user) => user, - None => err!("The specified user isn't a member of the organization") + None => err!("The specified user isn't a member of the organization"), }; - if user_to_confirm.type_ != UserOrgType::User && - headers.org_user_type != UserOrgType::Owner { + if user_to_confirm.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner { err!("Only Owners can confirm Managers, Admins or Owners") } @@ -568,7 +646,7 @@ fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase, user_to_confirm.status = UserOrgStatus::Confirmed as i32; user_to_confirm.key = match data["Key"].as_str() { Some(key) => key.to_string(), - None => err!("Invalid key provided") + None => err!("Invalid key provided"), }; user_to_confirm.save(&conn) @@ -578,7 +656,7 @@ fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase, fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult { let user = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { Some(user) => user, - None => err!("The specified user isn't a member of the organization") + None => err!("The specified user isn't a member of the organization"), }; Ok(Json(user.to_json_details(&conn))) @@ -594,44 +672,50 @@ struct EditUserData { } #[put("/organizations//users/", data = "", rank = 1)] -fn put_organization_user(org_id: String, org_user_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> EmptyResult { +fn put_organization_user( + org_id: String, + org_user_id: String, + data: JsonUpcase, + headers: AdminHeaders, + conn: DbConn, +) -> EmptyResult { edit_user(org_id, org_user_id, data, headers, conn) } #[post("/organizations//users/", data = "", rank = 1)] -fn edit_user(org_id: String, org_user_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> EmptyResult { +fn edit_user( + org_id: String, + org_user_id: String, + data: JsonUpcase, + headers: AdminHeaders, + conn: DbConn, +) -> EmptyResult { let data: EditUserData = data.into_inner().data; let new_type = match UserOrgType::from_str(&data.Type.into_string()) { Some(new_type) => new_type, - None => err!("Invalid type") + None => err!("Invalid type"), }; let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { Some(user) => user, - None => err!("The specified user isn't member of the organization") + None => err!("The specified user isn't member of the organization"), }; - if new_type != user_to_edit.type_ && ( - user_to_edit.type_ >= UserOrgType::Admin || - new_type >= UserOrgType::Admin - ) && - headers.org_user_type != UserOrgType::Owner { + if new_type != user_to_edit.type_ + && (user_to_edit.type_ >= UserOrgType::Admin || new_type >= UserOrgType::Admin) + && headers.org_user_type != UserOrgType::Owner + { err!("Only Owners can grant and remove Admin or Owner privileges") } - if user_to_edit.type_ == UserOrgType::Owner && - 
headers.org_user_type != UserOrgType::Owner { + if user_to_edit.type_ == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner { err!("Only Owners can edit Owner users") } - if user_to_edit.type_ == UserOrgType::Owner && - new_type != UserOrgType::Owner { - + if user_to_edit.type_ == UserOrgType::Owner && new_type != UserOrgType::Owner { // Removing owner permmission, check that there are at least another owner - let num_owners = UserOrganization::find_by_org_and_type( - &org_id, UserOrgType::Owner as i32, &conn) - .len(); + let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len(); if num_owners <= 1 { err!("Can't delete the last owner") @@ -665,19 +749,16 @@ fn edit_user(org_id: String, org_user_id: String, data: JsonUpcase fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { let user_to_delete = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { Some(user) => user, - None => err!("User to delete isn't member of the organization") + None => err!("User to delete isn't member of the organization"), }; - if user_to_delete.type_ != UserOrgType::User && - headers.org_user_type != UserOrgType::Owner { + if user_to_delete.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner { err!("Only Owners can delete Admins or Owners") } if user_to_delete.type_ == UserOrgType::Owner { // Removing owner, check that there are at least another owner - let num_owners = UserOrganization::find_by_org_and_type( - &org_id, UserOrgType::Owner as i32, &conn) - .len(); + let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len(); if num_owners <= 1 { err!("Can't delete the last owner") @@ -692,8 +773,8 @@ fn post_delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, delete_user(org_id, org_user_id, headers, conn) } -use super::ciphers::CipherData; use super::ciphers::update_cipher_from_data; +use super::ciphers::CipherData; #[derive(Deserialize)] #[allow(non_snake_case)] @@ -713,13 +794,19 @@ struct RelationsData { } #[post("/ciphers/import-organization?", data = "")] -fn post_org_import(query: Form, data: JsonUpcase, headers: Headers, conn: DbConn, ws: State) -> EmptyResult { +fn post_org_import( + query: Form, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { let data: ImportData = data.into_inner().data; let org_id = query.into_inner().organization_id; let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { Some(user) => user, - None => err!("User is not part of the organization") + None => err!("User is not part of the organization"), }; if org_user.type_ < UserOrgType::Admin { @@ -727,14 +814,18 @@ fn post_org_import(query: Form, data: JsonUpcase, headers } // Read and create the collections - let collections: Vec<_> = data.Collections.into_iter().map(|coll| { - let mut collection = Collection::new(org_id.clone(), coll.Name); - if collection.save(&conn).is_err() { - err!("Failed to create Collection"); - } - - Ok(collection) - }).collect(); + let collections: Vec<_> = data + .Collections + .into_iter() + .map(|coll| { + let mut collection = Collection::new(org_id.clone(), coll.Name); + if collection.save(&conn).is_err() { + err!("Failed to create Collection"); + } + + Ok(collection) + }) + .collect(); // Read the relations between collections and ciphers let mut relations = Vec::new(); @@ -743,11 +834,24 @@ fn 
post_org_import(query: Form, data: JsonUpcase, headers } // Read and create the ciphers - let ciphers: Vec<_> = data.Ciphers.into_iter().map(|cipher_data| { - let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate).ok(); - cipher - }).collect(); + let ciphers: Vec<_> = data + .Ciphers + .into_iter() + .map(|cipher_data| { + let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); + update_cipher_from_data( + &mut cipher, + cipher_data, + &headers, + false, + &conn, + &nt, + UpdateType::CipherCreate, + ) + .ok(); + cipher + }) + .collect(); // Assign the collections for (cipher_index, coll_index) in relations { @@ -755,9 +859,9 @@ fn post_org_import(query: Form, data: JsonUpcase, headers let coll = &collections[coll_index]; let coll_id = match coll { Ok(coll) => coll.uuid.as_str(), - Err(_) => err!("Failed to assign to collection") + Err(_) => err!("Failed to assign to collection"), }; - + CollectionCipher::save(cipher_id, coll_id, &conn)?; } diff --git a/src/api/core/two_factor.rs b/src/api/core/two_factor.rs index 26451096..a2f2cd41 100644 --- a/src/api/core/two_factor.rs +++ b/src/api/core/two_factor.rs @@ -3,7 +3,6 @@ use rocket_contrib::json::Json; use serde_json; use serde_json::Value; - use crate::db::{ models::{TwoFactor, TwoFactorType, User}, DbConn, @@ -111,11 +110,7 @@ struct DisableTwoFactorData { } #[post("/two-factor/disable", data = "")] -fn disable_twofactor( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +fn disable_twofactor(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: DisableTwoFactorData = data.into_inner().data; let password_hash = data.MasterPasswordHash; @@ -137,20 +132,12 @@ fn disable_twofactor( } #[put("/two-factor/disable", data = "")] -fn disable_twofactor_put( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +fn disable_twofactor_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { disable_twofactor(data, headers, conn) } #[post("/two-factor/get-authenticator", data = "")] -fn generate_authenticator( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +fn generate_authenticator(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: PasswordData = data.into_inner().data; if !headers.user.check_valid_password(&data.MasterPasswordHash) { @@ -181,11 +168,7 @@ struct EnableAuthenticatorData { } #[post("/two-factor/authenticator", data = "")] -fn activate_authenticator( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +fn activate_authenticator(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EnableAuthenticatorData = data.into_inner().data; let password_hash = data.MasterPasswordHash; let key = data.Key; @@ -228,11 +211,7 @@ fn activate_authenticator( } #[put("/two-factor/authenticator", data = "")] -fn activate_authenticator_put( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +fn activate_authenticator_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { activate_authenticator(data, headers, conn) } @@ -338,11 +317,8 @@ fn activate_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) err!("Invalid password"); } - let tf_challenge = TwoFactor::find_by_user_and_type( - &headers.user.uuid, - TwoFactorType::U2fRegisterChallenge as i32, - &conn, - ); + let tf_challenge = + 
TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2fRegisterChallenge as i32, &conn); if let Some(tf_challenge) = tf_challenge { let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?; @@ -394,17 +370,14 @@ fn activate_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) #[put("/two-factor/u2f", data = "")] fn activate_u2f_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - activate_u2f(data,headers, conn) + activate_u2f(data, headers, conn) } fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge { let challenge = U2F.generate_challenge().unwrap(); - TwoFactor::new( - user_uuid.into(), - type_, - serde_json::to_string(&challenge).unwrap(), - ).save(conn) + TwoFactor::new(user_uuid.into(), type_, serde_json::to_string(&challenge).unwrap()) + .save(conn) .expect("Error saving challenge"); challenge @@ -478,8 +451,7 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Emp let mut _counter: u32 = 0; for registration in registrations { - let response = - U2F.sign_response(challenge.clone(), registration, response.clone(), _counter); + let response = U2F.sign_response(challenge.clone(), registration, response.clone(), _counter); match response { Ok(new_counter) => { _counter = new_counter; @@ -495,7 +467,6 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Emp err!("error verifying response") } - #[derive(Deserialize, Debug)] #[allow(non_snake_case)] struct EnableYubikeyData { @@ -515,8 +486,8 @@ pub struct YubikeyMetadata { pub Nfc: bool, } -use yubico::Yubico; use yubico::config::Config; +use yubico::Yubico; fn parse_yubikeys(data: &EnableYubikeyData) -> Vec { let mut yubikeys: Vec = Vec::new(); @@ -548,7 +519,7 @@ fn jsonify_yubikeys(yubikeys: Vec) -> serde_json::Value { let mut result = json!({}); for (i, key) in yubikeys.into_iter().enumerate() { - result[format!("Key{}", i+1)] = Value::String(key); + result[format!("Key{}", i + 1)] = Value::String(key); } result @@ -556,16 +527,17 @@ fn jsonify_yubikeys(yubikeys: Vec) -> serde_json::Value { fn verify_yubikey_otp(otp: String) -> JsonResult { if !CONFIG.yubico_cred_set { - err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. \ - Yubikey OTP Disabled") + err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. Yubikey OTP Disabled") } let yubico = Yubico::new(); - let config = Config::default().set_client_id(CONFIG.yubico_client_id.to_owned()).set_key(CONFIG.yubico_secret_key.to_owned()); + let config = Config::default() + .set_client_id(CONFIG.yubico_client_id.to_owned()) + .set_key(CONFIG.yubico_secret_key.to_owned()); let result = match CONFIG.yubico_server { Some(ref server) => yubico.verify(otp, config.set_api_hosts(vec![server.to_owned()])), - None => yubico.verify(otp, config) + None => yubico.verify(otp, config), }; match result { @@ -577,8 +549,7 @@ fn verify_yubikey_otp(otp: String) -> JsonResult { #[post("/two-factor/get-yubikey", data = "")] fn generate_yubikey(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { if !CONFIG.yubico_cred_set { - err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. \ - Yubikey OTP Disabled") + err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. 
Yubikey OTP Disabled") } let data: PasswordData = data.into_inner().data; @@ -619,11 +590,7 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: } // Check if we already have some data - let yubikey_data = TwoFactor::find_by_user_and_type( - &headers.user.uuid, - TwoFactorType::YubiKey as i32, - &conn, - ); + let yubikey_data = TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::YubiKey as i32, &conn); if let Some(yubikey_data) = yubikey_data { yubikey_data.delete(&conn)?; @@ -642,7 +609,7 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: for yubikey in &yubikeys { if yubikey.len() == 12 { // YubiKey ID - continue + continue; } let result = verify_yubikey_otp(yubikey.to_owned()); @@ -692,7 +659,8 @@ pub fn validate_yubikey_login(user_uuid: &str, response: &str, conn: &DbConn) -> None => err!("No YubiKey devices registered"), }; - let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); + let yubikey_metadata: YubikeyMetadata = + serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); let response_id = &response[..12]; if !yubikey_metadata.Keys.contains(&response_id.to_owned()) { diff --git a/src/api/identity.rs b/src/api/identity.rs index e0efc029..fb52d304 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -145,7 +145,12 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult Ok(Json(result)) } -fn twofactor_auth(user_uuid: &str, data: &ConnectData, device: &mut Device, conn: &DbConn) -> ApiResult> { +fn twofactor_auth( + user_uuid: &str, + data: &ConnectData, + device: &mut Device, + conn: &DbConn, +) -> ApiResult> { let twofactors_raw = TwoFactor::find_by_user(user_uuid, conn); // Remove u2f challenge twofactors (impl detail) let twofactors: Vec<_> = twofactors_raw.iter().filter(|tf| tf.type_ < 1000).collect(); @@ -252,13 +257,14 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api result["TwoFactorProviders2"][provider.to_string()] = Value::Object(map); } - Some(TwoFactorType::YubiKey) => { - let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::YubiKey as i32, &conn) { + Some(tf_type @ TwoFactorType::YubiKey) => { + let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, &conn) { Some(tf) => tf, None => err!("No YubiKey devices registered"), }; - let yubikey_metadata: two_factor::YubikeyMetadata = serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); + let yubikey_metadata: two_factor::YubikeyMetadata = + serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); let mut map = JsonMap::new(); map.insert("Nfc".into(), Value::Bool(yubikey_metadata.Nfc)); diff --git a/src/api/mod.rs b/src/api/mod.rs index c719d15e..e5b2f1d4 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,17 +1,17 @@ -pub(crate) mod core; mod admin; +pub(crate) mod core; mod icons; mod identity; -mod web; mod notifications; +mod web; -pub use self::core::routes as core_routes; pub use self::admin::routes as admin_routes; +pub use self::core::routes as core_routes; pub use self::icons::routes as icons_routes; pub use self::identity::routes as identity_routes; -pub use self::web::routes as web_routes; pub use self::notifications::routes as notifications_routes; -pub use self::notifications::{start_notification_server, WebSocketUsers, UpdateType}; +pub use self::notifications::{start_notification_server, Notify, UpdateType}; +pub 
use self::web::routes as web_routes;
 
 use rocket_contrib::json::Json;
 use serde_json::Value;
@@ -28,7 +28,7 @@ type JsonUpcase<T> = Json<Upcase<T>>;
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct PasswordData {
-    MasterPasswordHash: String
+    MasterPasswordHash: String,
 }
 
 #[derive(Deserialize, Debug, Clone)]
@@ -42,14 +42,14 @@ impl NumberOrString {
     fn into_string(self) -> String {
         match self {
             NumberOrString::Number(n) => n.to_string(),
-            NumberOrString::String(s) => s
+            NumberOrString::String(s) => s,
         }
     }
 
     fn into_i32(self) -> Option<i32> {
         match self {
             NumberOrString::Number(n) => Some(n),
-            NumberOrString::String(s) => s.parse().ok()
-        }
+            NumberOrString::String(s) => s.parse().ok(),
+        }
     }
 }
 
diff --git a/src/api/notifications.rs b/src/api/notifications.rs
index d126b2ad..9f4cc77e 100644
--- a/src/api/notifications.rs
+++ b/src/api/notifications.rs
@@ -14,7 +14,7 @@ pub fn routes() -> Vec<Route> {
 
 #[get("/hub")]
 fn websockets_err() -> JsonResult {
-    err!("'/notifications/hub' should be proxied towards the websocket server, otherwise notifications will not work. Go to the README for more info.")
+    err!("'/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the README for more info.")
 }
 
 #[post("/hub/negotiate")]
@@ -40,9 +40,9 @@ fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
     })))
 }
 
-///
-/// Websockets server
-///
+//
+// Websockets server
+//
 
 use std::sync::Arc;
 use std::thread;
@@ -94,9 +94,7 @@ fn serialize_date(date: NaiveDateTime) -> Value {
     use byteorder::{BigEndian, WriteBytesExt};
 
     let mut bs = [0u8; 8];
-    bs.as_mut()
-        .write_i64::<BigEndian>(timestamp)
-        .expect("Unable to write");
+    bs.as_mut().write_i64::<BigEndian>(timestamp).expect("Unable to write");
 
     // -1 is Timestamp
     // https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type
@@ -142,12 +140,7 @@ impl Handler for WSHandler {
         use crate::auth;
         let claims = match auth::decode_jwt(access_token) {
             Ok(claims) => claims,
-            Err(_) => {
-                return Err(ws::Error::new(
-                    ws::ErrorKind::Internal,
-                    "Invalid access token provided",
-                ))
-            }
+            Err(_) => return Err(ws::Error::new(ws::ErrorKind::Internal, "Invalid access token provided")),
         };
 
         // Assign the user to the handler
@@ -158,11 +151,9 @@ impl Handler for WSHandler {
         let handler_insert = self.out.clone();
         let handler_update = self.out.clone();
 
-        self.users.map.upsert(
-            user_uuid,
-            || vec![handler_insert],
-            |ref mut v| v.push(handler_update),
-        );
+        self.users
+            .map
+            .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
 
         // Schedule a ping to keep the connection alive
         self.out.timeout(PING_MS, PING)
@@ -238,7 +229,7 @@ impl Factory for WSFactory {
 
 #[derive(Clone)]
 pub struct WebSocketUsers {
-    pub map: Arc<CHashMap<String, Vec<Sender>>>,
+    map: Arc<CHashMap<String, Vec<Sender>>>,
 }
 
 impl WebSocketUsers {
@@ -338,32 +329,32 @@ fn create_ping() -> Vec<u8> {
 
 #[allow(dead_code)]
 pub enum UpdateType {
-    SyncCipherUpdate = 0,
-    SyncCipherCreate = 1,
-    SyncLoginDelete = 2,
-    SyncFolderDelete = 3,
-    SyncCiphers = 4,
-
-    SyncVault = 5,
-    SyncOrgKeys = 6,
-    SyncFolderCreate = 7,
-    SyncFolderUpdate = 8,
-    SyncCipherDelete = 9,
+    CipherUpdate = 0,
+    CipherCreate = 1,
+    LoginDelete = 2,
+    FolderDelete = 3,
+    Ciphers = 4,
+
+    Vault = 5,
+    OrgKeys = 6,
+    FolderCreate = 7,
+    FolderUpdate = 8,
+    CipherDelete = 9,
 
     SyncSettings = 10,
 
     LogOut = 11,
 }
 
+use rocket::State;
+pub type Notify<'a> = State<'a, WebSocketUsers>;
+
 pub fn start_notification_server() -> WebSocketUsers {
     let factory = WSFactory::init();
     let users = factory.users.clone();
 
     if CONFIG.websocket_enabled {
         thread::spawn(move || {
- WebSocket::new(factory) - .unwrap() - .listen(&CONFIG.websocket_url) - .unwrap(); + WebSocket::new(factory).unwrap().listen(&CONFIG.websocket_url).unwrap(); }); } diff --git a/src/auth.rs b/src/auth.rs index df85697f..6eb029e6 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,6 +1,6 @@ -/// -/// JWT Handling -/// +// +// JWT Handling +// use crate::util::read_file; use chrono::Duration; @@ -15,17 +15,20 @@ const JWT_ALGORITHM: Algorithm = Algorithm::RS256; lazy_static! { pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2); pub static ref JWT_ISSUER: String = CONFIG.domain.clone(); - static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM); - static ref PRIVATE_RSA_KEY: Vec = match read_file(&CONFIG.private_rsa_key) { Ok(key) => key, - Err(e) => panic!("Error loading private RSA Key from {}\n Error: {}", CONFIG.private_rsa_key, e) + Err(e) => panic!( + "Error loading private RSA Key from {}\n Error: {}", + CONFIG.private_rsa_key, e + ), }; - static ref PUBLIC_RSA_KEY: Vec = match read_file(&CONFIG.public_rsa_key) { Ok(key) => key, - Err(e) => panic!("Error loading public RSA Key from {}\n Error: {}", CONFIG.public_rsa_key, e) + Err(e) => panic!( + "Error loading public RSA Key from {}\n Error: {}", + CONFIG.public_rsa_key, e + ), }; } @@ -65,7 +68,7 @@ pub fn decode_invite_jwt(token: &str) -> Result { algorithms: vec![JWT_ALGORITHM], }; - jsonwebtoken::decode(token, &PUBLIC_RSA_KEY, &validation) + jsonwebtoken::decode(token, &PUBLIC_RSA_KEY, &validation) .map(|d| d.claims) .map_res("Error decoding invite JWT") } @@ -117,14 +120,14 @@ pub struct InviteJWTClaims { pub user_org_id: Option, } -/// -/// Bearer token authentication -/// +// +// Bearer token authentication +// +use rocket::request::{self, FromRequest, Request}; use rocket::Outcome; -use rocket::request::{self, Request, FromRequest}; +use crate::db::models::{Device, User, UserOrgStatus, UserOrgType, UserOrganization}; use crate::db::DbConn; -use crate::db::models::{User, UserOrganization, UserOrgType, UserOrgStatus, Device}; pub struct Headers { pub host: String, @@ -227,10 +230,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders { Some(Ok(org_id)) => { let conn = match request.guard::() { Outcome::Success(conn) => conn, - _ => err_handler!("Error getting DB") + _ => err_handler!("Error getting DB"), }; - let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { + let user = headers.user; + let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn) { Some(user) => { if user.status == UserOrgStatus::Confirmed as i32 { user @@ -238,17 +242,18 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders { err_handler!("The current user isn't confirmed member of the organization") } } - None => err_handler!("The current user isn't member of the organization") + None => err_handler!("The current user isn't member of the organization"), }; Outcome::Success(Self { host: headers.host, device: headers.device, - user: headers.user, + user, org_user_type: { if let Some(org_usr_type) = UserOrgType::from_i32(org_user.type_) { org_usr_type - } else { // This should only happen if the DB is corrupted + } else { + // This should only happen if the DB is corrupted err_handler!("Unknown user type in the database") } }, @@ -319,9 +324,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders { } } -/// -/// Client IP address detection -/// +// +// Client IP address detection +// use std::net::IpAddr; pub struct ClientIp { diff --git a/src/crypto.rs b/src/crypto.rs index 
2698c0e4..9321f25f 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -1,6 +1,6 @@ -/// -/// PBKDF2 derivation -/// +// +// PBKDF2 derivation +// use ring::{digest, pbkdf2}; @@ -19,9 +19,9 @@ pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8], iterati pbkdf2::verify(DIGEST_ALG, iterations, salt, secret, previous).is_ok() } -/// -/// Random values -/// +// +// Random values +// pub fn get_random_64() -> Vec { get_random(vec![0u8; 64]) @@ -30,7 +30,9 @@ pub fn get_random_64() -> Vec { pub fn get_random(mut array: Vec) -> Vec { use ring::rand::{SecureRandom, SystemRandom}; - SystemRandom::new().fill(&mut array).expect("Error generating random values"); + SystemRandom::new() + .fill(&mut array) + .expect("Error generating random values"); array } diff --git a/src/db/mod.rs b/src/db/mod.rs index e72dc771..cc011bac 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,9 +1,9 @@ use std::ops::Deref; -use diesel::{Connection as DieselConnection, ConnectionError}; -use diesel::sqlite::SqliteConnection; use diesel::r2d2; use diesel::r2d2::ConnectionManager; +use diesel::sqlite::SqliteConnection; +use diesel::{Connection as DieselConnection, ConnectionError}; use rocket::http::Status; use rocket::request::{self, FromRequest}; @@ -20,16 +20,14 @@ type Pool = r2d2::Pool>; /// Connection request guard type: a wrapper around an r2d2 pooled connection. pub struct DbConn(pub r2d2::PooledConnection>); -pub mod schema; pub mod models; +pub mod schema; /// Initializes a database pool. pub fn init_pool() -> Pool { let manager = ConnectionManager::new(&*CONFIG.database_url); - r2d2::Pool::builder() - .build(manager) - .expect("Failed to create pool") + r2d2::Pool::builder().build(manager).expect("Failed to create pool") } pub fn get_connection() -> Result { @@ -46,7 +44,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn { let pool = request.guard::>()?; match pool.get() { Ok(conn) => Outcome::Success(DbConn(conn)), - Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())) + Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())), } } } diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index a5974c36..82ebebd9 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -49,10 +49,10 @@ impl Attachment { } } +use crate::db::schema::attachments; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::attachments; use crate::api::EmptyResult; use crate::error::MapResult; @@ -68,12 +68,11 @@ impl Attachment { pub fn delete(self, conn: &DbConn) -> EmptyResult { crate::util::retry( - || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))) - .execute(&**conn), + || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(&**conn), 10, ) .map_res("Error deleting attachment")?; - + crate::util::delete_file(&self.get_file_path())?; Ok(()) } @@ -86,7 +85,10 @@ impl Attachment { } pub fn find_by_id(id: &str, conn: &DbConn) -> Option { - attachments::table.filter(attachments::id.eq(id)).first::(&**conn).ok() + attachments::table + .filter(attachments::id.eq(id)) + .first::(&**conn) + .ok() } pub fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec { diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 40bbeb71..3a7c9dff 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -1,7 +1,9 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; -use super::{Attachment, CollectionCipher, FolderCipher, Organization, User, 
UserOrgStatus, UserOrgType, UserOrganization}; +use super::{ + Attachment, CollectionCipher, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization, +}; #[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[table_name = "ciphers"] @@ -79,11 +81,15 @@ impl Cipher { let fields_json: Value = if let Some(ref fields) = self.fields { serde_json::from_str(fields).unwrap() - } else { Value::Null }; - + } else { + Value::Null + }; + let password_history_json: Value = if let Some(ref password_history) = self.password_history { serde_json::from_str(password_history).unwrap() - } else { Value::Null }; + } else { + Value::Null + }; let mut data_json: Value = serde_json::from_str(&self.data).unwrap(); @@ -137,15 +143,16 @@ impl Cipher { Some(ref user_uuid) => { User::update_uuid_revision(&user_uuid, conn); user_uuids.push(user_uuid.clone()) - }, - None => { // Belongs to Organization, need to update affected users + } + None => { + // Belongs to Organization, need to update affected users if let Some(ref org_uuid) = self.organization_uuid { UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - user_uuids.push(user_org.user_uuid.clone()) - }); + .iter() + .for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + user_uuids.push(user_org.user_uuid.clone()) + }); } } }; @@ -207,7 +214,9 @@ impl Cipher { Ok(()) //nothing to do } else { self.update_users_revision(conn); - if let Some(current_folder) = FolderCipher::find_by_folder_and_cipher(¤t_folder, &self.uuid, &conn) { + if let Some(current_folder) = + FolderCipher::find_by_folder_and_cipher(¤t_folder, &self.uuid, &conn) + { current_folder.delete(&conn)?; } FolderCipher::new(&new_folder, &self.uuid).save(&conn) @@ -227,64 +236,79 @@ impl Cipher { pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { ciphers::table - .filter(ciphers::uuid.eq(&self.uuid)) - .left_join(users_organizations::table.on( - ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( - users_organizations::user_uuid.eq(user_uuid) + .filter(ciphers::uuid.eq(&self.uuid)) + .left_join( + users_organizations::table.on(ciphers::organization_uuid + .eq(users_organizations::org_uuid.nullable()) + .and(users_organizations::user_uuid.eq(user_uuid))), ) - )) - .left_join(ciphers_collections::table) - .left_join(users_collections::table.on( - ciphers_collections::collection_uuid.eq(users_collections::collection_uuid) - )) - .filter(ciphers::user_uuid.eq(user_uuid).or( // Cipher owner - users_organizations::access_all.eq(true).or( // access_all in Organization - users_organizations::type_.le(UserOrgType::Admin as i32).or( // Org admin or owner - users_collections::user_uuid.eq(user_uuid).and( - users_collections::read_only.eq(false) //R/W access to collection - ) - ) + .left_join(ciphers_collections::table) + .left_join( + users_collections::table + .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)), ) - )) - .select(ciphers::all_columns) - .first::(&**conn).ok().is_some() + .filter(ciphers::user_uuid.eq(user_uuid).or( + // Cipher owner + users_organizations::access_all.eq(true).or( + // access_all in Organization + users_organizations::type_.le(UserOrgType::Admin as i32).or( + // Org admin or owner + users_collections::user_uuid.eq(user_uuid).and( + users_collections::read_only.eq(false), //R/W access to collection + ), + ), + ), + )) + 
.select(ciphers::all_columns) + .first::(&**conn) + .ok() + .is_some() } pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { ciphers::table - .filter(ciphers::uuid.eq(&self.uuid)) - .left_join(users_organizations::table.on( - ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( - users_organizations::user_uuid.eq(user_uuid) + .filter(ciphers::uuid.eq(&self.uuid)) + .left_join( + users_organizations::table.on(ciphers::organization_uuid + .eq(users_organizations::org_uuid.nullable()) + .and(users_organizations::user_uuid.eq(user_uuid))), ) - )) - .left_join(ciphers_collections::table) - .left_join(users_collections::table.on( - ciphers_collections::collection_uuid.eq(users_collections::collection_uuid) - )) - .filter(ciphers::user_uuid.eq(user_uuid).or( // Cipher owner - users_organizations::access_all.eq(true).or( // access_all in Organization - users_organizations::type_.le(UserOrgType::Admin as i32).or( // Org admin or owner - users_collections::user_uuid.eq(user_uuid) // Access to Collection - ) + .left_join(ciphers_collections::table) + .left_join( + users_collections::table + .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)), ) - )) - .select(ciphers::all_columns) - .first::(&**conn).ok().is_some() + .filter(ciphers::user_uuid.eq(user_uuid).or( + // Cipher owner + users_organizations::access_all.eq(true).or( + // access_all in Organization + users_organizations::type_.le(UserOrgType::Admin as i32).or( + // Org admin or owner + users_collections::user_uuid.eq(user_uuid), // Access to Collection + ), + ), + )) + .select(ciphers::all_columns) + .first::(&**conn) + .ok() + .is_some() } pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option { - folders_ciphers::table.inner_join(folders::table) + folders_ciphers::table + .inner_join(folders::table) .filter(folders::user_uuid.eq(&user_uuid)) .filter(folders_ciphers::cipher_uuid.eq(&self.uuid)) .select(folders_ciphers::folder_uuid) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { ciphers::table .filter(ciphers::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } // Find all ciphers accessible to user diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 7b0f565c..3738bd9c 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{Organization, UserOrganization, UserOrgType, UserOrgStatus}; +use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; #[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[table_name = "collections"] @@ -33,10 +33,10 @@ impl Collection { } } +use crate::db::schema::*; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::*; use crate::api::EmptyResult; use crate::error::MapResult; @@ -46,27 +46,24 @@ impl Collection { pub fn save(&mut self, conn: &DbConn) -> EmptyResult { // Update affected users revision UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + .iter() + .for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); diesel::replace_into(collections::table) - .values(&*self) - .execute(&**conn) - .map_res("Error saving collection") + .values(&*self) + .execute(&**conn) + .map_res("Error saving collection") } 
pub fn delete(self, conn: &DbConn) -> EmptyResult { CollectionCipher::delete_all_by_collection(&self.uuid, &conn)?; CollectionUser::delete_all_by_collection(&self.uuid, &conn)?; - diesel::delete( - collections::table.filter( - collections::uuid.eq(self.uuid) - ) - ).execute(&**conn) - .map_res("Error deleting collection") + diesel::delete(collections::table.filter(collections::uuid.eq(self.uuid))) + .execute(&**conn) + .map_res("Error deleting collection") } pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { @@ -79,7 +76,8 @@ impl Collection { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { collections::table .filter(collections::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec { @@ -106,21 +104,26 @@ impl Collection { } pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { - Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect() + Self::find_by_user_uuid(user_uuid, conn) + .into_iter() + .filter(|c| c.org_uuid == org_uuid) + .collect() } pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec { collections::table .filter(collections::org_uuid.eq(org_uuid)) - .load::(&**conn).expect("Error loading collections") + .load::(&**conn) + .expect("Error loading collections") } pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { collections::table - .filter(collections::uuid.eq(uuid)) - .filter(collections::org_uuid.eq(org_uuid)) - .select(collections::all_columns) - .first::(&**conn).ok() + .filter(collections::uuid.eq(uuid)) + .filter(collections::org_uuid.eq(org_uuid)) + .select(collections::all_columns) + .first::(&**conn) + .ok() } pub fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { @@ -150,22 +153,25 @@ impl Collection { match UserOrganization::find_by_user_and_org(&user_uuid, &self.org_uuid, &conn) { None => false, // Not in Org Some(user_org) => { - if user_org.access_all { - true - } else { - users_collections::table.inner_join(collections::table) - .filter(users_collections::collection_uuid.eq(&self.uuid)) - .filter(users_collections::user_uuid.eq(&user_uuid)) - .filter(users_collections::read_only.eq(false)) - .select(collections::all_columns) - .first::(&**conn).ok().is_some() // Read only or no access to collection - } + if user_org.access_all { + true + } else { + users_collections::table + .inner_join(collections::table) + .filter(users_collections::collection_uuid.eq(&self.uuid)) + .filter(users_collections::user_uuid.eq(&user_uuid)) + .filter(users_collections::read_only.eq(false)) + .select(collections::all_columns) + .first::(&**conn) + .ok() + .is_some() // Read only or no access to collection + } } } } } -use super::User; +use super::User; #[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[table_name = "users_collections"] @@ -186,70 +192,72 @@ impl CollectionUser { .inner_join(collections::table.on(collections::uuid.eq(users_collections::collection_uuid))) .filter(collections::org_uuid.eq(org_uuid)) .select(users_collections::all_columns) - .load::(&**conn).expect("Error loading users_collections") + .load::(&**conn) + .expect("Error loading users_collections") } - pub fn save(user_uuid: &str, collection_uuid: &str, read_only:bool, conn: &DbConn) -> EmptyResult { + pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult { 
User::update_uuid_revision(&user_uuid, conn); diesel::replace_into(users_collections::table) - .values(( - users_collections::user_uuid.eq(user_uuid), - users_collections::collection_uuid.eq(collection_uuid), - users_collections::read_only.eq(read_only), - )).execute(&**conn) - .map_res("Error adding user to collection") + .values(( + users_collections::user_uuid.eq(user_uuid), + users_collections::collection_uuid.eq(collection_uuid), + users_collections::read_only.eq(read_only), + )) + .execute(&**conn) + .map_res("Error adding user to collection") } pub fn delete(self, conn: &DbConn) -> EmptyResult { User::update_uuid_revision(&self.user_uuid, conn); - diesel::delete(users_collections::table - .filter(users_collections::user_uuid.eq(&self.user_uuid)) - .filter(users_collections::collection_uuid.eq(&self.collection_uuid))) + diesel::delete( + users_collections::table + .filter(users_collections::user_uuid.eq(&self.user_uuid)) + .filter(users_collections::collection_uuid.eq(&self.collection_uuid)), + ) .execute(&**conn) .map_res("Error removing user from collection") } pub fn find_by_collection(collection_uuid: &str, conn: &DbConn) -> Vec { users_collections::table - .filter(users_collections::collection_uuid.eq(collection_uuid)) - .select(users_collections::all_columns) - .load::(&**conn).expect("Error loading users_collections") + .filter(users_collections::collection_uuid.eq(collection_uuid)) + .select(users_collections::all_columns) + .load::(&**conn) + .expect("Error loading users_collections") } pub fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { users_collections::table - .filter(users_collections::collection_uuid.eq(collection_uuid)) - .filter(users_collections::user_uuid.eq(user_uuid)) - .select(users_collections::all_columns) - .first::(&**conn).ok() + .filter(users_collections::collection_uuid.eq(collection_uuid)) + .filter(users_collections::user_uuid.eq(user_uuid)) + .select(users_collections::all_columns) + .first::(&**conn) + .ok() } pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { CollectionUser::find_by_collection(&collection_uuid, conn) - .iter() - .for_each(|collection| { - User::update_uuid_revision(&collection.user_uuid, conn) - }); + .iter() + .for_each(|collection| User::update_uuid_revision(&collection.user_uuid, conn)); - diesel::delete(users_collections::table - .filter(users_collections::collection_uuid.eq(collection_uuid)) - ).execute(&**conn) - .map_res("Error deleting users from collection") + diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid))) + .execute(&**conn) + .map_res("Error deleting users from collection") } pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { User::update_uuid_revision(&user_uuid, conn); - diesel::delete(users_collections::table - .filter(users_collections::user_uuid.eq(user_uuid)) - ).execute(&**conn) - .map_res("Error removing user from collections") + diesel::delete(users_collections::table.filter(users_collections::user_uuid.eq(user_uuid))) + .execute(&**conn) + .map_res("Error removing user from collections") } } -use super::Cipher; +use super::Cipher; #[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[table_name = "ciphers_collections"] @@ -268,29 +276,30 @@ impl CollectionCipher { .values(( ciphers_collections::cipher_uuid.eq(cipher_uuid), ciphers_collections::collection_uuid.eq(collection_uuid), - )).execute(&**conn) + )) + .execute(&**conn) 
.map_res("Error adding cipher to collection") } pub fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { - diesel::delete(ciphers_collections::table - .filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)) - .filter(ciphers_collections::collection_uuid.eq(collection_uuid))) - .execute(&**conn) - .map_res("Error deleting cipher from collection") + diesel::delete( + ciphers_collections::table + .filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)) + .filter(ciphers_collections::collection_uuid.eq(collection_uuid)), + ) + .execute(&**conn) + .map_res("Error deleting cipher from collection") } pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { - diesel::delete(ciphers_collections::table - .filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)) - ).execute(&**conn) - .map_res("Error removing cipher from collections") + diesel::delete(ciphers_collections::table.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid))) + .execute(&**conn) + .map_res("Error removing cipher from collections") } pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { - diesel::delete(ciphers_collections::table - .filter(ciphers_collections::collection_uuid.eq(collection_uuid)) - ).execute(&**conn) - .map_res("Error removing ciphers from collection") + diesel::delete(ciphers_collections::table.filter(ciphers_collections::collection_uuid.eq(collection_uuid))) + .execute(&**conn) + .map_res("Error removing ciphers from collection") } -} \ No newline at end of file +} diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 2636b935..01e12772 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -44,8 +44,8 @@ impl Device { } pub fn refresh_twofactor_remember(&mut self) -> String { - use data_encoding::BASE64; use crate::crypto; + use data_encoding::BASE64; let twofactor_remember = BASE64.encode(&crypto::get_random(vec![0u8; 180])); self.twofactor_remember = Some(twofactor_remember.clone()); @@ -57,12 +57,11 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, orgs: Vec) -> (String, i64) { // If there is no refresh token, we create one if self.refresh_token.is_empty() { - use data_encoding::BASE64URL; use crate::crypto; + use data_encoding::BASE64URL; self.refresh_token = BASE64URL.encode(&crypto::get_random_64()); } @@ -105,10 +104,10 @@ impl Device { } } +use crate::db::schema::devices; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::devices; use crate::api::EmptyResult; use crate::error::MapResult; @@ -119,21 +118,16 @@ impl Device { self.updated_at = Utc::now().naive_utc(); crate::util::retry( - || { - diesel::replace_into(devices::table) - .values(&*self) - .execute(&**conn) - }, + || diesel::replace_into(devices::table).values(&*self).execute(&**conn), 10, ) .map_res("Error saving device") } pub fn delete(self, conn: &DbConn) -> EmptyResult { - diesel::delete(devices::table.filter( - devices::uuid.eq(self.uuid) - )).execute(&**conn) - .map_res("Error removing device") + diesel::delete(devices::table.filter(devices::uuid.eq(self.uuid))) + .execute(&**conn) + .map_res("Error removing device") } pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { @@ -146,18 +140,21 @@ impl Device { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { devices::table .filter(devices::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn 
find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option { devices::table .filter(devices::refresh_token.eq(refresh_token)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { devices::table .filter(devices::user_uuid.eq(user_uuid)) - .load::(&**conn).expect("Error loading devices") + .load::(&**conn) + .expect("Error loading devices") } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index 669a9c3b..142bb0c9 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -1,7 +1,7 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; -use super::{User, Cipher}; +use super::{Cipher, User}; #[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[table_name = "folders"] @@ -61,10 +61,10 @@ impl FolderCipher { } } +use crate::db::schema::{folders, folders_ciphers}; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::{folders, folders_ciphers}; use crate::api::EmptyResult; use crate::error::MapResult; @@ -76,20 +76,18 @@ impl Folder { self.updated_at = Utc::now().naive_utc(); diesel::replace_into(folders::table) - .values(&*self).execute(&**conn) - .map_res("Error saving folder") + .values(&*self) + .execute(&**conn) + .map_res("Error saving folder") } pub fn delete(&self, conn: &DbConn) -> EmptyResult { User::update_uuid_revision(&self.user_uuid, conn); FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; - diesel::delete( - folders::table.filter( - folders::uuid.eq(&self.uuid) - ) - ).execute(&**conn) - .map_res("Error deleting folder") + diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) + .execute(&**conn) + .map_res("Error deleting folder") } pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { @@ -102,56 +100,60 @@ impl Folder { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { folders::table .filter(folders::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { folders::table .filter(folders::user_uuid.eq(user_uuid)) - .load::(&**conn).expect("Error loading folders") + .load::(&**conn) + .expect("Error loading folders") } } impl FolderCipher { pub fn save(&self, conn: &DbConn) -> EmptyResult { diesel::replace_into(folders_ciphers::table) - .values(&*self) - .execute(&**conn) - .map_res("Error adding cipher to folder") + .values(&*self) + .execute(&**conn) + .map_res("Error adding cipher to folder") } pub fn delete(self, conn: &DbConn) -> EmptyResult { - diesel::delete(folders_ciphers::table - .filter(folders_ciphers::cipher_uuid.eq(self.cipher_uuid)) - .filter(folders_ciphers::folder_uuid.eq(self.folder_uuid)) - ).execute(&**conn) + diesel::delete( + folders_ciphers::table + .filter(folders_ciphers::cipher_uuid.eq(self.cipher_uuid)) + .filter(folders_ciphers::folder_uuid.eq(self.folder_uuid)), + ) + .execute(&**conn) .map_res("Error removing cipher from folder") } pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { - diesel::delete(folders_ciphers::table - .filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)) - ).execute(&**conn) - .map_res("Error removing cipher from folders") + diesel::delete(folders_ciphers::table.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid))) + .execute(&**conn) + .map_res("Error removing cipher from folders") } pub fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult { - diesel::delete(folders_ciphers::table - 
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) - ).execute(&**conn) - .map_res("Error removing ciphers from folder") + diesel::delete(folders_ciphers::table.filter(folders_ciphers::folder_uuid.eq(folder_uuid))) + .execute(&**conn) + .map_res("Error removing ciphers from folder") } pub fn find_by_folder_and_cipher(folder_uuid: &str, cipher_uuid: &str, conn: &DbConn) -> Option { folders_ciphers::table .filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec { folders_ciphers::table .filter(folders_ciphers::folder_uuid.eq(folder_uuid)) - .load::(&**conn).expect("Error loading folders") + .load::(&**conn) + .expect("Error loading folders") } } diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index 1f8d10a8..1ae6197f 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -10,10 +10,10 @@ mod two_factor; pub use self::attachment::Attachment; pub use self::cipher::Cipher; +pub use self::collection::{Collection, CollectionCipher, CollectionUser}; pub use self::device::Device; pub use self::folder::{Folder, FolderCipher}; -pub use self::user::{User, Invitation}; pub use self::organization::Organization; -pub use self::organization::{UserOrganization, UserOrgStatus, UserOrgType}; -pub use self::collection::{Collection, CollectionUser, CollectionCipher}; -pub use self::two_factor::{TwoFactor, TwoFactorType}; \ No newline at end of file +pub use self::organization::{UserOrgStatus, UserOrgType, UserOrganization}; +pub use self::two_factor::{TwoFactor, TwoFactorType}; +pub use self::user::{Invitation, User}; diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index de2196e4..0efcb5e3 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -1,7 +1,7 @@ -use std::cmp::Ordering; use serde_json::Value; +use std::cmp::Ordering; -use super::{User, CollectionUser}; +use super::{CollectionUser, User}; #[derive(Debug, Identifiable, Queryable, Insertable)] #[table_name = "organizations"] @@ -32,9 +32,7 @@ pub enum UserOrgStatus { Confirmed = 2, } -#[derive(Copy, Clone)] -#[derive(PartialEq)] -#[derive(Eq)] +#[derive(Copy, Clone, PartialEq, Eq)] pub enum UserOrgType { Owner = 0, Admin = 1, @@ -51,13 +49,13 @@ impl Ord for UserOrgType { UserOrgType::Owner => Ordering::Greater, UserOrgType::Admin => match other { UserOrgType::Owner => Ordering::Less, - _ => Ordering::Greater + _ => Ordering::Greater, }, UserOrgType::Manager => match other { UserOrgType::Owner | UserOrgType::Admin => Ordering::Less, - _ => Ordering::Greater + _ => Ordering::Greater, }, - UserOrgType::User => Ordering::Less + UserOrgType::User => Ordering::Less, } } } @@ -78,7 +76,7 @@ impl PartialEq for UserOrgType { impl PartialOrd for UserOrgType { fn partial_cmp(&self, other: &i32) -> Option { if let Some(other) = Self::from_i32(*other) { - return Some(self.cmp(&other)) + return Some(self.cmp(&other)); } None } @@ -96,7 +94,6 @@ impl PartialOrd for UserOrgType { _ => true, } } - } impl PartialEq for i32 { @@ -108,7 +105,7 @@ impl PartialEq for i32 { impl PartialOrd for i32 { fn partial_cmp(&self, other: &UserOrgType) -> Option { if let Some(self_type) = UserOrgType::from_i32(*self) { - return Some(self_type.cmp(other)) + return Some(self_type.cmp(other)); } None } @@ -126,7 +123,6 @@ impl PartialOrd for i32 { _ => false, } } - } impl UserOrgType { @@ -149,7 +145,6 @@ impl UserOrgType { _ => None, } } - } /// 
Local methods @@ -208,11 +203,10 @@ impl UserOrganization { } } - +use crate::db::schema::{ciphers_collections, organizations, users_collections, users_organizations}; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::{organizations, users_organizations, users_collections, ciphers_collections}; use crate::api::EmptyResult; use crate::error::MapResult; @@ -221,13 +215,14 @@ use crate::error::MapResult; impl Organization { pub fn save(&mut self, conn: &DbConn) -> EmptyResult { UserOrganization::find_by_org(&self.uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + .iter() + .for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); diesel::replace_into(organizations::table) - .values(&*self).execute(&**conn) + .values(&*self) + .execute(&**conn) .map_res("Error saving organization") } @@ -238,18 +233,16 @@ impl Organization { Collection::delete_all_by_organization(&self.uuid, &conn)?; UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; - diesel::delete( - organizations::table.filter( - organizations::uuid.eq(self.uuid) - ) - ).execute(&**conn) - .map_res("Error saving organization") + diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) + .execute(&**conn) + .map_res("Error saving organization") } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { organizations::table .filter(organizations::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } } @@ -314,12 +307,15 @@ impl UserOrganization { }) } - pub fn to_json_details(&self, conn: &DbConn) -> Value { - let coll_uuids = if self.access_all { + pub fn to_json_details(&self, conn: &DbConn) -> Value { + let coll_uuids = if self.access_all { vec![] // If we have complete access, no need to fill the array } else { let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); - collections.iter().map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only})).collect() + collections + .iter() + .map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only})) + .collect() }; json!({ @@ -339,8 +335,9 @@ impl UserOrganization { User::update_uuid_revision(&self.user_uuid, conn); diesel::replace_into(users_organizations::table) - .values(&*self).execute(&**conn) - .map_res("Error adding user to organization") + .values(&*self) + .execute(&**conn) + .map_res("Error adding user to organization") } pub fn delete(self, conn: &DbConn) -> EmptyResult { @@ -348,12 +345,9 @@ impl UserOrganization { CollectionUser::delete_all_by_user(&self.user_uuid, &conn)?; - diesel::delete( - users_organizations::table.filter( - users_organizations::uuid.eq(self.uuid) - ) - ).execute(&**conn) - .map_res("Error removing user from organization") + diesel::delete(users_organizations::table.filter(users_organizations::uuid.eq(self.uuid))) + .execute(&**conn) + .map_res("Error removing user from organization") } pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { @@ -377,54 +371,62 @@ impl UserOrganization { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { users_organizations::table .filter(users_organizations::uuid.eq(uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { users_organizations::table .filter(users_organizations::uuid.eq(uuid)) .filter(users_organizations::org_uuid.eq(org_uuid)) - 
.first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::status.eq(UserOrgStatus::Confirmed as i32)) - .load::(&**conn).unwrap_or_default() + .load::(&**conn) + .unwrap_or_default() } pub fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) - .load::(&**conn).unwrap_or_default() + .load::(&**conn) + .unwrap_or_default() } pub fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) - .load::(&**conn).unwrap_or_default() + .load::(&**conn) + .unwrap_or_default() } pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) - .load::(&**conn).expect("Error loading user organizations") + .load::(&**conn) + .expect("Error loading user organizations") } pub fn find_by_org_and_type(org_uuid: &str, type_: i32, conn: &DbConn) -> Vec { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::type_.eq(type_)) - .load::(&**conn).expect("Error loading user organizations") + .load::(&**conn) + .expect("Error loading user organizations") } pub fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec { @@ -461,7 +463,4 @@ impl UserOrganization { .select(users_organizations::all_columns) .load::(&**conn).expect("Error loading user organizations") } - } - - diff --git a/src/db/models/two_factor.rs b/src/db/models/two_factor.rs index 609cb7b7..fd1d2c7b 100644 --- a/src/db/models/two_factor.rs +++ b/src/db/models/two_factor.rs @@ -50,7 +50,7 @@ impl TwoFactor { let decoded_secret = match BASE32.decode(totp_secret) { Ok(s) => s, - Err(_) => return false + Err(_) => return false, }; let generated = totp_raw_now(&decoded_secret, 6, 0, 30, &HashType::SHA1); @@ -74,10 +74,10 @@ impl TwoFactor { } } +use crate::db::schema::twofactor; +use crate::db::DbConn; use diesel; use diesel::prelude::*; -use crate::db::DbConn; -use crate::db::schema::twofactor; use crate::api::EmptyResult; use crate::error::MapResult; @@ -92,33 +92,29 @@ impl TwoFactor { } pub fn delete(self, conn: &DbConn) -> EmptyResult { - diesel::delete( - twofactor::table.filter( - twofactor::uuid.eq(self.uuid) - ) - ).execute(&**conn) - .map_res("Error deleting twofactor") + diesel::delete(twofactor::table.filter(twofactor::uuid.eq(self.uuid))) + .execute(&**conn) + .map_res("Error deleting twofactor") } pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { twofactor::table .filter(twofactor::user_uuid.eq(user_uuid)) - .load::(&**conn).expect("Error loading twofactor") + .load::(&**conn) + .expect("Error loading twofactor") } pub fn find_by_user_and_type(user_uuid: &str, type_: i32, conn: &DbConn) -> Option { twofactor::table .filter(twofactor::user_uuid.eq(user_uuid)) .filter(twofactor::type_.eq(type_)) - .first::(&**conn).ok() + .first::(&**conn) + .ok() } - + pub fn delete_all_by_user(user_uuid: &str, 
conn: &DbConn) -> EmptyResult {
-        diesel::delete(
-            twofactor::table.filter(
-                twofactor::user_uuid.eq(user_uuid)
-            )
-        ).execute(&**conn)
-        .map_res("Error deleting twofactors")
+        diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
+            .execute(&**conn)
+            .map_res("Error deleting twofactors")
     }
 }
diff --git a/src/db/models/user.rs b/src/db/models/user.rs
index b0e3d933..888d3f57 100644
--- a/src/db/models/user.rs
+++ b/src/db/models/user.rs
@@ -4,7 +4,6 @@ use serde_json::Value;
 use crate::crypto;
 use crate::CONFIG;
 
-
 #[derive(Debug, Identifiable, Queryable, Insertable)]
 #[table_name = "users"]
 #[primary_key(uuid)]
@@ -24,7 +23,7 @@ pub struct User {
     pub key: String,
     pub private_key: Option<String>,
     pub public_key: Option<String>,
-    
+
     #[column_name = "totp_secret"]
     _totp_secret: Option<String>,
     pub totp_recover: Option<String>,
@@ -33,7 +32,7 @@ pub struct User {
 
     pub equivalent_domains: String,
     pub excluded_globals: String,
-    
+
     pub client_kdf_type: i32,
     pub client_kdf_iter: i32,
 }
@@ -64,23 +63,25 @@ impl User {
             password_hint: None,
             private_key: None,
             public_key: None,
-            
+
             _totp_secret: None,
             totp_recover: None,
 
             equivalent_domains: "[]".to_string(),
             excluded_globals: "[]".to_string(),
-            
+
             client_kdf_type: Self::CLIENT_KDF_TYPE_DEFAULT,
             client_kdf_iter: Self::CLIENT_KDF_ITER_DEFAULT,
         }
     }
 
     pub fn check_valid_password(&self, password: &str) -> bool {
-        crypto::verify_password_hash(password.as_bytes(),
-                                     &self.salt,
-                                     &self.password_hash,
-                                     self.password_iterations as u32)
+        crypto::verify_password_hash(
+            password.as_bytes(),
+            &self.salt,
+            &self.password_hash,
+            self.password_iterations as u32,
+        )
     }
 
     pub fn check_valid_recovery_code(&self, recovery_code: &str) -> bool {
@@ -92,9 +93,7 @@ impl User {
     }
 
     pub fn set_password(&mut self, password: &str) {
-        self.password_hash = crypto::hash_password(password.as_bytes(),
-                                                   &self.salt,
-                                                   self.password_iterations as u32);
+        self.password_hash = crypto::hash_password(password.as_bytes(), &self.salt, self.password_iterations as u32);
     }
 
     pub fn reset_security_stamp(&mut self) {
@@ -102,11 +101,11 @@ impl User {
     }
 }
 
+use super::{Cipher, Device, Folder, TwoFactor, UserOrgType, UserOrganization};
+use crate::db::schema::{invitations, users};
+use crate::db::DbConn;
 use diesel;
 use diesel::prelude::*;
-use crate::db::DbConn;
-use crate::db::schema::{users, invitations};
-use super::{Cipher, Folder, Device, UserOrganization, UserOrgType, TwoFactor};
 
 use crate::api::EmptyResult;
 use crate::error::MapResult;
@@ -114,7 +113,7 @@ use crate::error::MapResult;
 /// Database methods
 impl User {
     pub fn to_json(&self, conn: &DbConn) -> Value {
-        use super::{UserOrganization, TwoFactor};
+        use super::{TwoFactor, UserOrganization};
 
         let orgs = UserOrganization::find_by_user(&self.uuid, conn);
         let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(&conn)).collect();
@@ -137,22 +136,20 @@ impl User {
         })
     }
 
-
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         self.updated_at = Utc::now().naive_utc();
 
         diesel::replace_into(users::table) // Insert or update
-            .values(&*self).execute(&**conn)
-            .map_res("Error saving user")
+            .values(&*self)
+            .execute(&**conn)
+            .map_res("Error saving user")
     }
 
     pub fn delete(self, conn: &DbConn) -> EmptyResult {
         for user_org in UserOrganization::find_by_user(&self.uuid, &*conn) {
             if user_org.type_ == UserOrgType::Owner {
-                if UserOrganization::find_by_org_and_type(
-                    &user_org.org_uuid,
-                    UserOrgType::Owner as i32, &conn
-                ).len() <= 1 {
+                let owner_type = UserOrgType::Owner as i32;
+                if UserOrganization::find_by_org_and_type(&user_org.org_uuid, owner_type, &conn).len() <= 1 {
                     err!("Can't delete last owner")
                 }
             }
@@ -165,15 +162,14 @@ impl User {
         TwoFactor::delete_all_by_user(&self.uuid, &*conn)?;
         Invitation::take(&self.email, &*conn); // Delete invitation if any
 
-        diesel::delete(users::table.filter(
-            users::uuid.eq(self.uuid)))
-            .execute(&**conn)
-            .map_res("Error deleting user")
+        diesel::delete(users::table.filter(users::uuid.eq(self.uuid)))
+            .execute(&**conn)
+            .map_res("Error deleting user")
     }
 
     pub fn update_uuid_revision(uuid: &str, conn: &DbConn) {
         if let Some(mut user) = User::find_by_uuid(&uuid, conn) {
-            if user.update_revision(conn).is_err(){
+            if user.update_revision(conn).is_err() {
                 warn!("Failed to update revision for {}", user.email);
             };
         };
@@ -181,32 +177,26 @@ impl User {
 
     pub fn update_revision(&mut self, conn: &DbConn) -> EmptyResult {
         self.updated_at = Utc::now().naive_utc();
-        diesel::update(
-            users::table.filter(
-                users::uuid.eq(&self.uuid)
-            )
-        )
-        .set(users::updated_at.eq(&self.updated_at))
-        .execute(&**conn)
-        .map_res("Error updating user revision")
+
+        diesel::update(users::table.filter(users::uuid.eq(&self.uuid)))
+            .set(users::updated_at.eq(&self.updated_at))
+            .execute(&**conn)
+            .map_res("Error updating user revision")
     }
 
     pub fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> {
         let lower_mail = mail.to_lowercase();
         users::table
             .filter(users::email.eq(lower_mail))
-            .first::<Self>(&**conn).ok()
+            .first::<Self>(&**conn)
+            .ok()
     }
 
     pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
-        users::table
-            .filter(users::uuid.eq(uuid))
-            .first::<Self>(&**conn).ok()
+        users::table.filter(users::uuid.eq(uuid)).first::<Self>(&**conn).ok()
     }
 
     pub fn get_all(conn: &DbConn) -> Vec<Self> {
-        users::table
-            .load::<Self>(&**conn).expect("Error loading users")
+        users::table.load::<Self>(&**conn).expect("Error loading users")
     }
 }
@@ -219,37 +209,35 @@ pub struct Invitation {
 
 impl Invitation {
     pub fn new(email: String) -> Self {
-        Self {
-            email
-        }
+        Self { email }
     }
 
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         diesel::replace_into(invitations::table)
-        .values(&*self)
-        .execute(&**conn)
-        .map_res("Error saving invitation")
+            .values(&*self)
+            .execute(&**conn)
+            .map_res("Error saving invitation")
     }
 
     pub fn delete(self, conn: &DbConn) -> EmptyResult {
-        diesel::delete(invitations::table.filter(
-            invitations::email.eq(self.email)))
-            .execute(&**conn)
-            .map_res("Error deleting invitation")
+        diesel::delete(invitations::table.filter(invitations::email.eq(self.email)))
+            .execute(&**conn)
+            .map_res("Error deleting invitation")
     }
 
     pub fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> {
         let lower_mail = mail.to_lowercase();
        invitations::table
             .filter(invitations::email.eq(lower_mail))
-            .first::<Self>(&**conn).ok()
+            .first::<Self>(&**conn)
+            .ok()
     }
 
     pub fn take(mail: &str, conn: &DbConn) -> bool {
-        CONFIG.invitations_allowed &&
-        match Self::find_by_mail(mail, &conn) {
-            Some(invitation) => invitation.delete(&conn).is_ok(),
-            None => false
-        }
+        CONFIG.invitations_allowed
+            && match Self::find_by_mail(mail, &conn) {
+                Some(invitation) => invitation.delete(&conn).is_ok(),
+                None => false,
+            }
     }
-}
\ No newline at end of file
+}
diff --git a/src/error.rs b/src/error.rs
index 7538cbac..8bb19058 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -8,7 +8,7 @@ macro_rules! make_error {
 
         #[derive(Display)] enum ErrorKind { $($name( $ty )),+ }
         pub struct Error { message: String, error: ErrorKind }
-        
+
         $(impl From<$ty> for Error {
             fn from(err: $ty) -> Self { Error::from((stringify!($name), err)) }
         })+
@@ -140,9 +140,9 @@ impl<'r> Responder<'r> for Error {
     }
 }
 
-///
-/// Error return macros
-///
+//
+// Error return macros
+//
 #[macro_export]
 macro_rules! err {
     ($msg:expr) => {{
diff --git a/src/mail.rs b/src/mail.rs
index 5858d1bf..567d64ab 100644
--- a/src/mail.rs
+++ b/src/mail.rs
@@ -1,8 +1,8 @@
-use native_tls::{Protocol, TlsConnector};
-use lettre::{Transport, SmtpTransport, SmtpClient, ClientTlsParameters, ClientSecurity};
-use lettre::smtp::ConnectionReuseParameters;
 use lettre::smtp::authentication::Credentials;
+use lettre::smtp::ConnectionReuseParameters;
+use lettre::{ClientSecurity, ClientTlsParameters, SmtpClient, SmtpTransport, Transport};
 use lettre_email::EmailBuilder;
+use native_tls::{Protocol, TlsConnector};
 
 use crate::MailConfig;
 use crate::CONFIG;
@@ -22,10 +22,7 @@ fn mailer(config: &MailConfig) -> SmtpTransport {
         ClientSecurity::None
     };
 
-    let smtp_client = SmtpClient::new(
-        (config.smtp_host.as_str(), config.smtp_port),
-        client_security,
-    ).unwrap();
+    let smtp_client = SmtpClient::new((config.smtp_host.as_str(), config.smtp_port), client_security).unwrap();
 
     let smtp_client = match (&config.smtp_username, &config.smtp_password) {
         (Some(user), Some(pass)) => smtp_client.credentials(Credentials::new(user.clone(), pass.clone())),
@@ -40,15 +37,20 @@ fn mailer(config: &MailConfig) -> SmtpTransport {
 
 pub fn send_password_hint(address: &str, hint: Option<String>, config: &MailConfig) -> EmptyResult {
     let (subject, body) = if let Some(hint) = hint {
-        ("Your master password hint",
-         format!(
-            "You (or someone) recently requested your master password hint.\n\n\
-            Your hint is: \"{}\"\n\n\
-            If you did not request your master password hint you can safely ignore this email.\n",
-            hint))
+        (
+            "Your master password hint",
+            format!(
+                "You (or someone) recently requested your master password hint.\n\n\
+                 Your hint is: \"{}\"\n\n\
+                 If you did not request your master password hint you can safely ignore this email.\n",
+                hint
+            ),
+        )
     } else {
-        ("Sorry, you have no password hint...",
-         "Sorry, you have not specified any password hint...\n".into())
+        (
+            "Sorry, you have no password hint...",
+            "Sorry, you have not specified any password hint...\n".into(),
+        )
     };
 
     let email = EmailBuilder::new()
@@ -65,8 +67,15 @@ pub fn send_password_hint(address: &str, hint: Option<String>, config: &MailConf
         .and(Ok(()))
 }
 
-pub fn send_invite(address: &str, org_id: &str, org_user_id: &str, token: &str, org_name: &str, config: &MailConfig) -> EmptyResult {
-    let (subject, body) = {
+pub fn send_invite(
+    address: &str,
+    org_id: &str,
+    org_user_id: &str,
+    token: &str,
+    org_name: &str,
+    config: &MailConfig,
+) -> EmptyResult {
+    let (subject, body) = {
         (format!("Join {}", &org_name),
         format!(
             "<html>
@@ -91,4 +100,4 @@ pub fn send_invite(address: &str, org_id: &str, org_user_id: &str, token: &str,
         .send(email.into())
         .map_err(|e| Error::new("Error sending invite email", e.to_string()))
         .and(Ok(()))
-}
\ No newline at end of file
+}
diff --git a/src/main.rs b/src/main.rs
index c0f43622..14408cee 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,26 +2,39 @@
 #![recursion_limit = "128"]
 #![allow(proc_macro_derive_resolution_fallback)] // TODO: Remove this when diesel update fixes warnings
 
-#[macro_use] extern crate rocket;
-#[macro_use] extern crate serde_derive;
-#[macro_use] extern crate serde_json;
-#[macro_use] extern crate log;
-#[macro_use] extern crate diesel;
-#[macro_use] extern crate diesel_migrations;
-#[macro_use] extern crate lazy_static;
-#[macro_use] extern crate derive_more;
-#[macro_use] extern crate num_derive;
-
-use std::{path::Path, process::{exit, Command}};
+#[macro_use]
+extern crate rocket;
+#[macro_use]
+extern crate serde_derive;
+#[macro_use]
+extern crate serde_json;
+#[macro_use]
+extern crate log;
+#[macro_use]
+extern crate diesel;
+#[macro_use]
+extern crate diesel_migrations;
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate derive_more;
+#[macro_use]
+extern crate num_derive;
+
 use rocket::Rocket;
+use std::{
+    path::Path,
+    process::{exit, Command},
+};
 
-#[macro_use] mod error;
-mod util;
+#[macro_use]
+mod error;
 mod api;
-mod db;
-mod crypto;
 mod auth;
+mod crypto;
+mod db;
 mod mail;
+mod util;
 
 fn init_rocket() -> Rocket {
     rocket::ignite()
@@ -67,20 +80,20 @@ fn main() {
 
 fn init_logging() -> Result<(), fern::InitError> {
     let mut logger = fern::Dispatch::new()
-    .format(|out, message, record| {
-        out.finish(format_args!(
-            "{}[{}][{}] {}",
-            chrono::Local::now().format("[%Y-%m-%d %H:%M:%S]"),
-            record.target(),
-            record.level(),
-            message
-        ))
-    })
-    .level(log::LevelFilter::Debug)
-    .level_for("hyper", log::LevelFilter::Warn)
-    .level_for("ws", log::LevelFilter::Info)
-    .level_for("multipart", log::LevelFilter::Info)
-    .chain(std::io::stdout());
+        .format(|out, message, record| {
+            out.finish(format_args!(
+                "{}[{}][{}] {}",
+                chrono::Local::now().format("[%Y-%m-%d %H:%M:%S]"),
+                record.target(),
+                record.level(),
+                message
+            ))
+        })
+        .level(log::LevelFilter::Debug)
+        .level_for("hyper", log::LevelFilter::Warn)
+        .level_for("ws", log::LevelFilter::Info)
+        .level_for("multipart", log::LevelFilter::Info)
+        .chain(std::io::stdout());
 
     if let Some(log_file) = CONFIG.log_file.as_ref() {
         logger = logger.chain(fern::log_file(log_file)?);
@@ -93,7 +106,9 @@ fn init_logging() -> Result<(), fern::InitError> {
 }
 
 #[cfg(not(feature = "enable_syslog"))]
-fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch { logger }
+fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
+    logger
+}
 
 #[cfg(feature = "enable_syslog")]
 fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
@@ -127,44 +142,60 @@ fn check_db() {
     // Turn on WAL in SQLite
     use diesel::RunQueryDsl;
     let connection = db::get_connection().expect("Can't conect to DB");
-    diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL");
+    diesel::sql_query("PRAGMA journal_mode=wal")
+        .execute(&connection)
+        .expect("Failed to turn on WAL");
 }
 
 fn check_rsa_keys() {
     // If the RSA keys don't exist, try to create them
-    if !util::file_exists(&CONFIG.private_rsa_key)
-        || !util::file_exists(&CONFIG.public_rsa_key) {
+    if !util::file_exists(&CONFIG.private_rsa_key) || !util::file_exists(&CONFIG.public_rsa_key) {
         info!("JWT keys don't exist, checking if OpenSSL is available...");
 
-        Command::new("openssl")
-        .arg("version")
-        .output().unwrap_or_else(|_| {
+        Command::new("openssl").arg("version").output().unwrap_or_else(|_| {
            info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
            exit(1);
        });
 
         info!("OpenSSL detected, creating keys...");
 
-        let mut success = Command::new("openssl").arg("genrsa")
-            .arg("-out").arg(&CONFIG.private_rsa_key_pem)
-            .output().expect("Failed to create private pem file")
-            .status.success();
-
-        success &= Command::new("openssl").arg("rsa")
-            .arg("-in").arg(&CONFIG.private_rsa_key_pem)
-            .arg("-outform").arg("DER")
-            .arg("-out").arg(&CONFIG.private_rsa_key)
-            .output().expect("Failed to create private der file")
-            .status.success();
-
-        success &= Command::new("openssl").arg("rsa")
-            .arg("-in").arg(&CONFIG.private_rsa_key)
-            .arg("-inform").arg("DER")
+        let mut success = Command::new("openssl")
+            .arg("genrsa")
+            .arg("-out")
+            .arg(&CONFIG.private_rsa_key_pem)
+            .output()
+            .expect("Failed to create private pem file")
+            .status
+            .success();
+
+        success &= Command::new("openssl")
+            .arg("rsa")
+            .arg("-in")
+            .arg(&CONFIG.private_rsa_key_pem)
+            .arg("-outform")
+            .arg("DER")
+            .arg("-out")
+            .arg(&CONFIG.private_rsa_key)
+            .output()
+            .expect("Failed to create private der file")
+            .status
+            .success();
+
+        success &= Command::new("openssl")
+            .arg("rsa")
+            .arg("-in")
+            .arg(&CONFIG.private_rsa_key)
+            .arg("-inform")
+            .arg("DER")
             .arg("-RSAPublicKey_out")
-            .arg("-outform").arg("DER")
-            .arg("-out").arg(&CONFIG.public_rsa_key)
-            .output().expect("Failed to create public der file")
-            .status.success();
+            .arg("-outform")
+            .arg("DER")
+            .arg("-out")
+            .arg(&CONFIG.public_rsa_key)
+            .output()
+            .expect("Failed to create public der file")
+            .status
+            .success();
 
         if success {
             info!("Keys created correctly.");
@@ -219,13 +250,7 @@ impl MailConfig {
         });
 
         let smtp_ssl = get_env_or("SMTP_SSL", true);
-        let smtp_port = get_env("SMTP_PORT").unwrap_or_else(||
-            if smtp_ssl {
-                587u16
-            } else {
-                25u16
-            }
-        );
+        let smtp_port = get_env("SMTP_PORT").unwrap_or_else(|| if smtp_ssl { 587u16 } else { 25u16 });
 
         let smtp_username = get_env("SMTP_USERNAME");
         let smtp_password = get_env("SMTP_PASSWORD").or_else(|| {
@@ -319,8 +344,12 @@ impl Config {
             web_vault_enabled: get_env_or("WEB_VAULT_ENABLED", true),
 
             websocket_enabled: get_env_or("WEBSOCKET_ENABLED", false),
-            websocket_url: format!("{}:{}", get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()), get_env_or("WEBSOCKET_PORT", 3012)),
-            
+            websocket_url: format!(
+                "{}:{}",
+                get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()),
+                get_env_or("WEBSOCKET_PORT", 3012)
+            ),
+
             extended_logging: get_env_or("EXTENDED_LOGGING", true),
             log_file: get_env("LOG_FILE"),
diff --git a/src/util.rs b/src/util.rs
index 2e01eb10..5d84914c 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -1,10 +1,10 @@
-///
-/// Web Headers
-///
+//
+// Web Headers
+//
 use rocket::fairing::{Fairing, Info, Kind};
 use rocket::{Request, Response};
 
-pub struct AppHeaders ();
+pub struct AppHeaders();
 
 impl Fairing for AppHeaders {
     fn info(&self) -> Info {
@@ -29,10 +29,9 @@ impl Fairing for AppHeaders {
     }
 }
 
-
-///
-/// File handling
-///
+//
+// File handling
+//
 use std::fs::{self, File};
 use std::io::{Read, Result as IOResult};
 use std::path::Path;
@@ -43,7 +42,7 @@ pub fn file_exists(path: &str) -> bool {
 
 pub fn read_file(path: &str) -> IOResult<Vec<u8>> {
     let mut contents: Vec<u8> = Vec::new();
-    
+
     let mut file = File::open(Path::new(path))?;
     file.read_to_end(&mut contents)?;
 
@@ -75,7 +74,7 @@ pub fn get_display_size(size: i32) -> String {
         } else {
             break;
         }
-    };
+    }
 
     // Round to two decimals
     size = (size * 100.).round() / 100.;
@@ -86,13 +85,12 @@ pub fn get_uuid() -> String {
     uuid::Uuid::new_v4().to_string()
 }
 
+//
+// String util methods
+//
-
-///
-/// String util methods
-///
-
-use std::str::FromStr;
 use std::ops::Try;
+use std::str::FromStr;
 
 pub fn upcase_first(s: &str) -> String {
     let mut c = s.chars();
@@ -102,7 +100,11 @@ pub fn upcase_first(s: &str) -> String {
     }
 }
 
-pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option<T> where S: AsRef<str>, T: FromStr {
+pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option<T>
+where
+    S: AsRef<str>,
+    T: FromStr,
+{
     if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
         Some(value)
     } else {
@@ -110,7 +112,11 @@ pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option
     }
 }
 
-pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error = U>, default: T) -> T where S: AsRef<str>, T: FromStr {
+pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error = U>, default: T) -> T
+where
+    S: AsRef<str>,
+    T: FromStr,
+{
     if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
         value
     } else {
@@ -118,24 +124,29 @@ pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error = U>, default:
     }
 }
 
-
-///
-/// Env methods
-///
+//
+// Env methods
+//
 use std::env;
 
-pub fn get_env<V>(key: &str) -> Option<V> where V: FromStr {
+pub fn get_env<V>(key: &str) -> Option<V>
+where
+    V: FromStr,
+{
     try_parse_string(env::var(key))
 }
 
-pub fn get_env_or<V>(key: &str, default: V) -> V where V: FromStr {
+pub fn get_env_or<V>(key: &str, default: V) -> V
+where
+    V: FromStr,
+{
     try_parse_string_or(env::var(key), default)
 }
 
-///
-/// Date util methods
-///
+//
+// Date util methods
+//
 
 use chrono::NaiveDateTime;
 
@@ -145,9 +156,9 @@ pub fn format_date(date: &NaiveDateTime) -> String {
     date.format(DATETIME_FORMAT).to_string()
 }
 
-///
-/// Deserialization methods
-///
+//
+// Deserialization methods
+//
 
 use std::fmt;
 
@@ -163,10 +174,11 @@ pub struct UpCase<T: DeserializeOwned> {
     pub data: T,
 }
 
-/// https://github.com/serde-rs/serde/issues/586
+// https://github.com/serde-rs/serde/issues/586
 pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
-    where T: DeserializeOwned,
-          D: Deserializer<'de>
+where
+    T: DeserializeOwned,
+    D: Deserializer<'de>,
 {
     let d = deserializer.deserialize_any(UpCaseVisitor)?;
     T::deserialize(d).map_err(de::Error::custom)
@@ -182,7 +194,8 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
     }
 
     fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
-        where A: MapAccess<'de>
+    where
+        A: MapAccess<'de>,
     {
         let mut result_map = JsonMap::new();
 
@@ -194,7 +207,9 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
     }
 
     fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-        where A: SeqAccess<'de> {
+    where
+        A: SeqAccess<'de>,
+    {
         let mut result_seq = Vec::<Value>::new();
 
         while let Some(value) = seq.next_element()? {
@@ -208,13 +223,12 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
 fn upcase_value(value: &Value) -> Value {
     if let Some(map) = value.as_object() {
         let mut new_value = json!({});
-        
+
         for (key, val) in map {
             let processed_key = _process_key(key);
             new_value[processed_key] = upcase_value(val);
        }
         new_value
-
     } else if let Some(array) = value.as_array() {
         // Initialize array with null values
         let mut new_value = json!(vec![Value::Null; array.len()]);
@@ -223,7 +237,6 @@ fn upcase_value(value: &Value) -> Value {
             new_value[index] = upcase_value(val);
         }
         new_value
-
     } else {
         value.clone()
     }
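
Note: the following is an illustrative, self-contained sketch, not part of the patch, of how the env-var helpers reformatted in src/util.rs above are meant to be used, for example by the SMTP_PORT fallback in src/main.rs. It deliberately replaces the unstable std::ops::Try-based generics with plain Option/Result so it compiles on stable Rust; the helper names mirror the patch, everything else here is an assumption for demonstration only.

    use std::env;
    use std::str::FromStr;

    // Simplified stand-ins for the util.rs helpers above: read an env var and
    // parse it into any FromStr type, falling back to a default when the
    // variable is missing or unparsable.
    fn get_env<V: FromStr>(key: &str) -> Option<V> {
        env::var(key).ok().and_then(|s| s.parse().ok())
    }

    fn get_env_or<V: FromStr>(key: &str, default: V) -> V {
        get_env(key).unwrap_or(default)
    }

    fn main() {
        // Mirrors the MailConfig logic shown in the main.rs hunk above:
        // the SMTP_PORT default depends on whether SMTP_SSL is enabled
        // (587 for the submission port, 25 otherwise).
        let smtp_ssl: bool = get_env_or("SMTP_SSL", true);
        let smtp_port: u16 = get_env("SMTP_PORT").unwrap_or(if smtp_ssl { 587 } else { 25 });
        println!("smtp_ssl={} smtp_port={}", smtp_ssl, smtp_port);
    }

Reading SMTP_SSL first and only then choosing the port default keeps the two settings consistent, which is the same ordering the patch preserves while collapsing the if/else onto one line.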