Merge branch 'BlackDex-issue-3048'

pull/3065/head
Daniel García 2 years ago
commit 525979d5d9
No known key found for this signature in database
GPG Key ID: FC8A7D14C3CD543A

@ -205,7 +205,7 @@ pub struct CipherData {
*/ */
pub Type: i32, pub Type: i32,
pub Name: String, pub Name: String,
Notes: Option<String>, pub Notes: Option<String>,
Fields: Option<Value>, Fields: Option<Value>,
// Only one of these should exist, depending on type // Only one of these should exist, depending on type
@ -542,6 +542,12 @@ async fn post_ciphers_import(
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
// Validate the import before continuing
// Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
Cipher::validate_notes(&data.Ciphers)?;
// Read and create the folders // Read and create the folders
let mut folders: Vec<_> = Vec::new(); let mut folders: Vec<_> = Vec::new();
for folder in data.Folders.into_iter() { for folder in data.Folders.into_iter() {

@ -7,7 +7,7 @@ mod organizations;
mod sends; mod sends;
pub mod two_factor; pub mod two_factor;
pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType}; pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job}; pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
pub use events::{event_cleanup_job, log_event, log_user_event}; pub use events::{event_cleanup_job, log_event, log_user_event};
pub use sends::purge_sends; pub use sends::purge_sends;

@ -1378,6 +1378,12 @@ async fn post_org_import(
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
let org_id = query.organization_id; let org_id = query.organization_id;
// Validate the import before continuing
// Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
Cipher::validate_notes(&data.Ciphers)?;
let mut collections = Vec::new(); let mut collections = Vec::new();
for coll in data.Collections { for coll in data.Collections {
let collection = Collection::new(org_id.clone(), coll.Name); let collection = Collection::new(org_id.clone(), coll.Name);

@ -6,7 +6,7 @@ use super::{
Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization, Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization,
}; };
use crate::api::core::CipherSyncData; use crate::api::core::{CipherData, CipherSyncData};
use std::borrow::Cow; use std::borrow::Cow;
@ -73,6 +73,33 @@ impl Cipher {
reprompt: None, reprompt: None,
} }
} }
/// Pre-validates the `Notes` field of every cipher in an import batch.
///
/// Bitwarden rejects an entire import when any single item is invalid, so
/// this runs before any rows are written. Returns `Ok(())` when all notes
/// are within bounds, otherwise an `err_json!` error whose payload mirrors
/// upstream Bitwarden's ModelState response (`message`, `validationErrors`,
/// `object`) so clients can point at the exact offending entry.
pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
    // Upstream limit for an encrypted Notes value. `len()` counts bytes,
    // which matches here because the encrypted cipher-string format is ASCII.
    const NOTES_MAX_LEN: usize = 10_000;

    let mut validation_errors = serde_json::Map::new();
    for (index, cipher) in cipher_data.iter().enumerate() {
        if let Some(note) = &cipher.Notes {
            if note.len() > NOTES_MAX_LEN {
                // Key format "Ciphers[N].Notes" matches the upstream
                // ModelState key so existing clients can parse it.
                validation_errors.insert(
                    format!("Ciphers[{index}].Notes"),
                    // json! builds the same one-element array as the previous
                    // serde_json::to_value(...).unwrap(), without the unwrap.
                    json!([
                        "The field Notes exceeds the maximum encrypted value length of 10000 characters."
                    ]),
                );
            }
        }
    }

    if !validation_errors.is_empty() {
        let err_json = json!({
            "message": "The model state is invalid.",
            "validationErrors" : validation_errors,
            "object": "error"
        });
        err_json!(err_json, "Import validation errors")
    } else {
        Ok(())
    }
}
} }
use crate::db::DbConn; use crate::db::DbConn;

Loading…
Cancel
Save