Merge branch 'RealOrangeOne-fmt'

pull/1636/head^2
Daniel García 4 years ago
commit af2235bf88

@@ -89,7 +89,7 @@ jobs:
       with:
         profile: minimal
         target: ${{ matrix.target-triple }}
-        components: clippy
+        components: clippy, rustfmt
       # End Uses the rust-toolchain file to determine version
@@ -111,6 +111,15 @@ jobs:
       # End Run cargo clippy

+      # Run cargo fmt
+      - name: '`cargo fmt`'
+        uses: actions-rs/cargo@v1
+        with:
+          command: fmt
+          args: --all -- --check
+      # End Run cargo fmt

       # Build the binary
       - name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
         uses: actions-rs/cargo@v1

@@ -1,5 +1,5 @@
-use std::process::Command;
 use std::env;
+use std::process::Command;

 fn main() {
@@ -11,7 +11,9 @@ fn main() {
     println!("cargo:rustc-cfg=postgresql");
     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
-    compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
+    compile_error!(
+        "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
+    );

     if let Ok(version) = env::var("BWRS_VERSION") {
         println!("cargo:rustc-env=BWRS_VERSION={}", version);

@@ -1,2 +1,7 @@
 version = "Two"
+edition = "2018"
 max_width = 120
+newline_style = "Unix"
+use_small_heuristics = "Off"
+struct_lit_single_line = false
+overflow_delimited_expr = true
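As a side note on the options added above: with max_width = 120 and use_small_heuristics = "Off", rustfmt keeps an expression on a single line whenever it fits in 120 columns, while struct_lit_single_line = false always spreads struct literals over multiple lines; both effects show up repeatedly in the hunks below. A minimal sketch of the resulting style (hypothetical names, not code from this repository):

struct Host {
    host: String,
}

fn example(items: &[String]) -> (usize, Host) {
    // use_small_heuristics = "Off" with max_width = 120: this chain stays on one line because it fits.
    let total: usize = items.iter().map(|i| i.len()).sum();

    // struct_lit_single_line = false: the literal is always multi-line,
    // mirroring the `Host { host }` changes further down in this diff.
    let host = Host {
        host: String::from("example.invalid"),
    };

    (total, host)
}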

@@ -3,7 +3,6 @@ use serde::de::DeserializeOwned;
 use serde_json::Value;
 use std::{env, time::Duration};

 use rocket::{
     http::{Cookie, Cookies, SameSite},
     request::{self, FlashMessage, Form, FromRequest, Outcome, Request},
@@ -19,7 +18,7 @@ use crate::{
     db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
     error::{Error, MapResult},
     mail,
-    util::{format_naive_datetime_local, get_display_size, is_running_in_docker, get_reqwest_client},
+    util::{format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker},
     CONFIG,
 };
@@ -64,11 +63,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| {
         .unwrap_or("Unknown")
 });

-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| {
-    DbConnType::from_url(&CONFIG.database_url())
-        .map(|t| t == DbConnType::sqlite)
-        .unwrap_or(false)
-});
+static CAN_BACKUP: Lazy<bool> =
+    Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));

 #[get("/")]
 fn admin_disabled() -> &'static str {
@@ -141,7 +137,12 @@ fn admin_url(referer: Referer) -> String {
 fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
     // If there is an error, show it
     let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
-    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});
+    let json = json!({
+        "page_content": "admin/login",
+        "version": VERSION,
+        "error": msg,
+        "urlpath": CONFIG.domain_path()
+    });

     // Return the page
     let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
@@ -165,10 +166,7 @@ fn post_admin_login(
     // If the token is invalid, redirect to login page
     if !_validate_token(&data.token) {
         error!("Invalid admin token. IP: {}", ip.ip);
-        Err(Flash::error(
-            Redirect::to(admin_url(referer)),
-            "Invalid admin token, please try again.",
-        ))
+        Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again."))
     } else {
         // If the token received is valid, generate JWT and save it as a cookie
         let claims = generate_admin_claims();
@@ -328,7 +326,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
 fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
     let users = User::get_all(&conn);
     let dt_fmt = "%Y-%m-%d %H:%M:%S %Z";
-    let users_json: Vec<Value> = users.iter()
+    let users_json: Vec<Value> = users
+        .iter()
         .map(|u| {
             let mut usr = u.to_json(&conn);
             usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
@@ -338,7 +337,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
             usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt));
             usr["last_active"] = match u.last_active(&conn) {
                 Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)),
-                None => json!("Never")
+                None => json!("Never"),
             };
             usr
         })
@@ -423,7 +422,6 @@ fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: D
     user_to_edit.save(&conn)
 }

 #[post("/users/update_revision")]
 fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
     User::update_all_revisions(&conn)
@@ -432,7 +430,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
 #[get("/organizations/overview")]
 fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
     let organizations = Organization::get_all(&conn);
-    let organizations_json: Vec<Value> = organizations.iter()
+    let organizations_json: Vec<Value> = organizations
+        .iter()
         .map(|o| {
             let mut org = o.to_json();
             org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
@@ -471,22 +470,13 @@ struct GitCommit {
 fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
     let github_api = get_reqwest_client();

-    Ok(github_api
-        .get(url)
-        .timeout(Duration::from_secs(10))
-        .send()?
-        .error_for_status()?
-        .json::<T>()?)
+    Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::<T>()?)
 }

 fn has_http_access() -> bool {
     let http_access = get_reqwest_client();

-    match http_access
-        .head("https://github.com/dani-garcia/bitwarden_rs")
-        .timeout(Duration::from_secs(10))
-        .send()
-    {
+    match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() {
         Ok(r) => r.status().is_success(),
         _ => false,
     }
@@ -499,15 +489,14 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     use std::net::ToSocketAddrs;

     // Get current running versions
-    let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
-        Ok(s) => serde_json::from_str(&s)?,
-        _ => {
-            match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
-                Ok(s) => serde_json::from_str(&s)?,
-                _ => {
-                    WebVaultVersion{version: String::from("Version file missing")}
-                },
-            }
-        },
-    };
+    let web_vault_version: WebVaultVersion =
+        match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
+            Ok(s) => serde_json::from_str(&s)?,
+            _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
+                Ok(s) => serde_json::from_str(&s)?,
+                _ => WebVaultVersion {
+                    version: String::from("Version file missing"),
+                },
+            },
+        };
@@ -529,7 +518,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
     let (latest_release, latest_commit, latest_web_build) = if has_http_access {
         (
-            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") {
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest")
+            {
                 Ok(r) => r.tag_name,
                 _ => "-".to_string(),
             },
@@ -545,7 +535,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
             if running_within_docker {
                 "-".to_string()
             } else {
-                match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") {
+                match get_github_api::<GitRelease>(
+                    "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
+                ) {
                     Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
                     _ => "-".to_string(),
                 }
@@ -557,7 +549,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     let ip_header_name = match &ip_header.0 {
         Some(h) => h,
-        _ => ""
+        _ => "",
     };

     let diagnostics_json = json!({

@@ -320,15 +320,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
                 err!("The cipher is not owned by the user")
             }

-            update_cipher_from_data(
-                &mut saved_cipher,
-                cipher_data,
-                &headers,
-                false,
-                &conn,
-                &nt,
-                UpdateType::CipherUpdate,
-            )?
+            update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?
         }

         // Update user data

@@ -100,24 +100,18 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
     let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();

     let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
-    let collections_json: Vec<Value> = collections.iter()
-        .map(|c| c.to_json_details(&headers.user.uuid, &conn))
-        .collect();
+    let collections_json: Vec<Value> =
+        collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect();

     let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
     let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();

     let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
-    let ciphers_json: Vec<Value> = ciphers
-        .iter()
-        .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
-        .collect();
+    let ciphers_json: Vec<Value> =
+        ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();

     let sends = Send::find_by_user(&headers.user.uuid, &conn);
-    let sends_json: Vec<Value> = sends
-        .iter()
-        .map(|s| s.to_json())
-        .collect();
+    let sends_json: Vec<Value> = sends.iter().map(|s| s.to_json()).collect();

     let domains_json = if data.exclude_domains {
         Value::Null
@@ -142,10 +136,8 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
 fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
     let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
-    let ciphers_json: Vec<Value> = ciphers
-        .iter()
-        .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
-        .collect();
+    let ciphers_json: Vec<Value> =
+        ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();

     Json(json!({
       "Data": ciphers_json,
@@ -288,17 +280,12 @@ fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt
 /// allowed to delete or share such ciphers to an org, however.
 ///
 /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership
-fn enforce_personal_ownership_policy(
-    data: &CipherData,
-    headers: &Headers,
-    conn: &DbConn
-) -> EmptyResult {
+fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult {
     if data.OrganizationId.is_none() {
         let user_uuid = &headers.user.uuid;
         let policy_type = OrgPolicyType::PersonalOwnership;
         if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
-            err!("Due to an Enterprise Policy, you are restricted from \
-                 saving items to your personal vault.")
+            err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.")
         }
     }
     Ok(())
@@ -317,11 +304,12 @@ pub fn update_cipher_from_data(
     // Check that the client isn't updating an existing cipher with stale data.
     if let Some(dt) = data.LastKnownRevisionDate {
-        match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format
-            Err(err) =>
-                warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
-            Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 =>
-                err!("The client copy of this cipher is out of date. Resync the client and try again."),
+        match NaiveDateTime::parse_from_str(&dt, "%+") {
+            // ISO 8601 format
+            Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
+            Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
+                err!("The client copy of this cipher is out of date. Resync the client and try again.")
+            }
             Ok(_) => (),
         }
     }
@@ -394,10 +382,7 @@ pub fn update_cipher_from_data(
 // But, we at least know we do not need to store and return this specific key.
 fn _clean_cipher_data(mut json_data: Value) -> Value {
     if json_data.is_array() {
-        json_data.as_array_mut()
-            .unwrap()
-            .iter_mut()
-            .for_each(|ref mut f| {
+        json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| {
             f.as_object_mut().unwrap().remove("Response");
         });
     };
@@ -421,13 +406,13 @@ pub fn update_cipher_from_data(
                 data["Uris"] = _clean_cipher_data(data["Uris"].clone());
             }
             data
-        },
+        }
         None => err!("Data missing"),
     };

     cipher.name = data.Name;
     cipher.notes = data.Notes;
-    cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string() );
+    cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string());
     cipher.data = type_data.to_string();
     cipher.password_history = data.PasswordHistory.map(|f| f.to_string());
@@ -602,11 +587,8 @@ fn post_collections_admin(
     }

     let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
-    let current_collections: HashSet<String> = cipher
-        .get_collections(&headers.user.uuid, &conn)
-        .iter()
-        .cloned()
-        .collect();
+    let current_collections: HashSet<String> =
+        cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect();

     for collection in posted_collections.symmetric_difference(&current_collections) {
         match Collection::find_by_uuid(&collection, &conn) {
@@ -842,7 +824,8 @@ fn post_attachment(
         let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
         let path = base_path.join(&file_name);

-        let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
+        let size =
+            match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
             SaveResult::Full(SavedData::File(_, size)) => size as i32,
             SaveResult::Full(other) => {
                 std::fs::remove_file(path).ok();
@@ -994,12 +977,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase<Value>, headers: Headers, conn:
 }

 #[post("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_post_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_post_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     delete_cipher_selected_post(data, headers, conn, nt)
 }

 #[put("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_put_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_put_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     delete_cipher_selected_put(data, headers, conn, nt)
 }
@@ -1150,7 +1143,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del
     Ok(())
 }

-fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult {
+fn _delete_multiple_ciphers(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    soft_delete: bool,
+    nt: Notify,
+) -> EmptyResult {
     let data: Value = data.into_inner().data;

     let uuids = match data.get("Ids") {
@@ -1202,7 +1201,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: &Headers, conn: &
     for uuid in uuids {
         match _restore_cipher_by_uuid(uuid, headers, conn, nt) {
             Ok(json) => ciphers.push(json.into_inner()),
-            err => return err
+            err => return err,
         }
     }

@@ -8,15 +8,7 @@ use crate::{
 };

 pub fn routes() -> Vec<rocket::Route> {
-    routes![
-        get_folders,
-        get_folder,
-        post_folders,
-        post_folder,
-        put_folder,
-        delete_folder_post,
-        delete_folder,
-    ]
+    routes![get_folders, get_folder, post_folders, post_folder, put_folder, delete_folder_post, delete_folder,]
 }

 #[get("/folders")]

@@ -2,21 +2,15 @@ mod accounts;
 mod ciphers;
 mod folders;
 mod organizations;
-pub mod two_factor;
 mod sends;
+pub mod two_factor;

 pub use ciphers::purge_trashed_ciphers;
 pub use sends::purge_sends;

 pub fn routes() -> Vec<Route> {
-    let mut mod_routes = routes![
-        clear_device_token,
-        put_device_token,
-        get_eq_domains,
-        post_eq_domains,
-        put_eq_domains,
-        hibp_breach,
-    ];
+    let mut mod_routes =
+        routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,];

     let mut routes = Vec::new();
     routes.append(&mut accounts::routes());
@@ -33,9 +27,9 @@ pub fn routes() -> Vec<Route> {
 //
 // Move this somewhere else
 //
+use rocket::response::Response;
 use rocket::Route;
 use rocket_contrib::json::Json;
-use rocket::response::Response;
 use serde_json::Value;

 use crate::{
@@ -156,10 +150,7 @@ fn hibp_breach(username: String) -> JsonResult {
     if let Some(api_key) = crate::CONFIG.hibp_api_key() {
         let hibp_client = get_reqwest_client();

-        let res = hibp_client
-            .get(&url)
-            .header("hibp-api-key", api_key)
-            .send()?;
+        let res = hibp_client.get(&url).header("hibp-api-key", api_key).send()?;

         // If we get a 404, return a 404, it means no breached accounts
         if res.status() == 404 {

@@ -5,7 +5,7 @@ use serde_json::Value;
 use crate::{
     api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
-    auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose},
+    auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
     db::{models::*, DbConn},
     mail, CONFIG,
 };
@@ -333,7 +333,12 @@ fn post_organization_collection_delete_user(
 }

 #[delete("/organizations/<org_id>/collections/<col_id>")]
-fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult {
+fn delete_organization_collection(
+    org_id: String,
+    col_id: String,
+    _headers: ManagerHeaders,
+    conn: DbConn,
+) -> EmptyResult {
     match Collection::find_by_uuid(&col_id, &conn) {
         None => err!("Collection not found"),
         Some(collection) => {
@@ -426,9 +431,7 @@ fn put_collection_users(
             continue;
         }

-        CollectionUser::save(&user.user_uuid, &coll_id,
-            d.ReadOnly, d.HidePasswords,
-            &conn)?;
+        CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?;
     }

     Ok(())
@@ -443,10 +446,8 @@ struct OrgIdData {
 #[get("/ciphers/organization-details?<data..>")]
 fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> Json<Value> {
     let ciphers = Cipher::find_by_org(&data.organization_id, &conn);
-    let ciphers_json: Vec<Value> = ciphers
-        .iter()
-        .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
-        .collect();
+    let ciphers_json: Vec<Value> =
+        ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();

     Json(json!({
       "Data": ciphers_json,
@@ -544,9 +545,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
                 match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                     None => err!("Collection not found in Organization"),
                     Some(collection) => {
-                        CollectionUser::save(&user.uuid, &collection.uuid,
-                            col.ReadOnly, col.HidePasswords,
-                            &conn)?;
+                        CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?;
                     }
                 }
             }
@@ -801,9 +800,13 @@ fn edit_user(
                 match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                     None => err!("Collection not found in Organization"),
                     Some(collection) => {
-                        CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid,
-                            col.ReadOnly, col.HidePasswords,
-                            &conn)?;
+                        CollectionUser::save(
+                            &user_to_edit.user_uuid,
+                            &collection.uuid,
+                            col.ReadOnly,
+                            col.HidePasswords,
+                            &conn,
+                        )?;
                     }
                 }
             }
@@ -899,15 +902,7 @@ fn post_org_import(
         .into_iter()
         .map(|cipher_data| {
             let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
-            update_cipher_from_data(
-                &mut cipher,
-                cipher_data,
-                &headers,
-                false,
-                &conn,
-                &nt,
-                UpdateType::CipherCreate,
-            )
+            update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate)
             .ok();
             cipher
         })
@@ -989,7 +984,13 @@ struct PolicyData {
 }

 #[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
-fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+fn put_policy(
+    org_id: String,
+    pol_type: i32,
+    data: Json<PolicyData>,
+    _headers: AdminHeaders,
+    conn: DbConn,
+) -> JsonResult {
     let data: PolicyData = data.into_inner();

     let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
@@ -1127,8 +1128,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con
         // If user is not part of the organization, but it exists
         } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() {
-            if let Some (user) = User::find_by_mail(&user_data.Email, &conn) {
+            if let Some(user) = User::find_by_mail(&user_data.Email, &conn) {
                 let user_org_status = if CONFIG.mail_enabled() {
                     UserOrgStatus::Invited as i32
                 } else {
@@ -1164,7 +1164,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con
     // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true)
     if data.OverwriteExisting {
         for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) {
-            if let Some (user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) {
+            if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) {
                 if !data.Users.iter().any(|u| u.Email == user_email) {
                     user_org.delete(&conn)?;
                 }

@@ -16,15 +16,7 @@ use crate::{
 const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";

 pub fn routes() -> Vec<rocket::Route> {
-    routes![
-        post_send,
-        post_send_file,
-        post_access,
-        post_access_file,
-        put_send,
-        delete_send,
-        put_remove_password
-    ]
+    routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password]
 }

 pub fn purge_sends(pool: DbPool) {
@@ -171,13 +163,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
         None => err!("No model entry present"),
     };

-    let size = match data_entry
-        .data
-        .save()
-        .memory_threshold(0)
-        .size_limit(size_limit)
-        .with_path(&file_path)
-    {
+    let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) {
         SaveResult::Full(SavedData::File(_, size)) => size as i32,
         SaveResult::Full(other) => {
             std::fs::remove_file(&file_path).ok();
@@ -198,10 +184,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
     if let Some(o) = data_value.as_object_mut() {
         o.insert(String::from("Id"), Value::String(file_id));
         o.insert(String::from("Size"), Value::Number(size.into()));
-        o.insert(
-            String::from("SizeName"),
-            Value::String(crate::util::get_display_size(size)),
-        );
+        o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size)));
     }
     send.data = serde_json::to_string(&data_value)?;

@@ -17,11 +17,7 @@ use crate::{
 pub use crate::config::CONFIG;

 pub fn routes() -> Vec<Route> {
-    routes![
-        generate_authenticator,
-        activate_authenticator,
-        activate_authenticator_put,
-    ]
+    routes![generate_authenticator, activate_authenticator, activate_authenticator_put,]
 }

 #[post("/two-factor/get-authenticator", data = "<data>")]
@@ -141,7 +137,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
     // The amount of steps back and forward in time
     // Also check if we need to disable time drifted TOTP codes.
     // If that is the case, we set the steps to 0 so only the current TOTP is valid.
-    let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };
+    let steps = !CONFIG.authenticator_disable_time_drift() as i64;

     for step in -steps..=steps {
         let time_step = current_timestamp / 30i64 + step;
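One semantic note on the `steps` change above: the one-liner only reproduces the old if/else because a bool cast to i64 yields 0 or 1, so negating the flag first gives 0 steps when drift checking is disabled and 1 otherwise. A small self-contained check (hypothetical flag values, not repository code):

fn main() {
    for disable_time_drift in [true, false] {
        // Old formulation: 0 steps when drift checking is disabled, 1 otherwise.
        let steps_old: i64 = if disable_time_drift { 0 } else { 1 };
        // New formulation from the hunk above: `!flag as i64` produces the same 0 or 1.
        let steps_new = !disable_time_drift as i64;
        assert_eq!(steps_old, steps_new);
    }
}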
@@ -163,22 +159,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
             twofactor.save(&conn)?;
             return Ok(());
         } else if generated == totp_code && time_step <= twofactor.last_used as i64 {
-            warn!(
-                "This or a TOTP code within {} steps back and forward has already been used!",
-                steps
-            );
-            err!(format!(
-                "Invalid TOTP code! Server time: {} IP: {}",
-                current_time.format("%F %T UTC"),
-                ip.ip
-            ));
+            warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
+            err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
         }
     }

     // Else no valide code received, deny access
-    err!(format!(
-        "Invalid TOTP code! Server time: {} IP: {}",
-        current_time.format("%F %T UTC"),
-        ip.ip
-    ));
+    err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
 }

@@ -60,7 +60,11 @@ impl DuoData {
         ik.replace_range(digits.., replaced);
         sk.replace_range(digits.., replaced);

-        Self { host, ik, sk }
+        Self {
+            host,
+            ik,
+            sk,
+        }
     }
 }
@@ -200,7 +204,8 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em
     let client = get_reqwest_client();

-    client.request(m, &url)
+    client
+        .request(m, &url)
         .basic_auth(username, Some(password))
         .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)")
         .header(header::DATE, date)

@@ -125,11 +125,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -
     let twofactor_data = EmailTokenData::new(data.Email, generated_token);

     // Uses EmailVerificationChallenge as type to show that it's not verified yet.
-    let twofactor = TwoFactor::new(
-        user.uuid,
-        TwoFactorType::EmailVerificationChallenge,
-        twofactor_data.to_json(),
-    );
+    let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
     twofactor.save(&conn)?;

     mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;
@@ -186,7 +182,8 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
 /// Validate the email code when used as TwoFactor token mechanism
 pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
     let mut email_data = EmailTokenData::from_json(&data)?;
-    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)
+        .map_res("Two factor not found")?;
     let issued_token = match &email_data.last_token {
         Some(t) => t,
         _ => err!("No token available"),

@@ -20,13 +20,7 @@ pub mod u2f;
 pub mod yubikey;

 pub fn routes() -> Vec<Route> {
-    let mut routes = routes![
-        get_twofactor,
-        get_recover,
-        recover,
-        disable_twofactor,
-        disable_twofactor_put,
-    ];
+    let mut routes = routes![get_twofactor, get_recover, recover, disable_twofactor, disable_twofactor_put,];

     routes.append(&mut authenticator::routes());
     routes.append(&mut duo::routes());

@@ -28,13 +28,7 @@ static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.dom
 static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone()));

 pub fn routes() -> Vec<Route> {
-    routes![
-        generate_u2f,
-        generate_u2f_challenge,
-        activate_u2f,
-        activate_u2f_put,
-        delete_u2f,
-    ]
+    routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,]
 }

 #[post("/two-factor/get-u2f", data = "<data>")]
@@ -161,10 +155,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
     let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?;

-    let error_code = response
-        .error_code
-        .clone()
-        .map_or("0".into(), NumberOrString::into_string);
+    let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string);

     if error_code != "0" {
         err!("Error registering U2F token")
@@ -300,20 +291,13 @@ fn _old_parse_registrations(registations: &str) -> Vec<Registration> {
     let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data");

-    regs.into_iter()
-        .map(|r| serde_json::from_value(r).unwrap())
-        .map(|Helper(r)| r)
-        .collect()
+    regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect()
 }

 pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> {
     let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn);

-    let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?
-        .1
-        .into_iter()
-        .map(|r| r.reg)
-        .collect();
+    let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect();

     if registrations.is_empty() {
         err!("No U2F devices registered")

@@ -12,7 +12,11 @@ use regex::Regex;
 use reqwest::{blocking::Client, blocking::Response, header, Url};
 use rocket::{http::ContentType, http::Cookie, response::Content, Route};

-use crate::{error::Error, util::{Cached, get_reqwest_client_builder}, CONFIG};
+use crate::{
+    error::Error,
+    util::{get_reqwest_client_builder, Cached},
+    CONFIG,
+};

 pub fn routes() -> Vec<Route> {
     routes![icon]
@@ -25,7 +29,12 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
     default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8"));
     default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache"));
     default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache"));
-    default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8"));
+    default_headers.insert(
+        header::ACCEPT,
+        header::HeaderValue::from_static(
+            "text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8",
+        ),
+    );

     // Reuse the client between requests
     get_reqwest_client_builder()
@@ -49,13 +58,16 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
     if !is_valid_domain(&domain) {
         warn!("Invalid domain: {}", domain);
-        return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl());
+        return Cached::ttl(
+            Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
+            CONFIG.icon_cache_negttl(),
+        );
     }

     match get_icon(&domain) {
         Some((icon, icon_type)) => {
             Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
-        },
+        }
         _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
     }
 }
@@ -77,7 +89,10 @@ fn is_valid_domain(domain: &str) -> bool {
         || domain.starts_with('-')
         || domain.ends_with('-')
     {
-        debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain);
+        debug!(
+            "Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'",
+            domain
+        );
         return false;
     } else if domain.len() > 255 {
         debug!("Domain validation error: '{}' exceeds 255 characters", domain);
@@ -338,12 +353,20 @@ struct Icon {
 impl Icon {
     const fn new(priority: u8, href: String) -> Self {
-        Self { href, priority }
+        Self {
+            href,
+            priority,
+        }
     }
 }

 fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &Url) {
-    if let markup5ever_rcdom::NodeData::Element { name, attrs, .. } = &node.data {
+    if let markup5ever_rcdom::NodeData::Element {
+        name,
+        attrs,
+        ..
+    } = &node.data
+    {
         if name.local.as_ref() == "link" {
             let mut has_rel = false;
             let mut href = None;
@@ -354,7 +377,8 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
                 let attr_name = attr.name.local.as_ref();
                 let attr_value = attr.value.as_ref();

-                if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) {
+                if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value)
+                {
                     has_rel = true;
                 } else if attr_name == "href" {
                     href = Some(attr_value);
@@ -683,6 +707,6 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
         [82, 73, 70, 70, ..] => Some("webp"),
         [255, 216, 255, ..] => Some("jpeg"),
         [66, 77, ..] => Some("bmp"),
-        _ => None
+        _ => None,
     }
 }

@@ -88,34 +88,28 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
     let username = data.username.as_ref().unwrap();
     let user = match User::find_by_mail(username, &conn) {
         Some(user) => user,
-        None => err!(
-            "Username or password is incorrect. Try again",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        ),
+        None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)),
     };

     // Check password
     let password = data.password.as_ref().unwrap();
     if !user.check_valid_password(password) {
-        err!(
-            "Username or password is incorrect. Try again",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
     }

     // Check if the user is disabled
     if !user.enabled {
-        err!(
-            "This user has been disabled",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username))
     }

     let now = Local::now();

     if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
         let now = now.naive_utc();
-        if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 {
+        if user.last_verifying_at.is_none()
+            || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds()
+                > CONFIG.signups_verify_resend_time() as i64
+        {
             let resend_limit = CONFIG.signups_verify_resend_limit() as i32;
             if resend_limit == 0 || user.login_verify_count < resend_limit {
                 // We want to send another email verification if we require signups to verify
@@ -135,10 +129,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
         }

         // We still want the login to fail until they actually verified the email address
-        err!(
-            "Please verify your email before trying again.",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username))
     }

     let (mut device, new_device) = get_device(&data, &conn, &user);
@@ -236,9 +227,7 @@ fn twofactor_auth(
         None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"),
     };

-    let selected_twofactor = twofactors
-        .into_iter()
-        .find(|tf| tf.atype == selected_id && tf.enabled);
+    let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled);

     use crate::api::core::two_factor as _tf;
     use crate::crypto::ct_eq;
@@ -247,18 +236,26 @@ fn twofactor_auth(
     let mut remember = data.two_factor_remember.unwrap_or(0);

     match TwoFactorType::from_i32(selected_id) {
-        Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?,
+        Some(TwoFactorType::Authenticator) => {
+            _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?
+        }
         Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
         Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
-        Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
-        Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
+        Some(TwoFactorType::Duo) => {
+            _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?
+        }
+        Some(TwoFactorType::Email) => {
+            _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?
+        }
         Some(TwoFactorType::Remember) => {
             match device.twofactor_remember {
                 Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => {
                     remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time
                 }
-                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"),
+                _ => {
+                    err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided")
+                }
             }
         }
         _ => err!("Invalid two factor provider"),

@@ -55,9 +55,9 @@ impl NumberOrString {
         use std::num::ParseIntError as PIE;
         match self {
             NumberOrString::Number(n) => Ok(n),
-            NumberOrString::String(s) => s
-                .parse()
-                .map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())),
+            NumberOrString::String(s) => {
+                s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string()))
+            }
         }
     }
 }

@@ -4,12 +4,7 @@ use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value as JsonValue;

-use crate::{
-    api::EmptyResult,
-    auth::Headers,
-    db::DbConn,
-    Error, CONFIG,
-};
+use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG};

 pub fn routes() -> Vec<Route> {
     routes![negotiate, websockets_err]
@@ -19,12 +14,16 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true);

 #[get("/hub")]
 fn websockets_err() -> EmptyResult {
-    if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() {
-        err!("
+    if CONFIG.websocket_enabled()
+        && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok()
+    {
+        err!(
+            "
 ###########################################################
 '/notifications/hub' should be proxied to the websocket server or notifications won't work.
 Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false.
-###########################################################################################\n")
+###########################################################################################\n"
+        )
     } else {
         Err(Error::empty())
     }
@@ -204,9 +203,7 @@ impl Handler for WsHandler {
         let handler_insert = self.out.clone();
         let handler_update = self.out.clone();

-        self.users
-            .map
-            .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
+        self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));

         // Schedule a ping to keep the connection alive
         self.out.timeout(PING_MS, PING)
@@ -216,7 +213,11 @@ impl Handler for WsHandler {
         if let Message::Text(text) = msg.clone() {
             let json = &text[..text.len() - 1]; // Remove last char

-            if let Ok(InitialMessage { protocol, version }) = from_str::<InitialMessage>(json) {
+            if let Ok(InitialMessage {
+                protocol,
+                version,
+            }) = from_str::<InitialMessage>(json)
+            {
                 if &protocol == "messagepack" && version == 1 {
                     return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message
                 }
@@ -295,10 +296,7 @@ impl WebSocketUsers {
     // NOTE: The last modified date needs to be updated before calling these methods
     pub fn send_user_update(&self, ut: UpdateType, user: &User) {
         let data = create_update(
-            vec![
-                ("UserId".into(), user.uuid.clone().into()),
-                ("Date".into(), serialize_date(user.updated_at)),
-            ],
+            vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))],
             ut,
         );

@@ -83,11 +83,15 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
         "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
         "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
-        "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))),
+        "bootstrap-native.js" => {
+            Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js")))
+        }
         "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
         "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
         "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
-        "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))),
+        "jquery-3.5.1.slim.js" => {
+            Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js")))
+        }
         _ => err!(format!("Static file not found: {}", filename)),
     }
 }

@@ -223,10 +223,9 @@ use crate::db::{
 };

 pub struct Host {
-    pub host: String
+    pub host: String,
 }

 impl<'a, 'r> FromRequest<'a, 'r> for Host {
     type Error = &'static str;
@@ -261,7 +260,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for Host {
             format!("{}://{}", protocol, host)
         };

-        Outcome::Success(Host { host })
+        Outcome::Success(Host {
+            host,
+        })
     }
 }
@@ -317,10 +318,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
         };

         if user.security_stamp != claims.sstamp {
-            if let Some(stamp_exception) = user
-                .stamp_exception
-                .as_deref()
-                .and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
+            if let Some(stamp_exception) =
+                user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
             {
                 let current_route = match request.route().and_then(|r| r.name) {
                     Some(name) => name,
@@ -338,7 +337,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
             }
         }

-        Outcome::Success(Headers { host, device, user })
+        Outcome::Success(Headers {
+            host,
+            device,
+            user,
+        })
     }
 }
@@ -506,7 +509,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders {
         };

         if !headers.org_user.has_full_access() {
-            match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) {
+            match CollectionUser::find_by_collection_and_user(
+                &col_id,
+                &headers.org_user.user_uuid,
+                &conn,
+            ) {
                 Some(_) => (),
                 None => err_handler!("The current user isn't a manager for this collection"),
             }
@@ -636,10 +643,10 @@ impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
             None
         };

-        let ip = ip
-            .or_else(|| req.remote().map(|r| r.ip()))
-            .unwrap_or_else(|| "0.0.0.0".parse().unwrap());
+        let ip = ip.or_else(|| req.remote().map(|r| r.ip())).unwrap_or_else(|| "0.0.0.0".parse().unwrap());

-        Outcome::Success(ClientIp { ip })
+        Outcome::Success(ClientIp {
+            ip,
+        })
     }
 }

@ -527,10 +527,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
let limit = 256; let limit = 256;
if cfg.database_max_conns < 1 || cfg.database_max_conns > limit { if cfg.database_max_conns < 1 || cfg.database_max_conns > limit {
err!(format!( err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", limit,));
"`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.",
limit,
));
} }
let dom = cfg.domain.to_lowercase(); let dom = cfg.domain.to_lowercase();
@ -871,9 +868,7 @@ fn case_helper<'reg, 'rc>(
rc: &mut RenderContext<'reg, 'rc>, rc: &mut RenderContext<'reg, 'rc>,
out: &mut dyn Output, out: &mut dyn Output,
) -> HelperResult { ) -> HelperResult {
let param = h let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
.param(0)
.ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
let value = param.value().clone(); let value = param.value().clone();
if h.params().iter().skip(1).any(|x| x.value() == &value) { if h.params().iter().skip(1).any(|x| x.value() == &value) {
@ -890,21 +885,15 @@ fn js_escape_helper<'reg, 'rc>(
_rc: &mut RenderContext<'reg, 'rc>, _rc: &mut RenderContext<'reg, 'rc>,
out: &mut dyn Output, out: &mut dyn Output,
) -> HelperResult { ) -> HelperResult {
let param = h let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
.param(0)
.ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
let no_quote = h let no_quote = h.param(1).is_some();
.param(1)
.is_some();
let value = param let value =
.value() param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
.as_str()
.ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
if ! no_quote { if !no_quote {
escaped_value = format!("&quot;{}&quot;", escaped_value); escaped_value = format!("&quot;{}&quot;", escaped_value);
} }

@ -47,9 +47,7 @@ pub fn get_random_64() -> Vec<u8> {
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> { pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
use ring::rand::{SecureRandom, SystemRandom}; use ring::rand::{SecureRandom, SystemRandom};
SystemRandom::new() SystemRandom::new().fill(&mut array).expect("Error generating random values");
.fill(&mut array)
.expect("Error generating random values");
array array
} }
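The call shape above is all there is to ring's CSPRNG; a self-contained sketch of the same fill-in-place pattern (assuming the ring crate is in Cargo.toml):

    use ring::rand::{SecureRandom, SystemRandom};

    fn main() {
        // Allocate a buffer and let the system CSPRNG fill it in place,
        // exactly like get_random above.
        let mut buf = vec![0u8; 32];
        SystemRandom::new().fill(&mut buf).expect("Error generating random values");
        println!("{:02x?}", buf);
    }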

@ -23,7 +23,6 @@ pub mod __mysql_schema;
#[path = "schemas/postgresql/schema.rs"] #[path = "schemas/postgresql/schema.rs"]
pub mod __postgresql_schema; pub mod __postgresql_schema;
// This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported
macro_rules! generate_connections { macro_rules! generate_connections {
( $( $name:ident: $ty:ty ),+ ) => { ( $( $name:ident: $ty:ty ),+ ) => {
@ -108,7 +107,6 @@ impl DbConnType {
} }
} }
#[macro_export] #[macro_export]
macro_rules! db_run { macro_rules! db_run {
// Same for all dbs // Same for all dbs
@ -154,7 +152,6 @@ macro_rules! db_run {
}; };
} }
pub trait FromDb { pub trait FromDb {
type Output; type Output;
#[allow(clippy::wrong_self_convention)] #[allow(clippy::wrong_self_convention)]
@ -239,7 +236,6 @@ pub fn backup_database(conn: &DbConn) -> Result<(), Error> {
Ok(()) Ok(())
} }
/// Get the SQL Server version /// Get the SQL Server version
pub fn get_sql_server_version(conn: &DbConn) -> String { pub fn get_sql_server_version(conn: &DbConn) -> String {
db_run! {@raw conn: db_run! {@raw conn:
@ -292,8 +288,7 @@ mod sqlite_migrations {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// Scoped to a connection. // Scoped to a connection.
@ -303,9 +298,7 @@ mod sqlite_migrations {
// Turn on WAL in SQLite // Turn on WAL in SQLite
if crate::CONFIG.enable_db_wal() { if crate::CONFIG.enable_db_wal() {
diesel::sql_query("PRAGMA journal_mode=wal") diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL");
.execute(&connection)
.expect("Failed to turn on WAL");
} }
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
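The WAL switch in this hunk is a plain SQLite pragma issued through diesel; a minimal sketch under assumed conditions (diesel 1.x with the sqlite feature, and a hypothetical data/db.sqlite3 path in place of the real DATABASE_URL):

    use diesel::{sqlite::SqliteConnection, Connection, RunQueryDsl};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Hypothetical path; the real code takes it from CONFIG.database_url().
        let connection = SqliteConnection::establish("data/db.sqlite3")?;
        // Write-ahead logging lets readers proceed while a writer commits.
        diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection)?;
        Ok(())
    }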
@ -321,8 +314,7 @@ mod mysql_migrations {
pub fn run_migrations() -> Result<(), super::Error> { pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// Scoped to a connection/session. // Scoped to a connection/session.
@ -343,8 +335,7 @@ mod postgresql_migrations {
pub fn run_migrations() -> Result<(), super::Error> { pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html,

@ -59,7 +59,6 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Attachment { impl Attachment {
pub fn save(&self, conn: &DbConn) -> EmptyResult { pub fn save(&self, conn: &DbConn) -> EmptyResult {
db_run! { conn: db_run! { conn:
sqlite, mysql { sqlite, mysql {

@ -4,14 +4,7 @@ use serde_json::Value;
use crate::CONFIG; use crate::CONFIG;
use super::{ use super::{
Attachment, Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType,
CollectionCipher,
Favorite,
FolderCipher,
Organization,
User,
UserOrgStatus,
UserOrgType,
UserOrganization, UserOrganization,
}; };
@ -93,15 +86,15 @@ impl Cipher {
}; };
let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); let password_history_json =
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let (read_only, hide_passwords) = let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) {
match self.get_access_restrictions(&user_uuid, conn) {
Some((ro, hp)) => (ro, hp), Some((ro, hp)) => (ro, hp),
None => { None => {
error!("Cipher ownership assertion failure"); error!("Cipher ownership assertion failure");
(true, true) (true, true)
}, }
}; };
// Get the type_data or a default to an empty json object '{}'. // Get the type_data or a default to an empty json object '{}'.
@ -197,9 +190,7 @@ impl Cipher {
None => { None => {
// Belongs to Organization, need to update affected users // Belongs to Organization, need to update affected users
if let Some(ref org_uuid) = self.organization_uuid { if let Some(ref org_uuid) = self.organization_uuid {
UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn) UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| {
.iter()
.for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn); User::update_uuid_revision(&user_org.user_uuid, conn);
user_uuids.push(user_org.user_uuid.clone()) user_uuids.push(user_org.user_uuid.clone())
}); });

@ -1,6 +1,6 @@
use serde_json::Value; use serde_json::Value;
use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher}; use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
@ -127,9 +127,7 @@ impl Collection {
} }
pub fn update_users_revision(&self, conn: &DbConn) { pub fn update_users_revision(&self, conn: &DbConn) {
UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn) UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| {
.iter()
.for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn); User::update_uuid_revision(&user_org.user_uuid, conn);
}); });
} }
@ -170,10 +168,7 @@ impl Collection {
} }
pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> {
Self::find_by_user_uuid(user_uuid, conn) Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect()
.into_iter()
.filter(|c| c.org_uuid == org_uuid)
.collect()
} }
pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
@ -284,7 +279,13 @@ impl CollectionUser {
}} }}
} }
pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult { pub fn save(
user_uuid: &str,
collection_uuid: &str,
read_only: bool,
hide_passwords: bool,
conn: &DbConn,
) -> EmptyResult {
User::update_uuid_revision(&user_uuid, conn); User::update_uuid_revision(&user_uuid, conn);
db_run! { conn: db_run! { conn:
@ -374,9 +375,7 @@ impl CollectionUser {
} }
pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
CollectionUser::find_by_collection(&collection_uuid, conn) CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| {
.iter()
.for_each(|collection| {
User::update_uuid_revision(&collection.user_uuid, conn); User::update_uuid_revision(&collection.user_uuid, conn);
}); });

@ -20,7 +20,7 @@ use crate::error::MapResult;
impl Favorite { impl Favorite {
// Returns whether the specified cipher is a favorite of the specified user. // Returns whether the specified cipher is a favorite of the specified user.
pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool {
db_run!{ conn: { db_run! { conn: {
let query = favorites::table let query = favorites::table
.filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::cipher_uuid.eq(cipher_uuid))
.filter(favorites::user_uuid.eq(user_uuid)) .filter(favorites::user_uuid.eq(user_uuid))
@ -36,7 +36,7 @@ impl Favorite {
match (old, new) { match (old, new) {
(false, true) => { (false, true) => {
User::update_uuid_revision(user_uuid, &conn); User::update_uuid_revision(user_uuid, &conn);
db_run!{ conn: { db_run! { conn: {
diesel::insert_into(favorites::table) diesel::insert_into(favorites::table)
.values(( .values((
favorites::user_uuid.eq(user_uuid), favorites::user_uuid.eq(user_uuid),
@ -48,7 +48,7 @@ impl Favorite {
} }
(true, false) => { (true, false) => {
User::update_uuid_revision(user_uuid, &conn); User::update_uuid_revision(user_uuid, &conn);
db_run!{ conn: { db_run! { conn: {
diesel::delete( diesel::delete(
favorites::table favorites::table
.filter(favorites::user_uuid.eq(user_uuid)) .filter(favorites::user_uuid.eq(user_uuid))
@ -59,7 +59,7 @@ impl Favorite {
}} }}
} }
// Otherwise, the favorite status is already what it should be. // Otherwise, the favorite status is already what it should be.
_ => Ok(()) _ => Ok(()),
} }
} }

@ -109,7 +109,6 @@ impl Folder {
User::update_uuid_revision(&self.user_uuid, conn); User::update_uuid_revision(&self.user_uuid, conn);
FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;
db_run! { conn: { db_run! { conn: {
diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid)))
.execute(conn) .execute(conn)

@ -6,9 +6,9 @@ mod favorite;
mod folder; mod folder;
mod org_policy; mod org_policy;
mod organization; mod organization;
mod send;
mod two_factor; mod two_factor;
mod user; mod user;
mod send;
pub use self::attachment::Attachment; pub use self::attachment::Attachment;
pub use self::cipher::Cipher; pub use self::cipher::Cipher;
@ -18,6 +18,6 @@ pub use self::favorite::Favorite;
pub use self::folder::{Folder, FolderCipher}; pub use self::folder::{Folder, FolderCipher};
pub use self::org_policy::{OrgPolicy, OrgPolicyType}; pub use self::org_policy::{OrgPolicy, OrgPolicyType};
pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
pub use self::send::{Send, SendType};
pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::user::{Invitation, User, UserStampException}; pub use self::user::{Invitation, User, UserStampException};
pub use self::send::{Send, SendType};

@ -4,7 +4,7 @@ use crate::api::EmptyResult;
use crate::db::DbConn; use crate::db::DbConn;
use crate::error::MapResult; use crate::error::MapResult;
use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType}; use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
@ -20,8 +20,7 @@ db_object! {
} }
} }
#[derive(Copy, Clone)] #[derive(Copy, Clone, num_derive::FromPrimitive)]
#[derive(num_derive::FromPrimitive)]
pub enum OrgPolicyType { pub enum OrgPolicyType {
TwoFactorAuthentication = 0, TwoFactorAuthentication = 0,
MasterPassword = 1, MasterPassword = 1,
@ -175,7 +174,8 @@ impl OrgPolicy {
/// and the user is not an owner or admin of that org. This is only useful for checking /// and the user is not an owner or admin of that org. This is only useful for checking
/// applicability of policy types that have these particular semantics. /// applicability of policy types that have these particular semantics.
pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool {
for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only. // Returns confirmed users only.
for policy in OrgPolicy::find_by_user(user_uuid, conn) {
if policy.enabled && policy.has_type(policy_type) { if policy.enabled && policy.has_type(policy_type) {
let org_uuid = &policy.org_uuid; let org_uuid = &policy.org_uuid;
if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) {

@ -1,8 +1,8 @@
use num_traits::FromPrimitive;
use serde_json::Value; use serde_json::Value;
use std::cmp::Ordering; use std::cmp::Ordering;
use num_traits::FromPrimitive;
use super::{CollectionUser, User, OrgPolicy}; use super::{CollectionUser, OrgPolicy, User};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
@ -35,8 +35,7 @@ pub enum UserOrgStatus {
Confirmed = 2, Confirmed = 2,
} }
#[derive(Copy, Clone, PartialEq, Eq)] #[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)]
#[derive(num_derive::FromPrimitive)]
pub enum UserOrgType { pub enum UserOrgType {
Owner = 0, Owner = 0,
Admin = 1, Admin = 1,
@ -190,9 +189,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Organization { impl Organization {
pub fn save(&self, conn: &DbConn) -> EmptyResult { pub fn save(&self, conn: &DbConn) -> EmptyResult {
UserOrganization::find_by_org(&self.uuid, conn) UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| {
.iter()
.for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn); User::update_uuid_revision(&user_org.user_uuid, conn);
}); });
@ -236,7 +233,6 @@ impl Organization {
UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?; OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;
db_run! { conn: { db_run! { conn: {
diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
.execute(conn) .execute(conn)
@ -347,11 +343,13 @@ impl UserOrganization {
let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
collections collections
.iter() .iter()
.map(|c| json!({ .map(|c| {
json!({
"Id": c.collection_uuid, "Id": c.collection_uuid,
"ReadOnly": c.read_only, "ReadOnly": c.read_only,
"HidePasswords": c.hide_passwords, "HidePasswords": c.hide_passwords,
})) })
})
.collect() .collect()
}; };
@ -446,8 +444,7 @@ impl UserOrganization {
} }
pub fn has_full_access(&self) -> bool { pub fn has_full_access(&self) -> bool {
(self.access_all || self.atype >= UserOrgType::Admin) && (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed)
self.has_status(UserOrgStatus::Confirmed)
} }
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {

@ -64,7 +64,7 @@ enum UserStatus {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct UserStampException { pub struct UserStampException {
pub route: String, pub route: String,
pub security_stamp: String pub security_stamp: String,
} }
/// Local methods /// Local methods
@ -162,7 +162,7 @@ impl User {
pub fn set_stamp_exception(&mut self, route_exception: &str) { pub fn set_stamp_exception(&mut self, route_exception: &str) {
let stamp_exception = UserStampException { let stamp_exception = UserStampException {
route: route_exception.to_string(), route: route_exception.to_string(),
security_stamp: self.security_stamp.to_string() security_stamp: self.security_stamp.to_string(),
}; };
self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default());
} }
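For context, the stamp exception set here is a small serde struct serialized to JSON; a standalone sketch with hypothetical values (assuming serde with the derive feature plus serde_json):

    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize)]
    struct UserStampException {
        route: String,
        security_stamp: String,
    }

    fn main() {
        // Hypothetical values; the real ones come from the current route and the user record.
        let exc = UserStampException {
            route: "post_keys".to_string(),
            security_stamp: "example-stamp".to_string(),
        };
        println!("{}", serde_json::to_string(&exc).unwrap_or_default());
    }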
@ -341,14 +341,16 @@ impl User {
pub fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> { pub fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> {
match Device::find_latest_active_by_user(&self.uuid, conn) { match Device::find_latest_active_by_user(&self.uuid, conn) {
Some(device) => Some(device.updated_at), Some(device) => Some(device.updated_at),
None => None None => None,
} }
} }
} }
impl Invitation { impl Invitation {
pub const fn new(email: String) -> Self { pub const fn new(email: String) -> Self {
Self { email } Self {
email,
}
} }
pub fn save(&self, conn: &DbConn) -> EmptyResult { pub fn save(&self, conn: &DbConn) -> EmptyResult {

@ -33,10 +33,10 @@ macro_rules! make_error {
}; };
} }
use diesel::r2d2::PoolError as R2d2Err;
use diesel::result::Error as DieselErr; use diesel::result::Error as DieselErr;
use diesel::ConnectionError as DieselConErr; use diesel::ConnectionError as DieselConErr;
use diesel_migrations::RunMigrationsError as DieselMigErr; use diesel_migrations::RunMigrationsError as DieselMigErr;
use diesel::r2d2::PoolError as R2d2Err;
use handlebars::RenderError as HbErr; use handlebars::RenderError as HbErr;
use jsonwebtoken::errors::Error as JwtErr; use jsonwebtoken::errors::Error as JwtErr;
use regex::Error as RegexErr; use regex::Error as RegexErr;
@ -198,11 +198,7 @@ impl<'r> Responder<'r> for Error {
let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest); let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);
Response::build() Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok()
.status(code)
.header(ContentType::JSON)
.sized_body(Cursor::new(format!("{}", self)))
.ok()
} }
} }

@ -1,4 +1,4 @@
use std::{str::FromStr}; use std::str::FromStr;
use chrono::{DateTime, Local}; use chrono::{DateTime, Local};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
@ -62,11 +62,13 @@ fn mailer() -> SmtpTransport {
let mut selected_mechanisms = vec![]; let mut selected_mechanisms = vec![];
for wanted_mechanism in mechanism.split(',') { for wanted_mechanism in mechanism.split(',') {
for m in &allowed_mechanisms { for m in &allowed_mechanisms {
if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { if m.to_string().to_lowercase()
== wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase()
{
selected_mechanisms.push(*m); selected_mechanisms.push(*m);
} }
} }
}; }
if !selected_mechanisms.is_empty() { if !selected_mechanisms.is_empty() {
smtp_client.authentication(selected_mechanisms) smtp_client.authentication(selected_mechanisms)
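The mechanism selection being reformatted above boils down to comma-splitting and quote-trimming; a self-contained sketch with a hypothetical SMTP_AUTH_MECHANISM value and no lettre types involved:

    fn main() {
        // Hypothetical configuration value; quotes and stray spaces are tolerated.
        let mechanism = "\"Plain\", 'Login'";
        let allowed = ["PLAIN", "LOGIN", "XOAUTH2"];

        let selected: Vec<&str> = mechanism
            .split(',')
            .map(|m| m.trim_matches(|c| c == '"' || c == '\'' || c == ' '))
            .filter(|wanted| allowed.iter().any(|a| a.eq_ignore_ascii_case(wanted)))
            .collect();

        assert_eq!(selected, ["Plain", "Login"]);
        println!("selected mechanisms: {:?}", selected);
    }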
@ -316,24 +318,16 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String
let smtp_from = &CONFIG.smtp_from(); let smtp_from = &CONFIG.smtp_from();
let email = Message::builder() let email = Message::builder()
.message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1] ))) .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1])))
.to(Mailbox::new(None, Address::from_str(&address)?)) .to(Mailbox::new(None, Address::from_str(&address)?))
.from(Mailbox::new( .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?))
Some(CONFIG.smtp_from_name()),
Address::from_str(smtp_from)?,
))
.subject(subject) .subject(subject)
.multipart( .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?;
MultiPart::alternative()
.singlepart(text)
.singlepart(html)
)?;
match mailer().send(&email) { match mailer().send(&email) {
Ok(_) => Ok(()), Ok(_) => Ok(()),
// Match some common errors and make them more user friendly // Match some common errors and make them more user friendly
Err(e) => { Err(e) => {
if e.is_client() { if e.is_client() {
err!(format!("SMTP Client error: {}", e)); err!(format!("SMTP Client error: {}", e));
} else if e.is_transient() { } else if e.is_transient() {

@ -16,7 +16,7 @@ extern crate diesel;
#[macro_use] #[macro_use]
extern crate diesel_migrations; extern crate diesel_migrations;
use job_scheduler::{JobScheduler, Job}; use job_scheduler::{Job, JobScheduler};
use std::{ use std::{
fs::create_dir_all, fs::create_dir_all,
panic, panic,
@ -127,7 +127,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
// Enable smtp debug logging only specifically for smtp when needed. // Enable smtp debug logging only specifically for smtp when needed.
// This can contain sensitive information we do not want in the default debug/trace logging. // This can contain sensitive information we do not want in the default debug/trace logging.
if CONFIG.smtp_debug() { if CONFIG.smtp_debug() {
println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"); println!(
"[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"
);
println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n"); println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n");
logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug) logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug)
} else { } else {
@ -298,7 +300,10 @@ fn check_web_vault() {
let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html"); let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");
if !index_path.exists() { if !index_path.exists() {
error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder()); error!(
"Web vault is not found at '{}'. To install it, please follow the steps in: ",
CONFIG.web_vault_folder()
);
error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault"); error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it"); error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
exit(1); exit(1);
@ -344,7 +349,9 @@ fn schedule_jobs(pool: db::DbPool) {
info!("Job scheduler disabled."); info!("Job scheduler disabled.");
return; return;
} }
thread::Builder::new().name("job-scheduler".to_string()).spawn(move || { thread::Builder::new()
.name("job-scheduler".to_string())
.spawn(move || {
let mut sched = JobScheduler::new(); let mut sched = JobScheduler::new();
// Purge sends that are past their deletion date. // Purge sends that are past their deletion date.
@ -370,5 +377,6 @@ fn schedule_jobs(pool: db::DbPool) {
sched.tick(); sched.tick();
thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms())); thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms()));
} }
}).expect("Error spawning job scheduler thread"); })
.expect("Error spawning job scheduler thread");
} }
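The thread spawned here drives the job_scheduler crate's tick loop; a minimal standalone sketch of that pattern (the cron expression and poll interval are placeholders, not the project's real schedule):

    use job_scheduler::{Job, JobScheduler};
    use std::{thread, time::Duration};

    fn main() {
        let mut sched = JobScheduler::new();

        // Placeholder job: fire every 30 seconds (cron syntax with a leading seconds field).
        sched.add(Job::new("0/30 * * * * *".parse().unwrap(), || {
            println!("periodic maintenance would run here");
        }));

        loop {
            sched.tick();
            thread::sleep(Duration::from_millis(500));
        }
    }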

@ -28,7 +28,10 @@ impl Fairing for AppHeaders {
res.set_raw_header("X-Frame-Options", "SAMEORIGIN"); res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
res.set_raw_header("X-Content-Type-Options", "nosniff"); res.set_raw_header("X-Content-Type-Options", "nosniff");
res.set_raw_header("X-XSS-Protection", "1; mode=block"); res.set_raw_header("X-XSS-Protection", "1; mode=block");
let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors()); let csp = format!(
"frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};",
CONFIG.allowed_iframe_ancestors()
);
res.set_raw_header("Content-Security-Policy", csp); res.set_raw_header("Content-Security-Policy", csp);
// Disable cache unless otherwise specified // Disable cache unless otherwise specified
@ -124,14 +127,8 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
// Log all the routes from the main paths list, and the attachments endpoint // Log all the routes from the main paths list, and the attachments endpoint
// Effectively ignores any static file route and the alive endpoint // Effectively ignores any static file route and the alive endpoint
const LOGGED_ROUTES: [&str; 6] = [ const LOGGED_ROUTES: [&str; 6] =
"/api", ["/api", "/admin", "/identity", "/icons", "/notifications/hub/negotiate", "/attachments"];
"/admin",
"/identity",
"/icons",
"/notifications/hub/negotiate",
"/attachments",
];
// Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts // Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts
pub struct BetterLogging(pub bool); pub struct BetterLogging(pub bool);
@ -158,7 +155,11 @@ impl Fairing for BetterLogging {
} }
let config = rocket.config(); let config = rocket.config();
let scheme = if config.tls_enabled() { "https" } else { "http" }; let scheme = if config.tls_enabled() {
"https"
} else {
"http"
};
let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port); let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
info!(target: "start", "Rocket has launched from {}", addr); info!(target: "start", "Rocket has launched from {}", addr);
} }
@ -293,8 +294,7 @@ where
use std::env; use std::env;
pub fn get_env_str_value(key: &str) -> Option<String> pub fn get_env_str_value(key: &str) -> Option<String> {
{
let key_file = format!("{}_FILE", key); let key_file = format!("{}_FILE", key);
let value_from_env = env::var(key); let value_from_env = env::var(key);
let value_file = env::var(&key_file); let value_file = env::var(&key_file);
@ -304,9 +304,9 @@ pub fn get_env_str_value(key: &str) -> Option<String>
(Ok(v_env), Err(_)) => Some(v_env), (Ok(v_env), Err(_)) => Some(v_env),
(Err(_), Ok(v_file)) => match fs::read_to_string(v_file) { (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) {
Ok(content) => Some(content.trim().to_string()), Ok(content) => Some(content.trim().to_string()),
Err(e) => panic!("Failed to load {}: {:?}", key, e) Err(e) => panic!("Failed to load {}: {:?}", key, e),
}, },
_ => None _ => None,
} }
} }
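The _FILE indirection being reformatted here is a common Docker-secrets pattern: use KEY if it is set, otherwise read the value from the file named by KEY_FILE. A simplified sketch of the same idea (the behaviour when both variables are set is not shown in this hunk, so this version simply treats that case as unset):

    use std::{env, fs};

    fn env_or_file(key: &str) -> Option<String> {
        let key_file = format!("{}_FILE", key);
        match (env::var(key), env::var(&key_file)) {
            (Ok(value), Err(_)) => Some(value),
            (Err(_), Ok(path)) => Some(fs::read_to_string(path).expect("readable secret file").trim().to_string()),
            _ => None,
        }
    }

    fn main() {
        // Hypothetical usage, e.g. SMTP_PASSWORD or SMTP_PASSWORD_FILE.
        println!("{:?}", env_or_file("SMTP_PASSWORD"));
    }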
@ -523,7 +523,10 @@ where
} }
} }
use reqwest::{blocking::{Client, ClientBuilder}, header}; use reqwest::{
blocking::{Client, ClientBuilder},
header,
};
pub fn get_reqwest_client() -> Client { pub fn get_reqwest_client() -> Client {
get_reqwest_client_builder().build().expect("Failed to build client") get_reqwest_client_builder().build().expect("Failed to build client")
@ -532,8 +535,5 @@ pub fn get_reqwest_client() -> Client {
pub fn get_reqwest_client_builder() -> ClientBuilder { pub fn get_reqwest_client_builder() -> ClientBuilder {
let mut headers = header::HeaderMap::new(); let mut headers = header::HeaderMap::new();
headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS")); headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
Client::builder() Client::builder().default_headers(headers).timeout(Duration::from_secs(10))
.default_headers(headers)
.timeout(Duration::from_secs(10))
} }
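Taken together, these last two functions give every caller one preconfigured blocking HTTP client; a standalone usage sketch (assuming reqwest with the blocking feature; the URL is a placeholder):

    use reqwest::{blocking::Client, header};
    use std::time::Duration;

    fn build_client() -> Client {
        // Same builder shape as get_reqwest_client_builder above: a fixed
        // User-Agent plus a 10-second request timeout.
        let mut headers = header::HeaderMap::new();
        headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
        Client::builder()
            .default_headers(headers)
            .timeout(Duration::from_secs(10))
            .build()
            .expect("Failed to build client")
    }

    fn main() {
        // Placeholder request; any HTTPS endpoint behaves the same.
        let result = build_client().get("https://example.com").send().and_then(|r| r.text());
        println!("{:?}", result.map(|body| body.len()));
    }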
