Merge branch 'RealOrangeOne-fmt'

pull/1636/head^2
Daniel García 4 years ago
commit af2235bf88

@ -89,7 +89,7 @@ jobs:
with:
profile: minimal
target: ${{ matrix.target-triple }}
components: clippy
components: clippy, rustfmt
# End Uses the rust-toolchain file to determine version
@ -111,6 +111,15 @@ jobs:
# End Run cargo clippy
# Run cargo fmt
- name: '`cargo fmt`'
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
# End Run cargo fmt
# Build the binary
- name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
uses: actions-rs/cargo@v1

@ -1,5 +1,5 @@
use std::process::Command;
use std::env;
use std::process::Command;
fn main() {
// This allows using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros
@ -11,7 +11,9 @@ fn main() {
println!("cargo:rustc-cfg=postgresql");
#[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
compile_error!(
"You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
);
if let Ok(version) = env::var("BWRS_VERSION") {
println!("cargo:rustc-env=BWRS_VERSION={}", version);

@ -1,2 +1,7 @@
version = "Two"
edition = "2018"
max_width = 120
newline_style = "Unix"
use_small_heuristics = "Off"
struct_lit_single_line = false
overflow_delimited_expr = true
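
For context, the new rustfmt.toml widens lines to 120 columns, forces Unix line endings, turns off the small-width heuristics, roughly lets the last delimited argument of a call overflow instead of forcing the whole call vertical, and sets struct_lit_single_line = false so struct literals are always written across multiple lines; that last option is what expands literals like Host { host } further down in this diff. A rough sketch, modelled on those Host changes, of the shape cargo fmt produces under these settings:

// Toy example: with struct_lit_single_line = false, even a one-field literal
// is broken onto multiple lines, while ordinary call chains may run to 120 columns.
struct Host {
    host: String,
}

fn make_host(host: String) -> Host {
    Host {
        host,
    }
}

fn main() {
    let h = make_host("example.com".to_string());
    println!("{}", h.host);
}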

@ -3,7 +3,6 @@ use serde::de::DeserializeOwned;
use serde_json::Value;
use std::{env, time::Duration};
use rocket::{
http::{Cookie, Cookies, SameSite},
request::{self, FlashMessage, Form, FromRequest, Outcome, Request},
@ -19,7 +18,7 @@ use crate::{
db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
error::{Error, MapResult},
mail,
util::{format_naive_datetime_local, get_display_size, is_running_in_docker, get_reqwest_client},
util::{format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker},
CONFIG,
};
@ -64,11 +63,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| {
.unwrap_or("Unknown")
});
static CAN_BACKUP: Lazy<bool> = Lazy::new(|| {
DbConnType::from_url(&CONFIG.database_url())
.map(|t| t == DbConnType::sqlite)
.unwrap_or(false)
});
static CAN_BACKUP: Lazy<bool> =
Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
#[get("/")]
fn admin_disabled() -> &'static str {
@ -141,7 +137,12 @@ fn admin_url(referer: Referer) -> String {
fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
// If there is an error, show it
let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});
let json = json!({
"page_content": "admin/login",
"version": VERSION,
"error": msg,
"urlpath": CONFIG.domain_path()
});
// Return the page
let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
@ -165,10 +166,7 @@ fn post_admin_login(
// If the token is invalid, redirect to login page
if !_validate_token(&data.token) {
error!("Invalid admin token. IP: {}", ip.ip);
Err(Flash::error(
Redirect::to(admin_url(referer)),
"Invalid admin token, please try again.",
))
Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again."))
} else {
// If the token received is valid, generate JWT and save it as a cookie
let claims = generate_admin_claims();
@ -328,7 +326,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
let users = User::get_all(&conn);
let dt_fmt = "%Y-%m-%d %H:%M:%S %Z";
let users_json: Vec<Value> = users.iter()
let users_json: Vec<Value> = users
.iter()
.map(|u| {
let mut usr = u.to_json(&conn);
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
@ -338,7 +337,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt));
usr["last_active"] = match u.last_active(&conn) {
Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)),
None => json!("Never")
None => json!("Never"),
};
usr
})
@ -423,7 +422,6 @@ fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: D
user_to_edit.save(&conn)
}
#[post("/users/update_revision")]
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
User::update_all_revisions(&conn)
@ -432,7 +430,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
#[get("/organizations/overview")]
fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
let organizations = Organization::get_all(&conn);
let organizations_json: Vec<Value> = organizations.iter()
let organizations_json: Vec<Value> = organizations
.iter()
.map(|o| {
let mut org = o.to_json();
org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
@ -471,22 +470,13 @@ struct GitCommit {
fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
let github_api = get_reqwest_client();
Ok(github_api
.get(url)
.timeout(Duration::from_secs(10))
.send()?
.error_for_status()?
.json::<T>()?)
Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::<T>()?)
}
fn has_http_access() -> bool {
let http_access = get_reqwest_client();
match http_access
.head("https://github.com/dani-garcia/bitwarden_rs")
.timeout(Duration::from_secs(10))
.send()
{
match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() {
Ok(r) => r.status().is_success(),
_ => false,
}
@ -499,15 +489,14 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
use std::net::ToSocketAddrs;
// Get current running versions
let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
let web_vault_version: WebVaultVersion =
match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
Ok(s) => serde_json::from_str(&s)?,
_ => {
match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
_ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
Ok(s) => serde_json::from_str(&s)?,
_ => {
WebVaultVersion{version: String::from("Version file missing")}
_ => WebVaultVersion {
version: String::from("Version file missing"),
},
}
},
};
@ -529,7 +518,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
// TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
let (latest_release, latest_commit, latest_web_build) = if has_http_access {
(
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") {
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest")
{
Ok(r) => r.tag_name,
_ => "-".to_string(),
},
@ -545,7 +535,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
if running_within_docker {
"-".to_string()
} else {
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") {
match get_github_api::<GitRelease>(
"https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
) {
Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
_ => "-".to_string(),
}
@ -557,7 +549,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
let ip_header_name = match &ip_header.0 {
Some(h) => h,
_ => ""
_ => "",
};
let diagnostics_json = json!({

@ -320,15 +320,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
err!("The cipher is not owned by the user")
}
update_cipher_from_data(
&mut saved_cipher,
cipher_data,
&headers,
false,
&conn,
&nt,
UpdateType::CipherUpdate,
)?
update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?
}
// Update user data

@ -100,24 +100,18 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
let collections_json: Vec<Value> = collections.iter()
.map(|c| c.to_json_details(&headers.user.uuid, &conn))
.collect();
let collections_json: Vec<Value> =
collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect();
let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
let ciphers_json: Vec<Value> =
ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
let sends = Send::find_by_user(&headers.user.uuid, &conn);
let sends_json: Vec<Value> = sends
.iter()
.map(|s| s.to_json())
.collect();
let sends_json: Vec<Value> = sends.iter().map(|s| s.to_json()).collect();
let domains_json = if data.exclude_domains {
Value::Null
@ -142,10 +136,8 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
let ciphers_json: Vec<Value> =
ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
Json(json!({
"Data": ciphers_json,
@ -288,17 +280,12 @@ fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt
/// allowed to delete or share such ciphers to an org, however.
///
/// Ref: https://bitwarden.com/help/article/policies/#personal-ownership
fn enforce_personal_ownership_policy(
data: &CipherData,
headers: &Headers,
conn: &DbConn
) -> EmptyResult {
fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult {
if data.OrganizationId.is_none() {
let user_uuid = &headers.user.uuid;
let policy_type = OrgPolicyType::PersonalOwnership;
if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
err!("Due to an Enterprise Policy, you are restricted from \
saving items to your personal vault.")
err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.")
}
}
Ok(())
@ -317,11 +304,12 @@ pub fn update_cipher_from_data(
// Check that the client isn't updating an existing cipher with stale data.
if let Some(dt) = data.LastKnownRevisionDate {
match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format
Err(err) =>
warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 =>
err!("The client copy of this cipher is out of date. Resync the client and try again."),
match NaiveDateTime::parse_from_str(&dt, "%+") {
// ISO 8601 format
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
err!("The client copy of this cipher is out of date. Resync the client and try again.")
}
Ok(_) => (),
}
}
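
The staleness check above rejects an update when the server's copy of the cipher is more than one second newer than the LastKnownRevisionDate sent by the client. A small worked example of that comparison (dates chosen arbitrarily), assuming the chrono crate as used elsewhere in the project:

// Sketch: the update is considered stale when the server copy is more than
// one second newer than the revision date the client last saw.
use chrono::NaiveDate;

fn main() {
    let server_updated_at = NaiveDate::from_ymd(2021, 3, 1).and_hms(12, 0, 5);
    let client_revision = NaiveDate::from_ymd(2021, 3, 1).and_hms(12, 0, 0);
    let stale = server_updated_at.signed_duration_since(client_revision).num_seconds() > 1;
    assert!(stale);
}
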
@ -394,10 +382,7 @@ pub fn update_cipher_from_data(
// But, we at least know we do not need to store and return this specific key.
fn _clean_cipher_data(mut json_data: Value) -> Value {
if json_data.is_array() {
json_data.as_array_mut()
.unwrap()
.iter_mut()
.for_each(|ref mut f| {
json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| {
f.as_object_mut().unwrap().remove("Response");
});
};
@ -421,7 +406,7 @@ pub fn update_cipher_from_data(
data["Uris"] = _clean_cipher_data(data["Uris"].clone());
}
data
},
}
None => err!("Data missing"),
};
@ -602,11 +587,8 @@ fn post_collections_admin(
}
let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
let current_collections: HashSet<String> = cipher
.get_collections(&headers.user.uuid, &conn)
.iter()
.cloned()
.collect();
let current_collections: HashSet<String> =
cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect();
for collection in posted_collections.symmetric_difference(&current_collections) {
match Collection::find_by_uuid(&collection, &conn) {
@ -842,7 +824,8 @@ fn post_attachment(
let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
let path = base_path.join(&file_name);
let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
let size =
match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
SaveResult::Full(SavedData::File(_, size)) => size as i32,
SaveResult::Full(other) => {
std::fs::remove_file(path).ok();
@ -994,12 +977,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase<Value>, headers: Headers, conn:
}
#[post("/ciphers/delete-admin", data = "<data>")]
fn delete_cipher_selected_post_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
fn delete_cipher_selected_post_admin(
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
delete_cipher_selected_post(data, headers, conn, nt)
}
#[put("/ciphers/delete-admin", data = "<data>")]
fn delete_cipher_selected_put_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
fn delete_cipher_selected_put_admin(
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
delete_cipher_selected_put(data, headers, conn, nt)
}
@ -1150,7 +1143,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del
Ok(())
}
fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult {
fn _delete_multiple_ciphers(
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
soft_delete: bool,
nt: Notify,
) -> EmptyResult {
let data: Value = data.into_inner().data;
let uuids = match data.get("Ids") {
@ -1202,7 +1201,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: &Headers, conn: &
for uuid in uuids {
match _restore_cipher_by_uuid(uuid, headers, conn, nt) {
Ok(json) => ciphers.push(json.into_inner()),
err => return err
err => return err,
}
}

@ -8,15 +8,7 @@ use crate::{
};
pub fn routes() -> Vec<rocket::Route> {
routes![
get_folders,
get_folder,
post_folders,
post_folder,
put_folder,
delete_folder_post,
delete_folder,
]
routes![get_folders, get_folder, post_folders, post_folder, put_folder, delete_folder_post, delete_folder,]
}
#[get("/folders")]

@ -2,21 +2,15 @@ mod accounts;
mod ciphers;
mod folders;
mod organizations;
pub mod two_factor;
mod sends;
pub mod two_factor;
pub use ciphers::purge_trashed_ciphers;
pub use sends::purge_sends;
pub fn routes() -> Vec<Route> {
let mut mod_routes = routes![
clear_device_token,
put_device_token,
get_eq_domains,
post_eq_domains,
put_eq_domains,
hibp_breach,
];
let mut mod_routes =
routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,];
let mut routes = Vec::new();
routes.append(&mut accounts::routes());
@ -33,9 +27,9 @@ pub fn routes() -> Vec<Route> {
//
// Move this somewhere else
//
use rocket::response::Response;
use rocket::Route;
use rocket_contrib::json::Json;
use rocket::response::Response;
use serde_json::Value;
use crate::{
@ -156,10 +150,7 @@ fn hibp_breach(username: String) -> JsonResult {
if let Some(api_key) = crate::CONFIG.hibp_api_key() {
let hibp_client = get_reqwest_client();
let res = hibp_client
.get(&url)
.header("hibp-api-key", api_key)
.send()?;
let res = hibp_client.get(&url).header("hibp-api-key", api_key).send()?;
// If we get a 404, return a 404, it means no breached accounts
if res.status() == 404 {

@ -5,7 +5,7 @@ use serde_json::Value;
use crate::{
api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose},
auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
db::{models::*, DbConn},
mail, CONFIG,
};
@ -333,7 +333,12 @@ fn post_organization_collection_delete_user(
}
#[delete("/organizations/<org_id>/collections/<col_id>")]
fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult {
fn delete_organization_collection(
org_id: String,
col_id: String,
_headers: ManagerHeaders,
conn: DbConn,
) -> EmptyResult {
match Collection::find_by_uuid(&col_id, &conn) {
None => err!("Collection not found"),
Some(collection) => {
@ -426,9 +431,7 @@ fn put_collection_users(
continue;
}
CollectionUser::save(&user.user_uuid, &coll_id,
d.ReadOnly, d.HidePasswords,
&conn)?;
CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?;
}
Ok(())
@ -443,10 +446,8 @@ struct OrgIdData {
#[get("/ciphers/organization-details?<data..>")]
fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> Json<Value> {
let ciphers = Cipher::find_by_org(&data.organization_id, &conn);
let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
let ciphers_json: Vec<Value> =
ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
Json(json!({
"Data": ciphers_json,
@ -544,9 +545,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
None => err!("Collection not found in Organization"),
Some(collection) => {
CollectionUser::save(&user.uuid, &collection.uuid,
col.ReadOnly, col.HidePasswords,
&conn)?;
CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?;
}
}
}
@ -801,9 +800,13 @@ fn edit_user(
match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
None => err!("Collection not found in Organization"),
Some(collection) => {
CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid,
col.ReadOnly, col.HidePasswords,
&conn)?;
CollectionUser::save(
&user_to_edit.user_uuid,
&collection.uuid,
col.ReadOnly,
col.HidePasswords,
&conn,
)?;
}
}
}
@ -899,15 +902,7 @@ fn post_org_import(
.into_iter()
.map(|cipher_data| {
let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
update_cipher_from_data(
&mut cipher,
cipher_data,
&headers,
false,
&conn,
&nt,
UpdateType::CipherCreate,
)
update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate)
.ok();
cipher
})
@ -989,7 +984,13 @@ struct PolicyData {
}
#[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
fn put_policy(
org_id: String,
pol_type: i32,
data: Json<PolicyData>,
_headers: AdminHeaders,
conn: DbConn,
) -> JsonResult {
let data: PolicyData = data.into_inner();
let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
@ -1128,7 +1129,6 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con
// If user is not part of the organization, but it exists
} else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() {
if let Some(user) = User::find_by_mail(&user_data.Email, &conn) {
let user_org_status = if CONFIG.mail_enabled() {
UserOrgStatus::Invited as i32
} else {

@ -16,15 +16,7 @@ use crate::{
const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
pub fn routes() -> Vec<rocket::Route> {
routes![
post_send,
post_send_file,
post_access,
post_access_file,
put_send,
delete_send,
put_remove_password
]
routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password]
}
pub fn purge_sends(pool: DbPool) {
@ -171,13 +163,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
None => err!("No model entry present"),
};
let size = match data_entry
.data
.save()
.memory_threshold(0)
.size_limit(size_limit)
.with_path(&file_path)
{
let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) {
SaveResult::Full(SavedData::File(_, size)) => size as i32,
SaveResult::Full(other) => {
std::fs::remove_file(&file_path).ok();
@ -198,10 +184,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
if let Some(o) = data_value.as_object_mut() {
o.insert(String::from("Id"), Value::String(file_id));
o.insert(String::from("Size"), Value::Number(size.into()));
o.insert(
String::from("SizeName"),
Value::String(crate::util::get_display_size(size)),
);
o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size)));
}
send.data = serde_json::to_string(&data_value)?;

@ -17,11 +17,7 @@ use crate::{
pub use crate::config::CONFIG;
pub fn routes() -> Vec<Route> {
routes![
generate_authenticator,
activate_authenticator,
activate_authenticator_put,
]
routes![generate_authenticator, activate_authenticator, activate_authenticator_put,]
}
#[post("/two-factor/get-authenticator", data = "<data>")]
@ -141,7 +137,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
// The amount of steps back and forward in time
// Also check if we need to disable time drifted TOTP codes.
// If that is the case, we set the steps to 0 so only the current TOTP is valid.
let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };
let steps = !CONFIG.authenticator_disable_time_drift() as i64;
for step in -steps..=steps {
let time_step = current_timestamp / 30i64 + step;
@ -163,22 +159,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
twofactor.save(&conn)?;
return Ok(());
} else if generated == totp_code && time_step <= twofactor.last_used as i64 {
warn!(
"This or a TOTP code within {} steps back and forward has already been used!",
steps
);
err!(format!(
"Invalid TOTP code! Server time: {} IP: {}",
current_time.format("%F %T UTC"),
ip.ip
));
warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
}
}
// Else no valid code received, deny access
err!(format!(
"Invalid TOTP code! Server time: {} IP: {}",
current_time.format("%F %T UTC"),
ip.ip
));
err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
}
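
The `let steps = !CONFIG.authenticator_disable_time_drift() as i64;` change above folds the old if/else into a cast: the negated flag is true (1) when drift is allowed and false (0) when it is not, and the `-steps..=steps` loop then checks either just the current 30-second time step or the previous, current and next one. A minimal sketch of that equivalence (the function name is illustrative):

// Sketch: casting the negated flag reproduces the old `if disabled { 0 } else { 1 }`.
fn totp_steps(disable_time_drift: bool) -> i64 {
    !disable_time_drift as i64
}

fn main() {
    assert_eq!(totp_steps(true), 0);  // only the current time step is accepted
    assert_eq!(totp_steps(false), 1); // one step of drift either way is accepted

    let steps = totp_steps(false);
    for step in -steps..=steps {
        println!("checking TOTP at time-step offset {}", step);
    }
}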

@ -60,7 +60,11 @@ impl DuoData {
ik.replace_range(digits.., replaced);
sk.replace_range(digits.., replaced);
Self { host, ik, sk }
Self {
host,
ik,
sk,
}
}
}
@ -200,7 +204,8 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em
let client = get_reqwest_client();
client.request(m, &url)
client
.request(m, &url)
.basic_auth(username, Some(password))
.header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)")
.header(header::DATE, date)

@ -125,11 +125,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -
let twofactor_data = EmailTokenData::new(data.Email, generated_token);
// Uses EmailVerificationChallenge as type to show that it's not verified yet.
let twofactor = TwoFactor::new(
user.uuid,
TwoFactorType::EmailVerificationChallenge,
twofactor_data.to_json(),
);
let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
twofactor.save(&conn)?;
mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;
@ -186,7 +182,8 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
/// Validate the email code when used as TwoFactor token mechanism
pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
let mut email_data = EmailTokenData::from_json(&data)?;
let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?;
let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)
.map_res("Two factor not found")?;
let issued_token = match &email_data.last_token {
Some(t) => t,
_ => err!("No token available"),

@ -20,13 +20,7 @@ pub mod u2f;
pub mod yubikey;
pub fn routes() -> Vec<Route> {
let mut routes = routes![
get_twofactor,
get_recover,
recover,
disable_twofactor,
disable_twofactor_put,
];
let mut routes = routes![get_twofactor, get_recover, recover, disable_twofactor, disable_twofactor_put,];
routes.append(&mut authenticator::routes());
routes.append(&mut duo::routes());

@ -28,13 +28,7 @@ static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.dom
static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone()));
pub fn routes() -> Vec<Route> {
routes![
generate_u2f,
generate_u2f_challenge,
activate_u2f,
activate_u2f_put,
delete_u2f,
]
routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,]
}
#[post("/two-factor/get-u2f", data = "<data>")]
@ -161,10 +155,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?;
let error_code = response
.error_code
.clone()
.map_or("0".into(), NumberOrString::into_string);
let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string);
if error_code != "0" {
err!("Error registering U2F token")
@ -300,20 +291,13 @@ fn _old_parse_registrations(registations: &str) -> Vec<Registration> {
let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data");
regs.into_iter()
.map(|r| serde_json::from_value(r).unwrap())
.map(|Helper(r)| r)
.collect()
regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect()
}
pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> {
let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn);
let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?
.1
.into_iter()
.map(|r| r.reg)
.collect();
let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect();
if registrations.is_empty() {
err!("No U2F devices registered")

@ -12,7 +12,11 @@ use regex::Regex;
use reqwest::{blocking::Client, blocking::Response, header, Url};
use rocket::{http::ContentType, http::Cookie, response::Content, Route};
use crate::{error::Error, util::{Cached, get_reqwest_client_builder}, CONFIG};
use crate::{
error::Error,
util::{get_reqwest_client_builder, Cached},
CONFIG,
};
pub fn routes() -> Vec<Route> {
routes![icon]
@ -25,7 +29,12 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8"));
default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache"));
default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache"));
default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8"));
default_headers.insert(
header::ACCEPT,
header::HeaderValue::from_static(
"text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8",
),
);
// Reuse the client between requests
get_reqwest_client_builder()
@ -49,13 +58,16 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
if !is_valid_domain(&domain) {
warn!("Invalid domain: {}", domain);
return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl());
return Cached::ttl(
Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
CONFIG.icon_cache_negttl(),
);
}
match get_icon(&domain) {
Some((icon, icon_type)) => {
Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
},
}
_ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
}
}
@ -77,7 +89,10 @@ fn is_valid_domain(domain: &str) -> bool {
|| domain.starts_with('-')
|| domain.ends_with('-')
{
debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain);
debug!(
"Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'",
domain
);
return false;
} else if domain.len() > 255 {
debug!("Domain validation error: '{}' exceeds 255 characters", domain);
@ -338,12 +353,20 @@ struct Icon {
impl Icon {
const fn new(priority: u8, href: String) -> Self {
Self { href, priority }
Self {
href,
priority,
}
}
}
fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &Url) {
if let markup5ever_rcdom::NodeData::Element { name, attrs, .. } = &node.data {
if let markup5ever_rcdom::NodeData::Element {
name,
attrs,
..
} = &node.data
{
if name.local.as_ref() == "link" {
let mut has_rel = false;
let mut href = None;
@ -354,7 +377,8 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
let attr_name = attr.name.local.as_ref();
let attr_value = attr.value.as_ref();
if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) {
if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value)
{
has_rel = true;
} else if attr_name == "href" {
href = Some(attr_value);
@ -683,6 +707,6 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
[82, 73, 70, 70, ..] => Some("webp"),
[255, 216, 255, ..] => Some("jpeg"),
[66, 77, ..] => Some("bmp"),
_ => None
_ => None,
}
}
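
get_icon_type above sniffs the image type from the file's leading magic bytes using slice patterns: RIFF (82 73 70 70) is treated as webp, FF D8 FF as jpeg, and "BM" (66 77) as bmp. A self-contained sketch of the same pattern:

// Sketch mirroring the slice-pattern sniffing above.
fn sniff(bytes: &[u8]) -> Option<&'static str> {
    match bytes {
        [82, 73, 70, 70, ..] => Some("webp"), // "RIFF"
        [255, 216, 255, ..] => Some("jpeg"),
        [66, 77, ..] => Some("bmp"), // "BM"
        _ => None,
    }
}

fn main() {
    assert_eq!(sniff(b"BM\x00\x00\x00"), Some("bmp"));
    assert_eq!(sniff(&[0xFF, 0xD8, 0xFF, 0xE0]), Some("jpeg"));
    assert_eq!(sniff(&[0x00]), None);
}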

@ -88,34 +88,28 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
let username = data.username.as_ref().unwrap();
let user = match User::find_by_mail(username, &conn) {
Some(user) => user,
None => err!(
"Username or password is incorrect. Try again",
format!("IP: {}. Username: {}.", ip.ip, username)
),
None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)),
};
// Check password
let password = data.password.as_ref().unwrap();
if !user.check_valid_password(password) {
err!(
"Username or password is incorrect. Try again",
format!("IP: {}. Username: {}.", ip.ip, username)
)
err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
}
// Check if the user is disabled
if !user.enabled {
err!(
"This user has been disabled",
format!("IP: {}. Username: {}.", ip.ip, username)
)
err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username))
}
let now = Local::now();
if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
let now = now.naive_utc();
if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 {
if user.last_verifying_at.is_none()
|| now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds()
> CONFIG.signups_verify_resend_time() as i64
{
let resend_limit = CONFIG.signups_verify_resend_limit() as i32;
if resend_limit == 0 || user.login_verify_count < resend_limit {
// We want to send another email verification if we require signups to verify
@ -135,10 +129,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
}
// We still want the login to fail until they actually verified the email address
err!(
"Please verify your email before trying again.",
format!("IP: {}. Username: {}.", ip.ip, username)
)
err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username))
}
let (mut device, new_device) = get_device(&data, &conn, &user);
@ -236,9 +227,7 @@ fn twofactor_auth(
None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"),
};
let selected_twofactor = twofactors
.into_iter()
.find(|tf| tf.atype == selected_id && tf.enabled);
let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled);
use crate::api::core::two_factor as _tf;
use crate::crypto::ct_eq;
@ -247,18 +236,26 @@ fn twofactor_auth(
let mut remember = data.two_factor_remember.unwrap_or(0);
match TwoFactorType::from_i32(selected_id) {
Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?,
Some(TwoFactorType::Authenticator) => {
_tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?
}
Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
Some(TwoFactorType::Duo) => {
_tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?
}
Some(TwoFactorType::Email) => {
_tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?
}
Some(TwoFactorType::Remember) => {
match device.twofactor_remember {
Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => {
remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time
}
_ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"),
_ => {
err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided")
}
}
}
_ => err!("Invalid two factor provider"),

@ -55,9 +55,9 @@ impl NumberOrString {
use std::num::ParseIntError as PIE;
match self {
NumberOrString::Number(n) => Ok(n),
NumberOrString::String(s) => s
.parse()
.map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())),
NumberOrString::String(s) => {
s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string()))
}
}
}
}

@ -4,12 +4,7 @@ use rocket::Route;
use rocket_contrib::json::Json;
use serde_json::Value as JsonValue;
use crate::{
api::EmptyResult,
auth::Headers,
db::DbConn,
Error, CONFIG,
};
use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG};
pub fn routes() -> Vec<Route> {
routes![negotiate, websockets_err]
@ -19,12 +14,16 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true);
#[get("/hub")]
fn websockets_err() -> EmptyResult {
if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() {
err!("
if CONFIG.websocket_enabled()
&& SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok()
{
err!(
"
###########################################################
'/notifications/hub' should be proxied to the websocket server or notifications won't work.
Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false.
###########################################################################################\n")
###########################################################################################\n"
)
} else {
Err(Error::empty())
}
@ -204,9 +203,7 @@ impl Handler for WsHandler {
let handler_insert = self.out.clone();
let handler_update = self.out.clone();
self.users
.map
.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
// Schedule a ping to keep the connection alive
self.out.timeout(PING_MS, PING)
@ -216,7 +213,11 @@ impl Handler for WsHandler {
if let Message::Text(text) = msg.clone() {
let json = &text[..text.len() - 1]; // Remove last char
if let Ok(InitialMessage { protocol, version }) = from_str::<InitialMessage>(json) {
if let Ok(InitialMessage {
protocol,
version,
}) = from_str::<InitialMessage>(json)
{
if &protocol == "messagepack" && version == 1 {
return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message
}
@ -295,10 +296,7 @@ impl WebSocketUsers {
// NOTE: The last modified date needs to be updated before calling these methods
pub fn send_user_update(&self, ut: UpdateType, user: &User) {
let data = create_update(
vec![
("UserId".into(), user.uuid.clone().into()),
("Date".into(), serialize_date(user.updated_at)),
],
vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))],
ut,
);

@ -83,11 +83,15 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
"hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
"bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
"bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))),
"bootstrap-native.js" => {
Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js")))
}
"identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
"datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
"datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
"jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))),
"jquery-3.5.1.slim.js" => {
Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js")))
}
_ => err!(format!("Static file not found: {}", filename)),
}
}

@ -223,10 +223,9 @@ use crate::db::{
};
pub struct Host {
pub host: String
pub host: String,
}
impl<'a, 'r> FromRequest<'a, 'r> for Host {
type Error = &'static str;
@ -261,7 +260,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for Host {
format!("{}://{}", protocol, host)
};
Outcome::Success(Host { host })
Outcome::Success(Host {
host,
})
}
}
@ -317,10 +318,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
};
if user.security_stamp != claims.sstamp {
if let Some(stamp_exception) = user
.stamp_exception
.as_deref()
.and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
if let Some(stamp_exception) =
user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
{
let current_route = match request.route().and_then(|r| r.name) {
Some(name) => name,
@ -338,7 +337,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
}
}
Outcome::Success(Headers { host, device, user })
Outcome::Success(Headers {
host,
device,
user,
})
}
}
@ -506,7 +509,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders {
};
if !headers.org_user.has_full_access() {
match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) {
match CollectionUser::find_by_collection_and_user(
&col_id,
&headers.org_user.user_uuid,
&conn,
) {
Some(_) => (),
None => err_handler!("The current user isn't a manager for this collection"),
}
@ -636,10 +643,10 @@ impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
None
};
let ip = ip
.or_else(|| req.remote().map(|r| r.ip()))
.unwrap_or_else(|| "0.0.0.0".parse().unwrap());
let ip = ip.or_else(|| req.remote().map(|r| r.ip())).unwrap_or_else(|| "0.0.0.0".parse().unwrap());
Outcome::Success(ClientIp { ip })
Outcome::Success(ClientIp {
ip,
})
}
}

@ -527,10 +527,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
let limit = 256;
if cfg.database_max_conns < 1 || cfg.database_max_conns > limit {
err!(format!(
"`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.",
limit,
));
err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", limit,));
}
let dom = cfg.domain.to_lowercase();
@ -871,9 +868,7 @@ fn case_helper<'reg, 'rc>(
rc: &mut RenderContext<'reg, 'rc>,
out: &mut dyn Output,
) -> HelperResult {
let param = h
.param(0)
.ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
let value = param.value().clone();
if h.params().iter().skip(1).any(|x| x.value() == &value) {
@ -890,18 +885,12 @@ fn js_escape_helper<'reg, 'rc>(
_rc: &mut RenderContext<'reg, 'rc>,
out: &mut dyn Output,
) -> HelperResult {
let param = h
.param(0)
.ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
let no_quote = h
.param(1)
.is_some();
let value = param
.value()
.as_str()
.ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
let no_quote = h.param(1).is_some();
let value =
param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
if !no_quote {

@ -47,9 +47,7 @@ pub fn get_random_64() -> Vec<u8> {
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
use ring::rand::{SecureRandom, SystemRandom};
SystemRandom::new()
.fill(&mut array)
.expect("Error generating random values");
SystemRandom::new().fill(&mut array).expect("Error generating random values");
array
}

@ -23,7 +23,6 @@ pub mod __mysql_schema;
#[path = "schemas/postgresql/schema.rs"]
pub mod __postgresql_schema;
// This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported
macro_rules! generate_connections {
( $( $name:ident: $ty:ty ),+ ) => {
@ -108,7 +107,6 @@ impl DbConnType {
}
}
#[macro_export]
macro_rules! db_run {
// Same for all dbs
@ -154,7 +152,6 @@ macro_rules! db_run {
};
}
pub trait FromDb {
type Output;
#[allow(clippy::wrong_self_convention)]
@ -239,7 +236,6 @@ pub fn backup_database(conn: &DbConn) -> Result<(), Error> {
Ok(())
}
/// Get the SQL Server version
pub fn get_sql_server_version(conn: &DbConn) -> String {
db_run! {@raw conn:
@ -292,8 +288,7 @@ mod sqlite_migrations {
use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection =
diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration
// Scoped to a connection.
@ -303,9 +298,7 @@ mod sqlite_migrations {
// Turn on WAL in SQLite
if crate::CONFIG.enable_db_wal() {
diesel::sql_query("PRAGMA journal_mode=wal")
.execute(&connection)
.expect("Failed to turn on WAL");
diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL");
}
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
@ -321,8 +314,7 @@ mod mysql_migrations {
pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection =
diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration
// Scoped to a connection/session.
@ -343,8 +335,7 @@ mod postgresql_migrations {
pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection =
diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration
// FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html,

@ -59,7 +59,6 @@ use crate::error::MapResult;
/// Database methods
impl Attachment {
pub fn save(&self, conn: &DbConn) -> EmptyResult {
db_run! { conn:
sqlite, mysql {

@ -4,14 +4,7 @@ use serde_json::Value;
use crate::CONFIG;
use super::{
Attachment,
CollectionCipher,
Favorite,
FolderCipher,
Organization,
User,
UserOrgStatus,
UserOrgType,
Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType,
UserOrganization,
};
@ -93,15 +86,15 @@ impl Cipher {
};
let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let password_history_json =
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let (read_only, hide_passwords) =
match self.get_access_restrictions(&user_uuid, conn) {
let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) {
Some((ro, hp)) => (ro, hp),
None => {
error!("Cipher ownership assertion failure");
(true, true)
},
}
};
// Get the type_data or a default to an empty json object '{}'.
@ -197,9 +190,7 @@ impl Cipher {
None => {
// Belongs to Organization, need to update affected users
if let Some(ref org_uuid) = self.organization_uuid {
UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn)
.iter()
.for_each(|user_org| {
UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn);
user_uuids.push(user_org.user_uuid.clone())
});

@ -1,6 +1,6 @@
use serde_json::Value;
use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher};
use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization};
db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
@ -127,9 +127,7 @@ impl Collection {
}
pub fn update_users_revision(&self, conn: &DbConn) {
UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn)
.iter()
.for_each(|user_org| {
UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn);
});
}
@ -170,10 +168,7 @@ impl Collection {
}
pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> {
Self::find_by_user_uuid(user_uuid, conn)
.into_iter()
.filter(|c| c.org_uuid == org_uuid)
.collect()
Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect()
}
pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
@ -284,7 +279,13 @@ impl CollectionUser {
}}
}
pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult {
pub fn save(
user_uuid: &str,
collection_uuid: &str,
read_only: bool,
hide_passwords: bool,
conn: &DbConn,
) -> EmptyResult {
User::update_uuid_revision(&user_uuid, conn);
db_run! { conn:
@ -374,9 +375,7 @@ impl CollectionUser {
}
pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
CollectionUser::find_by_collection(&collection_uuid, conn)
.iter()
.for_each(|collection| {
CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| {
User::update_uuid_revision(&collection.user_uuid, conn);
});

@ -59,7 +59,7 @@ impl Favorite {
}}
}
// Otherwise, the favorite status is already what it should be.
_ => Ok(())
_ => Ok(()),
}
}

@ -109,7 +109,6 @@ impl Folder {
User::update_uuid_revision(&self.user_uuid, conn);
FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;
db_run! { conn: {
diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid)))
.execute(conn)

@ -6,9 +6,9 @@ mod favorite;
mod folder;
mod org_policy;
mod organization;
mod send;
mod two_factor;
mod user;
mod send;
pub use self::attachment::Attachment;
pub use self::cipher::Cipher;
@ -18,6 +18,6 @@ pub use self::favorite::Favorite;
pub use self::folder::{Folder, FolderCipher};
pub use self::org_policy::{OrgPolicy, OrgPolicyType};
pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
pub use self::send::{Send, SendType};
pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::user::{Invitation, User, UserStampException};
pub use self::send::{Send, SendType};

@ -4,7 +4,7 @@ use crate::api::EmptyResult;
use crate::db::DbConn;
use crate::error::MapResult;
use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType};
use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
@ -20,8 +20,7 @@ db_object! {
}
}
#[derive(Copy, Clone)]
#[derive(num_derive::FromPrimitive)]
#[derive(Copy, Clone, num_derive::FromPrimitive)]
pub enum OrgPolicyType {
TwoFactorAuthentication = 0,
MasterPassword = 1,
@ -175,7 +174,8 @@ impl OrgPolicy {
/// and the user is not an owner or admin of that org. This is only useful for checking
/// applicability of policy types that have these particular semantics.
pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool {
for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only.
// Returns confirmed users only.
for policy in OrgPolicy::find_by_user(user_uuid, conn) {
if policy.enabled && policy.has_type(policy_type) {
let org_uuid = &policy.org_uuid;
if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) {

@ -1,8 +1,8 @@
use num_traits::FromPrimitive;
use serde_json::Value;
use std::cmp::Ordering;
use num_traits::FromPrimitive;
use super::{CollectionUser, User, OrgPolicy};
use super::{CollectionUser, OrgPolicy, User};
db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
@ -35,8 +35,7 @@ pub enum UserOrgStatus {
Confirmed = 2,
}
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(num_derive::FromPrimitive)]
#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)]
pub enum UserOrgType {
Owner = 0,
Admin = 1,
@ -190,9 +189,7 @@ use crate::error::MapResult;
/// Database methods
impl Organization {
pub fn save(&self, conn: &DbConn) -> EmptyResult {
UserOrganization::find_by_org(&self.uuid, conn)
.iter()
.for_each(|user_org| {
UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn);
});
@ -236,7 +233,6 @@ impl Organization {
UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;
db_run! { conn: {
diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
.execute(conn)
@ -347,11 +343,13 @@ impl UserOrganization {
let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
collections
.iter()
.map(|c| json!({
.map(|c| {
json!({
"Id": c.collection_uuid,
"ReadOnly": c.read_only,
"HidePasswords": c.hide_passwords,
}))
})
})
.collect()
};
@ -446,8 +444,7 @@ impl UserOrganization {
}
pub fn has_full_access(&self) -> bool {
(self.access_all || self.atype >= UserOrgType::Admin) &&
self.has_status(UserOrgStatus::Confirmed)
(self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed)
}
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {

@ -64,7 +64,7 @@ enum UserStatus {
#[derive(Serialize, Deserialize)]
pub struct UserStampException {
pub route: String,
pub security_stamp: String
pub security_stamp: String,
}
/// Local methods
@ -162,7 +162,7 @@ impl User {
pub fn set_stamp_exception(&mut self, route_exception: &str) {
let stamp_exception = UserStampException {
route: route_exception.to_string(),
security_stamp: self.security_stamp.to_string()
security_stamp: self.security_stamp.to_string(),
};
self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default());
}
@ -341,14 +341,16 @@ impl User {
pub fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> {
match Device::find_latest_active_by_user(&self.uuid, conn) {
Some(device) => Some(device.updated_at),
None => None
None => None,
}
}
}
impl Invitation {
pub const fn new(email: String) -> Self {
Self { email }
Self {
email,
}
}
pub fn save(&self, conn: &DbConn) -> EmptyResult {

@ -33,10 +33,10 @@ macro_rules! make_error {
};
}
use diesel::r2d2::PoolError as R2d2Err;
use diesel::result::Error as DieselErr;
use diesel::ConnectionError as DieselConErr;
use diesel_migrations::RunMigrationsError as DieselMigErr;
use diesel::r2d2::PoolError as R2d2Err;
use handlebars::RenderError as HbErr;
use jsonwebtoken::errors::Error as JwtErr;
use regex::Error as RegexErr;
@ -198,11 +198,7 @@ impl<'r> Responder<'r> for Error {
let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);
Response::build()
.status(code)
.header(ContentType::JSON)
.sized_body(Cursor::new(format!("{}", self)))
.ok()
Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok()
}
}

@ -1,4 +1,4 @@
use std::{str::FromStr};
use std::str::FromStr;
use chrono::{DateTime, Local};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
@ -62,11 +62,13 @@ fn mailer() -> SmtpTransport {
let mut selected_mechanisms = vec![];
for wanted_mechanism in mechanism.split(',') {
for m in &allowed_mechanisms {
if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() {
if m.to_string().to_lowercase()
== wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase()
{
selected_mechanisms.push(*m);
}
}
};
}
if !selected_mechanisms.is_empty() {
smtp_client.authentication(selected_mechanisms)
@ -318,22 +320,14 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String
let email = Message::builder()
.message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1])))
.to(Mailbox::new(None, Address::from_str(&address)?))
.from(Mailbox::new(
Some(CONFIG.smtp_from_name()),
Address::from_str(smtp_from)?,
))
.from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?))
.subject(subject)
.multipart(
MultiPart::alternative()
.singlepart(text)
.singlepart(html)
)?;
.multipart(MultiPart::alternative().singlepart(text).singlepart(html))?;
match mailer().send(&email) {
Ok(_) => Ok(()),
// Match some common errors and make them more user friendly
Err(e) => {
if e.is_client() {
err!(format!("SMTP Client error: {}", e));
} else if e.is_transient() {

@ -16,7 +16,7 @@ extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use job_scheduler::{JobScheduler, Job};
use job_scheduler::{Job, JobScheduler};
use std::{
fs::create_dir_all,
panic,
@ -127,7 +127,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
// Enable smtp debug logging only specifically for smtp when needed.
// This can contain sensitive information we do not want in the default debug/trace logging.
if CONFIG.smtp_debug() {
println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!");
println!(
"[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"
);
println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n");
logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug)
} else {
@ -298,7 +300,10 @@ fn check_web_vault() {
let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");
if !index_path.exists() {
error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder());
error!(
"Web vault is not found at '{}'. To install it, please follow the steps in: ",
CONFIG.web_vault_folder()
);
error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
exit(1);
@ -344,7 +349,9 @@ fn schedule_jobs(pool: db::DbPool) {
info!("Job scheduler disabled.");
return;
}
thread::Builder::new().name("job-scheduler".to_string()).spawn(move || {
thread::Builder::new()
.name("job-scheduler".to_string())
.spawn(move || {
let mut sched = JobScheduler::new();
// Purge sends that are past their deletion date.
@ -370,5 +377,6 @@ fn schedule_jobs(pool: db::DbPool) {
sched.tick();
thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms()));
}
}).expect("Error spawning job scheduler thread");
})
.expect("Error spawning job scheduler thread");
}

@ -28,7 +28,10 @@ impl Fairing for AppHeaders {
res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
res.set_raw_header("X-Content-Type-Options", "nosniff");
res.set_raw_header("X-XSS-Protection", "1; mode=block");
let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors());
let csp = format!(
"frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};",
CONFIG.allowed_iframe_ancestors()
);
res.set_raw_header("Content-Security-Policy", csp);
// Disable cache unless otherwise specified
@ -124,14 +127,8 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
// Log all the routes from the main paths list, and the attachments endpoint
// Effectively ignores, any static file route, and the alive endpoint
const LOGGED_ROUTES: [&str; 6] = [
"/api",
"/admin",
"/identity",
"/icons",
"/notifications/hub/negotiate",
"/attachments",
];
const LOGGED_ROUTES: [&str; 6] =
["/api", "/admin", "/identity", "/icons", "/notifications/hub/negotiate", "/attachments"];
// Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts
pub struct BetterLogging(pub bool);
@ -158,7 +155,11 @@ impl Fairing for BetterLogging {
}
let config = rocket.config();
let scheme = if config.tls_enabled() { "https" } else { "http" };
let scheme = if config.tls_enabled() {
"https"
} else {
"http"
};
let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
info!(target: "start", "Rocket has launched from {}", addr);
}
@ -293,8 +294,7 @@ where
use std::env;
pub fn get_env_str_value(key: &str) -> Option<String>
{
pub fn get_env_str_value(key: &str) -> Option<String> {
let key_file = format!("{}_FILE", key);
let value_from_env = env::var(key);
let value_file = env::var(&key_file);
@ -304,9 +304,9 @@ pub fn get_env_str_value(key: &str) -> Option<String>
(Ok(v_env), Err(_)) => Some(v_env),
(Err(_), Ok(v_file)) => match fs::read_to_string(v_file) {
Ok(content) => Some(content.trim().to_string()),
Err(e) => panic!("Failed to load {}: {:?}", key, e)
Err(e) => panic!("Failed to load {}: {:?}", key, e),
},
_ => None
_ => None,
}
}
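
get_env_str_value above looks up KEY directly and falls back to reading the file named by KEY_FILE, the usual pattern for Docker/Kubernetes secrets. A simplified, self-contained sketch of the same lookup (the real helper trims the file contents and panics if the file cannot be read):

// Simplified sketch of the KEY / KEY_FILE lookup pattern.
use std::{env, fs};

fn env_or_file(key: &str) -> Option<String> {
    match env::var(key) {
        Ok(value) => Some(value),
        Err(_) => {
            let path = env::var(format!("{}_FILE", key)).ok()?;
            fs::read_to_string(path).ok().map(|s| s.trim().to_string())
        }
    }
}

fn main() {
    // e.g. with ADMIN_TOKEN_FILE=/run/secrets/admin_token set, the value is read from disk.
    println!("{:?}", env_or_file("ADMIN_TOKEN"));
}
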
@ -523,7 +523,10 @@ where
}
}
use reqwest::{blocking::{Client, ClientBuilder}, header};
use reqwest::{
blocking::{Client, ClientBuilder},
header,
};
pub fn get_reqwest_client() -> Client {
get_reqwest_client_builder().build().expect("Failed to build client")
@ -532,8 +535,5 @@ pub fn get_reqwest_client() -> Client {
pub fn get_reqwest_client_builder() -> ClientBuilder {
let mut headers = header::HeaderMap::new();
headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
Client::builder()
.default_headers(headers)
.timeout(Duration::from_secs(10))
Client::builder().default_headers(headers).timeout(Duration::from_secs(10))
}
