Merge branch 'main' into register_verify_email

register_verify_email
Daniel García committed 1 week ago (via GitHub)
commit 1fa4901c35
GPG signature: no known key found for this signature in database (GPG Key ID: B5690EEEBB952194)

@@ -411,6 +411,14 @@
 ## Multiple values must be separated with a whitespace.
 # ALLOWED_IFRAME_ANCESTORS=
+## Allowed connect-src (Know the risks!)
+## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/connect-src
+## Allows other domains to URLs which can be loaded using script interfaces like the Forwarded email alias feature
+## This adds the configured value to the 'Content-Security-Policy' headers 'connect-src' value.
+## Multiple values must be separated with a whitespace. And only HTTPS values are allowed.
+## Example: "https://my-addy-io.domain.tld https://my-simplelogin.domain.tld"
+# ALLOWED_CONNECT_SRC=""
 ## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in.
 # LOGIN_RATELIMIT_SECONDS=60
 ## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`.
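Aside: a minimal sketch, assuming a hypothetical `build_connect_src` helper, of how a whitespace-separated `ALLOWED_CONNECT_SRC` value could be folded into a Content-Security-Policy `connect-src` directive while keeping only HTTPS origins, as the template comments above describe. This is illustrative only, not Vaultwarden's actual CSP-building code.

```rust
// Illustrative sketch only, not Vaultwarden's actual code: fold a whitespace-separated
// ALLOWED_CONNECT_SRC value into a Content-Security-Policy connect-src directive,
// keeping only https:// origins as the template comments require.
fn build_connect_src(allowed_connect_src: &str) -> String {
    let mut directive = String::from("connect-src 'self'");
    for origin in allowed_connect_src.split_whitespace() {
        if origin.starts_with("https://") {
            directive.push(' ');
            directive.push_str(origin);
        }
    }
    directive
}

fn main() {
    let header = build_connect_src("https://my-addy-io.domain.tld https://my-simplelogin.domain.tld");
    assert_eq!(
        header,
        "connect-src 'self' https://my-addy-io.domain.tld https://my-simplelogin.domain.tld"
    );
}
```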

@@ -47,7 +47,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: "Checkout"
-uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
 # End Checkout the repo
@@ -75,7 +75,7 @@ jobs:
 # Only install the clippy and rustfmt components on the default rust-toolchain
 - name: "Install rust-toolchain version"
-uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
+uses: dtolnay/rust-toolchain@315e265cd78dad1e1dcf3a5074f6d6c47029d5aa # master @ Nov 18, 2024, 5:36 AM GMT+1
 if: ${{ matrix.channel == 'rust-toolchain' }}
 with:
 toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -85,7 +85,7 @@ jobs:
 # Install the any other channel to be used for which we do not execute clippy and rustfmt
 - name: "Install MSRV version"
-uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
+uses: dtolnay/rust-toolchain@315e265cd78dad1e1dcf3a5074f6d6c47029d5aa # master @ Nov 18, 2024, 5:36 AM GMT+1
 if: ${{ matrix.channel != 'rust-toolchain' }}
 with:
 toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -107,7 +107,7 @@ jobs:
 # End Show environment
 # Enable Rust Caching
-- uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
+- uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2.7.5
 with:
 # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
 # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
@@ -117,6 +117,12 @@ jobs:
 # Run cargo tests
 # First test all features together, afterwards test them separately.
+- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
+id: test_sqlite_mysql_postgresql_mimalloc_logger
+if: $${{ always() }}
+run: |
+cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
 - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
 id: test_sqlite_mysql_postgresql_mimalloc
 if: $${{ always() }}
@@ -176,6 +182,7 @@ jobs:
 echo "" >> $GITHUB_STEP_SUMMARY
 echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
 echo "|---|------|" >> $GITHUB_STEP_SUMMARY
+echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
 echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
 echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
 echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY

@@ -13,7 +13,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
 # End Checkout the repo
 # Start Docker Buildx

@@ -58,7 +58,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
 with:
 fetch-depth: 0

@@ -28,10 +28,13 @@ jobs:
 actions: read
 steps:
 - name: Checkout code
-uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
 - name: Run Trivy vulnerability scanner
-uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0
+uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0
+env:
+TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
+TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
 with:
 scan-type: repo
 ignore-unfixed: true
@@ -40,6 +43,6 @@ jobs:
 severity: CRITICAL,HIGH
 - name: Upload Trivy scan results to GitHub Security tab
-uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6
+uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5
 with:
 sarif_file: 'trivy-results.sarif'

Cargo.lock (generated, 604 lines changed): file diff suppressed because it is too large.

@@ -3,7 +3,7 @@ name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.80.0"
+rust-version = "1.82.0"
 resolver = "2"
 repository = "https://github.com/dani-garcia/vaultwarden"
@@ -36,13 +36,13 @@ unstable = []
 [target."cfg(unix)".dependencies]
 # Logging
-syslog = "6.1.1"
+syslog = "7.0.0"
 [dependencies]
 # Logging
 log = "0.4.22"
-fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] }
+fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
-tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
+tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }
@@ -53,7 +53,7 @@ once_cell = "1.20.2"
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
-bigdecimal = "0.4.6"
+bigdecimal = "0.4.7"
 # Web framework
 rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -67,16 +67,16 @@ dashmap = "6.1.0"
 # Async futures
 futures = "0.3.31"
-tokio = { version = "1.41.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
+tokio = { version = "1.42.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
 # A generic serialization/deserialization framework
-serde = { version = "1.0.214", features = ["derive"] }
+serde = { version = "1.0.216", features = ["derive"] }
-serde_json = "1.0.132"
+serde_json = "1.0.133"
 # A safe, extensible ORM and Query builder
-diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] }
+diesel = { version = "2.2.6", features = ["chrono", "r2d2", "numeric"] }
 diesel_migrations = "2.2.0"
-diesel_logger = { version = "0.3.0", optional = true }
+diesel_logger = { version = "0.4.0", optional = true }
 # Bundled/Static SQLite
 libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true }
@@ -89,9 +89,9 @@ ring = "0.17.8"
 uuid = { version = "1.11.0", features = ["v4"] }
 # Date and time libraries
-chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false }
+chrono = { version = "0.4.39", features = ["clock", "serde"], default-features = false }
 chrono-tz = "0.10.0"
-time = "0.3.36"
+time = "0.3.37"
 # Job scheduler
 job_scheduler_ng = "2.0.5"
@@ -106,16 +106,16 @@ jsonwebtoken = "9.3.0"
 totp-lite = "2.0.1"
 # Yubico Library
-yubico = { version = "0.11.0", features = ["online-tokio"], default-features = false }
+yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false }
 # WebAuthn libraries
 webauthn-rs = "0.3.2"
 # Handling of URL's for WebAuthn and favicons
-url = "2.5.3"
+url = "2.5.4"
 # Email libraries
-lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"
@@ -124,13 +124,13 @@ handlebars = { version = "6.2.0", features = ["dir_source"] }
 # HTTP client (Used for favicons, version check, DUO and HIBP API)
 reqwest = { version = "0.12.9", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
-hickory-resolver = "0.24.1"
+hickory-resolver = "0.24.2"
 # Favicon extraction libraries
-html5gum = "0.6.1"
+html5gum = "0.7.0"
 regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.8.0"
+bytes = "1.9.0"
 # Cache function results (Used for version check and favicon fetching)
 cached = { version = "0.54.0", features = ["async"] }
@@ -147,10 +147,10 @@ pico-args = "0.5.0"
 # Macro ident concatenation
 paste = "1.0.15"
-governor = "0.7.0"
+governor = "0.8.0"
 # Check client versions for specific features.
-semver = "1.0.23"
+semver = "1.0.24"
 # Allow overriding the default memory allocator
 # Mainly used for the musl builds, since the default musl malloc is very slow
@@ -166,6 +166,10 @@ rpassword = "7.3.1"
 # Loading a dynamic CSS Stylesheet
 grass_compiler = { version = "0.13.4", default-features = false }
+[patch.crates-io]
+# Patch yubico to remove duplicate crates of older versions
+yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" }
 # Strip debuginfo from the release builds
 # The symbols are the provide better panic traces
 # Also enable fat LTO and use 1 codegen unit for optimizations
@@ -216,7 +220,8 @@ noop_method_call = "deny"
 refining_impl_trait = { level = "deny", priority = -1 }
 rust_2018_idioms = { level = "deny", priority = -1 }
 rust_2021_compatibility = { level = "deny", priority = -1 }
-# rust_2024_compatibility = { level = "deny", priority = -1 } # Enable once we are at MSRV 1.81.0
+rust_2024_compatibility = { level = "deny", priority = -1 }
+edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
 single_use_lifetimes = "deny"
 trivial_casts = "deny"
 trivial_numeric_casts = "deny"
@@ -225,9 +230,6 @@ unused_import_braces = "deny"
 unused_lifetimes = "deny"
 unused_qualifications = "deny"
 variant_size_differences = "deny"
-# The lints below are part of the rust_2024_compatibility group
-static-mut-refs = "deny"
-unsafe-op-in-unsafe-fn = "deny"
 # https://rust-lang.github.io/rust-clippy/stable/index.html
 [lints.clippy]

@@ -21,7 +21,7 @@ notify us. We welcome working with you to resolve the issue promptly. Thanks in
 The following bug classes are out-of scope:
 - Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
-- Bugs that are not part of Vaultwarden, like on the the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
+- Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
 - Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
 - Attacks requiring physical access to a user's device
 - Issues related to software or protocols not under Vaultwarden's control

@@ -5,9 +5,9 @@ vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf716
 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
 # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
 xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa"
-rust_version: 1.82.0 # Rust version to be used
+rust_version: 1.83.0 # Rust version to be used
 debian_version: bookworm # Debian release name to be used
-alpine_version: "3.20" # Alpine version to be used
+alpine_version: "3.21" # Alpine version to be used
 # For which platforms/architectures will we try to build images
 platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
 # Determine the build images per OS/Arch

@@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931
 ########################## ALPINE BUILD IMAGES ##########################
 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
 ## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.83.0 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.83.0 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.83.0 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.83.0 AS build_armv6
 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
@@ -126,7 +126,7 @@ RUN source /env-cargo && \
 # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
 #
 # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
-FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.20
+FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.21
 ENV ROCKET_PROFILE="release" \
 ROCKET_ADDRESS=0.0.0.0 \

@@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0d
 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.83.0-slim-bookworm AS build
 COPY --from=xx / /
 ARG TARGETARCH
 ARG TARGETVARIANT

@@ -46,7 +46,7 @@ There also is an option to use an other docker container to provide support for
 ```bash
 # To install and activate
 docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
-# To unistall
+# To uninstall
 docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
 ```

@@ -17,7 +17,7 @@ variable "SOURCE_REPOSITORY_URL" {
 default = null
 }
-// The commit hash of of the current commit this build was triggered on
+// The commit hash of the current commit this build was triggered on
 variable "SOURCE_COMMIT" {
 default = null
 }

@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.82.0"
+channel = "1.83.0"
 components = [ "rustfmt", "clippy" ]
 profile = "minimal"

@@ -62,6 +62,7 @@ pub fn routes() -> Vec<Route> {
 diagnostics,
 get_diagnostics_config,
 resend_user_invite,
+get_diagnostics_http,
 ]
 }
@@ -494,10 +495,10 @@ struct UserOrgTypeData {
 async fn update_user_org_type(data: Json<UserOrgTypeData>, token: AdminToken, mut conn: DbConn) -> EmptyResult {
 let data: UserOrgTypeData = data.into_inner();
-let mut user_to_edit =
-match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("The specified user isn't member of the organization"),
+let Some(mut user_to_edit) =
+UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await
+else {
+err!("The specified user isn't member of the organization")
 };
 let new_type = match UserOrgType::from_str(&data.user_type.into_string()) {
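The hunk above is the first of many in this merge that rewrite a `match ... { Some(x) => x, None => err!(...) }` binding into a `let ... else` block. A standalone sketch of the before/after shape follows; `find_user` and the error type are placeholders rather than Vaultwarden APIs, and `let ... else` has been stable since Rust 1.65, well within the new MSRV of 1.82.

```rust
// Minimal sketch of the match-to-let-else refactor used throughout this merge.
// `find_user` and the error type are placeholders, not Vaultwarden APIs.
fn find_user(id: u32) -> Option<&'static str> {
    (id == 1).then_some("alice")
}

// Before: bind via match, bailing out in the None arm.
fn lookup_match(id: u32) -> Result<&'static str, String> {
    let user = match find_user(id) {
        Some(user) => user,
        None => return Err("User doesn't exist".into()),
    };
    Ok(user)
}

// After: let-else keeps the happy path unindented; the else block must diverge.
fn lookup_let_else(id: u32) -> Result<&'static str, String> {
    let Some(user) = find_user(id) else {
        return Err("User doesn't exist".into());
    };
    Ok(user)
}

fn main() {
    assert_eq!(lookup_match(1), lookup_let_else(1));
    assert_eq!(lookup_match(2), lookup_let_else(2));
}
```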
@@ -601,9 +602,8 @@ async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
 }
 async fn has_http_access() -> bool {
-let req = match make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") {
-Ok(r) => r,
-Err(_) => return false,
+let Ok(req) = make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") else {
+return false;
 };
 match req.send().await {
 Ok(r) => r.status().is_success(),
@@ -713,6 +713,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
 "ip_header_name": ip_header_name,
 "ip_header_config": &CONFIG.ip_header(),
 "uses_proxy": uses_proxy,
+"enable_websocket": &CONFIG.enable_websocket(),
 "db_type": *DB_TYPE,
 "db_version": get_sql_server_version(&mut conn).await,
 "admin_url": format!("{}/diagnostics", admin_url()),
@@ -734,6 +735,11 @@ fn get_diagnostics_config(_token: AdminToken) -> Json<Value> {
 Json(support_json)
 }
+#[get("/diagnostics/http?<code>")]
+fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult {
+err_code!(format!("Testing error {code} response"), code);
+}
 #[post("/config", data = "<data>")]
 fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
 let data: ConfigBuilder = data.into_inner();
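The new `/admin/diagnostics/http?<code>` route above simply returns an error response with the requested status code, presumably so an admin can check how error responses are handled behind their setup. A standalone sketch of the idea, without Rocket or the real `err_code!` macro:

```rust
// Hedged sketch of the idea behind the new /admin/diagnostics/http?<code> route: hand back an
// error carrying an arbitrary status code so the response handling can be inspected.
// This stand-in does not use Rocket or Vaultwarden's err_code! macro.
fn diagnostics_http(code: u16) -> Result<(), (u16, String)> {
    Err((code, format!("Testing error {code} response")))
}

fn main() {
    let (status, msg) = diagnostics_http(503).unwrap_err();
    assert_eq!(status, 503);
    assert_eq!(msg, "Testing error 503 response");
}
```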

@@ -607,9 +607,8 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
 // Skip `null` folder id entries.
 // See: https://github.com/bitwarden/clients/issues/8453
 if let Some(folder_id) = folder_data.id {
-let saved_folder = match existing_folders.iter_mut().find(|f| f.uuid == folder_id) {
-Some(folder) => folder,
-None => err!("Folder doesn't exist"),
+let Some(saved_folder) = existing_folders.iter_mut().find(|f| f.uuid == folder_id) else {
+err!("Folder doesn't exist")
 };
 saved_folder.name = folder_data.name;
@@ -619,10 +618,10 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
 // Update emergency access data
 for emergency_access_data in data.emergency_access_keys {
-let saved_emergency_access =
-match existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id) {
-Some(emergency_access) => emergency_access,
-None => err!("Emergency access doesn't exist or is not owned by the user"),
+let Some(saved_emergency_access) =
+existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id)
+else {
+err!("Emergency access doesn't exist or is not owned by the user")
 };
 saved_emergency_access.key_encrypted = Some(emergency_access_data.key_encrypted);
@@ -631,10 +630,10 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
 // Update reset password data
 for reset_password_data in data.reset_password_keys {
-let user_org = match existing_user_orgs.iter_mut().find(|uo| uo.org_uuid == reset_password_data.organization_id)
-{
-Some(reset_password) => reset_password,
-None => err!("Reset password doesn't exist"),
+let Some(user_org) =
+existing_user_orgs.iter_mut().find(|uo| uo.org_uuid == reset_password_data.organization_id)
+else {
+err!("Reset password doesn't exist")
 };
 user_org.reset_password_key = Some(reset_password_data.reset_password_key);
@@ -643,9 +642,8 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
 // Update send data
 for send_data in data.sends {
-let send = match existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) {
-Some(send) => send,
-None => err!("Send doesn't exist"),
+let Some(send) = existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) else {
+err!("Send doesn't exist")
 };
 update_send_from_data(send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?;
@@ -656,9 +654,9 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
 for cipher_data in data.ciphers {
 if cipher_data.organization_id.is_none() {
-let saved_cipher = match existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap()) {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(saved_cipher) = existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap())
+else {
+err!("Cipher doesn't exist")
 };
 // Prevent triggering cipher updates via WebSockets by settings UpdateType::None
@@ -835,14 +833,12 @@ struct VerifyEmailTokenData {
 async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
 let data: VerifyEmailTokenData = data.into_inner();
-let mut user = match User::find_by_uuid(&data.user_id, &mut conn).await {
-Some(user) => user,
-None => err!("User doesn't exist"),
+let Some(mut user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
+err!("User doesn't exist")
 };
-let claims = match decode_verify_email(&data.token) {
-Ok(claims) => claims,
-Err(_) => err!("Invalid claim"),
+let Ok(claims) = decode_verify_email(&data.token) else {
+err!("Invalid claim")
 };
 if claims.sub != user.uuid {
 err!("Invalid claim");
@@ -894,15 +890,14 @@ struct DeleteRecoverTokenData {
 async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
 let data: DeleteRecoverTokenData = data.into_inner();
-let user = match User::find_by_uuid(&data.user_id, &mut conn).await {
-Some(user) => user,
-None => err!("User doesn't exist"),
+let Ok(claims) = decode_delete(&data.token) else {
+err!("Invalid claim")
 };
-let claims = match decode_delete(&data.token) {
-Ok(claims) => claims,
-Err(_) => err!("Invalid claim"),
+let Some(user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
+err!("User doesn't exist")
 };
 if claims.sub != user.uuid {
 err!("Invalid claim");
 }
@@ -1074,11 +1069,8 @@ impl<'r> FromRequest<'r> for KnownDevice {
 async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> {
 let email = if let Some(email_b64) = req.headers().get_one("X-Request-Email") {
-let email_bytes = match data_encoding::BASE64URL_NOPAD.decode(email_b64.as_bytes()) {
-Ok(bytes) => bytes,
-Err(_) => {
+let Ok(email_bytes) = data_encoding::BASE64URL_NOPAD.decode(email_b64.as_bytes()) else {
 return Outcome::Error((Status::BadRequest, "X-Request-Email value failed to decode as base64url"));
-}
 };
 match String::from_utf8(email_bytes) {
 Ok(email) => email,
@@ -1119,9 +1111,9 @@ async fn put_device_token(uuid: &str, data: Json<PushToken>, headers: Headers, m
 let data = data.into_inner();
 let token = data.push_token;
-let mut device = match Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await {
-Some(device) => device,
-None => err!(format!("Error: device {uuid} should be present before a token can be assigned")),
+let Some(mut device) = Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await
+else {
+err!(format!("Error: device {uuid} should be present before a token can be assigned"))
 };
 // if the device already has been registered
@@ -1192,9 +1184,8 @@ async fn post_auth_request(
 ) -> JsonResult {
 let data = data.into_inner();
-let user = match User::find_by_mail(&data.email, &mut conn).await {
-Some(user) => user,
-None => err!("AuthRequest doesn't exist", "User not found"),
+let Some(user) = User::find_by_mail(&data.email, &mut conn).await else {
+err!("AuthRequest doesn't exist", "User not found")
 };
 // Validate device uuid and type
@@ -1232,15 +1223,10 @@ async fn post_auth_request(
 #[get("/auth-requests/<uuid>")]
 async fn get_auth_request(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
-let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
-Some(auth_request) => auth_request,
-None => err!("AuthRequest doesn't exist", "Record not found"),
+let Some(auth_request) = AuthRequest::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
+err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
 };
-if headers.user.uuid != auth_request.user_uuid {
-err!("AuthRequest doesn't exist", "User uuid's do not match")
-}
 let response_date_utc = auth_request.response_date.map(|response_date| format_date(&response_date));
 Ok(Json(json!({
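In the hunk above, the separate `find_by_uuid` lookup plus user-uuid comparison is replaced by a single user-scoped `find_by_uuid_and_user` lookup, and both failure cases now produce the same outward "Record not found or user uuid does not match" error. A small in-memory sketch of the two shapes, using a stand-in `AuthRequest` struct rather than the real model:

```rust
// Sketch of the lookup change in get_auth_request/put_auth_request: instead of fetching by
// uuid and then comparing user_uuid, the record is looked up by (uuid, user_uuid) in one step.
// This is an in-memory illustration; AuthRequest here is a stand-in, not the real model.
#[derive(Debug, PartialEq)]
struct AuthRequest {
    uuid: String,
    user_uuid: String,
}

// Old shape: find by uuid, then verify ownership separately.
fn find_then_check<'a>(rows: &'a [AuthRequest], uuid: &str, user_uuid: &str) -> Option<&'a AuthRequest> {
    let found = rows.iter().find(|r| r.uuid == uuid)?;
    (found.user_uuid == user_uuid).then_some(found)
}

// New shape: a single user-scoped lookup, mirroring find_by_uuid_and_user.
fn find_by_uuid_and_user<'a>(rows: &'a [AuthRequest], uuid: &str, user_uuid: &str) -> Option<&'a AuthRequest> {
    rows.iter().find(|r| r.uuid == uuid && r.user_uuid == user_uuid)
}

fn main() {
    let rows = vec![AuthRequest { uuid: "a1".into(), user_uuid: "u1".into() }];
    assert_eq!(find_then_check(&rows, "a1", "u1"), find_by_uuid_and_user(&rows, "a1", "u1"));
    assert_eq!(find_then_check(&rows, "a1", "u2"), None);
}
```

Collapsing the two failure modes into one message also avoids revealing whether a given auth request exists at all to a caller who does not own it.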
@@ -1277,15 +1263,10 @@ async fn put_auth_request(
 nt: Notify<'_>,
 ) -> JsonResult {
 let data = data.into_inner();
-let mut auth_request: AuthRequest = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
-Some(auth_request) => auth_request,
-None => err!("AuthRequest doesn't exist", "Record not found"),
+let Some(mut auth_request) = AuthRequest::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
+err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
 };
-if headers.user.uuid != auth_request.user_uuid {
-err!("AuthRequest doesn't exist", "User uuid's do not match")
-}
 if auth_request.approved.is_some() {
 err!("An authentication request with the same device already exists")
 }
@@ -1330,9 +1311,8 @@ async fn get_auth_request_response(
 client_headers: ClientHeaders,
 mut conn: DbConn,
 ) -> JsonResult {
-let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
-Some(auth_request) => auth_request,
-None => err!("AuthRequest doesn't exist", "User not found"),
+let Some(auth_request) = AuthRequest::find_by_uuid(uuid, &mut conn).await else {
+err!("AuthRequest doesn't exist", "User not found")
 };
 if auth_request.device_type != client_headers.device_type

@@ -193,9 +193,8 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
 #[get("/ciphers/<uuid>")]
 async fn get_cipher(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -429,14 +428,9 @@ pub async fn update_cipher_from_data(
 cipher.user_uuid = Some(headers.user.uuid.clone());
 }
-if let Some(ref folder_id) = data.folder_id {
-match Folder::find_by_uuid(folder_id, conn).await {
-Some(folder) => {
-if folder.user_uuid != headers.user.uuid {
-err!("Folder is not owned by user")
-}
-}
-None => err!("Folder doesn't exist"),
+if let Some(ref folder_uuid) = data.folder_id {
+if Folder::find_by_uuid_and_user(folder_uuid, &headers.user.uuid, conn).await.is_none() {
+err!("Invalid folder", "Folder does not exist or belongs to another user");
 }
 }
@@ -511,7 +505,7 @@ pub async fn update_cipher_from_data(
 cipher.fields = data.fields.map(|f| _clean_cipher_data(f).to_string());
 cipher.data = type_data.to_string();
 cipher.password_history = data.password_history.map(|f| f.to_string());
-cipher.reprompt = data.reprompt;
+cipher.reprompt = data.reprompt.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32);
 cipher.save(conn).await?;
 cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?;
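The change above uses `Option::filter` so that only known reprompt values are stored and anything else becomes `None`. A minimal sketch, assuming the Bitwarden-style discriminants 0 (`None`) and 1 (`Password`); the enum here is a stand-in, not Vaultwarden's `RepromptType`:

```rust
// Sketch of the reprompt sanitisation: Option::filter drops any value that is not one of the
// two known variants, storing None instead. This standalone enum is an illustration only.
enum RepromptType {
    None = 0,
    Password = 1,
}

fn sanitize_reprompt(reprompt: Option<i32>) -> Option<i32> {
    reprompt.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32)
}

fn main() {
    assert_eq!(sanitize_reprompt(Some(1)), Some(1)); // Password is kept
    assert_eq!(sanitize_reprompt(Some(7)), None); // unknown values are discarded
    assert_eq!(sanitize_reprompt(None), None);
}
```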
@@ -661,9 +655,8 @@ async fn put_cipher(
 ) -> JsonResult {
 let data: CipherData = data.into_inner();
-let mut cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(mut cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 // TODO: Check if only the folder ID or favorite status is being changed.
@@ -695,19 +688,13 @@ async fn put_cipher_partial(
 ) -> JsonResult {
 let data: PartialCipherData = data.into_inner();
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
-if let Some(ref folder_id) = data.folder_id {
-match Folder::find_by_uuid(folder_id, &mut conn).await {
-Some(folder) => {
-if folder.user_uuid != headers.user.uuid {
-err!("Folder is not owned by user")
-}
-}
-None => err!("Folder doesn't exist"),
+if let Some(ref folder_uuid) = data.folder_id {
+if Folder::find_by_uuid_and_user(folder_uuid, &headers.user.uuid, &mut conn).await.is_none() {
+err!("Invalid folder", "Folder does not exist or belongs to another user");
 }
 }
@@ -774,9 +761,8 @@ async fn post_collections_update(
 ) -> JsonResult {
 let data: CollectionsAdminData = data.into_inner();
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -788,7 +774,8 @@ async fn post_collections_update(
 HashSet::<String>::from_iter(cipher.get_collections(headers.user.uuid.clone(), &mut conn).await);
 for collection in posted_collections.symmetric_difference(&current_collections) {
-match Collection::find_by_uuid(collection, &mut conn).await {
+match Collection::find_by_uuid_and_org(collection, cipher.organization_uuid.as_ref().unwrap(), &mut conn).await
+{
 None => err!("Invalid collection ID provided"),
 Some(collection) => {
 if collection.is_writable_by_user(&headers.user.uuid, &mut conn).await {
@@ -851,9 +838,8 @@ async fn post_collections_admin(
 ) -> EmptyResult {
 let data: CollectionsAdminData = data.into_inner();
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -865,7 +851,8 @@ async fn post_collections_admin(
 HashSet::<String>::from_iter(cipher.get_admin_collections(headers.user.uuid.clone(), &mut conn).await);
 for collection in posted_collections.symmetric_difference(&current_collections) {
-match Collection::find_by_uuid(collection, &mut conn).await {
+match Collection::find_by_uuid_and_org(collection, cipher.organization_uuid.as_ref().unwrap(), &mut conn).await
+{
 None => err!("Invalid collection ID provided"),
 Some(collection) => {
 if collection.is_writable_by_user(&headers.user.uuid, &mut conn).await {
@@ -1043,9 +1030,8 @@ async fn share_cipher_by_uuid(
 /// redirects to the same location as before the v2 API.
 #[get("/ciphers/<uuid>/attachment/<attachment_id>")]
 async fn get_attachment(uuid: &str, attachment_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -1084,9 +1070,8 @@ async fn post_attachment_v2(
 headers: Headers,
 mut conn: DbConn,
 ) -> JsonResult {
-let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -1150,9 +1135,8 @@ async fn save_attachment(
 err!("Attachment size can't be negative")
 }
-let cipher = match Cipher::find_by_uuid(cipher_uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
@@ -1545,21 +1529,15 @@ async fn move_cipher_selected(
 let data = data.into_inner();
 let user_uuid = headers.user.uuid;
-if let Some(ref folder_id) = data.folder_id {
-match Folder::find_by_uuid(folder_id, &mut conn).await {
-Some(folder) => {
-if folder.user_uuid != user_uuid {
-err!("Folder is not owned by user")
-}
-}
-None => err!("Folder doesn't exist"),
+if let Some(ref folder_uuid) = data.folder_id {
+if Folder::find_by_uuid_and_user(folder_uuid, &user_uuid, &mut conn).await.is_none() {
+err!("Invalid folder", "Folder does not exist or belongs to another user");
 }
 }
 for uuid in data.ids {
-let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(&uuid, &mut conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_accessible_to_user(&user_uuid, &mut conn).await {
@@ -1667,9 +1645,8 @@ async fn _delete_cipher_by_uuid(
 soft_delete: bool,
 nt: &Notify<'_>,
 ) -> EmptyResult {
-let mut cipher = match Cipher::find_by_uuid(uuid, conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(mut cipher) = Cipher::find_by_uuid(uuid, conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await {
@@ -1739,9 +1716,8 @@ async fn _delete_multiple_ciphers(
 }
 async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &mut DbConn, nt: &Notify<'_>) -> JsonResult {
-let mut cipher = match Cipher::find_by_uuid(uuid, conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(mut cipher) = Cipher::find_by_uuid(uuid, conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await {
@@ -1807,18 +1783,16 @@ async fn _delete_cipher_attachment_by_id(
 conn: &mut DbConn,
 nt: &Notify<'_>,
 ) -> EmptyResult {
-let attachment = match Attachment::find_by_id(attachment_id, conn).await {
-Some(attachment) => attachment,
-None => err!("Attachment doesn't exist"),
+let Some(attachment) = Attachment::find_by_id(attachment_id, conn).await else {
+err!("Attachment doesn't exist")
 };
 if attachment.cipher_uuid != uuid {
 err!("Attachment from other cipher")
 }
-let cipher = match Cipher::find_by_uuid(uuid, conn).await {
-Some(cipher) => cipher,
-None => err!("Cipher doesn't exist"),
+let Some(cipher) = Cipher::find_by_uuid(uuid, conn).await else {
+err!("Cipher doesn't exist")
 };
 if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await {

@@ -137,10 +137,10 @@ async fn post_emergency_access(
 let data: EmergencyAccessUpdateData = data.into_inner();
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await {
-Some(emergency_access) => emergency_access,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) {
@@ -284,24 +284,22 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
 async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> EmptyResult {
 check_emergency_access_enabled()?;
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if emergency_access.status != EmergencyAccessStatus::Invited as i32 {
 err!("The grantee user is already accepted or confirmed to the organization");
 }
-let email = match emergency_access.email.clone() {
-Some(email) => email,
-None => err!("Email not valid."),
+let Some(email) = emergency_access.email.clone() else {
+err!("Email not valid.")
 };
-let grantee_user = match User::find_by_mail(&email, &mut conn).await {
-Some(user) => user,
-None => err!("Grantee user not found."),
+let Some(grantee_user) = User::find_by_mail(&email, &mut conn).await else {
+err!("Grantee user not found.")
 };
 let grantor_user = headers.user;
@@ -356,16 +354,15 @@ async fn accept_invite(emer_id: &str, data: Json<AcceptData>, headers: Headers,
 // We need to search for the uuid in combination with the email, since we do not yet store the uuid of the grantee in the database.
 // The uuid of the grantee gets stored once accepted.
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantee_email(emer_id, &headers.user.email, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantee_email(emer_id, &headers.user.email, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 // get grantor user to send Accepted email
-let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantor user not found."),
+let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+err!("Grantor user not found.")
 };
 if emer_id == claims.emer_id
@@ -403,10 +400,10 @@ async fn confirm_emergency_access(
 let data: ConfirmData = data.into_inner();
 let key = data.key;
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &confirming_user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &confirming_user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if emergency_access.status != EmergencyAccessStatus::Accepted as i32
@@ -415,15 +412,13 @@ async fn confirm_emergency_access(
 err!("Emergency access not valid.")
 }
-let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantor user not found."),
+let Some(grantor_user) = User::find_by_uuid(&confirming_user.uuid, &mut conn).await else {
+err!("Grantor user not found.")
 };
 if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantee user not found."),
+let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+err!("Grantee user not found.")
 };
 emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
@@ -450,19 +445,18 @@ async fn initiate_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
 check_emergency_access_enabled()?;
 let initiating_user = headers.user;
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &initiating_user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &initiating_user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if emergency_access.status != EmergencyAccessStatus::Confirmed as i32 {
 err!("Emergency access not valid.")
 }
-let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantor user not found."),
+let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+err!("Grantor user not found.")
 };
 let now = Utc::now().naive_utc();
@@ -488,25 +482,23 @@ async fn initiate_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
 async fn approve_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
 check_emergency_access_enabled()?;
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32 {
 err!("Emergency access not valid.")
 }
-let grantor_user = match User::find_by_uuid(&headers.user.uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantor user not found."),
+let Some(grantor_user) = User::find_by_uuid(&headers.user.uuid, &mut conn).await else {
+err!("Grantor user not found.")
 };
 if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantee user not found."),
+let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+err!("Grantee user not found.")
 };
 emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32;
@@ -525,10 +517,10 @@ async fn approve_emergency_access(emer_id: &str, headers: Headers, mut conn: DbC
 async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
 check_emergency_access_enabled()?;
-let mut emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(mut emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32
@@ -538,9 +530,8 @@ async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbCo
 }
 if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
-Some(user) => user,
-None => err!("Grantee user not found."),
+let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+err!("Grantee user not found.")
 };
 emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
@@ -563,10 +554,10 @@ async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbCo
 async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
 check_emergency_access_enabled()?;
-let emergency_access =
-match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &headers.user.uuid, &mut conn).await {
-Some(emer) => emer,
-None => err!("Emergency access not valid."),
+let Some(emergency_access) =
+EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &headers.user.uuid, &mut conn).await
+else {
+err!("Emergency access not valid.")
 };
 if !is_valid_request(&emergency_access, &headers.user.uuid, EmergencyAccessType::View) {
@ -602,19 +593,18 @@ async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = let Some(emergency_access) =
match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await
Some(emer) => emer, else {
None => err!("Emergency access not valid."), err!("Emergency access not valid.")
}; };
if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await { let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
Some(user) => user, err!("Grantor user not found.")
None => err!("Grantor user not found."),
}; };
let result = json!({ let result = json!({
@ -650,19 +640,18 @@ async fn password_emergency_access(
//let key = &data.Key; //let key = &data.Key;
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = let Some(emergency_access) =
match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await
Some(emer) => emer, else {
None => err!("Emergency access not valid."), err!("Emergency access not valid.")
}; };
if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await { let Some(mut grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
Some(user) => user, err!("Grantor user not found.")
None => err!("Grantor user not found."),
}; };
// change grantor_user password // change grantor_user password
@ -686,19 +675,18 @@ async fn password_emergency_access(
#[get("/emergency-access/<emer_id>/policies")] #[get("/emergency-access/<emer_id>/policies")]
async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = let Some(emergency_access) =
match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await
Some(emer) => emer, else {
None => err!("Emergency access not valid."), err!("Emergency access not valid.")
}; };
if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await { let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
Some(user) => user, err!("Grantor user not found.")
None => err!("Grantor user not found."),
}; };
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn); let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn);
@ -25,16 +25,10 @@ async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
#[get("/folders/<uuid>")] #[get("/folders/<uuid>")]
async fn get_folder(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_folder(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
let folder = match Folder::find_by_uuid(uuid, &mut conn).await { match Folder::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await {
Some(folder) => folder, Some(folder) => Ok(Json(folder.to_json())),
_ => err!("Invalid folder"), _ => err!("Invalid folder", "Folder does not exist or belongs to another user"),
};
if folder.user_uuid != headers.user.uuid {
err!("Folder belongs to another user")
} }
Ok(Json(folder.to_json()))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
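Besides the let-else conversion, the folder routes above (and the send, group, and membership routes later in this diff) replace a broad find_by_uuid plus a separate ownership check with a scoped lookup such as find_by_uuid_and_user, so a record that exists but belongs to someone else yields the same error as a missing one. A rough sketch of that idea against a hypothetical in-memory store (none of these types or methods are the real Diesel-backed models):

use std::collections::HashMap;

struct Folder {
    user_uuid: String,
    name: String,
}

struct Store {
    folders: HashMap<String, Folder>,
}

impl Store {
    // Scoped lookup: existence and ownership are checked in one place,
    // so callers cannot forget the ownership check.
    fn find_by_uuid_and_user(&self, uuid: &str, user_uuid: &str) -> Option<&Folder> {
        self.folders.get(uuid).filter(|f| f.user_uuid == user_uuid)
    }
}

fn get_folder(store: &Store, uuid: &str, user_uuid: &str) -> Result<String, &'static str> {
    let Some(folder) = store.find_by_uuid_and_user(uuid, user_uuid) else {
        return Err("Invalid folder");
    };
    Ok(folder.name.clone())
}

fn main() {
    let mut folders = HashMap::new();
    folders.insert("f1".to_string(), Folder { user_uuid: "u1".to_string(), name: "Personal".to_string() });
    let store = Store { folders };
    assert_eq!(get_folder(&store, "f1", "u1").unwrap(), "Personal");
    // Same error whether the folder is missing or owned by another user.
    assert!(get_folder(&store, "f1", "u2").is_err());
    assert!(get_folder(&store, "missing", "u1").is_err());
}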
@ -71,15 +65,10 @@ async fn put_folder(
) -> JsonResult { ) -> JsonResult {
let data: FolderData = data.into_inner(); let data: FolderData = data.into_inner();
let mut folder = match Folder::find_by_uuid(uuid, &mut conn).await { let Some(mut folder) = Folder::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
Some(folder) => folder, err!("Invalid folder", "Folder does not exist or belongs to another user")
_ => err!("Invalid folder"),
}; };
if folder.user_uuid != headers.user.uuid {
err!("Folder belongs to another user")
}
folder.name = data.name; folder.name = data.name;
folder.save(&mut conn).await?; folder.save(&mut conn).await?;
@ -95,15 +84,10 @@ async fn delete_folder_post(uuid: &str, headers: Headers, conn: DbConn, nt: Noti
#[delete("/folders/<uuid>")] #[delete("/folders/<uuid>")]
async fn delete_folder(uuid: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_folder(uuid: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let folder = match Folder::find_by_uuid(uuid, &mut conn).await { let Some(folder) = Folder::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
Some(folder) => folder, err!("Invalid folder", "Folder does not exist or belongs to another user")
_ => err!("Invalid folder"),
}; };
if folder.user_uuid != headers.user.uuid {
err!("Folder belongs to another user")
}
// Delete the actual folder entry // Delete the actual folder entry
folder.delete(&mut conn).await?; folder.delete(&mut conn).await?;
@ -267,9 +267,8 @@ async fn post_organization(
) -> JsonResult { ) -> JsonResult {
let data: OrganizationUpdateData = data.into_inner(); let data: OrganizationUpdateData = data.into_inner();
let mut org = match Organization::find_by_uuid(org_id, &mut conn).await { let Some(mut org) = Organization::find_by_uuid(org_id, &mut conn).await else {
Some(organization) => organization, err!("Can't find organization details")
None => err!("Can't find organization details"),
}; };
org.name = data.name; org.name = data.name;
@ -318,9 +317,8 @@ async fn get_org_collections(org_id: &str, _headers: ManagerHeadersLoose, mut co
async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult { async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult {
let mut data = Vec::new(); let mut data = Vec::new();
let user_org = match UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await { let Some(user_org) = UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await else {
Some(u) => u, err!("User is not part of organization")
None => err!("User is not part of organization"),
}; };
// get all collection memberships for the current organization // get all collection memberships for the current organization
@ -387,9 +385,8 @@ async fn post_organization_collections(
) -> JsonResult { ) -> JsonResult {
let data: NewCollectionData = data.into_inner(); let data: NewCollectionData = data.into_inner();
let org = match Organization::find_by_uuid(org_id, &mut conn).await { let Some(org) = Organization::find_by_uuid(org_id, &mut conn).await else {
Some(organization) => organization, err!("Can't find organization details")
None => err!("Can't find organization details"),
}; };
let collection = Collection::new(org.uuid, data.name, data.external_id); let collection = Collection::new(org.uuid, data.name, data.external_id);
@ -413,9 +410,8 @@ async fn post_organization_collections(
} }
for user in data.users { for user in data.users {
let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await { let Some(org_user) = UserOrganization::find_by_uuid_and_org(&user.id, org_id, &mut conn).await else {
Some(u) => u, err!("User is not part of organization")
None => err!("User is not part of organization"),
}; };
if org_user.access_all { if org_user.access_all {
@ -454,20 +450,14 @@ async fn post_organization_collection_update(
) -> JsonResult { ) -> JsonResult {
let data: NewCollectionData = data.into_inner(); let data: NewCollectionData = data.into_inner();
let org = match Organization::find_by_uuid(org_id, &mut conn).await { if Organization::find_by_uuid(org_id, &mut conn).await.is_none() {
Some(organization) => organization, err!("Can't find organization details")
None => err!("Can't find organization details"),
}; };
let mut collection = match Collection::find_by_uuid(col_id, &mut conn).await { let Some(mut collection) = Collection::find_by_uuid_and_org(col_id, org_id, &mut conn).await else {
Some(collection) => collection, err!("Collection not found")
None => err!("Collection not found"),
}; };
if collection.org_uuid != org.uuid {
err!("Collection is not owned by organization");
}
collection.name = data.name; collection.name = data.name;
collection.external_id = match data.external_id { collection.external_id = match data.external_id {
Some(external_id) if !external_id.trim().is_empty() => Some(external_id), Some(external_id) if !external_id.trim().is_empty() => Some(external_id),
@ -498,9 +488,8 @@ async fn post_organization_collection_update(
CollectionUser::delete_all_by_collection(col_id, &mut conn).await?; CollectionUser::delete_all_by_collection(col_id, &mut conn).await?;
for user in data.users { for user in data.users {
let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await { let Some(org_user) = UserOrganization::find_by_uuid_and_org(&user.id, org_id, &mut conn).await else {
Some(u) => u, err!("User is not part of organization")
None => err!("User is not part of organization"),
}; };
if org_user.access_all { if org_user.access_all {
@ -521,15 +510,8 @@ async fn delete_organization_collection_user(
_headers: AdminHeaders, _headers: AdminHeaders,
mut conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
let collection = match Collection::find_by_uuid(col_id, &mut conn).await { let Some(collection) = Collection::find_by_uuid_and_org(col_id, org_id, &mut conn).await else {
None => err!("Collection not found"), err!("Collection not found", "Collection does not exist or does not belong to this organization")
Some(collection) => {
if collection.org_uuid == org_id {
collection
} else {
err!("Collection and Organization id do not match")
}
}
}; };
match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await { match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await {
@ -560,10 +542,9 @@ async fn _delete_organization_collection(
headers: &ManagerHeaders, headers: &ManagerHeaders,
conn: &mut DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
match Collection::find_by_uuid(col_id, conn).await { let Some(collection) = Collection::find_by_uuid_and_org(col_id, org_id, conn).await else {
None => err!("Collection not found"), err!("Collection not found", "Collection does not exist or does not belong to this organization")
Some(collection) => { };
if collection.org_uuid == org_id {
log_event( log_event(
EventType::CollectionDeleted as i32, EventType::CollectionDeleted as i32,
&collection.uuid, &collection.uuid,
@ -575,11 +556,6 @@ async fn _delete_organization_collection(
) )
.await; .await;
collection.delete(conn).await collection.delete(conn).await
} else {
err!("Collection and Organization id do not match")
}
}
}
} }
#[delete("/organizations/<org_id>/collections/<col_id>")] #[delete("/organizations/<org_id>/collections/<col_id>")]
@ -601,12 +577,11 @@ struct DeleteCollectionData {
org_id: String, org_id: String,
} }
#[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<_data>")] #[post("/organizations/<org_id>/collections/<col_id>/delete")]
async fn post_organization_collection_delete( async fn post_organization_collection_delete(
org_id: &str, org_id: &str,
col_id: &str, col_id: &str,
headers: ManagerHeaders, headers: ManagerHeaders,
_data: Json<DeleteCollectionData>,
mut conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
_delete_organization_collection(org_id, col_id, &headers, &mut conn).await _delete_organization_collection(org_id, col_id, &headers, &mut conn).await
@ -651,9 +626,9 @@ async fn get_org_collection_detail(
err!("Collection is not owned by organization") err!("Collection is not owned by organization")
} }
let user_org = match UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await { let Some(user_org) = UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await
Some(u) => u, else {
None => err!("User is not part of organization"), err!("User is not part of organization")
}; };
let groups: Vec<Value> = if CONFIG.org_groups_enabled() { let groups: Vec<Value> = if CONFIG.org_groups_enabled() {
@ -695,9 +670,8 @@ async fn get_org_collection_detail(
#[get("/organizations/<org_id>/collections/<coll_id>/users")] #[get("/organizations/<org_id>/collections/<coll_id>/users")]
async fn get_collection_users(org_id: &str, coll_id: &str, _headers: ManagerHeaders, mut conn: DbConn) -> JsonResult { async fn get_collection_users(org_id: &str, coll_id: &str, _headers: ManagerHeaders, mut conn: DbConn) -> JsonResult {
// Get org and collection, check that collection is from org // Get org and collection, check that collection is from org
let collection = match Collection::find_by_uuid_and_org(coll_id, org_id, &mut conn).await { let Some(collection) = Collection::find_by_uuid_and_org(coll_id, org_id, &mut conn).await else {
None => err!("Collection not found in Organization"), err!("Collection not found in Organization")
Some(collection) => collection,
}; };
let mut user_list = Vec::new(); let mut user_list = Vec::new();
@ -731,9 +705,8 @@ async fn put_collection_users(
// And then add all the received ones (except if the user has access_all) // And then add all the received ones (except if the user has access_all)
for d in data.iter() { for d in data.iter() {
let user = match UserOrganization::find_by_uuid(&d.id, &mut conn).await { let Some(user) = UserOrganization::find_by_uuid_and_org(&d.id, org_id, &mut conn).await else {
Some(u) => u, err!("User is not part of organization")
None => err!("User is not part of organization"),
}; };
if user.access_all { if user.access_all {
@ -1007,18 +980,16 @@ async fn reinvite_user(org_id: &str, user_org: &str, headers: AdminHeaders, mut
} }
async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &mut DbConn) -> EmptyResult { async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &mut DbConn) -> EmptyResult {
let user_org = match UserOrganization::find_by_uuid(user_org, conn).await { let Some(user_org) = UserOrganization::find_by_uuid_and_org(user_org, org_id, conn).await else {
Some(user_org) => user_org, err!("The user hasn't been invited to the organization.")
None => err!("The user hasn't been invited to the organization."),
}; };
if user_org.status != UserOrgStatus::Invited as i32 { if user_org.status != UserOrgStatus::Invited as i32 {
err!("The user is already accepted or confirmed to the organization") err!("The user is already accepted or confirmed to the organization")
} }
let user = match User::find_by_uuid(&user_org.user_uuid, conn).await { let Some(user) = User::find_by_uuid(&user_org.user_uuid, conn).await else {
Some(user) => user, err!("User not found.")
None => err!("User not found."),
}; };
if !CONFIG.invitations_allowed() && user.password_hash.is_empty() { if !CONFIG.invitations_allowed() && user.password_hash.is_empty() {
@ -1059,20 +1030,25 @@ struct AcceptData {
reset_password_key: Option<String>, reset_password_key: Option<String>,
} }
#[post("/organizations/<org_id>/users/<_org_user_id>/accept", data = "<data>")] #[post("/organizations/<org_id>/users/<org_user_id>/accept", data = "<data>")]
async fn accept_invite(org_id: &str, _org_user_id: &str, data: Json<AcceptData>, mut conn: DbConn) -> EmptyResult { async fn accept_invite(org_id: &str, org_user_id: &str, data: Json<AcceptData>, mut conn: DbConn) -> EmptyResult {
// The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead
let data: AcceptData = data.into_inner(); let data: AcceptData = data.into_inner();
let claims = decode_invite(&data.token)?; let claims = decode_invite(&data.token)?;
// If a claim does not have a user_org_id or it does not match the one from the URI, something is wrong.
match &claims.user_org_id {
Some(ou_id) if ou_id.eq(org_user_id) => {}
_ => err!("Error accepting the invitation", "Claim does not match the org_user_id"),
}
match User::find_by_mail(&claims.email, &mut conn).await { match User::find_by_mail(&claims.email, &mut conn).await {
Some(user) => { Some(user) => {
Invitation::take(&claims.email, &mut conn).await; Invitation::take(&claims.email, &mut conn).await;
if let (Some(user_org), Some(org)) = (&claims.user_org_id, &claims.org_id) { if let (Some(user_org), Some(org)) = (&claims.user_org_id, &claims.org_id) {
let mut user_org = match UserOrganization::find_by_uuid_and_org(user_org, org, &mut conn).await { let Some(mut user_org) = UserOrganization::find_by_uuid_and_org(user_org, org, &mut conn).await else {
Some(user_org) => user_org, err!("Error accepting the invitation")
None => err!("Error accepting the invitation"),
}; };
if user_org.status != UserOrgStatus::Invited as i32 { if user_org.status != UserOrgStatus::Invited as i32 {
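The accept_invite change above stops ignoring the org_user_id path segment and instead requires it to match the user_org_id carried inside the invite token, so an invite token cannot be replayed against a different membership. A simplified sketch of that check (the Claims struct and plain error strings are illustrative, not the real decode_invite output):

struct Claims {
    user_org_id: Option<String>,
}

fn check_invite_target(claims: &Claims, org_user_id_from_url: &str) -> Result<(), &'static str> {
    // If the claim has no user_org_id, or it does not match the URL value, reject the request.
    match claims.user_org_id.as_deref() {
        Some(ou_id) if ou_id == org_user_id_from_url => Ok(()),
        _ => Err("Error accepting the invitation: claim does not match the org_user_id"),
    }
}

fn main() {
    let claims = Claims {
        user_org_id: Some("uo-123".to_string()),
    };
    assert!(check_invite_target(&claims, "uo-123").is_ok());
    assert!(check_invite_target(&claims, "uo-999").is_err());
    assert!(check_invite_target(&Claims { user_org_id: None }, "uo-123").is_err());
}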
@ -1213,9 +1189,8 @@ async fn _confirm_invite(
err!("Key or UserId is not set, unable to process request"); err!("Key or UserId is not set, unable to process request");
} }
let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await { let Some(mut user_to_confirm) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await else {
Some(user) => user, err!("The specified user isn't a member of the organization")
None => err!("The specified user isn't a member of the organization"),
}; };
if user_to_confirm.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner { if user_to_confirm.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
@ -1287,9 +1262,8 @@ async fn get_user(
_headers: AdminHeaders, _headers: AdminHeaders,
mut conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let user = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await { let Some(user) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await else {
Some(user) => user, err!("The specified user isn't a member of the organization")
None => err!("The specified user isn't a member of the organization"),
}; };
// In this case, when groups are requested we also need to include collections. // In this case, when groups are requested we also need to include collections.
@ -1331,14 +1305,12 @@ async fn edit_user(
) -> EmptyResult { ) -> EmptyResult {
let data: EditUserData = data.into_inner(); let data: EditUserData = data.into_inner();
let new_type = match UserOrgType::from_str(&data.r#type.into_string()) { let Some(new_type) = UserOrgType::from_str(&data.r#type.into_string()) else {
Some(new_type) => new_type, err!("Invalid type")
None => err!("Invalid type"),
}; };
let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await { let Some(mut user_to_edit) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await else {
Some(user) => user, err!("The specified user isn't member of the organization")
None => err!("The specified user isn't member of the organization"),
}; };
if new_type != user_to_edit.atype if new_type != user_to_edit.atype
@ -1490,9 +1462,8 @@ async fn _delete_user(
conn: &mut DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let user_to_delete = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await { let Some(user_to_delete) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await else {
Some(user) => user, err!("User to delete isn't member of the organization")
None => err!("User to delete isn't member of the organization"),
}; };
if user_to_delete.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner { if user_to_delete.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
@ -1652,7 +1623,7 @@ struct BulkCollectionsData {
remove_collections: bool, remove_collections: bool,
} }
// This endpoint is only reachable via the organization view, therefor this endpoint is located here // This endpoint is only reachable via the organization view, therefore this endpoint is located here
// Also Bitwarden does not send out Notifications for these changes, it only does this for individual cipher collection updates // Also Bitwarden does not send out Notifications for these changes, it only does this for individual cipher collection updates
#[post("/ciphers/bulk-collections", data = "<data>")] #[post("/ciphers/bulk-collections", data = "<data>")]
async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
@ -1725,9 +1696,8 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso
let invite = decode_invite(token)?; let invite = decode_invite(token)?;
let invite_org_id = match invite.org_id { let Some(invite_org_id) = invite.org_id else {
Some(invite_org_id) => invite_org_id, err!("Invalid token")
None => err!("Invalid token"),
}; };
if invite_org_id != org_id { if invite_org_id != org_id {
@ -1747,9 +1717,8 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso
#[get("/organizations/<org_id>/policies/<pol_type>")] #[get("/organizations/<org_id>/policies/<pol_type>")]
async fn get_policy(org_id: &str, pol_type: i32, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { async fn get_policy(org_id: &str, pol_type: i32, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { let Some(pol_type_enum) = OrgPolicyType::from_i32(pol_type) else {
Some(pt) => pt, err!("Invalid or unsupported policy type")
None => err!("Invalid or unsupported policy type"),
}; };
let policy = match OrgPolicy::find_by_org_and_type(org_id, pol_type_enum, &mut conn).await { let policy = match OrgPolicy::find_by_org_and_type(org_id, pol_type_enum, &mut conn).await {
@ -1778,9 +1747,8 @@ async fn put_policy(
) -> JsonResult { ) -> JsonResult {
let data: PolicyData = data.into_inner(); let data: PolicyData = data.into_inner();
let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { let Some(pol_type_enum) = OrgPolicyType::from_i32(pol_type) else {
Some(pt) => pt, err!("Invalid or unsupported policy type")
None => err!("Invalid or unsupported policy type"),
}; };
// Bitwarden only allows the Reset Password policy when Single Org policy is enabled // Bitwarden only allows the Reset Password policy when Single Org policy is enabled
@ -2437,9 +2405,8 @@ async fn put_group(
err!("Group support is disabled"); err!("Group support is disabled");
} }
let group = match Group::find_by_uuid(group_id, &mut conn).await { let Some(group) = Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await else {
Some(group) => group, err!("Group not found", "Group uuid is invalid or does not belong to the organization")
None => err!("Group not found"),
}; };
let group_request = data.into_inner(); let group_request = data.into_inner();
@ -2502,15 +2469,14 @@ async fn add_update_group(
}))) })))
} }
#[get("/organizations/<_org_id>/groups/<group_id>/details")] #[get("/organizations/<org_id>/groups/<group_id>/details")]
async fn get_group_details(_org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { async fn get_group_details(org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
if !CONFIG.org_groups_enabled() { if !CONFIG.org_groups_enabled() {
err!("Group support is disabled"); err!("Group support is disabled");
} }
let group = match Group::find_by_uuid(group_id, &mut conn).await { let Some(group) = Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await else {
Some(group) => group, err!("Group not found", "Group uuid is invalid or does not belong to the organization")
_ => err!("Group could not be found!"),
}; };
Ok(Json(group.to_json_details(&mut conn).await)) Ok(Json(group.to_json_details(&mut conn).await))
@ -2531,9 +2497,8 @@ async fn _delete_group(org_id: &str, group_id: &str, headers: &AdminHeaders, con
err!("Group support is disabled"); err!("Group support is disabled");
} }
let group = match Group::find_by_uuid(group_id, conn).await { let Some(group) = Group::find_by_uuid_and_org(group_id, org_id, conn).await else {
Some(group) => group, err!("Group not found", "Group uuid is invalid or does not belong to the organization")
_ => err!("Group not found"),
}; };
log_event( log_event(
@ -2569,29 +2534,27 @@ async fn bulk_delete_groups(
Ok(()) Ok(())
} }
#[get("/organizations/<_org_id>/groups/<group_id>")] #[get("/organizations/<org_id>/groups/<group_id>")]
async fn get_group(_org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { async fn get_group(org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
if !CONFIG.org_groups_enabled() { if !CONFIG.org_groups_enabled() {
err!("Group support is disabled"); err!("Group support is disabled");
} }
let group = match Group::find_by_uuid(group_id, &mut conn).await { let Some(group) = Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await else {
Some(group) => group, err!("Group not found", "Group uuid is invalid or does not belong to the organization")
_ => err!("Group not found"),
}; };
Ok(Json(group.to_json())) Ok(Json(group.to_json()))
} }
#[get("/organizations/<_org_id>/groups/<group_id>/users")] #[get("/organizations/<org_id>/groups/<group_id>/users")]
async fn get_group_users(_org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { async fn get_group_users(org_id: &str, group_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
if !CONFIG.org_groups_enabled() { if !CONFIG.org_groups_enabled() {
err!("Group support is disabled"); err!("Group support is disabled");
} }
match Group::find_by_uuid(group_id, &mut conn).await { if Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await.is_none() {
Some(_) => { /* Do nothing */ } err!("Group could not be found!", "Group uuid is invalid or does not belong to the organization")
_ => err!("Group could not be found!"),
}; };
let group_users: Vec<String> = GroupUser::find_by_group(group_id, &mut conn) let group_users: Vec<String> = GroupUser::find_by_group(group_id, &mut conn)
@ -2615,9 +2578,8 @@ async fn put_group_users(
err!("Group support is disabled"); err!("Group support is disabled");
} }
match Group::find_by_uuid(group_id, &mut conn).await { if Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await.is_none() {
Some(_) => { /* Do nothing */ } err!("Group could not be found!", "Group uuid is invalid or does not belong to the organization")
_ => err!("Group could not be found!"),
}; };
GroupUser::delete_all_by_group(group_id, &mut conn).await?; GroupUser::delete_all_by_group(group_id, &mut conn).await?;
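When the looked-up value itself is not needed afterwards, the group hunks above use is_none() instead of a let-else binding, keeping the existence (and organization-membership) check without an unused variable. A minimal sketch of the two shapes side by side (the lookup function is a stand-in, not the real Group model):

fn find_group(group_id: &str) -> Option<String> {
    // Stand-in for Group::find_by_uuid_and_org.
    (group_id == "g1").then(|| "Admins".to_string())
}

fn ensure_group_exists(group_id: &str) -> Result<(), &'static str> {
    // Existence check only: no binding needed, so is_none() is enough.
    if find_group(group_id).is_none() {
        return Err("Group could not be found!");
    }
    Ok(())
}

fn group_name(group_id: &str) -> Result<String, &'static str> {
    // When the value is used afterwards, let-else binds it.
    let Some(group) = find_group(group_id) else {
        return Err("Group could not be found!");
    };
    Ok(group)
}

fn main() {
    assert!(ensure_group_exists("g1").is_ok());
    assert!(ensure_group_exists("g2").is_err());
    assert_eq!(group_name("g1").unwrap(), "Admins");
}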
@ -2642,15 +2604,14 @@ async fn put_group_users(
Ok(()) Ok(())
} }
#[get("/organizations/<_org_id>/users/<user_id>/groups")] #[get("/organizations/<org_id>/users/<user_id>/groups")]
async fn get_user_groups(_org_id: &str, user_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { async fn get_user_groups(org_id: &str, user_id: &str, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
if !CONFIG.org_groups_enabled() { if !CONFIG.org_groups_enabled() {
err!("Group support is disabled"); err!("Group support is disabled");
} }
match UserOrganization::find_by_uuid(user_id, &mut conn).await { if UserOrganization::find_by_uuid_and_org(user_id, org_id, &mut conn).await.is_none() {
Some(_) => { /* Do nothing */ } err!("User could not be found!")
_ => err!("User could not be found!"),
}; };
let user_groups: Vec<String> = let user_groups: Vec<String> =
@ -2688,13 +2649,8 @@ async fn put_user_groups(
err!("Group support is disabled"); err!("Group support is disabled");
} }
let user_org = match UserOrganization::find_by_uuid(org_user_id, &mut conn).await { if UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await.is_none() {
Some(uo) => uo, err!("User could not be found or does not belong to the organization.");
_ => err!("User could not be found!"),
};
if user_org.org_uuid != org_id {
err!("Group doesn't belong to organization");
} }
GroupUser::delete_all_by_user(org_user_id, &mut conn).await?; GroupUser::delete_all_by_user(org_user_id, &mut conn).await?;
@ -2742,22 +2698,12 @@ async fn delete_group_user(
err!("Group support is disabled"); err!("Group support is disabled");
} }
let user_org = match UserOrganization::find_by_uuid(org_user_id, &mut conn).await { if UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await.is_none() {
Some(uo) => uo, err!("User could not be found or does not belong to the organization.");
_ => err!("User could not be found!"),
};
if user_org.org_uuid != org_id {
err!("User doesn't belong to organization");
} }
let group = match Group::find_by_uuid(group_id, &mut conn).await { if Group::find_by_uuid_and_org(group_id, org_id, &mut conn).await.is_none() {
Some(g) => g, err!("Group could not be found or does not belong to the organization.");
_ => err!("Group could not be found!"),
};
if group.organizations_uuid != org_id {
err!("Group doesn't belong to organization");
} }
log_event( log_event(
@ -2789,14 +2735,13 @@ struct OrganizationUserResetPasswordRequest {
key: String, key: String,
} }
// Upstrem reports this is the renamed endpoint instead of `/keys` // Upstream reports this is the renamed endpoint instead of `/keys`
// But the clients do not seem to use this at all // But the clients do not seem to use this at all
// Just add it here in case they will // Just add it here in case they will
#[get("/organizations/<org_id>/public-key")] #[get("/organizations/<org_id>/public-key")]
async fn get_organization_public_key(org_id: &str, _headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_organization_public_key(org_id: &str, _headers: Headers, mut conn: DbConn) -> JsonResult {
let org = match Organization::find_by_uuid(org_id, &mut conn).await { let Some(org) = Organization::find_by_uuid(org_id, &mut conn).await else {
Some(organization) => organization, err!("Organization not found")
None => err!("Organization not found"),
}; };
Ok(Json(json!({ Ok(Json(json!({
@ -2821,19 +2766,16 @@ async fn put_reset_password(
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let org = match Organization::find_by_uuid(org_id, &mut conn).await { let Some(org) = Organization::find_by_uuid(org_id, &mut conn).await else {
Some(org) => org, err!("Required organization not found")
None => err!("Required organization not found"),
}; };
let org_user = match UserOrganization::find_by_uuid_and_org(org_user_id, &org.uuid, &mut conn).await { let Some(org_user) = UserOrganization::find_by_uuid_and_org(org_user_id, &org.uuid, &mut conn).await else {
Some(user) => user, err!("User to reset isn't member of required organization")
None => err!("User to reset isn't member of required organization"),
}; };
let user = match User::find_by_uuid(&org_user.user_uuid, &mut conn).await { let Some(user) = User::find_by_uuid(&org_user.user_uuid, &mut conn).await else {
Some(user) => user, err!("User not found")
None => err!("User not found"),
}; };
check_reset_password_applicable_and_permissions(org_id, org_user_id, &headers, &mut conn).await?; check_reset_password_applicable_and_permissions(org_id, org_user_id, &headers, &mut conn).await?;
@ -2880,19 +2822,16 @@ async fn get_reset_password_details(
headers: AdminHeaders, headers: AdminHeaders,
mut conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let org = match Organization::find_by_uuid(org_id, &mut conn).await { let Some(org) = Organization::find_by_uuid(org_id, &mut conn).await else {
Some(org) => org, err!("Required organization not found")
None => err!("Required organization not found"),
}; };
let org_user = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await { let Some(org_user) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, &mut conn).await else {
Some(user) => user, err!("User to reset isn't member of required organization")
None => err!("User to reset isn't member of required organization"),
}; };
let user = match User::find_by_uuid(&org_user.user_uuid, &mut conn).await { let Some(user) = User::find_by_uuid(&org_user.user_uuid, &mut conn).await else {
Some(user) => user, err!("User not found")
None => err!("User not found"),
}; };
check_reset_password_applicable_and_permissions(org_id, org_user_id, &headers, &mut conn).await?; check_reset_password_applicable_and_permissions(org_id, org_user_id, &headers, &mut conn).await?;
@ -2918,9 +2857,8 @@ async fn check_reset_password_applicable_and_permissions(
) -> EmptyResult { ) -> EmptyResult {
check_reset_password_applicable(org_id, conn).await?; check_reset_password_applicable(org_id, conn).await?;
let target_user = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await { let Some(target_user) = UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await else {
Some(user) => user, err!("Reset target user not found")
None => err!("Reset target user not found"),
}; };
// Resetting user must be higher/equal to user to reset // Resetting user must be higher/equal to user to reset
@ -2936,9 +2874,8 @@ async fn check_reset_password_applicable(org_id: &str, conn: &mut DbConn) -> Emp
err!("Password reset is not supported on an email-disabled instance."); err!("Password reset is not supported on an email-disabled instance.");
} }
let policy = match OrgPolicy::find_by_org_and_type(org_id, OrgPolicyType::ResetPassword, conn).await { let Some(policy) = OrgPolicy::find_by_org_and_type(org_id, OrgPolicyType::ResetPassword, conn).await else {
Some(p) => p, err!("Policy not found")
None => err!("Policy not found"),
}; };
if !policy.enabled { if !policy.enabled {
@ -2956,9 +2893,8 @@ async fn put_reset_password_enrollment(
data: Json<OrganizationUserResetPasswordEnrollmentRequest>, data: Json<OrganizationUserResetPasswordEnrollmentRequest>,
mut conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
let mut org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await { let Some(mut org_user) = UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await else {
Some(u) => u, err!("User to enroll isn't member of required organization")
None => err!("User to enroll isn't member of required organization"),
}; };
check_reset_password_applicable(org_id, &mut conn).await?; check_reset_password_applicable(org_id, &mut conn).await?;
@ -203,9 +203,8 @@ impl<'r> FromRequest<'r> for PublicToken {
None => err_handler!("No access token provided"), None => err_handler!("No access token provided"),
}; };
// Check JWT token is valid and get device and user from it // Check JWT token is valid and get device and user from it
let claims = match auth::decode_api_org(access_token) { let Ok(claims) = auth::decode_api_org(access_token) else {
Ok(claims) => claims, err_handler!("Invalid claim")
Err(_) => err_handler!("Invalid claim"),
}; };
// Check if time is between claims.nbf and claims.exp // Check if time is between claims.nbf and claims.exp
let time_now = Utc::now().timestamp(); let time_now = Utc::now().timestamp();
@ -227,13 +226,11 @@ impl<'r> FromRequest<'r> for PublicToken {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB"), _ => err_handler!("Error getting DB"),
}; };
let org_uuid = match claims.client_id.strip_prefix("organization.") { let Some(org_uuid) = claims.client_id.strip_prefix("organization.") else {
Some(uuid) => uuid, err_handler!("Malformed client_id")
None => err_handler!("Malformed client_id"),
}; };
let org_api_key = match OrganizationApiKey::find_by_org_uuid(org_uuid, &conn).await { let Some(org_api_key) = OrganizationApiKey::find_by_org_uuid(org_uuid, &conn).await else {
Some(org_api_key) => org_api_key, err_handler!("Invalid client_id")
None => err_handler!("Invalid client_id"),
}; };
if org_api_key.org_uuid != claims.client_sub { if org_api_key.org_uuid != claims.client_sub {
err_handler!("Token not issued for this org"); err_handler!("Token not issued for this org");
@ -159,16 +159,10 @@ async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
#[get("/sends/<uuid>")] #[get("/sends/<uuid>")]
async fn get_send(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_send(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
let send = match Send::find_by_uuid(uuid, &mut conn).await { match Send::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await {
Some(send) => send, Some(send) => Ok(Json(send.to_json())),
None => err!("Send not found"), None => err!("Send not found", "Invalid uuid or does not belong to user"),
};
if send.user_uuid.as_ref() != Some(&headers.user.uuid) {
err!("Send is not owned by user")
} }
Ok(Json(send.to_json()))
} }
#[post("/sends", data = "<data>")] #[post("/sends", data = "<data>")]
@ -371,22 +365,14 @@ async fn post_send_file_v2_data(
let mut data = data.into_inner(); let mut data = data.into_inner();
let Some(send) = Send::find_by_uuid(send_uuid, &mut conn).await else { let Some(send) = Send::find_by_uuid_and_user(send_uuid, &headers.user.uuid, &mut conn).await else {
err!("Send not found. Unable to save the file.") err!("Send not found. Unable to save the file.", "Invalid uuid or does not belong to user.")
}; };
if send.atype != SendType::File as i32 { if send.atype != SendType::File as i32 {
err!("Send is not a file type send."); err!("Send is not a file type send.");
} }
let Some(send_user_id) = &send.user_uuid else {
err!("Sends are only supported for users at the moment.")
};
if send_user_id != &headers.user.uuid {
err!("Send doesn't belong to user.");
}
let Ok(send_data) = serde_json::from_str::<SendFileData>(&send.data) else { let Ok(send_data) = serde_json::from_str::<SendFileData>(&send.data) else {
err!("Unable to decode send data as json.") err!("Unable to decode send data as json.")
}; };
@ -456,9 +442,8 @@ async fn post_access(
ip: ClientIp, ip: ClientIp,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let mut send = match Send::find_by_access_id(access_id, &mut conn).await { let Some(mut send) = Send::find_by_access_id(access_id, &mut conn).await else {
Some(s) => s, err_code!(SEND_INACCESSIBLE_MSG, 404)
None => err_code!(SEND_INACCESSIBLE_MSG, 404),
}; };
if let Some(max_access_count) = send.max_access_count { if let Some(max_access_count) = send.max_access_count {
@ -517,9 +502,8 @@ async fn post_access_file(
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let mut send = match Send::find_by_uuid(send_id, &mut conn).await { let Some(mut send) = Send::find_by_uuid(send_id, &mut conn).await else {
Some(s) => s, err_code!(SEND_INACCESSIBLE_MSG, 404)
None => err_code!(SEND_INACCESSIBLE_MSG, 404),
}; };
if let Some(max_access_count) = send.max_access_count { if let Some(max_access_count) = send.max_access_count {
@ -582,16 +566,15 @@ async fn download_send(send_id: SafeString, file_id: SafeString, t: &str) -> Opt
None None
} }
#[put("/sends/<id>", data = "<data>")] #[put("/sends/<uuid>", data = "<data>")]
async fn put_send(id: &str, data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn put_send(uuid: &str, data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data: SendData = data.into_inner(); let data: SendData = data.into_inner();
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(id, &mut conn).await { let Some(mut send) = Send::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
Some(s) => s, err!("Send not found", "Send uuid is invalid or does not belong to user")
None => err!("Send not found"),
}; };
update_send_from_data(&mut send, data, &headers, &mut conn, &nt, UpdateType::SyncSendUpdate).await?; update_send_from_data(&mut send, data, &headers, &mut conn, &nt, UpdateType::SyncSendUpdate).await?;
@ -657,17 +640,12 @@ pub async fn update_send_from_data(
Ok(()) Ok(())
} }
#[delete("/sends/<id>")] #[delete("/sends/<uuid>")]
async fn delete_send(id: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_send(uuid: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let send = match Send::find_by_uuid(id, &mut conn).await { let Some(send) = Send::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
Some(s) => s, err!("Send not found", "Invalid send uuid, or does not belong to user")
None => err!("Send not found"),
}; };
if send.user_uuid.as_ref() != Some(&headers.user.uuid) {
err!("Send is not owned by user")
}
send.delete(&mut conn).await?; send.delete(&mut conn).await?;
nt.send_send_update( nt.send_send_update(
UpdateType::SyncSendDelete, UpdateType::SyncSendDelete,
@ -681,19 +659,14 @@ async fn delete_send(id: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_
Ok(()) Ok(())
} }
#[put("/sends/<id>/remove-password")] #[put("/sends/<uuid>/remove-password")]
async fn put_remove_password(id: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn put_remove_password(uuid: &str, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(id, &mut conn).await { let Some(mut send) = Send::find_by_uuid_and_user(uuid, &headers.user.uuid, &mut conn).await else {
Some(s) => s, err!("Send not found", "Invalid send uuid, or does not belong to user")
None => err!("Send not found"),
}; };
if send.user_uuid.as_ref() != Some(&headers.user.uuid) {
err!("Send is not owned by user")
}
send.set_password(None); send.set_password(None);
send.save(&mut conn).await?; send.save(&mut conn).await?;
nt.send_send_update( nt.send_send_update(
@ -117,9 +117,8 @@ pub async fn validate_totp_code(
) -> EmptyResult { ) -> EmptyResult {
use totp_lite::{totp_custom, Sha1}; use totp_lite::{totp_custom, Sha1};
let decoded_secret = match BASE32.decode(secret.as_bytes()) { let Ok(decoded_secret) = BASE32.decode(secret.as_bytes()) else {
Ok(s) => s, err!("Invalid TOTP secret")
Err(_) => err!("Invalid TOTP secret"),
}; };
let mut twofactor = let mut twofactor =
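In the TOTP hunk only the secret decoding changes shape; the code generation itself still goes through totp_lite. A hedged sketch of the decode-then-compute flow, assuming the data_encoding and totp_lite crates already imported by this file and a totp_custom::<Sha1>(step, digits, secret, time) signature; the 30-second step, 6 digits, and example secret are assumptions, not values taken from the diff:

use data_encoding::BASE32;
use std::time::{SystemTime, UNIX_EPOCH};
use totp_lite::{totp_custom, Sha1};

fn generate_totp(secret_b32: &str) -> Result<String, &'static str> {
    // Reject secrets that are not valid BASE32 before doing any crypto.
    let Ok(secret) = BASE32.decode(secret_b32.as_bytes()) else {
        return Err("Invalid TOTP secret");
    };
    let time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
    // 30-second time step and 6 digits: the common authenticator defaults.
    Ok(totp_custom::<Sha1>(30, 6, &secret, time))
}

fn main() {
    // A commonly used example secret; real secrets come from the user's 2FA setup.
    match generate_totp("JBSWY3DPEHPK3PXP") {
        Ok(code) => println!("current code: {code}"),
        Err(e) => eprintln!("{e}"),
    }
}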
@ -232,9 +232,8 @@ async fn get_user_duo_data(uuid: &str, conn: &mut DbConn) -> DuoStatus {
let type_ = TwoFactorType::Duo as i32; let type_ = TwoFactorType::Duo as i32;
// If the user doesn't have an entry, disabled // If the user doesn't have an entry, disabled
let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, conn).await { let Some(twofactor) = TwoFactor::find_by_user_and_type(uuid, type_, conn).await else {
Some(t) => t, return DuoStatus::Disabled(DuoData::global().is_some());
None => return DuoStatus::Disabled(DuoData::global().is_some()),
}; };
// If the user has the required values, we use those // If the user has the required values, we use those
@ -333,14 +332,12 @@ fn parse_duo_values(key: &str, val: &str, ikey: &str, prefix: &str, time: i64) -
err!("Prefixes don't match") err!("Prefixes don't match")
} }
let cookie_vec = match BASE64.decode(u_b64.as_bytes()) { let Ok(cookie_vec) = BASE64.decode(u_b64.as_bytes()) else {
Ok(c) => c, err!("Invalid Duo cookie encoding")
Err(_) => err!("Invalid Duo cookie encoding"),
}; };
let cookie = match String::from_utf8(cookie_vec) { let Ok(cookie) = String::from_utf8(cookie_vec) else {
Ok(c) => c, err!("Invalid Duo cookie encoding")
Err(_) => err!("Invalid Duo cookie encoding"),
}; };
let cookie_split: Vec<&str> = cookie.split('|').collect(); let cookie_split: Vec<&str> = cookie.split('|').collect();
@ -40,9 +40,8 @@ async fn send_email_login(data: Json<SendEmailLoginData>, mut conn: DbConn) -> E
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let user = match User::find_by_mail(&data.email, &mut conn).await { let Some(user) = User::find_by_mail(&data.email, &mut conn).await else {
Some(user) => user, err!("Username or password is incorrect. Try again.")
None => err!("Username or password is incorrect. Try again."),
}; };
// Check password // Check password
@ -174,9 +173,8 @@ async fn email(data: Json<EmailData>, headers: Headers, mut conn: DbConn) -> Jso
let mut email_data = EmailTokenData::from_json(&twofactor.data)?; let mut email_data = EmailTokenData::from_json(&twofactor.data)?;
let issued_token = match &email_data.last_token { let Some(issued_token) = &email_data.last_token else {
Some(t) => t, err!("No token available")
_ => err!("No token available"),
}; };
if !crypto::ct_eq(issued_token, data.token) { if !crypto::ct_eq(issued_token, data.token) {
@ -205,14 +203,13 @@ pub async fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, c
let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn) let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn)
.await .await
.map_res("Two factor not found")?; .map_res("Two factor not found")?;
let issued_token = match &email_data.last_token { let Some(issued_token) = &email_data.last_token else {
Some(t) => t, err!(
_ => err!(
"No token available", "No token available",
ErrorEvent { ErrorEvent {
event: EventType::UserFailedLogIn2fa event: EventType::UserFailedLogIn2fa
} }
), )
}; };
if !crypto::ct_eq(issued_token, token) { if !crypto::ct_eq(issued_token, token) {
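The email 2FA hunks keep comparing the issued token with crypto::ct_eq; only the token extraction moved to let-else. As a reminder of why a plain == is avoided there, here is a simplified, illustrative constant-time comparison; it is not vaultwarden's actual crypto::ct_eq helper:

// Compare two strings without short-circuiting on the first mismatch, so the
// comparison time does not leak how many leading characters were correct.
fn ct_eq(a: &str, b: &str) -> bool {
    if a.len() != b.len() {
        return false;
    }
    a.bytes().zip(b.bytes()).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}

fn main() {
    assert!(ct_eq("123456", "123456"));
    assert!(!ct_eq("123456", "123457"));
    assert!(!ct_eq("123456", "12345"));
}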
@ -85,9 +85,8 @@ async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, mu
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let mut user = match User::find_by_mail(&data.email, &mut conn).await { let Some(mut user) = User::find_by_mail(&data.email, &mut conn).await else {
Some(user) => user, err!("Username or password is incorrect. Try again.")
None => err!("Username or password is incorrect. Try again."),
}; };
// Check password // Check password
@ -309,17 +309,16 @@ async fn delete_webauthn(data: Json<DeleteU2FData>, headers: Headers, mut conn:
err!("Invalid password"); err!("Invalid password");
} }
let mut tf = let Some(mut tf) =
match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &mut conn).await { TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &mut conn).await
Some(tf) => tf, else {
None => err!("Webauthn data not found!"), err!("Webauthn data not found!")
}; };
let mut data: Vec<WebauthnRegistration> = serde_json::from_str(&tf.data)?; let mut data: Vec<WebauthnRegistration> = serde_json::from_str(&tf.data)?;
let item_pos = match data.iter().position(|r| r.id == id) { let Some(item_pos) = data.iter().position(|r| r.id == id) else {
Some(p) => p, err!("Webauthn entry not found")
None => err!("Webauthn entry not found"),
}; };
let removed_item = data.remove(item_pos); let removed_item = data.remove(item_pos);
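The delete_webauthn hunk applies the same let-else shape to an iterator position lookup before removing the matching registration. A tiny stand-alone sketch with plain integer ids instead of WebauthnRegistration values:

fn remove_by_id(items: &mut Vec<u32>, id: u32) -> Result<u32, &'static str> {
    // position() returns Option<usize>; bail out if the id is not present.
    let Some(item_pos) = items.iter().position(|&r| r == id) else {
        return Err("Webauthn entry not found");
    };
    Ok(items.remove(item_pos))
}

fn main() {
    let mut ids = vec![10, 20, 30];
    assert_eq!(remove_by_id(&mut ids, 20).unwrap(), 20);
    assert_eq!(ids, vec![10, 30]);
    assert!(remove_by_id(&mut ids, 99).is_err());
}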
@ -19,7 +19,7 @@ use tokio::{
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
}; };
use html5gum::{Emitter, HtmlString, InfallibleTokenizer, Readable, StringReader, Tokenizer}; use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
use crate::{ use crate::{
error::Error, error::Error,
@ -261,11 +261,7 @@ impl Icon {
} }
} }
fn get_favicons_node( fn get_favicons_node(dom: Tokenizer<StringReader<'_>, FaviconEmitter>, icons: &mut Vec<Icon>, url: &url::Url) {
dom: InfallibleTokenizer<StringReader<'_>, FaviconEmitter>,
icons: &mut Vec<Icon>,
url: &url::Url,
) {
const TAG_LINK: &[u8] = b"link"; const TAG_LINK: &[u8] = b"link";
const TAG_BASE: &[u8] = b"base"; const TAG_BASE: &[u8] = b"base";
const TAG_HEAD: &[u8] = b"head"; const TAG_HEAD: &[u8] = b"head";
@ -274,7 +270,7 @@ fn get_favicons_node(
let mut base_url = url.clone(); let mut base_url = url.clone();
let mut icon_tags: Vec<Tag> = Vec::new(); let mut icon_tags: Vec<Tag> = Vec::new();
for token in dom { for Ok(token) in dom {
let tag_name: &[u8] = &token.tag.name; let tag_name: &[u8] = &token.tag.name;
match tag_name { match tag_name {
TAG_LINK => { TAG_LINK => {
@ -401,7 +397,7 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
// 384KB should be more than enough for the HTML, though, as we only really need the HTML header. // 384KB should be more than enough for the HTML, though, as we only really need the HTML header.
let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec(); let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec();
let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default()).infallible(); let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default());
get_favicons_node(dom, &mut iconlist, &url); get_favicons_node(dom, &mut iconlist, &url);
} else { } else {
// Add the default favicon.ico to the list with just the given domain // Add the default favicon.ico to the list with just the given domain
@ -662,7 +658,7 @@ impl reqwest::cookie::CookieStore for Jar {
/// The FaviconEmitter is using an optimized version of the DefaultEmitter. /// The FaviconEmitter is using an optimized version of the DefaultEmitter.
/// This prevents emitting tags like comments, doctype and also strings between the tags. /// This prevents emitting tags like comments, doctype and also strings between the tags.
/// But it will also only emit the tags we need and only if they have the correct attributes /// But it will also only emit the tags we need and only if they have the correct attributes
/// Therefor parsing the HTML content is faster. /// Therefore parsing the HTML content is faster.
use std::collections::BTreeMap; use std::collections::BTreeMap;
#[derive(Default)] #[derive(Default)]

// Get the user // Get the user
let username = data.username.as_ref().unwrap().trim(); let username = data.username.as_ref().unwrap().trim();
let mut user = match User::find_by_mail(username, conn).await { let Some(mut user) = User::find_by_mail(username, conn).await else {
Some(user) => user, err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)),
}; };
// Set the user_uuid here to be passed back used for event logging. // Set the user_uuid here to be passed back used for event logging.
@ -180,7 +179,8 @@ async fn _password_login(
// If we get an auth request, we don't check the user's password, but the access code of the auth request // If we get an auth request, we don't check the user's password, but the access code of the auth request
if let Some(ref auth_request_uuid) = data.auth_request { if let Some(ref auth_request_uuid) = data.auth_request {
let Some(auth_request) = AuthRequest::find_by_uuid(auth_request_uuid.as_str(), conn).await else { let Some(auth_request) = AuthRequest::find_by_uuid_and_user(auth_request_uuid.as_str(), &user.uuid, conn).await
else {
err!( err!(
"Auth request not found. Try again.", "Auth request not found. Try again.",
format!("IP: {}. Username: {}.", ip.ip, username), format!("IP: {}. Username: {}.", ip.ip, username),
@ -382,13 +382,11 @@ async fn _user_api_key_login(
) -> JsonResult { ) -> JsonResult {
// Get the user via the client_id // Get the user via the client_id
let client_id = data.client_id.as_ref().unwrap(); let client_id = data.client_id.as_ref().unwrap();
let client_user_uuid = match client_id.strip_prefix("user.") { let Some(client_user_uuid) = client_id.strip_prefix("user.") else {
Some(uuid) => uuid, err!("Malformed client_id", format!("IP: {}.", ip.ip))
None => err!("Malformed client_id", format!("IP: {}.", ip.ip)),
}; };
let user = match User::find_by_uuid(client_user_uuid, conn).await { let Some(user) = User::find_by_uuid(client_user_uuid, conn).await else {
Some(user) => user, err!("Invalid client_id", format!("IP: {}.", ip.ip))
None => err!("Invalid client_id", format!("IP: {}.", ip.ip)),
}; };
// Set the user_uuid here to be passed back used for event logging. // Set the user_uuid here to be passed back used for event logging.
@ -471,13 +469,11 @@ async fn _user_api_key_login(
async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: &ClientIp) -> JsonResult { async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: &ClientIp) -> JsonResult {
// Get the org via the client_id // Get the org via the client_id
let client_id = data.client_id.as_ref().unwrap(); let client_id = data.client_id.as_ref().unwrap();
let org_uuid = match client_id.strip_prefix("organization.") { let Some(org_uuid) = client_id.strip_prefix("organization.") else {
Some(uuid) => uuid, err!("Malformed client_id", format!("IP: {}.", ip.ip))
None => err!("Malformed client_id", format!("IP: {}.", ip.ip)),
}; };
let org_api_key = match OrganizationApiKey::find_by_org_uuid(org_uuid, conn).await { let Some(org_api_key) = OrganizationApiKey::find_by_org_uuid(org_uuid, conn).await else {
Some(org_api_key) => org_api_key, err!("Invalid client_id", format!("IP: {}.", ip.ip))
None => err!("Invalid client_id", format!("IP: {}.", ip.ip)),
}; };
// Check API key. // Check API key.
@ -676,9 +672,8 @@ async fn _json_err_twofactor(
} }
Some(tf_type @ TwoFactorType::YubiKey) => { Some(tf_type @ TwoFactorType::YubiKey) => {
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await { let Some(twofactor) = TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await else {
Some(tf) => tf, err!("No YubiKey devices registered")
None => err!("No YubiKey devices registered"),
}; };
let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?; let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?;
@ -689,9 +684,8 @@ async fn _json_err_twofactor(
        }
        Some(tf_type @ TwoFactorType::Email) => {
-           let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await {
-               Some(tf) => tf,
-               None => err!("No twofactor email registered"),
+           let Some(twofactor) = TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await else {
+               err!("No twofactor email registered")
            };
            // Send email immediately if email is the only 2FA option
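Most of the Rust changes in this commit share one pattern: a match over an Option or Result whose only purpose was to unwrap the success case is rewritten with let ... else, so the error macro lives in the diverging else block and the happy path stays unindented. A minimal standalone sketch of the before/after; lookup and the String error type below are illustrative placeholders, not Vaultwarden APIs:

    // Minimal standalone sketch of the `let ... else` refactor used throughout this diff.
    // `lookup` and the error type are illustrative placeholders, not Vaultwarden code.
    fn lookup(id: &str) -> Option<String> {
        (id == "known").then(|| "record".to_string())
    }

    // Before: a match that only exists to unwrap the Some case.
    fn find_old(id: &str) -> Result<String, String> {
        let record = match lookup(id) {
            Some(r) => r,
            None => return Err(format!("{id} not found")),
        };
        Ok(record)
    }

    // After: `let ... else` keeps the happy path unindented; the else block must diverge.
    fn find_new(id: &str) -> Result<String, String> {
        let Some(record) = lookup(id) else {
            return Err(format!("{id} not found"));
        };
        Ok(record)
    }

    fn main() {
        assert_eq!(find_old("known"), find_new("known"));
        assert!(find_new("missing").is_err());
    }

In the actual handlers the else block diverges via err!/err_handler! instead of an early return, which is why the refactor is behavior-preserving.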

@ -155,12 +155,9 @@ pub async fn push_cipher_update(
    if cipher.organization_uuid.is_some() {
        return;
    };
-   let user_uuid = match &cipher.user_uuid {
-       Some(c) => c,
-       None => {
+   let Some(user_uuid) = &cipher.user_uuid else {
        debug!("Cipher has no uuid");
        return;
-       }
    };
    if Device::check_user_has_push_device(user_uuid, conn).await {

@ -503,9 +503,8 @@ impl<'r> FromRequest<'r> for Headers {
        };
        // Check JWT token is valid and get device and user from it
-       let claims = match decode_login(access_token) {
-           Ok(claims) => claims,
-           Err(_) => err_handler!("Invalid claim"),
+       let Ok(claims) = decode_login(access_token) else {
+           err_handler!("Invalid claim")
        };
        let device_uuid = claims.device;
@ -516,23 +515,20 @@ impl<'r> FromRequest<'r> for Headers {
            _ => err_handler!("Error getting DB"),
        };
-       let device = match Device::find_by_uuid_and_user(&device_uuid, &user_uuid, &mut conn).await {
-           Some(device) => device,
-           None => err_handler!("Invalid device id"),
+       let Some(device) = Device::find_by_uuid_and_user(&device_uuid, &user_uuid, &mut conn).await else {
+           err_handler!("Invalid device id")
        };
-       let user = match User::find_by_uuid(&user_uuid, &mut conn).await {
-           Some(user) => user,
-           None => err_handler!("Device has no user associated"),
+       let Some(user) = User::find_by_uuid(&user_uuid, &mut conn).await else {
+           err_handler!("Device has no user associated")
        };
        if user.security_stamp != claims.sstamp {
            if let Some(stamp_exception) =
                user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
            {
-               let current_route = match request.route().and_then(|r| r.name.as_deref()) {
-                   Some(name) => name,
-                   _ => err_handler!("Error getting current route for stamp exception"),
+               let Some(current_route) = request.route().and_then(|r| r.name.as_deref()) else {
+                   err_handler!("Error getting current route for stamp exception")
                };
                // Check if the stamp exception has expired first.

@ -238,6 +238,7 @@ macro_rules! make_config {
    // Besides Pass, only String types will be masked via _privacy_mask.
    const PRIVACY_CONFIG: &[&str] = &[
        "allowed_iframe_ancestors",
+       "allowed_connect_src",
        "database_url",
        "domain_origin",
        "domain_path",
@ -248,6 +249,7 @@ macro_rules! make_config {
        "smtp_from",
        "smtp_host",
        "smtp_username",
+       "_smtp_img_src",
    ];
    let cfg = {
@ -610,6 +612,9 @@ make_config! {
    /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets
    allowed_iframe_ancestors: String, true, def, String::new();
+   /// Allowed connect-src (Know the risks!) |> Allows other domains to URLs which can be loaded using script interfaces like the Forwarded email alias feature
+   allowed_connect_src: String, true, def, String::new();
    /// Seconds between login requests |> Number of seconds, on average, between login and 2FA requests from the same IP address before rate limiting kicks in
    login_ratelimit_seconds: u64, false, def, 60;
    /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `login_ratelimit_seconds`. Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2
@ -761,6 +766,13 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
        );
    }
+   let connect_src = cfg.allowed_connect_src.to_lowercase();
+   for url in connect_src.split_whitespace() {
+       if !url.starts_with("https://") || Url::parse(url).is_err() {
+           err!("ALLOWED_CONNECT_SRC variable contains one or more invalid URLs. Only FQDN's starting with https are allowed");
+       }
+   }
    let whitelist = &cfg.signups_domains_whitelist;
    if !whitelist.is_empty() && whitelist.split(',').any(|d| d.trim().is_empty()) {
        err!("`SIGNUPS_DOMAINS_WHITELIST` contains empty tokens");

@ -373,14 +373,6 @@ pub async fn backup_database(conn: &mut DbConn) -> Result<String, Error> {
            err!("PostgreSQL and MySQL/MariaDB do not support this backup feature");
        }
        sqlite {
-           backup_sqlite_database(conn)
-       }
-   }
-}
-
-#[cfg(sqlite)]
-pub fn backup_sqlite_database(conn: &mut diesel::sqlite::SqliteConnection) -> Result<String, Error> {
-   use diesel::RunQueryDsl;
            let db_url = CONFIG.database_url();
            let db_path = std::path::Path::new(&db_url).parent().unwrap();
            let backup_file = db_path
@ -389,6 +381,8 @@ pub fn backup_sqlite_database(conn: &mut diesel::sqlite::SqliteConnection) -> Re
                .into_owned();
            diesel::sql_query(format!("VACUUM INTO '{backup_file}'")).execute(conn)?;
            Ok(backup_file)
+       }
+   }
}

/// Get the SQL Server version
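With the standalone backup_sqlite_database function removed, the backup now runs entirely inside the sqlite branch of backup_database, using SQLite's VACUUM INTO to write a consistent copy of the live file. A hedged standalone sketch of that strategy against a plain diesel SqliteConnection; this assumes diesel with the sqlite feature enabled, and the paths and function name are illustrative rather than the exact ones Vaultwarden uses:

    // Sketch of the `VACUUM INTO` backup strategy shown in the hunk above.
    use diesel::{Connection, RunQueryDsl};

    fn backup_sqlite_file(db_url: &str, backup_file: &str) -> Result<(), diesel::result::Error> {
        // Open the live database (illustrative: real code reuses the existing pool/connection).
        let mut conn = diesel::sqlite::SqliteConnection::establish(db_url)
            .expect("failed to open the SQLite database");
        // VACUUM INTO writes a compacted, consistent copy of the database to `backup_file`.
        diesel::sql_query(format!("VACUUM INTO '{backup_file}'")).execute(&mut conn)?;
        Ok(())
    }

    fn main() {
        // Illustrative paths only.
        backup_sqlite_file("data/db.sqlite3", "data/db_backup.sqlite3").expect("backup failed");
    }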

@ -111,6 +111,17 @@ impl AuthRequest {
        }}
    }
+   pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Option<Self> {
+       db_run! {conn: {
+           auth_requests::table
+               .filter(auth_requests::uuid.eq(uuid))
+               .filter(auth_requests::user_uuid.eq(user_uuid))
+               .first::<AuthRequestDb>(conn)
+               .ok()
+               .from_db()
+       }}
+   }
    pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
        db_run! {conn: {
            auth_requests::table

@ -46,10 +46,9 @@ db_object! {
    }
}
-#[allow(dead_code)]
pub enum RepromptType {
    None = 0,
-   Password = 1, // not currently used in server
+   Password = 1,
}
/// Local methods
@ -296,7 +295,7 @@ impl Cipher {
            "creationDate": format_date(&self.created_at),
            "revisionDate": format_date(&self.updated_at),
            "deletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
-           "reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
+           "reprompt": self.reprompt.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32).unwrap_or(RepromptType::None as i32),
            "organizationId": self.organization_uuid,
            "key": self.key,
            "attachments": attachments_json,

@ -120,10 +120,11 @@ impl Folder {
        Ok(())
    }
-   pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
+   pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Option<Self> {
        db_run! { conn: {
            folders::table
                .filter(folders::uuid.eq(uuid))
+               .filter(folders::user_uuid.eq(user_uuid))
                .first::<FolderDb>(conn)
                .ok()
                .from_db()

@ -191,10 +191,11 @@ impl Group {
        }}
    }
-   pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
+   pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Option<Self> {
        db_run! { conn: {
            groups::table
                .filter(groups::uuid.eq(uuid))
+               .filter(groups::organizations_uuid.eq(org_uuid))
                .first::<GroupDb>(conn)
                .ok()
                .from_db()

@ -18,7 +18,7 @@ mod user;
pub use self::attachment::Attachment;
pub use self::auth_request::AuthRequest;
-pub use self::cipher::Cipher;
+pub use self::cipher::{Cipher, RepromptType};
pub use self::collection::{Collection, CollectionCipher, CollectionUser};
pub use self::device::{Device, DeviceType};
pub use self::emergency_access::{EmergencyAccess, EmergencyAccessStatus, EmergencyAccessType};

@ -142,16 +142,6 @@ impl OrgPolicy {
        }}
    }
-   pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
-       db_run! { conn: {
-           org_policies::table
-               .filter(org_policies::uuid.eq(uuid))
-               .first::<OrgPolicyDb>(conn)
-               .ok()
-               .from_db()
-       }}
-   }
    pub async fn find_by_org(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
        db_run! { conn: {
            org_policies::table

@ -268,9 +268,8 @@ impl Send {
        use data_encoding::BASE64URL_NOPAD;
        use uuid::Uuid;
-       let uuid_vec = match BASE64URL_NOPAD.decode(access_id.as_bytes()) {
-           Ok(v) => v,
-           Err(_) => return None,
+       let Ok(uuid_vec) = BASE64URL_NOPAD.decode(access_id.as_bytes()) else {
+           return None;
        };
        let uuid = match Uuid::from_slice(&uuid_vec) {
@ -291,6 +290,17 @@ impl Send {
        }}
    }
+   pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Option<Self> {
+       db_run! {conn: {
+           sends::table
+               .filter(sends::uuid.eq(uuid))
+               .filter(sends::user_uuid.eq(user_uuid))
+               .first::<SendDb>(conn)
+               .ok()
+               .from_db()
+       }}
+   }
    pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
        db_run! {conn: {
            sends::table

@ -308,9 +308,8 @@ pub async fn send_invite(
        }
    }
-   let query_string = match query.query() {
-       None => err!("Failed to build invite URL query parameters"),
-       Some(query) => query,
+   let Some(query_string) = query.query() else {
+       err!("Failed to build invite URL query parameters")
    };
    let (subject, body_html, body_text) = get_text(
@ -352,9 +351,8 @@ pub async fn send_emergency_access_invite(
            .append_pair("token", &encode_jwt(&claims));
    }
-   let query_string = match query.query() {
-       None => err!("Failed to build emergency invite URL query parameters"),
-       Some(query) => query,
+   let Some(query_string) = query.query() else {
+       err!("Failed to build emergency invite URL query parameters")
    };
    let (subject, body_html, body_text) = get_text(

@ -67,7 +67,7 @@ pub use util::is_running_in_container;
#[rocket::main]
async fn main() -> Result<(), Error> {
-   parse_args();
+   parse_args().await;
    launch_info();
    let level = init_logging()?;
@ -115,7 +115,7 @@ PRESETS: m= t= p=
pub const VERSION: Option<&str> = option_env!("VW_VERSION");
-fn parse_args() {
+async fn parse_args() {
    let mut pargs = pico_args::Arguments::from_env();
    let version = VERSION.unwrap_or("(Version info from Git not present)");
@ -186,7 +186,7 @@ fn parse_args() {
            exit(1);
        }
    } else if command == "backup" {
-       match backup_sqlite() {
+       match backup_sqlite().await {
            Ok(f) => {
                println!("Backup to '{f}' was successful");
                exit(0);
@ -201,26 +201,21 @@ fn parse_args() {
    }
}
-fn backup_sqlite() -> Result<String, Error> {
-   #[cfg(sqlite)]
-   {
-       use crate::db::{backup_sqlite_database, DbConnType};
+async fn backup_sqlite() -> Result<String, Error> {
+   use crate::db::{backup_database, DbConnType};
    if DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false) {
-       use diesel::Connection;
-       let url = CONFIG.database_url();
        // Establish a connection to the sqlite database
-       let mut conn = diesel::sqlite::SqliteConnection::establish(&url)?;
-       let backup_file = backup_sqlite_database(&mut conn)?;
+       let mut conn = db::DbPool::from_config()
+           .expect("SQLite database connection failed")
+           .get()
+           .await
+           .expect("Unable to get SQLite db pool");
+       let backup_file = backup_database(&mut conn).await?;
        Ok(backup_file)
    } else {
        err_silent!("The database type is not SQLite. Backups only works for SQLite databases")
    }
-   }
-   #[cfg(not(sqlite))]
-   {
-       err_silent!("The 'sqlite' feature is not enabled. Backups only works for SQLite databases")
-   }
}
fn launch_info() {
@ -610,7 +605,7 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error>
    // If we need more signals to act upon, we might want to use select! here.
    // With only one item to listen for this is enough.
    let _ = signal_user1.recv().await;
-   match backup_sqlite() {
+   match backup_sqlite().await {
        Ok(f) => info!("Backup to '{f}' was successful"),
        Err(e) => error!("Backup failed. {e:?}"),
    }
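With backup_sqlite now async, both the `backup` CLI command and the SIGUSR1 handler in launch_rocket await it. A hedged sketch of the signal-driven flow, assuming tokio with the signal and macros features on a Unix target; run_backup stands in for the real backup_sqlite() and the log messages are illustrative:

    // Sketch of a SIGUSR1-triggered backup loop, as in the launch_rocket hunk above.
    use tokio::signal::unix::{signal, SignalKind};

    async fn run_backup() -> Result<String, String> {
        // Placeholder for the async backup_sqlite() call.
        Ok("db_backup.sqlite3".to_string())
    }

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut signal_user1 = signal(SignalKind::user_defined1())?;
        // Each SIGUSR1 delivered to the process triggers one backup attempt;
        // the loop runs until the process exits.
        while signal_user1.recv().await.is_some() {
            match run_backup().await {
                Ok(f) => println!("Backup to '{f}' was successful"),
                Err(e) => eprintln!("Backup failed. {e:?}"),
            }
        }
        Ok(())
    }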

@ -38,8 +38,8 @@ img {
    max-width: 130px;
}
#users-table .vw-actions, #orgs-table .vw-actions {
-   min-width: 130px;
-   max-width: 130px;
+   min-width: 135px;
+   max-width: 140px;
}
#users-table .vw-org-cell {
    max-height: 120px;

@ -7,6 +7,8 @@ var timeCheck = false;
var ntpTimeCheck = false;
var domainCheck = false;
var httpsCheck = false;
+var websocketCheck = false;
+var httpResponseCheck = false;
// ================================
// Date & Time Check
@ -76,18 +78,15 @@ async function generateSupportString(event, dj) {
    event.preventDefault();
    event.stopPropagation();
-   let supportString = "### Your environment (Generated via diagnostics page)\n";
+   let supportString = "### Your environment (Generated via diagnostics page)\n\n";
    supportString += `* Vaultwarden version: v${dj.current_release}\n`;
    supportString += `* Web-vault version: v${dj.web_vault_version}\n`;
    supportString += `* OS/Arch: ${dj.host_os}/${dj.host_arch}\n`;
    supportString += `* Running within a container: ${dj.running_within_container} (Base: ${dj.container_base_image})\n`;
-   supportString += "* Environment settings overridden: ";
-   if (dj.overrides != "") {
-       supportString += "true\n";
-   } else {
-       supportString += "false\n";
-   }
+   supportString += `* Database type: ${dj.db_type}\n`;
+   supportString += `* Database version: ${dj.db_version}\n`;
+   supportString += `* Environment settings overridden!: ${dj.overrides !== ""}\n`;
    supportString += `* Uses a reverse proxy: ${dj.ip_header_exists}\n`;
    if (dj.ip_header_exists) {
        supportString += `* IP Header check: ${dj.ip_header_match} (${dj.ip_header_name})\n`;
@ -99,11 +98,12 @@ async function generateSupportString(event, dj) {
    supportString += `* Server/NTP Time Check: ${ntpTimeCheck}\n`;
    supportString += `* Domain Configuration Check: ${domainCheck}\n`;
    supportString += `* HTTPS Check: ${httpsCheck}\n`;
-   supportString += `* Database type: ${dj.db_type}\n`;
-   supportString += `* Database version: ${dj.db_version}\n`;
-   supportString += "* Clients used: \n";
-   supportString += "* Reverse proxy and version: \n";
-   supportString += "* Other relevant information: \n";
+   if (dj.enable_websocket) {
+       supportString += `* Websocket Check: ${websocketCheck}\n`;
+   } else {
+       supportString += "* Websocket Check: disabled\n";
+   }
+   supportString += `* HTTP Response Checks: ${httpResponseCheck}\n`;
    const jsonResponse = await fetch(`${BASE_URL}/admin/diagnostics/config`, {
        "headers": { "Accept": "application/json" }
@ -113,10 +113,30 @@ async function generateSupportString(event, dj) {
        throw new Error(jsonResponse);
    }
    const configJson = await jsonResponse.json();
-   supportString += "\n### Config (Generated via diagnostics page)\n<details><summary>Show Running Config</summary>\n";
+   // Start Config and Details section within a details block which is collapsed by default
+   supportString += "\n### Config & Details (Generated via diagnostics page)\n\n";
+   supportString += "<details><summary>Show Config & Details</summary>\n";
+   // Add overrides if they exists
+   if (dj.overrides != "") {
        supportString += `\n**Environment settings which are overridden:** ${dj.overrides}\n`;
-   supportString += "\n\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n</details>\n";
+   }
+   // Add http response check messages if they exists
+   if (httpResponseCheck === false) {
+       supportString += "\n**Failed HTTP Checks:**\n";
+       // We use `innerText` here since that will convert <br> into new-lines
+       supportString += "\n```yaml\n" + document.getElementById("http-response-errors").innerText.trim() + "\n```\n";
+   }
+   // Add the current config in json form
+   supportString += "\n**Config:**\n";
+   supportString += "\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n";
+   supportString += "\n</details>\n";
+   // Add the support string to the textbox so it can be viewed and copied
    document.getElementById("support-string").textContent = supportString;
    document.getElementById("support-string").classList.remove("d-none");
    document.getElementById("copy-support").classList.remove("d-none");
@ -199,6 +219,162 @@ function checkDns(dns_resolved) {
    }
}
async function fetchCheckUrl(url) {
try {
const response = await fetch(url);
return { headers: response.headers, status: response.status, text: await response.text() };
} catch (error) {
console.error(`Error fetching ${url}: ${error}`);
return { error };
}
}
function checkSecurityHeaders(headers, omit) {
let securityHeaders = {
"x-frame-options": ["SAMEORIGIN"],
"x-content-type-options": ["nosniff"],
"referrer-policy": ["same-origin"],
"x-xss-protection": ["0"],
"x-robots-tag": ["noindex", "nofollow"],
"content-security-policy": [
"default-src 'self'",
"base-uri 'self'",
"form-action 'self'",
"object-src 'self' blob:",
"script-src 'self' 'wasm-unsafe-eval'",
"style-src 'self' 'unsafe-inline'",
"child-src 'self' https://*.duosecurity.com https://*.duofederal.com",
"frame-src 'self' https://*.duosecurity.com https://*.duofederal.com",
"frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb chrome-extension://jbkfoedolllekgbhcbcoahefnbanhhlh moz-extension://*",
"img-src 'self' data: https://haveibeenpwned.com",
"connect-src 'self' https://api.pwnedpasswords.com https://api.2fa.directory https://app.simplelogin.io/api/ https://app.addy.io/api/ https://api.fastmail.com/ https://api.forwardemail.net",
]
};
let messages = [];
for (let header in securityHeaders) {
// Skip some headers for specific endpoints if needed
if (typeof omit === "object" && omit.includes(header) === true) {
continue;
}
// If the header exists, check if the contents matches what we expect it to be
let headerValue = headers.get(header);
if (headerValue !== null) {
securityHeaders[header].forEach((expectedValue) => {
if (headerValue.indexOf(expectedValue) === -1) {
messages.push(`'${header}' does not contain '${expectedValue}'`);
}
});
} else {
messages.push(`'${header}' is missing!`);
}
}
return messages;
}
async function checkHttpResponse() {
const [apiConfig, webauthnConnector, notFound, notFoundApi, badRequest, unauthorized, forbidden] = await Promise.all([
fetchCheckUrl(`${BASE_URL}/api/config`),
fetchCheckUrl(`${BASE_URL}/webauthn-connector.html`),
fetchCheckUrl(`${BASE_URL}/admin/does-not-exist`),
fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=404`),
fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=400`),
fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=401`),
fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=403`),
]);
const respErrorElm = document.getElementById("http-response-errors");
// Check and validate the default API header responses
let apiErrors = checkSecurityHeaders(apiConfig.headers);
if (apiErrors.length >= 1) {
respErrorElm.innerHTML += "<b>API calls:</b><br>";
apiErrors.forEach((errMsg) => {
respErrorElm.innerHTML += `<b>Header:</b> ${errMsg}<br>`;
});
}
// Check the special `-connector.html` headers, these should have some headers omitted.
const omitConnectorHeaders = ["x-frame-options", "content-security-policy"];
let connectorErrors = checkSecurityHeaders(webauthnConnector.headers, omitConnectorHeaders);
omitConnectorHeaders.forEach((header) => {
if (webauthnConnector.headers.get(header) !== null) {
connectorErrors.push(`'${header}' is present while it should not`);
}
});
if (connectorErrors.length >= 1) {
respErrorElm.innerHTML += "<b>2FA Connector calls:</b><br>";
connectorErrors.forEach((errMsg) => {
respErrorElm.innerHTML += `<b>Header:</b> ${errMsg}<br>`;
});
}
// Check specific error code responses if they are not re-written by a reverse proxy
let responseErrors = [];
if (notFound.status !== 404 || notFound.text.indexOf("return to the web-vault") === -1) {
responseErrors.push("404 (Not Found) HTML is invalid");
}
if (notFoundApi.status !== 404 || notFoundApi.text.indexOf("\"message\":\"Testing error 404 response\",") === -1) {
responseErrors.push("404 (Not Found) JSON is invalid");
}
if (badRequest.status !== 400 || badRequest.text.indexOf("\"message\":\"Testing error 400 response\",") === -1) {
responseErrors.push("400 (Bad Request) is invalid");
}
if (unauthorized.status !== 401 || unauthorized.text.indexOf("\"message\":\"Testing error 401 response\",") === -1) {
responseErrors.push("401 (Unauthorized) is invalid");
}
if (forbidden.status !== 403 || forbidden.text.indexOf("\"message\":\"Testing error 403 response\",") === -1) {
responseErrors.push("403 (Forbidden) is invalid");
}
if (responseErrors.length >= 1) {
respErrorElm.innerHTML += "<b>HTTP error responses:</b><br>";
responseErrors.forEach((errMsg) => {
respErrorElm.innerHTML += `<b>Response to:</b> ${errMsg}<br>`;
});
}
if (responseErrors.length >= 1 || connectorErrors.length >= 1 || apiErrors.length >= 1) {
document.getElementById("http-response-warning").classList.remove("d-none");
} else {
httpResponseCheck = true;
document.getElementById("http-response-success").classList.remove("d-none");
}
}
async function fetchWsUrl(wsUrl) {
return new Promise((resolve, reject) => {
try {
const ws = new WebSocket(wsUrl);
ws.onopen = () => {
ws.close();
resolve(true);
};
ws.onerror = () => {
reject(false);
};
} catch (_) {
reject(false);
}
});
}
async function checkWebsocketConnection() {
// Test Websocket connections via the anonymous (login with device) connection
const isConnected = await fetchWsUrl(`${BASE_URL}/notifications/anonymous-hub?token=admin-diagnostics`).catch(() => false);
if (isConnected) {
websocketCheck = true;
document.getElementById("websocket-success").classList.remove("d-none");
} else {
document.getElementById("websocket-error").classList.remove("d-none");
}
}
function init(dj) {
    // Time check
    document.getElementById("time-browser-string").textContent = browserUTC;
@ -225,6 +401,12 @@ function init(dj) {
    // DNS Check
    checkDns(dj.dns_resolved);
+   checkHttpResponse();
+   if (dj.enable_websocket) {
+       checkWebsocketConnection();
+   }
}

// onLoad events

@ -4,10 +4,10 @@
 *
 * To rebuild or modify this file with the latest versions of the included
 * software please visit:
-*   https://datatables.net/download/#bs5/dt-2.0.8
+*   https://datatables.net/download/#bs5/dt-2.1.8
 *
 * Included libraries:
-*   DataTables 2.0.8
+*   DataTables 2.1.8
 */
@charset "UTF-8";
@ -45,15 +45,21 @@ table.dataTable tr.dt-hasChild td.dt-control:before {
}
html.dark table.dataTable td.dt-control:before,
-:root[data-bs-theme=dark] table.dataTable td.dt-control:before {
+:root[data-bs-theme=dark] table.dataTable td.dt-control:before,
+:root[data-theme=dark] table.dataTable td.dt-control:before {
    border-left-color: rgba(255, 255, 255, 0.5);
}
html.dark table.dataTable tr.dt-hasChild td.dt-control:before,
-:root[data-bs-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before {
+:root[data-bs-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before,
+:root[data-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before {
    border-top-color: rgba(255, 255, 255, 0.5);
    border-left-color: transparent;
}
+div.dt-scroll {
+   width: 100%;
+}
div.dt-scroll-body thead tr,
div.dt-scroll-body tfoot tr {
    height: 0;
@ -377,6 +383,31 @@ table.table.dataTable.table-hover > tbody > tr.selected:hover > * {
    box-shadow: inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.975);
}
div.dt-container div.dt-layout-start > *:not(:last-child) {
margin-right: 1em;
}
div.dt-container div.dt-layout-end > *:not(:first-child) {
margin-left: 1em;
}
div.dt-container div.dt-layout-full {
width: 100%;
}
div.dt-container div.dt-layout-full > *:only-child {
margin-left: auto;
margin-right: auto;
}
div.dt-container div.dt-layout-table > div {
display: block !important;
}
@media screen and (max-width: 767px) {
div.dt-container div.dt-layout-start > *:not(:last-child) {
margin-right: 0;
}
div.dt-container div.dt-layout-end > *:not(:first-child) {
margin-left: 0;
}
}
div.dt-container div.dt-length label {
    font-weight: normal;
    text-align: left;
@ -400,9 +431,6 @@ div.dt-container div.dt-search input {
    display: inline-block;
    width: auto;
}
-div.dt-container div.dt-info {
-   padding-top: 0.85em;
-}
div.dt-container div.dt-paging {
    margin: 0;
}

File diff suppressed because it is too large

@ -132,6 +132,21 @@
<span class="d-block" title="We have direct internet access, no outgoing proxy configured."><b>No</b></span> <span class="d-block" title="We have direct internet access, no outgoing proxy configured."><b>No</b></span>
{{/unless}} {{/unless}}
</dd> </dd>
<dt class="col-sm-5">Websocket enabled
{{#if page_data.enable_websocket}}
<span class="badge bg-success d-none" id="websocket-success" title="Websocket connection is working.">Ok</span>
<span class="badge bg-danger d-none" id="websocket-error" title="Websocket connection error, validate your reverse proxy configuration!">Error</span>
{{/if}}
</dt>
<dd class="col-sm-7">
{{#if page_data.enable_websocket}}
<span class="d-block" title="Websocket connections are enabled (ENABLE_WEBSOCKET is true)."><b>Yes</b></span>
{{/if}}
{{#unless page_data.enable_websocket}}
<span class="d-block" title="Websocket connections are disabled (ENABLE_WEBSOCKET is false)."><b>No</b></span>
{{/unless}}
</dd>
<dt class="col-sm-5">DNS (github.com) <dt class="col-sm-5">DNS (github.com)
<span class="badge bg-success d-none" id="dns-success" title="DNS Resolving works!">Ok</span> <span class="badge bg-success d-none" id="dns-success" title="DNS Resolving works!">Ok</span>
<span class="badge bg-danger d-none" id="dns-warning" title="DNS Resolving failed. Please fix.">Error</span> <span class="badge bg-danger d-none" id="dns-warning" title="DNS Resolving failed. Please fix.">Error</span>
@ -167,6 +182,14 @@
<span id="domain-server" class="d-block"><b>Server:</b> <span id="domain-server-string">{{page_data.admin_url}}</span></span> <span id="domain-server" class="d-block"><b>Server:</b> <span id="domain-server-string">{{page_data.admin_url}}</span></span>
<span id="domain-browser" class="d-block"><b>Browser:</b> <span id="domain-browser-string"></span></span> <span id="domain-browser" class="d-block"><b>Browser:</b> <span id="domain-browser-string"></span></span>
</dd> </dd>
<dt class="col-sm-5">HTTP Response validation
<span class="badge bg-success d-none" id="http-response-success" title="All headers and HTTP request responses seem to be ok.">Ok</span>
<span class="badge bg-danger d-none" id="http-response-warning" title="Some headers or HTTP request responses return invalid data!">Error</span>
</dt>
<dd class="col-sm-7">
<span id="http-response-errors" class="d-block"></span>
</dd>
</dl> </dl>
</div> </div>
</div> </div>

@ -19,7 +19,7 @@
<tr> <tr>
<td> <td>
<svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{email}}"> <svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{email}}">
<div class="float-start"> <div>
<strong>{{name}}</strong> <strong>{{name}}</strong>
<span class="d-block">{{email}}</span> <span class="d-block">{{email}}</span>
<span class="d-block"> <span class="d-block">

@ -51,9 +51,11 @@ impl Fairing for AppHeaders {
        }
    }
+   // NOTE: When modifying or adding security headers be sure to also update the diagnostic checks in `src/static/scripts/admin_diagnostics.js` in `checkSecurityHeaders`
    res.set_raw_header("Permissions-Policy", "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), payment=(), picture-in-picture=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=()");
    res.set_raw_header("Referrer-Policy", "same-origin");
    res.set_raw_header("X-Content-Type-Options", "nosniff");
+   res.set_raw_header("X-Robots-Tag", "noindex, nofollow");
    // Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP
    res.set_raw_header("X-XSS-Protection", "0");
@ -96,10 +98,11 @@ impl Fairing for AppHeaders {
                https://app.addy.io/api/ \
                https://api.fastmail.com/ \
                https://api.forwardemail.net \
-               ;\
+               {allowed_connect_src};\
                ",
            icon_service_csp = CONFIG._icon_service_csp(),
-           allowed_iframe_ancestors = CONFIG.allowed_iframe_ancestors()
+           allowed_iframe_ancestors = CONFIG.allowed_iframe_ancestors(),
+           allowed_connect_src = CONFIG.allowed_connect_src(),
        );
        res.set_raw_header("Content-Security-Policy", csp);
        res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
