From c61afd966f3793a6bb0ca056d191e93d7d474750 Mon Sep 17 00:00:00 2001 From: Timshel Date: Thu, 12 Sep 2024 15:18:39 +0200 Subject: [PATCH] Improvements and error handling --- .env.template | 24 + Cargo.lock | 663 +++++++++++++++++- Cargo.toml | 3 +- SSO.md | 286 ++++++++ docker/start.sh | 7 + .../mysql/2023-02-01-133000_add_sso/up.sql | 3 - .../down.sql | 0 .../mysql/2023-09-10-133000_add_sso/up.sql | 4 + .../down.sql | 1 + .../up.sql | 1 + .../down.sql | 6 + .../up.sql | 8 + .../down.sql | 8 + .../up.sql | 9 + .../2024-03-06-170000_add_sso_users/down.sql | 1 + .../2024-03-06-170000_add_sso_users/up.sql | 7 + .../down.sql | 0 .../up.sql | 2 + .../2023-02-01-133000_add_sso/up.sql | 3 - .../down.sql | 0 .../2023-09-10-133000_add_sso/up.sql | 4 + .../down.sql | 1 + .../up.sql | 1 + .../down.sql | 6 + .../up.sql | 8 + .../down.sql | 8 + .../up.sql | 9 + .../2024-03-06-170000_add_sso_users/down.sql | 1 + .../2024-03-06-170000_add_sso_users/up.sql | 7 + .../down.sql | 0 .../up.sql | 3 + .../sqlite/2023-02-01-133000_add_sso/up.sql | 3 - .../down.sql | 0 .../sqlite/2023-09-10-133000_add_sso/up.sql | 4 + .../down.sql | 1 + .../up.sql | 1 + .../down.sql | 6 + .../up.sql | 8 + .../down.sql | 8 + .../up.sql | 9 + .../2024-03-06-170000_add_sso_users/down.sql | 1 + .../2024-03-06-170000_add_sso_users/up.sql | 7 + .../down.sql | 0 .../2024-03-13_170000_sso_userscascade/up.sql | 9 + src/api/admin.rs | 2 +- src/api/core/accounts.rs | 40 +- src/api/core/emergency_access.rs | 2 +- src/api/core/organizations.rs | 56 +- src/api/core/public.rs | 14 +- src/api/identity.rs | 607 +++++++--------- src/api/mod.rs | 2 +- src/auth.rs | 267 ++++++- src/config.rs | 171 ++++- src/db/models/device.rs | 65 +- src/db/models/mod.rs | 2 +- src/db/models/org_policy.rs | 5 +- src/db/models/organization.rs | 23 +- src/db/models/sso_nonce.rs | 47 +- src/db/models/user.rs | 62 +- src/db/schemas/mysql/schema.rs | 16 +- src/db/schemas/postgresql/schema.rs | 16 +- src/db/schemas/sqlite/schema.rs | 16 +- src/error.rs | 4 + src/mail.rs | 18 +- src/main.rs | 8 + src/sso.rs | 544 ++++++++++++++ .../templates/email/sso_change_email.hbs | 4 + .../templates/email/sso_change_email.html.hbs | 11 + src/util.rs | 63 +- 69 files changed, 2604 insertions(+), 602 deletions(-) create mode 100644 SSO.md delete mode 100644 migrations/mysql/2023-02-01-133000_add_sso/up.sql rename migrations/mysql/{2023-02-01-133000_add_sso => 2023-09-10-133000_add_sso}/down.sql (100%) create mode 100644 migrations/mysql/2023-09-10-133000_add_sso/up.sql create mode 100644 migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql create mode 100644 migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql create mode 100644 migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql create mode 100644 migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql create mode 100644 migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql create mode 100644 migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql create mode 100644 migrations/mysql/2024-03-06-170000_add_sso_users/down.sql create mode 100644 migrations/mysql/2024-03-06-170000_add_sso_users/up.sql create mode 100644 migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql create mode 100644 migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql delete mode 100644 migrations/postgresql/2023-02-01-133000_add_sso/up.sql rename migrations/postgresql/{2023-02-01-133000_add_sso => 2023-09-10-133000_add_sso}/down.sql (100%) 
create mode 100644 migrations/postgresql/2023-09-10-133000_add_sso/up.sql create mode 100644 migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql create mode 100644 migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql create mode 100644 migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql create mode 100644 migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql create mode 100644 migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql create mode 100644 migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql create mode 100644 migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql create mode 100644 migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql create mode 100644 migrations/postgresql/2024-03-13-170000_sso_users_cascade/down.sql create mode 100644 migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql delete mode 100644 migrations/sqlite/2023-02-01-133000_add_sso/up.sql rename migrations/sqlite/{2023-02-01-133000_add_sso => 2023-09-10-133000_add_sso}/down.sql (100%) create mode 100644 migrations/sqlite/2023-09-10-133000_add_sso/up.sql create mode 100644 migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql create mode 100644 migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql create mode 100644 migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql create mode 100644 migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql create mode 100644 migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql create mode 100644 migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql create mode 100644 migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql create mode 100644 migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql create mode 100644 migrations/sqlite/2024-03-13_170000_sso_userscascade/down.sql create mode 100644 migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql create mode 100644 src/sso.rs create mode 100644 src/static/templates/email/sso_change_email.hbs create mode 100644 src/static/templates/email/sso_change_email.html.hbs diff --git a/.env.template b/.env.template index 37d762ca..f7e77b88 100644 --- a/.env.template +++ b/.env.template @@ -161,6 +161,10 @@ ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. ## Defaults to every minute. Set blank to disable this job. # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" +# +## Cron schedule of the job that cleans sso nonce from incomplete flow +## Defaults to daily (20 minutes after midnight). Set blank to disable this job. +# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *" ######################## ### General settings ### @@ -440,11 +444,31 @@ # SSO_ENABLED=false ## Prevent users from logging in directly without going through SSO # SSO_ONLY=false +## On SSO Signup if a user with a matching email already exists make the association +# SSO_SIGNUPS_MATCH_EMAIL=true ## Base URL of the OIDC server (auto-discovery is used) +## - Should not include the `/.well-known/openid-configuration` part and no trailing `/` +## - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse # SSO_AUTHORITY=https://auth.example.com +## Authorization request scopes. 
Optional SSO scopes, override if email and profile are not enough (`openid` is implicit). +#SSO_SCOPES="email profile" +## Additionnal authorization url parameters (ex: to obtain a `refresh_token` with Google Auth). +# SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent" +## Activate PKCE for the Auth Code flow. Recommended but disabled for now waiting for feedback on support. +# SSO_PKCE=false +## Regex to add additionnal trusted audience to Id Token (by default only the client_id is trusted). +# SSO_AUDIENCE_TRUSTED='^$' ## Set your Client ID and Client Key # SSO_CLIENT_ID=11111 # SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA +## Optional Master password policy (minComplexity=[0-4]) +# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' +## Use sso only for authentication not the session lifecycle +# SSO_AUTH_ONLY_NOT_SESSION=false +## Client cache for discovery endpoint. Duration in seconds (0 to disable). +# SSO_CLIENT_CACHE_EXPIRATION=0 +## Log all the tokens, LOG_LEVEL=debug is required +# SSO_DEBUG_TOKENS=false ######################## ### MFA/2FA settings ### diff --git a/Cargo.lock b/Cargo.lock index ae469d23..20ff0c72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -55,9 +55,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.19" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "611cc2ae7d2e242c457e4be7f97036b8ad9ca152b499f53faf99b1ed8fc2553f" +checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" [[package]] name = "android-tzdata" @@ -326,6 +326,12 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base64" version = "0.13.1" @@ -439,6 +445,12 @@ version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + [[package]] name = "bytemuck" version = "1.19.0" @@ -493,6 +505,37 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" +[[package]] +name = "camino" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", +] + [[package]] name = "cc" version = "1.1.37" @@ -516,8 +559,10 @@ checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", 
"iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-targets 0.52.6", ] @@ -561,6 +606,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + [[package]] name = "cookie" version = "0.18.1" @@ -635,12 +686,33 @@ dependencies = [ "once_cell", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -651,6 +723,33 @@ dependencies = [ "typenum", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "darling" version = "0.20.10" @@ -686,6 +785,19 @@ dependencies = [ "syn", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "dashmap" version = "6.1.0" @@ -712,6 +824,17 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + [[package]] name = "deranged" version = "0.3.11" @@ -719,6 +842,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] [[package]] @@ -828,6 +952,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -872,12 +997,77 @@ dependencies = [ "syn", ] +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "serde", + "sha2", + "subtle", + "zeroize", +] + [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pem-rfc7468", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + [[package]] name = "email-encoding" version = "0.3.0" @@ -988,6 +1178,22 @@ dependencies = [ "syslog", ] +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + [[package]] name = "figment" version = "0.10.19" @@ -1171,6 +1377,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -1217,7 +1424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0746aa765db78b521451ef74221663b57ba595bf83f75d0ce23cc09447c8139f" dependencies = [ "cfg-if", - "dashmap", + "dashmap 6.1.0", "futures-sink", "futures-timer", "futures-util", @@ -1231,6 +1438,17 @@ dependencies = [ "spinning_top", ] +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + [[package]] name = "h2" version = "0.3.26" @@ -1243,7 +1461,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -1262,7 +1480,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap", + "indexmap 2.6.0", "slab", "tokio", "tokio-util", @@ -1291,6 +1509,12 @@ dependencies = [ "walkdir", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.5" @@ -1325,6 +1549,12 @@ version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + [[package]] name = "hickory-proto" version = "0.24.1" @@ -1370,6 +1600,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -1531,6 +1770,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.31", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.3" @@ -1784,6 +2037,17 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + [[package]] name = "indexmap" version = "2.6.0" @@ -1830,6 +2094,15 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -1891,6 +2164,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "lettre" @@ -2080,6 +2356,21 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "mini-moka" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c325dfab65f261f386debee8b0969da215b3fa0037e74c8a1234db7ba986d803" +dependencies = [ + "crossbeam-channel", + "crossbeam-utils", + "dashmap 5.5.3", + "skeptic", + "smallvec", + "tagptr", + "triomphe", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -2195,6 +2486,23 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2221,6 +2529,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-modular" version = "0.6.1" @@ -2243,6 +2562,7 @@ source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2264,6 +2584,26 @@ dependencies = [ "libc", ] +[[package]] +name = "oauth2" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c38841cdd844847e3e7c8d29cef9dcfed8877f8f56f9071f77843ecf3baf937f" +dependencies = [ + "base64 0.13.1", + "chrono", + "getrandom", + "http 0.2.12", + "rand", + "reqwest 0.11.27", + "serde", + "serde_json", + "serde_path_to_error", + "sha2", + "thiserror", + "url", +] + [[package]] name = "object" version = "0.36.5" @@ -2279,6 +2619,38 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "openidconnect" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f47e80a9cfae4462dd29c41e987edd228971d6565553fbc14b8a11e666d91590" +dependencies = [ + "base64 0.13.1", + "chrono", + "dyn-clone", + "ed25519-dalek", + "hmac", + "http 0.2.12", + "itertools", + "log", + "oauth2", + "p256", + "p384", + "rand", + "rsa", + "serde", + "serde-value", + "serde_derive", + "serde_json", + "serde_path_to_error", + "serde_plain", + "serde_with", + "sha2", + "subtle", + "thiserror", + "url", +] + [[package]] name = "openssl" version = "0.10.68" @@ -2333,12 +2705,45 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "p384" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + [[package]] name = "parking" version = "2.2.1" @@ -2427,6 +2832,15 @@ dependencies = [ "serde", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -2545,6 +2959,27 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" version = "0.3.31" @@ -2596,6 +3031,15 @@ dependencies = [ "vcpkg", ] +[[package]] +name = 
"primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + [[package]] name = "proc-macro2" version = "1.0.89" @@ -2643,6 +3087,17 @@ dependencies = [ "psl-types", ] +[[package]] +name = "pulldown-cmark" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +dependencies = [ + "bitflags 2.6.0", + "memchr", + "unicase", +] + [[package]] name = "quanta" version = "0.12.3" @@ -2766,7 +3221,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -2781,9 +3236,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -2828,6 +3283,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.31", + "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -2837,6 +3293,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -2845,11 +3302,13 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", + "webpki-roots", "winreg", ] @@ -2872,7 +3331,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.5.0", - "hyper-rustls", + "hyper-rustls 0.27.3", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -2912,6 +3371,16 @@ dependencies = [ "quick-error", ] +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + [[package]] name = "ring" version = "0.17.8" @@ -2962,7 +3431,7 @@ dependencies = [ "either", "figment", "futures", - "indexmap", + "indexmap 2.6.0", "log", "memchr", "multer", @@ -2994,7 +3463,7 @@ checksum = "575d32d7ec1a9770108c879fc7c47815a80073f96ca07ff9525a94fcede1dd46" dependencies = [ "devise", "glob", - "indexmap", + "indexmap 2.6.0", "proc-macro2", "quote", "rocket_http", @@ -3014,7 +3483,7 @@ dependencies = [ "futures", "http 0.2.12", "hyper 0.14.31", - "indexmap", + "indexmap 2.6.0", "log", "memchr", "pear", @@ -3054,6 +3523,26 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rsa" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rtoolbox" version = "0.0.2" @@ -3070,6 +3559,15 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc_version" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.38.40" @@ -3214,6 +3712,20 @@ dependencies = [ "untrusted", ] +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + [[package]] name = "security-framework" version = "2.11.1" @@ -3242,6 +3754,9 @@ name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +dependencies = [ + "serde", +] [[package]] name = "serde" @@ -3252,6 +3767,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_cbor" version = "0.11.2" @@ -3285,6 +3810,25 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_plain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1fc6db65a611022b23a0dec6975d63fb80a302cb3388835ff02c097258d50" +dependencies = [ + "serde", +] + [[package]] name = "serde_spanned" version = "0.6.8" @@ -3306,6 +3850,36 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.6.0", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sha1" version = "0.10.6" @@ -3362,6 +3936,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + [[package]] name = "simple_asn1" version = "0.6.2" @@ -3380,6 +3964,21 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "skeptic" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" +dependencies = [ + "bytecount", + "cargo_metadata", + "error-chain", + "glob", + "pulldown-cmark", + "tempfile", + "walkdir", +] + [[package]] name = "slab" version = "0.4.9" @@ -3420,6 +4019,16 @@ dependencies = [ "lock_api", ] 
+[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + [[package]] name = "stable-pattern" version = "0.1.0" @@ -3561,6 +4170,12 @@ dependencies = [ "libc", ] +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tempfile" version = "3.14.0" @@ -3806,7 +4421,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap", + "indexmap 2.6.0", "serde", "serde_spanned", "toml_datetime", @@ -3893,6 +4508,12 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "triomphe" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" + [[package]] name = "try-lock" version = "0.2.5" @@ -3949,6 +4570,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "unicase" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" + [[package]] name = "unicode-bidi" version = "0.3.17" @@ -4045,7 +4672,7 @@ dependencies = [ "chrono-tz", "cookie", "cookie_store", - "dashmap", + "dashmap 6.1.0", "data-encoding", "data-url", "diesel", @@ -4065,9 +4692,11 @@ dependencies = [ "libsqlite3-sys", "log", "mimalloc", + "mini-moka", "num-derive", "num-traits", "once_cell", + "openidconnect", "openssl", "paste", "percent-encoding", @@ -4251,6 +4880,12 @@ dependencies = [ "url", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "which" version = "7.0.0" diff --git a/Cargo.toml b/Cargo.toml index 2df213fe..cdc3add9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -150,7 +150,8 @@ paste = "1.0.15" governor = "0.7.0" # OIDC for SSO -openidconnect = "3.4.0" +openidconnect = "3.5.0" +mini-moka = "0.10.2" # Check client versions for specific features. semver = "1.0.23" diff --git a/SSO.md b/SSO.md new file mode 100644 index 00000000..0ccdc349 --- /dev/null +++ b/SSO.md @@ -0,0 +1,286 @@ +# SSO using OpenId Connect + +To use an external source of authentication your SSO will need to support OpenID Connect : + +- An OpenID Connect Discovery endpoint should be available +- Client authentication will be done using Id and Secret. + +A master password will still be required and not controlled by the SSO (depending on your point of view this might be a feature ;). +This introduces another way to control who can use the vault without having to use invitation or using an LDAP. 
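+
+As a quick orientation, a minimal configuration (all options are detailed in the next section) could look like the sketch below. The values are the placeholders from `.env.template`; adjust them for your provider.
+
+```conf
+SSO_ENABLED=true
+SSO_AUTHORITY=https://auth.example.com
+SSO_CLIENT_ID=11111
+SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA
+```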
+
+## Configuration
+
+The following configuration options are available:
+
+ - `SSO_ENABLED`: Activate the SSO
+ - `SSO_ONLY`: Disable email + Master password authentication
+ - `SSO_SIGNUPS_MATCH_EMAIL`: On SSO signup, if a user with a matching email already exists, make the association (default `true`)
+ - `SSO_AUTHORITY`: The OpenID Connect Discovery endpoint of your SSO
+   - Should not include the `/.well-known/openid-configuration` part and no trailing `/`
+   - `${SSO_AUTHORITY}/.well-known/openid-configuration` should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse
+ - `SSO_SCOPES`: Optional, allows overriding the scopes if needed (default `"email profile"`)
+ - `SSO_AUTHORIZE_EXTRA_PARAMS`: Optional, allows adding extra parameters to the authorize redirection (default `""`)
+ - `SSO_PKCE`: Activate PKCE for the Auth Code flow. Recommended but disabled for now while waiting for feedback on support (default `false`).
+ - `SSO_AUDIENCE_TRUSTED`: Optional, regex to trust additional audiences in the IdToken (`client_id` is always trusted). Use single quotes when writing the regex: `'^$'`.
+ - `SSO_CLIENT_ID`: Client Id
+ - `SSO_CLIENT_SECRET`: Client Secret
+ - `SSO_MASTER_PASSWORD_POLICY`: Optional Master password policy
+ - `SSO_AUTH_ONLY_NOT_SESSION`: Enable to use SSO only for authentication, not for the session lifecycle
+ - `SSO_CLIENT_CACHE_EXPIRATION`: Cache calls to the discovery endpoint, duration in seconds, `0` to disable (default `0`)
+ - `SSO_DEBUG_TOKENS`: Log all tokens (default `false`, `LOG_LEVEL=debug` is required)
+
+The callback URL is: `https://your.domain/identity/connect/oidc-signin`
+
+## Account and Email handling
+
+When logging in with SSO, an identifier (`{iss}/{sub}` claims from the IdToken) is saved in a separate table (`sso_users`).
+This is used to link to the SSO provider identifier without changing the default Vaultwarden user `uuid`. This is needed because:
+
+ - Storing the SSO identifier is important to prevent account takeover due to an email change.
+ - We can't use the identifier as the user uuid since it can be much longer (max 255 chars for the `sub` part, cf. [spec](https://openid.net/specs/openid-connect-core-1_0.html#IDToken)).
+ - We want to be able to associate an existing account based on `email`, but only when the user logs in for the first time (controlled by `SSO_SIGNUPS_MATCH_EMAIL`).
+ - We need to be able to associate with an existing stub account, such as the one created when inviting a user to an org (association is possible only if the user does not have a private key).
+
+Additionally:
+
+ - Signup to Vaultwarden will be blocked if the Provider reports the email as `unverified`.
+ - Changing the email needs to be done by the user since it requires updating the `key`.
+   On login, if the email returned by the provider is not the one saved in Vaultwarden, an email will be sent to the user asking them to update it.
+ - If set, `SIGNUPS_DOMAINS_WHITELIST` is applied on SSO signup and when attempting to change the email.
+
+This means that if you ever need to change the provider URL or the provider itself, you'll first have to delete the association
+and then ensure that `SSO_SIGNUPS_MATCH_EMAIL` is activated to allow a new association.
+
+To delete the association (this has no impact on the `Vaultwarden` user):
+
+```sql
+TRUNCATE TABLE sso_users;
+```
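+
+If you only want to inspect the current associations before deleting anything, a read-only query works as well. This is just a sketch: the `sso_users` columns come from the migrations in this patch, and it assumes the standard `users.uuid`/`users.email` columns.
+
+```sql
+-- List which Vaultwarden users are currently linked to an SSO identifier
+SELECT u.email, s.identifier, s.created_at
+FROM sso_users s
+JOIN users u ON u.uuid = s.user_uuid;
+```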
+
+## Client Cache
+
+By default the client cache is disabled since it can cause issues with the signing keys.
+\
+This means that the discovery endpoint will be called again each time we need to interact with the provider (generating the authorize_url, exchanging the authorize code, refreshing tokens).
+This is suboptimal, so `SSO_CLIENT_CACHE_EXPIRATION` allows you to configure an expiration that should work for your provider.
+
+As a protection against a misconfigured expiration, if the validation of the `IdToken` fails then the client cache is invalidated (but you'll periodically have an unlucky user ^^).
+
+### Google example (Rolling keys)
+
+If we take Google as an example, checking the discovery [endpoint](https://accounts.google.com/.well-known/openid-configuration) response headers we can see that the `max-age` of the cache control is set to `3600` seconds, and the [jwk_uri](https://www.googleapis.com/oauth2/v3/certs) response headers usually contain a `max-age` with an even bigger value.
+\
+Combined with user [feedback](https://github.com/ramosbugs/openidconnect-rs/issues/152) we can conclude that Google will roll the signing keys each week.
+
+Setting the cache expiration too high has diminishing returns, but using something like `600` (10 min) should provide plenty of benefits.
+
+### Rolling keys manually
+
+If you want to roll the signing key, first add a new one but do not immediately start signing with it.
+Wait for the delay you configured in `SSO_CLIENT_CACHE_EXPIRATION`, then you can start signing with it.
+
+As mentioned in the Google example, setting too high a value has diminishing returns even if you do not plan to roll the keys.
+
+## Keycloak
+
+The default access token lifetime might be only `5min`; set a longer value, otherwise it will collide with the `Vaultwarden` front-end expiration detection, which is also set at `5min`.
+\
+At the realm level set:
+
+- `Realm settings / Tokens / Access Token Lifespan` to at least `10min` (`accessTokenLifespan` setting when using `kcadm.sh`, see the sketch below).
+- `Realm settings / Sessions / SSO Session Idle/Max` for the refresh token lifetime.
+
+Or, for a specific client, in `Clients / Client details / Advanced / Advanced settings` you can find `Access Token Lifespan` and `Client Session Idle/Max`.
+
+Server configuration, nothing specific, just set:
+
+- `SSO_AUTHORITY=https://${domain}/realms/${realm_name}`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
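+
+As a sketch of how to do this with the Keycloak admin CLI mentioned above (the server URL, realm name, and admin user are placeholders; `kcadm.sh` will prompt for the password):
+
+```bash
+# Authenticate the CLI against the master realm
+kcadm.sh config credentials --server https://keycloak.example.com --realm master --user admin
+
+# Raise the realm-level access token lifespan to 10 minutes (600 seconds)
+kcadm.sh update realms/myrealm -s accessTokenLifespan=600
+```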
+
+## Auth0
+
+Not working due to the following issue https://github.com/ramosbugs/openidconnect-rs/issues/23 (they appear not to follow the spec).
+A feature flag is available to bypass the issue, but since it's a compile-time feature you will have to patch `Vaultwarden` with something like:
+
+```patch
+diff --git a/Cargo.toml b/Cargo.toml
+index 0524a7be..9999e852 100644
+--- a/Cargo.toml
++++ b/Cargo.toml
+@@ -150,7 +150,7 @@ paste = "1.0.15"
+ governor = "0.6.3"
+ 
+ # OIDC for SSO
+-openidconnect = "3.5.0"
++openidconnect = { version = "3.5.0", features = ["accept-rfc3339-timestamps"] }
+ mini-moka = "0.10.2"
+```
+
+There is no plan at the moment to always activate the feature or to make a specific distribution for Auth0.
+
+## Authelia
+
+To obtain a `refresh_token` and be able to extend the session you'll need to add the `offline_access` scope.
+
+Config will look like:
+
+- `SSO_SCOPES="email profile offline_access"`
+
+## Authentik
+
+The default access token lifetime might be only `5min`; set a longer value, otherwise it will collide with the `Vaultwarden` front-end expiration detection, which is also set at `5min`.
+\
+To change the token expiration go to `Applications / Providers / Edit / Advanced protocol settings`.
+
+Starting with version `2024.2` you will need to add the `offline_access` scope and ensure it's selected in `Applications / Providers / Edit / Advanced protocol settings / Scopes` ([Doc](https://docs.goauthentik.io/docs/providers/oauth2/#authorization_code)).
+
+Server configuration should look like:
+
+- `SSO_AUTHORITY=https://${domain}/application/o/${application_name}/`: the trailing `/` is important
+- `SSO_SCOPES="email profile offline_access"`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
+
+## Casdoor
+
+Should work since version [v1.639.0](https://github.com/casdoor/casdoor/releases/tag/v1.639.0) (tested with version [v1.686.0](https://github.com/casdoor/casdoor/releases/tag/v1.686.0)).
+When creating the application you will need to select `Token format -> JWT-Standard`.
+
+Then configure your server with:
+
+- `SSO_AUTHORITY=https://${provider_host}`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
+
+## GitLab
+
+Create an application in your GitLab settings with:
+
+- `redirectURI`: https://your.domain/identity/connect/oidc-signin
+- `Confidential`: `true`
+- `scopes`: `openid`, `profile`, `email`
+
+Then configure your server with:
+
+- `SSO_AUTHORITY=https://gitlab.com`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
+
+## Google Auth
+
+Google [Documentation](https://developers.google.com/identity/openid-connect/openid-connect).
+\
+By default, without extra [configuration](https://developers.google.com/identity/protocols/oauth2/web-server#creatingclient), you won't have a `refresh_token` and the session will be limited to 1h.
+
+Configure your server with:
+
+- `SSO_AUTHORITY=https://accounts.google.com`
+- `SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent"`
+- `SSO_PKCE=true`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+
+## Kanidm
+
+Kanidm recommends always running with PKCE.
+
+Config will look like:
+
+- `SSO_PKCE=true`
+
+Otherwise you can disable the PKCE requirement with: `kanidm system oauth2 warning-insecure-client-disable-pkce CLIENT_NAME --name admin`.
+
+## Microsoft Entra ID
+
+1. Create an "App registration" in [Entra ID](https://entra.microsoft.com/) following [Identity | Applications | App registrations](https://entra.microsoft.com/#blade/Microsoft_AAD_RegisteredApps/ApplicationsListBlade/quickStartType//sourceType/Microsoft_AAD_IAM).
+2. From the "Overview" of your "App registration", you'll need the "Directory (tenant) ID" for the `SSO_AUTHORITY` variable and the "Application (client) ID" as the `SSO_CLIENT_ID` value.
+3. In "Certificates & Secrets" create an "App secret"; you'll need the "Secret Value" for the `SSO_CLIENT_SECRET` variable.
+4. In "Authentication" add `https://your.domain/identity/connect/oidc-signin` as a "Web Redirect URI".
+5. In "API Permissions" make sure you have `profile`, `email` and `offline_access` listed under "API / Permission name" (`offline_access` is required, otherwise no refresh_token is returned).
+
+Only the v2 endpoint is compliant with the OpenID spec.
+
+Your configuration should look like this:
+
+* `SSO_AUTHORITY=https://login.microsoftonline.com/${Directory (tenant) ID}/v2.0`
+* `SSO_SCOPES="email profile offline_access"`
+* `SSO_CLIENT_ID=${Application (client) ID}`
+* `SSO_CLIENT_SECRET=${Secret Value}`
+
+## Zitadel
+
+To obtain a `refresh_token` and be able to extend the session you'll need to add the `offline_access` scope.
+
+Additionally, Zitadel includes the `Project id` and the `Client Id` in the audience of the Id Token.
+For the validation to work you will need to add the `Resource Id` as a trusted audience (`Client Id` is trusted by default).
+You can control the trusted audience with the config `SSO_AUDIENCE_TRUSTED`.
+
+It appears it's not possible to use PKCE with a confidential client, so it needs to be disabled.
+
+Config will look like:
+
+- `SSO_AUTHORITY=https://${provider_host}`
+- `SSO_SCOPES="email profile offline_access"`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_AUDIENCE_TRUSTED='^${Project Id}$'`
+- `SSO_PKCE=false`
+
+## Session lifetime
+
+Session lifetime is dependent on the refresh token and access token returned after calling your SSO token endpoint (grant type `authorization_code`).
+If no refresh token is returned then the session will be limited to the access token lifetime.
+
+Tokens are not persisted in Vaultwarden but wrapped in JWT tokens and returned to the application (the `refresh_token` and `access_token` values returned by the VW `identity/connect/token` endpoint).
+Note that Vaultwarden will always return a `refresh_token` for compatibility reasons with the web front-end, and its presence does not indicate that a refresh token was returned by your SSO (but you can decode its value and check whether the `token` field contains anything).
+
+With a refresh token present, activity in the application will trigger a refresh of the access token when it's close to expiration ([5min](https://github.com/bitwarden/clients/blob/0bcb45ed5caa990abaff735553a5046e85250f24/libs/common/src/auth/services/token.service.ts#L126) in the web client).
+
+Additionally, for certain actions a token check is performed: if we have a refresh token we will perform a refresh, otherwise we'll call the user information endpoint to check the access token validity.
+
+### Disabling SSO session handling
+
+If you are unable to obtain a `refresh_token`, or for any other reason, you can disable SSO session handling and revert to the default handling.
+You'll need to set `SSO_AUTH_ONLY_NOT_SESSION=true`; the access token will then be valid for 2h and the refresh token will allow for an idle time of 7 days (which can be extended indefinitely).
+
+### Debug information
+
+Running with `LOG_LEVEL=debug` you'll be able to see information on token expiration.
+
+## Desktop Client
+
+There are some issues handling the redirection from your browser (used for SSO login) to the application.
+
+### Chrome
+
+Probably not much hope: an [issue](https://github.com/bitwarden/clients/issues/2606) is open on the subject, and it appears that neither Linux nor Windows is working.
+
+### Firefox
+
+On Windows you'll be presented with a prompt the first time you log in to confirm which application should be launched (but there is a bug at the moment: you might end up with an empty vault after login).
+
+On Linux it's a bit more tricky.
+First you'll need to add some config in `about:config`:
+
+```conf
+network.protocol-handler.expose.bitwarden=false
+network.protocol-handler.external.bitwarden=true
+```
+
+If you have any doubt you can check `mailto` to see how it's configured.
+
+The redirection will still not work since it appears that the association to an application can only be done on a link/click. You can trigger it with a dummy page such as:
+
+```html
+data:text/html,Click me to register Bitwarden
+```
+
+From now on the redirection should work.
+If you need to change the application launched you can now find it in `Settings` by using the search function and entering `application`. diff --git a/docker/start.sh b/docker/start.sh index 4fac4514..1f50883d 100755 --- a/docker/start.sh +++ b/docker/start.sh @@ -26,4 +26,11 @@ elif [ -d /etc/bitwarden_rs.d ]; then done fi +# Toggle the SSO Link +if [ "$SSO_ENABLED" = "true" ]; then + sed -i 's#a\[routerlink="/sso"\]#a\[routerlink="/sso-sed"\]#' /web-vault/app/main.*.css +else + sed -i 's#a\[routerlink="/sso-sed"\]#a\[routerlink="/sso"\]#' /web-vault/app/main.*.css +fi + exec /vaultwarden "${@}" diff --git a/migrations/mysql/2023-02-01-133000_add_sso/up.sql b/migrations/mysql/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index c10ab5cf..00000000 --- a/migrations/mysql/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); diff --git a/migrations/mysql/2023-02-01-133000_add_sso/down.sql b/migrations/mysql/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/mysql/2023-02-01-133000_add_sso/down.sql rename to migrations/mysql/2023-09-10-133000_add_sso/down.sql diff --git a/migrations/mysql/2023-09-10-133000_add_sso/up.sql b/migrations/mysql/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..518664df --- /dev/null +++ b/migrations/mysql/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..bce31222 --- /dev/null +++ b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..f73aeea9 --- /dev/null +++ b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..c033f7cb 
--- /dev/null +++ b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..42fb0efa --- /dev/null +++ b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql b/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git a/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql b/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..7809d43e --- /dev/null +++ b/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier VARCHAR(768) NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT now(), + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql b/migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql new file mode 100644 index 00000000..4e06fe58 --- /dev/null +++ b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql @@ -0,0 +1,2 @@ +ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; +ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; diff --git a/migrations/postgresql/2023-02-01-133000_add_sso/up.sql b/migrations/postgresql/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index 57f976c1..00000000 --- a/migrations/postgresql/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); \ No newline at end of file diff --git a/migrations/postgresql/2023-02-01-133000_add_sso/down.sql b/migrations/postgresql/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/postgresql/2023-02-01-133000_add_sso/down.sql rename to migrations/postgresql/2023-09-10-133000_add_sso/down.sql diff --git a/migrations/postgresql/2023-09-10-133000_add_sso/up.sql b/migrations/postgresql/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..1321e246 --- /dev/null +++ b/migrations/postgresql/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ 
b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..7cf4d9d6 --- /dev/null +++ b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..f7402460 --- /dev/null +++ b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..ef209a45 --- /dev/null +++ b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..f2dedfc9 --- /dev/null +++ b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql b/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git a/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql b/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..b74b5728 --- /dev/null +++ b/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT now(), + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/postgresql/2024-03-13-170000_sso_users_cascade/down.sql b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/down.sql new file mode 
100644 index 00000000..e69de29b diff --git a/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql new file mode 100644 index 00000000..38f97b4d --- /dev/null +++ b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE sso_users + DROP CONSTRAINT "sso_users_user_uuid_fkey", + ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; diff --git a/migrations/sqlite/2023-02-01-133000_add_sso/up.sql b/migrations/sqlite/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index c10ab5cf..00000000 --- a/migrations/sqlite/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); diff --git a/migrations/sqlite/2023-02-01-133000_add_sso/down.sql b/migrations/sqlite/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/sqlite/2023-02-01-133000_add_sso/down.sql rename to migrations/sqlite/2023-09-10-133000_add_sso/down.sql diff --git a/migrations/sqlite/2023-09-10-133000_add_sso/up.sql b/migrations/sqlite/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..518664df --- /dev/null +++ b/migrations/sqlite/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..3cbd4602 --- /dev/null +++ b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..13e95fd8 --- /dev/null +++ b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..e7a55bd8 --- /dev/null +++ 
b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..6b55e95d --- /dev/null +++ b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql b/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git a/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql b/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..6d015f04 --- /dev/null +++ b/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/sqlite/2024-03-13_170000_sso_userscascade/down.sql b/migrations/sqlite/2024-03-13_170000_sso_userscascade/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql b/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql new file mode 100644 index 00000000..53b09cf4 --- /dev/null +++ b/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_users; + +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE +); diff --git a/src/api/admin.rs b/src/api/admin.rs index cc902e39..84e716ec 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -294,7 +294,7 @@ async fn invite_user(data: Json, _token: AdminToken, mut conn: DbCon err_code!("User already exists", Status::Conflict.code) } - let mut user = User::new(data.email); + let mut user = User::new(data.email, None); async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { if CONFIG.mail_enabled() { diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index ddff80c0..3390418f 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -6,7 +6,7 @@ use serde_json::Value; use crate::{ api::{ core::{log_user_event, two_factor::email}, - register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify, + register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, }, auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, @@ -92,8 +92,7 @@ pub struct SetPasswordData { keys: Option, master_password_hash: String, master_password_hint: Option, - #[allow(dead_code)] - 
org_identifier: Option, + // org_identifier: Option, } #[derive(Debug, Deserialize)] @@ -103,13 +102,6 @@ struct KeysData { public_key: String, } -#[derive(Debug, Serialize, Deserialize)] -struct TokenPayload { - exp: i64, - email: String, - nonce: String, -} - /// Trims whitespace from password hints, and converts blank password hints to `None`. fn clean_password_hint(password_hint: &Option) -> Option { match password_hint { @@ -182,10 +174,7 @@ pub async fn _register(data: Json, mut conn: DbConn) -> JsonResult err!("Registration email does not match invite email") } } else if Invitation::take(&email, &mut conn).await { - for user_org in UserOrganization::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() { - user_org.status = UserOrgStatus::Accepted as i32; - user_org.save(&mut conn).await?; - } + UserOrganization::confirm_user_invitations(&user.uuid, &mut conn).await?; user } else if CONFIG.is_signup_allowed(&email) || (CONFIG.emergency_access_allowed() @@ -201,7 +190,7 @@ pub async fn _register(data: Json, mut conn: DbConn) -> JsonResult // because the vaultwarden admin can invite anyone, regardless // of other signup restrictions. if Invitation::take(&email, &mut conn).await || CONFIG.is_signup_allowed(&email) { - User::new(email.clone()) + User::new(email.clone(), None) } else { err!("Registration not allowed or user already exists") } @@ -272,7 +261,7 @@ async fn post_set_password(data: Json, headers: Headers, mut co // Check against the password hint setting here so if it fails, the user // can retry without losing their invitation below. - let password_hint = clean_password_hint(&data.master_password_hash); + let password_hint = clean_password_hint(&data.master_password_hint); enforce_password_hint_setting(&password_hint)?; if let Some(client_kdf_iter) = data.kdf_iterations { @@ -983,15 +972,30 @@ struct SecretVerificationRequest { master_password_hash: String, } +// Change the KDF Iterations if necessary +pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) -> ApiResult<()> { + if user.password_iterations != CONFIG.password_iterations() { + user.password_iterations = CONFIG.password_iterations(); + user.set_password(pwd_hash, None, false, None); + + if let Err(e) = user.save(conn).await { + error!("Error updating user: {:#?}", e); + } + } + Ok(()) +} + #[post("/accounts/verify-password", data = "")] -fn verify_password(data: Json, headers: Headers) -> JsonResult { +async fn verify_password(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { let data: SecretVerificationRequest = data.into_inner(); - let user = headers.user; + let mut user = headers.user; if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } + kdf_upgrade(&mut user, &data.master_password_hash, &mut conn).await?; + Ok(Json(json!({ "MasterPasswordPolicy": {}, // Required for SSO login with mobile apps }))) diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index 1c29b774..f8871298 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -239,7 +239,7 @@ async fn send_invite(data: Json, headers: Headers, mu invitation.save(&mut conn).await?; } - let mut user = User::new(email.clone()); + let mut user = User::new(email.clone(), None); user.save(&mut conn).await?; (user, true) } diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 9a44d6d8..d1922532 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -60,6 +60,7 @@ pub fn 
routes() -> Vec { list_policies, list_policies_token, list_policies_invited_user, + get_policy_master_password, get_policy, put_policy, get_organization_tax, @@ -174,7 +175,7 @@ async fn create_organization(headers: Headers, data: Json, mut conn: Db }; let org = Organization::new(data.name, data.billing_email, private_key, public_key); - let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone()); + let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone(), None); let collection = Collection::new(org.uuid.clone(), data.collection_name, None); user_org.akey = data.key; @@ -308,9 +309,13 @@ async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json })) } +// Called during the SSO enrollment. +// The `_identifier` should be the hardcoded value returned by `get_org_domain_sso_details`. +// The returned `Id` will then be passed to `get_policy_master_password`, which will mainly ignore it. #[get("/organizations/<_identifier>/auto-enroll-status")] -fn get_auto_enroll_status(_identifier: String) -> JsonResult { +fn get_auto_enroll_status(_identifier: &str) -> JsonResult { Ok(Json(json!({ + "Id": "_", "ResetPasswordEnabled": false, // Not implemented }))) } @@ -790,6 +795,9 @@ async fn _get_org_details(org_id: &str, host: &str, user_uuid: &str, conn: &mut json!(ciphers_json) } +// Endpoint called when the user selects SSO login (body: `{ "email": "" }`). +// Returning a Domain/Organization here allows prefilling it and prevents prompting the user. +// Vaultwarden SSO login is not linked to an Org, so we set a dummy value. #[post("/organizations/domain/sso/details")] fn get_org_domain_sso_details() -> JsonResult { Ok(Json(json!({ @@ -907,7 +915,7 @@ async fn send_invite(org_id: &str, data: Json, headers: AdminHeaders invitation.save(&mut conn).await?; } - let mut user = User::new(email.clone()); + let mut user = User::new(email.clone(), None); user.save(&mut conn).await?; user } @@ -924,7 +932,8 @@ } }; - let mut new_user = UserOrganization::new(user.uuid.clone(), String::from(org_id)); + let mut new_user = + UserOrganization::new(user.uuid.clone(), String::from(org_id), Some(headers.user.email.clone())); let access_all = data.access_all; new_user.access_all = access_all; new_user.atype = new_type; @@ -1760,17 +1769,22 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso }))) } +// Called during the SSO enrollment. +// Since the VW SSO flow is not linked to an organization it will be called with a dummy or undefined `org_id`. #[allow(non_snake_case)] #[get("/organizations//policies/invited-user?")] -async fn list_policies_invited_user(org_id: String, userId: String, mut conn: DbConn) -> JsonResult { - // We should confirm the user is part of the organization, but unique domain_hints must be supported first.
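+// When the `userId` has no pending invitation for this `org_id` the endpoint still answers 200, just with an empty `Data` array.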
- +async fn list_policies_invited_user(org_id: &str, userId: &str, mut conn: DbConn) -> JsonResult { if userId.is_empty() { err!("userId must not be empty"); } - let policies = OrgPolicy::find_by_org(&org_id, &mut conn).await; - let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); + let user_orgs = UserOrganization::find_invited_by_user(userId, &mut conn).await; + let policies_json: Vec = if user_orgs.into_iter().any(|user_org| user_org.org_uuid == org_id) { + let policies = OrgPolicy::find_by_org(org_id, &mut conn).await; + policies.iter().map(OrgPolicy::to_json).collect() + } else { + Vec::with_capacity(0) + }; Ok(Json(json!({ "Data": policies_json, @@ -1779,7 +1793,26 @@ async fn list_policies_invited_user(org_id: String, userId: String, mut conn: Db }))) } -#[get("/organizations//policies/")] +// Called during the SSO enrollment. +#[get("/organizations//policies/master-password", rank = 1)] +fn get_policy_master_password(org_id: &str, _headers: Headers) -> JsonResult { + let data = match CONFIG.sso_master_password_policy() { + Some(policy) => policy, + None => "null".to_string(), + }; + + let policy = OrgPolicy { + uuid: String::from(org_id), + org_uuid: String::from(org_id), + atype: OrgPolicyType::MasterPassword as i32, + enabled: CONFIG.sso_master_password_policy().is_some(), + data, + }; + + Ok(Json(policy.to_json())) +} + +#[get("/organizations//policies/", rank = 2)] async fn get_policy(org_id: &str, pol_type: i32, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { Some(pt) => pt, @@ -2047,7 +2080,8 @@ async fn import(org_id: &str, data: Json, headers: Headers, mut c UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites }; - let mut new_org_user = UserOrganization::new(user.uuid.clone(), String::from(org_id)); + let mut new_org_user = + UserOrganization::new(user.uuid.clone(), String::from(org_id), Some(headers.user.email.clone())); new_org_user.access_all = false; new_org_user.atype = UserOrgType::User as i32; new_org_user.status = user_org_status; diff --git a/src/api/core/public.rs b/src/api/core/public.rs index 737d30dd..ed22db72 100644 --- a/src/api/core/public.rs +++ b/src/api/core/public.rs @@ -93,7 +93,7 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db Some(user) => user, // exists in vaultwarden None => { // User does not exist yet - let mut new_user = User::new(user_data.email.clone()); + let mut new_user = User::new(user_data.email.clone(), None); new_user.save(&mut conn).await?; if !CONFIG.mail_enabled() { @@ -109,7 +109,12 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites }; - let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); + let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await { + Some(org) => (org.name, org.billing_email), + None => err!("Error looking up organization"), + }; + + let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone(), Some(org_email.clone())); new_org_user.set_external_id(Some(user_data.external_id.clone())); new_org_user.access_all = false; new_org_user.atype = UserOrgType::User as i32; @@ -118,11 +123,6 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db new_org_user.save(&mut conn).await?; if CONFIG.mail_enabled() { - let (org_name, org_email) = match Organization::find_by_uuid(&org_id, 
&mut conn).await { - Some(org) => (org.name, org.billing_email), - None => err!("Error looking up organization"), - }; - mail::send_invite(&user, Some(org_id.clone()), Some(new_org_user.uuid), &org_name, Some(org_email)) .await?; } diff --git a/src/api/identity.rs b/src/api/identity.rs index 84129ea3..31efba83 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -1,10 +1,10 @@ -use chrono::Utc; -use jsonwebtoken::DecodingKey; +use chrono::{NaiveDateTime, Utc}; use num_traits::FromPrimitive; -use rocket::serde::json::Json; use rocket::{ form::{Form, FromForm}, - http::CookieJar, + http::Status, + response::Redirect, + serde::json::Json, Route, }; use serde_json::Value; @@ -12,23 +12,22 @@ use serde_json::Value; use crate::{ api::{ core::{ - accounts::{PreloginData, RegisterData, _prelogin, _register}, + accounts::{PreloginData, RegisterData, _prelogin, _register, kdf_upgrade}, log_user_event, two_factor::{authenticator, duo, duo_oidc, email, enforce_2fa_policy, webauthn, yubikey}, }, push::register_push_device, ApiResult, EmptyResult, JsonResult, }, - auth::{encode_jwt, generate_organization_api_key_login_claims, generate_ssotoken_claims, ClientHeaders, ClientIp}, + auth, + auth::{AuthMethod, AuthMethodScope, ClientHeaders, ClientIp}, db::{models::*, DbConn}, error::MapResult, - mail, util, - util::{CookieManager, CustomRedirect}, - CONFIG, + mail, sso, util, CONFIG, }; pub fn routes() -> Vec { - routes![login, prelogin, identity_register, prevalidate, authorize, oidcsignin] + routes![login, prelogin, identity_register, _prevalidate, prevalidate, authorize, oidcsignin, oidcsignin_error] } #[post("/connect/token", data = "")] @@ -42,6 +41,7 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: _check_is_some(&data.refresh_token, "refresh_token cannot be blank")?; _refresh_login(data, &mut conn).await } + "password" if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO sign-in is required"), "password" => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.password, "password cannot be blank")?; @@ -65,15 +65,17 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: _api_key_login(data, &mut user_uuid, &mut conn, &client_header.ip).await } - "authorization_code" => { + "authorization_code" if CONFIG.sso_enabled() => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.code, "code cannot be blank")?; _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; _check_is_some(&data.device_name, "device_name cannot be blank")?; _check_is_some(&data.device_type, "device_type cannot be blank")?; - _authorization_login(data, &mut user_uuid, &mut conn, &client_header.ip).await + + _sso_login(data, &mut user_uuid, &mut conn, &client_header.ip).await } + "authorization_code" => err!("SSO sign-in is not available"), t => err!("Invalid type", t), }; @@ -107,172 +109,147 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: login_result } +// Return Status::Unauthorized to trigger logout async fn _refresh_login(data: ConnectData, conn: &mut DbConn) -> JsonResult { // Extract token - let token = data.refresh_token.unwrap(); - - // Get device by refresh token - let mut device = Device::find_by_refresh_token(&token, conn).await.map_res("Invalid refresh token")?; - - let scope = "api offline_access"; - let scope_vec = vec!["api".into(), "offline_access".into()]; + let refresh_token = match data.refresh_token { + Some(token) => token, + None => err_code!("Missing 
refresh_token", Status::Unauthorized.code), + }; - // Common - let user = User::find_by_uuid(&device.user_uuid, conn).await.unwrap(); // --- // Disabled this variable, it was used to generate the JWT // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out // See: https://github.com/dani-garcia/vaultwarden/issues/4156 // --- // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); - device.save(conn).await?; - - let result = json!({ - "access_token": access_token, - "expires_in": expires_in, - "token_type": "Bearer", - "refresh_token": device.refresh_token, - - "scope": scope, - }); - - Ok(Json(result)) + match auth::refresh_tokens(&refresh_token, conn).await { + Err(err) => { + err_code!(format!("Unable to refresh login credentials: {}", err.message()), Status::Unauthorized.code) + } + Ok((mut device, auth_tokens)) => { + // Save to update `device.updated_at` to track usage + device.save(conn).await?; + + let result = json!({ + "refresh_token": auth_tokens.refresh_token(), + "access_token": auth_tokens.access_token(), + "expires_in": auth_tokens.expires_in(), + "token_type": "Bearer", + "scope": auth_tokens.scope(), + }); + + Ok(Json(result)) + } + } } -#[derive(Debug, Serialize, Deserialize)] -struct TokenPayload { - exp: i64, - email: Option, - nonce: String, -} +// After exchanging the code we need to check first if 2FA is needed before continuing +async fn _sso_login(data: ConnectData, user_uuid: &mut Option, conn: &mut DbConn, ip: &ClientIp) -> JsonResult { + AuthMethod::Sso.check_scope(data.scope.as_ref())?; -async fn _authorization_login( - data: ConnectData, - user_uuid: &mut Option, - conn: &mut DbConn, - ip: &ClientIp, -) -> JsonResult { - let scope = match data.scope.as_ref() { - None => err!("Got no scope in OIDC data"), - Some(scope) => scope, - }; - if scope != "api offline_access" { - err!("Scope not supported") - } + // Ratelimit the login + crate::ratelimit::check_limit_login(&ip.ip)?; - let scope_vec = vec!["api".into(), "offline_access".into()]; let code = match data.code.as_ref() { None => err!("Got no code in OIDC data"), Some(code) => code, }; - let (refresh_token, id_token, user_info) = match get_auth_code_access_token(code).await { - Ok((refresh_token, id_token, user_info)) => (refresh_token, id_token, user_info), - Err(_err) => err!("Could not retrieve access token"), - }; - - let mut validation = jsonwebtoken::Validation::default(); - validation.insecure_disable_signature_validation(); - - let token = - match jsonwebtoken::decode::(id_token.as_str(), &DecodingKey::from_secret(&[]), &validation) { - Err(_err) => err!("Could not decode id token"), - Ok(payload) => payload.claims, - }; + let user_infos = sso::exchange_code(code, conn).await?; + + // Will trigger 2FA flow if needed + let user_data = match SsoUser::find_by_identifier_or_email(&user_infos.identifier, &user_infos.email, conn).await { + None => None, + Some((user, None)) if user.private_key.is_some() && !CONFIG.sso_signups_match_email() => { + error!( + "Login failure ({}), existing non SSO user ({}) with same email ({}) and association is disabled", + user_infos.identifier, user.uuid, user.email + ); + err_silent!("Existing non SSO user with same email") + } + Some((user, Some(sso_user))) if sso_user.identifier != user_infos.identifier => { + error!( + "Login failure ({}), existing SSO user ({}) with same email ({})", + user_infos.identifier, 
user.uuid, user.email + ); + err_silent!("Existing SSO user with same email") + } + Some((user, sso_user)) => { + let (mut device, new_device) = get_device(&data, conn, &user).await?; + let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?; - // let expiry = token.exp; - let nonce = token.nonce; - let mut new_user = false; - - match SsoNonce::find(&nonce, conn).await { - Some(sso_nonce) => { - match sso_nonce.delete(conn).await { - Ok(_) => { - let user_email = match token.email { - Some(email) => email, - None => match user_info.email() { - None => err!("Neither id token nor userinfo contained an email"), - Some(email) => email.to_owned().to_string(), - }, - }; - let now = Utc::now().naive_utc(); - - let mut user = match User::find_by_mail(&user_email, conn).await { - Some(user) => user, - None => { - new_user = true; - User::new(user_email.clone()) - } - }; - - if new_user { - user.verified_at = Some(Utc::now().naive_utc()); - user.save(conn).await?; - } + Some((user, device, new_device, twofactor_token, sso_user)) + } + }; - // Set the user_uuid here to be passed back used for event logging. - *user_uuid = Some(user.uuid.clone()); + // We passed 2FA get full user informations + let auth_user = sso::redeem(&user_infos.state, conn).await?; - let (mut device, new_device) = get_device(&data, conn, &user).await; + let now = Utc::now().naive_utc(); + let (user, mut device, new_device, twofactor_token, sso_user) = match user_data { + None => { + if !CONFIG.is_email_domain_allowed(&user_infos.email) { + err!("Email domain not allowed"); + } - let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, true, conn).await?; + if !user_infos.email_verified.unwrap_or(true) { + err!("Email needs to be verified before you can use VaultWarden"); + } - if CONFIG.mail_enabled() && new_device { - if let Err(e) = - mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await - { - error!("Error sending new device email: {:#?}", e); + let mut user = User::new(user_infos.email, user_infos.user_name); + user.verified_at = Some(now); + user.save(conn).await?; - if CONFIG.require_device_email() { - err!("Could not send login notification email. 
Please contact your administrator.") - } - } - } + let (device, new_device) = get_device(&data, conn, &user).await?; - if CONFIG.sso_acceptall_invites() { - for user_org in UserOrganization::find_invited_by_user(&user.uuid, conn).await.iter_mut() { - user_org.status = UserOrgStatus::Accepted as i32; - user_org.save(conn).await?; - } - } + (user, device, new_device, None, None) + } + Some((mut user, device, new_device, twofactor_token, sso_user)) if user.private_key.is_none() => { + // User was invited a stub was created + user.verified_at = Some(now); + if let Some(user_name) = user_infos.user_name { + user.name = user_name; + } - device.refresh_token = refresh_token.clone(); - device.save(conn).await?; - - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); - device.save(conn).await?; - - let mut result = json!({ - "access_token": access_token, - "token_type": "Bearer", - "refresh_token": device.refresh_token, - "expires_in": expires_in, - "Key": user.akey, - "PrivateKey": user.private_key, - "Kdf": user.client_kdf_type, - "KdfIterations": user.client_kdf_iter, - "KdfMemory": user.client_kdf_memory, - "KdfParallelism": user.client_kdf_parallelism, - "ResetMasterPassword": user.password_hash.is_empty(), - "scope": scope, - "unofficialServer": true, - }); - - if let Some(token) = twofactor_token { - result["TwoFactorToken"] = Value::String(token); - } + if !CONFIG.mail_enabled() { + UserOrganization::confirm_user_invitations(&user.uuid, conn).await?; + } - info!("User {} logged in successfully. IP: {}", user.email, ip.ip); - Ok(Json(result)) + user.save(conn).await?; + (user, device, new_device, twofactor_token, sso_user) + } + Some((user, device, new_device, twofactor_token, sso_user)) => { + if user.email != user_infos.email { + if CONFIG.mail_enabled() { + mail::send_sso_change_email(&user_infos.email).await?; } - Err(_) => err!("Failed to delete nonce"), + info!("User {} email changed in SSO provider from {} to {}", user.uuid, user.email, user_infos.email); } + (user, device, new_device, twofactor_token, sso_user) } - None => { - err!("Invalid nonce") - } + }; + + if sso_user.is_none() { + let user_sso = SsoUser { + user_uuid: user.uuid.clone(), + identifier: user_infos.identifier, + }; + user_sso.save(conn).await?; } + + // Set the user_uuid here to be passed back used for event logging. 
+ *user_uuid = Some(user.uuid.clone()); + + let auth_tokens = sso::create_auth_tokens( + &device, + &user, + auth_user.refresh_token, + &auth_user.access_token, + auth_user.expires_in, + )?; + + authenticated_response(&user, &mut device, new_device, auth_tokens, twofactor_token, &now, conn, ip).await } #[derive(Default, Deserialize, Serialize)] @@ -294,19 +271,11 @@ async fn _password_login( ip: &ClientIp, ) -> JsonResult { // Validate scope - let scope = data.scope.as_ref().unwrap(); - if scope != "api offline_access" { - err!("Scope not supported") - } - let scope_vec = vec!["api".into(), "offline_access".into()]; + AuthMethod::Password.check_scope(data.scope.as_ref())?; // Ratelimit the login crate::ratelimit::check_limit_login(&ip.ip)?; - if CONFIG.sso_enabled() && CONFIG.sso_only() { - err!("SSO sign-in is required"); - } - // Get the user let username = data.username.as_ref().unwrap().trim(); let mut user = match User::find_by_mail(username, conn).await { @@ -349,15 +318,7 @@ async fn _password_login( ) } - // Change the KDF Iterations - if user.password_iterations != CONFIG.password_iterations() { - user.password_iterations = CONFIG.password_iterations(); - user.set_password(password, None, false, None); - - if let Err(e) = user.save(conn).await { - error!("Error updating user: {:#?}", e); - } - } + kdf_upgrade(&mut user, password, conn).await?; // Check if the user is disabled if !user.enabled { @@ -404,12 +365,28 @@ async fn _password_login( ) } - let (mut device, new_device) = get_device(&data, conn, &user).await; + let (mut device, new_device) = get_device(&data, conn, &user).await?; - let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, false, conn).await?; + let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?; + let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password); + + authenticated_response(&user, &mut device, new_device, auth_tokens, twofactor_token, &now, conn, ip).await +} + +#[allow(clippy::too_many_arguments)] +async fn authenticated_response( + user: &User, + device: &mut Device, + new_device: bool, + auth_tokens: auth::AuthTokens, + twofactor_token: Option, + now: &NaiveDateTime, + conn: &mut DbConn, + ip: &ClientIp, +) -> JsonResult { if CONFIG.mail_enabled() && new_device { - if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await { + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, device).await { error!("Error sending new device email: {:#?}", e); if CONFIG.require_device_email() { @@ -425,17 +402,10 @@ async fn _password_login( // register push device if !new_device { - register_push_device(&mut device, conn).await?; + register_push_device(device, conn).await?; } - // Common - // --- - // Disabled this variable, it was used to generate the JWT - // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); + // Save to update `device.updated_at` to track usage device.save(conn).await?; // Fetch all valid Master Password Policies and merge them into one with all true's and larges numbers as one policy @@ -469,14 +439,11 @@ async fn _password_login( }; let mut result = json!({ - "access_token": access_token, - "expires_in": 
expires_in, + "access_token": auth_tokens.access_token(), + "expires_in": auth_tokens.expires_in(), "token_type": "Bearer", - "refresh_token": device.refresh_token, - "Key": user.akey, + "refresh_token": auth_tokens.refresh_token(), "PrivateKey": user.private_key, - //"TwoFactorToken": "11122233333444555666777888999" - "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "KdfMemory": user.client_kdf_memory, @@ -484,19 +451,22 @@ async fn _password_login( "ResetMasterPassword": false, // TODO: Same as above "ForcePasswordReset": false, "MasterPasswordPolicy": master_password_policy, - - "scope": scope, + "scope": auth_tokens.scope(), "UserDecryptionOptions": { "HasMasterPassword": !user.password_hash.is_empty(), "Object": "userDecryptionOptions" }, }); + if !user.akey.is_empty() { + result["Key"] = Value::String(user.akey.clone()); + } + if let Some(token) = twofactor_token { result["TwoFactorToken"] = Value::String(token); } - info!("User {} logged in successfully. IP: {}", username, ip.ip); + info!("User {} logged in successfully. IP: {}", user.email, ip.ip); Ok(Json(result)) } @@ -510,9 +480,9 @@ async fn _api_key_login( crate::ratelimit::check_limit_login(&ip.ip)?; // Validate scope - match data.scope.as_ref().unwrap().as_ref() { - "api" => _user_api_key_login(data, user_uuid, conn, ip).await, - "api.organization" => _organization_api_key_login(data, conn, ip).await, + match data.scope.as_ref() { + Some(scope) if scope == &AuthMethod::UserApiKey.scope() => _user_api_key_login(data, user_uuid, conn, ip).await, + Some(scope) if scope == &AuthMethod::OrgApiKey.scope() => _organization_api_key_login(data, conn, ip).await, _ => err!("Scope not supported"), } } @@ -560,7 +530,7 @@ async fn _user_api_key_login( ) } - let (mut device, new_device) = get_device(&data, conn, &user).await; + let (mut device, new_device) = get_device(&data, conn, &user).await?; if CONFIG.mail_enabled() && new_device { let now = Utc::now().naive_utc(); @@ -578,15 +548,15 @@ async fn _user_api_key_login( } } - // Common - let scope_vec = vec!["api".into()]; // --- // Disabled this variable, it was used to generate the JWT // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out // See: https://github.com/dani-garcia/vaultwarden/issues/4156 // --- // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); + let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey); + + // Save to update `device.updated_at` to track usage device.save(conn).await?; info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); @@ -594,8 +564,8 @@ async fn _user_api_key_login( // Note: No refresh_token is returned. The CLI just repeats the // client_credentials login flow when the existing token expires. 
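// (`expires_in` below is the remaining lifetime of the freshly issued access token, roughly two hours by default.)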
let result = json!({ - "access_token": access_token, - "expires_in": expires_in, + "access_token": access_claims.token(), + "expires_in": access_claims.expires_in(), "token_type": "Bearer", "Key": user.akey, "PrivateKey": user.private_key, @@ -605,7 +575,7 @@ async fn _user_api_key_login( "KdfMemory": user.client_kdf_memory, "KdfParallelism": user.client_kdf_parallelism, "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing - "scope": "api", + "scope": AuthMethod::UserApiKey.scope(), }); Ok(Json(result)) @@ -629,19 +599,19 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: & err!("Incorrect client_secret", format!("IP: {}. Organization: {}.", ip.ip, org_api_key.org_uuid)) } - let claim = generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid); - let access_token = crate::auth::encode_jwt(&claim); + let claim = auth::generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid); + let access_token = auth::encode_jwt(&claim); Ok(Json(json!({ "access_token": access_token, "expires_in": 3600, "token_type": "Bearer", - "scope": "api.organization", + "scope": AuthMethod::OrgApiKey.scope(), }))) } /// Retrieves an existing device or creates a new device from ConnectData and the User -async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Device, bool) { +async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> ApiResult<(Device, bool)> { // On iOS, device_type sends "iOS", on others it sends a number // When unknown or unable to parse, return 14, which is 'Unknown Browser' let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(14); @@ -653,12 +623,13 @@ async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Devi let device = match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { Some(device) => device, None => { + let device = Device::new(device_id, user.uuid.clone(), device_name, device_type); new_device = true; - Device::new(device_id, user.uuid.clone(), device_name, device_type) + device } }; - (device, new_device) + Ok((device, new_device)) } async fn twofactor_auth( @@ -666,7 +637,6 @@ async fn twofactor_auth( data: &ConnectData, device: &mut Device, ip: &ClientIp, - is_sso: bool, conn: &mut DbConn, ) -> ApiResult> { let twofactors = TwoFactor::find_by_user(&user.uuid, conn).await; @@ -684,17 +654,7 @@ async fn twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => { - if is_sso { - if CONFIG.sso_only() { - err!("2FA not supported with SSO login, contact your administrator"); - } else { - err!("2FA not supported with SSO login, log in directly using email and master password"); - } - } else { - err_json!(_json_err_twofactor(&twofactor_ids, &user.uuid, data, conn).await?, "2FA token not provided"); - } - } + None => err_json!(_json_err_twofactor(&twofactor_ids, &user.uuid, data, conn).await?, "2FA token not provided"), }; let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); @@ -756,12 +716,13 @@ async fn twofactor_auth( TwoFactorIncomplete::mark_complete(&user.uuid, &device.uuid, conn).await?; - if !CONFIG.disable_2fa_remember() && remember == 1 { - Ok(Some(device.refresh_twofactor_remember())) + let two_factor = if !CONFIG.disable_2fa_remember() && remember == 1 { + Some(device.refresh_twofactor_remember()) } else { 
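// Not asked to be remembered (or 2FA remember is disabled): drop any stale remember token.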
device.delete_twofactor_remember(); - Ok(None) - } + None + }; + Ok(two_factor) } fn _selected_data(tf: Option) -> ApiResult { @@ -938,176 +899,120 @@ fn _check_is_some(value: &Option, msg: &str) -> EmptyResult { Ok(()) } +// Deprecated but still needed for Mobile apps #[get("/account/prevalidate")] -#[allow(non_snake_case)] -fn prevalidate() -> JsonResult { - let claims = generate_ssotoken_claims(); - let ssotoken = encode_jwt(&claims); - Ok(Json(json!({ - "token": ssotoken, - }))) +fn _prevalidate() -> JsonResult { + prevalidate() } -use openidconnect::core::{CoreClient, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims}; -use openidconnect::reqwest::async_http_client; -use openidconnect::{ - AuthenticationFlow, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce, OAuth2TokenResponse, - RedirectUrl, Scope, -}; - -async fn get_client_from_sso_config() -> ApiResult { - let redirect = CONFIG.sso_callback_path(); - let client_id = ClientId::new(CONFIG.sso_client_id()); - let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); - let issuer_url = match IssuerUrl::new(CONFIG.sso_authority()) { - Ok(issuer) => issuer, - Err(_err) => err!("invalid issuer URL"), - }; - - let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { - Ok(metadata) => metadata, - Err(_err) => { - err!("Failed to discover OpenID provider") - } - }; +#[get("/sso/prevalidate")] +fn prevalidate() -> JsonResult { + if CONFIG.sso_enabled() { + let sso_token = sso::encode_ssotoken_claims(); + Ok(Json(json!({ + "token": sso_token, + }))) + } else { + err!("SSO sign-in is not available") + } +} - let redirect_uri = match RedirectUrl::new(redirect) { - Ok(uri) => uri, - Err(err) => err!("Invalid redirection url: {}", err.to_string()), - }; - let client = CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) - .set_redirect_uri(redirect_uri); +#[get("/connect/oidc-signin?&", rank = 1)] +async fn oidcsignin(code: String, state: String, conn: DbConn) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Ok { + code, + state, + }, + &conn, + ) + .await +} - Ok(client) +// Bitwarden client appear to only care for code and state so we pipe it through +// cf: https://github.com/bitwarden/clients/blob/8e46ef1ae5be8b62b0d3d0b9d1b1c62088a04638/libs/angular/src/auth/components/sso.component.ts#L68C11-L68C23) +#[get("/connect/oidc-signin?&&", rank = 2)] +async fn oidcsignin_error( + state: String, + error: String, + error_description: Option, + conn: DbConn, +) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Error { + state, + error, + error_description, + }, + &conn, + ) + .await } -#[get("/connect/oidc-signin?")] -fn oidcsignin(code: String, jar: &CookieJar<'_>, _conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); +// iss and scope parameters are needed for redirection to work on IOS. 
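+// For illustration (hypothetical values), the client ends up being redirected to: +//   <redirect_uri>?code=<wrapped code JWT>&state=<state>&scope=api+offline_access&iss=<vaultwarden domain> +// Note that `code` is not the raw provider code but the `OIDCCodeWrapper` encoded by `sso::encode_code_claims`, +// which is how provider errors can also be carried back to the client.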
+async fn oidcsignin_redirect(state: String, wrapper: sso::OIDCCodeWrapper, conn: &DbConn) -> ApiResult { + let code = sso::encode_code_claims(wrapper); - let redirect_uri = match cookiemanager.get_cookie("redirect_uri".to_string()) { - None => err!("No redirect_uri in cookie"), - Some(uri) => uri, + let nonce = match SsoNonce::find(&state, conn).await { + Some(n) => n, + None => err!(format!("Failed to retrieve redirect_uri with {state}")), }; - let orig_state = match cookiemanager.get_cookie("state".to_string()) { - None => err!("No state in cookie"), - Some(state) => state, + + let mut url = match url::Url::parse(&nonce.redirect_uri) { + Ok(url) => url, + Err(err) => err!(format!("Failed to parse redirect URI ({}): {err}", nonce.redirect_uri)), }; - cookiemanager.delete_cookie("redirect_uri".to_string()); - cookiemanager.delete_cookie("state".to_string()); + url.query_pairs_mut() + .append_pair("code", &code) + .append_pair("state", &state) + .append_pair("scope", &AuthMethod::Sso.scope()) + .append_pair("iss", &CONFIG.domain()); - let redirect = CustomRedirect { - url: format!("{redirect_uri}?code={code}&state={orig_state}"), - headers: vec![], - }; + debug!("Redirection to {url}"); - Ok(redirect) + Ok(Redirect::temporary(String::from(url))) } -#[derive(FromForm)] -#[allow(non_snake_case)] +#[derive(Debug, Clone, Default, FromForm)] struct AuthorizeData { - #[allow(unused)] #[field(name = uncased("client_id"))] #[field(name = uncased("clientid"))] - client_id: Option, + client_id: String, #[field(name = uncased("redirect_uri"))] #[field(name = uncased("redirecturi"))] - redirect_uri: Option, + redirect_uri: String, #[allow(unused)] - #[field(name = uncased("response_type"))] - #[field(name = uncased("responsetype"))] response_type: Option, #[allow(unused)] - #[field(name = uncased("scope"))] scope: Option, - #[field(name = uncased("state"))] - state: Option, + state: String, #[allow(unused)] - #[field(name = uncased("code_challenge"))] code_challenge: Option, #[allow(unused)] - #[field(name = uncased("code_challenge_method"))] code_challenge_method: Option, #[allow(unused)] - #[field(name = uncased("response_mode"))] response_mode: Option, #[allow(unused)] - #[field(name = uncased("domain_hint"))] domain_hint: Option, #[allow(unused)] #[field(name = uncased("ssoToken"))] - ssoToken: Option, + sso_token: Option, } +// The `redirect_uri` will change depending on the client (web, android, iOS, ...)
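+// For illustration only (hypothetical values): a browser client typically sends an absolute URL on the +// vault domain, while mobile apps use an app-scheme deep link. Whatever value is received is persisted +// together with the state/nonce in `sso_nonce` and replayed verbatim by `oidcsignin_redirect` above.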
#[get("/connect/authorize?")] -async fn authorize(data: AuthorizeData, jar: &CookieJar<'_>, mut conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - match get_client_from_sso_config().await { - Ok(client) => { - let (auth_url, _csrf_state, nonce) = client - .authorize_url( - AuthenticationFlow::::AuthorizationCode, - CsrfToken::new_random, - Nonce::new_random, - ) - .add_scope(Scope::new("email".to_string())) - .add_scope(Scope::new("profile".to_string())) - .url(); - - let sso_nonce = SsoNonce::new(nonce.secret().to_string()); - sso_nonce.save(&mut conn).await?; - - let redirect_uri = match data.redirect_uri { - None => err!("No redirect_uri in data"), - Some(uri) => uri, - }; - cookiemanager.set_cookie("redirect_uri".to_string(), redirect_uri); - let state = match data.state { - None => err!("No state in data"), - Some(state) => state, - }; - cookiemanager.set_cookie("state".to_string(), state); - - let redirect = CustomRedirect { - url: format!("{}", auth_url), - headers: vec![], - }; - - Ok(redirect) - } - Err(_err) => err!("Unable to find client from identifier"), - } -} - -async fn get_auth_code_access_token(code: &str) -> ApiResult<(String, String, CoreUserInfoClaims)> { - let oidc_code = AuthorizationCode::new(String::from(code)); - match get_client_from_sso_config().await { - Ok(client) => match client.exchange_code(oidc_code).request_async(async_http_client).await { - Ok(token_response) => { - let refresh_token = match token_response.refresh_token() { - Some(token) => token.secret().to_string(), - None => String::new(), - }; - let id_token = match token_response.extra_fields().id_token() { - None => err!("Token response did not contain an id_token"), - Some(token) => token.to_string(), - }; +async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { + let AuthorizeData { + client_id, + redirect_uri, + state, + .. 
+ } = data; - let user_info: CoreUserInfoClaims = - match client.user_info(token_response.access_token().to_owned(), None) { - Err(_err) => err!("Token response did not contain user_info"), - Ok(info) => match info.request_async(async_http_client).await { - Err(_err) => err!("Request to user_info endpoint failed"), - Ok(claim) => claim, - }, - }; + let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; - Ok((refresh_token, id_token, user_info)) - } - Err(err) => err!("Failed to contact token endpoint: {}", err.to_string()), - }, - Err(_err) => err!("Unable to find client"), - } + Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/mod.rs b/src/api/mod.rs index 27a3775f..16d13ccb 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -35,7 +35,7 @@ pub use crate::api::{ use crate::db::{models::User, DbConn}; // Type aliases for API methods results -type ApiResult = Result; +pub type ApiResult = Result; pub type JsonResult = ApiResult>; pub type EmptyResult = ApiResult<()>; diff --git a/src/auth.rs b/src/auth.rs index f5aef64d..b0bc06a5 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,6 +1,5 @@ // JWT Handling -// -use chrono::{TimeDelta, Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header}; use num_traits::FromPrimitive; use once_cell::sync::{Lazy, OnceCell}; @@ -14,18 +13,29 @@ use std::{ net::IpAddr, }; -use crate::{error::Error, CONFIG}; +use crate::{ + api::ApiResult, + db::{ + models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, + DbConn, + }, + error::Error, + sso, CONFIG, +}; const JWT_ALGORITHM: Algorithm = Algorithm::RS256; -pub static DEFAULT_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); +// Limit when BitWarden consider the token as expired +pub static BW_EXPIRATION: Lazy = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); + +pub static DEFAULT_REFRESH_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_days(30).unwrap()); +pub static DEFAULT_ACCESS_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); static JWT_HEADER: Lazy
= Lazy::new(|| Header::new(JWT_ALGORITHM)); pub static JWT_LOGIN_ISSUER: Lazy = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); static JWT_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin())); static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin())); -static JWT_SSOTOKEN_ISSUER: Lazy = Lazy::new(|| format!("{}|ssotoken", CONFIG.domain_origin())); static JWT_DELETE_ISSUER: Lazy = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin())); static JWT_VERIFYEMAIL_ISSUER: Lazy = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin())); static JWT_ADMIN_ISSUER: Lazy = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin())); @@ -87,7 +97,7 @@ pub fn encode_jwt(claims: &T) -> String { } } -fn decode_jwt(token: &str, issuer: String) -> Result { +pub fn decode_jwt(token: &str, issuer: String) -> Result { let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); validation.leeway = 30; // 30 seconds validation.validate_exp = true; @@ -106,6 +116,10 @@ fn decode_jwt(token: &str, issuer: String) -> Result Result { + decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) +} + pub fn decode_login(token: &str) -> Result { decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) } @@ -179,6 +193,73 @@ pub struct LoginJwtClaims { pub amr: Vec, } +impl LoginJwtClaims { + pub fn new(device: &Device, user: &User, nbf: i64, exp: i64, scope: Vec, now: DateTime) -> Self { + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // --- + // fn arg: orgs: Vec, + // --- + // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); + // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); + // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); + // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + + if exp <= (now + *BW_EXPIRATION).timestamp() { + warn!("Raise access_token lifetime to more than 5min.") + } + + // Create the JWT claims struct, to send to the client + Self { + nbf, + exp, + iss: JWT_LOGIN_ISSUER.to_string(), + sub: user.uuid.clone(), + premium: true, + name: user.name.clone(), + email: user.email.clone(), + email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), + + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // See: https://github.com/dani-garcia/vaultwarden/issues/4156 + // --- + // orgowner, + // orgadmin, + // orguser, + // orgmanager, + sstamp: user.security_stamp.clone(), + device: device.uuid.clone(), + scope, + amr: vec!["Application".into()], + } + } + + pub fn default(device: &Device, user: &User, auth_method: &AuthMethod) -> Self { + let time_now = Utc::now(); + Self::new( + device, + user, + time_now.timestamp(), + (time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(), + auth_method.scope_vec(), + time_now, + ) + } + + pub fn 
token(&self) -> String { + encode_jwt(&self) + } + + pub fn expires_in(&self) -> i64 { + self.exp - Utc::now().timestamp() + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct InviteJwtClaims { // Not before @@ -332,28 +413,6 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims { } } -#[derive(Debug, Serialize, Deserialize)] -pub struct SsoTokenJwtClaims { - // Not before - pub nbf: i64, - // Expiration time - pub exp: i64, - // Issuer - pub iss: String, - // Subject - pub sub: String, -} - -pub fn generate_ssotoken_claims() -> SsoTokenJwtClaims { - let time_now = Utc::now().naive_utc(); - SsoTokenJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(2)).timestamp(), - iss: JWT_SSOTOKEN_ISSUER.to_string(), - sub: "vaultwarden".to_string(), - } -} - pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims { let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); @@ -393,11 +452,6 @@ use rocket::{ request::{FromRequest, Outcome, Request}, }; -use crate::db::{ - models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, - DbConn, -}; - pub struct Host { pub host: String, } @@ -923,3 +977,150 @@ impl<'r> FromRequest<'r> for WsAccessTokenHeader { }) } } + +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AuthMethod { + OrgApiKey, + Password, + Sso, + UserApiKey, +} + +pub trait AuthMethodScope { + fn scope_vec(&self) -> Vec; + fn scope(&self) -> String; + fn check_scope(&self, scope: Option<&String>) -> ApiResult; +} + +impl AuthMethodScope for AuthMethod { + fn scope(&self) -> String { + match self { + AuthMethod::OrgApiKey => "api.organization".to_string(), + AuthMethod::Password => "api offline_access".to_string(), + AuthMethod::Sso => "api offline_access".to_string(), + AuthMethod::UserApiKey => "api".to_string(), + } + } + + fn scope_vec(&self) -> Vec { + self.scope().split_whitespace().map(str::to_string).collect() + } + + fn check_scope(&self, scope: Option<&String>) -> ApiResult { + let method_scope = self.scope(); + match scope { + None => err!("Missing scope"), + Some(scope) if scope == &method_scope => Ok(method_scope), + Some(scope) => err!(format!("Scope ({scope}) not supported")), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum TokenWrapper { + Access(String), + Refresh(String), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct RefreshJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: AuthMethod, + + pub device_token: String, + + pub token: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct AuthTokens { + pub refresh_claims: RefreshJwtClaims, + pub access_claims: LoginJwtClaims, +} + +impl AuthTokens { + pub fn refresh_token(&self) -> String { + encode_jwt(&self.refresh_claims) + } + + pub fn access_token(&self) -> String { + self.access_claims.token() + } + + pub fn expires_in(&self) -> i64 { + self.access_claims.expires_in() + } + + pub fn scope(&self) -> String { + self.refresh_claims.sub.scope() + } + + // Create refresh_token and access_token with default validity + pub fn new(device: &Device, user: &User, sub: AuthMethod) -> Self { + let time_now = Utc::now(); + + let access_claims = LoginJwtClaims::default(device, user, &sub); + + let refresh_claims = RefreshJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + 
*DEFAULT_REFRESH_VALIDITY).timestamp(), + iss: JWT_LOGIN_ISSUER.to_string(), + sub, + device_token: device.refresh_token.clone(), + token: None, + }; + + Self { + refresh_claims, + access_claims, + } + } +} + +pub async fn refresh_tokens(refresh_token: &str, conn: &mut DbConn) -> ApiResult<(Device, AuthTokens)> { + let time_now = Utc::now(); + + let refresh_claims = match decode_refresh(refresh_token) { + Err(err) => err_silent!(format!("Unable to read refresh_token: {}", err.message())), + Ok(claims) => claims, + }; + + // Get device by refresh token + let mut device = match Device::find_by_refresh_token(&refresh_claims.device_token, conn).await { + None => err!("Invalid refresh token"), + Some(device) => device, + }; + + // Roll the Device.refresh_token; this invalidates old JWT refresh_tokens + device.roll_refresh_token(); + device.save(conn).await?; + + let user = match User::find_by_uuid(&device.user_uuid, conn).await { + None => err!("Unable to find user"), + Some(user) => user, + }; + + if refresh_claims.exp < time_now.timestamp() { + err!("Expired refresh token"); + } + + let auth_tokens = match refresh_claims.sub { + AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => { + AuthTokens::new(&device, &user, refresh_claims.sub) + } + AuthMethod::Sso if CONFIG.sso_enabled() => sso::exchange_refresh_token(&device, &user, &refresh_claims).await?, + AuthMethod::Sso => err!("SSO is now disabled, log in again using email and master password"), + AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, log in again"), + AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub), + _ => err!("Invalid auth method, cannot refresh token"), + }; + + Ok((device, auth_tokens)) +} diff --git a/src/config.rs b/src/config.rs index 46eb5aab..fd74813c 100644 --- a/src/config.rs +++ b/src/config.rs @@ -423,6 +423,9 @@ make_config! { /// Duo Auth context cleanup schedule |> Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. /// Defaults to once every minute. Set blank to disable this job. duo_context_purge_schedule: String, false, def, "30 * * * * *".to_string(); + /// Purge incomplete sso nonce. |> Cron schedule of the job that cleans leftover nonces in the db due to incomplete sso logins. + /// Defaults to daily. Set blank to disable this job. + purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string(); }, /// General settings @@ -640,19 +643,35 @@ make_config!
{
     /// OpenID Connect SSO settings
     sso {
         /// Enabled
-        sso_enabled: bool, true, def, false;
-        /// Force SSO login
-        sso_only: bool, true, def, false;
+        sso_enabled: bool, false, def, false;
+        /// Only SSO login |> Disable Email+Master Password login
+        sso_only: bool, true, def, false;
+        /// Allow email association |> Associate existing non-SSO user based on email
+        sso_signups_match_email: bool, true, def, true;
         /// Client ID
-        sso_client_id: String, true, def, String::new();
+        sso_client_id: String, false, def, String::new();
         /// Client Key
-        sso_client_secret: Pass, true, def, String::new();
-        /// Authority Server
-        sso_authority: String, true, def, String::new();
-        /// CallBack Path
-        sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain);
-        /// Allow workaround so SSO logins accept all invites
-        sso_acceptall_invites: bool, true, def, false;
+        sso_client_secret: Pass, false, def, String::new();
+        /// Authority Server |> Base URL of the OIDC provider discovery endpoint (without `/.well-known/openid-configuration`)
+        sso_authority: String, false, def, String::new();
+        /// Authorization request scopes |> List of the needed scopes (`openid` is implicit)
+        sso_scopes: String, false, def, "email profile".to_string();
+        /// Authorization request extra parameters
+        sso_authorize_extra_params: String, false, def, String::new();
+        /// Use PKCE during Authorization flow
+        sso_pkce: bool, false, def, false;
+        /// Regex for additional trusted Id token audience |> By default only the client_id is trusted.
+        sso_audience_trusted: String, false, option;
+        /// Callback Path |> Generated from Domain.
+        sso_callback_path: String, false, generated, |c| generate_sso_callback_path(&c.domain);
+        /// Optional SSO master password policy |> Example format: '{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}'
+        sso_master_password_policy: String, true, option;
+        /// Use SSO only for auth, not the session lifecycle |> Use the default Vaultwarden session lifecycle (idle refresh token valid for 30 days)
+        sso_auth_only_not_session: bool, true, def, false;
+        /// Client cache for discovery endpoint |> Duration in seconds (0 or less to disable).
More details: https://github.com/dani-garcia/vaultwarden/blob/sso-support/SSO.md#client-cache + sso_client_cache_expiration: u64, true, def, 0; + /// Log all tokens |> `LOG_LEVEL=debug` or `LOG_LEVEL=info,vaultwarden::sso=debug` is required + sso_debug_tokens: bool, true, def, false; }, /// Yubikey settings @@ -866,10 +885,15 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { err!("All Duo options need to be set for global Duo support") } - if cfg.sso_enabled - && (cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty()) - { - err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + if cfg.sso_enabled { + if cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty() { + err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + } + + internal_sso_issuer_url(&cfg.sso_authority)?; + internal_sso_redirect_url(&cfg.sso_callback_path)?; + check_master_password_policy(&cfg.sso_master_password_policy)?; + internal_sso_authorize_extra_params_vec(&cfg.sso_authorize_extra_params)?; } if cfg._enable_yubico { @@ -1049,6 +1073,35 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { Ok(()) } +fn internal_sso_issuer_url(sso_authority: &String) -> Result { + match openidconnect::IssuerUrl::new(sso_authority.clone()) { + Err(err) => err!(format!("Invalid sso_authority UR ({sso_authority}): {err}")), + Ok(issuer_url) => Ok(issuer_url), + } +} + +fn internal_sso_redirect_url(sso_callback_path: &String) -> Result { + match openidconnect::RedirectUrl::new(sso_callback_path.clone()) { + Err(err) => err!(format!("Invalid sso_callback_path ({sso_callback_path} built using `domain`) URL: {err}")), + Ok(redirect_url) => Ok(redirect_url), + } +} + +fn internal_sso_authorize_extra_params_vec(config: &str) -> Result, Error> { + match parse_param_list(config.to_owned(), '&', '=') { + Err(e) => err!(format!("Invalid SSO_AUTHORIZE_EXTRA_PARAMS: {e}")), + Ok(params) => Ok(params), + } +} + +fn check_master_password_policy(sso_master_password_policy: &Option) -> Result<(), Error> { + let policy = sso_master_password_policy.as_ref().map(|mpp| serde_json::from_str::(mpp)); + if let Some(Err(error)) = policy { + err!(format!("Invalid sso_master_password_policy ({error}), Ensure that it's correctly escaped with ''")) + } + Ok(()) +} + /// Extracts an RFC 6454 web origin from a URL. fn extract_url_origin(url: &str) -> String { match Url::parse(url) { @@ -1126,6 +1179,26 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls "starttls".to_string() } +/// Allow to parse a list of Key/Values (Ex: `key1=value&key2=value2`) +/// - line break are handled as `separator` +fn parse_param_list(config: String, separator: char, kv_separator: char) -> Result, Error> { + config + .lines() + .flat_map(|l| l.split(separator)) + .map(|l| l.trim()) + .filter(|l| !l.is_empty()) + .map(|l| { + let split = l.split(kv_separator).collect::>(); + match &split[..] { + [key, value] => Ok(((*key).to_string(), (*value).to_string())), + _ => { + err!(format!("Failed to parse ({l}). 
Expected key{kv_separator}value")) + } + } + }) + .collect() +} + impl Config { pub fn load() -> Result { // Loading from env and file @@ -1313,6 +1386,22 @@ impl Config { } } } + + pub fn sso_issuer_url(&self) -> Result { + internal_sso_issuer_url(&self.sso_authority()) + } + + pub fn sso_redirect_url(&self) -> Result { + internal_sso_redirect_url(&self.sso_callback_path()) + } + + pub fn sso_scopes_vec(&self) -> Vec { + self.sso_scopes().split_whitespace().map(str::to_string).collect() + } + + pub fn sso_authorize_extra_params_vec(&self) -> Result, Error> { + internal_sso_authorize_extra_params_vec(&self.sso_authorize_extra_params()) + } } use handlebars::{ @@ -1370,6 +1459,7 @@ where reg!("email/send_single_org_removed_from_org", ".html"); reg!("email/set_password", ".html"); reg!("email/smtp_test", ".html"); + reg!("email/sso_change_email", ".html"); reg!("email/twofactor_email", ".html"); reg!("email/verify_email", ".html"); reg!("email/welcome_must_verify", ".html"); @@ -1426,3 +1516,54 @@ fn to_json<'reg, 'rc>( out.write(&json)?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_param_list() { + let config = "key1=value&key2=value2&".to_string(); + let parsed = parse_param_list(config, '&', '='); + + assert_eq!( + parsed.unwrap(), + vec![("key1".to_string(), "value".to_string()), ("key2".to_string(), "value2".to_string())] + ); + } + + #[test] + fn test_parse_param_list_lines() { + let config = r#" + key1=value + key2=value2 + "# + .to_string(); + let parsed = parse_param_list(config, '&', '='); + + assert_eq!( + parsed.unwrap(), + vec![("key1".to_string(), "value".to_string()), ("key2".to_string(), "value2".to_string())] + ); + } + + #[test] + fn test_parse_param_list_mixed() { + let config = r#"key1=value&key2=value2& + &key3=value3&& + &key4=value4 + "# + .to_string(); + let parsed = parse_param_list(config, '&', '='); + + assert_eq!( + parsed.unwrap(), + vec![ + ("key1".to_string(), "value".to_string()), + ("key2".to_string(), "value2".to_string()), + ("key3".to_string(), "value3".to_string()), + ("key4".to_string(), "value4".to_string()), + ] + ); + } +} diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 8feab49d..86775455 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -1,6 +1,7 @@ use chrono::{NaiveDateTime, Utc}; +use data_encoding::{BASE64, BASE64URL}; -use crate::{crypto, CONFIG}; +use crate::crypto; use core::fmt; db_object! 
{ @@ -42,13 +43,16 @@ impl Device { push_uuid: None, push_token: None, - refresh_token: String::new(), + refresh_token: crypto::encode_random_bytes::<64>(BASE64URL), twofactor_remember: None, } } + pub fn roll_refresh_token(&mut self) { + self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL) + } + pub fn refresh_twofactor_remember(&mut self) -> String { - use data_encoding::BASE64; let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64); self.twofactor_remember = Some(twofactor_remember.clone()); @@ -59,61 +63,6 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, scope: Vec) -> (String, i64) { - // If there is no refresh token, we create one - if self.refresh_token.is_empty() { - use data_encoding::BASE64URL; - self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL); - } - - // Update the expiration of the device and the last update date - let time_now = Utc::now(); - self.updated_at = time_now.naive_utc(); - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // --- - // fn arg: orgs: Vec, - // --- - // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); - - // Create the JWT claims struct, to send to the client - use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; - let claims = LoginJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + *DEFAULT_VALIDITY).timestamp(), - iss: JWT_LOGIN_ISSUER.to_string(), - sub: user.uuid.clone(), - - premium: true, - name: user.name.clone(), - email: user.email.clone(), - email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // orgowner, - // orgadmin, - // orguser, - // orgmanager, - sstamp: user.security_stamp.clone(), - device: self.uuid.clone(), - scope, - amr: vec!["Application".into()], - }; - - (encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds()) - } - pub fn is_push_device(&self) -> bool { matches!(DeviceType::from_i32(self.atype), DeviceType::Android | DeviceType::Ios) } diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index 72afcd75..83e40763 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -34,4 +34,4 @@ pub use self::sso_nonce::SsoNonce; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor_duo_context::TwoFactorDuoContext; pub use self::two_factor_incomplete::TwoFactorIncomplete; -pub use self::user::{Invitation, User, UserKdfType, UserStampException}; +pub use self::user::{Invitation, SsoUser, User, UserKdfType, 
UserStampException}; diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 935e4bc3..efba120f 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -27,7 +27,7 @@ pub enum OrgPolicyType { MasterPassword = 1, PasswordGenerator = 2, SingleOrg = 3, - RequireSso = 4, + // RequireSso = 4, // Not supported PersonalOwnership = 5, DisableSend = 6, SendOptions = 7, @@ -77,12 +77,11 @@ impl OrgPolicy { } pub fn to_json(&self) -> Value { - let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); json!({ "id": self.uuid, "organizationId": self.org_uuid, "type": self.atype, - "data": data_json, + "data": serde_json::from_str(&self.data).unwrap_or(Value::Null), "enabled": self.enabled, "object": "policy", }) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index ff8ac58a..32e6384a 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -29,6 +29,7 @@ db_object! { pub uuid: String, pub user_uuid: String, pub org_uuid: String, + pub invited_by_email: Option, pub access_all: bool, pub akey: String, @@ -170,9 +171,9 @@ impl Organization { "useGroups": CONFIG.org_groups_enabled(), "useTotp": true, "usePolicies": true, - // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "useSso": CONFIG.sso_enabled(), - // "UseKeyConnector": false, // Not supported + // "useScim": false, // Not supported (Not AGPLv3 Licensed) + "useSso": false, // Not supported + // "useKeyConnector": false, // Not supported "selfHost": true, "useApi": true, "hasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(), @@ -200,12 +201,13 @@ impl Organization { static ACTIVATE_REVOKE_DIFF: i32 = 128; impl UserOrganization { - pub fn new(user_uuid: String, org_uuid: String) -> Self { + pub fn new(user_uuid: String, org_uuid: String, invited_by_email: Option) -> Self { Self { uuid: crate::util::get_uuid(), user_uuid, org_uuid, + invited_by_email, access_all: false, akey: String::new(), @@ -396,7 +398,7 @@ impl UserOrganization { "resetPasswordEnrolled": self.reset_password_key.is_some(), "useResetPassword": CONFIG.mail_enabled(), "ssoBound": false, // Not supported - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported "useKeyConnector": false, "useSecretsManager": false, "usePasswordManager": true, @@ -724,6 +726,17 @@ impl UserOrganization { }} } + pub async fn confirm_user_invitations(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { + db_run! { conn: { + diesel::update(users_organizations::table) + .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) + .set(users_organizations::status.eq(UserOrgStatus::Accepted as i32)) + .execute(conn) + .map_res("Error confirming invitations") + }} + } + pub async fn find_any_state_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec { db_run! { conn: { users_organizations::table diff --git a/src/db/models/sso_nonce.rs b/src/db/models/sso_nonce.rs index 0a9533e0..881f075b 100644 --- a/src/db/models/sso_nonce.rs +++ b/src/db/models/sso_nonce.rs @@ -1,21 +1,34 @@ +use chrono::{NaiveDateTime, Utc}; + use crate::api::EmptyResult; -use crate::db::DbConn; +use crate::db::{DbConn, DbPool}; use crate::error::MapResult; +use crate::sso::NONCE_EXPIRATION; db_object! 
{ #[derive(Identifiable, Queryable, Insertable)] #[diesel(table_name = sso_nonce)] - #[diesel(primary_key(nonce))] + #[diesel(primary_key(state))] pub struct SsoNonce { + pub state: String, pub nonce: String, + pub verifier: Option, + pub redirect_uri: String, + pub created_at: NaiveDateTime, } } /// Local methods impl SsoNonce { - pub fn new(nonce: String) -> Self { - Self { + pub fn new(state: String, nonce: String, verifier: Option, redirect_uri: String) -> Self { + let now = Utc::now().naive_utc(); + + SsoNonce { + state, nonce, + verifier, + redirect_uri, + created_at: now, } } } @@ -28,7 +41,7 @@ impl SsoNonce { diesel::replace_into(sso_nonce::table) .values(SsoNonceDb::to_db(self)) .execute(conn) - .map_res("Error saving SSO device") + .map_res("Error saving SSO nonce") } postgresql { let value = SsoNonceDb::to_db(self); @@ -40,21 +53,37 @@ impl SsoNonce { } } - pub async fn delete(self, conn: &mut DbConn) -> EmptyResult { + pub async fn delete(state: &str, conn: &mut DbConn) -> EmptyResult { db_run! { conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::nonce.eq(self.nonce))) + diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state))) .execute(conn) .map_res("Error deleting SSO nonce") }} } - pub async fn find(nonce: &str, conn: &mut DbConn) -> Option { + pub async fn find(state: &str, conn: &DbConn) -> Option { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; db_run! { conn: { sso_nonce::table - .filter(sso_nonce::nonce.eq(nonce)) + .filter(sso_nonce::state.eq(state)) + .filter(sso_nonce::created_at.ge(oldest)) .first::(conn) .ok() .from_db() }} } + + pub async fn delete_expired(pool: DbPool) -> EmptyResult { + debug!("Purging expired sso_nonce"); + if let Ok(conn) = pool.get().await { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; + db_run! { conn: { + diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest))) + .execute(conn) + .map_res("Error deleting expired SSO nonce") + }} + } else { + err!("Failed to get DB connection while purging expired sso_nonce") + } + } } diff --git a/src/db/models/user.rs b/src/db/models/user.rs index 94f42c84..c553fd0e 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -6,7 +6,7 @@ use crate::crypto; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)] #[diesel(table_name = users)] #[diesel(treat_none_as_null = true)] #[diesel(primary_key(uuid))] @@ -61,6 +61,14 @@ db_object! { pub struct Invitation { pub email: String, } + + #[derive(Identifiable, Queryable, Insertable, Selectable)] + #[diesel(table_name = sso_users)] + #[diesel(primary_key(user_uuid))] + pub struct SsoUser { + pub user_uuid: String, + pub identifier: String, + } } pub enum UserKdfType { @@ -86,7 +94,7 @@ impl User { pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32; pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000; - pub fn new(email: String) -> Self { + pub fn new(email: String, name: Option) -> Self { let now = Utc::now().naive_utc(); let email = email.to_lowercase(); @@ -98,7 +106,7 @@ impl User { verified_at: None, last_verifying_at: None, login_verify_count: 0, - name: email.clone(), + name: name.unwrap_or(email.clone()), email, akey: String::new(), email_new: None, @@ -458,3 +466,51 @@ impl Invitation { } } } + +impl SsoUser { + pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { + db_run! 
{ conn: + sqlite, mysql { + diesel::replace_into(sso_users::table) + .values(SsoUserDb::to_db(self)) + .execute(conn) + .map_res("Error saving SSO user") + } + postgresql { + let value = SsoUserDb::to_db(self); + diesel::insert_into(sso_users::table) + .values(&value) + .execute(conn) + .map_res("Error saving SSO user") + } + } + } + + // Written as an union to make the query more lisible than using an `or_filter`. + // But `first()` does not appear to work with `union()` so we use `load()`. + pub async fn find_by_identifier_or_email( + identifier: &str, + mail: &str, + conn: &DbConn, + ) -> Option<(User, Option)> { + let lower_mail = mail.to_lowercase(); + + db_run! {conn: { + users::table + .inner_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(sso_users::identifier.eq(identifier)) + .union( + users::table + .left_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(users::email.eq(lower_mail)) + ) + .load(conn) + .expect("Error searching user by SSO identifier and email") + .into_iter() + .next() + .map(|(user, sso_user)| { (user.from_db(), sso_user.from_db()) }) + }} + } +} diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 70cc5960..89c4a4d1 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -234,6 +234,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -254,8 +255,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -352,6 +364,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -365,6 +378,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index 1761858d..517fe0c2 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -234,6 +234,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -254,8 +255,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! 
{ + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -352,6 +364,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -365,6 +378,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index 1761858d..517fe0c2 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -234,6 +234,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -254,8 +255,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -352,6 +364,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -365,6 +378,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/error.rs b/src/error.rs index 1061a08d..bf9f2cf4 100644 --- a/src/error.rs +++ b/src/error.rs @@ -147,6 +147,10 @@ impl Error { pub fn get_event(&self) -> &Option { &self.event } + + pub fn message(&self) -> &str { + &self.message + } } pub trait MapResult { diff --git a/src/mail.rs b/src/mail.rs index 3850ae1f..97face07 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -281,7 +281,11 @@ pub async fn send_invite( .append_pair("organizationId", org_id.as_deref().unwrap_or("_")) .append_pair("organizationUserId", org_user_id.as_deref().unwrap_or("_")) .append_pair("token", &invite_token); - if user.private_key.is_some() { + + if CONFIG.sso_enabled() && CONFIG.sso_only() { + query_params.append_pair("orgUserHasExistingUser", "false"); + query_params.append_pair("orgSsoIdentifier", org_name); + } else if user.private_key.is_some() { query_params.append_pair("orgUserHasExistingUser", "true"); } } @@ -552,6 +556,18 @@ pub async fn send_change_email(address: &str, token: &str) -> EmptyResult { send_email(address, &subject, body_html, body_text).await } +pub async fn send_sso_change_email(address: &str) -> EmptyResult { + let (subject, body_html, body_text) = get_text( + "email/sso_change_email", + json!({ + "url": format!("{}/#/settings/account", CONFIG.domain()), + "img_src": CONFIG._smtp_img_src(), + }), + )?; + + send_email(address, &subject, body_html, body_text).await +} + pub async fn send_set_password(address: &str, user_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/set_password", diff --git a/src/main.rs b/src/main.rs index 7e180e2e..ca00c499 100644 --- a/src/main.rs +++ b/src/main.rs @@ -54,6 +54,7 @@ mod db; mod http_client; mod mail; mod ratelimit; +mod sso; mod util; use crate::api::core::two_factor::duo_oidc::purge_duo_contexts; @@ -702,6 +703,13 @@ fn schedule_jobs(pool: db::DbPool) { })); } + // Purge 
sso nonce from incomplete flow (default to daily at 00h20). + if !CONFIG.purge_incomplete_sso_nonce().is_empty() { + sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || { + runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone())); + })); + } + // Periodically check for jobs to run. We probably won't need any // jobs that run more often than once a minute, so a default poll // interval of 30 seconds should be sufficient. Users who want to diff --git a/src/sso.rs b/src/sso.rs new file mode 100644 index 00000000..ad57fbbc --- /dev/null +++ b/src/sso.rs @@ -0,0 +1,544 @@ +use chrono::Utc; +use regex::Regex; +use std::borrow::Cow; +use std::time::Duration; +use url::Url; + +use mini_moka::sync::Cache; +use once_cell::sync::Lazy; +use openidconnect::core::{ + CoreClient, CoreIdTokenVerifier, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims, +}; +use openidconnect::reqwest::async_http_client; +use openidconnect::{ + AccessToken, AuthDisplay, AuthPrompt, AuthenticationFlow, AuthorizationCode, AuthorizationRequest, ClientId, + ClientSecret, CsrfToken, Nonce, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RefreshToken, + ResponseType, Scope, +}; + +use crate::{ + api::ApiResult, + auth, + auth::{AuthMethod, AuthMethodScope, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY}, + db::{ + models::{Device, SsoNonce, User}, + DbConn, + }, + CONFIG, +}; + +static AC_CACHE: Lazy> = + Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); + +static CLIENT_CACHE_KEY: Lazy = Lazy::new(|| "sso-client".to_string()); +static CLIENT_CACHE: Lazy> = Lazy::new(|| { + Cache::builder().max_capacity(1).time_to_live(Duration::from_secs(CONFIG.sso_client_cache_expiration())).build() +}); + +static SSO_JWT_ISSUER: Lazy = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin())); + +pub static NONCE_EXPIRATION: Lazy = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); + +trait AuthorizationRequestExt<'a> { + fn add_extra_params>, V: Into>>(self, params: Vec<(N, V)>) -> Self; +} + +impl<'a, AD: AuthDisplay, P: AuthPrompt, RT: ResponseType> AuthorizationRequestExt<'a> + for AuthorizationRequest<'a, AD, P, RT> +{ + fn add_extra_params>, V: Into>>(mut self, params: Vec<(N, V)>) -> Self { + for (key, value) in params { + self = self.add_extra_param(key, value); + } + self + } +} + +#[derive(Debug, Serialize, Deserialize)] +struct SsoTokenJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: String, +} + +pub fn encode_ssotoken_claims() -> String { + let time_now = Utc::now(); + let claims = SsoTokenJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + chrono::TimeDelta::try_minutes(2).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + sub: "vaultwarden".to_string(), + }; + + auth::encode_jwt(&claims) +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum OIDCCodeWrapper { + Ok { + code: String, + state: String, + }, + Error { + state: String, + error: String, + error_description: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OIDCCodeClaims { + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + + pub code: OIDCCodeWrapper, +} + +pub fn encode_code_claims(code: OIDCCodeWrapper) -> String { + let time_now = Utc::now(); + let claims = OIDCCodeClaims { + exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + code, + }; + + 
auth::encode_jwt(&claims) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct BasicTokenClaims { + iat: Option, + nbf: Option, + exp: i64, +} + +impl BasicTokenClaims { + fn nbf(&self) -> i64 { + self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) + } +} + +fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { + let mut validation = jsonwebtoken::Validation::default(); + validation.set_issuer(&[CONFIG.sso_authority()]); + validation.insecure_disable_signature_validation(); + validation.validate_aud = false; + + match jsonwebtoken::decode(token, &jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { + Ok(btc) => Ok(btc.claims), + Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), + } +} + +#[rocket::async_trait] +trait CoreClientExt { + async fn _get_client() -> ApiResult; + async fn cached() -> ApiResult; + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult; + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_>; +} + +#[rocket::async_trait] +impl CoreClientExt for CoreClient { + // Call the OpenId discovery endpoint to retrieve configuration + async fn _get_client() -> ApiResult { + let client_id = ClientId::new(CONFIG.sso_client_id()); + let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); + + let issuer_url = CONFIG.sso_issuer_url()?; + + let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { + Err(err) => err!(format!("Failed to discover OpenID provider: {err}")), + Ok(metadata) => metadata, + }; + + Ok(CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) + .set_redirect_uri(CONFIG.sso_redirect_url()?)) + } + + // Simple cache to prevent recalling the discovery endpoint each time + async fn cached() -> ApiResult { + if CONFIG.sso_client_cache_expiration() > 0 { + match CLIENT_CACHE.get(&*CLIENT_CACHE_KEY) { + Some(client) => Ok(client), + None => Self::_get_client().await.inspect(|client| { + debug!("Inserting new client in cache"); + CLIENT_CACHE.insert(CLIENT_CACHE_KEY.clone(), client.clone()); + }), + } + } else { + Self::_get_client().await + } + } + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult { + let endpoint = match self.user_info(access_token, None) { + Err(err) => err!(format!("No user_info endpoint: {err}")), + Ok(endpoint) => endpoint, + }; + + match endpoint.request_async(async_http_client).await { + Err(err) => err!(format!("Request to user_info endpoint failed: {err}")), + Ok(user_info) => Ok(user_info), + } + } + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_> { + let mut verifier = self.id_token_verifier(); + if let Some(regex_str) = CONFIG.sso_audience_trusted() { + match Regex::new(®ex_str) { + Ok(regex) => { + verifier = verifier.set_other_audience_verifier_fn(move |aud| regex.is_match(aud)); + } + Err(err) => { + error!("Failed to parse SSO_AUDIENCE_TRUSTED={regex_str} regex: {err}"); + } + } + } + verifier + } +} + +// The `nonce` allow to protect against replay attacks +// redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs +pub async fn authorize_url(state: String, client_id: &str, raw_redirect_uri: &str, mut conn: DbConn) -> ApiResult { + let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); + + let redirect_uri = match client_id { + "web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()), + "desktop" | "mobile" => 
"bitwarden://sso-callback".to_string(), + "cli" => { + let port_regex = Regex::new(r"^http://localhost:([0-9]{4})$").unwrap(); + match port_regex.captures(raw_redirect_uri).and_then(|captures| captures.get(1).map(|c| c.as_str())) { + Some(port) => format!("http://localhost:{}", port), + None => err!("Failed to extract port number"), + } + } + _ => err!(format!("Unsupported client {client_id}")), + }; + + let client = CoreClient::cached().await?; + let mut auth_req = client + .authorize_url( + AuthenticationFlow::::AuthorizationCode, + || CsrfToken::new(state), + Nonce::new_random, + ) + .add_scopes(scopes) + .add_extra_params(CONFIG.sso_authorize_extra_params_vec()?); + + let verifier = if CONFIG.sso_pkce() { + let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); + auth_req = auth_req.set_pkce_challenge(pkce_challenge); + Some(pkce_verifier.secret().to_string()) + } else { + None + }; + + let (auth_url, csrf_state, nonce) = auth_req.url(); + + let sso_nonce = SsoNonce::new(csrf_state.secret().to_string(), nonce.secret().to_string(), verifier, redirect_uri); + sso_nonce.save(&mut conn).await?; + + Ok(auth_url) +} + +#[derive(Clone, Debug)] +pub struct AuthenticatedUser { + pub refresh_token: Option, + pub access_token: String, + pub expires_in: Option, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +pub struct UserInformation { + pub state: String, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +async fn decode_code_claims(code: &str, conn: &mut DbConn) -> ApiResult<(String, String)> { + match auth::decode_jwt::(code, SSO_JWT_ISSUER.to_string()) { + Ok(code_claims) => match code_claims.code { + OIDCCodeWrapper::Ok { + code, + state, + } => Ok((code, state)), + OIDCCodeWrapper::Error { + state, + error, + error_description, + } => { + if let Err(err) = SsoNonce::delete(&state, conn).await { + error!("Failed to delete database sso_nonce using {state}: {err}") + } + err!(format!( + "SSO authorization failed: {error}, {}", + error_description.as_ref().unwrap_or(&String::new()) + )) + } + }, + Err(err) => err!(format!("Failed to decode code wrapper: {err}")), + } +} + +// During the 2FA flow we will +// - retrieve the user information and then only discover he needs 2FA. +// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged. +// The `nonce` will ensure that the user is authorized only once. +// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`. 
+pub async fn exchange_code(wrapped_code: &str, conn: &mut DbConn) -> ApiResult { + let (code, state) = decode_code_claims(wrapped_code, conn).await?; + + if let Some(authenticated_user) = AC_CACHE.get(&state) { + return Ok(UserInformation { + state, + identifier: authenticated_user.identifier, + email: authenticated_user.email, + email_verified: authenticated_user.email_verified, + user_name: authenticated_user.user_name, + }); + } + + let oidc_code = AuthorizationCode::new(code.clone()); + let client = CoreClient::cached().await?; + + let nonce = match SsoNonce::find(&state, conn).await { + None => err!(format!("Invalid state cannot retrieve nonce")), + Some(nonce) => nonce, + }; + + let mut exchange = client.exchange_code(oidc_code); + + if CONFIG.sso_pkce() { + match nonce.verifier { + None => err!(format!("Missing verifier in the DB nonce table")), + Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)), + } + } + + match exchange.request_async(async_http_client).await { + Ok(token_response) => { + let user_info = client.user_info_async(token_response.access_token().to_owned()).await?; + let oidc_nonce = Nonce::new(nonce.nonce.clone()); + + let id_token = match token_response.extra_fields().id_token() { + None => err!("Token response did not contain an id_token"), + Some(token) => token, + }; + + if CONFIG.sso_debug_tokens() { + debug!("Id token: {}", id_token.to_string()); + debug!("Access token: {}", token_response.access_token().secret().to_string()); + debug!("Refresh token: {:?}", token_response.refresh_token().map(|t| t.secret().to_string())); + debug!("Expiration time: {:?}", token_response.expires_in()); + } + + let id_claims = match id_token.claims(&client.vw_id_token_verifier(), &oidc_nonce) { + Ok(claims) => claims, + Err(err) => { + if CONFIG.sso_client_cache_expiration() > 0 { + CLIENT_CACHE.invalidate(&*CLIENT_CACHE_KEY); + } + err!(format!("Could not read id_token claims, {err}")); + } + }; + + let email = match id_claims.email() { + Some(email) => email.to_string(), + None => match user_info.email() { + None => err!("Neither id token nor userinfo contained an email"), + Some(email) => email.to_owned().to_string(), + }, + } + .to_lowercase(); + + let user_name = user_info.preferred_username().map(|un| un.to_string()); + + let refresh_token = token_response.refresh_token().map(|t| t.secret().to_string()); + if refresh_token.is_none() && CONFIG.sso_scopes_vec().contains(&"offline_access".to_string()) { + error!("Scope offline_access is present but response contain no refresh_token"); + } + + let identifier = format!("{}/{}", **id_claims.issuer(), **id_claims.subject()); + + let authenticated_user = AuthenticatedUser { + refresh_token, + access_token: token_response.access_token().secret().to_string(), + expires_in: token_response.expires_in(), + identifier: identifier.clone(), + email: email.clone(), + email_verified: id_claims.email_verified(), + user_name: user_name.clone(), + }; + + AC_CACHE.insert(state.clone(), authenticated_user.clone()); + + Ok(UserInformation { + state, + identifier, + email, + email_verified: id_claims.email_verified(), + user_name, + }) + } + Err(err) => err!(format!("Failed to contact token endpoint: {err}")), + } +} + +// User has passed 2FA flow we can delete `nonce` and clear the cache. 
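// Illustrative sketch, not part of this patch: when SSO_PKCE is enabled, the verifier stored in
// `sso_nonce` and fed to `set_pkce_verifier()` above is checked by the IdP against the challenge
// sent during authorization. For the S256 method (RFC 7636) the relation is the following
// (assumes the `sha2` crate; the helper name is hypothetical):
fn pkce_s256_challenge(verifier: &str) -> String {
    use data_encoding::BASE64URL_NOPAD;
    use sha2::{Digest, Sha256};

    // challenge = BASE64URL-NOPAD( SHA-256( ASCII(verifier) ) )
    BASE64URL_NOPAD.encode(Sha256::digest(verifier.as_bytes()).as_slice())
}
// The IdP recomputes this value from the verifier sent with the token request and rejects the
// code exchange when it does not match the challenge it received with the authorization request.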
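// Illustrative sketch, not part of this patch: the `exchange_code`/`redeem` handoff in miniature.
// The result of the code exchange is parked in a short-lived cache keyed by the CSRF `state`
// and is taken out exactly once when 2FA completes, which is what `redeem` below does.
// Keys and values are plain strings here for brevity; the real `AC_CACHE` stores `AuthenticatedUser`.
fn cache_handoff_sketch() {
    use mini_moka::sync::Cache;
    use std::time::Duration;

    let cache: Cache<String, String> =
        Cache::builder().max_capacity(1_000).time_to_live(Duration::from_secs(10 * 60)).build();

    // exchange_code: remember the authenticated user under the `state` it arrived with.
    cache.insert("state-abc".to_string(), "authenticated-user".to_string());

    // redeem (after 2FA): fetch it, then invalidate it so the same `state` cannot be replayed.
    if let Some(user) = cache.get(&"state-abc".to_string()) {
        cache.invalidate(&"state-abc".to_string());
        let _ = user;
    }
}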
+pub async fn redeem(state: &String, conn: &mut DbConn) -> ApiResult<AuthenticatedUser> {
+    if let Err(err) = SsoNonce::delete(state, conn).await {
+        error!("Failed to delete database sso_nonce using {state}: {err}")
+    }
+
+    if let Some(au) = AC_CACHE.get(state) {
+        AC_CACHE.invalidate(state);
+        Ok(au)
+    } else {
+        err!("Failed to retrieve user info from sso cache")
+    }
+}
+
+// We always return a refresh_token (without one, some secrets are not displayed in the web vault).
+// If there is no SSO refresh_token, we keep the access_token to be able to call user_info and check its validity.
+pub fn create_auth_tokens(
+    device: &Device,
+    user: &User,
+    refresh_token: Option<String>,
+    access_token: &str,
+    expires_in: Option<Duration>,
+) -> ApiResult<AuthTokens> {
+    if !CONFIG.sso_auth_only_not_session() {
+        let now = Utc::now();
+
+        let (ap_nbf, ap_exp) = match (decode_token_claims("access_token", access_token), expires_in) {
+            (Ok(ap), _) => (ap.nbf(), ap.exp),
+            (Err(_), Some(exp)) => (now.timestamp(), (now + exp).timestamp()),
+            _ => err!("Non jwt access_token and empty expires_in"),
+        };
+
+        let access_claims = auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, AuthMethod::Sso.scope_vec(), now);
+
+        _create_auth_tokens(device, refresh_token, access_claims, access_token)
+    } else {
+        Ok(AuthTokens::new(device, user, AuthMethod::Sso))
+    }
+}
+
+fn _create_auth_tokens(
+    device: &Device,
+    refresh_token: Option<String>,
+    access_claims: auth::LoginJwtClaims,
+    access_token: &str,
+) -> ApiResult<AuthTokens> {
+    let (nbf, exp, token) = if let Some(rt) = refresh_token.as_ref() {
+        match decode_token_claims("refresh_token", rt) {
+            Err(_) => {
+                let time_now = Utc::now();
+                let exp = (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp();
+                debug!("Non jwt refresh_token (expiration set to {})", exp);
+                (time_now.timestamp(), exp, TokenWrapper::Refresh(rt.to_string()))
+            }
+            Ok(refresh_payload) => {
+                debug!("Refresh_payload: {:?}", refresh_payload);
+                (refresh_payload.nbf(), refresh_payload.exp, TokenWrapper::Refresh(rt.to_string()))
+            }
+        }
+    } else {
+        debug!("No refresh_token present");
+        (access_claims.nbf, access_claims.exp, TokenWrapper::Access(access_token.to_string()))
+    };
+
+    let refresh_claims = auth::RefreshJwtClaims {
+        nbf,
+        exp,
+        iss: auth::JWT_LOGIN_ISSUER.to_string(),
+        sub: AuthMethod::Sso,
+        device_token: device.refresh_token.clone(),
+        token: Some(token),
+    };
+
+    Ok(AuthTokens {
+        refresh_claims,
+        access_claims,
+    })
+}
+
+// This endpoint is called in two cases:
+// - the session is close to expiration, so we try to extend it
+// - the user is about to perform an action, and we check that the session is still valid
+pub async fn exchange_refresh_token(
+    device: &Device,
+    user: &User,
+    refresh_claims: &auth::RefreshJwtClaims,
+) -> ApiResult<AuthTokens> {
+    match &refresh_claims.token {
+        Some(TokenWrapper::Refresh(refresh_token)) => {
+            let rt = RefreshToken::new(refresh_token.to_string());
+
+            let client = CoreClient::cached().await?;
+
+            let token_response = match client.exchange_refresh_token(&rt).request_async(async_http_client).await {
+                Err(err) => err!(format!("Request to exchange_refresh_token endpoint failed: {:?}", err)),
+                Ok(token_response) => token_response,
+            };
+
+            // Use the new refresh_token if one is returned
+            let rolled_refresh_token = token_response
+                .refresh_token()
+                .map(|token| token.secret().to_string())
+                .unwrap_or(refresh_token.to_string());
+
+            create_auth_tokens(
+                device,
+                user,
+                Some(rolled_refresh_token),
+                token_response.access_token().secret(),
+                token_response.expires_in(),
+            )
+        }
+
Some(TokenWrapper::Access(access_token)) => { + let now = Utc::now(); + let exp_limit = (now + *BW_EXPIRATION).timestamp(); + + if refresh_claims.exp < exp_limit { + err_silent!("Access token is close to expiration but we have no refresh token") + } + + let client = CoreClient::cached().await?; + match client.user_info_async(AccessToken::new(access_token.to_string())).await { + Err(err) => { + err_silent!(format!("Failed to retrieve user info, token has probably been invalidated: {err}")) + } + Ok(_) => { + let access_claims = auth::LoginJwtClaims::new( + device, + user, + now.timestamp(), + refresh_claims.exp, + AuthMethod::Sso.scope_vec(), + now, + ); + _create_auth_tokens(device, None, access_claims, access_token) + } + } + } + None => err!("No token present while in SSO"), + } +} diff --git a/src/static/templates/email/sso_change_email.hbs b/src/static/templates/email/sso_change_email.hbs new file mode 100644 index 00000000..5a512280 --- /dev/null +++ b/src/static/templates/email/sso_change_email.hbs @@ -0,0 +1,4 @@ +Your Email Changed + +Your email was changed in your SSO Provider. Please update your email in Account Settings ({{url}}). +{{> email/email_footer_text }} diff --git a/src/static/templates/email/sso_change_email.html.hbs b/src/static/templates/email/sso_change_email.html.hbs new file mode 100644 index 00000000..74cd445c --- /dev/null +++ b/src/static/templates/email/sso_change_email.html.hbs @@ -0,0 +1,11 @@ +Your Email Changed + +{{> email/email_header }} + + + + +
+ Your email was changed in your SSO Provider. Please update your email in Account Settings. +
+{{> email/email_footer }} diff --git a/src/util.rs b/src/util.rs index 1b77040b..400e86a9 100644 --- a/src/util.rs +++ b/src/util.rs @@ -6,7 +6,7 @@ use std::{collections::HashMap, io::Cursor, ops::Deref, path::Path}; use num_traits::ToPrimitive; use rocket::{ fairing::{Fairing, Info, Kind}, - http::{ContentType, Cookie, CookieJar, Header, HeaderMap, Method, SameSite, Status}, + http::{ContentType, Header, HeaderMap, Method, Status}, request::FromParam, response::{self, Responder}, Data, Orbit, Request, Response, Rocket, @@ -129,10 +129,12 @@ impl Cors { // If a match exists, return it. Otherwise, return None. fn get_allowed_origin(headers: &HeaderMap<'_>) -> Option { let origin = Cors::get_header(headers, "Origin"); - let domain_origin = CONFIG.domain_origin(); - let sso_origin = CONFIG.sso_authority(); let safari_extension_origin = "file://"; - if origin == domain_origin || origin == safari_extension_origin || origin == sso_origin { + + if origin == CONFIG.domain_origin() + || origin == safari_extension_origin + || (CONFIG.sso_enabled() && origin == CONFIG.sso_authority()) + { Some(origin) } else { None @@ -257,33 +259,6 @@ impl<'r> FromParam<'r> for SafeString { } } -pub struct CustomRedirect { - pub url: String, - pub headers: Vec<(String, String)>, -} - -impl<'r> rocket::response::Responder<'r, 'static> for CustomRedirect { - fn respond_to(self, _: &rocket::request::Request<'_>) -> rocket::response::Result<'static> { - let mut response = Response::build() - .status(rocket::http::Status { - code: 307, - }) - .raw_header("Location", self.url) - .header(ContentType::HTML) - .finalize(); - - // Normal headers - response.set_raw_header("Referrer-Policy", "same-origin"); - response.set_raw_header("X-XSS-Protection", "0"); - - for header in &self.headers { - response.set_raw_header(header.0.clone(), header.1.clone()); - } - - Ok(response) - } -} - // Log all the routes from the main paths list, and the attachments endpoint // Effectively ignores, any static file route, and the alive endpoint const LOGGED_ROUTES: [&str; 7] = ["/api", "/admin", "/identity", "/icons", "/attachments", "/events", "/notifications"]; @@ -903,29 +878,3 @@ mod tests { }); } } - -pub struct CookieManager<'a> { - jar: &'a CookieJar<'a>, -} - -impl<'a> CookieManager<'a> { - pub fn new(jar: &'a CookieJar<'a>) -> Self { - Self { - jar, - } - } - - pub fn set_cookie(&self, name: String, value: String) { - let cookie = Cookie::build((name, value)).same_site(SameSite::Lax); - - self.jar.add(cookie) - } - - pub fn get_cookie(&self, name: String) -> Option { - self.jar.get(&name).map(|c| c.value().to_string()) - } - - pub fn delete_cookie(&self, name: String) { - self.jar.remove(Cookie::from(name)); - } -}
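// Illustrative sketch, not part of this patch: the origin allow-list that the `Cors::get_allowed_origin`
// change above boils down to. The IdP origin (SSO_AUTHORITY) is only accepted while SSO is enabled;
// the parameters stand in for the corresponding CONFIG getters.
fn is_allowed_origin(origin: &str, domain_origin: &str, sso_authority: &str, sso_enabled: bool) -> bool {
    let safari_extension_origin = "file://";
    origin == domain_origin || origin == safari_extension_origin || (sso_enabled && origin == sso_authority)
}

// Example: with SSO disabled, the IdP origin is no longer echoed back in CORS headers.
// is_allowed_origin("https://auth.example.com", "https://vault.example.com", "https://auth.example.com", false) == false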