
Enable clippy lints on a workspace level

This enables a lot more lints than before in some crates, so this commit also fixes a lot of new warnings.
Quentin Gliech
2023-12-05 16:45:40 +01:00
parent df3ca5ae66
commit a0f5f3c642
88 changed files with 567 additions and 236 deletions
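The gist of the change, condensed from the hunks below: the lint levels that each crate used to repeat in its own `#![deny(...)]`/`#![warn(...)]` attributes are declared once in the root Cargo.toml, and every member crate opts in through a `[lints]` table (this relies on the workspace lints feature stabilised around Rust 1.74). Crate-specific tweaks such as `#![allow(clippy::module_name_repetitions)]` or `#![deny(clippy::future_not_send)]` remain as crate-level attributes.

[workspace.lints.rust]
unsafe_code = "forbid"

[workspace.lints.clippy]
all = "deny"
pedantic = "warn"
str_to_string = "deny"

[workspace.lints.rustdoc]
broken_intra_doc_links = "deny"

# in each member crate's Cargo.toml
[lints]
workspace = true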

View File

@@ -12,6 +12,18 @@ package.edition = "2021"
package.homepage = "https://matrix-org.github.io/matrix-authentication-service/"
package.repository = "https://github.com/matrix-org/matrix-authentication-service/"
[workspace.lints.rust]
unsafe_code = "forbid"
[workspace.lints.clippy]
all = "deny"
pedantic = "warn"
str_to_string = "deny"
[workspace.lints.rustdoc]
broken_intra_doc_links = "deny"
[workspace.dependencies]
# High-level error handling

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
axum = { version = "0.6.20", features = ["headers"] }

View File

@@ -84,6 +84,12 @@ impl Credentials {
}
}
/// Fetch the client from the database
///
/// # Errors
///
/// Returns an error if the client could not be found or if the underlying
/// repository errored.
pub async fn fetch<E>(
&self,
repo: &mut impl RepositoryAccess<Error = E>,
@@ -98,6 +104,11 @@ impl Credentials {
repo.oauth2_client().find_by_client_id(client_id).await
}
/// Verify credentials presented by the client for authentication
///
/// # Errors
///
/// Returns an error if the credentials are invalid.
#[tracing::instrument(skip_all, err)]
pub async fn verify(
&self,

View File

@@ -146,6 +146,13 @@ impl CookieJar {
self
}
/// Load and deserialize a cookie from the jar
///
/// Returns `None` if the cookie is not present
///
/// # Errors
///
/// Returns an error if the cookie cannot be deserialized
pub fn load<T: DeserializeOwned>(&self, key: &str) -> Result<Option<T>, CookieDecodeError> {
let Some(cookie) = self.inner.get(key) else {
return Ok(None);

View File

@@ -80,6 +80,10 @@ impl CsrfToken {
}
/// Verifies that the value got from an HTML form matches this token
///
/// # Errors
///
/// Returns an error if the value in the form does not match this token
pub fn verify_form_value(&self, form_value: &str) -> Result<(), CsrfError> {
let form_value = BASE64URL_NOPAD.decode(form_value.as_bytes())?;
if self.token[..] == form_value {
@@ -108,10 +112,20 @@ pub struct ProtectedForm<T> {
}
pub trait CsrfExt {
/// Get the current CSRF token out of the cookie jar, generating a new one
/// if necessary
fn csrf_token<C, R>(self, clock: &C, rng: R) -> (CsrfToken, Self)
where
R: RngCore,
C: Clock;
/// Verify that the given CSRF-protected form is valid, returning the inner
/// value
///
/// # Errors
///
/// Returns an error if the CSRF cookie is missing or if the value in the
/// form is invalid
fn verify_form<C, T>(&self, clock: &C, form: ProtectedForm<T>) -> Result<T, CsrfError>
where
C: Clock;

View File

@@ -29,6 +29,12 @@ pub struct HttpClientFactory {
}
impl HttpClientFactory {
/// Constructs a new HTTP client factory
///
/// # Errors
///
/// Returns an error if the client factory failed to initialise, which can
/// happen when it fails to load the system's CA certificates.
pub async fn new() -> Result<Self, ClientInitError> {
Ok(Self {
traced_connector: make_traced_connector().await?,
@@ -37,10 +43,6 @@ impl HttpClientFactory {
}
/// Constructs a new HTTP client
///
/// # Errors
///
/// Returns an error if the client failed to initialise
pub fn client<B>(&self, category: &'static str) -> ClientService<TracedClient<B>>
where
B: axum::body::HttpBody + Send,
@@ -54,10 +56,6 @@ impl HttpClientFactory {
}
/// Constructs a new [`HttpService`], suitable for `mas-oidc-client`
///
/// # Errors
///
/// Returns an error if the client failed to initialise
pub fn http_service(&self, category: &'static str) -> HttpService {
let client = self.client(category);
let client = (

View File

@@ -12,15 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions, clippy::missing_errors_doc)]
#![deny(clippy::future_not_send)]
#![allow(clippy::module_name_repetitions)]
pub mod client_authorization;
pub mod cookies;

View File

@@ -42,6 +42,11 @@ impl SessionInfo {
}
/// Load the [`BrowserSession`] from database
///
/// # Errors
///
/// Returns an error if the session is not found or if the session is not
/// active anymore
pub async fn load_session<E>(
&self,
repo: &mut impl RepositoryAccess<Error = E>,

View File

@@ -84,6 +84,13 @@ pub struct UserAuthorization<F = ()> {
impl<F: Send> UserAuthorization<F> {
// TODO: take scopes to validate as parameter
/// Verify a user authorization and return the session and the protected
/// form value
///
/// # Errors
///
/// Returns an error if the token is invalid, if the user session ended or
/// if the form is missing
pub async fn protected_form<E>(
self,
repo: &mut impl RepositoryAccess<Error = E>,
@@ -103,6 +110,11 @@ impl<F: Send> UserAuthorization<F> {
}
// TODO: take scopes to validate as parameter
/// Verify a user authorization and return the session
///
/// # Errors
///
/// Returns an error if the token is invalid or if the user session ended
pub async fn protected<E>(
self,
repo: &mut impl RepositoryAccess<Error = E>,

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
axum = "0.6.20"

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)]
use std::{io::IsTerminal, sync::Arc};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
tokio = { version = "1.34.0", features = ["fs", "rt"] }
tracing.workspace = true
@@ -44,3 +47,4 @@ dist = []
[[bin]]
name = "schema"
doc = false

View File

@@ -12,15 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
missing_docs,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs, rustdoc::missing_crate_level_docs)]
#![allow(clippy::module_name_repetitions)]
// derive(JSONSchema) uses &str.to_string()
#![allow(clippy::str_to_string)]
//! Application configuration logic

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
chrono.workspace = true
thiserror.workspace = true

View File

@@ -40,7 +40,11 @@ impl Device {
#[must_use]
pub fn to_scope_token(&self) -> ScopeToken {
// SAFETY: the inner id should only have valid scope characters
format!("{DEVICE_SCOPE_PREFIX}{}", self.id).parse().unwrap()
let Ok(scope_token) = format!("{DEVICE_SCOPE_PREFIX}{}", self.id).parse() else {
unreachable!()
};
scope_token
}
/// Get the corresponding [`Device`] from a [`ScopeToken`]

View File

@@ -72,6 +72,11 @@ impl CompatRefreshTokenState {
matches!(self, Self::Consumed { .. })
}
/// Consume the refresh token, returning a new state.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Valid => Ok(Self::Consumed { consumed_at }),
@@ -99,6 +104,11 @@ impl std::ops::Deref for CompatRefreshToken {
}
impl CompatRefreshToken {
/// Consume the refresh token and return the consumed token.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)

View File

@@ -36,7 +36,7 @@ pub enum CompatSsoLoginState {
}
impl CompatSsoLoginState {
/// Returns `true` if the compat sso login state is [`Pending`].
/// Returns `true` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
@@ -44,7 +44,7 @@ impl CompatSsoLoginState {
matches!(self, Self::Pending)
}
/// Returns `true` if the compat sso login state is [`Fulfilled`].
/// Returns `true` if the compat SSO login state is [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
#[must_use]
@@ -52,7 +52,7 @@ impl CompatSsoLoginState {
matches!(self, Self::Fulfilled { .. })
}
/// Returns `true` if the compat sso login state is [`Exchanged`].
/// Returns `true` if the compat SSO login state is [`Exchanged`].
///
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
#[must_use]
@@ -60,6 +60,11 @@ impl CompatSsoLoginState {
matches!(self, Self::Exchanged { .. })
}
/// Get the time at which the login was fulfilled.
///
/// Returns `None` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
pub fn fulfilled_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -70,6 +75,11 @@ impl CompatSsoLoginState {
}
}
/// Get the time at which the login was exchanged.
///
/// Returns `None` if the compat SSO login state is not [`Exchanged`].
///
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
#[must_use]
pub fn exchanged_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -78,6 +88,11 @@ impl CompatSsoLoginState {
}
}
/// Get the session ID associated with the login.
///
/// Returns `None` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
pub fn session_id(&self) -> Option<Ulid> {
match self {
@@ -88,6 +103,14 @@ impl CompatSsoLoginState {
}
}
/// Transition the compat SSO login state from [`Pending`] to [`Fulfilled`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
pub fn fulfill(
self,
fulfilled_at: DateTime<Utc>,
@@ -102,6 +125,15 @@ impl CompatSsoLoginState {
}
}
/// Transition the compat SSO login state from [`Fulfilled`] to
/// [`Exchanged`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
pub fn exchange(self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Fulfilled {
@@ -135,6 +167,15 @@ impl std::ops::Deref for CompatSsoLogin {
}
impl CompatSsoLogin {
/// Transition the compat SSO login from a [`Pending`] state to
/// [`Fulfilled`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
pub fn fulfill(
mut self,
fulfilled_at: DateTime<Utc>,
@@ -144,6 +185,15 @@ impl CompatSsoLogin {
Ok(self)
}
/// Transition the compat SSO login from a [`Fulfilled`] state to
/// [`Exchanged`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
pub fn exchange(mut self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.exchange(exchanged_at)?;
Ok(self)

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::missing_panics_doc,
clippy::missing_errors_doc
)]
#![allow(clippy::module_name_repetitions)]
use thiserror::Error;
@@ -29,6 +22,7 @@ pub(crate) mod tokens;
pub(crate) mod upstream_oauth2;
pub(crate) mod users;
/// Error when an invalid state transition is attempted.
#[derive(Debug, Error)]
#[error("invalid state transition")]
pub struct InvalidTransitionError;

View File

@@ -39,6 +39,7 @@ pub struct Pkce {
}
impl Pkce {
/// Create a new PKCE challenge, with the given method and challenge.
#[must_use]
pub fn new(challenge_method: PkceCodeChallengeMethod, challenge: String) -> Self {
Pkce {
@@ -47,6 +48,11 @@ impl Pkce {
}
}
/// Verify the PKCE challenge.
///
/// # Errors
///
/// Returns an error if the verifier is invalid.
pub fn verify(&self, verifier: &str) -> Result<(), CodeChallengeError> {
self.challenge_method.verify(&self.challenge, verifier)
}
@@ -176,11 +182,25 @@ impl AuthorizationGrant {
self.created_at - Duration::seconds(max_age.unwrap_or(3600 * 24 * 365))
}
/// Mark the authorization grant as exchanged.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Fulfilled`].
///
/// [`Fulfilled`]: AuthorizationGrantStage::Fulfilled
pub fn exchange(mut self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.stage = self.stage.exchange(exchanged_at)?;
Ok(self)
}
/// Mark the authorization grant as fulfilled.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Pending`].
///
/// [`Pending`]: AuthorizationGrantStage::Pending
pub fn fulfill(
mut self,
fulfilled_at: DateTime<Utc>,
@@ -190,12 +210,23 @@ impl AuthorizationGrant {
Ok(self)
}
// TODO: this is not used?
/// Mark the authorization grant as cancelled.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Pending`].
///
/// [`Pending`]: AuthorizationGrantStage::Pending
///
/// # TODO
///
/// This appears to be unused
pub fn cancel(mut self, canceld_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.stage = self.stage.cancel(canceld_at)?;
Ok(self)
}
#[doc(hidden)]
pub fn sample(now: DateTime<Utc>, rng: &mut impl RngCore) -> Self {
Self {
id: Ulid::from_datetime_with_source(now.into(), rng),

View File

@@ -112,6 +112,15 @@ pub enum InvalidRedirectUriError {
}
impl Client {
/// Determine which redirect URI to use for the given request.
///
/// # Errors
///
/// Returns an error if:
///
/// - no URL was given but multiple redirect URIs are registered,
/// - no URL was registered, or
/// - the given URL is not registered
pub fn resolve_redirect_uri<'a>(
&'a self,
redirect_uri: &'a Option<Url>,
@@ -125,6 +134,7 @@ impl Client {
}
}
#[doc(hidden)]
pub fn samples(now: DateTime<Utc>, rng: &mut impl RngCore) -> Vec<Client> {
vec![
// A client with all the URIs set

View File

@@ -121,6 +121,11 @@ pub enum RefreshTokenState {
}
impl RefreshTokenState {
/// Consume the refresh token, returning a new state.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Valid => Ok(Self::Consumed { consumed_at }),
@@ -169,6 +174,11 @@ impl RefreshToken {
self.id.to_string()
}
/// Consumes the refresh token and returns the consumed token.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)
@@ -266,6 +276,10 @@ impl TokenType {
/// Ok(TokenType::CompatAccessToken)
/// );
/// ```
///
/// # Errors
///
/// Returns an error if the token is not valid
pub fn check(token: &str) -> Result<TokenType, TokenFormatError> {
// these are legacy tokens imported from Synapse
// we don't do any validation on them and continue as is

View File

@@ -37,6 +37,14 @@ pub enum UpstreamOAuthAuthorizationSessionState {
}
impl UpstreamOAuthAuthorizationSessionState {
/// Mark the upstream OAuth 2.0 authorization session as completed.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
pub fn complete(
self,
completed_at: DateTime<Utc>,
@@ -53,6 +61,14 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Mark the upstream OAuth 2.0 authorization session as consumed.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
pub fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Completed {
@@ -69,6 +85,12 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the link ID for the upstream OAuth 2.0 authorization session.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn link_id(&self) -> Option<Ulid> {
match self {
@@ -77,6 +99,13 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the time at which the upstream OAuth 2.0 authorization session was
/// completed.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn completed_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -87,6 +116,12 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the ID token for the upstream OAuth 2.0 authorization session.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn id_token(&self) -> Option<&str> {
match self {
@@ -97,6 +132,13 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the time at which the upstream OAuth 2.0 authorization session was
/// consumed.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// not [`Consumed`].
///
/// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed
#[must_use]
pub fn consumed_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -105,7 +147,7 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
@@ -114,7 +156,7 @@ impl UpstreamOAuthAuthorizationSessionState {
matches!(self, Self::Pending)
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
@@ -123,7 +165,7 @@ impl UpstreamOAuthAuthorizationSessionState {
matches!(self, Self::Completed { .. })
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Consumed`].
///
/// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed
@@ -153,6 +195,15 @@ impl std::ops::Deref for UpstreamOAuthAuthorizationSession {
}
impl UpstreamOAuthAuthorizationSession {
/// Mark the upstream OAuth 2.0 authorization session as completed. Returns
/// the updated session.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
pub fn complete(
mut self,
completed_at: DateTime<Utc>,
@@ -163,6 +214,15 @@ impl UpstreamOAuthAuthorizationSession {
Ok(self)
}
/// Mark the upstream OAuth 2.0 authorization session as consumed. Returns
/// the updated session.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)

View File

@@ -39,6 +39,7 @@ impl User {
}
impl User {
#[doc(hidden)]
#[must_use]
pub fn samples(now: chrono::DateTime<Utc>, rng: &mut impl Rng) -> Vec<Self> {
vec![User {
@@ -175,6 +176,7 @@ impl Deref for UserEmailVerification {
}
impl UserEmailVerification {
#[doc(hidden)]
#[must_use]
pub fn samples(now: chrono::DateTime<Utc>, rng: &mut impl Rng) -> Vec<Self> {
let states = [

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
tracing.workspace = true

View File

@@ -14,14 +14,7 @@
//! Helps sending emails to users, with different email backends
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
mod mailer;
mod transport;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
async-graphql = { version = "6.0.11", features = ["chrono", "url"] }
@@ -29,3 +32,4 @@ mas-storage = { path = "../storage" }
[[bin]]
name = "schema"
doc = false

View File

@@ -12,19 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::missing_errors_doc,
clippy::unused_async
)]
#![deny(clippy::future_not_send)]
#![allow(clippy::module_name_repetitions, clippy::unused_async)]
use async_graphql::EmptySubscription;
use mas_data_model::{BrowserSession, Session, User};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
# Async runtime
tokio = { version = "1.34.0", features = ["macros"] }

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![deny(clippy::future_not_send)]
#![allow(
// Some axum handlers need that
clippy::unused_async,

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
axum = { version = "0.6.20", optional = true }
bytes = "1.5.0"

View File

@@ -14,14 +14,7 @@
//! [`tower`] layers and services to help building HTTP client and servers
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(rustdoc::missing_crate_level_docs)]
#![allow(clippy::module_name_repetitions)]
#[cfg(feature = "client")]

View File

@@ -136,7 +136,7 @@ async fn test_urlencoded_request_body() {
}
let bytes = hyper::body::to_bytes(request.into_body()).await?;
assert_eq!(bytes.to_vec(), br#"hello=world"#.to_vec());
assert_eq!(bytes.to_vec(), br"hello=world".to_vec());
let res = Response::new(hyper::Body::empty());
Ok(res)

View File

@@ -7,6 +7,9 @@ edition.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
camino.workspace = true
clap.workspace = true
@@ -16,4 +19,4 @@ tracing-subscriber.workspace = true
tracing.workspace = true
walkdir = "2.4.0"
mas-i18n = { path = "../i18n" }

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![deny(clippy::all)]
#![warn(clippy::pedantic)]
use std::fs::File;
use camino::Utf8PathBuf;

View File

@@ -7,6 +7,9 @@ edition.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
camino.workspace = true
icu_list = { version = "1.4.0", features = ["compiled_data", "std"] }

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![warn(clippy::pedantic)]
#![deny(clippy::all)]
pub mod sprintf;
pub mod translations;
mod translator;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
async-trait = "0.1.74"

View File

@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::{collections::HashMap, fmt::Display, sync::Arc};
use camino::{Utf8Path, Utf8PathBuf};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
serde = { workspace = true, optional = true }
schemars = { version = "0.8.16", default-features = false, optional = true }

View File

@@ -14,14 +14,7 @@
//! Values from IANA registries, generated by the `mas-iana-codegen` crate
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
#![allow(clippy::module_name_repetitions)]
pub mod jose;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
base64ct = { version = "1.6.0", features = ["std"] }
chrono.workspace = true

View File

@@ -56,6 +56,10 @@ impl<C: Encoding> Base64<C> {
}
/// Parse some base64-encoded data to create a `Base64` instance.
///
/// # Errors
///
/// Returns an error if the input is not valid base64.
pub fn parse(encoded: &str) -> Result<Self, base64ct::Error> {
C::decode_vec(encoded).map(Self::new)
}

View File

@@ -37,7 +37,14 @@ pub enum ClaimError {
}
pub trait Validator<T> {
/// The associated error type returned by this validator.
type Error;
/// Validate a claim value
///
/// # Errors
///
/// Returns an error if the value is invalid.
fn validate(&self, value: &T) -> Result<(), Self::Error>;
}
@@ -68,6 +75,11 @@ where
}
}
/// Insert a claim into the given claims map.
///
/// # Errors
///
/// Returns an error if the value failed to serialize.
pub fn insert<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -85,6 +97,12 @@ where
Ok(())
}
/// Extract a claim from the given claims map.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize, if its value is
/// invalid or if the claim is missing.
pub fn extract_required(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -98,6 +116,12 @@ where
self.extract_required_with_options(claims, validator)
}
/// Extract a claim from the given claims map, with the given options.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize, if its value is
/// invalid or if the claim is missing.
pub fn extract_required_with_options<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -124,6 +148,12 @@ where
Ok(res)
}
/// Extract a claim from the given claims map, if it exists.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize or if its value is
/// invalid.
pub fn extract_optional(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -137,6 +167,13 @@ where
self.extract_optional_with_options(claims, validator)
}
/// Extract a claim from the given claims map, if it exists, with the given
/// options.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize or if its value is
/// invalid.
pub fn extract_optional_with_options<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -238,7 +275,7 @@ impl From<&TimeOptions> for TimeNotBefore {
///
/// According to the [OpenID Connect Core 1.0 specification].
///
/// # Errors
///
/// Returns an error if the algorithm is not supported.
///

View File

@@ -57,21 +57,29 @@ pub enum AsymmetricSigningKey {
}
impl AsymmetricSigningKey {
/// Create a new signing key with the RS256 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs256(key: rsa::RsaPrivateKey) -> Self {
Self::Rs256(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the RS384 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs384(key: rsa::RsaPrivateKey) -> Self {
Self::Rs384(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the RS512 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs512(key: rsa::RsaPrivateKey) -> Self {
Self::Rs512(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the PS256 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps256(key: rsa::RsaPrivateKey) -> Self {
Self::Ps256(rsa::pss::SigningKey::new_with_salt_len(
@@ -80,6 +88,8 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the PS384 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps384(key: rsa::RsaPrivateKey) -> Self {
Self::Ps384(rsa::pss::SigningKey::new_with_salt_len(
@@ -88,6 +98,8 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the PS512 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps512(key: rsa::RsaPrivateKey) -> Self {
Self::Ps512(rsa::pss::SigningKey::new_with_salt_len(
@@ -96,21 +108,34 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the ES256 algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es256(key: elliptic_curve::SecretKey<p256::NistP256>) -> Self {
Self::Es256(ecdsa::SigningKey::from(key))
}
/// Create a new signing key with the ES384 algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es384(key: elliptic_curve::SecretKey<p384::NistP384>) -> Self {
Self::Es384(ecdsa::SigningKey::from(key))
}
/// Create a new signing key with the ES256K algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es256k(key: elliptic_curve::SecretKey<k256::Secp256k1>) -> Self {
Self::Es256K(ecdsa::SigningKey::from(key))
}
/// Create a new signing key for the given algorithm from the given private
/// JWK parameters.
///
/// # Errors
///
/// Returns an error if the key parameters are not suitable for the given
/// algorithm.
pub fn from_jwk_and_alg(
params: &JsonWebKeyPrivateParameters,
alg: &JsonWebSignatureAlg,
@@ -275,51 +300,76 @@ pub enum AsymmetricVerifyingKey {
}
impl AsymmetricVerifyingKey {
/// Create a new verifying key with the RS256 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs256(key: rsa::RsaPublicKey) -> Self {
Self::Rs256(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the RS384 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs384(key: rsa::RsaPublicKey) -> Self {
Self::Rs384(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the RS512 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs512(key: rsa::RsaPublicKey) -> Self {
Self::Rs512(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS256 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps256(key: rsa::RsaPublicKey) -> Self {
Self::Ps256(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS384 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps384(key: rsa::RsaPublicKey) -> Self {
Self::Ps384(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS512 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps512(key: rsa::RsaPublicKey) -> Self {
Self::Ps512(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the ES256 algorithm from the given ECDSA
/// public key.
#[must_use]
pub fn es256(key: elliptic_curve::PublicKey<p256::NistP256>) -> Self {
Self::Es256(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key with the ES384 algorithm from the given ECDSA
/// public key.
#[must_use]
pub fn es384(key: elliptic_curve::PublicKey<p384::NistP384>) -> Self {
Self::Es384(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key with the ES256K algorithm from the given
/// ECDSA public key.
#[must_use]
pub fn es256k(key: elliptic_curve::PublicKey<k256::Secp256k1>) -> Self {
Self::Es256K(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key for the given algorithm from the given public
/// JWK parameters.
///
/// # Errors
///
/// Returns an error if the key parameters are not suitable for the given
/// algorithm.
pub fn from_jwk_and_alg(
params: &JsonWebKeyPublicParameters,
alg: &JsonWebSignatureAlg,

View File

@@ -33,6 +33,11 @@ pub struct InvalidAlgorithm {
}
impl SymmetricKey {
/// Create a new symmetric key for the given algorithm with the given key.
///
/// # Errors
///
/// Returns an error if the algorithm is not supported.
pub fn new_for_alg(key: Vec<u8>, alg: &JsonWebSignatureAlg) -> Result<Self, InvalidAlgorithm> {
match alg {
JsonWebSignatureAlg::Hs256 => Ok(Self::hs256(key)),
@@ -45,16 +50,19 @@ impl SymmetricKey {
}
}
/// Create a new symmetric key using the HS256 algorithm with the given key.
#[must_use]
pub const fn hs256(key: Vec<u8>) -> Self {
Self::Hs256(super::Hs256Key::new(key))
}
/// Create a new symmetric key using the HS384 algorithm with the given key.
#[must_use]
pub const fn hs384(key: Vec<u8>) -> Self {
Self::Hs384(super::Hs384Key::new(key))
}
/// Create a new symmetric key using the HS512 algorithm with the given key.
#[must_use]
pub const fn hs512(key: Vec<u8>) -> Self {
Self::Hs512(super::Hs512Key::new(key))

View File

@@ -106,6 +106,7 @@ impl TryFrom<PrivateJsonWebKey> for PublicJsonWebKey {
}
impl<P> JsonWebKey<P> {
/// Create a new [`JsonWebKey`] with the given parameters.
#[must_use]
pub const fn new(parameters: P) -> Self {
Self {
@@ -121,6 +122,12 @@ impl<P> JsonWebKey<P> {
}
}
/// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible
/// mapper, consuming the original key.
///
/// # Errors
///
/// Returns an error if the mapper returns an error.
pub fn try_map<M, O, E>(self, mapper: M) -> Result<JsonWebKey<O>, E>
where
M: FnOnce(P) -> Result<O, E>,
@@ -138,6 +145,8 @@ impl<P> JsonWebKey<P> {
})
}
/// Map the parameters of this [`JsonWebKey`] to a new type, consuming the
/// original key.
pub fn map<M, O>(self, mapper: M) -> JsonWebKey<O>
where
M: FnOnce(P) -> O,
@@ -155,6 +164,12 @@ impl<P> JsonWebKey<P> {
}
}
/// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible
/// mapper, cloning the other fields.
///
/// # Errors
///
/// Returns an error if the mapper returns an error.
pub fn try_cloned_map<M, O, E>(&self, mapper: M) -> Result<JsonWebKey<O>, E>
where
M: FnOnce(&P) -> Result<O, E>,
@@ -172,6 +187,8 @@ impl<P> JsonWebKey<P> {
})
}
/// Map the parameters of this [`JsonWebKey`] to a new type, cloning the
/// other fields.
pub fn cloned_map<M, O>(&self, mapper: M) -> JsonWebKey<O>
where
M: FnOnce(&P) -> O,
@@ -189,35 +206,41 @@ impl<P> JsonWebKey<P> {
}
}
/// Set the `use` field of this [`JsonWebKey`].
#[must_use]
pub fn with_use(mut self, value: JsonWebKeyUse) -> Self {
self.r#use = Some(value);
self
}
/// Set the `key_ops` field of this [`JsonWebKey`].
#[must_use]
pub fn with_key_ops(mut self, key_ops: Vec<JsonWebKeyOperation>) -> Self {
self.key_ops = Some(key_ops);
self
}
/// Set the `alg` field of this [`JsonWebKey`].
#[must_use]
pub fn with_alg(mut self, alg: JsonWebSignatureAlg) -> Self {
self.alg = Some(alg);
self
}
/// Set the `kid` field of this [`JsonWebKey`].
#[must_use]
pub fn with_kid(mut self, kid: impl Into<String>) -> Self {
self.kid = Some(kid.into());
self
}
/// Get the `kid` field of this [`JsonWebKey`], if set.
#[must_use]
pub const fn alg(&self) -> Option<&JsonWebSignatureAlg> {
self.alg.as_ref()
}
/// Get the inner parameters of this [`JsonWebKey`].
#[must_use]
pub const fn params(&self) -> &P {
&self.parameters

View File

@@ -192,10 +192,12 @@ pub struct NoKeyWorked {
}
impl<'a, T> Jwt<'a, T> {
/// Get the JWT header
pub fn header(&self) -> &JsonWebSignatureHeader {
&self.header
}
/// Get the JWT payload
pub fn payload(&self) -> &T {
&self.payload
}
@@ -209,6 +211,11 @@ impl<'a, T> Jwt<'a, T> {
}
}
/// Verify the signature of this JWT using the given key.
///
/// # Errors
///
/// Returns an error if the signature is invalid.
pub fn verify<K, S>(&self, key: &K) -> Result<(), JwtVerificationError>
where
K: Verifier<S>,
@@ -221,6 +228,12 @@ impl<'a, T> Jwt<'a, T> {
.map_err(JwtVerificationError::verify)
}
/// Verify the signature of this JWT using the given symmetric key.
///
/// # Errors
///
/// Returns an error if the signature is invalid or if the algorithm is not
/// supported.
pub fn verify_with_shared_secret(&self, secret: Vec<u8>) -> Result<(), NoKeyWorked> {
let verifier = crate::jwa::SymmetricKey::new_for_alg(secret, self.header().alg())
.map_err(|_| NoKeyWorked::default())?;
@@ -230,6 +243,12 @@ impl<'a, T> Jwt<'a, T> {
Ok(())
}
/// Verify the signature of this JWT using the given JWKS.
///
/// # Errors
///
/// Returns an error if the signature is invalid, if no key matches the
/// constraints, or if the algorithm is not supported.
pub fn verify_with_jwks(&self, jwks: &PublicJsonWebKeySet) -> Result<(), NoKeyWorked> {
let constraints = ConstraintSet::from(self.header());
let candidates = constraints.filter(&**jwks);
@@ -250,14 +269,17 @@ impl<'a, T> Jwt<'a, T> {
Err(NoKeyWorked::default())
}
/// Get the raw JWT string as a borrowed [`str`]
pub fn as_str(&'a self) -> &'a str {
&self.raw
}
/// Get the raw JWT string as an owned [`String`]
pub fn into_string(self) -> String {
self.raw.into()
}
/// Split the JWT into its parts (header and payload).
pub fn into_parts(self) -> (JsonWebSignatureHeader, T) {
(self.header, self.payload)
}
@@ -295,6 +317,12 @@ impl JwtSignatureError {
}
impl<T> Jwt<'static, T> {
/// Sign the given payload with the given key.
///
/// # Errors
///
/// Returns an error if the payload could not be serialized or if the key
/// could not sign the payload.
pub fn sign<K, S>(
header: JsonWebSignatureHeader,
payload: T,
@@ -309,6 +337,12 @@ impl<T> Jwt<'static, T> {
Self::sign_with_rng(&mut thread_rng(), header, payload, key)
}
/// Sign the given payload with the given key using the given RNG.
///
/// # Errors
///
/// Returns an error if the payload could not be serialized or if the key
/// could not sign the payload.
pub fn sign_with_rng<R, K, S>(
rng: &mut R,
header: JsonWebSignatureHeader,

View File

@@ -12,10 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(clippy::missing_errors_doc, clippy::module_name_repetitions)]
#![deny(rustdoc::broken_intra_doc_links)]
#![allow(clippy::module_name_repetitions)]
mod base64;
pub mod claims;

View File

@@ -119,7 +119,7 @@ macro_rules! asymetric_jwt_test {
let mut rng = ChaCha8Rng::seed_from_u64(42);
let alg = JsonWebSignatureAlg::$alg;
let payload = Payload {
hello: "world".to_string(),
hello: "world".to_owned(),
};
let header = JsonWebSignatureHeader::new(alg.clone());
@@ -137,7 +137,7 @@ macro_rules! asymetric_jwt_test {
fn sign_and_verify_jwt() {
let alg = JsonWebSignatureAlg::$alg;
let payload = Payload {
hello: "world".to_string(),
hello: "world".to_owned(),
};
let header = JsonWebSignatureHeader::new(alg.clone());
@@ -192,7 +192,7 @@ macro_rules! symetric_jwt_test {
fn sign_and_verify_jwt() { fn sign_and_verify_jwt() {
let alg = JsonWebSignatureAlg::$alg; let alg = JsonWebSignatureAlg::$alg;
let payload = Payload { let payload = Payload {
hello: "world".to_string(), hello: "world".to_owned(),
}; };
let header = JsonWebSignatureHeader::new(alg.clone()); let header = JsonWebSignatureHeader::new(alg.clone());
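The to_string → to_owned churn in these tests comes from clippy::str_to_string, which the workspace now denies: on a &str both calls produce a String, but to_owned expresses a plain copy instead of a round-trip through the Display machinery. A compiling sketch (the function is illustrative):

#![deny(clippy::str_to_string)]

fn greeting(name: &str) -> String {
    // `name.to_string()` would trip the lint denied above; `to_owned`
    // copies the slice directly.
    name.to_owned()
}

fn main() {
    assert_eq!(greeting("world"), "world");
}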
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
aead = { version = "0.5.2", features = ["std"] } aead = { version = "0.5.2", features = ["std"] }
const-oid = { version = "0.9.5", features = ["std"] } const-oid = { version = "0.9.5", features = ["std"] }
View File
@@ -14,10 +14,6 @@
//! A crate to store keys which can then be used to sign and verify JWTs. //! A crate to store keys which can then be used to sign and verify JWTs.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::{ops::Deref, sync::Arc}; use std::{ops::Deref, sync::Arc};
use der::{zeroize::Zeroizing, Decode, Encode, EncodePem}; use der::{zeroize::Zeroizing, Decode, Encode, EncodePem};
View File
@@ -159,6 +159,7 @@ fn load_unencrypted_as_encrypted_error() {
.is_unencrypted()); .is_unencrypted());
} }
#[allow(clippy::similar_names)]
#[test] #[test]
fn generate_sign_and_verify() { fn generate_sign_and_verify() {
// Use a seeded RNG to keep the snapshot stable // Use a seeded RNG to keep the snapshot stable
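Note that the allow is attached to this single test rather than the crate root, so the workspace-wide pedantic warning stays in force everywhere else. A generic sketch of the same scoping, with invented binding names:

// The allow applies to this one item; the rest of the file still gets
// clippy::similar_names from the workspace-level pedantic warning.
#[allow(clippy::similar_names)]
#[test]
fn sign_roundtrip() {
    // Bindings this close in spelling are what the lint usually flags.
    let signer = "key";
    let signed = format!("{signer}:payload");
    assert!(signed.starts_with(signer));
}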
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
bytes = "1.5.0" bytes = "1.5.0"
event-listener = "4.0.0" event-listener = "4.0.0"
View File
@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)] #![deny(rustdoc::missing_crate_level_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)] #![allow(clippy::module_name_repetitions)]
//! A utility crate to build flexible [`hyper`] listeners, with optional TLS //! A utility crate to build flexible [`hyper`] listeners, with optional TLS
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
async-trait = "0.1.74" async-trait = "0.1.74"
View File
@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use http::{header::AUTHORIZATION, request::Builder, Method, Request, StatusCode}; use http::{header::AUTHORIZATION, request::Builder, Method, Request, StatusCode};
use mas_axum_utils::http_client_factory::HttpClientFactory; use mas_axum_utils::http_client_factory::HttpClientFactory;
use mas_http::{EmptyBody, HttpServiceExt}; use mas_http::{EmptyBody, HttpServiceExt};
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
serde.workspace = true serde.workspace = true
View File
@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
mod mock; mod mock;
pub use self::mock::HomeserverConnection as MockHomeserverConnection; pub use self::mock::HomeserverConnection as MockHomeserverConnection;
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
http.workspace = true http.workspace = true
serde.workspace = true serde.workspace = true
View File
@@ -20,14 +20,7 @@
//! [OpenID Connect]: https://openid.net/connect/ //! [OpenID Connect]: https://openid.net/connect/
//! [Matrix Authentication Service]: https://github.com/matrix-org/matrix-authentication-service //! [Matrix Authentication Service]: https://github.com/matrix-org/matrix-authentication-service
#![forbid(unsafe_code)] #![deny(missing_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)] #![allow(clippy::module_name_repetitions)]
pub mod errors; pub mod errors;
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[features] [features]
default = ["hyper", "keystore"] default = ["hyper", "keystore"]
hyper = [ hyper = [
View File
@@ -53,14 +53,7 @@
//! [MSC3861]: https://github.com/matrix-org/matrix-spec-proposals/pull/3861 //! [MSC3861]: https://github.com/matrix-org/matrix-spec-proposals/pull/3861
//! [OAuth 2.0]: https://oauth.net/2/ //! [OAuth 2.0]: https://oauth.net/2/
#![forbid(unsafe_code)] #![deny(missing_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions, clippy::implicit_hasher)] #![allow(clippy::module_name_repetitions, clippy::implicit_hasher)]
pub mod error; pub mod error;
View File
@@ -94,7 +94,7 @@ fn id_token(issuer: &str) -> (IdToken, PublicJsonWebKeySet) {
let mut claims = HashMap::new(); let mut claims = HashMap::new();
let now = now(); let now = now();
claims::ISS.insert(&mut claims, issuer.to_string()).unwrap(); claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap();
claims::SUB claims::SUB
.insert(&mut claims, SUBJECT_IDENTIFIER.to_owned()) .insert(&mut claims, SUBJECT_IDENTIFIER.to_owned())
.unwrap(); .unwrap();
@@ -128,7 +128,7 @@ fn id_token(issuer: &str) -> (IdToken, PublicJsonWebKeySet) {
/// Generate client credentials for the given authentication method. /// Generate client credentials for the given authentication method.
fn client_credentials( fn client_credentials(
auth_method: OAuthClientAuthenticationMethod, auth_method: &OAuthClientAuthenticationMethod,
issuer: &Url, issuer: &Url,
custom_signing: Option<Box<JwtSigningFn>>, custom_signing: Option<Box<JwtSigningFn>>,
) -> ClientCredentials { ) -> ClientCredentials {
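Taking &OAuthClientAuthenticationMethod instead of an owned value follows clippy::needless_pass_by_value from the pedantic group: the helper only inspects the method, so borrowing spares callers a clone or a move. A stand-alone sketch of the idea, using an invented enum in place of the real type:

#![warn(clippy::pedantic)]

/// Illustrative stand-in for a non-Copy configuration enum.
#[derive(Debug, PartialEq, Eq)]
enum AuthMethod {
    None,
    ClientSecretBasic,
}

// Borrowing the enum keeps the caller's value usable afterwards and is
// the shape clippy::needless_pass_by_value asks for.
fn needs_secret(method: &AuthMethod) -> bool {
    *method == AuthMethod::ClientSecretBasic
}

fn main() {
    let method = AuthMethod::ClientSecretBasic;
    assert!(needs_secret(&method));
    assert!(!needs_secret(&AuthMethod::None));
    // Still usable here because it was only borrowed.
    println!("{method:?}");
}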
View File
@@ -149,7 +149,7 @@ fn pass_full_authorization_url() {
async fn pass_pushed_authorization_request() { async fn pass_pushed_authorization_request() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let authorization_endpoint = issuer.join("authorize").unwrap(); let authorization_endpoint = issuer.join("authorize").unwrap();
let par_endpoint = issuer.join("par").unwrap(); let par_endpoint = issuer.join("par").unwrap();
let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); let redirect_uri = Url::parse(REDIRECT_URI).unwrap();
@@ -225,7 +225,7 @@ async fn pass_pushed_authorization_request() {
async fn fail_pushed_authorization_request_404() { async fn fail_pushed_authorization_request_404() {
let (http_service, _, issuer) = init_test().await; let (http_service, _, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let authorization_endpoint = issuer.join("authorize").unwrap(); let authorization_endpoint = issuer.join("authorize").unwrap();
let par_endpoint = issuer.join("par").unwrap(); let par_endpoint = issuer.join("par").unwrap();
let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); let redirect_uri = Url::parse(REDIRECT_URI).unwrap();
@@ -251,7 +251,7 @@ async fn fail_pushed_authorization_request_404() {
assert_matches!( assert_matches!(
error, error,
AuthorizationError::PushedAuthorization(PushedAuthorizationError::Http(_)) AuthorizationError::PushedAuthorization(PushedAuthorizationError::Http(_))
) );
} }
/// Check if the given request to the token endpoint is valid. /// Check if the given request to the token endpoint is valid.
@@ -303,7 +303,7 @@ fn is_valid_token_endpoint_request(req: &Request) -> bool {
async fn pass_access_token_with_authorization_code() { async fn pass_access_token_with_authorization_code() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap(); let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -362,7 +362,7 @@ async fn pass_access_token_with_authorization_code() {
async fn fail_access_token_with_authorization_code_wrong_nonce() { async fn fail_access_token_with_authorization_code_wrong_nonce() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap(); let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -424,7 +424,7 @@ async fn fail_access_token_with_authorization_code_wrong_nonce() {
async fn fail_access_token_with_authorization_code_no_id_token() { async fn fail_access_token_with_authorization_code_no_id_token() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap(); let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
View File
@@ -32,7 +32,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, CLIENT_
async fn pass_access_token_with_client_credentials() { async fn pass_access_token_with_client_credentials() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretPost, &OAuthClientAuthenticationMethod::ClientSecretPost,
&issuer, &issuer,
None, None,
); );
View File
@@ -32,7 +32,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, SUBJECT
async fn pass_introspect_token() { async fn pass_introspect_token() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let introspection_endpoint = issuer.join("introspect").unwrap(); let introspection_endpoint = issuer.join("introspect").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
View File
@@ -49,7 +49,7 @@ fn id_token(
let mut claims = HashMap::new(); let mut claims = HashMap::new();
let now = now(); let now = now();
claims::ISS.insert(&mut claims, issuer.to_string()).unwrap(); claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap();
claims::AUD claims::AUD
.insert(&mut claims, CLIENT_ID.to_owned()) .insert(&mut claims, CLIENT_ID.to_owned())
.unwrap(); .unwrap();
@@ -246,5 +246,5 @@ async fn fail_verify_id_token_wrong_auth_time() {
) )
.unwrap_err(); .unwrap_err();
assert_matches!(error, IdTokenError::WrongAuthTime) assert_matches!(error, IdTokenError::WrongAuthTime);
} }
View File
@@ -30,7 +30,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, REFRESH
async fn pass_refresh_access_token() { async fn pass_refresh_access_token() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap(); let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
View File
@@ -188,9 +188,8 @@ async fn pass_register_client_private_key_jwt() {
Mock::given(method("POST")) Mock::given(method("POST"))
.and(path("/register")) .and(path("/register"))
.and(|req: &Request| { .and(|req: &Request| {
let metadata = match req.body_json::<ClientMetadata>() { let Ok(metadata) = req.body_json::<ClientMetadata>() else {
Ok(body) => body, return false;
Err(_) => return false,
}; };
*metadata.token_endpoint_auth_method() == OAuthClientAuthenticationMethod::PrivateKeyJwt *metadata.token_endpoint_auth_method() == OAuthClientAuthenticationMethod::PrivateKeyJwt
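The match-and-bail blocks in these mocks collapse into let-else, the shape clippy::manual_let_else asks for whenever the failure arm only diverges. A self-contained sketch of the rewrite with a hypothetical parse helper:

fn accepts(body: &str) -> bool {
    // Before: a match that only extracted the Ok value or bailed out.
    // let parsed = match body.parse::<u64>() {
    //     Ok(value) => value,
    //     Err(_) => return false,
    // };

    // After: let-else keeps the happy path unindented and bails in one place.
    let Ok(parsed) = body.parse::<u64>() else {
        return false;
    };
    parsed > 0
}

fn main() {
    assert!(accepts("42"));
    assert!(!accepts("not a number"));
}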
View File
@@ -28,7 +28,7 @@ use crate::{client_credentials, init_test, ACCESS_TOKEN, CLIENT_ID};
async fn pass_revoke_token() { async fn pass_revoke_token() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let revocation_endpoint = issuer.join("revoke").unwrap(); let revocation_endpoint = issuer.join("revoke").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
View File
@@ -41,7 +41,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, CLIENT_
async fn pass_none() { async fn pass_none() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None); client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap(); let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -90,7 +90,7 @@ async fn pass_none() {
async fn pass_client_secret_basic() { async fn pass_client_secret_basic() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretBasic, &OAuthClientAuthenticationMethod::ClientSecretBasic,
&issuer, &issuer,
None, None,
); );
@@ -135,7 +135,7 @@ async fn pass_client_secret_basic() {
async fn pass_client_secret_post() { async fn pass_client_secret_post() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretPost, &OAuthClientAuthenticationMethod::ClientSecretPost,
&issuer, &issuer,
None, None,
); );
@@ -195,7 +195,7 @@ async fn pass_client_secret_post() {
async fn pass_client_secret_jwt() { async fn pass_client_secret_jwt() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretJwt, &OAuthClientAuthenticationMethod::ClientSecretJwt,
&issuer, &issuer,
None, None,
); );
@@ -225,12 +225,9 @@ async fn pass_client_secret_jwt() {
return false; return false;
} }
let jwt = match query_pairs.get("client_assertion") { let Some(jwt) = query_pairs.get("client_assertion") else {
Some(jwt) => jwt, println!("Missing client assertion");
None => { return false;
println!("Missing client assertion");
return false;
}
}; };
let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap(); let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap();
@@ -279,7 +276,7 @@ async fn pass_client_secret_jwt() {
async fn pass_private_key_jwt_with_keystore() { async fn pass_private_key_jwt_with_keystore() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt, &OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer, &issuer,
None, None,
); );
@@ -319,12 +316,9 @@ async fn pass_private_key_jwt_with_keystore() {
return false; return false;
} }
let jwt = match query_pairs.get("client_assertion") { let Some(jwt) = query_pairs.get("client_assertion") else {
Some(jwt) => jwt, println!("Missing client assertion");
None => { return false;
println!("Missing client assertion");
return false;
}
}; };
let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap(); let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap();
@@ -370,7 +364,7 @@ async fn pass_private_key_jwt_with_keystore() {
async fn pass_private_key_jwt_with_custom_signing() { async fn pass_private_key_jwt_with_custom_signing() {
let (http_service, mock_server, issuer) = init_test().await; let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt, &OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer, &issuer,
Some(Box::new(|_claims, _alg| Ok("fake.signed.jwt".to_owned()))), Some(Box::new(|_claims, _alg| Ok("fake.signed.jwt".to_owned()))),
); );
@@ -439,7 +433,7 @@ async fn pass_private_key_jwt_with_custom_signing() {
async fn fail_private_key_jwt_with_custom_signing() { async fn fail_private_key_jwt_with_custom_signing() {
let (http_service, _, issuer) = init_test().await; let (http_service, _, issuer) = init_test().await;
let client_credentials = client_credentials( let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt, &OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer, &issuer,
Some(Box::new(|_claims, _alg| Err("Something went wrong".into()))), Some(Box::new(|_claims, _alg| Err("Something went wrong".into()))),
); );
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
opa-wasm = { git = "https://github.com/matrix-org/rust-opa-wasm.git" } opa-wasm = { git = "https://github.com/matrix-org/rust-opa-wasm.git" }
View File
@@ -12,11 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(clippy::missing_errors_doc)]
pub mod model; pub mod model;
use mas_data_model::{AuthorizationGrant, Client, User}; use mas_data_model::{AuthorizationGrant, Client, User};
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
axum = { version = "0.6.20", default-features = false } axum = { version = "0.6.20", default-features = false }
serde.workspace = true serde.workspace = true
View File
@@ -12,14 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::pedantic,
clippy::str_to_string,
rustdoc::broken_intra_doc_links
)]
pub(crate) mod endpoints; pub(crate) mod endpoints;
pub(crate) mod traits; pub(crate) mod traits;
mod url_builder; mod url_builder;
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
serde.workspace = true serde.workspace = true
thiserror.workspace = true thiserror.workspace = true
View File
@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)] #![deny(rustdoc::missing_crate_level_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
//! A crate to help serve single-page apps built by Vite. //! A crate to help serve single-page apps built by Vite.
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
async-trait = "0.1.74" async-trait = "0.1.74"
sqlx = { version = "0.7.3", features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "json", "uuid", "ipnetwork"] } sqlx = { version = "0.7.3", features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "json", "uuid", "ipnetwork"] }
View File
@@ -164,15 +164,7 @@
//! [`Ulid`]: ulid::Ulid //! [`Ulid`]: ulid::Ulid
//! [`Uuid`]: uuid::Uuid //! [`Uuid`]: uuid::Uuid
#![forbid(unsafe_code)] #![deny(clippy::future_not_send, missing_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
clippy::future_not_send,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)] #![allow(clippy::module_name_repetitions)]
use sqlx::migrate::Migrator; use sqlx::migrate::Migrator;
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
async-trait = "0.1.74" async-trait = "0.1.74"
chrono.workspace = true chrono.workspace = true
View File
@@ -133,15 +133,7 @@
//! } //! }
//! ``` //! ```
#![forbid(unsafe_code)] #![deny(clippy::future_not_send, missing_docs)]
#![deny(
clippy::all,
clippy::str_to_string,
clippy::future_not_send,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)] #![allow(clippy::module_name_repetitions)]
pub mod clock; pub mod clock;
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
apalis-core = { version = "0.4.7", features = ["extensions", "tokio-comp", "storage"] } apalis-core = { version = "0.4.7", features = ["extensions", "tokio-comp", "storage"] }
View File
@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::sync::Arc; use std::sync::Arc;
use apalis_core::{executor::TokioExecutor, layers::extensions::Extension, monitor::Monitor}; use apalis_core::{executor::TokioExecutor, layers::extensions::Extension, monitor::Monitor};
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
arc-swap = "1.6.0" arc-swap = "1.6.0"
tracing.workspace = true tracing.workspace = true
View File
@@ -12,15 +12,8 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![forbid(unsafe_code)] #![deny(missing_docs)]
#![deny( #![allow(clippy::module_name_repetitions)]
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions, clippy::missing_errors_doc)]
//! Templates rendering //! Templates rendering
@@ -37,7 +30,7 @@ use rand::Rng;
use serde::Serialize; use serde::Serialize;
use thiserror::Error; use thiserror::Error;
use tokio::task::JoinError; use tokio::task::JoinError;
use tracing::{debug, info, warn}; use tracing::{debug, info};
use walkdir::DirEntry; use walkdir::DirEntry;
mod context; mod context;
@@ -377,6 +370,10 @@ register_templates! {
impl Templates { impl Templates {
/// Render all templates with the generated samples to check if they render /// Render all templates with the generated samples to check if they render
/// properly /// properly
///
/// # Errors
///
/// Returns an error if any of the templates fails to render
pub fn check_render( pub fn check_render(
&self, &self,
now: chrono::DateTime<chrono::Utc>, now: chrono::DateTime<chrono::Utc>,
View File
@@ -54,6 +54,10 @@ macro_rules! register_templates {
impl Templates { impl Templates {
$( $(
$(#[$attr])? $(#[$attr])?
///
/// # Errors
///
/// Returns an error if the template fails to render.
pub fn $name pub fn $name
$(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? $(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)?
(&self, context: &$param) (&self, context: &$param)
@@ -75,6 +79,10 @@ macro_rules! register_templates {
$( $(
#[doc = concat!("Render the `", $template, "` template with sample contexts")] #[doc = concat!("Render the `", $template, "` template with sample contexts")]
///
/// # Errors
///
/// Returns an error if the template fails to render with any of the samples.
pub fn $name pub fn $name
$(< $( $lt $( : $clt $(+ $dlt )* + TemplateContext )? ),+ >)? $(< $( $lt $( : $clt $(+ $dlt )* + TemplateContext )? ),+ >)?
(templates: &Templates, now: chrono::DateTime<chrono::Utc>, rng: &mut impl rand::Rng) (templates: &Templates, now: chrono::DateTime<chrono::Utc>, rng: &mut impl rand::Rng)
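Because these render methods are expanded from a macro, the `# Errors` sections have to be emitted inside the macro body for clippy::missing_errors_doc to be satisfied in every expansion. A heavily reduced sketch of the technique; the macro, method names, and error type below are invented for illustration:

macro_rules! register_renderers {
    ($($name:ident => $template:literal),* $(,)?) => {
        pub struct Renderer;

        impl Renderer {
            $(
                #[doc = concat!("Render the `", $template, "` template")]
                ///
                /// # Errors
                ///
                /// Returns an error if the template fails to render.
                pub fn $name(&self, name: &str) -> Result<String, String> {
                    if name.is_empty() {
                        Err(format!("empty context for {}", $template))
                    } else {
                        Ok(format!("{}: hello {name}", $template))
                    }
                }
            )*
        }
    };
}

register_renderers! {
    render_login => "login.html",
    render_index => "index.html",
}

fn main() {
    let r = Renderer;
    assert!(r.render_login("alice").is_ok());
    assert!(r.render_index("").is_err());
}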
View File
@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true homepage.workspace = true
repository.workspace = true repository.workspace = true
[lints]
workspace = true
[dependencies] [dependencies]
http.workspace = true http.workspace = true
tracing.workspace = true tracing.workspace = true
View File
@@ -12,8 +12,6 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#![deny(clippy::all)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)] #![allow(clippy::module_name_repetitions)]
mod metrics; mod metrics;