mirror of https://github.com/matrix-org/matrix-authentication-service.git synced 2025-08-06 06:02:40 +03:00

Enable clippy lints on a workspace level

This enables a lot more lints than before in some crates, so it also fixes a lot of warnings.
Quentin Gliech
2023-12-05 16:45:40 +01:00
parent df3ca5ae66
commit a0f5f3c642
88 changed files with 567 additions and 236 deletions
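For context, this relies on Cargo's workspace lint inheritance (stabilised in Rust 1.74): lint levels are declared once under [workspace.lints.*] in the root Cargo.toml, and each member crate opts in with a two-line [lints] table, which is what allows the per-crate #![deny(...)]/#![warn(...)]/#![allow(...)] attributes to be dropped throughout the diff below. A minimal sketch of the pattern, using an illustrative member path that is not taken from this diff:

# root Cargo.toml
[workspace.lints.rust]
unsafe_code = "forbid"

[workspace.lints.clippy]
all = "deny"
pedantic = "warn"
str_to_string = "deny"

# crates/example/Cargo.toml
[lints]
workspace = true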

View File

@@ -12,6 +12,18 @@ package.edition = "2021"
package.homepage = "https://matrix-org.github.io/matrix-authentication-service/"
package.repository = "https://github.com/matrix-org/matrix-authentication-service/"
[workspace.lints.rust]
unsafe_code = "forbid"
[workspace.lints.clippy]
all = "deny"
pedantic = "warn"
str_to_string = "deny"
[workspace.lints.rustdoc]
broken_intra_doc_links = "deny"
[workspace.dependencies]
# High-level error handling

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
axum = { version = "0.6.20", features = ["headers"] }

View File

@@ -84,6 +84,12 @@ impl Credentials {
}
}
/// Fetch the client from the database
///
/// # Errors
///
/// Returns an error if the client could not be found or if the underlying
/// repository errored.
pub async fn fetch<E>(
&self,
repo: &mut impl RepositoryAccess<Error = E>,
@@ -98,6 +104,11 @@ impl Credentials {
repo.oauth2_client().find_by_client_id(client_id).await
}
/// Verify credentials presented by the client for authentication
///
/// # Errors
///
/// Returns an error if the credentials are invalid.
#[tracing::instrument(skip_all, err)]
pub async fn verify(
&self,
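Most of the documentation added in this commit follows from the pedantic clippy::missing_errors_doc lint, which several of these crates previously allowed (the allow is removed from their lib.rs further down) and which expects every public function returning a Result to carry an `# Errors` section like the ones above. A minimal sketch of the convention, with an illustrative function that is not part of this diff:

/// Parse a TCP port number from a string.
///
/// # Errors
///
/// Returns an error if the string is not a valid `u16`.
pub fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
    s.parse()
}

fn main() {
    assert!(parse_port("8080").is_ok());
    assert!(parse_port("not-a-port").is_err());
}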

View File

@@ -146,6 +146,13 @@ impl CookieJar {
self
}
/// Load and deserialize a cookie from the jar
///
/// Returns `None` if the cookie is not present
///
/// # Errors
///
/// Returns an error if the cookie cannot be deserialized
pub fn load<T: DeserializeOwned>(&self, key: &str) -> Result<Option<T>, CookieDecodeError> {
let Some(cookie) = self.inner.get(key) else {
return Ok(None);

View File

@@ -80,6 +80,10 @@ impl CsrfToken {
}
/// Verifies that the value got from an HTML form matches this token
///
/// # Errors
///
/// Returns an error if the value in the form does not match this token
pub fn verify_form_value(&self, form_value: &str) -> Result<(), CsrfError> {
let form_value = BASE64URL_NOPAD.decode(form_value.as_bytes())?;
if self.token[..] == form_value {
@@ -108,10 +112,20 @@ pub struct ProtectedForm<T> {
}
pub trait CsrfExt {
/// Get the current CSRF token out of the cookie jar, generating a new one
/// if necessary
fn csrf_token<C, R>(self, clock: &C, rng: R) -> (CsrfToken, Self)
where
R: RngCore,
C: Clock;
/// Verify that the given CSRF-protected form is valid, returning the inner
/// value
///
/// # Errors
///
/// Returns an error if the CSRF cookie is missing or if the value in the
/// form is invalid
fn verify_form<C, T>(&self, clock: &C, form: ProtectedForm<T>) -> Result<T, CsrfError>
where
C: Clock;

View File

@@ -29,6 +29,12 @@ pub struct HttpClientFactory {
}
impl HttpClientFactory {
/// Constructs a new HTTP client factory
///
/// # Errors
///
/// Returns an error if the client factory failed to initialise, which can
/// happen when it fails to load the system's CA certificates.
pub async fn new() -> Result<Self, ClientInitError> {
Ok(Self {
traced_connector: make_traced_connector().await?,
@@ -37,10 +43,6 @@ impl HttpClientFactory {
}
/// Constructs a new HTTP client
///
/// # Errors
///
/// Returns an error if the client failed to initialise
pub fn client<B>(&self, category: &'static str) -> ClientService<TracedClient<B>>
where
B: axum::body::HttpBody + Send,
@@ -54,10 +56,6 @@ impl HttpClientFactory {
}
/// Constructs a new [`HttpService`], suitable for `mas-oidc-client`
///
/// # Errors
///
/// Returns an error if the client failed to initialise
pub fn http_service(&self, category: &'static str) -> HttpService {
let client = self.client(category);
let client = (

View File

@@ -12,15 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions, clippy::missing_errors_doc)]
#![deny(clippy::future_not_send)]
#![allow(clippy::module_name_repetitions)]
pub mod client_authorization;
pub mod cookies;

View File

@@ -42,6 +42,11 @@ impl SessionInfo {
}
/// Load the [`BrowserSession`] from database
///
/// # Errors
///
/// Returns an error if the session is not found or if the session is not
/// active anymore
pub async fn load_session<E>(
&self,
repo: &mut impl RepositoryAccess<Error = E>,

View File

@@ -84,6 +84,13 @@ pub struct UserAuthorization<F = ()> {
impl<F: Send> UserAuthorization<F> {
// TODO: take scopes to validate as parameter
/// Verify a user authorization and return the session and the protected
/// form value
///
/// # Errors
///
/// Returns an error if the token is invalid, if the user session ended or
/// if the form is missing
pub async fn protected_form<E>(
self,
repo: &mut impl RepositoryAccess<Error = E>,
@@ -103,6 +110,11 @@ impl<F: Send> UserAuthorization<F> {
}
// TODO: take scopes to validate as parameter
/// Verify a user authorization and return the session
///
/// # Errors
///
/// Returns an error if the token is invalid or if the user session ended
pub async fn protected<E>(
self,
repo: &mut impl RepositoryAccess<Error = E>,

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
axum = "0.6.20"

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)]
use std::{io::IsTerminal, sync::Arc};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
tokio = { version = "1.34.0", features = ["fs", "rt"] }
tracing.workspace = true
@@ -44,3 +47,4 @@ dist = []
[[bin]]
name = "schema"
doc = false

View File

@@ -12,15 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
missing_docs,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs, rustdoc::missing_crate_level_docs)]
#![allow(clippy::module_name_repetitions)]
// derive(JSONSchema) uses &str.to_string()
#![allow(clippy::str_to_string)]
//! Application configuration logic

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
chrono.workspace = true
thiserror.workspace = true

View File

@@ -40,7 +40,11 @@ impl Device {
#[must_use]
pub fn to_scope_token(&self) -> ScopeToken {
// SAFETY: the inner id should only have valid scope characters
format!("{DEVICE_SCOPE_PREFIX}{}", self.id).parse().unwrap()
let Ok(scope_token) = format!("{DEVICE_SCOPE_PREFIX}{}", self.id).parse() else {
unreachable!()
};
scope_token
}
/// Get the corresponding [`Device`] from a [`ScopeToken`]

View File

@@ -72,6 +72,11 @@ impl CompatRefreshTokenState {
matches!(self, Self::Consumed { .. })
}
/// Consume the refresh token, returning a new state.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Valid => Ok(Self::Consumed { consumed_at }),
@@ -99,6 +104,11 @@ impl std::ops::Deref for CompatRefreshToken {
}
impl CompatRefreshToken {
/// Consume the refresh token and return the consumed token.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)

View File

@@ -36,7 +36,7 @@ pub enum CompatSsoLoginState {
}
impl CompatSsoLoginState {
/// Returns `true` if the compat sso login state is [`Pending`].
/// Returns `true` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
@@ -44,7 +44,7 @@ impl CompatSsoLoginState {
matches!(self, Self::Pending)
}
/// Returns `true` if the compat sso login state is [`Fulfilled`].
/// Returns `true` if the compat SSO login state is [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
#[must_use]
@@ -52,7 +52,7 @@ impl CompatSsoLoginState {
matches!(self, Self::Fulfilled { .. })
}
/// Returns `true` if the compat sso login state is [`Exchanged`].
/// Returns `true` if the compat SSO login state is [`Exchanged`].
///
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
#[must_use]
@@ -60,6 +60,11 @@ impl CompatSsoLoginState {
matches!(self, Self::Exchanged { .. })
}
/// Get the time at which the login was fulfilled.
///
/// Returns `None` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
pub fn fulfilled_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -70,6 +75,11 @@ impl CompatSsoLoginState {
}
}
/// Get the time at which the login was exchanged.
///
/// Returns `None` if the compat SSO login state is not [`Exchanged`].
///
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
#[must_use]
pub fn exchanged_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -78,6 +88,11 @@ impl CompatSsoLoginState {
}
}
/// Get the session ID associated with the login.
///
/// Returns `None` if the compat SSO login state is [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
#[must_use]
pub fn session_id(&self) -> Option<Ulid> {
match self {
@@ -88,6 +103,14 @@ impl CompatSsoLoginState {
}
}
/// Transition the compat SSO login state from [`Pending`] to [`Fulfilled`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
pub fn fulfill(
self,
fulfilled_at: DateTime<Utc>,
@@ -102,6 +125,15 @@ impl CompatSsoLoginState {
}
}
/// Transition the compat SSO login state from [`Fulfilled`] to
/// [`Exchanged`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
pub fn exchange(self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Fulfilled {
@@ -135,6 +167,15 @@ impl std::ops::Deref for CompatSsoLogin {
}
impl CompatSsoLogin {
/// Transition the compat SSO login from a [`Pending`] state to
/// [`Fulfilled`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Pending`].
///
/// [`Pending`]: CompatSsoLoginState::Pending
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
pub fn fulfill(
mut self,
fulfilled_at: DateTime<Utc>,
@@ -144,6 +185,15 @@ impl CompatSsoLogin {
Ok(self)
}
/// Transition the compat SSO login from a [`Fulfilled`] state to
/// [`Exchanged`].
///
/// # Errors
///
/// Returns an error if the compat SSO login state is not [`Fulfilled`].
///
/// [`Fulfilled`]: CompatSsoLoginState::Fulfilled
/// [`Exchanged`]: CompatSsoLoginState::Exchanged
pub fn exchange(mut self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.exchange(exchanged_at)?;
Ok(self)

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::missing_panics_doc,
clippy::missing_errors_doc
)]
#![allow(clippy::module_name_repetitions)]
use thiserror::Error;
@@ -29,6 +22,7 @@ pub(crate) mod tokens;
pub(crate) mod upstream_oauth2;
pub(crate) mod users;
/// Error when an invalid state transition is attempted.
#[derive(Debug, Error)]
#[error("invalid state transition")]
pub struct InvalidTransitionError;

View File

@@ -39,6 +39,7 @@ pub struct Pkce {
}
impl Pkce {
/// Create a new PKCE challenge, with the given method and challenge.
#[must_use]
pub fn new(challenge_method: PkceCodeChallengeMethod, challenge: String) -> Self {
Pkce {
@@ -47,6 +48,11 @@ impl Pkce {
}
}
/// Verify the PKCE challenge.
///
/// # Errors
///
/// Returns an error if the verifier is invalid.
pub fn verify(&self, verifier: &str) -> Result<(), CodeChallengeError> {
self.challenge_method.verify(&self.challenge, verifier)
}
@@ -176,11 +182,25 @@ impl AuthorizationGrant {
self.created_at - Duration::seconds(max_age.unwrap_or(3600 * 24 * 365))
}
/// Mark the authorization grant as exchanged.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Fulfilled`].
///
/// [`Fulfilled`]: AuthorizationGrantStage::Fulfilled
pub fn exchange(mut self, exchanged_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.stage = self.stage.exchange(exchanged_at)?;
Ok(self)
}
/// Mark the authorization grant as fulfilled.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Pending`].
///
/// [`Pending`]: AuthorizationGrantStage::Pending
pub fn fulfill(
mut self,
fulfilled_at: DateTime<Utc>,
@@ -190,12 +210,23 @@ impl AuthorizationGrant {
Ok(self)
}
// TODO: this is not used?
/// Mark the authorization grant as cancelled.
///
/// # Errors
///
/// Returns an error if the authorization grant is not [`Pending`].
///
/// [`Pending`]: AuthorizationGrantStage::Pending
///
/// # TODO
///
/// This appears to be unused
pub fn cancel(mut self, canceld_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.stage = self.stage.cancel(canceld_at)?;
Ok(self)
}
#[doc(hidden)]
pub fn sample(now: DateTime<Utc>, rng: &mut impl RngCore) -> Self {
Self {
id: Ulid::from_datetime_with_source(now.into(), rng),

View File

@@ -112,6 +112,15 @@ pub enum InvalidRedirectUriError {
}
impl Client {
/// Determine which redirect URI to use for the given request.
///
/// # Errors
///
/// Returns an error if:
///
/// - no URL was given but multiple redirect URIs are registered,
/// - no URL was registered, or
/// - the given URL is not registered
pub fn resolve_redirect_uri<'a>(
&'a self,
redirect_uri: &'a Option<Url>,
@@ -125,6 +134,7 @@ impl Client {
}
}
#[doc(hidden)]
pub fn samples(now: DateTime<Utc>, rng: &mut impl RngCore) -> Vec<Client> {
vec![
// A client with all the URIs set

View File

@@ -121,6 +121,11 @@ pub enum RefreshTokenState {
}
impl RefreshTokenState {
/// Consume the refresh token, returning a new state.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Valid => Ok(Self::Consumed { consumed_at }),
@@ -169,6 +174,11 @@ impl RefreshToken {
self.id.to_string()
}
/// Consumes the refresh token and returns the consumed token.
///
/// # Errors
///
/// Returns an error if the refresh token is already consumed.
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)
@@ -266,6 +276,10 @@ impl TokenType {
/// Ok(TokenType::CompatAccessToken)
/// );
/// ```
///
/// # Errors
///
/// Returns an error if the token is not valid
pub fn check(token: &str) -> Result<TokenType, TokenFormatError> {
// these are legacy tokens imported from Synapse
// we don't do any validation on them and continue as is

View File

@@ -37,6 +37,14 @@ pub enum UpstreamOAuthAuthorizationSessionState {
}
impl UpstreamOAuthAuthorizationSessionState {
/// Mark the upstream OAuth 2.0 authorization session as completed.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
pub fn complete(
self,
completed_at: DateTime<Utc>,
@@ -53,6 +61,14 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Mark the upstream OAuth 2.0 authorization session as consumed.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
pub fn consume(self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
match self {
Self::Completed {
@@ -69,6 +85,12 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the link ID for the upstream OAuth 2.0 authorization session.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn link_id(&self) -> Option<Ulid> {
match self {
@@ -77,6 +99,13 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the time at which the upstream OAuth 2.0 authorization session was
/// completed.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn completed_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -87,6 +116,12 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the ID token for the upstream OAuth 2.0 authorization session.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
#[must_use]
pub fn id_token(&self) -> Option<&str> {
match self {
@@ -97,6 +132,13 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Get the time at which the upstream OAuth 2.0 authorization session was
/// consumed.
///
/// Returns `None` if the upstream OAuth 2.0 authorization session state is
/// not [`Consumed`].
///
/// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed
#[must_use]
pub fn consumed_at(&self) -> Option<DateTime<Utc>> {
match self {
@@ -105,7 +147,7 @@ impl UpstreamOAuthAuthorizationSessionState {
}
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
@@ -114,7 +156,7 @@ impl UpstreamOAuthAuthorizationSessionState {
matches!(self, Self::Pending)
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
@@ -123,7 +165,7 @@ impl UpstreamOAuthAuthorizationSessionState {
matches!(self, Self::Completed { .. })
}
/// Returns `true` if the upstream oauth authorization session state is
/// Returns `true` if the upstream OAuth 2.0 authorization session state is
/// [`Consumed`].
///
/// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed
@@ -153,6 +195,15 @@ impl std::ops::Deref for UpstreamOAuthAuthorizationSession {
}
impl UpstreamOAuthAuthorizationSession {
/// Mark the upstream OAuth 2.0 authorization session as completed. Returns
/// the updated session.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Pending`].
///
/// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending
pub fn complete(
mut self,
completed_at: DateTime<Utc>,
@@ -163,6 +214,15 @@ impl UpstreamOAuthAuthorizationSession {
Ok(self)
}
/// Mark the upstream OAuth 2.0 authorization session as consumed. Returns
/// the updated session.
///
/// # Errors
///
/// Returns an error if the upstream OAuth 2.0 authorization session state
/// is not [`Completed`].
///
/// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed
pub fn consume(mut self, consumed_at: DateTime<Utc>) -> Result<Self, InvalidTransitionError> {
self.state = self.state.consume(consumed_at)?;
Ok(self)

View File

@@ -39,6 +39,7 @@ impl User {
}
impl User {
#[doc(hidden)]
#[must_use]
pub fn samples(now: chrono::DateTime<Utc>, rng: &mut impl Rng) -> Vec<Self> {
vec![User {
@@ -175,6 +176,7 @@ impl Deref for UserEmailVerification {
}
impl UserEmailVerification {
#[doc(hidden)]
#[must_use]
pub fn samples(now: chrono::DateTime<Utc>, rng: &mut impl Rng) -> Vec<Self> {
let states = [
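The #[must_use] attributes added here and in the hunks below are prompted by the pedantic clippy::must_use_candidate lint, which suggests the attribute on side-effect-free methods whose return value would otherwise be easy to drop silently. A small illustration, not taken from the diff:

#[must_use]
fn doubled(x: u32) -> u32 {
    x * 2
}

fn main() {
    doubled(21); // warns: unused return value of `doubled` that must be used
    let answer = doubled(21); // binding the result silences the warning
    assert_eq!(answer, 42);
}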

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
tracing.workspace = true

View File

@@ -14,14 +14,7 @@
//! Helps sending emails to users, with different email backends
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
mod mailer;
mod transport;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
async-graphql = { version = "6.0.11", features = ["chrono", "url"] }
@@ -29,3 +32,4 @@ mas-storage = { path = "../storage" }
[[bin]]
name = "schema"
doc = false

View File

@@ -12,19 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::missing_errors_doc,
clippy::unused_async
)]
#![deny(clippy::future_not_send)]
#![allow(clippy::module_name_repetitions, clippy::unused_async)]
use async_graphql::EmptySubscription;
use mas_data_model::{BrowserSession, Session, User};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
# Async runtime
tokio = { version = "1.34.0", features = ["macros"] }

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
clippy::future_not_send
)]
#![warn(clippy::pedantic)]
#![deny(clippy::future_not_send)]
#![allow(
// Some axum handlers need that
clippy::unused_async,

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
axum = { version = "0.6.20", optional = true }
bytes = "1.5.0"

View File

@@ -14,14 +14,7 @@
//! [`tower`] layers and services to help building HTTP client and servers
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(rustdoc::missing_crate_level_docs)]
#![allow(clippy::module_name_repetitions)]
#[cfg(feature = "client")]

View File

@@ -136,7 +136,7 @@ async fn test_urlencoded_request_body() {
}
let bytes = hyper::body::to_bytes(request.into_body()).await?;
assert_eq!(bytes.to_vec(), br#"hello=world"#.to_vec());
assert_eq!(bytes.to_vec(), br"hello=world".to_vec());
let res = Response::new(hyper::Body::empty());
Ok(res)

View File

@@ -7,6 +7,9 @@ edition.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
camino.workspace = true
clap.workspace = true
@@ -16,4 +19,4 @@ tracing-subscriber.workspace = true
tracing.workspace = true
walkdir = "2.4.0"
mas-i18n = { path = "../i18n" }

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![deny(clippy::all)]
#![warn(clippy::pedantic)]
use std::fs::File;
use camino::Utf8PathBuf;

View File

@@ -7,6 +7,9 @@ edition.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
camino.workspace = true
icu_list = { version = "1.4.0", features = ["compiled_data", "std"] }

View File

@@ -12,9 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![warn(clippy::pedantic)]
#![deny(clippy::all)]
pub mod sprintf;
pub mod translations;
mod translator;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
async-trait = "0.1.74"

View File

@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::{collections::HashMap, fmt::Display, sync::Arc};
use camino::{Utf8Path, Utf8PathBuf};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
serde = { workspace = true, optional = true }
schemars = { version = "0.8.16", default-features = false, optional = true }

View File

@@ -14,14 +14,7 @@
//! Values from IANA registries, generated by the `mas-iana-codegen` crate
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
#![allow(clippy::module_name_repetitions)]
pub mod jose;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
base64ct = { version = "1.6.0", features = ["std"] }
chrono.workspace = true

View File

@@ -56,6 +56,10 @@ impl<C: Encoding> Base64<C> {
}
/// Parse some base64-encoded data to create a `Base64` instance.
///
/// # Errors
///
/// Returns an error if the input is not valid base64.
pub fn parse(encoded: &str) -> Result<Self, base64ct::Error> {
C::decode_vec(encoded).map(Self::new)
}

View File

@@ -37,7 +37,14 @@ pub enum ClaimError {
}
pub trait Validator<T> {
/// The associated error type returned by this validator.
type Error;
/// Validate a claim value
///
/// # Errors
///
/// Returns an error if the value is invalid.
fn validate(&self, value: &T) -> Result<(), Self::Error>;
}
@@ -68,6 +75,11 @@ where
}
}
/// Insert a claim into the given claims map.
///
/// # Errors
///
/// Returns an error if the value failed to serialize.
pub fn insert<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -85,6 +97,12 @@ where
Ok(())
}
/// Extract a claim from the given claims map.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize, if its value is
/// invalid or if the claim is missing.
pub fn extract_required(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -98,6 +116,12 @@ where
self.extract_required_with_options(claims, validator)
}
/// Extract a claim from the given claims map, with the given options.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize, if its value is
/// invalid or if the claim is missing.
pub fn extract_required_with_options<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -124,6 +148,12 @@ where
Ok(res)
}
/// Extract a claim from the given claims map, if it exists.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize or if its value is
/// invalid.
pub fn extract_optional(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -137,6 +167,13 @@ where
self.extract_optional_with_options(claims, validator)
}
/// Extract a claim from the given claims map, if it exists, with the given
/// options.
///
/// # Errors
///
/// Returns an error if the value failed to deserialize or if its value is
/// invalid.
pub fn extract_optional_with_options<I>(
&self,
claims: &mut HashMap<String, serde_json::Value>,
@@ -238,7 +275,7 @@ impl From<&TimeOptions> for TimeNotBefore {
///
/// According to the [OpenID Connect Core 1.0 specification].
///
/// # Errors
///
/// Returns an error if the algorithm is not supported.
///

View File

@@ -57,21 +57,29 @@ pub enum AsymmetricSigningKey {
}
impl AsymmetricSigningKey {
/// Create a new signing key with the RS256 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs256(key: rsa::RsaPrivateKey) -> Self {
Self::Rs256(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the RS384 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs384(key: rsa::RsaPrivateKey) -> Self {
Self::Rs384(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the RS512 algorithm from the given RSA
/// private key.
#[must_use]
pub fn rs512(key: rsa::RsaPrivateKey) -> Self {
Self::Rs512(rsa::pkcs1v15::SigningKey::new(key))
}
/// Create a new signing key with the PS256 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps256(key: rsa::RsaPrivateKey) -> Self {
Self::Ps256(rsa::pss::SigningKey::new_with_salt_len(
@@ -80,6 +88,8 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the PS384 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps384(key: rsa::RsaPrivateKey) -> Self {
Self::Ps384(rsa::pss::SigningKey::new_with_salt_len(
@@ -88,6 +98,8 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the PS512 algorithm from the given RSA
/// private key.
#[must_use]
pub fn ps512(key: rsa::RsaPrivateKey) -> Self {
Self::Ps512(rsa::pss::SigningKey::new_with_salt_len(
@@ -96,21 +108,34 @@ impl AsymmetricSigningKey {
))
}
/// Create a new signing key with the ES256 algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es256(key: elliptic_curve::SecretKey<p256::NistP256>) -> Self {
Self::Es256(ecdsa::SigningKey::from(key))
}
/// Create a new signing key with the ES384 algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es384(key: elliptic_curve::SecretKey<p384::NistP384>) -> Self {
Self::Es384(ecdsa::SigningKey::from(key))
}
/// Create a new signing key with the ES256K algorithm from the given ECDSA
/// private key.
#[must_use]
pub fn es256k(key: elliptic_curve::SecretKey<k256::Secp256k1>) -> Self {
Self::Es256K(ecdsa::SigningKey::from(key))
}
/// Create a new signing key for the given algorithm from the given private
/// JWK parameters.
///
/// # Errors
///
/// Returns an error if the key parameters are not suitable for the given
/// algorithm.
pub fn from_jwk_and_alg(
params: &JsonWebKeyPrivateParameters,
alg: &JsonWebSignatureAlg,
@@ -275,51 +300,76 @@ pub enum AsymmetricVerifyingKey {
}
impl AsymmetricVerifyingKey {
/// Create a new verifying key with the RS256 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs256(key: rsa::RsaPublicKey) -> Self {
Self::Rs256(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the RS384 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs384(key: rsa::RsaPublicKey) -> Self {
Self::Rs384(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the RS512 algorithm from the given RSA
/// public key.
#[must_use]
pub fn rs512(key: rsa::RsaPublicKey) -> Self {
Self::Rs512(rsa::pkcs1v15::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS256 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps256(key: rsa::RsaPublicKey) -> Self {
Self::Ps256(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS384 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps384(key: rsa::RsaPublicKey) -> Self {
Self::Ps384(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the PS512 algorithm from the given RSA
/// public key.
#[must_use]
pub fn ps512(key: rsa::RsaPublicKey) -> Self {
Self::Ps512(rsa::pss::VerifyingKey::new(key))
}
/// Create a new verifying key with the ES256 algorithm from the given ECDSA
/// public key.
#[must_use]
pub fn es256(key: elliptic_curve::PublicKey<p256::NistP256>) -> Self {
Self::Es256(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key with the ES384 algorithm from the given ECDSA
/// public key.
#[must_use]
pub fn es384(key: elliptic_curve::PublicKey<p384::NistP384>) -> Self {
Self::Es384(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key with the ES256K algorithm from the given
/// ECDSA public key.
#[must_use]
pub fn es256k(key: elliptic_curve::PublicKey<k256::Secp256k1>) -> Self {
Self::Es256K(ecdsa::VerifyingKey::from(key))
}
/// Create a new verifying key for the given algorithm from the given public
/// JWK parameters.
///
/// # Errors
///
/// Returns an error if the key parameters are not suitable for the given
/// algorithm.
pub fn from_jwk_and_alg(
params: &JsonWebKeyPublicParameters,
alg: &JsonWebSignatureAlg,

View File

@@ -33,6 +33,11 @@ pub struct InvalidAlgorithm {
}
impl SymmetricKey {
/// Create a new symmetric key for the given algorithm with the given key.
///
/// # Errors
///
/// Returns an error if the algorithm is not supported.
pub fn new_for_alg(key: Vec<u8>, alg: &JsonWebSignatureAlg) -> Result<Self, InvalidAlgorithm> {
match alg {
JsonWebSignatureAlg::Hs256 => Ok(Self::hs256(key)),
@@ -45,16 +50,19 @@ impl SymmetricKey {
}
}
/// Create a new symmetric key using the HS256 algorithm with the given key.
#[must_use]
pub const fn hs256(key: Vec<u8>) -> Self {
Self::Hs256(super::Hs256Key::new(key))
}
/// Create a new symmetric key using the HS384 algorithm with the given key.
#[must_use]
pub const fn hs384(key: Vec<u8>) -> Self {
Self::Hs384(super::Hs384Key::new(key))
}
/// Create a new symmetric key using the HS512 algorithm with the given key.
#[must_use]
pub const fn hs512(key: Vec<u8>) -> Self {
Self::Hs512(super::Hs512Key::new(key))

View File

@@ -106,6 +106,7 @@ impl TryFrom<PrivateJsonWebKey> for PublicJsonWebKey {
}
impl<P> JsonWebKey<P> {
/// Create a new [`JsonWebKey`] with the given parameters.
#[must_use]
pub const fn new(parameters: P) -> Self {
Self {
@@ -121,6 +122,12 @@ impl<P> JsonWebKey<P> {
}
}
/// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible
/// mapper, consuming the original key.
///
/// # Errors
///
/// Returns an error if the mapper returns an error.
pub fn try_map<M, O, E>(self, mapper: M) -> Result<JsonWebKey<O>, E>
where
M: FnOnce(P) -> Result<O, E>,
@@ -138,6 +145,8 @@ impl<P> JsonWebKey<P> {
})
}
/// Map the parameters of this [`JsonWebKey`] to a new type, consuming the
/// original key.
pub fn map<M, O>(self, mapper: M) -> JsonWebKey<O>
where
M: FnOnce(P) -> O,
@@ -155,6 +164,12 @@ impl<P> JsonWebKey<P> {
}
}
/// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible
/// mapper, cloning the other fields.
///
/// # Errors
///
/// Returns an error if the mapper returns an error.
pub fn try_cloned_map<M, O, E>(&self, mapper: M) -> Result<JsonWebKey<O>, E>
where
M: FnOnce(&P) -> Result<O, E>,
@@ -172,6 +187,8 @@ impl<P> JsonWebKey<P> {
})
}
/// Map the parameters of this [`JsonWebKey`] to a new type, cloning the
/// other fields.
pub fn cloned_map<M, O>(&self, mapper: M) -> JsonWebKey<O>
where
M: FnOnce(&P) -> O,
@@ -189,35 +206,41 @@ impl<P> JsonWebKey<P> {
}
}
/// Set the `use` field of this [`JsonWebKey`].
#[must_use]
pub fn with_use(mut self, value: JsonWebKeyUse) -> Self {
self.r#use = Some(value);
self
}
/// Set the `key_ops` field of this [`JsonWebKey`].
#[must_use]
pub fn with_key_ops(mut self, key_ops: Vec<JsonWebKeyOperation>) -> Self {
self.key_ops = Some(key_ops);
self
}
/// Set the `alg` field of this [`JsonWebKey`].
#[must_use]
pub fn with_alg(mut self, alg: JsonWebSignatureAlg) -> Self {
self.alg = Some(alg);
self
}
/// Set the `kid` field of this [`JsonWebKey`].
#[must_use]
pub fn with_kid(mut self, kid: impl Into<String>) -> Self {
self.kid = Some(kid.into());
self
}
/// Get the `alg` field of this [`JsonWebKey`], if set.
#[must_use]
pub const fn alg(&self) -> Option<&JsonWebSignatureAlg> {
self.alg.as_ref()
}
/// Get the inner parameters of this [`JsonWebKey`].
#[must_use]
pub const fn params(&self) -> &P {
&self.parameters

View File

@@ -192,10 +192,12 @@ pub struct NoKeyWorked {
}
impl<'a, T> Jwt<'a, T> {
/// Get the JWT header
pub fn header(&self) -> &JsonWebSignatureHeader {
&self.header
}
/// Get the JWT payload
pub fn payload(&self) -> &T {
&self.payload
}
@@ -209,6 +211,11 @@ impl<'a, T> Jwt<'a, T> {
}
}
/// Verify the signature of this JWT using the given key.
///
/// # Errors
///
/// Returns an error if the signature is invalid.
pub fn verify<K, S>(&self, key: &K) -> Result<(), JwtVerificationError>
where
K: Verifier<S>,
@@ -221,6 +228,12 @@ impl<'a, T> Jwt<'a, T> {
.map_err(JwtVerificationError::verify)
}
/// Verify the signature of this JWT using the given symmetric key.
///
/// # Errors
///
/// Returns an error if the signature is invalid or if the algorithm is not
/// supported.
pub fn verify_with_shared_secret(&self, secret: Vec<u8>) -> Result<(), NoKeyWorked> {
let verifier = crate::jwa::SymmetricKey::new_for_alg(secret, self.header().alg())
.map_err(|_| NoKeyWorked::default())?;
@@ -230,6 +243,12 @@ impl<'a, T> Jwt<'a, T> {
Ok(())
}
/// Verify the signature of this JWT using the given JWKS.
///
/// # Errors
///
/// Returns an error if the signature is invalid, if no key matches the
/// constraints, or if the algorithm is not supported.
pub fn verify_with_jwks(&self, jwks: &PublicJsonWebKeySet) -> Result<(), NoKeyWorked> {
let constraints = ConstraintSet::from(self.header());
let candidates = constraints.filter(&**jwks);
@@ -250,14 +269,17 @@ impl<'a, T> Jwt<'a, T> {
Err(NoKeyWorked::default())
}
/// Get the raw JWT string as a borrowed [`str`]
pub fn as_str(&'a self) -> &'a str {
&self.raw
}
/// Get the raw JWT string as an owned [`String`]
pub fn into_string(self) -> String {
self.raw.into()
}
/// Split the JWT into its parts (header and payload).
pub fn into_parts(self) -> (JsonWebSignatureHeader, T) {
(self.header, self.payload)
}
@@ -295,6 +317,12 @@ impl JwtSignatureError {
}
impl<T> Jwt<'static, T> {
/// Sign the given payload with the given key.
///
/// # Errors
///
/// Returns an error if the payload could not be serialized or if the key
/// could not sign the payload.
pub fn sign<K, S>(
header: JsonWebSignatureHeader,
payload: T,
@@ -309,6 +337,12 @@ impl<T> Jwt<'static, T> {
Self::sign_with_rng(&mut thread_rng(), header, payload, key)
}
/// Sign the given payload with the given key using the given RNG.
///
/// # Errors
///
/// Returns an error if the payload could not be serialized or if the key
/// could not sign the payload.
pub fn sign_with_rng<R, K, S>(
rng: &mut R,
header: JsonWebSignatureHeader,

View File

@@ -12,10 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(clippy::missing_errors_doc, clippy::module_name_repetitions)]
#![deny(rustdoc::broken_intra_doc_links)]
#![allow(clippy::module_name_repetitions)]
mod base64;
pub mod claims;

View File

@@ -119,7 +119,7 @@ macro_rules! asymetric_jwt_test {
let mut rng = ChaCha8Rng::seed_from_u64(42);
let alg = JsonWebSignatureAlg::$alg;
let payload = Payload {
hello: "world".to_string(),
hello: "world".to_owned(),
};
let header = JsonWebSignatureHeader::new(alg.clone());
@@ -137,7 +137,7 @@ macro_rules! asymetric_jwt_test {
fn sign_and_verify_jwt() {
let alg = JsonWebSignatureAlg::$alg;
let payload = Payload {
hello: "world".to_string(),
hello: "world".to_owned(),
};
let header = JsonWebSignatureHeader::new(alg.clone());
@@ -192,7 +192,7 @@ macro_rules! symetric_jwt_test {
fn sign_and_verify_jwt() {
let alg = JsonWebSignatureAlg::$alg;
let payload = Payload {
hello: "world".to_string(),
hello: "world".to_owned(),
};
let header = JsonWebSignatureHeader::new(alg.clone());
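The .to_string() → .to_owned() swaps in these tests come from the clippy::str_to_string restriction lint, now denied workspace-wide, which flags calling str::to_string() and suggests the more explicit to_owned() for turning a &str into a String. A tiny illustration, not taken from the diff:

fn main() {
    // With `str_to_string = "deny"`, `"hello".to_string()` on a &str is rejected;
    // `to_owned()` is the lint-clean equivalent.
    let greeting: String = "hello".to_owned();
    assert_eq!(greeting, "hello");
}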

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
aead = { version = "0.5.2", features = ["std"] }
const-oid = { version = "0.9.5", features = ["std"] }

View File

@@ -14,10 +14,6 @@
//! A crate to store keys which can then be used to sign and verify JWTs.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::{ops::Deref, sync::Arc};
use der::{zeroize::Zeroizing, Decode, Encode, EncodePem};

View File

@@ -159,6 +159,7 @@ fn load_unencrypted_as_encrypted_error() {
.is_unencrypted());
}
#[allow(clippy::similar_names)]
#[test]
fn generate_sign_and_verify() {
// Use a seeded RNG to keep the snapshot stable

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
bytes = "1.5.0"
event-listener = "4.0.0"

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(rustdoc::missing_crate_level_docs)]
#![allow(clippy::module_name_repetitions)]
//! A utility crate to build flexible [`hyper`] listeners, with optional TLS

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
async-trait = "0.1.74"

View File

@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use http::{header::AUTHORIZATION, request::Builder, Method, Request, StatusCode};
use mas_axum_utils::http_client_factory::HttpClientFactory;
use mas_http::{EmptyBody, HttpServiceExt};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
serde.workspace = true

View File

@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
mod mock;
pub use self::mock::HomeserverConnection as MockHomeserverConnection;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
http.workspace = true
serde.workspace = true

View File

@@ -20,14 +20,7 @@
//! [OpenID Connect]: https://openid.net/connect/
//! [Matrix Authentication Service]: https://github.com/matrix-org/matrix-authentication-service
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
#![allow(clippy::module_name_repetitions)]
pub mod errors;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[features]
default = ["hyper", "keystore"]
hyper = [

View File

@@ -53,14 +53,7 @@
//! [MSC3861]: https://github.com/matrix-org/matrix-spec-proposals/pull/3861
//! [OAuth 2.0]: https://oauth.net/2/
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![deny(missing_docs)]
#![allow(clippy::module_name_repetitions, clippy::implicit_hasher)]
pub mod error;

View File

@@ -94,7 +94,7 @@ fn id_token(issuer: &str) -> (IdToken, PublicJsonWebKeySet) {
let mut claims = HashMap::new();
let now = now();
claims::ISS.insert(&mut claims, issuer.to_string()).unwrap();
claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap();
claims::SUB
.insert(&mut claims, SUBJECT_IDENTIFIER.to_owned())
.unwrap();
@@ -128,7 +128,7 @@ fn id_token(issuer: &str) -> (IdToken, PublicJsonWebKeySet) {
/// Generate client credentials for the given authentication method.
fn client_credentials(
auth_method: OAuthClientAuthenticationMethod,
auth_method: &OAuthClientAuthenticationMethod,
issuer: &Url,
custom_signing: Option<Box<JwtSigningFn>>,
) -> ClientCredentials {

View File

@@ -149,7 +149,7 @@ fn pass_full_authorization_url() {
async fn pass_pushed_authorization_request() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let authorization_endpoint = issuer.join("authorize").unwrap();
let par_endpoint = issuer.join("par").unwrap();
let redirect_uri = Url::parse(REDIRECT_URI).unwrap();
@@ -225,7 +225,7 @@ async fn pass_pushed_authorization_request() {
async fn fail_pushed_authorization_request_404() {
let (http_service, _, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let authorization_endpoint = issuer.join("authorize").unwrap();
let par_endpoint = issuer.join("par").unwrap();
let redirect_uri = Url::parse(REDIRECT_URI).unwrap();
@@ -251,7 +251,7 @@ async fn fail_pushed_authorization_request_404() {
assert_matches!(
error,
AuthorizationError::PushedAuthorization(PushedAuthorizationError::Http(_))
)
);
}
/// Check if the given request to the token endpoint is valid.
@@ -303,7 +303,7 @@ fn is_valid_token_endpoint_request(req: &Request) -> bool {
async fn pass_access_token_with_authorization_code() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -362,7 +362,7 @@ async fn pass_access_token_with_authorization_code() {
async fn fail_access_token_with_authorization_code_wrong_nonce() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -424,7 +424,7 @@ async fn fail_access_token_with_authorization_code_wrong_nonce() {
async fn fail_access_token_with_authorization_code_no_id_token() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);

View File

@@ -32,7 +32,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, CLIENT_
async fn pass_access_token_with_client_credentials() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretPost,
&OAuthClientAuthenticationMethod::ClientSecretPost,
&issuer,
None,
);

View File

@@ -32,7 +32,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, SUBJECT
async fn pass_introspect_token() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let introspection_endpoint = issuer.join("introspect").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);

View File

@@ -49,7 +49,7 @@ fn id_token(
let mut claims = HashMap::new();
let now = now();
claims::ISS.insert(&mut claims, issuer.to_string()).unwrap();
claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap();
claims::AUD
.insert(&mut claims, CLIENT_ID.to_owned())
.unwrap();
@@ -246,5 +246,5 @@ async fn fail_verify_id_token_wrong_auth_time() {
)
.unwrap_err();
assert_matches!(error, IdTokenError::WrongAuthTime)
assert_matches!(error, IdTokenError::WrongAuthTime);
}

View File

@@ -30,7 +30,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, REFRESH
async fn pass_refresh_access_token() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);

View File

@@ -188,9 +188,8 @@ async fn pass_register_client_private_key_jwt() {
Mock::given(method("POST"))
.and(path("/register"))
.and(|req: &Request| {
let metadata = match req.body_json::<ClientMetadata>() {
Ok(body) => body,
Err(_) => return false,
let Ok(metadata) = req.body_json::<ClientMetadata>() else {
return false;
};
*metadata.token_endpoint_auth_method() == OAuthClientAuthenticationMethod::PrivateKeyJwt
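Rewrites like this one, where a match that only extracts a value or bails out is replaced by let ... else, are likely driven by the pedantic clippy::manual_let_else lint. A small self-contained illustration, not taken from the diff:

fn first_word(s: &str) -> &str {
    // `let ... else` binds on the happy path and must diverge otherwise.
    let Some(word) = s.split_whitespace().next() else {
        return "";
    };
    word
}

fn main() {
    assert_eq!(first_word("hello world"), "hello");
    assert_eq!(first_word("   "), "");
}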

View File

@@ -28,7 +28,7 @@ use crate::{client_credentials, init_test, ACCESS_TOKEN, CLIENT_ID};
async fn pass_revoke_token() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let revocation_endpoint = issuer.join("revoke").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);

View File

@@ -41,7 +41,7 @@ use crate::{client_credentials, init_test, now, ACCESS_TOKEN, CLIENT_ID, CLIENT_
async fn pass_none() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials =
client_credentials(OAuthClientAuthenticationMethod::None, &issuer, None);
client_credentials(&OAuthClientAuthenticationMethod::None, &issuer, None);
let token_endpoint = issuer.join("token").unwrap();
let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42);
@@ -90,7 +90,7 @@ async fn pass_none() {
async fn pass_client_secret_basic() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretBasic,
&OAuthClientAuthenticationMethod::ClientSecretBasic,
&issuer,
None,
);
@@ -135,7 +135,7 @@ async fn pass_client_secret_basic() {
async fn pass_client_secret_post() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretPost,
&OAuthClientAuthenticationMethod::ClientSecretPost,
&issuer,
None,
);
@@ -195,7 +195,7 @@ async fn pass_client_secret_post() {
async fn pass_client_secret_jwt() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::ClientSecretJwt,
&OAuthClientAuthenticationMethod::ClientSecretJwt,
&issuer,
None,
);
@@ -225,12 +225,9 @@ async fn pass_client_secret_jwt() {
return false;
}
let jwt = match query_pairs.get("client_assertion") {
Some(jwt) => jwt,
None => {
println!("Missing client assertion");
return false;
}
let Some(jwt) = query_pairs.get("client_assertion") else {
println!("Missing client assertion");
return false;
};
let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap();
@@ -279,7 +276,7 @@ async fn pass_client_secret_jwt() {
async fn pass_private_key_jwt_with_keystore() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt,
&OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer,
None,
);
@@ -319,12 +316,9 @@ async fn pass_private_key_jwt_with_keystore() {
return false;
}
let jwt = match query_pairs.get("client_assertion") {
Some(jwt) => jwt,
None => {
println!("Missing client assertion");
return false;
}
let Some(jwt) = query_pairs.get("client_assertion") else {
println!("Missing client assertion");
return false;
};
let jwt = Jwt::<HashMap<String, Value>>::try_from(jwt.as_ref()).unwrap();
@@ -370,7 +364,7 @@ async fn pass_private_key_jwt_with_keystore() {
async fn pass_private_key_jwt_with_custom_signing() {
let (http_service, mock_server, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt,
&OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer,
Some(Box::new(|_claims, _alg| Ok("fake.signed.jwt".to_owned()))),
);
@@ -439,7 +433,7 @@ async fn pass_private_key_jwt_with_custom_signing() {
async fn fail_private_key_jwt_with_custom_signing() {
let (http_service, _, issuer) = init_test().await;
let client_credentials = client_credentials(
OAuthClientAuthenticationMethod::PrivateKeyJwt,
&OAuthClientAuthenticationMethod::PrivateKeyJwt,
&issuer,
Some(Box::new(|_claims, _alg| Err("Something went wrong".into()))),
);

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
opa-wasm = { git = "https://github.com/matrix-org/rust-opa-wasm.git" }

View File

@@ -12,11 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
#![allow(clippy::missing_errors_doc)]
pub mod model;
use mas_data_model::{AuthorizationGrant, Client, User};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
axum = { version = "0.6.20", default-features = false }
serde.workspace = true

View File

@@ -12,14 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::pedantic,
clippy::str_to_string,
rustdoc::broken_intra_doc_links
)]
pub(crate) mod endpoints;
pub(crate) mod traits;
mod url_builder;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
serde.workspace = true
thiserror.workspace = true

View File

@@ -12,14 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
rustdoc::missing_crate_level_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![deny(rustdoc::missing_crate_level_docs)]
//! A crate to help serve single-page apps built by Vite.

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
sqlx = { version = "0.7.3", features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "json", "uuid", "ipnetwork"] }

View File

@@ -164,15 +164,7 @@
//! [`Ulid`]: ulid::Ulid
//! [`Uuid`]: uuid::Uuid
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
clippy::future_not_send,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![deny(clippy::future_not_send, missing_docs)]
#![allow(clippy::module_name_repetitions)]
use sqlx::migrate::Migrator;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
async-trait = "0.1.74"
chrono.workspace = true

View File

@@ -133,15 +133,7 @@
//! }
//! ```
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
clippy::future_not_send,
rustdoc::broken_intra_doc_links,
missing_docs
)]
#![warn(clippy::pedantic)]
#![deny(clippy::future_not_send, missing_docs)]
#![allow(clippy::module_name_repetitions)]
pub mod clock;

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
anyhow.workspace = true
apalis-core = { version = "0.4.7", features = ["extensions", "tokio-comp", "storage"] }

View File

@@ -12,10 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(clippy::all, clippy::str_to_string, rustdoc::broken_intra_doc_links)]
#![warn(clippy::pedantic)]
use std::sync::Arc;
use apalis_core::{executor::TokioExecutor, layers::extensions::Extension, monitor::Monitor};

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
arc-swap = "1.6.0"
tracing.workspace = true

View File

@@ -12,15 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![forbid(unsafe_code)]
#![deny(
clippy::all,
clippy::str_to_string,
missing_docs,
rustdoc::broken_intra_doc_links
)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions, clippy::missing_errors_doc)]
#![deny(missing_docs)]
#![allow(clippy::module_name_repetitions)]
//! Templates rendering
@@ -37,7 +30,7 @@ use rand::Rng;
use serde::Serialize;
use thiserror::Error;
use tokio::task::JoinError;
use tracing::{debug, info, warn};
use tracing::{debug, info};
use walkdir::DirEntry;
mod context;
@@ -377,6 +370,10 @@ register_templates! {
impl Templates {
/// Render all templates with the generated samples to check if they render
/// properly
///
/// # Errors
///
/// Returns an error if any of the templates fails to render
pub fn check_render(
&self,
now: chrono::DateTime<chrono::Utc>,

View File

@@ -54,6 +54,10 @@ macro_rules! register_templates {
impl Templates {
$(
$(#[$attr])?
///
/// # Errors
///
/// Returns an error if the template fails to render.
pub fn $name
$(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)?
(&self, context: &$param)
@@ -75,6 +79,10 @@ macro_rules! register_templates {
$(
#[doc = concat!("Render the `", $template, "` template with sample contexts")]
///
/// # Errors
///
/// Returns an error if the template fails to render with any of the samples.
pub fn $name
$(< $( $lt $( : $clt $(+ $dlt )* + TemplateContext )? ),+ >)?
(templates: &Templates, now: chrono::DateTime<chrono::Utc>, rng: &mut impl rand::Rng)

View File

@@ -7,6 +7,9 @@ license.workspace = true
homepage.workspace = true
repository.workspace = true
[lints]
workspace = true
[dependencies]
http.workspace = true
tracing.workspace = true

View File

@@ -12,8 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#![deny(clippy::all)]
#![warn(clippy::pedantic)]
#![allow(clippy::module_name_repetitions)]
mod metrics;