1
0
mirror of https://github.com/quay/quay.git synced 2025-04-18 10:44:06 +03:00

ui: add support for exposing quay UI as a dynamic plugin (PROJQUAY-3203) (#1799)

* ui: add support for exposing quay UI as a dynamic plugin (PROJQUAY-3203)

* Introduces a new SSO JWT based auth for client side Oauth
* Adds a new entrypoint component for the UI without topnav and sidenav for plugin
* Adds webpack config to build dynamic plugin
This commit is contained in:
Syed Ahmed 2023-04-20 19:05:07 -04:00 committed by GitHub
parent 0e3221e4f3
commit 2db3b186f9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
40 changed files with 2385 additions and 768 deletions

5
app.py
View File

@ -2,11 +2,8 @@ import hashlib
import json
import logging
import os
from functools import partial
from authlib.jose import JsonWebKey
from cryptography.hazmat.primitives import serialization
from flask import Flask, request, Request
from flask_login import LoginManager
from flask_mail import Mail
@ -18,7 +15,6 @@ import features
from _init import (
config_provider,
CONF_DIR,
IS_KUBERNETES,
IS_TESTING,
OVERRIDE_CONFIG_DIRECTORY,
@ -55,7 +51,6 @@ from oauth.loginmanager import OAuthLoginManager
from storage import Storage
from util.log import filter_logs
from util import get_app_url
from util.secscan.secscan_util import get_blob_download_uri_getter
from util.ipresolver import IPResolver
from util.saas.analytics import Analytics
from util.saas.exceptionlog import Sentry

View File

@ -24,6 +24,14 @@ def get_validated_oauth_token():
return context.authed_oauth_token if context else None
def get_sso_token():
    """Return the SSO token bound to the current request's auth context, or None."""
    ctx = get_authenticated_context()
    if not ctx:
        return None
    return ctx.sso_token
def set_authenticated_context(auth_context):
"""
Sets the auth context for the current request context to that given.

View File

@ -139,6 +139,7 @@ class ValidatedAuthContext(AuthContext):
robot=None,
appspecifictoken=None,
signed_data=None,
sso_token=None,
):
# Note: These field names *MUST* match the string values of the kinds defined in
# ContextEntityKind.
@ -148,6 +149,7 @@ class ValidatedAuthContext(AuthContext):
self.oauthtoken = oauthtoken
self.appspecifictoken = appspecifictoken
self.signed_data = signed_data
self.sso_token = sso_token
def tuple(self):
return list(vars(self).values())

View File

@ -8,6 +8,7 @@ from auth.basic import validate_basic_auth
from auth.oauth import validate_bearer_auth
from auth.cookie import validate_session_cookie
from auth.signedgrant import validate_signed_grant
from auth.validateresult import AuthKind
from util.http import abort
@ -57,6 +58,10 @@ def _auth_decorator(pass_result=False, handlers=None):
if result.error_message is not None:
# Log the failure.
authentication_count.labels(result.kind, False).inc()
# Do we only need to abort for JWT based errors?
if result.kind == AuthKind.ssojwt:
abort(401, message=result.error_message)
break
if pass_result:

View File

@ -1,10 +1,20 @@
import logging
from datetime import datetime
from jwt import ExpiredSignatureError, InvalidTokenError
from app import oauth_login, authentication, app, analytics
from auth.scopes import scopes_from_scope_string
from auth.validateresult import AuthKind, ValidateResult
from data import model
from oauth.login import OAuthLoginException
from oauth.login_utils import (
is_jwt,
get_sub_username_email_from_token,
_conduct_oauth_login,
get_jwt_issuer,
)
from oauth.oidc import PublicKeyLoadException
logger = logging.getLogger(__name__)
@ -27,6 +37,63 @@ def validate_bearer_auth(auth_header):
def validate_oauth_token(token):
    """Validate a bearer token, dispatching SSO JWTs and app OAuth tokens separately."""
    validator = validate_sso_oauth_token if is_jwt(token) else validate_app_oauth_token
    return validator(token)
def validate_sso_oauth_token(token):
    """
    Validates a client-side SSO JWT bearer token.

    Looks up the login service whose configured OIDC issuer matches the token's
    `iss` claim, decodes the token via that service, and conducts an OAuth login
    for the token's subject (binding or creating the user as needed).

    Returns a ValidateResult of kind `ssojwt` carrying either the logged-in user
    plus the raw token, or an error message describing the failure.
    """
    issuer = get_jwt_issuer(token)
    if not issuer:
        return ValidateResult(AuthKind.ssojwt, error_message="Token does not contain issuer")
    try:
        service = oauth_login.get_service_by_issuer(issuer)
        if not service:
            return ValidateResult(AuthKind.ssojwt, error_message=f"Issuer {issuer} not configured")
    except ConnectionError as e:
        # Issuer lookup may hit the OIDC discovery endpoint; surface a generic error.
        logger.exception(e)
        return ValidateResult(AuthKind.ssojwt, error_message="Unable to connect to auth server")
    try:
        # for client side oauth, the audience will be the client side oauth client
        options = {"verify_aud": False, "verify_nbf": False}
        if app.config.get("TESTING", False):
            # Test tokens are signed with a throwaway key; skip signature checks.
            options["verify_signature"] = False
        decoded_id_token = service.decode_user_jwt(token, options=options)
        sub, lusername, lemail = get_sub_username_email_from_token(
            decoded_id_token, None, service.config, False
        )
        # captcha_verified=True: there is no interactive captcha step for bearer auth.
        login_result = _conduct_oauth_login(
            config=app.config,
            analytics=analytics,
            auth_system=authentication,
            login_service=service,
            lid=sub,
            lusername=lusername,
            lemail=lemail,
            captcha_verified=True,
        )
        if login_result.error_message:
            logger.error(f"Error logging in {login_result.error_message}")
            return ValidateResult(AuthKind.ssojwt, error_message=login_result.error_message)
        # Successful login: attach both the user and the raw token to the context.
        return ValidateResult(AuthKind.ssojwt, user=login_result.user_obj, sso_token=token)
    except (
        OAuthLoginException,
        ExpiredSignatureError,
        InvalidTokenError,
        PublicKeyLoadException,
    ) as ole:
        logger.exception(ole)
        return ValidateResult(AuthKind.ssojwt, error_message=str(ole))
def validate_app_oauth_token(token):
"""
Validates the specified OAuth token, returning whether it points to a valid OAuth token.
"""

View File

@ -1,6 +1,6 @@
import pytest
from auth.oauth import validate_bearer_auth, validate_oauth_token
from auth.oauth import validate_bearer_auth
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import *

View File

@ -8,6 +8,7 @@ class AuthKind(Enum):
oauth = "oauth"
signed_grant = "signed_grant"
credentials = "credentials"
ssojwt = "ssojwt"
def __str__(self):
return "%s" % self.value
@ -29,6 +30,7 @@ class ValidateResult(object):
appspecifictoken=None,
signed_data=None,
error_message=None,
sso_token=None,
):
self.kind = kind
self.missing = missing
@ -40,6 +42,7 @@ class ValidateResult(object):
robot=robot,
appspecifictoken=appspecifictoken,
signed_data=signed_data,
sso_token=sso_token,
)
def tuple(self):

View File

@ -95,7 +95,7 @@ services:
# namespace with quay. this allows quay to serve
# layers to clair over localhost.
clair:
user: nobody
user: ${DOCKER_USER:-nobody:0}
container_name: quay-clair
image: quay.io/projectquay/clair:4.4.0
volumes:
@ -107,6 +107,4 @@ services:
cpus: 2
command:
["bash", "-c", "cd /src/clair/cmd/clair; go run -mod vendor ."]
depends_on:
- quay

View File

@ -382,6 +382,7 @@ class User(ApiResource):
Get user information for the authenticated user.
"""
user = get_authenticated_user()
if user is None or user.organization or not UserReadPermission(user.username).can():
raise InvalidToken("Requires authentication", payload={"session_required": False})

View File

@ -9,7 +9,7 @@ from flask import session, request, Response
import features
from app import app
from auth.auth_context import get_validated_oauth_token
from auth.auth_context import get_validated_oauth_token, get_sso_token
from util.http import abort
@ -65,7 +65,7 @@ def csrf_protect(
@wraps(func)
def wrapper(*args, **kwargs):
# Verify the CSRF token.
if get_validated_oauth_token() is None:
if get_validated_oauth_token() is None and get_sso_token() is None:
if all_methods or (request.method != "GET" and request.method != "HEAD"):
verify_csrf(session_token_name, request_token_name, check_header)

View File

@ -5,21 +5,18 @@ import os
from collections import namedtuple
from flask import request, redirect, url_for, Blueprint, abort, session
from peewee import IntegrityError
import features
from app import app, analytics, get_app_url, oauth_login, authentication, url_scheme_and_hostname
from _init import CONF_DIR
from app import app, get_app_url, oauth_login, authentication, url_scheme_and_hostname, analytics
from auth.auth_context import get_authenticated_user
from auth.decorators import require_session_login
from data import model
from data.users.shared import can_create_user
from endpoints.common import common_login
from endpoints.web import index, render_page_template_with_routedata
from endpoints.csrf import csrf_protect, OAUTH_CSRF_TOKEN_NAME, generate_csrf_token
from oauth.login import OAuthLoginException, ExportComplianceException
from util.validation import generate_valid_usernames
from oauth.login_utils import _conduct_oauth_login, _attach_service
from util.request import get_request_ip
logger = logging.getLogger(__name__)
@ -30,23 +27,6 @@ oauthlogin_csrf_protect = csrf_protect(
OAUTH_CSRF_TOKEN_NAME, "state", all_methods=True, check_header=False
)
OAuthResult = namedtuple(
"OAuthResult",
["user_obj", "service_name", "error_message", "register_redirect", "requires_verification"],
)
def _oauthresult(
user_obj=None,
service_name=None,
error_message=None,
register_redirect=False,
requires_verification=False,
):
return OAuthResult(
user_obj, service_name, error_message, register_redirect, requires_verification
)
def _get_response(result):
if result.error_message is not None:
@ -57,110 +37,6 @@ def _get_response(result):
return _perform_login(result.user_obj, result.service_name)
def _conduct_oauth_login(
auth_system, login_service, lid, lusername, lemail, metadata=None, captcha_verified=False
):
"""
Conducts login from the result of an OAuth service's login flow and returns the status of the
login, as well as the followup step.
"""
service_id = login_service.service_id()
service_name = login_service.service_name()
# Check for an existing account *bound to this service*. If found, conduct login of that account
# and redirect.
user_obj = model.user.verify_federated_login(service_id, lid)
if user_obj is not None:
return _oauthresult(user_obj=user_obj, service_name=service_name)
# If the login service has a bound field name, and we have a defined internal auth type that is
# not the database, then search for an existing account with that matching field. This allows
# users to setup SSO while also being backed by something like LDAP.
bound_field_name = login_service.login_binding_field()
if auth_system.federated_service is not None and bound_field_name is not None:
# Perform lookup.
logger.debug('Got oauth bind field name of "%s"', bound_field_name)
lookup_value = None
if bound_field_name == "sub":
lookup_value = lid
elif bound_field_name == "username":
lookup_value = lusername
elif bound_field_name == "email":
lookup_value = lemail
if lookup_value is None:
logger.error("Missing lookup value for OAuth login")
return _oauthresult(
service_name=service_name, error_message="Configuration error in this provider"
)
(user_obj, err) = auth_system.link_user(lookup_value)
if err is not None:
logger.debug("%s %s not found: %s", bound_field_name, lookup_value, err)
msg = "%s %s not found in backing auth system" % (bound_field_name, lookup_value)
return _oauthresult(service_name=service_name, error_message=msg)
# Found an existing user. Bind their internal auth account to this service as well.
result = _attach_service(login_service, user_obj, lid, lusername)
if result.error_message is not None:
return result
return _oauthresult(user_obj=user_obj, service_name=service_name)
# Otherwise, we need to create a new user account.
blacklisted_domains = app.config.get("BLACKLISTED_EMAIL_DOMAINS", [])
if not can_create_user(lemail, blacklisted_domains=blacklisted_domains):
error_message = "User creation is disabled. Please contact your administrator"
return _oauthresult(service_name=service_name, error_message=error_message)
if features.RECAPTCHA and not captcha_verified:
return _oauthresult(service_name=service_name, requires_verification=True)
# Try to create the user
try:
# Generate a valid username.
new_username = None
for valid in generate_valid_usernames(lusername):
if model.user.get_user_or_org(valid):
continue
new_username = valid
break
requires_password = auth_system.requires_distinct_cli_password
prompts = model.user.get_default_user_prompts(features)
user_obj = model.user.create_federated_user(
new_username,
lemail,
service_id,
lid,
set_password_notification=requires_password,
metadata=metadata or {},
confirm_username=features.USERNAME_CONFIRMATION,
prompts=prompts,
email_required=features.MAILING,
)
# Success, tell analytics
analytics.track(user_obj.username, "register", {"service": service_name.lower()})
return _oauthresult(user_obj=user_obj, service_name=service_name)
except model.InvalidEmailAddressException:
message = (
"The e-mail address {0} is already associated "
"with an existing {1} account. \n"
"Please log in with your username and password and "
"associate your {2} account to use it in the future."
)
message = message.format(lemail, app.config["REGISTRY_TITLE_SHORT"], service_name)
return _oauthresult(
service_name=service_name, error_message=message, register_redirect=True
)
except model.DataModelException as ex:
return _oauthresult(service_name=service_name, error_message=str(ex))
def _render_ologin_error(service_name, error_message=None, register_redirect=False):
"""
Returns a Flask response indicating an OAuth error.
@ -221,29 +97,6 @@ def _perform_login(user_obj, service_name):
return _render_ologin_error(service_name, "Could not login. Account may be disabled")
def _attach_service(login_service, user_obj, lid, lusername):
"""
Attaches the given user account to the given service, with the given service user ID and service
username.
"""
metadata = {
"service_username": lusername,
}
try:
model.user.attach_federated_login(
user_obj, login_service.service_id(), lid, metadata=metadata
)
return _oauthresult(user_obj=user_obj)
except IntegrityError:
err = "%s account %s is already attached to a %s account" % (
login_service.service_name(),
lusername,
app.config["REGISTRY_TITLE_SHORT"],
)
return _oauthresult(service_name=login_service.service_name(), error_message=err)
def _register_service(login_service):
"""
Registers the given login service, adding its callback and attach routes to the blueprint.
@ -281,6 +134,8 @@ def _register_service(login_service):
session["captcha_verified"] = 0
result = _conduct_oauth_login(
app.config,
analytics,
authentication,
login_service,
lid,
@ -317,7 +172,7 @@ def _register_service(login_service):
# Conduct attach.
user_obj = get_authenticated_user()
result = _attach_service(login_service, user_obj, lid, lusername)
result = _attach_service(app.config, login_service, user_obj, lid, lusername)
if result.error_message is not None:
return _get_response(result)

View File

@ -6,6 +6,7 @@ from data import model, database
from data.users import get_users_handler, DatabaseUsers
from endpoints.oauth.login import _conduct_oauth_login
from oauth.services.github import GithubOAuthService
from test.analytics import analytics
from test.test_ldap import mock_ldap
from test.fixtures import *
@ -36,7 +37,7 @@ def _get_users_handler(auth_type):
return get_users_handler(config, None, None)
def test_existing_account(auth_system, login_service):
def test_existing_account(app, auth_system, login_service):
login_service_lid = "someexternaluser"
# Create an existing bound federated user.
@ -47,7 +48,13 @@ def test_existing_account(auth_system, login_service):
with mock_ldap():
result = _conduct_oauth_login(
auth_system, login_service, login_service_lid, login_service_lid, "example@example.com"
app.config,
analytics,
auth_system,
login_service,
login_service_lid,
login_service_lid,
"example@example.com",
)
assert result.user_obj == created_user
@ -57,7 +64,7 @@ def test_existing_account(auth_system, login_service):
assert current_user_count == existing_user_count
def test_new_account_via_database(login_service):
def test_new_account_via_database(app, login_service):
existing_user_count = database.User.select().count()
login_service_lid = "someexternaluser"
internal_auth = DatabaseUsers()
@ -65,7 +72,13 @@ def test_new_account_via_database(login_service):
# Conduct login. Since the external user doesn't (yet) bind to a user in the database,
# a new user should be created and bound to the external service.
result = _conduct_oauth_login(
internal_auth, login_service, login_service_lid, login_service_lid, "example@example.com"
app.config,
analytics,
internal_auth,
login_service,
login_service_lid,
login_service_lid,
"example@example.com",
)
assert result.user_obj is not None
@ -97,7 +110,7 @@ def test_new_account_via_database(login_service):
],
)
def test_flagged_user_creation(
open_creation, invite_only, has_invite, expect_success, login_service
app, open_creation, invite_only, has_invite, expect_success, login_service
):
login_service_lid = "someexternaluser"
email = "some@example.com"
@ -113,7 +126,13 @@ def test_flagged_user_creation(
with patch("features.INVITE_ONLY_USER_CREATION", invite_only):
# Conduct login.
result = _conduct_oauth_login(
internal_auth, login_service, login_service_lid, login_service_lid, email
app.config,
analytics,
internal_auth,
login_service,
login_service_lid,
login_service_lid,
email,
)
assert (result.user_obj is not None) == expect_success
assert (result.error_message is None) == expect_success
@ -168,7 +187,9 @@ def test_new_account_via_ldap(binding_field, lid, lusername, lemail, expected_er
with mock_ldap():
# Conduct OAuth login.
result = _conduct_oauth_login(internal_auth, external_auth, lid, lusername, lemail)
result = _conduct_oauth_login(
app.config, analytics, internal_auth, external_auth, lid, lusername, lemail
)
assert result.error_message == expected_error
current_user_count = database.User.select().count()
@ -220,7 +241,13 @@ def test_existing_account_in_ldap(app):
# Conduct OAuth login with the same lid and bound field. This should find the existing LDAP
# user (via the `username` binding), and then bind Github to it as well.
result = _conduct_oauth_login(
internal_auth, external_auth, bound_user.username, bound_user.username, bound_user.email
app.config,
analytics,
internal_auth,
external_auth,
bound_user.username,
bound_user.username,
bound_user.email,
)
assert result.error_message is None

View File

@ -30,11 +30,11 @@ FEATURE_PARTIAL_USER_AUTOCOMPLETE: true
FEATURE_REPO_MIRROR: false
FEATURE_REQUIRE_TEAM_INVITE: true
FEATURE_RESTRICTED_V1_PUSH: false
FEATURE_SECURITY_NOTIFICATIONS: true
FEATURE_SECURITY_NOTIFICATIONS: false
FEATURE_SECURITY_SCANNER: true
FEATURE_USERNAME_CONFIRMATION: true
FEATURE_USER_CREATION: true
FEATURE_USER_LOG_ACCESS: true
FEATURE_USER_LOG_ACCESS: false
FEATURE_PROXY_CACHE: true
GITHUB_LOGIN_CONFIG: {}
GITHUB_TRIGGER_CONFIG: {}
@ -69,7 +69,20 @@ USER_EVENTS_REDIS:
host: quay-redis
port: 6379
USE_CDN: false
FEATURE_QUOTA_MANAGEMENT: True
FEATURE_QUOTA_MANAGEMENT: false
BROWSER_API_CALLS_XHR_ONLY: False
CORS_ORIGIN: "http://localhost:9000"
# CORS_ORIGIN: "https://stage.foo.redhat.com:1337 http://localhost:9000/"
CORS_ORIGIN:
- "https://stage.foo.redhat.com:1337"
- "http://localhost:9000"
FEATURE_UI_V2: True
RHSSO_LOGIN_CONFIG:
CLIENT_ID: stage.quay.io
CLIENT_SECRET: SECRET
OIDC_SERVER: https://sso.stage.redhat.com/auth/realms/redhat-external/
SERVICE_NAME: Red Hat
SERVICE_ICON: /static/img/RedHat.svg
VERIFIED_EMAIL_CLAIM_NAME: email
PREFERRED_USERNAME_CLAIM_NAME: preferred_username
LOGIN_SCOPES: ['openid']

239
oauth/login_utils.py Normal file
View File

@ -0,0 +1,239 @@
import base64
import json
import logging
from collections import namedtuple
import jwt
import features
from data import model
from data.users.shared import can_create_user
from peewee import IntegrityError
from oauth.login import OAuthLoginException
from util.validation import generate_valid_usernames
OAuthResult = namedtuple(
"OAuthResult",
["user_obj", "service_name", "error_message", "register_redirect", "requires_verification"],
)
logger = logging.getLogger(__name__)
def is_jwt(token):
    """Return True when *token* parses as a JWT whose header declares typ=JWT."""
    try:
        header = jwt.get_unverified_header(token)
    except jwt.exceptions.DecodeError:
        return False
    typ = header.get("typ", "")
    return typ.lower() == "jwt"
def get_jwt_issuer(token):
    """
    Return the `iss` claim of *token* without verifying its signature, or None
    when the claim is absent.  The token is assumed to be a well-formed JWT.
    """
    claims = jwt.decode(token, options={"verify_signature": False})
    return claims.get("iss")
def get_sub_username_email_from_token(decoded_id_token, user_info=None, config=None, mailing=False):
    """
    Derives the federated login tuple (sub, username, email) from a decoded
    OIDC id token and an optional user-info payload.

    Args:
        decoded_id_token: claims dict decoded from the id token.
        user_info: optional claims dict from the user-info endpoint; falls back
            to the id token claims when falsy.
        config: login-service config dict; honors VERIFIED_EMAIL_CLAIM_NAME and
            PREFERRED_USERNAME_CLAIM_NAME overrides.  Defaults to empty.
        mailing: when True, a verified email address is mandatory.

    Returns:
        (sub, username, email_address); email_address may be None when mailing
        is False.

    Raises:
        OAuthLoginException: for impersonated principals, a `sub` mismatch
            between the token and user info, or a missing verified email when
            `mailing` is set.
    """
    # Fix: the original signature used `config={}`, a shared mutable default.
    if config is None:
        config = {}
    if not user_info:
        user_info = decoded_id_token

    # Verify for impersonation
    if user_info.get("impersonated", False):
        logger.debug("Requests from impersonated principals are not supported")
        raise OAuthLoginException("Requests from impersonated principals are not supported")

    # Verify subs: user info must describe the same principal as the id token.
    if user_info["sub"] != decoded_id_token["sub"]:
        logger.debug(
            "Mismatch in `sub` returned by OIDC user info endpoint: %s vs %s",
            user_info["sub"],
            decoded_id_token["sub"],
        )
        raise OAuthLoginException("Mismatch in `sub` returned by OIDC user info endpoint")

    # Check if we have a verified email address.
    if config.get("VERIFIED_EMAIL_CLAIM_NAME"):
        email_address = user_info.get(config["VERIFIED_EMAIL_CLAIM_NAME"])
    else:
        email_address = user_info.get("email") if user_info.get("email_verified") else None

    logger.debug("Found e-mail address `%s` for sub `%s`", email_address, user_info["sub"])
    if mailing:
        if email_address is None:
            raise OAuthLoginException(
                "A verified email address is required to login with this service"
            )

    # Check for a preferred username, falling back through provider-specific claims.
    if config.get("PREFERRED_USERNAME_CLAIM_NAME"):
        lusername = user_info.get(config["PREFERRED_USERNAME_CLAIM_NAME"])
    else:
        lusername = user_info.get("preferred_username")
        if lusername is None:
            # Note: Active Directory provides `unique_name` and `upn`.
            # https://docs.microsoft.com/en-us/azure/active-directory/develop/v1-id-and-access-tokens
            lusername = user_info.get("unique_name", user_info.get("upn"))

    if lusername is None:
        lusername = user_info["sub"]

    # Strip any email-style domain suffix from the candidate username.
    if lusername.find("@") >= 0:
        lusername = lusername[0 : lusername.find("@")]

    return decoded_id_token["sub"], lusername, email_address
def _oauthresult(
    user_obj=None,
    service_name=None,
    error_message=None,
    register_redirect=False,
    requires_verification=False,
):
    """Construct an OAuthResult, defaulting every field to its no-op value."""
    return OAuthResult(
        user_obj=user_obj,
        service_name=service_name,
        error_message=error_message,
        register_redirect=register_redirect,
        requires_verification=requires_verification,
    )
def _attach_service(config, login_service, user_obj, lid, lusername):
    """
    Binds the given user account to the given external login service under the
    given service user ID and service username.

    Returns an OAuthResult carrying the user on success, or an error message
    when the external identity is already attached to another account.
    """
    try:
        model.user.attach_federated_login(
            user_obj,
            login_service.service_id(),
            lid,
            metadata={"service_username": lusername},
        )
    except IntegrityError:
        message = "%s account %s is already attached to a %s account" % (
            login_service.service_name(),
            lusername,
            config["REGISTRY_TITLE_SHORT"],
        )
        return _oauthresult(service_name=login_service.service_name(), error_message=message)
    return _oauthresult(user_obj=user_obj)
def _conduct_oauth_login(
    config,
    analytics,
    auth_system,
    login_service,
    lid,
    lusername,
    lemail,
    metadata=None,
    captcha_verified=False,
):
    """
    Conducts login from the result of an OAuth service's login flow and returns the status of the
    login, as well as the followup step.

    Resolution order: (1) an account already federated to this service, (2) an
    existing internal-auth account matched via the service's binding field,
    (3) a newly created federated user.

    Args:
        config: app config mapping (BLACKLISTED_EMAIL_DOMAINS, REGISTRY_TITLE_SHORT).
        analytics: analytics client; `track` is called on new-user registration.
        auth_system: internal auth backend (e.g. database or LDAP).
        login_service: the OAuth/OIDC login service the user authenticated with.
        lid: the external service's user ID (`sub`).
        lusername: the external username candidate.
        lemail: the external email address (may be None).
        metadata: optional metadata stored with a newly created federated user.
        captcha_verified: whether a captcha was already passed (skips RECAPTCHA gate).

    Returns:
        An OAuthResult describing success, an error, a register redirect, or a
        required verification step.
    """
    service_id = login_service.service_id()
    service_name = login_service.service_name()

    # Check for an existing account *bound to this service*. If found, conduct login of that account
    # and redirect.
    user_obj = model.user.verify_federated_login(service_id, lid)
    if user_obj is not None:
        return _oauthresult(user_obj=user_obj, service_name=service_name)

    # If the login service has a bound field name, and we have a defined internal auth type that is
    # not the database, then search for an existing account with that matching field. This allows
    # users to setup SSO while also being backed by something like LDAP.
    bound_field_name = login_service.login_binding_field()
    if auth_system.federated_service is not None and bound_field_name is not None:
        # Perform lookup.
        logger.debug('Got oauth bind field name of "%s"', bound_field_name)
        lookup_value = None
        if bound_field_name == "sub":
            lookup_value = lid
        elif bound_field_name == "username":
            lookup_value = lusername
        elif bound_field_name == "email":
            lookup_value = lemail

        if lookup_value is None:
            logger.error("Missing lookup value for OAuth login")
            return _oauthresult(
                service_name=service_name, error_message="Configuration error in this provider"
            )

        (user_obj, err) = auth_system.link_user(lookup_value)
        if err is not None:
            logger.debug("%s %s not found: %s", bound_field_name, lookup_value, err)
            msg = "%s %s not found in backing auth system" % (bound_field_name, lookup_value)
            return _oauthresult(service_name=service_name, error_message=msg)

        # Found an existing user. Bind their internal auth account to this service as well.
        result = _attach_service(config, login_service, user_obj, lid, lusername)
        if result.error_message is not None:
            return result

        return _oauthresult(user_obj=user_obj, service_name=service_name)

    # Otherwise, we need to create a new user account.
    blacklisted_domains = config.get("BLACKLISTED_EMAIL_DOMAINS", [])
    if not can_create_user(lemail, blacklisted_domains=blacklisted_domains):
        error_message = "User creation is disabled. Please contact your administrator"
        return _oauthresult(service_name=service_name, error_message=error_message)

    if features.RECAPTCHA and not captcha_verified:
        # Caller must send the user through captcha verification first.
        return _oauthresult(service_name=service_name, requires_verification=True)

    # Try to create the user
    try:
        # Generate a valid username: first candidate not already taken by a user or org.
        new_username = None
        for valid in generate_valid_usernames(lusername):
            if model.user.get_user_or_org(valid):
                continue

            new_username = valid
            break

        requires_password = auth_system.requires_distinct_cli_password
        prompts = model.user.get_default_user_prompts(features)
        user_obj = model.user.create_federated_user(
            new_username,
            lemail,
            service_id,
            lid,
            set_password_notification=requires_password,
            metadata=metadata or {},
            confirm_username=features.USERNAME_CONFIRMATION,
            prompts=prompts,
            email_required=features.MAILING,
        )

        # Success, tell analytics
        analytics.track(user_obj.username, "register", {"service": service_name.lower()})
        return _oauthresult(user_obj=user_obj, service_name=service_name)
    except model.InvalidEmailAddressException:
        # Email already belongs to another account: send the user to login-and-attach.
        message = (
            "The e-mail address {0} is already associated "
            "with an existing {1} account. \n"
            "Please log in with your username and password and "
            "associate your {2} account to use it in the future."
        )
        message = message.format(lemail, config["REGISTRY_TITLE_SHORT"], service_name)
        return _oauthresult(
            service_name=service_name, error_message=message, register_redirect=True
        )
    except model.DataModelException as ex:
        return _oauthresult(service_name=service_name, error_message=str(ex))

View File

@ -41,4 +41,18 @@ class OAuthLoginManager(object):
if service.service_id() == service_id:
return service
def get_service_by_issuer(self, issuer):
    """
    Returns the login service whose configured OIDC issuer matches *issuer*
    (ignoring trailing slashes), or None when no service matches.

    Services that do not expose a callable `get_issuer` (non-OIDC services)
    are skipped, as are services whose configured issuer is empty.
    """
    target = issuer.rstrip("/")
    for service in self.services:
        # Fix: the original tested `if not service.get_issuer:` — a bound
        # method is always truthy, so that check was dead code.
        get_issuer = getattr(service, "get_issuer", None)
        if not callable(get_issuer):
            continue

        config_issuer = get_issuer()
        # Guard against services with no issuer configured (avoids
        # AttributeError on None.rstrip).
        if config_issuer and config_issuer.rstrip("/") == target:
            return service
    return None

View File

@ -17,6 +17,7 @@ from oauth.base import (
OAuthEndpoint,
)
from oauth.login import OAuthLoginException
from oauth.login_utils import get_sub_username_email_from_token
from util.security.jwtutil import decode, InvalidTokenError
logger = logging.getLogger(__name__)
@ -182,50 +183,9 @@ class OIDCLoginService(OAuthService):
else:
user_info = decoded_id_token
# Verify for impersonation
if user_info.get("impersonated", False):
logger.debug("Requests from impersonated principals are not supported")
raise OAuthLoginException("Requests from impersonated principals are not supported")
# Verify subs.
if user_info["sub"] != decoded_id_token["sub"]:
logger.debug(
"Mismatch in `sub` returned by OIDC user info endpoint: %s vs %s",
user_info["sub"],
decoded_id_token["sub"],
)
raise OAuthLoginException("Mismatch in `sub` returned by OIDC user info endpoint")
# Check if we have a verified email address.
if self.config.get("VERIFIED_EMAIL_CLAIM_NAME"):
email_address = user_info.get(self.config["VERIFIED_EMAIL_CLAIM_NAME"])
else:
email_address = user_info.get("email") if user_info.get("email_verified") else None
logger.debug("Found e-mail address `%s` for sub `%s`", email_address, user_info["sub"])
if self._mailing:
if email_address is None:
raise OAuthLoginException(
"A verified email address is required to login with this service"
)
# Check for a preferred username.
if self.config.get("PREFERRED_USERNAME_CLAIM_NAME"):
lusername = user_info.get(self.config["PREFERRED_USERNAME_CLAIM_NAME"])
else:
lusername = user_info.get("preferred_username")
if lusername is None:
# Note: Active Directory provides `unique_name` and `upn`.
# https://docs.microsoft.com/en-us/azure/active-directory/develop/v1-id-and-access-tokens
lusername = user_info.get("unique_name", user_info.get("upn"))
if lusername is None:
lusername = user_info["sub"]
if lusername.find("@") >= 0:
lusername = lusername[0 : lusername.find("@")]
return decoded_id_token["sub"], lusername, email_address
return get_sub_username_email_from_token(
decoded_id_token, user_info, self.config, self._mailing
)
@property
def _issuer(self):
@ -235,6 +195,9 @@ class OIDCLoginService(OAuthService):
# If specified, use the overridden OIDC issuer.
return self.config.get("OIDC_ISSUER", issuer)
def get_issuer(self):
    # Public accessor for the effective OIDC issuer (the `_issuer` property
    # honors the OIDC_ISSUER config override); used to match incoming JWTs'
    # `iss` claim against this login service.
    return self._issuer
@lru_cache(maxsize=1)
def _oidc_config(self):
if self.config.get("OIDC_SERVER"):
@ -267,7 +230,7 @@ class OIDCLoginService(OAuthService):
logger.exception("Could not parse OIDC discovery for url: %s", discovery_url)
raise DiscoveryFailureException("Could not parse OIDC discovery information")
def decode_user_jwt(self, token):
def decode_user_jwt(self, token, options={}):
"""
Decodes the given JWT under the given provider and returns it.
@ -287,15 +250,20 @@ class OIDCLoginService(OAuthService):
self.client_id(),
self._issuer,
)
key = ""
if options.get("verify_signature", True):
key = self._get_public_key(kid)
try:
return decode(
token,
self._get_public_key(kid),
key,
algorithms=ALLOWED_ALGORITHMS,
audience=self.client_id(),
issuer=self._issuer,
leeway=JWT_CLOCK_SKEW_SECONDS,
options=dict(require=["iat", "exp"]),
options=dict(require=["iat", "exp"], **options),
)
except InvalidTokenError as ite:
logger.warning(
@ -314,7 +282,7 @@ class OIDCLoginService(OAuthService):
audience=self.client_id(),
issuer=self._issuer,
leeway=JWT_CLOCK_SKEW_SECONDS,
options=dict(require=["iat", "exp"]),
options=dict(require=["iat", "exp"], **options),
)
except InvalidTokenError as ite:
logger.warning(
@ -332,7 +300,7 @@ class OIDCLoginService(OAuthService):
audience=self.client_id(),
issuer=self._issuer,
leeway=JWT_CLOCK_SKEW_SECONDS,
options=dict(require=["iat", "exp"], verify_signature=False),
options=dict(require=["iat", "exp"], verify_signature=False, **options),
)
logger.debug("Got an error when trying to verify OIDC JWT: %s", nonverified)
raise ite

View File

@ -55,7 +55,7 @@ class RHSSOOAuthService(OIDCLoginService):
# to render the compliance error page
raise e
except Exception as e:
# This generates a gneneric OAUTH error page
# This generates a generic OAUTH error page
# also any issues with reaching the export
# compliance API should trigger this
raise OAuthLoginException(str(e))

View File

@ -5,3 +5,6 @@ class FakeMixpanel(object):
def init_app(app):
return FakeMixpanel()
analytics = FakeMixpanel()

View File

@ -11,6 +11,7 @@ from authlib.jose import JsonWebKey
from cryptography.hazmat.primitives import serialization
from app import app, authentication
from auth.oauth import validate_bearer_auth
from data import model
from endpoints.oauth.login import oauthlogin as oauthlogin_bp
from test.test_endpoints import EndpointTestCase
@ -239,6 +240,61 @@ class OAuthLoginTestCase(EndpointTestCase):
test_attach=False,
)
def test_jwt_bearer_token_no_provider(self):
    # A bearer JWT whose `iss` claim matches no configured login service
    # must be rejected with an "Issuer ... not configured" error.
    encoded_jwt = jwt.encode({"iss": "badissuer"}, "secret", algorithm="HS256")
    oidc_mocks = self._get_oidc_mocks()
    with HTTMock(*oidc_mocks):
        result = validate_bearer_auth(f"Bearer {encoded_jwt}")
        assert result.error_message == "Issuer badissuer not configured"
def test_jwt_bearer_email_used(self):
    """SSO login fails when the token's unverified email collides with an existing account."""
    # Issuer/audience come from testconfig.py.
    oidc_config = app.config["TESTOIDC_LOGIN_CONFIG"]
    now = int(time.time())
    claims = {
        "iss": oidc_config["OIDC_SERVER"],
        "aud": oidc_config["CLIENT_ID"],
        "nbf": now,
        "iat": now,
        "exp": now + 600,
        "sub": "cool.user",
        "email": "someemail@example.com",
        "email_verified": False,
    }
    token = jwt.encode(claims, "secret", algorithm="HS256", headers={"kid": "fakekid"})
    with HTTMock(*self._get_oidc_mocks()):
        outcome = validate_bearer_auth(f"Bearer {token}")
        assert (
            "already associated with an existing Project Quay account" in outcome.error_message
        )
def test_jwt_bearer_new_user(self):
    """SSO login with a verified, unused email succeeds (auto-creates the user)."""
    # Issuer/audience come from testconfig.py.
    oidc_config = app.config["TESTOIDC_LOGIN_CONFIG"]
    now = int(time.time())
    claims = {
        "iss": oidc_config["OIDC_SERVER"],
        "aud": oidc_config["CLIENT_ID"],
        "nbf": now,
        "iat": now,
        "exp": now + 600,
        "sub": "cool.user",
        "email": "somenewemail@example.com",
        "email_verified": True,
    }
    token = jwt.encode(claims, "secret", algorithm="HS256", headers={"kid": "fakekid"})
    with HTTMock(*self._get_oidc_mocks()):
        outcome = validate_bearer_auth(f"Bearer {token}")
        assert outcome.error_message is None
# Allow running this test module directly (outside the pytest runner).
if __name__ == "__main__":
unittest.main()

1744
web/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -2,6 +2,9 @@
"name": "quay-ui",
"version": "0.1.0",
"private": true,
"insights": {
"appname": "quay"
},
"homepage": ".",
"dependencies": {
"@patternfly/patternfly": "^4.185.1",
@ -9,6 +12,8 @@
"@patternfly/react-core": "^4.202.16",
"@patternfly/react-icons": "^4.53.16",
"@patternfly/react-table": "^4.71.16",
"@redhat-cloud-services/frontend-components": "^3.9.31",
"@redhat-cloud-services/frontend-components-config-utilities": "^1.5.31",
"@tanstack/react-query": "^4.13.5",
"@testing-library/jest-dom": "^5.16.4",
"@testing-library/react": "^12.1.4",
@ -33,6 +38,8 @@
"scripts": {
"start": "webpack serve --color --progress --config webpack.dev.js",
"build": "webpack --config webpack.prod.js",
"start-plugin": "NODE_ENV=development webpack serve --color --progress --config webpack.plugin.js",
"build-plugin": "NODE_ENV=production webpack --config webpack.plugin.js",
"test": "react-scripts test",
"eject": "react-scripts eject",
"format": "prettier --config .prettierrc \"src/**/*.{ts,tsx}\" --write",
@ -57,6 +64,7 @@
]
},
"devDependencies": {
"@openshift/dynamic-plugin-sdk-webpack": "^3.0.1",
"@types/react-dom": "^17.0.2",
"@types/react-router-dom": "^5.3.3",
"@typescript-eslint/eslint-plugin": "^5.18.0",
@ -78,6 +86,7 @@
"eslint-plugin-react-hooks": "^4.4.0",
"file-loader": "^6.2.0",
"html-webpack-plugin": "^5.5.0",
"jws": "^4.0.0",
"lint-staged": ">=10",
"prettier": "^2.1.2",
"raw-loader": "^4.0.2",

8
web/plugin-metadata.json Normal file
View File

@ -0,0 +1,8 @@
{
"name": "quay-ui-plugin",
"version": "0.0.1",
"exposedModules": {
"QuayPluginMain": "./src/routes/PluginMain"
}
}

View File

@ -4,3 +4,8 @@ export const QuayConfigState = atom({
key: 'quayConfigState',
default: null,
});
// Tracks whether the UI is running as a dynamic plugin (embedded in a host
// console shell) rather than as the standalone Quay web application.
export const IsPluginState = atom({
key: 'isPlugin',
default: false,
});

View File

@ -9,6 +9,11 @@ if (process.env.MOCK_API === 'true') {
axios.defaults.baseURL =
process.env.REACT_QUAY_APP_API_URL ||
`${window.location.protocol}//${window.location.host}`;
if (window?.insights?.chrome?.auth) {
axios.defaults.baseURL = 'http://localhost:8080'; // TODO: replace with correct endpoint
}
axios.defaults.withCredentials = true;
axios.defaults.headers.common['X-Requested-With'] = 'XMLHttpRequest';
@ -28,10 +33,18 @@ axiosIns.interceptors.request.use(async (config) => {
GlobalAuthState.csrfToken = r.csrf_token;
}
if (!GlobalAuthState.bearerToken && window?.insights?.chrome?.auth) {
GlobalAuthState.bearerToken = await window.insights.chrome.auth.getToken();
}
if (config.headers && GlobalAuthState.csrfToken) {
config.headers['X-CSRF-Token'] = GlobalAuthState.csrfToken;
}
if (config.headers && GlobalAuthState.bearerToken) {
config.headers['Authorization'] = `Bearer ${GlobalAuthState.bearerToken}`;
}
return config;
});

View File

@ -38,7 +38,7 @@ export function formatSize(sizeInBytes: number) {
const i = Math.floor(Math.log(sizeInBytes) / Math.log(1024));
return (
(sizeInBytes / Math.pow(1024, i)).toFixed(2) * 1 +
(sizeInBytes / Math.pow(1024, i)).toFixed(2) +
' ' +
['B', 'kB', 'MB', 'GB', 'TB'][i]
);
@ -48,3 +48,46 @@ export function isValidEmail(email: string): boolean {
const regex = /\S+@\S+\.\S+/;
return regex.test(email);
}
// Extracts the repository name from a URL of the form
// <prefix>/repository/<org>/<repo> or <prefix>/repository/<org>/<repo>/tag/<tag>.
// Returns '' when the URL contains no repository segment.
export function parseRepoNameFromUrl(url: string): string {
  const urlParts = url.split('/');
  const repoKeywordIndex = urlParts.indexOf('repository');
  if (repoKeywordIndex === -1) {
    return '';
  }
  // Fallback guards against URLs that end right after the org segment;
  // without it this would return undefined despite the declared string type.
  return urlParts[repoKeywordIndex + 2] ?? '';
}
// Extracts the organization name from a URL of the form
// <prefix>/repository/<org>/<repo> or <prefix>/repository/<org>/<repo>/tag/<tag>.
// Returns '' when the URL contains no repository segment.
export function parseOrgNameFromUrl(url: string): string {
  const urlParts = url.split('/');
  const repoKeywordIndex = urlParts.indexOf('repository');
  if (repoKeywordIndex === -1) {
    return '';
  }
  // Fallback guards against URLs that end at the 'repository' segment;
  // without it this would return undefined despite the declared string type.
  return urlParts[repoKeywordIndex + 1] ?? '';
}
// Extracts the tag name from a URL of the form
// <prefix>/repository/<org>/<repo>/tag/<tag>.
// Returns '' when the URL has no repository segment or no tag segment.
export function parseTagNameFromUrl(url: string): string {
  const urlParts = url.split('/');
  const repoKeywordIndex = urlParts.indexOf('repository');
  if (repoKeywordIndex === -1) {
    return '';
  }
  // Search for the 'tag' marker only after the <org>/<repo> segments so an
  // org or repo literally named 'tag' cannot be mistaken for the marker.
  const tagKeywordIndex = urlParts.indexOf('tag', repoKeywordIndex + 3);
  if (tagKeywordIndex === -1) {
    return '';
  }
  return urlParts[tagKeywordIndex + 1] ?? '';
}

View File

@ -8,11 +8,13 @@ const csrfTokenUrl = '/csrf_token';
interface AuthResource {
isLoggedIn: boolean;
csrfToken: string | null;
bearerToken: string | null;
}
export const GlobalAuthState: AuthResource = {
isLoggedIn: false,
csrfToken: null,
bearerToken: null,
};
export async function loginUser(username: string, password: string) {

View File

@ -38,25 +38,28 @@ export enum NavigationPath {
repositoryDetail = '/repository/:organizationName/*',
// Tag Detail
tagDetail = '/tag/:organizationName/*',
tagDetail = '/repository/:organizationName/:repositoryName/tag/:tagName',
}
export function getRepoDetailPath(org: string, repo: string) {
// return relative path to repository detail page from repo list table
let repoPath = NavigationPath.repositoryDetail.toString();
repoPath = repoPath.replace(':organizationName', org);
repoPath = repoPath.replace('*', repo);
return repoPath;
return domainRoute(repoPath);
}
export function getTagDetailPath(
org: string,
repo: string,
tag: string,
tagName: string,
queryParams: Map<string, string> = null,
) {
let tagPath = NavigationPath.tagDetail.toString();
tagPath = tagPath.replace(':organizationName', org);
tagPath = tagPath.replace('*', `${repo}/${tag}`);
tagPath = tagPath.replace(':repositoryName', repo);
tagPath = tagPath.replace(':tagName', tagName);
if (queryParams) {
const params = [];
for (const entry of Array.from(queryParams.entries())) {
@ -64,7 +67,7 @@ export function getTagDetailPath(
}
tagPath = tagPath + '?' + params.join('&');
}
return tagPath;
return domainRoute(tagPath);
}
export function getDomain() {
@ -83,7 +86,7 @@ function domainRoute(definedRoute) {
return (
// This regex replaces everything after the last occurrence of organization|repository|signin with empty string.
// Doing this gives us the prefix.
currentRoute.replace(/\/(\/organization|repository|signin)(?!.*\1).*/, '') +
currentRoute.replace(/\/(organization|repository|signin)(?!.*\1).*/, '') +
definedRoute
);
}

View File

@ -7,7 +7,7 @@ import {
TabTitleText,
Title,
} from '@patternfly/react-core';
import {useLocation, useSearchParams} from 'react-router-dom';
import {useLocation, useParams, useSearchParams} from 'react-router-dom';
import {useCallback, useState} from 'react';
import RepositoriesList from 'src/routes/RepositoriesList/RepositoriesList';
import Settings from './Tabs/Settings/Settings';
@ -16,7 +16,7 @@ import RobotAccountsList from 'src/routes/RepositoriesList/RobotAccountsList';
export default function Organization() {
const location = useLocation();
const orgName = location.pathname.split('/')[2];
const {organizationName} = useParams();
const [searchParams, setSearchParams] = useSearchParams();
const [activeTabKey, setActiveTabKey] = useState<string>(
@ -34,15 +34,15 @@ export default function Organization() {
const repositoriesSubNav = [
{
name: 'Repositories',
component: <RepositoriesList />,
component: <RepositoriesList organizationName={organizationName} />,
},
{
name: 'Robot accounts',
component: <RobotAccountsList orgName={orgName} />,
component: <RobotAccountsList organizationName={organizationName} />,
},
{
name: 'Settings',
component: <Settings />,
component: <Settings organizationName={organizationName} />,
},
];
@ -54,7 +54,7 @@ export default function Organization() {
className="no-padding-bottom"
>
<Title data-testid="repo-title" headingLevel="h1">
{orgName}
{organizationName}
</Title>
</PageSection>
<PageSection

View File

@ -17,9 +17,9 @@ import {useLocation} from 'react-router-dom';
import {useCurrentUser} from 'src/hooks/UseCurrentUser';
import {useOrganization} from 'src/hooks/UseOrganization';
const GeneralSettings = () => {
const GeneralSettings = (props: GeneralSettingsProps) => {
const location = useLocation();
const organizationName = location.pathname.split('/')[2];
const organizationName = props.organizationName;
const {user} = useCurrentUser();
const {organization, isUserOrganization, loading} =
@ -107,7 +107,7 @@ const GeneralSettings = () => {
// return <h1>Hello</h1>;
// };
export default function Settings() {
export default function Settings(props: SettingsProps) {
const [activeTabIndex, setActiveTabIndex] = useState(0);
const handleTabClick = (event, tabIndex) => {
@ -118,7 +118,7 @@ export default function Settings() {
{
name: 'General Settings',
id: 'generalsettings',
content: <GeneralSettings />,
content: <GeneralSettings organizationName={props.organizationName} />,
},
// {
// name: 'Billing Information',
@ -156,3 +156,11 @@ export default function Settings() {
</Flex>
);
}
type SettingsProps = {
organizationName: string;
};
type GeneralSettingsProps = {
organizationName: string;
};

View File

@ -35,6 +35,7 @@ import ColumnNames from './ColumnNames';
import RepoCount from 'src/components/Table/RepoCount';
import {useOrganizations} from 'src/hooks/UseOrganizations';
import {useDeleteOrganizations} from 'src/hooks/UseDeleteOrganizations';
import {Router} from 'react-router-dom';
export interface OrganizationsTableItem {
name: string;
@ -44,7 +45,7 @@ export interface OrganizationsTableItem {
function OrgListHeader() {
return (
<>
<QuayBreadcrumb />
<QuayBreadcrumb />
<PageSection variant={PageSectionVariants.light}>
<div className="co-m-nav-title--row">
<Title headingLevel="h1">Organizations</Title>

View File

@ -0,0 +1,130 @@
import {Banner, Flex, FlexItem, Page} from '@patternfly/react-core';
import {Navigate, Outlet, Route, Router, Routes} from 'react-router-dom';
import {RecoilRoot, useSetRecoilState} from 'recoil';
import {QueryClient, QueryClientProvider} from '@tanstack/react-query';
import useChrome from '@redhat-cloud-services/frontend-components/useChrome';
import {NavigationPath} from './NavigationPath';
import OrganizationsList from './OrganizationsList/OrganizationsList';
import Organization from './OrganizationsList/Organization/Organization';
import RepositoryDetails from 'src/routes/RepositoryDetails/RepositoryDetails';
import RepositoriesList from './RepositoriesList/RepositoriesList';
import TagDetails from 'src/routes/TagDetails/TagDetails';
import {useEffect, useMemo} from 'react';
import {useQuayConfig} from 'src/hooks/UseQuayConfig';
import SiteUnavailableError from 'src/components/errors/SiteUnavailableError';
import NotFound from 'src/components/errors/404';
import {useCurrentUser} from 'src/hooks/UseCurrentUser';
import {InfoCircleIcon} from '@patternfly/react-icons';
import {GlobalAuthState} from '../resources/AuthResource';
import {IsPluginState} from '../atoms/QuayConfigState';
const NavigationRoutes = [
{
path: NavigationPath.organizationsList,
Component: <OrganizationsList />,
},
{
path: NavigationPath.organizationDetail,
Component: <Organization />,
},
{
path: NavigationPath.repositoriesList,
Component: <RepositoriesList />,
},
{
path: NavigationPath.repositoryDetail,
Component: <RepositoryDetails />,
},
{
path: NavigationPath.tagDetail,
Component: <TagDetails />,
},
];
// Entry component for the plugin build of the Quay UI: no topnav/sidenav,
// intended to be hosted inside a console shell that provides chrome/auth.
function PluginMain() {
  const quayConfig = useQuayConfig();
  const {loading, error} = useCurrentUser();
  const chrome = useChrome();
  const setIsPluginState = useSetRecoilState(IsPluginState);

  // Fetch the SSO bearer token once the chrome auth API is available.
  // Doing this inside an effect (rather than directly in the render body)
  // avoids issuing a new token request on every re-render.
  useEffect(() => {
    chrome?.auth?.getToken().then((token) => {
      GlobalAuthState.bearerToken = token;
    });
  }, [chrome]);

  // Keep the document title in sync with the configured registry title.
  useEffect(() => {
    if (quayConfig?.config?.REGISTRY_TITLE) {
      document.title = quayConfig.config.REGISTRY_TITLE;
    }
  }, [quayConfig]);

  // Flag plugin mode so the rest of the UI can adapt (routing, chrome, etc.).
  useEffect(() => {
    setIsPluginState(true);
  }, []);

  if (loading) {
    return null;
  }

  return (
    <Page style={{height: '100vh'}}>
      <Banner variant="info">
        <Flex
          spaceItems={{default: 'spaceItemsSm'}}
          justifyContent={{default: 'justifyContentCenter'}}
        >
          <FlexItem>
            <InfoCircleIcon />
          </FlexItem>
          <FlexItem>
            Please use{' '}
            <a
              href="https://forms.gle/M2CtyneF3iaMT5UVA"
              target="_blank"
              rel="noreferrer"
            >
              this form
            </a>{' '}
            to provide feedback on your experience
          </FlexItem>
        </Flex>
      </Banner>
      <Routes>
        <Route index element={<Navigate to="organization" replace />} />
        {NavigationRoutes.map(({path, Component}, key) => (
          <Route path={path} key={key} element={Component} />
        ))}
        <Route path="*" element={<NotFound />} />
      </Routes>
      <Outlet />
    </Page>
  );
}
// Wraps the plugin with the necessary context providers (Recoil state and
// the react-query client) so PluginMain can run standalone inside a host shell.
export default function PluginMainRoot() {
// initialize the client only on initial render
const queryClient = useMemo(() => {
return new QueryClient({
defaultOptions: {
queries: {
retry: false,
refetchOnWindowFocus: false,
},
},
});
}, []);
return (
<RecoilRoot>
<QueryClientProvider client={queryClient}>
<PluginMain />
</QueryClientProvider>
</RecoilRoot>
);
}

View File

@ -43,10 +43,6 @@ import {useCurrentUser} from 'src/hooks/UseCurrentUser';
import {useRepositories} from 'src/hooks/UseRepositories';
import {useDeleteRepositories} from 'src/hooks/UseDeleteRepositories';
function getReponameFromURL(pathname: string): string {
return pathname.includes('organization') ? pathname.split('/')[2] : null;
}
interface RepoListHeaderProps {
shouldRender: boolean;
}
@ -66,8 +62,8 @@ function RepoListHeader(props: RepoListHeaderProps) {
);
}
export default function RepositoriesList() {
const currentOrg = getReponameFromURL(useLocation().pathname);
export default function RepositoriesList(props: RepositoriesListProps) {
const currentOrg = props.organizationName;
const [isCreateRepoModalOpen, setCreateRepoModalOpen] = useState(false);
const [isKebabOpen, setKebabOpen] = useState(false);
const [makePublicModalOpen, setmakePublicModal] = useState(false);

View File

@ -110,7 +110,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
const {robotAccountsForOrg, page, perPage, setPage, setPerPage} =
useRobotAccounts({
name: props.orgName,
name: props.organizationName,
onSuccess: () => {
setLoading(false);
},
@ -138,9 +138,9 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
// Fetching teams
useQuery(
['organization', props.orgName, 'teams'],
['organization', props.organizationName, 'teams'],
({signal}) => {
fetchOrg(props.orgName, signal).then((response) => {
fetchOrg(props.organizationName, signal).then((response) => {
setTeams(Object['values'](response?.teams));
return response?.teams;
});
@ -148,7 +148,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
},
{
placeholderData: () => {
return queryClient.getQueryData(['organization', props.orgName]);
return queryClient.getQueryData(['organization', props.organizationName]);
},
},
);
@ -196,7 +196,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
};
const {deleteRobotAccounts} = useDeleteRobotAccounts({
namespace: props.orgName,
namespace: props.organizationName,
onSuccess: () => {
setSelectedRobotAccounts([]);
setDeleteModalOpen(!isDeleteModalOpen);
@ -218,7 +218,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
});
const {updateRepoPerms, deleteRepoPerms} = useRobotRepoPermissions({
namespace: props.orgName,
namespace: props.organizationName,
onSuccess: () => null,
onError: (err) => {
setErrTitle('Repository Permission update failed');
@ -265,7 +265,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
const onRepoModalSave = async () => {
try {
const robotname = robotForModalView.name.replace(props.orgName + '+', '');
const robotname = robotForModalView.name.replace(props.organizationName + '+', '');
const [toUpdate, toDelete] = updateRepoPermissions();
if (toUpdate.length > 0) {
await updateRepoPerms({robotName: robotname, repoPerms: toUpdate});
@ -403,7 +403,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
<CreateRobotAccountModal
isModalOpen={isCreateRobotModalOpen}
handleModalToggle={() => setCreateRobotModalOpen(!isCreateRobotModalOpen)}
namespace={props.orgName}
namespace={props.organizationName}
teams={teams}
RepoPermissionDropdownItems={RepoPermissionDropdownItems}
/>
@ -516,7 +516,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
Component={
<RobotRepositoryPermissions
robotAccount={robotForModalView}
namespace={props.orgName}
namespace={props.organizationName}
RepoPermissionDropdownItems={RepoPermissionDropdownItems}
repos={robotRepos}
selectedRepos={selectedReposForModalView}
@ -538,7 +538,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
onClose={onTokenModalClose}
Component={
<RobotTokensModal
namespace={props.orgName}
namespace={props.organizationName}
robotAccount={robotForModalView}
/>
}
@ -609,7 +609,7 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
<Td data-label="kebab">
<RobotAccountKebab
robotAccount={robotAccount}
namespace={props.orgName}
namespace={props.organizationName}
setError={setErr}
deleteModal={bulkDeleteRobotAccountModal}
deleteKebabIsOpen={isDeleteModalOpen}
@ -654,5 +654,5 @@ export default function RobotAccountsList(props: RobotAccountsListProps) {
}
interface RobotAccountsListProps {
orgName: string;
organizationName: string;
}

View File

@ -15,7 +15,7 @@ import {
DrawerPanelContent,
} from '@patternfly/react-core';
import {QuayBreadcrumb} from 'src/components/breadcrumb/Breadcrumb';
import Tags from './Tags/Tags';
import TagsList from './Tags/TagsList';
import {useLocation, useSearchParams, useNavigate} from 'react-router-dom';
import {useEffect, useRef, useState} from 'react';
import Settings from './Settings/Settings';
@ -31,6 +31,7 @@ import RequestError from 'src/components/errors/RequestError';
import {useQuayConfig} from 'src/hooks/UseQuayConfig';
import CreateNotification from './Settings/NotificationsCreateNotification';
import {useRepository} from 'src/hooks/UseRepository';
import {parseOrgNameFromUrl, parseRepoNameFromUrl} from 'src/libs/utils';
enum TabIndex {
Tags = 'tags',
@ -61,13 +62,8 @@ export default function RepositoryDetails() {
const drawerRef = useRef<HTMLDivElement>();
// TODO: refactor
const [organization, ...repo] = location.pathname.split('/').slice(2);
const repository = repo.join('/');
const {repoDetails, errorLoadingRepoDetails} = useRepository(
organization,
repository,
);
const organization = parseOrgNameFromUrl(location.pathname);
const repository = parseRepoNameFromUrl(location.pathname);
const requestedTabIndex = getTabIndex(searchParams.get('tab'));
if (requestedTabIndex && requestedTabIndex !== activeTabKey) {
@ -99,7 +95,7 @@ export default function RepositoryDetails() {
),
};
useEffect(() => {
/* useEffect(() => {
if (errorLoadingRepoDetails) {
setErr(
addDisplayError(
@ -108,7 +104,7 @@ export default function RepositoryDetails() {
),
);
}
}, [errorLoadingRepoDetails]);
}, [errorLoadingRepoDetails]); */
return (
<Drawer
@ -159,7 +155,7 @@ export default function RepositoryDetails() {
eventKey={TabIndex.Tags}
title={<TabTitleText>Tags</TabTitleText>}
>
<Tags
<TagsList
organization={organization}
repository={repository}
repoDetails={repoDetails}

View File

@ -1,5 +1,5 @@
import {TagsToolbar} from './TagsToolbar';
import Table from './Table';
import TagsTable from './TagsTable';
import {useState, useEffect} from 'react';
import {
searchTagsFilterState,
@ -28,7 +28,7 @@ import {CubesIcon} from '@patternfly/react-icons';
import {ToolbarPagination} from 'src/components/toolbar/ToolbarPagination';
import {RepositoryDetails} from 'src/resources/RepositoryResource';
export default function Tags(props: TagsProps) {
export default function TagsList(props: TagsProps) {
const [tags, setTags] = useState<Tag[]>([]);
const [loading, setLoading] = useState<boolean>(true);
const [err, setErr] = useState<string>();
@ -135,7 +135,7 @@ export default function Tags(props: TagsProps) {
selectTag={selectTag}
repoDetails={props.repoDetails}
/>
<Table
<TagsTable
org={props.organization}
repo={props.repository}
tags={paginatedTags}

View File

@ -12,7 +12,7 @@ import prettyBytes from 'pretty-bytes';
import {useState} from 'react';
import {Tag, Manifest} from 'src/resources/TagResource';
import {useResetRecoilState} from 'recoil';
import {Link} from 'react-router-dom';
import {Link, useLocation} from 'react-router-dom';
import {getTagDetailPath} from 'src/routes/NavigationPath';
import TablePopover from './TablePopover';
import SecurityDetails from './SecurityDetails';
@ -80,7 +80,7 @@ function SubRow(props: SubRowProps) {
);
}
function Row(props: RowProps) {
function TagsTableRow(props: RowProps) {
const tag = props.tag;
const rowIndex = props.rowIndex;
let size =
@ -95,6 +95,8 @@ function Row(props: RowProps) {
const emptySecurityDetails = useResetRecoilState(SecurityDetailsState);
const resetSecurityDetails = () => emptySecurityDetails();
const location = useLocation();
return (
<Tbody
data-testid="table-entry"
@ -179,7 +181,7 @@ function Row(props: RowProps) {
);
}
export default function Table(props: TableProps) {
export default function TagsTable(props: TableProps) {
// Control expanded tags
const [expandedTags, setExpandedTags] = useState<string[]>([]);
const setTagExpanded = (tag: Tag, isExpanding = true) =>
@ -210,7 +212,7 @@ export default function Table(props: TableProps) {
</Tr>
</Thead>
{props.tags.map((tag: Tag, rowIndex: number) => (
<Row
<TagsTableRow
key={rowIndex}
org={props.org}
repo={props.repo}

View File

@ -29,7 +29,7 @@ const NavigationRoutes = [
},
{
path: NavigationPath.repositoriesList,
Component: <RepositoriesList />,
Component: <RepositoriesList organizationName={''} />,
},
{
path: NavigationPath.repositoryDetail,

View File

@ -24,6 +24,11 @@ import {
SecurityDetailsErrorState,
SecurityDetailsState,
} from 'src/atoms/SecurityDetailsState';
import {
parseOrgNameFromUrl,
parseRepoNameFromUrl,
parseTagNameFromUrl,
} from '../../libs/utils';
export default function TagDetails() {
const [searchParams] = useSearchParams();
@ -48,9 +53,10 @@ export default function TagDetails() {
// TODO: refactor, need more checks when parsing path
const location = useLocation();
const [org, ...repoPath] = location.pathname.split('/').slice(2);
const tag = repoPath.pop();
const repo = repoPath.join('/');
const org = parseOrgNameFromUrl(location.pathname);
const repo = parseRepoNameFromUrl(location.pathname);
const tag = parseTagNameFromUrl(location.pathname);
useEffect(() => {
(async () => {

View File

@ -1,110 +0,0 @@
/* eslint-env node */
import {Configuration as WebpackConfiguration} from 'webpack';
import {Configuration as WebpackDevServerConfiguration} from 'webpack-dev-server';
import * as path from 'path';
const HtmlWebpackPlugin = require('html-webpack-plugin');
// import { ConsoleRemotePlugin } from "@openshift-console/dynamic-plugin-sdk-webpack";
const CopyWebpackPlugin = require('copy-webpack-plugin');
interface Configuration extends WebpackConfiguration {
devServer?: WebpackDevServerConfiguration;
}
const config: Configuration = {
mode: 'development',
// No regular entry points. The remote container entry is handled by ConsoleRemotePlugin.
// entry: {},
entry: path.resolve(__dirname, './src/index.tsx'),
context: path.resolve(__dirname, 'src'),
output: {
path: path.resolve(__dirname, 'dist'),
filename: '[name]-bundle.js',
chunkFilename: '[name]-chunk.js',
},
resolve: {
extensions: ['.ts', '.tsx', '.js', '.jsx'],
},
module: {
rules: [
{
test: /\.css$/,
use: ['style-loader', 'css-loader'],
},
{
test: /\.scss$/,
exclude: /node_modules/,
use: [
{
loader: 'style-loader',
},
{
loader: 'css-loader',
options: {
sourceMap: true,
},
},
{
loader: 'sass-loader',
options: {
sourceMap: true,
},
},
],
},
{
test: /\.(png|jpg|jpeg|gif|svg|woff2?|ttf|eot|otf)(\?.*$|$)/,
type: 'asset/resource',
generator: {
filename: 'assets/[name].[ext]',
},
},
{
test: /\.m?js/,
resolve: {
fullySpecified: false,
},
},
],
},
devServer: {
static: './dist',
port: 9001,
// Allow bridge running in a container to connect to the plugin dev server.
allowedHosts: 'all',
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, PATCH, OPTIONS',
'Access-Control-Allow-Headers':
'X-Requested-With, Content-Type, Authorization',
},
devMiddleware: {
writeToDisk: true,
},
},
plugins: [
// new ConsoleRemotePlugin(),
new HtmlWebpackPlugin({
template: path.join(__dirname, 'public', 'index.html'),
}),
new CopyWebpackPlugin({
patterns: [{from: path.resolve(__dirname, 'locales'), to: 'locales'}],
}),
],
devtool: 'source-map',
optimization: {
chunkIds: 'named',
minimize: false,
},
};
if (process.env.NODE_ENV === 'production') {
config.mode = 'production';
config.output.filename = '[name]-bundle-[hash].min.js';
config.output.chunkFilename = '[name]-chunk-[chunkhash].min.js';
config.optimization.chunkIds = 'deterministic';
config.optimization.minimize = true;
}
export default config;

259
web/webpack.plugin.js Normal file
View File

@ -0,0 +1,259 @@
// Build-time dependencies for producing the Quay UI dynamic plugin bundle.
const path = require('path');
const DynamicRemotePlugin =
require('@openshift/dynamic-plugin-sdk-webpack').DynamicRemotePlugin;
const CSSMinimizerPlugin = require('css-minimizer-webpack-plugin');
const webpack = require('webpack');
const TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin');
const pluginMetadata = require('./plugin-metadata');
// HCC development proxy
const proxy = require('@redhat-cloud-services/frontend-components-config-utilities/proxy');
const fs = require('fs');
// Directory name used to detect SVGs that are applied as CSS backgrounds.
const BG_IMAGES_DIRNAME = 'assets';
const isProd = process.env.NODE_ENV === 'production';
// Resolve a path relative to this config file.
const pathTo = (relativePath) => path.resolve(__dirname, relativePath);
/**
 * Shared modules consumed and/or provided by this plugin.
 *
 * A host application typically provides some modules to its plugins. If an application
 * provided module is configured as an eager singleton, we suggest using `import: false`
 * to avoid bundling a fallback version of the module when building your plugin.
 *
 * Plugins may provide additional shared modules that can be consumed by other plugins.
 *
 * @see https://webpack.js.org/plugins/module-federation-plugin/#sharing-hints
 */
const pluginSharedModules = {
// Host-provided singletons: no fallback copy is bundled (`import: false`).
'@openshift/dynamic-plugin-sdk': {singleton: true, import: false},
'@patternfly/react-core': {},
'@patternfly/react-table': {},
react: {singleton: true, import: false},
'react-dom': {singleton: true, import: false},
'react-router-dom': {singleton: true, import: false},
};
// Webpack plugins shared by the dev and prod plugin builds.
const plugins = [
new webpack.EnvironmentPlugin({
NODE_ENV: 'development',
}),
// required for SDK code
new webpack.ProvidePlugin({
process: 'process/browser',
}),
// Generates the module-federation container entry for the dynamic plugin.
new DynamicRemotePlugin({
pluginMetadata,
extensions: [],
sharedModules: pluginSharedModules,
entryScriptFilename: isProd
? 'plugin-entry.[fullhash].min.js'
: 'plugin-entry.js',
}),
];
module.exports = {
mode: isProd ? 'production' : 'development',
entry: {}, // Plugin container entry is generated by DynamicRemotePlugin
output: {
path: pathTo('dist'),
// reflect the CDN public path, TODO: Adjust in the future based on the actual location in HCC
publicPath: '/apps/quay/',
chunkFilename: isProd ? 'chunks/[id].[chunkhash].min.js' : 'chunks/[id].js',
assetModuleFilename: isProd
? 'assets/[contenthash][ext]'
: 'assets/[name][ext]',
},
resolve: {
extensions: ['.ts', '.tsx', '.js', '.jsx'],
plugins: [
new TsconfigPathsPlugin({
configFile: path.resolve(__dirname, './tsconfig.json'),
}),
],
symlinks: false,
cacheWithContext: false,
},
module: {
rules: [
{
test: /\.(jsx?|tsx?)$/,
exclude: /\/node_modules\//,
use: [
{
loader: 'ts-loader',
options: {
transpileOnly: true,
experimentalWatchApi: true,
},
},
],
},
{
test: /\.(svg|ttf|eot|woff|woff2)$/,
// only process modules with this loader
// if they live under a 'fonts' or 'pficon' directory
include: [
path.resolve(__dirname, 'node_modules/patternfly/dist/fonts'),
path.resolve(
__dirname,
'node_modules/@patternfly/react-core/dist/styles/assets/fonts',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-core/dist/styles/assets/pficon',
),
path.resolve(
__dirname,
'node_modules/@patternfly/patternfly/assets/fonts',
),
path.resolve(
__dirname,
'node_modules/@patternfly/patternfly/assets/pficon',
),
],
generator: {
filename: 'assets/[name].[ext]',
},
},
{
test: /\.svg$/,
include: (input) => input.indexOf('background-filter.svg') > 1,
type: 'asset',
},
{
test: /\.svg$/,
// only process SVG modules with this loader if they live under a 'bgimages' directory
// this is primarily useful when applying a CSS background using an SVG
include: (input) => input.indexOf(BG_IMAGES_DIRNAME) > -1,
type: 'asset/inline',
},
{
test: /\.svg$/i,
// only process SVG modules with this loader when they don't live under a 'bgimages',
// 'fonts', or 'pficon' directory, those are handled with other loaders
include: (input) =>
input.indexOf(BG_IMAGES_DIRNAME) === -1 &&
input.indexOf('fonts') === -1 &&
input.indexOf('background-filter') === -1 &&
input.indexOf('pficon') === -1,
type: 'asset/resource',
},
{
test: /\.(jpg|jpeg|png|gif)$/i,
include: [
path.resolve(__dirname, 'node_modules/patternfly'),
path.resolve(
__dirname,
'node_modules/@patternfly/patternfly/assets/images',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-styles/css/assets/images',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-core/dist/styles/assets/images',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-core/node_modules/@patternfly/react-styles/css/assets/images',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-table/node_modules/@patternfly/react-styles/css/assets/images',
),
path.resolve(
__dirname,
'node_modules/@patternfly/react-inline-edit-extension/node_modules/@patternfly/react-styles/css/assets/images',
),
],
type: 'asset',
},
{
test: /\.s[ac]ss$/i,
use: [
// Creates `style` nodes from JS strings
'style-loader',
// Translates CSS into CommonJS
'css-loader',
// Compiles Sass to CSS
'sass-loader',
],
},
{
test: /\.(css)$/,
use: ['style-loader', 'css-loader'],
},
],
},
plugins,
devtool: isProd ? 'source-map' : 'cheap-source-map',
optimization: {
minimize: isProd,
minimizer: [
'...', // The '...' string represents the webpack default TerserPlugin instance
new CSSMinimizerPlugin(),
],
},
devServer: {
static: pathTo('dist'),
port: 1337,
https: true,
host: '0.0.0.0',
allowedHosts: 'all',
// https://github.com/bripkens/connect-history-api-fallback
historyApiFallback: {
// We should really implement the same logic as cloud-services-config
// and only redirect (/beta)?/bundle/app-name to /index.html
rewrites: [
{from: /^\/api/, to: '/404.html'},
{from: /^(\/beta)?\/config/, to: '/404.html'},
],
verbose: true,
disableDotRule: true,
},
devMiddleware: {
writeToDisk: true,
},
client: {
overlay: false,
},
...proxy({
localChrome: '/Users/syed/work/redhat/consoledot-integration/insights-chrome/build/',
useProxy: true,
env: 'stage-stable',
port: 1337,
appUrl: ['/settings/quay', '/beta/settings/quay'],
publicPath: '/apps/quay/',
proxyVerbose: true,
onBeforeSetupMiddleware: ({chromePath}) => {
if (chromePath) {
const outputPath = pathTo('dist');
const template = fs.readFileSync(`${chromePath}/index.html`, {
encoding: 'utf-8',
});
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath);
}
fs.writeFileSync(`${outputPath}/index.html`, template);
}
},
useDevBuild: true,
routes: {
'/config/chrome': {
host: 'http://localhost:8003',
},
'/beta/config/chrome': {
host: 'http://localhost:8003',
},
'/api/chrome-service/v1/static/stable/stage/navigation': {
host: 'http://localhost:8003',
}
},
}),
},
};