Mirror of https://github.com/quay/quay.git
fix all the docstrings
@@ -2,24 +2,32 @@ from flask import _request_ctx_stack
 def get_authenticated_context():
-    """ Returns the auth context for the current request context, if any. """
+    """
+    Returns the auth context for the current request context, if any.
+    """
     return getattr(_request_ctx_stack.top, "authenticated_context", None)


 def get_authenticated_user():
-    """ Returns the authenticated user, if any, or None if none. """
+    """
+    Returns the authenticated user, if any, or None if none.
+    """
     context = get_authenticated_context()
     return context.authed_user if context else None


 def get_validated_oauth_token():
-    """ Returns the authenticated and validated OAuth access token, if any, or None if none. """
+    """
+    Returns the authenticated and validated OAuth access token, if any, or None if none.
+    """
     context = get_authenticated_context()
     return context.authed_oauth_token if context else None


 def set_authenticated_context(auth_context):
-    """ Sets the auth context for the current request context to that given. """
+    """
+    Sets the auth context for the current request context to that given.
+    """
     ctx = _request_ctx_stack.top
     ctx.authenticated_context = auth_context
     return auth_context
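For orientation (not part of the commit), a hedged sketch of how these request-scoped helpers fit together; the `validated_context` argument stands in for a real ValidatedAuthContext:

# Hedged sketch: inside an active Flask request, one layer stores the
# context and later layers read it back.
from auth.auth_context import set_authenticated_context, get_authenticated_user

def handle_authenticated_request(validated_context):
    set_authenticated_context(validated_context)
    # Any code deeper in the same request can now ask who is authenticated.
    return get_authenticated_user()  # None for anonymous requests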
@@ -20,92 +20,116 @@ logger = logging.getLogger(__name__)
 @add_metaclass(ABCMeta)
 class AuthContext(object):
     """
     Interface that represents the current context of authentication.
     """

     @property
     @abstractmethod
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         pass

     @property
     @abstractmethod
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         pass

     @property
     @abstractmethod
     def authed_oauth_token(self):
-        """ Returns the authenticated OAuth token, if any. """
+        """
+        Returns the authenticated OAuth token, if any.
+        """
         pass

     @property
     @abstractmethod
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth or access token. Note that
-        this property will also return robot accounts.
-        """
+        """
+        Returns the authenticated user, whether directly, or via an OAuth or access token.
+
+        Note that this property will also return robot accounts.
+        """
         pass

     @property
     @abstractmethod
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         pass

     @property
     @abstractmethod
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         pass

     @property
     @abstractmethod
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         pass

     @property
     @abstractmethod
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         pass

     @abstractmethod
     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         pass

     @abstractmethod
     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         pass

     @abstractmethod
     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
-        """
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
+        """
         pass

     @property
     @abstractmethod
     def unique_key(self):
-        """ Returns a key that is unique to this auth context type and its data. For example, an
-        instance of the auth context type for the user might be a string of the form
-        `user-{user-uuid}`. Callers should treat this key as opaque and not rely on the contents
-        for anything besides uniqueness. This is typically used by callers when they'd like to
-        check cache but not hit the database to get a fully validated auth context.
-        """
+        """
+        Returns a key that is unique to this auth context type and its data.
+
+        For example, an instance of the auth context type for the user might be a string of the form
+        `user-{user-uuid}`. Callers should treat this key as opaque and not rely on the contents for
+        anything besides uniqueness. This is typically used by callers when they'd like to check
+        cache but not hit the database to get a fully validated auth context.
+        """
         pass


 class ValidatedAuthContext(AuthContext):
-    """ ValidatedAuthContext represents the loaded, authenticated and validated auth information
-    for the current request context.
-    """
+    """
+    ValidatedAuthContext represents the loaded, authenticated and validated auth information for the
+    current request context.
+    """

     def __init__(
         self,
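The `unique_key` docstring hints at the intended usage; as a hedged, standalone sketch (the cache shape here is an assumption, not Quay's actual cache):

# Hedged sketch: key a local cache on the opaque unique_key so repeated
# lookups skip the fully validated (database-backed) auth context.
cache = {}

def cached_description(auth_context):
    key = auth_context.unique_key  # e.g. "user-{user-uuid}"; treat as opaque
    if key not in cache:
        cache[key] = auth_context.description  # may hit the database
    return cache[key]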
@@ -133,7 +157,9 @@ class ValidatedAuthContext(AuthContext):

     @property
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         for kind in ContextEntityKind:
             if hasattr(self, kind.value) and getattr(self, kind.value):
                 return kind
@@ -142,9 +168,11 @@ class ValidatedAuthContext(AuthContext):

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth token. Note that this
-        will also return robot accounts.
-        """
+        """
+        Returns the authenticated user, whether directly, or via an OAuth token.
+
+        Note that this will also return robot accounts.
+        """
         authed_user = self._authed_user()
         if authed_user is not None and not authed_user.enabled:
             logger.warning("Attempt to reference a disabled user/robot: %s", authed_user.username)
@@ -170,17 +198,23 @@ class ValidatedAuthContext(AuthContext):

     @property
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         return not self.authed_user and not self.token and not self.signed_data

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         return bool(self.authed_user and not self.robot)

     @property
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         if self.oauthtoken:
             scope_set = scopes_from_scope_string(self.oauthtoken.scope)
             return QuayDeferredPermissionUser.for_user(self.oauthtoken.authorized_user, scope_set)
@@ -200,7 +234,9 @@ class ValidatedAuthContext(AuthContext):

     @property
     def entity_reference(self):
-        """ Returns the DB object reference for this context's entity. """
+        """
+        Returns the DB object reference for this context's entity.
+        """
         if self.entity_kind == ContextEntityKind.anonymous:
             return None

@@ -208,23 +244,31 @@ class ValidatedAuthContext(AuthContext):

     @property
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.description(self.entity_reference)

     @property
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.credential_username(self.entity_reference)

     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.analytics_id_and_public_metadata(self.entity_reference)

     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         # Save to the request context.
         set_authenticated_context(self)

@@ -238,9 +282,10 @@ class ValidatedAuthContext(AuthContext):
         return "%s-%s" % (signed_dict["entity_kind"], signed_dict.get("entity_reference", "(anon)"))

     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
-        """
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
+        """
         dict_data = {
             "version": 2,
             "entity_kind": self.entity_kind.value,
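To make "signed serialization" concrete, a hedged sketch using PyJWT; the secret, algorithm, and helper names are illustrative assumptions, not what Quay itself does:

import jwt  # PyJWT; an illustrative choice, not necessarily Quay's

def sign_context_dict(dict_data, secret="illustrative-secret"):
    # Wrap the serialized auth context in a signed, tamper-evident JWT.
    return jwt.encode(dict_data, secret, algorithm="HS256")

def verify_context_dict(token, secret="illustrative-secret"):
    # Decoding raises if the signature does not match.
    return jwt.decode(token, secret, algorithms=["HS256"])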
@@ -288,11 +333,14 @@ class ValidatedAuthContext(AuthContext):


 class SignedAuthContext(AuthContext):
-    """ SignedAuthContext represents an auth context loaded from a signed token of some kind,
-    such as a JWT. Unlike ValidatedAuthContext, SignedAuthContext operates lazily, only loading
-    the actual {user, robot, token, etc} when requested. This allows registry operations that
-    only need to check if *some* entity is present to do so, without hitting the database.
-    """
+    """
+    SignedAuthContext represents an auth context loaded from a signed token of some kind, such as a
+    JWT.
+
+    Unlike ValidatedAuthContext, SignedAuthContext operates lazily, only loading the actual {user,
+    robot, token, etc} when requested. This allows registry operations that only need to check if
+    *some* entity is present to do so, without hitting the database.
+    """

     def __init__(self, kind, signed_data, v1_dict_format):
         self.kind = kind
@@ -325,9 +373,10 @@ class SignedAuthContext(AuthContext):

     @lru_cache(maxsize=1)
     def _get_validated(self):
-        """ Returns a ValidatedAuthContext for this signed context, resolving all the necessary
-        references.
-        """
+        """
+        Returns a ValidatedAuthContext for this signed context, resolving all the necessary
+        references.
+        """
         if not self.v1_dict_format:
             if self.kind == ContextEntityKind.anonymous:
                 return ValidatedAuthContext()
@@ -390,19 +439,25 @@ class SignedAuthContext(AuthContext):

     @property
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         return self.kind

     @property
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         return self.kind == ContextEntityKind.anonymous

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth or access token. Note that
-        this property will also return robot accounts.
-        """
+        """
+        Returns the authenticated user, whether directly, or via an OAuth or access token.
+
+        Note that this property will also return robot accounts.
+        """
         if self.kind == ContextEntityKind.anonymous:
             return None

@@ -417,7 +472,9 @@ class SignedAuthContext(AuthContext):

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         if self.kind == ContextEntityKind.anonymous:
             return False

@@ -425,29 +482,40 @@ class SignedAuthContext(AuthContext):

     @property
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         return self._get_validated().identity

     @property
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         return self._get_validated().description

     @property
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         return self._get_validated().credential_username

     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         return self._get_validated().analytics_id_and_public_metadata()

     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         return self._get_validated().apply_to_request_context()

     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
-        """
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
+        """
         return self.signed_data
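The laziness described above comes from memoizing the expensive resolution step behind cheap accessors; a hedged, self-contained sketch of the same pattern (not Quay code):

from functools import lru_cache

class LazyResolver(object):
    # Standalone sketch: the signed payload stays cheap to inspect, and the
    # expensive resolution runs at most once, on first access.
    def __init__(self, signed_data):
        self.signed_data = signed_data

    @lru_cache(maxsize=1)
    def _resolve(self):
        print("resolving (this is where the database would be hit)")
        return {"user": self.signed_data.get("entity_reference")}

    @property
    def user(self):
        return self._resolve()["user"]

resolver = LazyResolver({"entity_kind": "user", "entity_reference": "abc-123"})
print(resolver.user)  # resolves once
print(resolver.user)  # served from the lru_cache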
@@ -10,19 +10,23 @@ logger = logging.getLogger(__name__)


 def has_basic_auth(username):
-    """ Returns true if a basic auth header exists with a username and password pair that validates
-    against the internal authentication system. Returns True on full success and False on any
-    failure (missing header, invalid header, invalid credentials, etc).
-    """
+    """
+    Returns true if a basic auth header exists with a username and password pair that validates
+    against the internal authentication system.
+
+    Returns True on full success and False on any failure (missing header, invalid header, invalid
+    credentials, etc).
+    """
     auth_header = request.headers.get("authorization", "")
     result = validate_basic_auth(auth_header)
     return result.has_nonrobot_user and result.context.user.username == username


 def validate_basic_auth(auth_header):
-    """ Validates the specified basic auth header, returning whether its credentials point
-    to a valid user or token.
-    """
+    """
+    Validates the specified basic auth header, returning whether its credentials point to a valid
+    user or token.
+    """
     if not auth_header:
         return ValidateResult(AuthKind.basic, missing=True)

@@ -41,8 +45,9 @@ def validate_basic_auth(auth_header):


 def _parse_basic_auth_header(auth):
-    """ Parses the given basic auth header, returning the credentials found inside.
-    """
+    """
+    Parses the given basic auth header, returning the credentials found inside.
+    """
     normalized = [part.strip() for part in auth.split(" ") if part]
     if normalized[0].lower() != "basic" or len(normalized) != 2:
         return None, "Invalid basic auth header"
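For reference, the header these functions expect is the standard HTTP Basic scheme: base64 over `username:password`. A small, self-contained sketch (the credentials are invented):

import base64

def make_basic_auth_header(username, password):
    # Build the standard "Authorization: Basic <base64(user:pass)>" value
    # that validate_basic_auth() would then parse and check.
    encoded = base64.b64encode(("%s:%s" % (username, password)).encode("utf-8"))
    return "Basic " + encoded.decode("ascii")

print(make_basic_auth_header("quayuser", "hunter2"))
# -> Basic cXVheXVzZXI6aHVudGVyMg==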
@@ -12,10 +12,12 @@ from auth.credential_consts import (


 class ContextEntityKind(Enum):
-    """ Defines the various kinds of entities in an auth context. Note that the string values of
-    these fields *must* match the names of the fields in the ValidatedAuthContext class, as
-    we fill them in directly based on the string names here.
-    """
+    """
+    Defines the various kinds of entities in an auth context.
+
+    Note that the string values of these fields *must* match the names of the fields in the
+    ValidatedAuthContext class, as we fill them in directly based on the string names here.
+    """

     anonymous = "anonymous"
     user = "user"
@@ -29,35 +31,45 @@ class ContextEntityKind(Enum):
 @add_metaclass(ABCMeta)
 class ContextEntityHandler(object):
     """
     Interface that represents handling specific kinds of entities under an auth context.
     """

     @abstractmethod
     def credential_username(self, entity_reference):
-        """ Returns the username to create credentials for this entity, if any. """
+        """
+        Returns the username to create credentials for this entity, if any.
+        """
         pass

     @abstractmethod
     def get_serialized_entity_reference(self, entity_reference):
-        """ Returns the entity reference for this kind of auth context, serialized into a form that can
-        be placed into a JSON object and put into a JWT. This is typically a DB UUID or another
-        unique identifier for the object in the DB.
-        """
+        """
+        Returns the entity reference for this kind of auth context, serialized into a form that can
+        be placed into a JSON object and put into a JWT.
+
+        This is typically a DB UUID or another unique identifier for the object in the DB.
+        """
         pass

     @abstractmethod
     def deserialize_entity_reference(self, serialized_entity_reference):
-        """ Returns the deserialized reference to the entity in the database, or None if none. """
+        """
+        Returns the deserialized reference to the entity in the database, or None if none.
+        """
         pass

     @abstractmethod
     def description(self, entity_reference):
-        """ Returns a human-readable and *public* description of the current entity. """
+        """
+        Returns a human-readable and *public* description of the current entity.
+        """
         pass

     @abstractmethod
     def analytics_id_and_public_metadata(self, entity_reference):
-        """ Returns the analyitics ID and a dict of public metadata for the current entity. """
+        """
+        Returns the analytics ID and a dict of public metadata for the current entity.
+        """
         pass

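The *must match* contract above is what lets ValidatedAuthContext.entity_kind find its entity generically via `getattr`; a hedged, standalone sketch of that lookup (the class names here are invented):

from enum import Enum

class Kind(Enum):
    # Values deliberately match attribute names on the holder class below.
    user = "user"
    robot = "robot"

class Holder(object):
    def __init__(self, user=None, robot=None):
        self.user = user
        self.robot = robot

    @property
    def entity_kind(self):
        # Same generic lookup ValidatedAuthContext.entity_kind performs.
        for kind in Kind:
            if getattr(self, kind.value, None):
                return kind

print(Holder(robot="builder+robot").entity_kind)  # Kind.robot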
@@ -9,7 +9,9 @@ logger = logging.getLogger(__name__)


 def validate_session_cookie(auth_header_unusued=None):
-    """ Attempts to load a user from a session cookie. """
+    """
+    Attempts to load a user from a session cookie.
+    """
     if current_user.is_anonymous:
         return ValidateResult(AuthKind.cookie, missing=True)

@@ -27,7 +27,9 @@ class CredentialKind(Enum):


 def validate_credentials(auth_username, auth_password_or_token):
-    """ Validates a pair of auth username and password/token credentials. """
+    """
+    Validates a pair of auth username and password/token credentials.
+    """
     # Check for access tokens.
     if auth_username == ACCESS_TOKEN_USERNAME:
         logger.debug("Found credentials for access token")
@@ -23,9 +23,11 @@ authentication_count = Counter(


 def _auth_decorator(pass_result=False, handlers=None):
-    """ Builds an auth decorator that runs the given handlers and, if any return successfully,
-    sets up the auth context. The wrapped function will be invoked *regardless of success or
-    failure of the auth handler(s)*
-    """
+    """
+    Builds an auth decorator that runs the given handlers and, if any return successfully, sets up
+    the auth context.
+
+    The wrapped function will be invoked *regardless of success or failure of the auth handler(s)*.
+    """

     def processor(func):
@@ -75,8 +77,10 @@ process_basic_auth_no_pass = _auth_decorator(handlers=[validate_basic_auth])


 def require_session_login(func):
-    """ Decorates a function and ensures that a valid session cookie exists or a 401 is raised. If
-    a valid session cookie does exist, the authenticated user and identity are also set.
-    """
+    """
+    Decorates a function and ensures that a valid session cookie exists or a 401 is raised.
+
+    If a valid session cookie does exist, the authenticated user and identity are also set.
+    """

     @wraps(func)
@@ -95,9 +99,11 @@ def require_session_login(func):


 def extract_namespace_repo_from_session(func):
-    """ Extracts the namespace and repository name from the current session (which must exist)
-    and passes them into the decorated function as the first and second arguments. If the
-    session doesn't exist or does not contain these arugments, a 400 error is raised.
-    """
+    """
+    Extracts the namespace and repository name from the current session (which must exist) and
+    passes them into the decorated function as the first and second arguments.
+
+    If the session doesn't exist or does not contain these arguments, a 400 error is raised.
+    """

     @wraps(func)
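A hedged, self-contained sketch of the decorator-factory shape `_auth_decorator` describes: handlers run first, and the wrapped function is invoked regardless of their outcome (the handler protocol here is simplified to a dict):

from functools import wraps

def auth_decorator(handlers):
    # Sketch of the pattern: try each handler; on the first success, apply
    # its context; either way, invoke the wrapped function.
    def processor(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for handler in handlers:
                result = handler()
                if result.get("ok"):
                    print("auth context set by", handler.__name__)
                    break
            return func(*args, **kwargs)
        return wrapper
    return processor

def always_fails():
    return {"ok": False}

@auth_decorator(handlers=[always_fails])
def view():
    return "view ran anyway"

print(view())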
@@ -10,9 +10,10 @@ logger = logging.getLogger(__name__)


 def validate_bearer_auth(auth_header):
-    """ Validates an OAuth token found inside a basic auth `Bearer` token, returning whether it
-    points to a valid OAuth token.
-    """
+    """
+    Validates an OAuth token found inside a basic auth `Bearer` token, returning whether it points
+    to a valid OAuth token.
+    """
     if not auth_header:
         return ValidateResult(AuthKind.oauth, missing=True)

@@ -26,8 +27,9 @@ def validate_bearer_auth(auth_header):


 def validate_oauth_token(token):
-    """ Validates the specified OAuth token, returning whether it points to a valid OAuth token.
-    """
+    """
+    Validates the specified OAuth token, returning whether it points to a valid OAuth token.
+    """
     validated = model.oauth.validate_access_token(token)
     if not validated:
         logger.warning("OAuth access token could not be validated: %s", token)
@@ -112,9 +112,10 @@ class QuayDeferredPermissionUser(Identity):
         return self._translate_role_for_scopes(USER_ROLES, SCOPE_MAX_USER_ROLES, role)

     def _populate_user_provides(self, user_object):
-        """ Populates the provides that naturally apply to a user, such as being the admin of
-        their own namespace.
-        """
+        """
+        Populates the provides that naturally apply to a user, such as being the admin of their own
+        namespace.
+        """

         # Add the user specific permissions, only for non-oauth permission
         user_grant = _UserNeed(user_object.username, self._user_role_for_scopes("admin"))
@@ -142,9 +143,11 @@ class QuayDeferredPermissionUser(Identity):
         self.provides.add(_SuperUserNeed())

     def _populate_namespace_wide_provides(self, user_object, namespace_filter):
-        """ Populates the namespace-wide provides for a particular user under a particular namespace.
-        This method does *not* add any provides for specific repositories.
-        """
+        """
+        Populates the namespace-wide provides for a particular user under a particular namespace.
+
+        This method does *not* add any provides for specific repositories.
+        """

         for team in model.permission.get_org_wide_permissions(
             user_object, org_filter=namespace_filter
@@ -169,7 +172,9 @@ class QuayDeferredPermissionUser(Identity):
         self.provides.add(team_grant)

     def _populate_repository_provides(self, user_object, namespace_filter, repository_name):
-        """ Populates the repository-specific provides for a particular user and repository. """
+        """
+        Populates the repository-specific provides for a particular user and repository.
+        """

         if namespace_filter and repository_name:
             permissions = model.permission.get_user_repository_permissions(
@@ -232,7 +237,9 @@ class QuayDeferredPermissionUser(Identity):


 class QuayPermission(Permission):
-    """ Base for all permissions in Quay. """
+    """
+    Base for all permissions in Quay.
+    """

     namespace = None
     repo_name = None
@@ -57,7 +57,9 @@ class InvalidJWTException(Exception):


 def get_auth_headers(repository=None, scopes=None):
-    """ Returns a dictionary of headers for auth responses. """
+    """
+    Returns a dictionary of headers for auth responses.
+    """
     headers = {}
     realm_auth_path = url_for("v2.generate_registry_jwt")
     authenticate = 'Bearer realm="{0}{1}",service="{2}"'.format(
@@ -76,10 +78,13 @@ def get_auth_headers(repository=None, scopes=None):


 def identity_from_bearer_token(bearer_header):
-    """ Process a bearer header and return the loaded identity, or raise InvalidJWTException if an
-    identity could not be loaded. Expects tokens and grants in the format of the Docker registry
-    v2 auth spec: https://docs.docker.com/registry/spec/auth/token/
-    """
+    """
+    Process a bearer header and return the loaded identity, or raise InvalidJWTException if an
+    identity could not be loaded.
+
+    Expects tokens and grants in the format of the Docker registry v2 auth spec:
+    https://docs.docker.com/registry/spec/auth/token/
+    """
     logger.debug("Validating auth header: %s", bearer_header)

     try:
@@ -121,9 +126,11 @@ def identity_from_bearer_token(bearer_header):


 def process_registry_jwt_auth(scopes=None):
-    """ Processes the registry JWT auth token found in the authorization header. If none found,
-    no error is returned. If an invalid token is found, raises a 401.
-    """
+    """
+    Processes the registry JWT auth token found in the authorization header.
+
+    If none found, no error is returned. If an invalid token is found, raises a 401.
+    """

     def inner(func):
         @wraps(func)
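For context, the challenge that `get_auth_headers` assembles follows the Docker registry v2 token spec linked above; a hedged sketch with invented realm/service values:

# Sketch of the Bearer challenge shape from the Docker registry v2 auth
# spec; the realm/service values here are invented for illustration.
challenge = 'Bearer realm="{0}{1}",service="{2}"'.format(
    "https://quay.example.com", "/v2/auth", "quay.example.com"
)
print(challenge)
# Bearer realm="https://quay.example.com/v2/auth",service="quay.example.com"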
@@ -166,9 +166,9 @@ def validate_scope_string(scopes):


 def is_subset_string(full_string, expected_string):
-    """ Returns true if the scopes found in expected_string are also found
-    in full_string.
-    """
+    """
+    Returns true if the scopes found in expected_string are also found in full_string.
+    """
     full_scopes = scopes_from_scope_string(full_string)
     if not full_scopes:
         return False
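A hedged, standalone illustration of the subset relation `is_subset_string` checks, using plain whitespace-separated sets in place of Quay's parsed scope objects:

# Standalone stand-in for the scope-subset check, using plain sets
# (Quay's real implementation parses scope strings into scope objects).
def is_subset(full_string, expected_string):
    full = set(full_string.split())
    if not full:
        return False
    return set(expected_string.split()).issubset(full)

print(is_subset("repo:read repo:write", "repo:read"))  # True
print(is_subset("repo:read", "repo:read repo:write"))  # False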
@@ -12,7 +12,9 @@ SIGNATURE_PREFIX = "sigv2="


 def generate_signed_token(grants, user_context):
-    """ Generates a signed session token with the given grants and user context. """
+    """
+    Generates a signed session token with the given grants and user context.
+    """
     ser = SecureCookieSessionInterface().get_signing_serializer(app)
     data_to_sign = {
         "grants": grants,
@@ -24,9 +26,10 @@ def generate_signed_token(grants, user_context):


 def validate_signed_grant(auth_header):
-    """ Validates a signed grant as found inside an auth header and returns whether it points to
-    a valid grant.
-    """
+    """
+    Validates a signed grant as found inside an auth header and returns whether it points to a valid
+    grant.
+    """
     if not auth_header:
         return ValidateResult(AuthKind.signed_grant, missing=True)

@@ -14,7 +14,9 @@ class AuthKind(Enum):


 class ValidateResult(object):
-    """ A result of validating auth in one form or another. """
+    """
+    A result of validating auth in one form or another.
+    """

     def __init__(
         self,
@@ -47,11 +49,15 @@ class ValidateResult(object):
         return self.tuple() == other.tuple()

     def apply_to_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         self.context.apply_to_request_context()

     def with_kind(self, kind):
-        """ Returns a copy of this result, but with the kind replaced. """
+        """
+        Returns a copy of this result, but with the kind replaced.
+        """
         result = ValidateResult(kind, missing=self.missing, error_message=self.error_message)
         result.context = self.context
         return result
@@ -65,15 +71,21 @@ class ValidateResult(object):

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth token. """
+        """
+        Returns the authenticated user, whether directly, or via an OAuth token.
+        """
         return self.context.authed_user

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         return self.context.has_nonrobot_user

     @property
     def auth_valid(self):
-        """ Returns whether authentication successfully occurred. """
+        """
+        Returns whether authentication successfully occurred.
+        """
         return self.context.entity_kind != ContextEntityKind.anonymous
@@ -24,7 +24,9 @@ class Avatar(object):


 class BaseAvatar(object):
-    """ Base class for all avatar implementations. """
+    """
+    Base class for all avatar implementations.
+    """

     def __init__(self, preferred_url_scheme, colors, http_client):
         self.preferred_url_scheme = preferred_url_scheme
@@ -32,9 +34,10 @@ class BaseAvatar(object):
         self.http_client = http_client

     def get_mail_html(self, name, email_or_id, size=16, kind="user"):
-        """ Returns the full HTML and CSS for viewing the avatar of the given name and email address,
-        with an optional size.
-        """
+        """
+        Returns the full HTML and CSS for viewing the avatar of the given name and email address,
+        with an optional size.
+        """
         data = self.get_data(name, email_or_id, kind)
         url = self._get_url(data["hash"], size) if kind != "team" else None
         font_size = size - 6
@@ -110,12 +113,16 @@ class BaseAvatar(object):
         return {"name": name, "hash": hash_value, "color": hash_color, "kind": kind}

     def _get_url(self, hash_value, size):
-        """ Returns the URL for displaying the overlay avatar. """
+        """
+        Returns the URL for displaying the overlay avatar.
+        """
         return None


 class GravatarAvatar(BaseAvatar):
-    """ Avatar system that uses gravatar for generating avatars. """
+    """
+    Avatar system that uses gravatar for generating avatars.
+    """

     def _get_url(self, hash_value, size=16):
         return "%s://www.gravatar.com/avatar/%s?d=404&size=%s" % (
@@ -126,7 +133,9 @@ class GravatarAvatar(BaseAvatar):


 class LocalAvatar(BaseAvatar):
-    """ Avatar system that uses the local system for generating avatars. """
+    """
+    Avatar system that uses the local system for generating avatars.
+    """

     pass

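GravatarAvatar plugs a hash into the URL template shown above; as a hedged aside, Gravatar's documented scheme hashes the trimmed, lowercased email with MD5:

import hashlib

def gravatar_hash(email_or_id):
    # Gravatar's documented scheme: md5 of the trimmed, lowercased address.
    return hashlib.md5(email_or_id.strip().lower().encode("utf-8")).hexdigest()

print("https://www.gravatar.com/avatar/%s?d=404&size=16" % gravatar_hash("user@example.com"))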
boot.py
@@ -69,8 +69,8 @@ def _verify_service_key():

 def setup_jwt_proxy():
     """
     Creates a service key for quay to use in the jwtproxy and generates the JWT proxy configuration.
     """
     if os.path.exists(os.path.join(CONF_DIR, "jwtproxy_conf.yaml")):
         # Proxy is already setup. Make sure the service key is still valid.
         quay_key_id = _verify_service_key()
@@ -6,16 +6,17 @@ from trollius import get_event_loop, coroutine

 def wrap_with_threadpool(obj, worker_threads=1):
     """
     Wraps a class in an async executor so that it can be safely used in an event loop like trollius.
     """
     async_executor = ThreadPoolExecutor(worker_threads)
     return AsyncWrapper(obj, executor=async_executor), async_executor


 class AsyncWrapper(object):
-    """ Wrapper class which will transform a syncronous library to one that can be used with
-    trollius coroutines.
-    """
+    """
+    Wrapper class which will transform a synchronous library to one that can be used with trollius
+    coroutines.
+    """

     def __init__(self, delegate, loop=None, executor=None):
         self._loop = loop if loop is not None else get_event_loop()
@@ -29,9 +30,10 @@ class AsyncWrapper(object):
             return delegate_attr

         def wrapper(*args, **kwargs):
-            """ Wraps the delegate_attr with primitives that will transform sync calls to ones shelled
-            out to a thread pool.
-            """
+            """
+            Wraps the delegate_attr with primitives that will transform sync calls to ones shelled
+            out to a thread pool.
+            """
             callable_delegate_attr = partial(delegate_attr, *args, **kwargs)
             return self._loop.run_in_executor(self._executor, callable_delegate_attr)

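The wrapper above shells synchronous calls out to a thread pool via `run_in_executor`; a hedged, self-contained sketch of the same pattern on modern asyncio (the diff itself targets the older trollius backport):

import asyncio
import time
from concurrent.futures import ThreadPoolExecutor
from functools import partial

def slow_sync_call(x):
    time.sleep(0.1)  # stands in for a blocking library call
    return x * 2

async def main():
    loop = asyncio.get_running_loop()
    executor = ThreadPoolExecutor(1)
    # Same shape as AsyncWrapper.wrapper: bind the args, then run in the pool.
    result = await loop.run_in_executor(executor, partial(slow_sync_call, 21))
    print(result)  # 42

asyncio.run(main())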
@@ -2,7 +2,9 @@ from autobahn.asyncio.wamp import ApplicationSession


 class BaseComponent(ApplicationSession):
-    """ Base class for all registered component sessions in the server. """
+    """
+    Base class for all registered component sessions in the server.
+    """

     def __init__(self, config, **kwargs):
         ApplicationSession.__init__(self, config)
@@ -35,7 +35,9 @@ logger = logging.getLogger(__name__)


 class ComponentStatus(object):
-    """ ComponentStatus represents the possible states of a component. """
+    """
+    ComponentStatus represents the possible states of a component.
+    """

     JOINING = "joining"
     WAITING = "waiting"
@@ -45,7 +47,9 @@ class ComponentStatus(object):


 class BuildComponent(BaseComponent):
-    """ An application session component which conducts one (or more) builds. """
+    """
+    An application session component which conducts one (or more) builds.
+    """

     def __init__(self, config, realm=None, token=None, **kwargs):
         self.expected_token = token
@@ -85,7 +89,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def start_build(self, build_job):
-        """ Starts a build. """
+        """
+        Starts a build.
+        """
         if self._component_status not in (ComponentStatus.WAITING, ComponentStatus.RUNNING):
             logger.debug(
                 "Could not start build for component %s (build %s, worker version: %s): %s",
@@ -191,7 +197,9 @@ class BuildComponent(BaseComponent):
         logger.debug("With Arguments: %s", build_arguments)

         def build_complete_callback(result):
-            """ This function is used to execute a coroutine as the callback. """
+            """
+            This function is used to execute a coroutine as the callback.
+            """
            trollius.ensure_future(self._build_complete(result))

         self.call("io.quay.builder.build", **build_arguments).add_done_callback(
@@ -218,14 +226,18 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _commit_sha(build_config):
-        """ Determines whether the metadata is using an old schema or not and returns the commit. """
+        """
+        Determines whether the metadata is using an old schema or not and returns the commit.
+        """
         commit_sha = build_config["trigger_metadata"].get("commit", "")
         old_commit_sha = build_config["trigger_metadata"].get("commit_sha", "")
         return commit_sha or old_commit_sha

     @staticmethod
     def name_and_path(subdir):
-        """ Returns the dockerfile path and name """
+        """
+        Returns the dockerfile path and name.
+        """
         if subdir.endswith("/"):
             subdir += "Dockerfile"
         elif not subdir.endswith("Dockerfile"):
@@ -234,7 +246,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _total_completion(statuses, total_images):
-        """ Returns the current amount completion relative to the total completion of a build. """
+        """
+        Returns the current amount of completion relative to the total completion of a build.
+        """
         percentage_with_sizes = float(len(statuses.values())) / total_images
         sent_bytes = sum([status["current"] for status in statuses.values()])
         total_bytes = sum([status["total"] for status in statuses.values()])
@@ -242,7 +256,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _process_pushpull_status(status_dict, current_phase, docker_data, images):
-        """ Processes the status of a push or pull by updating the provided status_dict and images. """
+        """
+        Processes the status of a push or pull by updating the provided status_dict and images.
+        """
         if not docker_data:
             return

@@ -271,7 +287,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _on_log_message(self, phase, json_data):
-        """ Tails log messages and updates the build status. """
+        """
+        Tails log messages and updates the build status.
+        """
         # Update the heartbeat.
         self._last_heartbeat = datetime.datetime.utcnow()

@@ -355,7 +373,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_failure(self, error_message, exception=None):
-        """ Handles and logs a failed build. """
+        """
+        Handles and logs a failed build.
+        """
         yield From(
             self._build_status.set_error(
                 error_message, {"internal_error": str(exception) if exception else None}
@@ -370,7 +390,11 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_complete(self, result):
-        """ Wraps up a completed build. Handles any errors and calls self._build_finished. """
+        """
+        Wraps up a completed build.
+
+        Handles any errors and calls self._build_finished.
+        """
         build_id = self._current_job.repo_build.uuid

         try:
@@ -451,7 +475,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_finished(self, job_status):
-        """ Alerts the parent that a build has completed and sets the status back to running. """
+        """
+        Alerts the parent that a build has completed and sets the status back to running.
+        """
         yield From(self.parent_manager.job_completed(self._current_job, job_status, self))

         # Set the component back to a running state.
@@ -459,7 +485,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _ping():
-        """ Ping pong. """
+        """
+        Ping pong.
+        """
         return "pong"

     @trollius.coroutine
@@ -499,7 +527,9 @@ class BuildComponent(BaseComponent):
         self._component_status = phase

     def _on_heartbeat(self):
-        """ Updates the last known heartbeat. """
+        """
+        Updates the last known heartbeat.
+        """
         if self._component_status == ComponentStatus.TIMED_OUT:
             return

@@ -508,10 +538,12 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _heartbeat(self):
-        """ Coroutine that runs every HEARTBEAT_TIMEOUT seconds, both checking the worker's heartbeat
-        and updating the heartbeat in the build status dictionary (if applicable). This allows
-        the build system to catch crashes from either end.
-        """
+        """
+        Coroutine that runs every HEARTBEAT_TIMEOUT seconds, both checking the worker's heartbeat
+        and updating the heartbeat in the build status dictionary (if applicable).
+
+        This allows the build system to catch crashes from either end.
+        """
         yield From(trollius.sleep(INITIAL_TIMEOUT))

         while True:
@@ -2,9 +2,11 @@ import re


 def extract_current_step(current_status_string):
-    """ Attempts to extract the current step numeric identifier from the given status string. Returns the step
-    number or None if none.
-    """
+    """
+    Attempts to extract the current step numeric identifier from the given status string.
+
+    Returns the step number or None if none.
+    """
     # Older format: `Step 12 :`
     # Newer format: `Step 4/13 :`
     step_increment = re.search(r"Step ([0-9]+)/([0-9]+) :", current_status_string)
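A hedged sketch of what the two formats named in those comments look like to a step-extracting regex (this standalone version handles both, unlike the single `re.search` line shown):

import re

def extract_step(status):
    # Newer format: "Step 4/13 :"; older format: "Step 12 :".
    match = re.search(r"Step ([0-9]+)(?:/[0-9]+)? :", status)
    return int(match.group(1)) if match else None

print(extract_step("Step 4/13 : RUN make"))   # 4
print(extract_step("Step 12 : COPY . /app"))  # 12
print(extract_step("Pulling base image"))     # None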
@@ -2,7 +2,9 @@ from data.database import BUILD_PHASE


 class BuildJobResult(object):
-    """ Build job result enum """
+    """
+    Build job result enum.
+    """

     INCOMPLETE = "incomplete"
     COMPLETE = "complete"
@@ -10,7 +12,9 @@ class BuildJobResult(object):


 class BuildServerStatus(object):
-    """ Build server status enum """
+    """
+    Build server status enum.
+    """

     STARTING = "starting"
     RUNNING = "running"
@@ -14,13 +14,17 @@ logger = logging.getLogger(__name__)


 class BuildJobLoadException(Exception):
-    """ Exception raised if a build job could not be instantiated for some reason. """
+    """
+    Exception raised if a build job could not be instantiated for some reason.
+    """

     pass


 class BuildJob(object):
-    """ Represents a single in-progress build job. """
+    """
+    Represents a single in-progress build job.
+    """

     def __init__(self, job_item):
         self.job_item = job_item
@@ -56,17 +60,23 @@ class BuildJob(object):

     @property
     def build_uuid(self):
-        """ Returns the unique UUID for this build job. """
+        """
+        Returns the unique UUID for this build job.
+        """
         return self.job_details["build_uuid"]

     @property
     def namespace(self):
-        """ Returns the namespace under which this build is running. """
+        """
+        Returns the namespace under which this build is running.
+        """
         return self.repo_build.repository.namespace_user.username

     @property
     def repo_name(self):
-        """ Returns the name of the repository under which this build is running. """
+        """
+        Returns the name of the repository under which this build is running.
+        """
         return self.repo_build.repository.name

     @property
@@ -74,7 +84,9 @@ class BuildJob(object):
         return self._load_repo_build()

     def get_build_package_url(self, user_files):
-        """ Returns the URL of the build package for this build, if any or empty string if none. """
+        """
+        Returns the URL of the build package for this build, if any, or empty string if none.
+        """
         archive_url = self.build_config.get("archive_url", None)
         if archive_url:
             return archive_url
@@ -88,7 +100,9 @@ class BuildJob(object):

     @property
     def pull_credentials(self):
-        """ Returns the pull credentials for this job, or None if none. """
+        """
+        Returns the pull credentials for this job, or None if none.
+        """
         return self.job_details.get("pull_credentials")

     @property
@@ -102,7 +116,9 @@ class BuildJob(object):
         )

     def determine_cached_tag(self, base_image_id=None, cache_comments=None):
-        """ Returns the tag to pull to prime the cache or None if none. """
+        """
+        Returns the tag to pull to prime the cache, or None if none.
+        """
         cached_tag = self._determine_cached_tag_by_tag()
         logger.debug(
             "Determined cached tag %s for %s: %s", cached_tag, base_image_id, cache_comments
@@ -110,9 +126,12 @@ class BuildJob(object):
         return cached_tag

     def _determine_cached_tag_by_tag(self):
-        """ Determines the cached tag by looking for one of the tags being built, and seeing if it
-        exists in the repository. This is a fallback for when no comment information is available.
-        """
+        """
+        Determines the cached tag by looking for one of the tags being built, and seeing if it
+        exists in the repository.
+
+        This is a fallback for when no comment information is available.
+        """
         with UseThenDisconnect(app.config):
             tags = self.build_config.get("docker_tags", ["latest"])
             repository = RepositoryReference.for_repo_obj(self.repo_build.repository)
@@ -128,7 +147,9 @@ class BuildJob(object):


 class BuildJobNotifier(object):
-    """ A class for sending notifications to a job that only relies on the build_uuid """
+    """
+    A class for sending notifications to a job that only relies on the build_uuid.
+    """

     def __init__(self, build_uuid):
         self.build_uuid = build_uuid
@@ -13,7 +13,9 @@ logger = logging.getLogger(__name__)


 class StatusHandler(object):
-    """ Context wrapper for writing status to build logs. """
+    """
+    Context wrapper for writing status to build logs.
+    """

     def __init__(self, build_logs, repository_build_uuid):
         self._current_phase = None
@@ -1,5 +1,7 @@
 class WorkerError(object):
-    """ Helper class which represents errors raised by a build worker. """
+    """
+    Helper class which represents errors raised by a build worker.
+    """

     def __init__(self, error_code, base_message=None):
         self._error_code = error_code
@@ -2,7 +2,9 @@ from trollius import coroutine


 class BaseManager(object):
-    """ Base for all worker managers. """
+    """
+    Base for all worker managers.
+    """

     def __init__(
         self,
@@ -22,59 +24,77 @@ class BaseManager(object):

     @coroutine
     def job_heartbeat(self, build_job):
-        """ Method invoked to tell the manager that a job is still running. This method will be called
-        every few minutes. """
+        """
+        Method invoked to tell the manager that a job is still running.
+
+        This method will be called every few minutes.
+        """
         self.job_heartbeat_callback(build_job)

     def overall_setup_time(self):
-        """ Returns the number of seconds that the build system should wait before allowing the job
-        to be picked up again after called 'schedule'.
-        """
+        """
+        Returns the number of seconds that the build system should wait before allowing the job to
+        be picked up again after 'schedule' has been called.
+        """
         raise NotImplementedError

     def shutdown(self):
-        """ Indicates that the build controller server is in a shutdown state and that no new jobs
-        or workers should be performed. Existing workers should be cleaned up once their jobs
-        have completed
-        """
+        """
+        Indicates that the build controller server is in a shutdown state and that no new jobs or
+        workers should be started.
+
+        Existing workers should be cleaned up once their jobs have completed.
+        """
         raise NotImplementedError

     @coroutine
     def schedule(self, build_job):
-        """ Schedules a queue item to be built. Returns a 2-tuple with (True, None) if the item was
-        properly scheduled and (False, a retry timeout in seconds) if all workers are busy or an
-        error occurs.
-        """
+        """
+        Schedules a queue item to be built.
+
+        Returns a 2-tuple with (True, None) if the item was properly scheduled and (False, a retry
+        timeout in seconds) if all workers are busy or an error occurs.
+        """
         raise NotImplementedError

     def initialize(self, manager_config):
-        """ Runs any initialization code for the manager. Called once the server is in a ready state.
-        """
+        """
+        Runs any initialization code for the manager.
+
+        Called once the server is in a ready state.
+        """
         raise NotImplementedError

     @coroutine
     def build_component_ready(self, build_component):
-        """ Method invoked whenever a build component announces itself as ready.
-        """
+        """
+        Method invoked whenever a build component announces itself as ready.
+        """
         raise NotImplementedError

     def build_component_disposed(self, build_component, timed_out):
-        """ Method invoked whenever a build component has been disposed. The timed_out boolean indicates
-        whether the component's heartbeat timed out.
-        """
+        """
+        Method invoked whenever a build component has been disposed.
+
+        The timed_out boolean indicates whether the component's heartbeat timed out.
+        """
         raise NotImplementedError

     @coroutine
     def job_completed(self, build_job, job_status, build_component):
-        """ Method invoked once a job_item has completed, in some manner. The job_status will be
-        one of: incomplete, error, complete. Implementations of this method should call coroutine
-        self.job_complete_callback with a status of Incomplete if they wish for the job to be
-        automatically requeued.
-        """
+        """
+        Method invoked once a job_item has completed, in some manner.
+
+        The job_status will be one of: incomplete, error, complete. Implementations of this method
+        should call coroutine self.job_complete_callback with a status of Incomplete if they wish
+        for the job to be automatically requeued.
+        """
         raise NotImplementedError

     def num_workers(self):
-        """ Returns the number of active build workers currently registered. This includes those
-        that are currently busy and awaiting more work.
-        """
+        """
+        Returns the number of active build workers currently registered.
+
+        This includes both those that are currently busy and those awaiting more work.
+        """
         raise NotImplementedError
@@ -9,7 +9,9 @@ CANCELLERS = {"ephemeral": OrchestratorCanceller}


 class BuildCanceller(object):
-    """ A class to manage cancelling a build """
+    """
+    A class to manage cancelling a build.
+    """

     def __init__(self, app=None):
         self.build_manager_config = app.config.get("BUILD_MANAGER")
@@ -19,7 +21,9 @@ class BuildCanceller(object):
         self.handler = None

     def try_cancel_build(self, uuid):
-        """ A method to kill a running build """
+        """
+        A method to kill a running build.
+        """
         if self.handler is None:
             canceller = CANCELLERS.get(self.build_manager_config[0], NoopCanceller)
             self.handler = canceller(self.build_manager_config[1])
@@ -13,7 +13,9 @@ logger = logging.getLogger(__name__)


 class DynamicRegistrationComponent(BaseComponent):
-    """ Component session that handles dynamic registration of the builder components. """
+    """
+    Component session that handles dynamic registration of the builder components.
+    """

     def onConnect(self):
         self.join(REGISTRATION_REALM)
@@ -32,7 +34,9 @@ class DynamicRegistrationComponent(BaseComponent):


 class EnterpriseManager(BaseManager):
-    """ Build manager implementation for the Enterprise Registry. """
+    """
+    Build manager implementation for the Enterprise Registry.
+    """

     def __init__(self, *args, **kwargs):
         self.ready_components = set()
@@ -52,7 +56,9 @@ class EnterpriseManager(BaseManager):
         return 60

     def add_build_component(self):
-        """ Adds a new build component for an Enterprise Registry. """
+        """
+        Adds a new build component for an Enterprise Registry.
+        """
         # Generate a new unique realm ID for the build worker.
         realm = str(uuid.uuid4())
         new_component = self.register_component(realm, BuildComponent, token="")
@@ -61,7 +67,9 @@ class EnterpriseManager(BaseManager):

     @coroutine
     def schedule(self, build_job):
-        """ Schedules a build for an Enterprise Registry. """
+        """
+        Schedules a build for an Enterprise Registry.
+        """
         if self.shutting_down or not self.ready_components:
             raise Return(False, RETRY_TIMEOUT)

@@ -65,7 +65,9 @@ BuildInfo = namedtuple("BuildInfo", ["component", "build_job", "execution_id", "


 class EphemeralBuilderManager(BaseManager):
-    """ Build manager implementation for the Enterprise Registry. """
+    """
+    Build manager implementation for the Enterprise Registry.
+    """

     EXECUTORS = {
         "popen": PopenExecutor,
@@ -98,7 +100,9 @@ class EphemeralBuilderManager(BaseManager):

     @coroutine
     def _mark_job_incomplete(self, build_job, build_info):
-        """ Marks a job as incomplete, in response to a failure to start or a timeout. """
+        """
+        Marks a job as incomplete, in response to a failure to start or a timeout.
+        """
         executor_name = build_info.executor_name
         execution_id = build_info.execution_id

@ -137,13 +141,13 @@ class EphemeralBuilderManager(BaseManager):
@coroutine
def _job_callback(self, key_change):
"""
This is the callback invoked when keys related to jobs are changed.
It ignores all events related to the creation of new jobs.
Deletes or expirations cause checks to ensure they've been properly marked as completed.
This is the callback invoked when keys related to jobs are changed. It ignores all events
related to the creation of new jobs. Deletes or expirations cause checks to ensure they've
been properly marked as completed.

:param key_change: the event and value produced by a key changing in the orchestrator
:type key_change: :class:`KeyChange`
"""
:param key_change: the event and value produced by a key changing in the orchestrator
:type key_change: :class:`KeyChange`
"""
if key_change.event in (KeyEvent.CREATE, KeyEvent.SET):
raise Return()
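A minimal sketch of the callback contract documented above, assuming the KeyChange namedtuple defined later in this diff and a KeyEvent enum whose CREATE/SET members appear here (the DELETE/EXPIRE members are illustrative):

from collections import namedtuple
from enum import Enum

KeyChange = namedtuple("KeyChange", ["event", "key", "value"])

class KeyEvent(Enum):
    CREATE = "create"  # new key written; ignored by the callback
    SET = "set"        # key updated; also ignored
    DELETE = "delete"  # removal triggers a completion check
    EXPIRE = "expire"  # expiration triggers a completion check

def job_callback(key_change):
    # Mirrors the guard shown above: creation-type events are skipped.
    if key_change.event in (KeyEvent.CREATE, KeyEvent.SET):
        return
    print("job key %s changed: %s" % (key_change.key, key_change.event.value))

job_callback(KeyChange(KeyEvent.EXPIRE, "job/1234", None))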
@ -339,35 +343,35 @@ class EphemeralBuilderManager(BaseManager):

def _metric_key(self, realm):
"""
Create a key which is used to track a job in the Orchestrator.
Create a key which is used to track a job in the Orchestrator.

:param realm: realm for the build
:type realm: str
:returns: key used to track jobs
:rtype: str
"""
:param realm: realm for the build
:type realm: str
:returns: key used to track jobs
:rtype: str
"""
return slash_join(self._metric_prefix, realm)

def _job_key(self, build_job):
"""
Creates a key which is used to track a job in the Orchestrator.
Creates a key which is used to track a job in the Orchestrator.

:param build_job: unique job identifier for a build
:type build_job: str
:returns: key used to track the job
:rtype: str
"""
:param build_job: unique job identifier for a build
:type build_job: str
:returns: key used to track the job
:rtype: str
"""
return slash_join(self._job_prefix, build_job.job_details["build_uuid"])

def _realm_key(self, realm):
"""
Create a key which is used to track an incoming connection on a realm.
Create a key which is used to track an incoming connection on a realm.

:param realm: realm for the build
:type realm: str
:returns: key used to track the connection to the realm
:rtype: str
"""
:param realm: realm for the build
:type realm: str
:returns: key used to track the connection to the realm
:rtype: str
"""
return slash_join(self._realm_prefix, realm)

def initialize(self, manager_config):
@ -787,10 +791,11 @@ class EphemeralBuilderManager(BaseManager):
logger.exception("Could not write metric for realm %s", realm)

def num_workers(self):
""" The number of workers we're managing locally.
"""
The number of workers we're managing locally.

:returns: the number of the workers locally managed
:rtype: int
:returns: the number of the workers locally managed
:rtype: int
"""
return len(self._component_to_job)
@ -5,7 +5,9 @@ logger = logging.getLogger(__name__)


class EtcdCanceller(object):
""" A class that sends a message to etcd to cancel a build """
"""
A class that sends a message to etcd to cancel a build.
"""

def __init__(self, config):
etcd_host = config.get("ETCD_HOST", "127.0.0.1")
@ -28,7 +30,9 @@ class EtcdCanceller(object):
)

def try_cancel_build(self, build_uuid):
""" Writes etcd message to cancel build_uuid. """
"""
Writes etcd message to cancel build_uuid.
"""
logger.info("Cancelling build %s", build_uuid)
try:
self._etcd_client.write(
@ -65,15 +65,19 @@ def async_observe(metric, *labels):


class ExecutorException(Exception):
""" Exception raised when there is a problem starting or stopping a builder. """
"""
Exception raised when there is a problem starting or stopping a builder.
"""

pass


class BuilderExecutor(object):
def __init__(self, executor_config, manager_hostname):
""" Interface which can be plugged into the EphemeralNodeManager to provide a strategy for
starting and stopping builders. """
"""
Interface which can be plugged into the EphemeralNodeManager to provide a strategy for
starting and stopping builders.
"""
self.executor_config = executor_config
self.manager_hostname = manager_hostname
@ -82,28 +86,40 @@ class BuilderExecutor(object):

@property
def name(self):
""" Name returns the unique name for this executor. """
"""
Name returns the unique name for this executor.
"""
return self.executor_config.get("NAME") or self.__class__.__name__

@property
def setup_time(self):
""" Returns the amount of time (in seconds) to wait for the execution to start for the build.
If None, the manager's default will be used. """
"""
Returns the amount of time (in seconds) to wait for the execution to start for the build.

If None, the manager's default will be used.
"""
return self.executor_config.get("SETUP_TIME")

@coroutine
def start_builder(self, realm, token, build_uuid):
""" Create a builder with the specified config. Returns a unique id which can be used to manage
the builder. """
"""
Create a builder with the specified config.

Returns a unique id which can be used to manage the builder.
"""
raise NotImplementedError

@coroutine
def stop_builder(self, builder_id):
""" Stop a builder which is currently running. """
"""
Stop a builder which is currently running.
"""
raise NotImplementedError

def allowed_for_namespace(self, namespace):
""" Returns true if this executor can be used for builds in the given namespace. """
"""
Returns true if this executor can be used for builds in the given namespace.
"""

# Check for an explicit namespace whitelist.
namespace_whitelist = self.executor_config.get("NAMESPACE_WHITELIST")
@ -122,8 +138,9 @@ class BuilderExecutor(object):

@property
def minimum_retry_threshold(self):
""" Returns the minimum number of retries required for this executor to be used or 0 if
none. """
"""
Returns the minimum number of retries required for this executor to be used or 0 if none.
"""
return self.executor_config.get("MINIMUM_RETRY_THRESHOLD", 0)

def generate_cloud_config(
@ -163,8 +180,10 @@ class BuilderExecutor(object):


class EC2Executor(BuilderExecutor):
""" Implementation of BuilderExecutor which uses libcloud to start machines on a variety of cloud
providers. """
"""
Implementation of BuilderExecutor which uses libcloud to start machines on a variety of cloud
providers.
"""

COREOS_STACK_URL = (
"http://%s.release.core-os.net/amd64-usr/current/coreos_production_ami_hvm.txt"
@ -175,7 +194,9 @@ class EC2Executor(BuilderExecutor):
super(EC2Executor, self).__init__(*args, **kwargs)

def _get_conn(self):
""" Creates an ec2 connection which can be used to manage instances. """
"""
Creates an ec2 connection which can be used to manage instances.
"""
return AsyncWrapper(
boto.ec2.connect_to_region(
self.executor_config["EC2_REGION"],
@ -187,7 +208,9 @@ class EC2Executor(BuilderExecutor):
@classmethod
@cachetools.func.ttl_cache(ttl=ONE_HOUR)
def _get_coreos_ami(cls, ec2_region, coreos_channel):
""" Retrieve the CoreOS AMI id from the canonical listing. """
"""
Retrieve the CoreOS AMI id from the canonical listing.
"""
stack_list_string = requests.get(EC2Executor.COREOS_STACK_URL % coreos_channel).text
stack_amis = dict([stack.split("=") for stack in stack_list_string.split("|")])
return stack_amis[ec2_region]
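The parsing above implies the canonical listing is a '|'-separated string of region=ami pairs; a quick sketch with invented AMI ids (real data comes from COREOS_STACK_URL):

# Hypothetical payload text; the real listing is fetched over HTTP.
stack_list_string = "us-east-1=ami-11111111|us-west-2=ami-22222222"
stack_amis = dict(stack.split("=") for stack in stack_list_string.split("|"))
assert stack_amis["us-west-2"] == "ami-22222222"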
@ -303,7 +326,9 @@ class EC2Executor(BuilderExecutor):


class PopenExecutor(BuilderExecutor):
""" Implementation of BuilderExecutor which uses Popen to fork a quay-builder process. """
"""
Implementation of BuilderExecutor which uses Popen to fork a quay-builder process.
"""

def __init__(self, executor_config, manager_hostname):
self._jobs = {}
@ -354,8 +379,9 @@ class PopenExecutor(BuilderExecutor):


class KubernetesExecutor(BuilderExecutor):
""" Executes build jobs by creating Kubernetes jobs which run a qemu-kvm virtual
machine in a pod """
"""
Executes build jobs by creating Kubernetes jobs which run a qemu-kvm virtual machine in a pod.
"""

def __init__(self, *args, **kwargs):
super(KubernetesExecutor, self).__init__(*args, **kwargs)
@ -585,10 +611,14 @@ class KubernetesExecutor(BuilderExecutor):


class LogPipe(threading.Thread):
""" Adapted from http://codereview.stackexchange.com/a/17959 """
"""
Adapted from http://codereview.stackexchange.com/a/17959.
"""

def __init__(self, level):
""" Setup the object with a logger and a loglevel and start the thread """
"""
Setup the object with a logger and a loglevel and start the thread.
"""
threading.Thread.__init__(self)
self.daemon = False
self.level = level

@ -597,16 +627,22 @@ class LogPipe(threading.Thread):
self.start()

def fileno(self):
""" Return the write file descriptor of the pipe """
"""
Return the write file descriptor of the pipe.
"""
return self.fd_write

def run(self):
""" Run the thread, logging everything. """
"""
Run the thread, logging everything.
"""
for line in iter(self.pipe_reader.readline, ""):
logging.log(self.level, line.strip("\n"))

self.pipe_reader.close()

def close(self):
""" Close the write end of the pipe. """
"""
Close the write end of the pipe.
"""
os.close(self.fd_write)
@ -1,9 +1,13 @@
class NoopCanceller(object):
""" A class that can not cancel a build """
"""
A class that cannot cancel a build.
"""

def __init__(self, config=None):
pass

def try_cancel_build(self, uuid):
""" Does nothing and fails to cancel build. """
"""
Does nothing and fails to cancel build.
"""
return False
@ -11,7 +11,9 @@ CANCEL_PREFIX = "cancel/"


class OrchestratorCanceller(object):
""" An asynchronous way to cancel a build with any Orchestrator. """
"""
An asynchronous way to cancel a build with any Orchestrator.
"""

def __init__(self, config):
self._orchestrator = orchestrator_from_config(config, canceller_only=True)
@ -46,13 +46,13 @@ REDIS_EXPIRED_KEYSPACE_REGEX = re.compile(REDIS_EXPIRED_KEYSPACE_PATTERN % (r"(\

def orchestrator_from_config(manager_config, canceller_only=False):
"""
Allocates a new Orchestrator from the 'ORCHESTRATOR' block from provided manager config.
Checks for legacy configuration prefixed with 'ETCD_' when the 'ORCHESTRATOR' is not present.
Allocates a new Orchestrator from the 'ORCHESTRATOR' block from provided manager config. Checks
for legacy configuration prefixed with 'ETCD_' when the 'ORCHESTRATOR' is not present.

:param manager_config: the configuration for the orchestrator
:type manager_config: dict
:rtype: :class: Orchestrator
"""
:param manager_config: the configuration for the orchestrator
:type manager_config: dict
:rtype: :class: Orchestrator
"""
# Legacy codepath only knows how to configure etcd.
if manager_config.get("ORCHESTRATOR") is None:
manager_config["ORCHESTRATOR"] = {
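The hunk above truncates the legacy fallback, but the docstring implies two config shapes; a hedged sketch (the REDIS_HOST key is illustrative, while ETCD_HOST appears elsewhere in this diff):

# Preferred: an explicit ORCHESTRATOR block.
manager_config = {"ORCHESTRATOR": {"REDIS_HOST": "127.0.0.1"}}

# Legacy: only ETCD_-prefixed keys, from which orchestrator_from_config()
# synthesizes an etcd ORCHESTRATOR block.
legacy_config = {"ETCD_HOST": "127.0.0.1"}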
@ -130,25 +130,24 @@ class KeyChange(namedtuple("KeyChange", ["event", "key", "value"])):
@add_metaclass(ABCMeta)
class Orchestrator(object):
"""
Orchestrator is the interface that is used to synchronize the build states
across build managers.
Orchestrator is the interface that is used to synchronize the build states across build
managers.

This interface assumes that storage is being done by a key-value store
that supports watching for events on keys.
This interface assumes that storage is being done by a key-value store
that supports watching for events on keys.

Missing keys should return KeyError; otherwise, errors should raise an
OrchestratorError.
Missing keys should return KeyError; otherwise, errors should raise an
OrchestratorError.

:param key_prefix: the prefix of keys being watched
:type key_prefix: str
"""
:param key_prefix: the prefix of keys being watched
:type key_prefix: str
"""

@abstractmethod
def on_key_change(self, key, callback, restarter=None):
"""

The callback parameter takes in a KeyChange object as a parameter.
"""
The callback parameter takes in a KeyChange object as a parameter.
"""
pass

@abstractmethod
@ -187,47 +186,47 @@ class Orchestrator(object):
@abstractmethod
def set_key_sync(self, key, value, overwrite=False, expiration=None):
"""
set_key, but without trollius coroutines.
"""
set_key, but without trollius coroutines.
"""
pass

@abstractmethod
def delete_key(self, key):
"""
Deletes a key that has been set in the orchestrator.
Deletes a key that has been set in the orchestrator.

:param key: the identifier for the key
:type key: str
"""
:param key: the identifier for the key
:type key: str
"""
pass

@abstractmethod
def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION):
"""
Takes a lock for synchronizing exclusive operations cluster-wide.
Takes a lock for synchronizing exclusive operations cluster-wide.

:param key: the identifier for the lock
:type key: str
:param expiration: the duration until the lock expires
:type expiration: :class:`datetime.timedelta` or int (seconds)
:returns: whether or not the lock was acquired
:rtype: bool
"""
:param key: the identifier for the lock
:type key: str
:param expiration: the duration until the lock expires
:type expiration: :class:`datetime.timedelta` or int (seconds)
:returns: whether or not the lock was acquired
:rtype: bool
"""
pass
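Taken together, these abstract methods document a small key-value contract. A hedged sketch against a minimal in-memory stand-in (the class below is invented for illustration; releasing a lock by deleting its key is an assumption, not part of the interface):

class FakeOrchestrator(object):
    # Tiny stand-in honoring the documented behaviors: set_key_sync raises
    # KeyError on an existing key unless overwrite is set.
    def __init__(self):
        self.state = {}

    def lock(self, key, expiration=60):
        if key in self.state:
            return False
        self.state[key] = "locked"
        return True

    def set_key_sync(self, key, value, overwrite=False, expiration=None):
        if key in self.state and not overwrite:
            raise KeyError(key)
        self.state[key] = value

    def delete_key(self, key):
        del self.state[key]

orch = FakeOrchestrator()
if orch.lock("builds/cleanup", expiration=60):
    try:
        orch.set_key_sync("builds/cleanup/last-run", "now", overwrite=True)
    finally:
        orch.delete_key("builds/cleanup")  # assumed release: drop the lock key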
@abstractmethod
def shutdown():
"""
This function should shutdown any final resources allocated by the Orchestrator.
"""
This function should shutdown any final resources allocated by the Orchestrator.
"""
pass


def _sleep_orchestrator():
"""
This function blocks the trollius event loop by sleeping in order to backoff if a failure
such as a ConnectionError has occurred.
"""
This function blocks the trollius event loop by sleeping in order to backoff if a failure such
as a ConnectionError has occurred.
"""
logger.exception(
"Connecting to etcd failed; sleeping for %s and then trying again",
ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION,
|
||||
|
||||
|
||||
class EtcdAction(object):
|
||||
""" Enumeration of the various kinds of etcd actions we can observe via a watch. """
|
||||
"""
|
||||
Enumeration of the various kinds of etcd actions we can observe via a watch.
|
||||
"""
|
||||
|
||||
GET = "get"
|
||||
SET = "set"
|
||||
@ -287,9 +288,10 @@ class Etcd2Orchestrator(Orchestrator):
@staticmethod
def _sanity_check_ttl(ttl):
"""
A TTL of < 0 in etcd results in the key *never being expired*.
We use a max here to ensure that if the TTL is < 0, the key will expire immediately.
"""
A TTL of < 0 in etcd results in the key *never being expired*.

We use a max here to ensure that if the TTL is < 0, the key will expire immediately.
"""
return max(ttl, 0)

def _watch_etcd(self, key, callback, restarter=None, start_index=None):
@ -511,8 +513,8 @@ class MemoryOrchestrator(Orchestrator):

def set_key_sync(self, key, value, overwrite=False, expiration=None):
"""
set_key, but without trollius coroutines.
"""
set_key, but without trollius coroutines.
"""
preexisting_key = key in self.state
if preexisting_key and not overwrite:
raise KeyError
@ -683,9 +685,10 @@ class RedisOrchestrator(Orchestrator):
@staticmethod
def _is_expired_keyspace_event(event_result):
"""
Sanity check that this isn't an unrelated keyspace event.
There could be a more efficient keyspace event config to avoid this client-side filter.
"""
Sanity check that this isn't an unrelated keyspace event.

There could be a more efficient keyspace event config to avoid this client-side filter.
"""
if event_result is None:
return False
@ -32,8 +32,10 @@ HEARTBEAT_PERIOD_SEC = 30


class BuilderServer(object):
""" Server which handles both HTTP and WAMP requests, managing the full state of the build
controller. """
"""
Server which handles both HTTP and WAMP requests, managing the full state of the build
controller.
"""

def __init__(
self,

@ -131,8 +133,11 @@ class BuilderServer(object):
logger.debug("Shutting down server")

def _register_component(self, realm, component_klass, **kwargs):
""" Registers a component with the server. The component_klass must derive from
BaseComponent. """
"""
Registers a component with the server.

The component_klass must derive from BaseComponent.
"""
logger.debug("Registering component with realm %s", realm)
if realm in self._realm_map:
logger.debug("Component with realm %s already registered", realm)
@ -88,7 +88,9 @@ class EphemeralBuilderTestCase(unittest.TestCase):


class TestEphemeralLifecycle(EphemeralBuilderTestCase):
""" Tests the various lifecycles of the ephemeral builder and its interaction with etcd. """
"""
Tests the various lifecycles of the ephemeral builder and its interaction with etcd.
"""

def __init__(self, *args, **kwargs):
super(TestEphemeralLifecycle, self).__init__(*args, **kwargs)

@ -420,9 +422,10 @@ class TestEphemeralLifecycle(EphemeralBuilderTestCase):


class TestEphemeral(EphemeralBuilderTestCase):
""" Simple unit tests for the ephemeral builder around config management, starting and stopping
jobs.
"""
"""
Simple unit tests for the ephemeral builder around config management, starting and stopping
jobs.
"""

def setUp(self):
super(TestEphemeral, self).setUp()
@ -164,7 +164,9 @@ class BuildTriggerHandler(object):

@property
def auth_token(self):
""" Returns the auth token for the trigger. """
"""
Returns the auth token for the trigger.
"""
# NOTE: This check is for testing.
if hasattr(self.trigger, "auth_token"):
return self.trigger.auth_token
@ -177,98 +179,111 @@ class BuildTriggerHandler(object):

@abstractmethod
def load_dockerfile_contents(self):
"""
Loads the Dockerfile found for the trigger's config and returns them or None if none could
be found/loaded.
"""
Loads the Dockerfile found for the trigger's config and returns them or None if none could
be found/loaded.
"""
pass

@abstractmethod
def list_build_source_namespaces(self):
"""
Take the auth information for the specific trigger type and load the
list of namespaces that can contain build sources.
"""
Take the auth information for the specific trigger type and load the list of namespaces that
can contain build sources.
"""
pass

@abstractmethod
def list_build_sources_for_namespace(self, namespace):
"""
Take the auth information for the specific trigger type and load the
list of repositories under the given namespace.
"""
Take the auth information for the specific trigger type and load the list of repositories
under the given namespace.
"""
pass

@abstractmethod
def list_build_subdirs(self):
"""
Take the auth information and the specified config so far and list all of
the possible subdirs containing dockerfiles.
"""
Take the auth information and the specified config so far and list all of the possible
subdirs containing dockerfiles.
"""
pass

@abstractmethod
def handle_trigger_request(self, request):
"""
Transform the incoming request data into a set of actions. Returns a PreparedBuild.
"""
Transform the incoming request data into a set of actions.

Returns a PreparedBuild.
"""
pass

@abstractmethod
def is_active(self):
"""
Returns True if the current build trigger is active. Inactive means further
setup is needed.
"""
Returns True if the current build trigger is active.

Inactive means further setup is needed.
"""
pass

@abstractmethod
def activate(self, standard_webhook_url):
"""
Activates the trigger for the service, with the given new configuration.
Returns new public and private config that should be stored if successful.
"""
Activates the trigger for the service, with the given new configuration.

Returns new public and private config that should be stored if successful.
"""
pass

@abstractmethod
def deactivate(self):
"""
Deactivates the trigger for the service, removing any hooks installed in
the remote service. Returns the new config that should be stored if this
trigger is going to be re-activated.
"""
Deactivates the trigger for the service, removing any hooks installed in the remote service.

Returns the new config that should be stored if this trigger is going to be re-activated.
"""
pass

@abstractmethod
def manual_start(self, run_parameters=None):
"""
Manually creates a repository build for this trigger. Returns a PreparedBuild.
"""
Manually creates a repository build for this trigger.

Returns a PreparedBuild.
"""
pass

@abstractmethod
def list_field_values(self, field_name, limit=None):
"""
Lists all values for the given custom trigger field. For example, a trigger might have a
field named "branches", and this method would return all branches.
"""
Lists all values for the given custom trigger field.

For example, a trigger might have a field named "branches", and this method would return all
branches.
"""
pass

@abstractmethod
def get_repository_url(self):
""" Returns the URL of the current trigger's repository. Note that this operation
can be called in a loop, so it should be as fast as possible. """
"""
Returns the URL of the current trigger's repository.

Note that this operation can be called in a loop, so it should be as fast as possible.
"""
pass

@classmethod
def filename_is_dockerfile(cls, file_name):
""" Returns whether the file is named Dockerfile or follows the convention <name>.Dockerfile"""
"""
Returns whether the file is named Dockerfile or follows the convention <name>.Dockerfile.
"""
return file_name.endswith(".Dockerfile") or u"Dockerfile" == file_name

@classmethod
def service_name(cls):
"""
Particular service implemented by subclasses.
"""
Particular service implemented by subclasses.
"""
raise NotImplementedError

@classmethod
@ -280,17 +295,22 @@ class BuildTriggerHandler(object):
raise InvalidServiceException("Unable to find service: %s" % trigger.service.name)

def put_config_key(self, key, value):
""" Updates a config key in the trigger, saving it to the DB. """
"""
Updates a config key in the trigger, saving it to the DB.
"""
self.config[key] = value
model.build.update_build_trigger(self.trigger, self.config)

def set_auth_token(self, auth_token):
""" Sets the auth token for the trigger, saving it to the DB. """
"""
Sets the auth token for the trigger, saving it to the DB.
"""
model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)

def get_dockerfile_path(self):
""" Returns the normalized path to the Dockerfile found in the subdirectory
in the config. """
"""
Returns the normalized path to the Dockerfile found in the subdirectory in the config.
"""
dockerfile_path = self.config.get("dockerfile_path") or "Dockerfile"
if dockerfile_path[0] == "/":
dockerfile_path = dockerfile_path[1:]

@ -395,7 +415,9 @@ class BuildTriggerHandler(object):

@classmethod
def get_parent_directory_mappings(cls, dockerfile_path, current_paths=None):
""" Returns a map of dockerfile_paths to it's possible contexts. """
"""
Returns a map of dockerfile_paths to its possible contexts.
"""
if dockerfile_path == "":
return {}
@ -128,9 +128,9 @@ BITBUCKET_COMMIT_INFO_SCHEMA = {


def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name, lookup_author):
""" Returns the BitBucket commit information transformed into our own
payload format.
"""
"""
Returns the BitBucket commit information transformed into our own payload format.
"""
try:
validate(bb_commit, BITBUCKET_COMMIT_INFO_SCHEMA)
except Exception as exc:

@ -165,9 +165,11 @@ def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name,


def get_transformed_webhook_payload(bb_payload, default_branch=None):
""" Returns the BitBucket webhook JSON payload transformed into our own payload
format. If the bb_payload is not valid, returns None.
"""
"""
Returns the BitBucket webhook JSON payload transformed into our own payload format.

If the bb_payload is not valid, returns None.
"""
try:
validate(bb_payload, BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA)
except Exception as exc:

@ -209,15 +211,17 @@ def get_transformed_webhook_payload(bb_payload, default_branch=None):

class BitbucketBuildTrigger(BuildTriggerHandler):
"""
BuildTrigger for Bitbucket.
"""
BuildTrigger for Bitbucket.
"""

@classmethod
def service_name(cls):
return "bitbucket"

def _get_client(self):
""" Returns a BitBucket API client for this trigger's config. """
"""
Returns a BitBucket API client for this trigger's config.
"""
key = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_KEY", "")
secret = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_SECRET", "")
@ -227,7 +231,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return BitBucket(key, secret, callback_url, timeout=15)

def _get_authorized_client(self):
""" Returns an authorized API client. """
"""
Returns an authorized API client.
"""
base_client = self._get_client()
auth_token = self.auth_token or "invalid:invalid"
token_parts = auth_token.split(":")

@ -238,14 +244,18 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return base_client.get_authorized_client(access_token, access_token_secret)

def _get_repository_client(self):
""" Returns an API client for working with this config's BB repository. """
"""
Returns an API client for working with this config's BB repository.
"""
source = self.config["build_source"]
(namespace, name) = source.split("/")
bitbucket_client = self._get_authorized_client()
return bitbucket_client.for_namespace(namespace).repositories().get(name)

def _get_default_branch(self, repository, default_value="master"):
""" Returns the default branch for the repository or the value given. """
"""
Returns the default branch for the repository or the value given.
"""
(result, data, _) = repository.get_main_branch()
if result:
return data["name"]

@ -253,7 +263,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return default_value

def get_oauth_url(self):
""" Returns the OAuth URL to authorize Bitbucket. """
"""
Returns the OAuth URL to authorize Bitbucket.
"""
bitbucket_client = self._get_client()
(result, data, err_msg) = bitbucket_client.get_authorization_url()
if not result:

@ -262,7 +274,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return data

def exchange_verifier(self, verifier):
""" Exchanges the given verifier token to setup this trigger. """
"""
Exchanges the given verifier token to setup this trigger.
"""
bitbucket_client = self._get_client()
access_token = self.config.get("access_token", "")
access_token_secret = self.auth_token
@ -78,9 +78,11 @@ GITHUB_WEBHOOK_PAYLOAD_SCHEMA = {


def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user=None):
""" Returns the GitHub webhook JSON payload transformed into our own payload
format. If the gh_payload is not valid, returns None.
"""
"""
Returns the GitHub webhook JSON payload transformed into our own payload format.

If the gh_payload is not valid, returns None.
"""
try:
validate(gh_payload, GITHUB_WEBHOOK_PAYLOAD_SCHEMA)
except Exception as exc:

@ -149,11 +151,13 @@ def _catch_ssl_errors(func):

class GithubBuildTrigger(BuildTriggerHandler):
"""
BuildTrigger for GitHub that uses the archive API and buildpacks.
"""
BuildTrigger for GitHub that uses the archive API and buildpacks.
"""

def _get_client(self):
""" Returns an authenticated client for talking to the GitHub API. """
"""
Returns an authenticated client for talking to the GitHub API.
"""
return Github(
self.auth_token,
base_url=github_trigger.api_endpoint(),
@ -91,9 +91,9 @@ def _catch_timeouts_and_errors(func):


def _paginated_iterator(func, exc, **kwargs):
""" Returns an iterator over invocations of the given function, automatically handling
pagination.
"""
"""
Returns an iterator over invocations of the given function, automatically handling pagination.
"""
page = 1
while True:
result = func(page=page, per_page=_PER_PAGE_COUNT, **kwargs)
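A hedged usage sketch for the iterator above (the fake callable and the stop-on-empty-page behavior are assumptions for illustration):

_PER_PAGE_COUNT = 2

def fake_list_repos(page, per_page):
    # Serves three fake repositories two per page; an empty page ends iteration.
    repos = [{"name": "alpha"}, {"name": "beta"}, {"name": "gamma"}]
    return repos[(page - 1) * per_page : page * per_page]

for repo in _paginated_iterator(fake_list_repos, Exception):
    print(repo["name"])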
@ -114,9 +114,11 @@ def _paginated_iterator(func, exc, **kwargs):
def get_transformed_webhook_payload(
gl_payload, default_branch=None, lookup_user=None, lookup_commit=None
):
""" Returns the Gitlab webhook JSON payload transformed into our own payload
format. If the gl_payload is not valid, returns None.
"""
"""
Returns the Gitlab webhook JSON payload transformed into our own payload format.

If the gl_payload is not valid, returns None.
"""
try:
validate(gl_payload, GITLAB_WEBHOOK_PAYLOAD_SCHEMA)
except Exception as exc:

@ -182,8 +184,8 @@ def get_transformed_webhook_payload(

class GitLabBuildTrigger(BuildTriggerHandler):
"""
BuildTrigger for GitLab.
"""
BuildTrigger for GitLab.
"""

@classmethod
def service_name(cls):
@ -99,7 +99,9 @@ def should_skip_commit(metadata):


def raise_if_skipped_build(prepared_build, config):
""" Raises a SkipRequestException if the given build should be skipped. """
"""
Raises a SkipRequestException if the given build should be skipped.
"""
# Check to ensure we have metadata.
if not prepared_build.metadata:
logger.debug("Skipping request due to missing metadata for prepared build")
@ -66,8 +66,8 @@ def write_config(filename, **kwargs):

def generate_nginx_config(config):
"""
Generates nginx config from the app config
"""
Generates nginx config from the app config.
"""
config = config or {}
use_https = os.path.exists(os.path.join(QUAYCONF_DIR, "stack/ssl.key"))
use_old_certs = os.path.exists(os.path.join(QUAYCONF_DIR, "stack/ssl.old.key"))

@ -89,8 +89,8 @@ def generate_nginx_config(config):

def generate_server_config(config):
"""
Generates server config from the app config
"""
Generates server config from the app config.
"""
config = config or {}
tuf_server = config.get("TUF_SERVER", None)
tuf_host = config.get("TUF_HOST", None)

@ -111,8 +111,8 @@ def generate_server_config(config):

def generate_rate_limiting_config(config):
"""
Generates rate limiting config from the app config
"""
Generates rate limiting config from the app config.
"""
config = config or {}
non_rate_limited_namespaces = config.get("NON_RATE_LIMITED_NAMESPACES") or set()
enable_rate_limits = config.get("FEATURE_RATE_LIMITS", False)

@ -126,8 +126,8 @@ def generate_rate_limiting_config(config):

def generate_hosted_http_base_config(config):
"""
Generates hosted http base config from the app config
"""
Generates hosted http base config from the app config.
"""
config = config or {}
feature_proxy_protocol = config.get("FEATURE_PROXY_PROTOCOL", False)
@ -44,7 +44,9 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):


def format_date(date):
""" Output an RFC822 date format. """
"""
Output an RFC822 date format.
"""
if date is None:
return None
return formatdate(timegm(date.utctimetuple()))
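For reference, a quick sketch of the output (assuming formatdate comes from email.utils, as the call pattern suggests; the timestamp is illustrative):

from calendar import timegm
from datetime import datetime
from email.utils import formatdate

date = datetime(2020, 1, 1, 12, 0, 0)
print(formatdate(timegm(date.utctimetuple())))  # Wed, 01 Jan 2020 12:00:00 -0000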
@ -147,7 +149,9 @@ def validate_json_request(schema_name, optional=False):


def kubernetes_only(f):
""" Aborts the request with a 400 if the app is not running on kubernetes """
"""
Aborts the request with a 400 if the app is not running on kubernetes.
"""

@wraps(f)
def abort_if_not_kube(*args, **kwargs):
@ -20,7 +20,9 @@ logger = logging.getLogger(__name__)

@resource("/v1/kubernetes/deployments/")
class SuperUserKubernetesDeployment(ApiResource):
""" Resource for the getting the status of Red Hat Quay deployments and cycling them """
"""
Resource for getting the status of Red Hat Quay deployments and cycling them.
"""

schemas = {
"ValidateDeploymentNames": {

@ -65,7 +67,9 @@ class QEDeploymentRolloutStatus(ApiResource):

@resource("/v1/kubernetes/deployments/rollback")
class QEDeploymentRollback(ApiResource):
""" Resource for rolling back deployments """
"""
Resource for rolling back deployments.
"""

schemas = {
"ValidateDeploymentNames": {

@ -111,7 +115,9 @@ class QEDeploymentRollback(ApiResource):

@resource("/v1/kubernetes/config")
class SuperUserKubernetesConfiguration(ApiResource):
""" Resource for saving the config files to kubernetes secrets. """
"""
Resource for saving the config files to kubernetes secrets.
"""

@kubernetes_only
@nickname("scDeployConfiguration")

@ -128,7 +134,9 @@ class SuperUserKubernetesConfiguration(ApiResource):

@resource("/v1/kubernetes/config/populate")
class KubernetesConfigurationPopulator(ApiResource):
""" Resource for populating the local configuration from the cluster's kubernetes secrets. """
"""
Resource for populating the local configuration from the cluster's kubernetes secrets.
"""

@kubernetes_only
@nickname("scKubePopulateConfig")
@ -26,18 +26,24 @@ logger = logging.getLogger(__name__)


def database_is_valid():
""" Returns whether the database, as configured, is valid. """
"""
Returns whether the database, as configured, is valid.
"""
return model.is_valid()


def database_has_users():
""" Returns whether the database has any users defined. """
"""
Returns whether the database has any users defined.
"""
return model.has_users()


@resource("/v1/superuser/config")
class SuperUserConfig(ApiResource):
""" Resource for fetching and updating the current configuration, if any. """
"""
Resource for fetching and updating the current configuration, if any.
"""

schemas = {
"UpdateConfig": {

@ -50,14 +56,18 @@ class SuperUserConfig(ApiResource):

@nickname("scGetConfig")
def get(self):
""" Returns the currently defined configuration, if any. """
"""
Returns the currently defined configuration, if any.
"""
config_object = config_provider.get_config()
return {"config": config_object}

@nickname("scUpdateConfig")
@validate_json_request("UpdateConfig")
def put(self):
""" Updates the config override file. """
"""
Updates the config override file.
"""
# Note: This method is called to set the database configuration before super users exists,
# so we also allow it to be called if there is no valid registry configuration setup.
config_object = request.get_json()["config"]

@ -78,13 +88,16 @@ class SuperUserConfig(ApiResource):

@resource("/v1/superuser/registrystatus")
class SuperUserRegistryStatus(ApiResource):
""" Resource for determining the status of the registry, such as if config exists,
if a database is configured, and if it has any defined users.
"""
"""
Resource for determining the status of the registry, such as if config exists, if a database is
configured, and if it has any defined users.
"""

@nickname("scRegistryStatus")
def get(self):
""" Returns the status of the registry. """
"""
Returns the status of the registry.
"""
# If there is no config file, we need to setup the database.
if not config_provider.config_exists():
return {"status": "config-db"}

@ -118,11 +131,15 @@ def _reload_config():

@resource("/v1/superuser/setupdb")
class SuperUserSetupDatabase(ApiResource):
""" Resource for invoking alembic to setup the database. """
"""
Resource for invoking alembic to setup the database.
"""

@nickname("scSetupDatabase")
def get(self):
""" Invokes the alembic upgrade process. """
"""
Invokes the alembic upgrade process.
"""
# Note: This method is called after the database configured is saved, but before the
# database has any tables. Therefore, we only allow it to be run in that unique case.
if config_provider.config_exists() and not database_is_valid():

@ -146,7 +163,9 @@ class SuperUserSetupDatabase(ApiResource):

@resource("/v1/superuser/config/createsuperuser")
class SuperUserCreateInitialSuperUser(ApiResource):
""" Resource for creating the initial super user. """
"""
Resource for creating the initial super user.
"""

schemas = {
"CreateSuperUser": {

@ -164,8 +183,10 @@ class SuperUserCreateInitialSuperUser(ApiResource):
@nickname("scCreateInitialSuperuser")
@validate_json_request("CreateSuperUser")
def post(self):
""" Creates the initial super user, updates the underlying configuration and
sets the current session to have that super user. """
"""
Creates the initial super user, updates the underlying configuration and sets the current
session to have that super user.
"""

_reload_config()
@ -199,7 +220,9 @@ class SuperUserCreateInitialSuperUser(ApiResource):

@resource("/v1/superuser/config/validate/<service>")
class SuperUserConfigValidate(ApiResource):
""" Resource for validating a block of configuration against an external service. """
"""
Resource for validating a block of configuration against an external service.
"""

schemas = {
"ValidateConfig": {

@ -219,7 +242,9 @@ class SuperUserConfigValidate(ApiResource):
@nickname("scValidateConfig")
@validate_json_request("ValidateConfig")
def post(self, service):
""" Validates the given config for the given service. """
"""
Validates the given config for the given service.
"""
# Note: This method is called to validate the database configuration before super users exists,
# so we also allow it to be called if there is no valid registry configuration setup. Note that
# this is also safe since this method does not access any information not given in the request.

@ -239,11 +264,15 @@ class SuperUserConfigValidate(ApiResource):

@resource("/v1/superuser/config/file/<filename>")
class SuperUserConfigFile(ApiResource):
""" Resource for fetching the status of config files and overriding them. """
"""
Resource for fetching the status of config files and overriding them.
"""

@nickname("scConfigFileExists")
def get(self, filename):
""" Returns whether the configuration file with the given name exists. """
"""
Returns whether the configuration file with the given name exists.
"""
if not is_valid_config_upload_filename(filename):
abort(404)

@ -251,7 +280,9 @@ class SuperUserConfigFile(ApiResource):

@nickname("scUpdateConfigFile")
def post(self, filename):
""" Updates the configuration file with the given name. """
"""
Updates the configuration file with the given name.
"""
if not is_valid_config_upload_filename(filename):
abort(404)
@ -5,35 +5,37 @@ from six import add_metaclass

@add_metaclass(ABCMeta)
class SuperuserConfigDataInterface(object):
"""
Interface that represents all data store interactions required by the superuser config API.
"""
Interface that represents all data store interactions required by the superuser config API.
"""

@abstractmethod
def is_valid(self):
"""
Returns true if the configured database is valid.
"""
Returns true if the configured database is valid.
"""

@abstractmethod
def has_users(self):
"""
Returns true if there are any users defined.
"""
Returns true if there are any users defined.
"""

@abstractmethod
def create_superuser(self, username, password, email):
"""
Creates a new superuser with the given username, password and email. Returns the user's UUID.
"""
Creates a new superuser with the given username, password and email.

Returns the user's UUID.
"""

@abstractmethod
def has_federated_login(self, username, service_name):
"""
Returns true if the matching user has a federated login under the matching service.
"""
Returns true if the matching user has a federated login under the matching service.
"""

@abstractmethod
def attach_federated_login(self, username, service_name, federated_username):
"""
Attaches a federatated login to the matching user, under the given service.
"""
Attaches a federated login to the matching user, under the given service.
"""
@ -32,7 +32,9 @@ logger = logging.getLogger(__name__)

@resource("/v1/superuser/customcerts/<certpath>")
class SuperUserCustomCertificate(ApiResource):
""" Resource for managing a custom certificate. """
"""
Resource for managing a custom certificate.
"""

@nickname("uploadCustomCertificate")
def post(self, certpath):

@ -85,7 +87,9 @@ class SuperUserCustomCertificate(ApiResource):

@resource("/v1/superuser/customcerts")
class SuperUserCustomCertificates(ApiResource):
""" Resource for managing custom certificates. """
"""
Resource for managing custom certificates.
"""

@nickname("getCustomCertificates")
def get(self):

@ -128,7 +132,9 @@ class SuperUserCustomCertificates(ApiResource):

@resource("/v1/superuser/keys")
class SuperUserServiceKeyManagement(ApiResource):
""" Resource for managing service keys."""
"""
Resource for managing service keys.
"""

schemas = {
"CreateServiceKey": {

@ -221,7 +227,9 @@ class SuperUserServiceKeyManagement(ApiResource):

@resource("/v1/superuser/approvedkeys/<kid>")
class SuperUserServiceKeyApproval(ApiResource):
""" Resource for approving service keys. """
"""
Resource for approving service keys.
"""

schemas = {
"ApproveServiceKey": {
@ -37,24 +37,25 @@ class RepositoryBuild(
)
):
"""
RepositoryBuild represents a build associated with a repostiory
:type uuid: string
:type logs_archived: boolean
:type repository_namespace_user_username: string
:type repository_name: string
:type can_write: boolean
:type can_write: boolean
:type pull_robot: User
:type resource_key: string
:type trigger: Trigger
:type display_name: string
:type started: boolean
:type job_config: {Any -> Any}
:type phase: string
:type status: string
:type error: string
:type archive_url: string
"""
RepositoryBuild represents a build associated with a repository.

:type uuid: string
:type logs_archived: boolean
:type repository_namespace_user_username: string
:type repository_name: string
:type can_write: boolean
:type pull_robot: User
:type resource_key: string
:type trigger: Trigger
:type display_name: string
:type started: boolean
:type job_config: {Any -> Any}
:type phase: string
:type status: string
:type error: string
:type archive_url: string
"""

def to_dict(self):

@ -94,12 +95,13 @@ class RepositoryBuild(

class Approval(namedtuple("Approval", ["approver", "approval_type", "approved_date", "notes"])):
"""
Approval represents whether a key has been approved or not
:type approver: User
:type approval_type: string
:type approved_date: Date
:type notes: string
"""
Approval represents whether a key has been approved or not.

:type approver: User
:type approval_type: string
:type approved_date: Date
:type notes: string
"""

def to_dict(self):
return {

@ -127,18 +129,18 @@ class ServiceKey(
)
):
"""
ServiceKey is an apostille signing key
:type name: string
:type kid: int
:type service: string
:type jwk: string
:type metadata: string
:type created_date: Date
:type expiration_date: Date
:type rotation_duration: Date
:type approval: Approval
ServiceKey is an apostille signing key.

"""
:type name: string
:type kid: int
:type service: string
:type jwk: string
:type metadata: string
:type created_date: Date
:type expiration_date: Date
:type rotation_duration: Date
:type approval: Approval
"""

def to_dict(self):
return {

@ -156,13 +158,14 @@ class ServiceKey(

class User(namedtuple("User", ["username", "email", "verified", "enabled", "robot"])):
"""
User represents a single user.
:type username: string
:type email: string
:type verified: boolean
:type enabled: boolean
:type robot: User
"""
User represents a single user.

:type username: string
:type email: string
:type verified: boolean
:type enabled: boolean
:type robot: User
"""

def to_dict(self):
user_data = {

@ -179,10 +182,11 @@ class User(namedtuple("User", ["username", "email", "verified", "enabled", "robo

class Organization(namedtuple("Organization", ["username", "email"])):
"""
Organization represents a single org.
:type username: string
:type email: string
"""
Organization represents a single org.

:type username: string
:type email: string
"""

def to_dict(self):
return {

@ -194,11 +198,11 @@ class Organization(namedtuple("Organization", ["username", "email"])):
@add_metaclass(ABCMeta)
class SuperuserDataInterface(object):
"""
Interface that represents all data store interactions required by a superuser api.
"""
Interface that represents all data store interactions required by a superuser api.
"""

@abstractmethod
def list_all_service_keys(self):
"""
Returns a list of service keys
"""
Returns a list of service keys.
"""
@ -47,9 +47,9 @@ class ServiceKeyAlreadyApproved(Exception):

class PreOCIModel(SuperuserDataInterface):
"""
PreOCIModel implements the data model for the SuperUser using a database schema
before it was changed to support the OCI specification.
"""
PreOCIModel implements the data model for the SuperUser using a database schema before it was
changed to support the OCI specification.
"""

def list_all_service_keys(self):
keys = model.service_keys.list_all_keys()
@ -19,8 +19,8 @@ from config_app.config_util.tar import (
@resource("/v1/configapp/initialization")
class ConfigInitialization(ApiResource):
"""
Resource for dealing with any initialization logic for the config app
"""
Resource for dealing with any initialization logic for the config app.
"""

@nickname("scStartNewConfig")
def post(self):

@ -31,9 +31,9 @@ class ConfigInitialization(ApiResource):
@resource("/v1/configapp/tarconfig")
class TarConfigLoader(ApiResource):
"""
Resource for dealing with configuration as a tarball,
including loading and generating functions
"""
Resource for dealing with configuration as a tarball, including loading and generating
functions.
"""

@nickname("scGetConfigTarball")
def get(self):

@ -50,7 +50,9 @@ class TarConfigLoader(ApiResource):

@nickname("scUploadTarballConfig")
def put(self):
""" Loads tarball config into the config provider """
"""
Loads tarball config into the config provider.
"""
# Generate a new empty dir to load the config into
config_provider.new_config_dir()
input_stream = request.stream
@ -5,10 +5,14 @@ from config_app.config_endpoints.api.superuser_models_interface import user_view

@resource("/v1/user/")
class User(ApiResource):
""" Operations related to users. """
"""
Operations related to users.
"""

@nickname("getLoggedInUser")
def get(self):
""" Get user information for the authenticated user. """
"""
Get user information for the authenticated user.
"""
user = get_authenticated_user()
return user_view(user)
@ -30,7 +30,9 @@ TYPE_CONVERTER = {


def _list_files(path, extension, contains=""):
""" Returns a list of all the files with the given extension found under the given path. """
"""
Returns a list of all the files with the given extension found under the given path.
"""

def matches(f):
return os.path.splitext(f)[1] == "." + extension and contains in os.path.splitext(f)[0]

@ -47,7 +49,9 @@ FONT_AWESOME_4 = "netdna.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.cs


def render_page_template(name, route_data=None, js_bundle_name=DEFAULT_JS_BUNDLE_NAME, **kwargs):
""" Renders the page template with the given name as the response and returns its contents. """
"""
Renders the page template with the given name as the response and returns its contents.
"""
main_scripts = _list_files("build", "js", js_bundle_name)

use_cdn = os.getenv("TESTING") == "true"
@ -10,26 +10,26 @@ class ApiErrorType(Enum):

class ApiException(HTTPException):
"""
Represents an error in the application/problem+json format.
Represents an error in the application/problem+json format.

See: https://tools.ietf.org/html/rfc7807
See: https://tools.ietf.org/html/rfc7807

- "type" (string) - A URI reference that identifies the
problem type.
- "type" (string) - A URI reference that identifies the
problem type.

- "title" (string) - A short, human-readable summary of the problem
type. It SHOULD NOT change from occurrence to occurrence of the
problem, except for purposes of localization
- "title" (string) - A short, human-readable summary of the problem
type. It SHOULD NOT change from occurrence to occurrence of the
problem, except for purposes of localization

- "status" (number) - The HTTP status code
- "status" (number) - The HTTP status code

- "detail" (string) - A human-readable explanation specific to this
occurrence of the problem.
- "detail" (string) - A human-readable explanation specific to this
occurrence of the problem.

- "instance" (string) - A URI reference that identifies the specific
occurrence of the problem. It may or may not yield further
information if dereferenced.
"""
- "instance" (string) - A URI reference that identifies the specific
occurrence of the problem. It may or may not yield further
information if dereferenced.
"""

def __init__(self, error_type, status_code, error_description, payload=None):
Exception.__init__(self)
@ -9,10 +9,10 @@ OLD_CONFIG_SUBDIR = "old/"

class TransientDirectoryProvider(FileConfigProvider):
""" Implementation of the config provider that reads and writes the data
from/to the file system, only using temporary directories,
deleting old dirs and creating new ones as requested.
"""
"""
Implementation of the config provider that reads and writes the data from/to the file system,
only using temporary directories, deleting old dirs and creating new ones as requested.
"""

def __init__(self, config_volume, yaml_filename, py_filename):
# Create a temp directory that will be cleaned up when we change the config path

@ -29,8 +29,8 @@ class TransientDirectoryProvider(FileConfigProvider):

def new_config_dir(self):
"""
Update the path with a new temporary directory, deleting the old one in the process
"""
Update the path with a new temporary directory, deleting the old one in the process.
"""
self.temp_dir.cleanup()
temp_dir = TemporaryDirectory()

@ -40,8 +40,8 @@ class TransientDirectoryProvider(FileConfigProvider):

def create_copy_of_config_dir(self):
"""
Create a directory to store loaded/populated configuration (for rollback if necessary)
"""
Create a directory to store loaded/populated configuration (for rollback if necessary).
"""
if self.old_config_dir is not None:
self.old_config_dir.cleanup()
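The provider above swaps whole directories rather than mutating files in place. A minimal standalone sketch of that pattern, assuming Python 3's tempfile.TemporaryDirectory (the ConfigDir class is hypothetical, not Quay's provider):

    from tempfile import TemporaryDirectory

    class ConfigDir(object):
        """Holds a config path backed by a disposable temporary directory."""

        def __init__(self):
            self.temp_dir = TemporaryDirectory()

        @property
        def path(self):
            return self.temp_dir.name

        def new_config_dir(self):
            # Drop the old directory and start fresh, as the provider does.
            self.temp_dir.cleanup()
            self.temp_dir = TemporaryDirectory()

    d = ConfigDir()
    old_path = d.path
    d.new_config_dir()
    assert d.path != old_path

Because the old directory is cleaned up in full, a failed upload can never leave a half-written config behind; rollback is a matter of keeping a copy, as create_copy_of_config_dir does.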
@ -8,7 +8,9 @@ from util.config.validator import EXTRA_CA_DIRECTORY, EXTRA_CA_DIRECTORY_PREFIX

def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
""" Loads and returns the config provider for the current environment. """
"""
Loads and returns the config provider for the current environment.
"""

if testing:
return TestConfigProvider()

@ -12,7 +12,9 @@ logger = logging.getLogger(__name__)

class BaseFileProvider(BaseProvider):
""" Base implementation of the config provider that reads the data from the file system. """
"""
Base implementation of the config provider that reads the data from the file system.
"""

def __init__(self, config_volume, yaml_filename, py_filename):
self.config_volume = config_volume
@ -12,13 +12,17 @@ logger = logging.getLogger(__name__)

class CannotWriteConfigException(Exception):
""" Exception raised when the config cannot be written. """
"""
Exception raised when the config cannot be written.
"""

pass

class SetupIncompleteException(Exception):
""" Exception raised when attempting to verify config that has not yet been setup. """
"""
Exception raised when attempting to verify config that has not yet been set up.
"""

pass

@ -63,8 +67,9 @@ def export_yaml(config_obj, config_file):

@add_metaclass(ABCMeta)
class BaseProvider(object):
""" A configuration provider helps to load, save, and handle config override in the application.
"""
"""
A configuration provider helps to load, save, and handle config override in the application.
"""

@property
def provider_id(self):
@ -72,59 +77,84 @@ class BaseProvider(object):

@abstractmethod
def update_app_config(self, app_config):
""" Updates the given application config object with the loaded override config. """
"""
Updates the given application config object with the loaded override config.
"""

@abstractmethod
def get_config(self):
""" Returns the contents of the config override file, or None if none. """
"""
Returns the contents of the config override file, or None if none.
"""

@abstractmethod
def save_config(self, config_object):
""" Updates the contents of the config override file to those given. """
"""
Updates the contents of the config override file to those given.
"""

@abstractmethod
def config_exists(self):
""" Returns true if a config override file exists in the config volume. """
"""
Returns true if a config override file exists in the config volume.
"""

@abstractmethod
def volume_exists(self):
""" Returns whether the config override volume exists. """
"""
Returns whether the config override volume exists.
"""

@abstractmethod
def volume_file_exists(self, filename):
""" Returns whether the file with the given name exists under the config override volume. """
"""
Returns whether the file with the given name exists under the config override volume.
"""

@abstractmethod
def get_volume_file(self, filename, mode="r"):
""" Returns a Python file referring to the given name under the config override volume. """
"""
Returns a Python file referring to the given name under the config override volume.
"""

@abstractmethod
def write_volume_file(self, filename, contents):
""" Writes the given contents to the config override volumne, with the given filename. """
"""
Writes the given contents to the config override volume, with the given filename.
"""

@abstractmethod
def remove_volume_file(self, filename):
""" Removes the config override volume file with the given filename. """
"""
Removes the config override volume file with the given filename.
"""

@abstractmethod
def list_volume_directory(self, path):
""" Returns a list of strings representing the names of the files found in the config override
directory under the given path. If the path doesn't exist, returns None.
"""
"""
Returns a list of strings representing the names of the files found in the config override
directory under the given path.

If the path doesn't exist, returns None.
"""

@abstractmethod
def save_volume_file(self, filename, flask_file):
""" Saves the given flask file to the config override volume, with the given
filename.
"""
"""
Saves the given flask file to the config override volume, with the given filename.
"""

@abstractmethod
def requires_restart(self, app_config):
""" If true, the configuration loaded into memory for the app does not match that on disk,
"""
If true, the configuration loaded into memory for the app does not match that on disk,
indicating that this container requires a restart.
"""
"""

@abstractmethod
def get_volume_path(self, directory, filename):
""" Helper for constructing file paths, which may differ between providers. For example,
kubernetes can't have subfolders in configmaps """
"""
Helper for constructing file paths, which may differ between providers.

For example, kubernetes can't have subfolders in configmaps.
"""
@ -8,7 +8,9 @@ logger = logging.getLogger(__name__)

def _ensure_parent_dir(filepath):
""" Ensures that the parent directory of the given file path exists. """
"""
Ensures that the parent directory of the given file path exists.
"""
try:
parentpath = os.path.abspath(os.path.join(filepath, os.pardir))
if not os.path.isdir(parentpath):

@ -18,8 +20,9 @@ def _ensure_parent_dir(filepath):

class FileConfigProvider(BaseFileProvider):
""" Implementation of the config provider that reads and writes the data
from/to the file system. """
"""
Implementation of the config provider that reads and writes the data from/to the file system.
"""

def __init__(self, config_volume, yaml_filename, py_filename):
super(FileConfigProvider, self).__init__(config_volume, yaml_filename, py_filename)
@ -8,8 +8,11 @@ REAL_FILES = ["test/data/signing-private.gpg", "test/data/signing-public.gpg", "

class TestConfigProvider(BaseProvider):
""" Implementation of the config provider for testing. Everything is kept in-memory instead on
the real file system. """
"""
Implementation of the config provider for testing.

Everything is kept in-memory instead of on the real file system.
"""

def __init__(self):
self.clear()
@ -28,11 +28,12 @@ class K8sApiException(Exception):

def _deployment_rollout_status_message(deployment, deployment_name):
"""
Gets the friendly human readable message of the current state of the deployment rollout
:param deployment: python dict matching: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.11/#deployment-v1-apps
:param deployment_name: string
:return: DeploymentRolloutStatus
"""
Gets the friendly human-readable message of the current state of the deployment rollout.

:param deployment: python dict matching: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.11/#deployment-v1-apps
:param deployment_name: string
:return: DeploymentRolloutStatus
"""
# Logic for rollout status pulled from the `kubectl rollout status` command:
# https://github.com/kubernetes/kubernetes/blob/d9ba19c751709c8608e09a0537eea98973f3a796/pkg/kubectl/rollout_status.go#L62
if deployment["metadata"]["generation"] <= deployment["status"]["observedGeneration"]:

@ -88,7 +89,9 @@ def _deployment_rollout_status_message(deployment, deployment_name):

class KubernetesAccessorSingleton(object):
""" Singleton allowing access to kubernetes operations """
"""
Singleton allowing access to kubernetes operations.
"""

_instance = None

@ -102,10 +105,11 @@ class KubernetesAccessorSingleton(object):

@classmethod
def get_instance(cls, kube_config=None):
"""
Singleton getter implementation, returns the instance if one exists, otherwise creates the
instance and ties it to the class.
:return: KubernetesAccessorSingleton
"""
Singleton getter implementation, returns the instance if one exists, otherwise creates the
instance and ties it to the class.

:return: KubernetesAccessorSingleton
"""
if cls._instance is None:
return cls(kube_config)
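get_instance here is a conventional class-attribute singleton: the constructor registers the first instance on the class, and later calls return it. A standalone sketch (the Accessor class is hypothetical, not the Quay implementation):

    class Accessor(object):
        _instance = None

        def __init__(self, config=None):
            self.config = config
            # Tie the first-created instance to the class.
            Accessor._instance = self

        @classmethod
        def get_instance(cls, config=None):
            # Return the existing instance, or create (and register) one.
            if cls._instance is None:
                return cls(config)
            return cls._instance

    a = Accessor.get_instance({"namespace": "quay"})
    assert Accessor.get_instance() is a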
@ -113,9 +117,10 @@ class KubernetesAccessorSingleton(object):

def save_secret_to_directory(self, dir_path):
"""
Saves all files in the kubernetes secret to a local directory.
Assumes the directory is empty.
"""
Saves all files in the kubernetes secret to a local directory.

Assumes the directory is empty.
"""
secret = self._lookup_secret()

secret_data = secret.get("data", {})

@ -143,8 +148,8 @@ class KubernetesAccessorSingleton(object):

def replace_qe_secret(self, new_secret_data):
"""
Removes the old config and replaces it with the new_secret_data as one action
"""
Removes the old config and replaces it with the new_secret_data as one action.
"""
# Check first that the namespace for Red Hat Quay exists. If it does not, report that
# as an error, as it seems to be a common issue.
namespace_url = "namespaces/%s" % (self.kube_config.qe_namespace)

@ -183,10 +188,11 @@ class KubernetesAccessorSingleton(object):
self._assert_success(self._execute_k8s_api("PUT", secret_url, secret))

def get_deployment_rollout_status(self, deployment_name):
""""
Returns the status of a rollout of a given deployment
:return _DeploymentRolloutStatus
"""
"""
Returns the status of a rollout of a given deployment.

:return _DeploymentRolloutStatus
"""
deployment_selector_url = "namespaces/%s/deployments/%s" % (
self.kube_config.qe_namespace,
deployment_name,

@ -203,9 +209,9 @@ class KubernetesAccessorSingleton(object):
return _deployment_rollout_status_message(deployment, deployment_name)

def get_qe_deployments(self):
""""
Returns all deployments matching the label selector provided in the KubeConfig
"""
"""
Returns all deployments matching the label selector provided in the KubeConfig.
"""
deployment_selector_url = "namespaces/%s/deployments?labelSelector=%s%%3D%s" % (
self.kube_config.qe_namespace,
QE_DEPLOYMENT_LABEL,

@ -220,9 +226,9 @@ class KubernetesAccessorSingleton(object):
return json.loads(response.text)

def cycle_qe_deployments(self, deployment_names):
""""
Triggers a rollout of all desired deployments in the qe namespace
"""
"""
Triggers a rollout of all desired deployments in the qe namespace.
"""

for name in deployment_names:
logger.debug("Cycling deployment %s", name)

@ -5,12 +5,13 @@ from config_app._init_config import CONF_DIR

def logfile_path(jsonfmt=False, debug=False):
"""
Returns a logfile conf path following these rules:

- conf/logging_debug_json.conf # jsonfmt=true, debug=true
- conf/logging_json.conf # jsonfmt=true, debug=false
- conf/logging_debug.conf # jsonfmt=false, debug=true
- conf/logging.conf # jsonfmt=false, debug=false
Can be parametrized via envvars: JSONLOG=true, DEBUGLOG=true
"""
"""
_json = ""
_debug = ""
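The rule table in this docstring maps directly onto filename suffixes. A standalone sketch of the selection logic as the docstring describes it (CONF_DIR stands in for config_app._init_config.CONF_DIR; the body is assumed from the docstring, not verified against the function):

    import os

    CONF_DIR = "conf/"  # hypothetical stand-in for config_app._init_config.CONF_DIR

    def logfile_path(jsonfmt=False, debug=False):
        _json = ""
        _debug = ""
        if jsonfmt or os.getenv("JSONLOG", "false").lower() == "true":
            _json = "_json"
        if debug or os.getenv("DEBUGLOG", "false").lower() == "true":
            _debug = "_debug"
        return os.path.join(CONF_DIR, "logging%s%s.conf" % (_debug, _json))

    assert logfile_path(jsonfmt=True, debug=True).endswith("logging_debug_json.conf")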
@ -4,21 +4,26 @@ import OpenSSL

class CertInvalidException(Exception):
""" Exception raised when a certificate could not be parsed/loaded. """
"""
Exception raised when a certificate could not be parsed/loaded.
"""

pass

class KeyInvalidException(Exception):
""" Exception raised when a key could not be parsed/loaded or successfully applied to a cert. """
"""
Exception raised when a key could not be parsed/loaded or successfully applied to a cert.
"""

pass

def load_certificate(cert_contents):
""" Loads the certificate from the given contents and returns it or raises a CertInvalidException
on failure.
"""
"""
Loads the certificate from the given contents and returns it or raises a CertInvalidException on
failure.
"""
try:
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_contents)
return SSLCertificate(cert)

@ -30,15 +35,19 @@ _SUBJECT_ALT_NAME = "subjectAltName"

class SSLCertificate(object):
""" Helper class for easier working with SSL certificates. """
"""
Helper class for easier working with SSL certificates.
"""

def __init__(self, openssl_cert):
self.openssl_cert = openssl_cert

def validate_private_key(self, private_key_path):
""" Validates that the private key found at the given file path applies to this certificate.
"""
Validates that the private key found at the given file path applies to this certificate.

Raises a KeyInvalidException on failure.
"""
"""
context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
context.use_certificate(self.openssl_cert)

@ -49,7 +58,9 @@ class SSLCertificate(object):
raise KeyInvalidException(ex.message[0][2])

def matches_name(self, check_name):
""" Returns true if this SSL certificate matches the given DNS hostname. """
"""
Returns true if this SSL certificate matches the given DNS hostname.
"""
for dns_name in self.names:
if fnmatch(check_name, dns_name):
return True
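matches_name treats each certificate name as a glob, so wildcard SAN entries like *.example.com behave as expected. A quick standalone illustration with fnmatch (the names list is made up):

    from fnmatch import fnmatch

    names = ["*.example.com", "example.com"]  # hypothetical certificate names

    def matches_name(check_name):
        # A hostname matches if any certificate name, treated as a glob, matches it.
        return any(fnmatch(check_name, dns_name) for dns_name in names)

    assert matches_name("registry.example.com")
    assert not matches_name("registry.other.org")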
@ -58,17 +69,25 @@ class SSLCertificate(object):

@property
def expired(self):
""" Returns whether the SSL certificate has expired. """
"""
Returns whether the SSL certificate has expired.
"""
return self.openssl_cert.has_expired()

@property
def common_name(self):
""" Returns the defined common name for the certificate, if any. """
"""
Returns the defined common name for the certificate, if any.
"""
return self.openssl_cert.get_subject().commonName

@property
def names(self):
""" Returns all the DNS named to which the certificate applies. May be empty. """
"""
Returns all the DNS names to which the certificate applies.

May be empty.
"""
dns_names = set()
common_name = self.common_name
if common_name is not None:
@ -3,8 +3,8 @@ from util.config.validator import EXTRA_CA_DIRECTORY

def strip_absolute_path_and_add_trailing_dir(path):
"""
Removes the initial trailing / from the prefix path, and add the last dir one
"""
Removes the initial / from the prefix path, and adds a trailing / for the last dir.
"""
return path[1:] + "/"
@ -14,13 +14,17 @@ def _ensure_sha256_header(digest):

def get_blob(digest, models_ref):
""" Find a blob by its digest. """
"""
Find a blob by its digest.
"""
Blob = models_ref.Blob
return Blob.select().where(Blob.digest == _ensure_sha256_header(digest)).get()

def get_or_create_blob(digest, size, media_type_name, locations, models_ref):
""" Try to find a blob by its digest or create it. """
"""
Try to find a blob by its digest or create it.
"""
Blob = models_ref.Blob
BlobPlacement = models_ref.BlobPlacement

@ -48,7 +52,9 @@ def get_or_create_blob(digest, size, media_type_name, locations, models_ref):

def get_blob_locations(digest, models_ref):
""" Find all locations names for a blob. """
"""
Find all location names for a blob.
"""
Blob = models_ref.Blob
BlobPlacement = models_ref.BlobPlacement
BlobPlacementLocation = models_ref.BlobPlacementLocation
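Every digest is funneled through _ensure_sha256_header before hitting a query. Its body is not shown in this diff; presumably it normalizes bare digests to the prefixed form, roughly as sketched here (an assumption, not the verified implementation):

    def _ensure_sha256_header(digest):
        # Leave already-prefixed digests alone; prefix bare hex digests.
        if digest.startswith("sha256:"):
            return digest
        return "sha256:" + digest

    assert _ensure_sha256_header("abc123") == "sha256:abc123"
    assert _ensure_sha256_header("sha256:abc123") == "sha256:abc123"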
@ -2,9 +2,11 @@ from data.appr_model import tag as tag_model

def get_channel_releases(repo, channel, models_ref):
""" Return all previously linked tags.
This works based upon Tag lifetimes.
"""
"""
Return all previously linked tags.

This works based upon Tag lifetimes.
"""
Channel = models_ref.Channel
Tag = models_ref.Tag

@ -24,13 +26,17 @@ def get_channel_releases(repo, channel, models_ref):

def get_channel(repo, channel_name, models_ref):
""" Find a Channel by name. """
"""
Find a Channel by name.
"""
channel = tag_model.get_tag(repo, channel_name, models_ref, "channel")
return channel

def get_tag_channels(repo, tag_name, models_ref, active=True):
""" Find the Channels associated with a Tag. """
"""
Find the Channels associated with a Tag.
"""
Tag = models_ref.Tag

tag = tag_model.get_tag(repo, tag_name, models_ref, "release")

@ -43,12 +49,16 @@ def get_tag_channels(repo, tag_name, models_ref, active=True):

def delete_channel(repo, channel_name, models_ref):
""" Delete a channel by name. """
"""
Delete a channel by name.
"""
return tag_model.delete_tag(repo, channel_name, models_ref, "channel")

def create_or_update_channel(repo, channel_name, tag_name, models_ref):
""" Creates or updates a channel to include a particular tag. """
"""
Creates or updates a channel to include a particular tag.
"""
tag = tag_model.get_tag(repo, tag_name, models_ref, "release")
return tag_model.create_or_update_tag(
repo, channel_name, models_ref, linked_tag=tag, tag_kind="channel"

@ -56,7 +66,9 @@ def create_or_update_channel(repo, channel_name, tag_name, models_ref):

def get_repo_channels(repo, models_ref):
""" Creates or updates a channel to include a particular tag. """
"""
Returns the channels defined for a repo.
"""
Channel = models_ref.Channel
Tag = models_ref.Tag
@ -53,7 +53,9 @@ def get_or_create_manifest(manifest_json, media_type_name, models_ref):

def get_manifest_types(repo, models_ref, release=None):
""" Returns an array of MediaTypes.name for a repo, can filter by tag """
"""
Returns an array of MediaTypes.name for a repo, can filter by tag.
"""
Tag = models_ref.Tag
ManifestListManifest = models_ref.ManifestListManifest

@ -45,8 +45,10 @@ def get_or_create_manifest_list(manifest_list_json, media_type_name, schema_vers

def create_manifestlistmanifest(manifestlist, manifest_ids, manifest_list_json, models_ref):
""" From a manifestlist, manifests, and the manifest list blob,
create if doesn't exist the manfiestlistmanifest for each manifest """
"""
From a manifestlist, manifests, and the manifest list blob, create the
manifestlistmanifest for each manifest if it doesn't exist.
"""
for pos in xrange(len(manifest_ids)):
manifest_id = manifest_ids[pos]
manifest_json = manifest_list_json[pos]
@ -10,7 +10,9 @@ from data.appr_model import tag as tag_model

def list_packages_query(
models_ref, namespace=None, media_type=None, search_query=None, username=None, limit=50,
):
""" List and filter repository by search query. """
"""
List and filter repositories by search query.
"""
Tag = models_ref.Tag

if username and not search_query:
@ -23,7 +23,9 @@ def _ensure_sha256_header(digest):

def get_app_release(repo, tag_name, media_type, models_ref):
""" Returns (tag, manifest, blob) given a repo object, tag_name, and media_type). """
"""
Returns (tag, manifest, blob) given a repo object, tag_name, and media_type.
"""
ManifestListManifest = models_ref.ManifestListManifest
Manifest = models_ref.Manifest
Blob = models_ref.Blob

@ -88,12 +90,13 @@ def delete_app_release(repo, tag_name, media_type, models_ref):

def create_app_release(repo, tag_name, manifest_data, digest, models_ref, force=False):
""" Create a new application release, it includes creating a new Tag, ManifestList,
ManifestListManifests, Manifest, ManifestBlob.
"""
Create a new application release; it includes creating a new Tag, ManifestList,
ManifestListManifests, Manifest, ManifestBlob.

To deduplicate the ManifestList, the manifestlist_json is kept ordered by the manifest.id.
To find the insert point in the ManifestList it uses bisect on the manifest-ids list.
"""
To deduplicate the ManifestList, the manifestlist_json is kept ordered by the manifest.id. To
find the insert point in the ManifestList it uses bisect on the manifest-ids list.
"""
ManifestList = models_ref.ManifestList
ManifestListManifest = models_ref.ManifestListManifest
Blob = models_ref.Blob

@ -160,7 +163,9 @@ def create_app_release(repo, tag_name, manifest_data, digest, models_ref, force=

def get_release_objs(repo, models_ref, media_type=None):
""" Returns an array of Tag for a repo, with optional filtering by media_type. """
"""
Returns an array of Tag for a repo, with optional filtering by media_type.
"""
Tag = models_ref.Tag

release_query = Tag.select().where(

@ -173,5 +178,7 @@ def get_release_objs(repo, models_ref, media_type=None):

def get_releases(repo, model_refs, media_type=None):
""" Returns an array of Tag.name for a repo, can filter by media_type. """
"""
Returns an array of Tag.name for a repo, can filter by media_type.
"""
return [t.name for t in get_release_objs(repo, model_refs, media_type)]
@ -114,7 +114,9 @@ def tag_exists(repo, tag_name, models_ref, tag_kind="release"):

def filter_tags_by_media_type(tag_query, media_type, models_ref):
""" Return only available tag for a media_type. """
"""
Return only the available tags for a media_type.
"""
ManifestListManifest = models_ref.ManifestListManifest
Tag = models_ref.Tag
media_type = manifest_media_type(media_type)
@ -325,7 +325,9 @@ PLANS = [

def get_plan(plan_id):
""" Returns the plan with the given ID or None if none. """
"""
Returns the plan with the given ID or None if none.
"""
for plan in PLANS:
if plan["stripeId"] == plan_id:
return plan
@ -44,9 +44,9 @@ class RedisBuildLogs(object):

def append_log_entry(self, build_id, log_obj):
"""
Appends the serialized form of log_obj to the end of the log entry list
and returns the new length of the list.
"""
Appends the serialized form of log_obj to the end of the log entry list and returns the new
length of the list.
"""
pipeline = self._redis.pipeline(transaction=False)
pipeline.expire(self._logs_key(build_id), SEVEN_DAYS)
pipeline.rpush(self._logs_key(build_id), json.dumps(log_obj))
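The expire-then-rpush pair runs in one non-transactional pipeline, so the TTL refresh and the append reach Redis in a single round trip. A standalone sketch with redis-py (the connection and key layout are made up):

    import json
    import redis

    SEVEN_DAYS = 60 * 60 * 24 * 7
    client = redis.StrictRedis(host="localhost")  # hypothetical connection

    def append_log_entry(build_id, log_obj):
        key = "builds/%s/logs" % build_id
        pipeline = client.pipeline(transaction=False)
        pipeline.expire(key, SEVEN_DAYS)  # refresh the TTL on every append
        pipeline.rpush(key, json.dumps(log_obj))
        # execute() returns one result per queued command; rpush yields the new length.
        return pipeline.execute()[1]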
@ -55,9 +55,9 @@ class RedisBuildLogs(object):

def append_log_message(self, build_id, log_message, log_type=None, log_data=None):
"""
Wraps the message in an envelope and push it to the end of the log entry
list and returns the index at which it was inserted.
"""
Wraps the message in an envelope, pushes it to the end of the log entry list, and returns
the index at which it was inserted.
"""
log_obj = {"message": log_message}

if log_type:

@ -70,9 +70,9 @@ class RedisBuildLogs(object):

def get_log_entries(self, build_id, start_index):
"""
Returns a tuple of the current length of the list and an iterable of the
requested log entries.
"""
Returns a tuple of the current length of the list and an iterable of the requested log
entries.
"""
try:
llen = self._redis.llen(self._logs_key(build_id))
log_entries = self._redis.lrange(self._logs_key(build_id), start_index, -1)

@ -82,20 +82,20 @@ class RedisBuildLogs(object):

def expire_status(self, build_id):
"""
Sets the status entry to expire in 1 day.
"""
Sets the status entry to expire in 1 day.
"""
self._redis.expire(self._status_key(build_id), ONE_DAY)

def expire_log_entries(self, build_id):
"""
Sets the log entry to expire in 1 day.
"""
Sets the log entry to expire in 1 day.
"""
self._redis.expire(self._logs_key(build_id), ONE_DAY)

def delete_log_entries(self, build_id):
"""
Delete the log entry
"""
Delete the log entry.
"""
self._redis.delete(self._logs_key(build_id))

@staticmethod

@ -104,15 +104,14 @@ class RedisBuildLogs(object):

def set_status(self, build_id, status_obj):
"""
Sets the status key for this build to json serialized form of the supplied
obj.
"""
Sets the status key for this build to the JSON-serialized form of the supplied obj.
"""
self._redis.set(self._status_key(build_id), json.dumps(status_obj), ex=SEVEN_DAYS)

def get_status(self, build_id):
"""
Loads the status information for the specified build id.
"""
Loads the status information for the specified build id.
"""
try:
fetched = self._redis.get(self._status_key(build_id))
except redis.RedisError as re:
data/cache/__init__.py
@ -2,7 +2,9 @@ from data.cache.impl import NoopDataModelCache, InMemoryDataModelCache, Memcache

def get_model_cache(config):
""" Returns a data model cache matching the given configuration. """
"""
Returns a data model cache matching the given configuration.
"""
cache_config = config.get("DATA_MODEL_CACHE_CONFIG", {})
engine = cache_config.get("engine", "noop")
data/cache/cache_key.py
@ -2,29 +2,39 @@ from collections import namedtuple

class CacheKey(namedtuple("CacheKey", ["key", "expiration"])):
""" Defines a key into the data model cache. """
"""
Defines a key into the data model cache.
"""

pass

def for_repository_blob(namespace_name, repo_name, digest, version):
""" Returns a cache key for a blob in a repository. """
"""
Returns a cache key for a blob in a repository.
"""
return CacheKey("repo_blob__%s_%s_%s_%s" % (namespace_name, repo_name, digest, version), "60s")

def for_catalog_page(auth_context_key, start_id, limit):
""" Returns a cache key for a single page of a catalog lookup for an authed context. """
"""
Returns a cache key for a single page of a catalog lookup for an authed context.
"""
params = (auth_context_key or "(anon)", start_id or 0, limit or 0)
return CacheKey("catalog_page__%s_%s_%s" % params, "60s")

def for_namespace_geo_restrictions(namespace_name):
""" Returns a cache key for the geo restrictions for a namespace. """
"""
Returns a cache key for the geo restrictions for a namespace.
"""
return CacheKey("geo_restrictions__%s" % (namespace_name), "240s")

def for_active_repo_tags(repository_id, start_pagination_id, limit):
""" Returns a cache key for the active tags in a repository. """
"""
Returns a cache key for the active tags in a repository.
"""
return CacheKey(
"repo_active_tags__%s_%s_%s" % (repository_id, start_pagination_id, limit), "120s"
)
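Because CacheKey subclasses a namedtuple, keys are hashable value objects that carry their TTL next to the key string. A quick standalone demonstration:

    from collections import namedtuple

    class CacheKey(namedtuple("CacheKey", ["key", "expiration"])):
        pass

    k = CacheKey("repo_blob__ns_repo_sha256:abc_2", "60s")
    assert k.key.startswith("repo_blob__")
    assert k.expiration == "60s"
    # Tuples hash by value, so equal keys collide in a dict-backed cache as intended.
    assert {k: 1}[CacheKey(k.key, "60s")] == 1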
data/cache/impl.py
@ -20,25 +20,33 @@ def is_not_none(value):

@add_metaclass(ABCMeta)
class DataModelCache(object):
""" Defines an interface for cache storing and returning tuple data model objects. """
"""
Defines an interface for cache storing and returning tuple data model objects.
"""

@abstractmethod
def retrieve(self, cache_key, loader, should_cache=is_not_none):
""" Checks the cache for the specified cache key and returns the value found (if any). If none
found, the loader is called to get a result and populate the cache.
"""
"""
Checks the cache for the specified cache key and returns the value found (if any).

If none found, the loader is called to get a result and populate the cache.
"""
pass

class NoopDataModelCache(DataModelCache):
""" Implementation of the data model cache which does nothing. """
"""
Implementation of the data model cache which does nothing.
"""

def retrieve(self, cache_key, loader, should_cache=is_not_none):
return loader()
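The retrieve contract is: return a hit if present, otherwise call the loader and populate the cache only when should_cache approves the result. A toy illustration with a plain dict (not the InMemoryDataModelCache shown below):

    def is_not_none(value):
        return value is not None

    class DictModelCache(object):
        """Toy cache honoring the retrieve() contract with a plain dict."""

        def __init__(self):
            self._store = {}

        def retrieve(self, cache_key, loader, should_cache=is_not_none):
            if cache_key in self._store:
                return self._store[cache_key]
            result = loader()
            # Only populate the cache when the loaded value is cacheable.
            if should_cache(result):
                self._store[cache_key] = result
            return result

    cache = DictModelCache()
    assert cache.retrieve("k", lambda: 42) == 42
    assert cache.retrieve("k", lambda: 99) == 42  # served from cache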
class InMemoryDataModelCache(DataModelCache):
""" Implementation of the data model cache backed by an in-memory dictionary. """
"""
Implementation of the data model cache backed by an in-memory dictionary.
"""

def __init__(self):
self.cache = ExpiresDict()

@ -83,7 +91,9 @@ _JSON_TYPE = 2

class MemcachedModelCache(DataModelCache):
""" Implementation of the data model cache backed by a memcached. """
"""
Implementation of the data model cache backed by a memcached.
"""

def __init__(
self,
data/database.py
@ -95,9 +95,11 @@ _EXTRA_ARGS = {

def pipes_concat(arg1, arg2, *extra_args):
""" Concat function for sqlite, since it doesn't support fn.Concat.
Concatenates clauses with || characters.
"""
"""
Concat function for sqlite, since it doesn't support fn.Concat.

Concatenates clauses with || characters.
"""
reduced = arg1.concat(arg2)
for arg in extra_args:
reduced = reduced.concat(arg)

@ -105,9 +107,11 @@ def pipes_concat(arg1, arg2, *extra_args):

def function_concat(arg1, arg2, *extra_args):
""" Default implementation of concat which uses fn.Concat(). Used by all
database engines except sqlite.
"""
"""
Default implementation of concat which uses fn.Concat().

Used by all database engines except sqlite.
"""
return fn.Concat(arg1, arg2, *extra_args)
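The two helpers exist because sqlite spells string concatenation with the || operator while the other engines expose CONCAT(). A sketch, assuming peewee, of dispatching between them by engine (the concat_for helper is hypothetical):

    from peewee import SqliteDatabase, fn

    def pipes_concat(arg1, arg2, *extra_args):
        reduced = arg1.concat(arg2)  # renders as `arg1 || arg2` in SQL
        for arg in extra_args:
            reduced = reduced.concat(arg)
        return reduced

    def function_concat(arg1, arg2, *extra_args):
        return fn.Concat(arg1, arg2, *extra_args)

    def concat_for(db):
        # sqlite has no CONCAT() function, so fall back to the || operator.
        return pipes_concat if isinstance(db, SqliteDatabase) else function_concat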
@ -125,16 +129,17 @@ def null_for_update(query):

def delete_instance_filtered(instance, model_class, delete_nullable, skip_transitive_deletes):
""" Deletes the DB instance recursively, skipping any models in the skip_transitive_deletes set.
"""
Deletes the DB instance recursively, skipping any models in the skip_transitive_deletes set.

Callers *must* ensure that any models listed in the skip_transitive_deletes must be capable
of being directly deleted when the instance is deleted (with automatic sorting handling
dependency order).
Callers *must* ensure that any models listed in the skip_transitive_deletes are capable
of being directly deleted when the instance is deleted (with automatic sorting handling
dependency order).

For example, the RepositoryTag and Image tables for Repository will always refer to the
*same* repository when RepositoryTag references Image, so we can safely skip
transitive deletion for the RepositoryTag table.
"""
For example, the RepositoryTag and Image tables for Repository will always refer to the
*same* repository when RepositoryTag references Image, so we can safely skip
transitive deletion for the RepositoryTag table.
"""
# We need to sort the ops so that models get cleaned in order of their dependencies
ops = reversed(list(instance.dependencies(delete_nullable)))
filtered_ops = []
@ -206,9 +211,10 @@ class RetryOperationalError(object):

class CloseForLongOperation(object):
""" Helper object which disconnects the database then reconnects after the nested operation
completes.
"""
"""
Helper object which disconnects the database then reconnects after the nested operation
completes.
"""

def __init__(self, config_object):
self.config_object = config_object

@ -225,7 +231,9 @@ class CloseForLongOperation(object):

class UseThenDisconnect(object):
""" Helper object for conducting work with a database and then tearing it down. """
"""
Helper object for conducting work with a database and then tearing it down.
"""

def __init__(self, config_object):
self.config_object = config_object

@ -241,9 +249,10 @@ class UseThenDisconnect(object):

class TupleSelector(object):
""" Helper class for selecting tuples from a peewee query and easily accessing
them as if they were objects.
"""
"""
Helper class for selecting tuples from a peewee query and easily accessing them as if they were
objects.
"""

class _TupleWrapper(object):
def __init__(self, data, fields):

@ -255,7 +264,9 @@ class TupleSelector(object):

@classmethod
def tuple_reference_key(cls, field):
""" Returns a string key for referencing a field in a TupleSelector. """
"""
Returns a string key for referencing a field in a TupleSelector.
"""
if isinstance(field, Function):
return field.name + ",".join([cls.tuple_reference_key(arg) for arg in field.arguments])
@ -288,8 +299,11 @@ ensure_under_transaction = CallableProxy()

def validate_database_url(url, db_kwargs, connect_timeout=5):
""" Validates that we can connect to the given database URL, with the given kwargs. Raises
an exception if the validation fails. """
"""
Validates that we can connect to the given database URL, with the given kwargs.

Raises an exception if the validation fails.
"""
db_kwargs = db_kwargs.copy()

try:

@ -305,8 +319,11 @@ def validate_database_url(url, db_kwargs, connect_timeout=5):

def validate_database_precondition(url, db_kwargs, connect_timeout=5):
""" Validates that we can connect to the given database URL and the database meets our
precondition. Raises an exception if the validation fails. """
"""
Validates that we can connect to the given database URL and the database meets our precondition.

Raises an exception if the validation fails.
"""
db_kwargs = db_kwargs.copy()
try:
driver = _db_from_url(
@ -473,19 +490,23 @@ def _get_enum_field_values(enum_field):

class EnumField(ForeignKeyField):
""" Create a cached python Enum from an EnumTable """
"""
Create a cached python Enum from an EnumTable.
"""

def __init__(self, model, enum_key_field="name", *args, **kwargs):
"""
model is the EnumTable model-class (see ForeignKeyField)
enum_key_field is the field from the EnumTable to use as the enum name
"""
model is the EnumTable model-class (see ForeignKeyField); enum_key_field is the field from
the EnumTable to use as the enum name.
"""
self.enum_key_field = enum_key_field
super(EnumField, self).__init__(model, *args, **kwargs)

@property
def enum(self):
""" Returns a python enun.Enum generated from the associated EnumTable """
"""
Returns a python enum.Enum generated from the associated EnumTable.
"""
return _get_enum_field_values(self)

def get_id(self, name):
@ -529,9 +552,10 @@ class BaseModel(ReadReplicaSupportedModel):
read_only_config = read_only_config

def __getattribute__(self, name):
""" Adds _id accessors so that foreign key field IDs can be looked up without making
a database roundtrip.
"""
"""
Adds _id accessors so that foreign key field IDs can be looked up without making a database
roundtrip.
"""
if name.endswith("_id"):
field_name = name[0 : len(name) - 3]
if field_name in self._meta.fields:

@ -762,13 +786,14 @@ class RepositoryKind(BaseModel):

@unique
class RepositoryState(IntEnum):
"""
Possible states of a repository.
NORMAL: Regular repo where all actions are possible
READ_ONLY: Only read actions, such as pull, are allowed regardless of specific user permissions
MIRROR: Equivalent to READ_ONLY except that mirror robot has write permission
MARKED_FOR_DELETION: Indicates the repository has been marked for deletion and should be hidden
and un-usable.
"""
Possible states of a repository.

NORMAL: Regular repo where all actions are possible
READ_ONLY: Only read actions, such as pull, are allowed regardless of specific user permissions
MIRROR: Equivalent to READ_ONLY except that mirror robot has write permission
MARKED_FOR_DELETION: Indicates the repository has been marked for deletion and should be hidden
and unusable.
"""

NORMAL = 0
READ_ONLY = 1
@ -1038,9 +1063,9 @@ class Image(BaseModel):
)

def ancestor_id_list(self):
""" Returns an integer list of ancestor ids, ordered chronologically from
root to direct parent.
"""
"""
Returns an integer list of ancestor ids, ordered chronologically from root to direct parent.
"""
return map(int, self.ancestors.split("/")[1:-1])

@ -1078,7 +1103,9 @@ class RepositoryTag(BaseModel):

class BUILD_PHASE(object):
""" Build phases enum """
"""
Build phases enum.
"""

ERROR = "error"
INTERNAL_ERROR = "internalerror"

@ -1102,7 +1129,9 @@ class BUILD_PHASE(object):

class TRIGGER_DISABLE_REASON(object):
""" Build trigger disable reason enum """
"""
Build trigger disable reason enum.
"""

BUILD_FALURES = "successive_build_failures"
INTERNAL_ERRORS = "successive_build_internal_errors"
@ -1195,7 +1224,11 @@ class LogEntry(BaseModel):

class LogEntry2(BaseModel):
""" TEMP FOR QUAY.IO ONLY. DO NOT RELEASE INTO QUAY ENTERPRISE. """
"""
TEMP FOR QUAY.IO ONLY.

DO NOT RELEASE INTO QUAY ENTERPRISE.
"""

kind = ForeignKeyField(LogEntryKind)
account = IntegerField(index=True, db_column="account_id")
@ -1423,8 +1456,9 @@ class ServiceKey(BaseModel):

class MediaType(BaseModel):
""" MediaType is an enumeration of the possible formats of various objects in the data model.
"""
"""
MediaType is an enumeration of the possible formats of various objects in the data model.
"""

name = CharField(index=True, unique=True)

@ -1437,17 +1471,19 @@ class Messages(BaseModel):

class LabelSourceType(BaseModel):
""" LabelSourceType is an enumeration of the possible sources for a label.
"""
"""
LabelSourceType is an enumeration of the possible sources for a label.
"""

name = CharField(index=True, unique=True)
mutable = BooleanField(default=False)

class Label(BaseModel):
""" Label represents user-facing metadata associated with another entry in the database (e.g. a
Manifest).
"""
"""
Label represents user-facing metadata associated with another entry in the database (e.g. a
Manifest).
"""

uuid = CharField(default=uuid_generator, index=True, unique=True)
key = CharField(index=True)
@ -1457,8 +1493,9 @@ class Label(BaseModel):

class ApprBlob(BaseModel):
""" ApprBlob represents a content-addressable object stored outside of the database.
"""
"""
ApprBlob represents a content-addressable object stored outside of the database.
"""

digest = CharField(index=True, unique=True)
media_type = EnumField(MediaType)

@ -1467,15 +1504,17 @@ class ApprBlob(BaseModel):

class ApprBlobPlacementLocation(BaseModel):
""" ApprBlobPlacementLocation is an enumeration of the possible storage locations for ApprBlobs.
"""
"""
ApprBlobPlacementLocation is an enumeration of the possible storage locations for ApprBlobs.
"""

name = CharField(index=True, unique=True)

class ApprBlobPlacement(BaseModel):
""" ApprBlobPlacement represents the location of a Blob.
"""
"""
ApprBlobPlacement represents the location of a Blob.
"""

blob = ForeignKeyField(ApprBlob)
location = EnumField(ApprBlobPlacementLocation)

@ -1487,8 +1526,9 @@ class ApprBlobPlacement(BaseModel):

class ApprManifest(BaseModel):
""" ApprManifest represents the metadata and collection of blobs that comprise an Appr image.
"""
"""
ApprManifest represents the metadata and collection of blobs that comprise an Appr image.
"""

digest = CharField(index=True, unique=True)
media_type = EnumField(MediaType)

@ -1496,8 +1536,9 @@ class ApprManifest(BaseModel):

class ApprManifestBlob(BaseModel):
""" ApprManifestBlob is a many-to-many relation table linking ApprManifests and ApprBlobs.
"""
"""
ApprManifestBlob is a many-to-many relation table linking ApprManifests and ApprBlobs.
"""

manifest = ForeignKeyField(ApprManifest, index=True)
blob = ForeignKeyField(ApprBlob, index=True)
@ -1509,8 +1550,9 @@ class ApprManifestBlob(BaseModel):

class ApprManifestList(BaseModel):
""" ApprManifestList represents all of the various Appr manifests that compose an ApprTag.
"""
"""
ApprManifestList represents all of the various Appr manifests that compose an ApprTag.
"""

digest = CharField(index=True, unique=True)
manifest_list_json = JSONField()

@ -1519,15 +1561,17 @@ class ApprManifestList(BaseModel):

class ApprTagKind(BaseModel):
""" ApprTagKind is a enumtable to reference tag kinds.
"""
"""
ApprTagKind is an enumtable to reference tag kinds.
"""

name = CharField(index=True, unique=True)

class ApprTag(BaseModel):
""" ApprTag represents a user-facing alias for referencing an ApprManifestList.
"""
"""
ApprTag represents a user-facing alias for referencing an ApprManifestList.
"""

name = CharField()
repository = ForeignKeyField(Repository)

@ -1555,9 +1599,10 @@ ApprChannel = ApprTag.alias()

class ApprManifestListManifest(BaseModel):
""" ApprManifestListManifest is a many-to-many relation table linking ApprManifestLists and
ApprManifests.
"""
"""
ApprManifestListManifest is a many-to-many relation table linking ApprManifestLists and
ApprManifests.
"""

manifest_list = ForeignKeyField(ApprManifestList, index=True)
manifest = ForeignKeyField(ApprManifest, index=True)

@ -1573,9 +1618,10 @@ class ApprManifestListManifest(BaseModel):

class AppSpecificAuthToken(BaseModel):
""" AppSpecificAuthToken represents a token generated by a user for use with an external
application where putting the user's credentials, even encrypted, is deemed too risky.
"""
"""
AppSpecificAuthToken represents a token generated by a user for use with an external application
where putting the user's credentials, even encrypted, is deemed too risky.
"""

user = QuayUserField()
uuid = CharField(default=uuid_generator, max_length=36, index=True)
@ -1594,9 +1640,11 @@ class AppSpecificAuthToken(BaseModel):

class Manifest(BaseModel):
""" Manifest represents a single manifest under a repository. Within a repository,
there can only be one manifest with the same digest.
"""
"""
Manifest represents a single manifest under a repository.

Within a repository, there can only be one manifest with the same digest.
"""

repository = ForeignKeyField(Repository)
digest = CharField(index=True)

@ -1613,15 +1661,17 @@ class Manifest(BaseModel):

class TagKind(BaseModel):
""" TagKind describes the various kinds of tags that can be found in the registry.
"""
"""
TagKind describes the various kinds of tags that can be found in the registry.
"""

name = CharField(index=True, unique=True)

class Tag(BaseModel):
""" Tag represents a user-facing alias for referencing a Manifest or as an alias to another tag.
"""
"""
Tag represents a user-facing alias for referencing a Manifest or as an alias to another tag.
"""

name = CharField()
repository = ForeignKeyField(Repository)

@ -1648,10 +1698,12 @@ class Tag(BaseModel):

class ManifestChild(BaseModel):
""" ManifestChild represents a relationship between a manifest and its child manifest(s).
Multiple manifests can share the same children. Note that since Manifests are stored
per-repository, the repository here is a bit redundant, but we do so to make cleanup easier.
"""
"""
ManifestChild represents a relationship between a manifest and its child manifest(s).

Multiple manifests can share the same children. Note that since Manifests are stored
per-repository, the repository here is a bit redundant, but we do so to make cleanup easier.
"""

repository = ForeignKeyField(Repository)
manifest = ForeignKeyField(Manifest)
@ -1669,10 +1721,12 @@ class ManifestChild(BaseModel):

class ManifestLabel(BaseModel):
""" ManifestLabel represents a label applied to a Manifest, within a repository.
Note that since Manifests are stored per-repository, the repository here is
a bit redundant, but we do so to make cleanup easier.
"""
"""
ManifestLabel represents a label applied to a Manifest, within a repository.

Note that since Manifests are stored per-repository, the repository here is a bit redundant, but
we do so to make cleanup easier.
"""

repository = ForeignKeyField(Repository, index=True)
manifest = ForeignKeyField(Manifest)

@ -1685,7 +1739,9 @@ class ManifestLabel(BaseModel):

class ManifestBlob(BaseModel):
""" ManifestBlob represents a blob that is used by a manifest. """
"""
ManifestBlob represents a blob that is used by a manifest.
"""

repository = ForeignKeyField(Repository, index=True)
manifest = ForeignKeyField(Manifest)

@ -1698,9 +1754,10 @@ class ManifestBlob(BaseModel):

class ManifestLegacyImage(BaseModel):
""" For V1-compatible manifests only, this table maps from the manifest to its associated
Docker image.
"""
"""
For V1-compatible manifests only, this table maps from the manifest to its associated Docker
image.
"""

repository = ForeignKeyField(Repository, index=True)
manifest = ForeignKeyField(Manifest, unique=True)

@ -1708,7 +1765,9 @@ class ManifestLegacyImage(BaseModel):

class TagManifest(BaseModel):
""" TO BE DEPRECATED: The manifest for a tag. """
"""
TO BE DEPRECATED: The manifest for a tag.
"""

tag = ForeignKeyField(RepositoryTag, unique=True)
digest = CharField(index=True)

@ -1724,8 +1783,9 @@ class TagManifestToManifest(BaseModel):

class TagManifestLabel(BaseModel):
""" TO BE DEPRECATED: Mapping from a tag manifest to a label.
"""
"""
TO BE DEPRECATED: Mapping from a tag manifest to a label.
"""

repository = ForeignKeyField(Repository, index=True)
annotated = ForeignKeyField(TagManifest, index=True)

@ -1762,17 +1822,18 @@ class TagToRepositoryTag(BaseModel):

@unique
class RepoMirrorRuleType(IntEnum):
"""
Types of mirroring rules.
TAG_GLOB_CSV: Comma separated glob values (eg. "7.6,7.6-1.*")
"""
Types of mirroring rules.

TAG_GLOB_CSV: Comma separated glob values (eg. "7.6,7.6-1.*")
"""

TAG_GLOB_CSV = 1

class RepoMirrorRule(BaseModel):
"""
Determines how a given Repository should be mirrored.
"""
Determines how a given Repository should be mirrored.
"""

uuid = CharField(default=uuid_generator, max_length=36, index=True)
repository = ForeignKeyField(Repository, index=True)

@ -1789,8 +1850,8 @@ class RepoMirrorRule(BaseModel):

@unique
class RepoMirrorType(IntEnum):
"""
Types of repository mirrors.
"""
Types of repository mirrors.
"""

PULL = 1  # Pull images from the external repo

@ -1798,8 +1859,8 @@ class RepoMirrorType(IntEnum):

@unique
class RepoMirrorStatus(IntEnum):
"""
Possible statuses of repository mirroring.
"""
Possible statuses of repository mirroring.
"""

FAIL = -1
NEVER_RUN = 0

@ -1810,9 +1871,9 @@ class RepoMirrorStatus(IntEnum):

class RepoMirrorConfig(BaseModel):
"""
Represents a repository to be mirrored and any additional configuration
required to perform the mirroring.
"""
Represents a repository to be mirrored and any additional configuration required to perform the
mirroring.
"""

repository = ForeignKeyField(Repository, index=True, unique=True, backref="mirror")
creation_date = DateTimeField(default=datetime.utcnow)
@ -9,7 +9,9 @@ from util.security.secret import convert_secret_key


class DecryptionFailureException(Exception):
    """ Exception raised if a field could not be decrypted. """
    """
    Exception raised if a field could not be decrypted.
    """


EncryptionVersion = namedtuple("EncryptionVersion", ["prefix", "encrypt", "decrypt"])

@ -56,9 +58,10 @@ _RESERVED_FIELD_SPACE = len(_SEPARATOR) + max([len(k) for k in _VERSIONS.keys()]


class FieldEncrypter(object):
    """ Helper object for defining how fields are encrypted and decrypted between the database
    and the application.
    """
    """
    Helper object for defining how fields are encrypted and decrypted between the database and the
    application.
    """

    def __init__(self, secret_key, version="v0"):
        # NOTE: secret_key will be None when the system is being first initialized, so we allow that

@ -68,7 +71,9 @@ class FieldEncrypter(object):
        self._encryption_version = _VERSIONS[version]

    def encrypt_value(self, value, field_max_length=None):
        """ Encrypts the value using the current version of encryption. """
        """
        Encrypts the value using the current version of encryption.
        """
        assert self._secret_key is not None
        encrypted_value = self._encryption_version.encrypt(
            self._secret_key, value, field_max_length

@ -76,9 +81,11 @@ class FieldEncrypter(object):
        return "%s%s%s" % (self._encryption_version.prefix, _SEPARATOR, encrypted_value)

    def decrypt_value(self, value):
        """ Decrypts the value, returning it. If the value cannot be decrypted
        raises a DecryptionFailureException.
        """
        """
        Decrypts the value, returning it.

        If the value cannot be decrypted raises a DecryptionFailureException.
        """
        assert self._secret_key is not None
        if _SEPARATOR not in value:
            raise DecryptionFailureException("Invalid encrypted value")

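A minimal sketch of the version-prefix framing these docstrings describe, with a
hypothetical "$$" separator standing in for the real _SEPARATOR constant:

    SEPARATOR = "$$"  # assumption: the actual separator is defined in data/encryption.py

    def frame(prefix, ciphertext):
        # Mirrors encrypt_value's "%s%s%s" % (prefix, _SEPARATOR, encrypted_value)
        return "%s%s%s" % (prefix, SEPARATOR, ciphertext)

    def unframe(value):
        # Mirrors decrypt_value's guard: a value without the separator is invalid.
        if SEPARATOR not in value:
            raise ValueError("Invalid encrypted value")
        return value.split(SEPARATOR, 1)
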
@ -78,7 +78,9 @@ class Base64BinaryField(TextField):


class DecryptedValue(object):
    """ Wrapper around an already decrypted value to be placed into an encrypted field. """
    """
    Wrapper around an already decrypted value to be placed into an encrypted field.
    """

    def __init__(self, decrypted_value):
        assert decrypted_value is not None

@ -89,24 +91,34 @@ class DecryptedValue(object):
        return self.value

    def matches(self, unencrypted_value):
        """ Returns whether the value of this field matches the unencrypted_value. """
        """
        Returns whether the value of this field matches the unencrypted_value.
        """
        return self.decrypt() == unencrypted_value


class LazyEncryptedValue(object):
    """ Wrapper around an encrypted value in an encrypted field. Will decrypt lazily. """
    """
    Wrapper around an encrypted value in an encrypted field.

    Will decrypt lazily.
    """

    def __init__(self, encrypted_value, field):
        self.encrypted_value = encrypted_value
        self._field = field

    def decrypt(self, encrypter=None):
        """ Decrypts the value. """
        """
        Decrypts the value.
        """
        encrypter = encrypter or self._field.model._meta.encrypter
        return encrypter.decrypt_value(self.encrypted_value)

    def matches(self, unencrypted_value):
        """ Returns whether the value of this field matches the unencrypted_value. """
        """
        Returns whether the value of this field matches the unencrypted_value.
        """
        return self.decrypt() == unencrypted_value

    def __eq__(self, _):

@ -132,7 +144,9 @@ class LazyEncryptedValue(object):

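A small usage sketch of the lazy path above, with stub objects standing in for
the peewee field/model wiring (all Fake* names are hypothetical):

    class FakeEncrypter(object):
        # Stand-in for FieldEncrypter; strips a fake "v0$$" version prefix.
        def decrypt_value(self, value):
            return value[len("v0$$"):]

    class FakeMeta(object):
        encrypter = FakeEncrypter()

    class FakeModel(object):
        _meta = FakeMeta()

    class FakeField(object):
        model = FakeModel()

    lazy = LazyEncryptedValue("v0$$hunter2", FakeField())
    assert lazy.matches("hunter2")  # decrypts only when compared
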
def _add_encryption(field_class, requires_length_check=True):
    """ Adds support for encryption and decryption to the given field class. """
    """
    Adds support for encryption and decryption to the given field class.
    """

    class indexed_class(field_class):
        def __init__(self, default_token_length=None, *args, **kwargs):

@ -202,11 +216,15 @@ class EnumField(SmallIntegerField):
        self.enum_type = enum_type

    def db_value(self, value):
        """Convert the python value for storage in the database."""
        """
        Convert the python value for storage in the database.
        """
        return int(value.value)

    def python_value(self, value):
        """Convert the database value to a pythonic value."""
        """
        Convert the database value to a pythonic value.
        """
        return self.enum_type(value) if value is not None else None

    def clone_base(self, **kwargs):

@ -214,7 +232,9 @@ class EnumField(SmallIntegerField):

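A round-trip sketch for EnumField's converters, assuming the field can be
constructed directly from an IntEnum type as the __init__ above suggests:

    from enum import IntEnum, unique

    @unique
    class MirrorKind(IntEnum):  # hypothetical enum for illustration
        PULL = 1

    field = EnumField(MirrorKind)
    assert field.db_value(MirrorKind.PULL) == 1      # enum -> int for storage
    assert field.python_value(1) is MirrorKind.PULL  # int -> enum on load
    assert field.python_value(None) is None
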
def _add_fulltext(field_class):
    """ Adds support for full text indexing and lookup to the given field class. """
    """
    Adds support for full text indexing and lookup to the given field class.
    """

    class indexed_class(field_class):
        # Marker used by SQLAlchemy translation layer to add the proper index for full text searching.

@ -256,32 +276,42 @@ FullIndexedTextField = _add_fulltext(TextField)


class Credential(object):
    """ Credential represents a hashed credential. """
    """
    Credential represents a hashed credential.
    """

    def __init__(self, hashed):
        self.hashed = hashed

    def matches(self, value):
        """ Returns true if this credential matches the unhashed value given. """
        """
        Returns true if this credential matches the unhashed value given.
        """
        return bcrypt.hashpw(value.encode("utf-8"), self.hashed) == self.hashed

    @classmethod
    def from_string(cls, string_value):
        """ Returns a Credential object from an unhashed string value. """
        """
        Returns a Credential object from an unhashed string value.
        """
        return Credential(bcrypt.hashpw(string_value.encode("utf-8"), bcrypt.gensalt()))

    @classmethod
    def generate(cls, length=20):
        """ Generates a new credential and returns it, along with its unhashed form. """
        """
        Generates a new credential and returns it, along with its unhashed form.
        """
        token = random_string(length)
        return Credential.from_string(token), token

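A minimal sketch of the hash-then-verify flow Credential wraps, using bcrypt
directly (standalone; the token value is illustrative):

    import bcrypt

    hashed = bcrypt.hashpw(b"s3cret-token", bcrypt.gensalt())

    def matches(candidate):
        # Same comparison strategy as Credential.matches above.
        return bcrypt.hashpw(candidate.encode("utf-8"), hashed) == hashed

    assert matches("s3cret-token")
    assert not matches("wrong-token")
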
class CredentialField(CharField):
    """ A character field that stores crytographically hashed credentials that should never be
    available to the user in plaintext after initial creation. This field automatically
    provides verification.
    """
    """
    A character field that stores cryptographically hashed credentials that should never be
    available to the user in plaintext after initial creation.

    This field automatically provides verification.
    """

    def __init__(self, *args, **kwargs):
        CharField.__init__(self, *args, **kwargs)

@ -9,13 +9,16 @@ logger = logging.getLogger(__name__)


def _merge_aggregated_log_counts(*args):
    """ Merge two lists of AggregatedLogCount based on the value of their kind_id and datetime.
    """
    """
    Merge two lists of AggregatedLogCount based on the value of their kind_id and datetime.
    """
    matching_keys = {}
    aggregated_log_counts_list = itertools.chain.from_iterable(args)

    def canonical_key_from_kind_date_tuple(kind_id, dt):
        """ Return a comma separated key from an AggregatedLogCount's kind_id and datetime. """
        """
        Return a comma separated key from an AggregatedLogCount's kind_id and datetime.
        """
        return str(kind_id) + "," + str(dt)

    for kind_id, count, dt in aggregated_log_counts_list:
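A self-contained sketch of the merge this function performs: counts sharing a
(kind_id, datetime) key are summed across the input lists (a hypothetical
re-implementation, not the code under diff):

    from collections import namedtuple
    from itertools import chain

    AggregatedLogCount = namedtuple("AggregatedLogCount", ["kind_id", "count", "datetime"])

    def merge(*lists):
        totals = {}
        for kind_id, count, dt in chain.from_iterable(lists):
            key = str(kind_id) + "," + str(dt)  # same canonical key as above
            if key in totals:
                count += totals[key].count
            totals[key] = AggregatedLogCount(kind_id, count, dt)
        return list(totals.values())
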
@ -33,9 +36,9 @@ def _merge_aggregated_log_counts(*args):

class CombinedLogsModel(SharedModel, ActionLogsDataInterface):
    """
    CombinedLogsModel implements the data model that logs to the first logs model and reads from
    both.
    """
    CombinedLogsModel implements the data model that logs to the first logs model and reads from
    both.
    """

    def __init__(self, read_write_logs_model, read_only_logs_model):
        self.read_write_logs_model = read_write_logs_model

@ -11,7 +11,9 @@ from util.morecollections import AttrDict


def _format_date(date):
    """ Output an RFC822 date format. """
    """
    Output an RFC822 date format.
    """
    if date is None:
        return None

@ -24,10 +26,12 @@ def _kinds():


class LogEntriesPage(namedtuple("LogEntriesPage", ["logs", "next_page_token"])):
    """ Represents a page returned by the lookup_logs call. The `logs` contains the logs
    found for the page and `next_page_token`, if not None, contains the token to be
    encoded and returned for the followup call.
    """
    """
    Represents a page returned by the lookup_logs call.

    The `logs` contains the logs found for the page and `next_page_token`, if not None, contains the
    token to be encoded and returned for the followup call.
    """

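A consumption sketch for the page/token contract described above (the
lookup_logs signature is abbreviated here; see the interface further down):

    def iter_all_logs(logs_model, start_datetime, end_datetime, repository_name):
        page_token = None
        while True:
            page = logs_model.lookup_logs(
                start_datetime, end_datetime, repository_name=repository_name,
                page_token=page_token,
            )
            for log in page.logs:
                yield log
            page_token = page.next_page_token
            if page_token is None:
                break  # no more pages
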
class Log(

@ -48,7 +52,9 @@ class Log(
    ],
)
):
    """ Represents a single log entry returned by the logs model. """
    """
    Represents a single log entry returned by the logs model.
    """

    @classmethod
    def for_logentry(cls, log):

@ -181,7 +187,9 @@ class Log(


class AggregatedLogCount(namedtuple("AggregatedLogCount", ["kind_id", "count", "datetime"])):
    """ Represents the aggregated count of the number of logs, of a particular kind, on a day. """
    """
    Represents the aggregated count of the number of logs, of a particular kind, on a day.
    """

    def to_dict(self):
        view = {

@ -50,11 +50,12 @@ COUNT_REPOSITORY_ACTION_TIMEOUT = 30


def _date_range_descending(start_datetime, end_datetime, includes_end_datetime=False):
    """ Generate the dates between `end_datetime` and `start_datetime`.
    """
    Generate the dates between `end_datetime` and `start_datetime`.

    If `includes_end_datetime` is set, the generator starts at `end_datetime`,
    otherwise, starts the generator at `end_datetime` minus 1 second.
    """
    If `includes_end_datetime` is set, the generator starts at `end_datetime`, otherwise, starts the
    generator at `end_datetime` minus 1 second.
    """
    assert end_datetime >= start_datetime
    start_date = start_datetime.date()

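A simplified sketch of the descending iteration the docstring describes, working
on plain dates (the real function handles datetimes and the
includes_end_datetime edge case):

    from datetime import date, timedelta

    def date_range_descending(start_date, end_date):
        assert end_date >= start_date
        current = end_date
        while current >= start_date:
            yield current
            current -= timedelta(days=1)

    # list(date_range_descending(date(2019, 1, 1), date(2019, 1, 3)))
    # yields Jan 3, Jan 2, Jan 1, in that order.
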
@ -69,12 +70,13 @@ def _date_range_descending(start_datetime, end_datetime, includes_end_datetime=F


def _date_range_in_single_index(dt1, dt2):
    """ Determine whether a single index can be searched given a range
    of dates or datetimes. If date instances are given, difference should be 1 day.
    """
    Determine whether a single index can be searched given a range of dates or datetimes. If date
    instances are given, difference should be 1 day.

    NOTE: dt2 is exclusive to the search result set.
    i.e. The date range is larger or equal to dt1 and strictly smaller than dt2
    """
    NOTE: dt2 is exclusive to the search result set.
    i.e. the date range is larger than or equal to dt1 and strictly smaller than dt2.
    """
    assert isinstance(dt1, date) and isinstance(dt2, date)

    dt = dt2 - dt1

@ -106,34 +108,40 @@ def _for_elasticsearch_logs(logs, repository_id=None, namespace_id=None):


def _random_id():
    """ Generates a unique uuid4 string for the random_id field in LogEntry.
    It is used as tie-breaker for sorting logs based on datetime:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-search-after.html
    """
    """
    Generates a unique uuid4 string for the random_id field in LogEntry.

    It is used as tie-breaker for sorting logs based on datetime:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-search-after.html
    """
    return str(uuid.uuid4())


@add_metaclass(ABCMeta)
class ElasticsearchLogsModelInterface(object):
    """
    Interface for Elasticsearch specific operations with the logs model.
    These operations are usually index based.
    """
    Interface for Elasticsearch specific operations with the logs model.

    These operations are usually index based.
    """

    @abstractmethod
    def can_delete_index(self, index, cutoff_date):
        """ Return whether the given index is older than the given cutoff date. """
        """
        Return whether the given index is older than the given cutoff date.
        """

    @abstractmethod
    def list_indices(self):
        """ List the logs model's indices. """
        """
        List the logs model's indices.
        """


class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsModelInterface):
    """
    DocumentLogsModel implements the data model for the logs API backed by an
    elasticsearch service.
    """
    DocumentLogsModel implements the data model for the logs API backed by an elasticsearch service.
    """

    def __init__(
        self, should_skip_logging=None, elasticsearch_config=None, producer=None, **kwargs

@ -155,11 +163,12 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM

    @staticmethod
    def _get_ids_by_names(repository_name, namespace_name, performer_name):
        """ Retrieve repository/namespace/performer ids based on their names.
        throws DataModelException when the namespace_name does not match any
        user in the database.
        """
        Retrieve repository/namespace/performer ids based on their names.

        throws DataModelException when the namespace_name does not match any user in the database.
        Returns the database ID, or None if it does not exist.
        """
        """
        repository_id = None
        account_id = None
        performer_id = None

@ -269,10 +278,11 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM
        return search.execute()

    def _load_latest_logs(self, performer_id, repository_id, account_id, filter_kinds, size):
        """ Return the latest logs from Elasticsearch.
        """
        Return the latest logs from Elasticsearch.

        Look at indices up to theset logrotateworker threshold, or up to 30 days if not defined.
        """
        Look at indices up to the set logrotateworker threshold, or up to 30 days if not defined.
        """
        # Set the last index to check to be the logrotateworker threshold, or 30 days
        end_datetime = datetime.now()
        start_datetime = end_datetime - timedelta(days=DATE_RANGE_LIMIT)

@ -563,7 +573,9 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM
        return self._es_client.list_indices()

    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """ Yield a context manager for a group of outdated logs. """
        """
        Yield a context manager for a group of outdated logs.
        """
        all_indices = self.list_indices()
        for index in all_indices:
            if not self.can_delete_index(index, cutoff_date):

@ -575,11 +587,10 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM

class ElasticsearchLogRotationContext(LogRotationContextInterface):
    """
    ElasticsearchLogRotationContext yield batch of logs from an index.
    ElasticsearchLogRotationContext yields batches of logs from an index.

    When completed without exceptions, this context will delete its associated
    Elasticsearch index.
    """
    When completed without exceptions, this context will delete its associated Elasticsearch index.
    """

    def __init__(self, index, min_logs_per_rotation, es_client):
        self._es_client = es_client

@ -623,7 +634,9 @@ class ElasticsearchLogRotationContext(LogRotationContextInterface):
        return search

    def _generate_filename(self):
        """ Generate the filenames used to archive the action logs. """
        """
        Generate the filenames used to archive the action logs.
        """
        filename = "%s_%d-%d" % (self.index, self.start_pos, self.end_pos)
        filename = ".".join((filename, "txt.gz"))
        return filename

@ -58,8 +58,8 @@ class LogEntry(Document):
    @classmethod
    def init(cls, index_prefix, index_settings=None, skip_template_init=False):
        """
        Create the index template, and populate LogEntry's mapping and index settings.
        """
        Create the index template, and populate LogEntry's mapping and index settings.
        """
        wildcard_index = Index(name=index_prefix + "*")
        wildcard_index.settings(**(index_settings or {}))
        wildcard_index.document(cls)

@ -93,8 +93,8 @@ class LogEntry(Document):

class ElasticsearchLogs(object):
    """
    Model for logs operations stored in an Elasticsearch cluster.
    """
    Model for logs operations stored in an Elasticsearch cluster.
    """

    def __init__(
        self,

@ -125,9 +125,8 @@ class ElasticsearchLogs(object):

    def _initialize(self):
        """
        Initialize a connection to an ES cluster and
        creates an index template if it does not exist.
        """
        Initialize a connection to an ES cluster and creates an index template if it does not exist.
        """
        if not self._initialized:
            http_auth = None
            if self._access_key and self._secret_key and self._aws_region:

@ -183,7 +182,9 @@ class ElasticsearchLogs(object):
        self._initialized = True

    def index_name(self, day):
        """ Return an index name for the given day. """
        """
        Return an index name for the given day.
        """
        return self._index_prefix + day.strftime(INDEX_DATE_FORMAT)

    def index_exists(self, index):

@ -194,15 +195,17 @@ class ElasticsearchLogs(object):

    @staticmethod
    def _valid_index_prefix(prefix):
        """ Check that the given index prefix is valid with the set of
        indices used by this class.
        """
        """
        Check that the given index prefix is valid with the set of indices used by this class.
        """
        return re.match(VALID_INDEX_PATTERN, prefix) is not None

    def _valid_index_name(self, index):
        """ Check that the given index name is valid and follows the format:
        <index_prefix>YYYY-MM-DD
        """
        """
        Check that the given index name is valid and follows the format:

        <index_prefix>YYYY-MM-DD
        """
        if not ElasticsearchLogs._valid_index_prefix(index):
            return False

@ -218,7 +221,9 @@ class ElasticsearchLogs(object):
            return False

    def can_delete_index(self, index, cutoff_date):
        """ Check if the given index can be deleted based on the given index's date and cutoff date. """
        """
        Check if the given index can be deleted based on the given index's date and cutoff date.
        """
        assert self._valid_index_name(index)
        index_dt = datetime.strptime(index[len(self._index_prefix) :], INDEX_DATE_FORMAT)
        return index_dt < cutoff_date and cutoff_date - index_dt >= timedelta(days=1)

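The naming and cutoff rules above compose like this (a sketch; INDEX_DATE_FORMAT
is assumed to be "%Y-%m-%d", matching the documented <index_prefix>YYYY-MM-DD
format):

    from datetime import datetime, timedelta

    INDEX_DATE_FORMAT = "%Y-%m-%d"

    def index_name(prefix, day):
        return prefix + day.strftime(INDEX_DATE_FORMAT)

    def can_delete_index(prefix, index, cutoff_date):
        # The index date must be at least one full day older than the cutoff.
        index_dt = datetime.strptime(index[len(prefix):], INDEX_DATE_FORMAT)
        return index_dt < cutoff_date and cutoff_date - index_dt >= timedelta(days=1)

    assert index_name("logentry_", datetime(2019, 1, 1)) == "logentry_2019-01-01"
    assert can_delete_index("logentry_", "logentry_2019-01-01", datetime(2019, 3, 1))
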
@ -260,11 +265,12 @@ def configure_es(
    index_settings=None,
):
    """
    For options in index_settings, refer to:
    https://www.elastic.co/guide/en/elasticsearch/guide/master/_index_settings.html
    some index settings are set at index creation time, and therefore, you should NOT
    change those settings once the index is set.
    """
    For options in index_settings, refer to:

    https://www.elastic.co/guide/en/elasticsearch/guide/master/_index_settings.html
    Some index settings are set at index creation time, and therefore, you should NOT
    change those settings once the index is set.
    """
    es_client = ElasticsearchLogs(
        host=host,
        port=port,

@ -26,8 +26,10 @@ StoredLog = namedtuple(

class InMemoryModel(ActionLogsDataInterface):
    """
    InMemoryModel implements the data model for logs in-memory. FOR TESTING ONLY.
    """
    InMemoryModel implements the data model for logs in-memory.

    FOR TESTING ONLY.
    """

    def __init__(self):
        self.logs = []

@ -315,7 +317,9 @@ class InMemoryLogRotationContext(LogRotationContextInterface):
        self.all_logs.remove(log)

    def yield_logs_batch(self):
        """ Yield a batch of logs and a filename for that batch. """
        """
        Yield a batch of logs and a filename for that batch.
        """
        filename = "inmemory_model_filename_placeholder"
        filename = ".".join((filename, "txt.gz"))
        yield [log_and_repo.stored_log for log_and_repo in self.expired_logs], filename

@ -3,14 +3,18 @@ from six import add_metaclass


class LogsIterationTimeout(Exception):
    """ Exception raised if logs iteration times out. """
    """
    Exception raised if logs iteration times out.
    """


@add_metaclass(ABCMeta)
class ActionLogsDataInterface(object):
    """ Interface for code to work with the logs data model. The logs data model consists
    of all access for reading and writing action logs.
    """
    """
    Interface for code to work with the logs data model.

    The logs data model consists of all access for reading and writing action logs.
    """

    @abstractmethod
    def lookup_logs(

@ -24,11 +28,13 @@ class ActionLogsDataInterface(object):
        page_token=None,
        max_page_count=None,
    ):
        """ Looks up all logs between the start_datetime and end_datetime, filtered
        by performer (a user), repository or namespace. Note that one (and only one) of the three
        can be specified. Returns a LogEntriesPage. `filter_kinds`, if specified, is a set/list
        of the kinds of logs to filter out.
        """
        """
        Looks up all logs between the start_datetime and end_datetime, filtered by performer (a
        user), repository or namespace.

        Note that one (and only one) of the three can be specified. Returns a LogEntriesPage.
        `filter_kinds`, if specified, is a set/list of the kinds of logs to filter out.
        """

    @abstractmethod
    def lookup_latest_logs(

@ -39,10 +45,12 @@ class ActionLogsDataInterface(object):
        filter_kinds=None,
        size=20,
    ):
        """ Looks up latest logs of a specific kind, filtered by performer (a user),
        repository or namespace. Note that one (and only one) of the three can be specified.
        Returns a list of `Log`.
        """
        """
        Looks up latest logs of a specific kind, filtered by performer (a user), repository or
        namespace.

        Note that one (and only one) of the three can be specified. Returns a list of `Log`.
        """

    @abstractmethod
    def get_aggregated_log_counts(

@ -54,16 +62,20 @@ class ActionLogsDataInterface(object):
        namespace_name=None,
        filter_kinds=None,
    ):
        """ Returns the aggregated count of logs, by kind, between the start_datetime and end_datetime,
        filtered by performer (a user), repository or namespace. Note that one (and only one) of
        the three can be specified. Returns a list of AggregatedLogCount.
        """
        """
        Returns the aggregated count of logs, by kind, between the start_datetime and end_datetime,
        filtered by performer (a user), repository or namespace.

        Note that one (and only one) of the three can be specified. Returns a list of
        AggregatedLogCount.
        """

    @abstractmethod
    def count_repository_actions(self, repository, day):
        """ Returns the total number of repository actions over the given day, in the given repository
        """
        Returns the total number of repository actions over the given day, in the given repository
        or None on error.
        """
        """

    @abstractmethod
    def queue_logs_export(

@ -77,10 +89,12 @@ class ActionLogsDataInterface(object):
        callback_email=None,
        filter_kinds=None,
    ):
        """ Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address. Returns the ID of the export job
        queued or None if error.
        """
        """
        Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address.

        Returns the ID of the export job queued or None if error.
        """

    @abstractmethod
    def log_action(

@ -95,7 +109,9 @@ class ActionLogsDataInterface(object):
        timestamp=None,
        is_free_namespace=False,
    ):
        """ Logs a single action as having taken place. """
        """
        Logs a single action as having taken place.
        """

    @abstractmethod
    def yield_logs_for_export(

@ -106,7 +122,8 @@ class ActionLogsDataInterface(object):
        namespace_id=None,
        max_query_time=None,
    ):
        """ Returns an iterator that yields bundles of all logs found between the start_datetime and
        """
        Returns an iterator that yields bundles of all logs found between the start_datetime and
        end_datetime, optionally filtered by the repository or namespace. This function should be
        used for any bulk lookup operations, and should be implemented by implementors to put
        minimal strain on the backing storage for large operations. If there was an error in setting

@ -115,30 +132,33 @@ class ActionLogsDataInterface(object):
        If max_query_time is specified, each iteration that yields a log bundle will have its
        queries run with a maximum timeout of that specified, and, if any exceed that threshold,
        LogsIterationTimeout will be raised instead of returning the logs bundle.
        """
        """

    @abstractmethod
    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """
        A generator that yields contexts implementing the LogRotationContextInterface.
        Each context represents a set of logs to be archived and deleted once
        the context completes without exceptions.
        A generator that yields contexts implementing the LogRotationContextInterface. Each context
        represents a set of logs to be archived and deleted once the context completes without
        exceptions.

        For database logs, the LogRotationContext abstracts over a set of rows. When the context
        finishes, its associated rows get deleted.
        For database logs, the LogRotationContext abstracts over a set of rows. When the context
        finishes, its associated rows get deleted.

        For Elasticsearch logs, the LogRotationContext abstracts over indices. When the context
        finishes, its associated index gets deleted.
        """
        For Elasticsearch logs, the LogRotationContext abstracts over indices. When the context
        finishes, its associated index gets deleted.
        """


@add_metaclass(ABCMeta)
class LogRotationContextInterface(object):
    """ Interface for iterating over a set of logs to be archived. """
    """
    Interface for iterating over a set of logs to be archived.
    """

    @abstractmethod
    def yield_logs_batch(self):
        """
        Generator yielding batch of logs and a filename for that batch.
        A batch is a subset of the logs part of the context.
        """
        Generator yielding batch of logs and a filename for that batch.

        A batch is a subset of the logs part of the context.
        """

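Putting the two rotation interfaces together, a worker loop might look like this
sketch (rotate_logs and archive are hypothetical names; the context-manager
behavior is taken from the docstrings above):

    def rotate_logs(logs_model, cutoff_date, min_logs_per_rotation, archive):
        for ctx in logs_model.yield_log_rotation_context(cutoff_date, min_logs_per_rotation):
            # Exiting the `with` block without an exception triggers the
            # deletion of the rows/index backing this context.
            with ctx as context:
                for logs_batch, filename in context.yield_logs_batch():
                    archive(logs_batch, filename)  # caller-supplied archiver
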
@ -5,9 +5,11 @@ logger = logging.getLogger(__name__)


class LogSendException(Exception):
    """ A generic error when sending the logs to its destination.
    e.g. Kinesis, Kafka, Elasticsearch, ...
    """
    """
    A generic error when sending the logs to its destination.

    e.g. Kinesis, Kafka, Elasticsearch, ...
    """

    pass

@ -10,10 +10,11 @@ logger = logging.getLogger(__name__)


class ElasticsearchLogsProducer(LogProducerInterface):
    """ Log producer writing log entries to Elasticsearch.
    """
    Log producer writing log entries to Elasticsearch.

    This implementation writes directly to Elasticsearch without a streaming/queueing service.
    """
    This implementation writes directly to Elasticsearch without a streaming/queueing service.
    """

    def send(self, logentry):
        try:

@ -6,4 +6,6 @@ from six import add_metaclass

class LogProducerInterface(object):
    @abstractmethod
    def send(self, logentry):
        """ Send a log entry to the configured log infrastructure. """
        """
        Send a log entry to the configured log infrastructure.
        """

@ -15,7 +15,9 @@ DEFAULT_MAX_BLOCK_SECONDS = 5


class KafkaLogsProducer(LogProducerInterface):
    """ Log producer writing log entries to a Kafka stream. """
    """
    Log producer writing log entries to a Kafka stream.
    """

    def __init__(self, bootstrap_servers=None, topic=None, client_id=None, max_block_seconds=None):
        self.bootstrap_servers = bootstrap_servers

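A minimal send sketch, assuming the kafka-python client that the constructor
arguments above suggest (the servers, topic, and payload are illustrative):

    from kafka import KafkaProducer

    producer = KafkaProducer(bootstrap_servers=["localhost:9092"], client_id="quay-logs")
    future = producer.send("logentry", b'{"kind": "push_repo"}')
    future.get(timeout=5)  # block up to max_block_seconds for the broker ack
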
@ -21,10 +21,12 @@ DEFAULT_MAX_POOL_CONNECTIONS = 10


def _partition_key(number_of_shards=None):
    """ Generate a partition key for AWS Kinesis stream.
    If the number of shards is specified, generate keys where the size of the key space is
    the number of shards.
    """
    """
    Generate a partition key for AWS Kinesis stream.

    If the number of shards is specified, generate keys where the size of the key space is the
    number of shards.
    """
    key = None
    if number_of_shards is not None:
        shard_number = random.randrange(0, number_of_shards)

@ -36,7 +38,9 @@ def _partition_key(number_of_shards=None):


class KinesisStreamLogsProducer(LogProducerInterface):
    """ Log producer writing log entries to an Amazon Kinesis Data Stream. """
    """
    Log producer writing log entries to an Amazon Kinesis Data Stream.
    """

    def __init__(
        self,

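A sketch of the sharded partition-key idea documented above; deriving the key
from the shard number as a string is an assumption, since that part of the
function is cut off in this diff:

    import random

    def partition_key(number_of_shards=None):
        if number_of_shards is not None:
            # Key space size equals the number of shards.
            shard_number = random.randrange(0, number_of_shards)
            return str(shard_number)
        return str(random.random())  # assumed fallback for the unsharded case
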
@ -3,7 +3,9 @@ from datetime import datetime


class LogEntryJSONEncoder(json.JSONEncoder):
    """ JSON encoder to encode datetimes to ISO8601 format. """
    """
    JSON encoder to encode datetimes to ISO8601 format.
    """

    def default(self, obj):
        if isinstance(obj, datetime):

@ -13,7 +15,9 @@ class LogEntryJSONEncoder(json.JSONEncoder):


def logs_json_serializer(logentry, sort_keys=False):
    """ Serializes a LogEntry to json bytes. """
    """
    Serializes a LogEntry to json bytes.
    """
    return json.dumps(
        logentry.to_dict(), cls=LogEntryJSONEncoder, ensure_ascii=True, sort_keys=sort_keys
    ).encode("ascii")

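A standalone sketch of the same encoder pattern, with a plain dict standing in
for logentry.to_dict():

    import json
    from datetime import datetime

    class ISODateTimeEncoder(json.JSONEncoder):
        # Datetimes become ISO8601 strings; everything else uses the default.
        def default(self, obj):
            if isinstance(obj, datetime):
                return obj.isoformat()
            return super(ISODateTimeEncoder, self).default(obj)

    payload = json.dumps({"performed": datetime(2019, 1, 1)}, cls=ISODateTimeEncoder)
    assert payload == '{"performed": "2019-01-01T00:00:00"}'
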
@ -18,10 +18,12 @@ class SharedModel:
        callback_email=None,
        filter_kinds=None,
    ):
        """ Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address. Returns the ID of the export job
        queued or None if error.
        """
        """
        Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address.

        Returns the ID of the export job queued or None if error.
        """
        export_id = str(uuid.uuid4())
        namespace = model.user.get_namespace_user(namespace_name)
        if namespace is None:

@ -59,8 +61,11 @@ def epoch_ms(dt):


def get_kinds_filter(kinds):
    """ Given a list of kinds, return the set of kinds not that are not part of that list.
    i.e Returns the list of kinds to be filtered out. """
    """
    Given a list of kinds, return the set of kinds that are not part of that list.

    i.e. Returns the list of kinds to be filtered out.
    """
    kind_map = model.log.get_log_entry_kinds()
    kind_map = {key: kind_map[key] for key in kind_map if not isinstance(key, int)}
    return [kind_name for kind_name in kind_map if kind_name not in kinds]

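The inverse-filter behavior, sketched with a hypothetical kind map (the real one
comes from model.log.get_log_entry_kinds()):

    kind_map = {"push_repo": 1, "pull_repo": 2, "delete_repo": 3}

    def get_kinds_filter(kinds):
        # Every known kind name that was NOT requested gets filtered out.
        return [name for name in kind_map if name not in kinds]

    assert sorted(get_kinds_filter(["push_repo"])) == ["delete_repo", "pull_repo"]
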
@ -31,9 +31,9 @@ LOG_MODELS = [LogEntry3, LogEntry2, LogEntry]

class TableLogsModel(SharedModel, ActionLogsDataInterface):
    """
    TableLogsModel implements the data model for the logs API backed by a single table
    in the database.
    """
    TableLogsModel implements the data model for the logs API backed by a single table in the
    database.
    """

    def __init__(self, should_skip_logging=None, **kwargs):
        self._should_skip_logging = should_skip_logging

@ -325,7 +325,9 @@ class TableLogsModel(SharedModel, ActionLogsDataInterface):
        current_batch_size = timedelta(seconds=seconds)

    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """ Yield a context manager for a group of outdated logs. """
        """
        Yield a context manager for a group of outdated logs.
        """
        for log_model in LOG_MODELS:
            while True:
                with UseThenDisconnect(config.app_config):

@ -362,12 +364,12 @@ table_logs_model = TableLogsModel()

class DatabaseLogRotationContext(LogRotationContextInterface):
    """
    DatabaseLogRotationContext represents a batch of logs to be archived together.
    i.e A set of logs to be archived in the same file (based on the number of logs per rotation).
    DatabaseLogRotationContext represents a batch of logs to be archived together, i.e. a set of logs
    to be archived in the same file (based on the number of logs per rotation).

    When completed without exceptions, this context will delete the stale logs
    from rows `start_id` to `end_id`.
    """
    When completed without exceptions, this context will delete the stale logs from rows `start_id`
    to `end_id`.
    """

    def __init__(self, logs, log_model, start_id, end_id):
        self.logs = logs

@ -385,6 +387,8 @@ class DatabaseLogRotationContext(LogRotationContextInterface):
        delete_stale_logs(self.start_id, self.end_id, self.log_model)

    def yield_logs_batch(self):
        """ Yield a batch of logs and a filename for that batch. """
        """
        Yield a batch of logs and a filename for that batch.
        """
        filename = "%d-%d-%s.txt.gz" % (self.start_id, self.end_id, self.log_model.__name__.lower())
        yield self.logs, filename

@ -610,9 +610,9 @@ def test_date_range_in_single_index(dt1, dt2, expected_result):

def test_pagination(logs_model, mock_page_size):
    """
    Make sure that pagination does not stop if searching through multiple indices by day,
    and the current log count matches the page size while there are still indices to be searched.
    """
    Make sure that pagination does not stop if searching through multiple indices by day, and the
    current log count matches the page size while there are still indices to be searched.
    """
    day1 = datetime.now()
    day2 = day1 + timedelta(days=1)
    day3 = day2 + timedelta(days=1)

@ -54,10 +54,12 @@ tables = AttrDict(target_metadata.tables)


def get_tester():
    """ Returns the tester to use. We only return the tester that populates data
    if the TEST_MIGRATE env var is set to `true` AND we make sure we're not
    connecting to a production database.
    """
    """
    Returns the tester to use.

    We only return the tester that populates data if the TEST_MIGRATE env var is set to `true` AND
    we make sure we're not connecting to a production database.
    """
    if os.environ.get("TEST_MIGRATE", "") == "true":
        url = unquote(DB_URI)
        if url.find("amazonaws.com") < 0:

@ -92,17 +94,17 @@ def report_success(ctx=None, step=None, heads=None, run_args=None):


def run_migrations_offline():
    """Run migrations in 'offline' mode.
    """
    Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.
    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = unquote(DB_URI)
    context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)

@ -111,12 +113,11 @@ def run_migrations_offline():


def run_migrations_online():
    """Run migrations in 'online' mode.
    """
    Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    In this scenario we need to create an Engine and associate a connection with the context.
    """

    if (
        isinstance(db.obj, SqliteDatabase)

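For reference, the offline path described above reduces to this standard Alembic
env.py shape (a sketch; url and target_metadata come from the surrounding
module):

    from alembic import context

    def run_migrations_offline(url, target_metadata):
        # Configure with a URL only; no Engine/DBAPI is needed offline.
        context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
        with context.begin_transaction():
            context.run_migrations()
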
@ -9,22 +9,29 @@ from util.abchelpers import nooper

@add_metaclass(ABCMeta)
class ProgressReporter(object):
    """ Implements an interface for reporting progress with the migrations.
    """
    """
    Implements an interface for reporting progress with the migrations.
    """

    @abstractmethod
    def report_version_complete(self, success):
        """ Called when an entire migration is complete. """
        """
        Called when an entire migration is complete.
        """

    @abstractmethod
    def report_step_progress(self):
        """ Called when a single step in the migration has been completed. """
        """
        Called when a single step in the migration has been completed.
        """


@nooper
class NullReporter(ProgressReporter):
    """ No-op version of the progress reporter, designed for use when no progress
    reporting endpoint is provided. """
    """
    No-op version of the progress reporter, designed for use when no progress reporting endpoint is
    provided.
    """


class PrometheusReporter(ProgressReporter):

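A hypothetical concrete reporter, to show how the two abstract hooks above are
meant to be filled in (plain logging stands in for a real metrics endpoint):

    import logging

    logger = logging.getLogger(__name__)

    class LoggingReporter(ProgressReporter):
        def report_version_complete(self, success):
            logger.info("migration complete, success=%s", success)

        def report_step_progress(self):
            logger.info("migration step completed")
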
@ -16,7 +16,9 @@ from test.fixtures import *
    ],
)
def test_alembic_db_uri(db_uri, is_valid):
    """ Test if the given URI is escaped for string interpolation (Python's configparser). """
    """
    Test if the given URI is escaped for string interpolation (Python's configparser).
    """
    with patch("alembic.script.ScriptDirectory.run_env") as m:
        if is_valid:
            run_alembic_migration(db_uri)

@ -91,28 +91,37 @@ class DataTypes(object):

@add_metaclass(ABCMeta)
class MigrationTester(object):
    """ Implements an interface for adding testing capabilities to the
    data model migration system in Alembic.
    """
    """
    Implements an interface for adding testing capabilities to the data model migration system in
    Alembic.
    """

    TestDataType = DataTypes

    @abstractmethod
    def is_testing(self):
        """ Returns whether we are currently under a migration test. """
        """
        Returns whether we are currently under a migration test.
        """

    @abstractmethod
    def populate_table(self, table_name, fields):
        """ Called to populate a table with the given fields filled in with testing data. """
        """
        Called to populate a table with the given fields filled in with testing data.
        """

    @abstractmethod
    def populate_column(self, table_name, col_name, field_type):
        """ Called to populate a column in a table to be filled in with testing data. """
        """
        Called to populate a column in a table to be filled in with testing data.
        """


@nooper
class NoopTester(MigrationTester):
    """ No-op version of the tester, designed for production workloads. """
    """
    No-op version of the tester, designed for production workloads.
    """


class PopulateTestDataTester(MigrationTester):

@ -1,9 +1,9 @@
"""Add creation date to User table
"""
Add creation date to User table.

Revision ID: 0cf50323c78b
Revises: 87fbbc224f10
Create Date: 2018-03-09 13:19:41.903196

"""

# revision identifiers, used by Alembic.

@ -1,9 +1,9 @@
"""Add Tag, TagKind and ManifestChild tables
"""
Add Tag, TagKind and ManifestChild tables.

Revision ID: 10f45ee2310b
Revises: 13411de1c0ff
Create Date: 2018-10-29 15:22:53.552216

"""

# revision identifiers, used by Alembic.

@ -1,9 +1,9 @@
"""Remove unique from TagManifestToManifest
"""
Remove unique from TagManifestToManifest.

Revision ID: 13411de1c0ff
Revises: 654e6df88b71
Create Date: 2018-08-19 23:30:24.969549

"""

# revision identifiers, used by Alembic.

@ -1,9 +1,9 @@
"""Add maximum build queue count setting to user table
"""
Add maximum build queue count setting to user table.

Revision ID: 152bb29a1bb3
Revises: 7367229b38d9
Create Date: 2018-02-20 13:34:34.902415

"""

# revision identifiers, used by Alembic.

@ -1,9 +1,9 @@
"""Make BlodUpload byte_count not nullable
"""
Make BlobUpload byte_count not nullable.

Revision ID: 152edccba18c
Revises: c91c564aad34
Create Date: 2018-02-23 12:41:25.571835

"""

# revision identifiers, used by Alembic.

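These revision files all share the same Alembic header convention; a sketch of
the skeleton each one expands into, using the first file's values (the
revision/down_revision assignments are the standard Alembic counterparts of the
"Revision ID" and "Revises" header fields):

    """
    Add creation date to User table.

    Revision ID: 0cf50323c78b
    Revises: 87fbbc224f10
    Create Date: 2018-03-09 13:19:41.903196
    """

    # revision identifiers, used by Alembic.
    revision = "0cf50323c78b"
    down_revision = "87fbbc224f10"
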
Some files were not shown because too many files have changed in this diff.