mirror of https://github.com/quay/quay.git synced 2025-07-31 18:44:32 +03:00

fix all the docstrings

This commit is contained in:
alecmerdler
2020-02-05 19:55:07 -08:00
parent 17e6736575
commit 3dd5f045ff
463 changed files with 8851 additions and 5003 deletions
View File
@@ -2,24 +2,32 @@ from flask import _request_ctx_stack


 def get_authenticated_context():
-    """ Returns the auth context for the current request context, if any. """
+    """
+    Returns the auth context for the current request context, if any.
+    """
     return getattr(_request_ctx_stack.top, "authenticated_context", None)


 def get_authenticated_user():
-    """ Returns the authenticated user, if any, or None if none. """
+    """
+    Returns the authenticated user, if any, or None if none.
+    """
     context = get_authenticated_context()
     return context.authed_user if context else None


 def get_validated_oauth_token():
-    """ Returns the authenticated and validated OAuth access token, if any, or None if none. """
+    """
+    Returns the authenticated and validated OAuth access token, if any, or None if none.
+    """
     context = get_authenticated_context()
     return context.authed_oauth_token if context else None


 def set_authenticated_context(auth_context):
-    """ Sets the auth context for the current request context to that given. """
+    """
+    Sets the auth context for the current request context to that given.
+    """
     ctx = _request_ctx_stack.top
     ctx.authenticated_context = auth_context
     return auth_context
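
A minimal usage sketch (not part of this commit; the Flask app, route, and import path are assumed) showing a request handler reading back the context that an earlier auth decorator stored via set_authenticated_context():

from flask import Flask
from auth.context import get_authenticated_user  # import path assumed

app = Flask(__name__)


@app.route("/whoami")
def whoami():
    # An auth decorator is assumed to have called set_authenticated_context()
    # earlier in this request; the handler only reads the result back.
    user = get_authenticated_user()
    return user.username if user else "anonymous"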
View File
@@ -26,85 +26,109 @@ class AuthContext(object):

     @property
     @abstractmethod
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         pass

     @property
     @abstractmethod
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         pass

     @property
     @abstractmethod
     def authed_oauth_token(self):
-        """ Returns the authenticated OAuth token, if any. """
+        """
+        Returns the authenticated OAuth token, if any.
+        """
         pass

     @property
     @abstractmethod
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth or access token. Note that
-        this property will also return robot accounts.
+        """
+        Returns the authenticated user, whether directly, or via an OAuth or access token.
+
+        Note that this property will also return robot accounts.
         """
         pass

     @property
     @abstractmethod
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         pass

     @property
     @abstractmethod
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         pass

     @property
     @abstractmethod
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         pass

     @property
     @abstractmethod
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         pass

     @abstractmethod
     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         pass

     @abstractmethod
     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         pass

     @abstractmethod
     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
         """
         pass

     @property
     @abstractmethod
     def unique_key(self):
-        """ Returns a key that is unique to this auth context type and its data. For example, an
-        instance of the auth context type for the user might be a string of the form
-        `user-{user-uuid}`. Callers should treat this key as opaque and not rely on the contents
-        for anything besides uniqueness. This is typically used by callers when they'd like to
-        check cache but not hit the database to get a fully validated auth context.
+        """
+        Returns a key that is unique to this auth context type and its data.
+
+        For example, an instance of the auth context type for the user might be a string of the form
+        `user-{user-uuid}`. Callers should treat this key as opaque and not rely on the contents for
+        anything besides uniqueness. This is typically used by callers when they'd like to check
+        cache but not hit the database to get a fully validated auth context.
         """
         pass


 class ValidatedAuthContext(AuthContext):
-    """ ValidatedAuthContext represents the loaded, authenticated and validated auth information
-    for the current request context.
+    """
+    ValidatedAuthContext represents the loaded, authenticated and validated auth information for the
+    current request context.
     """

     def __init__(

@@ -133,7 +157,9 @@ class ValidatedAuthContext(AuthContext):

     @property
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         for kind in ContextEntityKind:
             if hasattr(self, kind.value) and getattr(self, kind.value):
                 return kind

@@ -142,8 +168,10 @@ class ValidatedAuthContext(AuthContext):

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth token. Note that this
-        will also return robot accounts.
+        """
+        Returns the authenticated user, whether directly, or via an OAuth token.
+
+        Note that this will also return robot accounts.
         """
         authed_user = self._authed_user()
         if authed_user is not None and not authed_user.enabled:

@@ -170,17 +198,23 @@ class ValidatedAuthContext(AuthContext):

     @property
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         return not self.authed_user and not self.token and not self.signed_data

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         return bool(self.authed_user and not self.robot)

     @property
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         if self.oauthtoken:
             scope_set = scopes_from_scope_string(self.oauthtoken.scope)
             return QuayDeferredPermissionUser.for_user(self.oauthtoken.authorized_user, scope_set)

@@ -200,7 +234,9 @@ class ValidatedAuthContext(AuthContext):

     @property
     def entity_reference(self):
-        """ Returns the DB object reference for this context's entity. """
+        """
+        Returns the DB object reference for this context's entity.
+        """
         if self.entity_kind == ContextEntityKind.anonymous:
             return None

@@ -208,23 +244,31 @@ class ValidatedAuthContext(AuthContext):

     @property
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.description(self.entity_reference)

     @property
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.credential_username(self.entity_reference)

     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]()
         return handler.analytics_id_and_public_metadata(self.entity_reference)

     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         # Save to the request context.
         set_authenticated_context(self)

@@ -238,8 +282,9 @@ class ValidatedAuthContext(AuthContext):
         return "%s-%s" % (signed_dict["entity_kind"], signed_dict.get("entity_reference", "(anon)"))

     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
         """
         dict_data = {
             "version": 2,

@@ -288,10 +333,13 @@ class ValidatedAuthContext(AuthContext):

 class SignedAuthContext(AuthContext):
-    """ SignedAuthContext represents an auth context loaded from a signed token of some kind,
-    such as a JWT. Unlike ValidatedAuthContext, SignedAuthContext operates lazily, only loading
-    the actual {user, robot, token, etc} when requested. This allows registry operations that
-    only need to check if *some* entity is present to do so, without hitting the database.
+    """
+    SignedAuthContext represents an auth context loaded from a signed token of some kind, such as a
+    JWT.
+
+    Unlike ValidatedAuthContext, SignedAuthContext operates lazily, only loading the actual {user,
+    robot, token, etc} when requested. This allows registry operations that only need to check if
+    *some* entity is present to do so, without hitting the database.
     """

     def __init__(self, kind, signed_data, v1_dict_format):

@@ -325,7 +373,8 @@ class SignedAuthContext(AuthContext):

     @lru_cache(maxsize=1)
     def _get_validated(self):
-        """ Returns a ValidatedAuthContext for this signed context, resolving all the necessary
+        """
+        Returns a ValidatedAuthContext for this signed context, resolving all the necessary
         references.
         """
         if not self.v1_dict_format:

@@ -390,18 +439,24 @@ class SignedAuthContext(AuthContext):

     @property
     def entity_kind(self):
-        """ Returns the kind of the entity in this auth context. """
+        """
+        Returns the kind of the entity in this auth context.
+        """
         return self.kind

     @property
     def is_anonymous(self):
-        """ Returns true if this is an anonymous context. """
+        """
+        Returns true if this is an anonymous context.
+        """
         return self.kind == ContextEntityKind.anonymous

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth or access token. Note that
-        this property will also return robot accounts.
+        """
+        Returns the authenticated user, whether directly, or via an OAuth or access token.
+
+        Note that this property will also return robot accounts.
         """
         if self.kind == ContextEntityKind.anonymous:
             return None

@@ -417,7 +472,9 @@ class SignedAuthContext(AuthContext):

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         if self.kind == ContextEntityKind.anonymous:
             return False

@@ -425,29 +482,40 @@ class SignedAuthContext(AuthContext):

     @property
     def identity(self):
-        """ Returns the identity for the auth context. """
+        """
+        Returns the identity for the auth context.
+        """
         return self._get_validated().identity

     @property
     def description(self):
-        """ Returns a human-readable and *public* description of the current auth context. """
+        """
+        Returns a human-readable and *public* description of the current auth context.
+        """
         return self._get_validated().description

     @property
     def credential_username(self):
-        """ Returns the username to create credentials for this context's entity, if any. """
+        """
+        Returns the username to create credentials for this context's entity, if any.
+        """
         return self._get_validated().credential_username

     def analytics_id_and_public_metadata(self):
-        """ Returns the analytics ID and public log metadata for this auth context. """
+        """
+        Returns the analytics ID and public log metadata for this auth context.
+        """
         return self._get_validated().analytics_id_and_public_metadata()

     def apply_to_request_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         return self._get_validated().apply_to_request_context()

     def to_signed_dict(self):
-        """ Serializes the auth context into a dictionary suitable for inclusion in a JWT or other
-        form of signed serialization.
+        """
+        Serializes the auth context into a dictionary suitable for inclusion in a JWT or other form
+        of signed serialization.
         """
         return self.signed_data
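
The unique_key docstring above describes a check-cache-before-validating pattern; a hypothetical sketch of that pattern (the cache and helper are invented, and _get_validated is the private resolver shown in the diff):

validated_cache = {}


def resolve_context(signed_context):
    # Key the cache on the opaque unique_key so the database is hit only on a
    # miss; SignedAuthContext defers that work until _get_validated() is called.
    key = signed_context.unique_key
    if key not in validated_cache:
        validated_cache[key] = signed_context._get_validated()
    return validated_cache[key]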
View File
@@ -10,9 +10,12 @@ logger = logging.getLogger(__name__)


 def has_basic_auth(username):
-    """ Returns true if a basic auth header exists with a username and password pair that validates
-    against the internal authentication system. Returns True on full success and False on any
-    failure (missing header, invalid header, invalid credentials, etc).
+    """
+    Returns true if a basic auth header exists with a username and password pair that validates
+    against the internal authentication system.
+
+    Returns True on full success and False on any failure (missing header, invalid header, invalid
+    credentials, etc).
     """
     auth_header = request.headers.get("authorization", "")
     result = validate_basic_auth(auth_header)

@@ -20,8 +23,9 @@ def has_basic_auth(username):


 def validate_basic_auth(auth_header):
-    """ Validates the specified basic auth header, returning whether its credentials point
-    to a valid user or token.
+    """
+    Validates the specified basic auth header, returning whether its credentials point to a valid
+    user or token.
     """
     if not auth_header:
         return ValidateResult(AuthKind.basic, missing=True)

@@ -41,7 +45,8 @@ def validate_basic_auth(auth_header):


 def _parse_basic_auth_header(auth):
-    """ Parses the given basic auth header, returning the credentials found inside.
+    """
+    Parses the given basic auth header, returning the credentials found inside.
     """
     normalized = [part.strip() for part in auth.split(" ") if part]
     if normalized[0].lower() != "basic" or len(normalized) != 2:
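
For reference, a standalone sketch of what _parse_basic_auth_header does (a Basic header is the word "Basic" followed by base64 of "username:password"); this mirrors the module's logic rather than importing it:

import base64


def parse_basic(header):
    parts = [part.strip() for part in header.split(" ") if part]
    if len(parts) != 2 or parts[0].lower() != "basic":
        return None
    decoded = base64.b64decode(parts[1]).decode("utf-8")
    username, _, password = decoded.partition(":")
    return username, password


header = "Basic " + base64.b64encode(b"alice:s3cret").decode("ascii")
assert parse_basic(header) == ("alice", "s3cret")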
View File
@@ -12,9 +12,11 @@ from auth.credential_consts import (


 class ContextEntityKind(Enum):
-    """ Defines the various kinds of entities in an auth context. Note that the string values of
-    these fields *must* match the names of the fields in the ValidatedAuthContext class, as
-    we fill them in directly based on the string names here.
+    """
+    Defines the various kinds of entities in an auth context.
+
+    Note that the string values of these fields *must* match the names of the fields in the
+    ValidatedAuthContext class, as we fill them in directly based on the string names here.
     """

     anonymous = "anonymous"

@@ -34,30 +36,40 @@ class ContextEntityHandler(object):

     @abstractmethod
     def credential_username(self, entity_reference):
-        """ Returns the username to create credentials for this entity, if any. """
+        """
+        Returns the username to create credentials for this entity, if any.
+        """
         pass

     @abstractmethod
     def get_serialized_entity_reference(self, entity_reference):
-        """ Returns the entity reference for this kind of auth context, serialized into a form that can
-        be placed into a JSON object and put into a JWT. This is typically a DB UUID or another
-        unique identifier for the object in the DB.
+        """
+        Returns the entity reference for this kind of auth context, serialized into a form that can
+        be placed into a JSON object and put into a JWT.
+
+        This is typically a DB UUID or another unique identifier for the object in the DB.
         """
         pass

     @abstractmethod
     def deserialize_entity_reference(self, serialized_entity_reference):
-        """ Returns the deserialized reference to the entity in the database, or None if none. """
+        """
+        Returns the deserialized reference to the entity in the database, or None if none.
+        """
         pass

     @abstractmethod
     def description(self, entity_reference):
-        """ Returns a human-readable and *public* description of the current entity. """
+        """
+        Returns a human-readable and *public* description of the current entity.
+        """
         pass

     @abstractmethod
     def analytics_id_and_public_metadata(self, entity_reference):
-        """ Returns the analyitics ID and a dict of public metadata for the current entity. """
+        """
+        Returns the analytics ID and a dict of public metadata for the current entity.
+        """
         pass
View File
@@ -9,7 +9,9 @@ logger = logging.getLogger(__name__)


 def validate_session_cookie(auth_header_unusued=None):
-    """ Attempts to load a user from a session cookie. """
+    """
+    Attempts to load a user from a session cookie.
+    """
     if current_user.is_anonymous:
         return ValidateResult(AuthKind.cookie, missing=True)
View File
@@ -27,7 +27,9 @@ class CredentialKind(Enum):


 def validate_credentials(auth_username, auth_password_or_token):
-    """ Validates a pair of auth username and password/token credentials. """
+    """
+    Validates a pair of auth username and password/token credentials.
+    """
     # Check for access tokens.
     if auth_username == ACCESS_TOKEN_USERNAME:
         logger.debug("Found credentials for access token")
View File
@@ -23,9 +23,11 @@ authentication_count = Counter(


 def _auth_decorator(pass_result=False, handlers=None):
-    """ Builds an auth decorator that runs the given handlers and, if any return successfully,
-    sets up the auth context. The wrapped function will be invoked *regardless of success or
-    failure of the auth handler(s)*
+    """
+    Builds an auth decorator that runs the given handlers and, if any return successfully, sets up
+    the auth context.
+
+    The wrapped function will be invoked *regardless of success or failure of the auth handler(s)*.
     """

     def processor(func):

@@ -75,8 +77,10 @@ process_basic_auth_no_pass = _auth_decorator(handlers=[validate_basic_auth])


 def require_session_login(func):
-    """ Decorates a function and ensures that a valid session cookie exists or a 401 is raised. If
-    a valid session cookie does exist, the authenticated user and identity are also set.
+    """
+    Decorates a function and ensures that a valid session cookie exists or a 401 is raised.
+
+    If a valid session cookie does exist, the authenticated user and identity are also set.
     """

     @wraps(func)

@@ -95,9 +99,11 @@ def require_session_login(func):


 def extract_namespace_repo_from_session(func):
-    """ Extracts the namespace and repository name from the current session (which must exist)
-    and passes them into the decorated function as the first and second arguments. If the
-    session doesn't exist or does not contain these arugments, a 400 error is raised.
+    """
+    Extracts the namespace and repository name from the current session (which must exist) and
+    passes them into the decorated function as the first and second arguments.
+
+    If the session doesn't exist or does not contain these arguments, a 400 error is raised.
     """

     @wraps(func)
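
A usage sketch for the decorators above (the route and handler are invented; process_basic_auth_no_pass is taken from the hunk context):

from flask import abort


@app.route("/api/user")
@process_basic_auth_no_pass
def get_user():
    # The wrapped function runs regardless of auth success, so check the context.
    user = get_authenticated_user()
    if user is None:
        abort(401)
    return user.username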
View File
@@ -10,8 +10,9 @@ logger = logging.getLogger(__name__)


 def validate_bearer_auth(auth_header):
-    """ Validates an OAuth token found inside a basic auth `Bearer` token, returning whether it
-    points to a valid OAuth token.
+    """
+    Validates an OAuth token found inside a basic auth `Bearer` token, returning whether it points
+    to a valid OAuth token.
     """
     if not auth_header:
         return ValidateResult(AuthKind.oauth, missing=True)

@@ -26,7 +27,8 @@ def validate_bearer_auth(auth_header):


 def validate_oauth_token(token):
-    """ Validates the specified OAuth token, returning whether it points to a valid OAuth token.
+    """
+    Validates the specified OAuth token, returning whether it points to a valid OAuth token.
     """
     validated = model.oauth.validate_access_token(token)
     if not validated:
View File
@@ -112,8 +112,9 @@ class QuayDeferredPermissionUser(Identity):
         return self._translate_role_for_scopes(USER_ROLES, SCOPE_MAX_USER_ROLES, role)

     def _populate_user_provides(self, user_object):
-        """ Populates the provides that naturally apply to a user, such as being the admin of
-        their own namespace.
+        """
+        Populates the provides that naturally apply to a user, such as being the admin of their own
+        namespace.
         """

         # Add the user specific permissions, only for non-oauth permission

@@ -142,7 +143,9 @@ class QuayDeferredPermissionUser(Identity):
             self.provides.add(_SuperUserNeed())

     def _populate_namespace_wide_provides(self, user_object, namespace_filter):
-        """ Populates the namespace-wide provides for a particular user under a particular namespace.
+        """
+        Populates the namespace-wide provides for a particular user under a particular namespace.
+
         This method does *not* add any provides for specific repositories.
         """

@@ -169,7 +172,9 @@ class QuayDeferredPermissionUser(Identity):
                 self.provides.add(team_grant)

     def _populate_repository_provides(self, user_object, namespace_filter, repository_name):
-        """ Populates the repository-specific provides for a particular user and repository. """
+        """
+        Populates the repository-specific provides for a particular user and repository.
+        """
         if namespace_filter and repository_name:
             permissions = model.permission.get_user_repository_permissions(

@@ -232,7 +237,9 @@ class QuayDeferredPermissionUser(Identity):

 class QuayPermission(Permission):
-    """ Base for all permissions in Quay. """
+    """
+    Base for all permissions in Quay.
+    """

     namespace = None
     repo_name = None
View File
@@ -57,7 +57,9 @@ class InvalidJWTException(Exception):


 def get_auth_headers(repository=None, scopes=None):
-    """ Returns a dictionary of headers for auth responses. """
+    """
+    Returns a dictionary of headers for auth responses.
+    """
     headers = {}
     realm_auth_path = url_for("v2.generate_registry_jwt")
     authenticate = 'Bearer realm="{0}{1}",service="{2}"'.format(

@@ -76,9 +78,12 @@ def get_auth_headers(repository=None, scopes=None):


 def identity_from_bearer_token(bearer_header):
-    """ Process a bearer header and return the loaded identity, or raise InvalidJWTException if an
-    identity could not be loaded. Expects tokens and grants in the format of the Docker registry
-    v2 auth spec: https://docs.docker.com/registry/spec/auth/token/
+    """
+    Process a bearer header and return the loaded identity, or raise InvalidJWTException if an
+    identity could not be loaded.
+
+    Expects tokens and grants in the format of the Docker registry v2 auth spec:
+    https://docs.docker.com/registry/spec/auth/token/
     """
     logger.debug("Validating auth header: %s", bearer_header)

@@ -121,8 +126,10 @@ def identity_from_bearer_token(bearer_header):


 def process_registry_jwt_auth(scopes=None):
-    """ Processes the registry JWT auth token found in the authorization header. If none found,
-    no error is returned. If an invalid token is found, raises a 401.
+    """
+    Processes the registry JWT auth token found in the authorization header.
+
+    If none found, no error is returned. If an invalid token is found, raises a 401.
     """

     def inner(func):
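
The challenge built by get_auth_headers follows the Docker registry v2 token spec linked above; an illustrative standalone constructor (all values invented):

def make_challenge(realm, service, repository=None, scopes=None):
    # Produces e.g. Bearer realm="...",service="...",scope="repository:ns/repo:pull,push"
    header = 'Bearer realm="%s",service="%s"' % (realm, service)
    if repository:
        header += ',scope="repository:%s:%s"' % (repository, ",".join(scopes or []))
    return {"WWW-Authenticate": header}


print(make_challenge("https://quay.example/v2/auth", "quay.example",
                     repository="ns/repo", scopes=["pull", "push"]))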
View File
@@ -166,8 +166,8 @@ def validate_scope_string(scopes):


 def is_subset_string(full_string, expected_string):
-    """ Returns true if the scopes found in expected_string are also found
-    in full_string.
+    """
+    Returns true if the scopes found in expected_string are also found in full_string.
     """
     full_scopes = scopes_from_scope_string(full_string)
     if not full_scopes:
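
The subset semantics restated in miniature (scope strings invented; the real function first parses them with scopes_from_scope_string):

def is_subset(full_string, expected_string):
    return set(expected_string.split()).issubset(set(full_string.split()))


assert is_subset("repo:read repo:write", "repo:read")
assert not is_subset("repo:read", "repo:read repo:write")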
View File
@@ -12,7 +12,9 @@ SIGNATURE_PREFIX = "sigv2="


 def generate_signed_token(grants, user_context):
-    """ Generates a signed session token with the given grants and user context. """
+    """
+    Generates a signed session token with the given grants and user context.
+    """
     ser = SecureCookieSessionInterface().get_signing_serializer(app)
     data_to_sign = {
         "grants": grants,

@@ -24,8 +26,9 @@ def generate_signed_token(grants, user_context):


 def validate_signed_grant(auth_header):
-    """ Validates a signed grant as found inside an auth header and returns whether it points to
-    a valid grant.
+    """
+    Validates a signed grant as found inside an auth header and returns whether it points to a valid
+    grant.
     """
     if not auth_header:
         return ValidateResult(AuthKind.signed_grant, missing=True)
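
A standalone sketch of the sign/verify round trip using itsdangerous directly instead of Flask's session serializer (secret and payload invented; SIGNATURE_PREFIX comes from the hunk context above):

from itsdangerous import URLSafeTimedSerializer

SIGNATURE_PREFIX = "sigv2="
ser = URLSafeTimedSerializer("secret-key")

token = SIGNATURE_PREFIX + ser.dumps({"grants": {"repo": "ns/repo"}, "user_context": "alice"})

assert token.startswith(SIGNATURE_PREFIX)
print(ser.loads(token[len(SIGNATURE_PREFIX):]))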
View File
@@ -14,7 +14,9 @@ class AuthKind(Enum):


 class ValidateResult(object):
-    """ A result of validating auth in one form or another. """
+    """
+    A result of validating auth in one form or another.
+    """

     def __init__(
         self,

@@ -47,11 +49,15 @@ class ValidateResult(object):
         return self.tuple() == other.tuple()

     def apply_to_context(self):
-        """ Applies this auth result to the auth context and Flask-Principal. """
+        """
+        Applies this auth result to the auth context and Flask-Principal.
+        """
         self.context.apply_to_request_context()

     def with_kind(self, kind):
-        """ Returns a copy of this result, but with the kind replaced. """
+        """
+        Returns a copy of this result, but with the kind replaced.
+        """
         result = ValidateResult(kind, missing=self.missing, error_message=self.error_message)
         result.context = self.context
         return result

@@ -65,15 +71,21 @@ class ValidateResult(object):

     @property
     def authed_user(self):
-        """ Returns the authenticated user, whether directly, or via an OAuth token. """
+        """
+        Returns the authenticated user, whether directly, or via an OAuth token.
+        """
         return self.context.authed_user

     @property
     def has_nonrobot_user(self):
-        """ Returns whether a user (not a robot) was authenticated successfully. """
+        """
+        Returns whether a user (not a robot) was authenticated successfully.
+        """
         return self.context.has_nonrobot_user

     @property
     def auth_valid(self):
-        """ Returns whether authentication successfully occurred. """
+        """
+        Returns whether authentication successfully occurred.
+        """
         return self.context.entity_kind != ContextEntityKind.anonymous
View File
@@ -24,7 +24,9 @@ class Avatar(object):


 class BaseAvatar(object):
-    """ Base class for all avatar implementations. """
+    """
+    Base class for all avatar implementations.
+    """

     def __init__(self, preferred_url_scheme, colors, http_client):
         self.preferred_url_scheme = preferred_url_scheme

@@ -32,7 +34,8 @@ class BaseAvatar(object):
         self.http_client = http_client

     def get_mail_html(self, name, email_or_id, size=16, kind="user"):
-        """ Returns the full HTML and CSS for viewing the avatar of the given name and email address,
+        """
+        Returns the full HTML and CSS for viewing the avatar of the given name and email address,
         with an optional size.
         """
         data = self.get_data(name, email_or_id, kind)

@@ -110,12 +113,16 @@ class BaseAvatar(object):
         return {"name": name, "hash": hash_value, "color": hash_color, "kind": kind}

     def _get_url(self, hash_value, size):
-        """ Returns the URL for displaying the overlay avatar. """
+        """
+        Returns the URL for displaying the overlay avatar.
+        """
         return None


 class GravatarAvatar(BaseAvatar):
-    """ Avatar system that uses gravatar for generating avatars. """
+    """
+    Avatar system that uses gravatar for generating avatars.
+    """

     def _get_url(self, hash_value, size=16):
         return "%s://www.gravatar.com/avatar/%s?d=404&size=%s" % (

@@ -126,7 +133,9 @@ class GravatarAvatar(BaseAvatar):

 class LocalAvatar(BaseAvatar):
-    """ Avatar system that uses the local system for generating avatars. """
+    """
+    Avatar system that uses the local system for generating avatars.
+    """

     pass
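
The Gravatar URL in the hunk above is keyed on a hash of the e-mail address; Gravatar's documented scheme (not code from this file) derives it as an MD5 of the normalized address:

import hashlib

email = "user@example.com"
hash_value = hashlib.md5(email.strip().lower().encode("utf-8")).hexdigest()
print("https://www.gravatar.com/avatar/%s?d=404&size=%s" % (hash_value, 16))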
View File
@@ -13,8 +13,9 @@ def wrap_with_threadpool(obj, worker_threads=1):


 class AsyncWrapper(object):
-    """ Wrapper class which will transform a syncronous library to one that can be used with
-    trollius coroutines.
+    """
+    Wrapper class which will transform a synchronous library to one that can be used with trollius
+    coroutines.
     """

     def __init__(self, delegate, loop=None, executor=None):

@@ -29,7 +30,8 @@ class AsyncWrapper(object):
             return delegate_attr

         def wrapper(*args, **kwargs):
-            """ Wraps the delegate_attr with primitives that will transform sync calls to ones shelled
+            """
+            Wraps the delegate_attr with primitives that will transform sync calls to ones shelled
             out to a thread pool.
             """
             callable_delegate_attr = partial(delegate_attr, *args, **kwargs)
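
A minimal restatement of the wrapping trick using asyncio in place of trollius (the Python 2-era backport of asyncio this code targets); the class and names are illustrative:

import asyncio
from functools import partial


class AsyncWrapperSketch(object):
    def __init__(self, delegate, executor=None):
        self._delegate = delegate
        self._executor = executor

    def __getattr__(self, name):
        attr = getattr(self._delegate, name)
        if not callable(attr):
            return attr

        def wrapper(*args, **kwargs):
            # Shell the synchronous call out to the executor's thread pool.
            loop = asyncio.get_event_loop()
            return loop.run_in_executor(self._executor, partial(attr, *args, **kwargs))

        return wrapper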
View File
@@ -2,7 +2,9 @@ from autobahn.asyncio.wamp import ApplicationSession


 class BaseComponent(ApplicationSession):
-    """ Base class for all registered component sessions in the server. """
+    """
+    Base class for all registered component sessions in the server.
+    """

     def __init__(self, config, **kwargs):
         ApplicationSession.__init__(self, config)
View File
@@ -35,7 +35,9 @@ logger = logging.getLogger(__name__)


 class ComponentStatus(object):
-    """ ComponentStatus represents the possible states of a component. """
+    """
+    ComponentStatus represents the possible states of a component.
+    """

     JOINING = "joining"
     WAITING = "waiting"

@@ -45,7 +47,9 @@ class ComponentStatus(object):


 class BuildComponent(BaseComponent):
-    """ An application session component which conducts one (or more) builds. """
+    """
+    An application session component which conducts one (or more) builds.
+    """

     def __init__(self, config, realm=None, token=None, **kwargs):
         self.expected_token = token

@@ -85,7 +89,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def start_build(self, build_job):
-        """ Starts a build. """
+        """
+        Starts a build.
+        """
         if self._component_status not in (ComponentStatus.WAITING, ComponentStatus.RUNNING):
             logger.debug(
                 "Could not start build for component %s (build %s, worker version: %s): %s",

@@ -191,7 +197,9 @@ class BuildComponent(BaseComponent):
         logger.debug("With Arguments: %s", build_arguments)

         def build_complete_callback(result):
-            """ This function is used to execute a coroutine as the callback. """
+            """
+            This function is used to execute a coroutine as the callback.
+            """
            trollius.ensure_future(self._build_complete(result))

         self.call("io.quay.builder.build", **build_arguments).add_done_callback(

@@ -218,14 +226,18 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _commit_sha(build_config):
-        """ Determines whether the metadata is using an old schema or not and returns the commit. """
+        """
+        Determines whether the metadata is using an old schema or not and returns the commit.
+        """
         commit_sha = build_config["trigger_metadata"].get("commit", "")
         old_commit_sha = build_config["trigger_metadata"].get("commit_sha", "")
         return commit_sha or old_commit_sha

     @staticmethod
     def name_and_path(subdir):
-        """ Returns the dockerfile path and name """
+        """
+        Returns the dockerfile path and name.
+        """
         if subdir.endswith("/"):
             subdir += "Dockerfile"
         elif not subdir.endswith("Dockerfile"):

@@ -234,7 +246,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _total_completion(statuses, total_images):
-        """ Returns the current amount completion relative to the total completion of a build. """
+        """
+        Returns the current amount of completion relative to the total completion of a build.
+        """
         percentage_with_sizes = float(len(statuses.values())) / total_images
         sent_bytes = sum([status["current"] for status in statuses.values()])
         total_bytes = sum([status["total"] for status in statuses.values()])
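
Worked numbers for the intermediates visible in _total_completion (the final combination of these values falls outside this hunk): two of four images reporting, 50 of 200 bytes sent.

statuses = {
    "layer1": {"current": 30, "total": 100},
    "layer2": {"current": 20, "total": 100},
}
total_images = 4

percentage_with_sizes = float(len(statuses.values())) / total_images  # 0.5
sent_bytes = sum([status["current"] for status in statuses.values()])  # 50
total_bytes = sum([status["total"] for status in statuses.values()])  # 200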
@@ -242,7 +256,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _process_pushpull_status(status_dict, current_phase, docker_data, images):
-        """ Processes the status of a push or pull by updating the provided status_dict and images. """
+        """
+        Processes the status of a push or pull by updating the provided status_dict and images.
+        """
         if not docker_data:
             return

@@ -271,7 +287,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _on_log_message(self, phase, json_data):
-        """ Tails log messages and updates the build status. """
+        """
+        Tails log messages and updates the build status.
+        """
         # Update the heartbeat.
         self._last_heartbeat = datetime.datetime.utcnow()

@@ -355,7 +373,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_failure(self, error_message, exception=None):
-        """ Handles and logs a failed build. """
+        """
+        Handles and logs a failed build.
+        """
         yield From(
             self._build_status.set_error(
                 error_message, {"internal_error": str(exception) if exception else None}

@@ -370,7 +390,11 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_complete(self, result):
-        """ Wraps up a completed build. Handles any errors and calls self._build_finished. """
+        """
+        Wraps up a completed build.
+
+        Handles any errors and calls self._build_finished.
+        """
         build_id = self._current_job.repo_build.uuid

         try:

@@ -451,7 +475,9 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _build_finished(self, job_status):
-        """ Alerts the parent that a build has completed and sets the status back to running. """
+        """
+        Alerts the parent that a build has completed and sets the status back to running.
+        """
         yield From(self.parent_manager.job_completed(self._current_job, job_status, self))

         # Set the component back to a running state.

@@ -459,7 +485,9 @@ class BuildComponent(BaseComponent):

     @staticmethod
     def _ping():
-        """ Ping pong. """
+        """
+        Ping pong.
+        """
         return "pong"

     @trollius.coroutine

@@ -499,7 +527,9 @@ class BuildComponent(BaseComponent):
         self._component_status = phase

     def _on_heartbeat(self):
-        """ Updates the last known heartbeat. """
+        """
+        Updates the last known heartbeat.
+        """
         if self._component_status == ComponentStatus.TIMED_OUT:
             return

@@ -508,9 +538,11 @@ class BuildComponent(BaseComponent):

     @trollius.coroutine
     def _heartbeat(self):
-        """ Coroutine that runs every HEARTBEAT_TIMEOUT seconds, both checking the worker's heartbeat
-        and updating the heartbeat in the build status dictionary (if applicable). This allows
-        the build system to catch crashes from either end.
+        """
+        Coroutine that runs every HEARTBEAT_TIMEOUT seconds, both checking the worker's heartbeat
+        and updating the heartbeat in the build status dictionary (if applicable).
+
+        This allows the build system to catch crashes from either end.
         """
         yield From(trollius.sleep(INITIAL_TIMEOUT))
View File
@@ -2,8 +2,10 @@ import re


 def extract_current_step(current_status_string):
-    """ Attempts to extract the current step numeric identifier from the given status string. Returns the step
-    number or None if none.
+    """
+    Attempts to extract the current step numeric identifier from the given status string.
+
+    Returns the step number or None if none.
     """
     # Older format: `Step 12 :`
     # Newer format: `Step 4/13 :`
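
Both status formats named in the comments can be matched with one pattern; a sketch of the extraction (the regex is illustrative, not lifted from the function body):

import re


def current_step(status):
    match = re.search(r"Step (\d+)(?:/\d+)? :", status)
    return int(match.group(1)) if match else None


assert current_step("Step 12 : ADD . /app") == 12
assert current_step("Step 4/13 : RUN make") == 4
assert current_step("Pulling repository base") is None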
View File
@@ -2,7 +2,9 @@ from data.database import BUILD_PHASE


 class BuildJobResult(object):
-    """ Build job result enum """
+    """
+    Build job result enum.
+    """

     INCOMPLETE = "incomplete"
     COMPLETE = "complete"

@@ -10,7 +12,9 @@ class BuildJobResult(object):


 class BuildServerStatus(object):
-    """ Build server status enum """
+    """
+    Build server status enum.
+    """

     STARTING = "starting"
     RUNNING = "running"
View File
@@ -14,13 +14,17 @@ logger = logging.getLogger(__name__)


 class BuildJobLoadException(Exception):
-    """ Exception raised if a build job could not be instantiated for some reason. """
+    """
+    Exception raised if a build job could not be instantiated for some reason.
+    """

     pass


 class BuildJob(object):
-    """ Represents a single in-progress build job. """
+    """
+    Represents a single in-progress build job.
+    """

     def __init__(self, job_item):
         self.job_item = job_item

@@ -56,17 +60,23 @@ class BuildJob(object):

     @property
     def build_uuid(self):
-        """ Returns the unique UUID for this build job. """
+        """
+        Returns the unique UUID for this build job.
+        """
         return self.job_details["build_uuid"]

     @property
     def namespace(self):
-        """ Returns the namespace under which this build is running. """
+        """
+        Returns the namespace under which this build is running.
+        """
         return self.repo_build.repository.namespace_user.username

     @property
     def repo_name(self):
-        """ Returns the name of the repository under which this build is running. """
+        """
+        Returns the name of the repository under which this build is running.
+        """
         return self.repo_build.repository.name

     @property

@@ -74,7 +84,9 @@ class BuildJob(object):
         return self._load_repo_build()

     def get_build_package_url(self, user_files):
-        """ Returns the URL of the build package for this build, if any or empty string if none. """
+        """
+        Returns the URL of the build package for this build, if any or empty string if none.
+        """
         archive_url = self.build_config.get("archive_url", None)
         if archive_url:
             return archive_url

@@ -88,7 +100,9 @@ class BuildJob(object):

     @property
     def pull_credentials(self):
-        """ Returns the pull credentials for this job, or None if none. """
+        """
+        Returns the pull credentials for this job, or None if none.
+        """
         return self.job_details.get("pull_credentials")

     @property

@@ -102,7 +116,9 @@ class BuildJob(object):
         )

     def determine_cached_tag(self, base_image_id=None, cache_comments=None):
-        """ Returns the tag to pull to prime the cache or None if none. """
+        """
+        Returns the tag to pull to prime the cache or None if none.
+        """
         cached_tag = self._determine_cached_tag_by_tag()
         logger.debug(
             "Determined cached tag %s for %s: %s", cached_tag, base_image_id, cache_comments

@@ -110,8 +126,11 @@ class BuildJob(object):
         return cached_tag

     def _determine_cached_tag_by_tag(self):
-        """ Determines the cached tag by looking for one of the tags being built, and seeing if it
-        exists in the repository. This is a fallback for when no comment information is available.
+        """
+        Determines the cached tag by looking for one of the tags being built, and seeing if it
+        exists in the repository.
+
+        This is a fallback for when no comment information is available.
         """
         with UseThenDisconnect(app.config):
             tags = self.build_config.get("docker_tags", ["latest"])

@@ -128,7 +147,9 @@ class BuildJob(object):

 class BuildJobNotifier(object):
-    """ A class for sending notifications to a job that only relies on the build_uuid """
+    """
+    A class for sending notifications to a job that only relies on the build_uuid.
+    """

     def __init__(self, build_uuid):
         self.build_uuid = build_uuid
View File
@@ -13,7 +13,9 @@ logger = logging.getLogger(__name__)


 class StatusHandler(object):
-    """ Context wrapper for writing status to build logs. """
+    """
+    Context wrapper for writing status to build logs.
+    """

     def __init__(self, build_logs, repository_build_uuid):
         self._current_phase = None
View File
@@ -1,5 +1,7 @@
 class WorkerError(object):
-    """ Helper class which represents errors raised by a build worker. """
+    """
+    Helper class which represents errors raised by a build worker.
+    """

     def __init__(self, error_code, base_message=None):
         self._error_code = error_code
View File
@@ -2,7 +2,9 @@ from trollius import coroutine


 class BaseManager(object):
-    """ Base for all worker managers. """
+    """
+    Base for all worker managers.
+    """

     def __init__(
         self,

@@ -22,59 +24,77 @@ class BaseManager(object):

     @coroutine
     def job_heartbeat(self, build_job):
-        """ Method invoked to tell the manager that a job is still running. This method will be called
-        every few minutes. """
+        """
+        Method invoked to tell the manager that a job is still running.
+
+        This method will be called every few minutes.
+        """
         self.job_heartbeat_callback(build_job)

     def overall_setup_time(self):
-        """ Returns the number of seconds that the build system should wait before allowing the job
-        to be picked up again after called 'schedule'.
+        """
+        Returns the number of seconds that the build system should wait before allowing the job to
+        be picked up again after calling 'schedule'.
         """
         raise NotImplementedError

     def shutdown(self):
-        """ Indicates that the build controller server is in a shutdown state and that no new jobs
-        or workers should be performed. Existing workers should be cleaned up once their jobs
-        have completed
+        """
+        Indicates that the build controller server is in a shutdown state and that no new jobs or
+        workers should be started.
+
+        Existing workers should be cleaned up once their jobs have completed.
         """
         raise NotImplementedError

     @coroutine
     def schedule(self, build_job):
-        """ Schedules a queue item to be built. Returns a 2-tuple with (True, None) if the item was
-        properly scheduled and (False, a retry timeout in seconds) if all workers are busy or an
-        error occurs.
+        """
+        Schedules a queue item to be built.
+
+        Returns a 2-tuple with (True, None) if the item was properly scheduled and (False, a retry
+        timeout in seconds) if all workers are busy or an error occurs.
         """
         raise NotImplementedError

     def initialize(self, manager_config):
-        """ Runs any initialization code for the manager. Called once the server is in a ready state.
+        """
+        Runs any initialization code for the manager.
+
+        Called once the server is in a ready state.
         """
         raise NotImplementedError

     @coroutine
     def build_component_ready(self, build_component):
-        """ Method invoked whenever a build component announces itself as ready.
+        """
+        Method invoked whenever a build component announces itself as ready.
         """
         raise NotImplementedError

     def build_component_disposed(self, build_component, timed_out):
-        """ Method invoked whenever a build component has been disposed. The timed_out boolean indicates
-        whether the component's heartbeat timed out.
+        """
+        Method invoked whenever a build component has been disposed.
+
+        The timed_out boolean indicates whether the component's heartbeat timed out.
         """
         raise NotImplementedError

     @coroutine
     def job_completed(self, build_job, job_status, build_component):
-        """ Method invoked once a job_item has completed, in some manner. The job_status will be
-        one of: incomplete, error, complete. Implementations of this method should call coroutine
-        self.job_complete_callback with a status of Incomplete if they wish for the job to be
-        automatically requeued.
+        """
+        Method invoked once a job_item has completed, in some manner.
+
+        The job_status will be one of: incomplete, error, complete. Implementations of this method
+        should call coroutine self.job_complete_callback with a status of Incomplete if they wish
+        for the job to be automatically requeued.
         """
         raise NotImplementedError

     def num_workers(self):
-        """ Returns the number of active build workers currently registered. This includes those
-        that are currently busy and awaiting more work.
+        """
+        Returns the number of active build workers currently registered.
+
+        This includes those that are currently busy and awaiting more work.
         """
         raise NotImplementedError
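
The schedule() contract above in miniature (an illustrative subclass; Return is how trollius coroutines return values, as seen elsewhere in this commit):

from trollius import coroutine, Return


class AlwaysBusyManager(BaseManager):
    @coroutine
    def schedule(self, build_job):
        # Every worker is busy: ask the caller to retry in 60 seconds.
        raise Return(False, 60)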
View File
@@ -9,7 +9,9 @@ CANCELLERS = {"ephemeral": OrchestratorCanceller}


 class BuildCanceller(object):
-    """ A class to manage cancelling a build """
+    """
+    A class to manage cancelling a build.
+    """

     def __init__(self, app=None):
         self.build_manager_config = app.config.get("BUILD_MANAGER")

@@ -19,7 +21,9 @@ class BuildCanceller(object):
         self.handler = None

     def try_cancel_build(self, uuid):
-        """ A method to kill a running build """
+        """
+        A method to kill a running build.
+        """
         if self.handler is None:
             canceller = CANCELLERS.get(self.build_manager_config[0], NoopCanceller)
             self.handler = canceller(self.build_manager_config[1])
View File
@@ -13,7 +13,9 @@ logger = logging.getLogger(__name__)


 class DynamicRegistrationComponent(BaseComponent):
-    """ Component session that handles dynamic registration of the builder components. """
+    """
+    Component session that handles dynamic registration of the builder components.
+    """

     def onConnect(self):
         self.join(REGISTRATION_REALM)

@@ -32,7 +34,9 @@ class DynamicRegistrationComponent(BaseComponent):

 class EnterpriseManager(BaseManager):
-    """ Build manager implementation for the Enterprise Registry. """
+    """
+    Build manager implementation for the Enterprise Registry.
+    """

     def __init__(self, *args, **kwargs):
         self.ready_components = set()

@@ -52,7 +56,9 @@ class EnterpriseManager(BaseManager):
         return 60

     def add_build_component(self):
-        """ Adds a new build component for an Enterprise Registry. """
+        """
+        Adds a new build component for an Enterprise Registry.
+        """
         # Generate a new unique realm ID for the build worker.
         realm = str(uuid.uuid4())
         new_component = self.register_component(realm, BuildComponent, token="")

@@ -61,7 +67,9 @@ class EnterpriseManager(BaseManager):

     @coroutine
     def schedule(self, build_job):
-        """ Schedules a build for an Enterprise Registry. """
+        """
+        Schedules a build for an Enterprise Registry.
+        """
         if self.shutting_down or not self.ready_components:
             raise Return(False, RETRY_TIMEOUT)
View File
@ -65,7 +65,9 @@ BuildInfo = namedtuple("BuildInfo", ["component", "build_job", "execution_id", "
class EphemeralBuilderManager(BaseManager): class EphemeralBuilderManager(BaseManager):
""" Build manager implementation for the Enterprise Registry. """ """
Build manager implementation for the Enterprise Registry.
"""
EXECUTORS = { EXECUTORS = {
"popen": PopenExecutor, "popen": PopenExecutor,
@ -98,7 +100,9 @@ class EphemeralBuilderManager(BaseManager):
@coroutine @coroutine
def _mark_job_incomplete(self, build_job, build_info): def _mark_job_incomplete(self, build_job, build_info):
""" Marks a job as incomplete, in response to a failure to start or a timeout. """ """
Marks a job as incomplete, in response to a failure to start or a timeout.
"""
executor_name = build_info.executor_name executor_name = build_info.executor_name
execution_id = build_info.execution_id execution_id = build_info.execution_id
@ -137,9 +141,9 @@ class EphemeralBuilderManager(BaseManager):
@coroutine @coroutine
def _job_callback(self, key_change): def _job_callback(self, key_change):
""" """
This is the callback invoked when keys related to jobs are changed. This is the callback invoked when keys related to jobs are changed. It ignores all events
It ignores all events related to the creation of new jobs. related to the creation of new jobs. Deletes or expirations cause checks to ensure they've
Deletes or expirations cause checks to ensure they've been properly marked as completed. been properly marked as completed.
:param key_change: the event and value produced by a key changing in the orchestrator :param key_change: the event and value produced by a key changing in the orchestrator
:type key_change: :class:`KeyChange` :type key_change: :class:`KeyChange`
@ -787,7 +791,8 @@ class EphemeralBuilderManager(BaseManager):
logger.exception("Could not write metric for realm %s", realm) logger.exception("Could not write metric for realm %s", realm)
def num_workers(self): def num_workers(self):
""" The number of workers we're managing locally. """
The number of workers we're managing locally.
:returns: the number of workers locally managed :returns: the number of workers locally managed
:rtype: int :rtype: int

View File

@ -5,7 +5,9 @@ logger = logging.getLogger(__name__)
class EtcdCanceller(object): class EtcdCanceller(object):
""" A class that sends a message to etcd to cancel a build """ """
A class that sends a message to etcd to cancel a build.
"""
def __init__(self, config): def __init__(self, config):
etcd_host = config.get("ETCD_HOST", "127.0.0.1") etcd_host = config.get("ETCD_HOST", "127.0.0.1")
@ -28,7 +30,9 @@ class EtcdCanceller(object):
) )
def try_cancel_build(self, build_uuid): def try_cancel_build(self, build_uuid):
""" Writes etcd message to cancel build_uuid. """ """
Writes etcd message to cancel build_uuid.
"""
logger.info("Cancelling build %s".format(build_uuid)) logger.info("Cancelling build %s".format(build_uuid))
try: try:
self._etcd_client.write( self._etcd_client.write(

View File

@ -65,15 +65,19 @@ def async_observe(metric, *labels):
class ExecutorException(Exception): class ExecutorException(Exception):
""" Exception raised when there is a problem starting or stopping a builder. """ """
Exception raised when there is a problem starting or stopping a builder.
"""
pass pass
class BuilderExecutor(object): class BuilderExecutor(object):
def __init__(self, executor_config, manager_hostname): def __init__(self, executor_config, manager_hostname):
""" Interface which can be plugged into the EphemeralNodeManager to provide a strategy for """
starting and stopping builders. """ Interface which can be plugged into the EphemeralNodeManager to provide a strategy for
starting and stopping builders.
"""
self.executor_config = executor_config self.executor_config = executor_config
self.manager_hostname = manager_hostname self.manager_hostname = manager_hostname
@ -82,28 +86,40 @@ class BuilderExecutor(object):
@property @property
def name(self): def name(self):
""" Name returns the unique name for this executor. """ """
Name returns the unique name for this executor.
"""
return self.executor_config.get("NAME") or self.__class__.__name__ return self.executor_config.get("NAME") or self.__class__.__name__
@property @property
def setup_time(self): def setup_time(self):
""" Returns the amount of time (in seconds) to wait for the execution to start for the build. """
If None, the manager's default will be used. """ Returns the amount of time (in seconds) to wait for the execution to start for the build.
If None, the manager's default will be used.
"""
return self.executor_config.get("SETUP_TIME") return self.executor_config.get("SETUP_TIME")
@coroutine @coroutine
def start_builder(self, realm, token, build_uuid): def start_builder(self, realm, token, build_uuid):
""" Create a builder with the specified config. Returns a unique id which can be used to manage """
the builder. """ Create a builder with the specified config.
Returns a unique id which can be used to manage the builder.
"""
raise NotImplementedError raise NotImplementedError
@coroutine @coroutine
def stop_builder(self, builder_id): def stop_builder(self, builder_id):
""" Stop a builder which is currently running. """ """
Stop a builder which is currently running.
"""
raise NotImplementedError raise NotImplementedError
def allowed_for_namespace(self, namespace): def allowed_for_namespace(self, namespace):
""" Returns true if this executor can be used for builds in the given namespace. """ """
Returns true if this executor can be used for builds in the given namespace.
"""
# Check for an explicit namespace whitelist. # Check for an explicit namespace whitelist.
namespace_whitelist = self.executor_config.get("NAMESPACE_WHITELIST") namespace_whitelist = self.executor_config.get("NAMESPACE_WHITELIST")
@ -122,8 +138,9 @@ class BuilderExecutor(object):
@property @property
def minimum_retry_threshold(self): def minimum_retry_threshold(self):
""" Returns the minimum number of retries required for this executor to be used or 0 if """
none. """ Returns the minimum number of retries required for this executor to be used or 0 if none.
"""
return self.executor_config.get("MINIMUM_RETRY_THRESHOLD", 0) return self.executor_config.get("MINIMUM_RETRY_THRESHOLD", 0)
def generate_cloud_config( def generate_cloud_config(
@ -163,8 +180,10 @@ class BuilderExecutor(object):
class EC2Executor(BuilderExecutor): class EC2Executor(BuilderExecutor):
""" Implementation of BuilderExecutor which uses libcloud to start machines on a variety of cloud """
providers. """ Implementation of BuilderExecutor which uses libcloud to start machines on a variety of cloud
providers.
"""
COREOS_STACK_URL = ( COREOS_STACK_URL = (
"http://%s.release.core-os.net/amd64-usr/current/coreos_production_ami_hvm.txt" "http://%s.release.core-os.net/amd64-usr/current/coreos_production_ami_hvm.txt"
@ -175,7 +194,9 @@ class EC2Executor(BuilderExecutor):
super(EC2Executor, self).__init__(*args, **kwargs) super(EC2Executor, self).__init__(*args, **kwargs)
def _get_conn(self): def _get_conn(self):
""" Creates an ec2 connection which can be used to manage instances. """ """
Creates an ec2 connection which can be used to manage instances.
"""
return AsyncWrapper( return AsyncWrapper(
boto.ec2.connect_to_region( boto.ec2.connect_to_region(
self.executor_config["EC2_REGION"], self.executor_config["EC2_REGION"],
@ -187,7 +208,9 @@ class EC2Executor(BuilderExecutor):
@classmethod @classmethod
@cachetools.func.ttl_cache(ttl=ONE_HOUR) @cachetools.func.ttl_cache(ttl=ONE_HOUR)
def _get_coreos_ami(cls, ec2_region, coreos_channel): def _get_coreos_ami(cls, ec2_region, coreos_channel):
""" Retrieve the CoreOS AMI id from the canonical listing. """ """
Retrieve the CoreOS AMI id from the canonical listing.
"""
stack_list_string = requests.get(EC2Executor.COREOS_STACK_URL % coreos_channel).text stack_list_string = requests.get(EC2Executor.COREOS_STACK_URL % coreos_channel).text
stack_amis = dict([stack.split("=") for stack in stack_list_string.split("|")]) stack_amis = dict([stack.split("=") for stack in stack_list_string.split("|")])
return stack_amis[ec2_region] return stack_amis[ec2_region]
@ -303,7 +326,9 @@ class EC2Executor(BuilderExecutor):
class PopenExecutor(BuilderExecutor): class PopenExecutor(BuilderExecutor):
""" Implementation of BuilderExecutor which uses Popen to fork a quay-builder process. """ """
Implementation of BuilderExecutor which uses Popen to fork a quay-builder process.
"""
def __init__(self, executor_config, manager_hostname): def __init__(self, executor_config, manager_hostname):
self._jobs = {} self._jobs = {}
@ -354,8 +379,9 @@ class PopenExecutor(BuilderExecutor):
class KubernetesExecutor(BuilderExecutor): class KubernetesExecutor(BuilderExecutor):
""" Executes build jobs by creating Kubernetes jobs which run a qemu-kvm virtual """
machine in a pod """ Executes build jobs by creating Kubernetes jobs which run a qemu-kvm virtual machine in a pod.
"""
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(KubernetesExecutor, self).__init__(*args, **kwargs) super(KubernetesExecutor, self).__init__(*args, **kwargs)
@ -585,10 +611,14 @@ class KubernetesExecutor(BuilderExecutor):
class LogPipe(threading.Thread): class LogPipe(threading.Thread):
""" Adapted from http://codereview.stackexchange.com/a/17959 """ """
Adapted from http://codereview.stackexchange.com/a/17959.
"""
def __init__(self, level): def __init__(self, level):
""" Setup the object with a logger and a loglevel and start the thread """ """
Setup the object with a logger and a loglevel and start the thread.
"""
threading.Thread.__init__(self) threading.Thread.__init__(self)
self.daemon = False self.daemon = False
self.level = level self.level = level
@ -597,16 +627,22 @@ class LogPipe(threading.Thread):
self.start() self.start()
def fileno(self): def fileno(self):
""" Return the write file descriptor of the pipe """ """
Return the write file descriptor of the pipe.
"""
return self.fd_write return self.fd_write
def run(self): def run(self):
""" Run the thread, logging everything. """ """
Run the thread, logging everything.
"""
for line in iter(self.pipe_reader.readline, ""): for line in iter(self.pipe_reader.readline, ""):
logging.log(self.level, line.strip("\n")) logging.log(self.level, line.strip("\n"))
self.pipe_reader.close() self.pipe_reader.close()
def close(self): def close(self):
""" Close the write end of the pipe. """ """
Close the write end of the pipe.
"""
os.close(self.fd_write) os.close(self.fd_write)
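A short usage sketch for LogPipe, assuming the class above: because fileno() returns the write end of the pipe, the instance can be handed directly to subprocess.Popen, and the reader thread forwards each line of child output into the logging framework.

import logging
import subprocess

pipe = LogPipe(logging.INFO)
proc = subprocess.Popen(["echo", "hello"], stdout=pipe)
proc.wait()
pipe.close()  # close the write end so the reader thread can drain and exit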

View File

@ -1,9 +1,13 @@
class NoopCanceller(object): class NoopCanceller(object):
""" A class that can not cancel a build """ """
A class that can not cancel a build.
"""
def __init__(self, config=None): def __init__(self, config=None):
pass pass
def try_cancel_build(self, uuid): def try_cancel_build(self, uuid):
""" Does nothing and fails to cancel build. """ """
Does nothing and fails to cancel build.
"""
return False return False

View File

@ -11,7 +11,9 @@ CANCEL_PREFIX = "cancel/"
class OrchestratorCanceller(object): class OrchestratorCanceller(object):
""" An asynchronous way to cancel a build with any Orchestrator. """ """
An asynchronous way to cancel a build with any Orchestrator.
"""
def __init__(self, config): def __init__(self, config):
self._orchestrator = orchestrator_from_config(config, canceller_only=True) self._orchestrator = orchestrator_from_config(config, canceller_only=True)

View File

@ -46,8 +46,8 @@ REDIS_EXPIRED_KEYSPACE_REGEX = re.compile(REDIS_EXPIRED_KEYSPACE_PATTERN % (r"(\
def orchestrator_from_config(manager_config, canceller_only=False): def orchestrator_from_config(manager_config, canceller_only=False):
""" """
Allocates a new Orchestrator from the 'ORCHESTRATOR' block from provided manager config. Allocates a new Orchestrator from the 'ORCHESTRATOR' block from provided manager config. Checks
Checks for legacy configuration prefixed with 'ETCD_' when the 'ORCHESTRATOR' is not present. for legacy configuration prefixed with 'ETCD_' when the 'ORCHESTRATOR' is not present.
:param manager_config: the configuration for the orchestrator :param manager_config: the configuration for the orchestrator
:type manager_config: dict :type manager_config: dict
@ -130,8 +130,8 @@ class KeyChange(namedtuple("KeyChange", ["event", "key", "value"])):
@add_metaclass(ABCMeta) @add_metaclass(ABCMeta)
class Orchestrator(object): class Orchestrator(object):
""" """
Orchestrator is the interface that is used to synchronize the build states Orchestrator is the interface that is used to synchronize the build states across build
across build managers. managers.
This interface assumes that storage is being done by a key-value store This interface assumes that storage is being done by a key-value store
that supports watching for events on keys. that supports watching for events on keys.
@ -146,7 +146,6 @@ class Orchestrator(object):
@abstractmethod @abstractmethod
def on_key_change(self, key, callback, restarter=None): def on_key_change(self, key, callback, restarter=None):
""" """
The callback parameter takes in a KeyChange object as a parameter. The callback parameter takes in a KeyChange object as a parameter.
""" """
pass pass
@ -225,8 +224,8 @@ class Orchestrator(object):
def _sleep_orchestrator(): def _sleep_orchestrator():
""" """
This function blocks the trollius event loop by sleeping in order to backoff if a failure This function blocks the trollius event loop by sleeping in order to backoff if a failure such
such as a ConnectionError has occurred. as a ConnectionError has occurred.
""" """
logger.exception( logger.exception(
"Connecting to etcd failed; sleeping for %s and then trying again", "Connecting to etcd failed; sleeping for %s and then trying again",
@ -240,7 +239,9 @@ def _sleep_orchestrator():
class EtcdAction(object): class EtcdAction(object):
""" Enumeration of the various kinds of etcd actions we can observe via a watch. """ """
Enumeration of the various kinds of etcd actions we can observe via a watch.
"""
GET = "get" GET = "get"
SET = "set" SET = "set"
@ -288,6 +289,7 @@ class Etcd2Orchestrator(Orchestrator):
def _sanity_check_ttl(ttl): def _sanity_check_ttl(ttl):
""" """
A TTL of < 0 in etcd results in the key *never being expired*. A TTL of < 0 in etcd results in the key *never being expired*.
We use a max here to ensure that if the TTL is < 0, the key will expire immediately. We use a max here to ensure that if the TTL is < 0, the key will expire immediately.
""" """
return max(ttl, 0) return max(ttl, 0)
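A two-line illustration of the clamp above: a negative TTL would otherwise mean "never expire" in etcd, so it is forced to immediate expiry instead.

assert _sanity_check_ttl(-5) == 0   # would never expire in etcd; expire immediately instead
assert _sanity_check_ttl(30) == 30  # positive TTLs pass through unchanged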
@ -684,6 +686,7 @@ class RedisOrchestrator(Orchestrator):
def _is_expired_keyspace_event(event_result): def _is_expired_keyspace_event(event_result):
""" """
Sanity check that this isn't an unrelated keyspace event. Sanity check that this isn't an unrelated keyspace event.
There could be a more efficient keyspace event config to avoid this client-side filter. There could be a more efficient keyspace event config to avoid this client-side filter.
""" """
if event_result is None: if event_result is None:

View File

@ -32,8 +32,10 @@ HEARTBEAT_PERIOD_SEC = 30
class BuilderServer(object): class BuilderServer(object):
""" Server which handles both HTTP and WAMP requests, managing the full state of the build """
controller. """ Server which handles both HTTP and WAMP requests, managing the full state of the build
controller.
"""
def __init__( def __init__(
self, self,
@ -131,8 +133,11 @@ class BuilderServer(object):
logger.debug("Shutting down server") logger.debug("Shutting down server")
def _register_component(self, realm, component_klass, **kwargs): def _register_component(self, realm, component_klass, **kwargs):
""" Registers a component with the server. The component_klass must derive from """
BaseComponent. """ Registers a component with the server.
The component_klass must derive from BaseComponent.
"""
logger.debug("Registering component with realm %s", realm) logger.debug("Registering component with realm %s", realm)
if realm in self._realm_map: if realm in self._realm_map:
logger.debug("Component with realm %s already registered", realm) logger.debug("Component with realm %s already registered", realm)

View File

@ -88,7 +88,9 @@ class EphemeralBuilderTestCase(unittest.TestCase):
class TestEphemeralLifecycle(EphemeralBuilderTestCase): class TestEphemeralLifecycle(EphemeralBuilderTestCase):
""" Tests the various lifecycles of the ephemeral builder and its interaction with etcd. """ """
Tests the various lifecycles of the ephemeral builder and its interaction with etcd.
"""
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(TestEphemeralLifecycle, self).__init__(*args, **kwargs) super(TestEphemeralLifecycle, self).__init__(*args, **kwargs)
@ -420,7 +422,8 @@ class TestEphemeralLifecycle(EphemeralBuilderTestCase):
class TestEphemeral(EphemeralBuilderTestCase): class TestEphemeral(EphemeralBuilderTestCase):
""" Simple unit tests for the ephemeral builder around config management, starting and stopping """
Simple unit tests for the ephemeral builder around config management, starting and stopping
jobs. jobs.
""" """

View File

@ -164,7 +164,9 @@ class BuildTriggerHandler(object):
@property @property
def auth_token(self): def auth_token(self):
""" Returns the auth token for the trigger. """ """
Returns the auth token for the trigger.
"""
# NOTE: This check is for testing. # NOTE: This check is for testing.
if hasattr(self.trigger, "auth_token"): if hasattr(self.trigger, "auth_token"):
return self.trigger.auth_token return self.trigger.auth_token
@ -185,39 +187,42 @@ class BuildTriggerHandler(object):
@abstractmethod @abstractmethod
def list_build_source_namespaces(self): def list_build_source_namespaces(self):
""" """
Take the auth information for the specific trigger type and load the Take the auth information for the specific trigger type and load the list of namespaces that
list of namespaces that can contain build sources. can contain build sources.
""" """
pass pass
@abstractmethod @abstractmethod
def list_build_sources_for_namespace(self, namespace): def list_build_sources_for_namespace(self, namespace):
""" """
Take the auth information for the specific trigger type and load the Take the auth information for the specific trigger type and load the list of repositories
list of repositories under the given namespace. under the given namespace.
""" """
pass pass
@abstractmethod @abstractmethod
def list_build_subdirs(self): def list_build_subdirs(self):
""" """
Take the auth information and the specified config so far and list all of Take the auth information and the specified config so far and list all of the possible
the possible subdirs containing dockerfiles. subdirs containing dockerfiles.
""" """
pass pass
@abstractmethod @abstractmethod
def handle_trigger_request(self, request): def handle_trigger_request(self, request):
""" """
Transform the incoming request data into a set of actions. Returns a PreparedBuild. Transform the incoming request data into a set of actions.
Returns a PreparedBuild.
""" """
pass pass
@abstractmethod @abstractmethod
def is_active(self): def is_active(self):
""" """
Returns True if the current build trigger is active. Inactive means further Returns True if the current build trigger is active.
setup is needed.
Inactive means further setup is needed.
""" """
pass pass
@ -225,6 +230,7 @@ class BuildTriggerHandler(object):
def activate(self, standard_webhook_url): def activate(self, standard_webhook_url):
""" """
Activates the trigger for the service, with the given new configuration. Activates the trigger for the service, with the given new configuration.
Returns new public and private config that should be stored if successful. Returns new public and private config that should be stored if successful.
""" """
pass pass
@ -232,36 +238,45 @@ class BuildTriggerHandler(object):
@abstractmethod @abstractmethod
def deactivate(self): def deactivate(self):
""" """
Deactivates the trigger for the service, removing any hooks installed in Deactivates the trigger for the service, removing any hooks installed in the remote service.
the remote service. Returns the new config that should be stored if this
trigger is going to be re-activated. Returns the new config that should be stored if this trigger is going to be re-activated.
""" """
pass pass
@abstractmethod @abstractmethod
def manual_start(self, run_parameters=None): def manual_start(self, run_parameters=None):
""" """
Manually creates a repository build for this trigger. Returns a PreparedBuild. Manually creates a repository build for this trigger.
Returns a PreparedBuild.
""" """
pass pass
@abstractmethod @abstractmethod
def list_field_values(self, field_name, limit=None): def list_field_values(self, field_name, limit=None):
""" """
Lists all values for the given custom trigger field. For example, a trigger might have a Lists all values for the given custom trigger field.
field named "branches", and this method would return all branches.
For example, a trigger might have a field named "branches", and this method would return all
branches.
""" """
pass pass
@abstractmethod @abstractmethod
def get_repository_url(self): def get_repository_url(self):
""" Returns the URL of the current trigger's repository. Note that this operation """
can be called in a loop, so it should be as fast as possible. """ Returns the URL of the current trigger's repository.
Note that this operation can be called in a loop, so it should be as fast as possible.
"""
pass pass
@classmethod @classmethod
def filename_is_dockerfile(cls, file_name): def filename_is_dockerfile(cls, file_name):
""" Returns whether the file is named Dockerfile or follows the convention <name>.Dockerfile""" """
Returns whether the file is named Dockerfile or follows the convention <name>.Dockerfile.
"""
return file_name.endswith(".Dockerfile") or u"Dockerfile" == file_name return file_name.endswith(".Dockerfile") or u"Dockerfile" == file_name
@classmethod @classmethod
@ -280,17 +295,22 @@ class BuildTriggerHandler(object):
raise InvalidServiceException("Unable to find service: %s" % trigger.service.name) raise InvalidServiceException("Unable to find service: %s" % trigger.service.name)
def put_config_key(self, key, value): def put_config_key(self, key, value):
""" Updates a config key in the trigger, saving it to the DB. """ """
Updates a config key in the trigger, saving it to the DB.
"""
self.config[key] = value self.config[key] = value
model.build.update_build_trigger(self.trigger, self.config) model.build.update_build_trigger(self.trigger, self.config)
def set_auth_token(self, auth_token): def set_auth_token(self, auth_token):
""" Sets the auth token for the trigger, saving it to the DB. """ """
Sets the auth token for the trigger, saving it to the DB.
"""
model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token) model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
def get_dockerfile_path(self): def get_dockerfile_path(self):
""" Returns the normalized path to the Dockerfile found in the subdirectory """
in the config. """ Returns the normalized path to the Dockerfile found in the subdirectory in the config.
"""
dockerfile_path = self.config.get("dockerfile_path") or "Dockerfile" dockerfile_path = self.config.get("dockerfile_path") or "Dockerfile"
if dockerfile_path[0] == "/": if dockerfile_path[0] == "/":
dockerfile_path = dockerfile_path[1:] dockerfile_path = dockerfile_path[1:]
@ -395,7 +415,9 @@ class BuildTriggerHandler(object):
@classmethod @classmethod
def get_parent_directory_mappings(cls, dockerfile_path, current_paths=None): def get_parent_directory_mappings(cls, dockerfile_path, current_paths=None):
""" Returns a map of dockerfile_paths to it's possible contexts. """ """
Returns a map of dockerfile_paths to it's possible contexts.
"""
if dockerfile_path == "": if dockerfile_path == "":
return {} return {}
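A worked illustration of the normalization performed by get_dockerfile_path above (default of "Dockerfile", leading slash stripped); the standalone helper name here is illustrative:

def normalize_dockerfile_path(config):
    dockerfile_path = config.get("dockerfile_path") or "Dockerfile"
    if dockerfile_path[0] == "/":
        dockerfile_path = dockerfile_path[1:]
    return dockerfile_path

assert normalize_dockerfile_path({}) == "Dockerfile"
assert normalize_dockerfile_path({"dockerfile_path": "/sub/Dockerfile"}) == "sub/Dockerfile"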

View File

@ -128,8 +128,8 @@ BITBUCKET_COMMIT_INFO_SCHEMA = {
def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name, lookup_author): def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name, lookup_author):
""" Returns the BitBucket commit information transformed into our own """
payload format. Returns the BitBucket commit information transformed into our own payload format.
""" """
try: try:
validate(bb_commit, BITBUCKET_COMMIT_INFO_SCHEMA) validate(bb_commit, BITBUCKET_COMMIT_INFO_SCHEMA)
@ -165,8 +165,10 @@ def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name,
def get_transformed_webhook_payload(bb_payload, default_branch=None): def get_transformed_webhook_payload(bb_payload, default_branch=None):
""" Returns the BitBucket webhook JSON payload transformed into our own payload """
format. If the bb_payload is not valid, returns None. Returns the BitBucket webhook JSON payload transformed into our own payload format.
If the bb_payload is not valid, returns None.
""" """
try: try:
validate(bb_payload, BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA) validate(bb_payload, BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA)
@ -217,7 +219,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return "bitbucket" return "bitbucket"
def _get_client(self): def _get_client(self):
""" Returns a BitBucket API client for this trigger's config. """ """
Returns a BitBucket API client for this trigger's config.
"""
key = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_KEY", "") key = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_KEY", "")
secret = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_SECRET", "") secret = app.config.get("BITBUCKET_TRIGGER_CONFIG", {}).get("CONSUMER_SECRET", "")
@ -227,7 +231,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return BitBucket(key, secret, callback_url, timeout=15) return BitBucket(key, secret, callback_url, timeout=15)
def _get_authorized_client(self): def _get_authorized_client(self):
""" Returns an authorized API client. """ """
Returns an authorized API client.
"""
base_client = self._get_client() base_client = self._get_client()
auth_token = self.auth_token or "invalid:invalid" auth_token = self.auth_token or "invalid:invalid"
token_parts = auth_token.split(":") token_parts = auth_token.split(":")
@ -238,14 +244,18 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return base_client.get_authorized_client(access_token, access_token_secret) return base_client.get_authorized_client(access_token, access_token_secret)
def _get_repository_client(self): def _get_repository_client(self):
""" Returns an API client for working with this config's BB repository. """ """
Returns an API client for working with this config's BB repository.
"""
source = self.config["build_source"] source = self.config["build_source"]
(namespace, name) = source.split("/") (namespace, name) = source.split("/")
bitbucket_client = self._get_authorized_client() bitbucket_client = self._get_authorized_client()
return bitbucket_client.for_namespace(namespace).repositories().get(name) return bitbucket_client.for_namespace(namespace).repositories().get(name)
def _get_default_branch(self, repository, default_value="master"): def _get_default_branch(self, repository, default_value="master"):
""" Returns the default branch for the repository or the value given. """ """
Returns the default branch for the repository or the value given.
"""
(result, data, _) = repository.get_main_branch() (result, data, _) = repository.get_main_branch()
if result: if result:
return data["name"] return data["name"]
@ -253,7 +263,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return default_value return default_value
def get_oauth_url(self): def get_oauth_url(self):
""" Returns the OAuth URL to authorize Bitbucket. """ """
Returns the OAuth URL to authorize Bitbucket.
"""
bitbucket_client = self._get_client() bitbucket_client = self._get_client()
(result, data, err_msg) = bitbucket_client.get_authorization_url() (result, data, err_msg) = bitbucket_client.get_authorization_url()
if not result: if not result:
@ -262,7 +274,9 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return data return data
def exchange_verifier(self, verifier): def exchange_verifier(self, verifier):
""" Exchanges the given verifier token to setup this trigger. """ """
Exchanges the given verifier token to setup this trigger.
"""
bitbucket_client = self._get_client() bitbucket_client = self._get_client()
access_token = self.config.get("access_token", "") access_token = self.config.get("access_token", "")
access_token_secret = self.auth_token access_token_secret = self.auth_token
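The _get_authorized_client snippet above implies the stored auth token has the form "<access_token>:<access_token_secret>"; a small illustration of that parsing, with a made-up token value:

auth_token = "abcd1234:s3cr3t"  # illustrative value only
token_parts = (auth_token or "invalid:invalid").split(":")
access_token, access_token_secret = token_parts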

View File

@ -78,8 +78,10 @@ GITHUB_WEBHOOK_PAYLOAD_SCHEMA = {
def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user=None): def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user=None):
""" Returns the GitHub webhook JSON payload transformed into our own payload """
format. If the gh_payload is not valid, returns None. Returns the GitHub webhook JSON payload transformed into our own payload format.
If the gh_payload is not valid, returns None.
""" """
try: try:
validate(gh_payload, GITHUB_WEBHOOK_PAYLOAD_SCHEMA) validate(gh_payload, GITHUB_WEBHOOK_PAYLOAD_SCHEMA)
@ -153,7 +155,9 @@ class GithubBuildTrigger(BuildTriggerHandler):
""" """
def _get_client(self): def _get_client(self):
""" Returns an authenticated client for talking to the GitHub API. """ """
Returns an authenticated client for talking to the GitHub API.
"""
return Github( return Github(
self.auth_token, self.auth_token,
base_url=github_trigger.api_endpoint(), base_url=github_trigger.api_endpoint(),

View File

@ -91,8 +91,8 @@ def _catch_timeouts_and_errors(func):
def _paginated_iterator(func, exc, **kwargs): def _paginated_iterator(func, exc, **kwargs):
""" Returns an iterator over invocations of the given function, automatically handling """
pagination. Returns an iterator over invocations of the given function, automatically handling pagination.
""" """
page = 1 page = 1
while True: while True:
@ -114,8 +114,10 @@ def _paginated_iterator(func, exc, **kwargs):
def get_transformed_webhook_payload( def get_transformed_webhook_payload(
gl_payload, default_branch=None, lookup_user=None, lookup_commit=None gl_payload, default_branch=None, lookup_user=None, lookup_commit=None
): ):
""" Returns the Gitlab webhook JSON payload transformed into our own payload """
format. If the gl_payload is not valid, returns None. Returns the Gitlab webhook JSON payload transformed into our own payload format.
If the gl_payload is not valid, returns None.
""" """
try: try:
validate(gl_payload, GITLAB_WEBHOOK_PAYLOAD_SCHEMA) validate(gl_payload, GITLAB_WEBHOOK_PAYLOAD_SCHEMA)
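A self-contained sketch of the pagination pattern that _paginated_iterator above describes; the page keyword and the empty-page termination rule are assumptions of the sketch, not Gitlab client specifics:

def paginated(fetch_page, exc):
    page = 1
    while True:
        results = fetch_page(page=page)
        if results is None:
            raise exc("pagination fetch failed")
        if not results:
            return  # an empty page ends the iteration
        for result in results:
            yield result
        page += 1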

View File

@ -99,7 +99,9 @@ def should_skip_commit(metadata):
def raise_if_skipped_build(prepared_build, config): def raise_if_skipped_build(prepared_build, config):
""" Raises a SkipRequestException if the given build should be skipped. """ """
Raises a SkipRequestException if the given build should be skipped.
"""
# Check to ensure we have metadata. # Check to ensure we have metadata.
if not prepared_build.metadata: if not prepared_build.metadata:
logger.debug("Skipping request due to missing metadata for prepared build") logger.debug("Skipping request due to missing metadata for prepared build")

View File

@ -66,7 +66,7 @@ def write_config(filename, **kwargs):
def generate_nginx_config(config): def generate_nginx_config(config):
""" """
Generates nginx config from the app config Generates nginx config from the app config.
""" """
config = config or {} config = config or {}
use_https = os.path.exists(os.path.join(QUAYCONF_DIR, "stack/ssl.key")) use_https = os.path.exists(os.path.join(QUAYCONF_DIR, "stack/ssl.key"))
@ -89,7 +89,7 @@ def generate_nginx_config(config):
def generate_server_config(config): def generate_server_config(config):
""" """
Generates server config from the app config Generates server config from the app config.
""" """
config = config or {} config = config or {}
tuf_server = config.get("TUF_SERVER", None) tuf_server = config.get("TUF_SERVER", None)
@ -111,7 +111,7 @@ def generate_server_config(config):
def generate_rate_limiting_config(config): def generate_rate_limiting_config(config):
""" """
Generates rate limiting config from the app config Generates rate limiting config from the app config.
""" """
config = config or {} config = config or {}
non_rate_limited_namespaces = config.get("NON_RATE_LIMITED_NAMESPACES") or set() non_rate_limited_namespaces = config.get("NON_RATE_LIMITED_NAMESPACES") or set()
@ -126,7 +126,7 @@ def generate_rate_limiting_config(config):
def generate_hosted_http_base_config(config): def generate_hosted_http_base_config(config):
""" """
Generates hosted http base config from the app config Generates hosted http base config from the app config.
""" """
config = config or {} config = config or {}
feature_proxy_protocol = config.get("FEATURE_PROXY_PROTOCOL", False) feature_proxy_protocol = config.get("FEATURE_PROXY_PROTOCOL", False)

View File

@ -44,7 +44,9 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
def format_date(date): def format_date(date):
""" Output an RFC822 date format. """ """
Output an RFC822 date format.
"""
if date is None: if date is None:
return None return None
return formatdate(timegm(date.utctimetuple())) return formatdate(timegm(date.utctimetuple()))
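A quick check of the RFC 822 output, using only what is shown above (formatdate over a UTC timetuple):

from datetime import datetime

print(format_date(datetime(2020, 2, 5)))  # 'Wed, 05 Feb 2020 00:00:00 -0000'
print(format_date(None))                  # None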
@ -147,7 +149,9 @@ def validate_json_request(schema_name, optional=False):
def kubernetes_only(f): def kubernetes_only(f):
""" Aborts the request with a 400 if the app is not running on kubernetes """ """
Aborts the request with a 400 if the app is not running on kubernetes.
"""
@wraps(f) @wraps(f)
def abort_if_not_kube(*args, **kwargs): def abort_if_not_kube(*args, **kwargs):

View File

@ -20,7 +20,9 @@ logger = logging.getLogger(__name__)
@resource("/v1/kubernetes/deployments/") @resource("/v1/kubernetes/deployments/")
class SuperUserKubernetesDeployment(ApiResource): class SuperUserKubernetesDeployment(ApiResource):
""" Resource for the getting the status of Red Hat Quay deployments and cycling them """ """
Resource for the getting the status of Red Hat Quay deployments and cycling them.
"""
schemas = { schemas = {
"ValidateDeploymentNames": { "ValidateDeploymentNames": {
@ -65,7 +67,9 @@ class QEDeploymentRolloutStatus(ApiResource):
@resource("/v1/kubernetes/deployments/rollback") @resource("/v1/kubernetes/deployments/rollback")
class QEDeploymentRollback(ApiResource): class QEDeploymentRollback(ApiResource):
""" Resource for rolling back deployments """ """
Resource for rolling back deployments.
"""
schemas = { schemas = {
"ValidateDeploymentNames": { "ValidateDeploymentNames": {
@ -111,7 +115,9 @@ class QEDeploymentRollback(ApiResource):
@resource("/v1/kubernetes/config") @resource("/v1/kubernetes/config")
class SuperUserKubernetesConfiguration(ApiResource): class SuperUserKubernetesConfiguration(ApiResource):
""" Resource for saving the config files to kubernetes secrets. """ """
Resource for saving the config files to kubernetes secrets.
"""
@kubernetes_only @kubernetes_only
@nickname("scDeployConfiguration") @nickname("scDeployConfiguration")
@ -128,7 +134,9 @@ class SuperUserKubernetesConfiguration(ApiResource):
@resource("/v1/kubernetes/config/populate") @resource("/v1/kubernetes/config/populate")
class KubernetesConfigurationPopulator(ApiResource): class KubernetesConfigurationPopulator(ApiResource):
""" Resource for populating the local configuration from the cluster's kubernetes secrets. """ """
Resource for populating the local configuration from the cluster's kubernetes secrets.
"""
@kubernetes_only @kubernetes_only
@nickname("scKubePopulateConfig") @nickname("scKubePopulateConfig")

View File

@ -26,18 +26,24 @@ logger = logging.getLogger(__name__)
def database_is_valid(): def database_is_valid():
""" Returns whether the database, as configured, is valid. """ """
Returns whether the database, as configured, is valid.
"""
return model.is_valid() return model.is_valid()
def database_has_users(): def database_has_users():
""" Returns whether the database has any users defined. """ """
Returns whether the database has any users defined.
"""
return model.has_users() return model.has_users()
@resource("/v1/superuser/config") @resource("/v1/superuser/config")
class SuperUserConfig(ApiResource): class SuperUserConfig(ApiResource):
""" Resource for fetching and updating the current configuration, if any. """ """
Resource for fetching and updating the current configuration, if any.
"""
schemas = { schemas = {
"UpdateConfig": { "UpdateConfig": {
@ -50,14 +56,18 @@ class SuperUserConfig(ApiResource):
@nickname("scGetConfig") @nickname("scGetConfig")
def get(self): def get(self):
""" Returns the currently defined configuration, if any. """ """
Returns the currently defined configuration, if any.
"""
config_object = config_provider.get_config() config_object = config_provider.get_config()
return {"config": config_object} return {"config": config_object}
@nickname("scUpdateConfig") @nickname("scUpdateConfig")
@validate_json_request("UpdateConfig") @validate_json_request("UpdateConfig")
def put(self): def put(self):
""" Updates the config override file. """ """
Updates the config override file.
"""
# Note: This method is called to set the database configuration before any super user exists, # Note: This method is called to set the database configuration before any super user exists,
# so we also allow it to be called if there is no valid registry configuration setup. # so we also allow it to be called if there is no valid registry configuration setup.
config_object = request.get_json()["config"] config_object = request.get_json()["config"]
@ -78,13 +88,16 @@ class SuperUserConfig(ApiResource):
@resource("/v1/superuser/registrystatus") @resource("/v1/superuser/registrystatus")
class SuperUserRegistryStatus(ApiResource): class SuperUserRegistryStatus(ApiResource):
""" Resource for determining the status of the registry, such as if config exists, """
if a database is configured, and if it has any defined users. Resource for determining the status of the registry, such as if config exists, if a database is
configured, and if it has any defined users.
""" """
@nickname("scRegistryStatus") @nickname("scRegistryStatus")
def get(self): def get(self):
""" Returns the status of the registry. """ """
Returns the status of the registry.
"""
# If there is no config file, we need to setup the database. # If there is no config file, we need to setup the database.
if not config_provider.config_exists(): if not config_provider.config_exists():
return {"status": "config-db"} return {"status": "config-db"}
@ -118,11 +131,15 @@ def _reload_config():
@resource("/v1/superuser/setupdb") @resource("/v1/superuser/setupdb")
class SuperUserSetupDatabase(ApiResource): class SuperUserSetupDatabase(ApiResource):
""" Resource for invoking alembic to setup the database. """ """
Resource for invoking alembic to setup the database.
"""
@nickname("scSetupDatabase") @nickname("scSetupDatabase")
def get(self): def get(self):
""" Invokes the alembic upgrade process. """ """
Invokes the alembic upgrade process.
"""
# Note: This method is called after the database configured is saved, but before the # Note: This method is called after the database configured is saved, but before the
# database has any tables. Therefore, we only allow it to be run in that unique case. # database has any tables. Therefore, we only allow it to be run in that unique case.
if config_provider.config_exists() and not database_is_valid(): if config_provider.config_exists() and not database_is_valid():
@ -146,7 +163,9 @@ class SuperUserSetupDatabase(ApiResource):
@resource("/v1/superuser/config/createsuperuser") @resource("/v1/superuser/config/createsuperuser")
class SuperUserCreateInitialSuperUser(ApiResource): class SuperUserCreateInitialSuperUser(ApiResource):
""" Resource for creating the initial super user. """ """
Resource for creating the initial super user.
"""
schemas = { schemas = {
"CreateSuperUser": { "CreateSuperUser": {
@ -164,8 +183,10 @@ class SuperUserCreateInitialSuperUser(ApiResource):
@nickname("scCreateInitialSuperuser") @nickname("scCreateInitialSuperuser")
@validate_json_request("CreateSuperUser") @validate_json_request("CreateSuperUser")
def post(self): def post(self):
""" Creates the initial super user, updates the underlying configuration and """
sets the current session to have that super user. """ Creates the initial super user, updates the underlying configuration and sets the current
session to have that super user.
"""
_reload_config() _reload_config()
@ -199,7 +220,9 @@ class SuperUserCreateInitialSuperUser(ApiResource):
@resource("/v1/superuser/config/validate/<service>") @resource("/v1/superuser/config/validate/<service>")
class SuperUserConfigValidate(ApiResource): class SuperUserConfigValidate(ApiResource):
""" Resource for validating a block of configuration against an external service. """ """
Resource for validating a block of configuration against an external service.
"""
schemas = { schemas = {
"ValidateConfig": { "ValidateConfig": {
@ -219,7 +242,9 @@ class SuperUserConfigValidate(ApiResource):
@nickname("scValidateConfig") @nickname("scValidateConfig")
@validate_json_request("ValidateConfig") @validate_json_request("ValidateConfig")
def post(self, service): def post(self, service):
""" Validates the given config for the given service. """ """
Validates the given config for the given service.
"""
# Note: This method is called to validate the database configuration before any super user exists, # Note: This method is called to validate the database configuration before any super user exists,
# so we also allow it to be called if there is no valid registry configuration setup. Note that # so we also allow it to be called if there is no valid registry configuration setup. Note that
# this is also safe since this method does not access any information not given in the request. # this is also safe since this method does not access any information not given in the request.
@ -239,11 +264,15 @@ class SuperUserConfigValidate(ApiResource):
@resource("/v1/superuser/config/file/<filename>") @resource("/v1/superuser/config/file/<filename>")
class SuperUserConfigFile(ApiResource): class SuperUserConfigFile(ApiResource):
""" Resource for fetching the status of config files and overriding them. """ """
Resource for fetching the status of config files and overriding them.
"""
@nickname("scConfigFileExists") @nickname("scConfigFileExists")
def get(self, filename): def get(self, filename):
""" Returns whether the configuration file with the given name exists. """ """
Returns whether the configuration file with the given name exists.
"""
if not is_valid_config_upload_filename(filename): if not is_valid_config_upload_filename(filename):
abort(404) abort(404)
@ -251,7 +280,9 @@ class SuperUserConfigFile(ApiResource):
@nickname("scUpdateConfigFile") @nickname("scUpdateConfigFile")
def post(self, filename): def post(self, filename):
""" Updates the configuration file with the given name. """ """
Updates the configuration file with the given name.
"""
if not is_valid_config_upload_filename(filename): if not is_valid_config_upload_filename(filename):
abort(404) abort(404)

View File

@ -23,7 +23,9 @@ class SuperuserConfigDataInterface(object):
@abstractmethod @abstractmethod
def create_superuser(self, username, password, email): def create_superuser(self, username, password, email):
""" """
Creates a new superuser with the given username, password and email. Returns the user's UUID. Creates a new superuser with the given username, password and email.
Returns the user's UUID.
""" """
@abstractmethod @abstractmethod

View File

@ -32,7 +32,9 @@ logger = logging.getLogger(__name__)
@resource("/v1/superuser/customcerts/<certpath>") @resource("/v1/superuser/customcerts/<certpath>")
class SuperUserCustomCertificate(ApiResource): class SuperUserCustomCertificate(ApiResource):
""" Resource for managing a custom certificate. """ """
Resource for managing a custom certificate.
"""
@nickname("uploadCustomCertificate") @nickname("uploadCustomCertificate")
def post(self, certpath): def post(self, certpath):
@ -85,7 +87,9 @@ class SuperUserCustomCertificate(ApiResource):
@resource("/v1/superuser/customcerts") @resource("/v1/superuser/customcerts")
class SuperUserCustomCertificates(ApiResource): class SuperUserCustomCertificates(ApiResource):
""" Resource for managing custom certificates. """ """
Resource for managing custom certificates.
"""
@nickname("getCustomCertificates") @nickname("getCustomCertificates")
def get(self): def get(self):
@ -128,7 +132,9 @@ class SuperUserCustomCertificates(ApiResource):
@resource("/v1/superuser/keys") @resource("/v1/superuser/keys")
class SuperUserServiceKeyManagement(ApiResource): class SuperUserServiceKeyManagement(ApiResource):
""" Resource for managing service keys.""" """
Resource for managing service keys.
"""
schemas = { schemas = {
"CreateServiceKey": { "CreateServiceKey": {
@ -221,7 +227,9 @@ class SuperUserServiceKeyManagement(ApiResource):
@resource("/v1/superuser/approvedkeys/<kid>") @resource("/v1/superuser/approvedkeys/<kid>")
class SuperUserServiceKeyApproval(ApiResource): class SuperUserServiceKeyApproval(ApiResource):
""" Resource for approving service keys. """ """
Resource for approving service keys.
"""
schemas = { schemas = {
"ApproveServiceKey": { "ApproveServiceKey": {

View File

@ -37,7 +37,8 @@ class RepositoryBuild(
) )
): ):
""" """
RepositoryBuild represents a build associated with a repository RepositoryBuild represents a build associated with a repository.
:type uuid: string :type uuid: string
:type logs_archived: boolean :type logs_archived: boolean
:type repository_namespace_user_username: string :type repository_namespace_user_username: string
@ -94,7 +95,8 @@ class RepositoryBuild(
class Approval(namedtuple("Approval", ["approver", "approval_type", "approved_date", "notes"])): class Approval(namedtuple("Approval", ["approver", "approval_type", "approved_date", "notes"])):
""" """
Approval represents whether a key has been approved or not Approval represents whether a key has been approved or not.
:type approver: User :type approver: User
:type approval_type: string :type approval_type: string
:type approved_date: Date :type approved_date: Date
@ -127,7 +129,8 @@ class ServiceKey(
) )
): ):
""" """
ServiceKey is an apostille signing key ServiceKey is an apostille signing key.
:type name: string :type name: string
:type kid: int :type kid: int
:type service: string :type service: string
@ -137,7 +140,6 @@ class ServiceKey(
:type expiration_date: Date :type expiration_date: Date
:type rotation_duration: Date :type rotation_duration: Date
:type approval: Approval :type approval: Approval
""" """
def to_dict(self): def to_dict(self):
@ -157,6 +159,7 @@ class ServiceKey(
class User(namedtuple("User", ["username", "email", "verified", "enabled", "robot"])): class User(namedtuple("User", ["username", "email", "verified", "enabled", "robot"])):
""" """
User represents a single user. User represents a single user.
:type username: string :type username: string
:type email: string :type email: string
:type verified: boolean :type verified: boolean
@ -180,6 +183,7 @@ class User(namedtuple("User", ["username", "email", "verified", "enabled", "robo
class Organization(namedtuple("Organization", ["username", "email"])): class Organization(namedtuple("Organization", ["username", "email"])):
""" """
Organization represents a single org. Organization represents a single org.
:type username: string :type username: string
:type email: string :type email: string
""" """
@ -200,5 +204,5 @@ class SuperuserDataInterface(object):
@abstractmethod @abstractmethod
def list_all_service_keys(self): def list_all_service_keys(self):
""" """
Returns a list of service keys Returns a list of service keys.
""" """

View File

@ -47,8 +47,8 @@ class ServiceKeyAlreadyApproved(Exception):
class PreOCIModel(SuperuserDataInterface): class PreOCIModel(SuperuserDataInterface):
""" """
PreOCIModel implements the data model for the SuperUser using a database schema PreOCIModel implements the data model for the SuperUser using a database schema before it was
before it was changed to support the OCI specification. changed to support the OCI specification.
""" """
def list_all_service_keys(self): def list_all_service_keys(self):

View File

@ -19,7 +19,7 @@ from config_app.config_util.tar import (
@resource("/v1/configapp/initialization") @resource("/v1/configapp/initialization")
class ConfigInitialization(ApiResource): class ConfigInitialization(ApiResource):
""" """
Resource for dealing with any initialization logic for the config app Resource for dealing with any initialization logic for the config app.
""" """
@nickname("scStartNewConfig") @nickname("scStartNewConfig")
@ -31,8 +31,8 @@ class ConfigInitialization(ApiResource):
@resource("/v1/configapp/tarconfig") @resource("/v1/configapp/tarconfig")
class TarConfigLoader(ApiResource): class TarConfigLoader(ApiResource):
""" """
Resource for dealing with configuration as a tarball, Resource for dealing with configuration as a tarball, including loading and generating
including loading and generating functions functions.
""" """
@nickname("scGetConfigTarball") @nickname("scGetConfigTarball")
@ -50,7 +50,9 @@ class TarConfigLoader(ApiResource):
@nickname("scUploadTarballConfig") @nickname("scUploadTarballConfig")
def put(self): def put(self):
""" Loads tarball config into the config provider """ """
Loads tarball config into the config provider.
"""
# Generate a new empty dir to load the config into # Generate a new empty dir to load the config into
config_provider.new_config_dir() config_provider.new_config_dir()
input_stream = request.stream input_stream = request.stream

View File

@ -5,10 +5,14 @@ from config_app.config_endpoints.api.superuser_models_interface import user_view
@resource("/v1/user/") @resource("/v1/user/")
class User(ApiResource): class User(ApiResource):
""" Operations related to users. """ """
Operations related to users.
"""
@nickname("getLoggedInUser") @nickname("getLoggedInUser")
def get(self): def get(self):
""" Get user information for the authenticated user. """ """
Get user information for the authenticated user.
"""
user = get_authenticated_user() user = get_authenticated_user()
return user_view(user) return user_view(user)

View File

@ -30,7 +30,9 @@ TYPE_CONVERTER = {
def _list_files(path, extension, contains=""): def _list_files(path, extension, contains=""):
""" Returns a list of all the files with the given extension found under the given path. """ """
Returns a list of all the files with the given extension found under the given path.
"""
def matches(f): def matches(f):
return os.path.splitext(f)[1] == "." + extension and contains in os.path.splitext(f)[0] return os.path.splitext(f)[1] == "." + extension and contains in os.path.splitext(f)[0]
@ -47,7 +49,9 @@ FONT_AWESOME_4 = "netdna.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.cs
def render_page_template(name, route_data=None, js_bundle_name=DEFAULT_JS_BUNDLE_NAME, **kwargs): def render_page_template(name, route_data=None, js_bundle_name=DEFAULT_JS_BUNDLE_NAME, **kwargs):
""" Renders the page template with the given name as the response and returns its contents. """ """
Renders the page template with the given name as the response and returns its contents.
"""
main_scripts = _list_files("build", "js", js_bundle_name) main_scripts = _list_files("build", "js", js_bundle_name)
use_cdn = os.getenv("TESTING") == "true" use_cdn = os.getenv("TESTING") == "true"
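The matching rule from _list_files above, isolated for clarity; the sample filenames are made up:

import os

def matches(f, extension="js", contains="main"):
    stem, ext = os.path.splitext(f)
    return ext == "." + extension and contains in stem

assert matches("main.bundle.js") is True   # ".js" extension, stem contains "main"
assert matches("styles.css") is False      # wrong extension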

View File

@ -9,9 +9,9 @@ OLD_CONFIG_SUBDIR = "old/"
class TransientDirectoryProvider(FileConfigProvider): class TransientDirectoryProvider(FileConfigProvider):
""" Implementation of the config provider that reads and writes the data """
from/to the file system, only using temporary directories, Implementation of the config provider that reads and writes the data from/to the file system,
deleting old dirs and creating new ones as requested. only using temporary directories, deleting old dirs and creating new ones as requested.
""" """
def __init__(self, config_volume, yaml_filename, py_filename): def __init__(self, config_volume, yaml_filename, py_filename):
@ -29,7 +29,7 @@ class TransientDirectoryProvider(FileConfigProvider):
def new_config_dir(self): def new_config_dir(self):
""" """
Update the path with a new temporary directory, deleting the old one in the process Update the path with a new temporary directory, deleting the old one in the process.
""" """
self.temp_dir.cleanup() self.temp_dir.cleanup()
temp_dir = TemporaryDirectory() temp_dir = TemporaryDirectory()
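The directory swap performed by new_config_dir above, shown in isolation (TemporaryDirectory comes from the standard library tempfile module; the variable names are illustrative):

from tempfile import TemporaryDirectory

old_dir = TemporaryDirectory()
old_dir.cleanup()                 # delete the previous config dir
fresh_dir = TemporaryDirectory()  # allocate a new one in its place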

View File

@ -8,7 +8,9 @@ from util.config.validator import EXTRA_CA_DIRECTORY, EXTRA_CA_DIRECTORY_PREFIX
def get_config_provider(config_volume, yaml_filename, py_filename, testing=False): def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
""" Loads and returns the config provider for the current environment. """ """
Loads and returns the config provider for the current environment.
"""
if testing: if testing:
return TestConfigProvider() return TestConfigProvider()

View File

@ -12,7 +12,9 @@ logger = logging.getLogger(__name__)
class BaseFileProvider(BaseProvider): class BaseFileProvider(BaseProvider):
""" Base implementation of the config provider that reads the data from the file system. """ """
Base implementation of the config provider that reads the data from the file system.
"""
def __init__(self, config_volume, yaml_filename, py_filename): def __init__(self, config_volume, yaml_filename, py_filename):
self.config_volume = config_volume self.config_volume = config_volume

View File

@ -12,13 +12,17 @@ logger = logging.getLogger(__name__)
class CannotWriteConfigException(Exception): class CannotWriteConfigException(Exception):
""" Exception raised when the config cannot be written. """ """
Exception raised when the config cannot be written.
"""
pass pass
class SetupIncompleteException(Exception): class SetupIncompleteException(Exception):
""" Exception raised when attempting to verify config that has not yet been setup. """ """
Exception raised when attempting to verify config that has not yet been setup.
"""
pass pass
@ -63,7 +67,8 @@ def export_yaml(config_obj, config_file):
@add_metaclass(ABCMeta) @add_metaclass(ABCMeta)
class BaseProvider(object): class BaseProvider(object):
""" A configuration provider helps to load, save, and handle config override in the application. """
A configuration provider helps to load, save, and handle config override in the application.
""" """
@property @property
@ -72,59 +77,84 @@ class BaseProvider(object):
@abstractmethod @abstractmethod
def update_app_config(self, app_config): def update_app_config(self, app_config):
""" Updates the given application config object with the loaded override config. """ """
Updates the given application config object with the loaded override config.
"""
@abstractmethod @abstractmethod
def get_config(self): def get_config(self):
""" Returns the contents of the config override file, or None if none. """ """
Returns the contents of the config override file, or None if none.
"""
@abstractmethod @abstractmethod
def save_config(self, config_object): def save_config(self, config_object):
""" Updates the contents of the config override file to those given. """ """
Updates the contents of the config override file to those given.
"""
@abstractmethod @abstractmethod
def config_exists(self): def config_exists(self):
""" Returns true if a config override file exists in the config volume. """ """
Returns true if a config override file exists in the config volume.
"""
@abstractmethod @abstractmethod
def volume_exists(self): def volume_exists(self):
""" Returns whether the config override volume exists. """ """
Returns whether the config override volume exists.
"""
@abstractmethod @abstractmethod
def volume_file_exists(self, filename): def volume_file_exists(self, filename):
""" Returns whether the file with the given name exists under the config override volume. """ """
Returns whether the file with the given name exists under the config override volume.
"""
@abstractmethod @abstractmethod
def get_volume_file(self, filename, mode="r"): def get_volume_file(self, filename, mode="r"):
""" Returns a Python file referring to the given name under the config override volume. """ """
Returns a Python file referring to the given name under the config override volume.
"""
@abstractmethod @abstractmethod
def write_volume_file(self, filename, contents): def write_volume_file(self, filename, contents):
""" Writes the given contents to the config override volumne, with the given filename. """ """
Writes the given contents to the config override volumne, with the given filename.
"""
@abstractmethod @abstractmethod
def remove_volume_file(self, filename): def remove_volume_file(self, filename):
""" Removes the config override volume file with the given filename. """ """
Removes the config override volume file with the given filename.
"""
@abstractmethod @abstractmethod
def list_volume_directory(self, path): def list_volume_directory(self, path):
""" Returns a list of strings representing the names of the files found in the config override """
directory under the given path. If the path doesn't exist, returns None. Returns a list of strings representing the names of the files found in the config override
directory under the given path.
If the path doesn't exist, returns None.
""" """
@abstractmethod @abstractmethod
def save_volume_file(self, filename, flask_file): def save_volume_file(self, filename, flask_file):
""" Saves the given flask file to the config override volume, with the given """
filename. Saves the given flask file to the config override volume, with the given filename.
""" """
@abstractmethod @abstractmethod
def requires_restart(self, app_config): def requires_restart(self, app_config):
""" If true, the configuration loaded into memory for the app does not match that on disk, """
If true, the configuration loaded into memory for the app does not match that on disk,
indicating that this container requires a restart. indicating that this container requires a restart.
""" """
@abstractmethod @abstractmethod
def get_volume_path(self, directory, filename): def get_volume_path(self, directory, filename):
""" Helper for constructing file paths, which may differ between providers. For example, """
kubernetes can't have subfolders in configmaps """ Helper for constructing file paths, which may differ between providers.
For example, kubernetes can't have subfolders in configmaps
"""


@@ -8,7 +8,9 @@ logger = logging.getLogger(__name__)

def _ensure_parent_dir(filepath):
    """
    Ensures that the parent directory of the given file path exists.
    """
    try:
        parentpath = os.path.abspath(os.path.join(filepath, os.pardir))
        if not os.path.isdir(parentpath):
@@ -18,8 +20,9 @@ def _ensure_parent_dir(filepath):

class FileConfigProvider(BaseFileProvider):
    """
    Implementation of the config provider that reads and writes the data from/to the file system.
    """

    def __init__(self, config_volume, yaml_filename, py_filename):
        super(FileConfigProvider, self).__init__(config_volume, yaml_filename, py_filename)


@@ -8,8 +8,11 @@ REAL_FILES = ["test/data/signing-private.gpg", "test/data/signing-public.gpg", "

class TestConfigProvider(BaseProvider):
    """
    Implementation of the config provider for testing.

    Everything is kept in-memory instead of on the real file system.
    """

    def __init__(self):
        self.clear()


@@ -28,7 +28,8 @@ class K8sApiException(Exception):

def _deployment_rollout_status_message(deployment, deployment_name):
    """
    Gets the friendly human readable message of the current state of the deployment rollout.

    :param deployment: python dict matching: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.11/#deployment-v1-apps
    :param deployment_name: string
    :return: DeploymentRolloutStatus
@@ -88,7 +89,9 @@ def _deployment_rollout_status_message(deployment, deployment_name):

class KubernetesAccessorSingleton(object):
    """
    Singleton allowing access to kubernetes operations.
    """

    _instance = None
@@ -104,6 +107,7 @@ class KubernetesAccessorSingleton(object):
        """
        Singleton getter implementation, returns the instance if one exists, otherwise creates the
        instance and ties it to the class.

        :return: KubernetesAccessorSingleton
        """
        if cls._instance is None:
@@ -114,6 +118,7 @@
    def save_secret_to_directory(self, dir_path):
        """
        Saves all files in the kubernetes secret to a local directory.

        Assumes the directory is empty.
        """
        secret = self._lookup_secret()
@@ -143,7 +148,7 @@ class KubernetesAccessorSingleton(object):
    def replace_qe_secret(self, new_secret_data):
        """
        Removes the old config and replaces it with the new_secret_data as one action.
        """
        # Check first that the namespace for Red Hat Quay exists. If it does not, report that
        # as an error, as it seems to be a common issue.
@@ -183,8 +188,9 @@ class KubernetesAccessorSingleton(object):
        self._assert_success(self._execute_k8s_api("PUT", secret_url, secret))

    def get_deployment_rollout_status(self, deployment_name):
        """
        Returns the status of a rollout of a given deployment.

        :return _DeploymentRolloutStatus
        """
        deployment_selector_url = "namespaces/%s/deployments/%s" % (
@@ -203,8 +209,8 @@ class KubernetesAccessorSingleton(object):
        return _deployment_rollout_status_message(deployment, deployment_name)

    def get_qe_deployments(self):
        """
        Returns all deployments matching the label selector provided in the KubeConfig.
        """
        deployment_selector_url = "namespaces/%s/deployments?labelSelector=%s%%3D%s" % (
            self.kube_config.qe_namespace,
@@ -220,8 +226,8 @@ class KubernetesAccessorSingleton(object):
        return json.loads(response.text)

    def cycle_qe_deployments(self, deployment_names):
        """
        Triggers a rollout of all desired deployments in the qe namespace.
        """
        for name in deployment_names:

View File

@ -5,6 +5,7 @@ from config_app._init_config import CONF_DIR
def logfile_path(jsonfmt=False, debug=False): def logfile_path(jsonfmt=False, debug=False):
""" """
Returns the a logfileconf path following this rules: Returns the a logfileconf path following this rules:
- conf/logging_debug_json.conf # jsonfmt=true, debug=true - conf/logging_debug_json.conf # jsonfmt=true, debug=true
- conf/logging_json.conf # jsonfmt=true, debug=false - conf/logging_json.conf # jsonfmt=true, debug=false
- conf/logging_debug.conf # jsonfmt=false, debug=true - conf/logging_debug.conf # jsonfmt=false, debug=true
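The naming rule above is simple enough to sketch. This is a hedged reconstruction of the flag-to-filename mapping only; the real helper presumably also resolves against CONF_DIR, which the excerpt does not show:

def _logfile_name(jsonfmt=False, debug=False):
    # Build "conf/logging[_debug][_json].conf" per the rules listed above.
    name = "logging"
    if debug:
        name += "_debug"
    if jsonfmt:
        name += "_json"
    return "conf/%s.conf" % name

assert _logfile_name(jsonfmt=True, debug=True) == "conf/logging_debug_json.conf"
assert _logfile_name(jsonfmt=True, debug=False) == "conf/logging_json.conf"
assert _logfile_name(jsonfmt=False, debug=True) == "conf/logging_debug.conf"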


@@ -4,20 +4,25 @@ import OpenSSL

class CertInvalidException(Exception):
    """
    Exception raised when a certificate could not be parsed/loaded.
    """

    pass


class KeyInvalidException(Exception):
    """
    Exception raised when a key could not be parsed/loaded or successfully applied to a cert.
    """

    pass


def load_certificate(cert_contents):
    """
    Loads the certificate from the given contents and returns it or raises a CertInvalidException
    on failure.
    """
    try:
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_contents)
@@ -30,13 +35,17 @@ _SUBJECT_ALT_NAME = "subjectAltName"

class SSLCertificate(object):
    """
    Helper class for easier working with SSL certificates.
    """

    def __init__(self, openssl_cert):
        self.openssl_cert = openssl_cert

    def validate_private_key(self, private_key_path):
        """
        Validates that the private key found at the given file path applies to this certificate.

        Raises a KeyInvalidException on failure.
        """
        context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
@@ -49,7 +58,9 @@ class SSLCertificate(object):
        raise KeyInvalidException(ex.message[0][2])

    def matches_name(self, check_name):
        """
        Returns true if this SSL certificate matches the given DNS hostname.
        """
        for dns_name in self.names:
            if fnmatch(check_name, dns_name):
                return True
@@ -58,17 +69,25 @@ class SSLCertificate(object):
    @property
    def expired(self):
        """
        Returns whether the SSL certificate has expired.
        """
        return self.openssl_cert.has_expired()

    @property
    def common_name(self):
        """
        Returns the defined common name for the certificate, if any.
        """
        return self.openssl_cert.get_subject().commonName

    @property
    def names(self):
        """
        Returns all the DNS names to which the certificate applies.

        May be empty.
        """
        dns_names = set()
        common_name = self.common_name
        if common_name is not None:
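A hedged usage sketch of the helper class above; `cert_pem` is a placeholder for PEM-encoded certificate contents and is not a value from the original code:

import OpenSSL

openssl_cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_pem)
cert = SSLCertificate(openssl_cert)
# matches_name() fnmatch-es the hostname against the certificate's DNS names.
if not cert.expired and cert.matches_name("registry.example.com"):
    pass  # the certificate is current and covers this hostname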


@@ -3,7 +3,7 @@ from util.config.validator import EXTRA_CA_DIRECTORY

def strip_absolute_path_and_add_trailing_dir(path):
    """
    Removes the leading / from the prefix path and adds a trailing /.
    """
    return path[1:] + "/"
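Concretely, for the implementation above (the example path is illustrative and assumes a leading slash on the input):

assert strip_absolute_path_and_add_trailing_dir("/etc/extra_ca_certs") == "etc/extra_ca_certs/"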


@@ -14,13 +14,17 @@ def _ensure_sha256_header(digest):

def get_blob(digest, models_ref):
    """
    Find a blob by its digest.
    """
    Blob = models_ref.Blob
    return Blob.select().where(Blob.digest == _ensure_sha256_header(digest)).get()


def get_or_create_blob(digest, size, media_type_name, locations, models_ref):
    """
    Try to find a blob by its digest or create it.
    """
    Blob = models_ref.Blob
    BlobPlacement = models_ref.BlobPlacement
@@ -48,7 +52,9 @@ def get_or_create_blob(digest, size, media_type_name, locations, models_ref):

def get_blob_locations(digest, models_ref):
    """
    Find all location names for a blob.
    """
    Blob = models_ref.Blob
    BlobPlacement = models_ref.BlobPlacement
    BlobPlacementLocation = models_ref.BlobPlacementLocation


@@ -2,7 +2,9 @@ from data.appr_model import tag as tag_model

def get_channel_releases(repo, channel, models_ref):
    """
    Return all previously linked tags.

    This works based upon Tag lifetimes.
    """
    Channel = models_ref.Channel
@@ -24,13 +26,17 @@ def get_channel_releases(repo, channel, models_ref):

def get_channel(repo, channel_name, models_ref):
    """
    Find a Channel by name.
    """
    channel = tag_model.get_tag(repo, channel_name, models_ref, "channel")
    return channel


def get_tag_channels(repo, tag_name, models_ref, active=True):
    """
    Find the Channels associated with a Tag.
    """
    Tag = models_ref.Tag
    tag = tag_model.get_tag(repo, tag_name, models_ref, "release")
@@ -43,12 +49,16 @@ def get_tag_channels(repo, tag_name, models_ref, active=True):

def delete_channel(repo, channel_name, models_ref):
    """
    Delete a channel by name.
    """
    return tag_model.delete_tag(repo, channel_name, models_ref, "channel")


def create_or_update_channel(repo, channel_name, tag_name, models_ref):
    """
    Creates or updates a channel to include a particular tag.
    """
    tag = tag_model.get_tag(repo, tag_name, models_ref, "release")
    return tag_model.create_or_update_tag(
        repo, channel_name, models_ref, linked_tag=tag, tag_kind="channel"
@@ -56,7 +66,9 @@ def create_or_update_channel(repo, channel_name, tag_name, models_ref):

def get_repo_channels(repo, models_ref):
    """
    Returns all channels defined on a repository.
    """
    Channel = models_ref.Channel
    Tag = models_ref.Tag


@@ -53,7 +53,9 @@ def get_or_create_manifest(manifest_json, media_type_name, models_ref):

def get_manifest_types(repo, models_ref, release=None):
    """
    Returns an array of MediaTypes.name for a repo, optionally filtered by tag.
    """
    Tag = models_ref.Tag
    ManifestListManifest = models_ref.ManifestListManifest


@@ -45,8 +45,10 @@ def get_or_create_manifest_list(manifest_list_json, media_type_name, schema_vers

def create_manifestlistmanifest(manifestlist, manifest_ids, manifest_list_json, models_ref):
    """
    From a manifestlist, manifests, and the manifest list blob, create if it doesn't exist the
    manifestlistmanifest for each manifest.
    """
    for pos in xrange(len(manifest_ids)):
        manifest_id = manifest_ids[pos]
        manifest_json = manifest_list_json[pos]


@@ -10,7 +10,9 @@ from data.appr_model import tag as tag_model

def list_packages_query(
    models_ref, namespace=None, media_type=None, search_query=None, username=None, limit=50,
):
    """
    List and filter repositories by search query.
    """
    Tag = models_ref.Tag

    if username and not search_query:


@@ -23,7 +23,9 @@ def _ensure_sha256_header(digest):

def get_app_release(repo, tag_name, media_type, models_ref):
    """
    Returns (tag, manifest, blob) given a repo object, tag_name, and media_type.
    """
    ManifestListManifest = models_ref.ManifestListManifest
    Manifest = models_ref.Manifest
    Blob = models_ref.Blob
@@ -88,11 +90,12 @@ def delete_app_release(repo, tag_name, media_type, models_ref):

def create_app_release(repo, tag_name, manifest_data, digest, models_ref, force=False):
    """
    Create a new application release; this includes creating a new Tag, ManifestList,
    ManifestListManifests, Manifest, and ManifestBlob.

    To deduplicate the ManifestList, the manifestlist_json is kept ordered by the manifest.id. To
    find the insert point in the ManifestList it uses bisect on the manifest-ids list.
    """
    ManifestList = models_ref.ManifestList
    ManifestListManifest = models_ref.ManifestListManifest
@@ -160,7 +163,9 @@ def create_app_release(repo, tag_name, manifest_data, digest, models_ref, force=

def get_release_objs(repo, models_ref, media_type=None):
    """
    Returns an array of Tag for a repo, with optional filtering by media_type.
    """
    Tag = models_ref.Tag

    release_query = Tag.select().where(
@@ -173,5 +178,7 @@ def get_release_objs(repo, models_ref, media_type=None):

def get_releases(repo, model_refs, media_type=None):
    """
    Returns an array of Tag.name for a repo, optionally filtered by media_type.
    """
    return [t.name for t in get_release_objs(repo, model_refs, media_type)]


@@ -114,7 +114,9 @@ def tag_exists(repo, tag_name, models_ref, tag_kind="release"):

def filter_tags_by_media_type(tag_query, media_type, models_ref):
    """
    Return only the available tags for a media_type.
    """
    ManifestListManifest = models_ref.ManifestListManifest
    Tag = models_ref.Tag
    media_type = manifest_media_type(media_type)


@@ -325,7 +325,9 @@ PLANS = [

def get_plan(plan_id):
    """
    Returns the plan with the given ID or None if none.
    """
    for plan in PLANS:
        if plan["stripeId"] == plan_id:
            return plan
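A small usage sketch; the plan ID below is illustrative rather than a real Stripe ID from PLANS:

plan = get_plan("free")
if plan is not None:
    print(plan["stripeId"])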


@@ -44,8 +44,8 @@ class RedisBuildLogs(object):
    def append_log_entry(self, build_id, log_obj):
        """
        Appends the serialized form of log_obj to the end of the log entry list and returns the new
        length of the list.
        """
        pipeline = self._redis.pipeline(transaction=False)
        pipeline.expire(self._logs_key(build_id), SEVEN_DAYS)
@@ -55,8 +55,8 @@ class RedisBuildLogs(object):
    def append_log_message(self, build_id, log_message, log_type=None, log_data=None):
        """
        Wraps the message in an envelope and pushes it to the end of the log entry list, returning
        the index at which it was inserted.
        """
        log_obj = {"message": log_message}
@@ -70,8 +70,8 @@ class RedisBuildLogs(object):
    def get_log_entries(self, build_id, start_index):
        """
        Returns a tuple of the current length of the list and an iterable of the requested log
        entries.
        """
        try:
            llen = self._redis.llen(self._logs_key(build_id))
@@ -94,7 +94,7 @@ class RedisBuildLogs(object):
    def delete_log_entries(self, build_id):
        """
        Deletes the log entries for the given build.
        """
        self._redis.delete(self._logs_key(build_id))
@@ -104,8 +104,7 @@ class RedisBuildLogs(object):
    def set_status(self, build_id, status_obj):
        """
        Sets the status key for this build to the JSON serialized form of the supplied obj.
        """
        self._redis.set(self._status_key(build_id), json.dumps(status_obj), ex=SEVEN_DAYS)
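A hedged usage sketch of the API above, assuming `logs` is an already-constructed RedisBuildLogs instance and `build_id` names an existing build; the status dict shape is an assumption:

index = logs.append_log_message(build_id, "Step 1/5 : FROM ubuntu")
length, entries = logs.get_log_entries(build_id, index)
for entry in entries:
    print(entry)
logs.set_status(build_id, {"phase": "complete"})  # illustrative status object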


@@ -2,7 +2,9 @@ from data.cache.impl import NoopDataModelCache, InMemoryDataModelCache, Memcache

def get_model_cache(config):
    """
    Returns a data model cache matching the given configuration.
    """
    cache_config = config.get("DATA_MODEL_CACHE_CONFIG", {})
    engine = cache_config.get("engine", "noop")
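Engine selection is driven by the `DATA_MODEL_CACHE_CONFIG` block read above; a sketch (only the `engine` key is visible in the excerpt, so the memcached `endpoint` key is an assumption):

config = {"DATA_MODEL_CACHE_CONFIG": {"engine": "memcached", "endpoint": "127.0.0.1"}}
model_cache = get_model_cache(config)  # with no config at all, the noop engine is used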


@@ -2,29 +2,39 @@ from collections import namedtuple

class CacheKey(namedtuple("CacheKey", ["key", "expiration"])):
    """
    Defines a key into the data model cache.
    """

    pass


def for_repository_blob(namespace_name, repo_name, digest, version):
    """
    Returns a cache key for a blob in a repository.
    """
    return CacheKey("repo_blob__%s_%s_%s_%s" % (namespace_name, repo_name, digest, version), "60s")


def for_catalog_page(auth_context_key, start_id, limit):
    """
    Returns a cache key for a single page of a catalog lookup for an authed context.
    """
    params = (auth_context_key or "(anon)", start_id or 0, limit or 0)
    return CacheKey("catalog_page__%s_%s_%s" % params, "60s")


def for_namespace_geo_restrictions(namespace_name):
    """
    Returns a cache key for the geo restrictions for a namespace.
    """
    return CacheKey("geo_restrictions__%s" % (namespace_name), "240s")


def for_active_repo_tags(repository_id, start_pagination_id, limit):
    """
    Returns a cache key for the active tags in a repository.
    """
    return CacheKey(
        "repo_active_tags__%s_%s_%s" % (repository_id, start_pagination_id, limit), "120s"
    )
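Each helper above simply formats a namespaced string key and pairs it with a TTL; for example:

key = for_repository_blob("devtable", "simple", "sha256:abcd", 2)
assert key.key == "repo_blob__devtable_simple_sha256:abcd_2"
assert key.expiration == "60s"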

data/cache/impl.py

@@ -20,25 +20,33 @@ def is_not_none(value):

@add_metaclass(ABCMeta)
class DataModelCache(object):
    """
    Defines an interface for cache storing and returning tuple data model objects.
    """

    @abstractmethod
    def retrieve(self, cache_key, loader, should_cache=is_not_none):
        """
        Checks the cache for the specified cache key and returns the value found (if any).

        If none found, the loader is called to get a result and populate the cache.
        """
        pass


class NoopDataModelCache(DataModelCache):
    """
    Implementation of the data model cache which does nothing.
    """

    def retrieve(self, cache_key, loader, should_cache=is_not_none):
        return loader()
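The retrieve() contract above is the classic cache-aside pattern; a hedged sketch, where the loader stands in for a real database query and `some_cache_key` is a hypothetical CacheKey:

def load_from_database():
    return {"id": 42}  # stand-in for an actual lookup

cache = NoopDataModelCache()
value = cache.retrieve(some_cache_key, load_from_database)  # noop: always calls the loader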
class InMemoryDataModelCache(DataModelCache):
    """
    Implementation of the data model cache backed by an in-memory dictionary.
    """

    def __init__(self):
        self.cache = ExpiresDict()
@@ -83,7 +91,9 @@ _JSON_TYPE = 2

class MemcachedModelCache(DataModelCache):
    """
    Implementation of the data model cache backed by a memcached.
    """

    def __init__(
        self,


@@ -95,7 +95,9 @@ _EXTRA_ARGS = {

def pipes_concat(arg1, arg2, *extra_args):
    """
    Concat function for sqlite, since it doesn't support fn.Concat.

    Concatenates clauses with || characters.
    """
    reduced = arg1.concat(arg2)
@@ -105,8 +107,9 @@ def pipes_concat(arg1, arg2, *extra_args):

def function_concat(arg1, arg2, *extra_args):
    """
    Default implementation of concat which uses fn.Concat().

    Used by all database engines except sqlite.
    """
    return fn.Concat(arg1, arg2, *extra_args)
@@ -125,7 +129,8 @@ def null_for_update(query):

def delete_instance_filtered(instance, model_class, delete_nullable, skip_transitive_deletes):
    """
    Deletes the DB instance recursively, skipping any models in the skip_transitive_deletes set.

    Callers *must* ensure that any models listed in the skip_transitive_deletes must be capable
    of being directly deleted when the instance is deleted (with automatic sorting handling
@@ -206,7 +211,8 @@ class RetryOperationalError(object):

class CloseForLongOperation(object):
    """
    Helper object which disconnects the database then reconnects after the nested operation
    completes.
    """
@@ -225,7 +231,9 @@ class CloseForLongOperation(object):
class UseThenDisconnect(object):
    """
    Helper object for conducting work with a database and then tearing it down.
    """

    def __init__(self, config_object):
        self.config_object = config_object
@@ -241,8 +249,9 @@ class UseThenDisconnect(object):

class TupleSelector(object):
    """
    Helper class for selecting tuples from a peewee query and easily accessing them as if they were
    objects.
    """

    class _TupleWrapper(object):
@@ -255,7 +264,9 @@ class TupleSelector(object):

    @classmethod
    def tuple_reference_key(cls, field):
        """
        Returns a string key for referencing a field in a TupleSelector.
        """
        if isinstance(field, Function):
            return field.name + ",".join([cls.tuple_reference_key(arg) for arg in field.arguments])
@@ -288,8 +299,11 @@ ensure_under_transaction = CallableProxy()

def validate_database_url(url, db_kwargs, connect_timeout=5):
    """
    Validates that we can connect to the given database URL, with the given kwargs.

    Raises an exception if the validation fails.
    """
    db_kwargs = db_kwargs.copy()

    try:
@@ -305,8 +319,11 @@ def validate_database_url(url, db_kwargs, connect_timeout=5):

def validate_database_precondition(url, db_kwargs, connect_timeout=5):
    """
    Validates that we can connect to the given database URL and the database meets our
    precondition.

    Raises an exception if the validation fails.
    """
    db_kwargs = db_kwargs.copy()
    try:
        driver = _db_from_url(
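Both validators are called purely for their side effect of raising on failure; a hedged sketch with an illustrative database URL:

validate_database_url("postgresql://quay:password@localhost/quay", {}, connect_timeout=3)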
@@ -473,19 +490,23 @@ def _get_enum_field_values(enum_field):

class EnumField(ForeignKeyField):
    """
    Create a cached python Enum from an EnumTable.
    """

    def __init__(self, model, enum_key_field="name", *args, **kwargs):
        """
        model is the EnumTable model-class (see ForeignKeyField); enum_key_field is the field from
        the EnumTable to use as the enum name.
        """
        self.enum_key_field = enum_key_field
        super(EnumField, self).__init__(model, *args, **kwargs)

    @property
    def enum(self):
        """
        Returns a python enum.Enum generated from the associated EnumTable.
        """
        return _get_enum_field_values(self)

    def get_id(self, name):
@@ -512,9 +533,11 @@ class EnumField(ForeignKeyField):

def deprecated_field(field, flag):
    """
    Marks a field as deprecated and removes it from the peewee model if the flag is not set.

    A flag is defined in the active_migration module and will be associated with one or more
    migration phases.
    """
    if ActiveDataMigration.has_flag(flag):
        return field
@@ -529,8 +552,9 @@ class BaseModel(ReadReplicaSupportedModel):
    read_only_config = read_only_config

    def __getattribute__(self, name):
        """
        Adds _id accessors so that foreign key field IDs can be looked up without making a database
        roundtrip.
        """
        if name.endswith("_id"):
            field_name = name[0 : len(name) - 3]
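The `_id` accessor described above lets callers read a foreign key's raw ID without fetching the referenced row; a hedged sketch using the Manifest model defined later in this file:

manifest = Manifest.get()                # illustrative: fetch some manifest row
repository_id = manifest.repository_id   # resolved locally, no extra database roundtrip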
@@ -763,6 +787,7 @@ class RepositoryKind(BaseModel):

class RepositoryState(IntEnum):
    """
    Possible states of a repository.

    NORMAL: Regular repo where all actions are possible
    READ_ONLY: Only read actions, such as pull, are allowed regardless of specific user permissions
    MIRROR: Equivalent to READ_ONLY except that mirror robot has write permission
@@ -1038,8 +1063,8 @@ class Image(BaseModel):
    )

    def ancestor_id_list(self):
        """
        Returns an integer list of ancestor ids, ordered chronologically from root to direct parent.
        """
        return map(int, self.ancestors.split("/")[1:-1])
@@ -1078,7 +1103,9 @@ class RepositoryTag(BaseModel):
class BUILD_PHASE(object):
    """
    Build phases enum.
    """

    ERROR = "error"
    INTERNAL_ERROR = "internalerror"
@@ -1102,7 +1129,9 @@ class BUILD_PHASE(object):

class TRIGGER_DISABLE_REASON(object):
    """
    Build trigger disable reason enum.
    """

    BUILD_FALURES = "successive_build_failures"
    INTERNAL_ERRORS = "successive_build_internal_errors"
@@ -1195,7 +1224,11 @@ class LogEntry(BaseModel):

class LogEntry2(BaseModel):
    """
    TEMP FOR QUAY.IO ONLY.

    DO NOT RELEASE INTO QUAY ENTERPRISE.
    """

    kind = ForeignKeyField(LogEntryKind)
    account = IntegerField(index=True, db_column="account_id")
@@ -1423,7 +1456,8 @@ class ServiceKey(BaseModel):

class MediaType(BaseModel):
    """
    MediaType is an enumeration of the possible formats of various objects in the data model.
    """

    name = CharField(index=True, unique=True)
@@ -1437,7 +1471,8 @@ class Messages(BaseModel):

class LabelSourceType(BaseModel):
    """
    LabelSourceType is an enumeration of the possible sources for a label.
    """

    name = CharField(index=True, unique=True)
@@ -1445,7 +1480,8 @@ class LabelSourceType(BaseModel):

class Label(BaseModel):
    """
    Label represents user-facing metadata associated with another entry in the database (e.g. a
    Manifest).
    """
@@ -1457,7 +1493,8 @@ class Label(BaseModel):

class ApprBlob(BaseModel):
    """
    ApprBlob represents a content-addressable object stored outside of the database.
    """

    digest = CharField(index=True, unique=True)
@@ -1467,14 +1504,16 @@ class ApprBlob(BaseModel):

class ApprBlobPlacementLocation(BaseModel):
    """
    ApprBlobPlacementLocation is an enumeration of the possible storage locations for ApprBlobs.
    """

    name = CharField(index=True, unique=True)


class ApprBlobPlacement(BaseModel):
    """
    ApprBlobPlacement represents the location of a Blob.
    """

    blob = ForeignKeyField(ApprBlob)
@@ -1487,7 +1526,8 @@ class ApprBlobPlacement(BaseModel):

class ApprManifest(BaseModel):
    """
    ApprManifest represents the metadata and collection of blobs that comprise an Appr image.
    """

    digest = CharField(index=True, unique=True)
@@ -1496,7 +1536,8 @@ class ApprManifest(BaseModel):

class ApprManifestBlob(BaseModel):
    """
    ApprManifestBlob is a many-to-many relation table linking ApprManifests and ApprBlobs.
    """

    manifest = ForeignKeyField(ApprManifest, index=True)
@@ -1509,7 +1550,8 @@ class ApprManifestBlob(BaseModel):

class ApprManifestList(BaseModel):
    """
    ApprManifestList represents all of the various Appr manifests that compose an ApprTag.
    """

    digest = CharField(index=True, unique=True)
@@ -1519,14 +1561,16 @@ class ApprManifestList(BaseModel):

class ApprTagKind(BaseModel):
    """
    ApprTagKind is an enumtable to reference tag kinds.
    """

    name = CharField(index=True, unique=True)


class ApprTag(BaseModel):
    """
    ApprTag represents a user-facing alias for referencing an ApprManifestList.
    """

    name = CharField()
@@ -1555,7 +1599,8 @@ ApprChannel = ApprTag.alias()

class ApprManifestListManifest(BaseModel):
    """
    ApprManifestListManifest is a many-to-many relation table linking ApprManifestLists and
    ApprManifests.
    """
@@ -1573,8 +1618,9 @@ class ApprManifestListManifest(BaseModel):

class AppSpecificAuthToken(BaseModel):
    """
    AppSpecificAuthToken represents a token generated by a user for use with an external application
    where putting the user's credentials, even encrypted, is deemed too risky.
    """

    user = QuayUserField()
@@ -1594,8 +1640,10 @@ class AppSpecificAuthToken(BaseModel):

class Manifest(BaseModel):
    """
    Manifest represents a single manifest under a repository.

    Within a repository, there can only be one manifest with the same digest.
    """

    repository = ForeignKeyField(Repository)
@@ -1613,14 +1661,16 @@ class Manifest(BaseModel):

class TagKind(BaseModel):
    """
    TagKind describes the various kinds of tags that can be found in the registry.
    """

    name = CharField(index=True, unique=True)


class Tag(BaseModel):
    """
    Tag represents a user-facing alias for referencing a Manifest, or an alias to another tag.
    """

    name = CharField()
@@ -1648,9 +1698,11 @@ class Tag(BaseModel):

class ManifestChild(BaseModel):
    """
    ManifestChild represents a relationship between a manifest and its child manifest(s).

    Multiple manifests can share the same children. Note that since Manifests are stored
    per-repository, the repository here is a bit redundant, but we do so to make cleanup easier.
    """

    repository = ForeignKeyField(Repository)
@@ -1669,9 +1721,11 @@ class ManifestChild(BaseModel):

class ManifestLabel(BaseModel):
    """
    ManifestLabel represents a label applied to a Manifest, within a repository.

    Note that since Manifests are stored per-repository, the repository here is a bit redundant,
    but we do so to make cleanup easier.
    """

    repository = ForeignKeyField(Repository, index=True)
@@ -1685,7 +1739,9 @@ class ManifestLabel(BaseModel):

class ManifestBlob(BaseModel):
    """
    ManifestBlob represents a blob that is used by a manifest.
    """

    repository = ForeignKeyField(Repository, index=True)
    manifest = ForeignKeyField(Manifest)
@@ -1698,8 +1754,9 @@ class ManifestBlob(BaseModel):

class ManifestLegacyImage(BaseModel):
    """
    For V1-compatible manifests only, this table maps from the manifest to its associated Docker
    image.
    """

    repository = ForeignKeyField(Repository, index=True)
@@ -1708,7 +1765,9 @@ class ManifestLegacyImage(BaseModel):

class TagManifest(BaseModel):
    """
    TO BE DEPRECATED: The manifest for a tag.
    """

    tag = ForeignKeyField(RepositoryTag, unique=True)
    digest = CharField(index=True)
@@ -1724,7 +1783,8 @@ class TagManifestToManifest(BaseModel):

class TagManifestLabel(BaseModel):
    """
    TO BE DEPRECATED: Mapping from a tag manifest to a label.
    """

    repository = ForeignKeyField(Repository, index=True)
@@ -1763,6 +1823,7 @@ class TagToRepositoryTag(BaseModel):

class RepoMirrorRuleType(IntEnum):
    """
    Types of mirroring rules.

    TAG_GLOB_CSV: Comma separated glob values (eg. "7.6,7.6-1.*")
    """
@@ -1810,8 +1871,8 @@ class RepoMirrorStatus(IntEnum):

class RepoMirrorConfig(BaseModel):
    """
    Represents a repository to be mirrored and any additional configuration required to perform
    the mirroring.
    """

    repository = ForeignKeyField(Repository, index=True, unique=True, backref="mirror")


@@ -9,7 +9,9 @@ from util.security.secret import convert_secret_key

class DecryptionFailureException(Exception):
    """
    Exception raised if a field could not be decrypted.
    """


EncryptionVersion = namedtuple("EncryptionVersion", ["prefix", "encrypt", "decrypt"])
@@ -56,8 +58,9 @@ _RESERVED_FIELD_SPACE = len(_SEPARATOR) + max([len(k) for k in _VERSIONS.keys()]

class FieldEncrypter(object):
    """
    Helper object for defining how fields are encrypted and decrypted between the database and the
    application.
    """

    def __init__(self, secret_key, version="v0"):
@@ -68,7 +71,9 @@ class FieldEncrypter(object):
        self._encryption_version = _VERSIONS[version]

    def encrypt_value(self, value, field_max_length=None):
        """
        Encrypts the value using the current version of encryption.
        """
        assert self._secret_key is not None
        encrypted_value = self._encryption_version.encrypt(
            self._secret_key, value, field_max_length
@@ -76,8 +81,10 @@ class FieldEncrypter(object):
        return "%s%s%s" % (self._encryption_version.prefix, _SEPARATOR, encrypted_value)

    def decrypt_value(self, value):
        """
        Decrypts the value, returning it.

        If the value cannot be decrypted, raises a DecryptionFailureException.
        """
        assert self._secret_key is not None
        if _SEPARATOR not in value:
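A round-trip sketch of the encrypter above; the secret key is illustrative:

encrypter = FieldEncrypter("some-secret-key")
token = encrypter.encrypt_value("my-password")  # version prefix + separator + ciphertext
assert encrypter.decrypt_value(token) == "my-password"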


@@ -78,7 +78,9 @@ class Base64BinaryField(TextField):

class DecryptedValue(object):
    """
    Wrapper around an already decrypted value to be placed into an encrypted field.
    """

    def __init__(self, decrypted_value):
        assert decrypted_value is not None
@@ -89,24 +91,34 @@ class DecryptedValue(object):
        return self.value

    def matches(self, unencrypted_value):
        """
        Returns whether the value of this field matches the unencrypted_value.
        """
        return self.decrypt() == unencrypted_value


class LazyEncryptedValue(object):
    """
    Wrapper around an encrypted value in an encrypted field.

    Will decrypt lazily.
    """

    def __init__(self, encrypted_value, field):
        self.encrypted_value = encrypted_value
        self._field = field

    def decrypt(self, encrypter=None):
        """
        Decrypts the value.
        """
        encrypter = encrypter or self._field.model._meta.encrypter
        return encrypter.decrypt_value(self.encrypted_value)

    def matches(self, unencrypted_value):
        """
        Returns whether the value of this field matches the unencrypted_value.
        """
        return self.decrypt() == unencrypted_value

    def __eq__(self, _):
@@ -132,7 +144,9 @@ class LazyEncryptedValue(object):

def _add_encryption(field_class, requires_length_check=True):
    """
    Adds support for encryption and decryption to the given field class.
    """

    class indexed_class(field_class):
        def __init__(self, default_token_length=None, *args, **kwargs):
@@ -202,11 +216,15 @@ class EnumField(SmallIntegerField):
        self.enum_type = enum_type

    def db_value(self, value):
        """
        Convert the python value for storage in the database.
        """
        return int(value.value)

    def python_value(self, value):
        """
        Convert the database value to a pythonic value.
        """
        return self.enum_type(value) if value is not None else None

    def clone_base(self, **kwargs):
@@ -214,7 +232,9 @@ class EnumField(SmallIntegerField):

def _add_fulltext(field_class):
    """
    Adds support for full text indexing and lookup to the given field class.
    """

    class indexed_class(field_class):
        # Marker used by SQLAlchemy translation layer to add the proper index for full text searching.
@@ -256,31 +276,41 @@ FullIndexedTextField = _add_fulltext(TextField)

class Credential(object):
    """
    Credential represents a hashed credential.
    """

    def __init__(self, hashed):
        self.hashed = hashed

    def matches(self, value):
        """
        Returns true if this credential matches the unhashed value given.
        """
        return bcrypt.hashpw(value.encode("utf-8"), self.hashed) == self.hashed

    @classmethod
    def from_string(cls, string_value):
        """
        Returns a Credential object from an unhashed string value.
        """
        return Credential(bcrypt.hashpw(string_value.encode("utf-8"), bcrypt.gensalt()))

    @classmethod
    def generate(cls, length=20):
        """
        Generates a new credential and returns it, along with its unhashed form.
        """
        token = random_string(length)
        return Credential.from_string(token), token
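A usage sketch of the credential helpers above: hash once, verify later, and never store the plaintext:

cred, token = Credential.generate(length=20)
assert cred.matches(token)                    # the unhashed token verifies

stored = Credential.from_string("hunter2")
assert stored.matches("hunter2")
assert not stored.matches("wrong-password")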
class CredentialField(CharField):
    """
    A character field that stores cryptographically hashed credentials that should never be
    available to the user in plaintext after initial creation.

    This field automatically provides verification.
    """

    def __init__(self, *args, **kwargs):


@@ -9,13 +9,16 @@ logger = logging.getLogger(__name__)

def _merge_aggregated_log_counts(*args):
    """
    Merge the given lists of AggregatedLogCount based on the value of their kind_id and datetime.
    """
    matching_keys = {}

    aggregated_log_counts_list = itertools.chain.from_iterable(args)

    def canonical_key_from_kind_date_tuple(kind_id, dt):
        """
        Return a comma separated key from an AggregatedLogCount's kind_id and datetime.
        """
        return str(kind_id) + "," + str(dt)

    for kind_id, count, dt in aggregated_log_counts_list:


@ -11,7 +11,9 @@ from util.morecollections import AttrDict
def _format_date(date): def _format_date(date):
""" Output an RFC822 date format. """ """
Output an RFC822 date format.
"""
if date is None: if date is None:
return None return None
@ -24,9 +26,11 @@ def _kinds():
class LogEntriesPage(namedtuple("LogEntriesPage", ["logs", "next_page_token"])):
    """
    Represents a page returned by the lookup_logs call.

    `logs` contains the logs found for the page and `next_page_token`, if not None, contains the
    token to be encoded and returned for the follow-up call.
    """
@ -48,7 +52,9 @@ class Log(
        ],
    )
):
    """
    Represents a single log entry returned by the logs model.
    """

    @classmethod
    def for_logentry(cls, log):
@ -181,7 +187,9 @@ class Log(
class AggregatedLogCount(namedtuple("AggregatedLogCount", ["kind_id", "count", "datetime"])):
    """
    Represents the aggregated count of the number of logs, of a particular kind, on a day.
    """

    def to_dict(self):
        view = {
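
The `_format_date` helper earlier in this file promises RFC 822 output; the stdlib can produce that format directly. A minimal equivalent, not necessarily the helper quay actually uses:

from datetime import datetime
from email.utils import format_datetime


def format_date(date):
    # Mirrors the _format_date contract: None passes through, datetimes
    # come back as RFC 822 strings.
    if date is None:
        return None
    return format_datetime(date)


print(format_date(datetime(2020, 2, 5, 19, 55, 7)))
# -> "Wed, 05 Feb 2020 19:55:07 -0000"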


@ -50,10 +50,11 @@ COUNT_REPOSITORY_ACTION_TIMEOUT = 30
def _date_range_descending(start_datetime, end_datetime, includes_end_datetime=False):
    """
    Generate the dates between `end_datetime` and `start_datetime`.

    If `includes_end_datetime` is set, the generator starts at `end_datetime`, otherwise, starts
    the generator at `end_datetime` minus 1 second.
    """
    assert end_datetime >= start_datetime

    start_date = start_datetime.date()
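
A runnable sketch of the generator that docstring describes, yielding dates newest-first with the one-second step back when the end is exclusive; any detail beyond the docstring is an assumption:

from datetime import date, datetime, timedelta


def date_range_descending(start_datetime, end_datetime, includes_end_datetime=False):
    # Walk backwards one day at a time, newest first.
    assert end_datetime >= start_datetime
    start_date = start_datetime.date()
    if includes_end_datetime:
        current = end_datetime.date()
    else:
        current = (end_datetime - timedelta(seconds=1)).date()
    while current >= start_date:
        yield current
        current = current - timedelta(days=1)


days = list(date_range_descending(datetime(2020, 2, 1), datetime(2020, 2, 4)))
assert days == [date(2020, 2, 3), date(2020, 2, 2), date(2020, 2, 1)]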
@ -69,8 +70,9 @@ def _date_range_descending(start_datetime, end_datetime, includes_end_datetime=F
def _date_range_in_single_index(dt1, dt2):
    """
    Determine whether a single index can be searched given a range of dates or datetimes.

    If date instances are given, the difference should be 1 day.
    NOTE: dt2 is exclusive to the search result set.
    i.e. the date range is greater than or equal to dt1 and strictly smaller than dt2
    """
@ -106,7 +108,9 @@ def _for_elasticsearch_logs(logs, repository_id=None, namespace_id=None):
def _random_id():
    """
    Generates a unique uuid4 string for the random_id field in LogEntry.

    It is used as a tie-breaker for sorting logs based on datetime:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-search-after.html
    """
@ -117,22 +121,26 @@ def _random_id():
class ElasticsearchLogsModelInterface(object):
    """
    Interface for Elasticsearch-specific operations with the logs model.

    These operations are usually index based.
    """
    @abstractmethod
    def can_delete_index(self, index, cutoff_date):
        """
        Return whether the given index is older than the given cutoff date.
        """

    @abstractmethod
    def list_indices(self):
        """
        List the logs model's indices.
        """

class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsModelInterface):
    """
    DocumentLogsModel implements the data model for the logs API backed by an Elasticsearch
    service.
    """

    def __init__(
@ -155,9 +163,10 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM
    @staticmethod
    def _get_ids_by_names(repository_name, namespace_name, performer_name):
        """
        Retrieve repository/namespace/performer ids based on their names.

        Throws DataModelException when the namespace_name does not match any user in the database.
        Returns the database ID, or None if it does not exist.
        """
        repository_id = None
@ -269,7 +278,8 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM
        return search.execute()

    def _load_latest_logs(self, performer_id, repository_id, account_id, filter_kinds, size):
        """
        Return the latest logs from Elasticsearch.

        Look at indices up to the set logrotateworker threshold, or up to 30 days if not defined.
        """
@ -563,7 +573,9 @@ class DocumentLogsModel(SharedModel, ActionLogsDataInterface, ElasticsearchLogsM
        return self._es_client.list_indices()

    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """
        Yield a context manager for a group of outdated logs.
        """
        all_indices = self.list_indices()
        for index in all_indices:
            if not self.can_delete_index(index, cutoff_date):
@ -577,8 +589,7 @@ class ElasticsearchLogRotationContext(LogRotationContextInterface):
    """
    ElasticsearchLogRotationContext yields batches of logs from an index.

    When completed without exceptions, this context will delete its associated Elasticsearch index.
    """

    def __init__(self, index, min_logs_per_rotation, es_client):
@ -623,7 +634,9 @@ class ElasticsearchLogRotationContext(LogRotationContextInterface):
        return search

    def _generate_filename(self):
        """
        Generate the filenames used to archive the action logs.
        """
        filename = "%s_%d-%d" % (self.index, self.start_pos, self.end_pos)
        filename = ".".join((filename, "txt.gz"))
        return filename


@ -125,8 +125,7 @@ class ElasticsearchLogs(object):
    def _initialize(self):
        """
        Initialize a connection to an ES cluster and create an index template if it does not
        exist.
        """
        if not self._initialized:
            http_auth = None
@ -183,7 +182,9 @@ class ElasticsearchLogs(object):
        self._initialized = True

    def index_name(self, day):
        """
        Return an index name for the given day.
        """
        return self._index_prefix + day.strftime(INDEX_DATE_FORMAT)

    def index_exists(self, index):
@ -194,13 +195,15 @@ class ElasticsearchLogs(object):
    @staticmethod
    def _valid_index_prefix(prefix):
        """
        Check that the given index prefix is valid with the set of indices used by this class.
        """
        return re.match(VALID_INDEX_PATTERN, prefix) is not None

    def _valid_index_name(self, index):
        """
        Check that the given index name is valid and follows the format:

        <index_prefix>YYYY-MM-DD
        """
        if not ElasticsearchLogs._valid_index_prefix(index):
@ -218,7 +221,9 @@ class ElasticsearchLogs(object):
        return False

    def can_delete_index(self, index, cutoff_date):
        """
        Check if the given index can be deleted based on the given index's date and cutoff date.
        """
        assert self._valid_index_name(index)
        index_dt = datetime.strptime(index[len(self._index_prefix) :], INDEX_DATE_FORMAT)
        return index_dt < cutoff_date and cutoff_date - index_dt >= timedelta(days=1)
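
Standalone versions of the two index helpers above, with INDEX_DATE_FORMAT assumed to be the YYYY-MM-DD pattern the _valid_index_name docstring spells out:

from datetime import datetime, timedelta

INDEX_DATE_FORMAT = "%Y-%m-%d"  # assumed; matches the <prefix>YYYY-MM-DD format above


def index_name(prefix, day):
    return prefix + day.strftime(INDEX_DATE_FORMAT)


def can_delete_index(prefix, index, cutoff_date):
    # Deletable only when the index day is strictly before the cutoff
    # and at least one full day older than it.
    index_dt = datetime.strptime(index[len(prefix):], INDEX_DATE_FORMAT)
    return index_dt < cutoff_date and cutoff_date - index_dt >= timedelta(days=1)


name = index_name("logentry_", datetime(2020, 2, 4))
assert name == "logentry_2020-02-04"
assert can_delete_index("logentry_", name, datetime(2020, 2, 6))
assert not can_delete_index("logentry_", name, datetime(2020, 2, 4, 12, 0))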
@ -261,6 +266,7 @@ def configure_es(
):
    """
    For options in index_settings, refer to:
    https://www.elastic.co/guide/en/elasticsearch/guide/master/_index_settings.html

    Some index settings are set at index creation time and therefore should NOT be changed once
    the index is set.
    """


@ -26,7 +26,9 @@ StoredLog = namedtuple(
class InMemoryModel(ActionLogsDataInterface):
    """
    InMemoryModel implements the data model for logs in-memory.

    FOR TESTING ONLY.
    """

    def __init__(self):
@ -315,7 +317,9 @@ class InMemoryLogRotationContext(LogRotationContextInterface):
        self.all_logs.remove(log)

    def yield_logs_batch(self):
        """
        Yield a batch of logs and a filename for that batch.
        """
        filename = "inmemory_model_filename_placeholder"
        filename = ".".join((filename, "txt.gz"))
        yield [log_and_repo.stored_log for log_and_repo in self.expired_logs], filename


@ -3,13 +3,17 @@ from six import add_metaclass
class LogsIterationTimeout(Exception):
    """
    Exception raised if logs iteration times out.
    """


@add_metaclass(ABCMeta)
class ActionLogsDataInterface(object):
    """
    Interface for code to work with the logs data model.

    The logs data model consists of all access for reading and writing action logs.
    """

    @abstractmethod
@ -24,10 +28,12 @@ class ActionLogsDataInterface(object):
        page_token=None,
        max_page_count=None,
    ):
        """
        Looks up all logs between the start_datetime and end_datetime, filtered by performer (a
        user), repository or namespace.

        Note that one (and only one) of the three can be specified. Returns a LogEntriesPage.
        `filter_kinds`, if specified, is a set/list of the kinds of logs to filter out.
        """

    @abstractmethod
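
Since lookup_logs returns a LogEntriesPage, a caller drains the API by feeding each page's next_page_token back in until it comes back None. A minimal sketch against any implementation of this interface; the helper name and argument choice are illustrative, not from the codebase:

def fetch_all_logs(model, start_datetime, end_datetime, repository_name):
    # Accumulate every page by passing next_page_token back into lookup_logs.
    logs, token = [], None
    while True:
        page = model.lookup_logs(
            start_datetime, end_datetime, repository_name=repository_name, page_token=token
        )
        logs.extend(page.logs)
        token = page.next_page_token
        if token is None:
            return logs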
@ -39,9 +45,11 @@ class ActionLogsDataInterface(object):
        filter_kinds=None,
        size=20,
    ):
        """
        Looks up latest logs of a specific kind, filtered by performer (a user), repository or
        namespace.

        Note that one (and only one) of the three can be specified. Returns a list of `Log`.
        """

    @abstractmethod
@ -54,14 +62,18 @@ class ActionLogsDataInterface(object):
        namespace_name=None,
        filter_kinds=None,
    ):
        """
        Returns the aggregated count of logs, by kind, between the start_datetime and end_datetime,
        filtered by performer (a user), repository or namespace.

        Note that one (and only one) of the three can be specified. Returns a list of
        AggregatedLogCount.
        """

    @abstractmethod
    def count_repository_actions(self, repository, day):
        """
        Returns the total number of repository actions over the given day in the given repository,
        or None on error.
        """
@ -77,9 +89,11 @@ class ActionLogsDataInterface(object):
        callback_email=None,
        filter_kinds=None,
    ):
        """
        Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address.

        Returns the ID of the export job queued, or None on error.
        """

    @abstractmethod
@ -95,7 +109,9 @@ class ActionLogsDataInterface(object):
        timestamp=None,
        is_free_namespace=False,
    ):
        """
        Logs a single action as having taken place.
        """

    @abstractmethod
    def yield_logs_for_export(
@ -106,7 +122,8 @@ class ActionLogsDataInterface(object):
        namespace_id=None,
        max_query_time=None,
    ):
        """
        Returns an iterator that yields bundles of all logs found between the start_datetime and
        end_datetime, optionally filtered by the repository or namespace. This function should be
        used for any bulk lookup operations, and should be implemented by implementors to put
        minimal strain on the backing storage for large operations. If there was an error in setting
@ -120,9 +137,9 @@ class ActionLogsDataInterface(object):
    @abstractmethod
    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """
        A generator that yields contexts implementing the LogRotationContextInterface. Each context
        represents a set of logs to be archived and deleted once the context completes without
        exceptions.

        For database logs, the LogRotationContext abstracts over a set of rows. When the context
        finishes, its associated rows get deleted.
@ -134,11 +151,14 @@ class ActionLogsDataInterface(object):
@add_metaclass(ABCMeta)
class LogRotationContextInterface(object):
    """
    Interface for iterating over a set of logs to be archived.
    """

    @abstractmethod
    def yield_logs_batch(self):
        """
        Generator yielding a batch of logs and a filename for that batch.

        A batch is a subset of the logs that are part of the context.
        """

View File

@ -5,7 +5,9 @@ logger = logging.getLogger(__name__)
class LogSendException(Exception):
    """
    A generic error raised when sending logs to their destination.

    e.g. Kinesis, Kafka, Elasticsearch, ...
    """


@ -10,7 +10,8 @@ logger = logging.getLogger(__name__)
class ElasticsearchLogsProducer(LogProducerInterface):
    """
    Log producer writing log entries to Elasticsearch.

    This implementation writes directly to Elasticsearch without a streaming/queueing service.
    """


@ -6,4 +6,6 @@ from six import add_metaclass
class LogProducerInterface(object):
    @abstractmethod
    def send(self, logentry):
        """
        Send a log entry to the configured log infrastructure.
        """


@ -15,7 +15,9 @@ DEFAULT_MAX_BLOCK_SECONDS = 5
class KafkaLogsProducer(LogProducerInterface):
    """
    Log producer writing log entries to a Kafka stream.
    """

    def __init__(self, bootstrap_servers=None, topic=None, client_id=None, max_block_seconds=None):
        self.bootstrap_servers = bootstrap_servers


@ -21,9 +21,11 @@ DEFAULT_MAX_POOL_CONNECTIONS = 10
def _partition_key(number_of_shards=None):
    """
    Generate a partition key for AWS Kinesis stream.

    If the number of shards is specified, generate keys where the size of the key space is the
    number of shards.
    """
    key = None
    if number_of_shards is not None:
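
The hunk stops before the key is built, so the following is only a sketch of the documented behavior: with a known shard count, draw from a key space of exactly that size; otherwise fall back to a wide random key space. Hashing the key is an assumption here, not taken from the file:

import random
from hashlib import sha1


def partition_key(number_of_shards=None):
    # Known shard count: a key space of exactly number_of_shards values,
    # spreading records evenly across shards. Unknown: a random key.
    if number_of_shards is not None:
        key = str(random.randrange(0, number_of_shards))
    else:
        key = str(random.getrandbits(256))
    return sha1(key.encode("utf-8")).hexdigest()  # assumed hashing step


print(partition_key(number_of_shards=8))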
@ -36,7 +38,9 @@ def _partition_key(number_of_shards=None):
class KinesisStreamLogsProducer(LogProducerInterface):
    """
    Log producer writing log entries to an Amazon Kinesis Data Stream.
    """

    def __init__(
        self,


@ -3,7 +3,9 @@ from datetime import datetime
class LogEntryJSONEncoder(json.JSONEncoder):
    """
    JSON encoder to encode datetimes to ISO8601 format.
    """

    def default(self, obj):
        if isinstance(obj, datetime):
@ -13,7 +15,9 @@ class LogEntryJSONEncoder(json.JSONEncoder):
def logs_json_serializer(logentry, sort_keys=False):
    """
    Serializes a LogEntry to JSON bytes.
    """
    return json.dumps(
        logentry.to_dict(), cls=LogEntryJSONEncoder, ensure_ascii=True, sort_keys=sort_keys
    ).encode("ascii")
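
End to end, the two pieces above compose like this; the sample payload is made up, and a plain dict stands in for a LogEntry's to_dict() output:

import json
from datetime import datetime


class LogEntryJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        # Datetimes become ISO 8601 strings; everything else defers to json.
        if isinstance(obj, datetime):
            return obj.isoformat()
        return super(LogEntryJSONEncoder, self).default(obj)


payload = {"kind": "push_repo", "datetime": datetime(2020, 2, 5, 19, 55, 7)}
encoded = json.dumps(
    payload, cls=LogEntryJSONEncoder, ensure_ascii=True, sort_keys=True
).encode("ascii")
assert b'"2020-02-05T19:55:07"' in encoded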


@ -18,9 +18,11 @@ class SharedModel:
        callback_email=None,
        filter_kinds=None,
    ):
        """
        Queues logs between the start_datetime and end_time, filtered by a repository or namespace,
        for export to the specified URL and/or email address.

        Returns the ID of the export job queued, or None on error.
        """
        export_id = str(uuid.uuid4())
        namespace = model.user.get_namespace_user(namespace_name)
@ -59,8 +61,11 @@ def epoch_ms(dt):
def get_kinds_filter(kinds):
    """
    Given a list of kinds, return the set of kinds that are not part of that list.

    i.e. Returns the list of kinds to be filtered out.
    """
    kind_map = model.log.get_log_entry_kinds()
    kind_map = {key: kind_map[key] for key in kind_map if not isinstance(key, int)}
    return [kind_name for kind_name in kind_map if kind_name not in kinds]
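
To make the inversion concrete, here is the same filter with the kind map passed in rather than read from the database; the map contents are invented for the example (the real map holds both name-to-id and id-to-name entries, hence the isinstance check):

def get_kinds_filter(kinds, kind_map):
    # Keep only the string keys of the bidirectional map, then invert membership.
    kind_map = {key: kind_map[key] for key in kind_map if not isinstance(key, int)}
    return [kind_name for kind_name in kind_map if kind_name not in kinds]


kind_map = {"push_repo": 1, 1: "push_repo", "pull_repo": 2, 2: "pull_repo"}
assert get_kinds_filter(["push_repo"], kind_map) == ["pull_repo"]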


@ -31,8 +31,8 @@ LOG_MODELS = [LogEntry3, LogEntry2, LogEntry]
class TableLogsModel(SharedModel, ActionLogsDataInterface):
    """
    TableLogsModel implements the data model for the logs API backed by a single table in the
    database.
    """

    def __init__(self, should_skip_logging=None, **kwargs):
@ -325,7 +325,9 @@ class TableLogsModel(SharedModel, ActionLogsDataInterface):
        current_batch_size = timedelta(seconds=seconds)

    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        """
        Yield a context manager for a group of outdated logs.
        """
        for log_model in LOG_MODELS:
            while True:
                with UseThenDisconnect(config.app_config):
@ -362,11 +364,11 @@ table_logs_model = TableLogsModel()
class DatabaseLogRotationContext(LogRotationContextInterface):
    """
    DatabaseLogRotationContext represents a batch of logs to be archived together, i.e. a set of
    logs to be archived in the same file (based on the number of logs per rotation).

    When completed without exceptions, this context will delete the stale logs from rows `start_id`
    to `end_id`.
    """

    def __init__(self, logs, log_model, start_id, end_id):
@ -385,6 +387,8 @@ class DatabaseLogRotationContext(LogRotationContextInterface):
        delete_stale_logs(self.start_id, self.end_id, self.log_model)

    def yield_logs_batch(self):
        """
        Yield a batch of logs and a filename for that batch.
        """
        filename = "%d-%d-%s.txt.gz" % (self.start_id, self.end_id, self.log_model.__name__.lower())
        yield self.logs, filename
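
A minimal sketch of the rotation contract both rotation contexts share: the batch is handed out with an archive filename, and deletion only happens when the with block exits cleanly. The injected delete_stale_logs callable and the class name are purely to keep the sketch self-contained:

class SketchLogRotationContext(object):
    def __init__(self, logs, log_model_name, start_id, end_id, delete_stale_logs):
        self.logs = logs
        self.log_model_name = log_model_name
        self.start_id = start_id
        self.end_id = end_id
        self.delete_stale_logs = delete_stale_logs

    def __enter__(self):
        return self

    def __exit__(self, ex_type, ex_value, ex_traceback):
        # Only a clean exit deletes the rows backing this batch.
        if ex_type is None:
            self.delete_stale_logs(self.start_id, self.end_id)

    def yield_logs_batch(self):
        filename = "%d-%d-%s.txt.gz" % (self.start_id, self.end_id, self.log_model_name.lower())
        yield self.logs, filename


deleted = []
with SketchLogRotationContext(["log-a"], "LogEntry3", 10, 99, lambda s, e: deleted.append((s, e))) as ctx:
    for batch, filename in ctx.yield_logs_batch():
        pass  # archive `batch` under `filename` here
assert deleted == [(10, 99)]
assert filename == "10-99-logentry3.txt.gz"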


@ -610,8 +610,8 @@ def test_date_range_in_single_index(dt1, dt2, expected_result):
def test_pagination(logs_model, mock_page_size):
    """
    Make sure that pagination does not stop if searching through multiple indices by day, and the
    current log count matches the page size while there are still indices to be searched.
    """
    day1 = datetime.now()
    day2 = day1 + timedelta(days=1)


@ -54,9 +54,11 @@ tables = AttrDict(target_metadata.tables)
def get_tester():
    """
    Returns the tester to use.

    We only return the tester that populates data if the TEST_MIGRATE env var is set to `true` AND
    we make sure we're not connecting to a production database.
    """
    if os.environ.get("TEST_MIGRATE", "") == "true":
        url = unquote(DB_URI)
@ -92,7 +94,8 @@ def report_success(ctx=None, step=None, heads=None, run_args=None):
def run_migrations_offline():
    """
    Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
@ -101,7 +104,6 @@ def run_migrations_offline():
    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = unquote(DB_URI)
    context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
@ -111,11 +113,10 @@ def run_migrations_offline():
def run_migrations_online():
    """
    Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a connection with the context.
    """
    if (


@ -9,22 +9,29 @@ from util.abchelpers import nooper
@add_metaclass(ABCMeta)
class ProgressReporter(object):
    """
    Implements an interface for reporting progress with the migrations.
    """

    @abstractmethod
    def report_version_complete(self, success):
        """
        Called when an entire migration is complete.
        """

    @abstractmethod
    def report_step_progress(self):
        """
        Called when a single step in the migration has been completed.
        """


@nooper
class NullReporter(ProgressReporter):
    """
    No-op version of the progress reporter, designed for use when no progress reporting endpoint
    is provided.
    """


class PrometheusReporter(ProgressReporter):


@ -16,7 +16,9 @@ from test.fixtures import *
    ],
)
def test_alembic_db_uri(db_uri, is_valid):
    """
    Test if the given URI is escaped for string interpolation (Python's configparser).
    """
    with patch("alembic.script.ScriptDirectory.run_env") as m:
        if is_valid:
            run_alembic_migration(db_uri)
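
The configparser behavior that test guards against can be shown in a few lines. The URIs below are invented; only the percent-escaping behavior is the point:

from configparser import ConfigParser

parser = ConfigParser()
parser.add_section("alembic")

try:
    # A raw '%' in a value trips configparser's '%'-interpolation syntax.
    parser.set("alembic", "sqlalchemy.url", "postgresql://user:pa%ss@host/db")
except ValueError:
    pass

# Doubling the '%' ('%%') is the configparser escape, so the value round-trips.
parser.set("alembic", "sqlalchemy.url", "postgresql://user:pa%%ss@host/db")
assert parser.get("alembic", "sqlalchemy.url") == "postgresql://user:pa%ss@host/db"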


@ -91,28 +91,37 @@ class DataTypes(object):
@add_metaclass(ABCMeta)
class MigrationTester(object):
    """
    Implements an interface for adding testing capabilities to the data model migration system in
    Alembic.
    """

    TestDataType = DataTypes

    @abstractmethod
    def is_testing(self):
        """
        Returns whether we are currently under a migration test.
        """

    @abstractmethod
    def populate_table(self, table_name, fields):
        """
        Called to populate a table with the given fields filled in with testing data.
        """

    @abstractmethod
    def populate_column(self, table_name, col_name, field_type):
        """
        Called to populate a column in a table to be filled in with testing data.
        """


@nooper
class NoopTester(MigrationTester):
    """
    No-op version of the tester, designed for production workloads.
    """


class PopulateTestDataTester(MigrationTester):


@ -1,9 +1,9 @@
"""
Add creation date to User table.

Revision ID: 0cf50323c78b
Revises: 87fbbc224f10
Create Date: 2018-03-09 13:19:41.903196
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Add Tag, TagKind and ManifestChild tables.

Revision ID: 10f45ee2310b
Revises: 13411de1c0ff
Create Date: 2018-10-29 15:22:53.552216
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Remove unique from TagManifestToManifest.

Revision ID: 13411de1c0ff
Revises: 654e6df88b71
Create Date: 2018-08-19 23:30:24.969549
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Add maximum build queue count setting to user table.

Revision ID: 152bb29a1bb3
Revises: 7367229b38d9
Create Date: 2018-02-20 13:34:34.902415
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Make BlobUpload byte_count not nullable.

Revision ID: 152edccba18c
Revises: c91c564aad34
Create Date: 2018-02-23 12:41:25.571835
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Add automatic disable of build triggers.

Revision ID: 17aff2e1354e
Revises: 61cadbacb9fc
Create Date: 2017-10-18 15:58:03.971526
"""

# revision identifiers, used by Alembic.


@ -1,9 +1,9 @@
"""
Add last_accessed field to User table.

Revision ID: 224ce4c72c2f
Revises: b547bc139ad8
Create Date: 2018-03-12 22:44:07.070490
"""

# revision identifiers, used by Alembic.
