
chore: drop deprecated tables and remove unused code (PROJQUAY-522) (#2089)

* chore: drop deprecated tables and remove unused code

* isort imports

* migration: check for table existence before drop
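
As a sketch of the last bullet: an Alembic migration can guard each drop with an inspector check, so it runs safely against databases where a deprecated table is already gone. This is a minimal illustration assuming SQLAlchemy's inspection API; the table names are examples taken from this commit's diff, not an exhaustive list.

    import sqlalchemy as sa
    from alembic import op


    def upgrade():
        # Inspect the live connection rather than assuming schema state.
        inspector = sa.inspect(op.get_bind())
        for table in ("repositorysize", "image"):  # illustrative names only
            # Drop only tables that actually exist, so re-running the
            # migration against a partially upgraded database is harmless.
            if inspector.has_table(table):
                op.drop_table(table)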
Author: Kenny Lee Sin Cheong
Date: 2023-08-25 12:17:24 -04:00
Committed by: GitHub
parent e72773bbce
commit 5f63b3a7bb
550 changed files with 3480 additions and 3778 deletions
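
Most of those 550 files change only import order; the reordering is consistent with running isort over the tree. A minimal sketch of reproducing one file's change via isort's Python API (the profile shown is an assumption, not necessarily the repository's configuration):

    import isort

    # isort.code() returns the source with imports sorted within sections.
    print(isort.code("import sys\nimport os\n", profile="black"))
    # -> "import os\nimport sys\n"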

@@ -1,4 +1,5 @@
 from enum import Enum, unique
+
 from data.migrationutil import DefinedDataMigration, MigrationPhase

 ActiveDataMigration = None

app.py
@@ -3,67 +3,64 @@ import json
 import logging
 import os
 from functools import partial
 from authlib.jose import JsonWebKey
-from flask import Flask, request, Request
+from flask import Flask, Request, request
 from flask_login import LoginManager
 from flask_mail import Mail
 from flask_principal import Principal
-from werkzeug.middleware.proxy_fix import ProxyFix
 from werkzeug.exceptions import HTTPException
+from werkzeug.middleware.proxy_fix import ProxyFix
 import features
 from _init import (
-    config_provider,
+    IS_BUILDING,
     IS_KUBERNETES,
     IS_TESTING,
     OVERRIDE_CONFIG_DIRECTORY,
-    IS_BUILDING,
+    config_provider,
 )
 from avatars.avatars import Avatar
 from buildman.manager.buildcanceller import BuildCanceller
-from data import database
-from data import model
-from data import logs_model
+from data import database, logs_model, model
 from data.archivedlogs import LogArchive
 from data.billing import Billing
 from data.buildlogs import BuildLogs
 from data.cache import get_model_cache
 from data.model.user import LoginWrappedDBUser
 from data.queue import WorkQueue
+from data.registry_model import registry_model
+from data.secscan_model import secscan_model
 from data.userevent import UserEventsBuilderModule
 from data.userfiles import Userfiles
 from data.users import UserAuthentication, UserManager
-from data.registry_model import registry_model
-from data.secscan_model import secscan_model
 from image.oci import register_artifact_type
+from oauth.loginmanager import OAuthLoginManager
+from oauth.services.github import GithubOAuthService
+from oauth.services.gitlab import GitLabOAuthService
 from path_converters import (
+    APIRepositoryPathConverter,
     RegexConverter,
     RepositoryPathConverter,
-    APIRepositoryPathConverter,
     RepositoryPathRedirectConverter,
     V1CreateRepositoryPathConverter,
 )
-from oauth.services.github import GithubOAuthService
-from oauth.services.gitlab import GitLabOAuthService
-from oauth.loginmanager import OAuthLoginManager
 from storage import Storage
-from util.log import filter_logs
 from util import get_app_url
-from util.ipresolver import IPResolver
-from util.saas.analytics import Analytics
-from util.saas.exceptionlog import Sentry
-from util.names import urn_generator
 from util.config import URLSchemeAndHostname
 from util.config.configutil import generate_secret_key
+from util.greenlet_tracing import enable_tracing
+from util.ipresolver import IPResolver
 from util.label_validator import LabelValidator
+from util.log import filter_logs
 from util.marketplace import RHMarketplaceAPI, RHUserAPI
 from util.metrics.prometheus import PrometheusPlugin
+from util.names import urn_generator
 from util.repomirror.api import RepoMirrorAPI
-from util.tufmetadata.api import TUFMetadataAPI
+from util.saas.analytics import Analytics
+from util.saas.exceptionlog import Sentry
 from util.security.instancekeys import InstanceKeys
-from util.greenlet_tracing import enable_tracing
+from util.tufmetadata.api import TUFMetadataAPI
 OVERRIDE_CONFIG_YAML_FILENAME = os.path.join(OVERRIDE_CONFIG_DIRECTORY, "config.yaml")
 OVERRIDE_CONFIG_PY_FILENAME = os.path.join(OVERRIDE_CONFIG_DIRECTORY, "config.py")

@@ -1,18 +1,16 @@
 import logging
 from abc import ABCMeta, abstractmethod
 from cachetools.func import lru_cache
+from flask_principal import Identity, identity_changed
 from six import add_metaclass
 from app import app
-from data import model
-from flask_principal import Identity, identity_changed
 from auth.auth_context import set_authenticated_context
-from auth.context_entity import ContextEntityKind, CONTEXT_ENTITY_HANDLERS
+from auth.context_entity import CONTEXT_ENTITY_HANDLERS, ContextEntityKind
 from auth.permissions import QuayDeferredPermissionUser
 from auth.scopes import scopes_from_scope_string
+from data import model
 logger = logging.getLogger(__name__)

@@ -1,10 +1,10 @@
 import logging
 from base64 import b64decode
 from flask import request
 from auth.credentials import validate_credentials
-from auth.validateresult import ValidateResult, AuthKind
+from auth.validateresult import AuthKind, ValidateResult
 logger = logging.getLogger(__name__)

@@ -1,14 +1,14 @@
 from abc import ABCMeta, abstractmethod
-from six import add_metaclass
 from enum import Enum
-from data import model
+from six import add_metaclass
 from auth.credential_consts import (
     ACCESS_TOKEN_USERNAME,
-    OAUTH_TOKEN_USERNAME,
     APP_SPECIFIC_TOKEN_USERNAME,
+    OAUTH_TOKEN_USERNAME,
 )
+from data import model
 class ContextEntityKind(Enum):

@@ -1,6 +1,6 @@
 import logging
 from uuid import UUID
 from flask_login import current_user
 from auth.validateresult import AuthKind, ValidateResult

@@ -1,17 +1,15 @@
 import logging
 from enum import Enum
 import features
 from app import authentication
-from auth.oauth import validate_oauth_token
-from auth.validateresult import ValidateResult, AuthKind
 from auth.credential_consts import (
     ACCESS_TOKEN_USERNAME,
-    OAUTH_TOKEN_USERNAME,
     APP_SPECIFIC_TOKEN_USERNAME,
+    OAUTH_TOKEN_USERNAME,
 )
+from auth.oauth import validate_oauth_token
+from auth.validateresult import AuthKind, ValidateResult
 from data import model
 from util.names import parse_robot_username

@@ -1,18 +1,16 @@
 import logging
 from functools import wraps
 from flask import request, session
 from prometheus_client import Counter
 from auth.basic import validate_basic_auth
-from auth.oauth import validate_bearer_auth
 from auth.cookie import validate_session_cookie
+from auth.oauth import validate_bearer_auth
 from auth.signedgrant import validate_signed_grant
 from auth.validateresult import AuthKind
 from util.http import abort
 logger = logging.getLogger(__name__)

@@ -3,16 +3,16 @@ from datetime import datetime
 from jwt import ExpiredSignatureError, InvalidTokenError
-from app import oauth_login, authentication, app, analytics
+from app import analytics, app, authentication, oauth_login
 from auth.scopes import scopes_from_scope_string
 from auth.validateresult import AuthKind, ValidateResult
 from data import model
 from oauth.login import OAuthLoginException
 from oauth.login_utils import (
-    is_jwt,
-    get_sub_username_email_from_token,
     _conduct_oauth_login,
     get_jwt_issuer,
+    get_sub_username_email_from_token,
+    is_jwt,
 )
 from oauth.oidc import PublicKeyLoadException

@@ -1,17 +1,14 @@
 import logging
+from collections import defaultdict, namedtuple
+from functools import partial
 from typing import DefaultDict, Optional
-from collections import namedtuple, defaultdict
-from functools import partial
-from flask_principal import identity_loaded, Permission, Identity, identity_changed
+from flask_principal import Identity, Permission, identity_changed, identity_loaded
 from app import app, usermanager
 from auth import scopes
 from data import model
 logger = logging.getLogger(__name__)

@@ -1,28 +1,26 @@
 import logging
 from functools import wraps
-from jsonschema import validate, ValidationError
 from flask import request, url_for
-from flask_principal import identity_changed, Identity
+from flask_principal import Identity, identity_changed
+from jsonschema import ValidationError, validate
 from app import app, get_app_url, instance_keys
 from auth.auth_context import set_authenticated_context
 from auth.auth_context_type import SignedAuthContext
 from auth.permissions import (
+    repository_admin_grant,
     repository_read_grant,
     repository_write_grant,
-    repository_admin_grant,
 )
 from util.http import abort
 from util.names import parse_namespace_repository
 from util.security.registry_jwt import (
     ANONYMOUS_SUB,
-    decode_bearer_header,
     InvalidBearerTokenException,
+    decode_bearer_header,
 )
 logger = logging.getLogger(__name__)

@@ -1,6 +1,7 @@
-from collections import namedtuple
-import features
 import re
+from collections import namedtuple
+import features
 Scope = namedtuple("Scope", ["scope", "icon", "dangerous", "title", "description"])

@@ -1,6 +1,6 @@
 import logging
-from flask.sessions import SecureCookieSessionInterface, BadSignature
+from flask.sessions import BadSignature, SecureCookieSessionInterface
 from app import app
 from auth.validateresult import AuthKind, ValidateResult

@@ -1,9 +1,13 @@
+from test.fixtures import *
 import pytest
-from auth.auth_context_type import SignedAuthContext, ValidatedAuthContext, ContextEntityKind
-from data import model, database
-from test.fixtures import *
+from auth.auth_context_type import (
+    ContextEntityKind,
+    SignedAuthContext,
+    ValidatedAuthContext,
+)
+from data import database, model
 def get_oauth_token(_):

@@ -1,20 +1,19 @@
 # -*- coding: utf-8 -*-
-import pytest
 from base64 import b64encode
+from test.fixtures import *
+import pytest
 from auth.basic import validate_basic_auth
 from auth.credentials import (
     ACCESS_TOKEN_USERNAME,
-    OAUTH_TOKEN_USERNAME,
     APP_SPECIFIC_TOKEN_USERNAME,
+    OAUTH_TOKEN_USERNAME,
 )
 from auth.validateresult import AuthKind, ValidateResult
 from data import model
-from test.fixtures import *
 def _token(username, password):
     assert isinstance(username, str)

@@ -1,11 +1,11 @@
 import uuid
+from test.fixtures import *
 from flask_login import login_user
 from app import LoginWrappedDBUser
-from data import model
 from auth.cookie import validate_session_cookie
-from test.fixtures import *
+from data import model
 def test_anonymous_cookie(app):

@@ -1,16 +1,16 @@
 # -*- coding: utf-8 -*-
-from auth.credentials import validate_credentials, CredentialKind
+from test.fixtures import *
 from auth.credential_consts import (
     ACCESS_TOKEN_USERNAME,
-    OAUTH_TOKEN_USERNAME,
     APP_SPECIFIC_TOKEN_USERNAME,
+    OAUTH_TOKEN_USERNAME,
 )
+from auth.credentials import CredentialKind, validate_credentials
 from auth.validateresult import AuthKind, ValidateResult
 from data import model
-from test.fixtures import *
 def test_valid_user(app):
     result, kind = validate_credentials("devtable", "password")

@@ -1,5 +1,6 @@
-import pytest
+from test.fixtures import *
+import pytest
 from flask import session
 from flask_login import login_user
 from werkzeug.exceptions import HTTPException
@@ -8,11 +9,10 @@ from app import LoginWrappedDBUser
 from auth.auth_context import get_authenticated_user
 from auth.decorators import (
     extract_namespace_repo_from_session,
-    require_session_login,
     process_auth_or_cookie,
+    require_session_login,
 )
 from data import model
-from test.fixtures import *
 def test_extract_namespace_repo_from_session_missing(app):

@@ -1,9 +1,10 @@
+from test.fixtures import *
 import pytest
 from auth.oauth import validate_bearer_auth
 from auth.validateresult import AuthKind, ValidateResult
 from data import model
-from test.fixtures import *
 @pytest.mark.parametrize(

@@ -1,11 +1,11 @@
+from test.fixtures import *
 import pytest
 from auth import scopes
-from auth.permissions import SuperUserPermission, QuayDeferredPermissionUser
+from auth.permissions import QuayDeferredPermissionUser, SuperUserPermission
 from data import model
-from test.fixtures import *
 SUPER_USERNAME = "devtable"
 UNSUPER_USERNAME = "freshuser"

@@ -4,15 +4,14 @@ import time
 import jwt
 import pytest
 from cryptography.hazmat.primitives import serialization
 from app import app, instance_keys
 from auth.auth_context_type import ValidatedAuthContext
-from auth.registry_jwt_auth import identity_from_bearer_token, InvalidJWTException
+from auth.registry_jwt_auth import InvalidJWTException, identity_from_bearer_token
 from data import model  # TODO: remove this after service keys are decoupled
 from data.database import ServiceKeyApprovalType
-from initdb import setup_database_for_testing, finished_database_for_testing
+from initdb import finished_database_for_testing, setup_database_for_testing
 from util.morecollections import AttrDict
 from util.security.registry_jwt import ANONYMOUS_SUB, build_context_and_subject

@@ -1,10 +1,10 @@
 import pytest
 from auth.scopes import (
-    scopes_from_scope_string,
-    validate_scope_string,
     ALL_SCOPES,
     is_subset_string,
+    scopes_from_scope_string,
+    validate_scope_string,
 )

@@ -1,6 +1,10 @@
 import pytest
-from auth.signedgrant import validate_signed_grant, generate_signed_token, SIGNATURE_PREFIX
+from auth.signedgrant import (
+    SIGNATURE_PREFIX,
+    generate_signed_token,
+    validate_signed_grant,
+)
 from auth.validateresult import AuthKind, ValidateResult

@@ -1,10 +1,11 @@
+from test.fixtures import *
 import pytest
 from auth.auth_context import get_authenticated_context
 from auth.validateresult import AuthKind, ValidateResult
 from data import model
 from data.database import AppSpecificAuthToken
-from test.fixtures import *
 def get_user():

@@ -1,5 +1,6 @@
 from enum import Enum
-from auth.auth_context_type import ValidatedAuthContext, ContextEntityKind
+from auth.auth_context_type import ContextEntityKind, ValidatedAuthContext
 class AuthKind(Enum):

@@ -1,10 +1,10 @@
 import hashlib
-import math
 import logging
-import features
+import math
 from requests.exceptions import RequestException
+import features
 from util.bytes import Bytes
 logger = logging.getLogger(__name__)

boot.py
@@ -1,25 +1,22 @@
 #!/usr/bin/env python
+import logging
+import os.path
 from datetime import datetime, timedelta
 from urllib.parse import urlunparse
-from jinja2 import Template
 from cachetools.func import lru_cache
-import logging
-import release
-import os.path
 from cryptography.hazmat.primitives import serialization
+from jinja2 import Template
+import release
+from _init import CONF_DIR
 from app import app
 from data.model import ServiceKeyDoesNotExist
 from data.model.release import set_region_release
 from data.model.service_keys import get_service_key
 from util.config.database import sync_database_with_config
 from util.generatepresharedkey import generate_key
-from _init import CONF_DIR
 logger = logging.getLogger(__name__)

@@ -1,5 +1,4 @@
 import asyncio
-
 from concurrent.futures import ThreadPoolExecutor
 from functools import partial

@@ -1,17 +1,17 @@
+import logging
 import jsonschema
 import jwt
-import logging
 from app import instance_keys
 from util.security import jwtutil
 from util.security.registry_jwt import (
-    generate_bearer_token,
-    InvalidBearerTokenException,
     ALGORITHM,
     JWT_CLOCK_SKEW_SECONDS,
+    InvalidBearerTokenException,
+    generate_bearer_token,
 )
 logger = logging.getLogger(__name__)

@@ -1,26 +1,24 @@
 import logging
 import logging.config
 import os
-import time
 import socket
+import time
+from raven.conf import setup_logging
+from raven.handlers.logging import SentryHandler
 import features
 from app import (
+    OVERRIDE_CONFIG_DIRECTORY,
     app,
-    userfiles as user_files,
     build_logs,
     dockerfile_build_queue,
     instance_keys,
-    OVERRIDE_CONFIG_DIRECTORY,
 )
-from util.log import logfile_path
+from app import userfiles as user_files
 from buildman.manager.ephemeral import EphemeralBuilderManager
 from buildman.server import BuilderServer
+from util.log import logfile_path
-from raven.handlers.logging import SentryHandler
-from raven.conf import setup_logging
 logger = logging.getLogger(__name__)

@@ -2,11 +2,11 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: buildman.proto
-from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import enum_type_wrapper
 # @@protoc_insertion_point(imports)

@@ -1,19 +1,17 @@
-import grpc
 import json
 import logging
+import grpc
 from google.protobuf.json_format import MessageToDict
-from buildman.buildman_pb import buildman_pb2
-from buildman.buildman_pb import buildman_pb2_grpc
 from buildman.build_token import (
     BUILD_JOB_REGISTRATION_TYPE,
     BUILD_JOB_TOKEN_TYPE,
     InvalidBuildTokenException,
 )
+from buildman.buildman_pb import buildman_pb2, buildman_pb2_grpc
 from data.database import BUILD_PHASE
 logger = logging.getLogger(__name__)

@@ -2,13 +2,14 @@ import json
 import logging
 import os
-from app import app
 from cachetools.func import lru_cache
-from notifications import spawn_notification
+from app import app
 from data import model
+from data.database import UseThenDisconnect
 from data.registry_model import registry_model
 from data.registry_model.datatypes import RepositoryReference
-from data.database import UseThenDisconnect
+from notifications import spawn_notification
 from util import slash_join
 from util.morecollections import AttrDict

@@ -1,5 +1,5 @@
-from abc import abstractmethod, ABC
 import inspect
+from abc import ABC, abstractmethod
 class BaseManager(ABC):

@@ -1,7 +1,7 @@
 import logging
-from buildman.manager.orchestrator_canceller import OrchestratorCanceller
 from buildman.manager.noop_canceller import NoopCanceller
+from buildman.manager.orchestrator_canceller import OrchestratorCanceller
 logger = logging.getLogger(__name__)

@@ -1,53 +1,49 @@
 import calendar
-import logging
 import json
+import logging
 import re
 import time
 import uuid
-import dateutil.parser
 from datetime import datetime, timedelta
+import dateutil.parser
 from prometheus_client import Counter, Histogram
-from app import app
+from app import app, instance_keys
 from buildman.build_token import (
-    build_token,
-    verify_build_token,
-    InvalidBearerTokenException,
     BUILD_JOB_REGISTRATION_TYPE,
     BUILD_JOB_TOKEN_TYPE,
+    InvalidBearerTokenException,
+    build_token,
+    verify_build_token,
 )
 from buildman.interface import (
-    BuildStateInterface,
+    RESULT_PHASES,
     BuildJobAlreadyExistsError,
     BuildJobDoesNotExistsError,
     BuildJobError,
     BuildJobResult,
-    RESULT_PHASES,
+    BuildStateInterface,
 )
 from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException
 from buildman.manager.executor import (
-    PopenExecutor,
     EC2Executor,
     KubernetesExecutor,
     KubernetesPodmanExecutor,
+    PopenExecutor,
 )
 from buildman.orchestrator import (
-    orchestrator_from_config,
-    KeyEvent,
-    OrchestratorError,
-    OrchestratorConnectionError,
     ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION,
+    KeyEvent,
+    OrchestratorConnectionError,
+    OrchestratorError,
+    orchestrator_from_config,
 )
+from data import database, model
-from app import instance_keys
-from data import database
 from data.database import BUILD_PHASE
-from data import model
 from util import slash_join
 from util.morecollections import AttrDict
 logger = logging.getLogger(__name__)

@@ -9,25 +9,21 @@ import subprocess
 import threading
 import time
 import uuid
-from functools import partial, wraps, lru_cache
+from functools import lru_cache, partial, wraps
 import boto3
 import botocore
 import cachetools.func
 import requests
-from jinja2 import FileSystemLoader, Environment
+from jinja2 import Environment, FileSystemLoader
 from prometheus_client import Histogram
 import release
-from _init import ROOT_DIR, OVERRIDE_CONFIG_DIRECTORY
+from _init import OVERRIDE_CONFIG_DIRECTORY, ROOT_DIR
 from app import app
 from buildman.container_cloud_config import CloudConfigContext
 from buildman.server import SECURE_GRPC_SERVER_PORT
 logger = logging.getLogger(__name__)

@@ -1,9 +1,8 @@
 import logging
-from buildman.orchestrator import orchestrator_from_config, OrchestratorError
+from buildman.orchestrator import OrchestratorError, orchestrator_from_config
 from util import slash_join
 logger = logging.getLogger(__name__)

@@ -1,13 +1,11 @@
-from abc import ABCMeta, abstractmethod
-from collections import namedtuple
-from contextlib import ContextDecorator
 import datetime
 import json
 import logging
 import re
 import time
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+from contextlib import ContextDecorator
 from enum import IntEnum, unique
 import redis
@@ -15,7 +13,6 @@ import redis
 from util import slash_join
 from util.expiresdict import ExpiresDict
 logger = logging.getLogger(__name__)
 ONE_DAY = 60 * 60 * 24

@@ -1,21 +1,18 @@
-import logging
-import grpc
 import json
+import logging
 from concurrent import futures
 from datetime import timedelta
 from threading import Event
+import grpc
 from flask import Flask
 from app import app
-from buildman.buildmanagerservicer import BuildManagerServicer
 from buildman.buildman_pb import buildman_pb2, buildman_pb2_grpc
+from buildman.buildmanagerservicer import BuildManagerServicer
 from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException
 from data import database, model
 logger = logging.getLogger(__name__)

@@ -1,20 +1,19 @@
 import asyncio
-import unittest
 import json
+import unittest
 import uuid
-from mock import Mock, ANY
+from mock import ANY, Mock
 from six import iteritems
 from buildman.asyncutil import AsyncWrapper
 from buildman.component.buildcomponent import BuildComponent
-from buildman.manager.ephemeral import EphemeralBuilderManager, REALM_PREFIX, JOB_PREFIX
+from buildman.manager.ephemeral import JOB_PREFIX, REALM_PREFIX, EphemeralBuilderManager
 from buildman.manager.executor import BuilderExecutor, ExecutorException
-from buildman.orchestrator import KeyEvent, KeyChange
+from buildman.orchestrator import KeyChange, KeyEvent
 from buildman.server import BuildJobResult
 from util import slash_join
 BUILD_UUID = "deadbeef-dead-beef-dead-deadbeefdead"
 REALM_ID = "1234-realm"

@@ -1,20 +1,19 @@
-import pytest
 import time
 import uuid
+from test.fixtures import *
+import pytest
+from app import app, instance_keys
 from buildman.build_token import (
-    build_token,
-    verify_build_token,
     ANONYMOUS_SUB,
     BUILD_JOB_REGISTRATION_TYPE,
     BUILD_JOB_TOKEN_TYPE,
     InvalidBuildTokenException,
+    build_token,
+    verify_build_token,
 )
-from test.fixtures import *
-from app import app, instance_keys
 @pytest.mark.parametrize(
     "token_type, expected_exception",

@@ -1,23 +1,22 @@
 import time
-import pytest
 from random import randrange
-from unittest.mock import patch, Mock
+from test.fixtures import *
+from unittest.mock import Mock, patch
 import fakeredis
+import pytest
 from freezegun import freeze_time
 from buildman.orchestrator import (
-    MemoryOrchestrator,
-    RedisOrchestrator,
     REDIS_EXPIRED_SUFFIX,
     REDIS_EXPIRING_SUFFIX,
-    KeyEvent,
     KeyChange,
+    KeyEvent,
+    MemoryOrchestrator,
+    RedisOrchestrator,
 )
 from util import slash_join
-from test.fixtures import *
 @pytest.fixture()
 def fake_redis():

@@ -1,14 +1,14 @@
 import logging
 import os
 from abc import ABCMeta, abstractmethod
 from jsonschema import validate
 from six import add_metaclass
-from endpoints.building import PreparedBuild
+from buildtrigger.triggerutil import InvalidServiceException, get_trigger_config
 from data import model
-from buildtrigger.triggerutil import get_trigger_config, InvalidServiceException
+from endpoints.building import PreparedBuild
-from util.jsontemplate import apply_data_to_obj, JSONTemplateParseException
+from util.jsontemplate import JSONTemplateParseException, apply_data_to_obj
 logger = logging.getLogger(__name__)

@@ -10,16 +10,16 @@ from jsonschema import validate
 from app import app, get_app_url
 from buildtrigger.basehandler import BuildTriggerHandler
 from buildtrigger.triggerutil import (
+    InvalidPayloadException,
     RepositoryReadException,
+    SkipRequestException,
     TriggerActivationException,
     TriggerDeactivationException,
-    TriggerStartException,
-    InvalidPayloadException,
     TriggerProviderException,
-    SkipRequestException,
+    TriggerStartException,
     determine_build_ref,
-    raise_if_skipped_build,
     find_matching_branches,
+    raise_if_skipped_build,
 )
 from util.dict_wrappers import JSONPathDict, SafeDictSetter
 from util.security.ssh import generate_ssh_keypair

@@ -1,38 +1,27 @@
-import logging
 import json
+import logging
-from jsonschema import validate, ValidationError
+from jsonschema import ValidationError, validate
-from buildtrigger.basehandler import BuildTriggerHandler
-from buildtrigger.bitbuckethandler import BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA as bb_schema
-from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_payload
-from buildtrigger.githubhandler import GITHUB_WEBHOOK_PAYLOAD_SCHEMA as gh_schema
-from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_payload
-from buildtrigger.gitlabhandler import GITLAB_WEBHOOK_PAYLOAD_SCHEMA as gl_schema
-from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_payload
 from buildtrigger.triggerutil import (
+    InvalidPayloadException,
     RepositoryReadException,
+    SkipRequestException,
     TriggerActivationException,
     TriggerStartException,
     ValidationRequestException,
-    InvalidPayloadException,
-    SkipRequestException,
-    raise_if_skipped_build,
     find_matching_branches,
+    raise_if_skipped_build,
 )
+from buildtrigger.basehandler import BuildTriggerHandler
+from buildtrigger.bitbuckethandler import (
+    BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA as bb_schema,
+    get_transformed_webhook_payload as bb_payload,
+)
+from buildtrigger.githubhandler import (
+    GITHUB_WEBHOOK_PAYLOAD_SCHEMA as gh_schema,
+    get_transformed_webhook_payload as gh_payload,
+)
+from buildtrigger.gitlabhandler import (
+    GITLAB_WEBHOOK_PAYLOAD_SCHEMA as gl_schema,
+    get_transformed_webhook_payload as gl_payload,
+)
 from util.security.ssh import generate_ssh_keypair
 logger = logging.getLogger(__name__)
 # Defines an ordered set of tuples of the schemas and associated transformation functions

@@ -1,39 +1,33 @@
+import base64
 import logging
 import os.path
-import base64
 import re
 from calendar import timegm
 from functools import wraps
 from ssl import SSLError
-from github import (
-    Github,
-    UnknownObjectException,
-    GithubException,
-    BadCredentialsException as GitHubBadCredentialsException,
-)
+from github import BadCredentialsException as GitHubBadCredentialsException
+from github import Github, GithubException, UnknownObjectException
 from jsonschema import validate
 from app import app, github_trigger
+from buildtrigger.basehandler import BuildTriggerHandler
 from buildtrigger.triggerutil import (
+    EmptyRepositoryException,
+    InvalidPayloadException,
     RepositoryReadException,
+    SkipRequestException,
     TriggerActivationException,
     TriggerDeactivationException,
     TriggerStartException,
-    EmptyRepositoryException,
     ValidationRequestException,
-    SkipRequestException,
-    InvalidPayloadException,
     determine_build_ref,
-    raise_if_skipped_build,
     find_matching_branches,
+    raise_if_skipped_build,
 )
-from buildtrigger.basehandler import BuildTriggerHandler
 from endpoints.exception import ExternalServiceError
-from util.security.ssh import generate_ssh_keypair
 from util.dict_wrappers import JSONPathDict, SafeDictSetter
+from util.security.ssh import generate_ssh_keypair
 logger = logging.getLogger(__name__)

@@ -1,32 +1,30 @@
-import os.path
 import logging
+import os.path
 from calendar import timegm
 from functools import wraps
 import dateutil.parser
 import gitlab
 import requests
 from jsonschema import validate
 from app import app, gitlab_trigger
+from buildtrigger.basehandler import BuildTriggerHandler
 from buildtrigger.triggerutil import (
+    InvalidPayloadException,
     RepositoryReadException,
+    SkipRequestException,
     TriggerActivationException,
+    TriggerAuthException,
     TriggerDeactivationException,
     TriggerStartException,
-    SkipRequestException,
-    InvalidPayloadException,
-    TriggerAuthException,
    determine_build_ref,
-    raise_if_skipped_build,
     find_matching_branches,
+    raise_if_skipped_build,
 )
-from buildtrigger.basehandler import BuildTriggerHandler
 from endpoints.exception import ExternalServiceError
-from util.security.ssh import generate_ssh_keypair
 from util.dict_wrappers import JSONPathDict, SafeDictSetter
+from util.security.ssh import generate_ssh_keypair
 logger = logging.getLogger(__name__)

@@ -1,4 +1,5 @@
 from datetime import datetime
+
 from mock import Mock
 from buildtrigger.bitbuckethandler import BitbucketBuildTrigger

@@ -1,7 +1,7 @@
 from datetime import datetime
-from mock import Mock
 from github import GithubException
+from mock import Mock
 from buildtrigger.githubhandler import GithubBuildTrigger
 from util.morecollections import AttrDict

@@ -1,11 +1,9 @@
 import base64
 import json
 from contextlib import contextmanager
 import gitlab
-from httmock import urlmatch, HTTMock
+from httmock import HTTMock, urlmatch
 from buildtrigger.gitlabhandler import GitLabBuildTrigger
 from util.morecollections import AttrDict

@@ -1,11 +1,12 @@
 import json
 import pytest
 from buildtrigger.test.bitbucketmock import get_bitbucket_trigger
 from buildtrigger.triggerutil import (
+    InvalidPayloadException,
     SkipRequestException,
     ValidationRequestException,
-    InvalidPayloadException,
 )
 from endpoints.building import PreparedBuild
 from util.morecollections import AttrDict

@@ -2,9 +2,9 @@ import copy
 import pytest
-from buildtrigger.triggerutil import TriggerStartException
 from buildtrigger.test.bitbucketmock import get_bitbucket_trigger
-from buildtrigger.test.githubmock import get_github_trigger, GithubBuildTrigger
+from buildtrigger.test.githubmock import GithubBuildTrigger, get_github_trigger
+from buildtrigger.triggerutil import TriggerStartException
 from endpoints.building import PreparedBuild

@@ -1,11 +1,12 @@
 import json
 import pytest
 from buildtrigger.test.githubmock import get_github_trigger
 from buildtrigger.triggerutil import (
+    InvalidPayloadException,
     SkipRequestException,
     ValidationRequestException,
-    InvalidPayloadException,
 )
 from endpoints.building import PreparedBuild
 from util.morecollections import AttrDict

@@ -1,14 +1,14 @@
 import json
 import pytest
 from mock import Mock
 from buildtrigger.test.gitlabmock import get_gitlab_trigger
 from buildtrigger.triggerutil import (
-    SkipRequestException,
-    ValidationRequestException,
     InvalidPayloadException,
+    SkipRequestException,
     TriggerStartException,
+    ValidationRequestException,
 )
 from endpoints.building import PreparedBuild
 from util.morecollections import AttrDict

@@ -1,13 +1,12 @@
 import json
 import pytest
 from jsonschema import validate
-from buildtrigger.customhandler import custom_trigger_payload
 from buildtrigger.basehandler import METADATA_SCHEMA
-from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook
 from buildtrigger.bitbuckethandler import get_transformed_commit_info as bb_commit
+from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook
+from buildtrigger.customhandler import custom_trigger_payload
 from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_webhook
 from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_webhook
 from buildtrigger.triggerutil import SkipRequestException

@@ -1,5 +1,5 @@
-import json
 import io
+import json
 import logging
 import re

@@ -1,13 +1,12 @@
-import sys
 import os
+import sys
 sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
 import logging
 from util.log import logfile_path
-from util.workers import get_worker_count, get_worker_connections_count
+from util.workers import get_worker_connections_count, get_worker_count
 logconfig = logfile_path(debug=True)
 bind = "0.0.0.0:5000"

@@ -3,16 +3,15 @@ from gevent import monkey
 monkey.patch_all()
-import sys
 import os
+import sys
 sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
 import logging
 from util.log import logfile_path
-from util.workers import get_worker_count, get_worker_connections_count
+from util.workers import get_worker_connections_count, get_worker_count
 logconfig = logfile_path(debug=False)
 bind = "unix:/tmp/gunicorn_registry.sock"

@@ -3,16 +3,15 @@ from gevent import monkey
 monkey.patch_all()
-import sys
 import os
+import sys
 sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
 import logging
 from util.log import logfile_path
-from util.workers import get_worker_count, get_worker_connections_count
+from util.workers import get_worker_connections_count, get_worker_count
 logconfig = logfile_path(debug=False)
 bind = "unix:/tmp/gunicorn_secscan.sock"

@@ -3,16 +3,15 @@ from gevent import monkey
 monkey.patch_all()
-import sys
 import os
+import sys
 sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
 import logging
 from util.log import logfile_path
-from util.workers import get_worker_count, get_worker_connections_count
+from util.workers import get_worker_connections_count, get_worker_count
 logconfig = logfile_path(debug=False)

@@ -1,7 +1,7 @@
 import os
-from app import app
 from active_migration import ActiveDataMigration
+from app import app
 def current_migration():

@@ -1,8 +1,8 @@
 import os
 import os.path
-import yaml
 import jinja2
+import yaml
 QUAYPATH = os.getenv("QUAYPATH", ".")
 QUAYDIR = os.getenv("QUAYDIR", "/")

@@ -1,7 +1,8 @@
-from typing import Union, List
 import os
 import os.path
 import sys
+from typing import List, Union
 import jinja2
 QUAYPATH = os.getenv("QUAYPATH", ".")

@@ -1,20 +1,18 @@
-from contextlib import contextmanager
 import os
 import tempfile
+from contextlib import contextmanager
-from six import iteritems
-from supervisor.options import ServerOptions
 import jinja2
 import pytest
+from six import iteritems
+from supervisor.options import ServerOptions
 from ..supervisord_conf_create import (
-    registry_services,
+    QUAY_OVERRIDE_SERVICES,
+    QUAY_SERVICES,
     limit_services,
     override_services,
-    QUAY_SERVICES,
+    registry_services,
-    QUAY_OVERRIDE_SERVICES,
 )

@@ -2,7 +2,6 @@ import logging
-
 from data.userfiles import DelegateUserfiles
 JSON_MIMETYPE = "application/json"

@@ -1,10 +1,10 @@
 import random
 import string
-from typing import Any, Dict
-import stripe
-from datetime import datetime, timedelta
 from calendar import timegm
+from datetime import datetime, timedelta
+from typing import Any, Dict
+import stripe
 from util.morecollections import AttrDict

@@ -1,12 +1,11 @@
-import redis
 import json
 import time
 from contextlib import closing
-from util.dynamic import import_class
 from datetime import timedelta
+import redis
+from util.dynamic import import_class
 ONE_DAY = timedelta(days=1)
 SEVEN_DAYS = timedelta(days=7)

@@ -1,11 +1,11 @@
-from data.cache.redis_cache import redis_cache_from_config
 from data.cache.impl import (
-    NoopDataModelCache,
+    DisconnectWrapper,
     InMemoryDataModelCache,
     MemcachedModelCache,
+    NoopDataModelCache,
     RedisDataModelCache,
-    DisconnectWrapper,
 )
+from data.cache.redis_cache import redis_cache_from_config
 def get_model_cache(config):

data/cache/impl.py
@@ -1,17 +1,17 @@
-import logging
 import json
+import logging
 import os
 from abc import ABCMeta, abstractmethod
 from contextlib import contextmanager
 from datetime import datetime
-from pymemcache.client.base import PooledClient
 from prometheus_client import Counter
-from redis import StrictRedis, RedisError
+from pymemcache.client.base import PooledClient
+from redis import RedisError, StrictRedis
 from six import add_metaclass
-from data.database import CloseForLongOperation
 from data.cache.redis_cache import redis_cache_from_config
+from data.database import CloseForLongOperation
 from util.expiresdict import ExpiresDict
 from util.timedeltastring import convert_to_timedelta
 from util.workers import get_worker_connections_count

@@ -1,4 +1,4 @@
-from redis import StrictRedis, RedisError
+from redis import RedisError, StrictRedis
 from rediscluster import RedisCluster

@@ -1,22 +1,22 @@
+from typing import Any, Dict
+from unittest.mock import MagicMock, patch
 import pytest
-from typing import Dict, Any
-from unittest.mock import patch, MagicMock
 from rediscluster.nodemanager import NodeManager
 from data.cache import (
     InMemoryDataModelCache,
-    NoopDataModelCache,
     MemcachedModelCache,
+    NoopDataModelCache,
     RedisDataModelCache,
 )
 from data.cache.cache_key import CacheKey
 from data.cache.redis_cache import (
-    redis_cache_from_config,
     REDIS_DRIVERS,
     ReadEndpointSupportedRedis,
+    redis_cache_from_config,
 )
 DATA: Dict[str, Any] = {}
 TEST_CACHE_CONFIG = {

View File

@ -1,62 +1,64 @@
 # pylint: disable=old-style-class,no-init
 from __future__ import annotations

 import inspect
 import logging
+import os
 import string
 import sys
 import time
 import uuid
-import os
-from contextlib import contextmanager
 from collections import defaultdict, namedtuple
+from contextlib import contextmanager
 from datetime import datetime
+from enum import Enum, IntEnum, unique
 from random import SystemRandom

+import rehash
 import toposort
+from cachetools.func import lru_cache
-from enum import IntEnum, Enum, unique
 from peewee import *
-from peewee import __exception_wrapper__, Function  # type: ignore
+from peewee import Function, __exception_wrapper__  # type: ignore
 from playhouse.pool import (
     PooledDatabase,
     PooledMySQLDatabase,
     PooledPostgresqlDatabase,
     PooledSqliteDatabase,
 )
 from sqlalchemy.engine.url import make_url
-import rehash
-from cachetools.func import lru_cache
-from data.fields import (
-    ResumableSHA256Field,
-    ResumableSHA1Field,
-    JSONField,
-    Base64BinaryField,
-    FullIndexedTextField,
-    FullIndexedCharField,
-    EnumField as ClientEnumField,
-    EncryptedTextField,
-    EncryptedCharField,
-    CredentialField,
-)
 from data.decorators import deprecated_model
-from data.text import match_mysql, match_like
 from data.encryption import FieldEncrypter
-from data.readreplica import ReadReplicaSupportedModel, ReadOnlyConfig, disallow_replica_use
 from data.estimate import mysql_estimate_row_count, normal_row_count
-from util.names import urn_generator
-from util.metrics.prometheus import (
-    db_pooled_connections_in_use,
-    db_pooled_connections_available,
-    db_connect_calls,
-    db_close_calls,
-)
+from data.fields import (
+    Base64BinaryField,
+    CredentialField,
+    EncryptedCharField,
+    EncryptedTextField,
+)
+from data.fields import EnumField as ClientEnumField
+from data.fields import (
+    FullIndexedCharField,
+    FullIndexedTextField,
+    JSONField,
+    ResumableSHA1Field,
+    ResumableSHA256Field,
+)
+from data.readreplica import (
+    ReadOnlyConfig,
+    ReadReplicaSupportedModel,
+    disallow_replica_use,
+)
+from data.text import match_like, match_mysql
+from util.metrics.prometheus import (
+    db_close_calls,
+    db_connect_calls,
+    db_pooled_connections_available,
+    db_pooled_connections_in_use,
+)
+from util.names import urn_generator
 from util.validation import validate_postgres_precondition

 logger = logging.getLogger(__name__)

 DEFAULT_DB_CONNECT_TIMEOUT = 10  # seconds
@@ -711,7 +713,6 @@ class User(BaseModel):
         # are cleaned up directly in the model.
         skip_transitive_deletes = (
             {
-                Image,
                 Repository,
                 Team,
                 RepositoryBuild,
@@ -723,17 +724,13 @@ class User(BaseModel):
                 Star,
                 RepositoryAuthorizedEmail,
                 TeamMember,
-                RepositoryTag,
                 PermissionPrototype,
-                DerivedStorageForImage,
-                TagManifest,
                 AccessToken,
                 OAuthAccessToken,
                 BlobUpload,
                 RepositoryNotification,
                 OAuthAuthorizationCode,
                 RepositoryActionCount,
-                TagManifestLabel,
                 TeamSync,
                 RepositorySearchScore,
                 DeletedNamespace,
@@ -743,7 +740,6 @@ class User(BaseModel):
                 ManifestSecurityStatus,
                 RepoMirrorConfig,
                 UploadedBlob,
-                RepositorySize,
                 QuotaRepositorySize,
                 QuotaNamespaceSize,
                 UserOrganizationQuota,
@@ -752,7 +748,6 @@ class User(BaseModel):
             }
             | appr_classes
             | v22_classes
-            | transition_classes
         )
         delete_instance_filtered(self, User, delete_nullable, skip_transitive_deletes)
@@ -958,28 +953,21 @@ class Repository(BaseModel):
         # are cleaned up directly
         skip_transitive_deletes = (
             {
-                RepositoryTag,
                 RepositoryBuild,
                 RepositoryBuildTrigger,
                 BlobUpload,
-                Image,
-                TagManifest,
-                TagManifestLabel,
                 Label,
-                DerivedStorageForImage,
                 RepositorySearchScore,
                 RepoMirrorConfig,
                 RepoMirrorRule,
                 DeletedRepository,
                 ManifestSecurityStatus,
                 UploadedBlob,
-                RepositorySize,
                 QuotaNamespaceSize,
                 QuotaRepositorySize,
             }
             | appr_classes
             | v22_classes
-            | transition_classes
         )
         delete_instance_filtered(self, Repository, delete_nullable, skip_transitive_deletes)
@@ -991,13 +979,6 @@ class RepositorySearchScore(BaseModel):
     last_updated = DateTimeField(null=True)

-@deprecated_model
-class RepositorySize(BaseModel):
-    repository = ForeignKeyField(Repository, unique=True)
-    repository_id: int
-    size_bytes = BigIntegerField()

 class QuotaNamespaceSize(BaseModel):
     namespace_user = ForeignKeyField(User, unique=True)
     size_bytes = BigIntegerField(null=False, default=0)
@@ -1189,84 +1170,6 @@ class UserRegion(BaseModel):
         indexes = ((("user", "location"), True),)

-@deprecated_model
-class Image(BaseModel):
-    # This class is intentionally denormalized. Even though images are supposed
-    # to be globally unique we can't treat them as such for permissions and
-    # security reasons. So rather than Repository <-> Image being many to many
-    # each image now belongs to exactly one repository.
-    docker_image_id = CharField(index=True)
-    repository = ForeignKeyField(Repository)
-
-    # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/
-    ancestors = CharField(index=True, default="/", max_length=64535, null=True)
-
-    storage = ForeignKeyField(ImageStorage, null=True)
-
-    created = DateTimeField(null=True)
-    comment = TextField(null=True)
-    command = TextField(null=True)
-    aggregate_size = BigIntegerField(null=True)
-    v1_json_metadata = TextField(null=True)
-    v1_checksum = CharField(null=True)
-
-    security_indexed = BooleanField(default=False, index=True)
-    security_indexed_engine = IntegerField(default=IMAGE_NOT_SCANNED_ENGINE_VERSION, index=True)
-
-    # We use a proxy here instead of 'self' in order to disable the foreign key constraint
-    parent = DeferredForeignKey("Image", null=True, backref="children")
-
-    class Meta:
-        database = db
-        read_only_config = read_only_config
-        indexes = (
-            # we don't really want duplicates
-            (("repository", "docker_image_id"), True),
-            (("security_indexed_engine", "security_indexed"), False),
-        )
-
-    def ancestor_id_list(self):
-        """
-        Returns an integer list of ancestor ids, ordered chronologically from root to direct parent.
-        """
-        return list(map(int, self.ancestors.split("/")[1:-1]))
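The removed `ancestor_id_list` above was the reader of the legacy `ancestors` encoding, so it is worth pinning down what that column held. A self-contained sketch of the same parsing, derived only from the field comment and method body shown in this hunk:

def ancestor_id_list(ancestors: str) -> list:
    # "/1/2/6/7/10/" splits into ["", "1", "2", "6", "7", "10", ""]; the
    # [1:-1] slice drops the empty edges left by the leading/trailing "/".
    return [int(part) for part in ancestors.split("/")[1:-1]]

assert ancestor_id_list("/1/2/6/7/10/") == [1, 2, 6, 7, 10]
assert ancestor_id_list("/") == []  # the column default, meaning "no ancestors"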
-@deprecated_model
-class DerivedStorageForImage(BaseModel):
-    source_image = ForeignKeyField(Image)
-    derivative = ForeignKeyField(ImageStorage)
-    transformation = ForeignKeyField(ImageStorageTransformation)
-    uniqueness_hash = CharField(null=True)
-
-    class Meta:
-        database = db
-        read_only_config = read_only_config
-        indexes = ((("source_image", "transformation", "uniqueness_hash"), True),)
-
-
-@deprecated_model
-class RepositoryTag(BaseModel):
-    name = CharField()
-    image = ForeignKeyField(Image)
-    repository = ForeignKeyField(Repository)
-    lifetime_start_ts = IntegerField(default=get_epoch_timestamp)
-    lifetime_end_ts = IntegerField(null=True, index=True)
-    hidden = BooleanField(default=False)
-    reversion = BooleanField(default=False)
-
-    class Meta:
-        database = db
-        read_only_config = read_only_config
-        indexes = (
-            (("repository", "name"), False),
-            (("repository", "lifetime_start_ts"), False),
-            (("repository", "lifetime_end_ts"), False),
-            # This unique index prevents deadlocks when concurrently moving and deleting tags
-            (("repository", "name", "lifetime_end_ts"), True),
-        )
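The `lifetime_start_ts` / `lifetime_end_ts` pair above encoded tag liveness as epoch seconds. A sketch of the convention those fields imply; the NULL-means-live reading is an assumption drawn from the nullable, indexed field, not from query code shown in this diff:

import time
from typing import Optional

def tag_is_alive(lifetime_start_ts: int, lifetime_end_ts: Optional[int]) -> bool:
    # Assumed convention: a tag with no end timestamp is still live; one whose
    # end timestamp has passed is treated as deleted (and garbage-collectable).
    now = int(time.time())
    if lifetime_end_ts is None:
        return lifetime_start_ts <= now
    return lifetime_start_ts <= now < lifetime_end_ts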
 class BUILD_PHASE(object):
     """
     Build phases enum.
@@ -1596,21 +1499,6 @@ class QuayRelease(BaseModel):
     )

-@deprecated_model
-class TorrentInfo(BaseModel):
-    storage = ForeignKeyField(ImageStorage)
-    piece_length = IntegerField()
-    pieces = Base64BinaryField()
-
-    class Meta:
-        database = db
-        read_only_config = read_only_config
-        indexes = (
-            # we may want to compute the piece hashes multiple times with different piece lengths
-            (("storage", "piece_length"), True),
-        )
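TorrentInfo stored BitTorrent-style piece hashes for a blob, which is why the unique index covers (storage, piece_length): the same blob hashes differently at different piece lengths. A sketch of the hashing itself, taken from the BitTorrent convention rather than from any Quay code in this diff:

import hashlib

def piece_hashes(data: bytes, piece_length: int) -> bytes:
    # BitTorrent-style piece hashing: fixed-size pieces, 20-byte SHA-1 digests
    # concatenated. The removed model stored such a byte string, base64-encoded,
    # in its `pieces` column.
    return b"".join(
        hashlib.sha1(data[offset : offset + piece_length]).digest()
        for offset in range(0, len(data), piece_length)
    )

assert len(piece_hashes(b"x" * 1000, 256)) == 20 * 4  # ceil(1000 / 256) = 4 pieces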
 class ServiceKeyApprovalType(Enum):
     SUPERUSER = "Super User API"
     KEY_ROTATION = "Key Rotation"
@@ -1939,64 +1827,6 @@ class ManifestBlob(BaseModel):
         indexes = ((("manifest", "blob"), True),)

-@deprecated_model
-class ManifestLegacyImage(BaseModel):
-    """
-    For V1-compatible manifests only, this table maps from the manifest to its associated Docker
-    image.
-    """
-
-    repository = ForeignKeyField(Repository, index=True)
-    manifest = ForeignKeyField(Manifest, unique=True)
-    image = ForeignKeyField(Image)
-
-
-@deprecated_model
-class TagManifest(BaseModel):
-    tag = ForeignKeyField(RepositoryTag, unique=True)
-    digest = CharField(index=True)
-    json_data = TextField()
-
-
-@deprecated_model
-class TagManifestToManifest(BaseModel):
-    tag_manifest = ForeignKeyField(TagManifest, index=True, unique=True)
-    manifest = ForeignKeyField(Manifest, index=True)
-    broken = BooleanField(index=True, default=False)
-
-
-@deprecated_model
-class TagManifestLabel(BaseModel):
-    repository = ForeignKeyField(Repository, index=True)
-    annotated = ForeignKeyField(TagManifest, index=True)
-    label = ForeignKeyField(Label)
-
-    class Meta:
-        database = db
-        read_only_config = read_only_config
-        indexes = ((("annotated", "label"), True),)
-
-
-@deprecated_model
-class TagManifestLabelMap(BaseModel):
-    tag_manifest = ForeignKeyField(TagManifest, index=True)
-    manifest = ForeignKeyField(Manifest, null=True, index=True)
-    label = ForeignKeyField(Label, index=True)
-    tag_manifest_label = ForeignKeyField(TagManifestLabel, index=True)
-    manifest_label = ForeignKeyField(ManifestLabel, null=True, index=True)
-    broken_manifest = BooleanField(index=True, default=False)
-
-
-@deprecated_model
-class TagToRepositoryTag(BaseModel):
-    repository = ForeignKeyField(Repository, index=True)
-    tag = ForeignKeyField(Tag, index=True, unique=True)
-    repository_tag = ForeignKeyField(RepositoryTag, index=True, unique=True)

 @unique
 class RepoMirrorRuleType(IntEnum):
     """
@@ -2180,10 +2010,7 @@ appr_classes = set(
         ApprBlobPlacement,
     ]
 )
-v22_classes = set(
-    [Manifest, ManifestLabel, ManifestBlob, ManifestLegacyImage, TagKind, ManifestChild, Tag]
-)
-transition_classes = set([TagManifestToManifest, TagManifestLabelMap, TagToRepositoryTag])
+v22_classes = set([Manifest, ManifestLabel, ManifestBlob, TagKind, ManifestChild, Tag])

 is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
 all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)]
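Note why deleting the classes is the whole change here: `all_models`, which the migration env at the end of this diff reflects into a SQLAlchemy schema, is discovered from the module rather than maintained by hand. A stripped-down sketch of that discovery pattern:

import inspect
import sys

def discover_models(module_name: str, base: type) -> list:
    # Mirrors the is_model/all_models lines above: any BaseModel subclass
    # defined in the module is picked up automatically, so removing a model
    # class also removes its table from the generated schema -- there is no
    # separate registry to update.
    def is_model(x):
        return inspect.isclass(x) and issubclass(x, base) and x is not base

    return [cls for _, cls in inspect.getmembers(sys.modules[module_name], is_model)]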
View File

@@ -1,8 +1,8 @@
-import os
-import logging
 import base64
+import logging
+import os
 from collections import namedtuple

 from cryptography.hazmat.primitives.ciphers.aead import AESCCM

 from util.security.secret import convert_secret_key
View File

@@ -1,13 +1,12 @@
 import base64
+import json
 import pickle
 import string
-import json
 from random import SystemRandom

 import bcrypt
-from peewee import TextField, CharField, SmallIntegerField
+from peewee import CharField, SmallIntegerField, TextField

 from data.text import prefix_search
 from util.bytes import Bytes
View File

@@ -1,9 +1,9 @@
 import logging

+from data.logs_model.combined_model import CombinedLogsModel
+from data.logs_model.document_logs_model import DocumentLogsModel
 from data.logs_model.splunk_logs_model import SplunkLogsModel
 from data.logs_model.table_logs_model import TableLogsModel
-from data.logs_model.document_logs_model import DocumentLogsModel
-from data.logs_model.combined_model import CombinedLogsModel

 logger = logging.getLogger(__name__)
View File

@@ -1,5 +1,5 @@
-import logging
 import itertools
+import logging

 from data.logs_model.datatypes import AggregatedLogCount, LogEntriesPage
 from data.logs_model.interface import ActionLogsDataInterface
View File

@@ -1,5 +1,4 @@
 import json
-
 from calendar import timegm
 from collections import namedtuple
 from email.utils import formatdate
View File

@@ -3,38 +3,38 @@
 import json
 import logging
 import uuid
-from time import time
-from datetime import timedelta, datetime, date
-from dateutil.parser import parse as parse_datetime
 from abc import ABCMeta, abstractmethod
-from six import add_metaclass
+from datetime import date, datetime, timedelta
+from time import time

+from dateutil.parser import parse as parse_datetime
 from elasticsearch.exceptions import ConnectionTimeout, NotFoundError
+from six import add_metaclass

 from data import model
 from data.database import CloseForLongOperation
-from data.model import config
-from data.model.log import (
-    _json_serialize,
-    ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING,
-    DataModelException,
-)
+from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
 from data.logs_model.elastic_logs import LogEntry, configure_es
-from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage
 from data.logs_model.interface import (
     ActionLogsDataInterface,
     LogRotationContextInterface,
     LogsIterationTimeout,
 )
-from data.logs_model.shared import SharedModel, epoch_ms, InvalidLogsDateRangeError
 from data.logs_model.logs_producer import LogProducerProxy, LogSendException
+from data.logs_model.logs_producer.elasticsearch_logs_producer import (
+    ElasticsearchLogsProducer,
+)
 from data.logs_model.logs_producer.kafka_logs_producer import KafkaLogsProducer
-from data.logs_model.logs_producer.elasticsearch_logs_producer import ElasticsearchLogsProducer
-from data.logs_model.logs_producer.kinesis_stream_logs_producer import KinesisStreamLogsProducer
+from data.logs_model.logs_producer.kinesis_stream_logs_producer import (
+    KinesisStreamLogsProducer,
+)
+from data.logs_model.shared import InvalidLogsDateRangeError, SharedModel, epoch_ms
+from data.model import config
+from data.model.log import (
+    ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING,
+    DataModelException,
+    _json_serialize,
+)

 logger = logging.getLogger(__name__)
View File

@@ -1,15 +1,13 @@
-import os
 import logging
+import os
 import re
 from datetime import datetime, timedelta

-from requests_aws4auth import AWS4Auth
 from elasticsearch import RequestsHttpConnection
-from elasticsearch.exceptions import NotFoundError, AuthorizationException
-from elasticsearch_dsl import Index, Document, Integer, Date, Text, Ip, Keyword, Object
+from elasticsearch.exceptions import AuthorizationException, NotFoundError
+from elasticsearch_dsl import Date, Document, Index, Integer, Ip, Keyword, Object, Text
 from elasticsearch_dsl.connections import connections
+from requests_aws4auth import AWS4Auth

 logger = logging.getLogger(__name__)
View File

@@ -1,13 +1,13 @@
-import logging
 import json
+import logging
 from collections import namedtuple
 from datetime import datetime

-from tzlocal import get_localzone
 from dateutil.relativedelta import relativedelta
+from tzlocal import get_localzone

 from data import model
-from data.logs_model.datatypes import AggregatedLogCount, LogEntriesPage, Log
+from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
 from data.logs_model.interface import (
     ActionLogsDataInterface,
     LogRotationContextInterface,
View File

@@ -1,4 +1,5 @@
 from abc import ABCMeta, abstractmethod
+
 from six import add_metaclass
View File

@@ -1,6 +1,5 @@
 import logging

-
 logger = logging.getLogger(__name__)
View File

@@ -2,9 +2,8 @@ import logging
 from elasticsearch.exceptions import ElasticsearchException

-from data.logs_model.logs_producer.interface import LogProducerInterface
 from data.logs_model.logs_producer import LogSendException
+from data.logs_model.logs_producer.interface import LogProducerInterface

 logger = logging.getLogger(__name__)
View File

@@ -1,4 +1,5 @@
 from abc import ABCMeta, abstractmethod
+
 from six import add_metaclass
View File

@@ -1,13 +1,12 @@
 import logging

-from kafka.errors import KafkaError, KafkaTimeoutError
 from kafka import KafkaProducer
+from kafka.errors import KafkaError, KafkaTimeoutError

-from data.logs_model.shared import epoch_ms
+from data.logs_model.logs_producer import LogSendException
 from data.logs_model.logs_producer.interface import LogProducerInterface
 from data.logs_model.logs_producer.util import logs_json_serializer
-from data.logs_model.logs_producer import LogSendException
+from data.logs_model.shared import epoch_ms

 logger = logging.getLogger(__name__)
View File

@@ -1,15 +1,14 @@
-import logging
 import hashlib
+import logging
 import random

 import boto3
-from botocore.exceptions import ClientError
 from botocore.client import Config
+from botocore.exceptions import ClientError

+from data.logs_model.logs_producer import LogSendException
 from data.logs_model.logs_producer.interface import LogProducerInterface
 from data.logs_model.logs_producer.util import logs_json_serializer
-from data.logs_model.logs_producer import LogSendException

 logger = logging.getLogger(__name__)
View File

@@ -1,13 +1,13 @@
 # -*- coding: utf-8 -*-
-import logging
 import json
+import logging
 from datetime import datetime

 import pytest

-from data.logs_model.logs_producer.util import logs_json_serializer
 from data.logs_model.elastic_logs import LogEntry
+from data.logs_model.logs_producer.util import logs_json_serializer

 logger = logging.getLogger(__name__)
View File

@@ -1,6 +1,5 @@
-import uuid
 import json
+import uuid
 from calendar import timegm
-
 from data import model
View File

@@ -1,6 +1,5 @@
 import json
 import logging
-
 from datetime import datetime

 from data import model
View File

@@ -1,23 +1,22 @@
 # pylint: disable=protected-access
 import logging
 from datetime import datetime, timedelta

-from tzlocal import get_localzone
 from dateutil.relativedelta import relativedelta
+from tzlocal import get_localzone

 from data import model
-from data.model import config
-from data.database import LogEntry, LogEntry2, LogEntry3, BaseModel, UseThenDisconnect
+from data.database import BaseModel, LogEntry, LogEntry2, LogEntry3, UseThenDisconnect
+from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
 from data.logs_model.interface import (
     ActionLogsDataInterface,
-    LogsIterationTimeout,
     LogRotationContextInterface,
+    LogsIterationTimeout,
 )
-from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage
-from data.logs_model.shared import SharedModel, InvalidLogsDateRangeError
-from data.model.log import get_stale_logs, get_stale_logs_start_id, delete_stale_logs
+from data.logs_model.shared import InvalidLogsDateRangeError, SharedModel
+from data.model import config
+from data.model.log import delete_stale_logs, get_stale_logs, get_stale_logs_start_id
 from data.readreplica import ReadOnlyModeException

 logger = logging.getLogger(__name__)
View File

@@ -1,14 +1,12 @@
+import fnmatch
 import json
 import uuid
-import fnmatch
 from collections import defaultdict
 from contextlib import contextmanager
 from datetime import datetime

 import dateutil.parser
-from httmock import urlmatch, HTTMock
+from httmock import HTTMock, urlmatch

 FAKE_ES_HOST = "fakees"
View File

@@ -1,10 +1,10 @@
 # -*- coding: utf-8 -*-
 import json
 from datetime import datetime

 from dateutil.parser import parse

-from data.logs_model.datatypes import LogEntriesPage, Log, AggregatedLogCount
+from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage

 def _status(d, code=200):
View File

@@ -1,13 +1,12 @@
-import pytest
 from datetime import date, datetime, timedelta
+from test.fixtures import *

+import pytest
 from freezegun import freeze_time

 from data import model
-from data.logs_model.inmemory_model import InMemoryModel
 from data.logs_model.combined_model import CombinedLogsModel
+from data.logs_model.inmemory_model import InMemoryModel
-from test.fixtures import *

 @pytest.fixture()
View File

@@ -6,14 +6,18 @@ import json
 from datetime import datetime, timedelta

 import pytest
-from mock import patch, Mock
 from dateutil.parser import parse
+from httmock import HTTMock, urlmatch
+from mock import Mock, patch

-from httmock import urlmatch, HTTMock
+from data.logs_model import LogsModelProxy, configure
+from data.logs_model.elastic_logs import (
+    INDEX_DATE_FORMAT,
+    INDEX_NAME_PREFIX,
+    ElasticsearchLogs,
+)
 from data.model.log import _json_serialize
-from data.logs_model.elastic_logs import ElasticsearchLogs, INDEX_NAME_PREFIX, INDEX_DATE_FORMAT
-from data.logs_model import configure, LogsModelProxy

 from .mock_elasticsearch import *

 FAKE_ES_HOST = "fakees"
View File

@@ -1,20 +1,25 @@
 import os
-import pytest
-from datetime import datetime, timedelta, date
-from unittest.mock import patch
-from data.logs_model.datatypes import AggregatedLogCount
-from data.logs_model.table_logs_model import TableLogsModel
-from data.logs_model.combined_model import CombinedLogsModel
-from data.logs_model.inmemory_model import InMemoryModel
-from data.logs_model.combined_model import _merge_aggregated_log_counts
-from data.logs_model.document_logs_model import _date_range_in_single_index, DocumentLogsModel
-from data.logs_model.interface import LogsIterationTimeout
-from data.logs_model.test.fake_elasticsearch import FAKE_ES_HOST, fake_elasticsearch
-from data.database import LogEntry, LogEntry2, LogEntry3, LogEntryKind
-from data import model
+from datetime import date, datetime, timedelta
 from test.fixtures import *
+from unittest.mock import patch
+
+import pytest
+
+from data import model
+from data.database import LogEntry, LogEntry2, LogEntry3, LogEntryKind
+from data.logs_model.combined_model import (
+    CombinedLogsModel,
+    _merge_aggregated_log_counts,
+)
+from data.logs_model.datatypes import AggregatedLogCount
+from data.logs_model.document_logs_model import (
+    DocumentLogsModel,
+    _date_range_in_single_index,
+)
+from data.logs_model.inmemory_model import InMemoryModel
+from data.logs_model.interface import LogsIterationTimeout
+from data.logs_model.table_logs_model import TableLogsModel
+from data.logs_model.test.fake_elasticsearch import FAKE_ES_HOST, fake_elasticsearch

 @pytest.fixture()
View File

@@ -1,21 +1,20 @@
 import logging

-import pytest
-from dateutil.parser import parse
-from mock import patch, Mock
 import botocore
+import pytest
+from dateutil.parser import parse
+from mock import Mock, patch

 from data.logs_model import configure

-from .mock_elasticsearch import *
 from .test_elasticsearch import (
     app_config,
-    logs_model_config,
     logs_model,
-    mock_elasticsearch,
+    logs_model_config,
     mock_db_model,
+    mock_elasticsearch,
 )
+from .mock_elasticsearch import *

 logger = logging.getLogger(__name__)
View File

@@ -3,16 +3,17 @@ import ssl
 import tempfile
 from ssl import SSLError
 from tempfile import NamedTemporaryFile
+from test.fixtures import *
 from unittest.mock import MagicMock, call

 import pytest
 from dateutil.parser import parse
-from mock import patch, Mock
+from mock import Mock, patch

-from .test_elasticsearch import logs_model, mock_db_model
 from data.logs_model import configure
-from test.fixtures import *

 from ..logs_producer.splunk_logs_producer import SplunkLogsProducer
+from .test_elasticsearch import logs_model, mock_db_model

 FAKE_SPLUNK_HOST = "fakesplunk"
 FAKE_SPLUNK_PORT = 443
View File

@@ -7,14 +7,12 @@ which defines the parameters for synthesizing a Kubernetes job to connect to and
 migrate a relational database.
 """
-import yaml
 import logging
 from functools import partial, wraps

-from alembic.operations import ops
 import sqlalchemy as sa
+import yaml
+from alembic.operations import ops

 logger = logging.getLogger(__name__)
View File

@@ -1,23 +1,22 @@
 import logging
 import logging.config
 import os
 from urllib.parse import unquote

-from alembic import context, op as alembic_op
+from alembic import context
+from alembic import op as alembic_op
 from alembic.script.revision import ResolutionError
 from alembic.util import CommandError
 from peewee import SqliteDatabase
 from sqlalchemy import create_engine

 from app import app
-from data.database import all_models, db, LEGACY_INDEX_MAP
+from data.database import LEGACY_INDEX_MAP, all_models, db
 from data.migrations.tester import NoopTester, PopulateTestDataTester
 from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
 from release import GIT_HEAD, REGION, SERVICE
 from util.morecollections import AttrDict

 logger = logging.getLogger(__name__)
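The migration itself is not shown in this truncated diff, but the commit message's "check for table existence before drop" maps onto a standard Alembic pattern. A hedged sketch using SQLAlchemy's inspector; the table names here are guesses mirroring the deprecated models removed above, not copied from the real migration:

import sqlalchemy as sa
from alembic import op

def upgrade():
    # Guarded drops: only drop tables that actually exist in this deployment,
    # so the migration tolerates re-runs and partially upgraded schemas.
    inspector = sa.inspect(op.get_bind())
    existing = set(inspector.get_table_names())
    for table in ("repositorytag", "tagmanifest", "tagmanifestlabel", "torrentinfo", "image"):
        if table in existing:
            op.drop_table(table)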
Some files were not shown because too many files have changed in this diff.