
chore: drop deprecated tables and remove unused code (PROJQUAY-522) (#2089)

* chore: drop deprecated tables and remove unused code

* isort imports

* migration: check for table existence before drop
Kenny Lee Sin Cheong 2023-08-25 12:17:24 -04:00 committed by GitHub
parent e72773bbce
commit 5f63b3a7bb
550 changed files with 3480 additions and 3778 deletions
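
Most of the hunks below are mechanical reorderings produced by isort (the second bullet of the commit message): imports are grouped into standard-library, third-party, and first-party sections, each section alphabetized, and the names inside a single import statement sorted as well (note Request moving ahead of request in the flask import). A minimal sketch of the resulting convention, built only from lines that appear in the app.py hunk below; the section comments are illustrative and not part of the diff:

# Standard-library imports come first, alphabetized.
import json
import logging
import os
from functools import partial

# Third-party imports come second.
from authlib.jose import JsonWebKey
from flask import Flask, Request, request
from werkzeug.exceptions import HTTPException
from werkzeug.middleware.proxy_fix import ProxyFix

# First-party (application) imports come last.
import features
from data import database, logs_model, model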

@@ -1,4 +1,5 @@
from enum import Enum, unique
from data.migrationutil import DefinedDataMigration, MigrationPhase
ActiveDataMigration = None

app.py

@@ -3,67 +3,64 @@ import json
import logging
import os
from functools import partial
from authlib.jose import JsonWebKey
from flask import Flask, request, Request
from flask import Flask, Request, request
from flask_login import LoginManager
from flask_mail import Mail
from flask_principal import Principal
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.exceptions import HTTPException
from werkzeug.middleware.proxy_fix import ProxyFix
import features
from _init import (
config_provider,
IS_BUILDING,
IS_KUBERNETES,
IS_TESTING,
OVERRIDE_CONFIG_DIRECTORY,
IS_BUILDING,
config_provider,
)
from avatars.avatars import Avatar
from buildman.manager.buildcanceller import BuildCanceller
from data import database
from data import model
from data import logs_model
from data import database, logs_model, model
from data.archivedlogs import LogArchive
from data.billing import Billing
from data.buildlogs import BuildLogs
from data.cache import get_model_cache
from data.model.user import LoginWrappedDBUser
from data.queue import WorkQueue
from data.registry_model import registry_model
from data.secscan_model import secscan_model
from data.userevent import UserEventsBuilderModule
from data.userfiles import Userfiles
from data.users import UserAuthentication, UserManager
from data.registry_model import registry_model
from data.secscan_model import secscan_model
from image.oci import register_artifact_type
from oauth.loginmanager import OAuthLoginManager
from oauth.services.github import GithubOAuthService
from oauth.services.gitlab import GitLabOAuthService
from path_converters import (
APIRepositoryPathConverter,
RegexConverter,
RepositoryPathConverter,
APIRepositoryPathConverter,
RepositoryPathRedirectConverter,
V1CreateRepositoryPathConverter,
)
from oauth.services.github import GithubOAuthService
from oauth.services.gitlab import GitLabOAuthService
from oauth.loginmanager import OAuthLoginManager
from storage import Storage
from util.log import filter_logs
from util import get_app_url
from util.ipresolver import IPResolver
from util.saas.analytics import Analytics
from util.saas.exceptionlog import Sentry
from util.names import urn_generator
from util.config import URLSchemeAndHostname
from util.config.configutil import generate_secret_key
from util.greenlet_tracing import enable_tracing
from util.ipresolver import IPResolver
from util.label_validator import LabelValidator
from util.log import filter_logs
from util.marketplace import RHMarketplaceAPI, RHUserAPI
from util.metrics.prometheus import PrometheusPlugin
from util.names import urn_generator
from util.repomirror.api import RepoMirrorAPI
from util.tufmetadata.api import TUFMetadataAPI
from util.saas.analytics import Analytics
from util.saas.exceptionlog import Sentry
from util.security.instancekeys import InstanceKeys
from util.greenlet_tracing import enable_tracing
from util.tufmetadata.api import TUFMetadataAPI
OVERRIDE_CONFIG_YAML_FILENAME = os.path.join(OVERRIDE_CONFIG_DIRECTORY, "config.yaml")
OVERRIDE_CONFIG_PY_FILENAME = os.path.join(OVERRIDE_CONFIG_DIRECTORY, "config.py")

@@ -1,18 +1,16 @@
import logging
from abc import ABCMeta, abstractmethod
from cachetools.func import lru_cache
from flask_principal import Identity, identity_changed
from six import add_metaclass
from app import app
from data import model
from flask_principal import Identity, identity_changed
from auth.auth_context import set_authenticated_context
from auth.context_entity import ContextEntityKind, CONTEXT_ENTITY_HANDLERS
from auth.context_entity import CONTEXT_ENTITY_HANDLERS, ContextEntityKind
from auth.permissions import QuayDeferredPermissionUser
from auth.scopes import scopes_from_scope_string
from data import model
logger = logging.getLogger(__name__)

@@ -1,10 +1,10 @@
import logging
from base64 import b64decode
from flask import request
from auth.credentials import validate_credentials
from auth.validateresult import ValidateResult, AuthKind
from auth.validateresult import AuthKind, ValidateResult
logger = logging.getLogger(__name__)

@@ -1,14 +1,14 @@
from abc import ABCMeta, abstractmethod
from six import add_metaclass
from enum import Enum
from data import model
from six import add_metaclass
from auth.credential_consts import (
ACCESS_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
APP_SPECIFIC_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
)
from data import model
class ContextEntityKind(Enum):

@@ -1,6 +1,6 @@
import logging
from uuid import UUID
from flask_login import current_user
from auth.validateresult import AuthKind, ValidateResult

@@ -1,17 +1,15 @@
import logging
from enum import Enum
import features
from app import authentication
from auth.oauth import validate_oauth_token
from auth.validateresult import ValidateResult, AuthKind
from auth.credential_consts import (
ACCESS_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
APP_SPECIFIC_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
)
from auth.oauth import validate_oauth_token
from auth.validateresult import AuthKind, ValidateResult
from data import model
from util.names import parse_robot_username

@@ -1,18 +1,16 @@
import logging
from functools import wraps
from flask import request, session
from prometheus_client import Counter
from auth.basic import validate_basic_auth
from auth.oauth import validate_bearer_auth
from auth.cookie import validate_session_cookie
from auth.oauth import validate_bearer_auth
from auth.signedgrant import validate_signed_grant
from auth.validateresult import AuthKind
from util.http import abort
logger = logging.getLogger(__name__)

@@ -3,16 +3,16 @@ from datetime import datetime
from jwt import ExpiredSignatureError, InvalidTokenError
from app import oauth_login, authentication, app, analytics
from app import analytics, app, authentication, oauth_login
from auth.scopes import scopes_from_scope_string
from auth.validateresult import AuthKind, ValidateResult
from data import model
from oauth.login import OAuthLoginException
from oauth.login_utils import (
is_jwt,
get_sub_username_email_from_token,
_conduct_oauth_login,
get_jwt_issuer,
get_sub_username_email_from_token,
is_jwt,
)
from oauth.oidc import PublicKeyLoadException

@@ -1,17 +1,14 @@
import logging
from collections import defaultdict, namedtuple
from functools import partial
from typing import DefaultDict, Optional
from collections import namedtuple, defaultdict
from functools import partial
from flask_principal import identity_loaded, Permission, Identity, identity_changed
from flask_principal import Identity, Permission, identity_changed, identity_loaded
from app import app, usermanager
from auth import scopes
from data import model
logger = logging.getLogger(__name__)

@@ -1,28 +1,26 @@
import logging
from functools import wraps
from jsonschema import validate, ValidationError
from flask import request, url_for
from flask_principal import identity_changed, Identity
from flask_principal import Identity, identity_changed
from jsonschema import ValidationError, validate
from app import app, get_app_url, instance_keys
from auth.auth_context import set_authenticated_context
from auth.auth_context_type import SignedAuthContext
from auth.permissions import (
repository_admin_grant,
repository_read_grant,
repository_write_grant,
repository_admin_grant,
)
from util.http import abort
from util.names import parse_namespace_repository
from util.security.registry_jwt import (
ANONYMOUS_SUB,
decode_bearer_header,
InvalidBearerTokenException,
decode_bearer_header,
)
logger = logging.getLogger(__name__)

@@ -1,6 +1,7 @@
from collections import namedtuple
import features
import re
from collections import namedtuple
import features
Scope = namedtuple("Scope", ["scope", "icon", "dangerous", "title", "description"])

@@ -1,6 +1,6 @@
import logging
from flask.sessions import SecureCookieSessionInterface, BadSignature
from flask.sessions import BadSignature, SecureCookieSessionInterface
from app import app
from auth.validateresult import AuthKind, ValidateResult

@@ -1,9 +1,13 @@
from test.fixtures import *
import pytest
from auth.auth_context_type import SignedAuthContext, ValidatedAuthContext, ContextEntityKind
from data import model, database
from test.fixtures import *
from auth.auth_context_type import (
ContextEntityKind,
SignedAuthContext,
ValidatedAuthContext,
)
from data import database, model
def get_oauth_token(_):

@@ -1,20 +1,19 @@
# -*- coding: utf-8 -*-
import pytest
from base64 import b64encode
from test.fixtures import *
import pytest
from auth.basic import validate_basic_auth
from auth.credentials import (
ACCESS_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
APP_SPECIFIC_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
)
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import *
def _token(username, password):
assert isinstance(username, str)

@@ -1,11 +1,11 @@
import uuid
from test.fixtures import *
from flask_login import login_user
from app import LoginWrappedDBUser
from data import model
from auth.cookie import validate_session_cookie
from test.fixtures import *
from data import model
def test_anonymous_cookie(app):

@@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
from auth.credentials import validate_credentials, CredentialKind
from test.fixtures import *
from auth.credential_consts import (
ACCESS_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
APP_SPECIFIC_TOKEN_USERNAME,
OAUTH_TOKEN_USERNAME,
)
from auth.credentials import CredentialKind, validate_credentials
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import *
def test_valid_user(app):
result, kind = validate_credentials("devtable", "password")

@@ -1,5 +1,6 @@
import pytest
from test.fixtures import *
import pytest
from flask import session
from flask_login import login_user
from werkzeug.exceptions import HTTPException
@@ -8,11 +9,10 @@ from app import LoginWrappedDBUser
from auth.auth_context import get_authenticated_user
from auth.decorators import (
extract_namespace_repo_from_session,
require_session_login,
process_auth_or_cookie,
require_session_login,
)
from data import model
from test.fixtures import *
def test_extract_namespace_repo_from_session_missing(app):

@@ -1,9 +1,10 @@
from test.fixtures import *
import pytest
from auth.oauth import validate_bearer_auth
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import *
@pytest.mark.parametrize(

@@ -1,11 +1,11 @@
from test.fixtures import *
import pytest
from auth import scopes
from auth.permissions import SuperUserPermission, QuayDeferredPermissionUser
from auth.permissions import QuayDeferredPermissionUser, SuperUserPermission
from data import model
from test.fixtures import *
SUPER_USERNAME = "devtable"
UNSUPER_USERNAME = "freshuser"

@@ -4,15 +4,14 @@ import time
import jwt
import pytest
from cryptography.hazmat.primitives import serialization
from app import app, instance_keys
from auth.auth_context_type import ValidatedAuthContext
from auth.registry_jwt_auth import identity_from_bearer_token, InvalidJWTException
from auth.registry_jwt_auth import InvalidJWTException, identity_from_bearer_token
from data import model # TODO: remove this after service keys are decoupled
from data.database import ServiceKeyApprovalType
from initdb import setup_database_for_testing, finished_database_for_testing
from initdb import finished_database_for_testing, setup_database_for_testing
from util.morecollections import AttrDict
from util.security.registry_jwt import ANONYMOUS_SUB, build_context_and_subject

@@ -1,10 +1,10 @@
import pytest
from auth.scopes import (
scopes_from_scope_string,
validate_scope_string,
ALL_SCOPES,
is_subset_string,
scopes_from_scope_string,
validate_scope_string,
)

@@ -1,6 +1,10 @@
import pytest
from auth.signedgrant import validate_signed_grant, generate_signed_token, SIGNATURE_PREFIX
from auth.signedgrant import (
SIGNATURE_PREFIX,
generate_signed_token,
validate_signed_grant,
)
from auth.validateresult import AuthKind, ValidateResult

@@ -1,10 +1,11 @@
from test.fixtures import *
import pytest
from auth.auth_context import get_authenticated_context
from auth.validateresult import AuthKind, ValidateResult
from data import model
from data.database import AppSpecificAuthToken
from test.fixtures import *
def get_user():

@@ -1,5 +1,6 @@
from enum import Enum
from auth.auth_context_type import ValidatedAuthContext, ContextEntityKind
from auth.auth_context_type import ContextEntityKind, ValidatedAuthContext
class AuthKind(Enum):

@@ -1,10 +1,10 @@
import hashlib
import math
import logging
import features
import math
from requests.exceptions import RequestException
import features
from util.bytes import Bytes
logger = logging.getLogger(__name__)

boot.py

@@ -1,25 +1,22 @@
#!/usr/bin/env python
import logging
import os.path
from datetime import datetime, timedelta
from urllib.parse import urlunparse
from jinja2 import Template
from cachetools.func import lru_cache
import logging
import release
import os.path
from cryptography.hazmat.primitives import serialization
from jinja2 import Template
import release
from _init import CONF_DIR
from app import app
from data.model import ServiceKeyDoesNotExist
from data.model.release import set_region_release
from data.model.service_keys import get_service_key
from util.config.database import sync_database_with_config
from util.generatepresharedkey import generate_key
from _init import CONF_DIR
logger = logging.getLogger(__name__)

@@ -1,5 +1,4 @@
import asyncio
from concurrent.futures import ThreadPoolExecutor
from functools import partial

@@ -1,17 +1,17 @@
import logging
import jsonschema
import jwt
import logging
from app import instance_keys
from util.security import jwtutil
from util.security.registry_jwt import (
generate_bearer_token,
InvalidBearerTokenException,
ALGORITHM,
JWT_CLOCK_SKEW_SECONDS,
InvalidBearerTokenException,
generate_bearer_token,
)
logger = logging.getLogger(__name__)

@@ -1,26 +1,24 @@
import logging
import logging.config
import os
import time
import socket
import time
from raven.conf import setup_logging
from raven.handlers.logging import SentryHandler
import features
from app import (
OVERRIDE_CONFIG_DIRECTORY,
app,
userfiles as user_files,
build_logs,
dockerfile_build_queue,
instance_keys,
OVERRIDE_CONFIG_DIRECTORY,
)
from util.log import logfile_path
from app import userfiles as user_files
from buildman.manager.ephemeral import EphemeralBuilderManager
from buildman.server import BuilderServer
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from util.log import logfile_path
logger = logging.getLogger(__name__)

@@ -2,11 +2,11 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: buildman.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import enum_type_wrapper
# @@protoc_insertion_point(imports)

@@ -1,19 +1,17 @@
import grpc
import json
import logging
import grpc
from google.protobuf.json_format import MessageToDict
from buildman.buildman_pb import buildman_pb2
from buildman.buildman_pb import buildman_pb2_grpc
from buildman.build_token import (
BUILD_JOB_REGISTRATION_TYPE,
BUILD_JOB_TOKEN_TYPE,
InvalidBuildTokenException,
)
from buildman.buildman_pb import buildman_pb2, buildman_pb2_grpc
from data.database import BUILD_PHASE
logger = logging.getLogger(__name__)

@@ -2,13 +2,14 @@ import json
import logging
import os
from app import app
from cachetools.func import lru_cache
from notifications import spawn_notification
from app import app
from data import model
from data.database import UseThenDisconnect
from data.registry_model import registry_model
from data.registry_model.datatypes import RepositoryReference
from data.database import UseThenDisconnect
from notifications import spawn_notification
from util import slash_join
from util.morecollections import AttrDict

@@ -1,5 +1,5 @@
from abc import abstractmethod, ABC
import inspect
from abc import ABC, abstractmethod
class BaseManager(ABC):

@@ -1,7 +1,7 @@
import logging
from buildman.manager.orchestrator_canceller import OrchestratorCanceller
from buildman.manager.noop_canceller import NoopCanceller
from buildman.manager.orchestrator_canceller import OrchestratorCanceller
logger = logging.getLogger(__name__)

@@ -1,53 +1,49 @@
import calendar
import logging
import json
import logging
import re
import time
import uuid
import dateutil.parser
from datetime import datetime, timedelta
import dateutil.parser
from prometheus_client import Counter, Histogram
from app import app
from app import app, instance_keys
from buildman.build_token import (
build_token,
verify_build_token,
InvalidBearerTokenException,
BUILD_JOB_REGISTRATION_TYPE,
BUILD_JOB_TOKEN_TYPE,
InvalidBearerTokenException,
build_token,
verify_build_token,
)
from buildman.interface import (
BuildStateInterface,
RESULT_PHASES,
BuildJobAlreadyExistsError,
BuildJobDoesNotExistsError,
BuildJobError,
BuildJobResult,
RESULT_PHASES,
BuildStateInterface,
)
from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException
from buildman.manager.executor import (
PopenExecutor,
EC2Executor,
KubernetesExecutor,
KubernetesPodmanExecutor,
PopenExecutor,
)
from buildman.orchestrator import (
orchestrator_from_config,
KeyEvent,
OrchestratorError,
OrchestratorConnectionError,
ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION,
KeyEvent,
OrchestratorConnectionError,
OrchestratorError,
orchestrator_from_config,
)
from app import instance_keys
from data import database
from data import database, model
from data.database import BUILD_PHASE
from data import model
from util import slash_join
from util.morecollections import AttrDict
logger = logging.getLogger(__name__)

@@ -9,25 +9,21 @@ import subprocess
import threading
import time
import uuid
from functools import partial, wraps, lru_cache
from functools import lru_cache, partial, wraps
import boto3
import botocore
import cachetools.func
import requests
from jinja2 import FileSystemLoader, Environment
from jinja2 import Environment, FileSystemLoader
from prometheus_client import Histogram
import release
from _init import ROOT_DIR, OVERRIDE_CONFIG_DIRECTORY
from _init import OVERRIDE_CONFIG_DIRECTORY, ROOT_DIR
from app import app
from buildman.container_cloud_config import CloudConfigContext
from buildman.server import SECURE_GRPC_SERVER_PORT
logger = logging.getLogger(__name__)

@@ -1,9 +1,8 @@
import logging
from buildman.orchestrator import orchestrator_from_config, OrchestratorError
from buildman.orchestrator import OrchestratorError, orchestrator_from_config
from util import slash_join
logger = logging.getLogger(__name__)

@@ -1,13 +1,11 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from contextlib import ContextDecorator
import datetime
import json
import logging
import re
import time
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from contextlib import ContextDecorator
from enum import IntEnum, unique
import redis
@@ -15,7 +13,6 @@ import redis
from util import slash_join
from util.expiresdict import ExpiresDict
logger = logging.getLogger(__name__)
ONE_DAY = 60 * 60 * 24

@@ -1,21 +1,18 @@
import logging
import grpc
import json
import logging
from concurrent import futures
from datetime import timedelta
from threading import Event
import grpc
from flask import Flask
from app import app
from buildman.buildmanagerservicer import BuildManagerServicer
from buildman.buildman_pb import buildman_pb2, buildman_pb2_grpc
from buildman.buildmanagerservicer import BuildManagerServicer
from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException
from data import database, model
logger = logging.getLogger(__name__)

@@ -1,20 +1,19 @@
import asyncio
import unittest
import json
import unittest
import uuid
from mock import Mock, ANY
from mock import ANY, Mock
from six import iteritems
from buildman.asyncutil import AsyncWrapper
from buildman.component.buildcomponent import BuildComponent
from buildman.manager.ephemeral import EphemeralBuilderManager, REALM_PREFIX, JOB_PREFIX
from buildman.manager.ephemeral import JOB_PREFIX, REALM_PREFIX, EphemeralBuilderManager
from buildman.manager.executor import BuilderExecutor, ExecutorException
from buildman.orchestrator import KeyEvent, KeyChange
from buildman.orchestrator import KeyChange, KeyEvent
from buildman.server import BuildJobResult
from util import slash_join
BUILD_UUID = "deadbeef-dead-beef-dead-deadbeefdead"
REALM_ID = "1234-realm"

@@ -1,20 +1,19 @@
import pytest
import time
import uuid
from test.fixtures import *
import pytest
from app import app, instance_keys
from buildman.build_token import (
build_token,
verify_build_token,
ANONYMOUS_SUB,
BUILD_JOB_REGISTRATION_TYPE,
BUILD_JOB_TOKEN_TYPE,
InvalidBuildTokenException,
build_token,
verify_build_token,
)
from test.fixtures import *
from app import app, instance_keys
@pytest.mark.parametrize(
"token_type, expected_exception",

@@ -1,23 +1,22 @@
import time
import pytest
from random import randrange
from unittest.mock import patch, Mock
from test.fixtures import *
from unittest.mock import Mock, patch
import fakeredis
import pytest
from freezegun import freeze_time
from buildman.orchestrator import (
MemoryOrchestrator,
RedisOrchestrator,
REDIS_EXPIRED_SUFFIX,
REDIS_EXPIRING_SUFFIX,
KeyEvent,
KeyChange,
KeyEvent,
MemoryOrchestrator,
RedisOrchestrator,
)
from util import slash_join
from test.fixtures import *
@pytest.fixture()
def fake_redis():

@@ -1,14 +1,14 @@
import logging
import os
from abc import ABCMeta, abstractmethod
from jsonschema import validate
from six import add_metaclass
from endpoints.building import PreparedBuild
from buildtrigger.triggerutil import InvalidServiceException, get_trigger_config
from data import model
from buildtrigger.triggerutil import get_trigger_config, InvalidServiceException
from util.jsontemplate import apply_data_to_obj, JSONTemplateParseException
from endpoints.building import PreparedBuild
from util.jsontemplate import JSONTemplateParseException, apply_data_to_obj
logger = logging.getLogger(__name__)

@@ -10,16 +10,16 @@ from jsonschema import validate
from app import app, get_app_url
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.triggerutil import (
InvalidPayloadException,
RepositoryReadException,
SkipRequestException,
TriggerActivationException,
TriggerDeactivationException,
TriggerStartException,
InvalidPayloadException,
TriggerProviderException,
SkipRequestException,
TriggerStartException,
determine_build_ref,
raise_if_skipped_build,
find_matching_branches,
raise_if_skipped_build,
)
from util.dict_wrappers import JSONPathDict, SafeDictSetter
from util.security.ssh import generate_ssh_keypair

@@ -1,38 +1,27 @@
import logging
import json
import logging
from jsonschema import validate, ValidationError
from jsonschema import ValidationError, validate
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.bitbuckethandler import BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA as bb_schema
from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_payload
from buildtrigger.githubhandler import GITHUB_WEBHOOK_PAYLOAD_SCHEMA as gh_schema
from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_payload
from buildtrigger.gitlabhandler import GITLAB_WEBHOOK_PAYLOAD_SCHEMA as gl_schema
from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_payload
from buildtrigger.triggerutil import (
InvalidPayloadException,
RepositoryReadException,
SkipRequestException,
TriggerActivationException,
TriggerStartException,
ValidationRequestException,
InvalidPayloadException,
SkipRequestException,
raise_if_skipped_build,
find_matching_branches,
raise_if_skipped_build,
)
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.bitbuckethandler import (
BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA as bb_schema,
get_transformed_webhook_payload as bb_payload,
)
from buildtrigger.githubhandler import (
GITHUB_WEBHOOK_PAYLOAD_SCHEMA as gh_schema,
get_transformed_webhook_payload as gh_payload,
)
from buildtrigger.gitlabhandler import (
GITLAB_WEBHOOK_PAYLOAD_SCHEMA as gl_schema,
get_transformed_webhook_payload as gl_payload,
)
from util.security.ssh import generate_ssh_keypair
logger = logging.getLogger(__name__)
# Defines an ordered set of tuples of the schemas and associated transformation functions

@@ -1,39 +1,33 @@
import base64
import logging
import os.path
import base64
import re
from calendar import timegm
from functools import wraps
from ssl import SSLError
from github import (
Github,
UnknownObjectException,
GithubException,
BadCredentialsException as GitHubBadCredentialsException,
)
from github import BadCredentialsException as GitHubBadCredentialsException
from github import Github, GithubException, UnknownObjectException
from jsonschema import validate
from app import app, github_trigger
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.triggerutil import (
EmptyRepositoryException,
InvalidPayloadException,
RepositoryReadException,
SkipRequestException,
TriggerActivationException,
TriggerDeactivationException,
TriggerStartException,
EmptyRepositoryException,
ValidationRequestException,
SkipRequestException,
InvalidPayloadException,
determine_build_ref,
raise_if_skipped_build,
find_matching_branches,
raise_if_skipped_build,
)
from buildtrigger.basehandler import BuildTriggerHandler
from endpoints.exception import ExternalServiceError
from util.security.ssh import generate_ssh_keypair
from util.dict_wrappers import JSONPathDict, SafeDictSetter
from util.security.ssh import generate_ssh_keypair
logger = logging.getLogger(__name__)

@@ -1,32 +1,30 @@
import os.path
import logging
import os.path
from calendar import timegm
from functools import wraps
import dateutil.parser
import gitlab
import requests
from jsonschema import validate
from app import app, gitlab_trigger
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.triggerutil import (
InvalidPayloadException,
RepositoryReadException,
SkipRequestException,
TriggerActivationException,
TriggerAuthException,
TriggerDeactivationException,
TriggerStartException,
SkipRequestException,
InvalidPayloadException,
TriggerAuthException,
determine_build_ref,
raise_if_skipped_build,
find_matching_branches,
raise_if_skipped_build,
)
from buildtrigger.basehandler import BuildTriggerHandler
from endpoints.exception import ExternalServiceError
from util.security.ssh import generate_ssh_keypair
from util.dict_wrappers import JSONPathDict, SafeDictSetter
from util.security.ssh import generate_ssh_keypair
logger = logging.getLogger(__name__)

@@ -1,4 +1,5 @@
from datetime import datetime
from mock import Mock
from buildtrigger.bitbuckethandler import BitbucketBuildTrigger

@@ -1,7 +1,7 @@
from datetime import datetime
from mock import Mock
from github import GithubException
from mock import Mock
from buildtrigger.githubhandler import GithubBuildTrigger
from util.morecollections import AttrDict

@@ -1,11 +1,9 @@
import base64
import json
from contextlib import contextmanager
import gitlab
from httmock import urlmatch, HTTMock
from httmock import HTTMock, urlmatch
from buildtrigger.gitlabhandler import GitLabBuildTrigger
from util.morecollections import AttrDict

@@ -1,11 +1,12 @@
import json
import pytest
from buildtrigger.test.bitbucketmock import get_bitbucket_trigger
from buildtrigger.triggerutil import (
InvalidPayloadException,
SkipRequestException,
ValidationRequestException,
InvalidPayloadException,
)
from endpoints.building import PreparedBuild
from util.morecollections import AttrDict

@@ -2,9 +2,9 @@ import copy
import pytest
from buildtrigger.triggerutil import TriggerStartException
from buildtrigger.test.bitbucketmock import get_bitbucket_trigger
from buildtrigger.test.githubmock import get_github_trigger, GithubBuildTrigger
from buildtrigger.test.githubmock import GithubBuildTrigger, get_github_trigger
from buildtrigger.triggerutil import TriggerStartException
from endpoints.building import PreparedBuild

@@ -1,11 +1,12 @@
import json
import pytest
from buildtrigger.test.githubmock import get_github_trigger
from buildtrigger.triggerutil import (
InvalidPayloadException,
SkipRequestException,
ValidationRequestException,
InvalidPayloadException,
)
from endpoints.building import PreparedBuild
from util.morecollections import AttrDict

@@ -1,14 +1,14 @@
import json
import pytest
import pytest
from mock import Mock
from buildtrigger.test.gitlabmock import get_gitlab_trigger
from buildtrigger.triggerutil import (
SkipRequestException,
ValidationRequestException,
InvalidPayloadException,
SkipRequestException,
TriggerStartException,
ValidationRequestException,
)
from endpoints.building import PreparedBuild
from util.morecollections import AttrDict

@@ -1,13 +1,12 @@
import json
import pytest
from jsonschema import validate
from buildtrigger.customhandler import custom_trigger_payload
from buildtrigger.basehandler import METADATA_SCHEMA
from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook
from buildtrigger.bitbuckethandler import get_transformed_commit_info as bb_commit
from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook
from buildtrigger.customhandler import custom_trigger_payload
from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_webhook
from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_webhook
from buildtrigger.triggerutil import SkipRequestException

@@ -1,5 +1,5 @@
import json
import io
import json
import logging
import re

@@ -1,13 +1,12 @@
import sys
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import logging
from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count
from util.workers import get_worker_connections_count, get_worker_count
logconfig = logfile_path(debug=True)
bind = "0.0.0.0:5000"

@@ -3,16 +3,15 @@ from gevent import monkey
monkey.patch_all()
import sys
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import logging
from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count
from util.workers import get_worker_connections_count, get_worker_count
logconfig = logfile_path(debug=False)
bind = "unix:/tmp/gunicorn_registry.sock"

@@ -3,16 +3,15 @@ from gevent import monkey
monkey.patch_all()
import sys
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import logging
from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count
from util.workers import get_worker_connections_count, get_worker_count
logconfig = logfile_path(debug=False)
bind = "unix:/tmp/gunicorn_secscan.sock"

@@ -3,16 +3,15 @@ from gevent import monkey
monkey.patch_all()
import sys
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import logging
from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count
from util.workers import get_worker_connections_count, get_worker_count
logconfig = logfile_path(debug=False)

@@ -1,7 +1,7 @@
import os
from app import app
from active_migration import ActiveDataMigration
from app import app
def current_migration():

@@ -1,8 +1,8 @@
import os
import os.path
import yaml
import jinja2
import yaml
QUAYPATH = os.getenv("QUAYPATH", ".")
QUAYDIR = os.getenv("QUAYDIR", "/")

@@ -1,7 +1,8 @@
from typing import Union, List
import os
import os.path
import sys
from typing import List, Union
import jinja2
QUAYPATH = os.getenv("QUAYPATH", ".")

@@ -1,20 +1,18 @@
from contextlib import contextmanager
import os
import tempfile
from six import iteritems
from supervisor.options import ServerOptions
from contextlib import contextmanager
import jinja2
import pytest
from six import iteritems
from supervisor.options import ServerOptions
from ..supervisord_conf_create import (
registry_services,
QUAY_OVERRIDE_SERVICES,
QUAY_SERVICES,
limit_services,
override_services,
QUAY_SERVICES,
QUAY_OVERRIDE_SERVICES,
registry_services,
)

@@ -2,7 +2,6 @@ import logging
from data.userfiles import DelegateUserfiles
JSON_MIMETYPE = "application/json"

@@ -1,10 +1,10 @@
import random
import string
from typing import Any, Dict
import stripe
from datetime import datetime, timedelta
from calendar import timegm
from datetime import datetime, timedelta
from typing import Any, Dict
import stripe
from util.morecollections import AttrDict

@@ -1,12 +1,11 @@
import redis
import json
import time
from contextlib import closing
from util.dynamic import import_class
from datetime import timedelta
import redis
from util.dynamic import import_class
ONE_DAY = timedelta(days=1)
SEVEN_DAYS = timedelta(days=7)

@@ -1,11 +1,11 @@
from data.cache.redis_cache import redis_cache_from_config
from data.cache.impl import (
NoopDataModelCache,
DisconnectWrapper,
InMemoryDataModelCache,
MemcachedModelCache,
NoopDataModelCache,
RedisDataModelCache,
DisconnectWrapper,
)
from data.cache.redis_cache import redis_cache_from_config
def get_model_cache(config):

data/cache/impl.py

@@ -1,17 +1,17 @@
import logging
import json
import logging
import os
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from datetime import datetime
from pymemcache.client.base import PooledClient
from prometheus_client import Counter
from redis import StrictRedis, RedisError
from pymemcache.client.base import PooledClient
from redis import RedisError, StrictRedis
from six import add_metaclass
from data.database import CloseForLongOperation
from data.cache.redis_cache import redis_cache_from_config
from data.database import CloseForLongOperation
from util.expiresdict import ExpiresDict
from util.timedeltastring import convert_to_timedelta
from util.workers import get_worker_connections_count

@@ -1,4 +1,4 @@
from redis import StrictRedis, RedisError
from redis import RedisError, StrictRedis
from rediscluster import RedisCluster

@@ -1,22 +1,22 @@
from typing import Any, Dict
from unittest.mock import MagicMock, patch
import pytest
from typing import Dict, Any
from unittest.mock import patch, MagicMock
from rediscluster.nodemanager import NodeManager
from data.cache import (
InMemoryDataModelCache,
NoopDataModelCache,
MemcachedModelCache,
NoopDataModelCache,
RedisDataModelCache,
)
from data.cache.cache_key import CacheKey
from data.cache.redis_cache import (
redis_cache_from_config,
REDIS_DRIVERS,
ReadEndpointSupportedRedis,
redis_cache_from_config,
)
DATA: Dict[str, Any] = {}
TEST_CACHE_CONFIG = {

@@ -1,62 +1,64 @@
# pylint: disable=old-style-class,no-init
from __future__ import annotations
import inspect
import logging
import os
import string
import sys
import time
import uuid
import os
from contextlib import contextmanager
from collections import defaultdict, namedtuple
from contextlib import contextmanager
from datetime import datetime
from enum import Enum, IntEnum, unique
from random import SystemRandom
import rehash
import toposort
from enum import IntEnum, Enum, unique
from cachetools.func import lru_cache
from peewee import *
from peewee import __exception_wrapper__, Function # type: ignore
from peewee import Function, __exception_wrapper__ # type: ignore
from playhouse.pool import (
PooledDatabase,
PooledMySQLDatabase,
PooledPostgresqlDatabase,
PooledSqliteDatabase,
)
from sqlalchemy.engine.url import make_url
import rehash
from cachetools.func import lru_cache
from data.fields import (
ResumableSHA256Field,
ResumableSHA1Field,
JSONField,
Base64BinaryField,
FullIndexedTextField,
FullIndexedCharField,
EnumField as ClientEnumField,
EncryptedTextField,
EncryptedCharField,
CredentialField,
)
from data.decorators import deprecated_model
from data.text import match_mysql, match_like
from data.encryption import FieldEncrypter
from data.readreplica import ReadReplicaSupportedModel, ReadOnlyConfig, disallow_replica_use
from data.estimate import mysql_estimate_row_count, normal_row_count
from util.names import urn_generator
from util.metrics.prometheus import (
db_pooled_connections_in_use,
db_pooled_connections_available,
db_connect_calls,
db_close_calls,
from data.fields import (
Base64BinaryField,
CredentialField,
EncryptedCharField,
EncryptedTextField,
)
from data.fields import EnumField as ClientEnumField
from data.fields import (
FullIndexedCharField,
FullIndexedTextField,
JSONField,
ResumableSHA1Field,
ResumableSHA256Field,
)
from data.readreplica import (
ReadOnlyConfig,
ReadReplicaSupportedModel,
disallow_replica_use,
)
from data.text import match_like, match_mysql
from util.metrics.prometheus import (
db_close_calls,
db_connect_calls,
db_pooled_connections_available,
db_pooled_connections_in_use,
)
from util.names import urn_generator
from util.validation import validate_postgres_precondition
logger = logging.getLogger(__name__)
DEFAULT_DB_CONNECT_TIMEOUT = 10 # seconds
@@ -711,7 +713,6 @@ class User(BaseModel):
# are cleaned up directly in the model.
skip_transitive_deletes = (
{
Image,
Repository,
Team,
RepositoryBuild,
@@ -723,17 +724,13 @@ class User(BaseModel):
Star,
RepositoryAuthorizedEmail,
TeamMember,
RepositoryTag,
PermissionPrototype,
DerivedStorageForImage,
TagManifest,
AccessToken,
OAuthAccessToken,
BlobUpload,
RepositoryNotification,
OAuthAuthorizationCode,
RepositoryActionCount,
TagManifestLabel,
TeamSync,
RepositorySearchScore,
DeletedNamespace,
@@ -743,7 +740,6 @@ class User(BaseModel):
ManifestSecurityStatus,
RepoMirrorConfig,
UploadedBlob,
RepositorySize,
QuotaRepositorySize,
QuotaNamespaceSize,
UserOrganizationQuota,
@@ -752,7 +748,6 @@ class User(BaseModel):
}
| appr_classes
| v22_classes
| transition_classes
)
delete_instance_filtered(self, User, delete_nullable, skip_transitive_deletes)
@@ -958,28 +953,21 @@ class Repository(BaseModel):
# are cleaned up directly
skip_transitive_deletes = (
{
RepositoryTag,
RepositoryBuild,
RepositoryBuildTrigger,
BlobUpload,
Image,
TagManifest,
TagManifestLabel,
Label,
DerivedStorageForImage,
RepositorySearchScore,
RepoMirrorConfig,
RepoMirrorRule,
DeletedRepository,
ManifestSecurityStatus,
UploadedBlob,
RepositorySize,
QuotaNamespaceSize,
QuotaRepositorySize,
}
| appr_classes
| v22_classes
| transition_classes
)
delete_instance_filtered(self, Repository, delete_nullable, skip_transitive_deletes)
@@ -991,13 +979,6 @@ class RepositorySearchScore(BaseModel):
last_updated = DateTimeField(null=True)
@deprecated_model
class RepositorySize(BaseModel):
repository = ForeignKeyField(Repository, unique=True)
repository_id: int
size_bytes = BigIntegerField()
class QuotaNamespaceSize(BaseModel):
namespace_user = ForeignKeyField(User, unique=True)
size_bytes = BigIntegerField(null=False, default=0)
@@ -1189,84 +1170,6 @@ class UserRegion(BaseModel):
indexes = ((("user", "location"), True),)
@deprecated_model
class Image(BaseModel):
# This class is intentionally denormalized. Even though images are supposed
# to be globally unique we can't treat them as such for permissions and
# security reasons. So rather than Repository <-> Image being many to many
# each image now belongs to exactly one repository.
docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository)
# '/' separated list of ancestory ids, e.g. /1/2/6/7/10/
ancestors = CharField(index=True, default="/", max_length=64535, null=True)
storage = ForeignKeyField(ImageStorage, null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
v1_json_metadata = TextField(null=True)
v1_checksum = CharField(null=True)
security_indexed = BooleanField(default=False, index=True)
security_indexed_engine = IntegerField(default=IMAGE_NOT_SCANNED_ENGINE_VERSION, index=True)
# We use a proxy here instead of 'self' in order to disable the foreign key constraint
parent = DeferredForeignKey("Image", null=True, backref="children")
class Meta:
database = db
read_only_config = read_only_config
indexes = (
# we don't really want duplicates
(("repository", "docker_image_id"), True),
(("security_indexed_engine", "security_indexed"), False),
)
def ancestor_id_list(self):
"""
Returns an integer list of ancestor ids, ordered chronologically from root to direct parent.
"""
return list(map(int, self.ancestors.split("/")[1:-1]))
@deprecated_model
class DerivedStorageForImage(BaseModel):
source_image = ForeignKeyField(Image)
derivative = ForeignKeyField(ImageStorage)
transformation = ForeignKeyField(ImageStorageTransformation)
uniqueness_hash = CharField(null=True)
class Meta:
database = db
read_only_config = read_only_config
indexes = ((("source_image", "transformation", "uniqueness_hash"), True),)
@deprecated_model
class RepositoryTag(BaseModel):
name = CharField()
image = ForeignKeyField(Image)
repository = ForeignKeyField(Repository)
lifetime_start_ts = IntegerField(default=get_epoch_timestamp)
lifetime_end_ts = IntegerField(null=True, index=True)
hidden = BooleanField(default=False)
reversion = BooleanField(default=False)
class Meta:
database = db
read_only_config = read_only_config
indexes = (
(("repository", "name"), False),
(("repository", "lifetime_start_ts"), False),
(("repository", "lifetime_end_ts"), False),
# This unique index prevents deadlocks when concurrently moving and deleting tags
(("repository", "name", "lifetime_end_ts"), True),
)
class BUILD_PHASE(object):
"""
Build phases enum.
@@ -1596,21 +1499,6 @@ class QuayRelease(BaseModel):
)
@deprecated_model
class TorrentInfo(BaseModel):
storage = ForeignKeyField(ImageStorage)
piece_length = IntegerField()
pieces = Base64BinaryField()
class Meta:
database = db
read_only_config = read_only_config
indexes = (
# we may want to compute the piece hashes multiple times with different piece lengths
(("storage", "piece_length"), True),
)
class ServiceKeyApprovalType(Enum):
SUPERUSER = "Super User API"
KEY_ROTATION = "Key Rotation"
@@ -1939,64 +1827,6 @@ class ManifestBlob(BaseModel):
indexes = ((("manifest", "blob"), True),)
@deprecated_model
class ManifestLegacyImage(BaseModel):
"""
For V1-compatible manifests only, this table maps from the manifest to its associated Docker
image.
"""
repository = ForeignKeyField(Repository, index=True)
manifest = ForeignKeyField(Manifest, unique=True)
image = ForeignKeyField(Image)
@deprecated_model
class TagManifest(BaseModel):
tag = ForeignKeyField(RepositoryTag, unique=True)
digest = CharField(index=True)
json_data = TextField()
@deprecated_model
class TagManifestToManifest(BaseModel):
tag_manifest = ForeignKeyField(TagManifest, index=True, unique=True)
manifest = ForeignKeyField(Manifest, index=True)
broken = BooleanField(index=True, default=False)
@deprecated_model
class TagManifestLabel(BaseModel):
repository = ForeignKeyField(Repository, index=True)
annotated = ForeignKeyField(TagManifest, index=True)
label = ForeignKeyField(Label)
class Meta:
database = db
read_only_config = read_only_config
indexes = ((("annotated", "label"), True),)
@deprecated_model
class TagManifestLabelMap(BaseModel):
tag_manifest = ForeignKeyField(TagManifest, index=True)
manifest = ForeignKeyField(Manifest, null=True, index=True)
label = ForeignKeyField(Label, index=True)
tag_manifest_label = ForeignKeyField(TagManifestLabel, index=True)
manifest_label = ForeignKeyField(ManifestLabel, null=True, index=True)
broken_manifest = BooleanField(index=True, default=False)
@deprecated_model
class TagToRepositoryTag(BaseModel):
repository = ForeignKeyField(Repository, index=True)
tag = ForeignKeyField(Tag, index=True, unique=True)
repository_tag = ForeignKeyField(RepositoryTag, index=True, unique=True)
@unique
class RepoMirrorRuleType(IntEnum):
"""
@@ -2180,10 +2010,7 @@ appr_classes = set(
ApprBlobPlacement,
]
)
v22_classes = set(
[Manifest, ManifestLabel, ManifestBlob, ManifestLegacyImage, TagKind, ManifestChild, Tag]
)
transition_classes = set([TagManifestToManifest, TagManifestLabelMap, TagToRepositoryTag])
v22_classes = set([Manifest, ManifestLabel, ManifestBlob, TagKind, ManifestChild, Tag])
is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)]
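
The hunks above remove the deprecated models (RepositorySize, Image, DerivedStorageForImage, RepositoryTag, TorrentInfo, ManifestLegacyImage, TagManifest, TagManifestToManifest, TagManifestLabel, TagManifestLabelMap, TagToRepositoryTag) and drop them from the skip_transitive_deletes sets, from v22_classes, and from the now-deleted transition_classes. Per the third bullet of the commit message, the accompanying migration checks for table existence before dropping. A minimal sketch of that guard, assuming an Alembic migration and SQLAlchemy's runtime inspector; the revision identifiers and the lowercased table names (Peewee's default naming) are illustrative and not taken from this diff:

import sqlalchemy as sa
from alembic import op

# Illustrative revision identifiers; the real migration's IDs are not shown here.
revision = "aaaaaaaaaaaa"
down_revision = "bbbbbbbbbbbb"


def upgrade():
    inspector = sa.inspect(op.get_bind())
    existing = set(inspector.get_table_names())
    # Drop child tables before their parents so foreign keys do not block the
    # drops, and skip any table that an earlier deployment already removed.
    for table in (
        "tagmanifestlabelmap",
        "tagmanifesttomanifest",
        "tagmanifestlabel",
        "tagtorepositorytag",
        "tagmanifest",
        "manifestlegacyimage",
        "derivedstorageforimage",
        "repositorytag",
        "image",
        "torrentinfo",
        "repositorysize",
    ):
        if table in existing:
            op.drop_table(table)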

@@ -1,8 +1,8 @@
import os
import logging
import base64
import logging
import os
from collections import namedtuple
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
from util.security.secret import convert_secret_key

@@ -1,13 +1,12 @@
import base64
import json
import pickle
import string
import json
from random import SystemRandom
import bcrypt
from peewee import CharField, SmallIntegerField, TextField
from peewee import TextField, CharField, SmallIntegerField
from data.text import prefix_search
from util.bytes import Bytes

@@ -1,9 +1,9 @@
import logging
from data.logs_model.combined_model import CombinedLogsModel
from data.logs_model.document_logs_model import DocumentLogsModel
from data.logs_model.splunk_logs_model import SplunkLogsModel
from data.logs_model.table_logs_model import TableLogsModel
from data.logs_model.document_logs_model import DocumentLogsModel
from data.logs_model.combined_model import CombinedLogsModel
logger = logging.getLogger(__name__)

@@ -1,5 +1,5 @@
import logging
import itertools
import logging
from data.logs_model.datatypes import AggregatedLogCount, LogEntriesPage
from data.logs_model.interface import ActionLogsDataInterface

@@ -1,5 +1,4 @@
import json
from calendar import timegm
from collections import namedtuple
from email.utils import formatdate

@@ -3,38 +3,38 @@
import json
import logging
import uuid
from time import time
from datetime import timedelta, datetime, date
from dateutil.parser import parse as parse_datetime
from abc import ABCMeta, abstractmethod
from six import add_metaclass
from datetime import date, datetime, timedelta
from time import time
from dateutil.parser import parse as parse_datetime
from elasticsearch.exceptions import ConnectionTimeout, NotFoundError
from six import add_metaclass
from data import model
from data.database import CloseForLongOperation
from data.model import config
from data.model.log import (
_json_serialize,
ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING,
DataModelException,
)
from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
from data.logs_model.elastic_logs import LogEntry, configure_es
from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage
from data.logs_model.interface import (
ActionLogsDataInterface,
LogRotationContextInterface,
LogsIterationTimeout,
)
from data.logs_model.shared import SharedModel, epoch_ms, InvalidLogsDateRangeError
from data.logs_model.logs_producer import LogProducerProxy, LogSendException
from data.logs_model.logs_producer.elasticsearch_logs_producer import (
ElasticsearchLogsProducer,
)
from data.logs_model.logs_producer.kafka_logs_producer import KafkaLogsProducer
from data.logs_model.logs_producer.elasticsearch_logs_producer import ElasticsearchLogsProducer
from data.logs_model.logs_producer.kinesis_stream_logs_producer import KinesisStreamLogsProducer
from data.logs_model.logs_producer.kinesis_stream_logs_producer import (
KinesisStreamLogsProducer,
)
from data.logs_model.shared import InvalidLogsDateRangeError, SharedModel, epoch_ms
from data.model import config
from data.model.log import (
ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING,
DataModelException,
_json_serialize,
)
logger = logging.getLogger(__name__)

@@ -1,15 +1,13 @@
import os
import logging
import os
import re
from datetime import datetime, timedelta
from requests_aws4auth import AWS4Auth
from elasticsearch import RequestsHttpConnection
from elasticsearch.exceptions import NotFoundError, AuthorizationException
from elasticsearch_dsl import Index, Document, Integer, Date, Text, Ip, Keyword, Object
from elasticsearch.exceptions import AuthorizationException, NotFoundError
from elasticsearch_dsl import Date, Document, Index, Integer, Ip, Keyword, Object, Text
from elasticsearch_dsl.connections import connections
from requests_aws4auth import AWS4Auth
logger = logging.getLogger(__name__)

@@ -1,13 +1,13 @@
import logging
import json
import logging
from collections import namedtuple
from datetime import datetime
from tzlocal import get_localzone
from dateutil.relativedelta import relativedelta
from tzlocal import get_localzone
from data import model
from data.logs_model.datatypes import AggregatedLogCount, LogEntriesPage, Log
from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
from data.logs_model.interface import (
ActionLogsDataInterface,
LogRotationContextInterface,

@@ -1,4 +1,5 @@
from abc import ABCMeta, abstractmethod
from six import add_metaclass

@@ -1,6 +1,5 @@
import logging
logger = logging.getLogger(__name__)

@@ -2,9 +2,8 @@ import logging
from elasticsearch.exceptions import ElasticsearchException
from data.logs_model.logs_producer.interface import LogProducerInterface
from data.logs_model.logs_producer import LogSendException
from data.logs_model.logs_producer.interface import LogProducerInterface
logger = logging.getLogger(__name__)

@@ -1,4 +1,5 @@
from abc import ABCMeta, abstractmethod
from six import add_metaclass

@@ -1,13 +1,12 @@
import logging
from kafka.errors import KafkaError, KafkaTimeoutError
from kafka import KafkaProducer
from kafka.errors import KafkaError, KafkaTimeoutError
from data.logs_model.shared import epoch_ms
from data.logs_model.logs_producer import LogSendException
from data.logs_model.logs_producer.interface import LogProducerInterface
from data.logs_model.logs_producer.util import logs_json_serializer
from data.logs_model.logs_producer import LogSendException
from data.logs_model.shared import epoch_ms
logger = logging.getLogger(__name__)

@@ -1,15 +1,14 @@
import logging
import hashlib
import logging
import random
import boto3
from botocore.exceptions import ClientError
from botocore.client import Config
from botocore.exceptions import ClientError
from data.logs_model.logs_producer import LogSendException
from data.logs_model.logs_producer.interface import LogProducerInterface
from data.logs_model.logs_producer.util import logs_json_serializer
from data.logs_model.logs_producer import LogSendException
logger = logging.getLogger(__name__)

@@ -1,13 +1,13 @@
# -*- coding: utf-8 -*-
import logging
import json
import logging
from datetime import datetime
import pytest
from data.logs_model.logs_producer.util import logs_json_serializer
from data.logs_model.elastic_logs import LogEntry
from data.logs_model.logs_producer.util import logs_json_serializer
logger = logging.getLogger(__name__)

@@ -1,6 +1,5 @@
import uuid
import json
import uuid
from calendar import timegm
from data import model

@@ -1,6 +1,5 @@
import json
import logging
from datetime import datetime
from data import model

@@ -1,23 +1,22 @@
# pylint: disable=protected-access
import logging
from datetime import datetime, timedelta
from tzlocal import get_localzone
from dateutil.relativedelta import relativedelta
from tzlocal import get_localzone
from data import model
from data.model import config
from data.database import LogEntry, LogEntry2, LogEntry3, BaseModel, UseThenDisconnect
from data.database import BaseModel, LogEntry, LogEntry2, LogEntry3, UseThenDisconnect
from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
from data.logs_model.interface import (
ActionLogsDataInterface,
LogsIterationTimeout,
LogRotationContextInterface,
LogsIterationTimeout,
)
from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage
from data.logs_model.shared import SharedModel, InvalidLogsDateRangeError
from data.model.log import get_stale_logs, get_stale_logs_start_id, delete_stale_logs
from data.logs_model.shared import InvalidLogsDateRangeError, SharedModel
from data.model import config
from data.model.log import delete_stale_logs, get_stale_logs, get_stale_logs_start_id
from data.readreplica import ReadOnlyModeException
logger = logging.getLogger(__name__)

@@ -1,14 +1,12 @@
import fnmatch
import json
import uuid
import fnmatch
from collections import defaultdict
from contextlib import contextmanager
from datetime import datetime
import dateutil.parser
from httmock import urlmatch, HTTMock
from httmock import HTTMock, urlmatch
FAKE_ES_HOST = "fakees"

@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
import json
from datetime import datetime
from dateutil.parser import parse
from data.logs_model.datatypes import LogEntriesPage, Log, AggregatedLogCount
from data.logs_model.datatypes import AggregatedLogCount, Log, LogEntriesPage
def _status(d, code=200):

@@ -1,13 +1,12 @@
import pytest
from datetime import date, datetime, timedelta
from test.fixtures import *
import pytest
from freezegun import freeze_time
from data import model
from data.logs_model.inmemory_model import InMemoryModel
from data.logs_model.combined_model import CombinedLogsModel
from test.fixtures import *
from data.logs_model.inmemory_model import InMemoryModel
@pytest.fixture()

@@ -6,14 +6,18 @@ import json
from datetime import datetime, timedelta
import pytest
from mock import patch, Mock
from dateutil.parser import parse
from httmock import HTTMock, urlmatch
from mock import Mock, patch
from httmock import urlmatch, HTTMock
from data.logs_model import LogsModelProxy, configure
from data.logs_model.elastic_logs import (
INDEX_DATE_FORMAT,
INDEX_NAME_PREFIX,
ElasticsearchLogs,
)
from data.model.log import _json_serialize
from data.logs_model.elastic_logs import ElasticsearchLogs, INDEX_NAME_PREFIX, INDEX_DATE_FORMAT
from data.logs_model import configure, LogsModelProxy
from .mock_elasticsearch import *
FAKE_ES_HOST = "fakees"

@@ -1,20 +1,25 @@
import os
import pytest
from datetime import datetime, timedelta, date
from unittest.mock import patch
from data.logs_model.datatypes import AggregatedLogCount
from data.logs_model.table_logs_model import TableLogsModel
from data.logs_model.combined_model import CombinedLogsModel
from data.logs_model.inmemory_model import InMemoryModel
from data.logs_model.combined_model import _merge_aggregated_log_counts
from data.logs_model.document_logs_model import _date_range_in_single_index, DocumentLogsModel
from data.logs_model.interface import LogsIterationTimeout
from data.logs_model.test.fake_elasticsearch import FAKE_ES_HOST, fake_elasticsearch
from data.database import LogEntry, LogEntry2, LogEntry3, LogEntryKind
from data import model
from datetime import date, datetime, timedelta
from test.fixtures import *
from unittest.mock import patch
import pytest
from data import model
from data.database import LogEntry, LogEntry2, LogEntry3, LogEntryKind
from data.logs_model.combined_model import (
CombinedLogsModel,
_merge_aggregated_log_counts,
)
from data.logs_model.datatypes import AggregatedLogCount
from data.logs_model.document_logs_model import (
DocumentLogsModel,
_date_range_in_single_index,
)
from data.logs_model.inmemory_model import InMemoryModel
from data.logs_model.interface import LogsIterationTimeout
from data.logs_model.table_logs_model import TableLogsModel
from data.logs_model.test.fake_elasticsearch import FAKE_ES_HOST, fake_elasticsearch
@pytest.fixture()

@@ -1,21 +1,20 @@
import logging
import pytest
from dateutil.parser import parse
from mock import patch, Mock
import botocore
import pytest
from dateutil.parser import parse
from mock import Mock, patch
from data.logs_model import configure
from .mock_elasticsearch import *
from .test_elasticsearch import (
app_config,
logs_model_config,
logs_model,
mock_elasticsearch,
logs_model_config,
mock_db_model,
mock_elasticsearch,
)
from .mock_elasticsearch import *
logger = logging.getLogger(__name__)

@@ -3,16 +3,17 @@ import ssl
import tempfile
from ssl import SSLError
from tempfile import NamedTemporaryFile
from test.fixtures import *
from unittest.mock import MagicMock, call
import pytest
from dateutil.parser import parse
from mock import patch, Mock
from mock import Mock, patch
from .test_elasticsearch import logs_model, mock_db_model
from data.logs_model import configure
from test.fixtures import *
from ..logs_producer.splunk_logs_producer import SplunkLogsProducer
from .test_elasticsearch import logs_model, mock_db_model
FAKE_SPLUNK_HOST = "fakesplunk"
FAKE_SPLUNK_PORT = 443

@@ -7,14 +7,12 @@ which defines the parameters for synthesizing a Kubernetes job to connect to and
migrate a relational database.
"""
import yaml
import logging
from functools import partial, wraps
from alembic.operations import ops
import sqlalchemy as sa
import yaml
from alembic.operations import ops
logger = logging.getLogger(__name__)

@@ -1,23 +1,22 @@
import logging
import logging.config
import os
from urllib.parse import unquote
from alembic import context, op as alembic_op
from alembic import context
from alembic import op as alembic_op
from alembic.script.revision import ResolutionError
from alembic.util import CommandError
from peewee import SqliteDatabase
from sqlalchemy import create_engine
from app import app
from data.database import all_models, db, LEGACY_INDEX_MAP
from data.database import LEGACY_INDEX_MAP, all_models, db
from data.migrations.tester import NoopTester, PopulateTestDataTester
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from release import GIT_HEAD, REGION, SERVICE
from util.morecollections import AttrDict
logger = logging.getLogger(__name__)

Some files were not shown because too many files have changed in this diff.