secscan: deprecate support for Clair V2 (PROJQUAY-2837) (#951)
Removes read support for Clair V2, along with the need to package jwtproxy with Quay. TODO: drop the deprecated image API and image table, and remove the image data model.
Committed via GitHub. Parent commit: e6c6ecd47b. This commit: 5471d3cbcb.
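For orientation, a minimal sketch (not part of the commit) of the configuration that remains relevant once Clair V2 support is gone. Only the key names come from this diff (config.py and util/config/validators/validate_secscan.py); the endpoint URL and PSK value are illustrative:

    # Hedged illustration -- values are made up; only the key names appear in the diff.
    SECURITY_SCANNER_CONFIG = {
        "FEATURE_SECURITY_SCANNER": True,
        "SECURITY_SCANNER_V4_ENDPOINT": "http://clairv4:6060",  # hypothetical Clair V4 indexer URL
        "SECURITY_SCANNER_V4_PSK": "c2VjcmV0",                  # hypothetical pre-shared key
        # The removed V2/jwtproxy settings (SECURITY_SCANNER_ENDPOINT, JWTPROXY_SIGNER,
        # JWTPROXY_AUDIENCE) no longer have any effect.
    }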
Dockerfile (10 changed lines)
@@ -75,15 +75,6 @@ RUN set -ex\
     ; npm run --quiet build\
     ;
 
-# Jwtproxy grabs jwtproxy.
-FROM registry.access.redhat.com/ubi8/ubi:latest AS jwtproxy
-ENV OS=linux ARCH=amd64
-ARG JWTPROXY_VERSION=0.0.3
-RUN set -ex\
-    ; curl -fsSL -o /usr/local/bin/jwtproxy "https://github.com/coreos/jwtproxy/releases/download/v${JWTPROXY_VERSION}/jwtproxy-${OS}-${ARCH}"\
-    ; chmod +x /usr/local/bin/jwtproxy\
-    ;
-
 # Pushgateway grabs pushgateway.
 FROM registry.access.redhat.com/ubi8/ubi:latest AS pushgateway
 ENV OS=linux ARCH=amd64
@@ -138,7 +129,6 @@ RUN set -ex\
 WORKDIR $QUAYDIR
 RUN mkdir ${QUAYDIR}/config_app
 # Ordered from least changing to most changing.
-COPY --from=jwtproxy /usr/local/bin/jwtproxy /usr/local/bin/jwtproxy
 COPY --from=pushgateway /usr/local/bin/pushgateway /usr/local/bin/pushgateway
 COPY --from=build-python /app /app
 COPY --from=config-tool /opt/app-root/src/go/bin/config-tool /bin
@@ -31,9 +31,6 @@ RUN cd source/config-tool && \
     go mod vendor && \
     go build ./cmd/config-tool
 
-RUN cd source/jwtproxy && \
-    go build ./cmd/jwtproxy
-
 RUN cd source/pushgateway && \
     go mod vendor && \
     go build
@@ -73,7 +70,6 @@ COPY --from=build-npm $PIP_CERT $PIP_CERT
 RUN cp -Rp $REMOTE_SOURCE_DIR/app/source/quay/* $QUAYDIR
 
 COPY --from=build-gomod $REMOTE_SOURCE_DIR/app/source/config-tool/config-tool /usr/local/bin/config-tool
-COPY --from=build-gomod $REMOTE_SOURCE_DIR/app/source/jwtproxy/jwtproxy /usr/local/bin/jwtproxy
 COPY --from=build-gomod $REMOTE_SOURCE_DIR/app/source/config-tool/pkg/lib/editor $QUAYDIR/config_app
 COPY --from=build-gomod $REMOTE_SOURCE_DIR/app/source/pushgateway/pushgateway /usr/local/bin/pushgateway
 
boot.py (39 changed lines)
@@ -26,11 +26,6 @@ logger = logging.getLogger(__name__)
 
 @lru_cache(maxsize=1)
 def get_audience():
-    audience = app.config.get("JWTPROXY_AUDIENCE")
-
-    if audience:
-        return audience
-
     scheme = app.config.get("PREFERRED_URL_SCHEME")
     hostname = app.config.get("SERVER_HOSTNAME")
 
@@ -69,17 +64,10 @@ def _verify_service_key():
     return None
 
 
-def setup_jwt_proxy():
+def setup_instance_service_key():
     """
-    Creates a service key for quay to use in the jwtproxy and generates the JWT proxy configuration.
+    Creates a service key for quay.
     """
-    if os.path.exists(os.path.join(CONF_DIR, "jwtproxy_conf.yaml")):
-        # Proxy is already setup. Make sure the service key is still valid.
-        quay_key_id = _verify_service_key()
-        if quay_key_id is not None:
-            logger.warning("Service key %s already set up. Nothing to do.", quay_key_id)
-            return
-
     # Ensure we have an existing key if in read-only mode.
     if app.config.get("REGISTRY_STATE", "normal") == "readonly":
         quay_key_id = _verify_service_key()
@@ -107,27 +95,6 @@ def setup_jwt_proxy():
         )
     )
 
-    logger.warning("Generated new service key %s", quay_key_id)
-
-    # Generate the JWT proxy configuration.
-    audience = get_audience()
-    registry = audience + "/keys"
-    security_issuer = app.config.get("SECURITY_SCANNER_ISSUER_NAME", "security_scanner")
-
-    with open(os.path.join(CONF_DIR, "jwtproxy_conf.yaml.jnj")) as f:
-        template = Template(f.read())
-        rendered = template.render(
-            conf_dir=CONF_DIR,
-            audience=audience,
-            registry=registry,
-            key_id=quay_key_id,
-            security_issuer=security_issuer,
-            service_key_location=app.config["INSTANCE_SERVICE_KEY_LOCATION"],
-        )
-
-    with open(os.path.join(CONF_DIR, "jwtproxy_conf.yaml"), "w") as f:
-        f.write(rendered)
-
 
 def main():
     if not app.config.get("SETUP_COMPLETE", False):
@@ -136,7 +103,7 @@ def main():
         )
 
     sync_database_with_config(app.config)
-    setup_jwt_proxy()
+    setup_instance_service_key()
 
     # Record deploy
     if release.REGION and release.GIT_HEAD:
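A hedged illustration of what boot now touches on disk: the jwtproxy configuration is no longer rendered, while the instance service key paths from config.py are unchanged (paths taken from the hunks in this commit):

    import os
    from _init import CONF_DIR

    # No longer generated by boot.py (setup_jwt_proxy and its template are gone):
    JWTPROXY_CONF = os.path.join(CONF_DIR, "jwtproxy_conf.yaml")
    # Still provisioned/validated by setup_instance_service_key():
    SERVICE_KEY = os.path.join(CONF_DIR, "quay.pem")     # INSTANCE_SERVICE_KEY_LOCATION
    SERVICE_KEY_ID = os.path.join(CONF_DIR, "quay.kid")  # INSTANCE_SERVICE_KEY_KID_LOCATION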
@@ -1,17 +0,0 @@
-#! /bin/bash
-set -e
-QUAYPATH=${QUAYPATH:-"."}
-QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
-cd ${QUAYDIR:-"/"}
-SYSTEM_CERTDIR=${SYSTEM_CERTDIR:-"/etc/pki/ca-trust/source/anchors"}
-
-# Create certs for jwtproxy to mitm outgoing TLS connections
-# echo '{"CN":"CA","key":{"algo":"rsa","size":2048}}' | cfssl gencert -initca - | cfssljson -bare mitm
-mkdir -p /tmp/certificates; cd /tmp/certificates
-openssl req -new -newkey rsa:4096 -days 3650 -nodes -x509 \
-    -subj "/C=US/ST=NY/L=NYC/O=Dis/CN=self-signed" \
-    -keyout mitm-key.pem -out mitm.pem
-cp /tmp/certificates/mitm-key.pem $QUAYCONF/mitm.key
-cp /tmp/certificates/mitm.pem $QUAYCONF/mitm.cert
-cp /tmp/certificates/mitm.pem $SYSTEM_CERTDIR/mitm.crt
-rm -Rf /tmp/certificates
@@ -40,7 +40,6 @@ def registry_services():
         "gunicorn-secscan": {"autostart": "true"},
         "gunicorn-web": {"autostart": "true"},
         "ip-resolver-update-worker": {"autostart": "true"},
-        "jwtproxy": {"autostart": "true"},
         "memcache": {"autostart": "true"},
         "nginx": {"autostart": "true"},
         "pushgateway": {"autostart": "true"},
@@ -76,7 +75,6 @@ def config_services():
         "gunicorn-secscan": {"autostart": "false"},
         "gunicorn-web": {"autostart": "false"},
         "ip-resolver-update-worker": {"autostart": "false"},
-        "jwtproxy": {"autostart": "false"},
         "memcache": {"autostart": "false"},
         "nginx": {"autostart": "false"},
         "pushgateway": {"autostart": "false"},
@@ -1,28 +0,0 @@
-jwtproxy:
-  signer_proxy:
-    enabled: true
-    listen_addr: :8081
-    ca_key_file: {{ conf_dir }}/mitm.key
-    ca_crt_file: {{ conf_dir }}/mitm.cert
-
-    signer:
-      issuer: quay
-      expiration_time: 5m
-      max_skew: 1m
-      private_key:
-        type: preshared
-        options:
-          key_id: {{ key_id }}
-          private_key_path: {{ service_key_location }}
-  verifier_proxies:
-  - enabled: true
-    listen_addr: unix:/tmp/jwtproxy_secscan.sock
-    socket_permission: 0777
-    verifier:
-      upstream: unix:/tmp/gunicorn_secscan.sock
-      audience: {{ audience }}
-      key_server:
-        type: keyregistry
-        options:
-          issuer: {{ security_issuer }}
-          registry: {{ registry }}
@@ -52,15 +52,11 @@ map $http_x_forwarded_proto $proper_scheme {
 upstream web_app_server {
     server unix:/tmp/gunicorn_web.sock fail_timeout=0;
 }
-upstream jwtproxy_secscan {
-    server unix:/tmp/jwtproxy_secscan.sock fail_timeout=0;
-}
 upstream registry_app_server {
     server unix:/tmp/gunicorn_registry.sock fail_timeout=0;
 }
 
-# NOTE: Exposed for the _internal_ping *only*. All other secscan routes *MUST* go through
-# the jwtproxy.
+# NOTE: Exposed for the _internal_ping *only*.
 upstream secscan_app_server {
     server unix:/tmp/gunicorn_secscan.sock fail_timeout=0;
 }
@@ -264,12 +264,6 @@ autostart = {{ config['gunicorn-web']['autostart'] }}
 stdout_events_enabled = true
 stderr_events_enabled = true
 
-[program:jwtproxy]
-command=/usr/local/bin/jwtproxy --config %(ENV_QUAYCONF)s/jwtproxy_conf.yaml
-autostart = {{ config['jwtproxy']['autostart'] }}
-stdout_events_enabled = true
-stderr_events_enabled = true
-
 [program:memcache]
 command=memcached -u memcached -m 64 -l 127.0.0.1 -p 18080
 autostart = {{ config['memcache']['autostart'] }}
config.py (11 changed lines)
@@ -536,15 +536,6 @@ class DefaultConfig(ImmutableConfig):
     # Replaces the SERVER_HOSTNAME as the destination for mirroring.
     REPO_MIRROR_SERVER_HOSTNAME: Optional[str] = None
 
-    # JWTProxy Settings
-    # The address (sans schema) to proxy outgoing requests through the jwtproxy
-    # to be signed
-    JWTPROXY_SIGNER = "localhost:8081"
-
-    # The audience that jwtproxy should verify on incoming requests
-    # If None, will be calculated off of the SERVER_HOSTNAME (default)
-    JWTPROXY_AUDIENCE = None
-
     # "Secret" key for generating encrypted paging tokens. Only needed to be secret to
     # hide the ID range for production (in which this value is overridden). Should *not*
     # be relied upon for secure encryption otherwise.
@@ -562,14 +553,12 @@ class DefaultConfig(ImmutableConfig):
     SERVICE_LOG_ACCOUNT_ID = None
 
     # The service key ID for the instance service.
-    # NOTE: If changed, jwtproxy_conf.yaml.jnj must also be updated.
     INSTANCE_SERVICE_KEY_SERVICE = "quay"
 
     # The location of the key ID file generated for this instance.
     INSTANCE_SERVICE_KEY_KID_LOCATION = os.path.join(CONF_DIR, "quay.kid")
 
     # The location of the private key generated for this instance.
-    # NOTE: If changed, jwtproxy_conf.yaml.jnj must also be updated.
     INSTANCE_SERVICE_KEY_LOCATION = os.path.join(CONF_DIR, "quay.pem")
 
     # This instance's service key expiration in minutes.
@@ -2,7 +2,6 @@ import os
 import logging
 from collections import namedtuple
 
-from data.secscan_model.secscan_v2_model import V2SecurityScanner, NoopV2SecurityScanner
 from data.secscan_model.secscan_v4_model import (
     V4SecurityScanner,
     NoopV4SecurityScanner,
@@ -24,13 +23,8 @@ class SecurityScannerModelProxy(SecurityScannerInterface):
         except InvalidConfigurationException:
             self._model = NoopV4SecurityScanner()
 
-        try:
-            self._legacy_model = V2SecurityScanner(app, instance_keys, storage)
-        except InvalidConfigurationException:
-            self._legacy_model = NoopV2SecurityScanner()
-
         logger.info("===============================")
-        logger.info("Using split secscan model: `%s`", [self._legacy_model, self._model])
+        logger.info("Using split secscan model: `%s`", [self._model])
         logger.info("===============================")
 
         return self
@@ -52,15 +46,6 @@ class SecurityScannerModelProxy(SecurityScannerInterface):
         if info.status != ScanLookupStatus.NOT_YET_INDEXED:
             return info
 
-        legacy_info = self._legacy_model.load_security_information(
-            manifest_or_legacy_image, include_vulnerabilities
-        )
-        if (
-            legacy_info.status != ScanLookupStatus.UNSUPPORTED_FOR_INDEXING
-            and legacy_info.status != ScanLookupStatus.COULD_NOT_LOAD
-        ):
-            return legacy_info
-
         return SecurityInformationLookupResult.with_status(ScanLookupStatus.NOT_YET_INDEXED)
 
     def register_model_cleanup_callbacks(self, data_model_config):
@@ -68,7 +53,7 @@ class SecurityScannerModelProxy(SecurityScannerInterface):
 
     @property
     def legacy_api_handler(self):
-        return self._legacy_model.legacy_api_handler
+        raise NotImplementedError
 
     def lookup_notification_page(self, notification_id, page_index=None):
         return self._model.lookup_notification_page(notification_id, page_index)
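A rough usage sketch of the proxy after this change. It assumes the module still exposes a configured SecurityScannerModelProxy instance (commonly imported as secscan_model), which is not shown in these hunks, and that `manifest` comes from the caller's context:

    from data.secscan_model import secscan_model  # assumed module-level proxy instance
    from data.secscan_model.datatypes import ScanLookupStatus

    info = secscan_model.load_security_information(manifest, include_vulnerabilities=True)
    if info.status == ScanLookupStatus.NOT_YET_INDEXED:
        # With the V2 fallback removed, anything Clair V4 has not indexed yet simply
        # reports NOT_YET_INDEXED; there is no legacy-model second lookup.
        pass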
@@ -3,7 +3,6 @@ import pytest
 from mock import patch, Mock
 
 from data.secscan_model.datatypes import ScanLookupStatus, SecurityInformationLookupResult
-from data.secscan_model.secscan_v2_model import V2SecurityScanner, ScanToken as V2ScanToken
 from data.secscan_model.secscan_v4_model import (
     V4SecurityScanner,
     IndexReportState,
@@ -24,8 +23,8 @@ from app import app, instance_keys, storage
     [
         (False, False, ScanLookupStatus.NOT_YET_INDEXED),
         (False, True, ScanLookupStatus.UNSUPPORTED_FOR_INDEXING),
-        (True, False, ScanLookupStatus.FAILED_TO_INDEX),
-        (True, True, ScanLookupStatus.UNSUPPORTED_FOR_INDEXING),
+        # (True, False, ScanLookupStatus.FAILED_TO_INDEX),
+        # (True, True, ScanLookupStatus.UNSUPPORTED_FOR_INDEXING),
     ],
 )
 def test_load_security_information(indexed_v2, indexed_v4, expected_status, initialized_db):
@@ -72,7 +71,6 @@ def test_load_security_information(indexed_v2, indexed_v4, expected_status, init
         (None, V4ScanToken(56), None),
         (V4ScanToken(None), V4ScanToken(56), AssertionError),
         (V4ScanToken(1), V4ScanToken(56), None),
-        (V2ScanToken(158), V4ScanToken(56), AssertionError),
     ],
 )
 def test_perform_indexing(next_token, expected_next_token, expected_error, initialized_db):
@@ -1,128 +0,0 @@
-import mock
-import pytest
-
-from data.secscan_model.datatypes import ScanLookupStatus, SecurityInformation
-from data.secscan_model.secscan_v2_model import V2SecurityScanner
-from data.registry_model import registry_model
-from data.database import Manifest, Image, ManifestSecurityStatus, IndexStatus, IndexerVersion
-from data.model.oci import shared
-from data.model.image import set_secscan_status
-
-from test.fixtures import *
-
-from app import app, instance_keys, storage
-
-
-def test_load_security_information_unknown_manifest(initialized_db):
-    repository_ref = registry_model.lookup_repository("devtable", "simple")
-    tag = registry_model.get_repo_tag(repository_ref, "latest")
-    manifest = registry_model.get_manifest_for_tag(tag)
-
-    registry_model.populate_legacy_images_for_testing(manifest, storage)
-
-    # Delete the manifest.
-    Manifest.get(id=manifest._db_id).delete_instance(recursive=True)
-
-    secscan = V2SecurityScanner(app, instance_keys, storage)
-    assert (
-        secscan.load_security_information(manifest).status
-        == ScanLookupStatus.UNSUPPORTED_FOR_INDEXING
-    )
-
-
-def test_load_security_information_failed_to_index(initialized_db):
-    repository_ref = registry_model.lookup_repository("devtable", "simple")
-    tag = registry_model.get_repo_tag(repository_ref, "latest")
-    manifest = registry_model.get_manifest_for_tag(tag)
-
-    registry_model.populate_legacy_images_for_testing(manifest, storage)
-
-    # Set the index status.
-    image = shared.get_legacy_image_for_manifest(manifest._db_id)
-    image.security_indexed = False
-    image.security_indexed_engine = 3
-    image.save()
-
-    secscan = V2SecurityScanner(app, instance_keys, storage)
-    assert secscan.load_security_information(manifest).status == ScanLookupStatus.FAILED_TO_INDEX
-
-
-def test_load_security_information_queued(initialized_db):
-    repository_ref = registry_model.lookup_repository("devtable", "simple")
-    tag = registry_model.get_repo_tag(repository_ref, "latest")
-    manifest = registry_model.get_manifest_for_tag(tag)
-
-    registry_model.populate_legacy_images_for_testing(manifest, storage)
-
-    secscan = V2SecurityScanner(app, instance_keys, storage)
-    assert secscan.load_security_information(manifest).status == ScanLookupStatus.NOT_YET_INDEXED
-
-
-@pytest.mark.parametrize(
-    "secscan_api_response",
-    [
-        ({"Layer": {}}),
-        (
-            {
-                "Layer": {
-                    "IndexedByVersion": 3,
-                    "ParentName": "9c6afaebf33df8db2e3f38f95c402d82e025386730f6a8cbe0b780a6053cdd11.d4b545b4-49ce-4bc4-8bbe-b58bed7bddd9",
-                    "Name": "ed209f9bdb3766c3da8a004a72e3a30901bde36c39466a3825af1cd12894e7a3.86f0a285-6f29-47c4-a3ae-7e2c70cad0ba",
-                }
-            }
-        ),
-        (
-            {
-                "Layer": {
-                    "IndexedByVersion": 3,
-                    "ParentName": "9c6afaebf33df8db2e3f38f95c402d82e025386730f6a8cbe0b780a6053cdd11.d4b545b4-49ce-4bc4-8bbe-b58bed7bddd9",
-                    "Name": "ed209f9bdb3766c3da8a004a72e3a30901bde36c39466a3825af1cd12894e7a3.86f0a285-6f29-47c4-a3ae-7e2c70cad0ba",
-                    "Features": [
-                        {
-                            "Name": "tzdata",
-                            "VersionFormat": "",
-                            "NamespaceName": "",
-                            "AddedBy": "sha256:8d691f585fa8cec0eba196be460cfaffd69939782d6162986c3e0c5225d54f02",
-                            "Version": "2019c-0+deb10u1",
-                        }
-                    ],
-                }
-            }
-        ),
-    ],
-)
-def test_load_security_information_api_responses(secscan_api_response, initialized_db):
-    repository_ref = registry_model.lookup_repository("devtable", "simple")
-    tag = registry_model.get_repo_tag(repository_ref, "latest")
-    manifest = registry_model.get_manifest_for_tag(tag)
-
-    registry_model.populate_legacy_images_for_testing(manifest, storage)
-
-    legacy_image_row = shared.get_legacy_image_for_manifest(manifest._db_id)
-    assert legacy_image_row is not None
-    set_secscan_status(legacy_image_row, True, 3)
-
-    secscan = V2SecurityScanner(app, instance_keys, storage)
-    secscan._legacy_secscan_api = mock.Mock()
-    secscan._legacy_secscan_api.get_layer_data.return_value = secscan_api_response
-
-    security_information = secscan.load_security_information(manifest).security_information
-
-    assert isinstance(security_information, SecurityInformation)
-    assert security_information.Layer.Name == secscan_api_response["Layer"].get("Name", "")
-    assert security_information.Layer.ParentName == secscan_api_response["Layer"].get(
-        "ParentName", ""
-    )
-    assert security_information.Layer.IndexedByVersion == secscan_api_response["Layer"].get(
-        "IndexedByVersion", None
-    )
-    assert len(security_information.Layer.Features) == len(
-        secscan_api_response["Layer"].get("Features", [])
-    )
-
-
-def test_perform_indexing(initialized_db):
-    secscan = V2SecurityScanner(app, instance_keys, storage)
-
-    with pytest.raises(NotImplementedError):
-        secscan.perform_indexing()
@@ -51,25 +51,6 @@ def _check_gunicorn(endpoint):
     return fn
 
 
-def _check_jwt_proxy(app):
-    """
-    Returns the status of JWT proxy in the container.
-    """
-    client = app.config["HTTPCLIENT"]
-    # FIXME(alecmerdler): This is no longer behind jwtproxy...
-    registry_url = _compute_internal_endpoint(app, "secscan")
-    try:
-        status_code = client.get(registry_url, verify=False, timeout=2).status_code
-        okay = status_code == 403
-        return (
-            okay,
-            ("Got non-403 response for JWT proxy: %s" % status_code) if not okay else None,
-        )
-    except Exception as ex:
-        logger.exception("Exception when checking jwtproxy health: %s", registry_url)
-        return (False, "Exception when checking jwtproxy health: %s" % registry_url)
-
-
 def _check_database(app):
     """
     Returns the status of the database, as accessed from this instance.
@@ -181,8 +162,6 @@ _INSTANCE_SERVICES = {
     "web_gunicorn": _check_gunicorn("_internal_ping"),
     "service_key": _check_service_key,
     "disk_space": _check_disk_space(for_warning=False),
-    # https://issues.redhat.com/browse/PROJQUAY-1193
-    # "jwtproxy": _check_jwt_proxy, TODO: remove with removal of jwtproxy in container
 }
 
 _GLOBAL_SERVICES = {
@@ -38,7 +38,7 @@ from data.database import RepositoryActionCount, Repository as RepositoryTable
 from data.logs_model import logs_model
 from data.registry_model import registry_model
 from test.helpers import assert_action_logged, check_transitive_modifications
-from util.secscan.fake import fake_security_scanner
+from util.secscan.v4.fake import fake_security_scanner
 
 from endpoints.api.team import (
     TeamMember,
@@ -3,13 +3,15 @@ import time
 import unittest
 
 from app import app, storage, url_scheme_and_hostname
+from config import build_requests_session
 from data import model
 from data.registry_model import registry_model
 from data.database import Image, ManifestLegacyImage
 from initdb import setup_database_for_testing, finished_database_for_testing
 from util.secscan.secscan_util import get_blob_download_uri_getter
-from util.secscan.api import SecurityScannerAPI, APIRequestFailure
-from util.secscan.fake import fake_security_scanner
+from util.secscan.v4.api import ClairSecurityScannerAPI, APIRequestFailure
+from util.secscan.v4.fake import fake_security_scanner
+from util.secscan.blob import BlobURLRetriever
 from util.security.instancekeys import InstanceKeys
 
 
@@ -39,15 +41,11 @@ class TestSecurityScanner(unittest.TestCase):
         self.ctx.__enter__()
 
         instance_keys = InstanceKeys(app)
-        self.api = SecurityScannerAPI(
-            app.config,
-            storage,
-            app.config["SERVER_HOSTNAME"],
-            app.config["HTTPCLIENT"],
-            uri_creator=get_blob_download_uri_getter(
-                app.test_request_context("/"), url_scheme_and_hostname
-            ),
-            instance_keys=instance_keys,
+        retriever = BlobURLRetriever(storage, instance_keys, app)
+
+        self.api = ClairSecurityScannerAPI(
+            "http://fakesecurityscanner", build_requests_session(), retriever
         )
 
     def tearDown(self):
@@ -75,24 +73,22 @@ class TestSecurityScanner(unittest.TestCase):
         """
         Test for basic retrieval of layers from the security scanner.
         """
 
         repo_ref = registry_model.lookup_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
         repo_tag = registry_model.get_repo_tag(repo_ref, "latest")
         manifest = registry_model.get_manifest_for_tag(repo_tag)
+        layers = registry_model.list_manifest_layers(manifest, storage, True)
         registry_model.populate_legacy_images_for_testing(manifest, storage)
 
         with fake_security_scanner() as security_scanner:
             # Ensure the layer doesn't exist yet.
-            self.assertFalse(security_scanner.has_layer(security_scanner.layer_id(manifest)))
-            self.assertIsNone(self.api.get_layer_data(manifest))
+            self.assertIsNone(self.api.index_report(manifest.digest))
 
             # Add the layer.
-            security_scanner.add_layer(security_scanner.layer_id(manifest))
+            self.api.index(manifest, layers)
 
             # Retrieve the results.
-            result = self.api.get_layer_data(manifest, include_vulnerabilities=True)
+            result = self.api.vulnerability_report(manifest.digest)
             self.assertIsNotNone(result)
-            self.assertEqual(result["Layer"]["Name"], security_scanner.layer_id(manifest))
 
 
 if __name__ == "__main__":
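A condensed sketch of the V4 client surface the rewritten test exercises. The constructor arguments mirror the setUp hunk above; the endpoint URL is illustrative, and `app`, `storage`, `instance_keys`, `manifest`, and `layers` are assumed to exist in the caller's context:

    retriever = BlobURLRetriever(storage, instance_keys, app)
    api = ClairSecurityScannerAPI("http://clairv4:6060", build_requests_session(), retriever)

    api.index(manifest, layers)                        # submit the manifest's layers for indexing
    report = api.index_report(manifest.digest)         # index state, keyed by manifest digest
    vulns = api.vulnerability_report(manifest.digest)  # vulnerability report for the same digest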
@@ -57,8 +57,6 @@ INTERNAL_ONLY_PROPERTIES = {
     "GARBAGE_COLLECTION_FREQUENCY",
     "PAGE_TOKEN_KEY",
     "BUILD_MANAGER",
-    "JWTPROXY_AUDIENCE",
-    "JWTPROXY_SIGNER",
     "SECURITY_SCANNER_INDEXING_MIN_ID",
     "SECURITY_SCANNER_V4_REINDEX_THRESHOLD",
     "STATIC_SITE_BUCKET",
@@ -4,7 +4,7 @@ from config import build_requests_session
 from util.config import URLSchemeAndHostname
 from util.config.validator import ValidatorContext
 from util.config.validators.validate_secscan import SecurityScannerValidator
-from util.secscan.fake import fake_security_scanner
+from util.secscan.v4.fake import fake_security_scanner
 
 from test.fixtures import *
 
@@ -36,7 +36,7 @@ def test_validate_noop(unvalidated_config, app):
                 "TESTING": True,
                 "DISTRIBUTED_STORAGE_PREFERENCE": [],
                 "FEATURE_SECURITY_SCANNER": True,
-                "SECURITY_SCANNER_ENDPOINT": "http://invalidhost",
+                "SECURITY_SCANNER_V4_ENDPOINT": "http://invalidhost",
             },
             Exception,
         ),
@@ -45,7 +45,7 @@ def test_validate_noop(unvalidated_config, app):
                 "TESTING": True,
                 "DISTRIBUTED_STORAGE_PREFERENCE": [],
                 "FEATURE_SECURITY_SCANNER": True,
-                "SECURITY_SCANNER_ENDPOINT": "http://fakesecurityscanner",
+                "SECURITY_SCANNER_V4_ENDPOINT": "http://fakesecurityscanner",
             },
             None,
         ),
@@ -1,7 +1,7 @@
 import time
 
 # from boot import setup_jwt_proxy
-from util.secscan.api import SecurityScannerAPI
+from util.secscan.v4.api import ClairSecurityScannerAPI
 from util.config.validators import BaseValidator, ConfigValidationException
 
 
@@ -24,13 +24,11 @@ class SecurityScannerValidator(BaseValidator):
         if not feature_sec_scanner:
             return
 
-        api = SecurityScannerAPI(
-            config,
+        api = ClairSecurityScannerAPI(
+            config.get("SECURITY_SCANNER_V4_ENDPOINT"),
+            client,
             None,
-            server_hostname,
-            client=client,
-            skip_validation=True,
-            uri_creator=uri_creator,
+            jwt_psk=config.get("SECURITY_SCANNER_V4_PSK"),
         )
 
         # if not is_testing:
@@ -44,10 +42,8 @@ class SecurityScannerValidator(BaseValidator):
 
         while max_tries > 0:
             try:
-                response = api.ping()
+                response = api.state()
                 last_exception = None
-                if response.status_code == 200:
-                    return
             except Exception as ex:
                 last_exception = ex
 
@@ -57,6 +53,6 @@ class SecurityScannerValidator(BaseValidator):
         if last_exception is not None:
             message = str(last_exception)
             raise ConfigValidationException("Could not ping security scanner: %s" % message)
-        else:
-            message = "Expected 200 status code, got %s: %s" % (response.status_code, response.text)
+        elif not response.get("state"):
+            message = "Invalid indexer state" % (response.status_code, response.text)
             raise ConfigValidationException("Could not ping security scanner: %s" % message)
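In short, the validator now pings Clair V4's state endpoint instead of the V2 metrics endpoint. A hedged sketch of the new check (the response shape is inferred from response.get("state") in the hunk above; the state value shown is made up):

    response = api.state()            # e.g. {"state": "3f2a..."} -- illustrative indexer state
    if not response.get("state"):
        raise ConfigValidationException("Could not ping security scanner: Invalid indexer state")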
@@ -11,8 +11,6 @@ from _init import CONF_DIR
 
 TOKEN_VALIDITY_LIFETIME_S = 60  # Amount of time the repo mirror has to call the skopeo URL
 
-MITM_CERT_PATH = os.path.join(CONF_DIR, "mitm.cert")
-
 DEFAULT_HTTP_HEADERS = {"Connection": "close"}
 
 logger = logging.getLogger(__name__)
@@ -1,333 +0,0 @@
-import os
-import logging
-
-from abc import ABCMeta, abstractmethod
-from six import add_metaclass
-from urllib.parse import urljoin
-
-import requests
-
-from data import model
-from data.database import CloseForLongOperation, Image, Manifest, ManifestLegacyImage
-from data.registry_model.datatypes import Manifest as ManifestDataType, LegacyImage
-from util.abchelpers import nooper
-from util.failover import failover, FailoverException
-from util.secscan.validator import V2SecurityConfigValidator
-
-from _init import CONF_DIR
-
-TOKEN_VALIDITY_LIFETIME_S = 60  # Amount of time the security scanner has to call the layer URL
-
-UNKNOWN_PARENT_LAYER_ERROR_MSG = "worker: parent layer is unknown, it must be processed first"
-
-MITM_CERT_PATH = os.path.join(CONF_DIR, "mitm.cert")
-
-DEFAULT_HTTP_HEADERS = {"Connection": "close"}
-
-logger = logging.getLogger(__name__)
-
-
-class APIRequestFailure(Exception):
-    """
-    Exception raised when there is a failure to conduct an API request.
-    """
-
-
-class Non200ResponseException(Exception):
-    """
-    Exception raised when the upstream API returns a non-200 HTTP status code.
-    """
-
-    def __init__(self, response):
-        super(Non200ResponseException, self).__init__()
-        self.response = response
-
-
-_API_METHOD_GET_LAYER = "layers/%s"
-_API_METHOD_PING = "metrics"
-
-
-def compute_layer_id(layer):
-    """
-    Returns the ID for the layer in the security scanner.
-    """
-    assert isinstance(layer, ManifestDataType)
-
-    manifest = Manifest.get(id=layer._db_id)
-    try:
-        layer = ManifestLegacyImage.get(manifest=manifest).image
-    except ManifestLegacyImage.DoesNotExist:
-        return None
-
-    assert layer.docker_image_id
-    assert layer.storage.uuid
-    return "%s.%s" % (layer.docker_image_id, layer.storage.uuid)
-
-
-class SecurityScannerAPI(object):
-    """
-    Helper class for talking to the Security Scan service (usually Clair).
-    """
-
-    def __init__(
-        self,
-        config,
-        storage,
-        server_hostname=None,
-        client=None,
-        skip_validation=False,
-        uri_creator=None,
-        instance_keys=None,
-    ):
-        feature_enabled = config.get("FEATURE_SECURITY_SCANNER", False)
-        has_valid_config = skip_validation
-
-        if not skip_validation and feature_enabled:
-            config_validator = V2SecurityConfigValidator(
-                feature_enabled, config.get("SECURITY_SCANNER_ENDPOINT")
-            )
-            has_valid_config = config_validator.valid()
-
-        if feature_enabled and has_valid_config:
-            self.state = ImplementedSecurityScannerAPI(
-                config,
-                storage,
-                server_hostname,
-                client=client,
-                uri_creator=uri_creator,
-                instance_keys=instance_keys,
-            )
-        else:
-            self.state = NoopSecurityScannerAPI()
-
-    def __getattr__(self, name):
-        return getattr(self.state, name, None)
-
-
-@add_metaclass(ABCMeta)
-class SecurityScannerAPIInterface(object):
-    """
-    Helper class for talking to the Security Scan service (usually Clair).
-    """
-
-    @abstractmethod
-    def ping(self):
-        """
-        Calls GET on the metrics endpoint of the security scanner to ensure it is running and
-        properly configured.
-
-        Returns the HTTP response.
-        """
-        pass
-
-    @abstractmethod
-    def check_layer_vulnerable(self, layer_id, cve_name):
-        """
-        Checks to see if the layer with the given ID is vulnerable to the specified CVE.
-        """
-        pass
-
-    @abstractmethod
-    def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
-        """
-        Returns the layer data for the specified layer.
-
-        On error, returns None.
-        """
-        pass
-
-
-@nooper
-class NoopSecurityScannerAPI(SecurityScannerAPIInterface):
-    """
-    No-op version of the security scanner API.
-    """
-
-    pass
-
-
-class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface):
-    """
-    Helper class for talking to the Security Scan service (Clair).
-    """
-
-    # TODO refactor this to not take an app config, and instead just the things it needs as a config object
-    def __init__(
-        self, config, storage, server_hostname, client=None, uri_creator=None, instance_keys=None
-    ):
-        self._config = config
-        self._instance_keys = instance_keys
-        self._client = client
-        self._storage = storage
-        self._server_hostname = server_hostname
-        self._default_storage_locations = config["DISTRIBUTED_STORAGE_PREFERENCE"]
-        self._target_version = config.get("SECURITY_SCANNER_ENGINE_VERSION_TARGET", 2)
-        self._uri_creator = uri_creator
-
-    def ping(self):
-        """
-        Calls GET on the metrics endpoint of the security scanner to ensure it is running and
-        properly configured.
-
-        Returns the HTTP response.
-        """
-        try:
-            return self._call("GET", _API_METHOD_PING)
-        except requests.exceptions.Timeout as tie:
-            logger.exception("Timeout when trying to connect to security scanner endpoint")
-            msg = "Timeout when trying to connect to security scanner endpoint: %s" % tie.message
-            raise Exception(msg)
-        except requests.exceptions.ConnectionError as ce:
-            logger.exception("Connection error when trying to connect to security scanner endpoint")
-            msg = (
-                "Connection error when trying to connect to security scanner endpoint: %s"
-                % ce.message
-            )
-            raise Exception(msg)
-        except (requests.exceptions.RequestException, ValueError) as ve:
-            logger.exception("Exception when trying to connect to security scanner endpoint")
-            msg = "Exception when trying to connect to security scanner endpoint: %s" % ve
-            raise Exception(msg)
-
-    def check_layer_vulnerable(self, layer_id, cve_name):
-        """
-        Checks to see if the layer with the given ID is vulnerable to the specified CVE.
-        """
-        layer_data = self._get_layer_data(layer_id, include_vulnerabilities=True)
-        if layer_data is None or "Layer" not in layer_data or "Features" not in layer_data["Layer"]:
-            return False
-
-        for feature in layer_data["Layer"]["Features"]:
-            for vuln in feature.get("Vulnerabilities", []):
-                if vuln["Name"] == cve_name:
-                    return True
-
-        return False
-
-    def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
-        """
-        Returns the layer data for the specified layer.
-
-        On error, returns None.
-        """
-        layer_id = compute_layer_id(layer)
-        if layer_id is None:
-            return None
-
-        return self._get_layer_data(layer_id, include_features, include_vulnerabilities)
-
-    def _get_layer_data(self, layer_id, include_features=False, include_vulnerabilities=False):
-        params = {}
-        if include_features:
-            params = {"features": True}
-
-        if include_vulnerabilities:
-            params = {"vulnerabilities": True}
-
-        try:
-            response = self._call("GET", _API_METHOD_GET_LAYER % layer_id, params=params)
-            logger.debug(
-                "Got response %s for vulnerabilities for layer %s", response.status_code, layer_id
-            )
-            try:
-                return response.json()
-            except ValueError:
-                logger.exception("Failed to decode response JSON")
-                return None
-
-        except Non200ResponseException as ex:
-            logger.debug(
-                "Got failed response %s for vulnerabilities for layer %s",
-                ex.response.status_code,
-                layer_id,
-            )
-            if ex.response.status_code == 404:
-                return None
-            else:
-                logger.error(
-                    "downstream security service failure: status %d, text: %s",
-                    ex.response.status_code,
-                    ex.response.text,
-                )
-                if ex.response.status_code // 100 == 5:
-                    raise APIRequestFailure("Downstream service returned 5xx")
-                else:
-                    raise APIRequestFailure("Downstream service returned non-200")
-        except requests.exceptions.Timeout:
-            logger.exception(
-                "API call timed out for loading vulnerabilities for layer %s", layer_id
-            )
-            raise APIRequestFailure("API call timed out")
-        except requests.exceptions.ConnectionError:
-            logger.exception("Connection error for loading vulnerabilities for layer %s", layer_id)
-            raise APIRequestFailure("Could not connect to security service")
-        except requests.exceptions.RequestException:
-            logger.exception("Failed to get layer data response for %s", layer_id)
-            raise APIRequestFailure()
-
-    def _request(self, method, endpoint, path, body, params, timeout):
-        """
-        Issues an HTTP request to the security endpoint.
-        """
-        url = _join_api_url(endpoint, self._config.get("SECURITY_SCANNER_API_VERSION", "v1"), path)
-        signer_proxy_url = self._config.get("JWTPROXY_SIGNER", "localhost:8081")
-
-        logger.debug("%sing security URL %s", method.upper(), url)
-        resp = self._client.request(
-            method,
-            url,
-            json=body,
-            params=params,
-            timeout=timeout,
-            verify=MITM_CERT_PATH,
-            headers=DEFAULT_HTTP_HEADERS,
-            proxies={"https": "http://" + signer_proxy_url, "http": "http://" + signer_proxy_url},
-        )
-        if resp.status_code // 100 != 2:
-            raise Non200ResponseException(resp)
-        return resp
-
-    def _call(self, method, path, params=None, body=None):
-        """
-        Issues an HTTP request to the security endpoint handling the logic of using an alternative
-        BATCH endpoint for non-GET requests and failover for GET requests.
-        """
-        timeout = self._config.get("SECURITY_SCANNER_API_TIMEOUT_SECONDS", 1)
-        endpoint = self._config["SECURITY_SCANNER_ENDPOINT"]
-
-        with CloseForLongOperation(self._config):
-            # If the request isn't a read, attempt to use a batch stack and do not fail over.
-            if method != "GET":
-                if self._config.get("SECURITY_SCANNER_ENDPOINT_BATCH") is not None:
-                    endpoint = self._config["SECURITY_SCANNER_ENDPOINT_BATCH"]
-                    timeout = (
-                        self._config.get("SECURITY_SCANNER_API_BATCH_TIMEOUT_SECONDS") or timeout
-                    )
-                return self._request(method, endpoint, path, body, params, timeout)
-
-            # The request is read-only and can failover.
-            all_endpoints = [endpoint] + self._config.get(
-                "SECURITY_SCANNER_READONLY_FAILOVER_ENDPOINTS", []
-            )
-            return _failover_read_request(
-                *[
-                    ((self._request, endpoint, path, body, params, timeout), {})
-                    for endpoint in all_endpoints
-                ]
-            )
-
-
-def _join_api_url(endpoint, api_version, path):
-    pathless_url = urljoin(endpoint, "/" + api_version) + "/"
-    return urljoin(pathless_url, path)
-
-
-@failover
-def _failover_read_request(request_fn, endpoint, path, body, params, timeout):
-    """
-    This function auto-retries read-only requests until they return a 2xx status code.
-    """
-    try:
-        return request_fn("GET", endpoint, path, body, params, timeout)
-    except (requests.exceptions.RequestException, Non200ResponseException) as ex:
-        raise FailoverException(ex)
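For contrast between the two generations: the removed V2 client above addressed Clair by a synthetic layer ID built in compute_layer_id, while the V4 client used elsewhere in this commit keys index and vulnerability reports by manifest digest. A two-line sketch (`legacy_image` and `manifest` are assumed objects from the surrounding data model):

    legacy_layer_id = "%s.%s" % (legacy_image.docker_image_id, legacy_image.storage.uuid)  # V2, removed
    v4_report_key = manifest.digest                                                        # V4, retained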
@@ -1,272 +0,0 @@
-import json
-import copy
-import uuid
-import urllib.parse
-
-from contextlib import contextmanager
-from httmock import urlmatch, HTTMock, all_requests
-
-from util.secscan.api import UNKNOWN_PARENT_LAYER_ERROR_MSG, compute_layer_id
-
-
-@contextmanager
-def fake_security_scanner(hostname="fakesecurityscanner"):
-    """
-    Context manager which yields a fake security scanner.
-
-    All requests made to the given hostname (default: fakesecurityscanner) will be handled by the
-    fake.
-    """
-    scanner = FakeSecurityScanner(hostname)
-    with HTTMock(*(scanner.get_endpoints())):
-        yield scanner
-
-
-class FakeSecurityScanner(object):
-    """
-    Implements a fake security scanner (with somewhat real responses) for testing API calls and
-    responses.
-    """
-
-    def __init__(self, hostname, index_version=1):
-        self.hostname = hostname
-        self.index_version = index_version
-        self.layers = {}
-        self.layer_vulns = {}
-
-        self.ok_layer_id = None
-        self.fail_layer_id = None
-        self.internal_error_layer_id = None
-        self.error_layer_id = None
-        self.unexpected_status_layer_id = None
-
-    def set_ok_layer_id(self, ok_layer_id):
-        """
-        Sets a layer ID that, if encountered when the analyze call is made, causes a 200 to be
-        immediately returned.
-        """
-        self.ok_layer_id = ok_layer_id
-
-    def set_fail_layer_id(self, fail_layer_id):
-        """
-        Sets a layer ID that, if encountered when the analyze call is made, causes a 422 to be
-        raised.
-        """
-        self.fail_layer_id = fail_layer_id
-
-    def set_internal_error_layer_id(self, internal_error_layer_id):
-        """
-        Sets a layer ID that, if encountered when the analyze call is made, causes a 500 to be
-        raised.
-        """
-        self.internal_error_layer_id = internal_error_layer_id
-
-    def set_error_layer_id(self, error_layer_id):
-        """
-        Sets a layer ID that, if encountered when the analyze call is made, causes a 400 to be
-        raised.
-        """
-        self.error_layer_id = error_layer_id
-
-    def set_unexpected_status_layer_id(self, layer_id):
-        """
-        Sets a layer ID that, if encountered when the analyze call is made, causes an HTTP 600 to be
-        raised.
-
-        This is useful in testing the robustness of the to unknown status codes.
-        """
-        self.unexpected_status_layer_id = layer_id
-
-    def has_layer(self, layer_id):
-        """
-        Returns true if the layer with the given ID has been analyzed.
-        """
-        return layer_id in self.layers
-
-    def layer_id(self, layer):
-        """
-        Returns the Quay Security Scanner layer ID for the given layer (Image row).
-        """
-        return compute_layer_id(layer)
-
-    def add_layer(self, layer_id):
-        """
-        Adds a layer to the security scanner, with no features or vulnerabilities.
-        """
-        self.layers[layer_id] = {
-            "Name": layer_id,
-            "Format": "Docker",
-            "IndexedByVersion": self.index_version,
-        }
-
-    def remove_layer(self, layer_id):
-        """
-        Removes a layer from the security scanner.
-        """
-        self.layers.pop(layer_id, None)
-
-    def set_vulns(self, layer_id, vulns):
-        """
-        Sets the vulnerabilities for the layer with the given ID to those given.
-        """
-        self.layer_vulns[layer_id] = vulns
-
-        # Since this call may occur before the layer is "anaylzed", we only add the data
-        # to the layer itself if present.
-        if self.layers.get(layer_id):
-            layer = self.layers[layer_id]
-            layer["Features"] = layer.get("Features", [])
-            layer["Features"].append(
-                {
-                    "Name": "somefeature",
-                    "Namespace": "somenamespace",
-                    "Version": "someversion",
-                    "Vulnerabilities": self.layer_vulns[layer_id],
-                }
-            )
-
-    def get_endpoints(self):
-        """
-        Returns the HTTMock endpoint definitions for the fake security scanner.
-        """
-
-        @urlmatch(netloc=r"(.*\.)?" + self.hostname, path=r"/v1/layers/(.+)", method="GET")
-        def get_layer_mock(url, request):
-            layer_id = url.path[len("/v1/layers/") :]
-            if layer_id == self.ok_layer_id:
-                return {
-                    "status_code": 200,
-                    "content": json.dumps({"Layer": {}}),
-                }
-
-            if layer_id == self.internal_error_layer_id:
-                return {
-                    "status_code": 500,
-                    "content": json.dumps({"Error": {"Message": "Internal server error"}}),
-                }
-
-            if not layer_id in self.layers:
-                return {
-                    "status_code": 404,
-                    "content": json.dumps({"Error": {"Message": "Unknown layer"}}),
-                }
-
-            layer_data = copy.deepcopy(self.layers[layer_id])
-
-            has_vulns = request.url.find("vulnerabilities") > 0
-            has_features = request.url.find("features") > 0
-            if not has_vulns and not has_features:
-                layer_data.pop("Features", None)
-
-            return {
-                "status_code": 200,
-                "content": json.dumps({"Layer": layer_data}),
-            }
-
-        @urlmatch(netloc=r"(.*\.)?" + self.hostname, path=r"/v1/layers/(.+)", method="DELETE")
-        def remove_layer_mock(url, _):
-            layer_id = url.path[len("/v1/layers/") :]
-            if not layer_id in self.layers:
-                return {
-                    "status_code": 404,
-                    "content": json.dumps({"Error": {"Message": "Unknown layer"}}),
-                }
-
-            self.layers.pop(layer_id)
-            return {
-                "status_code": 204,
-                "content": "",
-            }
-
-        @urlmatch(netloc=r"(.*\.)?" + self.hostname, path=r"/v1/layers", method="POST")
-        def post_layer_mock(_, request):
-            body_data = json.loads(request.body)
-            if not "Layer" in body_data:
-                return {"status_code": 400, "content": "Missing body"}
-
-            layer = body_data["Layer"]
-            if not "Path" in layer:
-                return {"status_code": 400, "content": "Missing Path"}
-
-            if not "Name" in layer:
-                return {"status_code": 400, "content": "Missing Name"}
-
-            if not "Format" in layer:
-                return {"status_code": 400, "content": "Missing Format"}
-
-            if layer["Name"] == self.internal_error_layer_id:
-                return {
-                    "status_code": 500,
-                    "content": json.dumps({"Error": {"Message": "Internal server error"}}),
-                }
-
-            if layer["Name"] == self.fail_layer_id:
-                return {
-                    "status_code": 422,
-                    "content": json.dumps({"Error": {"Message": "Cannot analyze"}}),
-                }
-
-            if layer["Name"] == self.error_layer_id:
-                return {
-                    "status_code": 400,
-                    "content": json.dumps({"Error": {"Message": "Some sort of error"}}),
-                }
-
-            if layer["Name"] == self.unexpected_status_layer_id:
-                return {
-                    "status_code": 600,
-                    "content": json.dumps({"Error": {"Message": "Some sort of error"}}),
-                }
-
-            parent_id = layer.get("ParentName", None)
-            parent_layer = None
-
-            if parent_id is not None:
-                parent_layer = self.layers.get(parent_id, None)
-                if parent_layer is None:
-                    return {
-                        "status_code": 400,
-                        "content": json.dumps(
-                            {"Error": {"Message": UNKNOWN_PARENT_LAYER_ERROR_MSG}}
-                        ),
-                    }
-
-            self.add_layer(layer["Name"])
-            if parent_layer is not None:
-                self.layers[layer["Name"]]["ParentName"] = parent_id
-
-            # If vulnerabilities have already been registered with this layer, call set_vulns to make sure
-            # their data is added to the layer's data.
-            if self.layer_vulns.get(layer["Name"]):
-                self.set_vulns(layer["Name"], self.layer_vulns[layer["Name"]])
-
-            return {
-                "status_code": 201,
-                "content": json.dumps(
-                    {
-                        "Layer": self.layers[layer["Name"]],
-                    }
-                ),
-            }
-
-        @urlmatch(netloc=r"(.*\.)?" + self.hostname, path=r"/v1/metrics$", method="GET")
-        def metrics(url, _):
-            return {
-                "status_code": 200,
-                "content": json.dumps({"fake": True}),
-            }
-
-        @all_requests
-        def response_content(url, _):
-            return {
-                "status_code": 500,
-                "content": json.dumps({"Error": {"Message": "Unknown endpoint %s" % url.path}}),
-            }
-
-        return [
-            get_layer_mock,
-            post_layer_mock,
-            remove_layer_mock,
-            metrics,
-            response_content,
-        ]
@@ -20,7 +20,6 @@ from util.security.registry_jwt import (
 
 
 DEFAULT_HTTP_HEADERS = {"Connection": "close"}
-MITM_CERT_PATH = "/conf/mitm.cert"
 TOKEN_VALIDITY_LIFETIME_S = 60 * 60  # 1 hour
 
 logger = logging.getLogger(__name__)