mirror of
https://github.com/quay/quay.git
synced 2026-01-26 06:21:37 +03:00
feat: Allow action logs to be forwarded to Splunk (PROJQUAY-4993) (#1764)
Signed-off-by: harishsurf <hgovinda@redhat.com>
This commit is contained in:
committed by
GitHub
parent
c3fc3a82ab
commit
be1bddfd6a
@@ -1,5 +1,6 @@
|
||||
import logging
|
||||
|
||||
from data.logs_model.splunk_logs_model import SplunkLogsModel
|
||||
from data.logs_model.table_logs_model import TableLogsModel
|
||||
from data.logs_model.document_logs_model import DocumentLogsModel
|
||||
from data.logs_model.combined_model import CombinedLogsModel
|
||||
@@ -18,6 +19,7 @@ _LOG_MODELS = {
|
||||
"database": TableLogsModel,
|
||||
"transition_reads_both_writes_es": _transition_model,
|
||||
"elasticsearch": DocumentLogsModel,
|
||||
"splunk": SplunkLogsModel,
|
||||
}
|
||||
|
||||
_PULL_LOG_KINDS = {"pull_repo", "repo_verb"}
|
||||
@@ -43,7 +45,7 @@ logs_model = LogsModelProxy()
|
||||
|
||||
|
||||
def configure(app_config):
|
||||
logger.debug("Configuring log lodel")
|
||||
logger.debug("Configuring log model")
|
||||
model_name = app_config.get("LOGS_MODEL", "database")
|
||||
model_config = app_config.get("LOGS_MODEL_CONFIG", {})
|
||||
|
||||
|
||||
44
data/logs_model/logs_producer/splunk_logs_producer.py
Normal file
44
data/logs_model/logs_producer/splunk_logs_producer.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import logging
|
||||
|
||||
from splunklib import client
|
||||
|
||||
from data.logs_model.logs_producer import LogSendException
|
||||
from data.logs_model.logs_producer.interface import LogProducerInterface
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SplunkLogsProducer(LogProducerInterface):
    """
    Log producer that writes action-log entries straight to a Splunk index.

    This implementation talks to Splunk synchronously via the Splunk SDK;
    no streaming/queueing service sits in between.
    """

    def __init__(
        self,
        host,
        port,
        bearer_token,
        url_scheme="https",
        verify_ssl=True,
        index_prefix=None,
    ):
        # Establish the management connection to the Splunk cluster up front
        # so a misconfiguration surfaces immediately at startup.
        try:
            splunk_service = client.connect(
                host=host,
                port=port,
                token=bearer_token,
                scheme=url_scheme,
                verify=verify_ssl,
            )
        except Exception as ex:
            logger.exception("Failed to connect to Splunk instance %s", ex)
            raise ex

        # Reuse the target index when it already exists; create it otherwise.
        try:
            self.index = splunk_service.indexes[index_prefix]
            logger.info("splunk index %s", self.index)
        except KeyError:
            self.index = splunk_service.indexes.create(index_prefix)

    def send(self, log):
        """Submit one serialized log event to the index; raise LogSendException on failure."""
        try:
            self.index.submit(log, sourcetype="access_combined", host="quay")
        except Exception as e:
            logger.exception("SplunkLogsProducer exception sending log to Splunk: %s", e)
            raise LogSendException("SplunkLogsProducer exception sending log to Splunk: %s" % e)
|
||||
144
data/logs_model/splunk_logs_model.py
Normal file
144
data/logs_model/splunk_logs_model.py
Normal file
@@ -0,0 +1,144 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from data import model
|
||||
from data.logs_model.interface import ActionLogsDataInterface
|
||||
from data.logs_model.logs_producer import LogProducerProxy, LogSendException
|
||||
from data.logs_model.logs_producer.splunk_logs_producer import SplunkLogsProducer
|
||||
from data.logs_model.shared import SharedModel
|
||||
from data.model import config
|
||||
from data.model.log import ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SplunkLogsModel(SharedModel, ActionLogsDataInterface):
    """
    SplunkLogsModel implements model for establishing connection and sending events to Splunk cluster.

    Splunk is write-only for action logs: every lookup/aggregation method raises
    NotImplementedError so callers (e.g. the API layer) can translate it to HTTP 501.
    """

    def __init__(self, producer, splunk_config, should_skip_logging=None):
        """
        Args:
            producer: must be the string "splunk"; any other value raises.
            splunk_config: kwargs forwarded to SplunkLogsProducer
                (host, port, bearer_token, url_scheme, verify_ssl, index_prefix).
            should_skip_logging: optional callable
                (kind_name, namespace_name, is_free_namespace) -> bool used to
                suppress logging for certain events.
        """
        self._should_skip_logging = should_skip_logging
        self._logs_producer = LogProducerProxy()
        if producer == "splunk":
            self._logs_producer.initialize(SplunkLogsProducer(**splunk_config))
        else:
            raise Exception("Invalid log producer: %s" % producer)

    def log_action(
        self,
        kind_name,
        namespace_name=None,
        performer=None,
        ip=None,
        metadata=None,
        repository=None,
        repository_name=None,
        timestamp=None,
        is_free_namespace=False,
    ):
        """
        Serialize one action-log event as JSON and send it to Splunk.

        Raises LogSendException on producer failure unless strict logging is
        disabled and the action kind is allowed without audit logging.
        """
        if self._should_skip_logging and self._should_skip_logging(
            kind_name, namespace_name, is_free_namespace
        ):
            return

        if repository_name is not None:
            # A repository may be supplied either directly or by name+namespace,
            # never both, and a name lookup requires a namespace.
            # BUGFIX: the condition was inverted (`repository is None or
            # namespace_name is not None`), which rejected every legitimate
            # name-based lookup — the error message itself documents the
            # intended check ("namespace name should not be empty").
            if repository is not None or namespace_name is None:
                raise ValueError(
                    "Incorrect argument provided when logging action logs, namespace name should not be "
                    "empty"
                )
            repository = model.repository.get_repository(namespace_name, repository_name)

        if timestamp is None:
            timestamp = datetime.today()

        account_id = None
        performer_id = None
        repository_id = None

        if namespace_name is not None:
            ns_user = model.user.get_namespace_user(namespace_name)
            if ns_user is not None:
                account_id = ns_user.id

        if performer is not None:
            performer_id = performer.id

        if repository is not None:
            repository_id = repository.id

        kind_id = model.log._get_log_entry_kind(kind_name)

        metadata_json = metadata or {}

        log_data = {
            "kind": kind_id,
            "account": account_id,
            "performer": performer_id,
            "repository": repository_id,
            "ip": ip,
            # Already defaulted to {} above; no second `or {}` needed.
            "metadata_json": metadata_json,
            "datetime": timestamp,
        }

        try:
            # default=str makes datetimes (and other non-JSON types) serializable.
            self._logs_producer.send(json.dumps(log_data, sort_keys=True, default=str))
        except LogSendException as lse:
            strict_logging_disabled = config.app_config.get("ALLOW_PULLS_WITHOUT_STRICT_LOGGING")
            # BUGFIX: the original passed `extra=({"exception": lse}).update(log_data)`,
            # but dict.update() returns None, so no extra context ever reached the log.
            logger.exception("log_action failed", extra=dict(log_data, exception=lse))
            if not (strict_logging_disabled and kind_name in ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING):
                raise

    def lookup_logs(
        self,
        start_datetime,
        end_datetime,
        performer_name=None,
        repository_name=None,
        namespace_name=None,
        filter_kinds=None,
        page_token=None,
        max_page_count=None,
    ):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")

    def lookup_latest_logs(
        self,
        performer_name=None,
        repository_name=None,
        namespace_name=None,
        filter_kinds=None,
        size=20,
    ):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")

    def get_aggregated_log_counts(
        self,
        start_datetime,
        end_datetime,
        performer_name=None,
        repository_name=None,
        namespace_name=None,
        filter_kinds=None,
    ):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")

    def count_repository_actions(self, repository, day):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")

    def yield_logs_for_export(
        self,
        start_datetime,
        end_datetime,
        repository_id=None,
        namespace_id=None,
        max_query_time=None,
    ):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")

    def yield_log_rotation_context(self, cutoff_date, min_logs_per_rotation):
        raise NotImplementedError("Method not implemented, Splunk does not support log lookups")
|
||||
56
data/logs_model/test/test_splunk.py
Normal file
56
data/logs_model/test/test_splunk.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from dateutil.parser import parse
|
||||
from mock import patch, Mock
|
||||
|
||||
from .test_elasticsearch import logs_model, mock_db_model
|
||||
from data.logs_model import configure
|
||||
from test.fixtures import *
|
||||
from data.model.repository import create_repository
|
||||
|
||||
FAKE_SPLUNK_HOST = "fakesplunk"
|
||||
FAKE_SPLUNK_PORT = 443
|
||||
FAKE_SPLUNK_TOKEN = None
|
||||
FAKE_INDEX_PREFIX = "test_index_prefix"
|
||||
|
||||
|
||||
@pytest.fixture()
def splunk_logs_model_config():
    """App-config dict that selects the Splunk logs model with fake connection values."""
    splunk_connection = {
        "host": FAKE_SPLUNK_HOST,
        "port": FAKE_SPLUNK_PORT,
        "bearer_token": FAKE_SPLUNK_TOKEN,
        "url_scheme": "https",
        "verify_ssl": True,
        "index_prefix": FAKE_INDEX_PREFIX,
    }
    return {
        "LOGS_MODEL": "splunk",
        "LOGS_MODEL_CONFIG": {
            "producer": "splunk",
            "splunk_config": splunk_connection,
        },
    }
|
||||
|
||||
|
||||
def test_splunk_logs_producers(logs_model, splunk_logs_model_config, mock_db_model, initialized_db):
    """log_action with the splunk model hands exactly one event to SplunkLogsProducer.send."""
    # Stub out both the producer's send() and the SDK connection so no
    # network traffic is attempted.
    with patch(
        "data.logs_model.logs_producer.splunk_logs_producer.SplunkLogsProducer.send"
    ) as mock_send:
        with patch("splunklib.client.connect", MagicMock()):
            repo = create_repository("devtable", "somenewrepo", None, repo_kind="image")
            configure(splunk_logs_model_config)
            logs_model.log_action(
                "pull_repo",
                "devtable",
                Mock(id=1),
                "192.168.1.1",
                {"key": "value"},
                repo,
                None,
                parse("2019-01-01T03:30"),
            )
            mock_send.assert_called_once()
|
||||
@@ -62,3 +62,11 @@ def handle_readonly(ex):
|
||||
)
|
||||
response.status_code = 503
|
||||
return response
|
||||
|
||||
|
||||
@app.errorhandler(NotImplementedError)
def handle_not_implemented_error(ex):
    """Translate NotImplementedError (e.g. Splunk log lookups) into an HTTP 501 JSON response."""
    logger.exception(ex)
    payload = jsonify({"message": str(ex)})
    payload.status_code = 501
    return payload
|
||||
|
||||
@@ -114,6 +114,7 @@ s3transfer==0.5.1
|
||||
semantic-version==2.8.4
|
||||
six==1.14.0
|
||||
soupsieve==1.9.5
|
||||
splunk-sdk==1.7.3
|
||||
SQLAlchemy==1.4.31
|
||||
stevedore==1.31.0
|
||||
stringscore==0.1.0
|
||||
|
||||
@@ -35,11 +35,12 @@
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div id="bar-chart" style="width: 800px; height: 500px;"
|
||||
<div class="empty-primary-msg" ng-if="splunkLogMsg"> {{ splunkLogMsg }}</div>
|
||||
<div id="bar-chart" style="width: 800px; height: 500px;"
|
||||
quay-show="chartVisible && Features.AGGREGATED_LOG_COUNT_RETRIEVAL">
|
||||
<svg style="width: 800px; height: 500px;"></svg>
|
||||
<svg style="width: 800px; height: 500px;"></svg>
|
||||
<div class="cor-loader" ng-if="chartLoading"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="hidden-xs side-controls">
|
||||
<div class="result-count">
|
||||
|
||||
@@ -35,6 +35,7 @@ angular.module('quay').directive('logsView', function () {
|
||||
|
||||
$scope.options = {};
|
||||
$scope.context = {};
|
||||
$scope.splunkLogMsg = null;
|
||||
|
||||
var datetime = new Date();
|
||||
$scope.options.logStartDate = new Date(datetime.getUTCFullYear(), datetime.getUTCMonth(), datetime.getUTCDate() - 7);
|
||||
@@ -532,6 +533,11 @@ angular.module('quay').directive('logsView', function () {
|
||||
$scope.chart.draw('bar-chart', resp.aggregated, $scope.options.logStartDate,
|
||||
$scope.options.logEndDate);
|
||||
$scope.chartLoading = false;
|
||||
}).catch(function (resp) {
|
||||
if (resp.status === 501) {
|
||||
$scope.chartLoading = false;
|
||||
$scope.splunkLogMsg = resp.data.message;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -574,6 +580,10 @@ angular.module('quay').directive('logsView', function () {
|
||||
$scope.loading = false;
|
||||
$scope.nextPageToken = resp.next_page;
|
||||
$scope.hasAdditional = !!resp.next_page;
|
||||
}).catch(function (resp) {
|
||||
if (resp.status === 501) {
|
||||
$scope.loading = false;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
@@ -1004,7 +1004,7 @@ CONFIG_SCHEMA = {
|
||||
"LOGS_MODEL": {
|
||||
"type": "string",
|
||||
"description": "Logs model for action logs",
|
||||
"enum": ["database", "transition_reads_both_writes_es", "elasticsearch"],
|
||||
"enum": ["database", "transition_reads_both_writes_es", "elasticsearch", "splunk"],
|
||||
"x-example": "database",
|
||||
},
|
||||
"LOGS_MODEL_CONFIG": {
|
||||
@@ -1014,8 +1014,8 @@ CONFIG_SCHEMA = {
|
||||
"properties": {
|
||||
"producer": {
|
||||
"type": "string",
|
||||
"description": "Logs producer if logging to Elasticsearch",
|
||||
"enum": ["kafka", "elasticsearch", "kinesis_stream"],
|
||||
"description": "Logs producer",
|
||||
"enum": ["kafka", "elasticsearch", "kinesis_stream", "splunk"],
|
||||
"x-example": "kafka",
|
||||
},
|
||||
"elasticsearch_config": {
|
||||
@@ -1133,6 +1133,49 @@ CONFIG_SCHEMA = {
|
||||
},
|
||||
},
|
||||
},
|
||||
"splunk_config": {
|
||||
"type": "object",
|
||||
"description": "Logs model config for splunk action logs/ splunk cluster configuration",
|
||||
"x-reference": "https://dev.splunk.com/enterprise/docs/devtools/python/sdk-python"
|
||||
"/howtousesplunkpython/howtogetdatapython#To-add-data-directly-to-an-index",
|
||||
"properties": {
|
||||
"host": {
|
||||
"type": "string",
|
||||
"description": "Splunk cluster endpoint",
|
||||
"x-example": "host.splunk.example",
|
||||
},
|
||||
"port": {
|
||||
"type": "number",
|
||||
"description": "Splunk management cluster endpoint port",
|
||||
"x-example": 1234,
|
||||
},
|
||||
"bearer_token": {
|
||||
"type": "string",
|
||||
"description": "Bearer_Token for splunk.See: "
|
||||
"https://dev.splunk.com/enterprise/docs/devtools/python/sdk-python"
|
||||
"/howtousesplunkpython/howtoconnectpython/#Log-in-using-a-bearer-token",
|
||||
"x-example": "us-east-1",
|
||||
},
|
||||
"url_scheme": {
|
||||
"type": "string",
|
||||
"description": "The url scheme for accessing the splunk service. If Splunk is behind SSL"
|
||||
"*at all*, this *must* be `https`",
|
||||
"enum": ["http", "https"],
|
||||
"x-example": "https",
|
||||
},
|
||||
"verify_ssl": {
|
||||
"type": "boolean",
|
||||
"description": "Enable (True) or disable (False) SSL verification for https connections."
|
||||
"Defaults to True",
|
||||
"x-example": True,
|
||||
},
|
||||
"index_prefix": {
|
||||
"type": "string",
|
||||
"description": "Splunk's index prefix",
|
||||
"x-example": "splunk_logentry_",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
# Feature Flag: Blacklist Email Domains
|
||||
|
||||
Reference in New Issue
Block a user