quay/util/audit.py
Kurtis Mullins 38be6d05d0 Python 3 (#153)
* Convert all Python2 to Python3 syntax.

* Removes oauth2lib dependency

* Replace mockredis with fakeredis

* byte/str conversions

* Removes nonexistent __nonzero__ in Python3
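
For context, Python 3 renamed the truthiness hook from __nonzero__ to __bool__; a minimal sketch (the class name is illustrative, not from the Quay codebase):

class LayerCounter:
    def __init__(self, count):
        self.count = count

    # Python 2 called __nonzero__; Python 3 only calls __bool__.
    def __bool__(self):
        return self.count > 0

    # Optional alias if the same code must still run under Python 2.
    __nonzero__ = __bool__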

* Python3 Dockerfile and related

* [PROJQUAY-98] Replace resumablehashlib with rehash
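
As a rough illustration of why a resumable hasher matters here, assuming the standard rehash package whose hashers are picklable (not code from this commit):

import pickle

import rehash

hasher = rehash.sha256(b"first chunk")
saved = pickle.dumps(hasher)       # persist the hash state mid-stream

resumed = pickle.loads(saved)      # resume later, e.g. in another worker
resumed.update(b"second chunk")
print(resumed.hexdigest())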

* PROJQUAY-123 - replace gpgme with python3-gpg

* [PROJQUAY-135] Fix unhashable class error

* Update external dependencies for Python 3

- Move github.com/app-registry/appr to github.com/quay/appr
- github.com/coderanger/supervisor-stdout
- github.com/DevTable/container-cloud-config
- Update to latest mockldap with changes applied from coreos/mockldap
- Update dependencies in requirements.txt and requirements-dev.txt

* Default FLOAT_REPR function to str in json encoder and removes keyword assignment

True, False, and str were not keywords in Python2...

* [PROJQUAY-165] Replace package `bencode` with `bencode.py`

- Bencode is not compatible with Python 3.x and is no longer
  maintained. Bencode.py appears to be a drop-in replacement/fork
  that is compatible with Python 3.

* Make sure monkey.patch is called before anything else
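
Assuming this refers to gevent-style monkey patching, the usual pattern is to patch before any other module is imported, so sockets, SSL, and threads are already patched when the rest of the app loads; an illustrative entry point:

# Illustrative entry point: the patch must run before anything that
# imports socket, ssl, or threading (assumes gevent is the patcher).
from gevent import monkey

monkey.patch_all()

# Only after patching do the remaining application imports run.
import logging  # noqa: E402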

* Removes anunidecode dependency and replaces it with text_unidecode

* Base64 encode/decode pickle dumps/loads when storing value in DB

Base64 encodes/decodes the serialized values when storing them in the
DB. Also make sure to return a Python3 string instead of bytes when
coercing for the db; otherwise, Postgres' TEXT field will convert the
value into a hex representation when storing it.
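
A minimal sketch of that pattern, using only the standard library (function names are illustrative):

import base64
import pickle

def dumps_for_db(value):
    # pickle produces bytes; base64 + decode yields a str that a TEXT
    # column can store without Postgres hex-escaping it.
    return base64.b64encode(pickle.dumps(value)).decode("utf-8")

def loads_from_db(stored):
    return pickle.loads(base64.b64decode(stored.encode("utf-8")))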

* Implement __hash__ on Digest class

In Python 3, if a class defines __eq__() but not __hash__(), its
instances will not be usable as items in hashable collections (e.g. sets).
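
A minimal illustration of the rule (not the actual Digest class; its fields may differ):

class Digest:
    def __init__(self, algorithm, hex_value):
        self.algorithm = algorithm
        self.hex_value = hex_value

    def __eq__(self, other):
        return (self.algorithm, self.hex_value) == (other.algorithm, other.hex_value)

    # Without this, Python 3 sets __hash__ to None and the instance
    # cannot be placed in a set or used as a dict key.
    def __hash__(self):
        return hash((self.algorithm, self.hex_value))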

* Remove basestring check
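
In Python 3 basestring is gone, so text checks reduce to a plain str check (illustrative):

value = "library/ubuntu"

# Python 2: isinstance(value, basestring) covered both str and unicode.
# Python 3: str is the only text type; check for bytes separately if needed.
if isinstance(value, str):
    print("text value:", value)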

* Fix expected message in credentials tests

* Fix usage of Cryptography.Fernet for Python3 (#219)

- Specifically, this addresses the issue where Byte<->String
  conversions weren't being applied correctly.
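
A minimal sketch of the byte/str handling around cryptography's Fernet (the key management here is illustrative only):

from cryptography.fernet import Fernet

key = Fernet.generate_key()   # bytes
fernet = Fernet(key)

plaintext = "secret value"
token = fernet.encrypt(plaintext.encode("utf-8"))   # encrypt expects bytes
restored = fernet.decrypt(token).decode("utf-8")    # decrypt returns bytes

assert restored == plaintext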

* Fix utils

- tar+stream layer format utils
- filelike util

* Fix storage tests

* Fix endpoint tests

* Fix workers tests

* Fix docker's empty layer bytes

* Fix registry tests

* Appr

* Enable CI for Python 3.6

* Skip buildman tests

Skip buildman tests while buildman is being rewritten, to allow CI to pass.

* Install swig for CI

* Update expected exception type in redis validation test

* Fix gpg signing calls

Fix gpg calls for updated gpg wrapper, and add signing tests.

* Convert / to // for Python3 integer division
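
For reference, the semantic difference being handled:

# Python 2: 7 / 2 == 3 (floor division for ints).
# Python 3: 7 / 2 == 3.5, so code that needs an integer must use //.
chunk_count = 7 // 2   # 3
exact_ratio = 7 / 2    # 3.5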

* WIP: Update buildman to use asyncio instead of trollius.

This dependency is considered deprecated/abandoned and was only
used as an implementation/backport of asyncio on Python 2.x.
This is a work in progress, and is included in the PR just to get the
rest of the tests passing. The builder is actually being rewritten.
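
Roughly, the migration replaces trollius-style generator coroutines with native async/await; a hedged sketch (the builder's real coroutines are more involved):

import asyncio

# Old trollius style (Python 2 backport):
#   @trollius.coroutine
#   def heartbeat():
#       yield trollius.From(trollius.sleep(30))
#
# asyncio equivalent on Python 3:
async def heartbeat():
    await asyncio.sleep(30)

asyncio.run(heartbeat())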

* Target Python 3.8

* Removes unused files

- Removes unused files that were added accidentally while rebasing
- Small fixes/cleanup
- TODO tasks comments

* Add TODO to verify rehash backward compat with resumablehashlib

* Revert "[PROJQUAY-135] Fix unhashable class error" and implements __hash__ instead.

This reverts commit 735e38e3c1d072bf50ea864bc7e119a55d3a8976.
Instead, defines __hash__ for the encrypted fields class, using the parent
field's implementation.
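
A hedged sketch of that delegation (class and base names are stand-ins, not Quay's actual field classes): defining __eq__ would otherwise set __hash__ to None in Python 3, so __hash__ is explicitly pointed back at the parent field's implementation.

# Stand-in for the real ORM field base class.
class CharField:
    def __init__(self, name):
        self.name = name

    def __hash__(self):
        return hash(self.name)

class EncryptedCharField(CharField):
    def __eq__(self, other):
        return isinstance(other, CharField) and self.name == other.name

    # Reuse the parent field's hashing rather than losing hashability.
    __hash__ = CharField.__hash__

fields = {EncryptedCharField("token"), EncryptedCharField("password")}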

* Remove some unused files and imports

Co-authored-by: Kenny Lee Sin Cheong <kenny.lee@redhat.com>
Co-authored-by: Tom McKay <thomasmckay@redhat.com>
2020-06-05 16:50:13 -04:00

import logging
import random

from collections import namedtuple
from urllib.parse import urlparse

from flask import request

from app import analytics, userevents, ip_resolver
from auth.auth_context import get_authenticated_context, get_authenticated_user
from data.logs_model import logs_model
from util.request import get_request_ip
from data.readreplica import ReadOnlyModeException

logger = logging.getLogger(__name__)

Repository = namedtuple("Repository", ["namespace_name", "name", "id", "is_free_namespace"])


def wrap_repository(repo_obj):
    return Repository(
        namespace_name=repo_obj.namespace_user.username,
        name=repo_obj.name,
        id=repo_obj.id,
        is_free_namespace=repo_obj.namespace_user.stripe_id is None,
    )


def track_and_log(event_name, repo_obj, analytics_name=None, analytics_sample=1, **kwargs):
    repo_name = repo_obj.name
    namespace_name = repo_obj.namespace_name

    metadata = {
        "repo": repo_name,
        "namespace": namespace_name,
        "user-agent": request.user_agent.string,
    }
    metadata.update(kwargs)

    is_free_namespace = False
    if hasattr(repo_obj, "is_free_namespace"):
        is_free_namespace = repo_obj.is_free_namespace

    # Add auth context metadata.
    analytics_id = "anonymous"
    auth_context = get_authenticated_context()
    if auth_context is not None:
        analytics_id, context_metadata = auth_context.analytics_id_and_public_metadata()
        metadata.update(context_metadata)

    # Publish the user event (if applicable)
    logger.debug("Checking publishing %s to the user events system", event_name)
    if auth_context and auth_context.has_nonrobot_user:
        logger.debug("Publishing %s to the user events system", event_name)
        user_event_data = {
            "action": event_name,
            "repository": repo_name,
            "namespace": namespace_name,
        }

        event = userevents.get_event(auth_context.authed_user.username)
        event.publish_event_data("docker-cli", user_event_data)

    # Save the action to mixpanel.
    if random.random() < analytics_sample:
        if analytics_name is None:
            analytics_name = event_name

        logger.debug("Logging the %s to analytics engine", analytics_name)
        request_parsed = urlparse(request.url_root)
        extra_params = {
            "repository": "%s/%s" % (namespace_name, repo_name),
            "user-agent": request.user_agent.string,
            "hostname": request_parsed.hostname,
        }

        analytics.track(analytics_id, analytics_name, extra_params)

    # Add the resolved information to the metadata.
    logger.debug("Resolving IP address %s", get_request_ip())
    resolved_ip = ip_resolver.resolve_ip(get_request_ip())
    if resolved_ip is not None:
        metadata["resolved_ip"] = resolved_ip._asdict()

    logger.debug("Resolved IP address %s", get_request_ip())

    # Log the action to the database.
    logger.debug("Logging the %s to logs system", event_name)
    try:
        logs_model.log_action(
            event_name,
            namespace_name,
            performer=get_authenticated_user(),
            ip=get_request_ip(),
            metadata=metadata,
            repository=repo_obj,
            is_free_namespace=is_free_namespace,
        )
        logger.debug("Track and log of %s complete", event_name)
    except ReadOnlyModeException:
        pass
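
A hedged example of how a caller might use these helpers; the event name, sample rate, and extra kwarg are illustrative, and repo_obj stands for a repository row with a namespace_user relation, as wrap_repository assumes:

# Hypothetical call site inside a request handler, after authorization:
repository = wrap_repository(repo_obj)  # repo_obj: repository row fetched earlier
track_and_log(
    "pull_repo",              # event name (illustrative)
    repository,
    analytics_sample=0.1,     # send roughly 10% of these events to analytics
    tag="latest",             # extra kwargs are folded into the log metadata
)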