mirror of
https://github.com/quay/quay.git
synced 2026-01-26 06:21:37 +03:00
* Convert all Python2 to Python3 syntax. * Removes oauth2lib dependency * Replace mockredis with fakeredis * byte/str conversions * Removes nonexisting __nonzero__ in Python3 * Python3 Dockerfile and related * [PROJQUAY-98] Replace resumablehashlib with rehash * PROJQUAY-123 - replace gpgme with python3-gpg * [PROJQUAY-135] Fix unhashable class error * Update external dependencies for Python 3 - Move github.com/app-registry/appr to github.com/quay/appr - github.com/coderanger/supervisor-stdout - github.com/DevTable/container-cloud-config - Update to latest mockldap with changes applied from coreos/mockldap - Update dependencies in requirements.txt and requirements-dev.txt * Default FLOAT_REPR function to str in json encoder and removes keyword assignment True, False, and str were not keywords in Python2... * [PROJQUAY-165] Replace package `bencode` with `bencode.py` - Bencode is not compatible with Python 3.x and is no longer maintained. Bencode.py appears to be a drop-in replacement/fork that is compatible with Python 3. * Make sure monkey.patch is called before anything else ( * Removes anunidecode dependency and replaces it with text_unidecode * Base64 encode/decode pickle dumps/loads when storing value in DB Base64 encodes/decodes the serialized values when storing them in the DB. Also make sure to return a Python3 string instead of a Bytes when coercing for db, otherwise, Postgres' TEXT field will convert it into a hex representation when storing the value. * Implement __hash__ on Digest class In Python 3, if a class defines __eq__() but not __hash__(), its instances will not be usable as items in hashable collections (e.g sets). * Remove basestring check * Fix expected message in credentials tests * Fix usage of Cryptography.Fernet for Python3 (#219) - Specifically, this addresses the issue where Byte<->String conversions weren't being applied correctly. 
* Fix utils - tar+stream layer format utils - filelike util * Fix storage tests * Fix endpoint tests * Fix workers tests * Fix docker's empty layer bytes * Fix registry tests * Appr * Enable CI for Python 3.6 * Skip buildman tests Skip buildman tests while it's being rewritten to allow ci to pass. * Install swig for CI * Update expected exception type in redis validation test * Fix gpg signing calls Fix gpg calls for updated gpg wrapper, and add signing tests. * Convert / to // for Python3 integer division * WIP: Update buildman to use asyncio instead of trollius. This dependency is considered deprecated/abandoned and was only used as an implementation/backport of asyncio on Python 2.x This is a work in progress, and is included in the PR just to get the rest of the tests passing. The builder is actually being rewritten. * Target Python 3.8 * Removes unused files - Removes unused files that were added accidentally while rebasing - Small fixes/cleanup - TODO tasks comments * Add TODO to verify rehash backward compat with resumablehashlib * Revert "[PROJQUAY-135] Fix unhashable class error" and implements __hash__ instead. This reverts commit 735e38e3c1d072bf50ea864bc7e119a55d3a8976. Instead, defines __hash__ for encryped fields class, using the parent field's implementation. * Remove some unused files ad imports Co-authored-by: Kenny Lee Sin Cheong <kenny.lee@redhat.com> Co-authored-by: Tom McKay <thomasmckay@redhat.com>
186 lines
7.1 KiB
Python
import hashlib
|
|
|
|
import pytest
|
|
|
|
from data import model, database
|
|
from storage.basestorage import StoragePaths
|
|
from storage.fakestorage import FakeStorage
|
|
from storage.distributedstorage import DistributedStorage
|
|
from workers.storagereplication import (
|
|
StorageReplicationWorker,
|
|
JobException,
|
|
WorkerUnhealthyException,
|
|
)
|
|
|
|
from test.fixtures import *
|
|
|
|
|
|
@pytest.fixture()
def storage_user(app):
    """Return the "devtable" user, registered in both test storage regions."""
    user = model.user.get_user("devtable")
    # Register the user in each region so replication targets both of them.
    for region_name in ("local_us", "local_eu"):
        database.UserRegion.create(
            user=user, location=database.ImageStorageLocation.get(name=region_name)
        )
    return user
|
|
|
|
|
|
@pytest.fixture()
def storage_paths():
    """Provide a StoragePaths helper for computing layer and blob paths."""
    return StoragePaths()
|
|
|
|
|
|
@pytest.fixture()
def replication_worker():
    """Provide a StorageReplicationWorker with no queue attached."""
    return StorageReplicationWorker(None)
|
|
|
|
|
|
@pytest.fixture()
def storage():
    """Distributed storage over two fake regions; writes default to local_us."""
    backends = {
        "local_us": FakeStorage("local"),
        "local_eu": FakeStorage("local"),
    }
    return DistributedStorage(backends, ["local_us"])
|
|
|
|
|
|
def test_storage_replication_v1(storage_user, storage_paths, replication_worker, storage, app):
    """Replicating a V1-path storage copies its layer data to every user region."""
    # Create a storage record using the legacy V1 layout and write its layer.
    v1_storage = model.storage.create_v1_storage("local_us")
    layer_path = storage_paths.v1_image_layer_path(v1_storage.uuid)
    storage.put_content(["local_us"], layer_path, b"some content")

    # Replicate and verify the bytes landed in the other "region".
    replication_worker.replicate_storage(storage_user, v1_storage.uuid, storage)
    assert storage.get_content(["local_eu"], layer_path) == b"some content"

    # Both regions should now be registered as placements.
    assert len(model.storage.get_storage_locations(v1_storage.uuid)) == 2
|
|
|
|
|
|
def test_storage_replication_cas(storage_user, storage_paths, replication_worker, storage, app):
    """Replicating a CAS-path storage copies its blob to every user region."""
    # Create a CAS storage record placed only in local_us.
    checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
    cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=checksum)
    database.ImageStoragePlacement.create(
        storage=cas_storage, location=database.ImageStorageLocation.get(name="local_us")
    )

    # Write the blob under its content-addressed path.
    blob_path = storage_paths.blob_path(cas_storage.content_checksum)
    storage.put_content(["local_us"], blob_path, b"some content")

    # Replicate and verify the data reached the other "region".
    replication_worker.replicate_storage(storage_user, cas_storage.uuid, storage)
    assert storage.get_content(["local_eu"], blob_path) == b"some content"

    # Both regions should now be registered as placements.
    assert len(model.storage.get_storage_locations(cas_storage.uuid)) == 2
|
|
|
|
|
|
def test_storage_replication_missing_base(
    storage_user, storage_paths, replication_worker, storage, app
):
    """Replication raises JobException when the blob is missing from the base region."""
    # Create a CAS storage record placed only in local_us, without writing its blob.
    checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
    cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=checksum)
    database.ImageStoragePlacement.create(
        storage=cas_storage, location=database.ImageStorageLocation.get(name="local_us")
    )

    # The layer bytes were never written, so replication cannot read them.
    with pytest.raises(JobException):
        replication_worker.replicate_storage(
            storage_user, cas_storage.uuid, storage, backoff_check=False
        )

    # The placement count stays at 1. Technically inaccurate, but that's okay,
    # as at least one location per storage is still required.
    assert len(model.storage.get_storage_locations(cas_storage.uuid)) == 1
|
|
|
|
|
|
def test_storage_replication_copy_error(
    storage_user, storage_paths, replication_worker, storage, app
):
    """Replication raises JobException when the storage copy operation fails."""
    # Create a CAS storage record placed only in local_us and write its blob.
    checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
    cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=checksum)
    database.ImageStoragePlacement.create(
        storage=cas_storage, location=database.ImageStorageLocation.get(name="local_us")
    )

    blob_path = storage_paths.blob_path(cas_storage.content_checksum)
    storage.put_content(["local_us"], blob_path, b"some content")

    # Flag the fake storage so that its copy operation breaks.
    storage.put_content(["local_us"], "break_copying", b"true")

    # Replication should fail because the write fails.
    with pytest.raises(JobException):
        replication_worker.replicate_storage(
            storage_user, cas_storage.uuid, storage, backoff_check=False
        )

    # The placement count stays at 1.
    assert len(model.storage.get_storage_locations(cas_storage.uuid)) == 1
|
|
|
|
|
|
def test_storage_replication_copy_didnot_copy(
    storage_user, storage_paths, replication_worker, storage, app
):
    """Replication raises JobException when the copy silently copies nothing."""
    # Create a CAS storage record placed only in local_us and write its blob.
    checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
    cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=checksum)
    database.ImageStoragePlacement.create(
        storage=cas_storage, location=database.ImageStorageLocation.get(name="local_us")
    )

    blob_path = storage_paths.blob_path(cas_storage.content_checksum)
    storage.put_content(["local_us"], blob_path, b"some content")

    # Flag the fake storage so that copying pretends to succeed without
    # actually transferring any data.
    storage.put_content(["local_us"], "fake_copying", b"true")

    # Replication should fail because the copy doesn't actually do the copy.
    with pytest.raises(JobException):
        replication_worker.replicate_storage(
            storage_user, cas_storage.uuid, storage, backoff_check=False
        )

    # The placement count stays at 1.
    assert len(model.storage.get_storage_locations(cas_storage.uuid)) == 1
|
|
|
|
|
|
def test_storage_replication_copy_unhandled_exception(
    storage_user, storage_paths, replication_worker, storage, app
):
    """An unhandled copy exception surfaces as WorkerUnhealthyException."""
    # Create a CAS storage record placed only in local_us and write its blob.
    checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
    cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=checksum)
    database.ImageStoragePlacement.create(
        storage=cas_storage, location=database.ImageStorageLocation.get(name="local_us")
    )

    blob_path = storage_paths.blob_path(cas_storage.content_checksum)
    storage.put_content(["local_us"], blob_path, b"some content")

    # Flag the fake storage so that copying raises an exception.
    storage.put_content(["local_us"], "except_copying", b"true")

    # Replication should fail because the copy raises an unhandled exception.
    with pytest.raises(WorkerUnhealthyException):
        replication_worker.replicate_storage(
            storage_user, cas_storage.uuid, storage, backoff_check=False
        )

    # The placement count stays at 1.
    assert len(model.storage.get_storage_locations(cas_storage.uuid)) == 1
|