1
0
mirror of https://github.com/quay/quay.git synced 2026-01-26 06:21:37 +03:00
Files
quay/workers/blobuploadcleanupworker/models_interface.py
Joseph Schorr 424c1a19d7 Have the BlobUpload cleanup worker run on a single instance only (#239)
Instead of running simultaneously across multiple nodes, we change the
worker to run only from a single instance. This is better for the DB and
the previous behavior was not necessary given the size of the BlobUpload
table.

Fixes https://issues.redhat.com/browse/PROJQUAY-365
2020-02-27 13:16:40 -05:00

51 lines
1.2 KiB
Python

from abc import ABCMeta, abstractmethod
from collections import namedtuple
from six import add_metaclass
class BlobUpload(namedtuple("BlobUpload", "uuid storage_metadata location_name created")):
    """
    Immutable record describing one blob upload, either still in progress or
    started at some earlier point.

    Fields:
        uuid: unique identifier of the upload.
        storage_metadata: opaque metadata handed back by the storage engine.
        location_name: name of the storage location holding the upload.
        created: timestamp at which the upload row was created.
    """
class BlobUploadCleanupWorkerDataInterface(object, metaclass=ABCMeta):
    """
    Interface that represents all data store interactions required by the blob upload cleanup
    worker.

    Implementations back these operations with the real database; the worker itself
    only ever talks to this interface. Declared abstract via ``metaclass=ABCMeta``
    directly (Python 3 syntax) rather than through ``six.add_metaclass``, which was
    only needed for Python 2 compatibility.
    """

    @abstractmethod
    def get_stale_blob_upload(self, stale_threshold):
        """
        Returns a BlobUpload that was created on or before the current date/time minus the stale
        threshold.

        If none, returns None.
        """
        pass

    @abstractmethod
    def delete_blob_upload(self, blob_upload):
        """
        Deletes a blob upload from the database.
        """
        pass

    @abstractmethod
    def create_stale_upload_for_testing(self):
        """
        Creates a new stale blob upload for testing.
        """
        pass

    @abstractmethod
    def blob_upload_exists(self, upload_uuid):
        """
        Returns True if a blob upload with the given UUID exists.
        """
        pass