Mirror of https://github.com/quay/quay.git (synced 2025-07-28 20:22:05 +03:00)

chore: remove deprecated appr code (PROJQUAY-4992) (#1718)

committed by GitHub
parent cb590f9a63
commit 6e8e2d2fe7
.github/workflows/oci-distribution-spec.yaml: 2 lines changed (vendored)
@@ -96,8 +96,6 @@ jobs:
          # OCI Conformance tests don't expect tags to be cached.
          # If we implement cache invalidation, we can enable it back.
          active_repo_tags_cache_ttl: 0s
          appr_applications_list_cache_ttl: 3600s
          appr_show_package_cache_ttl: 3600s
          EOF

          # Run the Quay container. See also:
@@ -101,7 +101,6 @@ def generate_server_config(config):
    tuf_host = config.get("TUF_HOST", None)
    signing_enabled = config.get("FEATURE_SIGNING", False)
    maximum_layer_size = config.get("MAXIMUM_LAYER_SIZE", "20G")
    maximum_cnr_layer_size = config.get("MAXIMUM_CNR_LAYER_SIZE", "1M")
    enable_rate_limits = config.get("FEATURE_RATE_LIMITS", False)

    write_config(
@@ -110,7 +109,6 @@ def generate_server_config(config):
        tuf_host=tuf_host,
        signing_enabled=signing_enabled,
        maximum_layer_size=maximum_layer_size,
        maximum_cnr_layer_size=maximum_cnr_layer_size,
        enable_rate_limits=enable_rate_limits,
        static_dir=STATIC_DIR,
    )
@@ -126,22 +126,6 @@ location ~ ^/v2/(.+)/_trust/tuf/(.*)$ {
}
{% endif %}

location /cnr {
    proxy_buffering off;

    proxy_request_buffering off;

    proxy_pass http://registry_app_server;
    proxy_read_timeout 120;
    proxy_temp_path /tmp 1 2;

    client_max_body_size {{ maximum_cnr_layer_size }};

    {% if enable_rate_limits %}
    limit_req zone=staticauth burst=5 nodelay;
    {% endif %}
}

location /api/ {
    proxy_pass http://web_app_server;
config.py: 20 lines changed
@@ -384,12 +384,6 @@ class DefaultConfig(ImmutableConfig):
    # Feature Flag: Whether to support signing
    FEATURE_SIGNING = False

    # Feature Flag: Whether to enable support for App repositories.
    FEATURE_APP_REGISTRY = False

    # Feature Flag: Whether app registry is in a read-only mode.
    FEATURE_READONLY_APP_REGISTRY = False

    # Feature Flag: If set to true, the _catalog endpoint returns public repositories. Otherwise,
    # only private repositories can be returned.
    FEATURE_PUBLIC_CATALOG = False
@@ -672,8 +666,6 @@ class DefaultConfig(ImmutableConfig):
        "catalog_page_cache_ttl": "60s",
        "namespace_geo_restrictions_cache_ttl": "240s",
        "active_repo_tags_cache_ttl": "120s",
        "appr_applications_list_cache_ttl": "3600s",
        "appr_show_package_cache_ttl": "3600s",
    }

    # Defines the number of successive failures of a build trigger's build before the trigger is
@@ -745,18 +737,6 @@ class DefaultConfig(ImmutableConfig):
    # The timeout after which a fresh login check is required for sensitive operations.
    FRESH_LOGIN_TIMEOUT = "10m"

    # The limit on the number of results returned by app registry listing operations.
    APP_REGISTRY_RESULTS_LIMIT = 100

    # The whitelist of namespaces whose app registry package list is cached for 1 hour.
    APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST: Optional[List[str]] = []

    # The whitelist of namespaces whose app registry show package is cached for 1 hour.
    APP_REGISTRY_SHOW_PACKAGE_CACHE_WHITELIST: Optional[List[str]] = []

    # The maximum size of uploaded CNR layers.
    MAXIMUM_CNR_LAYER_SIZE = "2m"

    # Feature Flag: Whether to clear expired RepositoryActionCount entries.
    FEATURE_CLEAR_EXPIRED_RAC_ENTRIES = False
@@ -1,9 +0,0 @@
from data.appr_model import (
    blob,
    channel,
    manifest,
    manifest_list,
    package,
    release,
    tag,
)
@@ -1,86 +0,0 @@
|
||||
import logging
|
||||
|
||||
from peewee import IntegrityError
|
||||
|
||||
from data.model import db_transaction
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _ensure_sha256_header(digest):
|
||||
if digest.startswith("sha256:"):
|
||||
return digest
|
||||
return "sha256:" + digest
|
||||
|
||||
|
||||
def get_blob(digest, models_ref):
|
||||
"""
|
||||
Find a blob by its digest.
|
||||
"""
|
||||
Blob = models_ref.Blob
|
||||
return Blob.select().where(Blob.digest == _ensure_sha256_header(digest)).get()
|
||||
|
||||
|
||||
def get_or_create_blob(digest, size, media_type_name, locations, models_ref):
|
||||
"""
|
||||
Try to find a blob by its digest or create it.
|
||||
"""
|
||||
Blob = models_ref.Blob
|
||||
BlobPlacement = models_ref.BlobPlacement
|
||||
|
||||
# Get or create the blob entry for the digest.
|
||||
try:
|
||||
blob = get_blob(digest, models_ref)
|
||||
logger.debug("Retrieved blob with digest %s", digest)
|
||||
except Blob.DoesNotExist:
|
||||
blob = Blob.create(
|
||||
digest=_ensure_sha256_header(digest),
|
||||
media_type_id=Blob.media_type.get_id(media_type_name),
|
||||
size=size,
|
||||
)
|
||||
logger.debug("Created blob with digest %s", digest)
|
||||
|
||||
# Add the locations to the blob.
|
||||
for location_name in locations:
|
||||
location_id = BlobPlacement.location.get_id(location_name)
|
||||
try:
|
||||
BlobPlacement.create(blob=blob, location=location_id)
|
||||
except IntegrityError:
|
||||
logger.debug("Location %s already existing for blob %s", location_name, blob.id)
|
||||
|
||||
return blob
|
||||
|
||||
|
||||
def get_blob_locations(digest, models_ref):
|
||||
"""
|
||||
Find all locations names for a blob.
|
||||
"""
|
||||
Blob = models_ref.Blob
|
||||
BlobPlacement = models_ref.BlobPlacement
|
||||
BlobPlacementLocation = models_ref.BlobPlacementLocation
|
||||
|
||||
return [
|
||||
x.name
|
||||
for x in BlobPlacementLocation.select()
|
||||
.join(BlobPlacement)
|
||||
.join(Blob)
|
||||
.where(Blob.digest == _ensure_sha256_header(digest))
|
||||
]
|
||||
|
||||
|
||||
def ensure_blob_locations(models_ref, *names):
|
||||
BlobPlacementLocation = models_ref.BlobPlacementLocation
|
||||
|
||||
with db_transaction():
|
||||
locations = BlobPlacementLocation.select().where(BlobPlacementLocation.name << names)
|
||||
|
||||
insert_names = list(names)
|
||||
|
||||
for location in locations:
|
||||
insert_names.remove(location.name)
|
||||
|
||||
if not insert_names:
|
||||
return
|
||||
|
||||
data = [{"name": name} for name in insert_names]
|
||||
BlobPlacementLocation.insert_many(data).execute()
|
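The blob helpers above are keyed by content digest. A brief usage sketch of how they were called (assuming an open database connection and the Appr* tables that this commit removes; the digest, size, and location values are illustrative):

from data.appr_model import blob as blob_model
from data.appr_model.models import NEW_MODELS

# Store (or look up) a blob record for a digest and register its storage locations.
blob = blob_model.get_or_create_blob(
    "sha256:abc123...",  # the "sha256:" prefix is added by the helper if missing
    size=1024,
    media_type_name="application/vnd.cnr.blob.v0.tar+gzip",
    locations=["local_us"],
    models_ref=NEW_MODELS,
)

# Later, resolve which storage locations hold the blob's bytes.
locations = blob_model.get_blob_locations("sha256:abc123...", NEW_MODELS)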
@@ -1,81 +0,0 @@
|
||||
from data.appr_model import tag as tag_model
|
||||
|
||||
|
||||
def get_channel_releases(repo, channel, models_ref):
|
||||
"""
|
||||
Return all previously linked tags.
|
||||
|
||||
This works based upon Tag lifetimes.
|
||||
"""
|
||||
Channel = models_ref.Channel
|
||||
Tag = models_ref.Tag
|
||||
|
||||
tag_kind_id = Channel.tag_kind.get_id("channel")
|
||||
channel_name = channel.name
|
||||
return (
|
||||
Tag.select(Tag, Channel)
|
||||
.join(Channel, on=(Tag.id == Channel.linked_tag))
|
||||
.where(
|
||||
Channel.repository == repo,
|
||||
Channel.name == channel_name,
|
||||
Channel.tag_kind == tag_kind_id,
|
||||
Channel.lifetime_end.is_null(False),
|
||||
)
|
||||
.order_by(Tag.lifetime_end)
|
||||
)
|
||||
|
||||
|
||||
def get_channel(repo, channel_name, models_ref):
|
||||
"""
|
||||
Find a Channel by name.
|
||||
"""
|
||||
channel = tag_model.get_tag(repo, channel_name, models_ref, "channel")
|
||||
return channel
|
||||
|
||||
|
||||
def get_tag_channels(repo, tag_name, models_ref, active=True):
|
||||
"""
|
||||
Find the Channels associated with a Tag.
|
||||
"""
|
||||
Tag = models_ref.Tag
|
||||
|
||||
tag = tag_model.get_tag(repo, tag_name, models_ref, "release")
|
||||
query = tag.tag_parents
|
||||
|
||||
if active:
|
||||
query = tag_model.tag_is_alive(query, Tag)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
def delete_channel(repo, channel_name, models_ref):
|
||||
"""
|
||||
Delete a channel by name.
|
||||
"""
|
||||
return tag_model.delete_tag(repo, channel_name, models_ref, "channel")
|
||||
|
||||
|
||||
def create_or_update_channel(repo, channel_name, tag_name, models_ref):
|
||||
"""
|
||||
Creates or updates a channel to include a particular tag.
|
||||
"""
|
||||
tag = tag_model.get_tag(repo, tag_name, models_ref, "release")
|
||||
return tag_model.create_or_update_tag(
|
||||
repo, channel_name, models_ref, linked_tag=tag, tag_kind="channel"
|
||||
)
|
||||
|
||||
|
||||
def get_repo_channels(repo, models_ref):
|
||||
"""
|
||||
Returns the channels defined on a repository, joined with their linked tags.
|
||||
"""
|
||||
Channel = models_ref.Channel
|
||||
Tag = models_ref.Tag
|
||||
|
||||
tag_kind_id = Channel.tag_kind.get_id("channel")
|
||||
query = (
|
||||
Channel.select(Channel, Tag)
|
||||
.join(Tag, on=(Tag.id == Channel.linked_tag))
|
||||
.where(Channel.repository == repo, Channel.tag_kind == tag_kind_id)
|
||||
)
|
||||
return tag_model.tag_is_alive(query, Channel)
|
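In this schema a channel is itself a Tag of kind "channel" whose linked_tag points at a release tag, which is why the helpers above delegate to tag_model. A sketch of the call pattern, mirroring the removed tests (repo is assumed to be an application Repository row, and the channel and release names are illustrative):

from data.appr_model import channel as channel_model
from data.appr_model.models import NEW_MODELS

# Point the "stable" channel at an existing "1.0.0" release tag of the app repository.
channel_model.create_or_update_channel(repo, "stable", "1.0.0", NEW_MODELS)

# List the channels currently defined on the repository.
channels = channel_model.get_repo_channels(repo, NEW_MODELS)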
@@ -1,75 +0,0 @@
|
||||
import logging
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
from cnr.models.package_base import get_media_type
|
||||
|
||||
from data.database import db_transaction, MediaType
|
||||
from data.appr_model import tag as tag_model
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _ensure_sha256_header(digest):
|
||||
if digest.startswith("sha256:"):
|
||||
return digest
|
||||
return "sha256:" + digest
|
||||
|
||||
|
||||
def _digest(manifestjson):
|
||||
return _ensure_sha256_header(
|
||||
hashlib.sha256(json.dumps(manifestjson, sort_keys=True).encode("utf-8")).hexdigest()
|
||||
)
|
||||
|
||||
|
||||
def get_manifest_query(digest, media_type, models_ref):
|
||||
Manifest = models_ref.Manifest
|
||||
return Manifest.select().where(
|
||||
Manifest.digest == _ensure_sha256_header(digest),
|
||||
Manifest.media_type == Manifest.media_type.get_id(media_type),
|
||||
)
|
||||
|
||||
|
||||
def get_manifest_with_blob(digest, media_type, models_ref):
|
||||
Blob = models_ref.Blob
|
||||
query = get_manifest_query(digest, media_type, models_ref)
|
||||
return query.join(Blob).get()
|
||||
|
||||
|
||||
def get_or_create_manifest(manifest_json, media_type_name, models_ref):
|
||||
Manifest = models_ref.Manifest
|
||||
digest = _digest(manifest_json)
|
||||
try:
|
||||
manifest = get_manifest_query(digest, media_type_name, models_ref).get()
|
||||
except Manifest.DoesNotExist:
|
||||
with db_transaction():
|
||||
manifest = Manifest.create(
|
||||
digest=digest,
|
||||
manifest_json=manifest_json,
|
||||
media_type=Manifest.media_type.get_id(media_type_name),
|
||||
)
|
||||
return manifest
|
||||
|
||||
|
||||
def get_manifest_types(repo, models_ref, release=None):
|
||||
"""
|
||||
Returns an array of MediaType.name values for a repo, optionally filtered by tag.
|
||||
"""
|
||||
Tag = models_ref.Tag
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
|
||||
query = tag_model.tag_is_alive(
|
||||
Tag.select(MediaType.name)
|
||||
.join(ManifestListManifest, on=(ManifestListManifest.manifest_list == Tag.manifest_list))
|
||||
.join(MediaType, on=(ManifestListManifest.media_type == MediaType.id))
|
||||
.where(Tag.repository == repo, Tag.tag_kind == Tag.tag_kind.get_id("release")),
|
||||
Tag,
|
||||
)
|
||||
if release:
|
||||
query = query.where(Tag.name == release)
|
||||
|
||||
manifests = set()
|
||||
for m in query.distinct().tuples():
|
||||
manifests.add(get_media_type(m[0]))
|
||||
return manifests
|
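The _digest helper above derives a content-addressable identifier from the canonical (sort_keys=True) JSON encoding of the manifest, which is what makes get_or_create_manifest idempotent. The same computation in isolation, as a minimal sketch:

import hashlib
import json

def manifest_digest(manifest_json):
    # Canonical JSON (sorted keys) so logically equal manifests hash identically.
    encoded = json.dumps(manifest_json, sort_keys=True).encode("utf-8")
    return "sha256:" + hashlib.sha256(encoded).hexdigest()

# Two dicts with the same content produce the same digest regardless of key order.
assert manifest_digest({"a": 1, "b": 2}) == manifest_digest({"b": 2, "a": 1})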
@@ -1,80 +0,0 @@
|
||||
import logging
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
from data.database import db_transaction
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _ensure_sha256_header(digest):
|
||||
if digest.startswith("sha256:"):
|
||||
return digest
|
||||
return "sha256:" + digest
|
||||
|
||||
|
||||
def _digest(manifestjson):
|
||||
return _ensure_sha256_header(
|
||||
hashlib.sha256(json.dumps(manifestjson, sort_keys=True).encode("utf-8")).hexdigest()
|
||||
)
|
||||
|
||||
|
||||
def get_manifest_list(digest, models_ref):
|
||||
ManifestList = models_ref.ManifestList
|
||||
return ManifestList.select().where(ManifestList.digest == _ensure_sha256_header(digest)).get()
|
||||
|
||||
|
||||
def get_or_create_manifest_list(manifest_list_json, media_type_name, schema_version, models_ref):
|
||||
ManifestList = models_ref.ManifestList
|
||||
|
||||
digest = _digest(manifest_list_json)
|
||||
media_type_id = ManifestList.media_type.get_id(media_type_name)
|
||||
|
||||
try:
|
||||
return get_manifest_list(digest, models_ref)
|
||||
except ManifestList.DoesNotExist:
|
||||
with db_transaction():
|
||||
manifestlist = ManifestList.create(
|
||||
digest=digest,
|
||||
manifest_list_json=manifest_list_json,
|
||||
schema_version=schema_version,
|
||||
media_type=media_type_id,
|
||||
)
|
||||
return manifestlist
|
||||
|
||||
|
||||
def create_manifestlistmanifest(manifestlist, manifest_ids, manifest_list_json, models_ref):
|
||||
"""
|
||||
From a manifest list, its manifests, and the manifest list JSON, create the
manifestlistmanifest entry for each manifest if it does not already exist.
|
||||
"""
|
||||
for pos in range(len(manifest_ids)):
|
||||
manifest_id = manifest_ids[pos]
|
||||
manifest_json = manifest_list_json[pos]
|
||||
get_or_create_manifestlistmanifest(
|
||||
manifest=manifest_id,
|
||||
manifestlist=manifestlist,
|
||||
media_type_name=manifest_json["mediaType"],
|
||||
models_ref=models_ref,
|
||||
)
|
||||
|
||||
|
||||
def get_or_create_manifestlistmanifest(manifest, manifestlist, media_type_name, models_ref):
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
|
||||
media_type_id = ManifestListManifest.media_type.get_id(media_type_name)
|
||||
try:
|
||||
ml = (
|
||||
ManifestListManifest.select().where(
|
||||
ManifestListManifest.manifest == manifest,
|
||||
ManifestListManifest.media_type == media_type_id,
|
||||
ManifestListManifest.manifest_list == manifestlist,
|
||||
)
|
||||
).get()
|
||||
|
||||
except ManifestListManifest.DoesNotExist:
|
||||
ml = ManifestListManifest.create(
|
||||
manifest_list=manifestlist, media_type=media_type_id, manifest=manifest
|
||||
)
|
||||
return ml
|
@@ -1,47 +0,0 @@
|
||||
from collections import namedtuple
|
||||
|
||||
from data.database import (
|
||||
ApprTag,
|
||||
ApprTagKind,
|
||||
ApprBlobPlacementLocation,
|
||||
ApprManifestList,
|
||||
ApprManifestBlob,
|
||||
ApprBlob,
|
||||
ApprManifestListManifest,
|
||||
ApprManifest,
|
||||
ApprBlobPlacement,
|
||||
ApprChannel,
|
||||
)
|
||||
|
||||
ModelsRef = namedtuple(
|
||||
"ModelsRef",
|
||||
[
|
||||
"Tag",
|
||||
"TagKind",
|
||||
"BlobPlacementLocation",
|
||||
"ManifestList",
|
||||
"ManifestBlob",
|
||||
"Blob",
|
||||
"ManifestListManifest",
|
||||
"Manifest",
|
||||
"BlobPlacement",
|
||||
"Channel",
|
||||
"manifestlistmanifest_set_name",
|
||||
"tag_set_prefetch_name",
|
||||
],
|
||||
)
|
||||
|
||||
NEW_MODELS = ModelsRef(
|
||||
ApprTag,
|
||||
ApprTagKind,
|
||||
ApprBlobPlacementLocation,
|
||||
ApprManifestList,
|
||||
ApprManifestBlob,
|
||||
ApprBlob,
|
||||
ApprManifestListManifest,
|
||||
ApprManifest,
|
||||
ApprBlobPlacement,
|
||||
ApprChannel,
|
||||
"apprmanifestlistmanifest_set",
|
||||
"apprtag_set",
|
||||
)
|
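The ModelsRef namedtuple above is how the appr layer injected its concrete Peewee classes into the helper functions, so the query code never imports the Appr* tables directly. A condensed sketch of that pattern (the two-field ModelsRef and the get_blob body here are reduced illustrations, not the full definitions from this diff):

from collections import namedtuple

from data.database import ApprBlob, ApprTag

# Bundle the concrete Peewee classes once...
ModelsRef = namedtuple("ModelsRef", ["Tag", "Blob"])
MODELS = ModelsRef(Tag=ApprTag, Blob=ApprBlob)

def get_blob(digest, models_ref):
    # ...and resolve them inside each helper, keeping the query code table-agnostic.
    Blob = models_ref.Blob
    return Blob.select().where(Blob.digest == digest).get()

blob = get_blob("sha256:abc123...", MODELS)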
@@ -1,82 +0,0 @@
|
||||
from cnr.models.package_base import get_media_type, manifest_media_type
|
||||
from peewee import prefetch
|
||||
|
||||
|
||||
from data import model
|
||||
from data.database import Repository, Namespace, RepositoryState
|
||||
from data.appr_model import tag as tag_model
|
||||
|
||||
|
||||
def list_packages_query(
|
||||
models_ref,
|
||||
namespace=None,
|
||||
media_type=None,
|
||||
search_query=None,
|
||||
username=None,
|
||||
limit=50,
|
||||
):
|
||||
"""
|
||||
List and filter repositories by search query.
|
||||
"""
|
||||
Tag = models_ref.Tag
|
||||
|
||||
if username and not search_query:
|
||||
repositories = model.repository.get_visible_repositories(
|
||||
username,
|
||||
kind_filter="application",
|
||||
include_public=True,
|
||||
namespace=namespace,
|
||||
limit=limit,
|
||||
)
|
||||
if not repositories:
|
||||
return []
|
||||
|
||||
repo_query = (
|
||||
Repository.select(Repository, Namespace.username)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.id << [repo.rid for repo in repositories])
|
||||
)
|
||||
|
||||
if namespace:
|
||||
repo_query = repo_query.where(Namespace.username == namespace)
|
||||
else:
|
||||
if search_query is not None:
|
||||
fields = [model.repository.SEARCH_FIELDS.name.name]
|
||||
repositories = model.repository.get_app_search(
|
||||
search_query, username=username, search_fields=fields, limit=limit
|
||||
)
|
||||
if not repositories:
|
||||
return []
|
||||
|
||||
repo_query = (
|
||||
Repository.select(Repository, Namespace.username)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(Repository.id << [repo.id for repo in repositories])
|
||||
)
|
||||
else:
|
||||
repo_query = (
|
||||
Repository.select(Repository, Namespace.username)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.where(
|
||||
Repository.visibility == model.repository.get_public_repo_visibility(),
|
||||
Repository.kind == Repository.kind.get_id("application"),
|
||||
)
|
||||
)
|
||||
|
||||
if namespace:
|
||||
repo_query = repo_query.where(Namespace.username == namespace)
|
||||
|
||||
repo_query = repo_query.where(Repository.state != RepositoryState.MARKED_FOR_DELETION)
|
||||
|
||||
tag_query = (
|
||||
Tag.select()
|
||||
.where(Tag.tag_kind == Tag.tag_kind.get_id("release"))
|
||||
.order_by(Tag.lifetime_start)
|
||||
)
|
||||
|
||||
if media_type:
|
||||
tag_query = tag_model.filter_tags_by_media_type(tag_query, media_type, models_ref)
|
||||
|
||||
tag_query = tag_model.tag_is_alive(tag_query, Tag)
|
||||
query = prefetch(repo_query, tag_query)
|
||||
return query
|
@@ -1,184 +0,0 @@
|
||||
import bisect
|
||||
|
||||
from cnr.exception import PackageAlreadyExists
|
||||
from cnr.models.package_base import manifest_media_type
|
||||
|
||||
from data.database import db_transaction, get_epoch_timestamp
|
||||
from data.appr_model import (
|
||||
blob as blob_model,
|
||||
manifest as manifest_model,
|
||||
manifest_list as manifest_list_model,
|
||||
tag as tag_model,
|
||||
)
|
||||
|
||||
|
||||
LIST_MEDIA_TYPE = "application/vnd.cnr.manifest.list.v0.json"
|
||||
SCHEMA_VERSION = "v0"
|
||||
|
||||
|
||||
def _ensure_sha256_header(digest):
|
||||
if digest.startswith("sha256:"):
|
||||
return digest
|
||||
return "sha256:" + digest
|
||||
|
||||
|
||||
def get_app_release(repo, tag_name, media_type, models_ref):
|
||||
"""
|
||||
Returns (tag, manifest, blob) given a repo object, tag_name, and media_type.
|
||||
"""
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
Manifest = models_ref.Manifest
|
||||
Blob = models_ref.Blob
|
||||
ManifestBlob = models_ref.ManifestBlob
|
||||
manifestlistmanifest_set_name = models_ref.manifestlistmanifest_set_name
|
||||
|
||||
tag = tag_model.get_tag(repo, tag_name, models_ref, tag_kind="release")
|
||||
media_type_id = ManifestListManifest.media_type.get_id(manifest_media_type(media_type))
|
||||
manifestlistmanifest = (
|
||||
getattr(tag.manifest_list, manifestlistmanifest_set_name)
|
||||
.join(Manifest)
|
||||
.where(ManifestListManifest.media_type == media_type_id)
|
||||
.get()
|
||||
)
|
||||
manifest = manifestlistmanifest.manifest
|
||||
blob = Blob.select().join(ManifestBlob).where(ManifestBlob.manifest == manifest).get()
|
||||
return (tag, manifest, blob)
|
||||
|
||||
|
||||
def delete_app_release(repo, tag_name, media_type, models_ref):
|
||||
"""Terminate a Tag/media-type couple
|
||||
It find the corresponding tag/manifest and remove from the manifestlistmanifest the manifest
|
||||
1. it terminates the current tag (in all-cases)
|
||||
2. if the new manifestlist is not empty, it creates a new tag for it
|
||||
"""
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
manifestlistmanifest_set_name = models_ref.manifestlistmanifest_set_name
|
||||
|
||||
media_type_id = ManifestListManifest.media_type.get_id(manifest_media_type(media_type))
|
||||
|
||||
with db_transaction():
|
||||
tag = tag_model.get_tag(repo, tag_name, models_ref)
|
||||
manifest_list = tag.manifest_list
|
||||
list_json = manifest_list.manifest_list_json
|
||||
mlm_query = ManifestListManifest.select().where(
|
||||
ManifestListManifest.manifest_list == tag.manifest_list
|
||||
)
|
||||
list_manifest_ids = sorted([mlm.manifest_id for mlm in mlm_query])
|
||||
manifestlistmanifest = (
|
||||
getattr(tag.manifest_list, manifestlistmanifest_set_name)
|
||||
.where(ManifestListManifest.media_type == media_type_id)
|
||||
.get()
|
||||
)
|
||||
index = list_manifest_ids.index(manifestlistmanifest.manifest_id)
|
||||
list_manifest_ids.pop(index)
|
||||
list_json.pop(index)
|
||||
|
||||
if not list_json:
|
||||
tag.lifetime_end = get_epoch_timestamp()
|
||||
tag.save()
|
||||
else:
|
||||
manifestlist = manifest_list_model.get_or_create_manifest_list(
|
||||
list_json, LIST_MEDIA_TYPE, SCHEMA_VERSION, models_ref
|
||||
)
|
||||
manifest_list_model.create_manifestlistmanifest(
|
||||
manifestlist, list_manifest_ids, list_json, models_ref
|
||||
)
|
||||
tag = tag_model.create_or_update_tag(
|
||||
repo, tag_name, models_ref, manifest_list=manifestlist, tag_kind="release"
|
||||
)
|
||||
return tag
|
||||
|
||||
|
||||
def create_app_release(repo, tag_name, manifest_data, digest, models_ref, force=False):
|
||||
"""
|
||||
Create a new application release. This includes creating a new Tag, ManifestList,
ManifestListManifest, Manifest, and ManifestBlob.

To deduplicate the ManifestList, the manifest_list_json is kept ordered by manifest.id. To
find the insertion point in the ManifestList, bisect is used on the list of manifest ids.
|
||||
"""
|
||||
ManifestList = models_ref.ManifestList
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
Blob = models_ref.Blob
|
||||
ManifestBlob = models_ref.ManifestBlob
|
||||
|
||||
with db_transaction():
|
||||
# Create/get the package manifest
|
||||
manifest = manifest_model.get_or_create_manifest(
|
||||
manifest_data, manifest_data["mediaType"], models_ref
|
||||
)
|
||||
# get the tag
|
||||
tag = tag_model.get_or_initialize_tag(repo, tag_name, models_ref)
|
||||
|
||||
if tag.manifest_list is None:
|
||||
tag.manifest_list = ManifestList(
|
||||
media_type=ManifestList.media_type.get_id(LIST_MEDIA_TYPE),
|
||||
schema_version=SCHEMA_VERSION,
|
||||
manifest_list_json=[],
|
||||
)
|
||||
|
||||
elif tag_model.tag_media_type_exists(tag, manifest.media_type, models_ref):
|
||||
if force:
|
||||
delete_app_release(repo, tag_name, manifest.media_type.name, models_ref)
|
||||
return create_app_release(
|
||||
repo, tag_name, manifest_data, digest, models_ref, force=False
|
||||
)
|
||||
else:
|
||||
raise PackageAlreadyExists("package exists already")
|
||||
|
||||
list_json = tag.manifest_list.manifest_list_json
|
||||
mlm_query = ManifestListManifest.select().where(
|
||||
ManifestListManifest.manifest_list == tag.manifest_list
|
||||
)
|
||||
list_manifest_ids = sorted([mlm.manifest_id for mlm in mlm_query])
|
||||
insert_point = bisect.bisect_left(list_manifest_ids, manifest.id)
|
||||
list_json.insert(insert_point, manifest.manifest_json)
|
||||
list_manifest_ids.insert(insert_point, manifest.id)
|
||||
manifestlist = manifest_list_model.get_or_create_manifest_list(
|
||||
list_json, LIST_MEDIA_TYPE, SCHEMA_VERSION, models_ref
|
||||
)
|
||||
manifest_list_model.create_manifestlistmanifest(
|
||||
manifestlist, list_manifest_ids, list_json, models_ref
|
||||
)
|
||||
|
||||
tag = tag_model.create_or_update_tag(
|
||||
repo, tag_name, models_ref, manifest_list=manifestlist, tag_kind="release"
|
||||
)
|
||||
blob_digest = digest
|
||||
|
||||
try:
|
||||
(
|
||||
ManifestBlob.select()
|
||||
.join(Blob)
|
||||
.where(
|
||||
ManifestBlob.manifest == manifest,
|
||||
Blob.digest == _ensure_sha256_header(blob_digest),
|
||||
)
|
||||
.get()
|
||||
)
|
||||
except ManifestBlob.DoesNotExist:
|
||||
blob = blob_model.get_blob(blob_digest, models_ref)
|
||||
ManifestBlob.create(manifest=manifest, blob=blob)
|
||||
return tag
|
||||
|
||||
|
||||
def get_release_objs(repo, models_ref, media_type=None):
|
||||
"""
|
||||
Returns an array of Tag for a repo, with optional filtering by media_type.
|
||||
"""
|
||||
Tag = models_ref.Tag
|
||||
|
||||
release_query = Tag.select().where(
|
||||
Tag.repository == repo, Tag.tag_kind == Tag.tag_kind.get_id("release")
|
||||
)
|
||||
if media_type:
|
||||
release_query = tag_model.filter_tags_by_media_type(release_query, media_type, models_ref)
|
||||
|
||||
return tag_model.tag_is_alive(release_query, Tag)
|
||||
|
||||
|
||||
def get_releases(repo, model_refs, media_type=None):
|
||||
"""
|
||||
Returns an array of Tag.name for a repo, optionally filtered by media_type.
|
||||
"""
|
||||
return [t.name for t in get_release_objs(repo, model_refs, media_type)]
|
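create_app_release keeps manifest_list_json ordered by manifest id and uses bisect to find the insertion point, so the same set of manifests always serializes to the same list and therefore the same ManifestList digest. The ordering trick in isolation (the dict entries stand in for manifest JSON):

import bisect

# Manifest ids already linked to the list, kept sorted, with their JSON entries in the same order.
list_manifest_ids = [3, 8, 21]
list_json = [{"id": 3}, {"id": 8}, {"id": 21}]

new_id, new_entry = 11, {"id": 11}
insert_point = bisect.bisect_left(list_manifest_ids, new_id)
list_manifest_ids.insert(insert_point, new_id)
list_json.insert(insert_point, new_entry)

assert list_manifest_ids == [3, 8, 11, 21]  # deterministic order enables deduplication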
@@ -1,149 +0,0 @@
|
||||
import logging
|
||||
|
||||
from cnr.models.package_base import manifest_media_type
|
||||
from peewee import IntegrityError
|
||||
|
||||
from data.model import db_transaction, TagAlreadyCreatedException
|
||||
from data.database import get_epoch_timestamp_ms, db_for_update
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def tag_is_alive(query, cls, now_ts=None):
|
||||
return query.where((cls.lifetime_end >> None) | (cls.lifetime_end > now_ts))
|
||||
|
||||
|
||||
def tag_media_type_exists(tag, media_type, models_ref):
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
manifestlistmanifest_set_name = models_ref.manifestlistmanifest_set_name
|
||||
return (
|
||||
getattr(tag.manifest_list, manifestlistmanifest_set_name)
|
||||
.where(ManifestListManifest.media_type == media_type)
|
||||
.count()
|
||||
> 0
|
||||
)
|
||||
|
||||
|
||||
def create_or_update_tag(
|
||||
repo, tag_name, models_ref, manifest_list=None, linked_tag=None, tag_kind="release"
|
||||
):
|
||||
Tag = models_ref.Tag
|
||||
|
||||
now_ts = get_epoch_timestamp_ms()
|
||||
tag_kind_id = Tag.tag_kind.get_id(tag_kind)
|
||||
with db_transaction():
|
||||
try:
|
||||
tag = db_for_update(
|
||||
tag_is_alive(
|
||||
Tag.select().where(
|
||||
Tag.repository == repo, Tag.name == tag_name, Tag.tag_kind == tag_kind_id
|
||||
),
|
||||
Tag,
|
||||
now_ts,
|
||||
)
|
||||
).get()
|
||||
if tag.manifest_list == manifest_list and tag.linked_tag == linked_tag:
|
||||
return tag
|
||||
tag.lifetime_end = now_ts
|
||||
tag.save()
|
||||
except Tag.DoesNotExist:
|
||||
pass
|
||||
|
||||
try:
|
||||
return Tag.create(
|
||||
repository=repo,
|
||||
manifest_list=manifest_list,
|
||||
linked_tag=linked_tag,
|
||||
name=tag_name,
|
||||
lifetime_start=now_ts,
|
||||
lifetime_end=None,
|
||||
tag_kind=tag_kind_id,
|
||||
)
|
||||
except IntegrityError:
|
||||
msg = "Tag with name %s and lifetime start %s under repository %s/%s already exists"
|
||||
raise TagAlreadyCreatedException(
|
||||
msg % (tag_name, now_ts, repo.namespace_user, repo.name)
|
||||
)
|
||||
|
||||
|
||||
def get_or_initialize_tag(repo, tag_name, models_ref, tag_kind="release"):
|
||||
Tag = models_ref.Tag
|
||||
|
||||
try:
|
||||
return tag_is_alive(
|
||||
Tag.select().where(Tag.repository == repo, Tag.name == tag_name), Tag
|
||||
).get()
|
||||
except Tag.DoesNotExist:
|
||||
return Tag(repo=repo, name=tag_name, tag_kind=Tag.tag_kind.get_id(tag_kind))
|
||||
|
||||
|
||||
def get_tag(repo, tag_name, models_ref, tag_kind="release"):
|
||||
Tag = models_ref.Tag
|
||||
return tag_is_alive(
|
||||
Tag.select().where(
|
||||
Tag.repository == repo,
|
||||
Tag.name == tag_name,
|
||||
Tag.tag_kind == Tag.tag_kind.get_id(tag_kind),
|
||||
),
|
||||
Tag,
|
||||
).get()
|
||||
|
||||
|
||||
def delete_tag(repo, tag_name, models_ref, tag_kind="release"):
|
||||
Tag = models_ref.Tag
|
||||
tag_kind_id = Tag.tag_kind.get_id(tag_kind)
|
||||
tag = tag_is_alive(
|
||||
Tag.select().where(
|
||||
Tag.repository == repo, Tag.name == tag_name, Tag.tag_kind == tag_kind_id
|
||||
),
|
||||
Tag,
|
||||
).get()
|
||||
tag.lifetime_end = get_epoch_timestamp_ms()
|
||||
tag.save()
|
||||
return tag
|
||||
|
||||
|
||||
def tag_exists(repo, tag_name, models_ref, tag_kind="release"):
|
||||
Tag = models_ref.Tag
|
||||
try:
|
||||
get_tag(repo, tag_name, models_ref, tag_kind)
|
||||
return True
|
||||
except Tag.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def filter_tags_by_media_type(tag_query, media_type, models_ref):
|
||||
"""
|
||||
Return only the tags available for a given media_type.
|
||||
"""
|
||||
ManifestListManifest = models_ref.ManifestListManifest
|
||||
Tag = models_ref.Tag
|
||||
media_type = manifest_media_type(media_type)
|
||||
t = tag_query.join(
|
||||
ManifestListManifest, on=(ManifestListManifest.manifest_list == Tag.manifest_list)
|
||||
).where(ManifestListManifest.media_type == ManifestListManifest.media_type.get_id(media_type))
|
||||
return t
|
||||
|
||||
|
||||
def get_most_recent_tag_lifetime_start(repository_ids, models_ref, tag_kind="release"):
|
||||
"""
|
||||
Returns a map from repo ID to the timestamp of the most recently pushed alive tag for each
|
||||
specified repository or None if none.
|
||||
"""
|
||||
if not repository_ids:
|
||||
return {}
|
||||
|
||||
assert len(repository_ids) > 0 and None not in repository_ids
|
||||
|
||||
Tag = models_ref.Tag
|
||||
tag_kind_id = Tag.tag_kind.get_id(tag_kind)
|
||||
tags = tag_is_alive(
|
||||
Tag.select().where(
|
||||
Tag.repository << [rid for rid in repository_ids], Tag.tag_kind == tag_kind_id
|
||||
),
|
||||
Tag,
|
||||
)
|
||||
to_seconds = lambda ms: ms // 1000 if ms is not None else None
|
||||
|
||||
return {t.repository.id: to_seconds(t.lifetime_start) for t in tags}
|
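Tags in this schema are never deleted in place: delete_tag and create_or_update_tag set lifetime_end, and tag_is_alive filters for rows whose lifetime_end is NULL or still in the future. A small sketch of that filter, following the same pattern as get_tag above (repo is assumed to be a Repository row):

from data.appr_model import tag as tag_model
from data.appr_model.models import NEW_MODELS
from data.database import get_epoch_timestamp_ms

def alive_release_tags(repo):
    # Only tags whose lifetime has not ended count as "alive"; history stays in the table.
    Tag = NEW_MODELS.Tag
    return tag_model.tag_is_alive(
        Tag.select().where(
            Tag.repository == repo,
            Tag.tag_kind == Tag.tag_kind.get_id("release"),
        ),
        Tag,
        get_epoch_timestamp_ms(),
    )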
@@ -1,10 +0,0 @@
from data.appr_model import tag as apprtags_model
from data.appr_model.tag import get_most_recent_tag_lifetime_start
from endpoints.appr.models_cnr import model as appr_model

from test.fixtures import *


def test_empty_get_most_recent_tag_lifetime_start(initialized_db):
    tags = apprtags_model.get_most_recent_tag_lifetime_start([], appr_model.models_ref)
    assert isinstance(tags, dict) and len(tags) == 0
@@ -7,14 +7,8 @@ import features
|
||||
from auth.permissions import ReadRepositoryPermission
|
||||
from data.database import Repository as RepositoryTable, RepositoryState
|
||||
from data import model
|
||||
from data.appr_model import (
|
||||
channel as channel_model,
|
||||
release as release_model,
|
||||
tag as apprtags_model,
|
||||
)
|
||||
from data.registry_model import registry_model
|
||||
from data.registry_model.datatypes import RepositoryReference
|
||||
from endpoints.appr.models_cnr import model as appr_model
|
||||
from endpoints.api.repository_models_interface import (
|
||||
RepositoryDataInterface,
|
||||
RepositoryBaseElement,
|
||||
@@ -140,12 +134,8 @@ class PreOCIModel(RepositoryDataInterface):
|
||||
repository_ids = [repo.rid for repo in repos]
|
||||
|
||||
if last_modified:
|
||||
last_modified_map = (
|
||||
registry_model.get_most_recent_tag_lifetime_start(repository_refs)
|
||||
if repo_kind == "image"
|
||||
else apprtags_model.get_most_recent_tag_lifetime_start(
|
||||
repository_ids, appr_model.models_ref
|
||||
)
|
||||
last_modified_map = registry_model.get_most_recent_tag_lifetime_start(
|
||||
repository_refs
|
||||
)
|
||||
|
||||
if popularity:
|
||||
@@ -237,20 +227,6 @@ class PreOCIModel(RepositoryDataInterface):
|
||||
repo.state,
|
||||
)
|
||||
|
||||
if base.kind_name == "application":
|
||||
channels = channel_model.get_repo_channels(repo, appr_model.models_ref)
|
||||
releases = release_model.get_release_objs(repo, appr_model.models_ref)
|
||||
releases_channels_map = defaultdict(list)
|
||||
return ApplicationRepository(
|
||||
base,
|
||||
[_create_channel(channel, releases_channels_map) for channel in channels],
|
||||
[
|
||||
Release(release.name, release.lifetime_start, releases_channels_map)
|
||||
for release in releases
|
||||
],
|
||||
repo.state,
|
||||
)
|
||||
|
||||
tags = None
|
||||
repo_ref = RepositoryReference.for_repo_obj(repo)
|
||||
if include_tags:
|
||||
|
@@ -3,8 +3,6 @@ import pytest
|
||||
from mock import patch, ANY, MagicMock
|
||||
|
||||
from data import model, database
|
||||
from data.appr_model import release, channel, blob
|
||||
from endpoints.appr.models_cnr import model as appr_model
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.repository import RepositoryTrust, Repository, RepositoryList
|
||||
from endpoints.test.shared import client_with_identity
|
||||
@@ -111,41 +109,6 @@ def test_list_repositories_last_modified(client):
|
||||
assert repo["last_modified"] is not None
|
||||
|
||||
|
||||
def test_list_app_repositories_last_modified(client):
|
||||
with client_with_identity("devtable", client) as cl:
|
||||
devtable = model.user.get_user("devtable")
|
||||
repo = model.repository.create_repository(
|
||||
"devtable", "someappr", devtable, repo_kind="application"
|
||||
)
|
||||
|
||||
models_ref = appr_model.models_ref
|
||||
blob.get_or_create_blob(
|
||||
"sha256:somedigest", 0, "application/vnd.cnr.blob.v0.tar+gzip", ["local_us"], models_ref
|
||||
)
|
||||
|
||||
release.create_app_release(
|
||||
repo,
|
||||
"test",
|
||||
dict(mediaType="application/vnd.cnr.package-manifest.helm.v0.json"),
|
||||
"sha256:somedigest",
|
||||
models_ref,
|
||||
False,
|
||||
)
|
||||
|
||||
channel.create_or_update_channel(repo, "somechannel", "test", models_ref)
|
||||
|
||||
params = {
|
||||
"namespace": "devtable",
|
||||
"last_modified": "true",
|
||||
"repo_kind": "application",
|
||||
}
|
||||
response = conduct_api_call(cl, RepositoryList, "GET", params).json
|
||||
|
||||
assert len(response["repositories"]) > 0
|
||||
for repo in response["repositories"]:
|
||||
assert repo["last_modified"] is not None
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"repo_name, extended_repo_names, expected_status",
|
||||
[
|
||||
@@ -207,36 +170,6 @@ def test_get_repo(has_tag_manifest, client, initialized_db):
|
||||
assert response["state"] in ["NORMAL", "MIRROR", "READ_ONLY", "MARKED_FOR_DELETION"]
|
||||
|
||||
|
||||
def test_get_app_repo(client, initialized_db):
|
||||
with client_with_identity("devtable", client) as cl:
|
||||
devtable = model.user.get_user("devtable")
|
||||
repo = model.repository.create_repository(
|
||||
"devtable", "someappr", devtable, repo_kind="application"
|
||||
)
|
||||
|
||||
models_ref = appr_model.models_ref
|
||||
blob.get_or_create_blob(
|
||||
"sha256:somedigest", 0, "application/vnd.cnr.blob.v0.tar+gzip", ["local_us"], models_ref
|
||||
)
|
||||
|
||||
release.create_app_release(
|
||||
repo,
|
||||
"test",
|
||||
dict(mediaType="application/vnd.cnr.package-manifest.helm.v0.json"),
|
||||
"sha256:somedigest",
|
||||
models_ref,
|
||||
False,
|
||||
)
|
||||
|
||||
channel.create_or_update_channel(repo, "somechannel", "test", models_ref)
|
||||
|
||||
params = {"repository": "devtable/someappr"}
|
||||
response = conduct_api_call(cl, Repository, "GET", params).json
|
||||
assert response["kind"] == "application"
|
||||
assert response["channels"]
|
||||
assert response["releases"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"state, can_write",
|
||||
[
|
||||
|
@@ -1,51 +0,0 @@
|
||||
import logging
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from cnr.exception import Forbidden
|
||||
from flask import Blueprint
|
||||
|
||||
from auth.permissions import (
|
||||
AdministerRepositoryPermission,
|
||||
ReadRepositoryPermission,
|
||||
ModifyRepositoryPermission,
|
||||
)
|
||||
from endpoints.appr.decorators import require_repo_permission
|
||||
from util.metrics.prometheus import timed_blueprint
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
appr_bp = timed_blueprint(Blueprint("appr", __name__))
|
||||
|
||||
|
||||
def _raise_method(repository, scopes):
|
||||
raise Forbidden(
|
||||
"Unauthorized access for: %s" % repository, {"package": repository, "scopes": scopes}
|
||||
)
|
||||
|
||||
|
||||
def _get_reponame_kwargs(*args, **kwargs):
|
||||
return [kwargs["namespace"], kwargs["package_name"]]
|
||||
|
||||
|
||||
require_app_repo_read = require_repo_permission(
|
||||
ReadRepositoryPermission,
|
||||
scopes=["pull"],
|
||||
allow_public=True,
|
||||
raise_method=_raise_method,
|
||||
get_reponame_method=_get_reponame_kwargs,
|
||||
)
|
||||
|
||||
require_app_repo_write = require_repo_permission(
|
||||
ModifyRepositoryPermission,
|
||||
scopes=["pull", "push"],
|
||||
raise_method=_raise_method,
|
||||
get_reponame_method=_get_reponame_kwargs,
|
||||
)
|
||||
|
||||
require_app_repo_admin = require_repo_permission(
|
||||
AdministerRepositoryPermission,
|
||||
scopes=["pull", "push"],
|
||||
raise_method=_raise_method,
|
||||
get_reponame_method=_get_reponame_kwargs,
|
||||
)
|
@@ -1,308 +0,0 @@
|
||||
import base64
|
||||
import io
|
||||
import tarfile
|
||||
import gzip
|
||||
import hashlib
|
||||
import os
|
||||
import threading
|
||||
|
||||
|
||||
from cnr.exception import raise_package_not_found
|
||||
from cnr.models.blob_base import BlobBase
|
||||
from cnr.models.channel_base import ChannelBase
|
||||
from cnr.models.db_base import CnrDB
|
||||
from cnr.models.package_base import PackageBase, manifest_media_type
|
||||
|
||||
from flask import request
|
||||
from app import storage
|
||||
from digest.digest_tools import Digest, InvalidDigestException
|
||||
from endpoints.appr.models_cnr import model
|
||||
from util.request import get_request_ip
|
||||
|
||||
|
||||
# NOTE: This is a copy of the Package class from the CNR implementation, modified to lazy
|
||||
# load the tar contents and BytesIO so that we can avoid doing so in common GET operations
|
||||
# such as simply retrieving Blobs.
|
||||
class LazyPackage(object):
|
||||
def __init__(self, blob=None, b64_encoded=True):
|
||||
self.files = {}
|
||||
self.tar = None
|
||||
self.blob = None
|
||||
self._io_file = None
|
||||
self._digest = None
|
||||
self._size = None
|
||||
self.b64blob = None
|
||||
if blob is not None:
|
||||
self.load(blob, b64_encoded)
|
||||
|
||||
self.lock = threading.RLock()
|
||||
|
||||
def _load_blob(self, blob, b64_encoded):
|
||||
if b64_encoded:
|
||||
self.b64blob = blob
|
||||
self.blob = base64.b64decode(blob)
|
||||
else:
|
||||
self.b64blob = base64.b64encode(blob)
|
||||
self.blob = blob
|
||||
|
||||
def load(self, blob, b64_encoded=True):
|
||||
self._digest = None
|
||||
self._load_blob(blob, b64_encoded)
|
||||
|
||||
@property
|
||||
def io_file(self):
|
||||
self._lazy_load_file()
|
||||
return self._io_file
|
||||
|
||||
def _lazy_load_file(self):
|
||||
with self.lock:
|
||||
if self._io_file is not None:
|
||||
return
|
||||
|
||||
self._io_file = io.BytesIO(self.blob)
|
||||
|
||||
def _lazy_load_tar(self):
|
||||
with self.lock:
|
||||
if self.tar is not None:
|
||||
return
|
||||
|
||||
self.tar = tarfile.open(fileobj=self.io_file, mode="r:gz")
|
||||
for member in self.tar.getmembers():
|
||||
tfile = self.tar.extractfile(member)
|
||||
if tfile is not None:
|
||||
self.files[tfile.name] = tfile.read()
|
||||
|
||||
def extract(self, dest):
|
||||
self._lazy_load_tar()
|
||||
self.tar.extractall(dest)
|
||||
|
||||
def pack(self, dest):
|
||||
with open(dest, "wb") as destfile:
|
||||
destfile.write(self.blob)
|
||||
|
||||
def tree(self, directory=None):
|
||||
self._lazy_load_tar()
|
||||
files = self.files.keys()
|
||||
files.sort()
|
||||
if directory is not None:
|
||||
filtered = [x for x in files if x.startswith(directory)]
|
||||
else:
|
||||
filtered = files
|
||||
return filtered
|
||||
|
||||
def file(self, filename):
|
||||
self._lazy_load_tar()
|
||||
return self.files[filename]
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
self._lazy_load_file()
|
||||
if self._size is None:
|
||||
self.io_file.seek(0, os.SEEK_END)
|
||||
self._size = self.io_file.tell()
|
||||
return self._size
|
||||
|
||||
@property
|
||||
def digest(self):
|
||||
self._lazy_load_file()
|
||||
if self._digest is None:
|
||||
self.io_file.seek(0)
|
||||
gunzip = gzip.GzipFile(fileobj=self.io_file, mode="r").read()
|
||||
self._digest = hashlib.sha256(gunzip).hexdigest()
|
||||
self.io_file.seek(0)
|
||||
return self._digest
|
||||
|
||||
|
||||
class Blob(BlobBase):
|
||||
def __init__(self, package_name, blob, b64_encoded=True):
|
||||
self.package = package_name
|
||||
self.packager = LazyPackage(blob, b64_encoded)
|
||||
|
||||
@classmethod
|
||||
def upload_url(cls, digest):
|
||||
# Ensure we have a valid digest.
|
||||
try:
|
||||
Digest.parse_digest("sha256:%s" % digest)
|
||||
except InvalidDigestException:
|
||||
return None
|
||||
|
||||
return "cnr/blobs/sha256/%s/%s" % (digest[0:2], digest)
|
||||
|
||||
def save(self, content_media_type):
|
||||
model.store_blob(self, content_media_type)
|
||||
|
||||
@classmethod
|
||||
def delete(cls, package_name, digest):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def _fetch_b64blob(cls, package_name, digest):
|
||||
blobpath = cls.upload_url(digest)
|
||||
if blobpath is None:
|
||||
raise_package_not_found(package_name, digest)
|
||||
|
||||
locations = model.get_blob_locations(digest)
|
||||
if not locations:
|
||||
raise_package_not_found(package_name, digest)
|
||||
return base64.b64encode(storage.get_content(locations, blobpath))
|
||||
|
||||
@classmethod
|
||||
def download_url(cls, package_name, digest):
|
||||
blobpath = cls.upload_url(digest)
|
||||
if blobpath is None:
|
||||
raise_package_not_found(package_name, digest)
|
||||
|
||||
locations = model.get_blob_locations(digest)
|
||||
if not locations:
|
||||
raise_package_not_found(package_name, digest)
|
||||
return storage.get_direct_download_url(locations, blobpath, get_request_ip())
|
||||
|
||||
|
||||
class Channel(ChannelBase):
|
||||
"""
|
||||
CNR Channel model implemented against the Quay data model.
|
||||
"""
|
||||
|
||||
def __init__(self, name, package, current=None):
|
||||
super(Channel, self).__init__(name, package, current=current)
|
||||
self._channel_data = None
|
||||
|
||||
def _exists(self):
|
||||
"""
|
||||
Check if the channel is saved already.
|
||||
"""
|
||||
return model.channel_exists(self.package, self.name)
|
||||
|
||||
@classmethod
|
||||
def get(cls, name, package):
|
||||
chanview = model.fetch_channel(package, name, with_releases=False)
|
||||
return cls(name, package, chanview.current)
|
||||
|
||||
def save(self):
|
||||
model.update_channel(self.package, self.name, self.current)
|
||||
|
||||
def delete(self):
|
||||
model.delete_channel(self.package, self.name)
|
||||
|
||||
@classmethod
|
||||
def all(cls, package_name):
|
||||
return [Channel(c.name, package_name, c.current) for c in model.list_channels(package_name)]
|
||||
|
||||
@property
|
||||
def _channel(self):
|
||||
if self._channel_data is None:
|
||||
self._channel_data = model.fetch_channel(self.package, self.name)
|
||||
return self._channel_data
|
||||
|
||||
def releases(self):
|
||||
"""
|
||||
Returns the list of versions.
|
||||
"""
|
||||
return self._channel.releases
|
||||
|
||||
def _add_release(self, release):
|
||||
return model.update_channel(self.package, self.name, release)._asdict
|
||||
|
||||
def _remove_release(self, release):
|
||||
model.delete_channel(self.package, self.name)
|
||||
|
||||
|
||||
class User(object):
|
||||
"""
|
||||
User in CNR models.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def get_user(cls, username, password):
|
||||
"""
|
||||
Returns True if the user credentials are valid.
|
||||
"""
|
||||
return model.get_user(username, password)
|
||||
|
||||
|
||||
class Package(PackageBase):
|
||||
"""
|
||||
CNR Package model implemented against the Quay data model.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _apptuple_to_dict(cls, apptuple):
|
||||
return {
|
||||
"release": apptuple.release,
|
||||
"created_at": apptuple.created_at,
|
||||
"digest": apptuple.manifest.digest,
|
||||
"mediaType": apptuple.manifest.mediaType,
|
||||
"package": apptuple.name,
|
||||
"content": apptuple.manifest.content._asdict(),
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def create_repository(cls, package_name, visibility, owner):
|
||||
model.create_application(package_name, visibility, owner)
|
||||
|
||||
@classmethod
|
||||
def exists(cls, package_name):
|
||||
return model.application_exists(package_name)
|
||||
|
||||
@classmethod
|
||||
def all(cls, organization=None, media_type=None, search=None, username=None, **kwargs):
|
||||
return [
|
||||
dict(x._asdict())
|
||||
for x in model.list_applications(
|
||||
namespace=organization, media_type=media_type, search=search, username=username
|
||||
)
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def _fetch(cls, package_name, release, media_type):
|
||||
data = model.fetch_release(package_name, release, manifest_media_type(media_type))
|
||||
return cls._apptuple_to_dict(data)
|
||||
|
||||
@classmethod
|
||||
def all_releases(cls, package_name, media_type=None):
|
||||
return model.list_releases(package_name, media_type)
|
||||
|
||||
@classmethod
|
||||
def search(cls, query, username=None):
|
||||
return model.basic_search(query, username=username)
|
||||
|
||||
def _save(self, force=False, **kwargs):
|
||||
user = kwargs["user"]
|
||||
visibility = kwargs["visibility"]
|
||||
model.create_release(self, user, visibility, force)
|
||||
|
||||
@classmethod
|
||||
def _delete(cls, package_name, release, media_type):
|
||||
model.delete_release(package_name, release, manifest_media_type(media_type))
|
||||
|
||||
@classmethod
|
||||
def isdeleted_release(cls, package, release):
|
||||
return model.release_exists(package, release)
|
||||
|
||||
def channels(self, channel_class, iscurrent=True):
|
||||
return [
|
||||
c.name
|
||||
for c in model.list_release_channels(self.package, self.release, active=iscurrent)
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def manifests(cls, package, release=None):
|
||||
return model.list_manifests(package, release)
|
||||
|
||||
@classmethod
|
||||
def dump_all(cls, blob_cls):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class QuayDB(CnrDB):
|
||||
"""
|
||||
Wrapper Class to embed all CNR Models.
|
||||
"""
|
||||
|
||||
Channel = Channel
|
||||
Package = Package
|
||||
Blob = Blob
|
||||
|
||||
@classmethod
|
||||
def reset_db(cls, force=False):
|
||||
pass
|
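LazyPackage.digest above is the sha256 of the decompressed tar stream, not of the gzip bytes, so re-compressing the same tar always yields the same package digest. The same idea in isolation, as a minimal sketch:

import gzip
import hashlib
import io

def package_digest(gzipped_blob):
    # Hash the decompressed contents, as LazyPackage.digest does.
    with gzip.GzipFile(fileobj=io.BytesIO(gzipped_blob), mode="r") as f:
        return hashlib.sha256(f.read()).hexdigest()

payload = b"example tar bytes"
# Different gzip settings produce different compressed bytes but the same digest.
assert package_digest(gzip.compress(payload)) == package_digest(gzip.compress(payload, compresslevel=1))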
@@ -1,69 +0,0 @@
|
||||
import logging
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from data import model
|
||||
from util.http import abort
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _raise_unauthorized(repository, scopes):
|
||||
raise Exception("Unauthorized acces to %s", repository)
|
||||
|
||||
|
||||
def _get_reponame_kwargs(*args, **kwargs):
|
||||
return [kwargs["namespace"], kwargs["package_name"]]
|
||||
|
||||
|
||||
def disallow_for_image_repository(get_reponame_method=_get_reponame_kwargs):
|
||||
def wrapper(func):
|
||||
@wraps(func)
|
||||
def wrapped(*args, **kwargs):
|
||||
namespace_name, repo_name = get_reponame_method(*args, **kwargs)
|
||||
image_repo = model.repository.get_repository(
|
||||
namespace_name, repo_name, kind_filter="image"
|
||||
)
|
||||
if image_repo is not None:
|
||||
logger.debug("Tried to invoked a CNR method on an image repository")
|
||||
abort(
|
||||
405,
|
||||
message="Cannot push an application to an image repository with the same name",
|
||||
)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapped
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def require_repo_permission(
|
||||
permission_class,
|
||||
scopes=None,
|
||||
allow_public=False,
|
||||
raise_method=_raise_unauthorized,
|
||||
get_reponame_method=_get_reponame_kwargs,
|
||||
):
|
||||
def wrapper(func):
|
||||
@wraps(func)
|
||||
@disallow_for_image_repository(get_reponame_method=get_reponame_method)
|
||||
def wrapped(*args, **kwargs):
|
||||
namespace_name, repo_name = get_reponame_method(*args, **kwargs)
|
||||
logger.debug(
|
||||
"Checking permission %s for repo: %s/%s",
|
||||
permission_class,
|
||||
namespace_name,
|
||||
repo_name,
|
||||
)
|
||||
permission = permission_class(namespace_name, repo_name)
|
||||
if permission.can() or (
|
||||
allow_public and model.repository.repository_is_public(namespace_name, repo_name)
|
||||
):
|
||||
return func(*args, **kwargs)
|
||||
repository = namespace_name + "/" + repo_name
|
||||
raise_method(repository, scopes)
|
||||
|
||||
return wrapped
|
||||
|
||||
return wrapper
|
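require_repo_permission is a decorator factory: it resolves the namespace/package from the view kwargs, checks the given permission class (optionally allowing public repositories), and either calls the view or invokes raise_method. The appr blueprint applied the prebuilt variants roughly as in the sketch below; the route path and view name are illustrative, not taken from this diff:

from endpoints.appr import appr_bp, require_app_repo_read

# Hypothetical route: the decorator receives the namespace/package_name kwargs and only
# runs the view if ReadRepositoryPermission passes (or the repository is public).
@appr_bp.route("/api/v1/packages/<namespace>/<package_name>", methods=["GET"])
@require_app_repo_read
def show_package(namespace, package_name):
    ...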
@@ -1,439 +0,0 @@
|
||||
from collections import namedtuple
|
||||
from datetime import datetime
|
||||
|
||||
import cnr.semver
|
||||
|
||||
from cnr.exception import raise_package_not_found, raise_channel_not_found, CnrException
|
||||
|
||||
import features
|
||||
import data.model
|
||||
|
||||
from app import app, storage, authentication, model_cache
|
||||
from data import appr_model
|
||||
from data import model as data_model
|
||||
from data.cache import cache_key
|
||||
from data.database import Repository, MediaType, db_transaction
|
||||
from data.appr_model.models import NEW_MODELS
|
||||
from endpoints.appr.models_interface import (
|
||||
ApplicationManifest,
|
||||
ApplicationRelease,
|
||||
ApplicationSummaryView,
|
||||
AppRegistryDataInterface,
|
||||
BlobDescriptor,
|
||||
ChannelView,
|
||||
ChannelReleasesView,
|
||||
)
|
||||
from util.audit import track_and_log
|
||||
from util.morecollections import AttrDict
|
||||
from util.names import parse_robot_username
|
||||
|
||||
|
||||
class ReadOnlyException(CnrException):
|
||||
status_code = 405
|
||||
errorcode = "read-only"
|
||||
|
||||
|
||||
def _strip_sha256_header(digest):
|
||||
if digest.startswith("sha256:"):
|
||||
return digest.split("sha256:")[1]
|
||||
return digest
|
||||
|
||||
|
||||
def _split_package_name(package):
|
||||
"""
|
||||
Returns the namespace and package-name.
|
||||
"""
|
||||
return package.split("/")
|
||||
|
||||
|
||||
def _join_package_name(ns, name):
|
||||
"""
|
||||
Returns an app-name in the 'namespace/name' format.
|
||||
"""
|
||||
return "%s/%s" % (ns, name)
|
||||
|
||||
|
||||
def _timestamp_to_iso(timestamp, in_ms=True):
|
||||
if in_ms:
|
||||
timestamp = timestamp // 1000
|
||||
return datetime.fromtimestamp(timestamp).isoformat()
|
||||
|
||||
|
||||
def _application(package):
|
||||
ns, name = _split_package_name(package)
|
||||
repo = data.model.repository.get_app_repository(ns, name)
|
||||
if repo is None:
|
||||
raise_package_not_found(package)
|
||||
return repo
|
||||
|
||||
|
||||
class CNRAppModel(AppRegistryDataInterface):
|
||||
def __init__(self, models_ref, is_readonly):
|
||||
self.models_ref = models_ref
|
||||
self.is_readonly = is_readonly
|
||||
|
||||
def log_action(
|
||||
self,
|
||||
event_name,
|
||||
namespace_name,
|
||||
repo_name=None,
|
||||
analytics_name=None,
|
||||
analytics_sample=1,
|
||||
metadata=None,
|
||||
):
|
||||
metadata = {} if metadata is None else metadata
|
||||
|
||||
repo = None
|
||||
if repo_name is not None:
|
||||
db_repo = data.model.repository.get_repository(
|
||||
namespace_name, repo_name, kind_filter="application"
|
||||
)
|
||||
repo = AttrDict(
|
||||
{
|
||||
"id": db_repo.id,
|
||||
"name": db_repo.name,
|
||||
"namespace_name": db_repo.namespace_user.username,
|
||||
"is_free_namespace": db_repo.namespace_user.stripe_id is None,
|
||||
}
|
||||
)
|
||||
track_and_log(
|
||||
event_name,
|
||||
repo,
|
||||
analytics_name=analytics_name,
|
||||
analytics_sample=analytics_sample,
|
||||
**metadata,
|
||||
)
|
||||
|
||||
def list_applications(
|
||||
self, namespace=None, media_type=None, search=None, username=None, with_channels=False
|
||||
):
|
||||
"""
|
||||
Lists all repositories that contain applications, with optional filtering to a specific
namespace and to the repositories visible to a specific user.
|
||||
"""
|
||||
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
|
||||
namespace_whitelist = app.config.get("APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST", [])
|
||||
|
||||
# NOTE: This caching only applies for the super-large and commonly requested results
|
||||
# sets.
|
||||
if (
|
||||
namespace is not None
|
||||
and namespace in namespace_whitelist
|
||||
and media_type is None
|
||||
and search is None
|
||||
and username is None
|
||||
and not with_channels
|
||||
):
|
||||
|
||||
def _list_applications():
|
||||
return [
|
||||
found._asdict()
|
||||
for found in self._list_applications(namespace=namespace, limit=limit)
|
||||
]
|
||||
|
||||
apps_cache_key = cache_key.for_appr_applications_list(
|
||||
namespace, limit, model_cache.cache_config
|
||||
)
|
||||
return [
|
||||
ApplicationSummaryView(**found)
|
||||
for found in model_cache.retrieve(apps_cache_key, _list_applications)
|
||||
]
|
||||
else:
|
||||
return self._list_applications(
|
||||
namespace, media_type, search, username, with_channels, limit=limit
|
||||
)
|
||||
|
||||
def _list_applications(
|
||||
self,
|
||||
namespace=None,
|
||||
media_type=None,
|
||||
search=None,
|
||||
username=None,
|
||||
with_channels=False,
|
||||
limit=None,
|
||||
):
|
||||
limit = limit or app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
|
||||
views = []
|
||||
for repo in appr_model.package.list_packages_query(
|
||||
self.models_ref, namespace, media_type, search, username=username, limit=limit
|
||||
):
|
||||
tag_set_prefetch = getattr(repo, self.models_ref.tag_set_prefetch_name)
|
||||
releases = [t.name for t in tag_set_prefetch]
|
||||
if not releases:
|
||||
continue
|
||||
available_releases = [
|
||||
str(x) for x in sorted(cnr.semver.versions(releases, False), reverse=True)
|
||||
]
|
||||
channels = None
|
||||
if with_channels:
|
||||
channels = [
|
||||
ChannelView(name=chan.name, current=chan.linked_tag.name)
|
||||
for chan in appr_model.channel.get_repo_channels(repo, self.models_ref)
|
||||
]
|
||||
|
||||
app_name = _join_package_name(repo.namespace_user.username, repo.name)
|
||||
manifests = self.list_manifests(app_name, available_releases[0])
|
||||
view = ApplicationSummaryView(
|
||||
namespace=repo.namespace_user.username,
|
||||
name=app_name,
|
||||
visibility=data_model.repository.repository_visibility_name(repo),
|
||||
default=available_releases[0],
|
||||
channels=channels,
|
||||
manifests=manifests,
|
||||
releases=available_releases,
|
||||
updated_at=_timestamp_to_iso(tag_set_prefetch[-1].lifetime_start),
|
||||
created_at=_timestamp_to_iso(tag_set_prefetch[0].lifetime_start),
|
||||
)
|
||||
views.append(view)
|
||||
|
||||
return views
|
||||
|
||||
def application_is_public(self, package_name):
|
||||
"""
|
||||
Returns:
|
||||
* True if the repository is public
|
||||
"""
|
||||
namespace, name = _split_package_name(package_name)
|
||||
return data.model.repository.repository_is_public(namespace, name)
|
||||
|
||||
def create_application(self, package_name, visibility, owner):
|
||||
"""
|
||||
Create a new app repository; owner is the user who creates it.
|
||||
"""
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
ns, name = _split_package_name(package_name)
|
||||
data.model.repository.create_repository(ns, name, owner, visibility, "application")
|
||||
|
||||
def application_exists(self, package_name):
|
||||
"""
|
||||
Returns whether an application repository with the given name exists.
|
||||
"""
|
||||
ns, name = _split_package_name(package_name)
|
||||
return data.model.repository.get_repository(ns, name, kind_filter="application") is not None
|
||||
|
||||
def basic_search(self, query, username=None):
|
||||
"""Returns an array of matching AppRepositories in the format: 'namespace/name'
|
||||
Note:
|
||||
* Only 'public' repositories are returned
|
||||
|
||||
Todo:
|
||||
* Filter results by repositories readable by the user (including visibility)
|
||||
"""
|
||||
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
|
||||
return [
|
||||
_join_package_name(r.namespace_user.username, r.name)
|
||||
for r in data.model.repository.get_app_search(
|
||||
lookup=query, username=username, limit=limit
|
||||
)
|
||||
]
|
||||
|
||||
def list_releases(self, package_name, media_type=None):
|
||||
"""Return the list of all releases of an Application
|
||||
Example:
|
||||
>>> get_app_releases('ant31/rocketchat')
|
||||
['1.7.1', '1.7.0', '1.7.2']
|
||||
|
||||
Todo:
|
||||
* Paginate
|
||||
"""
|
||||
return appr_model.release.get_releases(
|
||||
_application(package_name), self.models_ref, media_type
|
||||
)
|
||||
|
||||
def list_manifests(self, package_name, release=None):
|
||||
"""
|
||||
Returns the list of all manifests of an Application.
|
||||
|
||||
Todo:
|
||||
* Paginate
|
||||
"""
|
||||
try:
|
||||
repo = _application(package_name)
|
||||
return list(appr_model.manifest.get_manifest_types(repo, self.models_ref, release))
|
||||
except (Repository.DoesNotExist, self.models_ref.Tag.DoesNotExist):
|
||||
raise_package_not_found(package_name, release)
|
||||
|
||||
def fetch_release(self, package_name, release, media_type):
|
||||
"""
|
||||
Retrieves an AppRelease from it's repository-name and release-name.
|
||||
"""
|
||||
repo = _application(package_name)
|
||||
try:
|
||||
tag, manifest, blob = appr_model.release.get_app_release(
|
||||
repo, release, media_type, self.models_ref
|
||||
)
|
||||
created_at = _timestamp_to_iso(tag.lifetime_start)
|
||||
|
||||
blob_descriptor = BlobDescriptor(
|
||||
digest=_strip_sha256_header(blob.digest),
|
||||
mediaType=blob.media_type.name,
|
||||
size=blob.size,
|
||||
urls=[],
|
||||
)
|
||||
|
||||
app_manifest = ApplicationManifest(
|
||||
digest=manifest.digest, mediaType=manifest.media_type.name, content=blob_descriptor
|
||||
)
|
||||
|
||||
app_release = ApplicationRelease(
|
||||
release=tag.name, created_at=created_at, name=package_name, manifest=app_manifest
|
||||
)
|
||||
return app_release
|
||||
except (
|
||||
self.models_ref.Tag.DoesNotExist,
|
||||
self.models_ref.Manifest.DoesNotExist,
|
||||
self.models_ref.Blob.DoesNotExist,
|
||||
Repository.DoesNotExist,
|
||||
MediaType.DoesNotExist,
|
||||
):
|
||||
raise_package_not_found(package_name, release, media_type)
|
||||
|
||||
def store_blob(self, cnrblob, content_media_type):
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
fp = cnrblob.packager.io_file
|
||||
path = cnrblob.upload_url(cnrblob.digest)
|
||||
locations = storage.preferred_locations
|
||||
storage.stream_write(locations, path, fp, "application/x-gzip")
|
||||
db_blob = appr_model.blob.get_or_create_blob(
|
||||
cnrblob.digest, cnrblob.size, content_media_type, locations, self.models_ref
|
||||
)
|
||||
return BlobDescriptor(
|
||||
mediaType=content_media_type,
|
||||
digest=_strip_sha256_header(db_blob.digest),
|
||||
size=db_blob.size,
|
||||
urls=[],
|
||||
)
|
||||
|
||||
def create_release(self, package, user, visibility, force=False):
|
||||
"""
|
||||
Add an app-release to a repository package is an instance of data.cnr.package.Package.
|
||||
"""
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
manifest = package.manifest()
|
||||
ns, name = package.namespace, package.name
|
||||
repo = data.model.repository.get_or_create_repository(
|
||||
ns, name, user, visibility=visibility, repo_kind="application"
|
||||
)
|
||||
tag_name = package.release
|
||||
appr_model.release.create_app_release(
|
||||
repo,
|
||||
tag_name,
|
||||
package.manifest(),
|
||||
manifest["content"]["digest"],
|
||||
self.models_ref,
|
||||
force,
|
||||
)
|
||||
|
||||
def delete_release(self, package_name, release, media_type):
|
||||
"""
|
||||
Remove/Delete an app-release from an app-repository.
|
||||
|
||||
It does not delete the entire app-repository, only a single release
|
||||
"""
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
repo = _application(package_name)
|
||||
try:
|
||||
appr_model.release.delete_app_release(repo, release, media_type, self.models_ref)
|
||||
except (
|
||||
self.models_ref.Channel.DoesNotExist,
|
||||
self.models_ref.Tag.DoesNotExist,
|
||||
MediaType.DoesNotExist,
|
||||
):
|
||||
raise_package_not_found(package_name, release, media_type)
|
||||
|
||||
def release_exists(self, package, release):
|
||||
"""
|
||||
Return true if a release with that name already exist or have existed (include deleted ones)
|
||||
"""
|
||||
# TODO: Figure out why this isn't implemented.
|
||||
|
||||
def channel_exists(self, package_name, channel_name):
|
||||
"""
|
||||
Returns true if channel exists.
|
||||
"""
|
||||
repo = _application(package_name)
|
||||
return appr_model.tag.tag_exists(repo, channel_name, self.models_ref, "channel")
|
||||
|
||||
def delete_channel(self, package_name, channel_name):
|
||||
"""Delete an AppChannel
|
||||
Note:
|
||||
It doesn't delete the AppReleases
|
||||
"""
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
repo = _application(package_name)
|
||||
try:
|
||||
appr_model.channel.delete_channel(repo, channel_name, self.models_ref)
|
||||
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
|
||||
raise_channel_not_found(package_name, channel_name)
|
||||
|
||||
def list_channels(self, package_name):
|
||||
"""
|
||||
Returns all AppChannel for a package.
|
||||
"""
|
||||
repo = _application(package_name)
|
||||
channels = appr_model.channel.get_repo_channels(repo, self.models_ref)
|
||||
return [ChannelView(name=chan.name, current=chan.linked_tag.name) for chan in channels]
|
||||
|
||||
def fetch_channel(self, package_name, channel_name, with_releases=True):
|
||||
"""
|
||||
Returns an AppChannel.
|
||||
"""
|
||||
repo = _application(package_name)
|
||||
|
||||
try:
|
||||
channel = appr_model.channel.get_channel(repo, channel_name, self.models_ref)
|
||||
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
|
||||
raise_channel_not_found(package_name, channel_name)
|
||||
|
||||
if with_releases:
|
||||
releases = appr_model.channel.get_channel_releases(repo, channel, self.models_ref)
|
||||
chanview = ChannelReleasesView(
|
||||
current=channel.linked_tag.name,
|
||||
name=channel.name,
|
||||
releases=[channel.linked_tag.name] + [c.name for c in releases],
|
||||
)
|
||||
else:
|
||||
chanview = ChannelView(current=channel.linked_tag.name, name=channel.name)
|
||||
|
||||
return chanview
|
||||
|
||||
def list_release_channels(self, package_name, release, active=True):
|
||||
repo = _application(package_name)
|
||||
try:
|
||||
channels = appr_model.channel.get_tag_channels(
|
||||
repo, release, self.models_ref, active=active
|
||||
)
|
||||
return [ChannelView(name=c.name, current=release) for c in channels]
|
||||
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
|
||||
raise_package_not_found(package_name, release)
|
||||
|
||||
def update_channel(self, package_name, channel_name, release):
|
||||
"""Append a new release to the AppChannel
|
||||
Returns:
|
||||
A new AppChannel with the release
|
||||
"""
|
||||
if self.is_readonly:
|
||||
raise ReadOnlyException("Currently in read-only mode")
|
||||
|
||||
repo = _application(package_name)
|
||||
channel = appr_model.channel.create_or_update_channel(
|
||||
repo, channel_name, release, self.models_ref
|
||||
)
|
||||
return ChannelView(current=channel.linked_tag.name, name=channel.name)
|
||||
|
||||
def get_blob_locations(self, digest):
|
||||
return appr_model.blob.get_blob_locations(digest, self.models_ref)
|
||||
|
||||
|
||||
# Phase 3: Read and write from new tables.
|
||||
model = CNRAppModel(NEW_MODELS, features.READONLY_APP_REGISTRY)
|
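# Illustrative usage sketch (an editorial assumption, not part of the original file): how a
# caller might consume the CNRAppModel instance defined above. It assumes a configured Flask
# app context and database, and uses the "devtable" namespace that the tests below rely on.
from endpoints.appr.models_cnr import model

# Each summary is an ApplicationSummaryView namedtuple (defined in the interface module below).
for summary in model.list_applications(namespace="devtable", with_channels=True):
    channels = summary.channels or []
    print(summary.name, summary.default, [chan.name for chan in channels])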
@ -1,244 +0,0 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple

from six import add_metaclass


class BlobDescriptor(namedtuple("Blob", ["mediaType", "size", "digest", "urls"])):
    """
    BlobDescriptor describes a blob with its mediatype, size and digest.

    A BlobDescriptor is used to retrieve the actual blob.
    """


class ChannelReleasesView(namedtuple("ChannelReleasesView", ["name", "current", "releases"])):
    """
    A channel is a pointer to a Release (current).

    Releases are the previous tags pointed to by the channel (history).
    """


class ChannelView(namedtuple("ChannelView", ["name", "current"])):
    """
    A channel is a pointer to a Release (current).
    """


class ApplicationSummaryView(
    namedtuple(
        "ApplicationSummaryView",
        [
            "name",
            "namespace",
            "visibility",
            "default",
            "manifests",
            "channels",
            "releases",
            "updated_at",
            "created_at",
        ],
    )
):
    """
    ApplicationSummaryView is an aggregated view of an application repository.
    """


class ApplicationManifest(namedtuple("ApplicationManifest", ["mediaType", "digest", "content"])):
    """
    ApplicationManifest embeds the BlobDescriptor and some metadata around it.

    An ApplicationManifest is content-addressable.
    """


class ApplicationRelease(
    namedtuple("ApplicationRelease", ["release", "name", "created_at", "manifest"])
):
    """
    The ApplicationRelease associates an ApplicationManifest to a repository and release.
    """


@add_metaclass(ABCMeta)
class AppRegistryDataInterface(object):
    """
    Interface that represents all data store interactions required by an App Registry.
    """

    @abstractmethod
    def list_applications(
        self, namespace=None, media_type=None, search=None, username=None, with_channels=False
    ):
        """
        Lists all repositories that contain applications, with optional filtering to a specific
        namespace and/or to those visible to a specific user.

        Returns: list of ApplicationSummaryView
        """
        pass

    @abstractmethod
    def application_is_public(self, package_name):
        """
        Returns true if the application is public.
        """
        pass

    @abstractmethod
    def create_application(self, package_name, visibility, owner):
        """
        Create a new app repository; owner is the user who creates it.
        """
        pass

    @abstractmethod
    def application_exists(self, package_name):
        """
        Returns true if the application exists.
        """
        pass

    @abstractmethod
    def basic_search(self, query, username=None):
        """Returns an array of matching applications in the format: 'namespace/name'

        Note:
            * Only 'public' repositories are returned
        """
        pass

    # @TODO: Paginate
    @abstractmethod
    def list_releases(self, package_name, media_type=None):
        """Returns the list of all releases (names) of an AppRepository

        Example:
            >>> get_app_releases('ant31/rocketchat')
            ['1.7.1', '1.7.0', '1.7.2']
        """
        pass

    # @TODO: Paginate
    @abstractmethod
    def list_manifests(self, package_name, release=None):
        """
        Returns the list of all available manifest types of an Application across all releases or
        for a specific one.

        Example:
            >>> get_app_releases('ant31/rocketchat')
            ['1.7.1', '1.7.0', '1.7.2']
        """
        pass

    @abstractmethod
    def fetch_release(self, package_name, release, media_type):
        """
        Returns an ApplicationRelease.
        """
        pass

    @abstractmethod
    def store_blob(self, cnrblob, content_media_type):
        """
        Uploads the blob content to a storage location and creates a Blob entry in the DB.

        Returns a BlobDescriptor
        """
        pass

    @abstractmethod
    def create_release(self, package, user, visibility, force=False):
        """
        Creates and returns an ApplicationRelease.

        - package is a data.model.Package object
        - user is the owner of the package
        - visibility is a string: 'public' or 'private'
        """
        pass

    @abstractmethod
    def release_exists(self, package, release):
        """
        Returns true if a release with that name already exists or has existed (including deleted
        ones).
        """
        pass

    @abstractmethod
    def delete_release(self, package_name, release, media_type):
        """
        Remove/Delete an app-release from an app-repository.

        It does not delete the entire app-repository, only a single release.
        """
        pass

    @abstractmethod
    def list_release_channels(self, package_name, release, active=True):
        """
        Returns a list of Channels that are or were pointing to a release.

        If active is True, returns only active Channels (lifetime_end not null).
        """
        pass

    @abstractmethod
    def channel_exists(self, package_name, channel_name):
        """
        Returns true if the channel with the given name exists under the matching package.
        """
        pass

    @abstractmethod
    def update_channel(self, package_name, channel_name, release):
        """
        Append a new release to the Channel. Returns a new Channel with the release as current.
        """
        pass

    @abstractmethod
    def delete_channel(self, package_name, channel_name):
        """
        Delete a Channel; it doesn't delete/touch the ApplicationRelease pointed to by the channel.
        """

    # @TODO: Paginate
    @abstractmethod
    def list_channels(self, package_name):
        """
        Returns all AppChannels for a package.
        """
        pass

    @abstractmethod
    def fetch_channel(self, package_name, channel_name, with_releases=True):
        """Returns a Channel

        Raises: ChannelNotFound, PackageNotFound
        """
        pass

    @abstractmethod
    def log_action(
        self,
        event_name,
        namespace_name,
        repo_name=None,
        analytics_name=None,
        analytics_sample=1,
        **kwargs,
    ):
        """
        Logs an action to the audit log.
        """
        pass

    @abstractmethod
    def get_blob_locations(self, digest):
        """
        Returns a list of strings for the locations in which a Blob is present.
        """
        pass
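# Minimal sketch (an editorial assumption, not part of the original file) illustrating why the
# view classes above are plain namedtuples: they convert cleanly to dicts and therefore
# serialize directly with flask.jsonify. The local namedtuple below simply mirrors ChannelView
# so the snippet runs standalone.
from collections import namedtuple

ChannelViewExample = namedtuple("ChannelView", ["name", "current"])

channel = ChannelViewExample(name="stable", current="2.0.1")
assert dict(channel._asdict()) == {"name": "stable", "current": "2.0.1"}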
@ -1,404 +0,0 @@
import logging
from base64 import b64encode

import cnr
from cnr.api.impl import registry as cnr_registry
from cnr.api.registry import _pull, repo_name
from cnr.exception import (
    ChannelNotFound,
    CnrException,
    Forbidden,
    InvalidParams,
    InvalidRelease,
    InvalidUsage,
    PackageAlreadyExists,
    PackageNotFound,
    PackageReleaseNotFound,
    UnableToLockResource,
    UnauthorizedAccess,
    Unsupported,
)
from flask import jsonify, request

import features
from app import app, model_cache
from auth.auth_context import get_authenticated_user
from auth.credentials import validate_credentials
from auth.decorators import process_auth
from auth.permissions import CreateRepositoryPermission, ModifyRepositoryPermission
from data.logs_model import logs_model
from data.cache import cache_key
from endpoints.appr import appr_bp, require_app_repo_read, require_app_repo_write
from endpoints.appr.cnr_backend import Blob, Channel, Package, User
from endpoints.appr.decorators import disallow_for_image_repository
from endpoints.appr.models_cnr import model
from endpoints.decorators import anon_allowed, anon_protect, check_region_blacklisted
from util.names import REPOSITORY_NAME_REGEX, REPOSITORY_NAME_EXTENDED_REGEX, TAG_REGEX

logger = logging.getLogger(__name__)


@appr_bp.errorhandler(Unsupported)
@appr_bp.errorhandler(PackageAlreadyExists)
@appr_bp.errorhandler(InvalidRelease)
@appr_bp.errorhandler(Forbidden)
@appr_bp.errorhandler(UnableToLockResource)
@appr_bp.errorhandler(UnauthorizedAccess)
@appr_bp.errorhandler(PackageNotFound)
@appr_bp.errorhandler(PackageReleaseNotFound)
@appr_bp.errorhandler(CnrException)
@appr_bp.errorhandler(InvalidUsage)
@appr_bp.errorhandler(InvalidParams)
@appr_bp.errorhandler(ChannelNotFound)
def render_error(error):
    response = jsonify({"error": error.to_dict()})
    response.status_code = error.status_code
    return response


@appr_bp.route("/version")
@anon_allowed
def version():
    return jsonify({"cnr-api": cnr.__version__})


@appr_bp.route("/api/v1/users/login", methods=["POST"])
@anon_allowed
def login():
    values = request.get_json(force=True, silent=True) or {}
    username = values.get("user", {}).get("username")
    password = values.get("user", {}).get("password")
    if not username or not password:
        raise InvalidUsage("Missing username or password")

    result, _ = validate_credentials(username, password)
    if not result.auth_valid:
        raise UnauthorizedAccess(result.error_message)

    auth = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii")))
    return jsonify({"token": "basic " + auth.decode("ascii")})
# @TODO: Redirect to S3 url
@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/blobs/sha256/<string:digest>",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@check_region_blacklisted(namespace_name_kwarg="namespace")
@anon_protect
def blobs(namespace, package_name, digest):
    reponame = repo_name(namespace, package_name)
    data = cnr_registry.pull_blob(reponame, digest, blob_class=Blob)
    json_format = request.args.get("format", None) == "json"
    return _pull(data, json_format=json_format)


@appr_bp.route("/api/v1/packages", methods=["GET"], strict_slashes=False)
@process_auth
@anon_protect
def list_packages():
    namespace = request.args.get("namespace", None)
    media_type = request.args.get("media_type", None)
    query = request.args.get("query", None)
    user = get_authenticated_user()
    username = None
    if user:
        username = user.username
    result_data = cnr_registry.list_packages(
        namespace, package_class=Package, search=query, media_type=media_type, username=username
    )
    return jsonify(result_data)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/<string:release>/<string:media_type>",
    methods=["DELETE"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_write
@anon_protect
def delete_package(namespace, package_name, release, media_type):
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.delete_package(reponame, release, media_type, package_class=Package)
    logs_model.log_action(
        "delete_tag",
        namespace,
        repository_name=package_name,
        metadata={"release": release, "mediatype": media_type},
    )
    return jsonify(result)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/<string:release>/<string:media_type>",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@check_region_blacklisted(namespace_name_kwarg="namespace")
@anon_protect
def show_package(namespace, package_name, release, media_type):
    def _retrieve_package():
        reponame = repo_name(namespace, package_name)
        return cnr_registry.show_package(
            reponame, release, media_type, channel_class=Channel, package_class=Package
        )

    namespace_whitelist = app.config.get("APP_REGISTRY_SHOW_PACKAGE_CACHE_WHITELIST", [])
    if not namespace or namespace not in namespace_whitelist:
        return jsonify(_retrieve_package())

    show_package_cache_key = cache_key.for_appr_show_package(
        namespace, package_name, release, media_type, model_cache.cache_config
    )

    result = model_cache.retrieve(show_package_cache_key, _retrieve_package)
    return jsonify(result)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@anon_protect
def show_package_releases(namespace, package_name):
    reponame = repo_name(namespace, package_name)
    media_type = request.args.get("media_type", None)
    result = cnr_registry.show_package_releases(
        reponame, media_type=media_type, package_class=Package
    )
    return jsonify(result)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/<string:release>",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@anon_protect
def show_package_release_manifests(namespace, package_name, release):
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.show_package_manifests(reponame, release, package_class=Package)
    return jsonify(result)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/<string:release>/<string:media_type>/pull",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@check_region_blacklisted(namespace_name_kwarg="namespace")
@anon_protect
def pull(namespace, package_name, release, media_type):
    logger.debug("Pull of release %s of app repository %s/%s", release, namespace, package_name)
    reponame = repo_name(namespace, package_name)
    data = cnr_registry.pull(reponame, release, media_type, Package, blob_class=Blob)
    logs_model.log_action(
        "pull_repo",
        namespace,
        repository_name=package_name,
        metadata={"release": release, "mediatype": media_type},
    )
    json_format = request.args.get("format", None) == "json"
    return _pull(data, json_format)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>",
    methods=["POST"],
    strict_slashes=False,
)
@disallow_for_image_repository()
@process_auth
@anon_protect
def push(namespace, package_name):
    reponame = repo_name(namespace, package_name)

    if features.EXTENDED_REPOSITORY_NAMES:
        if not REPOSITORY_NAME_EXTENDED_REGEX.match(package_name):
            logger.debug("Found invalid repository name CNR push: %s", reponame)
            raise InvalidUsage("invalid repository name: %s" % reponame)
    else:
        if not REPOSITORY_NAME_REGEX.match(package_name):
            logger.debug("Found invalid repository name CNR push: %s", reponame)
            raise InvalidUsage("invalid repository name: %s" % reponame)

    values = request.get_json(force=True, silent=True) or {}
    private = values.get("visibility", "private")

    owner = get_authenticated_user()
    if not Package.exists(reponame):
        if not CreateRepositoryPermission(namespace).can():
            raise Forbidden(
                "Unauthorized access for: %s" % reponame,
                {"package": reponame, "scopes": ["create"]},
            )
        Package.create_repository(reponame, private, owner)
        logs_model.log_action("create_repo", namespace, repository_name=package_name)

    if not ModifyRepositoryPermission(namespace, package_name).can():
        raise Forbidden(
            "Unauthorized access for: %s" % reponame, {"package": reponame, "scopes": ["push"]}
        )

    if not "release" in values:
        raise InvalidUsage("Missing release")

    if not "media_type" in values:
        raise InvalidUsage("Missing media_type")

    if not "blob" in values:
        raise InvalidUsage("Missing blob")

    release_version = str(values["release"])
    media_type = values["media_type"]
    force = request.args.get("force", "false") == "true"

    blob = Blob(reponame, values["blob"])
    app_release = cnr_registry.push(
        reponame,
        release_version,
        media_type,
        blob,
        force,
        package_class=Package,
        user=owner,
        visibility=private,
    )
    logs_model.log_action(
        "push_repo", namespace, repository_name=package_name, metadata={"release": release_version}
    )
    return jsonify(app_release)
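# Push payload sketch (an editorial assumption; a hypothetical client, not part of the original
# file). The field names match what push() above validates: "release", "media_type" and a
# base64-encoded gzipped package blob; the media type string and file name are illustrative only.
import base64
import requests

QUAY_URL = "https://quay.example.com"  # hypothetical host
TOKEN = "basic ZGV2dGFibGU6cGFzc3dvcmQ="  # as returned by the login endpoint above

with open("mypackage.tar.gz", "rb") as fh:
    b64blob = base64.b64encode(fh.read()).decode("ascii")

resp = requests.post(
    QUAY_URL + "/cnr/api/v1/packages/devtable/mypackage",
    json={
        "release": "1.0.0",
        "media_type": "application/vnd.cnr.manifest.v0+json",
        "blob": b64blob,
    },
    headers={"Authorization": TOKEN},
    params={"force": "false"},  # push() reads ?force=true to overwrite an existing release
)
resp.raise_for_status()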
@appr_bp.route("/api/v1/packages/search", methods=["GET"], strict_slashes=False)
@process_auth
@anon_protect
def search_packages():
    query = request.args.get("q")
    user = get_authenticated_user()
    username = None
    if user:
        username = user.username

    search_results = cnr_registry.search(query, Package, username=username)
    return jsonify(search_results)


# CHANNELS
@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/channels",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@anon_protect
def list_channels(namespace, package_name):
    reponame = repo_name(namespace, package_name)
    return jsonify(cnr_registry.list_channels(reponame, channel_class=Channel))


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/channels/<string:channel_name>",
    methods=["GET"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_read
@anon_protect
def show_channel(namespace, package_name, channel_name):
    reponame = repo_name(namespace, package_name)
    channel = cnr_registry.show_channel(reponame, channel_name, channel_class=Channel)
    return jsonify(channel)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/channels/<string:channel_name>/<string:release>",
    methods=["POST"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_write
@anon_protect
def add_channel_release(namespace, package_name, channel_name, release):
    _check_channel_name(channel_name, release)
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.add_channel_release(
        reponame, channel_name, release, channel_class=Channel, package_class=Package
    )
    logs_model.log_action(
        "create_tag",
        namespace,
        repository_name=package_name,
        metadata={"channel": channel_name, "release": release},
    )
    return jsonify(result)


def _check_channel_name(channel_name, release=None):
    if not TAG_REGEX.match(channel_name):
        logger.debug("Found invalid channel name CNR add channel release: %s", channel_name)
        raise InvalidUsage(
            "Found invalid channelname %s" % release, {"name": channel_name, "release": release}
        )

    if release is not None and not TAG_REGEX.match(release):
        logger.debug("Found invalid release name CNR add channel release: %s", release)
        raise InvalidUsage(
            "Found invalid channel release name %s" % release,
            {"name": channel_name, "release": release},
        )


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/channels/<string:channel_name>/<string:release>",
    methods=["DELETE"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_write
@anon_protect
def delete_channel_release(namespace, package_name, channel_name, release):
    _check_channel_name(channel_name, release)
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.delete_channel_release(
        reponame, channel_name, release, channel_class=Channel, package_class=Package
    )
    logs_model.log_action(
        "delete_tag",
        namespace,
        repository_name=package_name,
        metadata={"channel": channel_name, "release": release},
    )
    return jsonify(result)


@appr_bp.route(
    "/api/v1/packages/<string:namespace>/<string:package_name>/channels/<string:channel_name>",
    methods=["DELETE"],
    strict_slashes=False,
)
@process_auth
@require_app_repo_write
@anon_protect
def delete_channel(namespace, package_name, channel_name):
    _check_channel_name(channel_name)
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.delete_channel(reponame, channel_name, channel_class=Channel)
    logs_model.log_action(
        "delete_tag", namespace, repository_name=package_name, metadata={"channel": channel_name}
    )
    return jsonify(result)
@ -1,194 +0,0 @@
import uuid

import pytest

from app import model_cache

from cnr.tests.conftest import *
from cnr.tests.test_apiserver import BaseTestServer
from cnr.tests.test_models import CnrTestModels

import data.appr_model.blob as appr_blob

from data.database import User
from data.model import organization, user
from endpoints.appr import registry  # Needed to register the endpoint
from endpoints.appr.cnr_backend import Channel, Package, QuayDB
from endpoints.appr.models_cnr import model as appr_app_model

from test.fixtures import *


def create_org(namespace, owner):
    try:
        User.get(username=namespace)
    except User.DoesNotExist:
        organization.create_organization(namespace, "%s@test.com" % str(uuid.uuid1()), owner)


class ChannelTest(Channel):
    @classmethod
    def dump_all(cls, package_class=None):
        result = []
        for repo in appr_app_model.list_applications(with_channels=True):
            for chan in repo.channels:
                result.append({"name": chan.name, "current": chan.current, "package": repo.name})
        return result


class PackageTest(Package):
    def _save(self, force, **kwargs):
        owner = user.get_user("devtable")
        create_org(self.namespace, owner)
        super(PackageTest, self)._save(force, user=owner, visibility="public")

    @classmethod
    def create_repository(cls, package_name, visibility, owner):
        ns, _ = package_name.split("/")
        owner = user.get_user("devtable")
        visibility = "public"
        create_org(ns, owner)
        return super(PackageTest, cls).create_repository(package_name, visibility, owner)

    @classmethod
    def dump_all(cls, blob_cls):
        result = []
        for repo in appr_app_model.list_applications(with_channels=True):
            package_name = repo.name
            for release in repo.releases:
                for mtype in cls.manifests(package_name, release):
                    package = appr_app_model.fetch_release(package_name, release, mtype)
                    blob = blob_cls.get(package_name, package.manifest.content.digest)
                    app_data = cls._apptuple_to_dict(package)
                    app_data.pop("digest")
                    app_data["channels"] = [
                        x.name
                        for x in appr_app_model.list_release_channels(
                            package_name, package.release, False
                        )
                    ]
                    app_data["blob"] = blob.b64blob
                    result.append(app_data)
        return result


@pytest.fixture(autouse=True)
def quaydb(monkeypatch, app):
    monkeypatch.setattr("endpoints.appr.cnr_backend.QuayDB.Package", PackageTest)
    monkeypatch.setattr("endpoints.appr.cnr_backend.Package", PackageTest)
    monkeypatch.setattr("endpoints.appr.registry.Package", PackageTest)
    monkeypatch.setattr("cnr.models.Package", PackageTest)

    monkeypatch.setattr("endpoints.appr.cnr_backend.QuayDB.Channel", ChannelTest)
    monkeypatch.setattr("endpoints.appr.registry.Channel", ChannelTest)
    monkeypatch.setattr("cnr.models.Channel", ChannelTest)


class TestServerQuayDB(BaseTestServer):
    DB_CLASS = QuayDB

    @property
    def token(self):
        return "basic ZGV2dGFibGU6cGFzc3dvcmQ="

    def test_search_package_match(self, db_with_data1, client):
        """TODO: search across namespace and package name"""
        BaseTestServer.test_search_package_match(self, db_with_data1, client)

    def test_list_search_package_match(self, db_with_data1, client):
        url = self._url_for("api/v1/packages")
        res = self.Client(client, self.headers()).get(url, params={"query": "rocketchat"})
        assert res.status_code == 200
        assert len(self.json(res)) == 1

        # Run again for cache checking.
        res = self.Client(client, self.headers()).get(url, params={"query": "rocketchat"})
        assert res.status_code == 200
        assert len(self.json(res)) == 1

    def test_list_search_package_no_match(self, db_with_data1, client):
        url = self._url_for("api/v1/packages")
        res = self.Client(client, self.headers()).get(url, params={"query": "toto"})
        assert res.status_code == 200
        assert len(self.json(res)) == 0

    @pytest.mark.xfail
    def test_push_package_already_exists_force(self, db_with_data1, package_b64blob, client):
        """
        No force push implemented.
        """
        BaseTestServer.test_push_package_already_exists_force(
            self, db_with_data1, package_b64blob, client
        )

    @pytest.mark.xfail
    def test_delete_channel_release_absent_release(self, db_with_data1, client):
        BaseTestServer.test_delete_channel_release_absent_release(self, db_with_data1, client)

    @pytest.mark.xfail
    def test_get_absent_blob(self, newdb, client):
        pass


class TestQuayModels(CnrTestModels):
    DB_CLASS = QuayDB

    @pytest.mark.xfail
    def test_channel_delete_releases(self, db_with_data1):
        """
        Can't remove a release from the channel, only delete the channel entirely.
        """
        CnrTestModels.test_channel_delete_releases(self, db_with_data1)

    @pytest.mark.xfail
    def test_forbiddeb_db_reset(self, db_class):
        pass

    @pytest.mark.xfail
    def test_db_restore(self, newdb, dbdata1):
        # This will fail as long as CNR tests use a mediatype with v1.
        pass

    def test_save_package_exists_force(self, newdb, package_b64blob):
        model_cache.empty_for_testing()
        CnrTestModels.test_save_package_exists_force(self, newdb, package_b64blob)

    def test_save_package_exists(self, newdb, package_b64blob):
        model_cache.empty_for_testing()
        CnrTestModels.test_save_package_exists(self, newdb, package_b64blob)

    def test_save_package(self, newdb, package_b64blob):
        model_cache.empty_for_testing()
        CnrTestModels.test_save_package(self, newdb, package_b64blob)

    def test_save_package_bad_release(self, newdb):
        model_cache.empty_for_testing()
        CnrTestModels.test_save_package_bad_release(self, newdb)

    def test_push_same_blob(self, db_with_data1):
        p = db_with_data1.Package.get("titi/rocketchat", ">1.2", "kpm")
        assert p.package == "titi/rocketchat"
        assert p.release == "2.0.1"
        assert p.digest == "d3b54b7912fe770a61b59ab612a442eac52a8a5d8d05dbe92bf8f212d68aaa80"
        blob = db_with_data1.Blob.get("titi/rocketchat", p.digest)
        bdb = appr_blob.get_blob(p.digest, appr_app_model.models_ref)
        newblob = db_with_data1.Blob("titi/app2", blob.b64blob)
        p2 = db_with_data1.Package("titi/app2", "1.0.0", "helm", newblob)
        p2.save()
        b2db = appr_blob.get_blob(p2.digest, appr_app_model.models_ref)
        assert b2db.id == bdb.id

    def test_force_push_different_blob(self, db_with_data1):
        p = db_with_data1.Package.get("titi/rocketchat", "2.0.1", "kpm")
        assert p.package == "titi/rocketchat"
        assert p.release == "2.0.1"
        assert p.digest == "d3b54b7912fe770a61b59ab612a442eac52a8a5d8d05dbe92bf8f212d68aaa80"
        blob = db_with_data1.Blob.get(
            "titi/rocketchat", "72ed15c9a65961ecd034cca098ec18eb99002cd402824aae8a674a8ae41bd0ef"
        )
        p2 = db_with_data1.Package("titi/rocketchat", "2.0.1", "kpm", blob)
        p2.save(force=True)
        pnew = db_with_data1.Package.get("titi/rocketchat", "2.0.1", "kpm")
        assert pnew.package == "titi/rocketchat"
        assert pnew.release == "2.0.1"
        assert pnew.digest == "72ed15c9a65961ecd034cca098ec18eb99002cd402824aae8a674a8ae41bd0ef"
@ -1,182 +0,0 @@
import base64
import pytest

from flask import url_for

from data import model
from endpoints.appr.registry import appr_bp, blobs
from endpoints.test.shared import client_with_identity
from test.fixtures import *

BLOB_ARGS = {"digest": "abcd1235"}
PACKAGE_ARGS = {"release": "r", "media_type": "foo"}
RELEASE_ARGS = {"release": "r"}
CHANNEL_ARGS = {"channel_name": "c"}
CHANNEL_RELEASE_ARGS = {"channel_name": "c", "release": "r"}


@pytest.mark.parametrize(
    "resource,method,params,owned_by,is_public,identity,expected",
    [
        ("appr.blobs", "GET", BLOB_ARGS, "devtable", False, "public", 403),
        ("appr.blobs", "GET", BLOB_ARGS, "devtable", False, "devtable", 404),
        ("appr.blobs", "GET", BLOB_ARGS, "devtable", True, "public", 404),
        ("appr.blobs", "GET", BLOB_ARGS, "devtable", True, "devtable", 404),
        ("appr.delete_package", "DELETE", PACKAGE_ARGS, "devtable", False, "public", 403),
        ("appr.delete_package", "DELETE", PACKAGE_ARGS, "devtable", False, "devtable", 404),
        ("appr.delete_package", "DELETE", PACKAGE_ARGS, "devtable", True, "public", 403),
        ("appr.delete_package", "DELETE", PACKAGE_ARGS, "devtable", True, "devtable", 404),
        ("appr.show_package", "GET", PACKAGE_ARGS, "devtable", False, "public", 403),
        ("appr.show_package", "GET", PACKAGE_ARGS, "devtable", False, "devtable", 404),
        ("appr.show_package", "GET", PACKAGE_ARGS, "devtable", True, "public", 404),
        ("appr.show_package", "GET", PACKAGE_ARGS, "devtable", True, "devtable", 404),
        ("appr.show_package_releases", "GET", {}, "devtable", False, "public", 403),
        ("appr.show_package_releases", "GET", {}, "devtable", False, "devtable", 200),
        ("appr.show_package_releases", "GET", {}, "devtable", True, "public", 200),
        ("appr.show_package_releases", "GET", {}, "devtable", True, "devtable", 200),
        (
            "appr.show_package_release_manifests",
            "GET",
            RELEASE_ARGS,
            "devtable",
            False,
            "public",
            403,
        ),
        (
            "appr.show_package_release_manifests",
            "GET",
            RELEASE_ARGS,
            "devtable",
            False,
            "devtable",
            200,
        ),
        (
            "appr.show_package_release_manifests",
            "GET",
            RELEASE_ARGS,
            "devtable",
            True,
            "public",
            200,
        ),
        (
            "appr.show_package_release_manifests",
            "GET",
            RELEASE_ARGS,
            "devtable",
            True,
            "devtable",
            200,
        ),
        ("appr.pull", "GET", PACKAGE_ARGS, "devtable", False, "public", 403),
        ("appr.pull", "GET", PACKAGE_ARGS, "devtable", False, "devtable", 404),
        ("appr.pull", "GET", PACKAGE_ARGS, "devtable", True, "public", 404),
        ("appr.pull", "GET", PACKAGE_ARGS, "devtable", True, "devtable", 404),
        ("appr.push", "POST", {}, "devtable", False, "public", 403),
        ("appr.push", "POST", {}, "devtable", False, "devtable", 400),
        ("appr.push", "POST", {}, "devtable", True, "public", 403),
        ("appr.push", "POST", {}, "devtable", True, "devtable", 400),
        ("appr.list_channels", "GET", {}, "devtable", False, "public", 403),
        ("appr.list_channels", "GET", {}, "devtable", False, "devtable", 200),
        ("appr.list_channels", "GET", {}, "devtable", True, "public", 200),
        ("appr.list_channels", "GET", {}, "devtable", True, "devtable", 200),
        ("appr.show_channel", "GET", CHANNEL_ARGS, "devtable", False, "public", 403),
        ("appr.show_channel", "GET", CHANNEL_ARGS, "devtable", False, "devtable", 404),
        ("appr.show_channel", "GET", CHANNEL_ARGS, "devtable", True, "public", 404),
        ("appr.show_channel", "GET", CHANNEL_ARGS, "devtable", True, "devtable", 404),
        ("appr.delete_channel", "DELETE", CHANNEL_ARGS, "devtable", False, "public", 403),
        ("appr.delete_channel", "DELETE", CHANNEL_ARGS, "devtable", False, "devtable", 404),
        ("appr.delete_channel", "DELETE", CHANNEL_ARGS, "devtable", True, "public", 403),
        ("appr.delete_channel", "DELETE", CHANNEL_ARGS, "devtable", True, "devtable", 404),
        (
            "appr.add_channel_release",
            "POST",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            False,
            "public",
            403,
        ),
        (
            "appr.add_channel_release",
            "POST",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            False,
            "devtable",
            404,
        ),
        ("appr.add_channel_release", "POST", CHANNEL_RELEASE_ARGS, "devtable", True, "public", 403),
        (
            "appr.add_channel_release",
            "POST",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            True,
            "devtable",
            404,
        ),
        (
            "appr.delete_channel_release",
            "DELETE",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            False,
            "public",
            403,
        ),
        (
            "appr.delete_channel_release",
            "DELETE",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            False,
            "devtable",
            404,
        ),
        (
            "appr.delete_channel_release",
            "DELETE",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            True,
            "public",
            403,
        ),
        (
            "appr.delete_channel_release",
            "DELETE",
            CHANNEL_RELEASE_ARGS,
            "devtable",
            True,
            "devtable",
            404,
        ),
    ],
)
def test_api_security(
    resource, method, params, owned_by, is_public, identity, expected, app, client
):
    app.register_blueprint(appr_bp, url_prefix="/cnr")

    with client_with_identity(identity, client) as cl:
        owner = model.user.get_user(owned_by)
        visibility = "public" if is_public else "private"
        model.repository.create_repository(
            owned_by, "someapprepo", owner, visibility=visibility, repo_kind="application"
        )

        params["namespace"] = owned_by
        params["package_name"] = "someapprepo"
        params["_csrf_token"] = "123csrfforme"

        url = url_for(resource, **params)
        headers = {}
        if identity is not None:
            auth = base64.b64encode(("%s:password" % identity).encode("ascii"))
            headers["authorization"] = "basic " + auth.decode("ascii")

        rv = cl.open(url, headers=headers, method=method)
        assert rv.status_code == expected
@ -1,21 +0,0 @@
import pytest

from werkzeug.exceptions import HTTPException

from data import model
from endpoints.appr import require_app_repo_read

from test.fixtures import *


def test_require_app_repo_read(app):
    called = [False]

    # Ensure that trying to read an *image* repository fails.
    @require_app_repo_read
    def empty(**kwargs):
        called[0] = True

    with pytest.raises(HTTPException):
        empty(namespace="devtable", package_name="simple")
    assert not called[0]
@ -1,19 +0,0 @@
import pytest
from endpoints.appr.models_cnr import _strip_sha256_header


@pytest.mark.parametrize(
    "digest,expected",
    [
        (
            "sha256:251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a",
            "251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a",
        ),
        (
            "251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a",
            "251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a",
        ),
    ],
)
def test_stip_sha256(digest, expected):
    assert _strip_sha256_header(digest) == expected
@ -1,91 +0,0 @@
import base64
import json

from mock import patch

import pytest

from flask import url_for

from data import model
from endpoints.appr.registry import appr_bp

from test.fixtures import *


@pytest.mark.parametrize(
    "login_data, expected_code",
    [
        ({"username": "devtable", "password": "password"}, 200),
        ({"username": "devtable", "password": "badpass"}, 401),
        ({"username": "devtable+dtrobot", "password": "badpass"}, 401),
        ({"username": "devtable+dtrobot2", "password": "badpass"}, 401),
    ],
)
def test_login(login_data, expected_code, app, client):
    if "+" in login_data["username"] and login_data["password"] is None:
        username, robotname = login_data["username"].split("+")
        _, login_data["password"] = model.user.create_robot(
            robotname, model.user.get_user(username)
        )

    url = url_for("appr.login")
    headers = {"Content-Type": "application/json"}
    data = {"user": login_data}

    rv = client.open(url, method="POST", data=json.dumps(data), headers=headers)
    assert rv.status_code == expected_code


@pytest.mark.parametrize(
    "release_name",
    [
        "1.0",
        "1",
        1,
    ],
)
def test_invalid_release_name(release_name, app, client):
    params = {
        "namespace": "devtable",
        "package_name": "someapprepo",
    }

    url = url_for("appr.push", **params)
    auth = base64.b64encode(b"devtable:password").decode("ascii")
    headers = {"Content-Type": "application/json", "Authorization": "Basic " + auth}
    data = {
        "release": release_name,
        "media_type": "application/vnd.cnr.manifest.v1+json",
        "blob": "H4sIAFQwWVoAA+3PMQrCQBAF0Bxlb+Bk143nETGIIEoSC29vMMFOu3TvNb/5DH/Ot8f02jWbiohDremT3ZKR90uuUlty7nKJNmqKtkQuTarbzlo8x+k4zFOu4+lyH4afvbnW93/urH98EwAAAAAAAAAAADb0BsdwExIAKAAA",
    }

    rv = client.open(url, method="POST", data=json.dumps(data), headers=headers)
    assert rv.status_code == 422


@pytest.mark.parametrize(
    "readonly, expected_status",
    [
        (True, 405),
        (False, 422),
    ],
)
def test_readonly(readonly, expected_status, app, client):
    params = {
        "namespace": "devtable",
        "package_name": "someapprepo",
    }

    url = url_for("appr.push", **params)
    auth = base64.b64encode(b"devtable:password").decode("ascii")
    headers = {"Content-Type": "application/json", "Authorization": "Basic " + auth}
    data = {
        "release": "1.0",
        "media_type": "application/vnd.cnr.manifest.v0+json",
        "blob": "H4sIAFQwWVoAA+3PMQrCQBAF0Bxlb+Bk143nETGIIEoSC29vMMFOu3TvNb/5DH/Ot8f02jWbiohDremT3ZKR90uuUlty7nKJNmqKtkQuTarbzlo8x+k4zFOu4+lyH4afvbnW93/urH98EwAAAAAAAAAAADb0BsdwExIAKAAA",
    }

    with patch("endpoints.appr.models_cnr.model.is_readonly", readonly):
        rv = client.open(url, method="POST", data=json.dumps(data), headers=headers)
        assert rv.status_code == expected_status
@ -253,14 +253,6 @@ def buildtrigger(path, trigger):
    return index("")


@route_show_if(features.APP_REGISTRY)
@web.route("/application/", defaults={"path": ""})
@web.route("/application/<path:path>", methods=["GET"])
@no_cache
def application(path):
    return index("")


@web.route("/security/")
@no_cache
def security():
@ -492,15 +492,6 @@ def initialize_database():
    MediaType.create(name="text/plain")
    MediaType.create(name="application/json")
    MediaType.create(name="text/markdown")
    MediaType.create(name="application/vnd.cnr.blob.v0.tar+gzip")
    MediaType.create(name="application/vnd.cnr.package-manifest.helm.v0.json")
    MediaType.create(name="application/vnd.cnr.package-manifest.kpm.v0.json")
    MediaType.create(name="application/vnd.cnr.package-manifest.docker-compose.v0.json")
    MediaType.create(name="application/vnd.cnr.package.kpm.v0.tar+gzip")
    MediaType.create(name="application/vnd.cnr.package.helm.v0.tar+gzip")
    MediaType.create(name="application/vnd.cnr.package.docker-compose.v0.tar+gzip")
    MediaType.create(name="application/vnd.cnr.manifests.v0.json")
    MediaType.create(name="application/vnd.cnr.manifest.list.v0.json")

    for media_type in DOCKER_SCHEMA1_CONTENT_TYPES:
        MediaType.create(name=media_type)
@ -3,13 +3,9 @@ import features

from app import app as application

from endpoints.appr import appr_bp, registry  # registry needed to ensure routes registered
from endpoints.v1 import v1_bp
from endpoints.v2 import v2_bp


application.register_blueprint(v1_bp, url_prefix="/v1")
application.register_blueprint(v2_bp, url_prefix="/v2")

if features.APP_REGISTRY:
    application.register_blueprint(appr_bp, url_prefix="/cnr")
@ -20,7 +20,6 @@ cffi==1.14.3
chardet==3.0.4
charset-normalizer==2.0.12
click==7.1.2
cnr-server @ git+https://github.com/quay/appr.git@58c88e4952e95935c0dd72d4a24b0c44f2249f5b
cryptography==3.3.2
DateTime==4.3
debtcollector==1.22.0
@ -21,7 +21,6 @@ from data.database import close_db_filter, db, configure
from data.model.user import LoginWrappedDBUser
from data.userfiles import Userfiles
from endpoints.api import api_bp
from endpoints.appr import appr_bp
from endpoints.web import web
from endpoints.v1 import v1_bp
from endpoints.v2 import v2_bp
@ -328,7 +327,6 @@ def app(appconfig, initialized_db):
    app.url_map.converters["v1createrepopath"] = V1CreateRepositoryPathConverter

    app.register_blueprint(api_bp, url_prefix="/api")
    app.register_blueprint(appr_bp, url_prefix="/cnr")
    app.register_blueprint(web, url_prefix="/")
    app.register_blueprint(v1_bp, url_prefix="/v1")
    app.register_blueprint(v2_bp, url_prefix="/v2")
@ -32,8 +32,7 @@ from app import (
)
from buildtrigger.basehandler import BuildTriggerHandler
from initdb import setup_database_for_testing, finished_database_for_testing
from data import database, model, appr_model
from data.appr_model.models import NEW_MODELS
from data import database, model
from data.database import RepositoryActionCount, Repository as RepositoryTable
from data.logs_model import logs_model
from data.registry_model import registry_model
@ -92,7 +92,6 @@ class TestConfig(DefaultConfig):
    RECAPTCHA_SECRET_KEY = "somesecretkey"
    RECAPTCHA_WHITELISTED_USERS: List[str] = []

    FEATURE_APP_REGISTRY = True
    FEATURE_TEAM_SYNCING = True
    FEATURE_CHANGE_TAG_EXPIRATION = True
@ -40,7 +40,6 @@ def add_enterprise_config_defaults(config_obj, current_secret_key):
    # Default features that are off.
    config_obj["FEATURE_MAILING"] = config_obj.get("FEATURE_MAILING", False)
    config_obj["FEATURE_BUILD_SUPPORT"] = config_obj.get("FEATURE_BUILD_SUPPORT", False)
    config_obj["FEATURE_APP_REGISTRY"] = config_obj.get("FEATURE_APP_REGISTRY", False)
    config_obj["FEATURE_REPO_MIRROR"] = config_obj.get("FEATURE_REPO_MIRROR", False)

    # Default repo mirror config.
@ -1,6 +1,4 @@
from data import model
from data.appr_model import blob
from data.appr_model.models import NEW_MODELS


def sync_database_with_config(config):
@ -11,4 +9,3 @@ def sync_database_with_config(config):
    location_names = list(config.get("DISTRIBUTED_STORAGE_CONFIG", {}).keys())
    if location_names:
        model.image.ensure_image_locations(*location_names)
        blob.ensure_blob_locations(NEW_MODELS, *location_names)
@ -11,15 +11,12 @@ INTERNAL_ONLY_PROPERTIES = {
    "SESSION_COOKIE_SAMESITE",
    "DATABASE_SECRET_KEY",
    "V22_NAMESPACE_BLACKLIST",
    "MAXIMUM_CNR_LAYER_SIZE",
    "OCI_NAMESPACE_WHITELIST",
    "FEATURE_GENERAL_OCI_SUPPORT",
    "FEATURE_HELM_OCI_SUPPORT",
    "FEATURE_NAMESPACE_GARBAGE_COLLECTION",
    "FEATURE_REPOSITORY_GARBAGE_COLLECTION",
    "FEATURE_REPOSITORY_ACTION_COUNTER",
    "APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST",
    "APP_REGISTRY_SHOW_PACKAGE_CACHE_WHITELIST",
    "FEATURE_MANIFEST_SIZE_BACKFILL",
    "TESTING",
    "SEND_FILE_MAX_AGE_DEFAULT",
@ -96,7 +93,6 @@ INTERNAL_ONLY_PROPERTIES = {
    "V1_ONLY_DOMAIN",
    "LOGS_MODEL",
    "LOGS_MODEL_CONFIG",
    "APP_REGISTRY_RESULTS_LIMIT",
    "V3_UPGRADE_MODE",  # Deprecated old flag
    "ACCOUNT_RECOVERY_MODE",
    "BLOBUPLOAD_DELETION_DATE_THRESHOLD",
@ -944,18 +940,6 @@ CONFIG_SCHEMA = {
            "description": "Whether to collect and support user metadata. Defaults to False",
            "x-example": False,
        },
        # Feature Flag: Support App Registry.
        "FEATURE_APP_REGISTRY": {
            "type": "boolean",
            "description": "Whether to enable support for App repositories. Defaults to False",
            "x-example": False,
        },
        # Feature Flag: Read-only app registry.
        "FEATURE_READONLY_APP_REGISTRY": {
            "type": "boolean",
            "description": "Whether App repositories are read-only. Defaults to False",
            "x-example": True,
        },
        # Feature Flag: Public Repositories in _catalog Endpoint.
        "FEATURE_PUBLIC_CATALOG": {
            "type": "boolean",