Merge pull request #2747 from jzelinskie/v1-interfaces
endpoints.v1: new fs layout for data interface
Commit ee30191d88
6 changed files with 280 additions and 293 deletions
@@ -4,7 +4,6 @@ from app import metric_queue, license_validator
from endpoints.decorators import anon_protect, anon_allowed
from util.metrics.metricqueue import time_blueprint


v1_bp = Blueprint('v1', __name__)

license_validator.enforce_license_before_request(v1_bp)
time_blueprint(v1_bp, metric_queue)


@@ -28,6 +27,7 @@ def ping():
  return response


from endpoints.v1 import index
from endpoints.v1 import registry
from endpoints.v1 import tag
from endpoints.v1 import (
  index,
  registry,
  tag,)
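The grouped import that closes this file sits at the bottom of the blueprint package on purpose: the index, registry, and tag modules import v1_bp back from the package, so they can only be loaded once the Blueprint object exists. A minimal self-contained sketch of that registration pattern, not taken from the commit (blueprint and route names are illustrative):

from flask import Blueprint, Flask

demo_bp = Blueprint('demo', __name__)

# In the real package the route modules live in separate files and are imported
# at the bottom of __init__.py; a single inline route stands in for them here.
@demo_bp.route('/v1/_ping')
def demo_ping():
  return 'true'

app = Flask(__name__)
app.register_blueprint(demo_bp)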
@@ -9,20 +9,19 @@ from flask import request, make_response, jsonify, session
from app import authentication, userevents, metric_queue
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
from auth.decorators import process_auth
from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
                              ReadRepositoryPermission, CreateRepositoryPermission,
                              repository_read_grant, repository_write_grant)
from auth.permissions import (
  ModifyRepositoryPermission, UserAdminPermission, ReadRepositoryPermission,
  CreateRepositoryPermission, repository_read_grant, repository_write_grant)
from auth.signedgrant import generate_signed_token
from data.interfaces.v1 import pre_oci_model as model
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect, anon_allowed
from endpoints.notificationhelper import spawn_notification
from endpoints.v1 import v1_bp
from endpoints.v1.models_pre_oci import pre_oci_model as model
from util.audit import track_and_log
from util.http import abort
from util.names import REPOSITORY_NAME_REGEX

logger = logging.getLogger(__name__)


@@ -66,7 +65,9 @@ def generate_headers(scope=GrantType.READ_REPOSITORY, add_grant_for_status=None)
      response.headers['X-Docker-Token'] = signature

      return response

    return wrapper

  return decorator_method


@@ -122,18 +123,15 @@ def get_user():
  if get_validated_oauth_token():
    return jsonify({
      'username': '$oauthtoken',
      'email': None,
    })
      'email': None,})
  elif get_authenticated_user():
    return jsonify({
      'username': get_authenticated_user().username,
      'email': get_authenticated_user().email,
    })
      'email': get_authenticated_user().email,})
  elif get_validated_token():
    return jsonify({
      'username': '$token',
      'email': None,
    })
      'email': None,})
  abort(404)


@@ -151,8 +149,7 @@ def update_user(username):

    return jsonify({
      'username': get_authenticated_user().username,
      'email': get_authenticated_user().email
    })
      'email': get_authenticated_user().email})
  abort(403)


@@ -181,8 +178,7 @@ def create_repository(namespace_name, repo_name):
    if not modify_perm.can():
      abort(403,
            message='You do not have permission to modify repository %(namespace)s/%(repository)s',
            issue='no-repo-write-permission',
            namespace=namespace_name, repository=repo_name)
            issue='no-repo-write-permission', namespace=namespace_name, repository=repo_name)
    elif repo.kind != 'image':
      msg = 'This repository is for managing %s resources and not container images.' % repo.kind
      abort(405, message=msg, namespace=namespace_name)

@@ -205,8 +201,7 @@ def create_repository(namespace_name, repo_name):
  user_event_data = {
    'action': 'push_start',
    'repository': repo_name,
    'namespace': namespace_name,
  }
    'namespace': namespace_name,}

  event = userevents.get_event(get_authenticated_user().username)
  event.publish_event_data('docker-cli', user_event_data)

@@ -237,8 +232,7 @@ def update_images(namespace_name, repo_name):

  updated_tags = session.get('pushed_tags', {})
  event_data = {
    'updated_tags': updated_tags,
  }
    'updated_tags': updated_tags,}

  track_and_log('push_repo', repo)
  spawn_notification(repo, 'repo_push', event_data)

@@ -340,8 +334,7 @@ def _conduct_repo_search(username, query, limit=25, page=1):
      'name': repo.namespace_name + '/' + repo.name,
      'description': repo.description,
      'is_public': repo.is_public,
      'href': '/repository/' + repo.namespace_name + '/' + repo.name
    })
      'href': '/repository/' + repo.namespace_name + '/' + repo.name})

  # Defined: https://docs.docker.com/v1.6/reference/api/registry_api/
  return {

@@ -350,5 +343,4 @@ def _conduct_repo_search(username, query, limit=25, page=1):
    'num_pages': page + 1 if len(matching_repos) > limit else page,
    'page': page,
    'page_size': limit,
    'results': results,
  }
    'results': results,}
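The substantive change in this file is the import swap near the top: handlers now pull pre_oci_model from endpoints.v1.models_pre_oci instead of data.interfaces.v1, while the rest of the diff is line reflow. A short sketch of how a handler consumes that facade after the move; the handler itself is illustrative and assumes the repository's modules are importable:

from endpoints.v1.models_pre_oci import pre_oci_model as model
from util.http import abort

def example_list_tags(namespace_name, repo_name):
  # Data-layer questions go through the v1 interface rather than the ORM directly.
  if not model.repository_exists(namespace_name, repo_name):
    abort(404, 'Unknown repository', issue='unknown-repo')
  return model.list_tags(namespace_name, repo_name)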
endpoints/v1/models_interface.py  (new file, 221 lines)
@@ -0,0 +1,221 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple

from six import add_metaclass


class Repository(
    namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', 'is_public',
                              'kind'])):
  """
  Repository represents a namespaced collection of tags.
  :type id: int
  :type name: string
  :type namespace_name: string
  :type description: string
  :type is_public: bool
  :type kind: string
  """


@add_metaclass(ABCMeta)
class DockerRegistryV1DataInterface(object):
  """
  Interface that represents all data store interactions required by a Docker Registry v1.
  """

  @abstractmethod
  def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
    """
    Returns all the placements for the image with the given V1 Docker ID, found under the given
    repository or None if no image was found.
    """
    pass

  @abstractmethod
  def docker_v1_metadata(self, namespace_name, repo_name, image_id):
    """
    Returns various pieces of metadata associated with an image with the given V1 Docker ID,
    including the checksum and its V1 JSON metadata.
    """
    pass

  @abstractmethod
  def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                comment, command, compat_json, parent_image_id=None):
    """
    Updates various pieces of V1 metadata associated with a particular image.
    """
    pass

  @abstractmethod
  def storage_exists(self, namespace_name, repo_name, image_id):
    """
    Returns whether storage already exists for the image with the V1 Docker ID under the given
    repository.
    """
    pass

  @abstractmethod
  def store_docker_v1_checksums(self, namespace_name, repo_name, image_id, checksum,
                                content_checksum):
    """
    Stores the various V1 checksums for the image with the V1 Docker ID.
    """
    pass

  @abstractmethod
  def is_image_uploading(self, namespace_name, repo_name, image_id):
    """
    Returns whether the image with the V1 Docker ID is currently marked as uploading.
    """
    pass

  @abstractmethod
  def update_image_uploading(self, namespace_name, repo_name, image_id, is_uploading):
    """
    Marks the image with the V1 Docker ID with the given uploading status.
    """
    pass

  @abstractmethod
  def update_image_sizes(self, namespace_name, repo_name, image_id, size, uncompressed_size):
    """
    Updates the sizing information for the image with the given V1 Docker ID.
    """
    pass

  @abstractmethod
  def get_image_size(self, namespace_name, repo_name, image_id):
    """
    Returns the wire size of the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def create_bittorrent_pieces(self, namespace_name, repo_name, image_id, pieces_bytes):
    """
    Saves the BitTorrent piece hashes for the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def image_ancestry(self, namespace_name, repo_name, image_id):
    """
    Returns a list containing the full ancestry of Docker V1 IDs, in order, for the image with the
    given Docker V1 ID.
    """
    pass

  @abstractmethod
  def repository_exists(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name and namespace exists.
    """
    pass

  @abstractmethod
  def create_or_link_image(self, username, namespace_name, repo_name, image_id, storage_location):
    """
    Adds the given image to the given repository, by either linking to an existing image visible to
    the user with the given username, or creating a new one if no existing image matches.
    """
    pass

  @abstractmethod
  def create_temp_hidden_tag(self, namespace_name, repo_name, image_id, expiration):
    """
    Creates a hidden tag under the matching namespace pointing to the image with the given V1 Docker
    ID.
    """
    pass

  @abstractmethod
  def list_tags(self, namespace_name, repo_name):
    """
    Returns all the tags defined in the repository with the given namespace and name.
    """
    pass

  @abstractmethod
  def create_or_update_tag(self, namespace_name, repo_name, image_id, tag_name):
    """
    Creates or updates a tag under the matching repository to point to the image with the given
    Docker V1 ID.
    """
    pass

  @abstractmethod
  def find_image_id_by_tag(self, namespace_name, repo_name, tag_name):
    """
    Returns the Docker V1 image ID for the HEAD image for the tag with the given name under the
    matching repository, or None if none.
    """
    pass

  @abstractmethod
  def delete_tag(self, namespace_name, repo_name, tag_name):
    """
    Deletes the given tag from the given repository.
    """
    pass

  @abstractmethod
  def load_token(self, token):
    """
    Loads the data associated with the given (deprecated) access token, and, if
    found returns True.
    """
    pass

  @abstractmethod
  def verify_robot(self, username, token):
    """
    Returns True if the given robot username and token match an existing robot
    account.
    """
    pass

  @abstractmethod
  def change_user_password(self, user, new_password):
    """
    Changes the password associated with the given user.
    """
    pass

  @abstractmethod
  def get_repository(self, namespace_name, repo_name):
    """
    Returns the repository with the given name under the given namespace or None
    if none.
    """
    pass

  @abstractmethod
  def create_repository(self, namespace_name, repo_name, user=None):
    """
    Creates a new repository under the given namespace with the given name, for
    the given user.
    """
    pass

  @abstractmethod
  def repository_is_public(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name under the given namespace
    is public. If no matching repository was found, returns False.
    """
    pass

  @abstractmethod
  def validate_oauth_token(self, token):
    """ Returns whether the given OAuth token validates. """
    pass

  @abstractmethod
  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0,
                                       limit=25):
    """
    Returns a sorted list of repositories matching the given search term.
    """
    pass
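Because the interface is declared through ABCMeta and abstractmethod, a concrete model cannot be instantiated until every abstract method has an implementation, which is what keeps the endpoints honest about the data calls they may make. A toy illustration of that contract, not part of the commit:

from abc import ABCMeta, abstractmethod

from six import add_metaclass


@add_metaclass(ABCMeta)
class TinyInterface(object):
  @abstractmethod
  def repository_exists(self, namespace_name, repo_name):
    pass


class TinyModel(TinyInterface):
  def repository_exists(self, namespace_name, repo_name):
    return (namespace_name, repo_name) == ('library', 'hello-world')


# TinyInterface() raises TypeError; TinyModel() works because the abstract
# method has an implementation.
assert TinyModel().repository_exists('library', 'hello-world')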
@@ -1,234 +1,15 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple

from six import add_metaclass

from app import app, storage as store
from data import model
from data.model import db_transaction
from endpoints.v1.models_interface import DockerRegistryV1DataInterface, Repository
from util.morecollections import AttrDict


class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
                                           'is_public', 'kind'])):
  """
  Repository represents a namespaced collection of tags.
  :type id: int
  :type name: string
  :type namespace_name: string
  :type description: string
  :type is_public: bool
  :type kind: string
  """


@add_metaclass(ABCMeta)
class DockerRegistryV1DataInterface(object):
  """
  Interface that represents all data store interactions required by a Docker Registry v1.
  """

  @abstractmethod
  def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
    """
    Returns all the placements for the image with the given V1 Docker ID, found under the given
    repository or None if no image was found.
    """
    pass

  @abstractmethod
  def docker_v1_metadata(self, namespace_name, repo_name, image_id):
    """
    Returns various pieces of metadata associated with an image with the given V1 Docker ID,
    including the checksum and its V1 JSON metadata.
    """
    pass

  @abstractmethod
  def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                comment, command, compat_json, parent_image_id=None):
    """
    Updates various pieces of V1 metadata associated with a particular image.
    """
    pass

  @abstractmethod
  def storage_exists(self, namespace_name, repo_name, image_id):
    """
    Returns whether storage already exists for the image with the V1 Docker ID under the given
    repository.
    """
    pass

  @abstractmethod
  def store_docker_v1_checksums(self, namespace_name, repo_name, image_id, checksum,
                                content_checksum):
    """
    Stores the various V1 checksums for the image with the V1 Docker ID.
    """
    pass

  @abstractmethod
  def is_image_uploading(self, namespace_name, repo_name, image_id):
    """
    Returns whether the image with the V1 Docker ID is currently marked as uploading.
    """
    pass

  @abstractmethod
  def update_image_uploading(self, namespace_name, repo_name, image_id, is_uploading):
    """
    Marks the image with the V1 Docker ID with the given uploading status.
    """
    pass

  @abstractmethod
  def update_image_sizes(self, namespace_name, repo_name, image_id, size, uncompressed_size):
    """
    Updates the sizing information for the image with the given V1 Docker ID.
    """
    pass

  @abstractmethod
  def get_image_size(self, namespace_name, repo_name, image_id):
    """
    Returns the wire size of the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def create_bittorrent_pieces(self, namespace_name, repo_name, image_id, pieces_bytes):
    """
    Saves the BitTorrent piece hashes for the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def image_ancestry(self, namespace_name, repo_name, image_id):
    """
    Returns a list containing the full ancestry of Docker V1 IDs, in order, for the image with the
    given Docker V1 ID.
    """
    pass

  @abstractmethod
  def repository_exists(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name and namespace exists.
    """
    pass

  @abstractmethod
  def create_or_link_image(self, username, namespace_name, repo_name, image_id, storage_location):
    """
    Adds the given image to the given repository, by either linking to an existing image visible to
    the user with the given username, or creating a new one if no existing image matches.
    """
    pass

  @abstractmethod
  def create_temp_hidden_tag(self, namespace_name, repo_name, image_id, expiration):
    """
    Creates a hidden tag under the matching namespace pointing to the image with the given V1 Docker
    ID.
    """
    pass

  @abstractmethod
  def list_tags(self, namespace_name, repo_name):
    """
    Returns all the tags defined in the repository with the given namespace and name.
    """
    pass

  @abstractmethod
  def create_or_update_tag(self, namespace_name, repo_name, image_id, tag_name):
    """
    Creates or updates a tag under the matching repository to point to the image with the given
    Docker V1 ID.
    """
    pass

  @abstractmethod
  def find_image_id_by_tag(self, namespace_name, repo_name, tag_name):
    """
    Returns the Docker V1 image ID for the HEAD image for the tag with the given name under the
    matching repository, or None if none.
    """
    pass

  @abstractmethod
  def delete_tag(self, namespace_name, repo_name, tag_name):
    """
    Deletes the given tag from the given repository.
    """
    pass

  @abstractmethod
  def load_token(self, token):
    """
    Loads the data associated with the given (deprecated) access token, and, if
    found returns True.
    """
    pass

  @abstractmethod
  def verify_robot(self, username, token):
    """
    Returns True if the given robot username and token match an existing robot
    account.
    """
    pass

  @abstractmethod
  def change_user_password(self, user, new_password):
    """
    Changes the password associated with the given user.
    """
    pass

  @abstractmethod
  def get_repository(self, namespace_name, repo_name):
    """
    Returns the repository with the given name under the given namespace or None
    if none.
    """
    pass

  @abstractmethod
  def create_repository(self, namespace_name, repo_name, user=None):
    """
    Creates a new repository under the given namespace with the given name, for
    the given user.
    """
    pass

  @abstractmethod
  def repository_is_public(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name under the given namespace
    is public. If no matching repository was found, returns False.
    """
    pass

  @abstractmethod
  def validate_oauth_token(self, token):
    """ Returns whether the given OAuth token validates. """
    pass

  @abstractmethod
  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
    """
    Returns a sorted list of repositories matching the given search term.
    """
    pass


class PreOCIModel(DockerRegistryV1DataInterface):
  """
  PreOCIModel implements the data model for the v1 Docker Registry protocol using a database schema
  before it was changed to support the OCI specification.
  """

  def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image_extended(namespace_name, repo_name, image_id)
    if not repo_image or repo_image.storage is None:

@@ -245,8 +26,7 @@ class PreOCIModel(DockerRegistryV1DataInterface):
      'repo_name': repo_name,
      'image_id': image_id,
      'checksum': repo_image.v1_checksum,
      'compat_json': repo_image.v1_json_metadata,
    })
      'compat_json': repo_image.v1_json_metadata,})

  def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                comment, command, compat_json, parent_image_id=None):

@@ -274,7 +54,7 @@ class PreOCIModel(DockerRegistryV1DataInterface):
    if repo_image is None or repo_image.storage is None:
      return

    with db_transaction():
    with model.db_transaction():
      repo_image.storage.content_checksum = content_checksum
      repo_image.v1_checksum = checksum
      repo_image.storage.save()

@@ -388,10 +168,10 @@ class PreOCIModel(DockerRegistryV1DataInterface):
  def validate_oauth_token(self, token):
    return bool(model.oauth.validate_access_token(token))

  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
    repos = model.repository.get_filtered_matching_repositories(search_term,
                                                                filter_username=filter_username,
                                                                offset=offset, limit=limit)
  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0,
                                       limit=25):
    repos = model.repository.get_filtered_matching_repositories(
      search_term, filter_username=filter_username, offset=offset, limit=limit)
    return [_repository_for_repo(repo) for repo in repos]


@@ -403,8 +183,7 @@ def _repository_for_repo(repo):
    namespace_name=repo.namespace_user.username,
    description=repo.description,
    is_public=model.repository.is_repository_public(repo),
    kind=model.repository.get_repo_kind_name(repo),
  )
    kind=model.repository.get_repo_kind_name(repo),)


pre_oci_model = PreOCIModel()
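PreOCIModel keeps the ORM access on its side of the boundary; what it hands back to the endpoints is the plain Repository value object declared in models_interface.py. A small standalone sketch of that boundary, with illustrative field values rather than real database rows:

from collections import namedtuple

Repository = namedtuple(
  'Repository', ['id', 'name', 'namespace_name', 'description', 'is_public', 'kind'])

# Endpoint code receives immutable value objects like this instead of ORM rows,
# so swapping the backing schema never changes the endpoint-facing shape.
repo = Repository(id=1, name='hello-world', namespace_name='library',
                  description='demo repository', is_public=True, kind='image')
assert repo.kind == 'image'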
@@ -10,13 +10,12 @@ from flask import make_response, request, session, Response, redirect, abort as
from app import storage as store, app, metric_queue
from auth.auth_context import get_authenticated_user
from auth.decorators import extract_namespace_repo_from_session, process_auth
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
from auth.registry_jwt_auth import get_granted_username
from data import model, database
from data.interfaces.v1 import pre_oci_model as model
from digest import checksums
from endpoints.v1 import v1_bp
from endpoints.v1.models_pre_oci import pre_oci_model as model
from endpoints.decorators import anon_protect
from util.http import abort, exact_abort
from util.registry.filelike import SocketReader

@@ -24,7 +23,6 @@ from util.registry import gzipstream
from util.registry.replication import queue_storage_replication
from util.registry.torrent import PieceHasher


logger = logging.getLogger(__name__)


@@ -38,18 +36,21 @@ def _finish_image(namespace, repository, image_id):

def require_completion(f):
  """This make sure that the image push correctly finished."""

  @wraps(f)
  def wrapper(namespace, repository, *args, **kwargs):
    image_id = kwargs['image_id']
    if model.is_image_uploading(namespace, repository, image_id):
      abort(400, 'Image %(image_id)s is being uploaded, retry later',
            issue='upload-in-progress', image_id=image_id)
      abort(400, 'Image %(image_id)s is being uploaded, retry later', issue='upload-in-progress',
            image_id=image_id)
    return f(namespace, repository, *args, **kwargs)

  return wrapper


def set_cache_headers(f):
  """Returns HTTP headers suitable for caching."""

  @wraps(f)
  def wrapper(*args, **kwargs):
    # Set TTL to 1 year by default

@@ -59,8 +60,7 @@ def set_cache_headers(f):
    headers = {
      'Cache-Control': 'public, max-age={0}'.format(ttl),
      'Expires': expires,
      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
    }
      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',}
    if 'If-Modified-Since' in request.headers:
      response = make_response('Not modified', 304)
      response.headers.extend(headers)

@@ -69,6 +69,7 @@ def set_cache_headers(f):
    # Prevent the Cookie to be sent when the object is cacheable
    session.modified = False
    return f(*args, **kwargs)

  return wrapper


@@ -92,8 +93,7 @@ def head_image_layer(namespace, repository, image_id, headers):
  locations, _ = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
  if locations is None:
    logger.debug('Could not find any blob placement locations')
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)
    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

  # Add the Accept-Ranges header if the storage engine supports resumable
  # downloads.

@@ -129,8 +129,7 @@ def get_image_layer(namespace, repository, image_id, headers):
  logger.debug('Looking up placement locations and path')
  locations, path = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
  if not locations or not path:
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)
    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
  try:
    logger.debug('Looking up the direct download URL for path: %s', path)
    direct_download_url = store.get_direct_download_url(locations, path)

@@ -145,8 +144,7 @@ def get_image_layer(namespace, repository, image_id, headers):
    return Response(store.stream_read(locations, path), headers=headers)
  except (IOError, AttributeError):
    logger.exception('Image layer data not found')
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)
    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

  abort(403)


@@ -214,10 +212,8 @@ def put_image_layer(namespace, repository, image_id):
  try:
    start_time = time()
    store.stream_write(locations, path, sr)
    metric_queue.chunk_size.Observe(size_info.compressed_size,
                                    labelvalues=[list(locations)[0]])
    metric_queue.chunk_upload_time.Observe(time() - start_time,
                                           labelvalues=[list(locations)[0]])
    metric_queue.chunk_size.Observe(size_info.compressed_size, labelvalues=[list(locations)[0]])
    metric_queue.chunk_upload_time.Observe(time() - start_time, labelvalues=[list(locations)[0]])
  except IOError:
    logger.exception('Exception when writing image data')
    abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)

@@ -227,7 +223,8 @@ def put_image_layer(namespace, repository, image_id):
                           size_info.uncompressed_size)

  # Save the BitTorrent pieces.
  model.create_bittorrent_pieces(namespace, repository, image_id, piece_hasher.final_piece_hashes())
  model.create_bittorrent_pieces(namespace, repository, image_id,
                                 piece_hasher.final_piece_hashes())

  # Append the computed checksum.
  csums = []

@@ -307,8 +304,8 @@ def put_image_checksum(namespace, repository, image_id):

  logger.debug('Marking image path')
  if not model.is_image_uploading(namespace, repository, image_id):
    abort(409, 'Cannot set checksum for image %(image_id)s',
          issue='image-write-error', image_id=image_id)
    abort(409, 'Cannot set checksum for image %(image_id)s', issue='image-write-error',
          image_id=image_id)

  logger.debug('Storing image and content checksums')

@@ -323,8 +320,8 @@ def put_image_checksum(namespace, repository, image_id):
    logger.debug('session checksums: %s', session.get('checksum', []))
    logger.debug('client supplied checksum: %s', checksum)
    logger.debug('put_image_checksum: Wrong checksum')
    abort(400, 'Checksum mismatch for image: %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)
    abort(400, 'Checksum mismatch for image: %(image_id)s', issue='checksum-mismatch',
          image_id=image_id)

  # Mark the image as uploaded.
  _finish_image(namespace, repository, image_id)

@@ -416,16 +413,16 @@ def put_image_json(namespace, repository, image_id):
    pass

  if not data or not isinstance(data, dict):
    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
          issue='invalid-request', image_id=image_id, json=request.data)
    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s', issue='invalid-request',
          image_id=image_id, json=request.data)

  if 'id' not in data:
    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)
    abort(400, 'Missing key `id` in JSON for image: %(image_id)s', issue='invalid-request',
          image_id=image_id)

  if image_id != data['id']:
    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)
    abort(400, 'JSON data contains invalid id for image: %(image_id)s', issue='invalid-request',
          image_id=image_id)

  logger.debug('Looking up repo image')


@@ -469,7 +466,8 @@ def put_image_json(namespace, repository, image_id):
  command = json.dumps(command_list) if command_list else None

  logger.debug('Setting image metadata')
  model.update_docker_v1_metadata(namespace, repository, image_id, data.get('created'),
  model.update_docker_v1_metadata(namespace, repository, image_id,
                                  data.get('created'),
                                  data.get('comment'), command, uploaded_metadata, parent_id)

  return make_response('true', 200)
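require_completion and set_cache_headers share one shape: a functools.wraps decorator that inspects request state and either aborts or annotates the response before delegating to the wrapped view. A standalone sketch of that decorator shape, with illustrative names and a plain exception standing in for the registry's abort helper:

from functools import wraps


def require_flag(check):
  """Build a decorator that rejects the call when check(**kwargs) is falsy."""
  def decorator(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
      if not check(**kwargs):
        raise ValueError('precondition not met')
      return f(*args, **kwargs)
    return wrapper
  return decorator


@require_flag(lambda image_id: not image_id.startswith('uploading-'))
def serve_layer(image_id):
  return 'layer bytes for %s' % image_id


print(serve_layer(image_id='abc123'))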
@@ -3,19 +3,16 @@ import json

from flask import abort, request, jsonify, make_response, session


from auth.decorators import process_auth
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
from data import model
from data.interfaces.v1 import pre_oci_model as model
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect
from endpoints.v1 import v1_bp
from endpoints.v1.models_pre_oci import pre_oci_model as model
from util.audit import track_and_log
from util.names import TAG_ERROR, TAG_REGEX


logger = logging.getLogger(__name__)
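tag.py gets the same treatment as the other endpoint modules: the pre_oci_model import moves to endpoints.v1.models_pre_oci, while the module keeps importing TAG_ERROR and TAG_REGEX, which the tag endpoints use to validate names before they reach the data interface. A standalone sketch of that kind of validation; the pattern below is illustrative, not the repository's actual TAG_REGEX:

import re

# Illustrative stand-in for util.names.TAG_REGEX, not the project's definition.
TAG_PATTERN = re.compile(r'^[A-Za-z0-9_][A-Za-z0-9_.-]{0,127}$')


def validate_tag_name(tag_name):
  if not TAG_PATTERN.match(tag_name):
    raise ValueError('invalid tag name: %s' % tag_name)
  return tag_name


assert validate_tag_name('v1.0') == 'v1.0'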