from abc import ABCMeta, abstractmethod
from collections import namedtuple

from six import add_metaclass

from app import app, storage as store
from data import model
from data.model import db_transaction
from util.morecollections import AttrDict


class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
                                            'is_public'])):
  """
  Repository represents a namespaced collection of tags.
  """


@add_metaclass(ABCMeta)
class DockerRegistryV1DataInterface(object):
  """
  Interface that represents all data store interactions required by a Docker Registry v1.
  """

  @abstractmethod
  def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
    """
    Returns the storage locations and layer path for the image with the given V1 Docker ID under
    the given repository, or (None, None) if no image was found.
    """
    pass

  @abstractmethod
  def docker_v1_metadata(self, namespace_name, repo_name, image_id):
    """
    Returns various pieces of metadata associated with an image with the given V1 Docker ID,
    including the checksum and its V1 JSON metadata.
    """
    pass

  @abstractmethod
  def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                comment, command, compat_json, parent_image_id=None):
    """
    Updates various pieces of V1 metadata associated with a particular image.
    """
    pass

  @abstractmethod
  def storage_exists(self, namespace_name, repo_name, image_id):
    """
    Returns whether storage already exists for the image with the V1 Docker ID under the given
    repository.
    """
    pass

  @abstractmethod
  def store_docker_v1_checksums(self, namespace_name, repo_name, image_id, checksum,
                                content_checksum):
    """
    Stores the various V1 checksums for the image with the V1 Docker ID.
    """
    pass

  @abstractmethod
  def is_image_uploading(self, namespace_name, repo_name, image_id):
    """
    Returns whether the image with the V1 Docker ID is currently marked as uploading.
    """
    pass

  @abstractmethod
  def update_image_uploading(self, namespace_name, repo_name, image_id, is_uploading):
    """
    Marks the image with the V1 Docker ID with the given uploading status.
    """
    pass

  @abstractmethod
  def update_image_sizes(self, namespace_name, repo_name, image_id, size, uncompressed_size):
    """
    Updates the sizing information for the image with the given V1 Docker ID.
    """
    pass

  @abstractmethod
  def get_image_size(self, namespace_name, repo_name, image_id):
    """
    Returns the wire size of the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def create_bittorrent_pieces(self, namespace_name, repo_name, image_id, pieces_bytes):
    """
    Saves the BitTorrent piece hashes for the image with the given Docker V1 ID.
    """
    pass

  @abstractmethod
  def image_ancestry(self, namespace_name, repo_name, image_id):
    """
    Returns a list containing the full ancestry of Docker V1 IDs, in order, for the image with the
    given Docker V1 ID.
    """
    pass

  @abstractmethod
  def repository_exists(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name and namespace exists.
    """
    pass

  @abstractmethod
  def create_or_link_image(self, username, namespace_name, repo_name, image_id, storage_location):
    """
    Adds the given image to the given repository, by either linking to an existing image visible to
    the user with the given username, or creating a new one if no existing image matches.
    """
    pass

  @abstractmethod
  def create_temp_hidden_tag(self, namespace_name, repo_name, image_id, expiration):
    """
    Creates a temporary hidden tag under the matching repository pointing to the image with the
    given V1 Docker ID.
    """
    pass

  @abstractmethod
  def list_tags(self, namespace_name, repo_name):
    """
    Returns all the tags defined in the repository with the given namespace and name.
    """
    pass

  @abstractmethod
  def create_or_update_tag(self, namespace_name, repo_name, image_id, tag_name):
    """
    Creates or updates a tag under the matching repository to point to the image with the given
    Docker V1 ID.
    """
    pass

  @abstractmethod
  def find_image_id_by_tag(self, namespace_name, repo_name, tag_name):
    """
    Returns the Docker V1 image ID for the HEAD image of the tag with the given name under the
    matching repository, or None if the tag does not exist.
    """
    pass

  @abstractmethod
  def delete_tag(self, namespace_name, repo_name, tag_name):
    """
    Deletes the given tag from the given repository.
    """
    pass

  @abstractmethod
  def load_token(self, token):
    """
    Loads the data associated with the given (deprecated) access token and returns True if the
    token was found, False otherwise.
    """
    pass

  @abstractmethod
  def verify_robot(self, username, token):
    """
    Returns True if the given robot username and token match an existing robot account.
    """
    pass

  @abstractmethod
  def change_user_password(self, user, new_password):
    """
    Changes the password associated with the given user.
    """
    pass

  @abstractmethod
  def get_repository(self, namespace_name, repo_name):
    """
    Returns the repository with the given name under the given namespace, or None if not found.
    """
    pass

  @abstractmethod
  def create_repository(self, namespace_name, repo_name, user=None):
    """
    Creates a new repository under the given namespace with the given name, for the given user.
    """
    pass

  @abstractmethod
  def repository_is_public(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given name under the given namespace is public.
    If no matching repository was found, returns False.
    """
    pass

  @abstractmethod
  def validate_oauth_token(self, token):
    """ Returns whether the given OAuth token validates. """
    pass

  @abstractmethod
  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
    """
    Returns a sorted list of repositories matching the given search term.
    """
    pass


class PreOCIModel(DockerRegistryV1DataInterface):
  """
  PreOCIModel implements the data model for the v1 Docker Registry protocol using a database schema
  before it was changed to support the OCI specification.
  """
  def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image_extended(namespace_name, repo_name, image_id)
    if not repo_image or repo_image.storage is None:
      return None, None
    return repo_image.storage.locations, model.storage.get_layer_path(repo_image.storage)

  def docker_v1_metadata(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
    if repo_image is None:
      return None

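    # AttrDict wraps the plain dict so callers can read these fields with attribute access as well.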
    return AttrDict({
      'namespace_name': namespace_name,
      'repo_name': repo_name,
      'image_id': image_id,
      'checksum': repo_image.v1_checksum,
      'compat_json': repo_image.v1_json_metadata,
    })

  def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                comment, command, compat_json, parent_image_id=None):
    parent_image = None
    if parent_image_id is not None:
      parent_image = model.image.get_repo_image(namespace_name, repo_name, parent_image_id)

    model.image.set_image_metadata(image_id, namespace_name, repo_name, created_date_str, comment,
                                   command, compat_json, parent=parent_image)

  def storage_exists(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return False

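    # Storage that is still marked as uploading does not yet hold a complete layer, so report it
    # as missing.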
    if repo_image.storage.uploading:
      return False

    layer_path = model.storage.get_layer_path(repo_image.storage)
    return store.exists(repo_image.storage.locations, layer_path)

  def store_docker_v1_checksums(self, namespace_name, repo_name, image_id, checksum,
                                content_checksum):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return

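    # Save the image row and its storage row together so the two checksum fields stay consistent.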
    with db_transaction():
      repo_image.storage.content_checksum = content_checksum
      repo_image.v1_checksum = checksum
      repo_image.storage.save()
      repo_image.save()

  def is_image_uploading(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return False
    return repo_image.storage.uploading

  def update_image_uploading(self, namespace_name, repo_name, image_id, is_uploading):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return

    repo_image.storage.uploading = is_uploading
    repo_image.storage.save()
    return repo_image.storage

  def update_image_sizes(self, namespace_name, repo_name, image_id, size, uncompressed_size):
    model.storage.set_image_storage_metadata(image_id, namespace_name, repo_name, size,
                                             uncompressed_size)

  def get_image_size(self, namespace_name, repo_name, image_id):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return None
    return repo_image.storage.image_size

  def create_bittorrent_pieces(self, namespace_name, repo_name, image_id, pieces_bytes):
    repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
    if repo_image is None or repo_image.storage is None:
      return

    model.storage.save_torrent_info(repo_image.storage, app.config['BITTORRENT_PIECE_SIZE'],
                                    pieces_bytes)

  def image_ancestry(self, namespace_name, repo_name, image_id):
    try:
      image = model.image.get_image_by_id(namespace_name, repo_name, image_id)
    except model.InvalidImageException:
      return None

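    # The ancestry begins with the image itself, followed by its parents in order.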
    parents = model.image.get_parent_images(namespace_name, repo_name, image)
    ancestry_docker_ids = [image.docker_image_id]
    ancestry_docker_ids.extend([parent.docker_image_id for parent in parents])
    return ancestry_docker_ids

  def repository_exists(self, namespace_name, repo_name):
    repo = model.repository.get_repository(namespace_name, repo_name)
    return repo is not None

  def create_or_link_image(self, username, namespace_name, repo_name, image_id, storage_location):
    repo = model.repository.get_repository(namespace_name, repo_name)
    model.image.find_create_or_link_image(image_id, repo, username, {}, storage_location)

  def create_temp_hidden_tag(self, namespace_name, repo_name, image_id, expiration):
    repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
    if repo_image is None:
      return

    repo = repo_image.repository
    model.tag.create_temporary_hidden_tag(repo, repo_image, expiration)

  def list_tags(self, namespace_name, repo_name):
    return model.tag.list_repository_tags(namespace_name, repo_name)

  def create_or_update_tag(self, namespace_name, repo_name, image_id, tag_name):
    model.tag.create_or_update_tag(namespace_name, repo_name, tag_name, image_id)

  def find_image_id_by_tag(self, namespace_name, repo_name, tag_name):
    try:
      tag_image = model.tag.get_tag_image(namespace_name, repo_name, tag_name)
    except model.DataModelException:
      return None

    return tag_image.docker_image_id

  def delete_tag(self, namespace_name, repo_name, tag_name):
    model.tag.delete_tag(namespace_name, repo_name, tag_name)

  def load_token(self, token):
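    # Only validity matters to the caller; the loaded token data itself is discarded.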
    try:
      model.token.load_token_data(token)
      return True
    except model.InvalidTokenException:
      return False

  def verify_robot(self, username, token):
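    # An invalid username/token pair raises InvalidRobotException; a matching robot is coerced to
    # a boolean.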
    try:
      return bool(model.user.verify_robot(username, token))
    except model.InvalidRobotException:
      return False

  def change_user_password(self, user, new_password):
    model.user.change_password(user, new_password)

  def get_repository(self, namespace_name, repo_name):
    repo = model.repository.get_repository(namespace_name, repo_name)
    if repo is None:
      return None
    return _repository_for_repo(repo)

  def create_repository(self, namespace_name, repo_name, user=None):
    model.repository.create_repository(namespace_name, repo_name, user)

  def repository_is_public(self, namespace_name, repo_name):
    return model.repository.repository_is_public(namespace_name, repo_name)

  def validate_oauth_token(self, token):
    return bool(model.oauth.validate_access_token(token))

  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
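    # Lookup and permission filtering are both handled inside this single model query, rather than
    # checking permissions per repository, to keep the number of database queries small.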
    repos = model.repository.get_filtered_matching_repositories(search_term, filter_username,
                                                                offset, limit)
    return [_repository_for_repo(repo) for repo in repos]


def _repository_for_repo(repo):
  """ Returns a Repository object representing the Pre-OCI data model instance of a repository. """
  return Repository(
    id=repo.id,
    name=repo.name,
    namespace_name=repo.namespace_user.username,
    description=repo.description,
    is_public=model.repository.is_repository_public(repo)
  )


pre_oci_model = PreOCIModel()
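
# Illustrative usage only (not part of the module): callers, for example the v1 registry
# endpoints, would typically go through `pre_oci_model` rather than the data model directly.
# The namespace, repository and tag names below are placeholders.
#
#   image_id = pre_oci_model.find_image_id_by_tag('devtable', 'simple', 'latest')
#   if image_id is not None:
#     metadata = pre_oci_model.docker_v1_metadata('devtable', 'simple', image_id)
#     ancestry = pre_oci_model.image_ancestry('devtable', 'simple', image_id)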