from app import app, storage as store
from data import model
from endpoints.v1.models_interface import DockerRegistryV1DataInterface, Repository
from util.morecollections import AttrDict
class PreOCIModel(DockerRegistryV1DataInterface):
    """
    PreOCIModel implements the data model for the v1 Docker Registry protocol using a database
    schema before it was changed to support the OCI specification.
    """

    def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
        """Return (storage locations, layer path) for the image, or (None, None) if missing."""
        repo_image = model.image.get_repo_image_extended(namespace_name, repo_name, image_id)
        if not repo_image or repo_image.storage is None:
            return None, None
        return repo_image.storage.locations, model.storage.get_layer_path(repo_image.storage)

    def docker_v1_metadata(self, namespace_name, repo_name, image_id):
        """Return an AttrDict of the image's V1 metadata (checksum and compat JSON), or None."""
        repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
        if repo_image is None:
            return None

        return AttrDict({
            'namespace_name': namespace_name,
            'repo_name': repo_name,
            'image_id': image_id,
            'checksum': repo_image.v1_checksum,
            'compat_json': repo_image.v1_json_metadata,
        })

    def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                  comment, command, compat_json, parent_image_id=None):
        """Update the V1 metadata stored for the image, optionally linking a parent image."""
        parent_image = None
        if parent_image_id is not None:
            parent_image = model.image.get_repo_image(namespace_name, repo_name, parent_image_id)

        model.image.set_image_metadata(image_id, namespace_name, repo_name, created_date_str, comment,
                                       command, compat_json, parent=parent_image)

    def storage_exists(self, namespace_name, repo_name, image_id):
        """Return True if the image's layer blob exists in backing storage.

        An image whose storage is still marked as uploading is reported as not existing.
        """
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return False

        if repo_image.storage.uploading:
            return False

        layer_path = model.storage.get_layer_path(repo_image.storage)
        return store.exists(repo_image.storage.locations, layer_path)

    def store_docker_v1_checksums(self, namespace_name, repo_name, image_id, checksum,
                                  content_checksum):
        """Persist the V1 checksum and the content checksum for the image.

        Both rows are saved inside a single database transaction so the two
        checksums cannot be observed partially written.
        """
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return

        with model.db_transaction():
            repo_image.storage.content_checksum = content_checksum
            repo_image.v1_checksum = checksum
            repo_image.storage.save()
            repo_image.save()

    def is_image_uploading(self, namespace_name, repo_name, image_id):
        """Return True if the image's backing storage is still marked as uploading."""
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return False
        return repo_image.storage.uploading

    def update_image_uploading(self, namespace_name, repo_name, image_id, is_uploading):
        """Set the uploading flag on the image's storage record.

        Returns the updated storage record, or None when the image or its
        storage cannot be found.
        """
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return

        repo_image.storage.uploading = is_uploading
        repo_image.storage.save()
        return repo_image.storage

    def update_image_sizes(self, namespace_name, repo_name, image_id, size, uncompressed_size):
        """Record the compressed and uncompressed layer sizes for the image."""
        model.storage.set_image_storage_metadata(image_id, namespace_name, repo_name, size,
                                                 uncompressed_size)

    def get_image_size(self, namespace_name, repo_name, image_id):
        """Return the stored image size, or None when the image or its storage is unknown."""
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return None
        return repo_image.storage.image_size

    def create_bittorrent_pieces(self, namespace_name, repo_name, image_id, pieces_bytes):
        """Save torrent piece hashes for the image's storage, using the configured piece size."""
        repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
        if repo_image is None or repo_image.storage is None:
            return

        model.storage.save_torrent_info(repo_image.storage, app.config['BITTORRENT_PIECE_SIZE'],
                                        pieces_bytes)

    def image_ancestry(self, namespace_name, repo_name, image_id):
        """Return the docker image IDs of the image followed by its parents, or None.

        None is returned when the image ID is invalid for the repository.
        """
        try:
            image = model.image.get_image_by_id(namespace_name, repo_name, image_id)
        except model.InvalidImageException:
            return None

        parents = model.image.get_parent_images(namespace_name, repo_name, image)
        ancestry_docker_ids = [image.docker_image_id]
        ancestry_docker_ids.extend([parent.docker_image_id for parent in parents])
        return ancestry_docker_ids

    def repository_exists(self, namespace_name, repo_name):
        """Return True if the named repository exists."""
        repo = model.repository.get_repository(namespace_name, repo_name)
        return repo is not None

    def create_or_link_image(self, username, namespace_name, repo_name, image_id, storage_location):
        """Create the image in the repository, or link it to an existing matching image."""
        repo = model.repository.get_repository(namespace_name, repo_name)
        model.image.find_create_or_link_image(image_id, repo, username, {}, storage_location)

    def create_temp_hidden_tag(self, namespace_name, repo_name, image_id, expiration):
        """Create a temporary hidden tag on the image with the given expiration.

        No-op when the image cannot be found.
        """
        repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
        if repo_image is None:
            return

        repo = repo_image.repository
        model.tag.create_temporary_hidden_tag(repo, repo_image, expiration)

    def list_tags(self, namespace_name, repo_name):
        """Return the tags of the named repository."""
        return model.tag.list_repository_tags(namespace_name, repo_name)

    def create_or_update_tag(self, namespace_name, repo_name, image_id, tag_name):
        """Point the named tag at the given image, creating the tag if necessary."""
        model.tag.create_or_update_tag(namespace_name, repo_name, tag_name, image_id)

    def find_image_id_by_tag(self, namespace_name, repo_name, tag_name):
        """Return the docker image ID the tag points to, or None when the tag is unknown."""
        try:
            tag_image = model.tag.get_tag_image(namespace_name, repo_name, tag_name)
        except model.DataModelException:
            return None

        return tag_image.docker_image_id

    def delete_tag(self, namespace_name, repo_name, tag_name):
        """Delete the named tag from the repository."""
        model.tag.delete_tag(namespace_name, repo_name, tag_name)

    def load_token(self, token):
        """Return True if the token can be loaded, False if it is invalid."""
        try:
            model.token.load_token_data(token)
            return True
        except model.InvalidTokenException:
            return False

    def verify_robot(self, username, token):
        """Return True if the robot account credentials verify, False otherwise."""
        try:
            return bool(model.user.verify_robot(username, token))
        except model.InvalidRobotException:
            return False

    def change_user_password(self, user, new_password):
        """Change the given user's password."""
        model.user.change_password(user, new_password)

    def get_repository(self, namespace_name, repo_name):
        """Return the repository as an interface-level Repository, or None if not found."""
        repo = model.repository.get_repository(namespace_name, repo_name)
        if repo is None:
            return None
        return _repository_for_repo(repo)

    def create_repository(self, namespace_name, repo_name, user=None):
        """Create the named repository, optionally owned by the given user."""
        model.repository.create_repository(namespace_name, repo_name, user)

    def repository_is_public(self, namespace_name, repo_name):
        """Return True if the named repository is publicly visible."""
        return model.repository.repository_is_public(namespace_name, repo_name)

    def validate_oauth_token(self, token):
        """Return True if the OAuth access token is valid."""
        return bool(model.oauth.validate_access_token(token))

    def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
        """Return repositories matching the search term, as interface-level Repository objects.

        Filtering by visibility for filter_username and the result ordering are
        delegated to the data model's filtered search.
        """
        repos = model.repository.get_filtered_matching_repositories(search_term,
                                                                    filter_username=filter_username,
                                                                    offset=offset, limit=limit)
        return [_repository_for_repo(repo) for repo in repos]
|
|
|
|
|
|
|
|
|
|
|
|
def _repository_for_repo(repo):
    """Convert a Pre-OCI database repository row into an interface-level Repository."""
    fields = {
        'id': repo.id,
        'name': repo.name,
        'namespace_name': repo.namespace_user.username,
        'description': repo.description,
        'is_public': model.repository.is_repository_public(repo),
        'kind': model.repository.get_repo_kind_name(repo),
    }
    return Repository(**fields)
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level singleton consumed by the v1 endpoint handlers.
pre_oci_model = PreOCIModel()
|