import binascii
import copy
import hashlib
import json
import logging
import math
import os
import random
import shutil
import string
import tarfile
import time
import unittest
import uuid

from cStringIO import StringIO
from tempfile import NamedTemporaryFile

import bencode
import gpgme
import requests
import resumablehashlib

from Crypto import Random
from Crypto.PublicKey import RSA
from flask import request, jsonify
from flask.blueprints import Blueprint
from flask_testing import LiveServerTestCase
from jwkest.jwk import RSAKey

import endpoints.decorated  # required for side effect

from app import app, storage, instance_keys, get_app_url, metric_queue
from data.database import close_db_filter, configure, DerivedStorageForImage, QueueItem, Image
from data import model
from digest.checksums import compute_simple
from endpoints.api import api_bp
from endpoints.csrf import generate_csrf_token
from endpoints.v1 import v1_bp
from endpoints.v2 import v2_bp
from endpoints.verbs import verbs
from image.docker.schema1 import DockerSchema1ManifestBuilder
from initdb import wipe_database, initialize_database, populate_database
from jsonschema import validate as validate_schema
from util.security.registry_jwt import decode_bearer_header
from util.timedeltastring import convert_to_timedelta

try:
  app.register_blueprint(v1_bp, url_prefix='/v1')
  app.register_blueprint(v2_bp, url_prefix='/v2')
  app.register_blueprint(verbs, url_prefix='/c1')
  app.register_blueprint(api_bp, url_prefix='/api')
except ValueError:
  # Blueprint was already registered.
  pass


# Add a test blueprint for generating CSRF tokens, setting feature flags and reloading the
# DB connection.
testbp = Blueprint('testbp', __name__)
logger = logging.getLogger(__name__)

@testbp.route('/csrf', methods=['GET'])
def generate_csrf():
  return generate_csrf_token()


@testbp.route('/fakestoragedd/<enabled>', methods=['POST'])
def set_fakestorage_directdownload(enabled):
  storage.put_content(['local_us'], 'supports_direct_download', enabled)
  return 'OK'


@testbp.route('/deleteimage/<image_id>', methods=['POST'])
def delete_image(image_id):
  image = Image.get(docker_image_id=image_id)
  image.docker_image_id = 'DELETED'
  image.save()
  return 'OK'


@testbp.route('/storagerepentry/<image_id>', methods=['GET'])
def get_storage_replication_entry(image_id):
  image = Image.get(docker_image_id=image_id)
  QueueItem.select().where(QueueItem.queue_name ** ('%' + image.storage.uuid + '%')).get()
  return 'OK'


@testbp.route('/feature/<feature_name>', methods=['POST'])
def set_feature(feature_name):
  import features
  old_value = features._FEATURES[feature_name].value
  features._FEATURES[feature_name].value = request.get_json()['value']
  return jsonify({'old_value': old_value})


@testbp.route('/clearderivedcache', methods=['POST'])
def clearderivedcache():
  DerivedStorageForImage.delete().execute()
  return 'OK'


@testbp.route('/removeuncompressed/<image_id>', methods=['POST'])
def removeuncompressed(image_id):
  image = model.image.get_image_by_id('devtable', 'newrepo', image_id)
  image.storage.uncompressed_size = None
  image.storage.save()
  return 'OK'


@testbp.route('/addtoken', methods=['POST'])
def addtoken():
  another_token = model.token.create_delegate_token('devtable', 'newrepo', 'my-new-token', 'write')
  another_token.code = 'somecooltokencode'
  another_token.save()
  return 'OK'

@testbp.route('/breakdatabase', methods=['POST'])
def break_database():
  # Close any existing connection.
  close_db_filter(None)

  # Reload the database config with an invalid connection.
  config = copy.copy(app.config)
  config['DB_URI'] = 'sqlite:///not/a/valid/database'
  configure(config)

  return 'OK'


@testbp.route('/reloadapp', methods=['POST'])
def reload_app():
  # Close any existing connection.
  close_db_filter(None)

  # Reload the database config.
  configure(app.config)

  # Reload random after the process split, as it cannot be used across forks without
  # reinitialization.
  Random.atfork()
  return 'OK'


app.register_blueprint(testbp, url_prefix='/__test')

class TestFeature(object):
  """ Helper object which temporarily sets the value of a feature flag. """

  def __init__(self, test_case, feature_flag, test_value):
    self.test_case = test_case
    self.feature_flag = feature_flag
    self.test_value = test_value
    self.old_value = None

  def __enter__(self):
    result = self.test_case.conduct('POST', '/__test/feature/' + self.feature_flag,
                                    data=json.dumps(dict(value=self.test_value)),
                                    headers={'Content-Type': 'application/json'})

    result_data = json.loads(result.text)
    self.old_value = result_data['old_value']

  def __exit__(self, type, value, traceback):
    self.test_case.conduct('POST', '/__test/feature/' + self.feature_flag,
                           data=json.dumps(dict(value=self.old_value)),
                           headers={'Content-Type': 'application/json'})

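# A minimal usage sketch for TestFeature (mirrors its use in the tests further below):
#
#   with TestFeature(self, 'ANONYMOUS_ACCESS', False):
#     # Anonymous pulls are rejected while the flag is disabled.
#     self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHENTICATED)
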
_CLEAN_DATABASE_PATH = None
_JWK = RSAKey(key=RSA.generate(2048))


class FailureCodes:
  """ Defines tuples representing the HTTP status codes for various errors. The tuple
      is defined as ('errordescription', V1HTTPStatusCode, V2HTTPStatusCode). """

  UNAUTHENTICATED = ('unauthenticated', 401, 401)
  UNAUTHORIZED = ('unauthorized', 403, 401)
  INVALID_REGISTRY = ('invalidregistry', 404, 404)
  DOES_NOT_EXIST = ('doesnotexist', 404, 404)
  INVALID_REQUEST = ('invalidrequest', 400, 400)
  APP_REPOSITORY = ('apprepository', 405, 405)


def _get_expected_code(expected_failure, version, success_status_code):
  """ Returns the HTTP status code for the expected failure under the specified protocol version
      (1 or 2). If none, returns the success status code. """
  if not expected_failure:
    return success_status_code

  return expected_failure[version]

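# For illustration: since the failure tuples are (description, v1_code, v2_code), the protocol
# version indexes directly into the tuple. A sketch of the lookup:
#
#   _get_expected_code(FailureCodes.UNAUTHORIZED, 1, 200)  # == 403 (V1 returns Forbidden)
#   _get_expected_code(FailureCodes.UNAUTHORIZED, 2, 200)  # == 401 (V2 auth returns Unauthorized)
#   _get_expected_code(None, 2, 200)                       # == 200 (no failure expected)
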
def _get_repo_name(namespace, name):
  if namespace == '':
    return name

  return '%s/%s' % (namespace, name)

def _get_full_contents(image_data, additional_fields=False):
  if 'chunks' in image_data:
    # Data is just for chunking; no need for a real TAR.
    return image_data['contents']

  layer_data = StringIO()

  def add_file(name, contents):
    tar_file_info = tarfile.TarInfo(name=name)
    tar_file_info.type = tarfile.REGTYPE
    tar_file_info.size = len(contents)
    tar_file_info.mtime = 1

    tar_file = tarfile.open(fileobj=layer_data, mode='w|gz')
    tar_file.addfile(tar_file_info, StringIO(contents))
    tar_file.close()

  add_file('contents', image_data['contents'])
  if additional_fields:
    add_file('anotherfile', str(uuid.uuid4()))

  layer_bytes = layer_data.getvalue()
  layer_data.close()

  return layer_bytes

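# A minimal sketch of how the generated layer can be read back (this mirrors assertContents
# below; the variable names here are illustrative only):
#
#   layer_bytes = _get_full_contents({'id': 'someid', 'contents': 'somecontent'})
#   tar = tarfile.open(fileobj=StringIO(layer_bytes))
#   assert tar.extractfile('contents').read() == 'somecontent'
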
def get_new_database_uri():
  # If a clean copy of the database has not yet been created, create one now.
  global _CLEAN_DATABASE_PATH
  if not _CLEAN_DATABASE_PATH:
    wipe_database()
    initialize_database()
    populate_database()
    close_db_filter(None)

    # Save the path of the clean database.
    _CLEAN_DATABASE_PATH = app.config['TEST_DB_FILE'].name

  # Create a new temp file to be used as the actual backing database for the test.
  # Note that we have to close() the file to ensure we can copy to it via shutil.
  local_db_file = NamedTemporaryFile(delete=True)
  local_db_file.close()

  # Copy the clean database to the path.
  shutil.copy2(_CLEAN_DATABASE_PATH, local_db_file.name)
  return 'sqlite:///{0}'.format(local_db_file.name)

class RegistryTestCaseMixin(LiveServerTestCase):
  def create_app(self):
    if os.environ.get('DEBUG') == 'true':
      app.config['DEBUG'] = True

    app.config['TESTING'] = True
    app.config['LIVESERVER_PORT'] = 0  # LiveServerTestCase will choose the port for us.
    app.config['LIVESERVER_TIMEOUT'] = 15
    app.config['DB_URI'] = get_new_database_uri()
    return app

  def setUp(self):
    self.clearSession()

    # Tell the remote running app to reload the database and app. By default, the app forks from
    # the current context and has already loaded the DB config with the *original* DB URL. We call
    # the remote reload method to force it to pick up the changes to DB_URI set in the create_app
    # method.
    self.conduct('POST', '/__test/reloadapp')

  def clearSession(self):
    self.session = requests.Session()
    self.signature = None
    self.docker_token = 'true'
    self.jwt = None

    # Load the CSRF token.
    self.csrf_token = ''
    self.csrf_token = self.conduct('GET', '/__test/csrf').text

  def do_tag(self, namespace, repository, tag, image_id, expected_code=200, auth='sig'):
    repo_name = _get_repo_name(namespace, repository)
    self.conduct('PUT', '/v1/repositories/%s/tags/%s' % (repo_name, tag),
                 data='"%s"' % image_id, expected_code=expected_code, auth=auth)

  def conduct_api_login(self, username, password):
    self.conduct('POST', '/api/v1/signin',
                 data=json.dumps(dict(username=username, password=password)),
                 headers={'Content-Type': 'application/json'})

  def change_repo_visibility(self, namespace, repository, visibility):
    repo_name = _get_repo_name(namespace, repository)
    self.conduct('POST', '/api/v1/repository/%s/changevisibility' % repo_name,
                 data=json.dumps(dict(visibility=visibility)),
                 headers={'Content-Type': 'application/json'})

  def assertContents(self, image_data, response):
    if 'chunks' in image_data:
      return

    tar = tarfile.open(fileobj=StringIO(response.content))
    self.assertEquals(tar.extractfile('contents').read(), image_data['contents'])

class BaseRegistryMixin(object):
  def conduct(self, method, url, headers=None, data=None, auth=None, params=None, expected_code=200,
              json_data=None, user_agent=None):
    params = params or {}
    params['_csrf_token'] = self.csrf_token

    headers = headers or {}
    auth_tuple = None

    if user_agent is not None:
      headers['User-Agent'] = user_agent
    else:
      headers['User-Agent'] = 'docker/1.9.1'

    if self.docker_token:
      headers['X-Docker-Token'] = self.docker_token

    if auth == 'sig':
      if self.signature:
        headers['Authorization'] = 'token ' + self.signature
    elif auth == 'jwt':
      if self.jwt:
        headers['Authorization'] = 'Bearer ' + self.jwt
    elif auth:
      auth_tuple = auth

    if json_data is not None:
      data = json.dumps(json_data)
      headers['Content-Type'] = 'application/json'

    response = self.session.request(method, self.get_server_url() + url, headers=headers, data=data,
                                    auth=auth_tuple, params=params)
    if expected_code is None:
      return response

    if response.status_code != expected_code:
      print response.text

    if 'www-authenticate' in response.headers:
      self.signature = response.headers['www-authenticate']

    if 'X-Docker-Token' in response.headers:
      self.docker_token = response.headers['X-Docker-Token']

    self.assertEquals(response.status_code, expected_code)
    return response

  def _get_default_images(self):
    return [{'id': 'someid', 'contents': 'somecontent'}]

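# For illustration, typical `conduct` calls made by the mixins below (a sketch; both forms
# appear throughout this file):
#
#   self.conduct('GET', '/v1/_ping')                        # expect 200 (the default)
#   self.conduct('POST', '/v2/%s/blobs/uploads/' % repo_name,
#                expected_code=202, auth='jwt')             # JWT bearer auth
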
class V1RegistryMixin(BaseRegistryMixin):
  def v1_ping(self):
    self.conduct('GET', '/v1/_ping')

class V1RegistryPushMixin(V1RegistryMixin):
  push_version = 'v1'

  def do_push(self, namespace, repository, username, password, images=None, expect_failure=None,
              munge_shas=[], tag_names=None, head_check=True):
    images = images or self._get_default_images()
    auth = (username, password)
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v1_ping()

    # PUT /v1/repositories/{namespace}/{repository}/
    expected_code = _get_expected_code(expect_failure, 1, 201)
    self.conduct('PUT', '/v1/repositories/%s/' % repo_name,
                 data=json.dumps(images), auth=auth,
                 expected_code=expected_code)

    if expected_code != 201:
      return

    for image_data in images:
      image_id = image_data['id']

      # PUT /v1/images/{imageID}/json
      image_json_data = {'id': image_id}
      if 'size' in image_data:
        image_json_data['Size'] = image_data['size']

      if 'parent' in image_data:
        image_json_data['parent'] = image_data['parent']

      self.conduct('PUT', '/v1/images/%s/json' % image_id,
                   data=json.dumps(image_json_data), auth='sig')

      # PUT /v1/images/{imageID}/layer
      layer_bytes = _get_full_contents(image_data)
      self.conduct('PUT', '/v1/images/%s/layer' % image_id,
                   data=StringIO(layer_bytes), auth='sig')

      # PUT /v1/images/{imageID}/checksum
      checksum = compute_simple(StringIO(layer_bytes), json.dumps(image_json_data))
      self.conduct('PUT', '/v1/images/%s/checksum' % image_id,
                   headers={'X-Docker-Checksum-Payload': checksum},
                   auth='sig')

    # PUT /v1/repositories/{namespace}/{repository}/tags/{tag}
    tag_names = tag_names or ['latest']
    for tag_name in tag_names:
      self.do_tag(namespace, repository, tag_name, images[-1]['id'])

    # PUT /v1/repositories/{namespace}/{repository}/images
    self.conduct('PUT', '/v1/repositories/%s/images' % repo_name,
                 expected_code=204,
                 auth='sig')

class V1RegistryPullMixin(V1RegistryMixin):
  pull_version = 'v1'

  def do_pull(self, namespace, repository, username=None, password='password', expect_failure=None,
              images=None, munge_shas=[]):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    auth = None
    if username:
      auth = (username, password)

    # Ping!
    self.v1_ping()

    prefix = '/v1/repositories/%s/' % repo_name

    # GET /v1/repositories/{namespace}/{repository}/images
    expected_code = _get_expected_code(expect_failure, 1, 200)
    self.conduct('GET', prefix + 'images', auth=auth, expected_code=expected_code)
    if expected_code != 200:
      return

    # GET /v1/repositories/{namespace}/{repository}/tags
    tags_result = json.loads(self.conduct('GET', prefix + 'tags', auth='sig').text)
    self.assertEquals(1, len(tags_result.values()))

    tag_image_id = tags_result['latest']
    if not munge_shas:
      # Ensure we have a matching image ID.
      known_ids = [item['id'] for item in images]
      self.assertTrue(tag_image_id in known_ids)

    # Retrieve the ancestry of the tag image.
    image_prefix = '/v1/images/%s/' % tag_image_id
    ancestors = self.conduct('GET', image_prefix + 'ancestry', auth='sig').json()
    for index, image_id in enumerate(ancestors):
      # /v1/images/{imageID}/{ancestry, json, layer}
      image_prefix = '/v1/images/%s/' % image_id
      self.conduct('GET', image_prefix + 'ancestry', auth='sig')

      response = self.conduct('GET', image_prefix + 'json', auth='sig')
      self.assertEquals(image_id, response.json()['id'])

      # Ensure we can HEAD the image layer.
      self.conduct('HEAD', image_prefix + 'layer', auth='sig')

      # And retrieve the layer data.
      response = self.conduct('GET', image_prefix + 'layer', auth='sig')

      # Ensure we can parse the layer bytes and that they contain the contents.
      self.assertContents(images[len(images) - index - 1], response)

class V2RegistryMixin(BaseRegistryMixin):
  MANIFEST_SCHEMA = {
    'type': 'object',
    'properties': {
      'name': {
        'type': 'string',
      },
      'tag': {
        'type': 'string',
      },
      'signatures': {
        'type': 'array',
        'itemType': {
          'type': 'object',
        },
      },
      'fsLayers': {
        'type': 'array',
        'itemType': {
          'type': 'object',
          'properties': {
            'blobSum': {
              'type': 'string',
            },
          },
          'required': 'blobSum',
        },
      },
      'history': {
        'type': 'array',
        'itemType': {
          'type': 'object',
          'properties': {
            'v1Compatibility': {
              'type': 'object',
            },
          },
          'required': ['v1Compatibility'],
        },
      },
    },
    'required': ['name', 'tag', 'fsLayers', 'history', 'signatures'],
  }

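  # For illustration, the shape of a schema1 manifest this schema accepts (a sketch; the field
  # values are placeholders, not real digests or signatures, and history entries carry
  # v1Compatibility as serialized JSON, as consumed by do_pull below):
  #
  #   {
  #     'name': 'devtable/newrepo',
  #     'tag': 'latest',
  #     'fsLayers': [{'blobSum': 'sha256:...'}],
  #     'history': [{'v1Compatibility': '{"id": "someid"}'}],
  #     'signatures': [{...}],
  #   }
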
  def v2_ping(self):
    response = self.conduct('GET', '/v2/', expected_code=200 if self.jwt else 401, auth='jwt')
    self.assertEquals(response.headers['Docker-Distribution-API-Version'], 'registry/2.0')

  def do_auth(self, username, password, namespace, repository, expected_code=200, scopes=[]):
    auth = None
    if username and password:
      auth = (username, password)

    repo_name = _get_repo_name(namespace, repository)

    params = {
      'account': username,
      'service': app.config['SERVER_HOSTNAME'],
    }

    if scopes:
      params['scope'] = 'repository:%s:%s' % (repo_name, ','.join(scopes))

    response = self.conduct('GET', '/v2/auth', params=params, auth=auth,
                            expected_code=expected_code)

    if expected_code == 200:
      response_json = json.loads(response.text)
      self.assertIsNotNone(response_json.get('token'))
      self.jwt = response_json['token']

    return response

class V2RegistryPushMixin(V2RegistryMixin):
  push_version = 'v2'

  def do_push(self, namespace, repository, username, password, images=None, tag_names=None,
              cancel=False, invalid=False, expect_failure=None, scopes=None, munge_shas=[],
              head_check=True):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v2_ping()

    # Auth. If the expected failure is an invalid registry, in V2 we'll receive that error from
    # the auth endpoint first, rather than just the V2 requests below.
    expected_auth_code = 200
    if expect_failure == FailureCodes.INVALID_REGISTRY:
      expected_auth_code = 400
    elif expect_failure == FailureCodes.APP_REPOSITORY:
      expected_auth_code = 405

    self.do_auth(username, password, namespace, repository, scopes=scopes or ['push', 'pull'],
                 expected_code=expected_auth_code)
    if expected_auth_code != 200:
      return

    expected_code = _get_expected_code(expect_failure, 2, 404)
    tag_names = tag_names or ['latest']

    manifests = {}
    full_contents = {}
    for image_data in reversed(images):
      image_id = image_data['id']
      full_contents[image_id] = _get_full_contents(image_data,
                                                   additional_fields=image_id in munge_shas)

    # Build a fake manifest.
    for tag_name in tag_names:
      builder = DockerSchema1ManifestBuilder(namespace, repository, tag_name)

      for image_data in reversed(images):
        checksum = 'sha256:' + hashlib.sha256(full_contents[image_data['id']]).hexdigest()
        if invalid:
          checksum = 'sha256:' + hashlib.sha256('foobarbaz').hexdigest()

        builder.add_layer(checksum, json.dumps(image_data))

      # Build the manifest.
      manifests[tag_name] = builder.build(_JWK)

    # Push the image's layers.
    checksums = {}
    for image_data in reversed(images):
      image_id = image_data['id']
      layer_bytes = full_contents[image_data['id']]
      chunks = image_data.get('chunks')

      # Layer data should not yet exist.
      checksum = 'sha256:' + hashlib.sha256(layer_bytes).hexdigest()

      if head_check:
        self.conduct('HEAD', '/v2/%s/blobs/%s' % (repo_name, checksum),
                     expected_code=404, auth='jwt')

      # If we expected a non-404 status code, then the HEAD operation has failed and we cannot
      # continue performing the push.
      if expected_code != 404:
        return

      # Start a new upload of the layer data.
      response = self.conduct('POST', '/v2/%s/blobs/uploads/' % repo_name,
                              expected_code=202, auth='jwt')

      upload_uuid = response.headers['Docker-Upload-UUID']

      server_hostname = get_app_url()
      new_upload_location = response.headers['Location']
      self.assertTrue(new_upload_location.startswith(server_hostname))

      # We need to make this relative just for the tests because the live server test
      # case modifies the port.
      location = response.headers['Location'][len(server_hostname):]

      # PATCH the image data into the layer.
      if chunks is None:
        self.conduct('PATCH', location, data=layer_bytes, expected_code=204, auth='jwt')
      else:
        for chunk in chunks:
          if len(chunk) == 3:
            (start_byte, end_byte, expected_code) = chunk
          else:
            (start_byte, end_byte) = chunk
            expected_code = 204

          contents_chunk = layer_bytes[start_byte:end_byte]
          self.conduct('PATCH', location, data=contents_chunk, expected_code=expected_code,
                       auth='jwt', headers={'Range': 'bytes=%s-%s' % (start_byte, end_byte)})

          if expected_code != 204:
            return

          # Retrieve the upload status at each point.
          status_url = '/v2/%s/blobs/uploads/%s' % (repo_name, upload_uuid)
          response = self.conduct('GET', status_url, expected_code=204, auth='jwt',
                                  headers=dict(host=self.get_server_url()))
          self.assertEquals(response.headers['Docker-Upload-UUID'], upload_uuid)
          self.assertEquals(response.headers['Range'], "bytes=0-%s" % end_byte)

      if cancel:
        self.conduct('DELETE', location, params=dict(digest=checksum), expected_code=204,
                     auth='jwt')

        # Ensure the upload was canceled.
        status_url = '/v2/%s/blobs/uploads/%s' % (repo_name, upload_uuid)
        self.conduct('GET', status_url, expected_code=404, auth='jwt',
                     headers=dict(host=self.get_server_url()))
        return

      # Finish the layer upload with a PUT.
      response = self.conduct('PUT', location, params=dict(digest=checksum), expected_code=201,
                              auth='jwt')

      self.assertEquals(response.headers['Docker-Content-Digest'], checksum)
      checksums[image_id] = checksum

      # Ensure the layer exists now.
      response = self.conduct('HEAD', '/v2/%s/blobs/%s' % (repo_name, checksum),
                              expected_code=200, auth='jwt')
      self.assertEquals(response.headers['Docker-Content-Digest'], checksum)
      self.assertEquals(response.headers['Content-Length'], str(len(layer_bytes)))

    for tag_name in tag_names:
      manifest = manifests[tag_name]

      # Write the manifest. If we expect it to be invalid, we expect a 404 code. Otherwise, we
      # expect a 202 response for success.
      put_code = 404 if invalid else 202
      self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                   data=manifest.bytes, expected_code=put_code,
                   headers={'Content-Type': 'application/json'}, auth='jwt')

    return checksums, manifests

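# For illustration, the 'chunks' form of image_data accepted by V2RegistryPushMixin.do_push
# above (a sketch; each chunk is (start_byte, end_byte) or (start_byte, end_byte, expected_code),
# and chunked contents are uploaded as-is rather than wrapped in a TAR):
#
#   images = [{'id': 'chunkedid', 'contents': 'x' * 200, 'chunks': [(0, 100), (100, 200)]}]
#   self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
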
class V2RegistryPullMixin(V2RegistryMixin):
  pull_version = 'v2'

  def do_pull(self, namespace, repository, username=None, password='password', expect_failure=None,
              manifest_id=None, images=None, munge_shas=[]):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v2_ping()

    # Auth. If the failure expected is unauthenticated, then the auth endpoint will 401 before
    # we reach any of the registry operations.
    expected_auth_code = 200
    if expect_failure == FailureCodes.UNAUTHENTICATED:
      expected_auth_code = 401
    elif expect_failure == FailureCodes.APP_REPOSITORY:
      expected_auth_code = 405

    self.do_auth(username, password, namespace, repository, scopes=['pull'],
                 expected_code=expected_auth_code)
    if expected_auth_code != 200:
      return

    # Retrieve the manifest for the tag or digest.
    manifest_id = manifest_id or 'latest'

    expected_code = _get_expected_code(expect_failure, 2, 200)
    response = self.conduct('GET', '/v2/%s/manifests/%s' % (repo_name, manifest_id),
                            auth='jwt', expected_code=expected_code)
    if expected_code != 200:
      return

    manifest_data = json.loads(response.text)

    # Ensure the manifest returned by us is valid.
    validate_schema(manifest_data, V2RegistryMixin.MANIFEST_SCHEMA)

    # Verify the layers.
    blobs = {}
    for index, layer in enumerate(reversed(manifest_data['fsLayers'])):
      blob_id = layer['blobSum']
      result = self.conduct('GET', '/v2/%s/blobs/%s' % (repo_name, blob_id),
                            expected_code=200, auth='jwt')

      blobs[blob_id] = result.content
      self.assertContents(images[index], result)

    # Verify the V1 metadata is present for each expected image.
    found_v1_layers = set()
    history = manifest_data['history']
    for entry in history:
      v1_history = json.loads(entry['v1Compatibility'])
      found_v1_layers.add(v1_history['id'])

    for image in images:
      self.assertIn(image['id'], found_v1_layers)

    return blobs, manifest_data

class V1RegistryLoginMixin(object):
  def do_login(self, username, password, scope, expect_success=True):
    data = {
      'username': username,
      'password': password,
    }

    # The V1 login flow goes through the user-creation endpoint: for an existing user with valid
    # credentials, the registry responds 400 with the message asserted below.
    response = self.conduct('POST', '/v1/users/', json_data=data, expected_code=400)
    if expect_success:
      self.assertEquals(response.text, '"Username or email already exists"')
    else:
      self.assertNotEquals(response.text, '"Username or email already exists"')

class V2RegistryLoginMixin(object):
  def do_login(self, username, password, scope, expect_success=True, expected_failure_code=401):
    params = {
      'account': username,
      'scope': scope,
      'service': app.config['SERVER_HOSTNAME'],
    }

    if expect_success:
      expected_code = 200
    else:
      expected_code = expected_failure_code

    auth = None
    if username and password:
      auth = (username, password)

    response = self.conduct('GET', '/v2/auth', params=params, auth=auth,
                            expected_code=expected_code)
    return response

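# For illustration, the scope string format sent to /v2/auth (a sketch, following the
# 'repository:%s:%s' construction in do_auth above):
#
#   'repository:devtable/newrepo:push,pull'
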
class RegistryTestsMixin(object):
  def test_previously_bad_repo_name(self):
    # Push a new repository under a name (containing a dot) that was previously invalid.
    self.do_push('public', 'foo.bar', 'public', 'password')

    # Pull the repository to verify.
    self.do_pull('public', 'foo.bar', 'public', 'password')

  def test_application_repo(self):
    # Create an application repository via the API.
    self.conduct_api_login('devtable', 'password')
    data = {
      'repository': 'someapprepo',
      'visibility': 'private',
      'repo_kind': 'application',
      'description': 'test app repo',
    }
    self.conduct('POST', '/api/v1/repository', json_data=data, expected_code=201)

    # Try to push to the repo, which should fail with a 405.
    self.do_push('devtable', 'someapprepo', 'devtable', 'password',
                 expect_failure=FailureCodes.APP_REPOSITORY)

    # Try to pull from the repo, which should fail with a 405.
    self.do_pull('devtable', 'someapprepo', 'devtable', 'password',
                 expect_failure=FailureCodes.APP_REPOSITORY)

  def test_middle_layer_different_sha(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'rootid',
        'contents': 'The root image',
      },
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
    ]

    # Push a new repository with two layers.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository to verify.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push again, munging the middle layer to ensure it gets assigned a different ID.
    images = [
      {
        'id': 'rootid',
        'contents': 'The root image',
      },
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    munged_shas = ['baseid']

    # Push the repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository to verify.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

    # Sleep to ensure we don't hit weird tag overwrite issues.
    time.sleep(1)

    # Delete the baseid image.
    self.conduct('POST', '/__test/deleteimage/baseid')

    images = [
      {
        'id': 'rootid',
        'contents': 'The root image',
      },
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    # Push the repository again, this time munging the root layer. Since the baseid does not exist
    # anymore (since we deleted it above), this will have to look in the layer metadata itself
    # to work (which it didn't before).
    munged_shas = ['rootid']
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository to verify.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

  def test_push_same_ids_different_base_sha(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    munged_shas = ['baseid']

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

  def test_push_same_ids_different_sha(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    munged_shas = ['latestid']

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

  def test_push_same_ids_different_sha_both_layers(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    munged_shas = ['baseid', 'latestid']

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

  def test_push_same_ids_different_sha_with_unicode(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'The latest image',
        'unicode': u'the Pawe\xc5\x82 Kami\xc5\x84ski image',
        'parent': 'baseid',
      },
    ]

    munged_shas = ['latestid', 'baseid']

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas,
                 head_check=False)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=munged_shas)

  def test_push_pull_logging(self):
    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password')

    # Retrieve the logs and ensure the push was added.
    self.conduct_api_login('public', 'password')
    result = self.conduct('GET', '/api/v1/repository/public/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(1, len(logs))
    self.assertEquals('push_repo', logs[0]['kind'])
    self.assertEquals('public', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('public', logs[0]['performer']['name'])

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password')

    # Retrieve the logs and ensure the pull was added.
    result = self.conduct('GET', '/api/v1/repository/public/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('public', logs[0]['performer']['name'])

  def test_push_pull_logging_byrobot(self):
    # Look up the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/ownerbot')
    robot_token = json.loads(resp.text)['token']

    # Push a new repository.
    self.do_push('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Retrieve the logs and ensure the push was added.
    result = self.conduct('GET', '/api/v1/repository/buynlarge/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(1, len(logs))
    self.assertEquals('push_repo', logs[0]['kind'])
    self.assertEquals('buynlarge', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('buynlarge+ownerbot', logs[0]['performer']['name'])

    # Pull the repository.
    self.do_pull('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Retrieve the logs and ensure the pull was added.
    result = self.conduct('GET', '/api/v1/repository/buynlarge/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('buynlarge', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('buynlarge+ownerbot', logs[0]['performer']['name'])

  def test_push_pull_logging_bytoken(self):
    # Push the repository.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Add a token.
    self.conduct('POST', '/__test/addtoken')

    # Pull the repository.
    self.do_pull('devtable', 'newrepo', '$token', 'somecooltokencode')

    # Retrieve the logs and ensure the pull was added.
    self.conduct_api_login('devtable', 'password')
    result = self.conduct('GET', '/api/v1/repository/devtable/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('devtable', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('my-new-token', logs[0]['metadata']['token'])

  def test_push_pull_logging_byoauth(self):
    # Push the repository.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Pull the repository.
    self.do_pull('devtable', 'newrepo', '$oauthtoken', 'test')

    # Retrieve the logs and ensure the pull was added.
    self.conduct_api_login('devtable', 'password')
    result = self.conduct('GET', '/api/v1/repository/devtable/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('devtable', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('devtable', logs[0]['performer']['name'])
    self.assertEquals(1, logs[0]['metadata']['oauth_token_id'])

  def test_push_pull_logging_byclitoken(self):
    # Push the repository.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Pull the repository.
    self.do_pull('devtable', 'newrepo', '$app', 'test')

    # Retrieve the logs and ensure the pull was added.
    self.conduct_api_login('devtable', 'password')
    result = self.conduct('GET', '/api/v1/repository/devtable/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('devtable', logs[0]['metadata']['namespace'])
    self.assertEquals('newrepo', logs[0]['metadata']['repo'])
    self.assertEquals('devtable', logs[0]['performer']['name'])
    self.assertTrue('app_specific_token' in logs[0]['metadata'])

  def test_pull_publicrepo_anonymous(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    self.do_push('public', 'newrepo', 'public', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo anonymously, which should fail (since it is
    # private).
    self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHORIZED)
    self.do_pull('public', 'newrepo', 'devtable', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository anonymously, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo')

  def test_pull_publicrepo_devtable(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    self.do_push('public', 'newrepo', 'public', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo as devtable, which should fail as it belongs
    # to public.
    self.do_pull('public', 'newrepo', 'devtable', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository as devtable, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_pull_private_repo(self):
    # Add a new repository under the devtable user, so we have a real repository to pull.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo as public, which should fail as it belongs
    # to devtable.
    self.do_pull('devtable', 'newrepo', 'public', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Pull the repository as devtable, which should succeed because the repository is owned by
    # devtable.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')

  def test_public_no_anonymous_access_with_auth(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as devtable, which should fail as it
      # belongs to public.
      self.do_pull('public', 'newrepo', 'devtable', 'password',
                   expect_failure=FailureCodes.UNAUTHORIZED)

      # Make the repository public.
      self.conduct_api_login('public', 'password')
      self.change_repo_visibility('public', 'newrepo', 'public')
      self.clearSession()

      # Pull the repository as devtable, which should succeed because the repository is public.
      self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_private_no_anonymous_access(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as devtable, which should fail as it
      # belongs to public.
      self.do_pull('public', 'newrepo', 'devtable', 'password',
                   expect_failure=FailureCodes.UNAUTHORIZED)

      # Pull the repository as public, which should succeed because the repository is owned by
      # public.
      self.do_pull('public', 'newrepo', 'public', 'password')

  def test_public_no_anonymous_access_no_auth(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as anonymous, which should fail as it
      # is private.
      self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHENTICATED)

      # Make the repository public.
      self.conduct_api_login('public', 'password')
      self.change_repo_visibility('public', 'newrepo', 'public')
      self.clearSession()

      # Try again to pull the (currently public) repo as anonymous, which should fail as
      # anonymous access is disabled.
      self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHENTICATED)

      # Pull the repository as public, which should succeed because the repository is owned by
      # public.
      self.do_pull('public', 'newrepo', 'public', 'password')

      # Pull the repository as devtable, which should succeed because the repository is public.
      self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_create_repo_creator_user(self):
    self.do_push('buynlarge', 'newrepo', 'creator', 'password')

    # Pull the repository as creator, as they created it.
    self.do_pull('buynlarge', 'newrepo', 'creator', 'password')

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

    # Attempt to pull the repository as reader, which should fail.
    self.do_pull('buynlarge', 'newrepo', 'reader', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

  def test_create_repo_robot_owner(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/ownerbot')
    robot_token = json.loads(resp.text)['token']

    self.do_push('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

  def test_create_repo_robot_creator(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/creatorbot')
    robot_token = json.loads(resp.text)['token']

    self.do_push('buynlarge', 'newrepo', 'buynlarge+creatorbot', robot_token)

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

  def test_library_repo(self):
    self.do_push('', 'newrepo', 'devtable', 'password')
    self.do_pull('', 'newrepo', 'devtable', 'password')
    self.do_pull('library', 'newrepo', 'devtable', 'password')

  def test_library_disabled(self):
    with TestFeature(self, 'LIBRARY_SUPPORT', False):
      self.do_push('library', 'newrepo', 'devtable', 'password')
      self.do_pull('library', 'newrepo', 'devtable', 'password')

  def test_image_replication(self):
    with TestFeature(self, 'STORAGE_REPLICATION', True):
      images = [
        {
          'id': 'baseid',
          'contents': 'The base image',
        },
        {
          'id': 'latestid',
          'contents': 'The latest image',
          'unicode': u'the Pawe\xc5\x82 Kami\xc5\x84ski image',
          'parent': 'baseid',
        },
      ]

      # Push a new repository.
      self.do_push('public', 'newrepo', 'public', 'password', images=images)

      # Ensure that we have a storage replication entry for each image pushed.
      self.conduct('GET', '/__test/storagerepentry/baseid', expected_code=200)
      self.conduct('GET', '/__test/storagerepentry/latestid', expected_code=200)


class V1RegistryTests(V1RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
                      RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 registry. """
  def test_search(self):
    # Public
    resp = self.conduct('GET', '/v1/search', params=dict(q='public'))
    data = resp.json()
    self.assertEquals(1, data['num_results'])
    self.assertEquals(1, len(data['results']))

    # Simple (not logged in, no results)
    resp = self.conduct('GET', '/v1/search', params=dict(q='simple'))
    data = resp.json()
    self.assertEquals(0, data['num_results'])
    self.assertEquals(0, len(data['results']))

    # Simple (logged in)
    resp = self.conduct('GET', '/v1/search', params=dict(q='simple'), auth=('devtable', 'password'))
    data = resp.json()
    self.assertEquals(1, data['num_results'])
    self.assertEquals(1, len(data['results']))

  def test_search_pagination(self):
    # Check for the first page.
    resp = self.conduct('GET', '/v1/search', params=dict(q='s', n='1'),
                        auth=('devtable', 'password'))
    data = resp.json()
    self.assertEquals('s', data['query'])

    self.assertEquals(1, data['num_results'])
    self.assertEquals(1, len(data['results']))

    self.assertEquals(1, data['page'])
    self.assertTrue(data['num_pages'] > 1)

    # Check for the followup page.
    resp = self.conduct('GET', '/v1/search', params=dict(q='s', n='1', page=2),
                        auth=('devtable', 'password'))
    data = resp.json()
    self.assertEquals('s', data['query'])

    self.assertEquals(1, data['num_results'])
    self.assertEquals(1, len(data['results']))

    self.assertEquals(2, data['page'])

  def test_users(self):
    # Not logged in, should 404.
    self.conduct('GET', '/v1/users/', expected_code=404)

    # Try some logins.
    self.conduct('POST', '/v1/users/', json_data={'username': 'freshuser'}, expected_code=400)
    resp = self.conduct('POST', '/v1/users/',
                        json_data={'username': 'devtable', 'password': 'password'},
                        expected_code=400)

    # Because Docker
    self.assertEquals('"Username or email already exists"', resp.text)

  def test_push_reponame_with_slashes(self):
    # Attempt to add a repository name with slashes. This should fail as we do not support it.
    images = [{
      'id': 'onlyimagehere',
      'contents': 'somecontents',
    }]

    self.do_push('public', 'newrepo/somesubrepo', 'public', 'password', images,
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_push_unicode_metadata(self):
    self.conduct_api_login('devtable', 'password')

    images = [{
      'id': 'onlyimagehere',
      'comment': 'Pawe\xc5\x82 Kami\xc5\x84ski <pawel.kaminski@codewise.com>'.decode('utf-8'),
      'contents': 'somecontents',
    }]

    self.do_push('devtable', 'unicodetest', 'devtable', 'password', images)
    self.do_pull('devtable', 'unicodetest', 'devtable', 'password', images=images)

  def test_tag_validation(self):
    image_id = 'onlyimagehere'
    images = [{
      'id': image_id,
      'contents': 'somecontents',
    }]

    self.do_push('public', 'newrepo', 'public', 'password', images)
    self.do_tag('public', 'newrepo', '1', image_id)
    self.do_tag('public', 'newrepo', 'x' * 128, image_id)
    self.do_tag('public', 'newrepo', '', image_id, expected_code=404)
    self.do_tag('public', 'newrepo', 'x' * 129, image_id, expected_code=400)
    self.do_tag('public', 'newrepo', '.fail', image_id, expected_code=400)
    self.do_tag('public', 'newrepo', '-fail', image_id, expected_code=400)


class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
                      RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V2 registry. """
  def test_proper_auth_response(self):
    response = self.conduct('GET', '/v2/devtable/doesnotexist/tags/list', auth='jwt',
                            expected_code=401)
    self.assertIn('WWW-Authenticate', response.headers)
    self.assertIn('scope="repository:devtable/doesnotexist:pull"',
                  response.headers['WWW-Authenticate'])

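  @staticmethod
  def _expected_challenge_scope(namespace, repository):
    # Illustrative sketch, not part of the original suite: the scope fragment
    # asserted on the WWW-Authenticate challenge above follows the Docker
    # token-auth convention of 'repository:<namespace>/<name>:<action>'.
    return 'scope="repository:%s/%s:pull"' % (namespace, repository)
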
  def test_parent_misordered(self):
    images = [
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images,
                 expect_failure=FailureCodes.INVALID_REQUEST)

  def test_invalid_parent(self):
    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'unknownparent',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images,
                 expect_failure=FailureCodes.INVALID_REQUEST)

  def test_tags_pagination(self):
    # Push 10 tags.
    tag_names = ['tag-%s' % i for i in range(0, 10)]
    self.do_push('public', 'new-repo', 'public', 'password', tag_names=tag_names)

    encountered = set()

    # Ensure tags list is properly paginated.
    relative_url = '/v2/public/new-repo/tags/list?n=5'
    for i in range(0, 3):
      result = self.conduct('GET', relative_url, auth='jwt')
      result_json = result.json()
      encountered.update(set(result_json['tags']))

      if 'Link' not in result.headers:
        break

      # Check the next page of results.
      link_header = result.headers['Link']
      self.assertTrue(link_header.startswith('<'))
      self.assertTrue(link_header.endswith('>; rel="next"'))

      link = link_header[1:]
      self.assertTrue(link.endswith('; rel="next"'))

      url, _ = link.split(';')
      relative_url = url[url.find('/v2/'):-1]

    encountered.update(set(result_json['tags']))

    # Ensure we found all the results.
    self.assertEquals(encountered, set(tag_names))

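  @staticmethod
  def _next_page_url(link_header):
    # A minimal sketch (assumption: this helper is not part of the original
    # suite) of the Link-header handling done inline above: the V2 tags
    # endpoint returns an RFC 5988 header shaped like
    # '<http://host/v2/...?n=5&last=x>; rel="next"', from which the loop
    # extracts a relative URL for the next request.
    url, _ = link_header[1:].split(';')
    return url[url.find('/v2/'):-1]
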
  def test_numeric_tag(self):
    # Push a new repository.
    self.do_push('public', 'new-repo', 'public', 'password', tag_names=['1234'])

    # Pull the repository.
    self.do_pull('public', 'new-repo', 'public', 'password', manifest_id='1234')

  def test_label_invalid_manifest(self):
    images = [{
      'id': 'someid',
      'config': {'Labels': None},
      'contents': 'somecontent'
    }]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')

  def test_labels(self):
    # Push a new repo with the latest tag.
    images = [{
      'id': 'someid',
      'config': {'Labels': {'foo': 'bar', 'baz': 'meh', 'theoretically-invalid--label': 'foo'}},
      'contents': 'somecontent'
    }]

    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    digest = manifests['latest'].digest

    self.conduct_api_login('devtable', 'password')
    labels = self.conduct('GET', '/api/v1/repository/devtable/newrepo/manifest/' + digest + '/labels').json()
    self.assertEquals(3, len(labels['labels']))

    self.assertEquals('manifest', labels['labels'][0]['source_type'])
    self.assertEquals('manifest', labels['labels'][1]['source_type'])
    self.assertEquals('manifest', labels['labels'][2]['source_type'])

    self.assertEquals('text/plain', labels['labels'][0]['media_type'])
    self.assertEquals('text/plain', labels['labels'][1]['media_type'])
    self.assertEquals('text/plain', labels['labels'][2]['media_type'])

  def test_json_labels(self):
    # Push a new repo with the latest tag.
    images = [{
      'id': 'someid',
      'config': {'Labels': {'foo': 'bar', 'baz': '{"some": "json"}'}},
      'contents': 'somecontent'
    }]

    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    digest = manifests['latest'].digest

    self.conduct_api_login('devtable', 'password')
    labels = self.conduct('GET', '/api/v1/repository/devtable/newrepo/manifest/' + digest + '/labels').json()
    self.assertEquals(2, len(labels['labels']))

    media_types = set([label['media_type'] for label in labels['labels']])

    self.assertTrue('text/plain' in media_types)
    self.assertTrue('application/json' in media_types)

  def test_not_json_labels(self):
    # Push a new repo with the latest tag.
    images = [{
      'id': 'someid',
      'config': {'Labels': {'foo': '[hello world]', 'bar': '{wassup?!}'}},
      'contents': 'somecontent'
    }]

    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    digest = manifests['latest'].digest

    self.conduct_api_login('devtable', 'password')
    labels = self.conduct('GET', '/api/v1/repository/devtable/newrepo/manifest/' + digest + '/labels').json()
    self.assertEquals(2, len(labels['labels']))

    media_types = set([label['media_type'] for label in labels['labels']])

    self.assertTrue('text/plain' in media_types)
    self.assertFalse('application/json' in media_types)

  def test_expiration_label(self):
    # Push a new repo with the latest tag.
    images = [{
      'id': 'someid',
      'config': {'Labels': {'quay.expires-after': '1d'}},
      'contents': 'somecontent'
    }]

    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    self.conduct_api_login('devtable', 'password')
    tags = self.conduct('GET', '/api/v1/repository/devtable/newrepo/tag').json()
    tag = tags['tags'][0]

    self.assertEqual(tag['end_ts'], tag['start_ts'] + convert_to_timedelta('1d').total_seconds())

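  @staticmethod
  def _expected_expiry(start_ts, label_value):
    # Illustrative sketch (assumption: this helper is not part of the original
    # suite): the expected tag expiry checked above is simply the tag's start
    # timestamp plus the 'quay.expires-after' label parsed into seconds.
    return start_ts + convert_to_timedelta(label_value).total_seconds()
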
  def test_invalid_expiration_label(self):
    # Push a new repo with the latest tag.
    images = [{
      'id': 'someid',
      'config': {'Labels': {'quay.expires-after': 'blahblah'}},
      'contents': 'somecontent'
    }]

    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    self.conduct_api_login('devtable', 'password')
    tags = self.conduct('GET', '/api/v1/repository/devtable/newrepo/tag').json()
    tag = tags['tags'][0]

    self.assertIsNone(tag.get('end_ts'))

  def test_invalid_manifest_type(self):
    namespace = 'devtable'
    repository = 'somerepo'
    tag_name = 'sometag'

    repo_name = _get_repo_name(namespace, repository)

    self.v2_ping()
    self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])

    # Build a fake manifest.
    builder = DockerSchema1ManifestBuilder(namespace, repository, tag_name)
    builder.add_layer('sha256:' + hashlib.sha256('invalid').hexdigest(), json.dumps({'id': 'foo'}))
    manifest = builder.build(_JWK)

    self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                 data=manifest.bytes, expected_code=415,
                 headers={'Content-Type': 'application/vnd.docker.distribution.manifest.v2+json'},
                 auth='jwt')

  def test_invalid_manifest(self):
    namespace = 'devtable'
    repository = 'somerepo'
    tag_name = 'sometag'

    repo_name = _get_repo_name(namespace, repository)

    self.v2_ping()
    self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])

    self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                 data='{}', expected_code=400,
                 auth='jwt')

  def test_oci_manifest_type(self):
    namespace = 'devtable'
    repository = 'somerepo'
    tag_name = 'sometag'

    repo_name = _get_repo_name(namespace, repository)

    self.v2_ping()
    self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])

    # Build a fake manifest.
    builder = DockerSchema1ManifestBuilder(namespace, repository, tag_name)
    builder.add_layer('sha256:' + hashlib.sha256('invalid').hexdigest(), json.dumps({'id': 'foo'}))
    manifest = builder.build(_JWK)

    self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                 data=manifest.bytes, expected_code=415,
                 headers={'Content-Type': 'application/vnd.oci.image.manifest.v1+json'},
                 auth='jwt')

  def test_invalid_blob(self):
    namespace = 'devtable'
    repository = 'somerepo'
    tag_name = 'sometag'

    repo_name = _get_repo_name(namespace, repository)

    self.v2_ping()
    self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])

    # Build a fake manifest.
    builder = DockerSchema1ManifestBuilder(namespace, repository, tag_name)
    builder.add_layer('sha256:' + hashlib.sha256('invalid').hexdigest(), json.dumps({'id': 'foo'}))
    manifest = builder.build(_JWK)

    response = self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                            data=manifest.bytes, expected_code=404,
                            headers={'Content-Type': 'application/json'}, auth='jwt')
    self.assertEquals('BLOB_UNKNOWN', response.json()['errors'][0]['code'])

  def test_delete_manifest(self):
    # Push a new repo with the latest tag.
    (_, manifests) = self.do_push('devtable', 'newrepo', 'devtable', 'password')
    digest = manifests['latest'].digest

    # Ensure the pull works.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')

    # Conduct auth for the write scope.
    self.do_auth('devtable', 'password', 'devtable', 'newrepo', scopes=['push'])

    # Delete the digest.
    self.conduct('DELETE', '/v2/devtable/newrepo/manifests/' + digest, auth='jwt',
                 expected_code=202)

    # Ensure the tag no longer exists.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password',
                 expect_failure=FailureCodes.DOES_NOT_EXIST)

  def test_push_only_push_scope(self):
    images = [{
      'id': 'onlyimagehere',
      'contents': 'foobar',
    }]

    self.do_push('devtable', 'somenewrepo', 'devtable', 'password', images,
                 scopes=['push'])

  def test_push_reponame_with_slashes(self):
    # Attempt to add a repository name with slashes. This should fail as we do not support it.
    images = [{
      'id': 'onlyimagehere',
      'contents': 'somecontents',
    }]

    self.do_push('public', 'newrepo/somesubrepo', 'devtable', 'password', images,
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_invalid_push(self):
    self.do_push('devtable', 'newrepo', 'devtable', 'password', invalid=True)

  def test_cancel_push(self):
    self.do_push('devtable', 'newrepo', 'devtable', 'password', cancel=True)

  def test_with_blob_caching(self):
    # Add a repository and do a pull, to prime the cache.
    _, manifests = self.do_push('devtable', 'newrepo', 'devtable', 'password')
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')

    # Purposefully break the database so that we can check if caching works.
    self.conduct('POST', '/__test/breakdatabase')

    # Attempt to pull the blobs and ensure we get back a result. Since the database is broken,
    # this will only work if caching is working and no additional queries/connections are made.
    repo_name = 'devtable/newrepo'
    for tag_name in manifests:
      for layer in manifests[tag_name].layers:
        blob_id = str(layer.digest)
        self.conduct('GET', '/v2/%s/blobs/%s' % (repo_name, blob_id), expected_code=200, auth='jwt')

  def test_pull_by_checksum(self):
    # Add a new repository under the user, so we have a real repository to pull.
    _, manifests = self.do_push('devtable', 'newrepo', 'devtable', 'password')
    digest = manifests['latest'].digest

    # Attempt to pull by digest.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id=digest)

  def test_pull_invalid_image_tag(self):
    # Add a new repository under the user, so we have a real repository to pull.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')
    self.clearSession()

    # Attempt to pull the invalid tag.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id='invalid',
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_partial_upload_below_5mb(self):
    chunksize = 1024 * 1024 * 2
    size = chunksize * 3
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    chunk_count = int(math.ceil((len(contents) * 1.0) / chunksize))
    chunks = [(index * chunksize, (index + 1) * chunksize) for index in range(chunk_count)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs, _ = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)

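  @staticmethod
  def _chunk_ranges(length, chunksize):
    # A minimal sketch (assumption: this helper is not part of the original
    # suite) of the chunk computation used by the partial-upload tests above:
    # contents are split into (start_offset, end_offset) byte ranges of
    # chunksize each, with the final range ending past the content length.
    chunk_count = int(math.ceil((length * 1.0) / chunksize))
    return [(index * chunksize, (index + 1) * chunksize) for index in range(chunk_count)]
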
  def test_partial_upload_way_below_5mb(self):
    size = 1024
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))
    chunks = [(0, 100), (100, size)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs, _ = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)

  def test_partial_upload_resend_below_5mb(self):
    size = 150
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    chunks = [(0, 100), (10, size)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs, _ = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)

  def test_partial_upload_try_resend_with_gap(self):
    size = 150
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    chunks = [(0, 100), (101, size, 416)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Attempt to push the chunked upload, which should fail.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

  def test_multiple_layers_invalid(self):
    # Attempt to push a manifest with an image depending on an unknown base layer.
    images = [
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      }
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images,
                 expect_failure=FailureCodes.INVALID_REQUEST)

  def test_multiple_layers(self):
    # Push a manifest with multiple layers.
    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

  def test_invalid_regname(self):
    self.do_push('devtable', 'this/is/a/repo', 'devtable', 'password',
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_multiple_tags(self):
    latest_images = [
      {
        'id': 'latestid',
        'contents': 'the latest image'
      }
    ]

    foobar_images = [
      {
        'id': 'foobarid',
        'contents': 'the foobar image',
      }
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=latest_images,
                 tag_names=['latest'])
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=foobar_images,
                 tag_names=['foobar'])

    # Retrieve the tags.
    response = self.conduct('GET', '/v2/devtable/newrepo/tags/list', auth='jwt', expected_code=200)
    data = json.loads(response.text)
    self.assertEquals(data['name'], "devtable/newrepo")
    self.assertIn('latest', data['tags'])
    self.assertIn('foobar', data['tags'])

    # Retrieve the tags with pagination.
    response = self.conduct('GET', '/v2/devtable/newrepo/tags/list', auth='jwt',
                            params=dict(n=1), expected_code=200)

    data = json.loads(response.text)
    self.assertEquals(data['name'], "devtable/newrepo")
    self.assertEquals(len(data['tags']), 1)

    # Try to get tags before a repo exists.
    response = self.conduct('GET', '/v2/devtable/doesnotexist/tags/list', auth='jwt', expected_code=401)

    # Assert 401s to non-auth endpoints also get the WWW-Authenticate header.
    self.assertIn('WWW-Authenticate', response.headers)
    self.assertIn('scope="repository:devtable/doesnotexist:pull"',
                  response.headers['WWW-Authenticate'])

  def test_one_five_blacklist(self):
    self.conduct('GET', '/v2/', expected_code=404, user_agent='Go 1.1 package http')

  def test_normal_catalog(self):
    # Look for public repositories and ensure all are public.
    with TestFeature(self, 'PUBLIC_CATALOG', False):
      response = self.conduct('GET', '/v2/_catalog')
      data = response.json()
      self.assertTrue(len(data['repositories']) == 0)

      # Perform auth and lookup the catalog again.
      self.do_auth('devtable', 'password', 'devtable', 'simple')
      all_repos = []

      response = self.conduct('GET', '/v2/_catalog', params=dict(n=2), auth='jwt')
      data = response.json()
      self.assertEquals(len(data['repositories']), 2)

  def test_public_catalog(self):
    # Look for public repositories and ensure all are public.
    with TestFeature(self, 'PUBLIC_CATALOG', True):
      response = self.conduct('GET', '/v2/_catalog')
      data = response.json()
      self.assertTrue(len(data['repositories']) > 0)

      for reponame in data['repositories']:
        self.assertTrue(reponame.find('public/') == 0)

      # Perform auth and lookup the catalog again.
      self.do_auth('devtable', 'password', 'devtable', 'simple')
      all_repos = []

      response = self.conduct('GET', '/v2/_catalog', params=dict(n=2), auth='jwt')
      data = response.json()
      self.assertEquals(len(data['repositories']), 2)
      all_repos.extend(data['repositories'])

      # Ensure we have a next link.
      self.assertIsNotNone(response.headers.get('Link'))

      # Request with the next link.
      while response.headers.get('Link'):
        link_url = response.headers.get('Link')[1:].split(';')[0][:-1]
        v2_index = link_url.find('/v2/')
        relative_url = link_url[v2_index:]

        next_response = self.conduct('GET', relative_url, auth='jwt')
        next_data = next_response.json()
        all_repos.extend(next_data['repositories'])

        self.assertTrue(len(next_data['repositories']) <= 2)
        self.assertNotEquals(next_data['repositories'], data['repositories'])
        response = next_response

      # Ensure the authed request has the public repository.
      public = [reponame for reponame in all_repos if reponame.find('/publicrepo') >= 0]
      self.assertTrue(bool(public))


class V1PushV2PullRegistryTests(V2RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
                                RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 push, V2 pull registry. """
  def test_multiple_tag_with_pull(self):
    """ Tagging the same exact V1 tag multiple times and then pulling with V2. """
    images = self._get_default_images()

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)

    self.do_tag('devtable', 'newrepo', 'latest', images[0]['id'], auth=('devtable', 'password'))
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)


class V1PullV2PushRegistryTests(V1RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
                                RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 pull, V2 push registry. """


class TorrentTestMixin(V2RegistryPullMixin):
  """ Mixin of tests for torrent support. """
  def get_torrent(self, blobsum):
    # Enable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/true')

    response = self.conduct('GET', '/c1/torrent/devtable/newrepo/blobs/' + blobsum,
                            auth=('devtable', 'password'))

    # Disable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/false')

    return response.content

  def test_get_basic_torrent(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)

    # Retrieve the manifest for the tag.
    blobs, _ = self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id='latest',
                            images=initial_images)
    self.assertEquals(1, len(list(blobs.keys())))
    blobsum = list(blobs.keys())[0]

    # Retrieve the torrent for the tag.
    torrent = self.get_torrent(blobsum)
    contents = bencode.bdecode(torrent)

    # Ensure that there is a webseed.
    self.assertEquals(contents['url-list'], 'http://somefakeurl?goes=here')

    # Ensure there is an announce and some pieces.
    self.assertIsNotNone(contents.get('info', {}).get('pieces'))
    self.assertIsNotNone(contents.get('announce'))

    sha = resumablehashlib.sha1()
    sha.update(blobs[blobsum])

    expected = binascii.hexlify(sha.digest())
    found = binascii.hexlify(contents['info']['pieces'])

    self.assertEquals(expected, found)

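  @staticmethod
  def _single_piece_sha1(blob_contents):
    # Illustrative sketch (assumption: this helper is not part of the original
    # suite): for a blob smaller than one torrent piece, the 'pieces' field
    # decoded above holds a single SHA-1 digest of the blob, which is exactly
    # what the test compares against.
    sha = resumablehashlib.sha1()
    sha.update(blob_contents)
    return binascii.hexlify(sha.digest())

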
class TorrentV1PushTests(RegistryTestCaseMixin, TorrentTestMixin, V1RegistryPushMixin, LiveServerTestCase):
  """ Torrent tests via V1 push. """
  pass


class TorrentV2PushTests(RegistryTestCaseMixin, TorrentTestMixin, V2RegistryPushMixin, LiveServerTestCase):
  """ Torrent tests via V2 push. """
  pass


class SquashingTests(RegistryTestCaseMixin, V1RegistryPushMixin, LiveServerTestCase):
  """ Tests for registry squashing. """
  def get_squashed_image(self, auth='sig'):
    response = self.conduct('GET', '/c1/squash/devtable/newrepo/latest', auth=auth)
    tar = tarfile.open(fileobj=StringIO(response.content))
    return tar, response.content

  def test_squashed_with_credentials(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'

    # Pull the squashed version of the tag.
    tar, _ = self.get_squashed_image(auth=('devtable', 'password'))
    self.assertTrue(initial_image_id in tar.getnames())

  def test_squashed_changes(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'

    # Pull the squashed version of the tag.
    tar, _ = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

    # Change the images.
    updated_images = [
      {
        'id': 'updatedid',
        'contents': 'the updated image',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=updated_images)
    updated_image_id = '38df4bd4cdffc6b7d656dbd2813c73e864f2d362ad887c999ac315224ad281ac'

    # Pull the squashed version of the tag and ensure it has changed.
    tar, _ = self.get_squashed_image()
    self.assertTrue(updated_image_id in tar.getnames())

  def test_estimated_squashing(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
        'size': 2002,
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)

    # NULL out the uncompressed size to force estimation.
    self.conduct('POST', '/__test/removeuncompressed/initialid')

    # Pull the squashed version of the tag.
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'
    tar, _ = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

  def test_multilayer_squashing(self):
    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the squashed version of the tag.
    expected_image_id = 'bd590ae79fba5ebc6550aaf016c0bd0f49b1d78178e0f83e0ca1c56c2bb7e7bf'

    expected_names = ['repositories',
                      expected_image_id,
                      '%s/json' % expected_image_id,
                      '%s/VERSION' % expected_image_id,
                      '%s/layer.tar' % expected_image_id]

    tar, _ = self.get_squashed_image()
    self.assertEquals(expected_names, tar.getnames())
    self.assertEquals('1.0', tar.extractfile(tar.getmember('%s/VERSION' % expected_image_id)).read())

    json_data = (tar.extractfile(tar.getmember('%s/json' % expected_image_id)).read())

    # Ensure the JSON loads and parses.
    result = json.loads(json_data)
    self.assertEquals(expected_image_id, result['id'])

    # Ensure that the "image_name" file refers to the latest image, as it is the top layer.
    layer_tar = tarfile.open(fileobj=tar.extractfile(tar.getmember('%s/layer.tar' % expected_image_id)))
    image_contents = layer_tar.extractfile(layer_tar.getmember('contents')).read()
    self.assertEquals('the latest image', image_contents)

  def test_squashed_torrent(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'

    # Try to pull the torrent of the squashed image. This should fail with a 406 since the
    # squashed image doesn't yet exist.
    self.conduct('GET', '/c1/squash/devtable/newrepo/latest', auth=('devtable', 'password'),
                 headers=dict(accept='application/x-bittorrent'),
                 expected_code=406)

    # Pull the squashed version of the tag.
    tar, squashed = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

    # Enable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/true')

    # Pull the torrent.
    response = self.conduct('GET', '/c1/squash/devtable/newrepo/latest',
                            auth=('devtable', 'password'),
                            headers=dict(accept='application/x-bittorrent'))

    # Disable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/false')

    # Ensure the torrent is valid.
    contents = bencode.bdecode(response.content)

    # Ensure that there is a webseed.
    self.assertEquals(contents['url-list'], 'http://somefakeurl?goes=here')

    # Ensure there is an announce and some pieces.
    self.assertIsNotNone(contents.get('info', {}).get('pieces'))
    self.assertIsNotNone(contents.get('announce'))

    # Ensure the SHA1 matches the generated tar.
    sha = resumablehashlib.sha1()
    sha.update(squashed)

    expected = binascii.hexlify(sha.digest())
    found = binascii.hexlify(contents['info']['pieces'])

    self.assertEquals(expected, found)


class LoginTests(object):
  """ Generic tests for registry login. """
  def test_invalid_username_knownrepo(self):
    self.do_login('invaliduser', 'somepassword', expect_success=False,
                  scope='repository:devtable/simple:pull')

  def test_invalid_password_knownrepo(self):
    self.do_login('devtable', 'somepassword', expect_success=False,
                  scope='repository:devtable/simple:pull')

  def test_validuser_knownrepo(self):
    self.do_login('devtable', 'password', expect_success=True,
                  scope='repository:devtable/simple:pull')

  def test_validuser_encryptedpass(self):
    # Generate an encrypted password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('POST', '/api/v1/user/clientkey', json_data=dict(password='password'))

    encryptedpassword = resp.json()['key']
    self.do_login('devtable', encryptedpassword, expect_success=True,
                  scope='repository:devtable/simple:pull')

  def test_robotkey(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/user/robots/dtrobot')
    robot_token = resp.json()['token']

    self.do_login('devtable+dtrobot', robot_token, expect_success=True,
                  scope='repository:devtable/complex:pull')

  def test_oauth(self):
    self.do_login('$oauthtoken', 'test', expect_success=True,
                  scope='repository:devtable/complex:pull')

  def test_cli_token(self):
    self.do_login('$app', 'test', expect_success=True,
                  scope='repository:devtable/complex:pull')


class V1LoginTests(V1RegistryLoginMixin, LoginTests, RegistryTestCaseMixin, BaseRegistryMixin, LiveServerTestCase):
  """ Tests for V1 login. """
  pass # No additional tests.


class V2LoginTests(V2RegistryLoginMixin, LoginTests, RegistryTestCaseMixin, BaseRegistryMixin, LiveServerTestCase):
  """ Tests for V2 login. """
  def do_logincheck(self, username, password, scope, expected_actions=[], expect_success=True,
                    **kwargs):
    # Perform login to get an auth token.
    response = self.do_login(username, password, scope, expect_success=expect_success, **kwargs)
    if not expect_success:
      return

    # Validate the returned token.
    encoded = response.json()['token']
    header = 'Bearer ' + encoded

    payload = decode_bearer_header(header, instance_keys, app.config, metric_queue=metric_queue)
    self.assertIsNotNone(payload)

    if scope is None:
      self.assertEquals(0, len(payload['access']))
    else:
      self.assertEquals(1, len(payload['access']))
      self.assertEquals(payload['access'][0]['actions'], expected_actions)

  def test_nouser_noscope(self):
    self.do_logincheck('', '', expect_success=False, scope=None)

  def test_validuser_unknownrepo(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:invalidnamespace/simple:pull',
                       expected_actions=[])

  def test_validuser_unknownnamespacerepo(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:devtable/newrepo:push',
                       expected_actions=['push'])

  def test_validuser_noaccess(self):
    self.do_logincheck('public', 'password', expect_success=True,
                       scope='repository:devtable/simple:pull',
                       expected_actions=[])

  def test_validuser_withendpoint(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:localhost:5000/devtable/simple:pull,push',
                       expected_actions=['push', 'pull'])

  def test_validuser_invalid_endpoint(self):
    self.do_logincheck('public', 'password', expect_success=False, expected_failure_code=400,
                       scope='repository:someotherrepo.com/devtable/simple:pull,push',
                       expected_actions=[])

  def test_validuser_malformed_endpoint(self):
    self.do_logincheck('public', 'password', expect_success=False, expected_failure_code=400,
                       scope='repository:localhost:5000/registryroot/devtable/simple:pull,push',
                       expected_actions=[])

  def test_validuser_noscope(self):
    self.do_logincheck('public', 'password', expect_success=True, scope=None)

  def test_invaliduser_noscope(self):
    self.do_logincheck('invaliduser', 'invalidpass', expect_success=False, scope=None)

  def test_invalidpassword_noscope(self):
    self.do_logincheck('public', 'invalidpass', expect_success=False, scope=None)

  def test_cli_noaccess(self):
    self.do_logincheck('$app', 'test', expect_success=True,
                       scope='repository:freshuser/unknownrepo:pull,push',
                       expected_actions=[])

  def test_cli_public(self):
    self.do_logincheck('$app', 'test', expect_success=True,
                       scope='repository:public/publicrepo:pull,push',
                       expected_actions=['pull'])

  def test_oauth_noaccess(self):
    self.do_logincheck('$oauthtoken', 'test', expect_success=True,
                       scope='repository:freshuser/unknownrepo:pull,push',
                       expected_actions=[])

  def test_oauth_public(self):
    self.do_logincheck('$oauthtoken', 'test', expect_success=True,
                       scope='repository:public/publicrepo:pull,push',
                       expected_actions=['pull'])

  def test_nouser_pull_publicrepo(self):
    self.do_logincheck('', '', expect_success=True, scope='repository:public/publicrepo:pull',
                       expected_actions=['pull'])

  def test_nouser_push_publicrepo(self):
    self.do_logincheck('', '', expect_success=True, scope='repository:public/publicrepo:push',
                       expected_actions=[])

  def test_library_invaliduser(self):
    self.do_logincheck('invaliduser', 'password', expect_success=False,
                       scope='repository:librepo:pull,push')

  def test_library_noaccess(self):
    self.do_logincheck('freshuser', 'password', expect_success=True,
                       scope='repository:librepo:pull,push',
                       expected_actions=[])

  def test_library_access(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:librepo:pull,push',
                       expected_actions=['push', 'pull'])

  def test_nouser_pushpull_publicrepo(self):
    # Note: Docker 1.8.3 will ask for both push and pull scopes at all times. For public pulls
    # with no credentials, we were returning a 401. This test makes sure we get back just a pull
    # token.
    self.do_logincheck('', '', expect_success=True,
                       scope='repository:public/publicrepo:pull,push',
                       expected_actions=['pull'])


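def _make_scope_string(repo_path, *actions):
  # Illustrative sketch, not part of the original suite: the scope strings used
  # throughout the login tests above follow the docker/distribution token
  # format 'repository:<path>:<action>[,<action>]', e.g.
  # _make_scope_string('devtable/simple', 'pull', 'push').
  return 'repository:%s:%s' % (repo_path, ','.join(actions))

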
if __name__ == '__main__':
  unittest.main()