import unittest
import requests
import os
import math
import random
import string
import resumablehashlib
import binascii
import uuid
import time
import gpgme
import Crypto.Random

from flask import request, jsonify
from flask.blueprints import Blueprint
from flask.ext.testing import LiveServerTestCase

from app import app, storage, instance_keys
from data.database import close_db_filter, configure, DerivedStorageForImage, QueueItem, Image
from data import model
from endpoints.v1 import v1_bp
from endpoints.v2 import v2_bp
from endpoints.verbs import verbs
from endpoints.v2.manifest import SignedManifestBuilder
from endpoints.api import api_bp
from initdb import wipe_database, initialize_database, populate_database
from endpoints.csrf import generate_csrf_token
from tempfile import NamedTemporaryFile
from jsonschema import validate as validate_schema
from util.security.registry_jwt import decode_bearer_token

import endpoints.decorated
import json
import hashlib
import logging
import bencode
import tarfile
import shutil

from jwkest.jwk import RSAKey
from Crypto.PublicKey import RSA

from cStringIO import StringIO
from digest.checksums import compute_simple

try:
  app.register_blueprint(v1_bp, url_prefix='/v1')
  app.register_blueprint(v2_bp, url_prefix='/v2')
  app.register_blueprint(verbs, url_prefix='/c1')
  app.register_blueprint(api_bp, url_prefix='/api')
except ValueError:
  # Blueprint was already registered
  pass

# Add a test blueprint for generating CSRF tokens, setting feature flags and reloading the
# DB connection.

testbp = Blueprint('testbp', __name__)
logger = logging.getLogger(__name__)


@testbp.route('/csrf', methods=['GET'])
def generate_csrf():
  return generate_csrf_token()


@testbp.route('/fakestoragedd/<enabled>', methods=['POST'])
def set_fakestorage_directdownload(enabled):
  storage.put_content(['local_us'], 'supports_direct_download', enabled)
  return 'OK'


@testbp.route('/storagerepentry/<image_id>', methods=['GET'])
def get_storage_replication_entry(image_id):
  image = Image.get(docker_image_id=image_id)
  QueueItem.select().where(QueueItem.queue_name ** ('%' + image.storage.uuid + '%')).get()
  return 'OK'


@testbp.route('/feature/<feature_name>', methods=['POST'])
def set_feature(feature_name):
  import features
  old_value = features._FEATURES[feature_name].value
  features._FEATURES[feature_name].value = request.get_json()['value']
  return jsonify({'old_value': old_value})


@testbp.route('/clearderivedcache', methods=['POST'])
def clearderivedcache():
  DerivedStorageForImage.delete().execute()
  return 'OK'


@testbp.route('/removeuncompressed/<image_id>', methods=['POST'])
def removeuncompressed(image_id):
  image = model.image.get_image_by_id('devtable', 'newrepo', image_id)
  image.storage.uncompressed_size = None
  image.storage.save()
  return 'OK'


@testbp.route('/addtoken', methods=['POST'])
def addtoken():
  another_token = model.token.create_delegate_token('devtable', 'newrepo', 'my-new-token', 'write')
  another_token.code = 'somecooltokencode'
  another_token.save()
  return 'OK'


@testbp.route('/reloadapp', methods=['POST'])
def reload_app():
  # Close any existing connection.
  close_db_filter(None)

  # Reload the database config.
  configure(app.config)

  # Reload random after the process split, as it cannot be used uninitialized across forks.
  Crypto.Random.atfork()
  return 'OK'

app.register_blueprint(testbp, url_prefix='/__test')

class TestFeature(object):
  """ Helper object which temporarily sets the value of a feature flag. """

  def __init__(self, test_case, feature_flag, test_value):
    self.test_case = test_case
    self.feature_flag = feature_flag
    self.test_value = test_value
    self.old_value = None

  def __enter__(self):
    result = self.test_case.conduct('POST', '/__test/feature/' + self.feature_flag,
                                    data=json.dumps(dict(value=self.test_value)),
                                    headers={'Content-Type': 'application/json'})

    result_data = json.loads(result.text)
    self.old_value = result_data['old_value']

  def __exit__(self, type, value, traceback):
    self.test_case.conduct('POST', '/__test/feature/' + self.feature_flag,
                           data=json.dumps(dict(value=self.old_value)),
                           headers={'Content-Type': 'application/json'})

_PORT_NUMBER = 5001
_CLEAN_DATABASE_PATH = None
_JWK = RSAKey(key=RSA.generate(2048))

class FailureCodes:
  """ Defines tuples representing the HTTP status codes for various errors. The tuple
      is defined as ('errordescription', V1HTTPStatusCode, V2HTTPStatusCode). """
  UNAUTHENTICATED = ('unauthenticated', 401, 401)
  UNAUTHORIZED = ('unauthorized', 403, 401)
  INVALID_REGISTRY = ('invalidregistry', 404, 404)
  DOES_NOT_EXIST = ('doesnotexist', 404, 404)
  INVALID_REQUEST = ('invalidrequest', 400, 400)


def _get_expected_code(expected_failure, version, success_status_code):
  """ Returns the HTTP status code for the expected failure under the specified protocol version
      (1 or 2). If none, returns the success status code. """
  if not expected_failure:
    return success_status_code

  return expected_failure[version]

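# Because each FailureCodes tuple is ('errordescription', V1HTTPStatusCode,
# V2HTTPStatusCode), indexing by the protocol version selects the right code:
#
#   _get_expected_code(FailureCodes.UNAUTHORIZED, 1, 200)  # 403 (V1)
#   _get_expected_code(FailureCodes.UNAUTHORIZED, 2, 200)  # 401 (V2)
#   _get_expected_code(None, 2, 200)                       # 200 (no failure expected)
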
def _get_repo_name(namespace, name):
  if namespace == '':
    return name

  return '%s/%s' % (namespace, name)

def _get_full_contents(image_data, additional_fields=False):
  if 'chunks' in image_data:
    # Data is just for chunking; no need for a real TAR.
    return image_data['contents']

  layer_data = StringIO()

  def add_file(name, contents):
    tar_file_info = tarfile.TarInfo(name=name)
    tar_file_info.type = tarfile.REGTYPE
    tar_file_info.size = len(contents)

    tar_file = tarfile.open(fileobj=layer_data, mode='w|gz')
    tar_file.addfile(tar_file_info, StringIO(contents))
    tar_file.close()

  add_file('contents', image_data['contents'])
  if additional_fields:
    add_file('anotherfile', str(uuid.uuid4()))

  layer_bytes = layer_data.getvalue()
  layer_data.close()

  return layer_bytes

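# The layer bytes produced above are a gzip-compressed TAR with a single 'contents'
# member (plus an 'anotherfile' member when additional_fields is set), so they can be
# unpacked the same way assertContents does below. A minimal sketch:
#
#   layer_bytes = _get_full_contents({'id': 'someid', 'contents': 'hello'})
#   tar = tarfile.open(fileobj=StringIO(layer_bytes))
#   assert tar.extractfile('contents').read() == 'hello'
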
def get_new_database_uri():
  # If a clean copy of the database has not yet been created, create one now.
  global _CLEAN_DATABASE_PATH
  if not _CLEAN_DATABASE_PATH:
    wipe_database()
    initialize_database()
    populate_database()
    close_db_filter(None)

    # Save the path of the clean database.
    _CLEAN_DATABASE_PATH = app.config['TEST_DB_FILE'].name

  # Create a new temp file to be used as the actual backing database for the test.
  # Note that we have to close() the file to ensure we can copy to it via shutil.
  local_db_file = NamedTemporaryFile(delete=True)
  local_db_file.close()

  # Copy the clean database to the path.
  shutil.copy2(_CLEAN_DATABASE_PATH, local_db_file.name)
  return 'sqlite:///{0}'.format(local_db_file.name)

class RegistryTestCaseMixin(LiveServerTestCase):
  def create_app(self):
    global _PORT_NUMBER
    _PORT_NUMBER = _PORT_NUMBER + 1

    if os.environ.get('DEBUG') == 'true':
      app.config['DEBUG'] = True

    app.config['TESTING'] = True
    app.config['LIVESERVER_PORT'] = _PORT_NUMBER
    app.config['DB_URI'] = get_new_database_uri()
    return app

  def setUp(self):
    self.clearSession()

    # Tell the remote running app to reload the database and app. By default, the app forks from the
    # current context and has already loaded the DB config with the *original* DB URL. We call
    # the remote reload method to force it to pick up the changes to DB_URI set in the create_app
    # method.
    self.conduct('POST', '/__test/reloadapp')

  def clearSession(self):
    self.session = requests.Session()
    self.signature = None
    self.docker_token = 'true'
    self.jwt = None

    # Load the CSRF token.
    self.csrf_token = ''
    self.csrf_token = self.conduct('GET', '/__test/csrf').text

  def do_tag(self, namespace, repository, tag, image_id, expected_code=200, auth='sig'):
    repo_name = _get_repo_name(namespace, repository)
    self.conduct('PUT', '/v1/repositories/%s/tags/%s' % (repo_name, tag),
                 data='"%s"' % image_id, expected_code=expected_code, auth=auth)

  def conduct_api_login(self, username, password):
    self.conduct('POST', '/api/v1/signin',
                 data=json.dumps(dict(username=username, password=password)),
                 headers={'Content-Type': 'application/json'})

  def change_repo_visibility(self, namespace, repository, visibility):
    repo_name = _get_repo_name(namespace, repository)
    self.conduct('POST', '/api/v1/repository/%s/changevisibility' % repo_name,
                 data=json.dumps(dict(visibility=visibility)),
                 headers={'Content-Type': 'application/json'})

  def assertContents(self, image_data, response):
    if 'chunks' in image_data:
      return

    tar = tarfile.open(fileobj=StringIO(response.content))
    self.assertEquals(tar.extractfile('contents').read(), image_data['contents'])

class BaseRegistryMixin(object):
  def conduct(self, method, url, headers=None, data=None, auth=None, params=None, expected_code=200,
              json_data=None, user_agent=None):
    params = params or {}
    params['_csrf_token'] = self.csrf_token

    headers = headers or {}
    auth_tuple = None

    if user_agent is not None:
      headers['User-Agent'] = user_agent
    else:
      headers['User-Agent'] = 'docker/1.9.1'

    if self.docker_token:
      headers['X-Docker-Token'] = self.docker_token

    if auth == 'sig':
      if self.signature:
        headers['Authorization'] = 'token ' + self.signature
    elif auth == 'jwt':
      if self.jwt:
        headers['Authorization'] = 'Bearer ' + self.jwt
    elif auth:
      auth_tuple = auth

    if json_data is not None:
      data = json.dumps(json_data)
      headers['Content-Type'] = 'application/json'

    response = self.session.request(method, self.get_server_url() + url, headers=headers, data=data,
                                    auth=auth_tuple, params=params)
    if expected_code is None:
      return response

    if response.status_code != expected_code:
      print response.text

    if 'www-authenticate' in response.headers:
      self.signature = response.headers['www-authenticate']

    if 'X-Docker-Token' in response.headers:
      self.docker_token = response.headers['X-Docker-Token']

    self.assertEquals(response.status_code, expected_code)
    return response

  def _get_default_images(self):
    return [{'id': 'someid', 'contents': 'somecontent'}]

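# The auth parameter accepted by conduct() above takes one of three forms: the string
# 'sig' (attach the captured V1 signature token, if any), the string 'jwt' (attach the
# issued V2 bearer JWT, if any), or a (username, password) tuple for HTTP basic auth.
# For example:
#
#   self.conduct('GET', '/v1/repositories/devtable/newrepo/tags', auth='sig')
#   self.conduct('GET', '/v2/devtable/newrepo/manifests/latest', auth='jwt')
#   self.conduct('PUT', '/v1/repositories/devtable/newrepo/', auth=('devtable', 'password'))
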
class V1RegistryMixin(BaseRegistryMixin):
  def v1_ping(self):
    self.conduct('GET', '/v1/_ping')


class V1RegistryPushMixin(V1RegistryMixin):
  push_version = 'v1'

  def do_push(self, namespace, repository, username, password, images=None, expect_failure=None,
              munge_shas=False, tag_names=None):
    images = images or self._get_default_images()
    auth = (username, password)
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v1_ping()

    # PUT /v1/repositories/{namespace}/{repository}/
    expected_code = _get_expected_code(expect_failure, 1, 201)
    self.conduct('PUT', '/v1/repositories/%s/' % repo_name,
                 data=json.dumps(images), auth=auth,
                 expected_code=expected_code)

    if expected_code != 201:
      return

    for image_data in images:
      image_id = image_data['id']

      # PUT /v1/images/{imageID}/json
      image_json_data = {'id': image_id}
      if 'size' in image_data:
        image_json_data['Size'] = image_data['size']

      if 'parent' in image_data:
        image_json_data['parent'] = image_data['parent']

      self.conduct('PUT', '/v1/images/%s/json' % image_id,
                   data=json.dumps(image_json_data), auth='sig')

      # PUT /v1/images/{imageID}/layer
      layer_bytes = _get_full_contents(image_data)
      self.conduct('PUT', '/v1/images/%s/layer' % image_id,
                   data=StringIO(layer_bytes), auth='sig')

      # PUT /v1/images/{imageID}/checksum
      checksum = compute_simple(StringIO(layer_bytes), json.dumps(image_json_data))
      self.conduct('PUT', '/v1/images/%s/checksum' % image_id,
                   headers={'X-Docker-Checksum-Payload': checksum},
                   auth='sig')

    # PUT /v1/repositories/{namespace}/{repository}/tags/latest
    tag_names = tag_names or ['latest']
    for tag_name in tag_names:
      self.do_tag(namespace, repository, tag_name, images[-1]['id'])

    # PUT /v1/repositories/{namespace}/{repository}/images
    self.conduct('PUT', '/v1/repositories/%s/images' % repo_name,
                 expected_code=204,
                 auth='sig')

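# Each image dict handed to do_push/do_pull supports the following keys, as consumed
# by the mixins and _get_full_contents: 'id' and 'contents' (required), 'size' and
# 'parent' (optional V1 JSON metadata), and 'chunks' (optional; a list of
# (start_byte, end_byte) or (start_byte, end_byte, expected_code) tuples that drives
# the chunked V2 blob upload). For example:
#
#   images = [{
#     'id': 'someid',
#     'contents': 'somecontents',
#     'chunks': [(0, 100), (100, 1024)],
#   }]
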
class V1RegistryPullMixin(V1RegistryMixin):
  pull_version = 'v1'

  def do_pull(self, namespace, repository, username=None, password='password', expect_failure=None,
              images=None, munge_shas=False):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    auth = None
    if username:
      auth = (username, password)

    # Ping!
    self.v1_ping()

    prefix = '/v1/repositories/%s/' % repo_name

    # GET /v1/repositories/{namespace}/{repository}/images
    expected_code = _get_expected_code(expect_failure, 1, 200)
    self.conduct('GET', prefix + 'images', auth=auth, expected_code=expected_code)
    if expected_code != 200:
      return

    # GET /v1/repositories/{namespace}/{repository}/tags
    tags_result = json.loads(self.conduct('GET', prefix + 'tags', auth='sig').text)
    self.assertEquals(1, len(tags_result.values()))

    # Ensure we do (or do not) have a matching image ID.
    tag_image_id = tags_result['latest']
    known_ids = [item['id'] for item in images]

    self.assertEquals(not munge_shas, tag_image_id in known_ids)

    # Retrieve the ancestry of the tag image.
    image_prefix = '/v1/images/%s/' % tag_image_id
    ancestors = self.conduct('GET', image_prefix + 'ancestry', auth='sig').json()
    for index, image_id in enumerate(ancestors):
      # /v1/images/{imageID}/{ancestry, json, layer}
      image_prefix = '/v1/images/%s/' % image_id
      self.conduct('GET', image_prefix + 'ancestry', auth='sig')

      response = self.conduct('GET', image_prefix + 'json', auth='sig')
      self.assertEquals(image_id, response.json()['id'])

      response = self.conduct('GET', image_prefix + 'layer', auth='sig')

      # Ensure we can parse the layer bytes and that they contain the contents.
      self.assertContents(images[len(images) - index - 1], response)

class V2RegistryMixin(BaseRegistryMixin):
  MANIFEST_SCHEMA = {
    'type': 'object',
    'properties': {
      'name': {
        'type': 'string',
      },
      'tag': {
        'type': 'string',
      },
      'signatures': {
        'type': 'array',
        'itemType': {
          'type': 'object',
        },
      },
      'fsLayers': {
        'type': 'array',
        'itemType': {
          'type': 'object',
          'properties': {
            'blobSum': {
              'type': 'string',
            },
          },
          'required': ['blobSum'],
        },
      },
      'history': {
        'type': 'array',
        'itemType': {
          'type': 'object',
          'properties': {
            'v1Compatibility': {
              'type': 'object',
            },
          },
          'required': ['v1Compatibility'],
        },
      },
    },
    'required': ['name', 'tag', 'fsLayers', 'history', 'signatures'],
  }

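  # For reference, a manifest accepted by this (deliberately loose) schema looks
  # roughly like the sketch below; the literal values are illustrative only. Note
  # that 'itemType' is not a standard JSON Schema keyword, so only the top-level
  # 'required' list appears to be enforced by validate_schema below:
  #
  #   {
  #     "name": "devtable/newrepo",
  #     "tag": "latest",
  #     "fsLayers": [{"blobSum": "sha256:<digest>"}],
  #     "history": [{"v1Compatibility": "{\"id\": \"someid\"}"}],
  #     "signatures": [{}]
  #   }
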
  def v2_ping(self):
    response = self.conduct('GET', '/v2/', expected_code=200 if self.jwt else 401, auth='jwt')
    self.assertEquals(response.headers['Docker-Distribution-API-Version'], 'registry/2.0')

  def do_auth(self, username, password, namespace, repository, expected_code=200, scopes=[]):
    auth = None
    if username and password:
      auth = (username, password)

    repo_name = _get_repo_name(namespace, repository)

    params = {
      'account': username,
      'service': app.config['SERVER_HOSTNAME'],
    }

    if scopes:
      params['scope'] = 'repository:%s:%s' % (repo_name, ','.join(scopes))

    response = self.conduct('GET', '/v2/auth', params=params, auth=auth,
                            expected_code=expected_code)

    if expected_code == 200:
      response_json = json.loads(response.text)
      self.assertIsNotNone(response_json.get('token'))
      self.jwt = response_json['token']

    return response

class V2RegistryPushMixin(V2RegistryMixin):
  push_version = 'v2'

  def do_push(self, namespace, repository, username, password, images=None, tag_name=None,
              cancel=False, invalid=False, expect_failure=None, scopes=None,
              munge_shas=False):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v2_ping()

    # Auth. If the expected failure is an invalid registry, in V2 we'll receive that error from
    # the auth endpoint first, rather than just the V2 requests below.
    expected_auth_code = 200
    if expect_failure == FailureCodes.INVALID_REGISTRY:
      expected_auth_code = 400

    self.do_auth(username, password, namespace, repository, scopes=scopes or ['push', 'pull'],
                 expected_code=expected_auth_code)
    if expected_auth_code != 200:
      return

    # Build a fake manifest.
    tag_name = tag_name or 'latest'
    builder = SignedManifestBuilder(namespace, repository, tag_name)
    full_contents = {}

    for image_data in reversed(images):
      full_contents[image_data['id']] = _get_full_contents(image_data, additional_fields=munge_shas)
      checksum = 'sha256:' + hashlib.sha256(full_contents[image_data['id']]).hexdigest()
      if invalid:
        checksum = 'sha256:' + hashlib.sha256('foobarbaz').hexdigest()

      builder.add_layer(checksum, json.dumps(image_data))

    expected_code = _get_expected_code(expect_failure, 2, 404)

    # Build the manifest.
    manifest = builder.build(_JWK)

    # Push the image's layers.
    checksums = {}
    for image_data in reversed(images):
      image_id = image_data['id']
      layer_bytes = full_contents[image_data['id']]
      chunks = image_data.get('chunks')

      # Layer data should not yet exist.
      checksum = 'sha256:' + hashlib.sha256(layer_bytes).hexdigest()
      self.conduct('HEAD', '/v2/%s/blobs/%s' % (repo_name, checksum),
                   expected_code=404, auth='jwt')

      # If we expected a non-404 status code, then the HEAD operation has failed and we cannot
      # continue performing the push.
      if expected_code != 404:
        return

      # Start a new upload of the layer data.
      response = self.conduct('POST', '/v2/%s/blobs/uploads/' % repo_name,
                              expected_code=202, auth='jwt')

      upload_uuid = response.headers['Docker-Upload-UUID']
      location = response.headers['Location'][len(self.get_server_url()):]

      # PATCH the image data into the layer.
      if chunks is None:
        self.conduct('PATCH', location, data=layer_bytes, expected_code=204, auth='jwt')
      else:
        for chunk in chunks:
          if len(chunk) == 3:
            (start_byte, end_byte, expected_code) = chunk
          else:
            (start_byte, end_byte) = chunk
            expected_code = 204

          contents_chunk = layer_bytes[start_byte:end_byte]
          self.conduct('PATCH', location, data=contents_chunk, expected_code=expected_code, auth='jwt',
                       headers={'Range': 'bytes=%s-%s' % (start_byte, end_byte)})

          if expected_code != 204:
            return

          # Retrieve the upload status at each point.
          status_url = '/v2/%s/blobs/uploads/%s' % (repo_name, upload_uuid)
          response = self.conduct('GET', status_url, expected_code=204, auth='jwt',
                                  headers=dict(host=self.get_server_url()))
          self.assertEquals(response.headers['Docker-Upload-UUID'], upload_uuid)
          self.assertEquals(response.headers['Range'], "bytes=0-%s" % end_byte)

      if cancel:
        self.conduct('DELETE', location, params=dict(digest=checksum), expected_code=204,
                     auth='jwt')

        # Ensure the upload was canceled.
        status_url = '/v2/%s/blobs/uploads/%s' % (repo_name, upload_uuid)
        self.conduct('GET', status_url, expected_code=404, auth='jwt',
                     headers=dict(host=self.get_server_url()))
        return

      # Finish the layer upload with a PUT.
      response = self.conduct('PUT', location, params=dict(digest=checksum), expected_code=201,
                              auth='jwt')

      self.assertEquals(response.headers['Docker-Content-Digest'], checksum)
      checksums[image_id] = checksum

      # Ensure the layer exists now.
      response = self.conduct('HEAD', '/v2/%s/blobs/%s' % (repo_name, checksum),
                              expected_code=200, auth='jwt')
      self.assertEquals(response.headers['Docker-Content-Digest'], checksum)
      self.assertEquals(response.headers['Content-Length'], str(len(layer_bytes)))

    # Write the manifest. If we expect it to be invalid, we expect a 404 code. Otherwise, we expect
    # a 202 response for success.
    put_code = 404 if invalid else 202
    self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
                 data=manifest.bytes, expected_code=put_code,
                 headers={'Content-Type': 'application/json'}, auth='jwt')

    return checksums, manifest.digest

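# On success, do_push() returns a (checksums, manifest_digest) pair, so tests can
# pull the pushed manifest back by digest, as test_pull_by_checksum does below:
#
#   _, digest = self.do_push('devtable', 'newrepo', 'devtable', 'password')
#   self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id=digest)
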
class V2RegistryPullMixin(V2RegistryMixin):
  pull_version = 'v2'

  def do_pull(self, namespace, repository, username=None, password='password', expect_failure=None,
              manifest_id=None, images=None, munge_shas=False):
    images = images or self._get_default_images()
    repo_name = _get_repo_name(namespace, repository)

    # Ping!
    self.v2_ping()

    # Auth. If the failure expected is unauthenticated, then the auth endpoint will 401 before
    # we reach any of the registry operations.
    expected_auth_code = 200
    if expect_failure == FailureCodes.UNAUTHENTICATED:
      expected_auth_code = 401

    self.do_auth(username, password, namespace, repository, scopes=['pull'],
                 expected_code=expected_auth_code)
    if expected_auth_code != 200:
      return

    # Retrieve the manifest for the tag or digest.
    manifest_id = manifest_id or 'latest'

    expected_code = _get_expected_code(expect_failure, 2, 200)
    response = self.conduct('GET', '/v2/%s/manifests/%s' % (repo_name, manifest_id),
                            auth='jwt', expected_code=expected_code)
    if expected_code != 200:
      return

    manifest_data = json.loads(response.text)

    # Ensure the manifest returned by us is valid.
    validate_schema(manifest_data, V2RegistryMixin.MANIFEST_SCHEMA)

    # Verify the layers.
    blobs = {}
    for index, layer in enumerate(reversed(manifest_data['fsLayers'])):
      blob_id = layer['blobSum']
      result = self.conduct('GET', '/v2/%s/blobs/%s' % (repo_name, blob_id),
                            expected_code=200, auth='jwt')

      blobs[blob_id] = result.content
      self.assertContents(images[index], result)

    # Verify the V1 metadata is present for each expected image.
    found_v1_layers = set()
    history = manifest_data['history']
    for entry in history:
      v1_history = json.loads(entry['v1Compatibility'])
      found_v1_layers.add(v1_history['id'])

    for image in images:
      self.assertIn(image['id'], found_v1_layers)

    return blobs

class V1RegistryLoginMixin(object):
  def do_login(self, username, password, scope, expect_success=True):
    data = {
      'username': username,
      'password': password,
    }

    response = self.conduct('POST', '/v1/users/', json_data=data, expected_code=400)
    if expect_success:
      self.assertEquals(response.text, '"Username or email already exists"')
    else:
      self.assertNotEquals(response.text, '"Username or email already exists"')


class V2RegistryLoginMixin(object):
  def do_login(self, username, password, scope, expect_success=True, expected_failure_code=401):
    params = {
      'account': username,
      'scope': scope,
      'service': app.config['SERVER_HOSTNAME'],
    }

    if expect_success:
      expected_code = 200
    else:
      expected_code = expected_failure_code

    auth = None
    if username and password:
      auth = (username, password)

    response = self.conduct('GET', '/v2/auth', params=params, auth=auth,
                            expected_code=expected_code)
    return response

class RegistryTestsMixin(object):
  def test_push_same_ids_different_sha(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=True)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=True)

  def test_push_same_ids_different_sha_with_unicode(self):
    if self.push_version == 'v1':
      # No SHAs to munge in V1.
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'The latest image',
        'unicode': u'the Pawe\xc5\x82 Kami\xc5\x84ski image',
        'parent': 'baseid',
      },
    ]

    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password', images=images)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images)

    # Push the repository again, but with different SHAs.
    self.do_push('public', 'newrepo', 'public', 'password', images=images, munge_shas=True)

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password', images=images, munge_shas=True)

  def test_push_pull_logging(self):
    # Push a new repository.
    self.do_push('public', 'newrepo', 'public', 'password')

    # Retrieve the logs and ensure the push was added.
    self.conduct_api_login('public', 'password')
    result = self.conduct('GET', '/api/v1/repository/public/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(1, len(logs))
    self.assertEquals('push_repo', logs[0]['kind'])
    self.assertEquals('public', logs[0]['performer']['name'])

    # Pull the repository.
    self.do_pull('public', 'newrepo', 'public', 'password')

    # Retrieve the logs and ensure the pull was added.
    result = self.conduct('GET', '/api/v1/repository/public/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('public', logs[0]['performer']['name'])

  def test_push_pull_logging_byrobot(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/ownerbot')
    robot_token = json.loads(resp.text)['token']

    # Push a new repository.
    self.do_push('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Retrieve the logs and ensure the push was added.
    result = self.conduct('GET', '/api/v1/repository/buynlarge/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(1, len(logs))
    self.assertEquals('push_repo', logs[0]['kind'])
    self.assertEquals('buynlarge+ownerbot', logs[0]['performer']['name'])

    # Pull the repository.
    self.do_pull('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Retrieve the logs and ensure the pull was added.
    result = self.conduct('GET', '/api/v1/repository/buynlarge/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('buynlarge+ownerbot', logs[0]['performer']['name'])

  def test_push_pull_logging_bytoken(self):
    # Push the repository.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Add a token.
    self.conduct('POST', '/__test/addtoken')

    # Pull the repository.
    self.do_pull('devtable', 'newrepo', '$token', 'somecooltokencode')

    # Retrieve the logs and ensure the pull was added.
    self.conduct_api_login('devtable', 'password')
    result = self.conduct('GET', '/api/v1/repository/devtable/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals('pull_repo', logs[0]['kind'])
    self.assertEquals('my-new-token', logs[0]['metadata']['token'])

  def test_push_pull_logging_byoauth(self):
    # Push the repository.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Pull the repository.
    self.do_pull('devtable', 'newrepo', '$oauthtoken', 'test')

    # Retrieve the logs and ensure the pull was added.
    self.conduct_api_login('devtable', 'password')
    result = self.conduct('GET', '/api/v1/repository/devtable/newrepo/logs')
    logs = result.json()['logs']

    self.assertEquals(2, len(logs))
    self.assertEquals('pull_repo', logs[0]['kind'])

    self.assertEquals('devtable', logs[0]['performer']['name'])
    self.assertEquals(1, logs[0]['metadata']['oauth_token_id'])

  def test_pull_publicrepo_anonymous(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    self.do_push('public', 'newrepo', 'public', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo anonymously, which should fail (since it is
    # private).
    self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHORIZED)
    self.do_pull('public', 'newrepo', 'devtable', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository anonymously, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo')

  def test_pull_publicrepo_devtable(self):
    # Add a new repository under the public user, so we have a real repository to pull.
    self.do_push('public', 'newrepo', 'public', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo as devtable, which should fail as it belongs
    # to public.
    self.do_pull('public', 'newrepo', 'devtable', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Make the repository public.
    self.conduct_api_login('public', 'password')
    self.change_repo_visibility('public', 'newrepo', 'public')
    self.clearSession()

    # Pull the repository as devtable, which should succeed because the repository is public.
    self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_pull_private_repo(self):
    # Add a new repository under the devtable user, so we have a real repository to pull.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')
    self.clearSession()

    # First try to pull the (currently private) repo as public, which should fail as it belongs
    # to devtable.
    self.do_pull('devtable', 'newrepo', 'public', 'password',
                 expect_failure=FailureCodes.UNAUTHORIZED)

    # Pull the repository as devtable, which should succeed because the repository is owned by
    # devtable.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password')

  def test_public_no_anonymous_access_with_auth(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as devtable, which should fail as it belongs
      # to public.
      self.do_pull('public', 'newrepo', 'devtable', 'password',
                   expect_failure=FailureCodes.UNAUTHORIZED)

      # Make the repository public.
      self.conduct_api_login('public', 'password')
      self.change_repo_visibility('public', 'newrepo', 'public')
      self.clearSession()

      # Pull the repository as devtable, which should succeed because the repository is public.
      self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_private_no_anonymous_access(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as devtable, which should fail as it belongs
      # to public.
      self.do_pull('public', 'newrepo', 'devtable', 'password',
                   expect_failure=FailureCodes.UNAUTHORIZED)

      # Pull the repository as public, which should succeed because the repository is owned by public.
      self.do_pull('public', 'newrepo', 'public', 'password')

  def test_public_no_anonymous_access_no_auth(self):
    # Turn off anonymous access.
    with TestFeature(self, 'ANONYMOUS_ACCESS', False):
      # Add a new repository under the public user, so we have a real repository to pull.
      self.do_push('public', 'newrepo', 'public', 'password')
      self.clearSession()

      # First try to pull the (currently private) repo as anonymous, which should fail as it
      # is private.
      self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHENTICATED)

      # Make the repository public.
      self.conduct_api_login('public', 'password')
      self.change_repo_visibility('public', 'newrepo', 'public')
      self.clearSession()

      # Try again to pull the (currently public) repo as anonymous, which should fail as
      # anonymous access is disabled.
      self.do_pull('public', 'newrepo', expect_failure=FailureCodes.UNAUTHENTICATED)

      # Pull the repository as public, which should succeed because the repository is owned by public.
      self.do_pull('public', 'newrepo', 'public', 'password')

      # Pull the repository as devtable, which should succeed because the repository is public.
      self.do_pull('public', 'newrepo', 'devtable', 'password')

  def test_create_repo_creator_user(self):
    self.do_push('buynlarge', 'newrepo', 'creator', 'password')

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

  def test_create_repo_robot_owner(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/ownerbot')
    robot_token = json.loads(resp.text)['token']

    self.do_push('buynlarge', 'newrepo', 'buynlarge+ownerbot', robot_token)

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

  def test_create_repo_robot_creator(self):
    # Lookup the robot's password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/organization/buynlarge/robots/creatorbot')
    robot_token = json.loads(resp.text)['token']

    self.do_push('buynlarge', 'newrepo', 'buynlarge+creatorbot', robot_token)

    # Pull the repository as devtable, which should succeed because the repository is owned by the
    # org.
    self.do_pull('buynlarge', 'newrepo', 'devtable', 'password')

  def test_library_repo(self):
    self.do_push('', 'newrepo', 'devtable', 'password')
    self.do_pull('', 'newrepo', 'devtable', 'password')
    self.do_pull('library', 'newrepo', 'devtable', 'password')

  def test_library_disabled(self):
    with TestFeature(self, 'LIBRARY_SUPPORT', False):
      self.do_push('library', 'newrepo', 'devtable', 'password')
      self.do_pull('library', 'newrepo', 'devtable', 'password')

  def test_image_replication(self):
    with TestFeature(self, 'STORAGE_REPLICATION', True):
      images = [
        {
          'id': 'baseid',
          'contents': 'The base image',
        },
        {
          'id': 'latestid',
          'contents': 'The latest image',
          'unicode': u'the Pawe\xc5\x82 Kami\xc5\x84ski image',
          'parent': 'baseid',
        },
      ]

      # Push a new repository.
      self.do_push('public', 'newrepo', 'public', 'password', images=images)

      # Ensure that we have a storage replication entry for each image pushed.
      self.conduct('GET', '/__test/storagerepentry/baseid', expected_code=200)
      self.conduct('GET', '/__test/storagerepentry/latestid', expected_code=200)

class V1RegistryTests(V1RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
                      RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 registry. """
  def test_push_reponame_with_slashes(self):
    # Attempt to add a repository name with slashes. This should fail as we do not support it.
    images = [{
      'id': 'onlyimagehere',
      'contents': 'somecontents',
    }]
    self.do_push('public', 'newrepo/somesubrepo', 'public', 'password', images,
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_push_unicode_metadata(self):
    self.conduct_api_login('devtable', 'password')

    images = [{
      'id': 'onlyimagehere',
      'comment': 'Pawe\xc5\x82 Kami\xc5\x84ski <pawel.kaminski@codewise.com>'.decode('utf-8'),
      'contents': 'somecontents',
    }]

    self.do_push('devtable', 'unicodetest', 'devtable', 'password', images)
    self.do_pull('devtable', 'unicodetest', 'devtable', 'password', images=images)

  def test_tag_validation(self):
    image_id = 'onlyimagehere'
    images = [{
      'id': image_id,
      'contents': 'somecontents',
    }]

    self.do_push('public', 'newrepo', 'public', 'password', images)
    self.do_tag('public', 'newrepo', '1', image_id)
    self.do_tag('public', 'newrepo', 'x' * 128, image_id)
    self.do_tag('public', 'newrepo', '', image_id, expected_code=404)
    self.do_tag('public', 'newrepo', 'x' * 129, image_id, expected_code=400)
    self.do_tag('public', 'newrepo', '.fail', image_id, expected_code=400)
    self.do_tag('public', 'newrepo', '-fail', image_id, expected_code=400)

2015-09-08 15:58:21 +00:00
|
|
|
class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
|
|
|
|
RegistryTestCaseMixin, LiveServerTestCase):
|
2015-08-27 18:55:33 +00:00
|
|
|
""" Tests for V2 registry. """
|
2016-06-02 16:46:20 +00:00
|
|
|
def test_invalid_manifest_type(self):
|
|
|
|
namespace = 'devtable'
|
|
|
|
repository = 'somerepo'
|
|
|
|
tag_name = 'sometag'
|
|
|
|
|
|
|
|
repo_name = _get_repo_name(namespace, repository)
|
|
|
|
|
|
|
|
self.v2_ping()
|
|
|
|
self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])
|
|
|
|
|
|
|
|
# Build a fake manifest.
|
|
|
|
builder = SignedManifestBuilder(namespace, repository, tag_name)
|
|
|
|
builder.add_layer('sha256:' + hashlib.sha256('invalid').hexdigest(), json.dumps({'id': 'foo'}))
|
|
|
|
manifest = builder.build(_JWK)
|
|
|
|
|
|
|
|
self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
|
|
|
|
data=manifest.bytes, expected_code=415,
|
|
|
|
headers={'Content-Type': 'application/vnd.docker.distribution.manifest.v2+json'},
|
|
|
|
auth='jwt')
|
2015-08-27 18:55:33 +00:00
|
|
|
|
2016-02-12 15:39:27 +00:00
|
|
|
def test_invalid_blob(self):
|
|
|
|
namespace = 'devtable'
|
|
|
|
repository = 'somerepo'
|
|
|
|
tag_name = 'sometag'
|
|
|
|
|
|
|
|
repo_name = _get_repo_name(namespace, repository)
|
|
|
|
|
|
|
|
self.v2_ping()
|
|
|
|
self.do_auth('devtable', 'password', namespace, repository, scopes=['push', 'pull'])
|
|
|
|
|
|
|
|
# Build a fake manifest.
|
|
|
|
builder = SignedManifestBuilder(namespace, repository, tag_name)
|
|
|
|
builder.add_layer('sha256:' + hashlib.sha256('invalid').hexdigest(), json.dumps({'id': 'foo'}))
|
|
|
|
manifest = builder.build(_JWK)
|
|
|
|
|
|
|
|
response = self.conduct('PUT', '/v2/%s/manifests/%s' % (repo_name, tag_name),
|
|
|
|
data=manifest.bytes, expected_code=404,
|
|
|
|
headers={'Content-Type': 'application/json'}, auth='jwt')
|
|
|
|
self.assertEquals('BLOB_UNKNOWN', response.json()['errors'][0]['code'])
|
|
|
|
|
|
|
|
|
2015-12-10 20:15:24 +00:00
|
|
|
def test_delete_manifest(self):
|
|
|
|
# Push a new repo with the latest tag.
|
|
|
|
(_, digest) = self.do_push('devtable', 'newrepo', 'devtable', 'password')
|
|
|
|
|
|
|
|
# Ensure the pull works.
|
|
|
|
self.do_pull('devtable', 'newrepo', 'devtable', 'password')
|
|
|
|
|
|
|
|
# Conduct auth for the write scope.
|
|
|
|
self.do_auth('devtable', 'password', 'devtable', 'newrepo', scopes=['push'])
|
|
|
|
|
|
|
|
# Delete the digest.
|
|
|
|
self.conduct('DELETE', '/v2/devtable/newrepo/manifests/' + digest, auth='jwt',
|
|
|
|
expected_code=202)
|
|
|
|
|
|
|
|
# Ensure the tag no longer exists.
|
|
|
|
self.do_pull('devtable', 'newrepo', 'devtable', 'password',
|
2016-01-22 21:49:32 +00:00
|
|
|
expect_failure=FailureCodes.DOES_NOT_EXIST)
|
2015-12-10 20:15:24 +00:00
|
|
|
|
2015-11-24 04:46:05 +00:00
|
|
|
def test_push_only_push_scope(self):
|
|
|
|
images = [{
|
|
|
|
'id': 'onlyimagehere',
|
|
|
|
'contents': 'foobar',
|
|
|
|
}]
|
|
|
|
|
|
|
|
self.do_push('devtable', 'somenewrepo', 'devtable', 'password', images,
|
|
|
|
scopes=['push'])
|
|
|
|
|

  def test_push_reponame_with_slashes(self):
    # Attempt to push a repository whose name contains slashes. This should fail, as nested
    # repository names are not supported.
    images = [{
      'id': 'onlyimagehere',
      'contents': 'somecontents',
    }]

    self.do_push('public', 'newrepo/somesubrepo', 'devtable', 'password', images,
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_invalid_push(self):
    self.do_push('devtable', 'newrepo', 'devtable', 'password', invalid=True)

  def test_cancel_push(self):
    self.do_push('devtable', 'newrepo', 'devtable', 'password', cancel=True)

  def test_pull_by_checksum(self):
    # Add a new repository under the user, so we have a real repository to pull.
    _, digest = self.do_push('devtable', 'newrepo', 'devtable', 'password')

    # Attempt to pull by digest.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id=digest)

  def test_pull_invalid_image_tag(self):
    # Add a new repository under the user, so we have a real repository to pull.
    self.do_push('devtable', 'newrepo', 'devtable', 'password')
    self.clearSession()

    # Attempt to pull the invalid tag.
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id='invalid',
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_partial_upload_below_5mb(self):
    chunksize = 1024 * 1024 * 2
    size = chunksize * 3
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    chunk_count = int(math.ceil((len(contents) * 1.0) / chunksize))
    chunks = [(index * chunksize, (index + 1) * chunksize) for index in range(chunk_count)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)
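
  # A note on the 'chunks' entries used by the partial-upload tests: judging from their use,
  # each chunk is a (start_offset, end_offset) pair of byte positions into 'contents' that is
  # uploaded as its own request against the blob upload session; an optional third element
  # (see test_partial_upload_try_resend_with_gap below) is the HTTP status code expected in
  # response to that chunk.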

  def test_partial_upload_way_below_5mb(self):
    size = 1024
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))
    chunks = [(0, 100), (100, size)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)

  def test_partial_upload_resend_below_5mb(self):
    size = 150
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    # Note that the second chunk overlaps the first, so bytes 10-100 are resent.
    chunks = [(0, 100), (10, size)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Push the chunked upload.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the image back and verify the contents.
    blobs = self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.assertEquals(len(blobs.items()), 1)
    self.assertEquals(blobs.items()[0][1], contents)

  def test_partial_upload_try_resend_with_gap(self):
    size = 150
    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))

    # The second chunk starts at byte 101, leaving a one-byte gap after the first chunk, so the
    # registry is expected to reject it with a 416.
    chunks = [(0, 100), (101, size, 416)]

    images = [
      {
        'id': 'someid',
        'contents': contents,
        'chunks': chunks
      }
    ]

    # Attempt to push the chunked upload, which should fail.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

  def test_multiple_layers_invalid(self):
    # Attempt to push a manifest with an image depending on an unknown base layer.
    images = [
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      }
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images,
                 expect_failure=FailureCodes.INVALID_REQUEST)

  def test_multiple_layers(self):
    # Push a manifest with multiple layers.
    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

  def test_invalid_regname(self):
    self.do_push('devtable', 'this/is/a/repo', 'devtable', 'password',
                 expect_failure=FailureCodes.INVALID_REGISTRY)

  def test_multiple_tags(self):
    latest_images = [
      {
        'id': 'latestid',
        'contents': 'the latest image'
      }
    ]

    foobar_images = [
      {
        'id': 'foobarid',
        'contents': 'the foobar image',
      }
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=latest_images,
                 tag_name='latest')

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=foobar_images,
                 tag_name='foobar')

    # Retrieve the tags.
    response = self.conduct('GET', '/v2/devtable/newrepo/tags/list', auth='jwt', expected_code=200)
    data = json.loads(response.text)
    self.assertEquals(data['name'], "devtable/newrepo")
    self.assertIn('latest', data['tags'])
    self.assertIn('foobar', data['tags'])

    # Retrieve the tags with pagination.
    response = self.conduct('GET', '/v2/devtable/newrepo/tags/list', auth='jwt',
                            params=dict(n=1), expected_code=200)

    data = json.loads(response.text)
    self.assertEquals(data['name'], "devtable/newrepo")
    self.assertEquals(len(data['tags']), 1)

    # Try to get tags before a repo exists.
    response = self.conduct('GET', '/v2/devtable/doesnotexist/tags/list', auth='jwt',
                            expected_code=401)

    # Assert that 401s from non-auth endpoints also carry the WWW-Authenticate header.
    self.assertIn('WWW-Authenticate', response.headers)
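
  # (Per the Docker Registry V2 API, paginated list responses carry an RFC 5988 'Link' header of
  # the form '<url>; rel="next"'; test_catalog below recovers a relative URL from it by splitting
  # on ';' and trimming everything before '/v2/'.)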

  def test_one_five_blacklist(self):
    # Clients that send the generic Go HTTP user agent (as Docker 1.5 and earlier do) should be
    # denied V2 support.
    self.conduct('GET', '/v2/', expected_code=404, user_agent='Go 1.1 package http')

  def test_catalog(self):
    # Look up the public repositories and ensure all are public.
    response = self.conduct('GET', '/v2/_catalog')
    data = response.json()
    self.assertTrue(len(data['repositories']) > 0)

    for reponame in data['repositories']:
      self.assertTrue(reponame.startswith('public/'))

    # Perform auth and look up the catalog again.
    self.do_auth('devtable', 'password', 'devtable', 'simple')

    response = self.conduct('GET', '/v2/_catalog', params=dict(n=2), auth='jwt')
    data = response.json()
    self.assertEquals(len(data['repositories']), 2)

    # Ensure we have a next link.
    self.assertIsNotNone(response.headers.get('Link'))

    # Request with the next link.
    link_url = response.headers.get('Link').split(';')[0]
    v2_index = link_url.find('/v2/')
    relative_url = link_url[v2_index:]

    next_response = self.conduct('GET', relative_url, auth='jwt')
    next_data = next_response.json()

    self.assertEquals(len(next_data['repositories']), 2)
    self.assertNotEquals(next_data['repositories'], data['repositories'])


class V1PushV2PullRegistryTests(V2RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
                                RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 push, V2 pull registry. """
  def test_multiple_tag_with_pull(self):
    """ Tagging the same exact V1 tag multiple times and then pulling with V2. """
    images = self._get_default_images()

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)

    self.do_tag('devtable', 'newrepo', 'latest', images[0]['id'], auth=('devtable', 'password'))
    self.do_pull('devtable', 'newrepo', 'devtable', 'password', images=images)


class V1PullV2PushRegistryTests(V1RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
                                RegistryTestCaseMixin, LiveServerTestCase):
  """ Tests for V1 pull, V2 push registry. """


class TorrentTestMixin(V2RegistryPullMixin):
  """ Mixin of tests for torrent support. """
  def get_torrent(self, blobsum):
    # Enable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/true')

    response = self.conduct('GET', '/c1/torrent/devtable/newrepo/blobs/' + blobsum,
                            auth=('devtable', 'password'))

    # Disable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/false')

    return response.content

  def test_get_basic_torrent(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)

    # Retrieve the manifest for the tag.
    blobs = self.do_pull('devtable', 'newrepo', 'devtable', 'password', manifest_id='latest',
                         images=initial_images)
    self.assertEquals(1, len(list(blobs.keys())))
    blobsum = list(blobs.keys())[0]

    # Retrieve the torrent for the blob.
    torrent = self.get_torrent(blobsum)
    contents = bencode.bdecode(torrent)

    # Ensure that there is a webseed.
    self.assertEquals(contents['url-list'], 'http://somefakeurl')

    # Ensure there is an announce and some pieces.
    self.assertIsNotNone(contents.get('info', {}).get('pieces'))
    self.assertIsNotNone(contents.get('announce'))

    # Ensure the SHA1 of the pieces matches the blob contents.
    sha = resumablehashlib.sha1()
    sha.update(blobs[blobsum])

    expected = binascii.hexlify(sha.digest())
    found = binascii.hexlify(contents['info']['pieces'])

    self.assertEquals(expected, found)
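
  # For reference, the piece hashing above can be reproduced with the standard library alone.
  # This minimal sketch assumes the blob fits in a single torrent piece (true for these tiny
  # test blobs); a real torrent hashes each fixed-size piece separately and concatenates the
  # 20-byte SHA1 digests:
  #
  #   import hashlib
  #
  #   def piece_hashes(data, piece_length):
  #     return ''.join(hashlib.sha1(data[i:i + piece_length]).digest()
  #                    for i in range(0, len(data), piece_length))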


class TorrentV1PushTests(RegistryTestCaseMixin, TorrentTestMixin, V1RegistryPushMixin, LiveServerTestCase):
  """ Torrent tests via V1 push. """
  pass


class TorrentV2PushTests(RegistryTestCaseMixin, TorrentTestMixin, V2RegistryPushMixin, LiveServerTestCase):
  """ Torrent tests via V2 push. """
  pass


class ACIConversionTests(RegistryTestCaseMixin, V1RegistryPushMixin, LiveServerTestCase):
  """ Tests for registry ACI conversion. """
  def get_converted_image(self, tag_name='latest'):
    url = '/c1/aci/localhost:5000/devtable/newrepo/' + tag_name + '/aci/linux/amd64/'
    response = self.conduct('GET', url, auth='sig')
    tar = tarfile.open(fileobj=StringIO(response.content))
    return tar, response.content

  def get_converted_signature(self, tag_name='latest'):
    counter = 0

    # Give the signature time to be written before continuing. As we don't know exactly when
    # that happens (it depends on CPU conditions while the test is running), we use a
    # sleep-with-backoff approach.
    while counter < 10:
      url = '/c1/aci/localhost:5000/devtable/newrepo/' + tag_name + '/aci.asc/linux/amd64/'
      response = self.conduct('GET', url, auth='sig', expected_code=None)
      if response.status_code == 202 or response.status_code == 404:
        counter += 1
        time.sleep(counter * 2)
      else:
        return response.content

    self.fail('Signature was never created')
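
  # (With this backoff, the worst case waits 2 + 4 + ... + 20 = 110 seconds before giving up.)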

  def _verify_signature(self, signature, converted):
    sig_bytes = StringIO(signature)
    content_bytes = StringIO(converted)

    ctx = gpgme.Context()
    sigs = ctx.verify(sig_bytes, content_bytes, None)

    self.assertEqual(len(sigs), 1)
    self.assertEqual(sigs[0].summary, 0)
    self.assertEqual(sigs[0].fpr, '07692864E17025DD1BEA88E44632047EEEB32221')
    self.assertEqual(sigs[0].status, None)
    self.assertEqual(sigs[0].notations, [])
    self.assertEqual(sigs[0].exp_timestamp, 0)
    self.assertEqual(sigs[0].wrong_key_usage, False)
    self.assertEqual(sigs[0].validity, gpgme.VALIDITY_UNKNOWN)
    self.assertEqual(sigs[0].validity_reason, None)

  def test_basic_conversion(self):
    if os.environ.get('RUN_ACI_TESTS') == 'False':
      return

    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)

    # Pull the ACI-converted version of the tag.
    tar, converted = self.get_converted_image()
    signature = self.get_converted_signature()
    first_hash = hashlib.sha256(converted).hexdigest()

    # Verify the manifest.
    self.assertEquals(['manifest', 'rootfs', 'rootfs/contents'], tar.getnames())

    manifest = json.loads(tar.extractfile(tar.getmember('manifest')).read())
    expected_manifest = {
      "acKind": "ImageManifest",
      "app": {
        "environment": [],
        "mountPoints": [],
        "group": "root",
        "user": "root",
        "workingDirectory": "/",
        "exec": [],
        "isolators": [],
        "eventHandlers": [],
        "ports": [],
        "annotations": [
          {"name": "created", "value": ""},
          {"name": "homepage", "value": "http://localhost:5000/devtable/newrepo:latest"},
          {"name": "quay.io/derived-image",
           "value": "fa916d5ca4da5348628dfffcfc943288a0cca521cd21a6d2981a85ec1d7f7a3a"}
        ]
      },
      "labels": [
        {"name": "version", "value": "latest"},
        {"name": "arch", "value": "amd64"},
        {"name": "os", "value": "linux"}
      ],
      "acVersion": "0.6.1",
      "name": "localhost/devtable/newrepo"
    }

    self.assertEquals(manifest, expected_manifest)
    self.assertEquals('the initial image', tar.extractfile(tar.getmember('rootfs/contents')).read())

    # Verify the signature.
    self._verify_signature(signature, converted)

    # Clear the cache and pull again, ensuring that the hash does not change even for a completely
    # new generation of the image.
    self.conduct('POST', '/__test/clearderivedcache')

    _, converted_again = self.get_converted_image()
    second_hash = hashlib.sha256(converted_again).hexdigest()
    self.assertEquals(second_hash, first_hash)

    # Ensure we have a different signature (and therefore that the cache was broken).
    signature_again = self.get_converted_signature()
    self.assertNotEquals(signature_again, signature)

    # Ensure *both* signatures work for both images.
    self._verify_signature(signature, converted_again)
    self._verify_signature(signature_again, converted)
    self._verify_signature(signature_again, converted_again)
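
  # (The content hash stays stable across regenerations because the conversion itself is
  # deterministic, while the GPG signatures differ because each embeds its own creation
  # timestamp; both signatures nonetheless verify against the identical content.)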

  def assertHasDerivedImage(self, manifest, expected):
    for annotation in manifest['app']['annotations']:
      if annotation['name'] == 'homepage':
        self.assertEqual(expected, annotation['value'])
        return

    self.fail('Homepage (derived image) annotation not found in metadata')

  def test_conversion_different_tags(self):
    if os.environ.get('RUN_ACI_TESTS') == 'False':
      return

    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images,
                 tag_names=['latest', 'sometag'])

    # Pull the ACI-converted version of tag latest.
    latest_tar, _ = self.get_converted_image(tag_name='latest')
    latest_manifest = json.loads(latest_tar.extractfile(latest_tar.getmember('manifest')).read())
    self.assertHasDerivedImage(latest_manifest, 'http://localhost:5000/devtable/newrepo:latest')

    # Pull the ACI-converted version of tag sometag.
    sometag_tar, _ = self.get_converted_image(tag_name='sometag')
    sometag_manifest = json.loads(sometag_tar.extractfile(sometag_tar.getmember('manifest')).read())
    self.assertHasDerivedImage(sometag_manifest, 'http://localhost:5000/devtable/newrepo:sometag')

  def test_multilayer_conversion(self):
    if os.environ.get('RUN_ACI_TESTS') == 'False':
      return

    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      }
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the ACI-converted version of the tag.
    tar, converted = self.get_converted_image()
    signature = self.get_converted_signature()

    self.assertEquals(['manifest', 'rootfs', 'rootfs/contents'], tar.getnames())
    self.assertEquals('the latest image', tar.extractfile(tar.getmember('rootfs/contents')).read())

    # Verify the signature.
    self._verify_signature(signature, converted)


class SquashingTests(RegistryTestCaseMixin, V1RegistryPushMixin, LiveServerTestCase):
  """ Tests for registry squashing. """
  def get_squashed_image(self):
    response = self.conduct('GET', '/c1/squash/devtable/newrepo/latest', auth='sig')
    tar = tarfile.open(fileobj=StringIO(response.content))
    return tar, response.content

  def test_squashed_changes(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'

    # Pull the squashed version of the tag.
    tar, _ = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

    # Change the images.
    updated_images = [
      {
        'id': 'updatedid',
        'contents': 'the updated image',
      },
    ]

    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=updated_images)
    updated_image_id = '38df4bd4cdffc6b7d656dbd2813c73e864f2d362ad887c999ac315224ad281ac'

    # Pull the squashed version of the tag and ensure it has changed.
    tar, _ = self.get_squashed_image()
    self.assertTrue(updated_image_id in tar.getnames())

  def test_estimated_squashing(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
        'size': 2002,
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)

    # NULL out the uncompressed size to force estimation.
    self.conduct('POST', '/__test/removeuncompressed/initialid')

    # Pull the squashed version of the tag.
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'
    tar, _ = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

  def test_multilayer_squashing(self):
    images = [
      {
        'id': 'baseid',
        'contents': 'The base image',
      },
      {
        'id': 'latestid',
        'contents': 'the latest image',
        'parent': 'baseid',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=images)

    # Pull the squashed version of the tag.
    expected_image_id = 'bd590ae79fba5ebc6550aaf016c0bd0f49b1d78178e0f83e0ca1c56c2bb7e7bf'

    expected_names = ['repositories',
                      expected_image_id,
                      '%s/json' % expected_image_id,
                      '%s/VERSION' % expected_image_id,
                      '%s/layer.tar' % expected_image_id]

    tar, _ = self.get_squashed_image()
    self.assertEquals(expected_names, tar.getnames())
    self.assertEquals('1.0', tar.extractfile(tar.getmember('%s/VERSION' % expected_image_id)).read())

    json_data = (tar.extractfile(tar.getmember('%s/json' % expected_image_id)).read())

    # Ensure the JSON loads and parses.
    result = json.loads(json_data)
    self.assertEquals(expected_image_id, result['id'])

    # Ensure the squashed layer's contents come from the latest image, as it is the top layer.
    layer_tar = tarfile.open(fileobj=tar.extractfile(tar.getmember('%s/layer.tar' % expected_image_id)))
    image_contents = layer_tar.extractfile(layer_tar.getmember('contents')).read()
    self.assertEquals('the latest image', image_contents)
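
  # (The expected names above are exactly the 'docker save'/'docker load' tarball layout: a
  # top-level 'repositories' index plus, per image, 'json', 'VERSION' and 'layer.tar' entries.
  # That layout is what allows a squashed image to be loaded directly into a Docker daemon.)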

  def test_squashed_torrent(self):
    initial_images = [
      {
        'id': 'initialid',
        'contents': 'the initial image',
      },
    ]

    # Create the repo.
    self.do_push('devtable', 'newrepo', 'devtable', 'password', images=initial_images)
    initial_image_id = '91081df45b58dc62dd207441785eef2b895f0383fbe601c99a3cf643c79957dc'

    # Try to pull the torrent of the squashed image. This should fail with a 406 since the
    # squashed image has not yet been generated.
    self.conduct('GET', '/c1/squash/devtable/newrepo/latest', auth=('devtable', 'password'),
                 headers=dict(accept='application/x-bittorrent'),
                 expected_code=406)

    # Pull the squashed version of the tag.
    tar, squashed = self.get_squashed_image()
    self.assertTrue(initial_image_id in tar.getnames())

    # Enable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/true')

    # Pull the torrent.
    response = self.conduct('GET', '/c1/squash/devtable/newrepo/latest',
                            auth=('devtable', 'password'),
                            headers=dict(accept='application/x-bittorrent'))

    # Disable direct download URLs in fake storage.
    self.conduct('POST', '/__test/fakestoragedd/false')

    # Ensure the torrent is valid.
    contents = bencode.bdecode(response.content)

    # Ensure that there is a webseed.
    self.assertEquals(contents['url-list'], 'http://somefakeurl')

    # Ensure there is an announce and some pieces.
    self.assertIsNotNone(contents.get('info', {}).get('pieces'))
    self.assertIsNotNone(contents.get('announce'))

    # Ensure the SHA1 matches the generated tar.
    sha = resumablehashlib.sha1()
    sha.update(squashed)

    expected = binascii.hexlify(sha.digest())
    found = binascii.hexlify(contents['info']['pieces'])

    self.assertEquals(expected, found)


class LoginTests(object):
  """ Generic tests for registry login. """
  def test_invalid_username_knownrepo(self):
    self.do_login('invaliduser', 'somepassword', expect_success=False,
                  scope='repository:devtable/simple:pull')

  def test_invalid_password_knownrepo(self):
    self.do_login('devtable', 'somepassword', expect_success=False,
                  scope='repository:devtable/simple:pull')

  def test_validuser_knownrepo(self):
    self.do_login('devtable', 'password', expect_success=True,
                  scope='repository:devtable/simple:pull')

  def test_validuser_encryptedpass(self):
    # Generate an encrypted password.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('POST', '/api/v1/user/clientkey', json_data=dict(password='password'))

    encryptedpassword = resp.json()['key']
    self.do_login('devtable', encryptedpassword, expect_success=True,
                  scope='repository:devtable/simple:pull')

  def test_robotkey(self):
    # Look up the robot's token.
    self.conduct_api_login('devtable', 'password')
    resp = self.conduct('GET', '/api/v1/user/robots/dtrobot')
    robot_token = resp.json()['token']

    self.do_login('devtable+dtrobot', robot_token, expect_success=True,
                  scope='repository:devtable/complex:pull')

  def test_oauth(self):
    self.do_login('$oauthtoken', 'test', expect_success=True,
                  scope='repository:devtable/complex:pull')


class V1LoginTests(V1RegistryLoginMixin, LoginTests, RegistryTestCaseMixin, BaseRegistryMixin, LiveServerTestCase):
  """ Tests for V1 login. """
  pass  # No additional tests.


class V2LoginTests(V2RegistryLoginMixin, LoginTests, RegistryTestCaseMixin, BaseRegistryMixin, LiveServerTestCase):
  """ Tests for V2 login. """
  def do_logincheck(self, username, password, scope, expected_actions=None, expect_success=True,
                    **kwargs):
    expected_actions = expected_actions or []

    # Perform login to get an auth token.
    response = self.do_login(username, password, scope, expect_success=expect_success, **kwargs)
    if not expect_success:
      return

    # Validate the returned token.
    encoded = response.json()['token']
    token = 'Bearer ' + encoded

    payload = decode_bearer_token(token, instance_keys)
    self.assertIsNotNone(payload)

    if scope is None:
      self.assertEquals(0, len(payload['access']))
    else:
      self.assertEquals(1, len(payload['access']))
      self.assertEquals(payload['access'][0]['actions'], expected_actions)
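
  # For reference, the decoded bearer token carries an 'access' claim shaped roughly like the
  # following (per the Docker registry token specification); the assertions above inspect only
  # the 'actions' list:
  #
  #   'access': [
  #     {
  #       'type': 'repository',
  #       'name': 'devtable/simple',
  #       'actions': ['pull'],
  #     },
  #   ]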

  def test_nouser_noscope(self):
    self.do_logincheck('', '', expect_success=False, scope=None)

  def test_validuser_unknownrepo(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:invalidnamespace/simple:pull',
                       expected_actions=[])

  def test_validuser_unknownnamespacerepo(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:devtable/newrepo:push',
                       expected_actions=['push'])

  def test_validuser_noaccess(self):
    self.do_logincheck('public', 'password', expect_success=True,
                       scope='repository:devtable/simple:pull',
                       expected_actions=[])

  def test_validuser_withendpoint(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:localhost:5000/devtable/simple:pull,push',
                       expected_actions=['push', 'pull'])

  def test_validuser_invalid_endpoint(self):
    self.do_logincheck('public', 'password', expect_success=False, expected_failure_code=400,
                       scope='repository:someotherrepo.com/devtable/simple:pull,push',
                       expected_actions=[])

  def test_validuser_malformed_endpoint(self):
    self.do_logincheck('public', 'password', expect_success=False, expected_failure_code=400,
                       scope='repository:localhost:5000/registryroot/devtable/simple:pull,push',
                       expected_actions=[])

  def test_validuser_noscope(self):
    self.do_logincheck('public', 'password', expect_success=True, scope=None)

  def test_invaliduser_noscope(self):
    self.do_logincheck('invaliduser', 'invalidpass', expect_success=False, scope=None)

  def test_invalidpassword_noscope(self):
    self.do_logincheck('public', 'invalidpass', expect_success=False, scope=None)

  def test_oauth_noaccess(self):
    self.do_logincheck('$oauthtoken', 'test', expect_success=True,
                       scope='repository:freshuser/unknownrepo:pull,push',
                       expected_actions=[])

  def test_oauth_public(self):
    self.do_logincheck('$oauthtoken', 'test', expect_success=True,
                       scope='repository:public/publicrepo:pull,push',
                       expected_actions=['pull'])

  def test_nouser_pull_publicrepo(self):
    self.do_logincheck('', '', expect_success=True, scope='repository:public/publicrepo:pull',
                       expected_actions=['pull'])

  def test_nouser_push_publicrepo(self):
    self.do_logincheck('', '', expect_success=True, scope='repository:public/publicrepo:push',
                       expected_actions=[])
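
  # (Scopes follow the 'repository:<name>:<action>[,<action>]' form from the Docker token
  # specification; as the endpoint tests above show, the repository name may also carry a
  # 'host[:port]/' prefix, which must match this registry to be accepted.)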

  def test_library_invaliduser(self):
    self.do_logincheck('invaliduser', 'password', expect_success=False,
                       scope='repository:librepo:pull,push')

  def test_library_noaccess(self):
    self.do_logincheck('freshuser', 'password', expect_success=True,
                       scope='repository:librepo:pull,push',
                       expected_actions=[])

  def test_library_access(self):
    self.do_logincheck('devtable', 'password', expect_success=True,
                       scope='repository:librepo:pull,push',
                       expected_actions=['push', 'pull'])

  def test_nouser_pushpull_publicrepo(self):
    # Note: Docker 1.8.3 asks for both push and pull scopes at all times. For public pulls with
    # no credentials, we were previously returning a 401; this test makes sure we instead return
    # a token carrying just the pull action.
    self.do_logincheck('', '', expect_success=True,
                       scope='repository:public/publicrepo:pull,push',
                       expected_actions=['pull'])


if __name__ == '__main__':
  unittest.main()