initial import for Open Source 🎉

Jimmy Zelinskie 2019-11-12 11:09:47 -05:00
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions

@@ -0,0 +1,11 @@
from endpoints.test.shared import conduct_call
from endpoints.api import api
def conduct_api_call(client, resource, method, params, body=None, expected_code=200, headers=None):
""" Conducts an API call to the given resource via the given client, and ensures its returned
status matches the code given.
Returns the response.
"""
return conduct_call(client, resource, api.url_for, method, params, body, expected_code,
headers=headers)
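For reference, the test files later in this commit call the helper above through client_with_identity; a minimal, hypothetical usage sketch (AppTokens is just one of the resources exercised below):

from endpoints.api.appspecifictokens import AppTokens
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity

def example_list_tokens(client):
    # Authenticate as 'devtable' and expect a 200 from the token list endpoint.
    with client_with_identity('devtable', client) as cl:
        return conduct_api_call(cl, AppTokens, 'GET', None, None, expected_code=200).json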

@@ -0,0 +1,50 @@
from datetime import datetime, timedelta
from data import model
from endpoints.api.appspecifictokens import AppTokens, AppToken
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
def test_app_specific_tokens(app, client):
with client_with_identity('devtable', client) as cl:
# Add an app specific token.
token_data = {'title': 'Testing 123'}
resp = conduct_api_call(cl, AppTokens, 'POST', None, token_data, 200).json
token_uuid = resp['token']['uuid']
assert 'token_code' in resp['token']
# List the tokens and ensure we have the one added.
resp = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
assert len(resp['tokens'])
assert token_uuid in set([token['uuid'] for token in resp['tokens']])
assert not set([token['token_code'] for token in resp['tokens'] if 'token_code' in token])
# List the tokens expiring soon and ensure the one added is not present.
resp = conduct_api_call(cl, AppTokens, 'GET', {'expiring': True}, None, 200).json
assert token_uuid not in set([token['uuid'] for token in resp['tokens']])
# Get the token and ensure we have its code.
resp = conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 200).json
assert resp['token']['uuid'] == token_uuid
assert 'token_code' in resp['token']
# Delete the token.
conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token_uuid}, None, 204)
# Ensure the token no longer exists.
resp = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
assert len(resp['tokens'])
assert token_uuid not in set([token['uuid'] for token in resp['tokens']])
conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 404)
def test_delete_expired_app_token(app, client):
user = model.user.get_user('devtable')
expiration = datetime.now() - timedelta(seconds=10)
token = model.appspecifictoken.create_token(user, 'some token', expiration)
with client_with_identity('devtable', client) as cl:
# Delete the token.
conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token.uuid}, None, 204)

@@ -0,0 +1,20 @@
import pytest
from endpoints.api.build import RepositoryBuildList
@pytest.mark.parametrize('request_json,subdir,context', [
({}, '/Dockerfile', '/'),
({'context': '/some_context'}, '/some_context/Dockerfile', '/some_context'),
({'subdirectory': 'some_context'}, 'some_context/Dockerfile', 'some_context'),
({'subdirectory': 'some_context/'}, 'some_context/Dockerfile', 'some_context/'),
({'dockerfile_path': 'some_context/Dockerfile'}, 'some_context/Dockerfile', 'some_context'),
({'dockerfile_path': 'some_context/Dockerfile', 'context': '/'}, 'some_context/Dockerfile', '/'),
({'dockerfile_path': 'some_context/Dockerfile',
'context': '/',
'subdirectory': 'slime'}, 'some_context/Dockerfile', '/'),
])
def test_extract_dockerfile_args(request_json, subdir, context):
actual_context, actual_subdir = RepositoryBuildList.get_dockerfile_context(request_json)
assert subdir == actual_subdir
assert context == actual_context
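The parametrized cases above pin down how context, subdirectory, and dockerfile_path interact. A rough sketch of the rules they imply, assuming plain os.path semantics (dockerfile_context_sketch is illustrative only, not the actual RepositoryBuildList.get_dockerfile_context):

import os

def dockerfile_context_sketch(request_json):
    # An explicit dockerfile_path wins; context then defaults to its directory.
    context = request_json.get('context')
    dockerfile_path = request_json.get('dockerfile_path')
    if dockerfile_path:
        return (context or os.path.dirname(dockerfile_path) or '/', dockerfile_path)
    # Otherwise derive the Dockerfile location from context or subdirectory.
    if context:
        return (context, os.path.join(context, 'Dockerfile'))
    subdir = request_json.get('subdirectory', '/')
    return (subdir, os.path.join(subdir, 'Dockerfile'))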

@@ -0,0 +1,83 @@
import pytest
from data import model
from endpoints.api.repository import Repository
from endpoints.api.build import (RepositoryBuildList, RepositoryBuildResource,
RepositoryBuildStatus, RepositoryBuildLogs)
from endpoints.api.image import RepositoryImageList, RepositoryImage
from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
from endpoints.api.repositorynotification import (RepositoryNotification,
RepositoryNotificationList,
TestRepositoryNotification)
from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
from endpoints.api.signing import RepositorySignatures
from endpoints.api.tag import ListRepositoryTags, RepositoryTag, RepositoryTagImages, RestoreTag
from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerSubdirs,
BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources,
BuildTriggerSourceNamespaces)
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
NOTIFICATION_ARGS = {'uuid': '1234'}
TAG_ARGS = {'tag': 'foobar'}
TRIGGER_ARGS = {'trigger_uuid': '1234'}
FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
@pytest.mark.parametrize('resource, method, params', [
(RepositoryBuildList, 'get', None),
(RepositoryBuildList, 'post', None),
(RepositoryBuildResource, 'get', BUILD_ARGS),
(RepositoryBuildResource, 'delete', BUILD_ARGS),
(RepositoryBuildStatus, 'get', BUILD_ARGS),
(RepositoryBuildLogs, 'get', BUILD_ARGS),
(RepositoryImageList, 'get', None),
(RepositoryImage, 'get', IMAGE_ARGS),
(RepositoryManifestLabels, 'get', MANIFEST_ARGS),
(RepositoryManifestLabels, 'post', MANIFEST_ARGS),
(ManageRepositoryManifestLabel, 'get', LABEL_ARGS),
(ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),
(RepositoryNotificationList, 'get', None),
(RepositoryNotificationList, 'post', None),
(RepositoryNotification, 'get', NOTIFICATION_ARGS),
(RepositoryNotification, 'delete', NOTIFICATION_ARGS),
(RepositoryNotification, 'post', NOTIFICATION_ARGS),
(TestRepositoryNotification, 'post', NOTIFICATION_ARGS),
(RepositoryImageSecurity, 'get', IMAGE_ARGS),
(RepositoryManifestSecurity, 'get', MANIFEST_ARGS),
(RepositorySignatures, 'get', None),
(ListRepositoryTags, 'get', None),
(RepositoryTag, 'put', TAG_ARGS),
(RepositoryTag, 'delete', TAG_ARGS),
(RepositoryTagImages, 'get', TAG_ARGS),
(RestoreTag, 'post', TAG_ARGS),
(BuildTriggerList, 'get', None),
(BuildTrigger, 'get', TRIGGER_ARGS),
(BuildTrigger, 'delete', TRIGGER_ARGS),
(BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
(BuildTriggerActivate, 'post', TRIGGER_ARGS),
(BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
(ActivateBuildTrigger, 'post', TRIGGER_ARGS),
(TriggerBuildList, 'get', TRIGGER_ARGS),
(BuildTriggerFieldValues, 'post', FIELD_ARGS),
(BuildTriggerSources, 'post', TRIGGER_ARGS),
(BuildTriggerSourceNamespaces, 'get', TRIGGER_ARGS),
])
def test_disallowed_for_apps(resource, method, params, client):
namespace = 'devtable'
repository = 'someapprepo'
devtable = model.user.get_user('devtable')
model.repository.create_repository(namespace, repository, devtable, repo_kind='application')
params = params or {}
params['repository'] = '%s/%s' % (namespace, repository)
with client_with_identity('devtable', client) as cl:
conduct_api_call(cl, resource, method, params, None, 501)

@@ -0,0 +1,64 @@
import pytest
from data import model
from data.database import RepositoryState
from endpoints.api.build import RepositoryBuildList, RepositoryBuildResource
from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
from endpoints.api.tag import RepositoryTag, RestoreTag
from endpoints.api.trigger import (BuildTrigger, BuildTriggerSubdirs,
BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
BuildTriggerFieldValues, BuildTriggerSources)
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
NOTIFICATION_ARGS = {'uuid': '1234'}
TAG_ARGS = {'tag': 'foobar'}
TRIGGER_ARGS = {'trigger_uuid': '1234'}
FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
@pytest.mark.parametrize('state', [
RepositoryState.MIRROR,
RepositoryState.READ_ONLY,
])
@pytest.mark.parametrize('resource, method, params', [
(RepositoryBuildList, 'post', None),
(RepositoryBuildResource, 'delete', BUILD_ARGS),
(RepositoryManifestLabels, 'post', MANIFEST_ARGS),
(ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),
(RepositoryTag, 'put', TAG_ARGS),
(RepositoryTag, 'delete', TAG_ARGS),
(RestoreTag, 'post', TAG_ARGS),
(BuildTrigger, 'delete', TRIGGER_ARGS),
(BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
(BuildTriggerActivate, 'post', TRIGGER_ARGS),
(BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
(ActivateBuildTrigger, 'post', TRIGGER_ARGS),
(BuildTriggerFieldValues, 'post', FIELD_ARGS),
(BuildTriggerSources, 'post', TRIGGER_ARGS),
])
def test_disallowed_for_nonnormal(state, resource, method, params, client):
namespace = 'devtable'
repository = 'somenewstaterepo'
devtable = model.user.get_user('devtable')
repo = model.repository.create_repository(namespace, repository, devtable)
repo.state = state
repo.save()
params = params or {}
params['repository'] = '%s/%s' % (namespace, repository)
with client_with_identity('devtable', client) as cl:
conduct_api_call(cl, resource, method, params, None, 503)

@@ -0,0 +1,63 @@
import pytest
from mock import patch
from endpoints.api.search import EntitySearch, LinkExternalEntity
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.test_ldap import mock_ldap
from test.test_external_jwt_authn import fake_jwt
from test.test_keystone_auth import fake_keystone
from test.fixtures import *
@pytest.fixture(params=[
mock_ldap,
fake_jwt,
fake_keystone,
])
def auth_engine(request):
return request.param
@pytest.fixture(params=[
False,
True,
])
def requires_email(request):
return request.param
def test_entity_search(auth_engine, requires_email, client):
with auth_engine(requires_email=requires_email) as auth:
with patch('endpoints.api.search.authentication', auth):
# Try an unknown prefix.
response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='unknown'))
results = response.json['results']
assert len(results) == 0
# Try a known prefix.
response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='cool'))
results = response.json['results']
entity = results[0]
assert entity['name'] == 'cool.user'
assert entity['kind'] == 'external'
def test_link_external_entity(auth_engine, requires_email, client):
with auth_engine(requires_email=requires_email) as auth:
with patch('endpoints.api.search.authentication', auth):
with client_with_identity('devtable', client) as cl:
# Try an unknown user.
conduct_api_call(cl, LinkExternalEntity, 'POST', params=dict(username='unknownuser'),
expected_code=400)
# Try a known user.
response = conduct_api_call(cl, LinkExternalEntity, 'POST',
params=dict(username='cool.user'))
entity = response.json['entity']
assert entity['name'] == 'cool_user'
assert entity['kind'] == 'user'

@@ -0,0 +1,34 @@
import os
import time
import pytest
from mock import patch
from app import export_action_logs_queue
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.logs import ExportOrgLogs
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.skipif(os.environ.get('TEST_DATABASE_URI', '').find('mysql') >= 0,
reason="Queue code is very sensitive to times on MySQL, making this flaky")
def test_export_logs(client):
with client_with_identity('devtable', client) as cl:
assert export_action_logs_queue.get() is None
timecode = time.time()
def get_time():
return timecode - 2
with patch('time.time', get_time):
# Call to export logs.
body = {
'callback_url': 'http://some/url',
'callback_email': 'a@b.com',
}
conduct_api_call(cl, ExportOrgLogs, 'POST', {'orgname': 'buynlarge'},
body, expected_code=200)
# Ensure the request was queued.
assert export_action_logs_queue.get() is not None

@@ -0,0 +1,24 @@
from data.registry_model import registry_model
from endpoints.api.manifest import RepositoryManifest
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
def test_repository_manifest(client):
with client_with_identity('devtable', client) as cl:
repo_ref = registry_model.lookup_repository('devtable', 'simple')
tags = registry_model.list_all_active_repository_tags(repo_ref)
for tag in tags:
manifest_digest = tag.manifest_digest
if manifest_digest is None:
continue
params = {
'repository': 'devtable/simple',
'manifestref': manifest_digest,
}
result = conduct_api_call(cl, RepositoryManifest, 'GET', params, None, 200).json
assert result['digest'] == manifest_digest
assert result['manifest_data']
assert result['image']

@@ -0,0 +1,230 @@
from datetime import datetime
import pytest
from data import model
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.mirror import RepoMirrorResource
from endpoints.test.shared import client_with_identity
from test.fixtures import *
def _setup_mirror():
repo = model.repository.get_repository('devtable', 'simple')
assert repo
robot = model.user.lookup_robot('devtable+dtrobot')
assert robot
rule = model.repo_mirror.create_rule(repo, ['latest', '3.3*', 'foo'])
assert rule
mirror_kwargs = {
'is_enabled': True,
'external_reference': 'quay.io/redhat/quay',
'sync_interval': 5000,
'sync_start_date': datetime(2020, 1, 2, 6, 30, 0),
'external_registry_username': 'fakeUsername',
'external_registry_password': 'fakePassword',
'external_registry_config': {
'verify_tls': True,
'proxy': {
'http_proxy': 'http://insecure.proxy.corp',
'https_proxy': 'https://secure.proxy.corp',
'no_proxy': 'mylocalhost'
}
}
}
mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
internal_robot=robot, **mirror_kwargs)
assert mirror
return mirror
@pytest.mark.parametrize('existing_robot_permission, expected_permission', [
(None, 'write'),
('read', 'write'),
('write', 'write'),
('admin', 'admin'),
])
def test_create_mirror_sets_permissions(existing_robot_permission, expected_permission, client):
mirror_bot, _ = model.user.create_robot('newmirrorbot', model.user.get_namespace_user('devtable'))
if existing_robot_permission:
model.permission.set_user_repo_permission(mirror_bot.username, 'devtable', 'simple',
existing_robot_permission)
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
request_body = {
'external_reference': 'quay.io/foobar/barbaz',
'sync_interval': 100,
'sync_start_date': '2019-08-20T17:51:00Z',
'root_rule': {
'rule_type': 'TAG_GLOB_CSV',
'rule_value': ['latest','foo', 'bar']
},
'robot_username': 'devtable+newmirrorbot',
}
conduct_api_call(cl, RepoMirrorResource, 'POST', params, request_body, 201)
# Check the status of the robot.
permissions = model.permission.get_user_repository_permissions(mirror_bot, 'devtable', 'simple')
assert permissions[0].role.name == expected_permission
config = model.repo_mirror.get_mirror(model.repository.get_repository('devtable', 'simple'))
assert config.root_rule.rule_value == ['latest', 'foo', 'bar']
def test_get_mirror_does_not_exist(client):
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 404)
def test_get_repo_does_not_exist(client):
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/unicorn'}
resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 404)
def test_get_mirror(client):
""" Verify that performing a `GET` request returns expected and accurate data. """
mirror = _setup_mirror()
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200).json
assert resp['is_enabled'] == True
assert resp['external_reference'] == 'quay.io/redhat/quay'
assert resp['sync_interval'] == 5000
assert resp['sync_start_date'] == '2020-01-02T06:30:00Z'
assert resp['external_registry_username'] == 'fakeUsername'
assert 'external_registry_password' not in resp
assert 'external_registry_config' in resp
assert resp['external_registry_config']['verify_tls'] == True
assert 'proxy' in resp['external_registry_config']
assert resp['external_registry_config']['proxy']['http_proxy'] == 'http://insecure.proxy.corp'
assert resp['external_registry_config']['proxy']['https_proxy'] == 'https://secure.proxy.corp'
assert resp['external_registry_config']['proxy']['no_proxy'] == 'mylocalhost'
@pytest.mark.parametrize('key, value, expected_status', [
('is_enabled', True, 201),
('is_enabled', False, 201),
('is_enabled', None, 400),
('is_enabled', 'foo', 400),
('external_reference', 'example.com/foo/bar', 201),
('external_reference', 'example.com/foo', 201),
('external_reference', 'example.com', 201),
('external_registry_username', 'newTestUsername', 201),
('external_registry_username', None, 201),
('external_registry_username', 123, 400),
('external_registry_password', 'newTestPassword', 400),
('external_registry_password', None, 400),
('external_registry_password', 41, 400),
('robot_username', 'devtable+dtrobot', 201),
('robot_username', 'devtable+doesntExist', 400),
('sync_start_date', '2020-01-01T00:00:00Z', 201),
('sync_start_date', 'January 1 2020', 400),
('sync_start_date', '2020-01-01T00:00:00.00Z', 400),
('sync_start_date', 'Wed, 01 Jan 2020 00:00:00 -0000', 400),
('sync_start_date', 'Wed, 02 Oct 2002 08:00:00 EST', 400),
('sync_interval', 2000, 201),
('sync_interval', -5, 400),
('https_proxy', 'https://proxy.corp.example.com', 201),
('https_proxy', None, 201),
('https_proxy', 'proxy.example.com; rm -rf /', 201), # Safe; values only set in env, not eval'ed
('http_proxy', 'http://proxy.corp.example.com', 201),
('http_proxy', None, 201),
('http_proxy', 'proxy.example.com; rm -rf /', 201), # Safe; values only set in env, not eval'ed
('no_proxy', 'quay.io', 201),
('no_proxy', None, 201),
('no_proxy', 'quay.io; rm -rf /', 201), # Safe because proxy values are not eval'ed
('verify_tls', True, 201),
('verify_tls', False, 201),
('verify_tls', None, 400),
('verify_tls', 'abc', 400),
])
def test_change_config(key, value, expected_status, client):
""" Verify that changing each attribute works as expected. """
mirror = _setup_mirror()
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
if key in ('http_proxy', 'https_proxy', 'no_proxy'):
request_body = {'external_registry_config': {'proxy': {key: value}}}
elif key == 'verify_tls':
request_body = {'external_registry_config': {key: value}}
else:
request_body = {key: value}
conduct_api_call(cl, RepoMirrorResource, 'PUT', params, request_body, expected_status)
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200)
if expected_status < 400:
if key == 'external_registry_password':
assert key not in resp.json
elif key == 'verify_tls':
assert resp.json['external_registry_config']['verify_tls'] == value
elif key in ('http_proxy', 'https_proxy', 'no_proxy'):
assert resp.json['external_registry_config']['proxy'][key] == value
else:
assert resp.json[key] == value
else:
if key == 'external_registry_password':
assert key not in resp.json
elif key == 'verify_tls':
assert resp.json['external_registry_config'][key] != value
elif key in ('http_proxy', 'https_proxy', 'no_proxy'):
assert resp.json['external_registry_config']['proxy'][key] != value
else:
assert resp.json[key] != value
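The sync_start_date cases above indicate that only second-precision UTC timestamps of the form YYYY-MM-DDTHH:MM:SSZ are accepted; assuming that format string, such a value can be produced with strftime:

from datetime import datetime

# Yields '2020-01-01T00:00:00Z', matching the accepted case above.
sync_start_date = datetime(2020, 1, 1).strftime('%Y-%m-%dT%H:%M:%SZ')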
@pytest.mark.parametrize('request_body, expected_status', [
# Set a new password and username => Success
({ 'external_registry_username': 'newUsername',
'external_registry_password': 'newPassword'}, 201 ),
# Set password and username to None => Success
({ 'external_registry_username': None,
'external_registry_password': None}, 201 ),
# Set username to value but password None => Success
({ 'external_registry_username': 'myUsername',
'external_registry_password': None}, 201 ),
# Set only new Username => Success
({'external_registry_username': 'myNewUsername'}, 201),
({'external_registry_username': None}, 201),
# Set only new Password => Failure
({'external_registry_password': 'myNewPassword'}, 400),
({'external_registry_password': None}, 400),
# Set username and password to empty string => Success?
({'external_registry_username': '',
'external_registry_password': ''}, 201),
])
def test_change_credentials(request_body, expected_status, client):
""" Verify credentials can only be modified as a pair. """
mirror = _setup_mirror()
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
conduct_api_call(cl, RepoMirrorResource, 'PUT', params, request_body, expected_status)

@@ -0,0 +1,38 @@
import pytest
from data import model
from endpoints.api import api
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.organization import (Organization,
OrganizationCollaboratorList)
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.parametrize('expiration, expected_code', [
(0, 200),
(100, 400),
(100000000000000000000, 400),
])
def test_change_tag_expiration(expiration, expected_code, client):
with client_with_identity('devtable', client) as cl:
conduct_api_call(cl, Organization, 'PUT', {'orgname': 'buynlarge'},
body={'tag_expiration_s': expiration},
expected_code=expected_code)
def test_get_organization_collaborators(client):
params = {'orgname': 'buynlarge'}
with client_with_identity('devtable', client) as cl:
resp = conduct_api_call(cl, OrganizationCollaboratorList, 'GET', params)
collaborator_names = [c['name'] for c in resp.json['collaborators']]
assert 'outsideorg' in collaborator_names
assert 'devtable' not in collaborator_names
assert 'reader' not in collaborator_names
for collaborator in resp.json['collaborators']:
if collaborator['name'] == 'outsideorg':
assert 'orgrepo' in collaborator['repositories']
assert 'anotherorgrepo' not in collaborator['repositories']

@@ -0,0 +1,23 @@
import pytest
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.permission import RepositoryUserPermission
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.parametrize('repository, username, expected_code', [
pytest.param('devtable/simple', 'public', 200, id='valid user under user'),
pytest.param('devtable/simple', 'devtable+dtrobot', 200, id='valid robot under user'),
pytest.param('devtable/simple', 'buynlarge+coolrobot', 400, id='invalid robot under user'),
pytest.param('buynlarge/orgrepo', 'devtable', 200, id='valid user under org'),
pytest.param('buynlarge/orgrepo', 'devtable+dtrobot', 400, id='invalid robot under org'),
pytest.param('buynlarge/orgrepo', 'buynlarge+coolrobot', 200, id='valid robot under org'),
])
def test_robot_permission(repository, username, expected_code, client):
with client_with_identity('devtable', client) as cl:
conduct_api_call(cl, RepositoryUserPermission, 'PUT',
{'repository': repository, 'username': username},
body={
'role': 'read',
},
expected_code=expected_code)

@@ -0,0 +1,89 @@
import pytest
from mock import Mock
import util
from data import model
from endpoints.api.repoemail_models_interface import RepositoryAuthorizedEmail
from endpoints.api.repoemail_models_pre_oci import pre_oci_model
@pytest.fixture
def get_monkeypatch(monkeypatch):
return monkeypatch
def return_none(name, repo, email):
return None
def get_return_mock(mock):
def return_mock(name, repo, email):
return mock
return return_mock
def test_get_email_authorized_for_repo(get_monkeypatch):
mock = Mock()
get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', mock)
pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name', 'email')
mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
def test_get_email_authorized_for_repo_return_none(get_monkeypatch):
get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', return_none)
repo = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name', 'email')
assert repo is None
def test_get_email_authorized_for_repo_return_repo(get_monkeypatch):
mock = Mock(confirmed=True, code='code')
get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', get_return_mock(mock))
actual = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name',
'email')
assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', True,
'code')
def test_create_email_authorization_for_repo(get_monkeypatch):
mock = Mock()
get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', mock)
pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name', 'email')
mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
def test_create_email_authorization_for_repo_return_none(get_monkeypatch):
get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', return_none)
assert pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
'email') is None
def test_create_email_authorization_for_repo_return_mock(get_monkeypatch):
mock = Mock()
get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
get_return_mock(mock))
assert pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
'email') is not None
def test_create_email_authorization_for_repo_return_value(get_monkeypatch):
mock = Mock(confirmed=False, code='code')
get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
get_return_mock(mock))
actual = pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
'email')
assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', False,
'code')

@@ -0,0 +1,166 @@
import pytest
from mock import patch, ANY, MagicMock
from data import model, database
from data.appr_model import release, channel, blob
from endpoints.appr.models_cnr import model as appr_model
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.repository import RepositoryTrust, Repository, RepositoryList
from endpoints.test.shared import client_with_identity
from features import FeatureNameValue
from test.fixtures import *
@pytest.mark.parametrize('trust_enabled,repo_found,expected_status', [
(True, True, 200),
(False, True, 200),
(False, False, 404),
('invalid_req', False, 400),
])
def test_post_changetrust(trust_enabled, repo_found, expected_status, client):
with patch('endpoints.api.repository.tuf_metadata_api') as mock_tuf:
with patch(
'endpoints.api.repository_models_pre_oci.model.repository.get_repository') as mock_model:
mock_model.return_value = MagicMock() if repo_found else None
mock_tuf.get_default_tags_with_expiration.return_value = ['tags', 'expiration']
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/repo'}
request_body = {'trust_enabled': trust_enabled}
conduct_api_call(cl, RepositoryTrust, 'POST', params, request_body, expected_status)
def test_signing_disabled(client):
with patch('features.SIGNING', FeatureNameValue('SIGNING', False)):
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
response = conduct_api_call(cl, Repository, 'GET', params).json
assert not response['trust_enabled']
def test_list_starred_repos(client):
with client_with_identity('devtable', client) as cl:
params = {
'starred': 'true',
}
response = conduct_api_call(cl, RepositoryList, 'GET', params).json
repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
assert 'devtable/simple' in repos
assert 'public/publicrepo' not in repos
# Add a star on publicrepo.
publicrepo = model.repository.get_repository('public', 'publicrepo')
model.repository.star_repository(model.user.get_user('devtable'), publicrepo)
# Ensure publicrepo shows up.
response = conduct_api_call(cl, RepositoryList, 'GET', params).json
repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
assert 'devtable/simple' in repos
assert 'public/publicrepo' in repos
# Make publicrepo private and ensure it disappears.
model.repository.set_repository_visibility(publicrepo, 'private')
response = conduct_api_call(cl, RepositoryList, 'GET', params).json
repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
assert 'devtable/simple' in repos
assert 'public/publicrepo' not in repos
def test_list_repositories_last_modified(client):
with client_with_identity('devtable', client) as cl:
params = {
'namespace': 'devtable',
'last_modified': 'true',
}
response = conduct_api_call(cl, RepositoryList, 'GET', params).json
for repo in response['repositories']:
if repo['name'] != 'building':
assert repo['last_modified'] is not None
@pytest.mark.parametrize('repo_name, expected_status', [
pytest.param('x' * 255, 201, id='Maximum allowed length'),
pytest.param('x' * 256, 400, id='Over allowed length'),
pytest.param('a|b', 400, id='Invalid name'),
])
def test_create_repository(repo_name, expected_status, client):
with client_with_identity('devtable', client) as cl:
body = {
'namespace': 'devtable',
'repository': repo_name,
'visibility': 'public',
'description': 'foo',
}
result = conduct_api_call(client, RepositoryList, 'post', None, body,
expected_code=expected_status).json
if expected_status == 201:
assert result['name'] == repo_name
assert model.repository.get_repository('devtable', repo_name).name == repo_name
@pytest.mark.parametrize('has_tag_manifest', [
True,
False,
])
def test_get_repo(has_tag_manifest, client, initialized_db):
with client_with_identity('devtable', client) as cl:
if not has_tag_manifest:
database.TagManifestLabelMap.delete().execute()
database.TagManifestToManifest.delete().execute()
database.TagManifestLabel.delete().execute()
database.TagManifest.delete().execute()
params = {'repository': 'devtable/simple'}
response = conduct_api_call(cl, Repository, 'GET', params).json
assert response['kind'] == 'image'
def test_get_app_repo(client, initialized_db):
with client_with_identity('devtable', client) as cl:
devtable = model.user.get_user('devtable')
repo = model.repository.create_repository('devtable', 'someappr', devtable,
repo_kind='application')
models_ref = appr_model.models_ref
blob.get_or_create_blob('sha256:somedigest', 0, 'application/vnd.cnr.blob.v0.tar+gzip',
['local_us'], models_ref)
release.create_app_release(repo, 'test',
dict(mediaType='application/vnd.cnr.package-manifest.helm.v0.json'),
'sha256:somedigest', models_ref, False)
channel.create_or_update_channel(repo, 'somechannel', 'test', models_ref)
params = {'repository': 'devtable/someappr'}
response = conduct_api_call(cl, Repository, 'GET', params).json
assert response['kind'] == 'application'
assert response['channels']
assert response['releases']
@pytest.mark.parametrize('state, can_write', [
(database.RepositoryState.NORMAL, True),
(database.RepositoryState.READ_ONLY, False),
(database.RepositoryState.MIRROR, False),
])
def test_get_repo_state_can_write(state, can_write, client, initialized_db):
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
response = conduct_api_call(cl, Repository, 'GET', params).json
assert response['can_write']
repo = model.repository.get_repository('devtable', 'simple')
repo.state = state
repo.save()
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/simple'}
response = conduct_api_call(cl, Repository, 'GET', params).json
assert response['can_write'] == can_write

@@ -0,0 +1,90 @@
import pytest
from mock import Mock, MagicMock
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.repositorynotification import RepositoryNotificationList, RepositoryNotification, TestRepositoryNotification
from endpoints.test.shared import client_with_identity
import endpoints.api.repositorynotification_models_interface as iface
from test.fixtures import *
@pytest.fixture()
def authd_client(client):
with client_with_identity('devtable', client) as cl:
yield cl
def mock_get_notification(uuid):
mock_notification = MagicMock(iface.RepositoryNotification)
if uuid == 'exists':
mock_notification.return_value = iface.RepositoryNotification(
'exists',
'title',
'event_name',
'method_name',
'config_json',
'event_config_json',
2,
)
else:
mock_notification.return_value = None
return mock_notification
@pytest.mark.parametrize('namespace,repository,body,expected_code',[
('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_push',
method='webhook', eventConfig={}, title='test'), 201),
('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_started',
method='webhook', eventConfig={}, title='test'), 201),
('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_success',
method='webhook', eventConfig={}, title='test'), 201),
('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_failed',
method='webhook', eventConfig={}, title='test'), 201)
])
def test_create_repo_notification(namespace, repository, body, expected_code, authd_client):
params = {'repository': namespace + '/' + repository}
conduct_api_call(authd_client, RepositoryNotificationList, 'POST', params, body, expected_code=expected_code)
@pytest.mark.parametrize('namespace,repository,expected_code',[
('devtable', 'simple', 200)
])
def test_list_repo_notifications(namespace, repository, expected_code, authd_client):
params = {'repository': namespace + '/' + repository}
resp = conduct_api_call(authd_client, RepositoryNotificationList, 'GET', params, expected_code=expected_code).json
assert len(resp['notifications']) > 0
@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
('devtable', 'simple', 'exists', 200),
('devtable', 'simple', 'not found', 404),
])
def test_get_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
monkeypatch.setattr('endpoints.api.repositorynotification.model.get_repo_notification', mock_get_notification(uuid))
params = {'repository': namespace + '/' + repository, 'uuid': uuid}
conduct_api_call(authd_client, RepositoryNotification, 'GET', params, expected_code=expected_code)
@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
('devtable', 'simple', 'exists', 204),
('devtable', 'simple', 'not found', 400),
])
def test_delete_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
monkeypatch.setattr('endpoints.api.repositorynotification.model.delete_repo_notification', mock_get_notification(uuid))
params = {'repository': namespace + '/' + repository, 'uuid': uuid}
conduct_api_call(authd_client, RepositoryNotification, 'DELETE', params, expected_code=expected_code)
@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
('devtable', 'simple', 'exists', 204),
('devtable', 'simple', 'not found', 400),
])
def test_reset_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
monkeypatch.setattr('endpoints.api.repositorynotification.model.reset_notification_number_of_failures', mock_get_notification(uuid))
params = {'repository': namespace + '/' + repository, 'uuid': uuid}
conduct_api_call(authd_client, RepositoryNotification, 'POST', params, expected_code=expected_code)
@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
('devtable', 'simple', 'exists', 200),
('devtable', 'simple', 'not found', 400),
])
def test_test_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
monkeypatch.setattr('endpoints.api.repositorynotification.model.queue_test_notification', mock_get_notification(uuid))
params = {'repository': namespace + '/' + repository, 'uuid': uuid}
conduct_api_call(authd_client, TestRepositoryNotification, 'POST', params, expected_code=expected_code)

@@ -0,0 +1,104 @@
import pytest
import json
from data import model
from endpoints.api import api
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.robot import UserRobot, OrgRobot, UserRobotList, OrgRobotList
from endpoints.test.shared import client_with_identity
from util.names import parse_robot_username
from test.test_ldap import mock_ldap
from test.fixtures import *
@pytest.mark.parametrize('endpoint', [
UserRobot,
OrgRobot,
])
@pytest.mark.parametrize('body', [
{},
{'description': 'this is a description'},
{'unstructured_metadata': {'foo': 'bar'}},
{'description': 'this is a description', 'unstructured_metadata': {'foo': 'bar'}},
])
def test_create_robot_with_metadata(endpoint, body, client):
with client_with_identity('devtable', client) as cl:
# Create the robot with the specified body.
conduct_api_call(cl, endpoint, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'somebot'},
body, expected_code=201)
# Ensure the create succeeded.
resp = conduct_api_call(cl, endpoint, 'GET', {
'orgname': 'buynlarge',
'robot_shortname': 'somebot',
})
body = body or {}
assert resp.json['description'] == (body.get('description') or '')
assert resp.json['unstructured_metadata'] == (body.get('unstructured_metadata') or {})
@pytest.mark.parametrize('endpoint, params', [
(UserRobot, {'robot_shortname': 'dtrobot'}),
(OrgRobot, {'orgname': 'buynlarge', 'robot_shortname': 'coolrobot'}),
])
def test_retrieve_robot(endpoint, params, app, client):
with client_with_identity('devtable', client) as cl:
result = conduct_api_call(cl, endpoint, 'GET', params, None)
assert result.json['token'] is not None
@pytest.mark.parametrize('endpoint, params, bot_endpoint', [
(UserRobotList, {}, UserRobot),
(OrgRobotList, {'orgname': 'buynlarge'}, OrgRobot),
])
@pytest.mark.parametrize('include_token', [
True,
False,
])
@pytest.mark.parametrize('limit', [
None,
1,
5,
])
def test_retrieve_robots(endpoint, params, bot_endpoint, include_token, limit, app, client):
params['token'] = 'true' if include_token else 'false'
if limit is not None:
params['limit'] = limit
with client_with_identity('devtable', client) as cl:
result = conduct_api_call(cl, endpoint, 'GET', params, None)
if limit is not None:
assert len(result.json['robots']) <= limit
for robot in result.json['robots']:
assert (robot.get('token') is not None) == include_token
if include_token:
bot_params = dict(params)
bot_params['robot_shortname'] = parse_robot_username(robot['name'])[1]
result = conduct_api_call(cl, bot_endpoint, 'GET', bot_params, None)
assert robot.get('token') == result.json['token']
@pytest.mark.parametrize('username, is_admin', [
('devtable', True),
('reader', False),
])
@pytest.mark.parametrize('with_permissions', [
True,
False,
])
def test_retrieve_robots_token_permission(username, is_admin, with_permissions, app, client):
with client_with_identity(username, client) as cl:
params = {'orgname': 'buynlarge', 'token': 'true'}
if with_permissions:
params['permissions'] = 'true'
result = conduct_api_call(cl, OrgRobotList, 'GET', params, None)
assert result.json['robots']
for robot in result.json['robots']:
assert (robot.get('token') is not None) == is_admin
assert (robot.get('repositories') is not None) == (is_admin and with_permissions)

@@ -0,0 +1,41 @@
import pytest
from playhouse.test_utils import assert_query_count
from data import model, database
from endpoints.api.search import ConductRepositorySearch, ConductSearch
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.parametrize('query', [
(''),
('simple'),
('public'),
('repository'),
])
def test_repository_search(query, client):
# Prime the caches.
database.Repository.kind.get_id('image')
database.Repository.kind.get_name(1)
with client_with_identity('devtable', client) as cl:
params = {'query': query}
with assert_query_count(7):
result = conduct_api_call(cl, ConductRepositorySearch, 'GET', params, None, 200).json
assert result['start_index'] == 0
assert result['page'] == 1
assert len(result['results'])
@pytest.mark.parametrize('query', [
('simple'),
('public'),
('repository'),
])
def test_search_query_count(query, client):
with client_with_identity('devtable', client) as cl:
params = {'query': query}
with assert_query_count(10):
result = conduct_api_call(cl, ConductSearch, 'GET', params, None, 200).json
assert len(result['results'])

@@ -0,0 +1,30 @@
import base64
import pytest
from data.registry_model import registry_model
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
from test.fixtures import *
@pytest.mark.parametrize('endpoint', [
RepositoryImageSecurity,
RepositoryManifestSecurity,
])
def test_get_security_info_with_pull_secret(endpoint, client):
repository_ref = registry_model.lookup_repository('devtable', 'simple')
tag = registry_model.get_repo_tag(repository_ref, 'latest', include_legacy_image=True)
manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
params = {
'repository': 'devtable/simple',
'imageid': tag.legacy_image.docker_image_id,
'manifestref': manifest.digest,
}
headers = {
'Authorization': 'Basic %s' % base64.b64encode('devtable:password'),
}
conduct_api_call(client, endpoint, 'GET', params, None, headers=headers, expected_code=200)
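The Authorization header above relies on Python 2 string handling; under Python 3, base64.b64encode takes and returns bytes, so a hypothetical bytes-safe equivalent would look like:

import base64

def basic_auth_header(username, password):
    # Build the same 'Basic <base64(user:password)>' header with explicit encoding.
    creds = ('%s:%s' % (username, password)).encode('utf-8')
    return {'Authorization': 'Basic %s' % base64.b64encode(creds).decode('ascii')}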

File diff suppressed because it is too large

@@ -0,0 +1,55 @@
import pytest
from collections import Counter
from mock import patch
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.signing import RepositorySignatures
from endpoints.test.shared import client_with_identity
from test.fixtures import *
VALID_TARGETS_MAP = {
"targets/ci": {
"targets": {
"latest": {
"hashes": {
"sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
},
"length": 2111
}
},
"expiration": "2020-05-22T10:26:46.618176424-04:00"
},
"targets": {
"targets": {
"latest": {
"hashes": {
"sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
},
"length": 2111
}
},
"expiration": "2020-05-22T10:26:01.953414888-04:00"}
}
def tags_equal(expected, actual):
expected_tags = expected.get('delegations')
actual_tags = actual.get('delegations')
if expected_tags and actual_tags:
return Counter(expected_tags) == Counter(actual_tags)
return expected == actual
@pytest.mark.parametrize('targets_map,expected', [
(VALID_TARGETS_MAP, {'delegations': VALID_TARGETS_MAP}),
({'bad': 'tags'}, {'delegations': {'bad': 'tags'}}),
({}, {'delegations': {}}),
(None, {'delegations': None}), # API returns None on exceptions
])
def test_get_signatures(targets_map, expected, client):
with patch('endpoints.api.signing.tuf_metadata_api') as mock_tuf:
mock_tuf.get_all_tags_with_expiration.return_value = targets_map
with client_with_identity('devtable', client) as cl:
params = {'repository': 'devtable/trusted'}
assert tags_equal(expected, conduct_api_call(cl, RepositorySignatures, 'GET', params, None, 200).json)

@@ -0,0 +1,43 @@
import pytest
from mock import patch
from endpoints.api.subscribe_models_pre_oci import data_model
@pytest.mark.parametrize('username,repo_count', [
('devtable', 3)
])
def test_get_private_repo_count(username, repo_count):
with patch('endpoints.api.subscribe_models_pre_oci.get_private_repo_count') as mock_get_private_repo_count:
mock_get_private_repo_count.return_value = repo_count
count = data_model.get_private_repo_count(username)
mock_get_private_repo_count.assert_called_once_with(username)
assert count == repo_count
@pytest.mark.parametrize('kind_name,target_username,metadata', [
('over_private_usage', 'devtable', {'namespace': 'devtable'})
])
def test_create_unique_notification(kind_name, target_username, metadata):
with patch('endpoints.api.subscribe_models_pre_oci.get_user_or_org') as mock_get_user_or_org:
mock_get_user_or_org.return_value = {'username': target_username}
with patch('endpoints.api.subscribe_models_pre_oci.create_unique_notification') as mock_create_unique_notification:
data_model.create_unique_notification(kind_name, target_username, metadata)
mock_get_user_or_org.assert_called_once_with(target_username)
mock_create_unique_notification.assert_called_once_with(kind_name, mock_get_user_or_org.return_value, metadata)
@pytest.mark.parametrize('target_username,kind_name', [
('devtable', 'over_private_usage')
])
def test_delete_notifications_by_kind(target_username, kind_name):
with patch('endpoints.api.subscribe_models_pre_oci.get_user_or_org') as mock_get_user_or_org:
mock_get_user_or_org.return_value = {'username': target_username}
with patch('endpoints.api.subscribe_models_pre_oci.delete_notifications_by_kind') as mock_delete_notifications_by_kind:
data_model.delete_notifications_by_kind(target_username, kind_name)
mock_get_user_or_org.assert_called_once_with(target_username)
mock_delete_notifications_by_kind.assert_called_once_with(mock_get_user_or_org.return_value, kind_name)

@@ -0,0 +1,28 @@
import pytest
from endpoints.api.superuser import SuperUserList, SuperUserManagement
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.parametrize('disabled', [
(True),
(False),
])
def test_list_all_users(disabled, client):
with client_with_identity('devtable', client) as cl:
params = {'disabled': disabled}
result = conduct_api_call(cl, SuperUserList, 'GET', params, None, 200).json
assert len(result['users'])
for user in result['users']:
if not disabled:
assert user['enabled']
def test_change_install_user(client):
with client_with_identity('devtable', client) as cl:
params = {'username': 'randomuser'}
body = {'email': 'new_email123@test.com'}
result = conduct_api_call(cl, SuperUserManagement, 'PUT', params, body, 200).json
assert result['email'] == body['email']

@@ -0,0 +1,116 @@
import pytest
from playhouse.test_utils import assert_query_count
from data.registry_model import registry_model
from data import model
from data.database import Manifest
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags, RepositoryTagImages
from test.fixtures import *
@pytest.mark.parametrize('expiration_time, expected_status', [
(None, 201),
('aksdjhasd', 400),
])
def test_change_tag_expiration_default(expiration_time, expected_status, client, app):
with client_with_identity('devtable', client) as cl:
params = {
'repository': 'devtable/simple',
'tag': 'latest',
}
request_body = {
'expiration': expiration_time,
}
conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
def test_change_tag_expiration(client, app):
with client_with_identity('devtable', client) as cl:
params = {
'repository': 'devtable/simple',
'tag': 'latest',
}
tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
updated_expiration = tag.lifetime_start_ts + 60*60*24
request_body = {
'expiration': updated_expiration,
}
conduct_api_call(cl, RepositoryTag, 'put', params, request_body, 201)
tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
assert tag.lifetime_end_ts == updated_expiration
@pytest.mark.parametrize('image_exists,test_tag,expected_status', [
(True, '-INVALID-TAG-NAME', 400),
(True, '.INVALID-TAG-NAME', 400),
(True,
'INVALID-TAG_NAME-BECAUSE-THIS-IS-WAY-WAY-TOO-LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG',
400),
(False, 'newtag', 404),
(True, 'generatemanifestfail', None),
(True, 'latest', 201),
(True, 'newtag', 201),
])
def test_move_tag(image_exists, test_tag, expected_status, client, app):
with client_with_identity('devtable', client) as cl:
test_image = 'unknown'
if image_exists:
repo_ref = registry_model.lookup_repository('devtable', 'simple')
tag_ref = registry_model.get_repo_tag(repo_ref, 'latest', include_legacy_image=True)
assert tag_ref
test_image = tag_ref.legacy_image.docker_image_id
params = {'repository': 'devtable/simple', 'tag': test_tag}
request_body = {'image': test_image}
if expected_status is None:
with pytest.raises(Exception):
conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
else:
conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
@pytest.mark.parametrize('repo_namespace, repo_name, query_count', [
('devtable', 'simple', 5),
('devtable', 'history', 5),
('devtable', 'complex', 5),
('devtable', 'gargantuan', 5),
('buynlarge', 'orgrepo', 7), # +2 for permissions checks.
('buynlarge', 'anotherorgrepo', 7), # +2 for permissions checks.
])
def test_list_repo_tags(repo_namespace, repo_name, client, query_count, app):
# Pre-cache media type loads to ensure consistent query count.
Manifest.media_type.get_name(1)
params = {'repository': repo_namespace + '/' + repo_name}
with client_with_identity('devtable', client) as cl:
with assert_query_count(query_count):
tags = conduct_api_call(cl, ListRepositoryTags, 'get', params).json['tags']
repo_ref = registry_model.lookup_repository(repo_namespace, repo_name)
history, _ = registry_model.list_repository_tag_history(repo_ref)
assert len(tags) == len(history)
@pytest.mark.parametrize('repository, tag, owned, expect_images', [
('devtable/simple', 'prod', False, True),
('devtable/simple', 'prod', True, False),
('devtable/simple', 'latest', False, True),
('devtable/simple', 'latest', True, False),
('devtable/complex', 'prod', False, True),
('devtable/complex', 'prod', True, True),
])
def test_list_tag_images(repository, tag, owned, expect_images, client, app):
with client_with_identity('devtable', client) as cl:
params = {'repository': repository, 'tag': tag, 'owned': owned}
result = conduct_api_call(cl, RepositoryTagImages, 'get', params, None, 200).json
assert bool(result['images']) == expect_images

@@ -0,0 +1,90 @@
import json
from mock import patch
from data import model
from endpoints.api import api
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList
from endpoints.api.organization import Organization
from endpoints.test.shared import client_with_identity
from test.test_ldap import mock_ldap
from test.fixtures import *
SYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'synced'}
UNSYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'owners'}
def test_team_syncing(client):
with mock_ldap() as ldap:
with patch('endpoints.api.team.authentication', ldap):
with client_with_identity('devtable', client) as cl:
config = {
'group_dn': 'cn=AwesomeFolk',
}
conduct_api_call(cl, OrganizationTeamSyncing, 'POST', UNSYNCED_TEAM_PARAMS, config)
# Ensure the team is now synced.
sync_info = model.team.get_team_sync_information(UNSYNCED_TEAM_PARAMS['orgname'],
UNSYNCED_TEAM_PARAMS['teamname'])
assert sync_info is not None
assert json.loads(sync_info.config) == config
# Remove the syncing.
conduct_api_call(cl, OrganizationTeamSyncing, 'DELETE', UNSYNCED_TEAM_PARAMS, None)
# Ensure the team is no longer synced.
sync_info = model.team.get_team_sync_information(UNSYNCED_TEAM_PARAMS['orgname'],
UNSYNCED_TEAM_PARAMS['teamname'])
assert sync_info is None
def test_team_member_sync_info(client):
with mock_ldap() as ldap:
with patch('endpoints.api.team.authentication', ldap):
# Check for an unsynced team, with superuser.
with client_with_identity('devtable', client) as cl:
resp = conduct_api_call(cl, TeamMemberList, 'GET', UNSYNCED_TEAM_PARAMS)
assert 'can_sync' in resp.json
assert resp.json['can_sync']['service'] == 'ldap'
assert 'synced' not in resp.json
# Check for an unsynced team, with non-superuser.
with client_with_identity('randomuser', client) as cl:
resp = conduct_api_call(cl, TeamMemberList, 'GET', UNSYNCED_TEAM_PARAMS)
assert 'can_sync' not in resp.json
assert 'synced' not in resp.json
# Check for a synced team, with superuser.
with client_with_identity('devtable', client) as cl:
resp = conduct_api_call(cl, TeamMemberList, 'GET', SYNCED_TEAM_PARAMS)
assert 'can_sync' in resp.json
assert resp.json['can_sync']['service'] == 'ldap'
assert 'synced' in resp.json
assert 'last_updated' in resp.json['synced']
assert 'group_dn' in resp.json['synced']['config']
# Check for a synced team, with non-superuser.
with client_with_identity('randomuser', client) as cl:
resp = conduct_api_call(cl, TeamMemberList, 'GET', SYNCED_TEAM_PARAMS)
assert 'can_sync' not in resp.json
assert 'synced' in resp.json
assert 'last_updated' not in resp.json['synced']
assert 'config' not in resp.json['synced']
def test_organization_teams_sync_bool(client):
with mock_ldap() as ldap:
with patch('endpoints.api.organization.authentication', ldap):
# Ensure synced teams are marked as such in the organization teams list.
with client_with_identity('devtable', client) as cl:
resp = conduct_api_call(cl, Organization, 'GET', {'orgname': 'sellnsmall'})
assert not resp.json['teams']['owners']['is_synced']
assert resp.json['teams']['synced']['is_synced']

@@ -0,0 +1,55 @@
import pytest
import json
from data import model
from endpoints.api.trigger_analyzer import is_parent
from endpoints.api.trigger import BuildTrigger
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.fixtures import *
@pytest.mark.parametrize('context,dockerfile_path,expected', [
("/", "/a/b", True),
("/a", "/a/b", True),
("/a/b", "/a/b", False),
("/a//", "/a/b", True),
("/a", "/a//b/c", True),
("/a//", "a/b", True),
("/a/b", "a/bc/d", False),
("/d", "/a/b", False),
("/a/b", "/a/b.c", False),
("/a/b", "/a/b/b.c", True),
("", "/a/b.c", False),
("/a/b", "", False),
("", "", False),
])
def test_super_user_build_endpoints(context, dockerfile_path, expected):
assert is_parent(context, dockerfile_path) == expected
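The cases above effectively specify is_parent as: the Dockerfile's directory, once normalized and anchored at '/', lies at or below the context. A sketch under that reading (helper names are illustrative, not the actual endpoints.api.trigger_analyzer implementation):

import os

def _anchored(path_value):
    # Normalize and force leading/trailing slashes so prefix checks are unambiguous.
    normalized = os.path.normpath(path_value)
    if not normalized.startswith('/'):
        normalized = '/' + normalized
    if not normalized.endswith('/'):
        normalized += '/'
    return normalized

def is_parent_sketch(context, dockerfile_path):
    if not context or not dockerfile_path:
        return False
    return _anchored(os.path.dirname(dockerfile_path)).startswith(_anchored(context))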
def test_enabled_disabled_trigger(app, client):
trigger = model.build.list_build_triggers('devtable', 'building')[0]
trigger.config = json.dumps({'hook_id': 'someid'})
trigger.save()
params = {
'repository': 'devtable/building',
'trigger_uuid': trigger.uuid,
}
body = {
'enabled': False,
}
with client_with_identity('devtable', client) as cl:
result = conduct_api_call(cl, BuildTrigger, 'PUT', params, body, 200).json
assert not result['enabled']
body = {
'enabled': True,
}
with client_with_identity('devtable', client) as cl:
result = conduct_api_call(cl, BuildTrigger, 'PUT', params, body, 200).json
assert result['enabled']

@@ -0,0 +1,152 @@
import pytest
from mock import Mock
from auth import permissions
from data import model
from endpoints.api.trigger_analyzer import TriggerAnalyzer
from util import dockerfileparse
BAD_PATH = "\"server_hostname/\" is not a valid Quay repository path"
EMPTY_CONF = {}
GOOD_CONF = {'context': '/', 'dockerfile_path': '/file'}
BAD_CONF = {'context': 'context', 'dockerfile_path': 'dockerfile_path'}
ONE_ROBOT = {'can_read': False, 'is_robot': True, 'kind': 'user', 'name': 'name'}
DOCKERFILE_NOT_CHILD = 'Dockerfile, context, is not a child of the context, dockerfile_path.'
THE_DOCKERFILE_SPECIFIED = 'Could not parse the Dockerfile specified'
DOCKERFILE_PATH_NOT_FOUND = 'Specified Dockerfile path for the trigger was not found on the main branch. This trigger may fail.'
NO_FROM_LINE = 'No FROM line found in the Dockerfile'
REPO_NOT_FOUND = 'Repository "server_hostname/path/file" referenced by the Dockerfile was not found'
@pytest.fixture
def get_monkeypatch(monkeypatch):
return monkeypatch
def patch_permissions(monkeypatch, can_read=False):
def can_read_fn(base_namespace, base_repository):
return can_read
monkeypatch.setattr(permissions, 'ReadRepositoryPermission', can_read_fn)
def patch_list_namespace_robots(monkeypatch):
my_mock = Mock()
my_mock.configure_mock(**{'username': 'name'})
return_value = [my_mock]
def return_list_mocks(namespace):
return return_value
monkeypatch.setattr(model.user, 'list_namespace_robots', return_list_mocks)
return return_value
def patch_get_all_repo_users_transitive(monkeypatch):
my_mock = Mock()
my_mock.configure_mock(**{'username': 'name'})
return_value = [my_mock]
def return_get_mocks(namespace, image_repository):
return return_value
monkeypatch.setattr(model.user, 'get_all_repo_users_transitive', return_get_mocks)
return return_value
def patch_parse_dockerfile(monkeypatch, get_base_image):
if get_base_image is not None:
def return_return_value(content):
parse_mock = Mock()
parse_mock.configure_mock(**{'get_base_image': get_base_image})
return parse_mock
monkeypatch.setattr(dockerfileparse, "parse_dockerfile", return_return_value)
else:
def return_return_value(content):
return get_base_image
monkeypatch.setattr(dockerfileparse, "parse_dockerfile", return_return_value)
def patch_model_repository_get_repository(monkeypatch, get_repository):
if get_repository is not None:
def mock_get_repository(base_namespace, base_repository):
vis_mock = Mock()
vis_mock.name = get_repository
get_repo_mock = Mock(visibility=vis_mock)
return get_repo_mock
else:
def mock_get_repository(base_namespace, base_repository):
return None
monkeypatch.setattr(model.repository, "get_repository", mock_get_repository)
def return_none():
return None
def return_content():
return Mock()
def return_server_hostname():
return "server_hostname/"
def return_non_server_hostname():
return "slime"
def return_path():
return "server_hostname/path/file"
@pytest.mark.parametrize(
'handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots, server_hostname, get_repository, can_read, namespace, name', [
(return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [], None, None, False, "namespace", None),
(return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [ONE_ROBOT], None, None, False, "namespace", None),
(return_content, BAD_CONF, False, "error", THE_DOCKERFILE_SPECIFIED, None, [], None, None, False, "namespace", None),
(return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [], None, None, False, "namespace", None),
(return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [ONE_ROBOT], None, None, False, "namespace", None),
(return_content, BAD_CONF, False, "error", DOCKERFILE_NOT_CHILD, return_none, [], None, None, False, "namespace", None),
(return_content, GOOD_CONF, False, "warning", NO_FROM_LINE, return_none, [], None, None, False, "namespace", None),
(return_content, GOOD_CONF, False, "publicbase", None, return_non_server_hostname, [], "server_hostname", None, False, "namespace", None),
(return_content, GOOD_CONF, False, "warning", BAD_PATH, return_server_hostname, [], "server_hostname", None, False, "namespace", None),
(return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", None, False, "namespace", None),
(return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", "nonpublic", False, "namespace", None),
(return_content, GOOD_CONF, False, "requiresrobot", None, return_path, [], "server_hostname", "nonpublic", True, "path", "file"),
(return_content, GOOD_CONF, False, "publicbase", None, return_path, [], "server_hostname", "public", True, "path", "file"),
])
def test_trigger_analyzer(handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots,
server_hostname, get_repository, can_read, namespace, name,
get_monkeypatch):
patch_list_namespace_robots(get_monkeypatch)
patch_get_all_repo_users_transitive(get_monkeypatch)
patch_parse_dockerfile(get_monkeypatch, get_base_image)
patch_model_repository_get_repository(get_monkeypatch, get_repository)
patch_permissions(get_monkeypatch, can_read)
handler_mock = Mock()
handler_mock.configure_mock(**{'load_dockerfile_contents': handler_fn})
trigger_analyzer = TriggerAnalyzer(handler_mock, 'namespace', server_hostname, config_dict, admin_org_permission)
assert trigger_analyzer.analyze_trigger() == {'namespace': namespace,
'name': name,
'robots': robots,
'status': status,
'message': message,
'is_admin': admin_org_permission}

@@ -0,0 +1,42 @@
import pytest
from mock import patch
from endpoints.api.test.shared import conduct_api_call
from endpoints.api.user import User
from endpoints.test.shared import client_with_identity
from features import FeatureNameValue
from test.fixtures import *
def test_user_metadata_update(client):
with patch('features.USER_METADATA', FeatureNameValue('USER_METADATA', True)):
with client_with_identity('devtable', client) as cl:
metadata = {
'given_name': 'Quay',
'family_name': 'User',
'location': 'NYC',
'company': 'Red Hat',
}
# Update all user metadata fields.
conduct_api_call(cl, User, 'PUT', None, body=metadata)
# Test that they were successfully updated.
user = conduct_api_call(cl, User, 'GET', None).json
for field in metadata:
assert user.get(field) == metadata.get(field)
# Now nullify one of the fields, and remove another.
metadata['company'] = None
location = metadata.pop('location')
conduct_api_call(cl, User, 'PUT', None, body=metadata)
user = conduct_api_call(cl, User, 'GET', None).json
for field in metadata:
assert user.get(field) == metadata.get(field)
# The location field should be unchanged.
assert user.get('location') == location