608ffd9663
Adds basic label support to the registry code (V2) and the API. Note that this does not yet add any UI-related support.
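For context, a minimal sketch of how the new label support is exercised by the test data in this file (all calls and names below are taken from the file itself; the note on mutability follows from the LabelSourceType rows created in initialize_database):

    # Labels attach to a tag's manifest as (key, value) pairs with a source type;
    # per initialize_database, only 'api'-sourced labels are created mutable.
    tag_manifest = model.tag.load_tag_manifest('devtable', 'simple', 'latest')
    model.label.create_manifest_label(tag_manifest, 'foo', 'bar', 'manifest')
    model.label.create_manifest_label(tag_manifest, 'foo', 'baz', 'api')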
816 lines
33 KiB
Python
import logging
import json
import hashlib
import random
import calendar
import os
import argparse

from datetime import datetime, timedelta, date
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
                    savepoint)
from itertools import count
from uuid import UUID, uuid4
from threading import Event
from email.utils import formatdate
from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService,
                           BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation,
                           ImageStorageTransformation, ImageStorageSignatureKind,
                           ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind,
                           QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode,
                           ServiceKeyApprovalType, MediaType, LabelSourceType)
from data import model
from data.queue import WorkQueue
from app import app, storage as store, tf
from storage.basestorage import StoragePaths
from endpoints.v2.manifest import _generate_and_store_manifest

from workers import repositoryactioncounter


logger = logging.getLogger(__name__)

SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
                for i in range(1, 10)]

SAMPLE_CMDS = [["/bin/bash"],
               ["/bin/sh", "-c",
                "echo \"PasswordAuthentication no\" >> /etc/ssh/sshd_config"],
               ["/bin/sh", "-c",
                "sed -i 's/#\\(force_color_prompt\\)/\\1/' /etc/skel/.bashrc"],
               ["/bin/sh", "-c", "#(nop) EXPOSE [8080]"],
               ["/bin/sh", "-c",
                "#(nop) MAINTAINER Jake Moshenko <jake@devtable.com>"],
               None]

REFERENCE_DATE = datetime(2013, 6, 23)
TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'

IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))

def __gen_checksum(image_id):
  csum = hashlib.md5(image_id)
  return 'tarsum+sha256:' + csum.hexdigest() + csum.hexdigest()


def __gen_image_id(repo, image_num):
  str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)

  img_id = hashlib.md5(str_to_hash)
  return img_id.hexdigest() + img_id.hexdigest()


def __gen_image_uuid(repo, image_num):
  str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)

  img_uuid = hashlib.md5(str_to_hash)
  return UUID(bytes=img_uuid.digest())


global_image_num = count()

def __create_subtree(with_storage, repo, structure, creator_username, parent, tag_map):
  # structure is a tuple of (num_nodes, subtrees, last_node_tags).
  num_nodes, subtrees, last_node_tags = structure

  # create the nodes
  for model_num in range(num_nodes):
    image_num = next(global_image_num)
    docker_image_id = __gen_image_id(repo, image_num)
    logger.debug('new docker id: %s', docker_image_id)
    checksum = __gen_checksum(docker_image_id)

    new_image = model.image.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
    new_image.storage.uuid = __gen_image_uuid(repo, image_num)
    new_image.storage.uploading = False
    new_image.storage.save()

    # Write out a fake torrentinfo
    model.storage.save_torrent_info(new_image.storage, 1, 'deadbeef')

    # Write some data for the storage.
    if with_storage or os.environ.get('WRITE_STORAGE_FILES'):
      storage_paths = StoragePaths()
      paths = [storage_paths.v1_image_layer_path]

      for path_builder in paths:
        path = path_builder(new_image.storage.uuid)
        store.put_content('local_us', path, checksum)

    new_image.security_indexed = False
    new_image.security_indexed_engine = -1
    new_image.save()

    creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)
    command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
    command = json.dumps(command_list) if command_list else None

    v1_metadata = {
      'id': docker_image_id,
    }
    if parent is not None:
      v1_metadata['parent'] = parent.docker_image_id

    new_image = model.image.set_image_metadata(docker_image_id, repo.namespace_user.username,
                                               repo.name, str(creation_time), 'no comment', command,
                                               json.dumps(v1_metadata), parent)
    new_image.storage.content_checksum = checksum
    new_image.storage.save()

    compressed_size = random.randrange(1, 1024 * 1024 * 1024)
    model.storage.set_image_storage_metadata(docker_image_id, repo.namespace_user.username,
                                             repo.name, compressed_size,
                                             int(compressed_size * 1.4))

    parent = new_image

  if last_node_tags:
    if not isinstance(last_node_tags, list):
      last_node_tags = [last_node_tags]

    for tag_name in last_node_tags:
      new_tag = model.tag.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
                                               new_image.docker_image_id)

      derived = model.image.find_or_create_derived_storage(new_tag, 'squash', 'local_us')
      model.storage.find_or_create_storage_signature(derived, 'gpg2')

      _generate_and_store_manifest(repo.namespace_user.username, repo.name, tag_name)
      tag_map[tag_name] = new_tag

    # Tag names prefixed with '#' become hidden history entries: the tag is
    # renamed without the prefix and its lifetime is ended where the visible
    # tag of the same name begins.
    for tag_name in last_node_tags:
      if tag_name[0] == '#':
        found_tag = tag_map[tag_name]
        found_tag.name = tag_name[1:]
        found_tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
        found_tag.lifetime_start_ts = found_tag.lifetime_end_ts - 10
        found_tag.save()

  for subtree in subtrees:
    __create_subtree(with_storage, repo, subtree, creator_username, new_image, tag_map)

def __generate_service_key(kid, name, user, timestamp, approval_type, expiration=None,
                           metadata=None, service='sample_service', rotation_duration=None):
  _, key = model.service_keys.generate_service_key(service, expiration, kid=kid,
                                                   name=name, metadata=metadata,
                                                   rotation_duration=rotation_duration)

  if approval_type is not None:
    model.service_keys.approve_service_key(key.kid, user, approval_type,
                                           notes='The **test** approval')

  key_metadata = {
    'kid': kid,
    'preshared': True,
    'service': service,
    'name': name,
    'expiration_date': expiration,
    'auto_approved': True
  }

  model.log.log_action('service_key_approve', None, performer=user,
                       timestamp=timestamp, metadata=key_metadata)

  model.log.log_action('service_key_create', None, performer=user,
                       timestamp=timestamp, metadata=key_metadata)

def __generate_repository(with_storage, user_obj, name, description, is_public, permissions,
                          structure):
  repo = model.repository.create_repository(user_obj.username, name, user_obj)

  if is_public:
    model.repository.set_repository_visibility(repo, 'public')

  if description:
    repo.description = description
    repo.save()

  for delegate, role in permissions:
    model.permission.set_user_repo_permission(delegate.username, user_obj.username, name, role)

  if isinstance(structure, list):
    for leaf in structure:
      __create_subtree(with_storage, repo, leaf, user_obj.username, None, {})
  else:
    __create_subtree(with_storage, repo, structure, user_obj.username, None, {})

  return repo

db_initialized_for_testing = Event()
testcases = {}


def finished_database_for_testing(testcase):
  """ Called when a testcase has finished using the database, indicating that
      any changes should be discarded.
  """
  testcases[testcase]['savepoint'].__exit__(True, None, None)


def setup_database_for_testing(testcase, with_storage=False, force_rebuild=False):
  """ Called when a testcase has started using the database, indicating that
      the database should be set up (if not already) and a savepoint created.
  """

  # Sanity check to make sure we're not killing our prod db
  if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  if not db_initialized_for_testing.is_set() or force_rebuild:
    logger.debug('Setting up DB for testing.')

    # Setup the database.
    wipe_database()
    initialize_database()
    populate_database(with_storage=with_storage)

    models_missing_data = find_models_missing_data()
    if models_missing_data:
      raise RuntimeError('%s models are missing data: %s' % (len(models_missing_data),
                                                             models_missing_data))

    # Enable foreign key constraints.
    if not IS_TESTING_REAL_DATABASE:
      db.obj.execute_sql('PRAGMA foreign_keys = ON;')

    db_initialized_for_testing.set()

  # Create a savepoint for the testcase.
  test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)

  testcases[testcase] = {}
  testcases[testcase]['savepoint'] = test_savepoint
  testcases[testcase]['savepoint'].__enter__()

def initialize_database():
  create_model_tables(all_models)

  Role.create(name='admin')
  Role.create(name='write')
  Role.create(name='read')
  TeamRole.create(name='admin')
  TeamRole.create(name='creator')
  TeamRole.create(name='member')
  Visibility.create(name='public')
  Visibility.create(name='private')

  LoginService.create(name='google')
  LoginService.create(name='github')
  LoginService.create(name='quayrobot')
  LoginService.create(name='ldap')
  LoginService.create(name='jwtauthn')
  LoginService.create(name='keystone')
  LoginService.create(name='dex')

  BuildTriggerService.create(name='github')
  BuildTriggerService.create(name='custom-git')
  BuildTriggerService.create(name='bitbucket')
  BuildTriggerService.create(name='gitlab')

  AccessTokenKind.create(name='build-worker')
  AccessTokenKind.create(name='pushpull-token')

  LogEntryKind.create(name='account_change_plan')
  LogEntryKind.create(name='account_change_cc')
  LogEntryKind.create(name='account_change_password')
  LogEntryKind.create(name='account_convert')

  LogEntryKind.create(name='create_robot')
  LogEntryKind.create(name='delete_robot')

  LogEntryKind.create(name='create_repo')
  LogEntryKind.create(name='push_repo')
  LogEntryKind.create(name='pull_repo')
  LogEntryKind.create(name='delete_repo')
  LogEntryKind.create(name='create_tag')
  LogEntryKind.create(name='move_tag')
  LogEntryKind.create(name='delete_tag')
  LogEntryKind.create(name='revert_tag')
  LogEntryKind.create(name='add_repo_permission')
  LogEntryKind.create(name='change_repo_permission')
  LogEntryKind.create(name='delete_repo_permission')
  LogEntryKind.create(name='change_repo_visibility')
  LogEntryKind.create(name='add_repo_accesstoken')
  LogEntryKind.create(name='delete_repo_accesstoken')
  LogEntryKind.create(name='set_repo_description')

  LogEntryKind.create(name='build_dockerfile')

  LogEntryKind.create(name='org_create_team')
  LogEntryKind.create(name='org_delete_team')
  LogEntryKind.create(name='org_invite_team_member')
  LogEntryKind.create(name='org_delete_team_member_invite')
  LogEntryKind.create(name='org_add_team_member')
  LogEntryKind.create(name='org_team_member_invite_accepted')
  LogEntryKind.create(name='org_team_member_invite_declined')
  LogEntryKind.create(name='org_remove_team_member')
  LogEntryKind.create(name='org_set_team_description')
  LogEntryKind.create(name='org_set_team_role')

  LogEntryKind.create(name='create_prototype_permission')
  LogEntryKind.create(name='modify_prototype_permission')
  LogEntryKind.create(name='delete_prototype_permission')

  LogEntryKind.create(name='setup_repo_trigger')
  LogEntryKind.create(name='delete_repo_trigger')

  LogEntryKind.create(name='create_application')
  LogEntryKind.create(name='update_application')
  LogEntryKind.create(name='delete_application')
  LogEntryKind.create(name='reset_application_client_secret')

  # Note: These next two are deprecated.
  LogEntryKind.create(name='add_repo_webhook')
  LogEntryKind.create(name='delete_repo_webhook')

  LogEntryKind.create(name='add_repo_notification')
  LogEntryKind.create(name='delete_repo_notification')

  LogEntryKind.create(name='regenerate_robot_token')

  LogEntryKind.create(name='repo_verb')

  LogEntryKind.create(name='service_key_create')
  LogEntryKind.create(name='service_key_approve')
  LogEntryKind.create(name='service_key_delete')
  LogEntryKind.create(name='service_key_modify')
  LogEntryKind.create(name='service_key_extend')
  LogEntryKind.create(name='service_key_rotate')

  LogEntryKind.create(name='take_ownership')

  LogEntryKind.create(name='manifest_label_add')
  LogEntryKind.create(name='manifest_label_delete')

  ImageStorageLocation.create(name='local_eu')
  ImageStorageLocation.create(name='local_us')

  ImageStorageTransformation.create(name='squash')
  ImageStorageTransformation.create(name='aci')

  ImageStorageSignatureKind.create(name='gpg2')

  # NOTE: These MUST be copied over to NotificationKind, since every external
  # notification can also generate a Quay.io notification.
  ExternalNotificationEvent.create(name='repo_push')
  ExternalNotificationEvent.create(name='build_queued')
  ExternalNotificationEvent.create(name='build_start')
  ExternalNotificationEvent.create(name='build_success')
  ExternalNotificationEvent.create(name='build_failure')
  ExternalNotificationEvent.create(name='vulnerability_found')

  ExternalNotificationMethod.create(name='quay_notification')
  ExternalNotificationMethod.create(name='email')
  ExternalNotificationMethod.create(name='webhook')

  ExternalNotificationMethod.create(name='flowdock')
  ExternalNotificationMethod.create(name='hipchat')
  ExternalNotificationMethod.create(name='slack')

  NotificationKind.create(name='repo_push')
  NotificationKind.create(name='build_queued')
  NotificationKind.create(name='build_start')
  NotificationKind.create(name='build_success')
  NotificationKind.create(name='build_failure')
  NotificationKind.create(name='vulnerability_found')
  NotificationKind.create(name='service_key_submitted')

  NotificationKind.create(name='password_required')
  NotificationKind.create(name='over_private_usage')
  NotificationKind.create(name='expiring_license')
  NotificationKind.create(name='maintenance')
  NotificationKind.create(name='org_team_invite')

  NotificationKind.create(name='test_notification')

  QuayRegion.create(name='us')
  QuayService.create(name='quay')

  MediaType.create(name='text/plain')
  MediaType.create(name='application/json')

  LabelSourceType.create(name='manifest')
  LabelSourceType.create(name='api', mutable=True)
  LabelSourceType.create(name='internal')

def wipe_database():
  logger.debug('Wiping all data from the DB.')

  # Sanity check to make sure we're not killing our prod db
  if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  drop_model_tables(all_models, fail_silently=True)

def populate_database(minimal=False, with_storage=False):
  logger.debug('Populating the DB with test data.')

  new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
  new_user_1.verified = True
  new_user_1.stripe_id = TEST_STRIPE_ID
  new_user_1.save()

  if minimal:
    logger.debug('Skipping most db population because user requested minimal db')
    return

  UserRegion.create(user=new_user_1, location=1)
  model.release.set_region_release('quay', 'us', 'v0.1.2')

  model.user.create_confirm_email_code(new_user_1, new_email='typo@devtable.com')

  disabled_user = model.user.create_user('disabled', 'password', 'jschorr+disabled@devtable.com')
  disabled_user.verified = True
  disabled_user.enabled = False
  disabled_user.save()

  dtrobot = model.user.create_robot('dtrobot', new_user_1)

  new_user_2 = model.user.create_user('public', 'password', 'jacob.moshenko@gmail.com')
  new_user_2.verified = True
  new_user_2.save()

  new_user_3 = model.user.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
  new_user_3.verified = True
  new_user_3.save()

  model.user.create_robot('anotherrobot', new_user_3)

  new_user_4 = model.user.create_user('randomuser', 'password', 'no4@thanks.com')
  new_user_4.verified = True
  new_user_4.save()

  new_user_5 = model.user.create_user('unverified', 'password', 'no5@thanks.com')
  new_user_5.save()

  reader = model.user.create_user('reader', 'password', 'no1@thanks.com')
  reader.verified = True
  reader.save()

  creatoruser = model.user.create_user('creator', 'password', 'noc@thanks.com')
  creatoruser.verified = True
  creatoruser.save()

  outside_org = model.user.create_user('outsideorg', 'password', 'no2@thanks.com')
  outside_org.verified = True
  outside_org.save()

  model.notification.create_notification('test_notification', new_user_1,
                                         metadata={'some': 'value',
                                                   'arr': [1, 2, 3],
                                                   'obj': {'a': 1, 'b': 2}})

  from_date = datetime.utcnow()
  to_date = from_date + timedelta(hours=1)
  notification_metadata = {
    'from_date': formatdate(calendar.timegm(from_date.utctimetuple())),
    'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
    'reason': 'database migration'
  }
  model.notification.create_notification('maintenance', new_user_1,
                                         metadata=notification_metadata)

  __generate_repository(with_storage, new_user_4, 'randomrepo', 'Random repo repository.', False,
                        [], (4, [], ['latest', 'prod']))

  simple_repo = __generate_repository(with_storage, new_user_1, 'simple', 'Simple repository.',
                                      False, [], (4, [], ['latest', 'prod']))

  # Add some labels to the latest tag's manifest.
  tag_manifest = model.tag.load_tag_manifest(new_user_1.username, 'simple', 'latest')
  first_label = model.label.create_manifest_label(tag_manifest, 'foo', 'bar', 'manifest')
  model.label.create_manifest_label(tag_manifest, 'foo', 'baz', 'api')
  model.label.create_manifest_label(tag_manifest, 'anotherlabel', '1234', 'internal')

  label_metadata = {
    'key': 'foo',
    'value': 'bar',
    'id': first_label.id,
    'manifest_digest': tag_manifest.digest
  }

  model.log.log_action('manifest_label_add', new_user_1.username, performer=new_user_1,
                       timestamp=datetime.now(), metadata=label_metadata,
                       repository=tag_manifest.tag.repository)

  model.blob.initiate_upload(new_user_1.username, simple_repo.name, str(uuid4()), 'local_us', {})
  model.notification.create_repo_notification(simple_repo, 'repo_push', 'quay_notification', {}, {})

  __generate_repository(with_storage, new_user_1, 'sharedtags',
                        'Shared tags repository',
                        False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
                        (2, [(3, [], ['v2.0', 'v2.1', 'v2.2']),
                             (1, [(1, [(1, [], ['prod', '581a284'])],
                                   ['staging', '8423b58']),
                                  (1, [], None)], None)], None))

  __generate_repository(with_storage, new_user_1, 'history', 'Historical repository.', False,
                        [], (4, [(2, [], 'latest'), (3, [], '#latest')], None))

  __generate_repository(with_storage, new_user_1, 'complex',
                        'Complex repository with many branches and tags.',
                        False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
                        (2, [(3, [], 'v2.0'),
                             (1, [(1, [(1, [], ['prod'])],
                                   'staging'),
                                  (1, [], None)], None)], None))

  __generate_repository(with_storage, new_user_1, 'gargantuan', None, False, [],
                        (2, [(3, [], 'v2.0'),
                             (1, [(1, [(1, [], ['latest', 'prod'])],
                                   'staging'),
                                  (1, [], None)], None),
                             (20, [], 'v3.0'),
                             (5, [], 'v4.0'),
                             (1, [(1, [], 'v5.0'), (1, [], 'v6.0')], None)],
                         None))

  publicrepo = __generate_repository(with_storage, new_user_2, 'publicrepo',
                                     'Public repository pullable by the world.', True,
                                     [], (10, [], 'latest'))

  __generate_repository(with_storage, outside_org, 'coolrepo',
                        'Some cool repo.', False,
                        [],
                        (5, [], 'latest'))

  __generate_repository(with_storage, new_user_1, 'shared',
                        'Shared repository, another user can write.', False,
                        [(new_user_2, 'write'), (reader, 'read')],
                        (5, [], 'latest'))

  building = __generate_repository(with_storage, new_user_1, 'building',
                                   'Empty repository which is building.',
                                   False, [], (0, [], None))

  new_token = model.token.create_access_token(building, 'write', 'build-worker')

  trigger = model.build.create_build_trigger(building, 'github', '123authtoken', new_user_1,
                                             pull_robot=dtrobot[0])
  trigger.config = json.dumps({
    'build_source': 'jakedt/testconnect',
    'subdir': '',
  })
  trigger.save()

  repo = 'ci.devtable.com:5000/%s/%s' % (building.namespace_user.username, building.name)
  job_config = {
    'repository': repo,
    'docker_tags': ['latest'],
    'build_subdir': '',
    'trigger_metadata': {
      'commit': '3482adc5822c498e8f7db2e361e8d57b3d77ddd9',
      'ref': 'refs/heads/master',
      'default_branch': 'master'
    }
  }

  model.repository.star_repository(new_user_1, simple_repo)

  record = model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
                                                                'jschorr@devtable.com')
  record.confirmed = True
  record.save()

  model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
                                                       'jschorr+other@devtable.com')

  build2 = model.build.create_repository_build(building, new_token, job_config,
                                               '68daeebd-a5b9-457f-80a0-4363b882f8ea',
                                               'build-name', trigger)
  build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
  build2.save()

  build3 = model.build.create_repository_build(building, new_token, job_config,
                                               'f49d07f9-93da-474d-ad5f-c852107c3892',
                                               'build-name', trigger)
  build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
  build3.save()

  build1 = model.build.create_repository_build(building, new_token, job_config,
                                               '701dcc3724fb4f2ea6c31400528343cd', 'build-name',
                                               trigger)
  build1.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
  build1.save()

  org = model.organization.create_organization('buynlarge', 'quay@devtable.com', new_user_1)
  org.stripe_id = TEST_STRIPE_ID
  org.save()

  liborg = model.organization.create_organization('library', 'quay+library@devtable.com',
                                                  new_user_1)
  liborg.save()

  model.user.create_robot('coolrobot', org)

  oauth_app_1 = model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
                                               'http://localhost:8000/o2c.html',
                                               client_id='deadbeef')

  model.oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
                                 'http://localhost:8000/o2c.html', client_id='deadpork',
                                 description='This is another test application')

  model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')

  OAuthAuthorizationCode.create(application=oauth_app_1, code='Z932odswfhasdf1', scope='repo:admin',
                                data='{"somejson": "goeshere"}')

  model.user.create_robot('neworgrobot', org)

  ownerbot = model.user.create_robot('ownerbot', org)[0]
  creatorbot = model.user.create_robot('creatorbot', org)[0]

  owners = model.team.get_organization_team('buynlarge', 'owners')
  owners.description = 'Owners have unfettered access across the entire org.'
  owners.save()

  org_repo = __generate_repository(with_storage, org, 'orgrepo', 'Repository owned by an org.',
                                   False, [(outside_org, 'read')], (4, [], ['latest', 'prod']))

  __generate_repository(with_storage, org, 'anotherorgrepo', 'Another repository owned by an org.',
                        False, [], (4, [], ['latest', 'prod']))

  creators = model.team.create_team('creators', org, 'creator', 'Creators of orgrepo.')

  reader_team = model.team.create_team('readers', org, 'member', 'Readers of orgrepo.')
  model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
  model.permission.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username,
                                            org_repo.name, 'read')

  model.team.add_user_to_team(new_user_2, reader_team)
  model.team.add_user_to_team(reader, reader_team)
  model.team.add_user_to_team(ownerbot, owners)
  model.team.add_user_to_team(creatorbot, creators)
  model.team.add_user_to_team(creatoruser, creators)

  __generate_repository(with_storage, new_user_1, 'superwide', None, False, [],
                        [(10, [], 'latest2'),
                         (2, [], 'latest3'),
                         (2, [(1, [], 'latest11'), (2, [], 'latest12')],
                          'latest4'),
                         (2, [], 'latest5'),
                         (2, [], 'latest6'),
                         (2, [], 'latest7'),
                         (2, [], 'latest8'),
                         (2, [], 'latest9'),
                         (2, [], 'latest10'),
                         (2, [], 'latest13'),
                         (2, [], 'latest14'),
                         (2, [], 'latest15'),
                         (2, [], 'latest16'),
                         (2, [], 'latest17'),
                         (2, [], 'latest18')])

  model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
                                            delegate_user=new_user_2)
  model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
                                            delegate_team=reader_team)
  model.permission.add_prototype_permission(org, 'write', activating_user=new_user_2,
                                            delegate_user=new_user_1)

  today = datetime.today()
  week_ago = today - timedelta(6)
  six_ago = today - timedelta(5)
  four_ago = today - timedelta(4)
  yesterday = datetime.combine(date.today(), datetime.min.time()) - timedelta(hours=6)

  __generate_service_key('kid1', 'somesamplekey', new_user_1, today,
                         ServiceKeyApprovalType.SUPERUSER)
  __generate_service_key('kid2', 'someexpiringkey', new_user_1, week_ago,
                         ServiceKeyApprovalType.SUPERUSER, today + timedelta(days=14))

  __generate_service_key('kid3', 'unapprovedkey', new_user_1, today, None)

  __generate_service_key('kid4', 'autorotatingkey', new_user_1, six_ago,
                         ServiceKeyApprovalType.KEY_ROTATION, today + timedelta(days=1),
                         rotation_duration=timedelta(hours=12).total_seconds())

  __generate_service_key('kid5', 'key for another service', new_user_1, today,
                         ServiceKeyApprovalType.SUPERUSER, today + timedelta(days=14),
                         service='different_sample_service')

  __generate_service_key('kid6', 'someexpiredkey', new_user_1, week_ago,
                         ServiceKeyApprovalType.SUPERUSER, today - timedelta(days=1))

  __generate_service_key('kid7', 'somewayexpiredkey', new_user_1, week_ago,
                         ServiceKeyApprovalType.SUPERUSER, today - timedelta(days=30))

  # Add the test pull key as pre-approved for local and unittest registry testing.
  # Note: this must match the private key found in the local/test config.
  _TEST_JWK = {
    'e': 'AQAB',
    'kty': 'RSA',
    'n': 'yqdQgnelhAPMSeyH0kr3UGePK9oFOmNfwD0Ymnh7YYXr21VHWwyM2eVW3cnLd9KXywDFtGSe9oFDbnOuMCdUowdkBcaHju-isbv5KEbNSoy_T2Rip-6L0cY63YzcMJzv1nEYztYXS8wz76pSK81BKBCLapqOCmcPeCvV9yaoFZYvZEsXCl5jjXN3iujSzSF5Z6PpNFlJWTErMT2Z4QfbDKX2Nw6vJN6JnGpTNHZvgvcyNX8vkSgVpQ8DFnFkBEx54PvRV5KpHAq6AsJxKONMo11idQS2PfCNpa2hvz9O6UZe-eIX8jPo5NW8TuGZJumbdPT_nxTDLfCqfiZboeI0Pw'
  }

  key = model.service_keys.create_service_key('test_service_key', 'test_service_key', 'quay',
                                              _TEST_JWK, {}, None)

  model.service_keys.approve_service_key(key.kid, new_user_1, ServiceKeyApprovalType.SUPERUSER,
                                         notes='Test service key for local/test registry testing')

  model.log.log_action('org_create_team', org.username, performer=new_user_1,
                       timestamp=week_ago, metadata={'team': 'readers'})

  model.log.log_action('org_set_team_role', org.username, performer=new_user_1,
                       timestamp=week_ago,
                       metadata={'team': 'readers', 'role': 'read'})

  model.log.log_action('create_repo', org.username, performer=new_user_1,
                       repository=org_repo, timestamp=week_ago,
                       metadata={'namespace': org.username, 'repo': 'orgrepo'})

  model.log.log_action('change_repo_permission', org.username,
                       performer=new_user_2, repository=org_repo,
                       timestamp=six_ago,
                       metadata={'username': new_user_1.username,
                                 'repo': 'orgrepo', 'role': 'admin'})

  model.log.log_action('change_repo_permission', org.username,
                       performer=new_user_1, repository=org_repo,
                       timestamp=six_ago,
                       metadata={'username': new_user_2.username,
                                 'repo': 'orgrepo', 'role': 'read'})

  model.log.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
                       repository=org_repo, timestamp=four_ago,
                       metadata={'repo': 'orgrepo', 'token': 'deploytoken'})

  model.log.log_action('push_repo', org.username, performer=new_user_2,
                       repository=org_repo, timestamp=today,
                       metadata={'username': new_user_2.username,
                                 'repo': 'orgrepo'})

  model.log.log_action('pull_repo', org.username, performer=new_user_2,
                       repository=org_repo, timestamp=today,
                       metadata={'username': new_user_2.username,
                                 'repo': 'orgrepo'})

  model.log.log_action('pull_repo', org.username, repository=org_repo,
                       timestamp=today,
                       metadata={'token': 'sometoken', 'token_code': 'somecode',
                                 'repo': 'orgrepo'})

  model.log.log_action('delete_tag', org.username, performer=new_user_2,
                       repository=org_repo, timestamp=today,
                       metadata={'username': new_user_2.username,
                                 'repo': 'orgrepo', 'tag': 'sometag'})

  model.log.log_action('pull_repo', org.username, repository=org_repo,
                       timestamp=today,
                       metadata={'token_code': 'somecode', 'repo': 'orgrepo'})

  model.log.log_action('pull_repo', new_user_2.username, repository=publicrepo,
                       timestamp=yesterday,
                       metadata={'token_code': 'somecode', 'repo': 'publicrepo'})

  model.log.log_action('build_dockerfile', new_user_1.username, repository=building,
                       timestamp=today,
                       metadata={'repo': 'building', 'namespace': new_user_1.username,
                                 'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
                                 'service': trigger.service.name})

  fake_queue = WorkQueue('fakequeue', tf)
  fake_queue.put(['canonical', 'job', 'name'], '{}')

  # Run the repository action counter until it reports no work left, so the
  # repository action counts are up to date.
  while repositoryactioncounter.count_repository_actions():
    pass

def find_models_missing_data():
  # As a sanity check, make sure that every db table has at least one row.
  models_missing_data = set()
  for one_model in all_models:
    try:
      one_model.select().get()
    except one_model.DoesNotExist:
      models_missing_data.add(one_model.__name__)

  return models_missing_data

if __name__ == '__main__':
  parser = argparse.ArgumentParser(description='Initialize the test database.')
  parser.add_argument('--simple', action='store_true')
  args = parser.parse_args()

  log_level = getattr(logging, app.config['LOGGING_LEVEL'])
  logging.basicConfig(level=log_level)

  if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to initialize production database!')

  initialize_database()

  populate_database(args.simple)

  if not args.simple:
    models_missing_data = find_models_missing_data()
    if models_missing_data:
      logger.warning('The following models do not have any data: %s', models_missing_data)