Accidental refactor, split out legacy.py into separate submodules and update all call sites.

Jake Moshenko 2015-07-15 17:25:41 -04:00
parent 2109d24483
commit 3efaa255e8
92 changed files with 4458 additions and 4269 deletions
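
The bulk of the diff below is mechanical: call sites that used to reach functions re-exported flatly from data/model/legacy.py now go through the new submodules (model.user, model.image, model.repository, model.tag, model.permission, and so on). A minimal sketch of the before/after pattern, assuming the app's database is already configured; the function names come from the diff itself, while the surrounding scaffolding is illustrative only:

from data import model

# Before the refactor, legacy.py re-exported everything at the package level:
#   new_user = model.create_user('devtable', 'password', 'jschorr@devtable.com')
#   repo = model.create_repository(new_user.username, 'simple', new_user)

# After the refactor, callers name the submodule that owns each function:
new_user = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
repo = model.repository.create_repository(new_user.username, 'simple', new_user)
model.repository.set_repository_visibility(repo, 'public')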

initdb.py (357 changed lines)

@@ -6,14 +6,18 @@ import calendar
import os
from datetime import datetime, timedelta
from email.utils import formatdate
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
savepoint_sqlite, savepoint)
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
savepoint)
from itertools import count
from uuid import UUID
from threading import Event
from data.database import *
from email.utils import formatdate
from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService,
BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation,
ImageStorageTransformation, ImageStorageSignatureKind,
ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind)
from data import model
from data.model import oauth
from app import app, storage as store
from workers import repositoryactioncounter
@@ -21,6 +25,7 @@ from workers import repositoryactioncounter
logger = logging.getLogger(__name__)
SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
for i in range(1, 10)]
@@ -39,53 +44,56 @@ TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'
IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))
def __gen_checksum(image_id):
h = hashlib.md5(image_id)
return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()
csum = hashlib.md5(image_id)
return 'tarsum+sha256:' + csum.hexdigest() + csum.hexdigest()
def __gen_image_id(repo, image_num):
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return h.hexdigest() + h.hexdigest()
img_id = hashlib.md5(str_to_hash)
return img_id.hexdigest() + img_id.hexdigest()
def __gen_image_uuid(repo, image_num):
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return UUID(bytes=h.digest())
img_uuid = hashlib.md5(str_to_hash)
return UUID(bytes=img_uuid.digest())
global_image_num = count()
global_image_num = [0]
def __create_subtree(repo, structure, creator_username, parent, tag_map):
num_nodes, subtrees, last_node_tags = structure
# create the nodes
for i in range(num_nodes):
image_num = global_image_num[0]
global_image_num[0] += 1
for model_num in range(num_nodes):
image_num = next(global_image_num)
docker_image_id = __gen_image_id(repo, image_num)
logger.debug('new docker id: %s' % docker_image_id)
logger.debug('new docker id: %s', docker_image_id)
checksum = __gen_checksum(docker_image_id)
new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
new_image = model.image.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
new_image_locations = new_image.storage.locations
new_image.storage.uuid = __gen_image_uuid(repo, image_num)
new_image.storage.uploading = False
new_image.storage.checksum = checksum
new_image.storage.save()
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=i)
creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)
command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
command = json.dumps(command_list) if command_list else None
new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
str(creation_time), 'no comment', command, parent)
new_image = model.image.set_image_metadata(docker_image_id, repo.namespace_user.username,
repo.name, str(creation_time), 'no comment', command,
parent)
compressed_size = random.randrange(1, 1024 * 1024 * 1024)
model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
int(compressed_size * 1.4))
model.image.set_image_size(docker_image_id, repo.namespace_user.username, repo.name,
compressed_size, int(compressed_size * 1.4))
# Populate the diff file
diff_path = store.image_file_diffs_path(new_image.storage.uuid)
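
As an aside on the hunk above: the module-level image counter also changes from a one-element list that the loop incremented by hand to an itertools.count() iterator consumed with next(). A standalone sketch of the two equivalent patterns (only the counter names mirror the diff; the helper functions are illustrative):

from itertools import count

# Old pattern: a mutable one-element list as a module-level counter.
global_image_num_old = [0]

def next_image_num_old():
    image_num = global_image_num_old[0]
    global_image_num_old[0] += 1
    return image_num

# New pattern: count() yields 0, 1, 2, ... each time next() is called.
global_image_num_new = count()

def next_image_num_new():
    return next(global_image_num_new)

assert [next_image_num_old() for _ in range(3)] == [0, 1, 2]
assert [next_image_num_new() for _ in range(3)] == [0, 1, 2]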
@@ -101,55 +109,52 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
last_node_tags = [last_node_tags]
for tag_name in last_node_tags:
tag = model.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
new_image.docker_image_id)
new_tag = model.tag.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
new_image.docker_image_id)
tag_map[tag_name] = tag
tag_map[tag_name] = new_tag
for tag_name in last_node_tags:
if tag_name[0] == '#':
tag = tag_map[tag_name]
tag.name = tag_name[1:]
tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
tag.lifetime_start_ts = tag.lifetime_end_ts - 10
tag.save()
found_tag = tag_map[tag_name]
found_tag.name = tag_name[1:]
found_tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
found_tag.lifetime_start_ts = found_tag.lifetime_end_ts - 10
found_tag.save()
for subtree in subtrees:
__create_subtree(repo, subtree, creator_username, new_image, tag_map)
def __generate_repository(user, name, description, is_public, permissions,
structure):
repo = model.create_repository(user.username, name, user)
def __generate_repository(user_obj, name, description, is_public, permissions, structure):
repo = model.repository.create_repository(user_obj.username, name, user_obj)
if is_public:
model.set_repository_visibility(repo, 'public')
model.repository.set_repository_visibility(repo, 'public')
if description:
repo.description = description
repo.save()
for delegate, role in permissions:
model.set_user_repo_permission(delegate.username, user.username, name,
role)
model.permission.set_user_repo_permission(delegate.username, user_obj.username, name, role)
if isinstance(structure, list):
for s in structure:
__create_subtree(repo, s, user.username, None, {})
for leaf in structure:
__create_subtree(repo, leaf, user_obj.username, None, {})
else:
__create_subtree(repo, structure, user.username, None, {})
__create_subtree(repo, structure, user_obj.username, None, {})
return repo
db_initialized_for_testing = False
db_initialized_for_testing = Event()
testcases = {}
def finished_database_for_testing(testcase):
""" Called when a testcase has finished using the database, indicating that
any changes should be discarded.
"""
global testcases
testcases[testcase]['savepoint'].__exit__(True, None, None)
def setup_database_for_testing(testcase):
@@ -158,12 +163,10 @@ def setup_database_for_testing(testcase):
"""
# Sanity check to make sure we're not killing our prod db
db = model.db
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!')
global db_initialized_for_testing
if not db_initialized_for_testing:
if not db_initialized_for_testing.is_set():
logger.debug('Setting up DB for testing.')
# Setup the database.
@@ -173,18 +176,18 @@ def setup_database_for_testing(testcase):
# Enable foreign key constraints.
if not IS_TESTING_REAL_DATABASE:
model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')
db.obj.execute_sql('PRAGMA foreign_keys = ON;')
db_initialized_for_testing = True
db_initialized_for_testing.set()
# Create a savepoint for the testcase.
test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)
global testcases
testcases[testcase] = {}
testcases[testcase]['savepoint'] = test_savepoint
testcases[testcase]['savepoint'].__enter__()
def initialize_database():
create_model_tables(all_models)
@@ -314,8 +317,7 @@ def wipe_database():
logger.debug('Wiping all data from the DB.')
# Sanity check to make sure we're not killing our prod db
db = model.db
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!')
drop_model_tables(all_models, fail_silently=True)
@@ -324,52 +326,51 @@ def wipe_database():
def populate_database():
logger.debug('Populating the DB with test data.')
new_user_1 = model.create_user('devtable', 'password',
'jschorr@devtable.com')
new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
new_user_1.verified = True
new_user_1.stripe_id = TEST_STRIPE_ID
new_user_1.save()
disabled_user = model.create_user('disabled', 'password',
'jschorr+disabled@devtable.com')
disabled_user = model.user.create_user('disabled', 'password', 'jschorr+disabled@devtable.com')
disabled_user.verified = True
disabled_user.enabled = False
disabled_user.save()
dtrobot = model.create_robot('dtrobot', new_user_1)
dtrobot = model.user.create_robot('dtrobot', new_user_1)
new_user_2 = model.create_user('public', 'password',
'jacob.moshenko@gmail.com')
new_user_2 = model.user.create_user('public', 'password', 'jacob.moshenko@gmail.com')
new_user_2.verified = True
new_user_2.save()
new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
new_user_3 = model.user.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
new_user_3.verified = True
new_user_3.save()
model.create_robot('anotherrobot', new_user_3)
model.user.create_robot('anotherrobot', new_user_3)
new_user_4 = model.create_user('randomuser', 'password', 'no4@thanks.com')
new_user_4 = model.user.create_user('randomuser', 'password', 'no4@thanks.com')
new_user_4.verified = True
new_user_4.save()
new_user_5 = model.create_user('unverified', 'password', 'no5@thanks.com')
new_user_5 = model.user.create_user('unverified', 'password', 'no5@thanks.com')
new_user_5.save()
reader = model.create_user('reader', 'password', 'no1@thanks.com')
reader = model.user.create_user('reader', 'password', 'no1@thanks.com')
reader.verified = True
reader.save()
creatoruser = model.create_user('creator', 'password', 'noc@thanks.com')
creatoruser = model.user.create_user('creator', 'password', 'noc@thanks.com')
creatoruser.verified = True
creatoruser.save()
outside_org = model.create_user('outsideorg', 'password', 'no2@thanks.com')
outside_org = model.user.create_user('outsideorg', 'password', 'no2@thanks.com')
outside_org.verified = True
outside_org.save()
model.create_notification('test_notification', new_user_1,
metadata={'some':'value', 'arr':[1, 2, 3], 'obj':{'a':1, 'b':2}})
model.notification.create_notification('test_notification', new_user_1,
metadata={'some':'value',
'arr':[1, 2, 3],
'obj':{'a':1, 'b':2}})
from_date = datetime.utcnow()
to_date = from_date + timedelta(hours=1)
@@ -378,7 +379,7 @@ def populate_database():
'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
'reason': 'database migration'
}
model.create_notification('maintenance', new_user_1, metadata=notification_metadata)
model.notification.create_notification('maintenance', new_user_1, metadata=notification_metadata)
__generate_repository(new_user_4, 'randomrepo', 'Random repo repository.', False,
@@ -434,10 +435,10 @@ def populate_database():
'Empty repository which is building.',
False, [], (0, [], None))
token = model.create_access_token(building, 'write', 'build-worker')
new_token = model.token.create_access_token(building, 'write', 'build-worker')
trigger = model.create_build_trigger(building, 'github', '123authtoken',
new_user_1, pull_robot=dtrobot[0])
trigger = model.build.create_build_trigger(building, 'github', '123authtoken', new_user_1,
pull_robot=dtrobot[0])
trigger.config = json.dumps({
'build_source': 'jakedt/testconnect',
'subdir': '',
@@ -456,164 +457,160 @@ def populate_database():
}
}
record = model.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr@devtable.com')
record = model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr@devtable.com')
record.confirmed = True
record.save()
model.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr+other@devtable.com')
model.repository.create_email_authorization_for_repo(new_user_1.username, 'simple',
'jschorr+other@devtable.com')
build2 = model.create_repository_build(building, token, job_config,
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
'build-name', trigger)
build2 = model.build.create_repository_build(building, new_token, job_config,
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
'build-name', trigger)
build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
build2.save()
build3 = model.create_repository_build(building, token, job_config,
'f49d07f9-93da-474d-ad5f-c852107c3892',
'build-name', trigger)
build3 = model.build.create_repository_build(building, new_token, job_config,
'f49d07f9-93da-474d-ad5f-c852107c3892',
'build-name', trigger)
build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
build3.save()
build = model.create_repository_build(building, token, job_config,
'701dcc3724fb4f2ea6c31400528343cd',
'build-name', trigger)
build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
build.save()
build1 = model.build.create_repository_build(building, new_token, job_config,
'701dcc3724fb4f2ea6c31400528343cd', 'build-name',
trigger)
build1.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
build1.save()
org = model.create_organization('buynlarge', 'quay@devtable.com',
new_user_1)
org = model.organization.create_organization('buynlarge', 'quay@devtable.com', new_user_1)
org.stripe_id = TEST_STRIPE_ID
org.save()
model.create_robot('coolrobot', org)
model.user.create_robot('coolrobot', org)
oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
'http://localhost:8000/o2c.html', client_id='deadbeef')
model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
'http://localhost:8000/o2c.html', client_id='deadbeef')
oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
'http://localhost:8000/o2c.html', client_id='deadpork',
description='This is another test application')
model.oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
'http://localhost:8000/o2c.html', client_id='deadpork',
description='This is another test application')
model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')
model.create_robot('neworgrobot', org)
model.user.create_robot('neworgrobot', org)
ownerbot = model.create_robot('ownerbot', org)[0]
creatorbot = model.create_robot('creatorbot', org)[0]
ownerbot = model.user.create_robot('ownerbot', org)[0]
creatorbot = model.user.create_robot('creatorbot', org)[0]
owners = model.get_organization_team('buynlarge', 'owners')
owners = model.team.get_organization_team('buynlarge', 'owners')
owners.description = 'Owners have unfetterd access across the entire org.'
owners.save()
org_repo = __generate_repository(org, 'orgrepo',
'Repository owned by an org.', False,
[(outside_org, 'read')],
(4, [], ['latest', 'prod']))
org_repo = __generate_repository(org, 'orgrepo', 'Repository owned by an org.', False,
[(outside_org, 'read')], (4, [], ['latest', 'prod']))
org_repo2 = __generate_repository(org, 'anotherorgrepo',
'Another repository owned by an org.', False,
[],
(4, [], ['latest', 'prod']))
__generate_repository(org, 'anotherorgrepo', 'Another repository owned by an org.', False,
[], (4, [], ['latest', 'prod']))
creators = model.create_team('creators', org, 'creator',
'Creators of orgrepo.')
creators = model.team.create_team('creators', org, 'creator', 'Creators of orgrepo.')
reader_team = model.create_team('readers', org, 'member',
'Readers of orgrepo.')
model.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username, org_repo.name,
'read')
reader_team = model.team.create_team('readers', org, 'member', 'Readers of orgrepo.')
model.permission.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username,
org_repo.name, 'read')
model.add_user_to_team(new_user_2, reader_team)
model.add_user_to_team(reader, reader_team)
model.add_user_to_team(ownerbot, owners)
model.add_user_to_team(creatorbot, creators)
model.add_user_to_team(creatoruser, creators)
model.team.add_user_to_team(new_user_2, reader_team)
model.team.add_user_to_team(reader, reader_team)
model.team.add_user_to_team(ownerbot, owners)
model.team.add_user_to_team(creatorbot, creators)
model.team.add_user_to_team(creatoruser, creators)
__generate_repository(new_user_1, 'superwide', None, False, [],
[(10, [], 'latest2'),
(2, [], 'latest3'),
(2, [(1, [], 'latest11'), (2, [], 'latest12')],
'latest4'),
(2, [], 'latest5'),
(2, [], 'latest6'),
(2, [], 'latest7'),
(2, [], 'latest8'),
(2, [], 'latest9'),
(2, [], 'latest10'),
(2, [], 'latest13'),
(2, [], 'latest14'),
(2, [], 'latest15'),
(2, [], 'latest16'),
(2, [], 'latest17'),
(2, [], 'latest18'),])
(2, [], 'latest3'),
(2, [(1, [], 'latest11'), (2, [], 'latest12')],
'latest4'),
(2, [], 'latest5'),
(2, [], 'latest6'),
(2, [], 'latest7'),
(2, [], 'latest8'),
(2, [], 'latest9'),
(2, [], 'latest10'),
(2, [], 'latest13'),
(2, [], 'latest14'),
(2, [], 'latest15'),
(2, [], 'latest16'),
(2, [], 'latest17'),
(2, [], 'latest18')])
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_user=new_user_2)
model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_team=reader_team)
model.add_prototype_permission(org, 'write', activating_user=new_user_2, delegate_user=new_user_1)
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
delegate_user=new_user_2)
model.permission.add_prototype_permission(org, 'read', activating_user=new_user_1,
delegate_team=reader_team)
model.permission.add_prototype_permission(org, 'write', activating_user=new_user_2,
delegate_user=new_user_1)
today = datetime.today()
week_ago = today - timedelta(6)
six_ago = today - timedelta(5)
four_ago = today - timedelta(4)
model.log_action('org_create_team', org.username, performer=new_user_1,
timestamp=week_ago, metadata={'team': 'readers'})
model.log.log_action('org_create_team', org.username, performer=new_user_1,
timestamp=week_ago, metadata={'team': 'readers'})
model.log_action('org_set_team_role', org.username, performer=new_user_1,
timestamp=week_ago,
metadata={'team': 'readers', 'role': 'read'})
model.log.log_action('org_set_team_role', org.username, performer=new_user_1,
timestamp=week_ago,
metadata={'team': 'readers', 'role': 'read'})
model.log_action('create_repo', org.username, performer=new_user_1,
repository=org_repo, timestamp=week_ago,
metadata={'namespace': org.username, 'repo': 'orgrepo'})
model.log.log_action('create_repo', org.username, performer=new_user_1,
repository=org_repo, timestamp=week_ago,
metadata={'namespace': org.username, 'repo': 'orgrepo'})
model.log_action('change_repo_permission', org.username,
performer=new_user_2, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_1.username,
'repo': 'orgrepo', 'role': 'admin'})
model.log.log_action('change_repo_permission', org.username,
performer=new_user_2, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_1.username,
'repo': 'orgrepo', 'role': 'admin'})
model.log_action('change_repo_permission', org.username,
performer=new_user_1, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'role': 'read'})
model.log.log_action('change_repo_permission', org.username,
performer=new_user_1, repository=org_repo,
timestamp=six_ago,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'role': 'read'})
model.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
repository=org_repo, timestamp=four_ago,
metadata={'repo': 'orgrepo', 'token': 'deploytoken'})
model.log.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
repository=org_repo, timestamp=four_ago,
metadata={'repo': 'orgrepo', 'token': 'deploytoken'})
model.log_action('push_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log.log_action('push_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log_action('pull_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo'})
model.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token': 'sometoken', 'token_code': 'somecode',
'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token': 'sometoken', 'token_code': 'somecode',
'repo': 'orgrepo'})
model.log_action('delete_tag', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'tag': 'sometag'})
model.log.log_action('delete_tag', org.username, performer=new_user_2,
repository=org_repo, timestamp=today,
metadata={'username': new_user_2.username,
'repo': 'orgrepo', 'tag': 'sometag'})
model.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token_code': 'somecode', 'repo': 'orgrepo'})
model.log.log_action('pull_repo', org.username, repository=org_repo,
timestamp=today,
metadata={'token_code': 'somecode', 'repo': 'orgrepo'})
model.log_action('build_dockerfile', new_user_1.username, repository=building,
timestamp=today,
metadata={'repo': 'building', 'namespace': new_user_1.username,
'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
'service': trigger.service.name})
model.log.log_action('build_dockerfile', new_user_1.username, repository=building,
timestamp=today,
metadata={'repo': 'building', 'namespace': new_user_1.username,
'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
'service': trigger.service.name})
while repositoryactioncounter.count_repository_actions():
pass
@@ -622,7 +619,7 @@ if __name__ == '__main__':
log_level = getattr(logging, app.config['LOGGING_LEVEL'])
logging.basicConfig(level=log_level)
if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
if not IS_TESTING_REAL_DATABASE and not isinstance(db.obj, SqliteDatabase):
raise RuntimeError('Attempted to initialize production database!')
initialize_database()
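
The test-database hunks keep the existing lifecycle but tighten it up: the one-time initialization guard becomes a threading.Event, and each testcase still gets a peewee savepoint that is entered on setup and exited with a truthy exc_type on teardown so its writes are rolled back. A rough sketch of that lifecycle with generic names and an in-memory SQLite database; it mirrors the flow in initdb.py rather than being a drop-in utility:

from threading import Event
from peewee import SqliteDatabase, savepoint_sqlite

db = SqliteDatabase(':memory:')
db_initialized = Event()
active_savepoints = {}

def setup_for_test(test_id):
    # One-time connection/schema setup, guarded so repeated calls are no-ops.
    if not db_initialized.is_set():
        db.connect()
        db_initialized.set()
    # Open a savepoint for this test; everything it writes lands inside it.
    sp = savepoint_sqlite(db)
    sp.__enter__()
    active_savepoints[test_id] = sp

def finished_test(test_id):
    # A truthy exc_type makes the savepoint roll back on exit, discarding
    # whatever the test wrote (the same trick initdb.py uses).
    active_savepoints.pop(test_id).__exit__(True, None, None)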