Merge pull request #1154 from jakedt/enforcemodels

Enforce all models have some data
Jake Moshenko 2016-01-19 15:32:58 -05:00
commit c36c00e15d
8 changed files with 90 additions and 39 deletions


@@ -4,6 +4,8 @@ import uuid
 import time
 import toposort
 import resumablehashlib
+import sys
+import inspect
 from random import SystemRandom
 from datetime import datetime
@@ -861,15 +863,5 @@ class TorrentInfo(BaseModel):
     (('storage', 'piece_length'), True),
   )

-all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, Visibility,
-              RepositoryTag, EmailConfirmation, FederatedLogin, LoginService, QueueItem,
-              RepositoryBuild, Team, TeamMember, TeamRole, LogEntryKind, LogEntry,
-              PermissionPrototype, ImageStorage, BuildTriggerService, RepositoryBuildTrigger,
-              OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
-              Notification, ImageStorageLocation, ImageStoragePlacement,
-              ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
-              RepositoryAuthorizedEmail, ImageStorageTransformation,
-              TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind,
-              AccessTokenKind, Star, RepositoryActionCount, TagManifest, UserRegion,
-              QuayService, QuayRegion, QuayRelease, BlobUpload, DerivedStorageForImage, TorrentInfo]
+is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
+all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)]
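
This hunk replaces the hand-maintained all_models list with module introspection: every class defined in the module that subclasses BaseModel (except BaseModel itself) is collected automatically, so a newly added model can no longer be forgotten. A minimal standalone sketch of the pattern, using stand-in classes rather than the real peewee models:

    # Standalone sketch of the introspection pattern. BaseModel, User and
    # Repository are stand-in classes here, not the real peewee models.
    import inspect
    import sys

    class BaseModel(object):
      pass

    class User(BaseModel):
      pass

    class Repository(BaseModel):
      pass

    is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
    all_models = [member for _, member in inspect.getmembers(sys.modules[__name__], is_model)]

    print(all_models)  # [<class '__main__.Repository'>, <class '__main__.User'>]

Note that inspect.getmembers sorts by attribute name, so the resulting list is alphabetical rather than in declaration order, which does not matter for the table-emptiness check introduced later in this commit.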


@@ -95,4 +95,4 @@ config = Config()
 # moving the minimal number of things to _basequery
 # TODO document the methods and modules for each one of the submodules below.
 from data.model import (blob, build, image, log, notification, oauth, organization, permission,
-                        repository, storage, tag, team, token, user)
+                        repository, storage, tag, team, token, user, release)


@@ -10,14 +10,12 @@ def set_region_release(service_name, region_name, version):

 def get_recent_releases(service_name, region_name):
   return (QuayRelease
           .select(QuayRelease)
           .join(QuayService)
           .switch(QuayRelease)
           .join(QuayRegion)
-          .where(
-            QuayService.name == service_name,
-            QuayRegion.name == region_name,
-            QuayRelease.reverted == False,
-          )
-          .order_by(QuayRelease.created.desc())
-  )
+          .where(QuayService.name == service_name,
+                 QuayRegion.name == region_name,
+                 QuayRelease.reverted == False,
+          )
+          .order_by(QuayRelease.created.desc()))


@@ -75,6 +75,12 @@ def garbage_collect_storage(storage_id_whitelist):
                           .execute())
     logger.debug('Removed %s torrent info records', torrents_removed)

+    signatures_removed = (ImageStorageSignature
+                          .delete()
+                          .where(ImageStorageSignature.storage << orphaned_storages)
+                          .execute())
+    logger.debug('Removed %s image storage signatures', signatures_removed)
+
     storages_removed = (ImageStorage
                         .delete()
                         .where(ImageStorage.id << orphaned_storages)
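
The new delete uses peewee's overloaded << operator, which renders as a SQL IN clause, so every signature row whose storage falls in the orphaned set is removed before the storage rows themselves. A hedged, self-contained sketch of the operator (assumes peewee is installed; Signature and storage_id are stand-ins, not the real quay models):

    from peewee import SqliteDatabase, Model, IntegerField

    db = SqliteDatabase(':memory:')

    class Signature(Model):
      storage_id = IntegerField()

      class Meta:
        database = db

    db.create_tables([Signature])
    Signature.create(storage_id=1)
    Signature.create(storage_id=2)

    orphaned = [1]
    # "field << iterable" renders as "WHERE storage_id IN (...)"
    removed = Signature.delete().where(Signature.storage_id << orphaned).execute()
    print(removed)  # 1 row deleted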
@@ -97,17 +103,17 @@ def create_v1_storage(location_name):
   return storage


-def find_or_create_storage_signature(storage, signature_kind):
-  found = lookup_storage_signature(storage, signature_kind)
+def find_or_create_storage_signature(storage, signature_kind_name):
+  found = lookup_storage_signature(storage, signature_kind_name)
   if found is None:
-    kind = ImageStorageSignatureKind.get(name=signature_kind)
+    kind = ImageStorageSignatureKind.get(name=signature_kind_name)
     found = ImageStorageSignature.create(storage=storage, kind=kind)

   return found


-def lookup_storage_signature(storage, signature_kind):
-  kind = ImageStorageSignatureKind.get(name=signature_kind)
+def lookup_storage_signature(storage, signature_kind_name):
+  kind = ImageStorageSignatureKind.get(name=signature_kind_name)
   try:
     return (ImageStorageSignature
             .select()


@@ -368,6 +368,7 @@ def lookup_federated_login(user, service_name):
   except FederatedLogin.DoesNotExist:
     return None

+
 def create_confirm_email_code(user, new_email=None):
   if new_email:
     if not validate_email(new_email):


@@ -10,16 +10,18 @@ from datetime import datetime, timedelta
 from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
                     savepoint)
 from itertools import count
-from uuid import UUID
+from uuid import UUID, uuid4
 from threading import Event
 from email.utils import formatdate

 from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService,
                            BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation,
                            ImageStorageTransformation, ImageStorageSignatureKind,
-                           ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind)
+                           ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind,
+                           QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode)
 from data import model
-from app import app, storage as store
+from data.queue import WorkQueue
+from app import app, storage as store, tf
 from storage.basestorage import StoragePaths
 from endpoints.v2.manifest import _generate_and_store_manifest
@@ -85,6 +87,9 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
     new_image.storage.uploading = False
     new_image.storage.save()

+    # Write out a fake torrentinfo
+    model.storage.save_torrent_info(new_image.storage, 1, 'deadbeef')
+
     # Write some data for the storage.
     if os.environ.get('WRITE_STORAGE_FILES'):
       storage_paths = StoragePaths()
@@ -127,6 +132,8 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
     for tag_name in last_node_tags:
       new_tag = model.tag.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
                                                new_image.docker_image_id)
+      derived = model.image.find_or_create_derived_storage(new_tag, 'squash', 'local_us')
+      model.storage.find_or_create_storage_signature(derived, 'gpg2')
       _generate_and_store_manifest(repo.namespace_user.username, repo.name, tag_name)
       tag_map[tag_name] = new_tag
@@ -191,6 +198,11 @@ def setup_database_for_testing(testcase):
   initialize_database()
   populate_database()

+  models_missing_data = find_models_missing_data()
+  if models_missing_data:
+    raise RuntimeError('%s models are missing data: %s', len(models_missing_data),
+                       models_missing_data)
+
   # Enable foreign key constraints.
   if not IS_TESTING_REAL_DATABASE:
     db.obj.execute_sql('PRAGMA foreign_keys = ON;')
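
One detail in this hunk: exception constructors do not apply %-formatting to their arguments the way the module's logger calls do, so the message here is raised as an argument tuple rather than an interpolated string. A hypothetical rewrite with explicit formatting, shown only for illustration (not what the commit contains):

    models_missing_data = find_models_missing_data()
    if models_missing_data:
      raise RuntimeError('%s models are missing data: %s'
                         % (len(models_missing_data), models_missing_data))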
@@ -333,6 +345,9 @@ def initialize_database():
   NotificationKind.create(name='test_notification')

+  QuayRegion.create(name='us')
+  QuayService.create(name='quay')
+

 def wipe_database():
   logger.debug('Wiping all data from the DB.')
@@ -356,6 +371,11 @@ def populate_database(minimal=False):
     logger.debug('Skipping most db population because user requested mininal db')
     return

+  UserRegion.create(user=new_user_1, location=1)
+  model.release.set_region_release('quay', 'us', 'v0.1.2')
+  model.user.create_confirm_email_code(new_user_1, new_email='typo@devtable.com')
+
   disabled_user = model.user.create_user('disabled', 'password', 'jschorr+disabled@devtable.com')
   disabled_user.verified = True
   disabled_user.enabled = False
@@ -413,6 +433,8 @@ def populate_database(minimal=False):
   simple_repo = __generate_repository(new_user_1, 'simple', 'Simple repository.', False,
                                       [], (4, [], ['latest', 'prod']))
+  model.blob.initiate_upload(new_user_1.username, simple_repo.name, str(uuid4()), 'local_us', {})
+  model.notification.create_repo_notification(simple_repo, 'repo_push', 'quay_notification', {}, {})

   __generate_repository(new_user_1, 'sharedtags',
                         'Shared tags repository',
@@ -517,8 +539,9 @@ def populate_database(minimal=False):
   model.user.create_robot('coolrobot', org)

-  model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
-                                 'http://localhost:8000/o2c.html', client_id='deadbeef')
+  oauth_app_1 = model.oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
+                                               'http://localhost:8000/o2c.html',
+                                               client_id='deadbeef')

   model.oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
                                  'http://localhost:8000/o2c.html', client_id='deadpork',
@@ -526,6 +549,9 @@ def populate_database(minimal=False):
   model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')

+  OAuthAuthorizationCode.create(application=oauth_app_1, code='Z932odswfhasdf1', scope='repo:admin',
+                                data='{"somejson": "goeshere"}')
+
   model.user.create_robot('neworgrobot', org)

   ownerbot = model.user.create_robot('ownerbot', org)[0]
@@ -544,6 +570,7 @@ def populate_database(minimal=False):
   creators = model.team.create_team('creators', org, 'creator', 'Creators of orgrepo.')
   reader_team = model.team.create_team('readers', org, 'member', 'Readers of orgrepo.')
+  model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
   model.permission.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username,
                                             org_repo.name, 'read')
@@ -640,10 +667,25 @@ def populate_database(minimal=False):
                          'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
                          'service': trigger.service.name})

+  fake_queue = WorkQueue('fakequeue', tf)
+  fake_queue.put(['canonical', 'job', 'name'], '{}')
+
   while repositoryactioncounter.count_repository_actions():
     pass

+
+def find_models_missing_data():
+  # As a sanity check we are going to make sure that all db tables have some data
+  models_missing_data = set()
+  for one_model in all_models:
+    try:
+      one_model.select().get()
+    except one_model.DoesNotExist:
+      models_missing_data.add(one_model.__name__)
+
+  return models_missing_data
+
+
 if __name__ == '__main__':
   parser = argparse.ArgumentParser(description='Initialize the test database.')
   parser.add_argument('--simple', action='store_true')
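
The new find_models_missing_data helper leans on peewee raising <Model>.DoesNotExist when .get() is called against an empty table. A hedged sketch of an equivalent check using exists(), which skips the exception round-trip (all_models is passed in explicitly here for clarity; the committed helper reads it from module scope):

    def find_models_missing_data(all_models):
      # Report every model whose table holds no rows at all.
      return {model.__name__ for model in all_models if not model.select().exists()}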
@@ -658,3 +700,8 @@ if __name__ == '__main__':

   initialize_database()
   populate_database(args.simple)
+
+  if not args.simple:
+    models_missing_data = find_models_missing_data()
+    if models_missing_data:
+      logger.warning('The following models do not have any data: %s', models_missing_data)

Binary file not shown.


@@ -24,8 +24,7 @@ from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, Org
 from endpoints.api.tag import RepositoryTagImages, RepositoryTag, RevertTag, ListRepositoryTags
 from endpoints.api.search import EntitySearch, ConductSearch
 from endpoints.api.image import RepositoryImage, RepositoryImageList
-from endpoints.api.build import (RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList,
-                                 RepositoryBuildResource)
+from endpoints.api.build import RepositoryBuildStatus, RepositoryBuildList, RepositoryBuildResource
 from endpoints.api.robot import (UserRobotList, OrgRobot, OrgRobotList, UserRobot,
                                  RegenerateUserRobot, RegenerateOrgRobot)
 from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs,
@@ -1226,6 +1225,14 @@ class TestDeleteOrganizationTeamMember(ApiTestCase):
   def test_deletememberinvite(self):
     self.login(ADMIN_ACCESS_USER)

+    # Verify the initial member count
+    json = self.getJsonResponse(TeamMemberList,
+                                params=dict(orgname=ORGANIZATION,
+                                            teamname='readers',
+                                            includePending=True))
+    self.assertEquals(len(json['members']), 3)
+
     membername = NO_ACCESS_USER
     response = self.putJsonResponse(TeamMember,
                                     params=dict(orgname=ORGANIZATION, teamname='readers',
@@ -1240,7 +1247,7 @@ class TestDeleteOrganizationTeamMember(ApiTestCase):
                                             teamname='readers',
                                             includePending=True))
-    assert len(json['members']) == 3
+    self.assertEquals(len(json['members']), 4)

     # Delete the invite.
     self.deleteResponse(TeamMember,
@@ -1254,7 +1261,7 @@ class TestDeleteOrganizationTeamMember(ApiTestCase):
                                             teamname='readers',
                                             includePending=True))
-    assert len(json['members']) == 2
+    self.assertEquals(len(json['members']), 3)

   def test_deletemember(self):
@@ -1270,7 +1277,7 @@ class TestDeleteOrganizationTeamMember(ApiTestCase):
                                 params=dict(orgname=ORGANIZATION,
                                             teamname='readers'))
-    assert len(json['members']) == 1
+    self.assertEquals(len(json['members']), 1)


 class TestCreateRepo(ApiTestCase):
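
The expected readers-team counts in the includePending queries grow by one, matching the extra invite that populate_database now issues via add_or_invite_to_team; the bare asserts are also swapped for assertEquals, which reports both values when a check fails. A hedged, standalone illustration of that difference (DemoTest is illustrative only, not part of the test suite):

    import unittest

    class DemoTest(unittest.TestCase):
      def test_bare_assert(self):
        members = ['a', 'b', 'c']
        assert len(members) == 4           # failure reports only "AssertionError"

      def test_assert_equal(self):
        members = ['a', 'b', 'c']
        self.assertEqual(len(members), 4)  # failure reports "3 != 4"

    if __name__ == '__main__':
      unittest.main()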