This authentication system hits two HTTP endpoints to check for and to verify users:

Existence endpoint: GET http://endpoint/ with Authorization: Basic (username:)
  => Returns 200 if the username/email exists, 4** otherwise.

Verification endpoint: GET http://endpoint/ with Authorization: Basic (username:password)
  => Returns 200 and a signed JWT containing the user's username and email address if the
     username+password pair validates, 4** otherwise, with the body containing an optional
     error message.

The JWT produced by the endpoint must be issued with an issuer matching the one configured in
config.yaml, and its audience must be "quay.io/jwtauthn". The JWT is signed with a private key
and then validated on the Quay.io side against the associated public key, found as
"jwt-authn.cert" in the conf/stack directory.
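For illustration, a minimal sketch of both halves of that flow, assuming the PyJWT library
with RS256 keys; the claim names carrying the username and email ('sub' and 'email') and the
helper names are illustrative assumptions, not taken from Quay's implementation:

  import jwt  # PyJWT; RS256 also requires the 'cryptography' package

  AUDIENCE = 'quay.io/jwtauthn'

  def mint_auth_jwt(username, email, issuer, private_key_pem):
    # Minted by the external auth endpoint on a successful password check.
    # 'iss' must match the issuer configured in config.yaml.
    payload = {'iss': issuer, 'aud': AUDIENCE, 'sub': username, 'email': email}
    return jwt.encode(payload, private_key_pem, algorithm='RS256')

  def validate_auth_jwt(token, issuer, public_key_pem):
    # On the Quay.io side: verify against the public key loaded from
    # conf/stack/jwt-authn.cert; raises jwt.InvalidTokenError on a bad
    # signature, wrong issuer, or wrong audience.
    return jwt.decode(token, public_key_pem, algorithms=['RS256'],
                      issuer=issuer, audience=AUDIENCE)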
import logging
import json
import hashlib
import random
import calendar
import os

from datetime import datetime, timedelta
from email.utils import formatdate
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
                    savepoint_sqlite, savepoint)
from uuid import UUID

from data.database import *
from data import model
from data.model import oauth
from app import app, storage as store

from workers import repositoryactioncounter


logger = logging.getLogger(__name__)

SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
                for i in range(1, 10)]

SAMPLE_CMDS = [["/bin/bash"],
               ["/bin/sh", "-c",
                "echo \"PasswordAuthentication no\" >> /etc/ssh/sshd_config"],
               ["/bin/sh", "-c",
                "sed -i 's/#\\(force_color_prompt\\)/\\1/' /etc/skel/.bashrc"],
               ["/bin/sh", "-c", "#(nop) EXPOSE [8080]"],
               ["/bin/sh", "-c",
                "#(nop) MAINTAINER Jake Moshenko <jake@devtable.com>"],
               None]

REFERENCE_DATE = datetime(2013, 6, 23)
TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'

IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))


def __gen_checksum(image_id):
  h = hashlib.md5(image_id)
  return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()


def __gen_image_id(repo, image_num):
  str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)

  h = hashlib.md5(str_to_hash)
  return h.hexdigest() + h.hexdigest()


def __gen_image_uuid(repo, image_num):
  str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)

  h = hashlib.md5(str_to_hash)
  return UUID(bytes=h.digest())


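# Each repository "structure" passed to __create_subtree/__generate_repository
# below is a tuple of (num_nodes, subtrees, last_node_tags): a linear chain of
# num_nodes images is created, each subtree in subtrees is recursively rooted
# at the last image of the chain, and last_node_tags (a tag name, a list of
# tag names, or None) is applied to that last image. A tag name prefixed with
# '#' becomes a hidden, expired version of the like-named tag, with a lifetime
# ending where the live tag's lifetime begins.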
global_image_num = [0]
def __create_subtree(repo, structure, creator_username, parent, tag_map):
  num_nodes, subtrees, last_node_tags = structure

  # Create the nodes.
  for i in range(num_nodes):
    image_num = global_image_num[0]
    global_image_num[0] += 1
    docker_image_id = __gen_image_id(repo, image_num)
    logger.debug('new docker id: %s', docker_image_id)
    checksum = __gen_checksum(docker_image_id)

    new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
    new_image_locations = new_image.storage.locations
    new_image.storage.uuid = __gen_image_uuid(repo, image_num)
    new_image.storage.uploading = False
    new_image.storage.checksum = checksum
    new_image.storage.save()

    creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=i)
    command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
    command = json.dumps(command_list) if command_list else None
    new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
                                         str(creation_time), 'no comment', command, parent)

    compressed_size = random.randrange(1, 1024 * 1024 * 1024)
    model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
                         int(compressed_size * 1.4))

    # Populate the diff file.
    diff_path = store.image_file_diffs_path(new_image.storage.uuid)
    source_diff = SAMPLE_DIFFS[image_num % len(SAMPLE_DIFFS)]

    with open(source_diff, 'r') as source_file:
      store.stream_write(new_image_locations, diff_path, source_file)

    parent = new_image

  if last_node_tags:
    if not isinstance(last_node_tags, list):
      last_node_tags = [last_node_tags]

    for tag_name in last_node_tags:
      tag = model.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
                                       new_image.docker_image_id)
      tag_map[tag_name] = tag

    for tag_name in last_node_tags:
      if tag_name[0] == '#':
        tag = tag_map[tag_name]
        tag.name = tag_name[1:]
        tag.lifetime_end_ts = tag_map[tag_name[1:]].lifetime_start_ts
        tag.lifetime_start_ts = tag.lifetime_end_ts - 10
        tag.save()

  for subtree in subtrees:
    __create_subtree(repo, subtree, creator_username, new_image, tag_map)


def __generate_repository(user, name, description, is_public, permissions,
                          structure):
  repo = model.create_repository(user.username, name, user)

  if is_public:
    model.set_repository_visibility(repo, 'public')

  if description:
    repo.description = description
    repo.save()

  for delegate, role in permissions:
    model.set_user_repo_permission(delegate.username, user.username, name,
                                   role)

  if isinstance(structure, list):
    for s in structure:
      __create_subtree(repo, s, user.username, None, {})
  else:
    __create_subtree(repo, structure, user.username, None, {})

  return repo


db_initialized_for_testing = False
testcases = {}


def finished_database_for_testing(testcase):
  """ Called when a testcase has finished using the database, indicating that
      any changes should be discarded.
  """
  testcases[testcase]['savepoint'].__exit__(True, None, None)


def setup_database_for_testing(testcase):
  """ Called when a testcase has started using the database, indicating that
      the database should be setup (if not already) and a savepoint created.
  """

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  global db_initialized_for_testing
  if not db_initialized_for_testing:
    logger.debug('Setting up DB for testing.')

    # Setup the database.
    wipe_database()
    initialize_database()
    populate_database()

    # Enable foreign key constraints.
    if not IS_TESTING_REAL_DATABASE:
      model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')

    db_initialized_for_testing = True

  # Create a savepoint for the testcase.
  test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)

  testcases[testcase] = {}
  testcases[testcase]['savepoint'] = test_savepoint
  testcases[testcase]['savepoint'].__enter__()

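# A minimal usage sketch (hypothetical test class, not part of this module):
# each testcase enters a savepoint on setup and rolls it back on teardown, so
# database mutations never leak between tests.
#
#   class SomeDatabaseTest(unittest.TestCase):
#     def setUp(self):
#       setup_database_for_testing(self)
#
#     def tearDown(self):
#       finished_database_for_testing(self)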


def initialize_database():
  create_model_tables(all_models)

  Role.create(name='admin')
  Role.create(name='write')
  Role.create(name='read')
  TeamRole.create(name='admin')
  TeamRole.create(name='creator')
  TeamRole.create(name='member')
  Visibility.create(name='public')
  Visibility.create(name='private')

  LoginService.create(name='google')
  LoginService.create(name='github')
  LoginService.create(name='quayrobot')
  LoginService.create(name='ldap')
  LoginService.create(name='jwtauthn')

  BuildTriggerService.create(name='github')
  BuildTriggerService.create(name='custom-git')
  BuildTriggerService.create(name='bitbucket')
  BuildTriggerService.create(name='gitlab')

  AccessTokenKind.create(name='build-worker')
  AccessTokenKind.create(name='pushpull-token')

  LogEntryKind.create(name='account_change_plan')
  LogEntryKind.create(name='account_change_cc')
  LogEntryKind.create(name='account_change_password')
  LogEntryKind.create(name='account_convert')

  LogEntryKind.create(name='create_robot')
  LogEntryKind.create(name='delete_robot')

  LogEntryKind.create(name='create_repo')
  LogEntryKind.create(name='push_repo')
  LogEntryKind.create(name='pull_repo')
  LogEntryKind.create(name='delete_repo')
  LogEntryKind.create(name='create_tag')
  LogEntryKind.create(name='move_tag')
  LogEntryKind.create(name='delete_tag')
  LogEntryKind.create(name='revert_tag')
  LogEntryKind.create(name='add_repo_permission')
  LogEntryKind.create(name='change_repo_permission')
  LogEntryKind.create(name='delete_repo_permission')
  LogEntryKind.create(name='change_repo_visibility')
  LogEntryKind.create(name='add_repo_accesstoken')
  LogEntryKind.create(name='delete_repo_accesstoken')
  LogEntryKind.create(name='set_repo_description')

  LogEntryKind.create(name='build_dockerfile')

  LogEntryKind.create(name='org_create_team')
  LogEntryKind.create(name='org_delete_team')
  LogEntryKind.create(name='org_invite_team_member')
  LogEntryKind.create(name='org_delete_team_member_invite')
  LogEntryKind.create(name='org_add_team_member')
  LogEntryKind.create(name='org_team_member_invite_accepted')
  LogEntryKind.create(name='org_team_member_invite_declined')
  LogEntryKind.create(name='org_remove_team_member')
  LogEntryKind.create(name='org_set_team_description')
  LogEntryKind.create(name='org_set_team_role')

  LogEntryKind.create(name='create_prototype_permission')
  LogEntryKind.create(name='modify_prototype_permission')
  LogEntryKind.create(name='delete_prototype_permission')

  LogEntryKind.create(name='setup_repo_trigger')
  LogEntryKind.create(name='delete_repo_trigger')

  LogEntryKind.create(name='create_application')
  LogEntryKind.create(name='update_application')
  LogEntryKind.create(name='delete_application')
  LogEntryKind.create(name='reset_application_client_secret')

  # Note: These next two are deprecated.
  LogEntryKind.create(name='add_repo_webhook')
  LogEntryKind.create(name='delete_repo_webhook')

  LogEntryKind.create(name='add_repo_notification')
  LogEntryKind.create(name='delete_repo_notification')

  LogEntryKind.create(name='regenerate_robot_token')

  LogEntryKind.create(name='repo_verb')

  ImageStorageLocation.create(name='local_eu')
  ImageStorageLocation.create(name='local_us')

  ImageStorageTransformation.create(name='squash')
  ImageStorageTransformation.create(name='aci')

  ImageStorageSignatureKind.create(name='gpg2')

  # NOTE: These MUST be copied over to NotificationKind, since every external
  # notification can also generate a Quay.io notification.
  ExternalNotificationEvent.create(name='repo_push')
  ExternalNotificationEvent.create(name='build_queued')
  ExternalNotificationEvent.create(name='build_start')
  ExternalNotificationEvent.create(name='build_success')
  ExternalNotificationEvent.create(name='build_failure')

  ExternalNotificationMethod.create(name='quay_notification')
  ExternalNotificationMethod.create(name='email')
  ExternalNotificationMethod.create(name='webhook')

  ExternalNotificationMethod.create(name='flowdock')
  ExternalNotificationMethod.create(name='hipchat')
  ExternalNotificationMethod.create(name='slack')

  NotificationKind.create(name='repo_push')
  NotificationKind.create(name='build_queued')
  NotificationKind.create(name='build_start')
  NotificationKind.create(name='build_success')
  NotificationKind.create(name='build_failure')

  NotificationKind.create(name='password_required')
  NotificationKind.create(name='over_private_usage')
  NotificationKind.create(name='expiring_license')
  NotificationKind.create(name='maintenance')
  NotificationKind.create(name='org_team_invite')

  NotificationKind.create(name='test_notification')


def wipe_database():
  logger.debug('Wiping all data from the DB.')

  # Sanity check to make sure we're not killing our prod db
  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  drop_model_tables(all_models, fail_silently=True)


def populate_database():
  logger.debug('Populating the DB with test data.')

  new_user_1 = model.create_user('devtable', 'password',
                                 'jschorr@devtable.com')
  new_user_1.verified = True
  new_user_1.stripe_id = TEST_STRIPE_ID
  new_user_1.save()

  dtrobot = model.create_robot('dtrobot', new_user_1)

  new_user_2 = model.create_user('public', 'password',
                                 'jacob.moshenko@gmail.com')
  new_user_2.verified = True
  new_user_2.save()

  new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
  new_user_3.verified = True
  new_user_3.save()

  model.create_robot('anotherrobot', new_user_3)

  new_user_4 = model.create_user('randomuser', 'password', 'no4@thanks.com')
  new_user_4.verified = True
  new_user_4.save()

  new_user_5 = model.create_user('unverified', 'password', 'no5@thanks.com')
  new_user_5.save()

  reader = model.create_user('reader', 'password', 'no1@thanks.com')
  reader.verified = True
  reader.save()

  outside_org = model.create_user('outsideorg', 'password', 'no2@thanks.com')
  outside_org.verified = True
  outside_org.save()

  model.create_notification('test_notification', new_user_1,
                            metadata={'some': 'value', 'arr': [1, 2, 3], 'obj': {'a': 1, 'b': 2}})

  from_date = datetime.utcnow()
  to_date = from_date + timedelta(hours=1)
  notification_metadata = {
    'from_date': formatdate(calendar.timegm(from_date.utctimetuple())),
    'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
    'reason': 'database migration'
  }
  model.create_notification('maintenance', new_user_1, metadata=notification_metadata)

  __generate_repository(new_user_4, 'randomrepo', 'Random repo repository.', False,
                        [], (4, [], ['latest', 'prod']))

  __generate_repository(new_user_1, 'simple', 'Simple repository.', False,
                        [], (4, [], ['latest', 'prod']))

  __generate_repository(new_user_1, 'sharedtags',
                        'Shared tags repository',
                        False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
                        (2, [(3, [], ['v2.0', 'v2.1', 'v2.2']),
                             (1, [(1, [(1, [], ['prod', '581a284'])],
                                   ['staging', '8423b58']),
                                  (1, [], None)], None)], None))

  __generate_repository(new_user_1, 'history', 'Historical repository.', False,
                        [], (4, [(2, [], 'latest'), (3, [], '#latest')], None))

  __generate_repository(new_user_1, 'complex',
                        'Complex repository with many branches and tags.',
                        False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
                        (2, [(3, [], 'v2.0'),
                             (1, [(1, [(1, [], ['prod'])],
                                   'staging'),
                                  (1, [], None)], None)], None))

  __generate_repository(new_user_1, 'gargantuan', None, False, [],
                        (2, [(3, [], 'v2.0'),
                             (1, [(1, [(1, [], ['latest', 'prod'])],
                                   'staging'),
                                  (1, [], None)], None),
                             (20, [], 'v3.0'),
                             (5, [], 'v4.0'),
                             (1, [(1, [], 'v5.0'), (1, [], 'v6.0')], None)],
                         None))

  __generate_repository(new_user_2, 'publicrepo',
                        'Public repository pullable by the world.', True,
                        [], (10, [], 'latest'))

  __generate_repository(outside_org, 'coolrepo',
                        'Some cool repo.', False,
                        [],
                        (5, [], 'latest'))

  __generate_repository(new_user_1, 'shared',
                        'Shared repository, another user can write.', False,
                        [(new_user_2, 'write'), (reader, 'read')],
                        (5, [], 'latest'))

  building = __generate_repository(new_user_1, 'building',
                                   'Empty repository which is building.',
                                   False, [], (0, [], None))

  token = model.create_access_token(building, 'write', 'build-worker')

  trigger = model.create_build_trigger(building, 'github', '123authtoken',
                                       new_user_1, pull_robot=dtrobot[0])
  trigger.config = json.dumps({
    'build_source': 'jakedt/testconnect',
    'subdir': '',
  })
  trigger.save()

  repo = 'ci.devtable.com:5000/%s/%s' % (building.namespace_user.username, building.name)
  job_config = {
    'repository': repo,
    'docker_tags': ['latest'],
    'build_subdir': '',
    'trigger_metadata': {
      'commit_sha': '3482adc5822c498e8f7db2e361e8d57b3d77ddd9',
      'ref': 'refs/heads/master',
      'default_branch': 'master'
    }
  }

  record = model.create_email_authorization_for_repo(new_user_1.username, 'simple',
                                                     'jschorr@devtable.com')
  record.confirmed = True
  record.save()

  model.create_email_authorization_for_repo(new_user_1.username, 'simple',
                                            'jschorr+other@devtable.com')

  build2 = model.create_repository_build(building, token, job_config,
                                         '68daeebd-a5b9-457f-80a0-4363b882f8ea',
                                         'build-name', trigger)
  build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
  build2.save()

  build3 = model.create_repository_build(building, token, job_config,
                                         'f49d07f9-93da-474d-ad5f-c852107c3892',
                                         'build-name', trigger)
  build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
  build3.save()

  build = model.create_repository_build(building, token, job_config,
                                        '701dcc3724fb4f2ea6c31400528343cd',
                                        'build-name', trigger)
  build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
  build.save()

  org = model.create_organization('buynlarge', 'quay@devtable.com',
                                  new_user_1)
  org.stripe_id = TEST_STRIPE_ID
  org.save()

  model.create_robot('coolrobot', org)

  oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
                           'http://localhost:8000/o2c.html', client_id='deadbeef')

  oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
                           'http://localhost:8000/o2c.html', client_id='deadpork',
                           description='This is another test application')

  model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')

  model.create_robot('neworgrobot', org)

  owners = model.get_organization_team('buynlarge', 'owners')
  owners.description = 'Owners have unfettered access across the entire org.'
  owners.save()

  org_repo = __generate_repository(org, 'orgrepo',
                                   'Repository owned by an org.', False,
                                   [(outside_org, 'read')],
                                   (4, [], ['latest', 'prod']))

  __generate_repository(org, 'anotherorgrepo',
                        'Another repository owned by an org.', False,
                        [],
                        (4, [], ['latest', 'prod']))

  reader_team = model.create_team('readers', org, 'member',
                                  'Readers of orgrepo.')
  model.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username, org_repo.name,
                                 'read')
  model.add_user_to_team(new_user_2, reader_team)
  model.add_user_to_team(reader, reader_team)

  __generate_repository(new_user_1, 'superwide', None, False, [],
                        [(10, [], 'latest2'),
                         (2, [], 'latest3'),
                         (2, [(1, [], 'latest11'), (2, [], 'latest12')],
                          'latest4'),
                         (2, [], 'latest5'),
                         (2, [], 'latest6'),
                         (2, [], 'latest7'),
                         (2, [], 'latest8'),
                         (2, [], 'latest9'),
                         (2, [], 'latest10'),
                         (2, [], 'latest13'),
                         (2, [], 'latest14'),
                         (2, [], 'latest15'),
                         (2, [], 'latest16'),
                         (2, [], 'latest17'),
                         (2, [], 'latest18')])

  model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_user=new_user_2)
  model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_team=reader_team)
  model.add_prototype_permission(org, 'write', activating_user=new_user_2, delegate_user=new_user_1)

  today = datetime.today()
  week_ago = today - timedelta(6)
  six_ago = today - timedelta(5)
  four_ago = today - timedelta(4)

  model.log_action('org_create_team', org.username, performer=new_user_1,
                   timestamp=week_ago, metadata={'team': 'readers'})

  model.log_action('org_set_team_role', org.username, performer=new_user_1,
                   timestamp=week_ago,
                   metadata={'team': 'readers', 'role': 'read'})

  model.log_action('create_repo', org.username, performer=new_user_1,
                   repository=org_repo, timestamp=week_ago,
                   metadata={'namespace': org.username, 'repo': 'orgrepo'})

  model.log_action('change_repo_permission', org.username,
                   performer=new_user_2, repository=org_repo,
                   timestamp=six_ago,
                   metadata={'username': new_user_1.username,
                             'repo': 'orgrepo', 'role': 'admin'})

  model.log_action('change_repo_permission', org.username,
                   performer=new_user_1, repository=org_repo,
                   timestamp=six_ago,
                   metadata={'username': new_user_2.username,
                             'repo': 'orgrepo', 'role': 'read'})

  model.log_action('add_repo_accesstoken', org.username, performer=new_user_1,
                   repository=org_repo, timestamp=four_ago,
                   metadata={'repo': 'orgrepo', 'token': 'deploytoken'})

  model.log_action('push_repo', org.username, performer=new_user_2,
                   repository=org_repo, timestamp=today,
                   metadata={'username': new_user_2.username,
                             'repo': 'orgrepo'})

  model.log_action('pull_repo', org.username, performer=new_user_2,
                   repository=org_repo, timestamp=today,
                   metadata={'username': new_user_2.username,
                             'repo': 'orgrepo'})

  model.log_action('pull_repo', org.username, repository=org_repo,
                   timestamp=today,
                   metadata={'token': 'sometoken', 'token_code': 'somecode',
                             'repo': 'orgrepo'})

  model.log_action('delete_tag', org.username, performer=new_user_2,
                   repository=org_repo, timestamp=today,
                   metadata={'username': new_user_2.username,
                             'repo': 'orgrepo', 'tag': 'sometag'})

  model.log_action('pull_repo', org.username, repository=org_repo,
                   timestamp=today,
                   metadata={'token_code': 'somecode', 'repo': 'orgrepo'})

  model.log_action('build_dockerfile', new_user_1.username, repository=building,
                   timestamp=today,
                   metadata={'repo': 'building', 'namespace': new_user_1.username,
                             'trigger_id': trigger.uuid, 'config': json.loads(trigger.config),
                             'service': trigger.service.name})

  # Drain the repository action counter queue.
  while repositoryactioncounter.count_repository_actions():
    pass


if __name__ == '__main__':
  log_level = getattr(logging, app.config['LOGGING_LEVEL'])
  logging.basicConfig(level=log_level)

  if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to initialize production database!')

  initialize_database()

  if app.config.get('POPULATE_DB_TEST_DATA', False):
    populate_database()