# pylint: disable=old-style-class,no-init

import inspect
import logging
import string
import sys
import time
import uuid

from contextlib import contextmanager
from collections import defaultdict
from datetime import datetime
from random import SystemRandom

import toposort

from enum import Enum
from peewee import *
from playhouse.shortcuts import RetryOperationalError
from sqlalchemy.engine.url import make_url

import resumablehashlib

from cachetools import lru_cache

from data.fields import (ResumableSHA256Field, ResumableSHA1Field, JSONField, Base64BinaryField,
                         FullIndexedTextField, FullIndexedCharField)
from data.text import match_mysql, match_like
from data.read_slave import ReadSlaveModel
from util.names import urn_generator


logger = logging.getLogger(__name__)

DEFAULT_DB_CONNECT_TIMEOUT = 10  # seconds

# IMAGE_NOT_SCANNED_ENGINE_VERSION is the version found in security_indexed_engine when the
# image has not yet been scanned.
IMAGE_NOT_SCANNED_ENGINE_VERSION = -1


_SCHEME_DRIVERS = {
  'mysql': MySQLDatabase,
  'mysql+pymysql': MySQLDatabase,
  'sqlite': SqliteDatabase,
  'postgresql': PostgresqlDatabase,
  'postgresql+psycopg2': PostgresqlDatabase,
}

SCHEME_MATCH_FUNCTION = {
  'mysql': match_mysql,
  'mysql+pymysql': match_mysql,
  'sqlite': match_like,
  'postgresql': match_like,
  'postgresql+psycopg2': match_like,
}

SCHEME_RANDOM_FUNCTION = {
  'mysql': fn.Rand,
  'mysql+pymysql': fn.Rand,
  'sqlite': fn.Random,
  'postgresql': fn.Random,
  'postgresql+psycopg2': fn.Random,
}


def pipes_concat(arg1, arg2, *extra_args):
  """ Concat function for sqlite, since it doesn't support fn.Concat.
      Concatenates clauses with || characters.
  """
  reduced = arg1.concat(arg2)
  for arg in extra_args:
    reduced = reduced.concat(arg)
  return reduced


def function_concat(arg1, arg2, *extra_args):
  """ Default implementation of concat which uses fn.Concat(). Used by all
      database engines except sqlite.
  """
  return fn.Concat(arg1, arg2, *extra_args)


SCHEME_SPECIALIZED_CONCAT = {
  'sqlite': pipes_concat,
}
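
# Illustrative sketch (not part of the original module): SCHEME_SPECIALIZED_CONCAT is consumed
# through the db_concat_func proxy initialized in configure() below. Assuming a query that joins
# Repository to its Namespace alias (both defined later in this module), usage would look roughly
# like:
#
#   full_name = db_concat_func(Namespace.username, '/', Repository.name)
#   query = (Repository
#            .select(Repository, full_name.alias('full_name'))
#            .join(Namespace, on=(Repository.namespace_user == Namespace.id)))
#
# On sqlite this expands to Namespace.username || '/' || Repository.name; on the other engines it
# becomes fn.Concat(...).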

def real_for_update(query):
  return query.for_update()


def null_for_update(query):
  return query


def delete_instance_filtered(instance, model_class, delete_nullable, skip_transitive_deletes):
  """ Deletes the DB instance recursively, skipping any models in the skip_transitive_deletes set.

      Callers *must* ensure that any models listed in skip_transitive_deletes are capable of
      being directly deleted when the instance is deleted (with automatic sorting handling
      dependency order).

      For example, the RepositoryTag and Image tables for Repository will always refer to the
      *same* repository when RepositoryTag references Image, so we can safely skip
      transitive deletion for the RepositoryTag table.
  """
  # We need to sort the ops so that models get cleaned in order of their dependencies.
  ops = reversed(list(instance.dependencies(delete_nullable)))
  filtered_ops = []

  dependencies = defaultdict(set)

  for query, fk in ops:
    # We only want to skip transitive deletes, which are done using subqueries in the form of
    # DELETE FROM <table> in <subquery>. If an op is not using a subquery, we allow it to be
    # applied directly.
    if fk.model_class not in skip_transitive_deletes or query.op != 'in':
      filtered_ops.append((query, fk))

    if query.op == 'in':
      dependencies[fk.model_class.__name__].add(query.rhs.model_class.__name__)
    elif query.op == '=':
      dependencies[fk.model_class.__name__].add(model_class.__name__)
    else:
      raise RuntimeError('Unknown operator in recursive repository delete query')

  sorted_models = list(reversed(toposort.toposort_flatten(dependencies)))

  def sorted_model_key(query_fk_tuple):
    cmp_query, cmp_fk = query_fk_tuple
    if cmp_query.op == 'in':
      return -1
    return sorted_models.index(cmp_fk.model_class.__name__)

  filtered_ops.sort(key=sorted_model_key)

  with db_transaction():
    for query, fk in filtered_ops:
      _model = fk.model_class
      if fk.null and not delete_nullable:
        _model.update(**{fk.name: None}).where(query).execute()
      else:
        _model.delete().where(query).execute()

  return instance.delete().where(instance._pk_expr()).execute()


SCHEME_SPECIALIZED_FOR_UPDATE = {
  'sqlite': null_for_update,
}
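
# Illustrative sketch (hypothetical dependency map, not taken from the code above): given
#
#   dependencies = {'RepositoryTag': {'Image'}, 'Image': {'Repository'}}
#
# toposort.toposort_flatten(dependencies) returns ['Repository', 'Image', 'RepositoryTag'],
# least-dependent first. The reversed list used above therefore orders the delete operations so
# that rows in the most-dependent tables are removed before the rows they reference.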

class CallableProxy(Proxy):
  def __call__(self, *args, **kwargs):
    if self.obj is None:
      raise AttributeError('Cannot use uninitialized Proxy.')
    return self.obj(*args, **kwargs)


class CloseForLongOperation(object):
  """ Helper object which disconnects the database then reconnects after the nested operation
      completes.
  """

  def __init__(self, config_object):
    self.config_object = config_object

  def __enter__(self):
    # TODO(jschorr): Remove this stupid hack.
    if self.config_object.get('TESTING') is True:
      return

    close_db_filter(None)

  def __exit__(self, typ, value, traceback):
    # Note: Nothing to do. The next SQL call will reconnect automatically.
    pass


class UseThenDisconnect(object):
  """ Helper object for conducting work with a database and then tearing it down. """

  def __init__(self, config_object):
    self.config_object = config_object

  def __enter__(self):
    # TODO(jschorr): Remove this stupid hack.
    if self.config_object.get('TESTING') is True:
      return

    configure(self.config_object)

  def __exit__(self, typ, value, traceback):
    # TODO(jschorr): Remove this stupid hack.
    if self.config_object.get('TESTING') is True:
      return

    close_db_filter(None)
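
# Usage sketch (assumption: `config` is the app's configuration dict used elsewhere in this
# module). Both helpers are context managers around the module-level connection state:
#
#   with CloseForLongOperation(config):
#     run_long_external_task()   # hypothetical long-running work; no DB connection held
#
#   with UseThenDisconnect(config):
#     perform_db_work()          # hypothetical work; the connection is torn down on exit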

class TupleSelector(object):
  """ Helper class for selecting tuples from a peewee query and easily accessing
      them as if they were objects.
  """
  class _TupleWrapper(object):
    def __init__(self, data, fields):
      self._data = data
      self._fields = fields

    def get(self, field):
      return self._data[self._fields.index(TupleSelector.tuple_reference_key(field))]

  @classmethod
  def tuple_reference_key(cls, field):
    """ Returns a string key for referencing a field in a TupleSelector. """
    if field._node_type == 'func':
      return field.name + ','.join([cls.tuple_reference_key(arg) for arg in field.arguments])

    if field._node_type == 'field':
      return field.name + ':' + field.model_class.__name__

    raise Exception('Unknown field type %s in TupleSelector' % field._node_type)

  def __init__(self, query, fields):
    self._query = query.select(*fields).tuples()
    self._fields = [TupleSelector.tuple_reference_key(field) for field in fields]

  def __iter__(self):
    return self._build_iterator()

  def _build_iterator(self):
    for tuple_data in self._query:
      yield TupleSelector._TupleWrapper(tuple_data, self._fields)
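
# Usage sketch (hypothetical query; Repository and Visibility are defined later in this module):
#
#   fields = [Repository.name, Visibility.name]
#   for result in TupleSelector(Repository.select().join(Visibility), fields):
#     print(result.get(Repository.name), result.get(Visibility.name))
#
# Each row is fetched as a plain tuple (no model instantiation), and get() maps a field back to
# its position via tuple_reference_key().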

db = Proxy()
read_slave = Proxy()
db_random_func = CallableProxy()
db_match_func = CallableProxy()
db_for_update = CallableProxy()
db_transaction = CallableProxy()
db_concat_func = CallableProxy()
ensure_under_transaction = CallableProxy()


def validate_database_url(url, db_kwargs, connect_timeout=5):
  db_kwargs = db_kwargs.copy()

  driver = _db_from_url(url, db_kwargs, connect_timeout=connect_timeout)
  driver.connect()
  driver.close()


def _wrap_for_retry(driver):
  return type('Retrying' + driver.__class__.__name__, (RetryOperationalError, driver), {})

def _db_from_url(url, db_kwargs, connect_timeout=DEFAULT_DB_CONNECT_TIMEOUT):
  parsed_url = make_url(url)

  if parsed_url.host:
    db_kwargs['host'] = parsed_url.host
  if parsed_url.port:
    db_kwargs['port'] = parsed_url.port
  if parsed_url.username:
    db_kwargs['user'] = parsed_url.username
  if parsed_url.password:
    db_kwargs['password'] = parsed_url.password

  # Note: sqlite does not support connect_timeout.
  if parsed_url.drivername != 'sqlite':
    db_kwargs['connect_timeout'] = db_kwargs.get('connect_timeout', connect_timeout)

  driver = _SCHEME_DRIVERS[parsed_url.drivername]
  wrapped_driver = _wrap_for_retry(driver)
  return wrapped_driver(parsed_url.database, **db_kwargs)


def configure(config_object):
  logger.debug('Configuring database')
  db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
  write_db_uri = config_object['DB_URI']
  db.initialize(_db_from_url(write_db_uri, db_kwargs))

  parsed_write_uri = make_url(write_db_uri)
  db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
  db_match_func.initialize(SCHEME_MATCH_FUNCTION[parsed_write_uri.drivername])
  db_for_update.initialize(SCHEME_SPECIALIZED_FOR_UPDATE.get(parsed_write_uri.drivername,
                                                             real_for_update))
  db_concat_func.initialize(SCHEME_SPECIALIZED_CONCAT.get(parsed_write_uri.drivername,
                                                          function_concat))

  read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
  if read_slave_uri is not None:
    read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))

  def _db_transaction():
    return config_object['DB_TRANSACTION_FACTORY'](db)

  @contextmanager
  def _ensure_under_transaction():
    if not config_object['TESTING']:
      if db.transaction_depth() == 0:
        raise Exception('Expected to be under a transaction')

    yield

  db_transaction.initialize(_db_transaction)
  ensure_under_transaction.initialize(_ensure_under_transaction)
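
# Configuration sketch (illustrative values, not the canonical Quay config): configure() expects
# a dict-like config object roughly of this shape, after which the module-level proxies (db,
# db_transaction, etc.) are usable.
#
#   config = {
#     'DB_URI': 'mysql+pymysql://quay:password@localhost/quay',
#     'DB_CONNECTION_ARGS': {'threadlocals': True, 'autorollback': True},
#     'DB_TRANSACTION_FACTORY': lambda db: db.transaction(),
#     'TESTING': False,
#   }
#   configure(config)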

def random_string_generator(length=16):
  def random_string():
    random = SystemRandom()
    return ''.join([random.choice(string.ascii_uppercase + string.digits)
                    for _ in range(length)])
  return random_string


def uuid_generator():
  return str(uuid.uuid4())


get_epoch_timestamp = lambda: int(time.time())
get_epoch_timestamp_ms = lambda: int(time.time() * 1000)


def close_db_filter(_):
  try:
    if not db.is_closed():
      logger.debug('Disconnecting from database.')
      db.close()

    if read_slave.obj is not None and not read_slave.is_closed():
      logger.debug('Disconnecting from read slave.')
      read_slave.close()
  except AttributeError:
    # If the database is closed between the is_closed() check above and the close() call, an
    # AttributeError will be raised. Simply eat this exception and continue onward.
    pass

class QuayUserField(ForeignKeyField):
  def __init__(self, allows_robots=False, robot_null_delete=False, *args, **kwargs):
    self.allows_robots = allows_robots
    self.robot_null_delete = robot_null_delete
    if 'rel_model' not in kwargs:
      kwargs['rel_model'] = User

    super(QuayUserField, self).__init__(*args, **kwargs)


class EnumField(ForeignKeyField):
  """ Create a cached python Enum from an EnumTable """
  def __init__(self, rel_model, enum_key_field='name', *args, **kwargs):
    """
    rel_model is the EnumTable model-class (see ForeignKeyField)
    enum_key_field is the field from the EnumTable to use as the enum name
    """
    self.enum_key_field = enum_key_field
    super(EnumField, self).__init__(rel_model, *args, **kwargs)

  @property
  @lru_cache(maxsize=1)
  def enum(self):
    """ Returns a python enum.Enum generated from the associated EnumTable """
    values = []
    for row in self.rel_model.select():
      key = getattr(row, self.enum_key_field)
      value = getattr(row, 'id')
      values.append((key, value))

    return Enum(self.rel_model.__name__, values)

  def get_id(self, name):
    """ Returns the ForeignKeyId from the name field
    Example:
       >>> Repository.repo_kind.get_id("application")
       2
    """
    try:
      return self.enum[name].value
    except KeyError:
      raise self.rel_model.DoesNotExist

  def get_name(self, value):
    """ Returns the name value from the ForeignKeyId
    Example:
       >>> Repository.repo_kind.get_name(2)
       "application"
    """
    try:
      return self.enum(value).name
    except ValueError:
      raise self.rel_model.DoesNotExist

class BaseModel(ReadSlaveModel):
  class Meta:
    database = db
    read_slaves = (read_slave,)

  def __getattribute__(self, name):
    """ Adds _id accessors so that foreign key field IDs can be looked up without making
        a database roundtrip.
    """
    if name.endswith('_id'):
      field_name = name[0:len(name) - 3]
      if field_name in self._meta.fields:
        return self._data.get(field_name)

    return super(BaseModel, self).__getattribute__(name)
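
# Accessor sketch (hypothetical objects): for a Repository row `repo`, `repo.namespace_user`
# lazily loads the related User row, while `repo.namespace_user_id` returns the raw foreign key
# value already present in the row:
#
#   owner_id = repo.namespace_user_id   # no extra query
#   owner = repo.namespace_user         # triggers a query for the User row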

class User(BaseModel):
  uuid = CharField(default=uuid_generator, max_length=36, null=True)
  username = CharField(unique=True, index=True)
  password_hash = CharField(null=True)
  email = CharField(unique=True, index=True,
                    default=random_string_generator(length=64))
  verified = BooleanField(default=False)
  stripe_id = CharField(index=True, null=True)
  organization = BooleanField(default=False, index=True)
  robot = BooleanField(default=False, index=True)
  invoice_email = BooleanField(default=False)
  invalid_login_attempts = IntegerField(default=0)
  last_invalid_login = DateTimeField(default=datetime.utcnow)
  removed_tag_expiration_s = IntegerField(default=1209600)  # Two weeks
  enabled = BooleanField(default=True)
  invoice_email_address = CharField(null=True, index=True)

  given_name = CharField(null=True)
  family_name = CharField(null=True)
  company = CharField(null=True)

  def delete_instance(self, recursive=False, delete_nullable=False):
    # If we are deleting a robot account, only execute the subset of queries necessary.
    if self.robot:
      # For all the model dependencies, only delete those that allow robots.
      for query, fk in reversed(list(self.dependencies(search_nullable=True))):
        if isinstance(fk, QuayUserField) and fk.allows_robots:
          _model = fk.model_class

          if fk.robot_null_delete:
            _model.update(**{fk.name: None}).where(query).execute()
          else:
            _model.delete().where(query).execute()

      # Delete the instance itself.
      super(User, self).delete_instance(recursive=False, delete_nullable=False)
    else:
      if not recursive:
        raise RuntimeError('Non-recursive delete on user.')

      # These models don't need to use transitive deletes, because the referenced objects
      # are cleaned up directly in the model.
      skip_transitive_deletes = {Image, Repository, Team, RepositoryBuild, ServiceKeyApproval,
                                 RepositoryBuildTrigger, ServiceKey, RepositoryPermission,
                                 TeamMemberInvite, Star, RepositoryAuthorizedEmail, TeamMember,
                                 RepositoryTag, PermissionPrototype, DerivedStorageForImage,
                                 TagManifest, AccessToken, OAuthAccessToken, BlobUpload,
                                 RepositoryNotification, OAuthAuthorizationCode,
                                 RepositoryActionCount, TagManifestLabel, Tag,
                                 ManifestLabel, BlobUploading, TeamSync,
                                 RepositorySearchScore} | beta_classes
      delete_instance_filtered(self, User, delete_nullable, skip_transitive_deletes)


Namespace = User.alias()

class UserPromptTypes(object):
  CONFIRM_USERNAME = 'confirm_username'
  ENTER_NAME = 'enter_name'
  ENTER_COMPANY = 'enter_company'


class UserPromptKind(BaseModel):
  name = CharField(index=True)


class UserPrompt(BaseModel):
  user = QuayUserField(allows_robots=False, index=True)
  kind = ForeignKeyField(UserPromptKind)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('user', 'kind'), True),
    )

class TeamRole(BaseModel):
  name = CharField(index=True)


class Team(BaseModel):
  name = CharField(index=True)
  organization = QuayUserField(index=True)
  role = ForeignKeyField(TeamRole)
  description = TextField(default='')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # A team name must be unique within an organization
      (('name', 'organization'), True),
    )


class TeamMember(BaseModel):
  user = QuayUserField(allows_robots=True, index=True)
  team = ForeignKeyField(Team)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # A user may belong to a team only once
      (('user', 'team'), True),
    )


class TeamMemberInvite(BaseModel):
  # Note: Either user OR email will be filled in, but not both.
  user = QuayUserField(index=True, null=True)
  email = CharField(null=True)
  team = ForeignKeyField(Team)
  inviter = ForeignKeyField(User, related_name='inviter')
  invite_token = CharField(default=urn_generator(['teaminvite']))

class LoginService(BaseModel):
  name = CharField(unique=True, index=True)


class TeamSync(BaseModel):
  team = ForeignKeyField(Team, unique=True)

  transaction_id = CharField()
  last_updated = DateTimeField(null=True, index=True)
  service = ForeignKeyField(LoginService)
  config = JSONField()


class FederatedLogin(BaseModel):
  user = QuayUserField(allows_robots=True, index=True)
  service = ForeignKeyField(LoginService)
  service_ident = CharField()
  metadata_json = TextField(default='{}')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on service and the local service id
      (('service', 'service_ident'), True),

      # a user may only have one federated login per service
      (('service', 'user'), True),
    )


class Visibility(BaseModel):
  name = CharField(index=True, unique=True)


class RepositoryKind(BaseModel):
  name = CharField(index=True, unique=True)

class Repository(BaseModel):
  namespace_user = QuayUserField(null=True)
  name = FullIndexedCharField(match_function=db_match_func)
  visibility = ForeignKeyField(Visibility)
  description = FullIndexedTextField(match_function=db_match_func, null=True)
  badge_token = CharField(default=uuid_generator)
  kind = EnumField(RepositoryKind)
  trust_enabled = BooleanField(default=False)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on namespace and name
      (('namespace_user', 'name'), True),
    )

  def delete_instance(self, recursive=False, delete_nullable=False):
    if not recursive:
      raise RuntimeError('Non-recursive delete on repository.')

    # These models don't need to use transitive deletes, because the referenced objects
    # are cleaned up directly
    skip_transitive_deletes = {RepositoryTag, RepositoryBuild, RepositoryBuildTrigger, BlobUpload,
                               Image, TagManifest, TagManifestLabel, Label, DerivedStorageForImage,
                               RepositorySearchScore} | beta_classes

    delete_instance_filtered(self, Repository, delete_nullable, skip_transitive_deletes)


class RepositorySearchScore(BaseModel):
  repository = ForeignKeyField(Repository, unique=True)
  score = BigIntegerField(index=True, default=0)
  last_updated = DateTimeField(null=True)

class Star(BaseModel):
  user = ForeignKeyField(User)
  repository = ForeignKeyField(Repository)
  created = DateTimeField(default=datetime.now)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on user and repository
      (('user', 'repository'), True),
    )


class Role(BaseModel):
  name = CharField(index=True, unique=True)


class RepositoryPermission(BaseModel):
  team = ForeignKeyField(Team, null=True)
  user = QuayUserField(allows_robots=True, null=True)
  repository = ForeignKeyField(Repository)
  role = ForeignKeyField(Role)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('team', 'repository'), True),
      (('user', 'repository'), True),
    )


class PermissionPrototype(BaseModel):
  org = QuayUserField(index=True, related_name='orgpermissionproto')
  uuid = CharField(default=uuid_generator)
  activating_user = QuayUserField(allows_robots=True, index=True, null=True,
                                  related_name='userpermissionproto')
  delegate_user = QuayUserField(allows_robots=True, related_name='receivingpermission',
                                null=True)
  delegate_team = ForeignKeyField(Team, related_name='receivingpermission',
                                  null=True)
  role = ForeignKeyField(Role)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('org', 'activating_user'), False),
    )


class AccessTokenKind(BaseModel):
  name = CharField(unique=True, index=True)

class AccessToken(BaseModel):
  friendly_name = CharField(null=True)
  code = CharField(default=random_string_generator(length=64), unique=True,
                   index=True)
  repository = ForeignKeyField(Repository)
  created = DateTimeField(default=datetime.now)
  role = ForeignKeyField(Role)
  temporary = BooleanField(default=True)
  kind = ForeignKeyField(AccessTokenKind, null=True)


class BuildTriggerService(BaseModel):
  name = CharField(index=True, unique=True)


class RepositoryBuildTrigger(BaseModel):
  uuid = CharField(default=uuid_generator)
  service = ForeignKeyField(BuildTriggerService)
  repository = ForeignKeyField(Repository)
  connected_user = QuayUserField()
  auth_token = CharField(null=True)
  private_key = TextField(null=True)
  config = TextField(default='{}')
  write_token = ForeignKeyField(AccessToken, null=True)
  pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot',
                             robot_null_delete=True)


class EmailConfirmation(BaseModel):
  code = CharField(default=random_string_generator(), unique=True, index=True)
  user = QuayUserField()
  pw_reset = BooleanField(default=False)
  new_email = CharField(null=True)
  email_confirm = BooleanField(default=False)
  created = DateTimeField(default=datetime.now)

class ImageStorage(BaseModel):
  uuid = CharField(default=uuid_generator, index=True, unique=True)
  image_size = BigIntegerField(null=True)
  uncompressed_size = BigIntegerField(null=True)
  uploading = BooleanField(default=True, null=True)
  cas_path = BooleanField(default=True)
  content_checksum = CharField(null=True, index=True)


class ImageStorageTransformation(BaseModel):
  name = CharField(index=True, unique=True)


class ImageStorageSignatureKind(BaseModel):
  name = CharField(index=True, unique=True)


class ImageStorageSignature(BaseModel):
  storage = ForeignKeyField(ImageStorage)
  kind = ForeignKeyField(ImageStorageSignatureKind)
  signature = TextField(null=True)
  uploading = BooleanField(default=True, null=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('kind', 'storage'), True),
    )


class ImageStorageLocation(BaseModel):
  name = CharField(unique=True, index=True)


class ImageStoragePlacement(BaseModel):
  storage = ForeignKeyField(ImageStorage)
  location = ForeignKeyField(ImageStorageLocation)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # An image can only be placed in the same place once
      (('storage', 'location'), True),
    )


class UserRegion(BaseModel):
  user = QuayUserField(index=True, allows_robots=False)
  location = ForeignKeyField(ImageStorageLocation)

  indexes = (
    (('user', 'location'), True),
  )

_ImageProxy = Proxy()


class Image(BaseModel):
  # This class is intentionally denormalized. Even though images are supposed
  # to be globally unique we can't treat them as such for permissions and
  # security reasons. So rather than Repository <-> Image being many to many
  # each image now belongs to exactly one repository.
  docker_image_id = CharField(index=True)
  repository = ForeignKeyField(Repository)

  # '/' separated list of ancestry ids, e.g. /1/2/6/7/10/
  ancestors = CharField(index=True, default='/', max_length=64535, null=True)

  storage = ForeignKeyField(ImageStorage, null=True)

  created = DateTimeField(null=True)
  comment = TextField(null=True)
  command = TextField(null=True)
  aggregate_size = BigIntegerField(null=True)
  v1_json_metadata = TextField(null=True)
  v1_checksum = CharField(null=True)

  security_indexed = BooleanField(default=False, index=True)
  security_indexed_engine = IntegerField(default=IMAGE_NOT_SCANNED_ENGINE_VERSION, index=True)

  # We use a proxy here instead of 'self' in order to disable the foreign key constraint
  parent = ForeignKeyField(_ImageProxy, null=True, related_name='children')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # we don't really want duplicates
      (('repository', 'docker_image_id'), True),

      (('security_indexed_engine', 'security_indexed'), False),
    )

  def ancestor_id_list(self):
    """ Returns an integer list of ancestor ids, ordered chronologically from
        root to direct parent.
    """
    return map(int, self.ancestors.split('/')[1:-1])


_ImageProxy.initialize(Image)
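
# Parsing sketch (hypothetical value): for an image whose ancestors column holds '/1/2/6/7/10/',
# ancestor_id_list() yields [1, 2, 6, 7, 10], root id first and direct parent last, because
# split('/') produces ['', '1', '2', '6', '7', '10', ''] and the empty edge entries are dropped
# by the [1:-1] slice.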

class DerivedStorageForImage(BaseModel):
  source_image = ForeignKeyField(Image)
  derivative = ForeignKeyField(ImageStorage)
  transformation = ForeignKeyField(ImageStorageTransformation)
  uniqueness_hash = CharField(null=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('source_image', 'transformation', 'uniqueness_hash'), True),
    )

class RepositoryTag(BaseModel):
  name = CharField()
  image = ForeignKeyField(Image)
  repository = ForeignKeyField(Repository)
  lifetime_start_ts = IntegerField(default=get_epoch_timestamp)
  lifetime_end_ts = IntegerField(null=True, index=True)
  hidden = BooleanField(default=False)
  reversion = BooleanField(default=False)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'name'), False),

      # This unique index prevents deadlocks when concurrently moving and deleting tags
      (('repository', 'name', 'lifetime_end_ts'), True),
    )


class TagManifest(BaseModel):
  tag = ForeignKeyField(RepositoryTag, unique=True)
  digest = CharField(index=True)
  json_data = TextField()

class BUILD_PHASE(object):
  """ Build phases enum """
  ERROR = 'error'
  INTERNAL_ERROR = 'internalerror'
  BUILD_SCHEDULED = 'build-scheduled'
  UNPACKING = 'unpacking'
  PULLING = 'pulling'
  BUILDING = 'building'
  PUSHING = 'pushing'
  WAITING = 'waiting'
  COMPLETE = 'complete'
  CANCELLED = 'cancelled'

  @classmethod
  def is_terminal_phase(cls, phase):
    return (phase == cls.COMPLETE or
            phase == cls.ERROR or
            phase == cls.INTERNAL_ERROR or
            phase == cls.CANCELLED)

class QueueItem(BaseModel):
  queue_name = CharField(index=True, max_length=1024)
  body = TextField()
  available_after = DateTimeField(default=datetime.utcnow)
  available = BooleanField(default=True)
  processing_expires = DateTimeField(null=True)
  retries_remaining = IntegerField(default=5)

  # state_id is a random value that changes on every save. Claiming an item checks only the
  # (id, state_id) pair in the UPDATE's WHERE clause rather than comparing every column; InnoDB
  # takes record locks on every row scanned by an UPDATE, even rows ultimately excluded by other
  # conditions, so minimizing the checked columns significantly reduces lock contention. Existing
  # rows receive an empty state_id when the migration runs, which is why state_id cannot (yet)
  # replace the id check entirely.
  state_id = CharField(default=uuid_generator, index=True, unique=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    only_save_dirty = True
    indexes = (
      (('processing_expires', 'available'), False),
      (('processing_expires', 'queue_name', 'available'), False),
      (('processing_expires', 'available_after', 'retries_remaining', 'available'), False),
      (('processing_expires', 'available_after', 'queue_name', 'retries_remaining', 'available'), False),
    )

  def save(self, *args, **kwargs):
    # Always change the queue item's state ID when we update it.
    self.state_id = str(uuid.uuid4())
    super(QueueItem, self).save(*args, **kwargs)
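
# Claiming sketch (illustrative, not the actual queue code): a worker that previously read an
# item's id and state_id can claim it with a conditional update; zero rows modified means another
# worker changed the item first.
#
#   def try_claim(item_id, seen_state_id, expiration):
#     updated = (QueueItem
#                .update(processing_expires=expiration, state_id=str(uuid.uuid4()))
#                .where(QueueItem.id == item_id, QueueItem.state_id == seen_state_id)
#                .execute())
#     return updated == 1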

class RepositoryBuild(BaseModel):
  uuid = CharField(default=uuid_generator, index=True)
  repository = ForeignKeyField(Repository)
  access_token = ForeignKeyField(AccessToken)
  resource_key = CharField(index=True, null=True)
  job_config = TextField()
  phase = CharField(default=BUILD_PHASE.WAITING)
  started = DateTimeField(default=datetime.now, index=True)
  display_name = CharField()
  trigger = ForeignKeyField(RepositoryBuildTrigger, null=True)
  pull_robot = QuayUserField(null=True, related_name='buildpullrobot', allows_robots=True,
                             robot_null_delete=True)
  logs_archived = BooleanField(default=False)
  queue_id = CharField(null=True, index=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'started', 'phase'), False),
      (('started', 'logs_archived', 'phase'), False),
    )

class LogEntryKind(BaseModel):
  name = CharField(index=True, unique=True)


class LogEntry(BaseModel):
  kind = ForeignKeyField(LogEntryKind)
  account = IntegerField(index=True, db_column='account_id')
  performer = IntegerField(index=True, null=True, db_column='performer_id')
  repository = IntegerField(index=True, null=True, db_column='repository_id')
  datetime = DateTimeField(default=datetime.now, index=True)
  ip = CharField(null=True)
  metadata_json = TextField(default='{}')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('account', 'datetime'), False),
      (('performer', 'datetime'), False),
      (('repository', 'datetime'), False),
      (('repository', 'datetime', 'kind'), False),
    )

class RepositoryActionCount(BaseModel):
  repository = ForeignKeyField(Repository)
  count = IntegerField()
  date = DateField(index=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on repository and date
      (('repository', 'date'), True),
    )

class OAuthApplication(BaseModel):
  client_id = CharField(index=True, default=random_string_generator(length=20))
  client_secret = CharField(default=random_string_generator(length=40))
  redirect_uri = CharField()
  application_uri = CharField()
  organization = QuayUserField()

  name = CharField()
  description = TextField(default='')
  avatar_email = CharField(null=True, db_column='gravatar_email')


class OAuthAuthorizationCode(BaseModel):
  application = ForeignKeyField(OAuthApplication)
  code = CharField(index=True)
  scope = CharField()
  data = TextField()  # Context for the code, such as the user


class OAuthAccessToken(BaseModel):
  uuid = CharField(default=uuid_generator, index=True)
  application = ForeignKeyField(OAuthApplication)
  authorized_user = QuayUserField()
  scope = CharField()
  access_token = CharField(index=True)
  token_type = CharField(default='Bearer')
  expires_at = DateTimeField()
  refresh_token = CharField(index=True, null=True)
  data = TextField()  # This is context for which this token was generated, such as the user

class NotificationKind(BaseModel):
  name = CharField(index=True, unique=True)


class Notification(BaseModel):
  uuid = CharField(default=uuid_generator, index=True)
  kind = ForeignKeyField(NotificationKind)
  target = QuayUserField(index=True)
  metadata_json = TextField(default='{}')
  created = DateTimeField(default=datetime.now, index=True)
  dismissed = BooleanField(default=False)
  lookup_path = CharField(null=True, index=True)


class ExternalNotificationEvent(BaseModel):
  name = CharField(index=True, unique=True)


class ExternalNotificationMethod(BaseModel):
  name = CharField(index=True, unique=True)


class RepositoryNotification(BaseModel):
  uuid = CharField(default=uuid_generator, index=True)
  repository = ForeignKeyField(Repository)
  event = ForeignKeyField(ExternalNotificationEvent)
  method = ForeignKeyField(ExternalNotificationMethod)
  title = CharField(null=True)
  config_json = TextField()
  event_config_json = TextField(default='{}')
  number_of_failures = IntegerField(default=0)

class RepositoryAuthorizedEmail(BaseModel):
  repository = ForeignKeyField(Repository)
  email = CharField()
  code = CharField(default=random_string_generator(), unique=True, index=True)
  confirmed = BooleanField(default=False)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on email and repository
      (('email', 'repository'), True),
    )


class BlobUpload(BaseModel):
  repository = ForeignKeyField(Repository)
  uuid = CharField(index=True, unique=True)
  byte_count = BigIntegerField(default=0)
  sha_state = ResumableSHA256Field(null=True, default=resumablehashlib.sha256)
  location = ForeignKeyField(ImageStorageLocation)
  storage_metadata = JSONField(null=True, default={})
  chunk_count = IntegerField(default=0)
  uncompressed_byte_count = BigIntegerField(null=True)
  created = DateTimeField(default=datetime.now, index=True)
  piece_sha_state = ResumableSHA1Field(null=True)
  piece_hashes = Base64BinaryField(null=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # create a unique index on repository and uuid
      (('repository', 'uuid'), True),
    )

class QuayService(BaseModel):
  name = CharField(index=True, unique=True)


class QuayRegion(BaseModel):
  name = CharField(index=True, unique=True)


class QuayRelease(BaseModel):
  service = ForeignKeyField(QuayService)
  version = CharField()
  region = ForeignKeyField(QuayRegion)
  reverted = BooleanField(default=False)
  created = DateTimeField(default=datetime.now, index=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # unique release per region
      (('service', 'version', 'region'), True),

      # get recent releases
      (('service', 'region', 'created'), False),
    )

class TorrentInfo(BaseModel):
  storage = ForeignKeyField(ImageStorage)
  piece_length = IntegerField()
  pieces = Base64BinaryField()

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      # we may want to compute the piece hashes multiple times with different piece lengths
      (('storage', 'piece_length'), True),
    )

class ServiceKeyApprovalType(Enum):
  SUPERUSER = 'Super User API'
  KEY_ROTATION = 'Key Rotation'
  AUTOMATIC = 'Automatic'


_ServiceKeyApproverProxy = Proxy()


class ServiceKeyApproval(BaseModel):
  approver = ForeignKeyField(_ServiceKeyApproverProxy, null=True)
  approval_type = CharField(index=True)
  approved_date = DateTimeField(default=datetime.utcnow)
  notes = TextField(default='')


_ServiceKeyApproverProxy.initialize(User)


class ServiceKey(BaseModel):
  name = CharField()
  kid = CharField(unique=True, index=True)
  service = CharField(index=True)
  jwk = JSONField()
  metadata = JSONField()
  created_date = DateTimeField(default=datetime.utcnow)
  expiration_date = DateTimeField(null=True)
  rotation_duration = IntegerField(null=True)
  approval = ForeignKeyField(ServiceKeyApproval, null=True)

class MediaType(BaseModel):
  """ MediaType is an enumeration of the possible formats of various objects in the data model.
      This model is a part of the new OCI/CNR model set.
  """
  name = CharField(index=True, unique=True)


class Messages(BaseModel):
  content = TextField()
  uuid = CharField(default=uuid_generator, max_length=36, index=True)
  severity = CharField(default='info', index=True)
  media_type = ForeignKeyField(MediaType)


class LabelSourceType(BaseModel):
  """ LabelSourceType is an enumeration of the possible sources for a label.
      This model is a part of the new OCI/CNR model set.
  """
  name = CharField(index=True, unique=True)
  mutable = BooleanField(default=False)


class Label(BaseModel):
  """ Label represents user-facing metadata associated with another entry in the database (e.g. a
      Manifest).
      This model is a part of the new OCI/CNR model set.
  """
  uuid = CharField(default=uuid_generator, index=True, unique=True)
  key = CharField(index=True)
  value = TextField()
  media_type = EnumField(MediaType)
  source_type = ForeignKeyField(LabelSourceType)


class TagManifestLabel(BaseModel):
  """ Mapping from a tag manifest to a label.
      This model is a part of the new OCI/CNR model set.
  """
  repository = ForeignKeyField(Repository, index=True)
  annotated = ForeignKeyField(TagManifest, index=True)
  label = ForeignKeyField(Label)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('annotated', 'label'), True),
    )
2016-09-21 02:09:25 +00:00
|
|
|
|
2016-06-15 18:48:46 +00:00
|
|
|
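

# Illustrative sketch only: Label rows are generic and get attached to a tag manifest through the
# TagManifestLabel mapping table above. The helper name is an assumption; the join relies on
# standard peewee behavior for the foreign keys declared above.
def _example_labels_for_tag_manifest(tag_manifest):
  return (Label
          .select()
          .join(TagManifestLabel)
          .where(TagManifestLabel.annotated == tag_manifest))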


class Blob(BaseModel):
  """ Blob represents a content-addressable object stored outside of the database.

      This model is a part of the new OCI/CNR model set.
  """
  digest = CharField(index=True, unique=True)
  media_type = EnumField(MediaType)
  size = BigIntegerField()
  uncompressed_size = BigIntegerField(null=True)


class BlobPlacementLocation(BaseModel):
  """ BlobPlacementLocation is an enumeration of the possible storage locations for Blobs.

      This model is a part of the new OCI/CNR model set.
  """
  name = CharField(index=True, unique=True)


class BlobPlacementLocationPreference(BaseModel):
  """ BlobPlacementLocationPreference is a location to which a user's data will be replicated.

      This model is a part of the new OCI/CNR model set.
  """
  user = QuayUserField(index=True, allows_robots=False)
  location = EnumField(BlobPlacementLocation)


class BlobPlacement(BaseModel):
  """ BlobPlacement represents the location of a Blob.

      This model is a part of the new OCI/CNR model set.
  """
  blob = ForeignKeyField(Blob)
  location = EnumField(BlobPlacementLocation)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('blob', 'location'), True),
    )
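

# Illustrative sketch only: a blob can be placed in multiple locations, one BlobPlacement row per
# (blob, location) pair; the unique index above rejects duplicates. get_or_create is standard
# peewee; passing the BlobPlacementLocation row straight into the EnumField is an assumption.
def _example_record_blob_placement(blob, location_name):
  location = BlobPlacementLocation.get(BlobPlacementLocation.name == location_name)
  placement, _ = BlobPlacement.get_or_create(blob=blob, location=location)
  return placement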


class BlobUploading(BaseModel):
  """ BlobUploading represents the state of a Blob currently being uploaded.

      This model is a part of the new OCI/CNR model set.
  """
  uuid = CharField(index=True, unique=True)
  created = DateTimeField(default=datetime.now, index=True)
  repository = ForeignKeyField(Repository, index=True)
  location = ForeignKeyField(BlobPlacementLocation)
  byte_count = IntegerField(default=0)
  uncompressed_byte_count = IntegerField(null=True)
  chunk_count = IntegerField(default=0)
  storage_metadata = JSONField(null=True, default={})
  sha_state = ResumableSHA256Field(null=True, default=resumablehashlib.sha256)
  piece_sha_state = ResumableSHA1Field(null=True)
  piece_hashes = Base64BinaryField(null=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'uuid'), True),
    )
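

# Illustrative sketch only: each uploaded chunk advances the counters and the resumable SHA-256
# state so that another worker can pick the upload back up later. Calling .update() on the stored
# sha_state is an assumption about the resumablehashlib object this field deserializes to.
def _example_record_uploaded_chunk(upload, chunk_bytes):
  upload.sha_state.update(chunk_bytes)
  upload.byte_count += len(chunk_bytes)
  upload.chunk_count += 1
  upload.save()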


class Manifest(BaseModel):
  """ Manifest represents the metadata and collection of blobs that comprise a container image.

      This model is a part of the new OCI/CNR model set.
  """
  digest = CharField(index=True, unique=True)
  media_type = EnumField(MediaType)
  manifest_json = JSONField()


class ManifestLabel(BaseModel):
  """ ManifestLabel represents label metadata annotating a Manifest.

      This model is a part of the new OCI/CNR model set.
  """
  repository = ForeignKeyField(Repository, index=True)
  annotated = ForeignKeyField(Manifest, index=True)
  label = ForeignKeyField(Label)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'annotated', 'label'), True),
    )


class ManifestBlob(BaseModel):
  """ ManifestBlob is a many-to-many relation table linking Manifests and Blobs.

      This model is a part of the new OCI/CNR model set.
  """
  manifest = ForeignKeyField(Manifest, index=True)
  blob = ForeignKeyField(Blob, index=True)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('manifest', 'blob'), True),
    )
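

# Illustrative sketch only: the blobs referenced by a manifest are reachable through the
# ManifestBlob relation table above; the helper name is an assumption.
def _example_blobs_for_manifest(manifest):
  return (Blob
          .select()
          .join(ManifestBlob)
          .where(ManifestBlob.manifest == manifest))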


class ManifestList(BaseModel):
  """ ManifestList represents all of the various manifests that compose a Tag.

      This model is a part of the new OCI/CNR model set.
  """
  digest = CharField(index=True, unique=True)
  manifest_list_json = JSONField()
  schema_version = CharField()
  media_type = EnumField(MediaType)


class TagKind(BaseModel):
  """ TagKind is an enum table referencing the possible kinds of tags.

      This model is a part of the new OCI/CNR model set.
  """
  name = CharField(index=True, unique=True)


class Tag(BaseModel):
  """ Tag represents a user-facing alias for referencing a ManifestList.

      This model is a part of the new OCI/CNR model set.
  """
  name = CharField()
  repository = ForeignKeyField(Repository)
  manifest_list = ForeignKeyField(ManifestList, null=True)
  lifetime_start = BigIntegerField(default=get_epoch_timestamp_ms)
  lifetime_end = BigIntegerField(null=True, index=True)
  hidden = BooleanField(default=False)
  reverted = BooleanField(default=False)
  protected = BooleanField(default=False)
  tag_kind = EnumField(TagKind)
  linked_tag = ForeignKeyField('self', null=True, related_name='tag_parents')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'name'), False),
      (('repository', 'name', 'hidden'), False),
      # This unique index prevents deadlocks when concurrently moving and deleting tags
      (('repository', 'name', 'lifetime_end'), True),
    )

Channel = Tag.alias()
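

# Illustrative sketch only: the "live" tag for a name is the row whose lifetime_end is still NULL;
# moving or deleting a tag is expected to end the old row rather than rewrite it, which is what
# the unique (repository, name, lifetime_end) index above protects. The helper is hypothetical
# and raises Tag.DoesNotExist when no live tag matches.
def _example_live_tag(repository, name):
  return (Tag
          .select()
          .where(Tag.repository == repository,
                 Tag.name == name,
                 Tag.hidden == False,
                 Tag.lifetime_end >> None)
          .get())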


class ManifestListManifest(BaseModel):
  """ ManifestListManifest is a many-to-many relation table linking ManifestLists and Manifests.

      This model is a part of the new OCI/CNR model set.
  """
  manifest_list = ForeignKeyField(ManifestList, index=True)
  manifest = ForeignKeyField(Manifest, index=True)
  operating_system = CharField(null=True)
  architecture = CharField(null=True)
  platform_json = JSONField(null=True)
  media_type = EnumField(MediaType)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('manifest_list', 'operating_system', 'architecture', 'media_type'), False),
      (('manifest_list', 'media_type'), False),
    )
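

# Illustrative sketch only: resolving the platform-specific manifest out of a manifest list walks
# the ManifestListManifest relation, matching the first index declared above. The helper is
# hypothetical and raises Manifest.DoesNotExist when no entry matches.
def _example_manifest_for_platform(manifest_list, operating_system, architecture):
  return (Manifest
          .select()
          .join(ManifestListManifest)
          .where(ManifestListManifest.manifest_list == manifest_list,
                 ManifestListManifest.operating_system == operating_system,
                 ManifestListManifest.architecture == architecture)
          .get())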


class ManifestLayer(BaseModel):
  """ ManifestLayer represents one of the layers that compose a Manifest.

      This model is a part of the new OCI/CNR model set.
  """
  blob = ForeignKeyField(Blob, index=True)
  manifest = ForeignKeyField(Manifest)
  manifest_index = IntegerField(index=True)  # index 0 is the last command in a Dockerfile
  metadata_json = JSONField()

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('manifest', 'manifest_index'), True),
    )


class ManifestLayerDockerV1(BaseModel):
  """ ManifestLayerDockerV1 is the Docker v1 registry protocol metadata for a ManifestLayer.

      This model is a part of the new OCI/CNR model set.
  """
  manifest_layer = ForeignKeyField(ManifestLayer)
  image_id = CharField(index=True)
  checksum = CharField()
  compat_json = JSONField()


class ManifestLayerScan(BaseModel):
  """ ManifestLayerScan represents the state of security scanning for a ManifestLayer.

      This model is a part of the new OCI/CNR model set.
  """
  layer = ForeignKeyField(ManifestLayer, unique=True)
  scannable = BooleanField()
  scanned_by = CharField()


class DerivedImage(BaseModel):
  """ DerivedImage represents a Manifest transcoded into an alternative format.

      This model is a part of the new OCI/CNR model set.
  """
  uuid = CharField(default=uuid_generator, unique=True)
  source_manifest = ForeignKeyField(Manifest)
  derived_manifest_json = JSONField()
  media_type = EnumField(MediaType)
  blob = ForeignKeyField(Blob, related_name='blob')
  uniqueness_hash = CharField(index=True, unique=True)
  signature_blob = ForeignKeyField(Blob, null=True, related_name='signature_blob')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('source_manifest', 'blob'), True),
      (('source_manifest', 'media_type', 'uniqueness_hash'), True),
    )
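

# Illustrative sketch only: derived images are looked up by source manifest, target media type and
# uniqueness hash, mirroring the second unique index above. How uniqueness_hash is computed is out
# of scope here; the helper name is an assumption.
def _example_find_derived_image(source_manifest, media_type, uniqueness_hash):
  try:
    return DerivedImage.get(DerivedImage.source_manifest == source_manifest,
                            DerivedImage.media_type == media_type,
                            DerivedImage.uniqueness_hash == uniqueness_hash)
  except DerivedImage.DoesNotExist:
    return None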


class BitTorrentPieces(BaseModel):
  """ BitTorrentPieces represents the BitTorrent piece metadata calculated from a Blob.

      This model is a part of the new OCI/CNR model set.
  """
  blob = ForeignKeyField(Blob)
  pieces = Base64BinaryField()
  piece_length = IntegerField()

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('blob', 'piece_length'), True),
    )


beta_classes = set([ManifestLayerScan, Tag, TagKind, BlobPlacementLocation, ManifestLayer, ManifestList,
                    BitTorrentPieces, MediaType, Label, ManifestBlob, BlobUploading, Blob,
                    ManifestLayerDockerV1, BlobPlacementLocationPreference, ManifestListManifest,
                    Manifest, DerivedImage, BlobPlacement, ManifestLabel])

is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)]
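

# Illustrative sketch only: one way tooling might consume these lists is to create every table
# except the beta ones. create_tables(safe=True) is standard peewee; this helper itself is
# hypothetical and is not called anywhere in this module.
def _example_create_stable_tables():
  stable_models = [model for model in all_models if model not in beta_classes]
  db.create_tables(stable_models, safe=True)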