Upgrade Peewee to latest 3.x
This requires a number of small changes in the data model code, as well as additional testing.
parent 70b7ee4654
commit d3d9cca182

26 changed files with 220 additions and 193 deletions
@@ -17,7 +17,7 @@ import toposort
 
 from enum import Enum
 from peewee import *
-from playhouse.shortcuts import RetryOperationalError
+from peewee import __exception_wrapper__, Function
 from playhouse.pool import PooledMySQLDatabase, PooledPostgresqlDatabase, PooledSqliteDatabase
 
 from sqlalchemy.engine.url import make_url
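Note: Peewee 3.x no longer ships playhouse.shortcuts.RetryOperationalError, so the import is swapped for the two peewee internals (__exception_wrapper__ and Function) needed to reimplement the retry mixin locally; the mixin itself is added further down.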
@@ -121,27 +121,27 @@ def delete_instance_filtered(instance, model_class, delete_nullable, skip_transi
     # We only want to skip transitive deletes, which are done using subqueries in the form of
     # DELETE FROM <table> in <subquery>. If an op is not using a subquery, we allow it to be
     # applied directly.
-    if fk.model_class not in skip_transitive_deletes or query.op != 'in':
+    if fk.model not in skip_transitive_deletes or query.op.lower() != 'in':
       filtered_ops.append((query, fk))
 
-    if query.op == 'in':
-      dependencies[fk.model_class.__name__].add(query.rhs.model_class.__name__)
+    if query.op.lower() == 'in':
+      dependencies[fk.model.__name__].add(query.rhs.model.__name__)
     elif query.op == '=':
-      dependencies[fk.model_class.__name__].add(model_class.__name__)
+      dependencies[fk.model.__name__].add(model_class.__name__)
     else:
       raise RuntimeError('Unknown operator in recursive repository delete query')
 
   sorted_models = list(reversed(toposort.toposort_flatten(dependencies)))
   def sorted_model_key(query_fk_tuple):
     cmp_query, cmp_fk = query_fk_tuple
-    if cmp_query.op == 'in':
+    if cmp_query.op.lower() == 'in':
       return -1
-    return sorted_models.index(cmp_fk.model_class.__name__)
+    return sorted_models.index(cmp_fk.model.__name__)
   filtered_ops.sort(key=sorted_model_key)
 
   with db_transaction():
     for query, fk in filtered_ops:
-      _model = fk.model_class
+      _model = fk.model
       if fk.null and not delete_nullable:
         _model.update(**{fk.name: None}).where(query).execute()
       else:
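Two Peewee 3.x renames drive this hunk: a field's owning model moved from field.model_class to field.model, and expression operators are now stored uppercase (OP.IN is 'IN'), hence the .lower() guards. A minimal sketch of both behaviors, using hypothetical Team/Member models and assuming peewee>=3:

  from peewee import SqliteDatabase, Model, CharField, ForeignKeyField

  db = SqliteDatabase(':memory:')

  class Team(Model):
    name = CharField()
    class Meta:
      database = db

  class Member(Model):
    team = ForeignKeyField(Team, backref='members')
    class Meta:
      database = db

  fk = Member.team
  assert fk.model is Member      # 2.x spelling: fk.model_class
  assert fk.rel_model is Team    # the referenced model

  expr = Member.team.in_(Team.select())
  assert expr.op == 'IN'         # uppercase in 3.x, hence query.op.lower()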
@@ -162,6 +162,24 @@ class CallableProxy(Proxy):
     return self.obj(*args, **kwargs)
 
 
+class RetryOperationalError(object):
+
+  def execute_sql(self, sql, params=None, commit=True):
+    try:
+      cursor = super(RetryOperationalError, self).execute_sql(sql, params, commit)
+    except OperationalError:
+      if not self.is_closed():
+        self.close()
+
+      with __exception_wrapper__:
+        cursor = self.cursor()
+        cursor.execute(sql, params or ())
+        if commit and not self.in_transaction():
+          self.commit()
+
+    return cursor
+
+
 class CloseForLongOperation(object):
   """ Helper object which disconnects the database then reconnects after the nested operation
       completes.
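This class is the retry recipe that playhouse.shortcuts used to provide, inlined as a mixin: on OperationalError it closes the stale connection and re-issues the statement once through peewee's __exception_wrapper__. It only takes effect when mixed in ahead of a database class in the MRO; a hypothetical composition (the actual wiring is _wrap_for_retry, below):

  class RetryingPooledMySQLDatabase(RetryOperationalError, PooledMySQLDatabase):
    pass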
@@ -214,11 +232,11 @@ class TupleSelector(object):
   @classmethod
   def tuple_reference_key(cls, field):
     """ Returns a string key for referencing a field in a TupleSelector. """
-    if field._node_type == 'func':
+    if isinstance(field, Function):
       return field.name + ','.join([cls.tuple_reference_key(arg) for arg in field.arguments])
 
-    if field._node_type == 'field':
-      return field.name + ':' + field.model_class.__name__
+    if isinstance(field, Field):
+      return field.name + ':' + field.model.__name__
 
     raise Exception('Unknown field type %s in TupleSelector' % field._node_type)
 
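Peewee 3.x dropped the _node_type marker, so node dispatch becomes isinstance checks against Function and Field. A quick self-contained check, assuming peewee>=3:

  from peewee import fn, CharField, Field, Function

  name_field = CharField()                           # an unbound field is enough for a type check
  assert isinstance(name_field, Field)               # was: field._node_type == 'field'
  assert isinstance(fn.LOWER(name_field), Function)  # was: field._node_type == 'func'

Note that the final raise still reads field._node_type, which no longer exists in 3.x, so an unknown node would surface as an AttributeError rather than this Exception.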
@@ -268,6 +286,9 @@ def _db_from_url(url, db_kwargs, connect_timeout=DEFAULT_DB_CONNECT_TIMEOUT):
   if parsed_url.password:
     db_kwargs['password'] = parsed_url.password
 
+  # Remove threadlocals. It used to be required.
+  db_kwargs.pop('threadlocals', None)
+
   # Note: sqlite does not support connect_timeout.
   if parsed_url.drivername != 'sqlite':
     db_kwargs['connect_timeout'] = db_kwargs.get('connect_timeout', connect_timeout)
@@ -285,8 +306,9 @@ def _db_from_url(url, db_kwargs, connect_timeout=DEFAULT_DB_CONNECT_TIMEOUT):
     db_kwargs.pop('stale_timeout', None)
     db_kwargs.pop('max_connections', None)
 
-  wrapped_driver = _wrap_for_retry(driver)
-  return wrapped_driver(parsed_url.database, **db_kwargs)
+  # wrapped_driver = _wrap_for_retry(driver)
+  # return wrapped_driver(parsed_url.database, **db_kwargs)
+  return driver(parsed_url.database, **db_kwargs)
 
 
 def configure(config_object):
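The retry wrapping is left commented out rather than deleted, so drivers are returned unwrapped for now. _wrap_for_retry itself is not shown in this hunk; a plausible shape for such a helper (hypothetical, the real definition lives elsewhere in this file) would be:

  def _wrap_for_retry(driver):
    # Prepend the RetryOperationalError mixin to the driver's MRO.
    return type('Retrying' + driver.__name__, (RetryOperationalError, driver), {})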
@@ -351,20 +373,20 @@ class QuayUserField(ForeignKeyField):
   def __init__(self, allows_robots=False, robot_null_delete=False, *args, **kwargs):
     self.allows_robots = allows_robots
     self.robot_null_delete = robot_null_delete
-    if 'rel_model' not in kwargs:
-      kwargs['rel_model'] = User
+    if 'model' not in kwargs:
+      kwargs['model'] = User
     super(QuayUserField, self).__init__(*args, **kwargs)
 
 
 class EnumField(ForeignKeyField):
   """ Create a cached python Enum from an EnumTable """
-  def __init__(self, rel_model, enum_key_field='name', *args, **kwargs):
+  def __init__(self, model, enum_key_field='name', *args, **kwargs):
     """
-    rel_model is the EnumTable model-class (see ForeignKeyField)
+    model is the EnumTable model-class (see ForeignKeyField)
     enum_key_field is the field from the EnumTable to use as the enum name
     """
     self.enum_key_field = enum_key_field
-    super(EnumField, self).__init__(rel_model, *args, **kwargs)
+    super(EnumField, self).__init__(model, *args, **kwargs)
 
   @property
   @lru_cache(maxsize=1)
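ForeignKeyField's first constructor argument was renamed from rel_model to model in Peewee 3.x, which is why both the kwargs plumbing in QuayUserField and EnumField's signature change. For example (Team being a hypothetical target model):

  team = ForeignKeyField(model=Team, backref='members')   # 2.x: rel_model=Team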
@@ -412,7 +434,7 @@ class BaseModel(ReadSlaveModel):
     if name.endswith('_id'):
       field_name = name[0:len(name) - 3]
       if field_name in self._meta.fields:
-        return self._data.get(field_name)
+        return self.__data__.get(field_name)
 
     return super(BaseModel, self).__getattribute__(name)
 
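Peewee 3.x renamed the per-instance row dict from _data to __data__, which is all this hook needs. A minimal illustration (hypothetical model; no database connection is required to build an instance):

  from peewee import Model, CharField

  class Thing(Model):
    name = CharField()

  t = Thing(name='quay')
  assert t.__data__ == {'name': 'quay'}   # 2.x: t._data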
@@ -449,7 +471,7 @@ class User(BaseModel):
     # For all the model dependencies, only delete those that allow robots.
     for query, fk in reversed(list(self.dependencies(search_nullable=True))):
       if isinstance(fk, QuayUserField) and fk.allows_robots:
-        _model = fk.model_class
+        _model = fk.model
 
         if fk.robot_null_delete:
           _model.update(**{fk.name: None}).where(query).execute()
@@ -551,7 +573,7 @@ class TeamMemberInvite(BaseModel):
   user = QuayUserField(index=True, null=True)
   email = CharField(null=True)
   team = ForeignKeyField(Team)
-  inviter = ForeignKeyField(User, related_name='inviter')
+  inviter = ForeignKeyField(User, backref='inviter')
   invite_token = CharField(default=urn_generator(['teaminvite']))
 
 
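related_name became backref in Peewee 3.x; the reverse accessor itself is unchanged. With the declaration above, these two remain equivalent (some_user being a hypothetical User instance):

  invites = TeamMemberInvite.select().where(TeamMemberInvite.inviter == some_user)
  invites = some_user.inviter   # reverse query via the backref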
@@ -664,13 +686,13 @@ class RepositoryPermission(BaseModel):
 
 
 class PermissionPrototype(BaseModel):
-  org = QuayUserField(index=True, related_name='orgpermissionproto')
+  org = QuayUserField(index=True, backref='orgpermissionproto')
   uuid = CharField(default=uuid_generator)
   activating_user = QuayUserField(allows_robots=True, index=True, null=True,
-                                  related_name='userpermissionproto')
-  delegate_user = QuayUserField(allows_robots=True, related_name='receivingpermission',
+                                  backref='userpermissionproto')
+  delegate_user = QuayUserField(allows_robots=True, backref='receivingpermission',
                                 null=True)
-  delegate_team = ForeignKeyField(Team, related_name='receivingpermission',
+  delegate_team = ForeignKeyField(Team, backref='receivingpermission',
                                   null=True)
   role = ForeignKeyField(Role)
 
@@ -714,7 +736,7 @@ class RepositoryBuildTrigger(BaseModel):
   private_key = TextField(null=True)
   config = TextField(default='{}')
   write_token = ForeignKeyField(AccessToken, null=True)
-  pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot',
+  pull_robot = QuayUserField(allows_robots=True, null=True, backref='triggerpullrobot',
                              robot_null_delete=True)
   enabled = BooleanField(default=True)
   disabled_reason = EnumField(DisableReason, null=True)
@@ -789,9 +811,6 @@ class UserRegion(BaseModel):
   )
 
 
-_ImageProxy = Proxy()
-
-
 class Image(BaseModel):
   # This class is intentionally denormalized. Even though images are supposed
   # to be globally unique we can't treat them as such for permissions and
@@ -816,7 +835,7 @@ class Image(BaseModel):
   security_indexed_engine = IntegerField(default=IMAGE_NOT_SCANNED_ENGINE_VERSION, index=True)
 
   # We use a proxy here instead of 'self' in order to disable the foreign key constraint
-  parent = ForeignKeyField(_ImageProxy, null=True, related_name='children')
+  parent = DeferredForeignKey('Image', null=True, backref='children')
 
   class Meta:
     database = db
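Peewee 3.x's DeferredForeignKey replaces the 2.x Proxy()/initialize() dance for self- and forward references: it resolves by model name once the named class has been declared, so the module-level _ImageProxy bookkeeping (removed in the surrounding hunks) is no longer needed. A minimal sketch of the pattern, with a hypothetical Node model:

  from peewee import Model, CharField, DeferredForeignKey

  class Node(Model):
    name = CharField()
    parent = DeferredForeignKey('Node', null=True, backref='children')  # resolves once Node exists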
@@ -835,9 +854,6 @@ class Image(BaseModel):
     return map(int, self.ancestors.split('/')[1:-1])
 
 
-_ImageProxy.initialize(Image)
-
-
 class DerivedStorageForImage(BaseModel):
   source_image = ForeignKeyField(Image)
   derivative = ForeignKeyField(ImageStorage)
@@ -942,7 +958,7 @@ class RepositoryBuild(BaseModel):
   started = DateTimeField(default=datetime.now, index=True)
   display_name = CharField()
   trigger = ForeignKeyField(RepositoryBuildTrigger, null=True)
-  pull_robot = QuayUserField(null=True, related_name='buildpullrobot', allows_robots=True,
+  pull_robot = QuayUserField(null=True, backref='buildpullrobot', allows_robots=True,
                              robot_null_delete=True)
   logs_archived = BooleanField(default=False)
   queue_id = CharField(null=True, index=True)
@@ -962,9 +978,9 @@ class LogEntryKind(BaseModel):
 
 class LogEntry(BaseModel):
   kind = ForeignKeyField(LogEntryKind)
-  account = IntegerField(index=True, db_column='account_id')
-  performer = IntegerField(index=True, null=True, db_column='performer_id')
-  repository = IntegerField(index=True, null=True, db_column='repository_id')
+  account = IntegerField(index=True, column_name='account_id')
+  performer = IntegerField(index=True, null=True, column_name='performer_id')
+  repository = IntegerField(index=True, null=True, column_name='repository_id')
   datetime = DateTimeField(default=datetime.now, index=True)
   ip = CharField(null=True)
   metadata_json = TextField(default='{}')
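Field's db_column kwarg became column_name in Peewee 3.x. The Python-side attribute stays account while the generated SQL continues to reference the account_id column, so existing queries are unaffected:

  entry = LogEntry.select().where(LogEntry.account == 42)   # SQL still compares against account_id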
@@ -1024,7 +1040,7 @@ class OAuthApplication(BaseModel):
 
   name = CharField()
   description = TextField(default='')
-  avatar_email = CharField(null=True, db_column='gravatar_email')
+  avatar_email = CharField(null=True, column_name='gravatar_email')
 
 
 class OAuthAuthorizationCode(BaseModel):
@@ -1163,15 +1179,12 @@ class ServiceKeyApprovalType(Enum):
   AUTOMATIC = 'Automatic'
 
 
-_ServiceKeyApproverProxy = Proxy()
 class ServiceKeyApproval(BaseModel):
-  approver = ForeignKeyField(_ServiceKeyApproverProxy, null=True)
+  approver = QuayUserField(null=True)
   approval_type = CharField(index=True)
   approved_date = DateTimeField(default=datetime.utcnow)
   notes = TextField(default='')
 
-_ServiceKeyApproverProxy.initialize(User)
-
 
 class ServiceKey(BaseModel):
   name = CharField()
@@ -1309,7 +1322,7 @@ class ApprTag(BaseModel):
   reverted = BooleanField(default=False)
   protected = BooleanField(default=False)
   tag_kind = EnumField(ApprTagKind)
-  linked_tag = ForeignKeyField('self', null=True, related_name='tag_parents')
+  linked_tag = ForeignKeyField('self', null=True, backref='tag_parents')
 
   class Meta:
     database = db