import json
import os

from datetime import timedelta, datetime

from peewee import JOIN_LEFT_OUTER

import features

from data.database import (BuildTriggerService, RepositoryBuildTrigger, Repository, Namespace, User,
                           RepositoryBuild, BUILD_PHASE, db_for_update, db_random_func)
from data.model import (InvalidBuildTriggerException, InvalidRepositoryBuildException,
                        db_transaction, user as user_model)


PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)

PHASES_NOT_ALLOWED_TO_CANCEL_FROM = (BUILD_PHASE.PUSHING, BUILD_PHASE.COMPLETE,
                                     BUILD_PHASE.ERROR, BUILD_PHASE.INTERNAL_ERROR)

ARCHIVABLE_BUILD_PHASES = [BUILD_PHASE.COMPLETE, BUILD_PHASE.ERROR, BUILD_PHASE.CANCELLED]


def update_build_trigger(trigger, config, auth_token=None, write_token=None):
  """ Updates the configuration and (optionally) the tokens of the given build trigger. """
  trigger.config = json.dumps(config or {})

  if auth_token is not None:
    trigger.auth_token = auth_token

  if write_token is not None:
    trigger.write_token = write_token

  trigger.save()


def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None, config=None):
  """ Creates and returns a build trigger of the given service type for the repository. """
  service = BuildTriggerService.get(name=service_name)
  trigger = RepositoryBuildTrigger.create(repository=repo, service=service,
                                          auth_token=auth_token,
                                          connected_user=user,
                                          pull_robot=pull_robot,
                                          config=json.dumps(config or {}))
  return trigger
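

# Illustrative usage (a sketch, not executed as part of this module): an API handler might
# create a trigger and later update its configuration roughly like this. The service name,
# tokens, and config keys shown are assumptions supplied by the caller.
#
#   trigger = create_build_trigger(repo, 'github', auth_token=token, user=user,
#                                  config={'build_source': 'someorg/somerepo'})
#   ...
#   update_build_trigger(trigger, {'build_source': 'someorg/somerepo', 'subdir': '/'},
#                        write_token=new_write_token)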


def get_build_trigger(trigger_uuid):
  try:
    return (RepositoryBuildTrigger
            .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
            .join(BuildTriggerService)
            .switch(RepositoryBuildTrigger)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .switch(RepositoryBuildTrigger)
            .join(User)
            .where(RepositoryBuildTrigger.uuid == trigger_uuid)
            .get())
  except RepositoryBuildTrigger.DoesNotExist:
    msg = 'No build trigger with uuid: %s' % trigger_uuid
    raise InvalidBuildTriggerException(msg)


def list_build_triggers(namespace_name, repository_name):
  return (RepositoryBuildTrigger
          .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
          .join(BuildTriggerService)
          .switch(RepositoryBuildTrigger)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Namespace.username == namespace_name, Repository.name == repository_name))


def list_trigger_builds(namespace_name, repository_name, trigger_uuid, limit):
  return (list_repository_builds(namespace_name, repository_name, limit)
          .where(RepositoryBuildTrigger.uuid == trigger_uuid))


def get_repository_for_resource(resource_key):
  try:
    return (Repository
            .select(Repository, Namespace)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .switch(Repository)
            .join(RepositoryBuild)
            .where(RepositoryBuild.resource_key == resource_key)
            .get())
  except Repository.DoesNotExist:
    return None


def _get_build_base_query():
  return (RepositoryBuild
          .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
                  Namespace, User)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .switch(RepositoryBuild)
          .join(User, JOIN_LEFT_OUTER)
          .switch(RepositoryBuild)
          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
          .join(BuildTriggerService, JOIN_LEFT_OUTER)
          .order_by(RepositoryBuild.started.desc()))


def get_repository_build(build_uuid):
  try:
    return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()
  except RepositoryBuild.DoesNotExist:
    msg = 'Unable to locate a build by id: %s' % build_uuid
    raise InvalidRepositoryBuildException(msg)


def list_repository_builds(namespace_name, repository_name, limit,
                           include_inactive=True, since=None):
  query = (_get_build_base_query()
           .where(Repository.name == repository_name, Namespace.username == namespace_name)
           .limit(limit))

  if since is not None:
    query = query.where(RepositoryBuild.started >= since)

  if not include_inactive:
    query = query.where(RepositoryBuild.phase != BUILD_PHASE.ERROR,
                        RepositoryBuild.phase != BUILD_PHASE.COMPLETE)

  return query


def get_recent_repository_build(namespace_name, repository_name):
  query = list_repository_builds(namespace_name, repository_name, 1)
  try:
    return query.get()
  except RepositoryBuild.DoesNotExist:
    return None
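

# Illustrative usage (a sketch; the namespace and repository names are placeholders):
# listing the five most recent builds for a repository, excluding completed and errored ones.
#
#   for build in list_repository_builds('somenamespace', 'somerepo', 5, include_inactive=False):
#     print(build.uuid, build.phase)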


def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
                            display_name, trigger=None, pull_robot_name=None):
  """ Creates and returns a new build record for the given repository. """
  pull_robot = None
  if pull_robot_name:
    pull_robot = user_model.lookup_robot(pull_robot_name)

  return RepositoryBuild.create(repository=repo, access_token=access_token,
                                job_config=json.dumps(job_config_obj),
                                display_name=display_name, trigger=trigger,
                                resource_key=dockerfile_id,
                                pull_robot=pull_robot)
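

# Illustrative usage (a sketch; `repo`, `token`, and the job config contents are assumptions
# supplied by the caller, typically an API endpoint or a trigger handler):
#
#   build = create_repository_build(repo, token, {'docker_tags': ['latest']},
#                                   dockerfile_id=resource_key,
#                                   display_name='manual build')
#   # The returned RepositoryBuild row is then placed on the build queue elsewhere.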


def get_pull_robot_name(trigger):
  if not trigger.pull_robot:
    return None

  return trigger.pull_robot.username


def _get_build_row_for_update(build_uuid):
  return db_for_update(RepositoryBuild.select().where(RepositoryBuild.uuid == build_uuid)).get()


def update_phase(build_uuid, phase):
  """ Changes the phase of the build with the given UUID. Returns False if the build does not
      exist or has already been cancelled. """
  try:
    build = _get_build_row_for_update(build_uuid)
  except RepositoryBuild.DoesNotExist:
    return False

  # Can't update a cancelled build.
  if build.phase == BUILD_PHASE.CANCELLED:
    return False

  build.phase = phase
  build.save()
  return True
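

# Illustrative usage (a sketch): a build worker reporting progress could advance the phase as
# the job moves through the pipeline and bail out if the build was cancelled underneath it.
# `abort_build_job` is a hypothetical cleanup step in the worker.
#
#   if not update_phase(build_uuid, BUILD_PHASE.BUILDING):
#     abort_build_job()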


def create_cancel_build_in_queue(build, build_queue):
  """ Returns a function that attempts to cancel the build while it is still waiting in the
      queue. """
  def cancel_build():
    cancelled = False

    if build.queue_id is not None:
      cancelled = build_queue.cancel(build.queue_id)

    if build.phase != BUILD_PHASE.WAITING:
      return False

    return cancelled

  return cancel_build


def create_cancel_build_in_manager(build, build_canceller):
  """ Returns a function that asks the build manager to cancel the build, provided it has not
      yet reached a phase from which cancellation is disallowed. """
  def cancel_build():
    if build.phase in PHASES_NOT_ALLOWED_TO_CANCEL_FROM:
      return False

    return build_canceller.try_cancel_build(build.uuid)

  return cancel_build


def cancel_repository_build(build, build_queue):
  """ Attempts to cancel the given build. Returns True if the cancellation request succeeded
      and False if the build could not be cancelled. """
  with db_transaction():
    from app import build_canceller
    from buildman.jobutil.buildjob import BuildJobNotifier

    # Reload the build for update. Since the row is locked, the checks below should be as
    # quick as possible.
    try:
      build = _get_build_row_for_update(build.uuid)
    except RepositoryBuild.DoesNotExist:
      return False

    cancel_builds = [create_cancel_build_in_queue(build, build_queue),
                     create_cancel_build_in_manager(build, build_canceller)]

    original_phase = build.phase
    for cancelled in cancel_builds:
      if cancelled():
        build.phase = BUILD_PHASE.CANCELLED
        BuildJobNotifier(build.uuid).send_notification("build_cancelled")
        build.save()
        return True

    build.phase = original_phase
    build.save()
    return False
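

# Illustrative usage (a sketch; `dockerfile_build_queue` stands in for whatever queue instance
# the caller holds):
#
#   build = get_repository_build(build_uuid)
#   if not cancel_repository_build(build, dockerfile_build_queue):
#     raise Exception('The build can no longer be cancelled')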


def get_archivable_build():
  """ Returns a random build whose logs are eligible for archival, or None if there is none. """
  presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE

  # Builds are archivable once they reach a terminal phase, or once they are old enough to be
  # presumed dead; `<<` is peewee's IN operator.
  candidates = (RepositoryBuild
                .select(RepositoryBuild.id)
                .where((RepositoryBuild.phase << ARCHIVABLE_BUILD_PHASES) |
                       (RepositoryBuild.started < presumed_dead_date),
                       RepositoryBuild.logs_archived == False)
                .limit(50)
                .alias('candidates'))

  try:
    found_id = (RepositoryBuild
                .select(candidates.c.id)
                .from_(candidates)
                .order_by(db_random_func())
                .get())
    return RepositoryBuild.get(id=found_id)
  except RepositoryBuild.DoesNotExist:
    return None


def mark_build_archived(build_uuid):
  """ Mark a build as archived, and return True if we were the ones who actually
      updated the row. """
  return (RepositoryBuild
          .update(logs_archived=True)
          .where(RepositoryBuild.uuid == build_uuid,
                 RepositoryBuild.logs_archived == False)
          .execute()) > 0
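

# Illustrative usage (a sketch): a periodic archiving worker could drain eligible builds with
# these two helpers. `archive_build_logs` is a hypothetical stand-in for the actual log-archival
# step.
#
#   while True:
#     build = get_archivable_build()
#     if build is None:
#       break
#     archive_build_logs(build)
#     mark_build_archived(build.uuid)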