Merge pull request #2449 from charltonaustin/phase_two_config
feat(build runner): added in context, dockerfile_location
Commit ca99535774
29 changed files with 531 additions and 111 deletions
@@ -124,12 +124,13 @@ class BuildComponent(BaseComponent):
# username: The username for pulling the base image (if any).
# password: The password for pulling the base image (if any).

subdir, dockerfile_name = self.name_and_path(build_config.get('build_subdir'))
# TODO: Charlie Tuesday, March 28, 2017 come back and clean up build_subdir.
dockerfile_path = os.path.relpath(build_config.get('build_subdir'), build_config.get('context'))

build_arguments = {
  'build_package': build_job.get_build_package_url(self.user_files),
  'sub_directory': subdir,
  'dockerfile_name': dockerfile_name,
  'context': build_config.get('context'),
  'dockerfile_path': dockerfile_path,
  'repository': repository_name,
  'registry': self.registry_hostname,
  'pull_token': build_job.repo_build.access_token.code,
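For reference, a minimal sketch of how the new dockerfile_path value above is derived; the config values are hypothetical, only os.path.relpath and the build_subdir/context keys come from the diff.

import os

# Hypothetical trigger config; the keys match the build_config lookups above.
build_config = {'build_subdir': '/web/Dockerfile', 'context': '/web'}

# relpath strips the context prefix, leaving the Dockerfile's path inside the context.
dockerfile_path = os.path.relpath(build_config.get('build_subdir'), build_config.get('context'))
print(dockerfile_path)  # Dockerfile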
@@ -1,3 +1,4 @@
import os
from abc import ABCMeta, abstractmethod
from jsonschema import validate
from six import add_metaclass

@@ -290,7 +291,7 @@ class BuildTriggerHandler(object):
def get_dockerfile_path(self):
  """ Returns the normalized path to the Dockerfile found in the subdirectory
      in the config. """
  dockerfile_path = self.config.get('subdir') or 'Dockerfile'
  dockerfile_path = self.config.get('dockerfile_path') or 'Dockerfile'
  if dockerfile_path[0] == '/':
    dockerfile_path = dockerfile_path[1:]
  return dockerfile_path

@@ -303,10 +304,11 @@ class BuildTriggerHandler(object):
ref = metadata.get('ref', None)
commit_sha = metadata['commit']
default_branch = metadata.get('default_branch', None)

prepared = PreparedBuild(self.trigger)
prepared.name_from_sha(commit_sha)
prepared.subdirectory = config.get('subdir', None)
# TODO: Charlie Tuesday, March 28, 2017 come back and clean up subdirectory.
prepared.subdirectory = config.get('dockerfile_path', None)
prepared.context = config.get('context', None)
prepared.is_manual = is_manual
prepared.metadata = metadata

@@ -327,3 +329,28 @@ class BuildTriggerHandler(object):
namespaces = list(namespaces_dict.values())
validate(namespaces, NAMESPACES_SCHEMA)
return namespaces

@classmethod
def get_parent_directory_mappings(cls, dockerfile_path, current_paths=None):
  """ Returns a map of dockerfile_paths to it's possible contexts. """
  if dockerfile_path == "":
    return {}

  if dockerfile_path[0] != os.path.sep:
    dockerfile_path = os.path.sep + dockerfile_path

  dockerfile_path = os.path.normpath(dockerfile_path)
  all_paths = set()
  path, _ = os.path.split(dockerfile_path)
  if path == "":
    path = os.path.sep

  all_paths.add(path)
  for i in range(1, len(path.split(os.path.sep))):
    path, _ = os.path.split(path)
    all_paths.add(path)

  if current_paths:
    return dict({dockerfile_path: list(all_paths)}, **current_paths)

  return {dockerfile_path: list(all_paths)}
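To make the new helper concrete, a small usage sketch of get_parent_directory_mappings, mirroring the parametrized tests added further down; list ordering is not guaranteed because the contexts come from a set.

from buildtrigger.basehandler import BuildTriggerHandler

# Each Dockerfile path maps to every parent directory that could serve as its build context.
BuildTriggerHandler.get_parent_directory_mappings('/c/b/a')
# => {'/c/b/a': ['/c/b', '/c', '/']}  (list order may vary)

# Passing an existing map merges the new entry into it.
BuildTriggerHandler.get_parent_directory_mappings('b', {'/a': ['/']})
# => {'/a': ['/'], '/b': ['/']}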
@@ -371,17 +371,22 @@ class GithubBuildTrigger(BuildTriggerHandler):
raise RepositoryReadException(message)

path = self.get_dockerfile_path()
if not path or not self.filename_is_dockerfile(os.path.basename(path)):
if not path:
  return None

try:
  file_info = repo.get_file_contents(path)
except GithubException as ghe:
# TypeError is needed because directory inputs cause a TypeError
except (GithubException, TypeError) as ghe:
  logger.error("got error from trying to find github file %s" % ghe)
  return None

if file_info is None:
  return None

if isinstance(file_info, list):
  return None

content = file_info.content
if file_info.encoding == 'base64':
  content = base64.b64decode(content)
@@ -4,11 +4,11 @@ from mock import Mock
from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
from util.morecollections import AttrDict

def get_bitbucket_trigger(subdir=''):
def get_bitbucket_trigger(dockerfile_path=''):
  trigger_obj = AttrDict(dict(auth_token='foobar', id='sometrigger'))
  trigger = BitbucketBuildTrigger(trigger_obj, {
    'build_source': 'foo/bar',
    'subdir': subdir,
    'dockerfile_path': dockerfile_path,
    'username': 'knownuser'
  })
@@ -6,9 +6,9 @@ from github import GithubException
from buildtrigger.githubhandler import GithubBuildTrigger
from util.morecollections import AttrDict

def get_github_trigger(subdir=''):
def get_github_trigger(dockerfile_path=''):
  trigger_obj = AttrDict(dict(auth_token='foobar', id='sometrigger'))
  trigger = GithubBuildTrigger(trigger_obj, {'build_source': 'foo', 'subdir': subdir})
  trigger = GithubBuildTrigger(trigger_obj, {'build_source': 'foo', 'dockerfile_path': dockerfile_path})
  trigger._get_client = get_mock_github
  return trigger
@@ -4,11 +4,11 @@ from mock import Mock
from buildtrigger.gitlabhandler import GitLabBuildTrigger
from util.morecollections import AttrDict

def get_gitlab_trigger(subdir=''):
def get_gitlab_trigger(dockerfile_path=''):
  trigger_obj = AttrDict(dict(auth_token='foobar', id='sometrigger'))
  trigger = GitLabBuildTrigger(trigger_obj, {
    'build_source': 'foo/bar',
    'subdir': subdir,
    'dockerfile_path': dockerfile_path,
    'username': 'knownuser'
  })
@@ -13,3 +13,43 @@ from buildtrigger.basehandler import BuildTriggerHandler
])
def test_path_is_dockerfile(input, output):
  assert BuildTriggerHandler.filename_is_dockerfile(input) == output


@pytest.mark.parametrize('input,output', [
  ("", {}),
  ("/a", {"/a": ["/"]}),
  ("a", {"/a": ["/"]}),
  ("/b/a", {"/b/a": ["/b", "/"]}),
  ("b/a", {"/b/a": ["/b", "/"]}),
  ("/c/b/a", {"/c/b/a": ["/c/b", "/c", "/"]}),
  ("/a//b//c", {"/a/b/c": ["/", "/a", "/a/b"]}),
  ("/a", {"/a": ["/"]}),
])
def test_subdir_path_map_no_previous(input, output):
  actual_mapping = BuildTriggerHandler.get_parent_directory_mappings(input)
  for key in actual_mapping:
    value = actual_mapping[key]
    actual_mapping[key] = value.sort()
  for key in output:
    value = output[key]
    output[key] = value.sort()

  assert actual_mapping == output


@pytest.mark.parametrize('new_path,original_dictionary,output', [
  ("/a", {}, {"/a": ["/"]}),
  ("b", {"/a": ["some_path", "another_path"]}, {"/a": ["some_path", "another_path"], "/b": ["/"]}),
  ("/a/b/c/d", {"/e": ["some_path", "another_path"]},
   {"/e": ["some_path", "another_path"], "/a/b/c/d": ["/", "/a", "/a/b", "/a/b/c"]}),
])
def test_subdir_path_map(new_path, original_dictionary, output):
  actual_mapping = BuildTriggerHandler.get_parent_directory_mappings(new_path, original_dictionary)
  for key in actual_mapping:
    value = actual_mapping[key]
    actual_mapping[key] = value.sort()
  for key in output:
    value = output[key]
    output[key] = value.sort()

  assert actual_mapping == output
@@ -16,13 +16,13 @@ def test_list_build_subdirs(bitbucket_trigger):
  assert bitbucket_trigger.list_build_subdirs() == ["/Dockerfile"]


@pytest.mark.parametrize('subdir, contents', [
@pytest.mark.parametrize('dockerfile_path, contents', [
  ('/Dockerfile', 'hello world'),
  ('somesubdir/Dockerfile', 'hi universe'),
  ('unknownpath', None),
])
def test_load_dockerfile_contents(subdir, contents):
  trigger = get_bitbucket_trigger(subdir)
def test_load_dockerfile_contents(dockerfile_path, contents):
  trigger = get_bitbucket_trigger(dockerfile_path)
  assert trigger.load_dockerfile_contents() == contents
@@ -68,13 +68,13 @@ def test_handle_trigger_request(github_trigger, payload, expected_error, expecte
  assert isinstance(github_trigger.handle_trigger_request(request), PreparedBuild)


@pytest.mark.parametrize('subdir, contents', [
@pytest.mark.parametrize('dockerfile_path, contents', [
  ('/Dockerfile', 'hello world'),
  ('somesubdir/Dockerfile', 'hi universe'),
  ('unknownpath', None),
])
def test_load_dockerfile_contents(subdir, contents):
  trigger = get_github_trigger(subdir)
def test_load_dockerfile_contents(dockerfile_path, contents):
  trigger = get_github_trigger(dockerfile_path)
  assert trigger.load_dockerfile_contents() == contents
@@ -16,13 +16,13 @@ def test_list_build_subdirs(gitlab_trigger):
  assert gitlab_trigger.list_build_subdirs() == ['/Dockerfile']


@pytest.mark.parametrize('subdir, contents', [
@pytest.mark.parametrize('dockerfile_path, contents', [
  ('/Dockerfile', 'hello world'),
  ('somesubdir/Dockerfile', 'hi universe'),
  ('unknownpath', None),
])
def test_load_dockerfile_contents(subdir, contents):
  trigger = get_gitlab_trigger(subdir)
def test_load_dockerfile_contents(dockerfile_path, contents):
  trigger = get_gitlab_trigger(dockerfile_path)
  assert trigger.load_dockerfile_contents() == contents
@@ -0,0 +1,85 @@
"""back fill build expand_config

Revision ID: a6c463dfb9fe
Revises: e2894a3a3c19
Create Date: 2017-03-17 10:00:19.739858

"""

# revision identifiers, used by Alembic.
import json
import os

from data.database import RepositoryBuildTrigger

revision = 'a6c463dfb9fe'
down_revision = 'e2894a3a3c19'

from alembic import op


def upgrade(tables):
  repostioryBuildTriggers = RepositoryBuildTrigger.select()
  for repositoryBuildTrigger in repostioryBuildTriggers:
    config = json.loads(repositoryBuildTrigger.config)
    repositoryBuildTrigger.config = json.dumps(get_config_expand(config))


def downgrade(tables):
  repostioryBuildTriggers = RepositoryBuildTrigger.select()
  for repositoryBuildTrigger in repostioryBuildTriggers:
    config = json.loads(repositoryBuildTrigger.config)
    repositoryBuildTrigger.config = json.dumps(get_config_expand(config))


def create_context(current_subdir):
  if current_subdir == "":
    current_subdir = os.path.sep + current_subdir

  if current_subdir[len(current_subdir) - 1] != os.path.sep:
    current_subdir += os.path.sep

  context, _ = os.path.split(current_subdir)
  return context


def create_dockerfile_path(current_subdir):
  if current_subdir == "":
    current_subdir = os.path.sep + current_subdir

  if current_subdir[len(current_subdir) - 1] != os.path.sep:
    current_subdir += os.path.sep

  return current_subdir + "Dockerfile"


def get_config_expand(config):
  """ A function to transform old records into new records """
  if not config:
    return config

  # skip records that have been updated
  if "context" in config or "dockerfile_path" in config:
    return config

  config_expand = {}
  if "subdir" in config:
    config_expand = dict(config)
    config_expand["context"] = create_context(config["subdir"])
    config_expand["dockerfile_path"] = create_dockerfile_path(config["subdir"])

  return config_expand


def get_config_contract(config):
  """ A function to delete context and dockerfile_path from config """
  if not config:
    return config

  if "context" in config:
    del config["context"]

  if "dockerfile_path" in config:
    del config["dockerfile_path"]

  return config
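A short sketch of what the back-fill above does to an old trigger record; the build_source value is hypothetical, the expansion itself follows get_config_expand and the helper tests later in this diff.

old_config = {'build_source': 'foo/bar', 'subdir': ''}
get_config_expand(old_config)
# => {'build_source': 'foo/bar', 'subdir': '', 'context': '/', 'dockerfile_path': '/Dockerfile'}

# Records that already contain 'context' or 'dockerfile_path' are returned unchanged,
# so re-running the migration is a no-op for them.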
@@ -18,10 +18,14 @@ PHASES_NOT_ALLOWED_TO_CANCEL_FROM = (BUILD_PHASE.PUSHING, BUILD_PHASE.COMPLETE,
ARCHIVABLE_BUILD_PHASES = [BUILD_PHASE.COMPLETE, BUILD_PHASE.ERROR, BUILD_PHASE.CANCELLED]


def update_build_trigger(trigger, config, auth_token=None):
def update_build_trigger(trigger, config, auth_token=None, write_token=None):
  trigger.config = json.dumps(_get_config_expand(config or {}))
  if auth_token is not None:
    trigger.auth_token = auth_token

  if write_token is not None:
    trigger.write_token = write_token

  trigger.save()
@@ -1,44 +1,9 @@
import os


def _get_config_expand(config):
  """ Get config with both context and dockerfile_path written to it """
  if config and "subdir" in config and "context" not in config:
    config_expand = dict(config)
    config_expand["context"] = _create_context(config["subdir"])
    config_expand["dockerfile_path"] = _create_dockerfile_path(config["subdir"])
    return config_expand
  return config or {}
  if not config:
    return {}
  if 'context' in config:
    config['subdir'] = config['context']


def _create_context(current_subdir):
  """ Create context from current subdir """
  if current_subdir.endswith("Dockerfile"):
    context, _ = os.path.split(current_subdir)
    if context == "":
      return os.path.sep

    return context

  if current_subdir == "":
    current_subdir = os.path.sep + current_subdir

  if current_subdir[len(current_subdir) - 1] != os.path.sep:
    current_subdir += os.path.sep

  context, _ = os.path.split(current_subdir)
  return context


def _create_dockerfile_path(current_subdir):
  """ Create dockefile path from current subdir """
  if current_subdir.endswith("Dockerfile"):
    return current_subdir

  if current_subdir == "":
    current_subdir = os.path.sep + current_subdir

  if current_subdir[len(current_subdir) - 1] != os.path.sep:
    current_subdir += os.path.sep

  return current_subdir + "Dockerfile"
  return config
@@ -4,27 +4,11 @@ from data.model.helpers.build_helper import _get_config_expand


@pytest.mark.parametrize('org_config,expected', [
  # Empty config
  (None, {}),

  # No subdir in config
  ({}, {}),

  ({"subdir": ""}, {"context": "/", "dockerfile_path": "/Dockerfile", "subdir": ""}),
  ({"subdir": "/"}, {"context": "/", "dockerfile_path": "/Dockerfile", "subdir": "/"}),
  ({"subdir": "/Dockerfile"}, {"context": "/", "dockerfile_path": "/Dockerfile", "subdir": "/Dockerfile"}),
  ({"subdir": "a"}, {"context": "a", "dockerfile_path": "a/Dockerfile", "subdir": "a"}),
  ({"subdir": "Dockerfile"}, {"context": "/", "dockerfile_path": "Dockerfile", "subdir": "Dockerfile"}),
  ({"subdir": "server.Dockerfile"},
   {"context": "/", "dockerfile_path": "server.Dockerfile", "subdir": "server.Dockerfile"}),
  ({"subdir": "/a/b/Dockerfile"},
   {"context": "/a/b", "dockerfile_path": "/a/b/Dockerfile", "subdir": "/a/b/Dockerfile"}),
  ({"subdir": "/a/b/server.Dockerfile"},
   {"context": "/a/b", "dockerfile_path": "/a/b/server.Dockerfile", "subdir": "/a/b/server.Dockerfile"}),
  ({"subdir": "a/b/Dockerfile"}, {"context": "a/b", "dockerfile_path": "a/b/Dockerfile", "subdir": "a/b/Dockerfile"}),
  ({"subdir": "a/b/server.Dockerfile"},
   {"context": "a/b", "dockerfile_path": "a/b/server.Dockerfile", "subdir": "a/b/server.Dockerfile"}),
  ({"subdir": "/a/b/c", "context": "slime"}, {"context": "slime", "subdir": "/a/b/c"}),
  ({'some other key': 'some other value'}, {'some other key': 'some other value'}),
  ({'context': 'some/context', 'dockerfile_path': 'some/context/with/Dockerfile'},
   {'context': 'some/context', 'dockerfile_path': 'some/context/with/Dockerfile', 'subdir': 'some/context'}),
])
def test_super_user_build_endpoints(org_config, expected):
  assert _get_config_expand(org_config) == expected
@@ -1,5 +1,5 @@
""" Create, list, cancel and get status/logs of repository builds. """

import os
from urlparse import urlparse

import logging

@@ -51,6 +51,7 @@ def user_view(user):
  'is_robot': user.robot,
}


def trigger_view(trigger, can_read=False, can_admin=False, for_build=False):
  if trigger and trigger.uuid:
    build_trigger = BuildTriggerHandler.get_handler(trigger)

@@ -133,6 +134,8 @@ def build_status_view(build_obj):
  'display_name': build_obj.display_name,
  'status': status or {},
  'subdirectory': job_config.get('build_subdir', ''),
  'dockerfile_path': job_config.get('build_subdir', ''),
  'context': job_config.get('context', ''),
  'tags': job_config.get('docker_tags', []),
  'manual_user': job_config.get('manual_user', None),
  'is_writer': can_write,

@@ -244,6 +247,7 @@ class RepositoryBuildList(RepositoryParamResource):
  raise InvalidRequest('Invalid Archive URL: Must be http or https')

subdir = request_json['subdirectory'] if 'subdirectory' in request_json else ''
context = request_json['context'] if 'context' in request_json else os.path.dirname(subdir)
tags = request_json.get('docker_tags', ['latest'])
pull_robot_name = request_json.get('pull_robot', None)

@@ -291,9 +295,9 @@ class RepositoryBuildList(RepositoryParamResource):
prepared.archive_url = archive_url
prepared.tags = tags
prepared.subdirectory = subdir
prepared.context = context
prepared.is_manual = True
prepared.metadata = {}

try:
  build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
except MaximumBuildsQueuedException:
endpoints/api/test/test_trigger.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import pytest

from endpoints.api.trigger import is_parent


@pytest.mark.parametrize('context,dockerfile_path,expected', [
  ("/", "/a/b", True),
  ("/a", "/a/b", True),
  ("/a/b", "/a/b", False),
  ("/a//", "/a/b", True),
  ("/a", "/a//b/c", True),
  ("/a//", "a/b", True),
  ("/a/b", "a/bc/d", False),
  ("/d", "/a/b", False),
  ("/a/b", "/a/b.c", False),
  ("/a/b", "/a/b/b.c", True),
  ("", "/a/b.c", False),
  ("/a/b", "", False),
  ("", "", False),
])
def test_super_user_build_endpoints(context, dockerfile_path, expected):
  assert is_parent(context, dockerfile_path) == expected
@@ -2,17 +2,21 @@

import json
import logging

from os import path
from urllib import quote
from urlparse import urlunparse

from flask import request, url_for

from app import app
from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
                              ReadRepositoryPermission, AdministerRepositoryPermission)
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.triggerutil import (TriggerDeactivationException,
                                      TriggerActivationException, EmptyRepositoryException,
                                      RepositoryReadException, TriggerStartException)
from data import model
from data.model.build import update_build_trigger
from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
                           log_action, request_error, query_param, parse_args, internal_only,
                           validate_json_request, api, path_param, abort,

@@ -20,12 +24,9 @@ from endpoints.api import (RepositoryParamResource, nickname, resource, require_
from endpoints.exception import NotFound, Unauthorized, InvalidRequest
from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
from endpoints.building import start_build, MaximumBuildsQueuedException
from data import model
from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
                              ReadRepositoryPermission, AdministerRepositoryPermission)
from util.names import parse_robot_username
from endpoints.exception import NotFound, Unauthorized, InvalidRequest
from util.dockerfileparse import parse_dockerfile

from util.names import parse_robot_username

logger = logging.getLogger(__name__)
@@ -131,19 +132,25 @@ class BuildTriggerSubdirs(RepositoryParamResource):

try:
  subdirs = handler.list_build_subdirs()
  context_map = {}
  for file in subdirs:
    context_map = handler.get_parent_directory_mappings(file, context_map)

  return {
    'subdir': ['/' + subdir for subdir in subdirs],
    'status': 'success'
    'dockerfile_paths': ['/' + subdir for subdir in subdirs],
    'contextMap': context_map,
    'status': 'success',
  }
except EmptyRepositoryException as exc:
  return {
    'status': 'success',
    'subdir': []
    'contextMap': {},
    'dockerfile_paths': [],
  }
except RepositoryReadException as exc:
  return {
    'status': 'error',
    'message': exc.message
    'message': exc.message,
  }
else:
  raise Unauthorized()
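For orientation, a sketch of the response shape this endpoint now returns (the old 'subdir' key becomes 'dockerfile_paths', plus a 'contextMap'); the paths are hypothetical, the keys match the updated API test near the end of this diff.

response = {
  'status': 'success',
  'dockerfile_paths': ['/Dockerfile', '/web/Dockerfile'],
  'contextMap': {
    '/Dockerfile': ['/'],
    '/web/Dockerfile': ['/web', '/'],
  },
}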
@@ -235,9 +242,7 @@ class BuildTriggerActivate(RepositoryParamResource):
  raise request_error(message=exc.message)

# Save the updated config.
trigger.config = json.dumps(final_config)
trigger.write_token = write_token
trigger.save()
update_build_trigger(trigger, final_config, write_token=write_token)

# Log the trigger setup.
repo = model.repository.get_repository(namespace_name, repo_name)
@@ -343,6 +348,15 @@ class BuildTriggerAnalyze(RepositoryParamResource):
  'message': 'Could not parse the Dockerfile specified'
}

# Check whether the dockerfile_path is correct
if new_config_dict.get('context'):
  if not is_parent(new_config_dict.get('context'), new_config_dict.get('dockerfile_path')):
    return {
      'status': 'error',
      'message': 'Dockerfile, %s, is not child of the context, %s.' %
                 (new_config_dict.get('context'), new_config_dict.get('dockerfile_path'))
    }

# Default to the current namespace.
base_namespace = namespace_name
base_repository = None
@@ -399,6 +413,28 @@ class BuildTriggerAnalyze(RepositoryParamResource):
  raise NotFound()


def is_parent(context, dockerfile_path):
  """ This checks whether the context is a parent of the dockerfile_path"""
  if context == "" or dockerfile_path == "":
    return False

  normalized_context = path.normpath(context)
  if normalized_context[len(normalized_context) - 1] != path.sep:
    normalized_context += path.sep

  if normalized_context[0] != path.sep:
    normalized_context = path.sep + normalized_context

  normalized_subdir = path.normpath(path.dirname(dockerfile_path))
  if normalized_subdir[0] != path.sep:
    normalized_subdir = path.sep + normalized_subdir

  if normalized_subdir[len(normalized_subdir) - 1] != path.sep:
    normalized_subdir += path.sep

  return normalized_subdir.startswith(normalized_context)


@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/start')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
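A few cases showing how is_parent behaves, taken directly from the new endpoints/api/test/test_trigger.py parametrization above:

assert is_parent("/a", "/a/b")            # context /a is a parent of the Dockerfile at /a/b
assert is_parent("/a/b", "/a/b/b.c")      # a context equal to the Dockerfile's directory also counts
assert not is_parent("/a/b", "/a/b")      # here the Dockerfile's directory is /a, above the context
assert not is_parent("", "/a/b.c")        # an empty context is never a parent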
@@ -418,7 +454,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
  'description': '(Custom Only) If specified, the ref/SHA1 used to checkout a git repository.'
},
'refs': {
  'type': ['object', 'null'],
  'type': ['object', 'null'],
  'description': '(SCM Only) If specified, the ref to build.'
}
},
@@ -467,6 +503,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
@path_param('trigger_uuid', 'The UUID of the build trigger')
class TriggerBuildList(RepositoryParamResource):
  """ Resource to represent builds that were activated from the specified trigger. """

  @require_repo_admin
  @disallow_for_app_repositories
  @parse_args()

@@ -483,10 +520,12 @@ class TriggerBuildList(RepositoryParamResource):

FIELD_VALUE_LIMIT = 30


@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/fields/<field_name>')
@internal_only
class BuildTriggerFieldValues(RepositoryParamResource):
  """ Custom verb to fetch a values list for a particular field name. """

  @require_repo_admin
  @disallow_for_app_repositories
  @nickname('listTriggerFieldValues')

@@ -558,13 +597,13 @@ class BuildTriggerSources(RepositoryParamResource):
  raise Unauthorized()


@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/namespaces')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
@internal_only
class BuildTriggerSourceNamespaces(RepositoryParamResource):
  """ Custom verb to fetch the list of namespaces (orgs, projects, etc) for the trigger config. """

  @require_repo_admin
  @disallow_for_app_repositories
  @nickname('listTriggerBuildSourceNamespaces')
@@ -54,6 +54,7 @@ def start_build(repository, prepared_build, pull_robot_name=None):
  'docker_tags': prepared_build.tags,
  'registry': host,
  'build_subdir': prepared_build.subdirectory,
  'context': prepared_build.context,
  'trigger_metadata': prepared_build.metadata or {},
  'is_manual': prepared_build.is_manual,
  'manual_user': get_authenticated_user().username if get_authenticated_user() else None,

@@ -122,6 +123,7 @@ class PreparedBuild(object):
  self._tags = None
  self._build_name = None
  self._subdirectory = None
  self._context = None
  self._metadata = None
  self._trigger = trigger
  self._is_manual = None

@@ -224,6 +226,20 @@ class PreparedBuild(object):

  self._subdirectory = value

@property
def context(self):
  if self._context is None:
    raise Exception('Missing property context')

  return self._context

@context.setter
def context(self, value):
  if self._context:
    raise Exception('Property context already set')

  self._context = value

@property
def metadata(self):
  if self._metadata is None:
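A minimal sketch of the new context property on PreparedBuild, assuming a trigger row is available to construct it with; like subdirectory and metadata it must be assigned exactly once before being read.

prepared = PreparedBuild(trigger)   # 'trigger' is a hypothetical RepositoryBuildTrigger row
prepared.context = '/web'
prepared.context                    # '/web'
prepared.context = '/other'         # raises Exception('Property context already set')
# Reading prepared.context before assigning it raises Exception('Missing property context').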
@@ -605,6 +605,8 @@ def populate_database(minimal=False, with_storage=False):
trigger.config = json.dumps({
  'build_source': 'jakedt/testconnect',
  'subdir': '',
  'dockerfile_path': 'Dockerfile',
  'context': '/',
})
trigger.save()
@@ -119,6 +119,7 @@
<thead>
  <td>Trigger Name</td>
  <td>Dockerfile Location</td>
  <td>Context Location</td>
  <td>Branches/Tags</td>
  <td>Pull Robot</td>
  <td class="options-col"></td>

@@ -133,7 +134,8 @@

<tr ng-repeat="trigger in triggers | filter:{'is_active':true}">
  <td><div class="trigger-description" trigger="trigger"></div></td>
  <td>{{ trigger.config.subdir || '/' }}</td>
  <td>{{ trigger.config.dockerfile_path || '/Dockerfile' }}</td>
  <td>{{ trigger.config.context || '/' }}</td>
  <td>{{ trigger.config.branchtag_regex || 'All' }}</td>
  <td>
    <span class="entity-reference" entity="trigger.pull_robot" ng-if="trigger.pull_robot"></span>
@@ -0,0 +1,33 @@
<div class="context-path-select-element">
  <div class="dropdown-select" placeholder="'Enter a docker context'"
       selected-item="$ctrl.selectedContext"
       lookahead-items="$ctrl.contexts"
       handle-input="$ctrl.setContext(input)"
       handle-item-selected="$ctrl.setSelectedContext(datum.value)"
       allow-custom-input="true"
       hide-dropdown="$ctrl.contexts.length <= 0">
    <!-- Icons -->
    <i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg"
       ng-show="$ctrl.isUnknownContext"></i>
    <i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;"
       ng-show="!$ctrl.isUnknownContext"></i>
    <i class="dropdown-select-icon fa fa-folder fa-lg"></i>

    <!-- Dropdown menu -->
    <ul class="dropdown-select-menu pull-right" role="menu">
      <li ng-repeat="context in $ctrl.contexts">
        <a ng-click="$ctrl.setSelectedContext(context)"
           ng-if="context">
          <i class="fa fa-folder fa-lg"></i> {{ context }}
        </a>
      </li>
    </ul>
  </div>

  <div style="padding: 10px">
    <div class="co-alert co-alert-danger"
         ng-show="!$ctrl.isValidContext && $ctrl.currentContext">
      Path is an invalid context.
    </div>
  </div>
</div>
@@ -0,0 +1,87 @@
import { ContextPathSelectComponent } from './context-path-select.component';


describe("ContextPathSelectComponent", () => {
  var component: ContextPathSelectComponent;
  var currentContext: string;
  var isValidContext: boolean;
  var contexts: string[];

  beforeEach(() => {
    component = new ContextPathSelectComponent();
    currentContext = '/';
    isValidContext = false;
    contexts = ['/'];
    component.currentContext = currentContext;
    component.isValidContext = isValidContext;
    component.contexts = contexts;
  });

  describe("$onChanges", () => {

    it("sets valid context flag to true if current context is valid", () => {
      component.$onChanges({});

      expect(component.isValidContext).toBe(true);
    });

    it("sets valid context flag to false if current context is invalid", () => {
      component.currentContext = "asdfdsf";
      component.$onChanges({});

      expect(component.isValidContext).toBe(false);
    });
  });

  describe("setcontext", () => {
    var newContext: string;

    beforeEach(() => {
      newContext = '/conf';
    });

    it("sets current context to given context", () => {
      component.setContext(newContext);

      expect(component.currentContext).toEqual(newContext);
    });

    it("sets valid context flag to true if given context is valid", () => {
      component.setContext(newContext);

      expect(component.isValidContext).toBe(true);
    });

    it("sets valid context flag to false if given context is invalid", () => {
      component.setContext("asdfsadfs");

      expect(component.isValidContext).toBe(false);
    });
  });

  describe("setCurrentcontext", () => {
    var context: string;

    beforeEach(() => {
      context = '/conf';
    });

    it("sets current context to given context", () => {
      component.setSelectedContext(context);

      expect(component.currentContext).toEqual(context);
    });

    it("sets valid context flag to true if given context is valid", () => {
      component.setSelectedContext(context);

      expect(component.isValidContext).toBe(true);
    });

    it("sets valid context flag to false if given context is invalid", () => {
      component.setSelectedContext("a;lskjdf;ldsa");

      expect(component.isValidContext).toBe(false);
    });
  });
});
@@ -0,0 +1,46 @@
import { Input, Component } from 'angular-ts-decorators';


/**
 * A component that allows the user to select the location of the Context in their source code repository.
 */
@Component({
  selector: 'contextPathSelect',
  templateUrl: '/static/js/directives/ui/context-path-select/context-path-select.component.html'
})
export class ContextPathSelectComponent implements ng.IComponentController {

  // FIXME: Use one-way data binding
  @Input('=') public currentContext: string;
  @Input('=') public isValidContext: boolean;
  @Input('=') public contexts: string[];
  private isUnknownContext: boolean = true;
  private selectedContext: string | null = null;

  public $onChanges(changes: ng.IOnChangesObject): void {
    this.isValidContext = this.checkContext(this.currentContext, this.contexts);
  }

  public setContext(context: string): void {
    this.currentContext = context;
    this.selectedContext = null;
    this.isValidContext = this.checkContext(context, this.contexts);
  }

  public setSelectedContext(context: string): void {
    this.currentContext = context;
    this.selectedContext = context;
    this.isValidContext = this.checkContext(context, this.contexts);
  }

  private checkContext(context: string = '', contexts: string[] = []): boolean {
    this.isUnknownContext = false;
    var isValidContext: boolean = false;

    if (context.length > 0 && context[0] === '/') {
      isValidContext = true;
      this.isUnknownContext = true;
    }
    return isValidContext;
  }
}
@@ -257,9 +257,10 @@

<dockerfile-path-select
  current-path="$ctrl.local.dockerfilePath"
  paths="$ctrl.local.dockerfileLocations.subdir"
  paths="$ctrl.local.dockerfileLocations.dockerfile_paths"
  supports-full-listing="true"
  is-valid-path="$ctrl.local.hasValidDockerfilePath"></dockerfile-path-select>
  is-valid-path="$ctrl.local.hasValidDockerfilePath">
</dockerfile-path-select>
</div>

<div class="col-lg-8 col-md-8 col-sm-12 main-col"

@@ -268,10 +269,44 @@

</div>
<div class="col-lg-4 col-md-4 hidden-sm hidden-xs help-col">
  <p>Please select the location containing the Dockerfile to be built.</p>
  <p>The build context will start at the location selected.</p>
</div>
</linear-workflow-section><!-- /Section: Dockerfile Location -->

<!-- Section: Context Location -->
<linear-workflow-section class="row"
                         section-id="contextlocation"
                         section-title="Select Docker Context"
                         section-valid="$ctrl.local.hasValidContextLocation">
  <div class="col-lg-7 col-md-7 col-sm-12 main-col"
       ng-if="$ctrl.local.dockerfileLocations.status == 'error'">
    <div class="co-alert co-alert-warning">
      {{ $ctrl.local.dockerfileLocations.message }}
    </div>
  </div>

  <div class="col-lg-7 col-md-7 col-sm-12 main-col" ng-if="$ctrl.local.dockerfileLocations.status == 'success'">
    <h3>Select Context</h3>
    <strong>
      Please select the context for the docker build
    </strong>

    <context-path-select
      current-context="$ctrl.local.dockerContext"
      current-path="$ctrl.local.dockerfilePath"
      contexts="$ctrl.local.contexts"
      is-valid-context="$ctrl.local.hasValidContextLocation">
    </context-path-select>
  </div>

  <div class="col-lg-8 col-md-8 col-sm-12 main-col"
       ng-if="!$ctrl.local.dockerfileLocations">
    <span class="cor-loader-inline"></span> Retrieving Dockerfile locations
  </div>
  <div class="col-lg-4 col-md-4 hidden-sm hidden-xs help-col">
    <p>Please select a docker context.</p>
  </div>
</linear-workflow-section><!-- /Section: Context Location -->

<!-- Section: Robot Account -->
<linear-workflow-section class="row"
                         section-id="verification"
@@ -80,7 +80,14 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {

this.$scope.$watch(() => this.local.dockerfilePath, (path) => {
  if (path && this.local.selectedRepository) {
    this.checkDockerfilePath(this.local.selectedRepository, path);
    this.setPossibleContexts(path);
    this.checkDockerfilePath(this.local.selectedRepository, path, this.local.dockerContext);
  }
});

this.$scope.$watch(() => this.local.dockerContext, (context) => {
  if (context && this.local.selectedRepository) {
    this.checkDockerfilePath(this.local.selectedRepository, this.local.dockerfilePath, context);
  }
});

@@ -117,7 +124,8 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {
public createTrigger(): void {
  var config: any = {
    build_source: this.local.selectedRepository.full_name,
    subdir: this.local.dockerfilePath.substr(1) // Remove starting /
    dockerfile_path: this.local.dockerfilePath,
    context: this.local.dockerContext
  };

  if (this.local.triggerOptions.hasBranchTagFilter &&

@@ -271,6 +279,7 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {
private loadDockerfileLocations(repository: any): void {
  this.local.dockerfilePath = null;
  this.local.dockerContext = null;

  var params = {
    'repository': this.repository.namespace + '/' + this.repository.name,

@@ -301,7 +310,7 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {
  []);
}

private checkDockerfilePath(repository: any, path: string): void {
private checkDockerfilePath(repository: any, path: string, context: string): void {
  this.local.triggerAnalysis = null;
  this.local.robotAccount = null;

@@ -312,7 +321,8 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {

  var config = {
    'build_source': repository.full_name,
    'subdir': path.substr(1)
    'dockerfile_path': path.substr(1),
    'context': context
  };

  var data = {

@@ -325,6 +335,12 @@ export class ManageTriggerGithostComponent implements ng.IComponentController {
    this.buildOrderedRobotAccounts();
  }, this.ApiService.errorDisplay('Could not analyze trigger'));
}

private setPossibleContexts(path){
  if(this.local.dockerfileLocations.contextMap){
    this.local.contexts = this.local.dockerfileLocations.contextMap[path] || [];
  }
}
}
@@ -7,6 +7,7 @@ import { RegexMatchViewComponent } from "./directives/ui/regex-match-view/regex-
import { NgModule } from "angular-ts-decorators";
import { QuayRoutes } from "./quay-routes.module";
import { DockerfilePathSelectComponent } from './directives/ui/dockerfile-path-select/dockerfile-path-select.component';
import { ContextPathSelectComponent } from './directives/ui/context-path-select/context-path-select.component';
import { ManageTriggerCustomGitComponent } from './directives/ui/manage-trigger-custom-git/manage-trigger-custom-git.component';
import { ManageTriggerGithostComponent } from './directives/ui/manage-trigger-githost/manage-trigger-githost.component';
import { LinearWorkflowComponent } from './directives/ui/linear-workflow/linear-workflow.component';

@@ -31,6 +32,7 @@ import { DataFileServiceImpl } from './services/datafile/datafile.service.impl';
declarations: [
  RegexMatchViewComponent,
  DockerfilePathSelectComponent,
  ContextPathSelectComponent,
  ManageTriggerCustomGitComponent,
  ManageTriggerGithostComponent,
  LinearWorkflowComponent,
Binary file not shown.
@@ -3852,6 +3852,7 @@ class FakeBuildTrigger(BuildTriggerHandler):
  prepared.subdirectory = 'subdir'
  prepared.metadata = {'foo': 'bar'}
  prepared.is_manual = True
  prepared.context = '/'
  return prepared

def get_repository_url(self):

@@ -4046,7 +4047,11 @@ class TestBuildTriggers(ApiTestCase):
                  trigger_uuid=trigger.uuid),
                  data={'somevalue': 'meh'})

self.assertEquals({'status': 'success', 'subdir': ['/sometoken', '/foo', '/bar', '/meh']},
self.assertEquals({'status': 'success', 'dockerfile_paths': ['/sometoken', '/foo', '/bar', '/meh'],
                   'contextMap': {'/bar': ['/'],
                                  '/foo': ['/'],
                                  '/meh': ['/'],
                                  '/sometoken': ['/']}},
                  subdir_json)

# Activate the trigger.
@@ -5,6 +5,7 @@ import json
from app import app
from data import model
from data.database import RepositoryBuildTrigger, configure
from data.model.build import update_build_trigger

configure(app.config)

@@ -40,8 +41,7 @@ def run_branchregex_migration():
  config['branchtag_regex'] = new_regex

  logger.debug("Updating to branchtag regex '%s'", new_regex)
  trigger.config = json.dumps(config)
  trigger.save()
  update_build_trigger(trigger, config)

if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)