Merge remote-tracking branch 'origin/master' into comewithmeifyouwanttowork

Conflicts:
    data/model/legacy.py

commit 75d2ef377e
35 changed files with 743 additions and 93 deletions

@@ -38,6 +38,7 @@ ADD conf/init/gunicorn /etc/service/gunicorn
 ADD conf/init/nginx /etc/service/nginx
 ADD conf/init/diffsworker /etc/service/diffsworker
 ADD conf/init/notificationworker /etc/service/notificationworker
+ADD conf/init/buildlogsarchiver /etc/service/buildlogsarchiver
 
 # Download any external libs.
 RUN mkdir static/fonts static/ldn

2  app.py
@@ -20,6 +20,7 @@ from util.exceptionlog import Sentry
 from util.queuemetrics import QueueMetrics
 from data.billing import Billing
 from data.buildlogs import BuildLogs
+from data.archivedlogs import LogArchive
 from data.queue import WorkQueue
 from data.userevent import UserEventsBuilderModule
 from datetime import datetime

@@ -89,6 +90,7 @@ login_manager = LoginManager(app)
 mail = Mail(app)
 storage = Storage(app)
 userfiles = Userfiles(app, storage)
+log_archive = LogArchive(app, storage)
 analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)

@@ -135,8 +135,15 @@ def process_token(auth):
     logger.warning('Invalid token format: %s' % auth)
     abort(401, message='Invalid token format: %(auth)s', issue='invalid-auth-token', auth=auth)
 
-  token_vals = {val[0]: val[1] for val in
+  def safe_get(lst, index, default_value):
+    try:
+      return lst[index]
+    except IndexError:
+      return default_value
+
+  token_vals = {val[0]: safe_get(val, 1, '') for val in
                 (detail.split('=') for detail in token_details)}
 
   if 'signature' not in token_vals:
     logger.warning('Token does not contain signature: %s' % auth)
     abort(401, message='Token does not contain a valid signature: %(auth)s',
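
The token details arrive as 'key=value' pairs, but a detail without an '=' used to raise IndexError inside the comprehension. A minimal standalone sketch of what safe_get changes (the token_details values here are made up for illustration):

  def safe_get(lst, index, default_value):
    try:
      return lst[index]
    except IndexError:
      return default_value

  token_details = ['signature=abc123', 'repository=foo/bar', 'access']
  token_vals = {val[0]: safe_get(val, 1, '') for val in
                (detail.split('=') for detail in token_details)}
  print(token_vals)  # {'signature': 'abc123', 'repository': 'foo/bar', 'access': ''}
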
2  conf/init/buildlogsarchiver/log/run  Executable file
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec svlogd /var/log/buildlogsarchiver/

8  conf/init/buildlogsarchiver/run  Executable file
@@ -0,0 +1,8 @@
+#! /bin/bash
+
+echo 'Starting build logs archiver worker'
+
+cd /
+venv/bin/python -m workers.buildlogsarchiver 2>&1
+
+echo 'Diffs worker exited'

@@ -172,3 +172,7 @@ class DefaultConfig(object):
   # Userfiles
   USERFILES_LOCATION = 'local_us'
   USERFILES_PATH = 'userfiles/'
+
+  # Build logs archive
+  LOG_ARCHIVE_LOCATION = 'local_us'
+  LOG_ARCHIVE_PATH = 'logarchive/'
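
The two new settings mirror the existing USERFILES_* pair. A sketch, not part of this commit, of how a deployment might override them and how data/archivedlogs.py (added below) reads them back; the ProdConfig class and the 's3_us_east_1' location name are hypothetical:

  from config import DefaultConfig

  class ProdConfig(DefaultConfig):
    # Hypothetical override: keep archived logs in a different storage location.
    LOG_ARCHIVE_LOCATION = 's3_us_east_1'
    LOG_ARCHIVE_PATH = 'logarchive/'

  # data/archivedlogs.py consumes them via the Flask config:
  #   location = app.config.get('LOG_ARCHIVE_LOCATION')
  #   path = app.config.get('LOG_ARCHIVE_PATH', None)
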
56  data/archivedlogs.py  Normal file
@@ -0,0 +1,56 @@
+import logging
+
+from gzip import GzipFile
+from flask import send_file, abort
+from cStringIO import StringIO
+
+from data.userfiles import DelegateUserfiles, UserfilesHandlers
+
+
+JSON_MIMETYPE = 'application/json'
+
+
+logger = logging.getLogger(__name__)
+
+
+class LogArchiveHandlers(UserfilesHandlers):
+  def get(self, file_id):
+    path = self._files.get_file_id_path(file_id)
+    try:
+      with self._storage.stream_read_file(self._locations, path) as gzip_stream:
+        with GzipFile(fileobj=gzip_stream) as unzipped:
+          unzipped_buffer = StringIO(unzipped.read())
+          return send_file(unzipped_buffer, mimetype=JSON_MIMETYPE)
+    except IOError:
+      abort(404)
+
+
+class LogArchive(object):
+  def __init__(self, app=None, distributed_storage=None):
+    self.app = app
+    if app is not None:
+      self.state = self.init_app(app, distributed_storage)
+    else:
+      self.state = None
+
+  def init_app(self, app, distributed_storage):
+    location = app.config.get('LOG_ARCHIVE_LOCATION')
+    path = app.config.get('LOG_ARCHIVE_PATH', None)
+
+    handler_name = 'logarchive_handlers'
+
+    log_archive = DelegateUserfiles(app, distributed_storage, location, path, handler_name)
+
+    app.add_url_rule('/logarchive/<file_id>',
+                     view_func=LogArchiveHandlers.as_view(handler_name,
+                                                          distributed_storage=distributed_storage,
+                                                          location=location,
+                                                          files=log_archive))
+
+    # register extension with app
+    app.extensions = getattr(app, 'extensions', {})
+    app.extensions['log_archive'] = log_archive
+    return log_archive
+
+  def __getattr__(self, name):
+    return getattr(self.state, name, None)
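
For context, a sketch of how the new extension is used by the rest of this commit: app.py constructs it, the archiver worker stores into it, and the build API hands out URLs. The app, storage, fileobj and build_uuid names below are placeholders standing in for those files' objects:

  from data.archivedlogs import LogArchive, JSON_MIMETYPE

  log_archive = LogArchive(app, storage)   # registers /logarchive/<file_id>

  # Store a gzipped JSON stream under the build's UUID (delegated to DelegateUserfiles)...
  log_archive.store_file(fileobj, JSON_MIMETYPE, content_encoding='gzip', file_id=build_uuid)

  # ...and later hand clients a URL for it.
  url = log_archive.get_file_url(build_uuid)
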
@@ -3,7 +3,7 @@ import stripe
 from datetime import datetime, timedelta
 from calendar import timegm
 
-from util.collections import AttrDict
+from util.morecollections import AttrDict
 
 PLANS = [
   # Deprecated Plans

@@ -2,6 +2,11 @@ import redis
 import json
 
 from util.dynamic import import_class
+from datetime import timedelta
+
+
+ONE_DAY = timedelta(days=1)
+
 
 class BuildStatusRetrievalError(Exception):
   pass

@@ -54,6 +59,13 @@ class RedisBuildLogs(object):
     except redis.ConnectionError:
       raise BuildStatusRetrievalError('Cannot retrieve build logs')
 
+  def expire_log_entries(self, build_id):
+    """
+    Sets the log entry to expire in 1 day.
+    """
+    self._redis.expire(self._logs_key(build_id), ONE_DAY)
+
+
   @staticmethod
   def _status_key(build_id):
     return 'builds/%s/status' % build_id
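
ONE_DAY is a timedelta handed straight to Redis EXPIRE. A standalone sketch of that behaviour ('build-logs-key' is a placeholder; the real key comes from RedisBuildLogs._logs_key(), which is not shown in this diff):

  from datetime import timedelta
  import redis

  ONE_DAY = timedelta(days=1)

  client = redis.StrictRedis()
  client.expire('build-logs-key', ONE_DAY)                        # redis-py accepts a timedelta...
  client.expire('build-logs-key', int(ONE_DAY.total_seconds()))   # ...or plain seconds
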
@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
   'postgresql+psycopg2': PostgresqlDatabase,
 }
 
+SCHEME_RANDOM_FUNCTION = {
+  'mysql': fn.Rand,
+  'mysql+pymysql': fn.Rand,
+  'sqlite': fn.Random,
+  'postgresql': fn.Random,
+  'postgresql+psycopg2': fn.Random,
+}
+
+class CallableProxy(Proxy):
+  def __call__(self, *args, **kwargs):
+    if self.obj is None:
+      raise AttributeError('Cannot use uninitialized Proxy.')
+    return self.obj(*args, **kwargs)
+
 db = Proxy()
 read_slave = Proxy()
+db_random_func = CallableProxy()
 
 
 def _db_from_url(url, db_kwargs):
   parsed_url = make_url(url)

@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):
 
   return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
 
 
 def configure(config_object):
   db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
   write_db_uri = config_object['DB_URI']
   db.initialize(_db_from_url(write_db_uri, db_kwargs))
 
+  parsed_write_uri = make_url(write_db_uri)
+  db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
+
   read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
   if read_slave_uri is not None:
     read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
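
A sketch of the intended use, mirroring workers/buildlogsarchiver.py later in this commit: once configure() has initialized the proxy, db_random_func() resolves to the driver-appropriate random function, so callers can order rows randomly without knowing the backend.

  from data.database import RepositoryBuild, db_random_func

  random_build = (RepositoryBuild
                  .select()
                  .order_by(db_random_func())
                  .get())

  # Before configure() runs, calling the proxy fails loudly:
  # AttributeError: Cannot use uninitialized Proxy.
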
@@ -298,6 +318,16 @@ class RepositoryTag(BaseModel):
   )
 
 
+class BUILD_PHASE(object):
+  """ Build phases enum """
+  ERROR = 'error'
+  UNPACKING = 'unpacking'
+  PULLING = 'pulling'
+  BUILDING = 'building'
+  PUSHING = 'pushing'
+  COMPLETE = 'complete'
+
+
 class RepositoryBuild(BaseModel):
   uuid = CharField(default=uuid_generator, index=True)
   repository = ForeignKeyField(Repository, index=True)

@@ -309,6 +339,7 @@ class RepositoryBuild(BaseModel):
   display_name = CharField()
   trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
   pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot')
+  logs_archived = BooleanField(default=False)
 
 
 class QueueItem(BaseModel):
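
A sketch of how the enum is meant to be used; workers/dockerfilebuild.py below switches from bare strings to these constants, and the stored values remain the same lowercase strings. some_uuid is a placeholder:

  from data.database import BUILD_PHASE, RepositoryBuild

  build = RepositoryBuild.get(RepositoryBuild.uuid == some_uuid)
  build.phase = BUILD_PHASE.PUSHING
  build.save()

  # Phase comparisons can also be built into queries, as archivable_buildlogs_query() does:
  finished = ((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
              (RepositoryBuild.phase == BUILD_PHASE.ERROR))
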
@@ -8,7 +8,7 @@ from peewee import SqliteDatabase
 from data.database import all_models, db
 from app import app
 from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
-from util.collections import AttrDict
+from util.morecollections import AttrDict
 
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.

@@ -0,0 +1,26 @@
+"""Add support for build log migration.
+
+Revision ID: 34fd69f63809
+Revises: 4a0c94399f38
+Create Date: 2014-09-12 11:50:09.217777
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '34fd69f63809'
+down_revision = '4a0c94399f38'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
+    ### end Alembic commands ###
+
+
+def downgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('repositorybuild', 'logs_archived')
+    ### end Alembic commands ###

@@ -12,6 +12,7 @@ from util.backoff import exponential_backoff
 
 
 EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
+PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
 
 
 logger = logging.getLogger(__name__)

@@ -1896,6 +1897,16 @@ def get_active_users():
 def get_active_user_count():
   return get_active_users().count()
 
 
+def detach_external_login(user, service_name):
+  try:
+    service = LoginService.get(name = service_name)
+  except LoginService.DoesNotExist:
+    return
+
+  FederatedLogin.delete().where(FederatedLogin.user == user,
+                                FederatedLogin.service == service).execute()
+
+
 def delete_user(user):
   user.delete_instance(recursive=True, delete_nullable=True)

@@ -2006,3 +2017,10 @@ def confirm_team_invite(code, user):
   inviter = found.inviter
   found.delete_instance()
   return (team, inviter)
+
+
+def archivable_buildlogs_query():
+  presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
+  return (RepositoryBuild.select()
+          .where((RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
+                 (RepositoryBuild.phase == BUILD_PHASE.ERROR) |
+                 (RepositoryBuild.started < presumed_dead_date), RepositoryBuild.logs_archived == False))

@@ -46,7 +46,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
   def validate_redirect_uri(self, client_id, redirect_uri):
     try:
       app = OAuthApplication.get(client_id=client_id)
-      if app.redirect_uri and redirect_uri.startswith(app.redirect_uri):
+      if app.redirect_uri and redirect_uri and redirect_uri.startswith(app.redirect_uri):
         return True
       return False
     except OAuthApplication.DoesNotExist:

@@ -81,10 +81,13 @@ class DelegateUserfiles(object):
 
     return (url, file_id)
 
-  def store_file(self, file_like_obj, content_type):
-    file_id = str(uuid4())
+  def store_file(self, file_like_obj, content_type, content_encoding=None, file_id=None):
+    if file_id is None:
+      file_id = str(uuid4())
+
     path = self.get_file_id_path(file_id)
-    self._storage.stream_write(self._locations, path, file_like_obj, content_type)
+    self._storage.stream_write(self._locations, path, file_like_obj, content_type,
+                               content_encoding)
     return file_id
 
   def get_file_url(self, file_id, expires_in=300, requires_cors=False):
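
A sketch of the two new keyword arguments as the archiver worker uses them: a caller may now pin the file id (here the build UUID) and record a content encoding instead of always getting a random uuid4 id. build_uuid and the file objects are placeholders, and the old single-argument call keeps its previous behaviour:

  file_id = log_archive.store_file(tempfile, 'application/json',
                                   content_encoding='gzip',
                                   file_id=build_uuid)

  # Unchanged legacy call path (random id, no encoding metadata):
  file_id = user_files.store_file(some_fileobj, 'application/octet-stream')
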
@@ -1,9 +1,9 @@
 import logging
 import json
 
-from flask import request
+from flask import request, redirect
 
-from app import app, userfiles as user_files, build_logs
+from app import app, userfiles as user_files, build_logs, log_archive
 from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                            require_repo_read, require_repo_write, validate_json_request,
                            ApiResource, internal_only, format_date, api, Unauthorized, NotFound)

@@ -215,6 +215,10 @@ class RepositoryBuildLogs(RepositoryParamResource):
 
     build = model.get_repository_build(namespace, repository, build_uuid)
 
+    # If the logs have been archived, just redirect to the completed archive
+    if build.logs_archived:
+      return redirect(log_archive.get_file_url(build.uuid))
+
     start = int(request.args.get('start', 0))
 
     try:

@@ -426,6 +426,19 @@ class Signout(ApiResource):
     return {'success': True}
 
 
+@resource('/v1/detachexternal/<servicename>')
+@internal_only
+class DetachExternal(ApiResource):
+  """ Resource for detaching an external login. """
+  @require_user_admin
+  @nickname('detachExternalLogin')
+  def post(self, servicename):
+    """ Request that the current user be detached from the external login service. """
+    model.detach_external_login(get_authenticated_user(), servicename)
+    return {'success': True}
+
+
 @resource("/v1/recovery")
 @internal_only
 class Recovery(ApiResource):

@@ -66,6 +66,9 @@ def generate_headers(role='read'):
 @index.route('/users/', methods=['POST'])
 def create_user():
   user_data = request.get_json()
+  if not 'username' in user_data:
+    abort(400, 'Missing username')
+
   username = user_data['username']
   password = user_data.get('password', '')

@@ -223,6 +223,78 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
     return service;
   }]);
 
+  /**
+   * Specialized class for conducting an HTTP poll, while properly preventing multiple calls.
+   */
+  $provide.factory('AngularPollChannel', ['ApiService', '$timeout', function(ApiService, $timeout) {
+    var _PollChannel = function(scope, requester, opt_sleeptime) {
+      this.scope_ = scope;
+      this.requester_ = requester;
+      this.sleeptime_ = opt_sleeptime || (60 * 1000 /* 60s */);
+      this.timer_ = null;
+
+      this.working = false;
+      this.polling = false;
+
+      var that = this;
+      scope.$on('$destroy', function() {
+        that.stop();
+      });
+    };
+
+    _PollChannel.prototype.stop = function() {
+      if (this.timer_) {
+        $timeout.cancel(this.timer_);
+        this.timer_ = null;
+        this.polling_ = false;
+      }
+
+      this.working = false;
+    };
+
+    _PollChannel.prototype.start = function() {
+      // Make sure we invoke call outside the normal digest cycle, since
+      // we'll call $scope.$apply ourselves.
+      var that = this;
+      setTimeout(function() { that.call_(); }, 0);
+    };
+
+    _PollChannel.prototype.call_ = function() {
+      if (this.working) { return; }
+
+      var that = this;
+      this.working = true;
+      this.scope_.$apply(function() {
+        that.requester_(function(status) {
+          if (status) {
+            that.working = false;
+            that.setupTimer_();
+          } else {
+            that.stop();
+          }
+        });
+      });
+    };
+
+    _PollChannel.prototype.setupTimer_ = function() {
+      if (this.timer_) { return; }
+
+      var that = this;
+      this.polling = true;
+      this.timer_ = $timeout(function() {
+        that.timer_ = null;
+        that.call_();
+      }, this.sleeptime_)
+    };
+
+    var service = {
+      'create': function(scope, requester, opt_sleeptime) {
+        return new _PollChannel(scope, requester, opt_sleeptime);
+      }
+    };
+
+    return service;
+  }]);
+
   $provide.factory('DataFileService', [function() {
     var dataFileService = {};

@@ -489,7 +561,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
       // If an error occurred, report it and done.
       if (ping < 0) {
         cached['pings'] = [-1];
-        invokeCallback($scope, pings, callback);
+        invokeCallback($scope, [-1], callback);
         return;
       }
 

@@ -1502,7 +1574,12 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
   };
 
   notificationService.getPage = function(notification) {
-    var page = notificationKinds[notification['kind']]['page'];
+    var kindInfo = notificationKinds[notification['kind']];
+    if (!kindInfo) {
+      return null;
+    }
+
+    var page = kindInfo['page'];
     if (typeof page != 'string') {
       page = page(notification['metadata']);
     }

@@ -982,14 +982,9 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
 }
 
 function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
-                       ansi2html, AngularViewArray) {
+                       ansi2html, AngularViewArray, AngularPollChannel) {
   var namespace = $routeParams.namespace;
   var name = $routeParams.name;
-  var pollTimerHandle = null;
-
-  $scope.$on('$destroy', function() {
-    stopPollTimer();
-  });
 
   // Watch for changes to the current parameter.
   $scope.$on('$routeUpdate', function(){

@@ -999,8 +994,7 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
   });
 
   $scope.builds = null;
-  $scope.polling = false;
+  $scope.pollChannel = null;
 
   $scope.buildDialogShowCounter = 0;
 
   $scope.showNewBuildDialog = function() {

@@ -1085,8 +1079,6 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
   $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
     if (build == $scope.currentBuild) { return; }
 
-    stopPollTimer();
-
     $scope.logEntries = null;
     $scope.logStartIndex = null;
     $scope.currentParentEntry = null;

@@ -1107,47 +1099,35 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
       $scope.adjustLogHeight();
     }, 1);
 
-    // Load the first set of logs.
-    getBuildStatusAndLogs();
-
-    // If the build is currently processing, start the build timer.
-    checkPollTimer();
-  };
-
-  var checkPollTimer = function() {
-    var build = $scope.currentBuild;
-    if (!build) {
-      stopPollTimer();
-      return;
+    // Stop any existing polling.
+    if ($scope.pollChannel) {
+      $scope.pollChannel.stop();
     }
 
-    if (build['phase'] != 'complete' && build['phase'] != 'error') {
-      startPollTimer();
-      return true;
-    } else {
-      stopPollTimer();
-      return false;
-    }
+    // Create a new channel for polling the build status and logs.
+    var conductStatusAndLogRequest = function(callback) {
+      getBuildStatusAndLogs(build, callback);
+    };
+
+    $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
+    $scope.pollChannel.start();
   };
 
-  var stopPollTimer = function() {
-    $interval.cancel(pollTimerHandle);
-  };
-
-  var startPollTimer = function() {
-    stopPollTimer();
-    pollTimerHandle = $interval(getBuildStatusAndLogs, 2000);
-  };
-
-  var processLogs = function(logs, startIndex) {
+  var processLogs = function(logs, startIndex, endIndex) {
     if (!$scope.logEntries) { $scope.logEntries = []; }
 
+    // If the start index given is less than that requested, then we've received a larger
+    // pool of logs, and we need to only consider the new ones.
+    if (startIndex < $scope.logStartIndex) {
+      logs = logs.slice($scope.logStartIndex - startIndex);
+    }
+
     for (var i = 0; i < logs.length; ++i) {
       var entry = logs[i];
       var type = entry['type'] || 'entry';
       if (type == 'command' || type == 'phase' || type == 'error') {
         entry['logs'] = AngularViewArray.create();
-        entry['index'] = startIndex + i;
+        entry['index'] = $scope.logStartIndex + i;
 
         $scope.logEntries.push(entry);
         $scope.currentParentEntry = entry;

@@ -1155,18 +1135,19 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
         $scope.currentParentEntry['logs'].push(entry);
       }
     }
+
+    return endIndex;
   };
 
-  var getBuildStatusAndLogs = function() {
-    if (!$scope.currentBuild || $scope.polling) { return; }
-    $scope.polling = true;
-
+  var getBuildStatusAndLogs = function(build, callback) {
     var params = {
      'repository': namespace + '/' + name,
-     'build_uuid': $scope.currentBuild.id
+     'build_uuid': build.id
    };
 
    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
+     if (build != $scope.currentBuild) { callback(false); return; }
+
     // Note: We use extend here rather than replacing as Angular is depending on the
     // root build object to remain the same object.
     var matchingBuilds = $.grep($scope.builds, function(elem) {

@@ -1181,22 +1162,16 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
        $scope.builds.push(currentBuild);
      }
 
-      checkPollTimer();
-
      // Load the updated logs for the build.
      var options = {
        'start': $scope.logStartIndex
      };
 
      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
-        if ($scope.logStartIndex != null && resp['start'] != $scope.logStartIndex) {
-          $scope.polling = false;
-          return;
-        }
+        if (build != $scope.currentBuild) { callback(false); return; }
 
-        processLogs(resp['logs'], resp['start']);
-        $scope.logStartIndex = resp['total'];
-        $scope.polling = false;
+        // Process the logs we've received.
+        $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);
 
        // If the build status is an error, open the last two log entries.
        if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {

@@ -1209,9 +1184,15 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
          openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
          openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
        }
+
+        // If the build phase is an error or a complete, then we mark the channel
+        // as closed.
+        callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
      }, function() {
-        $scope.polling = false;
+        callback(false);
      });
+    }, function() {
+      callback(false);
    });
  };
 

@@ -1804,6 +1785,18 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
       UIService.showFormError('#changePasswordForm', result);
     });
   };
+
+  $scope.detachExternalLogin = function(kind) {
+    var params = {
+      'servicename': kind
+    };
+
+    ApiService.detachExternalLogin(null, params).then(function() {
+      $scope.hasGithubLogin = false;
+      $scope.hasGoogleLogin = false;
+      UserService.load();
+    }, ApiService.errorDisplay('Count not detach service'));
+  };
 }
 
 function ImageViewCtrl($scope, $routeParams, $rootScope, $timeout, ApiService, ImageMetadataService) {

@@ -94,7 +94,7 @@
     </div>
   </div>
   <div style="margin-top: 10px">
-    <span class="quay-spinner" ng-show="polling"></span>
+    <span class="quay-spinner" ng-show="pollChannel.working"></span>
     <button class="btn" ng-show="(build.phase == 'error' || build.phase == 'complete') && build.resource_key"
             ng-class="build.phase == 'error' ? 'btn-success' : 'btn-default'"
             ng-click="askRestartBuild(build)">

@@ -177,10 +177,14 @@
   <div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
     <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
     <b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
+    <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
+          perform-delete="detachExternalLogin('github')"></span>
   </div>
   <div ng-show="hasGithubLogin && !githubLogin" class="lead col-md-8">
     <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
     Account attached to Github Account
+    <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
+          perform-delete="detachExternalLogin('github')"></span>
   </div>
   <div ng-show="!hasGithubLogin" class="col-md-4">
     <span class="external-login-button" provider="github" action="attach"></span>

@@ -197,10 +201,14 @@
   <div ng-show="hasGoogleLogin && googleLogin" class="lead col-md-8">
     <i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
     <b>{{ googleLogin }}</b>
+    <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
+          perform-delete="detachExternalLogin('google')"></span>
   </div>
   <div ng-show="hasGoogleLogin && !googleLogin" class="lead col-md-8">
     <i class="fa fa-google fa-lg" style="margin-right: 6px;" data-title="Google" bs-tooltip="tooltip.title"></i>
     Account attached to Google Account
+    <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
+          perform-delete="detachExternalLogin('google')"></span>
   </div>
   <div ng-show="!hasGoogleLogin" class="col-md-4">
     <span class="external-login-button" provider="google" action="attach"></span>

@@ -75,7 +75,7 @@ class BaseStorage(StoragePaths):
   def stream_read_file(self, path):
     raise NotImplementedError
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     raise NotImplementedError
 
   def list_directory(self, path=None):

@@ -128,7 +128,7 @@ class _CloudStorage(BaseStorage):
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     buffer_size = 5 * 1024 * 1024

@@ -140,6 +140,9 @@
     if content_type is not None:
       metadata['Content-Type'] = content_type
 
+    if content_encoding is not None:
+      metadata['Content-Encoding'] = content_encoding
+
     mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
                                                       **self._upload_params)
     num_part = 1

@@ -202,6 +205,9 @@
     path = self._init_path(path)
     key = self._key_class(self._cloud_bucket, path)
     k = self._cloud_bucket.lookup(key)
+    if k is None:
+      raise IOError('No such key: \'{0}\''.format(path))
+
     return k.etag[1:-1][:7]
 
 

@@ -224,7 +230,7 @@ class GoogleCloudStorage(_CloudStorage):
                                              connect_kwargs, upload_params, storage_path,
                                              access_key, secret_key, bucket_name)
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     path = self._init_path(path)

@@ -233,6 +239,9 @@
     if content_type is not None:
       key.set_metadata('Content-Type', content_type)
 
+    if content_encoding is not None:
+      key.set_metadata('Content-Encoding', content_encoding)
+
     key.set_contents_from_stream(fp)
 
 

@@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
   def stream_read(self, path):
     yield ''
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     pass
 
   def remove(self, path):

@@ -43,7 +43,7 @@ class LocalStorage(BaseStorage):
     path = self._init_path(path)
     return io.open(path, mode='rb')
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Size is mandatory
     path = self._init_path(path, create=True)
     with open(path, mode='wb') as f:

Binary file not shown.

@@ -24,7 +24,7 @@ from endpoints.api.repoemail import RepositoryAuthorizedEmail
 from endpoints.api.repositorynotification import RepositoryNotification, RepositoryNotificationList
 from endpoints.api.user import (PrivateRepositories, ConvertToOrganization, Recovery, Signout,
                                 Signin, User, UserAuthorizationList, UserAuthorization, UserNotification,
-                                VerifyUser)
+                                VerifyUser, DetachExternal)
 from endpoints.api.repotoken import RepositoryToken, RepositoryTokenList
 from endpoints.api.prototype import PermissionPrototype, PermissionPrototypeList
 from endpoints.api.logs import UserLogs, OrgLogs, RepositoryLogs

@@ -435,6 +435,24 @@ class TestSignin(ApiTestCase):
     self._run_test('POST', 403, 'devtable', {u'username': 'E9RY', u'password': 'LQ0N'})
 
 
+class TestDetachExternal(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(DetachExternal, servicename='someservice')
+
+  def test_post_anonymous(self):
+    self._run_test('POST', 401, None, {})
+
+  def test_post_freshuser(self):
+    self._run_test('POST', 200, 'freshuser', {})
+
+  def test_post_reader(self):
+    self._run_test('POST', 200, 'reader', {})
+
+  def test_post_devtable(self):
+    self._run_test('POST', 200, 'devtable', {})
+
+
 class TestVerifyUser(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)

@@ -198,3 +198,11 @@ class TestBuildLogs(RedisBuildLogs):
       return None
     else:
       return super(TestBuildLogs, self).get_status(build_id)
+
+  def expire_log_entries(self, build_id):
+    if build_id == self.test_build_id:
+      return
+    if not self.allow_delegate:
+      return None
+    else:
+      return super(TestBuildLogs, self).expire_log_entries(build_id)

23  tools/reparsedockerfile.py  Normal file
@@ -0,0 +1,23 @@
+from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile
+
+with open('Dockerfile.test', 'r') as dockerfileobj:
+  parsed_dockerfile = parse_dockerfile(dockerfileobj.read())
+
+quay_reponame = 'something'
+env_command = {
+  'command': 'ENV',
+  'parameters': 'QUAY_REPOSITORY %s' % quay_reponame
+}
+
+for index, command in reversed(list(enumerate(parsed_dockerfile.commands))):
+  if command['command'] == 'FROM':
+    new_command_index = index + 1
+    parsed_dockerfile.commands.insert(new_command_index, env_command)
+    break
+
+image_and_tag_tuple = parsed_dockerfile.get_image_and_tag()
+print image_and_tag_tuple
+if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
+  raise Exception('Missing FROM command in Dockerfile')
+
+print serialize_dockerfile(parsed_dockerfile)

@@ -1,6 +1,6 @@
 import re
 
-LINE_CONTINUATION_REGEX = re.compile('\s*\\\s*\n')
+LINE_CONTINUATION_REGEX = re.compile(r'(\s)*\\(\s)*\n')
 COMMAND_REGEX = re.compile('([A-Za-z]+)\s(.*)')
 
 COMMENT_CHARACTER = '#'
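
The old pattern, written as a non-raw string, effectively required the backslash to be the last character before the newline; the new raw pattern tolerates trailing whitespace after the backslash. A standalone sketch with a made-up Dockerfile line:

  import re

  # What the old non-raw literal '\s*\\\s*\n' amounts to after string-escape processing:
  # whitespace, a literal backslash, optional 's' characters, then the newline.
  OLD_LINE_CONTINUATION = re.compile(r'\s*\\s*\n')
  # The new pattern from this commit: whitespace, a backslash, optional whitespace, newline.
  NEW_LINE_CONTINUATION = re.compile(r'(\s)*\\(\s)*\n')

  dockerfile_line = 'RUN apt-get update \\ \n    && apt-get install -y git\n'
  print(bool(OLD_LINE_CONTINUATION.search(dockerfile_line)))  # False: the space after '\' breaks it
  print(bool(NEW_LINE_CONTINUATION.search(dockerfile_line)))  # True: the continuation is recognized
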
267  util/streamingjsonencoder.py  Normal file
@@ -0,0 +1,267 @@
+# Adapted from https://gist.github.com/akaihola/1415730#file-streamingjson-py
+
+# Copyright (c) Django Software Foundation and individual contributors.
+# All rights reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# 1. Redistributions of source code must retain the above copyright notice,
+#    this list of conditions and the following disclaimer.
+
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+
+# 3. Neither the name of Django nor the names of its contributors may be used
+#    to endorse or promote products derived from this software without
+#    specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import collections
+import json
+from json.encoder import encode_basestring, encode_basestring_ascii, FLOAT_REPR, INFINITY
+from types import GeneratorType
+
+
+class StreamingJSONEncoder(json.JSONEncoder):
+  def iterencode(self, o, _one_shot=False):
+    """Encode the given object and yield each string
+    representation as available.
+
+    For example::
+
+        for chunk in StreamingJSONEncoder().iterencode(bigobject):
+            mysocket.write(chunk)
+
+    This method is a verbatim copy of
+    :meth:`json.JSONEncoder.iterencode`.  It is
+    needed because we need to call our patched
+    :func:`streamingjsonencoder._make_iterencode`.
+    """
+    if self.check_circular:
+      markers = {}
+    else:
+      markers = None
+    if self.ensure_ascii:
+      _encoder = encode_basestring_ascii
+    else:
+      _encoder = encode_basestring
+    if self.encoding != 'utf-8':
+      def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
+        if isinstance(o, str):
+          o = o.decode(_encoding)
+        return _orig_encoder(o)
+
+    def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
+      # Check for specials. Note that this type of test is processor- and/or
+      # platform-specific, so do tests which don't depend on the internals.
+
+      if o != o:
+        text = 'NaN'
+      elif o == _inf:
+        text = 'Infinity'
+      elif o == _neginf:
+        text = '-Infinity'
+      else:
+        return _repr(o)
+
+      if not allow_nan:
+        raise ValueError("Out of range float values are not JSON compliant: %r"
+                         % (o,))
+
+      return text
+
+    _iterencode = _make_iterencode(
+        markers, self.default, _encoder, self.indent, floatstr,
+        self.key_separator, self.item_separator, self.sort_keys,
+        self.skipkeys, _one_shot)
+    return _iterencode(o, 0)
+
+
+def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator,
+                     _item_separator, _sort_keys, _skipkeys, _one_shot, False=False, True=True,
+                     ValueError=ValueError, basestring=basestring, dict=dict, float=float,
+                     GeneratorType=GeneratorType, id=id, int=int, isinstance=isinstance, list=list,
+                     long=long, str=str, tuple=tuple):
+  """
+  This is a patched version of
+  :func:`django.utils.simplejson.encoder.iterencode`.  Whenever it encounters
+  a generator in the data structure, it encodes it as a JSON list.
+  """
+  def _iterencode_list(lst, _current_indent_level):
+    if not lst:
+      # note: empty generators aren't caught here, see below
+      yield '[]'
+      return
+    if markers is not None:
+      markerid = id(lst)
+      if markerid in markers:
+        raise ValueError("Circular reference detected")
+      markers[markerid] = lst
+    buf = '['
+    if _indent is not None:
+      _current_indent_level += 1
+      newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+      separator = _item_separator + newline_indent
+      buf += newline_indent
+    else:
+      newline_indent = None
+      separator = _item_separator
+    first = True
+    for value in lst:
+      if first:
+        first = False
+      else:
+        buf = separator
+      if isinstance(value, basestring):
+        yield buf + _encoder(value)
+      elif value is None:
+        yield buf + 'null'
+      elif value is True:
+        yield buf + 'true'
+      elif value is False:
+        yield buf + 'false'
+      elif isinstance(value, (int, long)):
+        yield buf + str(value)
+      elif isinstance(value, float):
+        yield buf + _floatstr(value)
+      else:
+        yield buf
+        if isinstance(value, (list, tuple, GeneratorType)):
+          chunks = _iterencode_list(value, _current_indent_level)
+        elif isinstance(value, dict):
+          chunks = _iterencode_dict(value, _current_indent_level)
+        else:
+          chunks = _iterencode(value, _current_indent_level)
+        for chunk in chunks:
+          yield chunk
+    if first:
+      # we had an empty generator
+      yield buf
+    if newline_indent is not None:
+      _current_indent_level -= 1
+      yield '\n' + (' ' * (_indent * _current_indent_level))
+    yield ']'
+    if markers is not None:
+      del markers[markerid]
+
+  def _iterencode_dict(dct, _current_indent_level):
+    if not dct:
+      yield '{}'
+      return
+    if markers is not None:
+      markerid = id(dct)
+      if markerid in markers:
+        raise ValueError("Circular reference detected")
+      markers[markerid] = dct
+    yield '{'
+    if _indent is not None:
+      _current_indent_level += 1
+      newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+      item_separator = _item_separator + newline_indent
+      yield newline_indent
+    else:
+      newline_indent = None
+      item_separator = _item_separator
+    first = True
+    if _sort_keys:
+      items = dct.items()
+      items.sort(key=lambda kv: kv[0])
+    else:
+      items = dct.iteritems()
+    for key, value in items:
+      if isinstance(key, basestring):
+        pass
+      # JavaScript is weakly typed for these, so it makes sense to
+      # also allow them.  Many encoders seem to do something like this.
+      elif isinstance(key, float):
+        key = _floatstr(key)
+      elif isinstance(key, (int, long)):
+        key = str(key)
+      elif key is True:
+        key = 'true'
+      elif key is False:
+        key = 'false'
+      elif key is None:
+        key = 'null'
+      elif _skipkeys:
+        continue
+      else:
+        raise TypeError("key %r is not a string" % (key,))
+      if first:
+        first = False
+      else:
+        yield item_separator
+      yield _encoder(key)
+      yield _key_separator
+      if isinstance(value, basestring):
+        yield _encoder(value)
+      elif value is None:
+        yield 'null'
+      elif value is True:
+        yield 'true'
+      elif value is False:
+        yield 'false'
+      elif isinstance(value, (int, long)):
+        yield str(value)
+      elif isinstance(value, float):
+        yield _floatstr(value)
+      else:
+        if isinstance(value, collections.Mapping):
+          chunks = _iterencode_dict(value, _current_indent_level)
+        elif isinstance(value, collections.Iterable):
+          chunks = _iterencode_list(value, _current_indent_level)
+        else:
+          chunks = _iterencode(value, _current_indent_level)
+        for chunk in chunks:
+          yield chunk
+    if newline_indent is not None:
+      _current_indent_level -= 1
+      yield '\n' + (' ' * (_indent * _current_indent_level))
+    yield '}'
+    if markers is not None:
+      del markers[markerid]
+
+  def _iterencode(o, _current_indent_level):
+    if isinstance(o, basestring):
+      yield _encoder(o)
+    elif o is None:
+      yield 'null'
+    elif o is True:
+      yield 'true'
+    elif o is False:
+      yield 'false'
+    elif isinstance(o, (int, long)):
+      yield str(o)
+    elif isinstance(o, float):
+      yield _floatstr(o)
+    elif isinstance(o, collections.Mapping):
+      for chunk in _iterencode_dict(o, _current_indent_level):
+        yield chunk
+    elif isinstance(o, collections.Iterable):
+      for chunk in _iterencode_list(o, _current_indent_level):
+        yield chunk
+    else:
+      if markers is not None:
+        markerid = id(o)
+        if markerid in markers:
+          raise ValueError("Circular reference detected")
+        markers[markerid] = o
+      o = _default(o)
+      for chunk in _iterencode(o, _current_indent_level):
+        yield chunk
+      if markers is not None:
+        del markers[markerid]
+
+  return _iterencode
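
What the patch buys: generators embedded in the data structure are emitted as JSON arrays instead of failing, which is how the archiver below streams build log entries without materializing them. A Python 2 sketch with stand-in data (fake_log_entries stands in for build_logs.get_log_entries):

  from util.streamingjsonencoder import StreamingJSONEncoder

  def fake_log_entries():
    for i in range(3):
      yield {'message': 'step %s' % i}

  to_encode = {'start': 0, 'total': 3, 'logs': fake_log_entries()}

  for chunk in StreamingJSONEncoder().iterencode(to_encode):
    print(chunk)   # the real worker writes each chunk into a gzip stream instead
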
55
workers/buildlogsarchiver.py
Normal file
55
workers/buildlogsarchiver.py
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
import logging

from apscheduler.schedulers.blocking import BlockingScheduler
from peewee import fn
from tempfile import SpooledTemporaryFile
from gzip import GzipFile

from data import model
from data.archivedlogs import JSON_MIMETYPE
from data.database import RepositoryBuild, db_random_func
from app import build_logs, log_archive
from util.streamingjsonencoder import StreamingJSONEncoder

POLL_PERIOD_SECONDS = 30

logger = logging.getLogger(__name__)
sched = BlockingScheduler()

@sched.scheduled_job(trigger='interval', seconds=30)
def archive_redis_buildlogs():
  """ Archive a single build, choosing a candidate at random. This process must be idempotent to
      avoid needing two-phase commit. """
  try:
    # Get a random build to archive
    to_archive = model.archivable_buildlogs_query().order_by(db_random_func()).get()
    logger.debug('Archiving: %s', to_archive.uuid)

    length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
    to_encode = {
      'start': 0,
      'total': length,
      'logs': entries,
    }

    with SpooledTemporaryFile() as tempfile:
      with GzipFile('testarchive', fileobj=tempfile) as zipstream:
        for chunk in StreamingJSONEncoder().iterencode(to_encode):
          zipstream.write(chunk)

      tempfile.seek(0)
      log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
                             file_id=to_archive.uuid)

    to_archive.logs_archived = True
    to_archive.save()

    build_logs.expire_log_entries(to_archive.uuid)

  except RepositoryBuild.DoesNotExist:
    logger.debug('No more builds to archive')


if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)
  sched.start()
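The worker relies on model.archivable_buildlogs_query(), which is not shown in this commit. A hypothetical sketch of what such a query helper might look like, assuming it selects finished builds whose logs are still in Redis (the field logs_archived and the BUILD_PHASE constants are taken from elsewhere in this diff; the exact filter is an assumption):

# Hypothetical sketch, not part of this commit.
from data.database import RepositoryBuild, BUILD_PHASE

def archivable_buildlogs_query():
  # Builds that reached a terminal phase but whose logs have not been archived yet.
  return (RepositoryBuild
          .select()
          .where(RepositoryBuild.logs_archived == False,
                 (RepositoryBuild.phase == BUILD_PHASE.COMPLETE) |
                 (RepositoryBuild.phase == BUILD_PHASE.ERROR)))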
@ -1,6 +1,7 @@
 import logging.config

-logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+if __name__ == "__main__":
+  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)

 import logging
 import argparse

@ -23,6 +24,7 @@ from collections import defaultdict
 from requests.exceptions import ConnectionError

 from data import model
+from data.database import BUILD_PHASE
 from workers.worker import Worker, WorkerUnhealthyException, JobException
 from app import userfiles as user_files, build_logs, sentry, dockerfile_build_queue
 from endpoints.notificationhelper import spawn_notification

@ -557,7 +559,7 @@ class DockerfileBuildWorker(Worker):

   if c_type not in self._mime_processors:
     log_appender('error', build_logs.PHASE)
-    repository_build.phase = 'error'
+    repository_build.phase = BUILD_PHASE.ERROR
     repository_build.save()
     message = 'Unknown mime-type: %s' % c_type
     log_appender(message, build_logs.ERROR)

@ -566,7 +568,7 @@ class DockerfileBuildWorker(Worker):

   # Try to build the build directory package from the buildpack.
   log_appender('unpacking', build_logs.PHASE)
-  repository_build.phase = 'unpacking'
+  repository_build.phase = BUILD_PHASE.UNPACKING
   repository_build.save()

   build_dir = None

@ -584,20 +586,20 @@
                          repository_build.uuid, self._cache_size_gb,
                          pull_credentials) as build_ctxt:
     log_appender('pulling', build_logs.PHASE)
-    repository_build.phase = 'pulling'
+    repository_build.phase = BUILD_PHASE.PULLING
     repository_build.save()
     build_ctxt.pull()

     self.extend_processing(RESERVATION_TIME)

     log_appender('building', build_logs.PHASE)
-    repository_build.phase = 'building'
+    repository_build.phase = BUILD_PHASE.BUILDING
     repository_build.save()
     built_image = build_ctxt.build(self.extend_processing)

     if not built_image:
       log_appender('error', build_logs.PHASE)
-      repository_build.phase = 'error'
+      repository_build.phase = BUILD_PHASE.ERROR
       repository_build.save()

       message = 'Unable to build dockerfile.'

@ -610,13 +612,13 @@ class DockerfileBuildWorker(Worker):
     self.extend_processing(RESERVATION_TIME)

     log_appender('pushing', build_logs.PHASE)
-    repository_build.phase = 'pushing'
+    repository_build.phase = BUILD_PHASE.PUSHING
     repository_build.save()

     build_ctxt.push(built_image)

     log_appender('complete', build_logs.PHASE)
-    repository_build.phase = 'complete'
+    repository_build.phase = BUILD_PHASE.COMPLETE
     repository_build.save()

     # Spawn a notification that the build has completed.

@ -653,20 +655,20 @@ class DockerfileBuildWorker(Worker):
     sentry.client.captureException()
     log_appender('error', build_logs.PHASE)
     logger.exception('Exception when processing request.')
-    repository_build.phase = 'error'
+    repository_build.phase = BUILD_PHASE.ERROR
     repository_build.save()
     log_appender(str(exc), build_logs.ERROR)

     # Raise the exception to the queue.
     raise JobException(str(exc))

-desc = 'Worker daemon to monitor dockerfile build'
-parser = argparse.ArgumentParser(description=desc)
-parser.add_argument('--cachegb', default=20, type=float,
-                    help='Maximum cache size in gigabytes.')
-args = parser.parse_args()
-
-worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
-                               reservation_seconds=RESERVATION_TIME)
-worker.start(start_status_server_port=8000)
+if __name__ == "__main__":
+  desc = 'Worker daemon to monitor dockerfile build'
+  parser = argparse.ArgumentParser(description=desc)
+  parser.add_argument('--cachegb', default=20, type=float,
+                      help='Maximum cache size in gigabytes.')
+  args = parser.parse_args()
+
+  worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
+                                 reservation_seconds=RESERVATION_TIME)
+  worker.start(start_status_server_port=8000)
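The hunks above replace hard-coded phase strings with constants imported from data.database, which is not part of this diff. A minimal sketch of what such a constants holder could look like; the attribute names come from the usages above, while the string values are assumed to mirror the literals they replace:

# Hypothetical sketch of the BUILD_PHASE constants referenced above.
class BUILD_PHASE(object):
  ERROR = 'error'
  UNPACKING = 'unpacking'
  PULLING = 'pulling'
  BUILDING = 'building'
  PUSHING = 'pushing'
  COMPLETE = 'complete'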