Merge remote-tracking branch 'origin/master' into pullinprivate
Conflicts: workers/dockerfilebuild.py
commit d67a1cddc2
14 changed files with 96 additions and 32 deletions
Binary file not shown.
Binary file not shown.
@@ -1,6 +1,8 @@
 import redis
 import json
 
+class BuildStatusRetrievalError(Exception):
+  pass
 
 class BuildLogs(object):
   ERROR = 'error'
@@ -45,7 +47,7 @@ class BuildLogs(object):
       log_entries = self._redis.lrange(self._logs_key(build_id), start_index, -1)
       return (llen, (json.loads(entry) for entry in log_entries))
     except redis.ConnectionError:
-      return (0, [])
+      raise BuildStatusRetrievalError('Cannot retrieve build logs')
 
   @staticmethod
   def _status_key(build_id):
@@ -65,6 +67,6 @@ class BuildLogs(object):
     try:
       fetched = self._redis.get(self._status_key(build_id))
     except redis.ConnectionError:
-      return None
+      raise BuildStatusRetrievalError('Cannot retrieve build status')
 
     return json.loads(fetched) if fetched else None
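The two hunks above change the failure mode of the Redis-backed log store: a lost connection now raises `BuildStatusRetrievalError` instead of silently returning empty results, so callers can tell "no data yet" apart from "could not load". A minimal caller-side sketch of that contract follows; `safe_get_status` is a hypothetical helper, not part of this commit, while the import path matches the one added later in this diff.

```python
# Hedged sketch: consuming the new error contract of the BuildLogs accessor.
# `build_logs` is assumed to be an instance of the BuildLogs class above;
# `safe_get_status` is illustrative, not code from this commit.
from data.buildlogs import BuildStatusRetrievalError


def safe_get_status(build_logs, build_uuid):
  try:
    # May raise BuildStatusRetrievalError if Redis is unreachable.
    return build_logs.get_status(build_uuid)
  except BuildStatusRetrievalError:
    # Surface a sentinel instead of pretending the build has no status.
    return {'error': 'cannot_load'}
```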
@@ -1029,7 +1029,7 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name,
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')
 
-  fetched.storage.created = dateutil.parser.parse(created_date_str)
+  fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
   fetched.storage.comment = comment
   fetched.storage.command = command
 
@@ -11,6 +11,7 @@ from endpoints.common import start_build
 from endpoints.trigger import BuildTrigger
 from data import model
 from auth.permissions import ModifyRepositoryPermission
+from data.buildlogs import BuildStatusRetrievalError
 
 
 logger = logging.getLogger(__name__)
@@ -56,11 +57,17 @@ def trigger_view(trigger):
 
 
 def build_status_view(build_obj, can_write=False):
-  status = build_logs.get_status(build_obj.uuid)
+  phase = build_obj.phase
+  try:
+    status = build_logs.get_status(build_obj.uuid)
+  except BuildStatusRetrievalError:
+    status = {}
+    phase = 'cannot_load'
+
   logger.debug('Can write: %s job_config: %s', can_write, build_obj.job_config)
   resp = {
     'id': build_obj.uuid,
-    'phase': build_obj.phase if status else 'cannot_load',
+    'phase': phase,
     'started': format_date(build_obj.started),
     'display_name': build_obj.display_name,
     'status': status or {},
@@ -202,7 +209,10 @@ class RepositoryBuildLogs(RepositoryParamResource):
 
     start = int(request.args.get('start', 0))
 
-    count, logs = build_logs.get_log_entries(build.uuid, start)
+    try:
+      count, logs = build_logs.get_log_entries(build.uuid, start)
+    except BuildStatusRetrievalError:
+      count, logs = (0, [])
 
     response_obj.update({
       'start': start,
@@ -13,7 +13,8 @@ from endpoints.api.build import (build_status_view, trigger_view, RepositoryBuil
                                  get_trigger_config)
 from endpoints.common import start_build
 from endpoints.trigger import (BuildTrigger as BuildTriggerBase, TriggerDeactivationException,
-                               TriggerActivationException, EmptyRepositoryException)
+                               TriggerActivationException, EmptyRepositoryException,
+                               RepositoryReadException)
 from data import model
 from auth.permissions import UserAdminPermission, AdministerOrganizationPermission
 from util.names import parse_robot_username
@@ -117,9 +118,14 @@ class BuildTriggerSubdirs(RepositoryParamResource):
             'status': 'success'
           }
         except EmptyRepositoryException as exc:
+          return {
+            'status': 'success',
+            'subdir': []
+          }
+        except RepositoryReadException as exc:
           return {
             'status': 'error',
-            'message': exc.msg
+            'message': exc.message
           }
     else:
       raise Unauthorized()
@@ -1,10 +1,12 @@
 import logging
 import urlparse
 import json
+import string
 
 from flask import make_response, render_template, request
 from flask.ext.login import login_user, UserMixin
 from flask.ext.principal import identity_changed
+from random import SystemRandom
 
 from data import model
 from data.queue import dockerfile_build_queue
@@ -83,8 +85,13 @@ def handle_dme(ex):
   return make_response(json.dumps({'message': ex.message}), 400)
 
 
+def random_string():
+  random = SystemRandom()
+  return ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(8)])
+
 def render_page_template(name, **kwargs):
-  resp = make_response(render_template(name, route_data=json.dumps(get_route_data()), **kwargs))
+  resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
+                                       cache_buster=random_string(), **kwargs))
   resp.headers['X-FRAME-OPTIONS'] = 'DENY'
   return resp
 
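The `random_string` helper added above produces a per-process cache-busting token from the OS CSPRNG and threads it into every rendered page; the template hunks further down append it to static asset URLs. A small standalone sketch of the idea, with an illustrative asset URL that is not taken from this commit:

```python
# Standalone sketch of the cache-busting helper added above; the asset URL below
# is illustrative only. SystemRandom draws from the OS CSPRNG (os.urandom).
import string
from random import SystemRandom


def random_string():
  random = SystemRandom()
  return ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(8)])


# Browsers treat a changed query string as a new resource, so cached copies of
# app.js are refetched after each process restart or deploy.
print('/static/js/app.js?v=' + random_string())  # e.g. /static/js/app.js?v=K7Q2ZP0X
```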
@@ -38,6 +38,9 @@ class ValidationRequestException(Exception):
 class EmptyRepositoryException(Exception):
   pass
 
+class RepositoryReadException(Exception):
+  pass
+
 
 class BuildTrigger(object):
   def __init__(self):
@@ -209,9 +212,12 @@ class GithubBuildTrigger(BuildTrigger):
       return [os.path.dirname(elem.path) for elem in commit_tree.tree
               if (elem.type == u'blob' and
                   os.path.basename(elem.path) == u'Dockerfile')]
-    except GithubException:
-      msg = 'Unable to list contents of repository: %s' % source
-      raise EmptyRepositoryException(msg)
+    except GithubException as ge:
+      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
+      if message == 'Branch not found':
+        raise EmptyRepositoryException()
+
+      raise RepositoryReadException(message)
 
   @staticmethod
   def _prepare_build(config, repo, commit_sha, build_name, ref):
@@ -21,7 +21,7 @@ xhtml2pdf
 logstash_formatter
 redis
 hiredis
-git+https://github.com/dotcloud/docker-py.git
+docker-py
 loremipsum
 pygithub
 flask-restful
@@ -16,7 +16,7 @@ beautifulsoup4==4.3.2
 blinker==1.3
 boto==2.27.0
 distribute==0.6.34
-git+https://github.com/dotcloud/docker-py.git
+docker-py==0.3.0
 ecdsa==0.11
 gevent==1.0
 greenlet==0.4.2
@@ -17,7 +17,7 @@
     <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css">
     <link href='//fonts.googleapis.com/css?family=Droid+Sans:400,700' rel='stylesheet' type='text/css'>
 
-    <link rel="stylesheet" href="/static/css/quay.css">
+    <link rel="stylesheet" href="/static/css/quay.css?v={{ cache_buster }}">
     <link rel="stylesheet" href="/static/lib/angular-motion.min.css">
     <link rel="stylesheet" href="/static/lib/bootstrap-additions.min.css">
 
@@ -76,10 +76,10 @@
     window.__token = '{{ csrf_token() }}';
     </script>
 
-    <script src="/static/js/tour.js"></script>
-    <script src="/static/js/app.js"></script>
-    <script src="/static/js/controllers.js"></script>
-    <script src="/static/js/graphing.js"></script>
+    <script src="/static/js/tour.js?v={{ cache_buster }}"></script>
+    <script src="/static/js/app.js?v={{ cache_buster }}"></script>
+    <script src="/static/js/controllers.js?v={{ cache_buster }}"></script>
+    <script src="/static/js/graphing.js?v={{ cache_buster }}"></script>
 
     <!-- start Mixpanel --><script type="text/javascript">
     var isProd = document.location.hostname === 'quay.io';
@@ -29,6 +29,6 @@ def no_cache(f):
   @wraps(f)
   def add_no_cache(*args, **kwargs):
     response = f(*args, **kwargs)
-    response.headers['Cache-Control'] = 'no-cache'
+    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
     return response
   return add_no_cache
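The hunk above tightens the `Cache-Control` header emitted by the `no_cache` decorator so responses are neither cached by the browser nor served stale from an intermediate store. A hedged sketch of the decorator applied to a Flask view; the app and route are illustrative and not part of this commit:

```python
# Sketch only: the decorator body mirrors the hunk above; the Flask app and the
# /api/example route are illustrative, not code from this commit.
from functools import wraps

from flask import Flask, jsonify

app = Flask(__name__)


def no_cache(f):
  @wraps(f)
  def add_no_cache(*args, **kwargs):
    response = f(*args, **kwargs)
    # no-store and must-revalidate prevent both browser and proxy caching.
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    return response
  return add_no_cache


@app.route('/api/example')
@no_cache
def example():
  return jsonify({'ok': True})
```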
@@ -15,7 +15,7 @@ sudo gdebi --n binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.
 sudo gdebi --n binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb
 sudo gdebi --n binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb
 sudo gdebi --n binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb
-sudo gdebi --n binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb
+sudo gdebi --n binary_dependencies/builder/lxc-docker-0.9.0-tutum2_0.9.0-tutum2-20140327210604-4c49268-dirty_amd64.deb
 sudo chown -R 100000:100000 /var/lib/docker
 sudo shutdown -r now
 ```
@@ -49,9 +49,43 @@ class StatusWrapper(object):
     build_logs.set_status(self._uuid, self._status)
 
 
-def unwrap_stream(json_stream):
-  for json_entry in json_stream:
-    yield json.loads(json_entry).values()[0]
+class _IncompleteJsonError(Exception):
+  def __init__(self, start_from):
+    self.start_from = start_from
+
+
+class _StreamingJSONDecoder(json.JSONDecoder):
+  FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+  WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
+
+  def decode(self, s, _w=WHITESPACE.match):
+    """Return the Python representation of ``s`` (a ``str`` or ``unicode``
+    instance containing a JSON document)
+
+    """
+    start_from = 0
+    while start_from < len(s):
+      try:
+        obj, end = self.raw_decode(s[start_from:], idx=_w(s[start_from:], 0).end())
+      except ValueError:
+        raise _IncompleteJsonError(start_from)
+      end = _w(s[start_from:], end).end()
+      start_from += end
+      yield obj
+
+
+class StreamingDockerClient(Client):
+  def _stream_helper(self, response):
+    """Generator for data coming from a chunked-encoded HTTP response."""
+    content_buf = ''
+    for content in response.iter_content(chunk_size=256):
+      content_buf += content
+      try:
+        for val in json.loads(content_buf, cls=_StreamingJSONDecoder):
+          yield val
+        content_buf = ''
+      except _IncompleteJsonError as exc:
+        content_buf = content_buf[exc.start_from:]
 
 
 class DockerfileBuildContext(object):
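The new `_StreamingJSONDecoder` replaces the old `unwrap_stream` helper: instead of assuming one JSON document per chunk, it walks the buffer with `raw_decode`, yields every complete object, and reports where the unfinished tail starts so `StreamingDockerClient` can carry it over to the next chunk. A standalone sketch of that behaviour on a synthetic buffer; the class bodies are copied from the hunk above so the snippet runs on its own, while the sample data and driver loop are illustrative:

```python
# Standalone sketch of the streaming decoder added above, exercised on a synthetic
# buffer. The two class bodies are copied from the hunk; the sample buffer and the
# surrounding driver loop are illustrative, not code from this commit.
import json
import re


class _IncompleteJsonError(Exception):
  def __init__(self, start_from):
    self.start_from = start_from


class _StreamingJSONDecoder(json.JSONDecoder):
  FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
  WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)

  def decode(self, s, _w=WHITESPACE.match):
    # Yield every complete JSON object in `s`; if the buffer ends mid-object,
    # report the offset of the unfinished tail via _IncompleteJsonError.
    start_from = 0
    while start_from < len(s):
      try:
        obj, end = self.raw_decode(s[start_from:], idx=_w(s[start_from:], 0).end())
      except ValueError:
        raise _IncompleteJsonError(start_from)
      end = _w(s[start_from:], end).end()
      start_from += end
      yield obj


# Two complete status objects followed by a truncated third, as might arrive in a
# single 256-byte chunk of a chunked docker-py response.
buf = '{"status": "Step 1"} {"status": "Step 2"} {"stat'
decoded = []
try:
  for obj in json.loads(buf, cls=_StreamingJSONDecoder):
    decoded.append(obj)
except _IncompleteJsonError as exc:
  buf = buf[exc.start_from:]  # keep the partial object for the next chunk

print(decoded)  # [{'status': 'Step 1'}, {'status': 'Step 2'}]
print(buf)      # '{"stat'
```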
@@ -69,11 +103,11 @@ class DockerfileBuildContext(object):
     # Note: We have two different clients here because we (potentially) login
     # with both, but with different credentials that we do not want shared between
    # the build and push operations.
-    self._push_cl = Client(timeout=1200)
-    self._build_cl = Client(timeout=1200)
+    self._push_cl = StreamingDockerClient(timeout=1200)
+    self._build_cl = StreamingDockerClient(timeout=1200)
 
     dockerfile_path = os.path.join(self._build_dir, dockerfile_subdir,
-                                   "Dockerfile")
+                                   'Dockerfile')
     self._num_steps = DockerfileBuildContext.__count_steps(dockerfile_path)
 
     logger.debug('Will build and push to repo %s with tags named: %s' %
@@ -108,7 +142,7 @@ class DockerfileBuildContext(object):
     # Login with the specified credentials (if any).
     if self._pull_credentials:
       logger.debug('Logging in with pull credentials.')
-      self.build_cl_.login(self._pull_credentials['username'], self._pull_credentials['password'],
+      self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
                            registry=self._pull_credentials['registry'], reauth=True)
 
     # Start the build itself.
@@ -127,7 +161,7 @@ class DockerfileBuildContext(object):
 
     current_step = 0
     built_image = None
-    for status in unwrap_stream(build_status):
+    for status in build_status:
       fully_unwrapped = ""
       if isinstance(status, dict):
         if len(status) > 0:
@@ -189,9 +223,8 @@ class DockerfileBuildContext(object):
       logger.debug('Pushing to repo %s' % self._repo)
      resp = self._push_cl.push(self._repo, stream=True)
 
-      for status_str in resp:
-        status = json.loads(status_str)
-        logger.debug('Status: %s', status_str)
+      for status in resp:
+        logger.debug('Status: %s', status)
         if u'status' in status:
           status_msg = status[u'status']
 