Merge branch 'master' of bitbucket.org:yackob03/quay

Jake Moshenko 2014-10-15 16:30:15 -04:00
commit 23bf74fd34
9 changed files with 152 additions and 61 deletions

View file

@@ -9,6 +9,7 @@ from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile
from app import app, userfiles as user_files
from util.tarfileappender import TarfileAppender
client = app.config['HTTPCLIENT']
@@ -324,10 +325,17 @@ class GithubBuildTrigger(BuildTrigger):
with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make boto multipart happy
# Seek to position 0 to make tarfile happy.
tarball.seek(0)
dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
entries = {
tarball_subdir + '/.git/HEAD': commit_sha,
tarball_subdir + '/.git/objects/': None,
tarball_subdir + '/.git/refs/': None
}
appender = TarfileAppender(tarball, entries).get_stream()
dockerfile_id = user_files.store_file(appender, TARBALL_MIME)
logger.debug('Successfully prepared job')
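
Taken together, the change above streams the GitHub archive back out with stub .git metadata attached instead of buffering a modified tarball in memory. A condensed sketch of the flow (the archive path and commit SHA below are placeholders, not values from this commit):

import tarfile
from util.tarfileappender import TarfileAppender

tarball = open('github-archive.tar.gz', 'rb')  # placeholder for the downloaded archive
with tarfile.open(fileobj=tarball) as archive:
  subdir = archive.getnames()[0]               # GitHub archives have a single top-level dir
tarball.seek(0)                                # rewind so the appender re-reads from the start

entries = {
  subdir + '/.git/HEAD': 'abc123',             # placeholder commit SHA
  subdir + '/.git/objects/': None,             # None produces an empty directory entry
  subdir + '/.git/refs/': None,
}
stream = TarfileAppender(tarball, entries).get_stream()
# stream is now a file-like object yielding the gzipped, augmented tarball.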

View file

@@ -18,7 +18,7 @@ paramiko
xhtml2pdf
redis
hiredis
docker-py
git+https://github.com/devtable/docker-py.git@emptydirs
pygithub
flask-restful
jsonschema

View file

@@ -22,7 +22,7 @@ backports.ssl-match-hostname==3.4.0.2
beautifulsoup4==4.3.2
blinker==1.3
boto==2.32.1
docker-py==0.5.0
git+https://github.com/devtable/docker-py.git@emptydirs
ecdsa==0.11
futures==2.2.0
gevent==1.0.1

Binary file not shown.

View file

@@ -8,8 +8,12 @@ class GzipWrap(object):
self.input = iter(input)
self.buffer = ''
self.zipper = GzipFile(filename, mode='wb', fileobj=self, compresslevel=compresslevel)
self.is_done = False
def read(self, size=-1):
if self.is_done:
return ''
# If the buffer already has enough bytes, then simply pop them off of
# the beginning and return them.
if len(self.buffer) >= size:
@@ -37,6 +41,8 @@ class GzipWrap(object):
if is_done:
self.zipper.flush()
self.zipper.close()
self.is_done = True
if len(self.buffer) >= size or is_done:
ret = self.buffer[0:size]
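
A minimal usage sketch (assumed, not part of this diff): GzipWrap adapts an iterable of uncompressed byte chunks into a read()-able stream of gzip data, which is what lets the appended tarball be uploaded without materializing it.

from util.gzipwrap import GzipWrap

def produce_chunks():
  yield 'hello '
  yield 'world'

stream = GzipWrap(produce_chunks())
while True:
  block = stream.read(4096)   # gzip-compressed output
  if not block:
    break                     # read() returns '' once is_done is set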

View file

@@ -2,77 +2,29 @@ import marisa_trie
import os
import tarfile
from aufs import is_aufs_metadata, get_deleted_prefix
from util.tarlayerformat import TarLayerFormat
AUFS_METADATA = u'.wh..wh.'
AUFS_WHITEOUT = u'.wh.'
AUFS_WHITEOUT_PREFIX_LENGTH = len(AUFS_WHITEOUT)
class StreamLayerMerger(object):
class StreamLayerMerger(TarLayerFormat):
""" Class which creates a generator of the combined TAR data for a set of Docker layers. """
def __init__(self, layer_iterator):
super(StreamLayerMerger, self).__init__(layer_iterator)
self.path_trie = marisa_trie.Trie()
self.path_encountered = []
self.prefix_trie = marisa_trie.Trie()
self.prefix_encountered = []
self.layer_iterator = layer_iterator
def get_generator(self):
for current_layer in self.layer_iterator():
# Read the current layer as TAR. If it is empty, we just continue
# to the next layer.
try:
tar_file = tarfile.open(mode='r|*', fileobj=current_layer)
except tarfile.ReadError as re:
continue
# For each of the tar entries, yield them IF and ONLY IF we have not
# encountered the path before.
# 9MB (+ padding below) so that it matches the 10MB expected by Gzip.
chunk_size = 1024 * 1024 * 9
for tar_info in tar_file:
if not self.check_tar_info(tar_info):
continue
# Yield the tar header.
yield tar_info.tobuf()
# Try to extract any file contents for the tar. If found, we yield them as well.
if tar_info.isreg():
file_stream = tar_file.extractfile(tar_info)
if file_stream is not None:
length = 0
while True:
current_block = file_stream.read(chunk_size)
if not len(current_block):
break
yield current_block
length += len(current_block)
file_stream.close()
# Files must be padded to 512-byte multiples.
if length % 512 != 0:
yield '\0' * (512 - (length % 512))
# Close the layer stream now that we're done with it.
tar_file.close()
def after_tar_layer(self, current_layer):
# Update the tries.
self.path_trie = marisa_trie.Trie(self.path_encountered)
self.prefix_trie = marisa_trie.Trie(self.prefix_encountered)
# Last two records are empty in TAR spec.
yield '\0' * 512
yield '\0' * 512
def check_tar_info(self, tar_info):
absolute = os.path.relpath(tar_info.name.decode('utf-8'), './')
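
The "IF and ONLY IF we have not encountered the path before" rule is what the two tries implement: a path is masked if a newer layer already wrote it, or if it sits under a directory prefix that a newer layer deleted. A standalone sketch of that check (the example paths are made up):

import marisa_trie

seen_paths = marisa_trie.Trie([u'etc/passwd'])
deleted_prefixes = marisa_trie.Trie([u'var/cache/'])

def is_masked(path):
  if path in seen_paths:
    return True                                    # already emitted by a newer layer
  return len(deleted_prefixes.prefixes(path)) > 0  # under a deleted directory

print is_masked(u'etc/passwd')        # True
print is_masked(u'var/cache/apt/x')   # True
print is_masked(u'usr/bin/python')    # False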

util/tarfileappender.py Normal file (+52 lines)
View file

@@ -0,0 +1,52 @@
import tarfile
from cStringIO import StringIO
from util.tarlayerformat import TarLayerFormat
from util.gzipwrap import GzipWrap
class TarfileAppender(TarLayerFormat):
""" Helper class which allows for appending entries to a gzipped-tarfile and doing so
in a streaming manner.
"""
def __init__(self, base_tar_file, entries):
super(TarfileAppender, self).__init__(self._get_tar_iterator)
self.entries = entries
self.base_tar_file = base_tar_file
self.last_info = None
def get_stream(self):
return GzipWrap(self.get_generator())
def after_tar_layer(self, current_layer):
pass
def check_tar_info(self, tar_info):
self.last_info = tar_info
return True
def _get_tar_iterator(self):
# Yield the contents of the base tar.
yield self.base_tar_file
# Construct an in-memory tar containing the entries to append, and then yield
# its data.
def add_entry(arch, dir_path, contents=None):
info = tarfile.TarInfo(dir_path)
info.uid = self.last_info.uid
info.gid = self.last_info.gid
info.type = tarfile.REGTYPE if contents else tarfile.DIRTYPE
if contents:
info.size = len(contents)
arch.addfile(info, fileobj=StringIO(contents) if contents else None)
append_tarball = StringIO()
with tarfile.open(fileobj=append_tarball, mode='w') as updated_archive:
for entry in self.entries:
add_entry(updated_archive, entry, self.entries[entry])
# To make tarfile happy.
append_tarball.seek(0)
yield append_tarball
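
A quick illustration of the entry semantics in _get_tar_iterator (assumed usage, not part of the commit): a string value becomes a regular file with those contents, while None becomes a directory entry.

import tarfile
from cStringIO import StringIO
from util.tarfileappender import TarfileAppender

# Build a one-entry base tar in memory.
base = StringIO()
with tarfile.open(fileobj=base, mode='w') as tar:
  info = tarfile.TarInfo('repo/README')
  info.size = 5
  tar.addfile(info, StringIO('hello'))
base.seek(0)

appender = TarfileAppender(base, {'repo/.git/HEAD': 'abc123',  # file entry
                                  'repo/.git/refs/': None})    # directory entry
stream = appender.get_stream()

gzipped = ''
while True:
  block = stream.read(4096)
  if not block:
    break
  gzipped += block

result = tarfile.open(fileobj=StringIO(gzipped), mode='r:gz')
print result.getnames()   # base entry plus both appended entries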

util/tarlayerformat.py Normal file (+70 lines)
View file

@@ -0,0 +1,70 @@
import os
import tarfile
class TarLayerFormat(object):
""" Class which creates a generator of the combined TAR data. """
def __init__(self, tar_iterator):
self.tar_iterator = tar_iterator
def get_generator(self):
for current_tar in self.tar_iterator():
# Read the current TAR. If it is empty, we just continue
# to the next one.
try:
tar_file = tarfile.open(mode='r|*', fileobj=current_tar)
except tarfile.ReadError:
continue
# For each of the tar entries, yield them IF and ONLY IF we have not
# encountered the path before.
# 9MB (+ padding below) so that it matches the 10MB expected by Gzip.
chunk_size = 1024 * 1024 * 9
for tar_info in tar_file:
if not self.check_tar_info(tar_info):
continue
# Yield the tar header.
yield tar_info.tobuf()
# Try to extract any file contents for the tar. If found, we yield them as well.
if tar_info.isreg():
file_stream = tar_file.extractfile(tar_info)
if file_stream is not None:
length = 0
while True:
current_block = file_stream.read(chunk_size)
if not len(current_block):
break
yield current_block
length += len(current_block)
file_stream.close()
# Files must be padded to 512-byte multiples.
if length % 512 != 0:
yield '\0' * (512 - (length % 512))
# Close the layer stream now that we're done with it.
tar_file.close()
# Conduct any post-tar work.
self.after_tar_layer(current_tar)
# Last two records are empty in TAR spec.
yield '\0' * 512
yield '\0' * 512
def check_tar_info(self, tar_info):
""" Returns true if the current tar_info should be added to the combined tar. False
otherwise.
"""
raise NotImplementedError()
def after_tar_layer(self, current_tar):
""" Invoked after a TAR layer is added, to do any post-add work. """
raise NotImplementedError()
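
The two NotImplementedError hooks above are the whole extension surface: StreamLayerMerger overrides them to filter duplicate paths, and TarfileAppender to record the last entry seen. A do-nothing subclass (illustrative only, not in the commit) shows the minimal contract:

import tarfile
from cStringIO import StringIO
from util.tarlayerformat import TarLayerFormat

class PassthroughTar(TarLayerFormat):
  """ Emits every entry of every input tar unchanged. """
  def check_tar_info(self, tar_info):
    return True             # keep every entry

  def after_tar_layer(self, current_tar):
    pass                    # no per-tar bookkeeping

def tars():
  buf = StringIO()
  with tarfile.open(fileobj=buf, mode='w') as tar:
    info = tarfile.TarInfo('hello.txt')
    info.size = 3
    tar.addfile(info, StringIO('hi!'))
  buf.seek(0)
  yield buf

data = ''.join(PassthroughTar(tars).get_generator())
# 512-byte header + contents padded to 512 + two empty 512-byte end records.
assert len(data) % 512 == 0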

View file

@@ -38,8 +38,7 @@ TIMEOUT_PERIOD_MINUTES = 20
CACHE_EXPIRATION_PERIOD_HOURS = 24
NO_TAGS = ['<none>:<none>']
RESERVATION_TIME = (TIMEOUT_PERIOD_MINUTES + 5) * 60
DOCKER_BASE_URL = None # Set this if you want to use a different docker URL/socket.
DOCKER_BASE_URL = os.environ.get('DOCKER_HOST', None)
def matches_system_error(status_str):
""" Returns true if the given status string matches a known system error in the
@@ -521,8 +520,12 @@ class DockerfileBuildWorker(Worker):
repository_build.uuid)
# Lookup and save the version of docker being used.
try:
docker_cl = Client(base_url = DOCKER_BASE_URL)
docker_version = docker_cl.version().get('Version', '')
except ConnectionError as exc:
raise WorkerUnhealthyException(exc.message)
dash = docker_version.find('-')
# Strip any -tutum or whatever off of the version.
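
With the DOCKER_BASE_URL change above, the worker now honors the standard DOCKER_HOST environment variable rather than a hard-coded value. A sketch of the resulting connection logic, assuming the docker-py Client API of this era:

import os
from docker import Client

base_url = os.environ.get('DOCKER_HOST', None)  # e.g. 'tcp://127.0.0.1:4243'
client = Client(base_url=base_url)              # None falls back to the local unix socket
print client.version().get('Version', '')       # e.g. '1.2.0'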