Merge remote-tracking branch 'origin/master' into nomenclature

Conflicts:
	test/data/test.db
	workers/dockerfilebuild.py
Jake Moshenko 2014-10-06 10:29:39 -04:00
commit ed8bcff39e
26 changed files with 138 additions and 134 deletions

View file

@@ -13,10 +13,5 @@ http {
   include server-base.conf;
   listen 80 default;

-  location /static/ {
-    # checks for static file, if not found proxy to app
-    alias /static/;
-  }
-
   }
 }

View file

@@ -23,10 +23,5 @@ http {
   ssl_protocols SSLv3 TLSv1;
   ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
   ssl_prefer_server_ciphers on;
-  location /static/ {
-    # checks for static file, if not found proxy to app
-    alias /static/;
-  }
-
   }
 }

View file

@@ -25,3 +25,15 @@ location / {
   proxy_read_timeout 2000;
   proxy_temp_path /var/log/nginx/proxy_temp 1 2;
 }
+
+location /static/ {
+  # checks for static file, if not found proxy to app
+  alias /static/;
+}
+
+location /v1/_ping {
+  add_header Content-Type text/plain;
+  add_header X-Docker-Registry-Version 0.6.0;
+  add_header X-Docker-Registry-Standalone 0;
+  return 200 'okay';
+}
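
Note: the new /v1/_ping block lets nginx answer the Docker client's ping itself, with the same headers the Python ping() handler sets later in this diff. A quick way to confirm the two stay in sync is a script along these lines; the requests dependency and the localhost URL are assumptions for illustration, not part of this change.

import requests  # assumed available; the host below is a placeholder for a real deployment

resp = requests.get('http://localhost/v1/_ping')
print(resp.status_code)                                  # expect 200
print(resp.headers.get('X-Docker-Registry-Version'))     # expect '0.6.0'
print(resp.headers.get('X-Docker-Registry-Standalone'))  # expect '0'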

View file

@@ -165,6 +165,9 @@ class DefaultConfig(object):
   # Feature Flag: Whether emails are enabled.
   FEATURE_MAILING = True

+  # Feature Flag: Whether users can be created (by non-super users).
+  FEATURE_USER_CREATION = True
+
   DISTRIBUTED_STORAGE_CONFIG = {
     'local_eu': ['LocalStorage', {'storage_path': 'test/data/registry/eu'}],
     'local_us': ['LocalStorage', {'storage_path': 'test/data/registry/us'}],
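
Note: this FEATURE_USER_CREATION flag is what the features.USER_CREATION checks and the quay-show="Features.USER_CREATION" template attributes later in this diff key off. A minimal sketch of how a deployment might flip it; the class names here are hypothetical and not part of this change:

class DefaultConfig(object):
    # Same default as the flag added above.
    FEATURE_USER_CREATION = True

class RestrictedConfig(DefaultConfig):
    # A deployment that disallows self-service signup would override the flag
    # in its own configuration layer; this subclass only illustrates the idea.
    FEATURE_USER_CREATION = False

print(RestrictedConfig.FEATURE_USER_CREATION)  # False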

View file

@@ -1,14 +1,14 @@
 """Translate the queue names to reference namespace by id, remove the namespace column.

 Revision ID: 2fb36d4be80d
-Revises: 3f4fe1194671
+Revises: 9a1087b007d
 Create Date: 2014-09-30 17:31:33.308490

 """

 # revision identifiers, used by Alembic.
 revision = '2fb36d4be80d'
-down_revision = '3f4fe1194671'
+down_revision = '9a1087b007d'

 from alembic import op
 import sqlalchemy as sa

View file

@@ -22,4 +22,5 @@ def upgrade(tables):

 def downgrade(tables):
+  op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey')
   op.drop_index('repository_namespace_user_id_name', table_name='repository')

View file

@@ -74,8 +74,5 @@ def downgrade(tables):
              .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
   )

-  op.drop_index('teammemberinvite_user_id', table_name='teammemberinvite')
-  op.drop_index('teammemberinvite_team_id', table_name='teammemberinvite')
-  op.drop_index('teammemberinvite_inviter_id', table_name='teammemberinvite')
   op.drop_table('teammemberinvite')
   ### end Alembic commands ###

View file

@@ -0,0 +1,28 @@
+"""Allow the namespace column to be nullable.
+
+Revision ID: 9a1087b007d
+Revises: 3f4fe1194671
+Create Date: 2014-10-01 16:11:21.277226
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '9a1087b007d'
+down_revision = '3f4fe1194671'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  op.drop_index('repository_namespace_name', table_name='repository')
+  op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255),
+                  server_default=sa.text('NULL'))
+
+
+def downgrade(tables):
+  conn = op.get_bind()
+  conn.execute('update repository set namespace = (select username from user where user.id = repository.namespace_user_id) where namespace is NULL')
+
+  op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
+  op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))

View file

@@ -1239,8 +1239,7 @@ def get_storage_by_uuid(storage_uuid):
   return found


-def set_image_size(docker_image_id, namespace_name, repository_name,
-                   image_size):
+def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
   try:
     image = (Image
              .select(Image, ImageStorage)
@@ -1249,18 +1248,15 @@ def set_image_size(docker_image_id, namespace_name, repository_name,
              .switch(Image)
              .join(ImageStorage, JOIN_LEFT_OUTER)
              .where(Repository.name == repository_name, Namespace.username == namespace_name,
                     Image.docker_image_id == docker_image_id)
              .get())
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')

-  if image.storage and image.storage.id:
-    image.storage.image_size = image_size
-    image.storage.save()
-  else:
-    image.image_size = image_size
-    image.save()
+  image.storage.image_size = image_size
+  image.storage.uncompressed_size = uncompressed_size
+  image.storage.save()

   return image

View file

@@ -17,7 +17,12 @@ OPTION_TRANSLATIONS = {

 def gen_sqlalchemy_metadata(peewee_model_list):
-  metadata = MetaData()
+  metadata = MetaData(naming_convention={
+    "ix": 'ix_%(column_0_label)s',
+    "uq": "uq_%(table_name)s_%(column_0_name)s",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_%(table_name)s"
+  })

   for model in peewee_model_list:
     meta = model._meta
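
Note: deterministic constraint names are what let the migrations above drop constraints by name (e.g. 'fk_repository_namespace_user_id_user'). A self-contained SQLAlchemy sketch, separate from the Quay schema code, showing what this convention produces:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, create_engine
from sqlalchemy.schema import CreateTable

# Same naming convention as above; the table and column names mirror the migration.
metadata = MetaData(naming_convention={
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
})

user = Table('user', metadata, Column('id', Integer, primary_key=True))
repository = Table('repository', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('namespace_user_id', Integer, ForeignKey('user.id')))

# The generated DDL names the foreign key 'fk_repository_namespace_user_id_user',
# which is the name the downgrade earlier in this diff drops explicitly.
print(CreateTable(repository).compile(create_engine('sqlite://')))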

View file

@@ -27,8 +27,8 @@ api_bp = Blueprint('api', __name__)
 api = Api()
 api.init_app(api_bp)
 api.decorators = [csrf_protect,
-                  process_oauth,
-                  crossdomain(origin='*', headers=['Authorization', 'Content-Type'])]
+                  crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
+                  process_oauth]


 class ApiException(Exception):
@@ -90,6 +90,7 @@ def handle_api_error(error):
   if error.error_type is not None:
     response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' %
                                             (error.error_type, error.error_description))
+
   return response
@@ -191,6 +192,7 @@ def query_param(name, help_str, type=reqparse.text_type, default=None,
       'default': default,
       'choices': choices,
       'required': required,
+      'location': ('args')
     })
     return func
   return add_param
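
Note: reordering api.decorators changes which wrapper ends up outermost once the list is applied to each resource. A framework-free sketch (not flask-restful itself) of why the order of such a list matters:

def tag(name):
    # Build a decorator that records its name when the wrapped function runs.
    def decorator(fn):
        def wrapper():
            return [name] + fn()
        return wrapper
    return decorator

def apply_all(fn, decorators):
    # Fold the list front to back, as a framework applying a decorator list
    # typically does; the last entry becomes the outermost wrapper.
    for decorator in decorators:
        fn = decorator(fn)
    return fn

handler = apply_all(lambda: ['handler'], [tag('csrf'), tag('crossdomain'), tag('oauth')])
print(handler())  # ['oauth', 'crossdomain', 'csrf', 'handler']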

View file

@@ -120,6 +120,10 @@ class User(ApiResource):
         'type': 'string',
         'description': 'The user\'s email address',
       },
+      'invite_code': {
+        'type': 'string',
+        'description': 'The optional invite code'
+      }
     }
   },
   'UpdateUser': {
@@ -207,16 +211,14 @@ class User(ApiResource):
     return user_view(user)

+  @show_if(features.USER_CREATION)
   @nickname('createNewUser')
-  @parse_args
-  @query_param('inviteCode', 'Invitation code given for creating the user.', type=str,
-               default='')
   @internal_only
   @validate_json_request('NewUser')
-  def post(self, args):
+  def post(self):
     """ Create a new user. """
     user_data = request.get_json()

-    invite_code = args['inviteCode']
+    invite_code = user_data.get('invite_code', '')

     existing_user = model.get_user(user_data['username'])
     if existing_user:
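
Note: the invite code now travels in the JSON body validated by the 'NewUser' schema instead of an inviteCode query parameter. A hedged sketch of the new call shape; the host and API path here are assumptions for illustration, not taken from this diff:

import requests  # sketch only

resp = requests.post('http://localhost/api/v1/user/', json={
    'username': 'newuser',
    'password': 'a-strong-password',
    'email': 'newuser@example.com',
    'invite_code': 'abc123',  # optional; omit when no invitation was issued
})
print(resp.status_code)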

View file

@@ -26,7 +26,8 @@ def render_ologin_error(service_name,
                         error_message='Could not load user data. The token may have expired.'):
   return render_page_template('ologinerror.html', service_name=service_name,
                               error_message=error_message,
-                              service_url=get_app_url())
+                              service_url=get_app_url(),
+                              user_creation=features.USER_CREATION)


 def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
                             redirect_suffix=''):
@@ -85,7 +86,12 @@ def get_google_user(token):
 def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
   to_login = model.verify_federated_login(service_name.lower(), user_id)
   if not to_login:
-    # try to create the user
+    # See if we can create a new user.
+    if not features.USER_CREATION:
+      error_message = 'User creation is disabled. Please contact your administrator'
+      return render_ologin_error(service_name, error_message)
+
+    # Try to create the user
     try:
       valid = next(generate_valid_usernames(username))
       to_login = model.create_federated_user(valid, email, service_name.lower(),
@@ -147,7 +153,7 @@ def github_oauth_callback():
   token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
   user_data = get_github_user(token)
-  if not user_data:
+  if not user_data or not 'login' in user_data:
     return render_ologin_error('GitHub')

   username = user_data['login']

View file

@@ -19,6 +19,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
 from util.http import abort
 from endpoints.notificationhelper import spawn_notification

+import features

 logger = logging.getLogger(__name__)
 profile = logging.getLogger('application.profiler')
@@ -65,6 +66,9 @@ def generate_headers(role='read'):
 @index.route('/users', methods=['POST'])
 @index.route('/users/', methods=['POST'])
 def create_user():
+  if not features.USER_CREATION:
+    abort(400, 'User creation is disabled. Please speak to your administrator.')
+
   user_data = request.get_json()
   if not 'username' in user_data:
     abort(400, 'Missing username')
@@ -454,6 +458,7 @@ def get_search():
 @index.route('/_ping')
 @index.route('/_ping')
 def ping():
+  # NOTE: any changes made here must also be reflected in the nginx config
   response = make_response('true', 200)
   response.headers['X-Docker-Registry-Version'] = '0.6.0'
   response.headers['X-Docker-Registry-Standalone'] = '0'

View file

@@ -220,7 +220,7 @@ def put_image_layer(namespace, repository, image_id):
       image_size = tmp.tell()

       # Save the size of the image.
-      model.set_image_size(image_id, namespace, repository, image_size)
+      model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size)

       tmp.seek(0)
       csums.append(checksums.compute_tarsum(tmp, json_data))
@@ -229,12 +229,6 @@ def put_image_layer(namespace, repository, image_id):
     logger.debug('put_image_layer: Error when computing tarsum '
                  '{0}'.format(e))

-  # Write the uncompressed image size, if any.
-  if uncompressed_size_info['size'] > 0:
-    profile.debug('Storing uncompressed layer size: %s' % uncompressed_size_info['size'])
-    repo_image.storage.uncompressed_size = uncompressed_size_info['size']
-    repo_image.storage.save()
-
   if repo_image.storage.checksum is None:
     # We don't have a checksum stored yet, that's fine skipping the check.
     # Not removing the mark though, image is not downloadable yet.

View file

@@ -377,7 +377,7 @@ class GithubBuildTrigger(BuildTrigger):
     gh_client = self._get_client(auth_token)
     source = config['build_source']
     repo = gh_client.get_repo(source)
-    branches = [branch['name'] for branch in repo.get_branches()]
+    branches = [branch.name for branch in repo.get_branches()]

     if not repo.default_branch in branches:
       branches.insert(0, repo.default_branch)
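
Note: the comprehension switches from branch['name'] to branch.name because PyGithub's get_branches() yields Branch objects rather than dicts. A standalone sketch of the same pattern; the token and repository name are placeholders:

from github import Github  # PyGithub

gh_client = Github('<oauth-token>')                     # placeholder credentials
repo = gh_client.get_repo('example-org/example-repo')   # placeholder repository

# Branch objects expose .name; subscripting them like dicts would raise TypeError.
branches = [branch.name for branch in repo.get_branches()]
if repo.default_branch not in branches:
    branches.insert(0, repo.default_branch)
print(branches)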

View file

@@ -82,8 +82,9 @@ def __create_subtree(repo, structure, creator_username, parent):
     new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
                                          str(creation_time), 'no comment', command, parent)

-    model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name,
-                         random.randrange(1, 1024 * 1024 * 1024))
+    compressed_size = random.randrange(1, 1024 * 1024 * 1024)
+    model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
+                         int(compressed_size * 1.4))

     # Populate the diff file
     diff_path = store.image_file_diffs_path(new_image.storage.uuid)

View file

@@ -1,4 +1,4 @@
-<div class="signup-form-element">
+<div class="signup-form-element" quay-show="Features.USER_CREATION">
   <form class="form-signup" name="signupForm" ng-submit="register()" ng-show="!awaitingConfirmation && !registering">
     <input type="text" class="form-control" placeholder="Create a username" name="username" ng-model="newUser.username" autofocus required ng-pattern="/^[a-z0-9_]{4,30}$/">
     <input type="email" class="form-control" placeholder="Email address" ng-model="newUser.email" required>

View file

@@ -14,7 +14,7 @@
       </div>
     </div>
   </div>
-  <div class="panel panel-default">
+  <div class="panel panel-default" quay-show="Features.USER_CREATION">
     <div class="panel-heading">
       <h6 class="panel-title accordion-title">
         <a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" data-target="#collapseRegister">

View file

@@ -2814,7 +2814,7 @@ quayApp.directive('signupForm', function () {
         $scope.registering = true;

         if ($scope.inviteCode) {
-          $scope.newUser['inviteCode'] = $scope.inviteCode;
+          $scope.newUser['invite_code'] = $scope.inviteCode;
         }

         ApiService.createNewUser($scope.newUser).then(function(resp) {

View file

@@ -8,17 +8,19 @@
   <div class="container">
     <div class="row">
       <div class="col-md-12">
-        <h2>There was an error logging in with {{ service_name }}.</h2>
+        <h2 style="margin-bottom: 20px;">There was an error logging in with {{ service_name }}.</h2>

         {% if error_message %}
           <div class="alert alert-danger">{{ error_message }}</div>
         {% endif %}

+        {% if user_creation %}
         <div>
           Please register using the <a ng-href="{{ service_url }}/signin" target="_self">registration form</a> to continue.
           You will be able to connect your account to your Quay.io account
           in the user settings.
         </div>
+        {% endif %}
       </div>
     </div>

Binary file not shown.

View file

@@ -173,7 +173,7 @@ class ApiTestCase(unittest.TestCase):
       if memberData['name'] == membername:
         return

-    self.fail(membername + ' not found in team: ' + json.dumps(data))
+    self.fail(membername + ' not found in team: ' + py_json.dumps(data))

   def login(self, username, password='password'):
     return self.postJsonResponse(Signin, data=dict(username=username, password=password))
@@ -405,7 +405,7 @@ class TestCreateNewUser(ApiTestCase):
     invite = model.add_or_invite_to_team(inviter, team, None, 'foo@example.com')

     details = {
-      'inviteCode': invite.invite_token
+      'invite_code': invite.invite_token
     }
     details.update(NEW_USER_DETAILS);

View file

@@ -1,101 +1,60 @@
-import json
 import logging
+import zlib

 from data import model
 from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db
-from gzip import GzipFile
-from tempfile import SpooledTemporaryFile
+from util.gzipstream import ZLIB_GZIP_WINDOW

 logger = logging.getLogger(__name__)

-def backfill_sizes_from_json():
-  query = (ImageStorage
-           .select()
-           .where(ImageStorage.uncompressed_size == None, ImageStorage.uploading == False)
-           .limit(100))
-
-  total = 0
-  missing = 0
-  batch_processed = 1
-
-  while batch_processed > 0:
-    batch_processed = 0
-    with app.config['DB_TRANSACTION_FACTORY'](db):
-      for image_storage in query.clone():
-        total += 1
-        batch_processed += 1
-
-        if (total - 1) % 100 == 0:
-          logger.debug('Storing entry: %s', total)
-
-        # Lookup the JSON for the image.
-        uuid = image_storage.uuid
-        with_locations = model.get_storage_by_uuid(uuid)
-
-        try:
-          json_string = store.get_content(with_locations.locations, store.image_json_path(uuid))
-          json_data = json.loads(json_string)
-          size = json_data.get('Size', json_data.get('size', -1))
-        except IOError:
-          logger.debug('Image storage with no json %s', uuid)
-          size = -1
-
-        if size == -1:
-          missing += 1
-          logger.debug('Missing entry %s (%s/%s)', uuid, missing, total)
-
-        image_storage.uncompressed_size = size
-        image_storage.save()
+CHUNK_SIZE = 512 * 1024 * 1024


 def backfill_sizes_from_data():
-  storage_ids = list(ImageStorage
-                     .select(ImageStorage.uuid)
-                     .where(ImageStorage.uncompressed_size == -1, ImageStorage.uploading == False))
+  while True:
+    # Load the record from the DB.
+    try:
+      record = (ImageStorage
+                .select(ImageStorage.uuid)
+                .where(ImageStorage.uncompressed_size >> None, ImageStorage.uploading == False)
+                .get())
+    except ImageStorage.DoesNotExist:
+      # We're done!
+      return

-  counter = 0
-  for uuid in [s.uuid for s in storage_ids]:
-    counter += 1
-
-    # Load the storage with locations.
-    logger.debug('Loading entry: %s (%s/%s)', uuid, counter, len(storage_ids))
+    uuid = record.uuid
     with_locations = model.get_storage_by_uuid(uuid)
-    layer_size = -2

     # Read the layer from backing storage and calculate the uncompressed size.
-    try:
-      logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
-      CHUNK_SIZE = 512 * 1024
-      with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
-        layer_data = store.get_content(with_locations.locations, store.image_layer_path(uuid))
-        tarball.write(layer_data)
-        tarball.seek(0)
-
-        with GzipFile(fileobj=tarball, mode='rb') as gzip_file:
-          gzip_file.read()
-          layer_size = gzip_file.size
-
-    except Exception as ex:
-      logger.debug('Could not gunzip entry: %s. Reason: %s', uuid, ex)
-      continue
+    logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
+    decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
+
+    uncompressed_size = 0
+    with store.stream_read_file(with_locations.locations, store.image_layer_path(uuid)) as stream:
+      while True:
+        current_data = stream.read(CHUNK_SIZE)
+        if len(current_data) == 0:
+          break
+
+        uncompressed_size += len(decompressor.decompress(current_data))

     # Write the size to the image storage. We do so under a transaction AFTER checking to
     # make sure the image storage still exists and has not changed.
-    logger.debug('Writing entry: %s. Size: %s', uuid, layer_size)
+    logger.debug('Writing entry: %s. Size: %s', uuid, uncompressed_size)
     with app.config['DB_TRANSACTION_FACTORY'](db):
       try:
         current_record = model.get_storage_by_uuid(uuid)
-      except:
-        # Record no longer exists.
+      except model.InvalidImageException:
+        logger.warning('Storage with uuid no longer exists: %s', uuid)
        continue

-      if not current_record.uploading and current_record.uncompressed_size == -1:
-        current_record.uncompressed_size = layer_size
+      if not current_record.uploading and current_record.uncompressed_size == None:
+        current_record.uncompressed_size = uncompressed_size
         current_record.save()
@@ -103,5 +62,4 @@ if __name__ == "__main__":
   logging.basicConfig(level=logging.DEBUG)
   logging.getLogger('boto').setLevel(logging.CRITICAL)

-  backfill_sizes_from_json()
   backfill_sizes_from_data()
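
Note: the rewritten backfill no longer spools the whole layer into a temporary file; it streams chunks through a zlib decompress object opened with the gzip-aware window bits and simply counts the output. A self-contained sketch of that size-counting technique, independent of the Quay storage layer:

import gzip
import io
import zlib

# MAX_WBITS | 32 makes zlib auto-detect the gzip header (the constant the
# util.gzipstream module exposes as ZLIB_GZIP_WINDOW).
ZLIB_GZIP_WINDOW = zlib.MAX_WBITS | 32
CHUNK_SIZE = 64 * 1024  # small chunk size, just for the example

payload = b'x' * 1000000
compressed_stream = io.BytesIO(gzip.compress(payload))  # stand-in for the storage stream

decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
uncompressed_size = 0
while True:
    chunk = compressed_stream.read(CHUNK_SIZE)
    if not chunk:
        break
    # Count the decompressed bytes without keeping them in memory.
    uncompressed_size += len(decompressor.decompress(chunk))

print(uncompressed_size)  # 1000000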

View file

@@ -9,17 +9,20 @@ import zlib
 # http://stackoverflow.com/questions/3122145/zlib-error-error-3-while-decompressing-incorrect-header-check/22310760#22310760
 ZLIB_GZIP_WINDOW = zlib.MAX_WBITS | 32

+class SizeInfo(object):
+  def __init__(self):
+    self.size = 0
+
 def calculate_size_handler():
   """ Returns an object and a SocketReader handler. The handler will gunzip the data it receives,
       adding the size found to the object.
   """
-  uncompressed_size_info = {
-    'size': 0
-  }
+  size_info = SizeInfo()

   decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)

   def fn(buf):
-    uncompressed_size_info['size'] += len(decompressor.decompress(buf))
+    size_info.size += len(decompressor.decompress(buf))

-  return uncompressed_size_info, fn
+  return size_info, fn
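
Note: swapping the accumulator dict for a SizeInfo object keeps the closure simple; on Python 2 the nested fn cannot rebind an outer integer (there is no nonlocal), but it can mutate an attribute. A quick illustration of the pattern outside the registry code:

class SizeInfo(object):
    def __init__(self):
        self.size = 0

def make_counter():
    # The closure mutates the object's attribute instead of rebinding a local,
    # which is why a bare int would not work here on Python 2.
    size_info = SizeInfo()

    def add(num_bytes):
        size_info.size += num_bytes

    return size_info, add

size_info, add = make_counter()
add(10)
add(32)
print(size_info.size)  # 42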

View file

@@ -223,7 +223,6 @@ class DockerfileBuildContext(object):
       raise RuntimeError(message)

   def pull(self):
-    # Pull the image, in case it was updated since the last build
     image_and_tag_tuple = self._parsed_dockerfile.get_image_and_tag()
     if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
       self._build_logger('Missing FROM command in Dockerfile', build_logs.ERROR)