Merge remote-tracking branch 'origin/master' into nomenclature

Conflicts:
    test/data/test.db
    workers/dockerfilebuild.py

commit ed8bcff39e: 26 changed files with 138 additions and 134 deletions
@@ -13,10 +13,5 @@ http {
     include server-base.conf;
 
     listen 80 default;
-
-    location /static/ {
-        # checks for static file, if not found proxy to app
-        alias /static/;
-    }
     }
 }

@@ -23,10 +23,5 @@ http {
     ssl_protocols SSLv3 TLSv1;
     ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
     ssl_prefer_server_ciphers on;
-
-    location /static/ {
-        # checks for static file, if not found proxy to app
-        alias /static/;
-    }
     }
 }

@@ -25,3 +25,15 @@ location / {
   proxy_read_timeout 2000;
   proxy_temp_path /var/log/nginx/proxy_temp 1 2;
 }
+
+location /static/ {
+  # checks for static file, if not found proxy to app
+  alias /static/;
+}
+
+location /v1/_ping {
+  add_header Content-Type text/plain;
+  add_header X-Docker-Registry-Version 0.6.0;
+  add_header X-Docker-Registry-Standalone 0;
+  return 200 'okay';
+}

@@ -165,6 +165,9 @@ class DefaultConfig(object):
   # Feature Flag: Whether emails are enabled.
   FEATURE_MAILING = True
 
+  # Feature Flag: Whether users can be created (by non-super users).
+  FEATURE_USER_CREATION = True
+
   DISTRIBUTED_STORAGE_CONFIG = {
     'local_eu': ['LocalStorage', {'storage_path': 'test/data/registry/eu'}],
     'local_us': ['LocalStorage', {'storage_path': 'test/data/registry/us'}],

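Note: FEATURE_USER_CREATION is what later hunks consult as features.USER_CREATION. A minimal sketch of how FEATURE_* config keys could be surfaced as module-level flags; the repo's actual features module may differ:

class _FeatureFlags(object):
  pass

def import_features(config):
  # Export every FEATURE_* key as an attribute without the prefix,
  # e.g. FEATURE_USER_CREATION -> flags.USER_CREATION.
  flags = _FeatureFlags()
  for key, value in config.items():
    if key.startswith('FEATURE_'):
      setattr(flags, key[len('FEATURE_'):], value)
  return flags

features = import_features({'FEATURE_USER_CREATION': True, 'FEATURE_MAILING': True})
assert features.USER_CREATION is True
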
@@ -1,14 +1,14 @@
 """Translate the queue names to reference namespace by id, remove the namespace column.
 
 Revision ID: 2fb36d4be80d
-Revises: 3f4fe1194671
+Revises: 9a1087b007d
 Create Date: 2014-09-30 17:31:33.308490
 
 """
 
 # revision identifiers, used by Alembic.
 revision = '2fb36d4be80d'
-down_revision = '3f4fe1194671'
+down_revision = '9a1087b007d'
 
 from alembic import op
 import sqlalchemy as sa

@@ -22,4 +22,5 @@ def upgrade(tables):
 
 
 def downgrade(tables):
+  op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey')
   op.drop_index('repository_namespace_user_id_name', table_name='repository')

@@ -74,8 +74,5 @@ def downgrade(tables):
           .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
   )
 
-  op.drop_index('teammemberinvite_user_id', table_name='teammemberinvite')
-  op.drop_index('teammemberinvite_team_id', table_name='teammemberinvite')
-  op.drop_index('teammemberinvite_inviter_id', table_name='teammemberinvite')
   op.drop_table('teammemberinvite')
   ### end Alembic commands ###

@@ -0,0 +1,28 @@
+"""Allow the namespace column to be nullable.
+
+Revision ID: 9a1087b007d
+Revises: 3f4fe1194671
+Create Date: 2014-10-01 16:11:21.277226
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '9a1087b007d'
+down_revision = '3f4fe1194671'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  op.drop_index('repository_namespace_name', table_name='repository')
+  op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255),
+                  server_default=sa.text('NULL'))
+
+
+def downgrade(tables):
+  conn = op.get_bind()
+  conn.execute('update repository set namespace = (select username from user where user.id = repository.namespace_user_id) where namespace is NULL')
+
+  op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
+  op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))

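Note: the down_revision retarget above splices the new migration 9a1087b007d between 3f4fe1194671 and 2fb36d4be80d, keeping the Alembic history linear. A sketch for verifying the resulting chain, assuming the scripts live in a local migrations directory (adjust the path for this repo's layout):

from alembic.script import ScriptDirectory

scripts = ScriptDirectory('migrations')  # hypothetical path to the alembic scripts
for script in scripts.walk_revisions():
  print('%s -> %s' % (script.down_revision, script.revision))
# Expected to include:
#   3f4fe1194671 -> 9a1087b007d
#   9a1087b007d -> 2fb36d4be80d
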
@@ -1239,8 +1239,7 @@ def get_storage_by_uuid(storage_uuid):
   return found
 
 
-def set_image_size(docker_image_id, namespace_name, repository_name,
-                   image_size):
+def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
   try:
     image = (Image
              .select(Image, ImageStorage)
@@ -1249,18 +1248,15 @@ def set_image_size(docker_image_id, namespace_name, repository_name,
              .switch(Image)
              .join(ImageStorage, JOIN_LEFT_OUTER)
              .where(Repository.name == repository_name, Namespace.username == namespace_name,
-                    Image.docker_image_id == docker_image_id)
+                   Image.docker_image_id == docker_image_id)
              .get())
 
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')
 
-  if image.storage and image.storage.id:
-    image.storage.image_size = image_size
-    image.storage.save()
-  else:
-    image.image_size = image_size
-    image.save()
+  image.storage.image_size = image_size
+  image.storage.uncompressed_size = uncompressed_size
+  image.storage.save()
 
   return image
 

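Note: with the if/else gone, both sizes are always written to the shared ImageStorage row rather than sometimes landing on the Image itself. A hypothetical call against the new signature (identifiers and sizes are made up; model is this repo's data.model):

from data import model

image = model.set_image_size('0123456789ab', 'devtable', 'simple',
                             image_size=2048, uncompressed_size=4096)
assert image.storage.image_size == 2048
assert image.storage.uncompressed_size == 4096
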
@@ -17,7 +17,12 @@ OPTION_TRANSLATIONS = {
 
 
 def gen_sqlalchemy_metadata(peewee_model_list):
-  metadata = MetaData()
+  metadata = MetaData(naming_convention={
+    "ix": 'ix_%(column_0_label)s',
+    "uq": "uq_%(table_name)s_%(column_0_name)s",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_%(table_name)s"
+  })
 
   for model in peewee_model_list:
     meta = model._meta

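Note: a naming_convention makes constraint names deterministic, which is what lets the migrations above drop them by name (fk_repository_namespace_user_id_user follows the "fk" template). A standalone SQLAlchemy sketch of the mechanism:

from sqlalchemy import Column, Integer, MetaData, String, Table, UniqueConstraint, create_engine
from sqlalchemy.schema import CreateTable

metadata = MetaData(naming_convention={
  "uq": "uq_%(table_name)s_%(column_0_name)s",
  "pk": "pk_%(table_name)s",
})
repository = Table('repository', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('namespace', String(255)),
                   UniqueConstraint('namespace'))
# The DDL includes the generated name: CONSTRAINT uq_repository_namespace UNIQUE (namespace)
print(CreateTable(repository).compile(create_engine('sqlite://')))
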
@@ -27,8 +27,8 @@ api_bp = Blueprint('api', __name__)
 api = Api()
 api.init_app(api_bp)
 api.decorators = [csrf_protect,
-                  process_oauth,
-                  crossdomain(origin='*', headers=['Authorization', 'Content-Type'])]
+                  crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
+                  process_oauth]
 
 
 class ApiException(Exception):

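Note: Flask-RESTful wraps each resource view with Api.decorators in list order, so the last entry ends up outermost and runs first on a request (worth confirming against the installed version). Moving process_oauth to the end means OAuth processing now happens before the CSRF and CORS wrappers. A toy model of that wrapping order:

def tag(name):
  def deco(fn):
    def wrapped(*args, **kwargs):
      print('enter %s' % name)
      return fn(*args, **kwargs)
    return wrapped
  return deco

view = lambda: 'ok'
for deco in [tag('csrf_protect'), tag('crossdomain'), tag('process_oauth')]:
  view = deco(view)  # same loop shape Flask-RESTful uses

view()  # prints: enter process_oauth, enter crossdomain, enter csrf_protect
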
@@ -90,6 +90,7 @@ def handle_api_error(error):
   if error.error_type is not None:
     response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' %
                                             (error.error_type, error.error_description))
+
   return response
 
 
@@ -191,6 +192,7 @@ def query_param(name, help_str, type=reqparse.text_type, default=None,
       'default': default,
       'choices': choices,
       'required': required,
+      'location': ('args')
     })
     return func
   return add_param

@@ -120,6 +120,10 @@ class User(ApiResource):
         'type': 'string',
         'description': 'The user\'s email address',
       },
+      'invite_code': {
+        'type': 'string',
+        'description': 'The optional invite code'
+      }
     }
   },
   'UpdateUser': {

@@ -207,16 +211,14 @@ class User(ApiResource):
 
     return user_view(user)
 
+  @show_if(features.USER_CREATION)
   @nickname('createNewUser')
-  @parse_args
-  @query_param('inviteCode', 'Invitation code given for creating the user.', type=str,
-               default='')
   @internal_only
   @validate_json_request('NewUser')
-  def post(self, args):
+  def post(self):
     """ Create a new user. """
     user_data = request.get_json()
-    invite_code = args['inviteCode']
+    invite_code = user_data.get('invite_code', '')
 
     existing_user = model.get_user(user_data['username'])
     if existing_user:

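Note: the invite code moves out of the query string (?inviteCode=..., parsed via @parse_args/@query_param) and into the JSON body as snake_case invite_code, matching the NewUser schema addition above. A hypothetical client request (URL and values are placeholders):

import requests

resp = requests.post('https://quay.example/api/v1/user/',
                     json={'username': 'newuser',
                           'password': 'hunter22',
                           'email': 'new@example.com',
                           'invite_code': 'abc123'})
print(resp.status_code)
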
@@ -26,7 +26,8 @@ def render_ologin_error(service_name,
                         error_message='Could not load user data. The token may have expired.'):
   return render_page_template('ologinerror.html', service_name=service_name,
                               error_message=error_message,
-                              service_url=get_app_url())
+                              service_url=get_app_url(),
+                              user_creation=features.USER_CREATION)
 
 def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
                             redirect_suffix=''):

@@ -85,7 +86,12 @@ def get_google_user(token):
 def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
   to_login = model.verify_federated_login(service_name.lower(), user_id)
   if not to_login:
-    # try to create the user
+    # See if we can create a new user.
+    if not features.USER_CREATION:
+      error_message = 'User creation is disabled. Please contact your administrator'
+      return render_ologin_error(service_name, error_message)
+
+    # Try to create the user
     try:
       valid = next(generate_valid_usernames(username))
       to_login = model.create_federated_user(valid, email, service_name.lower(),

@@ -147,7 +153,7 @@ def github_oauth_callback():
 
   token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
   user_data = get_github_user(token)
-  if not user_data:
+  if not user_data or not 'login' in user_data:
     return render_ologin_error('GitHub')
 
   username = user_data['login']

@@ -19,6 +19,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
 from util.http import abort
 from endpoints.notificationhelper import spawn_notification
 
+import features
 
 logger = logging.getLogger(__name__)
 profile = logging.getLogger('application.profiler')

@@ -65,6 +66,9 @@ def generate_headers(role='read'):
 @index.route('/users', methods=['POST'])
 @index.route('/users/', methods=['POST'])
 def create_user():
+  if not features.USER_CREATION:
+    abort(400, 'User creation is disabled. Please speak to your administrator.')
+
   user_data = request.get_json()
   if not 'username' in user_data:
     abort(400, 'Missing username')

@@ -454,6 +458,7 @@ def get_search():
 @index.route('/_ping')
 @index.route('/_ping')
 def ping():
+  # NOTE: any changes made here must also be reflected in the nginx config
   response = make_response('true', 200)
   response.headers['X-Docker-Registry-Version'] = '0.6.0'
   response.headers['X-Docker-Registry-Standalone'] = '0'

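Note: per the comment just added, ping() must stay in sync with the nginx /v1/_ping block added earlier in this diff, which answers with the same registry headers without touching the app. A quick sanity-check sketch (host is hypothetical):

import requests

for path in ('/_ping', '/v1/_ping'):
  resp = requests.get('http://localhost' + path)
  print(path, resp.status_code,
        resp.headers.get('X-Docker-Registry-Version'),
        resp.headers.get('X-Docker-Registry-Standalone'))
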
@@ -220,7 +220,7 @@ def put_image_layer(namespace, repository, image_id):
   image_size = tmp.tell()
 
   # Save the size of the image.
-  model.set_image_size(image_id, namespace, repository, image_size)
+  model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size)
 
   tmp.seek(0)
   csums.append(checksums.compute_tarsum(tmp, json_data))

@@ -229,12 +229,6 @@ def put_image_layer(namespace, repository, image_id):
     logger.debug('put_image_layer: Error when computing tarsum '
                  '{0}'.format(e))
 
-  # Write the uncompressed image size, if any.
-  if uncompressed_size_info['size'] > 0:
-    profile.debug('Storing uncompressed layer size: %s' % uncompressed_size_info['size'])
-    repo_image.storage.uncompressed_size = uncompressed_size_info['size']
-    repo_image.storage.save()
-
   if repo_image.storage.checksum is None:
     # We don't have a checksum stored yet, that's fine skipping the check.
     # Not removing the mark though, image is not downloadable yet.

@@ -377,7 +377,7 @@ class GithubBuildTrigger(BuildTrigger):
     gh_client = self._get_client(auth_token)
     source = config['build_source']
     repo = gh_client.get_repo(source)
-    branches = [branch['name'] for branch in repo.get_branches()]
+    branches = [branch.name for branch in repo.get_branches()]
 
     if not repo.default_branch in branches:
       branches.insert(0, repo.default_branch)

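Note: PyGithub's get_branches() yields Branch objects, so attribute access (branch.name) replaces the dict indexing that would fail here. A minimal sketch (token and repo slug are placeholders):

from github import Github  # PyGithub

gh_client = Github('<oauth-token>')
repo = gh_client.get_repo('someorg/somerepo')
branches = [branch.name for branch in repo.get_branches()]
print(branches)
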
@@ -82,8 +82,9 @@ def __create_subtree(repo, structure, creator_username, parent):
   new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
                                        str(creation_time), 'no comment', command, parent)
 
-  model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name,
-                       random.randrange(1, 1024 * 1024 * 1024))
+  compressed_size = random.randrange(1, 1024 * 1024 * 1024)
+  model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
+                       int(compressed_size * 1.4))
 
   # Populate the diff file
   diff_path = store.image_file_diffs_path(new_image.storage.uuid)

@@ -1,4 +1,4 @@
-<div class="signup-form-element">
+<div class="signup-form-element" quay-show="Features.USER_CREATION">
   <form class="form-signup" name="signupForm" ng-submit="register()" ng-show="!awaitingConfirmation && !registering">
     <input type="text" class="form-control" placeholder="Create a username" name="username" ng-model="newUser.username" autofocus required ng-pattern="/^[a-z0-9_]{4,30}$/">
     <input type="email" class="form-control" placeholder="Email address" ng-model="newUser.email" required>

@@ -14,7 +14,7 @@
         </div>
       </div>
     </div>
-    <div class="panel panel-default">
+    <div class="panel panel-default" quay-show="Features.USER_CREATION">
       <div class="panel-heading">
         <h6 class="panel-title accordion-title">
           <a class="accordion-toggle" data-toggle="collapse" data-parent="#accordion" data-target="#collapseRegister">

@@ -2814,7 +2814,7 @@ quayApp.directive('signupForm', function () {
           $scope.registering = true;
 
           if ($scope.inviteCode) {
-            $scope.newUser['inviteCode'] = $scope.inviteCode;
+            $scope.newUser['invite_code'] = $scope.inviteCode;
           }
 
           ApiService.createNewUser($scope.newUser).then(function(resp) {

@@ -8,17 +8,19 @@
 <div class="container">
   <div class="row">
     <div class="col-md-12">
-      <h2>There was an error logging in with {{ service_name }}.</h2>
+      <h2 style="margin-bottom: 20px;">There was an error logging in with {{ service_name }}.</h2>
 
       {% if error_message %}
         <div class="alert alert-danger">{{ error_message }}</div>
       {% endif %}
 
+      {% if user_creation %}
       <div>
         Please register using the <a ng-href="{{ service_url }}/signin" target="_self">registration form</a> to continue.
         You will be able to connect your account to your Quay.io account
         in the user settings.
       </div>
+      {% endif %}
     </div>
   </div>
 

Binary file not shown.

@@ -173,7 +173,7 @@ class ApiTestCase(unittest.TestCase):
       if memberData['name'] == membername:
         return
 
-    self.fail(membername + ' not found in team: ' + json.dumps(data))
+    self.fail(membername + ' not found in team: ' + py_json.dumps(data))
 
   def login(self, username, password='password'):
     return self.postJsonResponse(Signin, data=dict(username=username, password=password))

@@ -405,7 +405,7 @@ class TestCreateNewUser(ApiTestCase):
     invite = model.add_or_invite_to_team(inviter, team, None, 'foo@example.com')
 
     details = {
-      'inviteCode': invite.invite_token
+      'invite_code': invite.invite_token
     }
     details.update(NEW_USER_DETAILS);
 

@@ -1,101 +1,60 @@
-import json
 import logging
+import zlib
 
 from data import model
 from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db
-from gzip import GzipFile
-from tempfile import SpooledTemporaryFile
+from util.gzipstream import ZLIB_GZIP_WINDOW
 
 
 logger = logging.getLogger(__name__)
 
 
-def backfill_sizes_from_json():
-  query = (ImageStorage
-           .select()
-           .where(ImageStorage.uncompressed_size == None, ImageStorage.uploading == False)
-           .limit(100))
-
-  total = 0
-  missing = 0
-  batch_processed = 1
-
-  while batch_processed > 0:
-    batch_processed = 0
-    with app.config['DB_TRANSACTION_FACTORY'](db):
-      for image_storage in query.clone():
-        total += 1
-        batch_processed += 1
-
-        if (total - 1) % 100 == 0:
-          logger.debug('Storing entry: %s', total)
-
-        # Lookup the JSON for the image.
-        uuid = image_storage.uuid
-        with_locations = model.get_storage_by_uuid(uuid)
-
-        try:
-          json_string = store.get_content(with_locations.locations, store.image_json_path(uuid))
-          json_data = json.loads(json_string)
-          size = json_data.get('Size', json_data.get('size', -1))
-        except IOError:
-          logger.debug('Image storage with no json %s', uuid)
-          size = -1
-
-        if size == -1:
-          missing += 1
-          logger.debug('Missing entry %s (%s/%s)', uuid, missing, total)
-
-        image_storage.uncompressed_size = size
-        image_storage.save()
+CHUNK_SIZE = 512 * 1024 * 1024
 
 
 def backfill_sizes_from_data():
-  storage_ids = list(ImageStorage
-                     .select(ImageStorage.uuid)
-                     .where(ImageStorage.uncompressed_size == -1, ImageStorage.uploading == False))
-
-  counter = 0
-  for uuid in [s.uuid for s in storage_ids]:
-    counter += 1
+  while True:
+    # Load the record from the DB.
+    try:
+      record = (ImageStorage
+                .select(ImageStorage.uuid)
+                .where(ImageStorage.uncompressed_size >> None, ImageStorage.uploading == False)
+                .get())
+    except ImageStorage.DoesNotExist:
+      # We're done!
+      return
+
+    uuid = record.uuid
 
     # Load the storage with locations.
-    logger.debug('Loading entry: %s (%s/%s)', uuid, counter, len(storage_ids))
     with_locations = model.get_storage_by_uuid(uuid)
-    layer_size = -2
 
     # Read the layer from backing storage and calculate the uncompressed size.
-    try:
-      logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
-      CHUNK_SIZE = 512 * 1024
-      with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
-        layer_data = store.get_content(with_locations.locations, store.image_layer_path(uuid))
-        tarball.write(layer_data)
-        tarball.seek(0)
-
-        with GzipFile(fileobj=tarball, mode='rb') as gzip_file:
-          gzip_file.read()
-          layer_size = gzip_file.size
-
-    except Exception as ex:
-      logger.debug('Could not gunzip entry: %s. Reason: %s', uuid, ex)
-      continue
+    logger.debug('Loading data: %s (%s bytes)', uuid, with_locations.image_size)
+    decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
+
+    uncompressed_size = 0
+    with store.stream_read_file(with_locations.locations, store.image_layer_path(uuid)) as stream:
+      while True:
+        current_data = stream.read(CHUNK_SIZE)
+        if len(current_data) == 0:
+          break
+
+        uncompressed_size += len(decompressor.decompress(current_data))
 
     # Write the size to the image storage. We do so under a transaction AFTER checking to
     # make sure the image storage still exists and has not changed.
-    logger.debug('Writing entry: %s. Size: %s', uuid, layer_size)
+    logger.debug('Writing entry: %s. Size: %s', uuid, uncompressed_size)
     with app.config['DB_TRANSACTION_FACTORY'](db):
       try:
         current_record = model.get_storage_by_uuid(uuid)
-      except:
-        # Record no longer exists.
+      except model.InvalidImageException:
+        logger.warning('Storage with uuid no longer exists: %s', uuid)
         continue
 
-      if not current_record.uploading and current_record.uncompressed_size == -1:
-        current_record.uncompressed_size = layer_size
+      if not current_record.uploading and current_record.uncompressed_size == None:
+        current_record.uncompressed_size = uncompressed_size
         current_record.save()

@@ -103,5 +62,4 @@ if __name__ == "__main__":
   logging.basicConfig(level=logging.DEBUG)
   logging.getLogger('boto').setLevel(logging.CRITICAL)
 
-  backfill_sizes_from_json()
   backfill_sizes_from_data()

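Note: the rewritten worker streams each layer through a zlib decompressor instead of spooling the whole blob to a temp file and re-reading it with GzipFile, so memory stays bounded by the chunk size. The core technique in isolation (zlib.MAX_WBITS | 32 tells zlib to auto-detect the gzip header):

import gzip
import zlib

def uncompressed_len(chunks):
  # Count uncompressed bytes without holding the whole payload in memory.
  decompressor = zlib.decompressobj(zlib.MAX_WBITS | 32)
  total = 0
  for chunk in chunks:
    total += len(decompressor.decompress(chunk))
  return total + len(decompressor.flush())

payload = b'x' * 100000
blob = gzip.compress(payload)
chunks = [blob[i:i + 4096] for i in range(0, len(blob), 4096)]
assert uncompressed_len(chunks) == len(payload)
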
@@ -9,17 +9,20 @@ import zlib
 # http://stackoverflow.com/questions/3122145/zlib-error-error-3-while-decompressing-incorrect-header-check/22310760#22310760
 ZLIB_GZIP_WINDOW = zlib.MAX_WBITS | 32
 
+class SizeInfo(object):
+  def __init__(self):
+    self.size = 0
+
+
 def calculate_size_handler():
   """ Returns an object and a SocketReader handler. The handler will gunzip the data it receives,
       adding the size found to the object.
   """
-  uncompressed_size_info = {
-    'size': 0
-  }
+  size_info = SizeInfo()
 
   decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
 
   def fn(buf):
-    uncompressed_size_info['size'] += len(decompressor.decompress(buf))
+    size_info.size += len(decompressor.decompress(buf))
 
-  return uncompressed_size_info, fn
+  return size_info, fn

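Note: replacing the mutable dict with a SizeInfo object gives callers attribute access, matching the uncompressed_size_info.size read in put_image_layer above. A usage sketch of the handler as defined in this hunk:

import gzip

from util.gzipstream import calculate_size_handler  # this repo's module

size_info, handler = calculate_size_handler()
blob = gzip.compress(b'hello world' * 1000)
for i in range(0, len(blob), 4096):
  handler(blob[i:i + 4096])
print(size_info.size)  # 11000
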
@@ -223,7 +223,6 @@ class DockerfileBuildContext(object):
       raise RuntimeError(message)
 
   def pull(self):
-    # Pull the image, in case it was updated since the last build
     image_and_tag_tuple = self._parsed_dockerfile.get_image_and_tag()
     if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
       self._build_logger('Missing FROM command in Dockerfile', build_logs.ERROR)