Strip whitespace from ALL the things.

Jimmy Zelinskie 2014-11-24 16:07:38 -05:00
parent f6dd8b0a4d
commit 716d7a737b
171 changed files with 807 additions and 807 deletions

@@ -11,7 +11,7 @@ def files_and_dirs_from_tar(source_stream, removed_prefix_collector):
   except tarfile.ReadError:
     # Empty tar file
     return
-
+
   for tar_info in tar_stream:
     absolute = os.path.relpath(tar_info.name.decode('utf-8'), './')
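
For context, the hunk above shows the usual pattern for walking a (possibly empty) tar stream. A minimal standalone sketch of that pattern, with illustrative names rather than the file's real API:

import os
import tarfile

def iter_tar_paths(source_stream):
  # Open a (possibly empty) tar stream; an empty stream raises ReadError.
  try:
    tar_stream = tarfile.open(mode='r|*', fileobj=source_stream)
  except tarfile.ReadError:
    # Empty tar file: nothing to yield.
    return

  for tar_info in tar_stream:
    # Normalize member names the same way as above ('./foo' -> 'foo').
    yield os.path.relpath(tar_info.name, './')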

@@ -88,4 +88,4 @@ if __name__ == '__main__':
   json_data = file(sys.argv[1]).read()
   fp = open(sys.argv[2])
   print compute_simple(fp, json_data)
-  print compute_tarsum(fp, json_data)
+  print compute_tarsum(fp, json_data)

@@ -46,8 +46,8 @@ class ParsedDockerfile(object):
     # Remaining cases:
     # server/some/path:tag
-    # server:port/some/path:tag
-    return (':'.join(parts[0:-1]), parts[-1])
+    # server:port/some/path:tag
+    return (':'.join(parts[0:-1]), parts[-1])
 
   def get_base_image(self):
     """ Return the base image without the tag name. """

@@ -33,7 +33,7 @@ def _import_format_generator(namespace, repository, tag, synthetic_image_id,
   # layer.tar - The TARed contents of the layer
   # VERSION - The docker import version: '1.0'
   layer_merger = StreamLayerMerger(get_layer_iterator)
-
+
   # Yield the repositories file:
   synthetic_layer_info = {}
   synthetic_layer_info[tag + '.squash'] = synthetic_image_id

@@ -68,7 +68,7 @@ def _import_format_generator(namespace, repository, tag, synthetic_image_id,
     yielded_size += len(entry)
 
   # If the yielded size is more than the estimated size (which is unlikely but possible), then
-  # raise an exception since the tar header will be wrong.
+  # raise an exception since the tar header will be wrong.
   if yielded_size > estimated_file_size:
     raise FileEstimationException()

@@ -95,7 +95,7 @@ def _build_layer_json(layer_json, synthetic_image_id):
   if 'parent' in updated_json:
     del updated_json['parent']
-
+
   if 'config' in updated_json and 'Image' in updated_json['config']:
     updated_json['config']['Image'] = synthetic_image_id

@@ -129,4 +129,4 @@ def _tar_file_header(name, file_size):
 def _tar_folder(name):
   info = tarfile.TarInfo(name=name)
   info.type = tarfile.DIRTYPE
-  return info.tobuf()
+  return info.tobuf()
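
_tar_file_header and _tar_folder emit raw tar header blocks for entries that are synthesized rather than read from disk. A rough equivalent using only the standard tarfile module (the _sketch_ names are hypothetical):

import tarfile

def _sketch_tar_file_header(name, file_size):
  # Build a regular-file header of the given size and return its raw block(s).
  info = tarfile.TarInfo(name=name)
  info.type = tarfile.REGTYPE
  info.size = file_size
  return info.tobuf()

def _sketch_tar_folder(name):
  # A directory entry has a header block but no body.
  info = tarfile.TarInfo(name=name)
  info.type = tarfile.DIRTYPE
  return info.tobuf()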

@@ -30,7 +30,7 @@ def calculate_size_handler():
     # memory. As a result, we have to loop until the unconsumed tail is empty.
     current_data = buf
     size_info.compressed_size += len(current_data)
-
+
     while len(current_data) > 0:
       size_info.uncompressed_size += len(decompressor.decompress(current_data, CHUNK_SIZE))
       current_data = decompressor.unconsumed_tail
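
The comment above explains the zlib pattern in play: capping each decompress() call bounds memory, but leaves unconsumed input in unconsumed_tail that must be fed back in. A self-contained sketch of that loop (the CHUNK_SIZE value and window-bits choice are assumptions):

import zlib

CHUNK_SIZE = 5 * 1024 * 1024  # illustrative cap on each decompress call

def count_uncompressed_bytes(compressed_chunks):
  # 32 + MAX_WBITS lets zlib auto-detect gzip or zlib headers.
  decompressor = zlib.decompressobj(zlib.MAX_WBITS | 32)
  total = 0
  for buf in compressed_chunks:
    current_data = buf
    while len(current_data) > 0:
      total += len(decompressor.decompress(current_data, CHUNK_SIZE))
      current_data = decompressor.unconsumed_tail
  return total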

@@ -9,7 +9,7 @@ class GzipWrap(object):
     self.buffer = ''
     self.zipper = GzipFile(filename, mode='wb', fileobj=self, compresslevel=compresslevel)
     self.is_done = False
-
+
   def read(self, size=-1):
     # If the buffer already has enough bytes, then simply pop them off of
     # the beginning and return them.

@@ -33,7 +33,7 @@ class GzipWrap(object):
       except StopIteration:
         is_done = True
         break
-
+
       self.zipper.write(input_buffer)
 
     if is_done:

@@ -48,7 +48,7 @@ class GzipWrap(object):
   def flush(self):
     pass
-
+
   def write(self, data):
     self.buffer += data
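
GzipWrap appears to compress an input iterator while posing as the file object that GzipFile writes into. A minimal independent sketch of that trick, not the class's exact implementation:

from gzip import GzipFile

class IteratorGzip(object):
  # GzipFile writes its compressed output into this object (we act as its
  # fileobj), while read() pulls raw chunks from an iterator into the zipper.
  def __init__(self, chunk_iterator):
    self.buffer = b''
    self.input = iter(chunk_iterator)
    self.is_done = False
    self.zipper = GzipFile(mode='wb', fileobj=self)

  def write(self, data):
    # Called by GzipFile with compressed bytes.
    self.buffer += data
    return len(data)

  def flush(self):
    pass

  def read(self, size=-1):
    # Feed raw chunks until enough compressed bytes are buffered or input ends.
    while not self.is_done and (size < 0 or len(self.buffer) < size):
      try:
        self.zipper.write(next(self.input))
      except StopIteration:
        self.is_done = True
        self.zipper.close()  # flushes the gzip trailer into self.buffer
    if size < 0:
      out, self.buffer = self.buffer, b''
    else:
      out, self.buffer = self.buffer[:size], self.buffer[size:]
    return out

# Usage: IteratorGzip(iter([b'hello ', b'world'])).read() -> gzipped bytes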

@@ -17,7 +17,7 @@ def renderInvoiceToPdf(invoice, user):
   pisaStatus = pisa.CreatePDF(sourceHtml, dest=output)
   if pisaStatus.err:
     return None
-
+
   value = output.getvalue()
   output.close()
   return value

@@ -33,7 +33,7 @@ def renderInvoiceToHtml(invoice, user):
   def get_range(line):
     if line.period and line.period.start and line.period.end:
-      return ': ' + format_date(line.period.start) + ' - ' + format_date(line.period.end)
+      return ': ' + format_date(line.period.start) + ' - ' + format_date(line.period.end)
     return ''
 
   def format_date(timestamp):

@@ -45,8 +45,8 @@ def renderInvoiceToHtml(invoice, user):
     'invoice_date': format_date(invoice.date),
     'getPrice': get_price,
     'getRange': get_range
-  }
-
+  }
+
   template = env.get_template('invoice.tmpl')
   rendered = template.render(data)
   return rendered
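
renderInvoiceToPdf renders HTML to PDF into an in-memory buffer. A stripped-down sketch, assuming the xhtml2pdf package provides the pisa module used above (the HTML string is made up):

from io import BytesIO

from xhtml2pdf import pisa

def html_to_pdf_bytes(source_html):
  output = BytesIO()
  pisa_status = pisa.CreatePDF(source_html, dest=output)
  if pisa_status.err:
    return None
  value = output.getvalue()
  output.close()
  return value

# pdf_bytes = html_to_pdf_bytes('<h1>Invoice</h1><p>Total: $0.00</p>')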

@@ -9,4 +9,4 @@ class AttrDict(dict):
     for key, value in copy.items():
       if isinstance(value, AttrDict):
         copy[key] = cls.deep_copy(value)
-    return copy
+    return copy
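
deep_copy recursively copies nested AttrDict values so mutations on the copy cannot leak back into the original. One common shape for such a class, shown here as an illustration rather than the original definition:

class AttrDictSketch(dict):
  # A dict whose keys are also readable as attributes, plus a recursive copy.
  def __init__(self, *args, **kwargs):
    super(AttrDictSketch, self).__init__(*args, **kwargs)
    self.__dict__ = self

  @classmethod
  def deep_copy(cls, attr_dict):
    copy = cls(attr_dict)
    for key, value in copy.items():
      if isinstance(value, cls):
        copy[key] = cls.deep_copy(value)
    return copy

# d = AttrDictSketch({'a': AttrDictSketch({'b': 1})})
# d2 = AttrDictSketch.deep_copy(d)
# d2.a.b = 2  # d.a.b is still 1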

@@ -38,7 +38,7 @@ class QueueFile(object):
         for handler in self._exception_handlers:
           handler(result)
           handled = True
-
+
         if handled:
           return

@@ -46,7 +46,7 @@ class QueueFile(object):
       self._buffer += result
       self._total_size += len(result)
-
+
     buf = self._buffer[0:size]
     self._buffer = self._buffer[size:]
     return buf
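
QueueFile.read buffers data arriving on a queue and slices off the requested number of bytes. An illustrative reduction of that read path (the exception routing shown above is omitted, and the None end-of-stream sentinel is an assumption):

class QueueReaderSketch(object):
  def __init__(self, queue):
    self._queue = queue  # e.g. a multiprocessing.Queue of byte chunks
    self._buffer = b''
    self._done = False

  def read(self, size=8192):
    # Block on the queue until enough bytes (or the sentinel) have arrived.
    while len(self._buffer) < size and not self._done:
      result = self._queue.get(block=True)
      if result is None:
        self._done = True
        break
      self._buffer += result

    # Slice off up to `size` bytes and keep the remainder buffered.
    buf = self._buffer[0:size]
    self._buffer = self._buffer[size:]
    return buf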

@@ -24,7 +24,7 @@ class QueueingCloudWatchReporter(object):
   def report(self, currently_processing, running_count, total_count):
     logger.debug('Worker indicated %s running count and %s total count', running_count,
                  total_count)
-
+
     need_capacity_count = total_count - running_count
     self._send_to_queue(self._namespace, self._need_capacity_name, need_capacity_count,
                         unit='Count')

@@ -61,11 +61,11 @@ def _run(get_producer, queues, chunk_size, args):
     except Exception as ex:
       logger.exception('Exception writing to queue.')
       return
-
+
     if data is None or isinstance(data, Exception):
       break
-
+
     # Important! This allows the thread that writes the queue data to the pipe
     # to do so. Otherwise, this hangs.
     time.sleep(0)
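
The "Important!" comment refers to multiprocessing.Queue's background feeder thread: put() only buffers the data, and the feeder thread writes it to the pipe, so sleeping for zero seconds yields the GIL and lets it run. A condensed sketch of that producer loop (names are illustrative):

import time

def fan_out(chunk_iterator, queues):
  # Push every chunk onto each queue (assumed to be multiprocessing.Queues).
  for data in chunk_iterator:
    for queue in queues:
      queue.put(data, block=True)

    if data is None or isinstance(data, Exception):
      break

    # Yield the GIL so the queues' feeder threads can flush to their pipes.
    time.sleep(0)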

@@ -2,21 +2,21 @@
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
 
 # Redistribution and use in source and binary forms, with or without modification,
 # are permitted provided that the following conditions are met:
 
-# 1. Redistributions of source code must retain the above copyright notice,
+# 1. Redistributions of source code must retain the above copyright notice,
 #    this list of conditions and the following disclaimer.
 
-# 2. Redistributions in binary form must reproduce the above copyright
+# 2. Redistributions in binary form must reproduce the above copyright
 #    notice, this list of conditions and the following disclaimer in the
 #    documentation and/or other materials provided with the distribution.
 
 # 3. Neither the name of Django nor the names of its contributors may be used
 #    to endorse or promote products derived from this software without
 #    specific prior written permission.
 
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE

@@ -264,4 +264,4 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separ
     if markers is not None:
       del markers[markerid]
-  return _iterencode
+  return _iterencode

@@ -24,7 +24,7 @@ class StreamLayerMerger(TarLayerFormat):
     # Update the tries.
     self.path_trie = marisa_trie.Trie(self.path_encountered)
     self.prefix_trie = marisa_trie.Trie(self.prefix_encountered)
-
+
   def check_tar_info(self, tar_info):
     absolute = os.path.relpath(tar_info.name.decode('utf-8'), './')

@@ -33,7 +33,7 @@ class StreamLayerMerger(TarLayerFormat):
       return False
 
     # Add any prefix of deleted paths to the prefix list.
-    deleted_prefix = get_deleted_prefix(absolute)
+    deleted_prefix = get_deleted_prefix(absolute)
     if deleted_prefix is not None:
       self.prefix_encountered.append(deleted_prefix)
       return False
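
The merger records prefixes of deleted paths and later consults a marisa_trie to skip entries shadowed by a deletion. A small sketch of that prefix check (the paths are made up):

import marisa_trie

# Directory prefixes that a later layer deleted.
deleted_prefixes = [u'usr/local/oldlib/', u'tmp/cache/']
prefix_trie = marisa_trie.Trie(deleted_prefixes)

def is_shadowed(path):
  # Trie.prefixes() returns every stored key that is a prefix of `path`.
  return len(prefix_trie.prefixes(path)) > 0

# is_shadowed(u'usr/local/oldlib/libfoo.so') -> True
# is_shadowed(u'usr/local/newlib/libbar.so') -> False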

@@ -10,7 +10,7 @@ class TarfileAppender(TarLayerFormat):
       in a streaming manner.
   """
   def __init__(self, base_tar_file, entries):
-    super(TarfileAppender, self).__init__(self._get_tar_iterator)
+    super(TarfileAppender, self).__init__(self._get_tar_iterator)
     self.entries = entries
     self.base_tar_file = base_tar_file
     self.first_info = None

@@ -20,7 +20,7 @@ class TarfileAppender(TarLayerFormat):
   def after_tar_layer(self, current_layer):
     pass
-
+
   def check_tar_info(self, tar_info):
     if not self.first_info:
       self.first_info = tar_info

@@ -53,4 +53,4 @@ class TarfileAppender(TarLayerFormat):
       # To make tarfile happy.
       append_tarball.seek(0)
-      yield append_tarball
+      yield append_tarball

@@ -4,7 +4,7 @@ import tarfile
 class TarLayerReadException(Exception):
   """ Exception raised when reading a layer has failed. """
   pass
-
+
 
 class TarLayerFormat(object):
   """ Class which creates a generator of the combined TAR data. """

@@ -62,7 +62,7 @@ class TarLayerFormat(object):
       # Conduct any post-tar work.
      self.after_tar_layer(current_tar)
-
+
     # Last two records are empty in TAR spec.
     yield '\0' * 512
     yield '\0' * 512
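
The two trailing yields above implement the tar end-of-archive marker: the format is terminated by two 512-byte blocks of NULs. A sketch that builds a one-entry tar by hand and confirms the standard library can read it back (the file name and contents are invented):

import io
import tarfile

data = b'hello world'
info = tarfile.TarInfo(name='greeting.txt')
info.size = len(data)

stream = io.BytesIO()
stream.write(info.tobuf())                              # 512-byte header block
stream.write(data)
stream.write(b'\0' * ((512 - len(data) % 512) % 512))   # pad data to a 512-byte boundary
stream.write(b'\0' * 512)                               # end-of-archive record 1
stream.write(b'\0' * 512)                               # end-of-archive record 2

stream.seek(0)
with tarfile.open(fileobj=stream) as tar:
  assert tar.getnames() == ['greeting.txt']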

@@ -25,7 +25,7 @@ def send_email(recipient, subject, template_file, parameters):
     return '<a href="%s">%s</a>' % (real_url, title)
 
-  parameters.update({
+  parameters.update({
     'subject': subject,
     'app_logo': 'https://quay.io/static/img/quay-logo.png', # TODO: make this pull from config
     'app_url': app_url,

@@ -53,7 +53,7 @@ def send_email_changed(username, old_email, new_email):
   send_email(old_email, 'Account e-mail address changed', 'emailchanged', {
     'username': username,
     'new_email': new_email
-  })
+  })
 
 def send_change_email(username, email, token):
   send_email(email, 'E-mail address change requested', 'changeemail', {

@@ -114,7 +114,7 @@ def send_subscription_change(change_description, customer_id, customer_email, qu
   Customer email: <a href="mailto:{2}">{2}</a><br>
   Quay user or org name: {3}<br>
   """
-
+
   title = SUBSCRIPTION_CHANGE_TITLE.format(quay_username, change_description)
   msg = Message(title, recipients=['stripe@quay.io'])
   msg.html = SUBSCRIPTION_CHANGE.format(change_description, customer_id, customer_email,