quay/util/registry/filelike.py
WHENCE_ABSOLUTE = 0
WHENCE_RELATIVE = 1
WHENCE_RELATIVE_END = 2
READ_UNTIL_END = -1


class BaseStreamFilelike(object):
    """Minimal file-like wrapper around a non-seekable stream (e.g. a socket) that
    tracks the cursor position and supports forward-only seeking.
    """

    def __init__(self, fileobj):
        self._fileobj = fileobj
        self._cursor_position = 0

    def read(self, size=READ_UNTIL_END):
        buf = self._fileobj.read(size)
        self._cursor_position += len(buf)
        return buf

    def tell(self):
        return self._cursor_position

    def seek(self, index, whence=WHENCE_ABSOLUTE):
        num_bytes_to_ff = 0
        if whence == WHENCE_ABSOLUTE:
            if index < self._cursor_position:
                raise IOError('Cannot seek backwards')
            num_bytes_to_ff = index - self._cursor_position
        elif whence == WHENCE_RELATIVE:
            if index < 0:
                raise IOError('Cannot seek backwards')
            num_bytes_to_ff = index
        elif whence == WHENCE_RELATIVE_END:
            raise IOError('Stream does not have a known end point')

        # Fast-forward by reading and discarding bytes, keeping the cursor in sync
        # so that tell() (and the slices built on it below) reflect the new position.
        while num_bytes_to_ff > 0:
            buf = self._fileobj.read(num_bytes_to_ff)
            if not buf:
                raise IOError('Seek past end of file')
            num_bytes_to_ff -= len(buf)
            self._cursor_position += len(buf)
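

# Illustrative usage sketch (hypothetical helper, not part of the original module):
# demonstrates the forward-only seek semantics above. io.BytesIO stands in for a
# non-seekable socket file object; the wrapper never calls seek() on the underlying stream.
def _example_forward_only_seek():
    import io
    stream = BaseStreamFilelike(io.BytesIO(b'0123456789'))
    stream.seek(4)                      # fast-forwards by reading and discarding 4 bytes
    assert stream.tell() == 4
    assert stream.read(3) == b'456'
    try:
        stream.seek(0)                  # seeking backwards is not supported
    except IOError:
        pass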


class SocketReader(BaseStreamFilelike):
    """Stream wrapper that invokes registered handlers on every chunk read,
    e.g. to hash or count bytes as they pass through.
    """

    def __init__(self, fileobj):
        super(SocketReader, self).__init__(fileobj)
        self.handlers = []

    def add_handler(self, handler):
        self.handlers.append(handler)

    def read(self, size=READ_UNTIL_END):
        buf = super(SocketReader, self).read(size)
        for handler in self.handlers:
            handler(buf)
        return buf


def wrap_with_handler(in_fp, handler):
    wrapper = SocketReader(in_fp)
    wrapper.add_handler(handler)
    return wrapper
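

# Illustrative usage sketch (hypothetical helper, not part of the original module):
# wraps a stream so a hashlib digest is updated as a consumer reads it. hashlib and
# io are used only for this example.
def _example_wrap_with_handler():
    import hashlib
    import io
    digest = hashlib.sha256()
    wrapped = wrap_with_handler(io.BytesIO(b'some uploaded payload'), digest.update)
    while wrapped.read(8):
        pass                            # the handler sees every chunk as it is read
    return digest.hexdigest()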


class FilelikeStreamConcat(BaseStreamFilelike):
    """Reads a sequence of file-like objects, produced by a generator, as a single
    continuous stream.
    """

    def __init__(self, file_generator):
        # The base class is pointed at this object itself, so forward seeks on the
        # concatenated stream drain bytes through our own read() below.
        super(FilelikeStreamConcat, self).__init__(self)
        self._file_generator = file_generator
        self._current_file = next(file_generator)

    def read(self, size=READ_UNTIL_END):
        buf = self._current_file.read(size)
        if buf:
            self._cursor_position += len(buf)
            return buf

        # That file was out of data; prime a new one.
        self._current_file.close()
        try:
            self._current_file = next(self._file_generator)
        except StopIteration:
            return ''
        return self.read(size)
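

# Illustrative usage sketch (hypothetical helper, not part of the original module):
# concatenates two in-memory chunks into one readable stream. io.BytesIO is used
# purely as a stand-in for the chunk files.
def _example_stream_concat():
    import io

    def chunks():
        yield io.BytesIO(b'hello ')
        yield io.BytesIO(b'world')

    stream = FilelikeStreamConcat(chunks())
    data = b''
    while True:
        buf = stream.read(4)
        if not buf:
            break
        data += buf
    assert data == b'hello world'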


class LimitingStream(BaseStreamFilelike):
    """Stream wrapper that caps the total number of bytes readable from the
    underlying stream; reads past the limit return empty.
    """

    def __init__(self, fileobj, read_limit=READ_UNTIL_END):
        super(LimitingStream, self).__init__(fileobj)
        self._read_limit = read_limit
        self.byte_count_read = 0

    def read(self, size=READ_UNTIL_END):
        # With no limit configured, fall back to the caller's requested size.
        max_bytes_to_read = size

        # If a read limit is specified, then determine the maximum number of bytes to return.
        if self._read_limit != READ_UNTIL_END:
            if size == READ_UNTIL_END:
                size = self._read_limit
            max_bytes_to_read = min(self._read_limit - self.byte_count_read, size)

        byte_data_read = super(LimitingStream, self).read(max_bytes_to_read)
        self.byte_count_read += len(byte_data_read)
        return byte_data_read
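

# Illustrative usage sketch (hypothetical helper, not part of the original module):
# caps reads at 5 bytes regardless of how much the caller asks for.
def _example_limiting_stream():
    import io
    limited = LimitingStream(io.BytesIO(b'0123456789'), read_limit=5)
    assert limited.read() == b'01234'
    assert limited.read() == b''
    assert limited.byte_count_read == 5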


class StreamSlice(BaseStreamFilelike):
    """Exposes the byte range [start_offset, end_offset_exclusive) of the underlying
    stream as its own file-like object.
    """

    def __init__(self, fileobj, start_offset=0, end_offset_exclusive=READ_UNTIL_END):
        super(StreamSlice, self).__init__(fileobj)
        self._end_offset_exclusive = end_offset_exclusive

        if start_offset > 0:
            self.seek(start_offset)

    def read(self, size=READ_UNTIL_END):
        if self._end_offset_exclusive == READ_UNTIL_END:
            # We weren't asked to limit the end of the stream.
            return super(StreamSlice, self).read(size)

        # Compute the max bytes to read until the end or until we reach the user requested max.
        max_bytes_to_read = self._end_offset_exclusive - self.tell()
        if size != READ_UNTIL_END:
            max_bytes_to_read = min(max_bytes_to_read, size)

        return super(StreamSlice, self).read(max_bytes_to_read)
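

# Illustrative usage sketch (hypothetical helper, not part of the original module):
# exposes bytes [3, 8) of the underlying stream as their own file-like object,
# e.g. to re-read one chunk of a larger upload.
def _example_stream_slice():
    import io
    chunk = StreamSlice(io.BytesIO(b'0123456789'), start_offset=3, end_offset_exclusive=8)
    assert chunk.read() == b'34567'
    assert chunk.read() == b''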