When the decompression ratio is huge, we may be faced with a large
(multiple megabytes) bytes object. Slicing that object incurs a copy, so
repeated reads become O(n^2), while appending to and trimming a bytearray
is much faster.
"""
self.fileobj = fileobj
self.decompressor = decompressor
- self.buff = b""
+ self.buff = bytearray()
self.pos = 0
def _fill_buff_until(self, predicate):
break
def _read_from_buff(self, length):
- ret = self.buff[:length]
- self.buff = self.buff[length:]
+ ret = bytes(self.buff[:length])
+ self.buff[:length] = b""
self.pos += length
return ret
self.fileobj.close()
self.fileobj = None
self.decompressor = None
- self.buff = b""
+ self.buff = bytearray()
decompressors = {
'.gz': GzipDecompressor,