Compressed Macintosh resource data, Apple `'dcmp' (2)` format: Python (read-write) parsing library

Compressed resource data in `'dcmp' (2)` format, as stored in compressed resources with header type `9` and decompressor ID `2`.

The `'dcmp' (2)` decompressor resource is included in the System file of System 7.0 and later. This compression format is used for a few compressed resources in System 7.0's files (such as the System file). This decompressor is also included with and used by some other Apple applications, such as ResEdit. (Note: ResEdit includes the `'dcmp' (2)` resource, but none of its resources actually use this decompressor.)

This compression format is based on simple dictionary coding, where each byte in the compressed data expands to two bytes, based on a lookup table (either included in the compressed data or provided by the decompressor). An alternative "tagged" compression format is also supported, which allows using two-byte literals in addition to single-byte table references, at the cost of requiring an extra "tag" byte every 16 output bytes, to differentiate literals and table references.

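To make the table expansion concrete, here is a minimal sketch of the untagged case in plain Python. This is an illustration rather than the real decompressor: `compressed` and `table` are assumed inputs (the table being a 256-entry list of two-byte values), and it assumes, as the odd-length handling in the parser below suggests, that an odd decompressed length means the final input byte is copied to the output unchanged.

def expand_untagged(compressed: bytes, table: list, decompressed_len: int) -> bytes:
    # Untagged 'dcmp' (2) data: every input byte is an index into a table
    # of two-byte words, so the output is just the concatenated entries.
    body, last = compressed, b""
    if decompressed_len % 2 != 0:
        # Assumption: with an odd output length, the last input byte is a
        # literal byte rather than a table reference.
        body, last = compressed[:-1], compressed[-1:]
    out = bytearray()
    for b in body:
        out += table[b]  # one input byte expands to two output bytes
    return bytes(out + last)
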
Application: Mac OS

KS implementation details

License: MIT
Minimal Kaitai Struct required: 0.8

This page hosts a formal specification of compressed Macintosh resource data in Apple's `'dcmp' (2)` format, written in Kaitai Struct. This specification can be automatically translated into a variety of programming languages to obtain a parsing library.

Python (read-write) source code to parse Compressed Macintosh resource data, Apple `'dcmp' (2)` format

dcmp_2.py

# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# type: ignore

import kaitaistruct
from kaitaistruct import ReadWriteKaitaiStruct, KaitaiStream, BytesIO
import bytes_with_io


if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 11):
    raise Exception("Incompatible Kaitai Struct Python API: 0.11 or later is required, but you have %s" % (kaitaistruct.__version__))

class Dcmp2(ReadWriteKaitaiStruct):
    """Compressed resource data in `'dcmp' (2)` format,
    as stored in compressed resources with header type `9` and decompressor ID `2`.
    
    The `'dcmp' (2)` decompressor resource is included in the System file of System 7.0 and later.
    This compression format is used for a few compressed resources in System 7.0's files
    (such as the System file).
    This decompressor is also included with and used by some other Apple applications,
    such as ResEdit.
    (Note: ResEdit includes the `'dcmp' (2)` resource,
    but none of its resources actually use this decompressor.)
    
    This compression format is based on simple dictionary coding,
    where each byte in the compressed data expands to two bytes,
    based on a lookup table
    (either included in the compressed data or provided by the decompressor).
    An alternative "tagged" compression format is also supported,
    which allows using two-byte literals in addition to single-byte table references,
    at the cost of requiring an extra "tag" byte every 16 output bytes,
    to differentiate literals and table references.
    
    .. seealso::
       Source - https://github.com/dgelessus/python-rsrcfork/blob/f891a6e/src/rsrcfork/compress/dcmp2.py
    """
    def __init__(self, len_decompressed, header_parameters_with_io, _io=None, _parent=None, _root=None):
        super(Dcmp2, self).__init__(_io)
        self._parent = _parent
        self._root = _root or self
        self.len_decompressed = len_decompressed
        self.header_parameters_with_io = header_parameters_with_io
        self._should_write_header_parameters = False
        self.header_parameters__enabled = True

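    # Reading order, as implemented in _read below: the optional custom
    # lookup table of two-byte entries, then the tagged or untagged payload
    # (sized to stop one byte short of the end of the stream when the
    # decompressed length is odd), and finally that reserved last byte.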
    def _read(self):
        if self.header_parameters.flags.has_custom_lookup_table:
            pass
            self.custom_lookup_table = []
            for i in range(self.header_parameters.num_custom_lookup_table_entries):
                self.custom_lookup_table.append(self._io.read_bytes(2))


        _on = self.header_parameters.flags.tagged
        if _on == True:
            pass
            self._raw_data = self._io.read_bytes((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0))
            _io__raw_data = KaitaiStream(BytesIO(self._raw_data))
            self.data = Dcmp2.TaggedData(_io__raw_data, self, self._root)
            self.data._read()
        else:
            pass
            self._raw_data = self._io.read_bytes((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0))
            _io__raw_data = KaitaiStream(BytesIO(self._raw_data))
            self.data = Dcmp2.UntaggedData(_io__raw_data, self, self._root)
            self.data._read()
        if self.is_len_decompressed_odd:
            pass
            self.last_byte = self._io.read_bytes(1)

        self._dirty = False


    def _fetch_instances(self):
        pass
        if self.header_parameters.flags.has_custom_lookup_table:
            pass
            for i in range(len(self.custom_lookup_table)):
                pass


        _on = self.header_parameters.flags.tagged
        if _on == True:
            pass
            self.data._fetch_instances()
        else:
            pass
            self.data._fetch_instances()
        if self.is_len_decompressed_odd:
            pass

        _ = self.header_parameters
        if hasattr(self, '_m_header_parameters'):
            pass
            self._m_header_parameters._fetch_instances()



    def _write__seq(self, io=None):
        super(Dcmp2, self)._write__seq(io)
        self._should_write_header_parameters = self.header_parameters__enabled
        if self.header_parameters.flags.has_custom_lookup_table:
            pass
            if len(self.custom_lookup_table) != self.header_parameters.num_custom_lookup_table_entries:
                raise kaitaistruct.ConsistencyError(u"custom_lookup_table", self.header_parameters.num_custom_lookup_table_entries, len(self.custom_lookup_table))
            for i in range(len(self.custom_lookup_table)):
                pass
                if len(self.custom_lookup_table[i]) != 2:
                    raise kaitaistruct.ConsistencyError(u"custom_lookup_table", 2, len(self.custom_lookup_table[i]))
                self._io.write_bytes(self.custom_lookup_table[i])


        _on = self.header_parameters.flags.tagged
        if _on == True:
            pass
            if self.data._root != self._root:
                raise kaitaistruct.ConsistencyError(u"data", self._root, self.data._root)
            if self.data._parent != self:
                raise kaitaistruct.ConsistencyError(u"data", self, self.data._parent)
            _io__raw_data = KaitaiStream(BytesIO(bytearray((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0))))
            self._io.add_child_stream(_io__raw_data)
            _pos2 = self._io.pos()
            self._io.seek(self._io.pos() + ((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0)))
            def handler(parent, _io__raw_data=_io__raw_data):
                self._raw_data = _io__raw_data.to_byte_array()
                if len(self._raw_data) != (self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0):
                    raise kaitaistruct.ConsistencyError(u"raw(data)", (self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0), len(self._raw_data))
                parent.write_bytes(self._raw_data)
            _io__raw_data.write_back_handler = KaitaiStream.WriteBackHandler(_pos2, handler)
            self.data._write__seq(_io__raw_data)
        else:
            pass
            if self.data._root != self._root:
                raise kaitaistruct.ConsistencyError(u"data", self._root, self.data._root)
            if self.data._parent != self:
                raise kaitaistruct.ConsistencyError(u"data", self, self.data._parent)
            _io__raw_data = KaitaiStream(BytesIO(bytearray((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0))))
            self._io.add_child_stream(_io__raw_data)
            _pos2 = self._io.pos()
            self._io.seek(self._io.pos() + ((self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0)))
            def handler(parent, _io__raw_data=_io__raw_data):
                self._raw_data = _io__raw_data.to_byte_array()
                if len(self._raw_data) != (self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0):
                    raise kaitaistruct.ConsistencyError(u"raw(data)", (self._io.size() - self._io.pos()) - (1 if self.is_len_decompressed_odd else 0), len(self._raw_data))
                parent.write_bytes(self._raw_data)
            _io__raw_data.write_back_handler = KaitaiStream.WriteBackHandler(_pos2, handler)
            self.data._write__seq(_io__raw_data)
        if self.is_len_decompressed_odd:
            pass
            self._io.write_bytes(self.last_byte)



    def _check(self):
        if self.is_len_decompressed_odd:
            pass
            if len(self.last_byte) != 1:
                raise kaitaistruct.ConsistencyError(u"last_byte", 1, len(self.last_byte))

        if self.header_parameters__enabled:
            pass
            if self._m_header_parameters._root != self._root:
                raise kaitaistruct.ConsistencyError(u"header_parameters", self._root, self._m_header_parameters._root)
            if self._m_header_parameters._parent != self:
                raise kaitaistruct.ConsistencyError(u"header_parameters", self, self._m_header_parameters._parent)

        self._dirty = False

    class HeaderParameters(ReadWriteKaitaiStruct):
        """Decompressor-specific parameters for this compression format,
        as stored in the compressed resource header.
        """
        def __init__(self, _io=None, _parent=None, _root=None):
            super(Dcmp2.HeaderParameters, self).__init__(_io)
            self._parent = _parent
            self._root = _root

        def _read(self):
            self.unknown = self._io.read_u2be()
            self.num_custom_lookup_table_entries_m1 = self._io.read_u1()
            self._raw_flags = self._io.read_bytes(1)
            _io__raw_flags = KaitaiStream(BytesIO(self._raw_flags))
            self.flags = Dcmp2.HeaderParameters.Flags(_io__raw_flags, self, self._root)
            self.flags._read()
            self._dirty = False


        def _fetch_instances(self):
            pass
            self.flags._fetch_instances()


        def _write__seq(self, io=None):
            super(Dcmp2.HeaderParameters, self)._write__seq(io)
            self._io.write_u2be(self.unknown)
            self._io.write_u1(self.num_custom_lookup_table_entries_m1)
            _io__raw_flags = KaitaiStream(BytesIO(bytearray(1)))
            self._io.add_child_stream(_io__raw_flags)
            _pos2 = self._io.pos()
            self._io.seek(self._io.pos() + 1)
            def handler(parent, _io__raw_flags=_io__raw_flags):
                self._raw_flags = _io__raw_flags.to_byte_array()
                if len(self._raw_flags) != 1:
                    raise kaitaistruct.ConsistencyError(u"raw(flags)", 1, len(self._raw_flags))
                parent.write_bytes(self._raw_flags)
            _io__raw_flags.write_back_handler = KaitaiStream.WriteBackHandler(_pos2, handler)
            self.flags._write__seq(_io__raw_flags)


        def _check(self):
            if self.flags._root != self._root:
                raise kaitaistruct.ConsistencyError(u"flags", self._root, self.flags._root)
            if self.flags._parent != self:
                raise kaitaistruct.ConsistencyError(u"flags", self, self.flags._parent)
            self._dirty = False

        class Flags(ReadWriteKaitaiStruct):
            """Flags for the decompressor,
            as stored in the decompressor-specific parameters.
            """
            def __init__(self, _io=None, _parent=None, _root=None):
                super(Dcmp2.HeaderParameters.Flags, self).__init__(_io)
                self._parent = _parent
                self._root = _root
                self._should_write_as_int = False
                self.as_int__enabled = True

            def _read(self):
                self.reserved = self._io.read_bits_int_be(6)
                self.tagged = self._io.read_bits_int_be(1) != 0
                self.has_custom_lookup_table = self._io.read_bits_int_be(1) != 0
                self._dirty = False


            def _fetch_instances(self):
                pass
                _ = self.as_int
                if hasattr(self, '_m_as_int'):
                    pass



            def _write__seq(self, io=None):
                super(Dcmp2.HeaderParameters.Flags, self)._write__seq(io)
                self._should_write_as_int = self.as_int__enabled
                self._io.write_bits_int_be(6, self.reserved)
                self._io.write_bits_int_be(1, int(self.tagged))
                self._io.write_bits_int_be(1, int(self.has_custom_lookup_table))


            def _check(self):
                if self.as_int__enabled:
                    pass

                self._dirty = False

            @property
            def as_int(self):
                """The flags as a packed integer,
                as they are stored in the data.
                """
                if self._should_write_as_int:
                    self._write_as_int()
                if hasattr(self, '_m_as_int'):
                    return self._m_as_int

                if not self.as_int__enabled:
                    return None

                _pos = self._io.pos()
                self._io.seek(0)
                self._m_as_int = self._io.read_u1()
                self._io.seek(_pos)
                return getattr(self, '_m_as_int', None)

            @as_int.setter
            def as_int(self, v):
                self._dirty = True
                self._m_as_int = v

            def _write_as_int(self):
                self._should_write_as_int = False
                _pos = self._io.pos()
                self._io.seek(0)
                self._io.write_u1(self._m_as_int)
                self._io.seek(_pos)


        @property
        def num_custom_lookup_table_entries(self):
            """The number of entries in the custom lookup table.
            Only used if a custom lookup table is present.
            """
            if hasattr(self, '_m_num_custom_lookup_table_entries'):
                return self._m_num_custom_lookup_table_entries

            if self.flags.has_custom_lookup_table:
                pass
                self._m_num_custom_lookup_table_entries = self.num_custom_lookup_table_entries_m1 + 1

            return getattr(self, '_m_num_custom_lookup_table_entries', None)

        def _invalidate_num_custom_lookup_table_entries(self):
            del self._m_num_custom_lookup_table_entries

    class TaggedData(ReadWriteKaitaiStruct):
        """Compressed data in the "tagged" variant of the format.
        """
        def __init__(self, _io=None, _parent=None, _root=None):
            super(Dcmp2.TaggedData, self).__init__(_io)
            self._parent = _parent
            self._root = _root

        def _read(self):
            self.chunks = []
            i = 0
            while not self._io.is_eof():
                _t_chunks = Dcmp2.TaggedData.Chunk(self._io, self, self._root)
                try:
                    _t_chunks._read()
                finally:
                    self.chunks.append(_t_chunks)
                i += 1

            self._dirty = False


        def _fetch_instances(self):
            pass
            for i in range(len(self.chunks)):
                pass
                self.chunks[i]._fetch_instances()



        def _write__seq(self, io=None):
            super(Dcmp2.TaggedData, self)._write__seq(io)
            for i in range(len(self.chunks)):
                pass
                if self._io.is_eof():
                    raise kaitaistruct.ConsistencyError(u"chunks", 0, self._io.size() - self._io.pos())
                self.chunks[i]._write__seq(self._io)

            if not self._io.is_eof():
                raise kaitaistruct.ConsistencyError(u"chunks", 0, self._io.size() - self._io.pos())


        def _check(self):
            for i in range(len(self.chunks)):
                pass
                if self.chunks[i]._root != self._root:
                    raise kaitaistruct.ConsistencyError(u"chunks", self._root, self.chunks[i]._root)
                if self.chunks[i]._parent != self:
                    raise kaitaistruct.ConsistencyError(u"chunks", self, self.chunks[i]._parent)

            self._dirty = False

        class Chunk(ReadWriteKaitaiStruct):
            """A single tagged chunk of compressed data.
            
            Each chunk expands to 16 bytes of decompressed data.
            In compressed form,
            the chunks have a variable length
            (between 9 and 17 bytes)
            depending on the value of the tag byte.
            """
            def __init__(self, _io=None, _parent=None, _root=None):
                super(Dcmp2.TaggedData.Chunk, self).__init__(_io)
                self._parent = _parent
                self._root = _root

            def _read(self):
                self.tag = []
                for i in range(8):
                    self.tag.append(self._io.read_bits_int_be(1) != 0)

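                # Each of the eight tag bits selects the form of the matching
                # unit: a set bit means a one-byte table reference, a clear
                # bit a two-byte literal (see the switch below).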
                self.units = []
                i = 0
                while True:
                    _on = self.tag[i]
                    if _on == True:
                        pass
                        _ = self._io.read_u1()
                        self.units.append(_)
                    else:
                        pass
                        _ = self._io.read_bytes((1 if self.tag[i] else 2))
                        self.units.append(_)
                    if  ((i >= 7) or (self._io.is_eof())) :
                        break
                    i += 1
                self._dirty = False


            def _fetch_instances(self):
                pass
                for i in range(len(self.tag)):
                    pass

                for i in range(len(self.units)):
                    pass
                    _on = self.tag[i]
                    if _on == True:
                        pass
                    else:
                        pass



            def _write__seq(self, io=None):
                super(Dcmp2.TaggedData.Chunk, self)._write__seq(io)
                for i in range(len(self.tag)):
                    pass
                    self._io.write_bits_int_be(1, int(self.tag[i]))

                for i in range(len(self.units)):
                    pass
                    _on = self.tag[i]
                    if _on == True:
                        pass
                        self._io.write_u1(self.units[i])
                    else:
                        pass
                        self._io.write_bytes(self.units[i])
                    _ = self.units[i]
                    if  ((i >= 7) or (self._io.is_eof()))  != (i == len(self.units) - 1):
                        raise kaitaistruct.ConsistencyError(u"units", i == len(self.units) - 1,  ((i >= 7) or (self._io.is_eof())) )



            def _check(self):
                if len(self.tag) != 8:
                    raise kaitaistruct.ConsistencyError(u"tag", 8, len(self.tag))
                for i in range(len(self.tag)):
                    pass

                if len(self.units) == 0:
                    raise kaitaistruct.ConsistencyError(u"units", 0, len(self.units))
                for i in range(len(self.units)):
                    pass
                    _on = self.tag[i]
                    if _on == True:
                        pass
                    else:
                        pass
                        if len(self.units[i]) != (1 if self.tag[i] else 2):
                            raise kaitaistruct.ConsistencyError(u"units", (1 if self.tag[i] else 2), len(self.units[i]))

                self._dirty = False



    class UntaggedData(ReadWriteKaitaiStruct):
        """Compressed data in the "untagged" variant of the format.
        """
        def __init__(self, _io=None, _parent=None, _root=None):
            super(Dcmp2.UntaggedData, self).__init__(_io)
            self._parent = _parent
            self._root = _root

        def _read(self):
            self.table_references = []
            i = 0
            while not self._io.is_eof():
                self.table_references.append(self._io.read_u1())
                i += 1

            self._dirty = False


        def _fetch_instances(self):
            pass
            for i in range(len(self.table_references)):
                pass



        def _write__seq(self, io=None):
            super(Dcmp2.UntaggedData, self)._write__seq(io)
            for i in range(len(self.table_references)):
                pass
                if self._io.is_eof():
                    raise kaitaistruct.ConsistencyError(u"table_references", 0, self._io.size() - self._io.pos())
                self._io.write_u1(self.table_references[i])

            if not self._io.is_eof():
                raise kaitaistruct.ConsistencyError(u"table_references", 0, self._io.size() - self._io.pos())


        def _check(self):
            for i in range(len(self.table_references)):
                pass

            self._dirty = False


    @property
    def default_lookup_table(self):
        """The default lookup table,
        which is used if no custom lookup table is included with the compressed data.
        """
        if hasattr(self, '_m_default_lookup_table'):
            return self._m_default_lookup_table

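        # These entries appear to be big-endian 16-bit words that occur
        # frequently in Motorola 68k machine code, e.g. 0x4e75 (rts),
        # 0x4e56 (link a6) and 0xa9f0 (the _LoadSeg trap).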
        self._m_default_lookup_table = [b"\x00\x00", b"\x00\x08", b"\x4E\xBA", b"\x20\x6E", b"\x4E\x75", b"\x00\x0C", b"\x00\x04", b"\x70\x00", b"\x00\x10", b"\x00\x02", b"\x48\x6E", b"\xFF\xFC", b"\x60\x00", b"\x00\x01", b"\x48\xE7", b"\x2F\x2E", b"\x4E\x56", b"\x00\x06", b"\x4E\x5E", b"\x2F\x00", b"\x61\x00", b"\xFF\xF8", b"\x2F\x0B", b"\xFF\xFF", b"\x00\x14", b"\x00\x0A", b"\x00\x18", b"\x20\x5F", b"\x00\x0E", b"\x20\x50", b"\x3F\x3C", b"\xFF\xF4", b"\x4C\xEE", b"\x30\x2E", b"\x67\x00", b"\x4C\xDF", b"\x26\x6E", b"\x00\x12", b"\x00\x1C", b"\x42\x67", b"\xFF\xF0", b"\x30\x3C", b"\x2F\x0C", b"\x00\x03", b"\x4E\xD0", b"\x00\x20", b"\x70\x01", b"\x00\x16", b"\x2D\x40", b"\x48\xC0", b"\x20\x78", b"\x72\x00", b"\x58\x8F", b"\x66\x00", b"\x4F\xEF", b"\x42\xA7", b"\x67\x06", b"\xFF\xFA", b"\x55\x8F", b"\x28\x6E", b"\x3F\x00", b"\xFF\xFE", b"\x2F\x3C", b"\x67\x04", b"\x59\x8F", b"\x20\x6B", b"\x00\x24", b"\x20\x1F", b"\x41\xFA", b"\x81\xE1", b"\x66\x04", b"\x67\x08", b"\x00\x1A", b"\x4E\xB9", b"\x50\x8F", b"\x20\x2E", b"\x00\x07", b"\x4E\xB0", b"\xFF\xF2", b"\x3D\x40", b"\x00\x1E", b"\x20\x68", b"\x66\x06", b"\xFF\xF6", b"\x4E\xF9", b"\x08\x00", b"\x0C\x40", b"\x3D\x7C", b"\xFF\xEC", b"\x00\x05", b"\x20\x3C", b"\xFF\xE8", b"\xDE\xFC", b"\x4A\x2E", b"\x00\x30", b"\x00\x28", b"\x2F\x08", b"\x20\x0B", b"\x60\x02", b"\x42\x6E", b"\x2D\x48", b"\x20\x53", b"\x20\x40", b"\x18\x00", b"\x60\x04", b"\x41\xEE", b"\x2F\x28", b"\x2F\x01", b"\x67\x0A", b"\x48\x40", b"\x20\x07", b"\x66\x08", b"\x01\x18", b"\x2F\x07", b"\x30\x28", b"\x3F\x2E", b"\x30\x2B", b"\x22\x6E", b"\x2F\x2B", b"\x00\x2C", b"\x67\x0C", b"\x22\x5F", b"\x60\x06", b"\x00\xFF", b"\x30\x07", b"\xFF\xEE", b"\x53\x40", b"\x00\x40", b"\xFF\xE4", b"\x4A\x40", b"\x66\x0A", b"\x00\x0F", b"\x4E\xAD", b"\x70\xFF", b"\x22\xD8", b"\x48\x6B", b"\x00\x22", b"\x20\x4B", b"\x67\x0E", b"\x4A\xAE", b"\x4E\x90", b"\xFF\xE0", b"\xFF\xC0", b"\x00\x2A", b"\x27\x40", b"\x67\x02", b"\x51\xC8", b"\x02\xB6", b"\x48\x7A", b"\x22\x78", b"\xB0\x6E", b"\xFF\xE6", b"\x00\x09", b"\x32\x2E", b"\x3E\x00", b"\x48\x41", b"\xFF\xEA", b"\x43\xEE", b"\x4E\x71", b"\x74\x00", b"\x2F\x2C", b"\x20\x6C", b"\x00\x3C", b"\x00\x26", b"\x00\x50", b"\x18\x80", b"\x30\x1F", b"\x22\x00", b"\x66\x0C", b"\xFF\xDA", b"\x00\x38", b"\x66\x02", b"\x30\x2C", b"\x20\x0C", b"\x2D\x6E", b"\x42\x40", b"\xFF\xE2", b"\xA9\xF0", b"\xFF\x00", b"\x37\x7C", b"\xE5\x80", b"\xFF\xDC", b"\x48\x68", b"\x59\x4F", b"\x00\x34", b"\x3E\x1F", b"\x60\x08", b"\x2F\x06", b"\xFF\xDE", b"\x60\x0A", b"\x70\x02", b"\x00\x32", b"\xFF\xCC", b"\x00\x80", b"\x22\x51", b"\x10\x1F", b"\x31\x7C", b"\xA0\x29", b"\xFF\xD8", b"\x52\x40", b"\x01\x00", b"\x67\x10", b"\xA0\x23", b"\xFF\xCE", b"\xFF\xD4", b"\x20\x06", b"\x48\x78", b"\x00\x2E", b"\x50\x4F", b"\x43\xFA", b"\x67\x12", b"\x76\x00", b"\x41\xE8", b"\x4A\x6E", b"\x20\xD9", b"\x00\x5A", b"\x7F\xFF", b"\x51\xCA", b"\x00\x5C", b"\x2E\x00", b"\x02\x40", b"\x48\xC7", b"\x67\x14", b"\x0C\x80", b"\x2E\x9F", b"\xFF\xD6", b"\x80\x00", b"\x10\x00", b"\x48\x42", b"\x4A\x6B", b"\xFF\xD2", b"\x00\x48", b"\x4A\x47", b"\x4E\xD1", b"\x20\x6F", b"\x00\x41", b"\x60\x0C", b"\x2A\x78", b"\x42\x2E", b"\x32\x00", b"\x65\x74", b"\x67\x16", b"\x00\x44", b"\x48\x6D", b"\x20\x08", b"\x48\x6C", b"\x0B\x7C", b"\x26\x40", b"\x04\x00", b"\x00\x68", b"\x20\x6D", b"\x00\x0D", b"\x2A\x40", b"\x00\x0B", b"\x00\x3E", b"\x02\x20"]
        return getattr(self, '_m_default_lookup_table', None)

    def _invalidate_default_lookup_table(self):
        del self._m_default_lookup_table
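    # Note: header_parameters is parsed from the separate stream carried by
    # header_parameters_with_io (the decompressor-specific part of the
    # compressed resource header), not from the main data stream.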
    @property
    def header_parameters(self):
        """The parsed decompressor-specific parameters from the compressed resource header.
        """
        if self._should_write_header_parameters:
            self._write_header_parameters()
        if hasattr(self, '_m_header_parameters'):
            return self._m_header_parameters

        if not self.header_parameters__enabled:
            return None

        io = self.header_parameters_with_io._io
        _pos = io.pos()
        io.seek(0)
        self._m_header_parameters = Dcmp2.HeaderParameters(io, self, self._root)
        self._m_header_parameters._read()
        io.seek(_pos)
        return getattr(self, '_m_header_parameters', None)

    @header_parameters.setter
    def header_parameters(self, v):
        self._dirty = True
        self._m_header_parameters = v

    def _write_header_parameters(self):
        self._should_write_header_parameters = False
        io = self.header_parameters_with_io._io
        _pos = io.pos()
        io.seek(0)
        self._m_header_parameters._write__seq(io)
        io.seek(_pos)

    @property
    def is_len_decompressed_odd(self):
        """Whether the length of the decompressed data is odd.
        This affects the meaning of the last byte of the compressed data.
        """
        if hasattr(self, '_m_is_len_decompressed_odd'):
            return self._m_is_len_decompressed_odd

        self._m_is_len_decompressed_odd = self.len_decompressed % 2 != 0
        return getattr(self, '_m_is_len_decompressed_odd', None)

    def _invalidate_is_len_decompressed_odd(self):
        del self._m_is_len_decompressed_odd
    @property
    def lookup_table(self):
        """The lookup table to be used for this compressed data.
        """
        if hasattr(self, '_m_lookup_table'):
            return self._m_lookup_table

        self._m_lookup_table = (self.custom_lookup_table if self.header_parameters.flags.has_custom_lookup_table else self.default_lookup_table)
        return getattr(self, '_m_lookup_table', None)

    def _invalidate_lookup_table(self):
        del self._m_lookup_table
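
Usage example

A minimal sketch of driving the generated class, with hypothetical inputs: `params` stands for the decompressor-specific parameter bytes from the compressed resource header and `payload` for the compressed data itself; `decompressed_size` comes from the same header. The constructor only requires its second argument to expose an `_io` attribute, so a plain namespace stands in here for a parsed `bytes_with_io` object:

import types
from kaitaistruct import KaitaiStream, BytesIO
from dcmp_2 import Dcmp2

# Hypothetical example inputs (untagged, no custom lookup table).
params = b"\x00\x00\x00\x00"   # unknown=0, table size field=0, flags=0
payload = b"\x02\x04"          # two table references
decompressed_size = 4          # each reference expands to two bytes

params_holder = types.SimpleNamespace(_io=KaitaiStream(BytesIO(params)))
dcmp = Dcmp2(decompressed_size, params_holder, KaitaiStream(BytesIO(payload)))
dcmp._read()

print(dcmp.header_parameters.flags.tagged)  # False
expanded = b"".join(dcmp.lookup_table[ref] for ref in dcmp.data.table_references)
print(expanded)  # the two table entries 0x4eba and 0x4e75, concatenated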