Skip to content
Browse files

Oops, remove .bak files again

  • Loading branch information...
1 parent 175d340 commit c46f66fedffb9188a7323194f63fc2ebc6484d89 Blixt committed Feb 28, 2014
Showing with 0 additions and 691 deletions.
  1. +0 −7 starbound/__init__.py.bak
  2. +0 −231 starbound/btreedb4.py.bak
  3. +0 −198 starbound/helpers.py.bak
  4. +0 −96 starbound/sbbf02.py.bak
  5. +0 −139 starbound/sbon.py.bak
  6. +0 −20 starbound/sbvj01.py.bak
View
7 starbound/__init__.py.bak
@@ -1,7 +0,0 @@
-from btreedb4 import FileBTreeDB4
-from sbbf02 import FileSBBF02
-from sbvj01 import FileSBVJ01
-
-import sbon
-
-from helpers import open as open_file, KeyStore, Package, Player, World
View
231 starbound/btreedb4.py.bak
@@ -1,231 +0,0 @@
-import binascii
-import bisect
-import io
-import struct
-
-import sbbf02
-import sbon
-
-
class FileBTreeDB4(sbbf02.FileSBBF02):
    """A B-tree database format on top of the SBBF02 block format.

    Note: The developers of this format probably intended for the underlying
    file format to be arbitrary, but this database has pretty strong
    connections to SBBF02 right now so it's been implemented as inheriting from
    that file format. In the future we may want to split away from the
    inheritance chain and instead use the SBBF02 file as an API.

    """
    def __init__(self, path):
        super(FileBTreeDB4, self).__init__(path)

        # Fixed byte length of every key; read from the user header in open().
        self.key_size = None

        # Set this attribute to True to make reading more forgiving.
        self.repair = False

        # Root-node bookkeeping; all three are filled in by open().
        self.alternate_root_node = None
        self.root_node = None
        self.root_node_is_leaf = None

    def deserialize_data(self, data):
        """Can be overridden to deserialize data before returning it.

        The default implementation returns the raw bytes unchanged.
        """
        return data

    def encode_key(self, key):
        """Can be overridden to encode a key before looking for it in the
        database (for example if the key needs to be hashed).

        The default implementation returns the key unchanged.
        """
        return key

    def get(self, key):
        """Returns the deserialized data for the provided key.

        Raises KeyError if the key is not present; the key material is
        hex-encoded in the error for readability, and the encoded form is
        included when it differs from the raw key.
        """
        encoded_key = self.encode_key(key)
        try:
            return self.deserialize_data(self.get_binary(encoded_key))
        except KeyError:
            if encoded_key == key:
                raise KeyError(binascii.hexlify(key))
            else:
                raise KeyError(key, binascii.hexlify(encoded_key))

    def get_binary(self, key):
        """Returns the binary data for the provided pre-encoded key.

        Walks from the root node down through index blocks until a leaf is
        reached, then scans that leaf for the key.
        """
        assert self.is_open(), 'Tried to get from closed file'
        assert len(key) == self.key_size, 'Invalid key length'

        block = self.get_block(self.root_node)

        # Scan down the B-tree until we reach a leaf.
        while isinstance(block, BTreeIndex):
            block_number = block.get_block_for_key(key)
            block = self.get_block(block_number)
        assert isinstance(block, BTreeLeaf), 'Did not reach a leaf'

        return self.get_leaf_value(block, key)

    def get_leaf_value(self, leaf, key):
        # Scan the key/value pairs stored in the leaf (possibly spanning
        # several chained blocks, handled by LeafReader) for the given key.
        stream = LeafReader(self, leaf)

        # The number of keys is read on-demand because only leaves pointed to
        # by an index contain this number (others just contain arbitrary data).
        num_keys, = struct.unpack('>i', stream.read(4))
        for i in xrange(num_keys):
            cur_key = stream.read(self.key_size)
            value = sbon.read_bytes(stream)

            if cur_key == key:
                return value

        raise KeyError(key)

    def get_raw(self, key):
        """Returns the raw data for the provided key.

        Like get(), but without applying deserialize_data().
        """
        return self.get_binary(self.encode_key(key))

    def get_using_encoded_key(self, key):
        """Returns the deserialized data for the provided pre-encoded key.

        """
        return self.deserialize_data(self.get_binary(key))

    def open(self):
        # Open the underlying SBBF02 file and parse the BTreeDB4 user header.
        super(FileBTreeDB4, self).open()
        stream = self.get_user_header()

        # Require that the format of the content is BTreeDB4.
        db_format = sbon.read_fixlen_string(stream, 12)
        assert db_format == 'BTreeDB4', 'Expected binary tree database'

        # Name of the database.
        self.identifier = sbon.read_fixlen_string(stream, 12)

        # key size, then two (root node, is-leaf) slots with a flag that
        # selects between them; 'x' bytes are padding.
        fields = struct.unpack('>i?xi?xxxi?', stream.read(19))
        self.key_size = fields[0]

        # Whether to use the alternate root node index.
        self.alternate_root_node = fields[1]
        if self.alternate_root_node:
            self.root_node, self.root_node_is_leaf = fields[4:6]
        else:
            self.root_node, self.root_node_is_leaf = fields[2:4]
-
-
class BTreeIndex(sbbf02.Block):
    """An index node of the B-tree, mapping key ranges to child blocks."""
    SIGNATURE = 'II'

    __slots__ = ['keys', 'level', 'num_keys', 'values']

    def __init__(self, file):
        header = struct.unpack('>Bii', file.read(9))
        self.level = header[0]
        self.num_keys = header[1]

        # The child list always has one more entry than the key list: the
        # left-most child covers everything that sorts below the first key.
        self.keys = []
        self.values = [header[2]]

        for _ in xrange(self.num_keys):
            self.keys.append(file.read(file.key_size))
            self.values.append(struct.unpack('>i', file.read(4))[0])

    def __str__(self):
        return 'Index(level={}, num_keys={})'.format(self.level, self.num_keys)

    def get_block_for_key(self, key):
        """Return the child block number whose subtree may contain key."""
        return self.values[bisect.bisect_right(self.keys, key)]
-
-
class BTreeLeaf(sbbf02.Block):
    """A leaf node of the B-tree, holding raw record data."""
    SIGNATURE = 'LL'

    __slots__ = ['data', 'next_block']

    def __init__(self, file):
        # The payload excludes the 2-byte signature and the 4-byte
        # next-block pointer at the end of the block.
        self.data = file.read(file.block_size - 6)

        pointer = struct.unpack('>i', file.read(4))[0]
        # A pointer of -1 marks the end of the leaf chain.
        self.next_block = None if pointer == -1 else pointer

    def __str__(self):
        return 'Leaf(next_block={})'.format(self.next_block)
-
-
class BTreeRestoredLeaf(BTreeLeaf):
    """A leaf reconstructed from a free block, used in repair mode."""
    def __init__(self, free_block):
        assert isinstance(free_block, sbbf02.BlockFree), 'Expected free block'

        # The trailing four bytes of the raw data hold the next-block
        # pointer; everything before them is the payload.
        pointer = struct.unpack('>i', free_block.raw_data[-4:])[0]
        self.data = free_block.raw_data[:-4]
        self.next_block = None if pointer == -1 else pointer

    def __str__(self):
        return 'RestoredLeaf(next_block={})'.format(self.next_block)
-
-
class LeafReader(object):
    """A pseudo-reader that will cross over block boundaries if necessary.

    Presents a chain of leaf blocks (linked through next_block) as one
    contiguous stream exposing a read(length) method.
    """
    __slots__ = ['_file', '_leaf', '_offset']

    def __init__(self, file, leaf):
        assert isinstance(file, FileBTreeDB4), 'File is not a FileBTreeDB4 instance'
        assert isinstance(leaf, BTreeLeaf), 'Leaf is not a BTreeLeaf instance'

        self._file = file
        self._leaf = leaf
        # Current read position within the current leaf's data.
        self._offset = 0

    def read(self, length):
        # Read `length` bytes, following the leaf chain when the current
        # leaf is exhausted.
        offset = self._offset

        # Fast path: the request fits entirely within the current leaf.
        if offset + length <= len(self._leaf.data):
            self._offset += length
            return self._leaf.data[offset:offset + length]

        buffer = io.BytesIO()

        # If the file is in repair mode, make the buffer available globally.
        if self._file.repair:
            LeafReader.last_buffer = buffer

        # Exhaust current leaf.
        num_read = buffer.write(self._leaf.data[offset:])
        length -= num_read

        # Keep moving onto the next leaf until we have read the desired amount.
        while length > 0:
            assert self._leaf.next_block is not None, 'Tried to read too far'

            next_block = self._leaf.next_block
            self._leaf = self._file.get_block(next_block)
            # In repair mode, a free block in the chain is reinterpreted as
            # a leaf whose payload can still be recovered.
            if self._file.repair and isinstance(self._leaf, sbbf02.BlockFree):
                self._leaf = BTreeRestoredLeaf(self._leaf)

            assert isinstance(self._leaf, BTreeLeaf), \
                'Leaf pointed to non-leaf %s after reading %d byte(s)' % (
                    next_block, buffer.tell())

            num_read = buffer.write(self._leaf.data[:length])
            length -= num_read

            # The new offset will be how much was read from the current leaf.
            self._offset = num_read

        data = buffer.getvalue()
        buffer.close()

        return data
View
198 starbound/helpers.py.bak
@@ -1,198 +0,0 @@
-import hashlib
-import binascii
-import io
-import os
-import struct
-import zlib
-
-import btreedb4
-import sbvj01
-import sbon
-
-
class KeyStore(btreedb4.FileBTreeDB4):
    """A B-tree database that uses SHA-256 hashes for key lookup.

    """
    def encode_key(self, key):
        # Keys are looked up by the SHA-256 digest of their UTF-8 encoding.
        digest = hashlib.sha256(key.encode('utf-8'))
        return digest.digest()
-
-
class KeyStoreCompressed(KeyStore):
    """A B-tree database that uses SHA-256 hashes for key lookup, and inflates
    the data before returning it.

    """
    def deserialize_data(self, data):
        # Stored values are zlib-deflated; inflate before handing them out.
        inflated = zlib.decompress(data)
        return inflated
-
-
class CelestialChunks(KeyStoreCompressed):
    """Database of celestial chunk documents."""
    def deserialize_data(self, data):
        # Inflate via the parent class, then parse the SBON document.
        raw = super(CelestialChunks, self).deserialize_data(data)
        return sbon.read_document(io.BytesIO(raw))

    def open(self):
        super(CelestialChunks, self).open()
        assert self.identifier == 'Celestial2', 'Unsupported celestial chunks file'
-
-
class Package(KeyStore):
    """A B-tree database representing a package of files.

    """
    DIGEST_KEY = '_digest'
    INDEX_KEY = '_index'

    def __init__(self, path):
        super(Package, self).__init__(path)
        # Lazily-loaded file index; None until get_index() first succeeds.
        self._index = None

    def get_digest(self):
        """Return the package digest value."""
        return self.get(Package.DIGEST_KEY)

    def get_index(self):
        """Return the file index, loading and caching it on first access.

        """
        # Compare against None rather than truthiness so that a legitimately
        # empty index is cached too, instead of being re-read on every call.
        if self._index is not None:
            return self._index

        stream = io.BytesIO(self.get(Package.INDEX_KEY))
        if self.identifier == 'Assets1':
            self._index = sbon.read_string_list(stream)
        elif self.identifier == 'Assets2':
            self._index = sbon.read_string_digest_map(stream)
        # NOTE(review): an unrecognized identifier leaves the cache as None
        # and returns None; confirm whether that should raise instead.

        return self._index
-
-
class VariantDatabase(KeyStoreCompressed):
    """A B-tree database where each key is a SHA-256 hash and the value is
    compressed Starbound Variant data.

    """
    def deserialize_data(self, data):
        # Inflate first, then decode the SBON dynamic value.
        inflated = super(VariantDatabase, self).deserialize_data(data)
        return sbon.read_dynamic(io.BytesIO(inflated))

    def encode_key(self, key):
        # TODO: The key encoding for this may be SBON-encoded SHA-256 hash.
        return super(VariantDatabase, self).encode_key(key)

    def open(self):
        super(VariantDatabase, self).open()
        assert self.identifier == 'JSON1', 'Unsupported variant database'
-
-
class Player(sbvj01.FileSBVJ01):
    """A Starbound character.

    """
    def __init__(self, path):
        super(Player, self).__init__(path)
        # The character's name; populated from the identity data by open().
        self.name = None

    def open(self):
        """Open the file, validate it, and pull out the character name."""
        super(Player, self).open()
        assert self.identifier == 'PlayerEntity', 'Invalid player file'
        identity = self.data['identity']
        self.name = identity['name']
-
-
class World(btreedb4.FileBTreeDB4):
    """A single Starbound world.

    """
    METADATA_KEY = (0, 0, 0)

    # Each region holds a 32x32 grid of tiles.
    TILES_X = 32
    TILES_Y = 32
    TILES_PER_REGION = TILES_X * TILES_Y

    def __init__(self, path):
        super(World, self).__init__(path)
        # Cached metadata document and its version; None until loaded.
        self._metadata = None
        self._metadata_version = None

    def deserialize_data(self, data):
        # World values are stored zlib-deflated.
        return zlib.decompress(data)

    def encode_key(self, key):
        # Keys are (layer, x, y) triples packed big-endian.
        return struct.pack('>BHH', *key)

    def get_entities(self, x, y):
        """Return the list of entity documents for the region at (x, y)."""
        stream = io.BytesIO(self.get((2, x, y)))
        return sbon.read_document_list(stream)

    def get_metadata(self):
        """Return (metadata, version), loading and caching on first access."""
        # Compare against None rather than truthiness so that empty (but
        # valid) metadata is cached instead of being re-read on every call.
        if self._metadata is not None:
            return self._metadata, self._metadata_version

        stream = io.BytesIO(self.get_raw(World.METADATA_KEY))

        # Not sure what these values mean.
        unknown_1, unknown_2 = struct.unpack('>ii', stream.read(8))

        name, version, data = sbon.read_document(stream)
        assert name == 'WorldMetadata', 'Invalid world data'

        self._metadata = data
        self._metadata_version = version

        return data, version

    def get_tiles(self, x, y):
        """Return the tiles of the region at (x, y)."""
        stream = io.BytesIO(self.get((1, x, y)))
        unknown = stream.read(3)
        # There are 1024 (32x32) tiles in a region.
        return [sbon.read_tile(stream) for _ in xrange(World.TILES_PER_REGION)]

    def open(self):
        super(World, self).open()
        assert self.identifier == 'World2', 'Tried to open non-world BTreeDB4 file'
-
-
class FailedWorld(World):
    """A world file whose B-tree may be corrupted; reads run in repair mode."""
    def __init__(self, path):
        super(FailedWorld, self).__init__(path)
        # Repair mode makes LeafReader keep its last buffer available.
        self.repair = True

    def get_metadata(self):
        """Return (metadata, version), falling back on partially-read data."""
        try:
            stream = io.BytesIO(self.get_raw(World.METADATA_KEY))
        except Exception:
            # Fall back on whatever LeafReader managed to buffer before the
            # tree broke. Catch Exception instead of a bare except so that
            # KeyboardInterrupt/SystemExit still propagate.
            stream = btreedb4.LeafReader.last_buffer
            stream.seek(0)

        # Not sure what these values mean.
        unknown_1, unknown_2 = struct.unpack('>ii', stream.read(8))

        name, version, data = sbon.read_document(stream, True)
        assert name == 'WorldMetadata', 'Invalid world data'

        self._metadata = data
        self._metadata_version = version

        return data, version
-
-
def open(path):
    """Open a Starbound file, choosing its type from the file extension.

    Returns an already-opened file object of the appropriate class.
    Raises ValueError if the extension is not recognized.
    """
    _, extension = os.path.splitext(path)
    if extension == '.chunks':
        file = CelestialChunks(path)
    elif extension in ('.clientcontext', '.dat'):
        file = sbvj01.FileSBVJ01(path)
    elif extension == '.db':
        file = VariantDatabase(path)
    elif extension == '.fail':
        file = FailedWorld(path)
    elif extension in ('.modpak', '.pak'):
        file = Package(path)
    elif extension == '.player':
        file = Player(path)
    elif extension in ('.shipworld', '.world'):
        file = World(path)
    else:
        # Include the offending extension so the error is actionable.
        raise ValueError('Unrecognized file extension: %r' % extension)

    file.open()
    return file
View
96 starbound/sbbf02.py.bak
@@ -1,96 +0,0 @@
-import io
-import struct
-
-import filebase
-
-
class BlockMeta(type):
    """Metaclass that registers all subclasses of Block as block types.

    Each direct subclass of Block must define a two-character SIGNATURE
    class attribute; it is recorded in Block.types so Block.read() can
    dispatch on the signature bytes found in the file.
    """
    def __new__(mcs, name, bases, dict):
        cls = type.__new__(mcs, name, bases, dict)
        try:
            # Only register direct subclasses of Block, each with a unique
            # two-character signature.
            if Block in bases:
                sig = dict.get('SIGNATURE')
                assert sig and len(sig) == 2, 'Invalid signature'
                assert sig not in Block.types, 'Duplicate signature'
                Block.types[sig] = cls
        except NameError:
            # The first time this function is called, Block will not be
            # defined.
            pass
        return cls
-
-
class Block(object):
    """Base class for SBBF02 blocks; subclasses register via SIGNATURE."""
    __metaclass__ = BlockMeta

    # Maps two-character signatures to registered Block subclasses.
    types = dict()

    @staticmethod
    def read(file):
        """Read one block from file, returning None for empty block slots."""
        signature = file.read(2)

        # An all-zero signature marks an empty slot.
        if signature == '\x00\x00':
            return None

        block_type = Block.types.get(signature)
        if block_type is None:
            raise ValueError('Unrecognized block type')

        # Return a new instance of the appropriate block type.
        return block_type(file)
-
-
class BlockFree(Block):
    """A free (unused) block, part of the free-block chain."""
    SIGNATURE = 'FF'

    __slots__ = ['next_free_block', 'raw_data']

    def __init__(self, file):
        # Keep everything after the two signature bytes verbatim so repair
        # code can inspect the payload later.
        self.raw_data = file.read(file.block_size - 2)

        pointer = struct.unpack('>i', self.raw_data[:4])[0]
        # A pointer of -1 marks the end of the free-block chain.
        self.next_free_block = None if pointer == -1 else pointer

    def __str__(self):
        return 'Free(next_free_block={})'.format(self.next_free_block)
-
-
class FileSBBF02(filebase.File):
    """An SBBF02 container file: a header followed by fixed-size blocks."""
    def __init__(self, path):
        super(FileSBBF02, self).__init__(path)

        # Raw bytes of the embedded format's user header; set by open().
        self._user_header = None

        # Header fields; all set by open().
        self.block_size = None
        self.header_size = None
        self.free_block_is_dirty = None
        self.free_block = None

    def get_block(self, block):
        """Seek to the given block index and read a Block from it."""
        self._stream.seek(self.header_size + self.block_size * block)
        return Block.read(self)

    def get_user_header(self):
        """Return a fresh stream over the user header bytes."""
        assert self.is_open(), 'File must be open to get user header'
        return io.BytesIO(self._user_header)

    def open(self):
        """Opens the file and reads its header data.

        """
        super(FileSBBF02, self).open()
        stream = self._stream

        assert stream.read(6) == 'SBBF02', 'Invalid file format'

        # Block header data.
        fields = struct.unpack('>ii?i', stream.read(13))
        self.header_size = fields[0]
        self.block_size = fields[1]
        self.free_block_is_dirty = fields[2]
        self.free_block = fields[3]

        # Read the user header data.
        # The generic header occupies the first 32 bytes; the rest, up to
        # header_size, belongs to the embedded format.
        stream.seek(32)
        self._user_header = stream.read(self.header_size - 32)
View
139 starbound/sbon.py.bak
@@ -1,139 +0,0 @@
-import collections
-import struct
-
# An SBON document: a named, versioned piece of dynamic data.
Document = collections.namedtuple('Document', ['name', 'version', 'data'])

# A single world tile, in the field order unpacked by read_tile() below.
Tile = collections.namedtuple('Tile', [
    'foreground_material',
    'foreground_hue_shift',
    'foreground_variant',
    'foreground_sprite',
    'foreground_sprite_hue_shift',
    'background_material',
    'background_hue_shift',
    'background_variant',
    'background_sprite',
    'background_sprite_hue_shift',
    'liquid',
    'liquid_pressure',
    'collision',
    'dungeon',
    'biome',
    'biome_2',
    'indestructible',
])
-
def read_bytes(stream):
    """Read a length-prefixed byte string from stream."""
    return stream.read(read_varlen_number(stream))
-
def read_document(stream, repair=False):
    """Read a Document (name, version, data) from stream.

    When repair is True, errors while decoding the dynamic payload yield
    None values instead of raising.
    """
    name = read_string(stream)

    # Not sure what this byte is; it is 0x01 in known files. A bytes literal
    # keeps the comparison valid on both Python 2 and 3, and the assert
    # message makes failures on corrupt files easier to diagnose.
    assert stream.read(1) == b'\x01', 'Unexpected document marker byte'

    version = struct.unpack('>i', stream.read(4))[0]
    data = read_dynamic(stream, repair)

    return Document(name, version, data)
-
def read_document_list(stream):
    """Read a length-prefixed list of documents from stream."""
    count = read_varlen_number(stream)
    return [read_document(stream) for _ in xrange(count)]
-
def read_dynamic(stream, repair=False):
    """Read one dynamically-typed SBON value from stream.

    The first byte selects the type. When repair is True, decoding errors
    yield None instead of propagating.
    """
    # Avoid shadowing the builtin `type`.
    type_id = ord(stream.read(1))

    try:
        if type_id == 1:
            return None
        elif type_id == 2:
            format = '>d'
        elif type_id == 3:
            format = '?'
        elif type_id == 4:
            return read_varlen_number_signed(stream)
        elif type_id == 5:
            return read_string(stream)
        elif type_id == 6:
            return read_list(stream, repair)
        elif type_id == 7:
            return read_map(stream, repair)
        else:
            raise ValueError('Unknown dynamic type 0x%02X' % type_id)
    except Exception:
        # Catch Exception rather than a bare except so interrupts and
        # system exits still propagate, even in repair mode.
        if repair:
            return None
        raise

    # Anything that passes through is assumed to have set a format to unpack.
    return struct.unpack(format, stream.read(struct.calcsize(format)))[0]
-
def read_fixlen_string(stream, length):
    """Read a fixed-length, NUL-padded UTF-8 string from stream."""
    # Strip the padding with a bytes literal: identical behavior on Python 2
    # (where b'\x00' == '\x00') and correct on Python 3 bytes objects, where
    # rstrip('\x00') would raise TypeError.
    return stream.read(length).rstrip(b'\x00').decode('utf-8')
-
def read_list(stream, repair=False):
    """Read a length-prefixed list of dynamic values from stream."""
    count = read_varlen_number(stream)
    return [read_dynamic(stream, repair) for _ in xrange(count)]
-
def read_map(stream, repair=False):
    """Read a length-prefixed map of string keys to dynamic values."""
    count = read_varlen_number(stream)

    result = {}
    for _ in xrange(count):
        # The key must be consumed from the stream before its value; keep
        # this as an explicit loop so the read order stays guaranteed.
        key = read_string(stream)
        result[key] = read_dynamic(stream, repair)

    return result
-
def read_string(stream):
    """Read a length-prefixed UTF-8 string from stream."""
    raw = read_bytes(stream)
    return raw.decode('utf-8')
-
def read_string_list(stream):
    """Optimized structure that doesn't have a type byte for every item.

    """
    count = read_varlen_number(stream)
    return [read_string(stream) for _ in xrange(count)]
-
def read_string_digest_map(stream):
    """Special structure of string/digest pairs, used by the assets database.

    """
    count = read_varlen_number(stream)

    result = {}
    for _ in xrange(count):
        path = read_string(stream)
        # Skip one byte of whitespace between the path and its digest.
        stream.seek(1, 1)
        result[path] = stream.read(32)

    return result
-
def read_tile(stream):
    """Read one 23-byte tile record from stream and return it as a Tile."""
    fields = struct.unpack('>hBBhBhBBhBBHBhBB?', stream.read(23))
    return Tile(*fields)
-
def read_varlen_number(stream):
    """Read while the most significant bit is set, then put the 7 least
    significant bits of all read bytes together to create a number.

    """
    result = 0
    byte = ord(stream.read(1))
    # A set high bit means another byte follows.
    while byte & 0b10000000:
        result = (result << 7) | (byte & 0b01111111)
        byte = ord(stream.read(1))
    return (result << 7) | byte
-
def read_varlen_number_signed(stream):
    """Read a variable-length number whose least significant bit is the sign."""
    value = read_varlen_number(stream)
    magnitude = value >> 1
    # Least significant bit represents the sign.
    return -magnitude if value & 1 else magnitude
View
20 starbound/sbvj01.py.bak
@@ -1,20 +0,0 @@
-import sbon
-import filebase
-
-
class FileSBVJ01(filebase.File):
    """An SBVJ01 file: a single versioned SBON document."""
    def __init__(self, path):
        super(FileSBVJ01, self).__init__(path)
        # The document payload; identifier, version and data are all set by
        # open().
        self.data = None

    def open(self):
        """Opens the file and reads its contents.

        """
        super(FileSBVJ01, self).open()

        assert self.read(6) == 'SBVJ01', 'Invalid file format'
        self.identifier, self.version, self.data = sbon.read_document(self._stream)

        # Technically, we could already close the file at this point. Need to
        # think about this.

0 comments on commit c46f66f

Please sign in to comment.
Something went wrong with that request. Please try again.