diff --git a/hlna.py b/hlna.py
index 4fa9eb3..d191582 100755
--- a/hlna.py
+++ b/hlna.py
@@ -19,6 +19,18 @@ from rcon.source import Client
 
 home_dir = Path.home()
 
+logging.basicConfig(stream=sys.stderr, level=logging.CRITICAL)
+
+class UnpackException(Exception):
+    pass
+
+class SignatureUnpackException(UnpackException):
+    pass
+
+class CorruptUnpackException(UnpackException):
+    pass
+
+
 def find_file(path):
     """Находим все конфиги в зависимости от пути"""
     arr = next(os.walk(path), (None, None, []))[2]  # [] if no file
@@ -50,6 +62,74 @@ def path_server():
 def hlna():
     pass
 
+def arkit(src, dst):
+    with open(src, 'rb') as f:
+        sigver = struct.unpack('q', f.read(8))[0]
+        unpacked_chunk = f.read(8)
+        packed = f.read(8)
+        unpacked = f.read(8)
+        size_unpacked_chunk = struct.unpack('q', unpacked_chunk)[0]
+        size_packed = struct.unpack('q', packed)[0]
+        size_unpacked = struct.unpack('q', unpacked)[0]
+
+        #Verify the integrity of the Archive Header
+        if sigver == 2653586369:
+            if isinstance(size_unpacked_chunk, int) and isinstance(size_packed, int) and isinstance(size_unpacked, int):
+                logging.info("Archive is valid.")
+                logging.debug(f"Archive header size information. Unpacked Chunk: {size_unpacked_chunk}({unpacked_chunk}) Full Packed: {size_packed}({packed}) Full Unpacked: {size_unpacked}({unpacked})")
+
+                #Obtain the Archive Compression Index
+                compression_index = []
+                size_indexed = 0
+                while size_indexed < size_unpacked:
+                    raw_compressed = f.read(8)
+                    raw_uncompressed = f.read(8)
+                    compressed = struct.unpack('q', raw_compressed)[0]
+                    uncompressed = struct.unpack('q', raw_uncompressed)[0]
+                    compression_index.append((compressed, uncompressed))
+                    size_indexed += uncompressed
+                    logging.debug(f"{len(compression_index)}: {size_indexed}/{size_unpacked} ({compressed}/{uncompressed}) - {raw_compressed} - {raw_uncompressed}")
+
+                if size_unpacked != size_indexed:
+                    msg = f"Header-Index mismatch. Header indicates it should only have {size_unpacked} bytes when uncompressed but the index indicates {size_indexed} bytes."
+                    logging.critical(msg)
+                    raise CorruptUnpackException(msg)
+
+                #Read the actual archive data
+                data = b''
+                read_data = 0
+                for compressed, uncompressed in compression_index:
+                    compressed_data = f.read(compressed)
+                    uncompressed_data = zlib.decompress(compressed_data)
+
+                    #Verify the size of the data is consistent with the archives index
+                    if len(uncompressed_data) == uncompressed:
+                        data += uncompressed_data
+                        read_data += 1
+
+                        #Verify there is only one partial chunk
+                        if len(uncompressed_data) != size_unpacked_chunk and read_data != len(compression_index):
+                            msg = f"Index contains more than one partial chunk: was {len(uncompressed_data)} when the full chunk size is {size_unpacked_chunk}, chunk {read_data}/{len(compression_index)}"
+                            logging.critical(msg)
+                            raise CorruptUnpackException(msg)
+                    else:
+                        msg = f"Uncompressed chunk size is not the same as in the index: was {len(uncompressed_data)} but should be {uncompressed}."
+                        logging.critical(msg)
+                        raise CorruptUnpackException(msg)
+            else:
+                msg = f"Data types in the headers should be int's. Size Types: unpacked_chunk({type(size_unpacked_chunk)}), packed({type(size_packed)}), unpacked({type(size_unpacked)})"
+                logging.critical(msg)
+                raise CorruptUnpackException(msg)
+        else:
+            msg = "The signature and format version is incorrect. Signature was {} should be 2653586369.".format(sigver)
+            logging.critical(msg)
+            raise SignatureUnpackException(msg)
+
+    #Write the extracted data to disk
+    with open(dst, 'wb') as f:
+        f.write(data)
+    logging.info("Archive has been extracted.")
+
 
 def create_dir(directory):
     """Проверка и создание директории"""
@@ -465,87 +545,6 @@ def modextract(id_mod, id_game_workshop):
     os.makedirs(dir_ark_mods)
 
 
-logging.basicConfig(stream=sys.stderr, level=logging.CRITICAL)
-
-class UnpackException(Exception):
-    pass
-
-class SignatureUnpackException(UnpackException):
-    pass
-
-class CorruptUnpackException(UnpackException):
-    pass
-
-
-def arkit(src, dst):
-    with open(src, 'rb') as f:
-        sigver = struct.unpack('q', f.read(8))[0]
-        unpacked_chunk = f.read(8)
-        packed = f.read(8)
-        unpacked = f.read(8)
-        size_unpacked_chunk = struct.unpack('q', unpacked_chunk)[0]
-        size_packed = struct.unpack('q', packed)[0]
-        size_unpacked = struct.unpack('q', unpacked)[0]
-
-        #Verify the integrity of the Archive Header
-        if sigver == 2653586369:
-            if isinstance(size_unpacked_chunk, int) and isinstance(size_packed, int) and isinstance(size_unpacked, int):
-                logging.info("Archive is valid.")
-                logging.debug(f"Archive header size information. Unpacked Chunk: {size_unpacked_chunk}({unpacked_chunk}) Full Packed: {size_packed}({packed}) Full Unpacked: {size_unpacked}({unpacked})")
-
-                #Obtain the Archive Compression Index
-                compression_index = []
-                size_indexed = 0
-                while size_indexed < size_unpacked:
-                    raw_compressed = f.read(8)
-                    raw_uncompressed = f.read(8)
-                    compressed = struct.unpack('q', raw_compressed)[0]
-                    uncompressed = struct.unpack('q', raw_uncompressed)[0]
-                    compression_index.append((compressed, uncompressed))
-                    size_indexed += uncompressed
-                    logging.debug(f"{len(compression_index)}: {size_indexed}/{size_unpacked} ({compressed}/{uncompressed}) - {raw_compressed} - {raw_uncompressed}")
-
-                if size_unpacked != size_indexed:
-                    msg = f"Header-Index mismatch. Header indicates it should only have {size_unpacked} bytes when uncompressed but the index indicates {size_indexed} bytes."
-                    logging.critical(msg)
-                    raise CorruptUnpackException(msg)
-
-                #Read the actual archive data
-                data = b''
-                read_data = 0
-                for compressed, uncompressed in compression_index:
-                    compressed_data = f.read(compressed)
-                    uncompressed_data = zlib.decompress(compressed_data)
-
-                    #Verify the size of the data is consistent with the archives index
-                    if len(uncompressed_data) == uncompressed:
-                        data += uncompressed_data
-                        read_data += 1
-
-                        #Verify there is only one partial chunk
-                        if len(uncompressed_data) != size_unpacked_chunk and read_data != len(compression_index):
-                            msg = f"Index contains more than one partial chunk: was {len(uncompressed_data)} when the full chunk size is {size_unpacked_chunk}, chunk {read_data}/{len(compression_index)}"
-                            logging.critical(msg)
-                            raise CorruptUnpackException(msg)
-                    else:
-                        msg = f"Uncompressed chunk size is not the same as in the index: was {len(uncompressed_data)} but should be {uncompressed}."
-                        logging.critical(msg)
-                        raise CorruptUnpackException(msg)
-            else:
-                msg = f"Data types in the headers should be int's. Size Types: unpacked_chunk({type(size_unpacked_chunk)}), packed({type(size_packed)}), unpacked({type(size_unpacked)})"
-                logging.critical(msg)
-                raise CorruptUnpackException(msg)
-        else:
-            msg = "The signature and format version is incorrect. Signature was {} should be 2653586369.".format(sigver)
-            logging.critical(msg)
-            raise SignatureUnpackException(msg)
-
-    #Write the extracted data to disk
-    with open(dst, 'wb') as f:
-        f.write(data)
-    logging.info("Archive has been extracted.")
-
-
 @hlna.command()
 @click.option("-m", required=True, help="Название Сервера")
 @click.option("-e/-d", default=True, help="-e активировать карты, -d деактивировать")
@@ -805,8 +804,5 @@ create_dir(dir_config)
 create_dir(dir_logs)
 
 
-
 if __name__ == '__main__':
     hlna()
-
-
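
Reviewer note, not part of the patch: this change only moves arkit() and its exception classes earlier in hlna.py; behaviour is unchanged. The function reads a .z archive header (signature plus chunk/total sizes), walks the compression index, inflates each zlib chunk and writes the result to dst. A minimal usage sketch, assuming hypothetical file paths and the imports (struct, zlib, logging) already present in hlna.py:

    # Hypothetical example: unpack one compressed Workshop file.
    # The paths below are placeholders, not values used by the patch.
    try:
        arkit("mods/123456789/example.uasset.z",   # assumed compressed input
              "mods/123456789/example.uasset")     # assumed extracted output
    except SignatureUnpackException:
        print("Not a valid .z archive (bad signature).")
    except CorruptUnpackException as err:
        print(f"Archive is damaged: {err}")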