mirror of https://github.com/spicyjpeg/573in1.git
synced 2025-03-01 07:20:42 +01:00

Bump to 1.0.3, migrate to custom archive format

This commit is contained in:
parent efa22c8372
commit ccba05c070
@@ -19,7 +19,7 @@ set(CMAKE_TOOLCHAIN_FILE "${CMAKE_CURRENT_LIST_DIR}/cmake/toolchain.cmake")
 project(
 	573in1
 	LANGUAGES C CXX ASM
-	VERSION 1.0.2
+	VERSION 1.0.3
 	DESCRIPTION "Konami System 573 maintenance tool"
 	HOMEPAGE_URL "https://github.com/spicyjpeg/573in1"
 )
@@ -77,6 +77,7 @@ set(
 	src/common/fs/file.cpp
 	src/common/fs/iso9660.cpp
 	src/common/fs/misc.cpp
+	src/common/fs/package.cpp
 	src/common/fs/zip.cpp
 )
 set(
@@ -204,7 +205,7 @@ function(addPS1Executable name address stackTop)
 endfunction()

 # IMPORTANT: these addresses assume the boot executable's size (including code,
-# heap and stack allocations as well as the resource archive, but excluding the
+# heap and stack allocations as well as the resource package, but excluding the
 # executable header) is 576 KB (0x90000 bytes) or less, and that each launcher's
 # size is 12 KB (0x3000 bytes) or less.
 addPS1Executable(main 800a0000 801dfff0 ${mainSources})
@@ -221,7 +222,7 @@ target_link_libraries(bootStub PUBLIC subExecutableFlags)
 target_link_libraries(launcher801fd000 PUBLIC launcher)
 target_link_libraries(launcher803fd000 PUBLIC launcher)

-## Boot stubs and resource archives
+## Boot stubs and resource packages

 file(GLOB_RECURSE assetList RELATIVE "${PROJECT_SOURCE_DIR}" assets/*)
@@ -233,24 +234,25 @@ function(addBuildVariant name resourceName)
 	add_custom_command(
 		COMMAND
 			"${Python3_EXECUTABLE}"
-			"${PROJECT_SOURCE_DIR}/tools/buildResourceArchive.py"
+			"${PROJECT_SOURCE_DIR}/tools/buildResourcePackage.py"
+			-a 64
 			${resourceName}.json
-			${resourceName}.zip
-		OUTPUT ${resourceName}.zip
+			${resourceName}.pkg
+		OUTPUT ${resourceName}.pkg
 		DEPENDS
 			${resourceName}.json
 			${assetList}
 			main.psexe
 			launcher801fd000.psexe
 			launcher803fd000.psexe
-		COMMENT "Building ${name} resource archive"
+		COMMENT "Building ${name} resource package"
 		VERBATIM
 	)

 	addPS1Executable(${name} 80010000 0)
 	addBinaryFileWithSize(
-		${name} _resourceArchive _resourceArchiveLength
-		"${PROJECT_BINARY_DIR}/${resourceName}.zip"
+		${name} _resourcePackage _resourcePackageLength
+		"${PROJECT_BINARY_DIR}/${resourceName}.pkg"
 	)

 	target_link_libraries(${name} PUBLIC bootStub)
@@ -84,7 +84,7 @@ set(
 set(
 	ENABLE_DUMMY_CART_DRIVER OFF
 	CACHE BOOL "Enable support for simulating a dummy security cartridge (if \
-data/test.573 is present in the resource archive)"
+data/dummy.dmp is present in the resource package)"
 )
 set(
 	ENABLE_X76F041_CART_DRIVER ON
@@ -3,10 +3,9 @@
 	"resources": [
 		{
 			"type": "binary",
-			"name": "binaries/main.psexe.lz4",
-			"source": "${PROJECT_BINARY_DIR}/main.psexe",
-			"compression": "lz4"
+			"name": "binaries/main.psexe",
+			"source": "${PROJECT_BINARY_DIR}/main.psexe"
 		},
 		{
 			"type": "binary",
@@ -59,52 +58,52 @@
 			"source": "${PROJECT_SOURCE_DIR}/assets/textures/font.json"
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/startup.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/startup.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/about.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/about.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/alert.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/alert.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/move.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/move.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/enter.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/enter.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/exit.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/exit.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/click.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/click.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/screenshot.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/screenshot.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "text",
@@ -112,10 +111,9 @@
 			"source": "${PROJECT_BINARY_DIR}/about.txt"
 		},
 		{
 			"type": "palette",
 			"name": "assets/palette.dat",
-			"source": "${PROJECT_SOURCE_DIR}/assets/palette.json",
-			"compression": "none"
+			"source": "${PROJECT_SOURCE_DIR}/assets/palette.json"
 		},

 		{
@@ -3,10 +3,9 @@
 	"resources": [
 		{
 			"type": "binary",
-			"name": "binaries/main.psexe.lz4",
-			"source": "${PROJECT_BINARY_DIR}/main.psexe",
-			"compression": "lz4"
+			"name": "binaries/main.psexe",
+			"source": "${PROJECT_BINARY_DIR}/main.psexe"
 		},
 		{
 			"type": "binary",
@@ -59,46 +58,46 @@
 			"source": "${PROJECT_SOURCE_DIR}/assets/textures/font.json"
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/startup.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/startup.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/alert.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/alert.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/move.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/move.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/enter.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/enter.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/exit.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/exit.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/click.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/click.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "binary",
 			"name": "assets/sounds/screenshot.vag",
 			"source": "${PROJECT_SOURCE_DIR}/assets/sounds/screenshot.vag",
-			"compression": "none"
+			"compLevel": 0
 		},
 		{
 			"type": "text",
@@ -106,10 +105,9 @@
 			"source": "${PROJECT_BINARY_DIR}/about.txt"
 		},
 		{
 			"type": "palette",
 			"name": "assets/palette.dat",
-			"source": "${PROJECT_SOURCE_DIR}/assets/palette.json",
-			"compression": "none"
+			"source": "${PROJECT_SOURCE_DIR}/assets/palette.json"
 		},

 		{
@@ -9,7 +9,7 @@
 	"properties": {
 		"resources": {
-			"title": "Resource archive entries",
+			"title": "Resource package entries",
 			"type": "array",

 			"uniqueItems": true,
@@ -39,19 +39,12 @@
 		},
 		"name": {
 			"title": "Entry path",
-			"description": "Full path of the entry within the resource archive.",
+			"description": "Full path of the entry within the resource package.",
 			"type": "string"
 		},
-		"compression": {
-			"title": "Compression type",
-			"description": "Must be 'none', 'deflate' or 'lz4'. If unspecified, defaults to 'deflate' unless manually overridden on the command line.",
-			"type": "string",
-
-			"enum": [ "none", "deflate", "lz4" ]
-		},
-		"compressLevel": {
+		"compLevel": {
 			"title": "Compression level",
-			"description": "DEFLATE or LZ4 compression level in 0-9 range, ignored for uncompressed files. If unspecified, defaults to 9 unless manually overridden on the command line.",
+			"description": "LZ4 compression level in 1-9 range, or 0 to store the file uncompressed. If unspecified, defaults to 9 unless manually overridden on the command line.",
 			"type": "integer",

 			"minimum": 0,
@@ -81,10 +74,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "pattern": "^text|binary$" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"source": {
 					"title": "Path to source file",
@@ -100,10 +92,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "const": "tim" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"source": {
 					"title": "Path to source file",
@@ -181,10 +172,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "pattern": "^metrics|palette|strings|db$" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"source": {
 					"title": "Path to source file",
@@ -200,10 +190,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "const": "metrics" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"metrics": {
 					"title": "Font metrics",
@@ -217,10 +206,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "const": "palette" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"palette": {
 					"title": "Color entries",
@@ -234,10 +222,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "const": "strings" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"strings": {
 					"title": "String table",
@@ -251,10 +238,9 @@
 			"additionalProperties": false,

 			"properties": {
 				"type": { "const": "db" },
 				"name": { "type": "string" },
-				"compression": {},
-				"compressLevel": {},
+				"compLevel": {},

 				"strings": {
 					"title": "Game database",
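
Putting the schema changes together: an entry now needs only "type", "name" and "source". "compLevel" is optional and defaults to 9 (maximum LZ4 compression) unless overridden with the tool's -c flag, while "compLevel": 0 stores the file as-is, as in this entry taken from the resource lists above:

{
	"type": "binary",
	"name": "assets/sounds/startup.vag",
	"source": "${PROJECT_SOURCE_DIR}/assets/sounds/startup.vag",
	"compLevel": 0
}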
@@ -16,78 +16,64 @@

 #include <stddef.h>
 #include <stdint.h>
+#include "common/fs/package.hpp"
+#include "common/util/hash.hpp"
 #include "common/util/misc.hpp"
 #include "common/util/string.hpp"
 #include "common/util/templates.hpp"
 #include "common/io.hpp"
 #include "ps1/system.h"

-extern "C" const uint8_t _resourceArchive[];
-extern "C" const size_t _resourceArchiveLength;
+extern "C" const uint8_t _resourcePackage[];
+extern "C" const size_t _resourcePackageLength;

 static char _ptrArg[]{ "resource.ptr=xxxxxxxx\0" };
 static char _lengthArg[]{ "resource.length=xxxxxxxx\0" };

-class [[gnu::packed]] ZIPFileHeader {
-public:
-	uint32_t magic;
-	uint16_t version, flags, compType;
-	uint16_t fileTime, fileDate;
-	uint32_t crc, compLength, uncompLength;
-	uint16_t nameLength, extraLength;
-
-	inline bool validateMagic(void) const {
-		return (magic == util::concat4('P', 'K', 0x03, 0x04));
-	}
-	inline size_t getHeaderLength(void) const {
-		return sizeof(ZIPFileHeader) + nameLength + extraLength;
-	}
-};
-
 int main(int argc, const char **argv) {
 	disableInterrupts();
 	io::init();

-	// Parse the header of the archive's first entry manually. This avoids
-	// pulling in miniz and bloating the binary.
-	// NOTE: this assumes the main executable is always the first file in the
-	// archive.
-	auto zipHeader  = reinterpret_cast<const ZIPFileHeader *>(_resourceArchive);
-	auto ptr        = &_resourceArchive[zipHeader->getHeaderLength()];
-	auto compLength = zipHeader->compLength;
-
-#if 0
-	assert(zipHeader->validateMagic());
-	assert(!zipHeader->compType);
-#endif
+	auto header = \
+		reinterpret_cast<const fs::PackageIndexHeader *>(_resourcePackage);
+	auto entry  = util::getHashTableEntry(
+		reinterpret_cast<const fs::PackageIndexEntry *>(header + 1),
+		header->numBuckets,
+		"binaries/main.psexe"_h
+	);
+	auto ptr    = &_resourcePackage[entry->offset];

 	// Decompress only the header to determine where to place the binary in
 	// memory, then rerun the decompressor on the entire executable.
 	util::ExecutableHeader exeHeader;

 	util::decompressLZ4(
-		reinterpret_cast<uint8_t *>(&exeHeader), ptr, sizeof(exeHeader),
-		compLength
+		reinterpret_cast<uint8_t *>(&exeHeader),
+		ptr,
+		sizeof(exeHeader),
+		entry->compLength
 	);

 	auto offset = exeHeader.textOffset - util::EXECUTABLE_BODY_OFFSET;
 	auto length = exeHeader.textLength + util::EXECUTABLE_BODY_OFFSET;

 	util::decompressLZ4(
-		reinterpret_cast<uint8_t *>(offset), ptr, length, compLength
+		reinterpret_cast<uint8_t *>(offset),
+		ptr,
+		entry->uncompLength,
+		entry->compLength
 	);
 	io::clearWatchdog();

 	util::ExecutableLoader loader(
-		exeHeader.getEntryPoint(), exeHeader.getInitialGP(),
+		exeHeader.getEntryPoint(),
+		exeHeader.getInitialGP(),
 		exeHeader.getStackPtr()
 	);

 	util::hexValueToString(
-		&_ptrArg[13], reinterpret_cast<uint32_t>(_resourceArchive), 8
+		&_ptrArg[13], reinterpret_cast<uint32_t>(_resourcePackage), 8
 	);
 	loader.addArgument(_ptrArg);
-	util::hexValueToString(&_lengthArg[16], _resourceArchiveLength, 8);
+	util::hexValueToString(&_lengthArg[16], _resourcePackageLength, 8);
 	loader.addArgument(_lengthArg);

 #ifdef ENABLE_ARGV_PARSER
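
The boot stub above relies on a property of LZ4 block streams: sequences decode strictly front to back, so a decompressor can stop as soon as it has produced the first sizeof(exeHeader) output bytes, then be rerun over the same input for the whole executable. A minimal Python model of that idea, following the standard LZ4 block format (this is an illustration, not the project's actual util::decompressLZ4):

def decompressLZ4Prefix(src: bytes, outLength: int) -> bytearray:
	# Decode LZ4 block sequences until outLength output bytes exist.
	out = bytearray()
	pos = 0

	while (len(out) < outLength) and (pos < len(src)):
		token = src[pos]; pos += 1

		# Literal run (high nibble; 15 means extension bytes follow).
		litLength = token >> 4
		if litLength == 15:
			while True:
				extra = src[pos]; pos += 1
				litLength += extra
				if extra != 255:
					break
		out += src[pos:pos + litLength]; pos += litLength

		if (pos >= len(src)) or (len(out) >= outLength):
			break # The last sequence carries no match part

		# Match copy: little endian offset, may overlap the output.
		offset = src[pos] | (src[pos + 1] << 8); pos += 2
		matchLength = (token & 15) + 4
		if (token & 15) == 15:
			while True:
				extra = src[pos]; pos += 1
				matchLength += extra
				if extra != 255:
					break
		for _ in range(matchLength):
			out.append(out[-offset])

	return out[:outLength]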
@@ -116,7 +116,7 @@ size_t Provider::loadTIM(gpu::Image &output, const char *path) {
 	if (!loadData(data, path))
 		return 0;

-	auto header = data.as<const gpu::TIMHeader>();
+	auto header = data.as<gpu::TIMHeader>();

 	if (output.initFromTIMHeader(*header)) {
 		auto image = header->getImage();
@@ -141,7 +141,7 @@ size_t Provider::loadBS(
 	if (!loadData(data, path))
 		return 0;

-	size_t bsLength = data.as<const mdec::BSHeader>()->getUncompLength();
+	size_t bsLength = data.as<mdec::BSHeader>()->getUncompLength();

 	mdec::BSDecompressor decompressor;
 	util::Data buffer;
@@ -189,7 +189,7 @@ size_t Provider::loadVAG(
 	if (!loadData(data, path))
 		return 0;

-	auto header = data.as<const spu::VAGHeader>();
+	auto header = data.as<spu::VAGHeader>();

 	if (output.initFromVAGHeader(*header, offset))
 		loadLength = spu::upload(
@@ -285,22 +285,15 @@ size_t Provider::saveVRAMBMP(const gpu::RectWH &rect, const char *path) {
 static const char _ERROR_STRING[]{ "missingno" };

 const char *StringTable::get(util::Hash id) const {
 	if (!ptr)
 		return _ERROR_STRING;

-	auto blob  = as<const char>();
-	auto table = as<const StringTableEntry>();
-	auto index = id % STRING_TABLE_BUCKET_COUNT;
-
-	do {
-		auto entry = &table[index];
-		index      = entry->chained;
-
-		if (entry->hash == id)
-			return &blob[entry->offset];
-	} while (index);
-
-	return _ERROR_STRING;
+	auto header = as<StringTableHeader>();
+	auto blob   = as<char>();
+	auto entry  = util::getHashTableEntry(
+		reinterpret_cast<const StringTableEntry *>(header + 1),
+		header->numBuckets,
+		id
+	);
+
+	return entry ? &blob[entry->offset] : _ERROR_STRING;
 }

 size_t StringTable::format(
@@ -32,16 +32,18 @@ static constexpr size_t MAX_PATH_LENGTH = 256;

 // The first 4 of these map to the FS_* enum used by FatFs.
 enum FileSystemType {
-	NONE       = 0,
-	FAT12      = 1,
-	FAT16      = 2,
-	FAT32      = 3,
-	EXFAT      = 4,
-	ISO9660    = 5,
-	ZIP_MEMORY = 6,
-	ZIP_FILE   = 7,
-	HOST       = 8,
-	VFS        = 9
+	NONE           = 0,
+	FAT12          = 1,
+	FAT16          = 2,
+	FAT32          = 3,
+	EXFAT          = 4,
+	ISO9660        = 5,
+	PACKAGE_MEMORY = 6,
+	PACKAGE_FILE   = 7,
+	ZIP_MEMORY     = 8,
+	ZIP_FILE       = 9,
+	HOST           = 10,
+	VFS            = 11
 };

 // These are functionally equivalent to the FA_* flags used by FatFs.
@@ -158,12 +160,22 @@ public:

 /* String table parser */

-static constexpr size_t STRING_TABLE_BUCKET_COUNT = 256;
-
-struct StringTableEntry {
+struct StringTableHeader {
 public:
-	uint32_t hash;
-	uint16_t offset, chained;
+	uint16_t numBuckets, numEntries;
+};
+
+class StringTableEntry {
+public:
+	util::Hash id;
+	uint16_t   offset, chained;
+
+	inline util::Hash getHash(void) const {
+		return id;
+	}
+	inline uint16_t getChained(void) const {
+		return chained;
+	}
 };

 class StringTable : public util::Data {
src/common/fs/package.cpp — new file (256 lines)
@@ -0,0 +1,256 @@
/*
 * 573in1 - Copyright (C) 2022-2024 spicyjpeg
 *
 * 573in1 is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * 573in1 is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * 573in1. If not, see <https://www.gnu.org/licenses/>.
 */

#include <stddef.h>
#include <stdint.h>
#include "common/fs/file.hpp"
#include "common/fs/package.hpp"
#include "common/util/hash.hpp"
#include "common/util/log.hpp"
#include "common/util/string.hpp"

namespace fs {

/* Package filesystem provider */

const PackageIndexEntry *PackageProvider::_getEntry(const char *path) const {
	// Any leading path separators must be stripped manually.
	while ((*path == '/') || (*path == '\\'))
		path++;

	auto header = _index.as<PackageIndexHeader>();

	return util::getHashTableEntry(
		reinterpret_cast<const PackageIndexEntry *>(header + 1),
		header->numBuckets,
		util::hash(path)
	);
}

bool PackageProvider::init(File *file) {
	if (type)
		return false;

	_file = file;

	// Parse the package's header to obtain the size of the index, then reread
	// the entire index section.
	PackageIndexHeader header;

	if (file->read(&header, sizeof(header)) < sizeof(header))
		return false;

	size_t indexLength = header.indexLength;

	if (!_index.allocate(indexLength))
		return false;

	if (
		file->seek(0) ||
		(file->read(_index.ptr, indexLength) != indexLength)
	) {
		_index.destroy();
		return false;
	}

	type     = PACKAGE_FILE;
	capacity = file->size - indexLength;

	LOG_FS("mounted package file");
	return true;
}

bool PackageProvider::init(const void *packageData, size_t length) {
	if (type)
		return false;

	auto header = reinterpret_cast<const PackageIndexHeader *>(packageData);

	// TODO: set a flag to prevent output deallocation
	_file         = nullptr;
	_index.ptr    = reinterpret_cast<void *>(uintptr_t(packageData));
	_index.length = header->indexLength;

	type     = PACKAGE_MEMORY;
	capacity = 0;

	LOG_FS("mounted package: 0x%08x", packageData);
	return true;
}

void PackageProvider::close(void) {
	if (!type)
		return;

	_index.destroy();

#if 0
	if (_file) {
		_file->close();
		delete _file;
	}
#endif

	type     = NONE;
	capacity = 0;
}

bool PackageProvider::getFileInfo(FileInfo &output, const char *path) {
	auto blob  = _index.as<char>();
	auto entry = _getEntry(path);

	if (!entry)
		return false;

#if 0
	const char *ptr = __builtin_strrchr(&blob[entry->nameOffset], '/');

	if (ptr)
		ptr++;
	else
		ptr = &blob[entry->nameOffset];
#else
	auto ptr = &blob[entry->nameOffset];
#endif

	__builtin_strncpy(output.name, ptr, sizeof(output.name));
	output.size       = entry->uncompLength;
	output.attributes = READ_ONLY | ARCHIVE;
	return true;
}

size_t PackageProvider::loadData(util::Data &output, const char *path) {
	auto blob  = _index.as<uint8_t>();
	auto entry = _getEntry(path);

	if (!entry)
		return 0;

	auto   offset       = entry->offset;
	size_t compLength   = entry->compLength;
	size_t uncompLength = entry->uncompLength;

	if (_file) {
		if (compLength) {
			// Package on disk, file compressed
			auto margin = util::getLZ4InPlaceMargin(compLength);

			if (!output.allocate(uncompLength + margin))
				return 0;

			auto compPtr = &output.as<uint8_t>()[margin];

			if (
				(_file->seek(offset) != offset) ||
				(_file->read(compPtr, compLength) < compLength)
			) {
				output.destroy();
				return 0;
			}

			util::decompressLZ4(
				output.as<uint8_t>(),
				compPtr,
				uncompLength,
				compLength
			);
		} else {
			// Package on disk, file not compressed
			if (!output.allocate(uncompLength))
				return 0;

			if (_file->seek(offset) != offset) {
				output.destroy();
				return 0;
			}

			return _file->read(output.ptr, uncompLength);
		}
	} else {
		if (compLength) {
			// Package in RAM, file compressed
			if (!output.allocate(uncompLength))
				return 0;

			util::decompressLZ4(
				output.as<uint8_t>(),
				&blob[offset],
				uncompLength,
				compLength
			);
		} else {
			// Package in RAM, file not compressed (return in-place pointer)
			// TODO: set a flag to prevent output deallocation
			output.ptr    = &blob[offset];
			output.length = uncompLength;
		}
	}

	return uncompLength;
}

size_t PackageProvider::loadData(void *output, size_t length, const char *path) {
	auto blob  = _index.as<uint8_t>();
	auto entry = _getEntry(path);

	if (!entry)
		return 0;

	auto   offset       = entry->offset;
	size_t compLength   = entry->compLength;
	size_t uncompLength = util::min(length, size_t(entry->uncompLength));

	if (_file) {
		if (_file->seek(offset) != offset)
			return 0;

		if (compLength) {
			// Package on disk, file compressed
			auto margin  = util::getLZ4InPlaceMargin(compLength);
			auto compPtr = &reinterpret_cast<uint8_t *>(output)[margin];

			if (_file->read(compPtr, compLength) < compLength)
				return 0;

			util::decompressLZ4(
				reinterpret_cast<uint8_t *>(output),
				compPtr,
				uncompLength,
				compLength
			);
		} else {
			// Package on disk, file not compressed
			if (_file->read(output, uncompLength) < uncompLength)
				return 0;
		}
	} else {
		if (compLength)
			// Package in RAM, file compressed
			util::decompressLZ4(
				reinterpret_cast<uint8_t *>(output),
				&blob[offset],
				uncompLength,
				compLength
			);
		else
			// Package in RAM, file not compressed
			__builtin_memcpy(output, &blob[offset], uncompLength);
	}

	return uncompLength;
}

}
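
Note how loadData reads compressed data into the tail of the output buffer, offset by util::getLZ4InPlaceMargin(compLength), then decompresses toward the front of the same buffer. This works because LZ4 consumes input no faster than it produces output, provided the input starts far enough ahead. The project's margin formula is not shown in this diff; for reference, upstream lz4.h expresses the equivalent guarantee as:

def lz4InPlaceMargin(compLength: int) -> int:
	# Mirrors LZ4_DECOMPRESS_INPLACE_MARGIN from lz4.h; the project's
	# util::getLZ4InPlaceMargin may differ.
	return (compLength >> 8) + 32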
src/common/fs/package.hpp — new file (71 lines)
@@ -0,0 +1,71 @@
/*
 * 573in1 - Copyright (C) 2022-2024 spicyjpeg
 *
 * 573in1 is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * 573in1 is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * 573in1. If not, see <https://www.gnu.org/licenses/>.
 */

#pragma once

#include <stddef.h>
#include <stdint.h>
#include "common/fs/file.hpp"
#include "common/util/hash.hpp"
#include "common/util/templates.hpp"

namespace fs {

/* Package index parser */

struct PackageIndexHeader {
public:
	uint32_t indexLength;
	uint16_t numBuckets, numEntries;
};

class PackageIndexEntry {
public:
	util::Hash id;
	uint16_t   nameOffset, chained;
	uint64_t   offset;
	uint32_t   compLength, uncompLength;

	inline util::Hash getHash(void) const {
		return id;
	}
	inline uint16_t getChained(void) const {
		return chained;
	}
};

/* Package filesystem provider */

// The current implementation only supports loading an entire file at once.
class PackageProvider : public Provider {
private:
	util::Data _index;
	File       *_file;

	const PackageIndexEntry *_getEntry(const char *path) const;

public:
	bool init(File *file);
	bool init(const void *packageData, size_t length);
	void close(void);

	bool getFileInfo(FileInfo &output, const char *path);

	size_t loadData(util::Data &output, const char *path);
	size_t loadData(void *output, size_t length, const char *path);
};

}
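
These two structures map byte-for-byte onto the layouts the Python generator in tools/common/assets.py packs ("< I 2H" for the header, "< I 2H Q 2I" for each entry). A sketch of reading the index back in Python, for illustration only (the all-zero skip for empty bucket slots is an assumption based on how the generator pads them):

import struct

_HEADER = struct.Struct("< I 2H")      # indexLength, numBuckets, numEntries
_ENTRY  = struct.Struct("< I 2H Q 2I") # id, nameOffset, chained, offset,
                                       # compLength, uncompLength

def readPackageIndex(pkg: bytes) -> dict[str, tuple[int, int, int]]:
	indexLength, numBuckets, numEntries = _HEADER.unpack_from(pkg, 0)
	files = {}

	for i in range(numEntries):
		id_, nameOffset, chained, offset, compLength, uncompLength = \
			_ENTRY.unpack_from(pkg, _HEADER.size + _ENTRY.size * i)
		if not (id_ or offset or uncompLength):
			continue # Empty bucket slot (packed as zero bytes)

		# Entry names are null-terminated strings stored within the index.
		end  = pkg.index(b"\0", nameOffset)
		name = pkg[nameOffset:end].decode("ascii")

		files[name] = (offset, compLength, uncompLength)

	return files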
@@ -15,6 +15,7 @@
 */

 #include <stdint.h>
+#include "common/util/hash.hpp"
 #include "common/util/string.hpp"
 #include "common/gpu.hpp"
 #include "common/gpufont.hpp"
@@ -28,18 +29,19 @@ CharacterSize FontMetrics::get(util::UTF8CodePoint id) const {
 	if (!ptr)
 		return 0;

-	auto table = reinterpret_cast<const FontMetricsEntry *>(getHeader() + 1);
-	auto index = id % METRICS_BUCKET_COUNT;
-
-	do {
-		auto entry = &table[index];
-		index      = entry->getChained();
-
-		if (entry->getCodePoint() == id)
-			return entry->size;
-	} while (index);
-
-	return (id == FONT_INVALID_CHAR) ? 0 : get(FONT_INVALID_CHAR);
+	auto header = as<FontMetricsHeader>();
+	auto entry  = util::getHashTableEntry(
+		reinterpret_cast<const FontMetricsEntry *>(header + 1),
+		header->numBuckets,
+		id
+	);
+
+	if (entry)
+		return entry->size;
+	else if (id != FONT_INVALID_CHAR)
+		return get(FONT_INVALID_CHAR);
+	else
+		return 0;
 }

 /* Font class */
@@ -53,7 +55,7 @@ void Font::draw(

 	ctx.setTexturePage(image.texpage);

-	auto header = metrics.getHeader();
+	auto header = metrics.as<FontMetricsHeader>();

 	int x      = rect.x1;
 	int clipX1 = clipRect.x1;
@@ -153,7 +155,7 @@ void Font::draw(
 }

 int Font::getCharacterWidth(util::UTF8CodePoint ch) const {
-	auto header = metrics.getHeader();
+	auto header = metrics.as<FontMetricsHeader>();

 	switch (ch) {
 		case 0:
@@ -180,7 +182,7 @@ void Font::getStringBounds(
 	if (!str || !metrics.ptr)
 		return;

-	auto header = metrics.getHeader();
+	auto header = metrics.as<FontMetricsHeader>();

 	int x = rect.x1, maxX = rect.x1, y = rect.y1;

@@ -259,7 +261,7 @@ int Font::getStringWidth(const char *str, bool breakOnSpace) const {
 	if (!str || !metrics.ptr)
 		return 0;

-	auto header = metrics.getHeader();
+	auto header = metrics.as<FontMetricsHeader>();

 	int width = 0, maxWidth = 0;
@@ -18,6 +18,7 @@

 #include <stddef.h>
 #include <stdint.h>
+#include "common/util/hash.hpp"
 #include "common/util/string.hpp"
 #include "common/util/templates.hpp"
 #include "common/gpu.hpp"
@@ -26,7 +27,6 @@ namespace gpu {

 /* Font metrics class */

-static constexpr size_t METRICS_BUCKET_COUNT     = 256;
 static constexpr size_t METRICS_CODE_POINT_BITS = 21;

 static constexpr util::UTF8CodePoint FONT_INVALID_CHAR = 0xfffd;
@@ -35,8 +35,9 @@ using CharacterSize = uint32_t;

 struct FontMetricsHeader {
 public:
-	uint8_t spaceWidth, tabWidth, lineHeight;
-	int8_t  baselineOffset;
+	uint8_t  spaceWidth, tabWidth, lineHeight;
+	int8_t   baselineOffset;
+	uint16_t numBuckets, numEntries;
 };

 class FontMetricsEntry {
@@ -44,7 +45,7 @@ public:
 	uint32_t      codePoint;
 	CharacterSize size;

-	inline util::UTF8CodePoint getCodePoint(void) const {
+	inline util::Hash getHash(void) const {
 		return codePoint & ((1 << METRICS_CODE_POINT_BITS) - 1);
 	}
 	inline uint32_t getChained(void) const {
@@ -54,9 +55,6 @@ public:

 class FontMetrics : public util::Data {
 public:
-	inline const FontMetricsHeader *getHeader(void) const {
-		return as<const FontMetricsHeader>();
-	}
 	inline CharacterSize operator[](util::UTF8CodePoint id) const {
 		return get(id);
 	}
@@ -75,13 +73,13 @@ public:
 		if (!metrics.ptr)
 			return 0;

-		return metrics.getHeader()->spaceWidth;
+		return metrics.as<FontMetricsHeader>()->spaceWidth;
 	}
 	inline int getLineHeight(void) const {
 		if (!metrics.ptr)
 			return 0;

-		return metrics.getHeader()->lineHeight;
+		return metrics.as<FontMetricsHeader>()->lineHeight;
 	}

 	void draw(
@@ -40,6 +40,28 @@ template<typename T> static constexpr inline Hash hash(
 Hash hash(const char *str, char terminator = 0);
 Hash hash(const uint8_t *data, size_t length);

+/* Hash table parser */
+
+template<typename T> static inline const T *getHashTableEntry(
+	const T *table, size_t numBuckets, Hash id
+) {
+#if 0
+	auto index = id % numBuckets;
+#else
+	auto index = id & (numBuckets - 1);
+#endif
+
+	do {
+		auto entry = &table[index];
+		index      = entry->getChained();
+
+		if (entry->getHash() == id)
+			return entry;
+	} while (index);
+
+	return nullptr;
+}
+
 /* CRC calculation */

 uint8_t dsCRC8(const uint8_t *data, size_t length);
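
getHashTableEntry is the one consumer-side primitive now shared by the string table, font metrics and package index parsers: the first numBuckets slots are direct-mapped buckets, getChained() holds the absolute index of the next entry in the collision chain, and 0 terminates the chain (slot 0 can only ever be a bucket). The shipped masking variant assumes numBuckets is a power of two, which holds for the 256-bucket default the Python tools emit. A Python rendering of the same walk:

def getHashTableEntry(table, numBuckets, id):
	# table is flat: buckets first, chained entries appended after them.
	# Empty bucket slots decode as all-zero entries, which fail the hash
	# test and terminate the chain.
	index = id & (numBuckets - 1) # Assumes numBuckets is a power of two

	while True:
		entry = table[index]
		index = entry.getChained()

		if entry.getHash() == id:
			return entry
		if not index:
			return None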
@@ -22,7 +22,7 @@
 #include "common/fs/file.hpp"
 #include "common/fs/iso9660.hpp"
 #include "common/fs/misc.hpp"
-#include "common/fs/zip.hpp"
+#include "common/fs/package.hpp"
 #include "common/storage/device.hpp"
 #include "common/util/log.hpp"
 #include "common/util/misc.hpp"
@@ -179,7 +179,7 @@ bool FileIOManager::loadResourceFile(const char *path) {
 	if (path)
 		_resourceFile = vfs.openFile(path, fs::READ);

-	// Fall back to the default in-memory resource archive in case of failure.
+	// Fall back to the default in-memory resource package in case of failure.
 	if (_resourceFile) {
 		if (resource.init(_resourceFile))
 			return true;
@@ -19,7 +19,7 @@
 #include <stddef.h>
 #include "common/fs/file.hpp"
 #include "common/fs/misc.hpp"
-#include "common/fs/zip.hpp"
+#include "common/fs/package.hpp"
 #include "common/storage/device.hpp"
 #include "common/util/log.hpp"
 #include "common/util/templates.hpp"
@@ -76,11 +76,11 @@ public:
 	const void *resourcePtr;
 	size_t     resourceLength;

-	fs::ZIPProvider     resource;
+	fs::PackageProvider resource;
 #ifdef ENABLE_PCDRV
 	fs::HostProvider    host;
 #endif
 	fs::VFSProvider     vfs;

 	storage::Device *ideDevices[2];
 	fs::Provider    *ideProviders[2];
@@ -45,7 +45,7 @@ bool cartDetectWorker(App &app) {

 #ifdef ENABLE_DUMMY_CART_DRIVER
 	if (!cart::dummyDriverDump.chipType)
-		app._fileIO.resource.loadStruct(cart::dummyDriverDump, "data/test.573");
+		app._fileIO.resource.loadStruct(cart::dummyDriverDump, "data/dummy.dmp");

 	if (cart::dummyDriverDump.chipType) {
 		LOG_APP("using dummy cart driver");
@@ -214,11 +214,11 @@ bool cartDumpWorker(App &app) {
 		app._cartParser->getRegion(region)
 	) {
 		snprintf(
-			path, sizeof(path), EXTERNAL_DATA_DIR "/%s%s.573", code, region
+			path, sizeof(path), EXTERNAL_DATA_DIR "/%s%s.dmp", code, region
 		);
 	} else {
 		if (!app._getNumberedPath(
-			path, sizeof(path), EXTERNAL_DATA_DIR "/cart%04d.573"
+			path, sizeof(path), EXTERNAL_DATA_DIR "/cart%04d.dmp"
 		))
 			goto _error;
 	}
@@ -99,7 +99,7 @@ bool fileInitWorker(App &app) {
 	app._fileIO.mountIDE();

 	app._workerStatus.update(2, 3, WSTR("App.fileInitWorker.loadResources"));
-	if (app._fileIO.loadResourceFile(EXTERNAL_DATA_DIR "/resource.zip"))
+	if (app._fileIO.loadResourceFile(EXTERNAL_DATA_DIR "/resource.pkg"))
 		app._loadResources();

 	return true;
tools/buildResourceArchive.py — deleted file (209 lines)
@@ -1,209 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# 573in1 - Copyright (C) 2022-2024 spicyjpeg
#
# 573in1 is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# 573in1 is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# 573in1. If not, see <https://www.gnu.org/licenses/>.

__version__ = "0.4.6"
__author__  = "spicyjpeg"

import json
from argparse import ArgumentParser, FileType, Namespace
from pathlib import Path
from typing import Any, ByteString
from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile

import lz4.block
from common.assets import *
from PIL import Image

## Main

def createParser() -> ArgumentParser:
	parser = ArgumentParser(
		description = \
			"Parses a JSON file containing a list of resources to convert, "
			"generates the respective files and packs them into a ZIP archive.",
		add_help    = False
	)

	group = parser.add_argument_group("Tool options")
	group.add_argument(
		"-h", "--help",
		action = "help",
		help   = "Show this help message and exit"
	)

	group = parser.add_argument_group("Compression options")
	group.add_argument(
		"-c", "--compression",
		type    = str,
		choices = ( "none", "deflate", "lz4" ),
		default = "deflate",
		help    = "Set default compression algorithm (default DEFLATE)"
	)
	group.add_argument(
		"-l", "--compress-level",
		type    = int,
		default = 9,
		help    = "Set default DEFLATE and LZ4 compression level (default 9)",
		metavar = "0-9"
	)

	group = parser.add_argument_group("File paths")
	group.add_argument(
		"-s", "--source-dir",
		type    = Path,
		help    = \
			"Set path to directory containing source files (same directory as "
			"resource list by default)",
		metavar = "dir"
	)
	group.add_argument(
		"configFile",
		type = FileType("rt", encoding = "utf-8"),
		help = "Path to JSON configuration file",
	)
	group.add_argument(
		"output",
		type = Path,
		help = "Path to ZIP file to generate"
	)

	return parser

def main():
	parser: ArgumentParser = createParser()
	args:   Namespace      = parser.parse_args()

	with args.configFile as file:
		configFile: dict[str, Any] = json.load(file)
		sourceDir:  Path           = \
			args.source_dir or Path(file.name).parent

	assetList: list[dict[str, Any]] = configFile["resources"]

	with ZipFile(args.output, "w", allowZip64 = False) as _zip:
		for asset in assetList:
			match asset.get("type", "file").strip():
				case "empty":
					data: ByteString = bytes(int(asset.get("size", 0)))

				case "text":
					with open(
						sourceDir / asset["source"], "rt", encoding = "utf-8"
					) as file:
						data: ByteString = file.read().encode("ascii")

				case "binary":
					with open(sourceDir / asset["source"], "rb") as file:
						data: ByteString = file.read()

				case "tim":
					ix: int = int(asset["imagePos"]["x"])
					iy: int = int(asset["imagePos"]["y"])
					cx: int = int(asset["clutPos"]["x"])
					cy: int = int(asset["clutPos"]["y"])

					image: Image.Image = Image.open(sourceDir / asset["source"])
					image.load()

					if image.mode != "P":
						image = image.quantize(
							int(asset.get("quantize", 16)), dither = Image.NONE
						)

					data: ByteString = generateIndexedTIM(image, ix, iy, cx, cy)

				case "metrics":
					if "metrics" in asset:
						metrics: dict = asset["metrics"]
					else:
						with open(
							sourceDir / asset["source"], "rt",
							encoding = "utf-8"
						) as file:
							metrics: dict = json.load(file)

					data: ByteString = generateFontMetrics(metrics)

				case "palette":
					if "palette" in asset:
						palette: dict = asset["palette"]
					else:
						with open(
							sourceDir / asset["source"], "rt",
							encoding = "utf-8"
						) as file:
							palette: dict = json.load(file)

					data: ByteString = generateColorPalette(palette)

				case "strings":
					if "strings" in asset:
						strings: dict = asset["strings"]
					else:
						with open(
							sourceDir / asset["source"], "rt",
							encoding = "utf-8"
						) as file:
							strings: dict = json.load(file)

					data: ByteString = generateStringTable(strings)

				case "db":
					if "db" in asset:
						db: dict = asset["db"]
					else:
						with open(
							sourceDir / asset["source"], "rt",
							encoding = "utf-8"
						) as file:
							db: dict = json.load(file)

					# TODO: implement
					data: ByteString = b""

				case _type:
					raise KeyError(f"unsupported asset type '{_type}'")

			compressLevel: int | None = \
				asset.get("compressLevel", args.compress_level)

			match asset.get("compression", args.compression).strip():
				case "none" | None:
					_zip.writestr(asset["name"], data, ZIP_STORED)

				case "deflate":
					_zip.writestr(
						asset["name"], data, ZIP_DEFLATED, compressLevel
					)

				case "lz4":
					# ZIP archives do not "officially" support LZ4 compression,
					# so the entry is stored as an uncompressed file.
					compressed: bytes = lz4.block.compress(
						data,
						mode        = "high_compression",
						compression = compressLevel,
						store_size  = False
					)

					_zip.writestr(asset["name"], compressed, ZIP_STORED)

				case _type:
					raise KeyError(f"unsupported compression type '{_type}'")

if __name__ == "__main__":
	main()
tools/buildResourcePackage.py — new file (216 lines)
@@ -0,0 +1,216 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# 573in1 - Copyright (C) 2022-2024 spicyjpeg
#
# 573in1 is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# 573in1 is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# 573in1. If not, see <https://www.gnu.org/licenses/>.

__version__ = "1.0.3"
__author__  = "spicyjpeg"

import json
from argparse import ArgumentParser, FileType, Namespace
from pathlib import Path
from typing import Any, ByteString, Mapping

import lz4.block
from common.assets import *
from PIL import Image

## Asset conversion

def processAsset(asset: Mapping[str, Any], sourceDir: Path) -> ByteString:
	match asset.get("type", "file").strip():
		case "empty":
			return bytes(int(asset.get("size", 0)))

		case "text" | "binary":
			with open(sourceDir / asset["source"], "rb") as file:
				data: ByteString = file.read()

			return data

		case "tim":
			ix: int = int(asset["imagePos"]["x"])
			iy: int = int(asset["imagePos"]["y"])
			cx: int = int(asset["clutPos"]["x"])
			cy: int = int(asset["clutPos"]["y"])

			image: Image.Image = Image.open(sourceDir / asset["source"])
			image.load()

			if image.mode != "P":
				image = image.quantize(
					int(asset.get("quantize", 16)), dither = Image.NONE
				)

			return generateIndexedTIM(image, ix, iy, cx, cy)

		case "metrics":
			if "metrics" in asset:
				metrics: dict = asset["metrics"]
			else:
				with open(
					sourceDir / asset["source"], "rt",
					encoding = "utf-8"
				) as file:
					metrics: dict = json.load(file)

			return generateFontMetrics(metrics)

		case "palette":
			if "palette" in asset:
				palette: dict = asset["palette"]
			else:
				with open(
					sourceDir / asset["source"], "rt",
					encoding = "utf-8"
				) as file:
					palette: dict = json.load(file)

			return generateColorPalette(palette)

		case "strings":
			if "strings" in asset:
				strings: dict = asset["strings"]
			else:
				with open(
					sourceDir / asset["source"], "rt",
					encoding = "utf-8"
				) as file:
					strings: dict = json.load(file)

			return generateStringTable(strings)

		case "db":
			if "db" in asset:
				db: dict = asset["db"]
			else:
				with open(
					sourceDir / asset["source"], "rt",
					encoding = "utf-8"
				) as file:
					db: dict = json.load(file)

			# TODO: implement
			return b""

		case _type:
			raise KeyError(f"unsupported asset type '{_type}'")

## Main

def createParser() -> ArgumentParser:
	parser = ArgumentParser(
		description = \
			"Parses a JSON file containing a list of resources to convert, "
			"generates the respective files and packs them into a 573in1 "
			"resource package (.pkg file).",
		add_help    = False
	)

	group = parser.add_argument_group("Tool options")
	group.add_argument(
		"-h", "--help",
		action = "help",
		help   = "Show this help message and exit"
	)

	group = parser.add_argument_group("Package options")
	group.add_argument(
		"-a", "--align",
		type    = int,
		default = 2048,
		help    = \
			"Ensure all files in the package are aligned to specified sector "
			"size (default 2048)",
		metavar = "length"
	)
	group.add_argument(
		"-c", "--compress-level",
		type    = int,
		default = 9,
		help    = \
			"Set default LZ4 compression level (0 to disable compression, "
			"default 9)",
		metavar = "0-9"
	)

	group = parser.add_argument_group("File paths")
	group.add_argument(
		"-s", "--source-dir",
		type    = Path,
		help    = \
			"Set path to directory containing source files (same directory as "
			"resource list by default)",
		metavar = "dir"
	)
	group.add_argument(
		"configFile",
		type = FileType("rt", encoding = "utf-8"),
		help = "Path to JSON configuration file",
	)
	group.add_argument(
		"output",
		type = FileType("wb"),
		help = "Path to package file to generate"
	)

	return parser

def main():
	parser: ArgumentParser = createParser()
	args:   Namespace      = parser.parse_args()

	with args.configFile as file:
		configFile: dict[str, Any] = json.load(file)
		sourceDir:  Path           = \
			args.source_dir or Path(file.name).parent

	entries:  dict[str, PackageIndexEntry] = {}
	fileData: bytearray                    = bytearray()

	for asset in configFile["resources"]:
		data:  ByteString        = processAsset(asset, sourceDir)
		entry: PackageIndexEntry = \
			PackageIndexEntry(len(fileData), 0, len(data))

		compLevel: int | None = asset.get("compLevel", args.compress_level)

		if data and compLevel:
			data = lz4.block.compress(
				data,
				mode        = "high_compression",
				compression = compLevel,
				store_size  = False
			)
			entry.compLength = len(data)

		entries[asset["name"]] = entry
		fileData              += data

		while len(fileData) % args.align:
			fileData.append(0)

	indexData: bytearray = generatePackageIndex(entries, args.align)

	while len(indexData) % args.align:
		indexData.append(0)

	with args.output as file:
		file.write(indexData)
		file.write(fileData)

if __name__ == "__main__":
	main()
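
For reference, the CMake rule shown earlier drives this tool as buildResourcePackage.py -a 64 <variant>.json <variant>.pkg, overriding the 2048-byte default alignment, presumably to keep padding small in the package embedded into the boot executable.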
@@ -14,14 +14,16 @@
 # You should have received a copy of the GNU General Public License along with
 # 573in1. If not, see <https://www.gnu.org/licenses/>.

-from itertools import chain
+from dataclasses import dataclass
 from struct import Struct
 from typing import Any, Generator, Mapping, Sequence

 import numpy
 from numpy import ndarray
 from PIL import Image
-from .util import colorFromString, generateHashTable, hashData
+from .util import \
+	HashTableBuilder, StringBlobBuilder, colorFromString, hashData, \
+	roundUpToMultiple

 ## .TIM image converter
||||
@ -130,17 +132,18 @@ def generateIndexedTIM(
|
||||
|
||||
## Font metrics generator
|
||||
|
||||
_METRICS_HEADER_STRUCT: Struct = Struct("< 3B b")
|
||||
_METRICS_HEADER_STRUCT: Struct = Struct("< 3B b 2H")
|
||||
_METRICS_ENTRY_STRUCT: Struct = Struct("< 2I")
|
||||
_METRICS_BUCKET_COUNT: int = 256
|
||||
|
||||
def generateFontMetrics(metrics: Mapping[str, Any]) -> bytearray:
|
||||
def generateFontMetrics(
|
||||
metrics: Mapping[str, Any], numBuckets: int = 256
|
||||
) -> bytearray:
|
||||
spaceWidth: int = int(metrics["spaceWidth"])
|
||||
tabWidth: int = int(metrics["tabWidth"])
|
||||
lineHeight: int = int(metrics["lineHeight"])
|
||||
baselineOffset: int = int(metrics["baselineOffset"])
|
||||
|
||||
entries: dict[int, int] = {}
|
||||
hashTable: HashTableBuilder = HashTableBuilder(numBuckets)
|
||||
|
||||
for ch, entry in metrics["characterSizes"].items():
|
||||
x: int = int(entry["x"])
|
||||
@ -156,7 +159,7 @@ def generateFontMetrics(metrics: Mapping[str, Any]) -> bytearray:
|
||||
if h > lineHeight:
|
||||
raise ValueError("character height exceeds line height")
|
||||
|
||||
entries[ord(ch)] = (0
|
||||
hashTable.addEntry(ord(ch), 0
|
||||
| (x << 0)
|
||||
| (y << 8)
|
||||
| (w << 16)
|
||||
@ -164,30 +167,26 @@ def generateFontMetrics(metrics: Mapping[str, Any]) -> bytearray:
|
||||
| (i << 30)
|
||||
)
|
||||
|
||||
buckets, chained = generateHashTable(entries, _METRICS_BUCKET_COUNT)
|
||||
table: bytearray = bytearray()
|
||||
|
||||
if (len(buckets) + len(chained)) > 2048:
|
||||
raise RuntimeError("font hash table must have <=2048 entries")
|
||||
|
||||
table += _METRICS_HEADER_STRUCT.pack(
|
||||
metrics: bytearray = bytearray()
|
||||
metrics += _METRICS_HEADER_STRUCT.pack(
|
||||
spaceWidth,
|
||||
tabWidth,
|
||||
lineHeight,
|
||||
baselineOffset
|
||||
baselineOffset,
|
||||
numBuckets,
|
||||
len(hashTable.entries)
|
||||
)
|
||||
|
||||
for entry in chain(buckets, chained):
|
||||
for entry in hashTable.entries:
|
||||
if entry is None:
|
||||
table += _METRICS_ENTRY_STRUCT.pack(0, 0)
|
||||
continue
|
||||
metrics += bytes(_METRICS_ENTRY_STRUCT.size)
|
||||
else:
|
||||
metrics += _METRICS_ENTRY_STRUCT.pack(
|
||||
entry.fullHash | (entry.chainIndex << 21),
|
||||
entry.data
|
||||
)
|
||||
|
||||
table += _METRICS_ENTRY_STRUCT.pack(
|
||||
entry.fullHash | (entry.chainIndex << 21),
|
||||
entry.data
|
||||
)
|
||||
|
||||
return table
|
||||
return metrics
|
||||
|
||||
## Color palette generator
|
||||
|
||||
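
The first word of each packed metrics entry ("< 2I") carries the 21-bit code point (METRICS_CODE_POINT_BITS in gpufont.hpp) with the chain index in the upper bits, matching FontMetricsEntry::getHash() and getChained() on the C++ side. A quick decode sketch:

import struct

def decodeMetricsEntry(raw: bytes) -> tuple[int, int, int]:
	first, size = struct.unpack("< 2I", raw)

	codePoint  = first & ((1 << 21) - 1) # METRICS_CODE_POINT_BITS = 21
	chainIndex = first >> 21
	return codePoint, chainIndex, size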
@@ -235,65 +234,108 @@ def generateColorPalette(

 ## String table generator

-_STRING_TABLE_ENTRY_STRUCT:  Struct = Struct("< I 2H")
-_STRING_TABLE_BUCKET_COUNT:  int    = 256
-_STRING_TABLE_ALIGNMENT:     int    = 4
+_STRING_TABLE_HEADER_STRUCT: Struct = Struct("< 2H")
+_STRING_TABLE_ENTRY_STRUCT:  Struct = Struct("< I 2H")
+_STRING_TABLE_ALIGNMENT:     int    = 4

 def _walkStringTree(
 	strings: Mapping[str, Any], prefix: str = ""
-) -> Generator[tuple[int, bytes | None], None, None]:
+) -> Generator[tuple[int, bytes], None, None]:
 	for key, value in strings.items():
 		fullKey: str = prefix + key
 		keyHash: int = hashData(fullKey.encode("ascii"))

-		if value is None:
-			yield keyHash, None
-		elif isinstance(value, str):
-			yield keyHash, value.encode("utf-8")
+		if isinstance(value, str):
+			yield keyHash, value.encode("utf-8") + b"\0"
 		else:
 			yield from _walkStringTree(value, f"{fullKey}.")

-def generateStringTable(strings: Mapping[str, Any]) -> bytearray:
-	offsets: dict[bytes, int] = {}
-	entries: dict[int, int]   = {}
-	blob:    bytearray        = bytearray()
+def generateStringTable(
+	strings: Mapping[str, Any], numBuckets: int = 256
+) -> bytearray:
+	hashTable: HashTableBuilder  = HashTableBuilder(numBuckets)
+	blob:      StringBlobBuilder = StringBlobBuilder(_STRING_TABLE_ALIGNMENT)

 	for keyHash, string in _walkStringTree(strings):
-		if string is None:
-			entries[keyHash] = 0
-			continue
-
-		# Identical strings associated to multiple keys are deduplicated.
-		offset: int | None = offsets.get(string, None)
-
-		if offset is None:
-			offset          = len(blob)
-			offsets[string] = offset
-
-			blob += string
-			blob.append(0)
-
-			while len(blob) % _STRING_TABLE_ALIGNMENT:
-				blob.append(0)
-
-		entries[keyHash] = offset
-
-	buckets, chained = generateHashTable(entries, _STRING_TABLE_BUCKET_COUNT)
-	table: bytearray = bytearray()
-
-	# Relocate the offsets and serialize the table.
-	blobOffset: int = \
-		(len(buckets) + len(chained)) * _STRING_TABLE_ENTRY_STRUCT.size
-
-	for entry in chain(buckets, chained):
-		if entry is None:
-			table += _STRING_TABLE_ENTRY_STRUCT.pack(0, 0, 0)
-			continue
-
-		table += _STRING_TABLE_ENTRY_STRUCT.pack(
-			entry.fullHash,
-			0 if (entry.data is None) else (blobOffset + entry.data),
-			entry.chainIndex
-		)
-
-	return table + blob
+		hashTable.addEntry(keyHash, blob.addString(string))
+
+	tableLength: int = 0 \
+		+ _STRING_TABLE_HEADER_STRUCT.size \
+		+ _STRING_TABLE_ENTRY_STRUCT.size * len(hashTable.entries)
+
+	tableData: bytearray = bytearray()
+	tableData += _STRING_TABLE_HEADER_STRUCT.pack(
+		numBuckets, len(hashTable.entries)
+	)
+
+	for entry in hashTable.entries:
+		if entry is None:
+			tableData += bytes(_STRING_TABLE_ENTRY_STRUCT.size)
+		else:
+			tableData += _STRING_TABLE_ENTRY_STRUCT.pack(
+				entry.fullHash,
+				tableLength + entry.data,
+				entry.chainIndex
+			)
+
+	return tableData + blob.data
+
+## Package header generator
+
+_PACKAGE_INDEX_HEADER_STRUCT: Struct = Struct("< I 2H")
+_PACKAGE_INDEX_ENTRY_STRUCT:  Struct = Struct("< I 2H Q 2I")
+_PACKAGE_STRING_ALIGNMENT:    int    = 4
+
+@dataclass
+class PackageIndexEntry:
+	offset:       int
+	compLength:   int
+	uncompLength: int
+	nameOffset:   int = 0
+
+def generatePackageIndex(
+	files: Mapping[str, PackageIndexEntry], alignment: int = 2048,
+	numBuckets: int = 256
+) -> bytearray:
+	hashTable: HashTableBuilder  = HashTableBuilder(numBuckets)
+	blob:      StringBlobBuilder = StringBlobBuilder(_PACKAGE_STRING_ALIGNMENT)
+
+	for name, entry in files.items():
+		nameString: bytes             = name.encode("ascii")
+		data:       PackageIndexEntry = PackageIndexEntry(
+			entry.offset,
+			entry.compLength,
+			entry.uncompLength,
+			blob.addString(nameString + b"\0")
+		)
+
+		hashTable.addEntry(hashData(nameString), data)
+
+	tableLength: int = 0 \
+		+ _PACKAGE_INDEX_HEADER_STRUCT.size \
+		+ _PACKAGE_INDEX_ENTRY_STRUCT.size * len(hashTable.entries)
+	indexLength: int = tableLength + len(blob.data)
+
+	tableData: bytearray = bytearray()
+	tableData += _PACKAGE_INDEX_HEADER_STRUCT.pack(
+		indexLength,
+		numBuckets,
+		len(hashTable.entries)
+	)
+
+	fileDataOffset: int = roundUpToMultiple(indexLength, alignment)
+
+	for entry in hashTable.entries:
+		if entry is None:
+			tableData += bytes(_PACKAGE_INDEX_ENTRY_STRUCT.size)
+		else:
+			tableData += _PACKAGE_INDEX_ENTRY_STRUCT.pack(
+				entry.fullHash,
+				tableLength + entry.data.nameOffset,
+				entry.chainIndex,
+				fileDataOffset + entry.data.offset,
+				entry.data.compLength,
+				entry.data.uncompLength
+			)
+
+	return tableData + blob.data
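
A quick sanity check on the sizes involved: the index header packs to 8 bytes ("< I 2H" = 4 + 2 + 2) and each entry to 24 ("< I 2H Q 2I" = 4 + 2 + 2 + 8 + 4 + 4), so with the default 256 buckets and no collisions the fixed table occupies

tableLength = 8 + 24 * 256   # = 6152 bytes

and file data starts at roundUpToMultiple(6152 + len(nameBlob), alignment), which is the fileDataOffset added to every entry's offset above.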
@@ -15,18 +15,23 @@
 # 573in1. If not, see <https://www.gnu.org/licenses/>.

 import logging, re
-from collections import defaultdict
 from dataclasses import dataclass
 from hashlib import md5
 from io import SEEK_END, SEEK_SET
 from typing import \
-	Any, BinaryIO, ByteString, Generator, Iterable, Iterator, Mapping, \
-	Sequence, TextIO
+	Any, BinaryIO, ByteString, Iterable, Iterator, Sequence, TextIO

 ## Value manipulation

+def roundUpToMultiple(value: int, length: int) -> int:
+	diff: int = value % length
+
+	return (value - diff + length) if diff else value
+
 def encodeSigned(value: int, bitLength: int) -> int:
-	return value & (1 << bitLength)
+	valueMask: int = (1 << bitLength) - 1
+
+	return value & valueMask

 def decodeSigned(value: int, bitLength: int) -> int:
 	signMask: int = 1 << (bitLength - 1)
@@ -153,35 +158,60 @@ class HashTableEntry:
 	chainIndex: int
 	data:       Any

-def generateHashTable(
-	entries: Mapping[int, Any], numBuckets: int
-) -> tuple[list[HashTableEntry | None], list[HashTableEntry]]:
-	chains: defaultdict[int, list[HashTableEntry]] = defaultdict(list)
-
-	for fullHash, data in entries.items():
-		entry: HashTableEntry = HashTableEntry(fullHash, 0, data)
-
-		chains[fullHash % numBuckets].append(entry)
-
-	buckets: list[HashTableEntry | None] = []
-	chained: list[HashTableEntry]        = []
-
-	for shortHash in range(numBuckets):
-		entries: list[HashTableEntry] = chains[shortHash]
-
-		if not len(entries): # Empty bucket
-			buckets.append(None)
-			continue
-
-		for index, entry in enumerate(entries):
-			entry.chainIndex = numBuckets + len(chained) + index
-
-		entries[-1].chainIndex = 0 # Terminate chain
-
-		buckets.append(entries[0])
-		chained += entries[1:]
-
-	return buckets, chained
+class HashTableBuilder:
+	def __init__(self, numBuckets: int = 256):
+		self._numBuckets: int = numBuckets
+
+		self.entries: list[HashTableEntry | None] = [ None ] * numBuckets
+
+	def addEntry(self, fullHash: int, data: Any) -> int:
+		index: int = fullHash % self._numBuckets
+
+		entry:  HashTableEntry        = HashTableEntry(fullHash, 0, data)
+		bucket: HashTableEntry | None = self.entries[index]
+
+		# If no bucket exists for the entry's index, create one.
+		if bucket is None:
+			self.entries[index] = entry
+			return index
+		if bucket.fullHash == fullHash:
+			raise KeyError(f"collision detected, hash={fullHash:#08x}")
+
+		# Otherwise, follow the bucket's chain, find the last chained item and
+		# link the new entry to it.
+		while bucket.chainIndex:
+			bucket = self.entries[bucket.chainIndex]
+
+			if bucket.fullHash == fullHash:
+				raise KeyError(f"collision detected, hash={fullHash:#08x}")
+
+		bucket.chainIndex = len(self.entries)
+		self.entries.append(entry)
+
+		return bucket.chainIndex
+
+class StringBlobBuilder:
+	def __init__(self, alignment: int = 1):
+		self._alignment: int                   = alignment
+		self._offsets:   dict[ByteString, int] = {}
+
+		self.data: bytearray = bytearray()
+
+	def addString(self, string: ByteString) -> int:
+		# If the same string is already in the blob, return its offset without
+		# adding new data.
+		offset: int | None = self._offsets.get(string, None)
+
+		if offset is None:
+			offset = len(self.data)
+
+			self._offsets[string] = offset
+			self.data            += string
+
+			while len(self.data) % self._alignment:
+				self.data.append(0)
+
+		return offset

 ## Odd/even interleaved file reader
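
A small demonstration of the flat layout HashTableBuilder produces (the hash values here are made up for illustration): buckets occupy the first numBuckets slots, and collided entries are appended past them with chainIndex linking bucket to overflow:

table = HashTableBuilder(4)
table.addEntry(0x01, "a") # 1 % 4 = 1: becomes bucket 1
table.addEntry(0x05, "b") # 5 % 4 = 1: collides, appended at index 4
table.addEntry(0x02, "c") # 2 % 4 = 2: becomes bucket 2

# table.entries is now:
#   [None, ("a", chainIndex=4), ("c", chainIndex=0), None, ("b", chainIndex=0)]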