Refactor tools, add card size selection, more fixes

This commit is contained in:
spicyjpeg 2024-04-20 07:36:39 +02:00
parent fad8aa11bc
commit 3f259377ce
No known key found for this signature in database
GPG Key ID: 5CC87404C01DF393
30 changed files with 2353 additions and 1410 deletions

View File

@ -57,14 +57,11 @@
"fileError": "The selected file could not be accessed or is not a valid System 573 executable. Make sure the file has been copied properly.\n\nFile: %s", "fileError": "The selected file could not be accessed or is not a valid System 573 executable. Make sure the file has been copied properly.\n\nFile: %s",
"addressError": "The selected file cannot be loaded as it overlaps the memory region reserved for use by the executable launcher.\n\nFile:\t\t%s\nRegion:\t%08X-%08X\nStack top:\t%08X" "addressError": "The selected file cannot be loaded as it overlaps the memory region reserved for use by the executable launcher.\n\nFile:\t\t%s\nRegion:\t%08X-%08X\nStack top:\t%08X"
}, },
"flashHeaderEraseWorker": {
"erase": "Erasing header...\nDo not turn off the 573.",
"flashError": "An error occurred while erasing and rewriting the first sector of the internal flash memory.\n\nError code: %s\nPress the Test button to view debug logs.",
"unsupported": "This system's onboard flash memory chips are not currently supported. See the documentation for more information on supported flash chips."
},
"flashHeaderWriteWorker": { "flashHeaderWriteWorker": {
"erase": "Erasing existing header...\nDo not turn off the 573.",
"write": "Writing new header...\nDo not turn off the 573.", "write": "Writing new header...\nDo not turn off the 573.",
"flashError": "An error occurred while erasing and rewriting the first sector of the internal flash memory.\n\nError code: %s\nPress the Test button to view debug logs." "flashError": "An error occurred while erasing and rewriting the first sector of the internal flash memory.\n\nError code: %s\nPress the Test button to view debug logs.",
"unsupported": "This system's onboard flash memory chips are not currently supported.\n\nSee the documentation for more information on supported flash chips."
}, },
"qrCodeWorker": { "qrCodeWorker": {
"compress": "Compressing cartridge dump...", "compress": "Compressing cartridge dump...",
@ -92,7 +89,7 @@
"erase": "Erasing device...\nDo not turn off the 573.", "erase": "Erasing device...\nDo not turn off the 573.",
"success": "The device has been successfully wiped.\n\nSectors erased: %d", "success": "The device has been successfully wiped.\n\nSectors erased: %d",
"flashError": "An error occurred while erasing sectors on one of the chips.\n\nError code:\t%s\nSectors erased:\t%d\nPress the Test button to view debug logs.", "flashError": "An error occurred while erasing sectors on one of the chips.\n\nError code:\t%s\nSectors erased:\t%d\nPress the Test button to view debug logs.",
"unsupported": "Erasing sectors on the flash memory chips used by this device is not currently supported. See the documentation for more information on supported flash chips." "unsupported": "The flash memory chips on this device are unresponsive to commands or are currently unsupported. If you are trying to erase a PCMCIA card with a write protect switch, make sure the switch is off.\n\nSee the documentation for more information on supported flash chips."
}, },
"romRestoreWorker": { "romRestoreWorker": {
"init": "Opening dump file...\nDo not turn off the 573 or unplug drives.", "init": "Opening dump file...\nDo not turn off the 573 or unplug drives.",
@ -101,7 +98,7 @@
"overflow": "The selected file was larger than the target device's capacity, so all data past the limit was ignored. All other data has been successfully restored.\n\nBytes written: %d", "overflow": "The selected file was larger than the target device's capacity, so all data past the limit was ignored. All other data has been successfully restored.\n\nBytes written: %d",
"fileError": "An error occurred while reading data from the file. Ensure the filesystem is not damaged.\n\nFile: %s\nPress the Test button to view debug logs.", "fileError": "An error occurred while reading data from the file. Ensure the filesystem is not damaged.\n\nFile: %s\nPress the Test button to view debug logs.",
"flashError": "An error occurred while erasing sectors on or writing data to one of the chips.\n\nError code:\t%s\nBytes written:\t%d\nPress the Test button to view debug logs.", "flashError": "An error occurred while erasing sectors on or writing data to one of the chips.\n\nError code:\t%s\nBytes written:\t%d\nPress the Test button to view debug logs.",
"unsupported": "Writing data to the flash memory chips used by this device is not currently supported. See the documentation for more information on supported flash chips." "unsupported": "The flash memory chips on this device are unresponsive to commands or are currently unsupported. If you are trying to write to a PCMCIA card with a write protect switch, make sure the switch is off.\n\nSee the documentation for more information on supported flash chips."
}, },
"atapiEjectWorker": { "atapiEjectWorker": {
"eject": "Sending eject command...", "eject": "Sending eject command...",
@ -126,6 +123,15 @@
"dmxCab": "Dance Maniax cabinet" "dmxCab": "Dance Maniax cabinet"
}, },
"CardSizeScreen": {
"title": "Select card size",
"body": "Select the size of the PCMCIA flash card currently inserted in the selected slot, as it cannot be detected automatically.",
"cancel": "Cancel",
"16": "16 MB",
"32": "32 MB",
"64": "64 MB"
},
"CartActionsScreen": { "CartActionsScreen": {
"title": "{CART_ICON} Cartridge options", "title": "{CART_ICON} Cartridge options",
"itemPrompt": "{RIGHT_ARROW} Press {START_BUTTON} to select, hold {LEFT_BUTTON}{RIGHT_BUTTON} + {START_BUTTON} to go back", "itemPrompt": "{RIGHT_ARROW} Press {START_BUTTON} to select, hold {LEFT_BUTTON}{RIGHT_BUTTON} + {START_BUTTON} to go back",
@ -481,6 +487,12 @@
"unsupported": "This game does not pair to I/O boards", "unsupported": "This game does not pair to I/O boards",
"thisSystem": "Paired to this system's I/O board", "thisSystem": "Paired to this system's I/O board",
"otherSystem": "Paired to another system's I/O board" "otherSystem": "Paired to another system's I/O board"
},
"description": {
"unidentified": "The flash header contains data for an unsupported game.\n",
"identified": "The game currently installed on the flash has been identified as:\n %s\n %s\n",
"noHeader": "The flash header is currently blank, but a boot executable is present. If you have just restored a flash dump, use the \"Edit internal flash header\" option to populate the header.\n",
"blank": "The flash header is currently blank and no boot executable is present. This likely means that the flash has been erased and is currently empty.\n"
} }
}, },
"pcmcia": { "pcmcia": {

0
data/flash.db Normal file
View File

File diff suppressed because it is too large Load Diff

View File

@ -96,16 +96,17 @@
}, },
{ {
"type": "binary", "type": "binary",
"name": "data/x76f041.cartdb", "name": "data/x76f041.db",
"source": "${PROJECT_SOURCE_DIR}/data/x76f041.cartdb" "source": "${PROJECT_SOURCE_DIR}/data/x76f041.db"
},
{
"type": "empty",
"name": "data/x76f100.cartdb"
}, },
{ {
"type": "binary", "type": "binary",
"name": "data/zs01.cartdb", "name": "data/zs01.db",
"source": "${PROJECT_SOURCE_DIR}/data/zs01.cartdb" "source": "${PROJECT_SOURCE_DIR}/data/zs01.db"
},
{
"type": "binary",
"name": "data/flash.db",
"source": "${PROJECT_SOURCE_DIR}/data/flash.db"
} }
] ]

View File

@ -6,7 +6,6 @@
#include "common/util.hpp" #include "common/util.hpp"
#include "ps1/registers.h" #include "ps1/registers.h"
#include "ps1/registers573.h" #include "ps1/registers573.h"
#include "ps1/system.h"
namespace rom { namespace rom {
@ -304,8 +303,8 @@ const FlashRegion pcmcia[2]{
/* Data common to all chip drivers */ /* Data common to all chip drivers */
static constexpr int _FLASH_WRITE_TIMEOUT = 10000; static constexpr int _FLASH_WRITE_TIMEOUT = 100000;
static constexpr int _FLASH_ERASE_TIMEOUT = 10000000; static constexpr int _FLASH_ERASE_TIMEOUT = 20000000;
const char *const DRIVER_ERROR_NAMES[]{ const char *const DRIVER_ERROR_NAMES[]{
"NO_ERROR", "NO_ERROR",
@ -345,8 +344,9 @@ static const ChipSize _RTC_CHIP_SIZE{
}; };
void RTCDriver::write(uint32_t offset, uint16_t value) { void RTCDriver::write(uint32_t offset, uint16_t value) {
auto ptr = reinterpret_cast<volatile uint32_t *>(_region.ptr + offset * 2); auto ptr = reinterpret_cast<volatile uint16_t *>(_region.ptr + offset * 2);
*ptr = (value & 0x00ff) | ((value & 0xff00) << 8); ptr[0] = value & 0xff;
ptr[1] = value >> 8;
} }
void RTCDriver::eraseSector(uint32_t offset) { void RTCDriver::eraseSector(uint32_t offset) {
@ -360,13 +360,14 @@ void RTCDriver::eraseChip(uint32_t offset) {
} }
DriverError RTCDriver::flushWrite(uint32_t offset, uint16_t value) { DriverError RTCDriver::flushWrite(uint32_t offset, uint16_t value) {
auto ptr = reinterpret_cast<volatile uint32_t *>(_region.ptr + offset * 2); auto ptr = reinterpret_cast<volatile uint16_t *>(_region.ptr + offset * 2);
value = (value & 0x00ff) | ((value & 0xff00) << 8);
if (ptr[offset] != value) { uint16_t actualValue = (ptr[0] & 0xff) | ((ptr[1] & 0xff) << 8);
if (value != actualValue) {
LOG( LOG(
"mismatch @ 0x%08x, exp=0x%02x, got=0x%02x", offset, value, "mismatch @ 0x%08x, exp=0x%02x, got=0x%04x", offset, value,
ptr[offset] actualValue
); );
return VERIFY_MISMATCH; return VERIFY_MISMATCH;
} }
@ -384,12 +385,12 @@ const ChipSize &RTCDriver::getChipSize(void) const {
/* AMD AM29F016/017 (Fujitsu MBM29F016A/017A) driver */ /* AMD AM29F016/017 (Fujitsu MBM29F016A/017A) driver */
enum FujitsuStatusFlag : uint16_t { enum JEDECStatusFlag : uint16_t {
_FUJITSU_STATUS_ERASE_TOGGLE = 1 << 2, _JEDEC_STATUS_ERASE_TOGGLE = 1 << 2,
_FUJITSU_STATUS_ERASE_START = 1 << 3, _JEDEC_STATUS_ERASE_START = 1 << 3,
_FUJITSU_STATUS_ERROR = 1 << 5, _JEDEC_STATUS_ERROR = 1 << 5,
_FUJITSU_STATUS_TOGGLE = 1 << 6, _JEDEC_STATUS_TOGGLE = 1 << 6,
_FUJITSU_STATUS_POLL_BIT = 1 << 7 _JEDEC_STATUS_POLL_BIT = 1 << 7
}; };
DriverError AM29F016Driver::_flush( DriverError AM29F016Driver::_flush(
@ -402,40 +403,31 @@ DriverError AM29F016Driver::_flush(
uint8_t status, diff; uint8_t status, diff;
for (; timeout > 0; timeout -= 10) { for (; timeout > 0; timeout--) {
status = (*ptr >> shift) & 0xff; status = (*ptr >> shift) & 0xff;
diff = status ^ byte; diff = status ^ byte;
// Some chips seem to flip the poll bit slightly before returning the if (!(diff & _JEDEC_STATUS_POLL_BIT))
// newly written byte.
if (!diff)
return NO_ERROR; return NO_ERROR;
if (!(diff & _FUJITSU_STATUS_POLL_BIT)) if (status & _JEDEC_STATUS_ERROR)
continue; break;
}
if (status & _FUJITSU_STATUS_ERROR) { // If the error flag was set, make sure an error actually occurred.
status = (*ptr >> shift) & 0xff;
diff = status ^ byte;
if (!(diff & _JEDEC_STATUS_POLL_BIT))
return NO_ERROR;
*ptr = _JEDEC_RESET;
if (status & _JEDEC_STATUS_ERROR) {
LOG("error @ 0x%08x, stat=0x%02x", offset, status); LOG("error @ 0x%08x, stat=0x%02x", offset, status);
*ptr = _JEDEC_RESET;
return CHIP_ERROR; return CHIP_ERROR;
}
delayMicroseconds(10);
}
if (diff & _FUJITSU_STATUS_POLL_BIT) {
LOG("timeout @ 0x%08x, stat=0x%02x", offset, status);
*ptr = _JEDEC_RESET;
return CHIP_TIMEOUT;
} else { } else {
LOG( LOG("timeout @ 0x%08x, stat=0x%02x", offset, status);
"mismatch @ 0x%08x, exp=0x%02x, got=0x%02x", offset, byte, return CHIP_TIMEOUT;
status
);
*ptr = _JEDEC_RESET;
return VERIFY_MISMATCH;
} }
} }
@ -566,32 +558,34 @@ DriverError Intel28F016S5Driver::_flush(uint32_t offset, int timeout) {
// reading mode. // reading mode.
//*ptr = _INTEL_GET_STATUS; //*ptr = _INTEL_GET_STATUS;
for (; timeout > 0; timeout -= 10) { for (; timeout > 0; timeout--) {
status = (*ptr >> shift) & 0xff; status = (*ptr >> shift) & 0xff;
if (status & (_INTEL_STATUS_DPS | _INTEL_STATUS_VPPS)) { if (!(status & _INTEL_STATUS_WSMS))
LOG("locked @ 0x%08x, stat=0x%02x", offset, status); continue;
*ptr = _INTEL_RESET;
// The datasheet suggests only checking the error flags after WSMS = 1.
if (status & (_INTEL_STATUS_DPS | _INTEL_STATUS_VPPS)) {
*ptr = _INTEL_CLEAR_STATUS; *ptr = _INTEL_CLEAR_STATUS;
LOG("locked @ 0x%08x, stat=0x%02x", offset, status);
return WRITE_PROTECTED; return WRITE_PROTECTED;
} }
if (status & (_INTEL_STATUS_BWSLBS | _INTEL_STATUS_ECLBS)) { if (status & (_INTEL_STATUS_BWSLBS | _INTEL_STATUS_ECLBS)) {
LOG("error @ 0x%08x, stat=0x%02x", offset, status);
*ptr = _INTEL_CLEAR_STATUS; *ptr = _INTEL_CLEAR_STATUS;
LOG("error @ 0x%08x, stat=0x%02x", offset, status);
return CHIP_ERROR; return CHIP_ERROR;
} }
if (status & _INTEL_STATUS_WSMS) {
*ptr = _INTEL_CLEAR_STATUS;
return NO_ERROR; return NO_ERROR;
} }
delayMicroseconds(10); *ptr = _INTEL_RESET;
}
LOG("timeout @ 0x%08x, stat=0x%02x", offset, status); LOG("timeout @ 0x%08x, stat=0x%02x", offset, status);
*ptr = _INTEL_CLEAR_STATUS;
return CHIP_TIMEOUT; return CHIP_TIMEOUT;
} }

View File

@ -70,6 +70,7 @@ class App {
friend class HexdumpScreen; friend class HexdumpScreen;
friend class ReflashGameScreen; friend class ReflashGameScreen;
friend class SystemIDEntryScreen; friend class SystemIDEntryScreen;
friend class CardSizeScreen;
friend class ChecksumScreen; friend class ChecksumScreen;
private: private:
@ -93,6 +94,7 @@ private:
HexdumpScreen _hexdumpScreen; HexdumpScreen _hexdumpScreen;
ReflashGameScreen _reflashGameScreen; ReflashGameScreen _reflashGameScreen;
SystemIDEntryScreen _systemIDEntryScreen; SystemIDEntryScreen _systemIDEntryScreen;
CardSizeScreen _cardSizeScreen;
ChecksumScreen _checksumScreen; ChecksumScreen _checksumScreen;
#ifdef ENABLE_LOG_BUFFER #ifdef ENABLE_LOG_BUFFER
@ -142,7 +144,6 @@ private:
bool _romRestoreWorker(void); bool _romRestoreWorker(void);
bool _romEraseWorker(void); bool _romEraseWorker(void);
bool _flashHeaderWriteWorker(void); bool _flashHeaderWriteWorker(void);
bool _flashHeaderEraseWorker(void);
// miscworkers.cpp // miscworkers.cpp
bool _startupWorker(void); bool _startupWorker(void);

View File

@ -265,10 +265,7 @@ const char *UnlockKeyScreen::_getItemName(ui::Context &ctx, int index) const {
} }
void UnlockKeyScreen::autoUnlock(ui::Context &ctx) { void UnlockKeyScreen::autoUnlock(ui::Context &ctx) {
__builtin_memcpy( APP->_cartDump.copyKeyFrom(APP->_identified->dataKey);
APP->_cartDump.dataKey, APP->_identified->dataKey,
sizeof(APP->_cartDump.dataKey)
);
//APP->_selectedEntry = APP->_identified; //APP->_selectedEntry = APP->_identified;
APP->_selectedEntry = nullptr; APP->_selectedEntry = nullptr;
@ -330,10 +327,7 @@ void UnlockKeyScreen::update(ui::Context &ctx) {
if (index < 0) { if (index < 0) {
(this->*_SPECIAL_ENTRIES[-index].target)(ctx); (this->*_SPECIAL_ENTRIES[-index].target)(ctx);
} else { } else {
__builtin_memcpy( dump.copyKeyFrom(APP->_cartDB.get(index)->dataKey);
dump.dataKey, APP->_cartDB.get(index)->dataKey,
sizeof(dump.dataKey)
);
APP->_selectedEntry = APP->_cartDB.get(index); APP->_selectedEntry = APP->_cartDB.get(index);
ctx.show(APP->_confirmScreen, false, true); ctx.show(APP->_confirmScreen, false, true);
@ -376,7 +370,7 @@ void KeyEntryScreen::update(ui::Context &ctx) {
STRH(_UNLOCK_WARNINGS[dump.chipType]) STRH(_UNLOCK_WARNINGS[dump.chipType])
); );
__builtin_memcpy(dump.dataKey, _buffer, sizeof(dump.dataKey)); dump.copyKeyFrom(_buffer);
ctx.show(APP->_confirmScreen, false, true); ctx.show(APP->_confirmScreen, false, true);
} }
} }

View File

@ -13,9 +13,9 @@
static const char *const _CARTDB_PATHS[cart::NUM_CHIP_TYPES]{ static const char *const _CARTDB_PATHS[cart::NUM_CHIP_TYPES]{
nullptr, nullptr,
"data/x76f041.cartdb", "data/x76f041.db",
"data/x76f100.cartdb", "data/x76f100.db",
"data/zs01.cartdb" "data/zs01.db"
}; };
bool App::_cartDetectWorker(void) { bool App::_cartDetectWorker(void) {

View File

@ -264,7 +264,8 @@ void StorageActionsScreen::resetFlashHeader(ui::Context &ctx) {
APP->_confirmScreen.setMessage( APP->_confirmScreen.setMessage(
*this, *this,
[](ui::Context &ctx) { [](ui::Context &ctx) {
APP->_setupWorker(&App::_flashHeaderEraseWorker); APP->_romHeaderDump.clearData();
APP->_setupWorker(&App::_flashHeaderWriteWorker);
ctx.show(APP->_workerStatusScreen, false, true); ctx.show(APP->_workerStatusScreen, false, true);
}, },
STR("StorageActionsScreen.resetFlashHeader.confirm") STR("StorageActionsScreen.resetFlashHeader.confirm")
@ -302,8 +303,16 @@ void StorageActionsScreen::update(ui::Context &ctx) {
ctx.show(APP->_storageInfoScreen, true, true); ctx.show(APP->_storageInfoScreen, true, true);
} else { } else {
if (action.region.isPresent()) { if (action.region.isPresent()) {
this->_selectedRegion = &(action.region); this->selectedRegion = &(action.region);
if (action.region.regionLength > 0x1000000) {
APP->_cardSizeScreen.callback = action.target;
ctx.show(APP->_cardSizeScreen, false, true);
} else {
APP->_cardSizeScreen.selectedLength =
action.region.regionLength;
(this->*action.target)(ctx); (this->*action.target)(ctx);
}
} else { } else {
APP->_messageScreen.setMessage( APP->_messageScreen.setMessage(
MESSAGE_ERROR, *this, STR("StorageActionsScreen.cardError") MESSAGE_ERROR, *this, STR("StorageActionsScreen.cardError")
@ -315,6 +324,32 @@ void StorageActionsScreen::update(ui::Context &ctx) {
} }
} }
void CardSizeScreen::show(ui::Context &ctx, bool goBack) {
_title = STR("CardSizeScreen.title");
_body = STR("CardSizeScreen.body");
_buttons[0] = STR("CardSizeScreen.16");
_buttons[1] = STR("CardSizeScreen.32");
_buttons[2] = STR("CardSizeScreen.64");
_buttons[3] = STR("CardSizeScreen.cancel");
_numButtons = 4;
MessageBoxScreen::show(ctx, goBack);
}
void CardSizeScreen::update(ui::Context &ctx) {
MessageBoxScreen::update(ctx);
if (ctx.buttons.pressed(ui::BTN_START)) {
if (_activeButton == 3) {
ctx.show(APP->_storageActionsScreen, true, true);
} else {
selectedLength = 0x1000000 << _activeButton;
(APP->_storageActionsScreen.*callback)(ctx);
}
}
}
void ChecksumScreen::show(ui::Context &ctx, bool goBack) { void ChecksumScreen::show(ui::Context &ctx, bool goBack) {
_title = STR("ChecksumScreen.title"); _title = STR("ChecksumScreen.title");
_body = _bodyText; _body = _bodyText;

View File

@ -1,6 +1,7 @@
#pragma once #pragma once
#include <stddef.h>
#include <stdint.h> #include <stdint.h>
#include "common/rom.hpp" #include "common/rom.hpp"
#include "main/uibase.hpp" #include "main/uibase.hpp"
@ -18,16 +19,11 @@ public:
}; };
class StorageActionsScreen : public ui::ListScreen { class StorageActionsScreen : public ui::ListScreen {
private:
const rom::Region *_selectedRegion;
protected: protected:
const char *_getItemName(ui::Context &ctx, int index) const; const char *_getItemName(ui::Context &ctx, int index) const;
public: public:
inline const rom::Region &getSelectedRegion(void) { const rom::Region *selectedRegion;
return *_selectedRegion;
}
void checksum(ui::Context &ctx); void checksum(ui::Context &ctx);
void dump(ui::Context &ctx); void dump(ui::Context &ctx);
@ -41,6 +37,15 @@ public:
void update(ui::Context &ctx); void update(ui::Context &ctx);
}; };
class CardSizeScreen : public ui::MessageBoxScreen {
public:
size_t selectedLength;
void (StorageActionsScreen::*callback)(ui::Context &ctx);
void show(ui::Context &ctx, bool goBack = false);
void update(ui::Context &ctx);
};
// The CRCs have to be wrapped into their own structure in order to allow usage // The CRCs have to be wrapped into their own structure in order to allow usage
// of offsetof(). // of offsetof().
struct ChecksumValues { struct ChecksumValues {

View File

@ -199,7 +199,8 @@ bool App::_romRestoreWorker(void) {
const char *path = _filePickerScreen.selectedPath; const char *path = _filePickerScreen.selectedPath;
auto _file = _fileProvider.openFile(path, file::READ); auto _file = _fileProvider.openFile(path, file::READ);
auto &region = _storageActionsScreen.getSelectedRegion(); auto region = _storageActionsScreen.selectedRegion;
auto regionLength = _cardSizeScreen.selectedLength;
if (!_file) if (!_file)
goto _fileError; goto _fileError;
@ -209,12 +210,12 @@ bool App::_romRestoreWorker(void) {
size_t fileLength, dataLength; size_t fileLength, dataLength;
fileLength = size_t(_file->length); fileLength = size_t(_file->length);
dataLength = util::min(fileLength, region.regionLength); dataLength = util::min(fileLength, regionLength);
rom::Driver *driver; rom::Driver *driver;
size_t sectorLength, numSectors; size_t sectorLength, numSectors;
driver = region.newDriver(); driver = region->newDriver();
sectorLength = driver->getChipSize().eraseSectorLength; sectorLength = driver->getChipSize().eraseSectorLength;
numSectors = (dataLength + sectorLength - 1) / sectorLength; numSectors = (dataLength + sectorLength - 1) / sectorLength;
@ -290,8 +291,9 @@ _flashError:
} }
bool App::_romEraseWorker(void) { bool App::_romEraseWorker(void) {
auto &region = _storageActionsScreen.getSelectedRegion(); auto region = _storageActionsScreen.selectedRegion;
auto driver = region.newDriver(); auto regionLength = _cardSizeScreen.selectedLength;
auto driver = region->newDriver();
size_t chipLength = driver->getChipSize().chipLength; size_t chipLength = driver->getChipSize().chipLength;
size_t sectorLength = driver->getChipSize().eraseSectorLength; size_t sectorLength = driver->getChipSize().eraseSectorLength;
@ -308,13 +310,10 @@ bool App::_romEraseWorker(void) {
for (size_t i = 0; i < chipLength; i += sectorLength) { for (size_t i = 0; i < chipLength; i += sectorLength) {
_workerStatus.update(i, chipLength, WSTR("App.romEraseWorker.erase")); _workerStatus.update(i, chipLength, WSTR("App.romEraseWorker.erase"));
for (size_t j = 0; j < region.regionLength; j += chipLength) for (size_t j = 0; j < regionLength; j += chipLength)
driver->eraseSector(i + j); driver->eraseSector(i + j);
for ( for (size_t j = 0; j < regionLength; j += chipLength, sectorsErased++) {
size_t j = 0; j < region.regionLength; j += chipLength,
sectorsErased++
) {
error = driver->flushErase(i + j); error = driver->flushErase(i + j);
if (error) if (error)
@ -354,35 +353,6 @@ _unsupported:
} }
bool App::_flashHeaderWriteWorker(void) { bool App::_flashHeaderWriteWorker(void) {
if (!_flashHeaderEraseWorker())
return false;
auto driver = rom::flash.newDriver();
_workerStatus.update(1, 2, WSTR("App.flashHeaderWriteWorker.write"));
rom::DriverError error;
// TODO: implement
delete driver;
_workerStatus.setNextScreen(_storageInfoScreen);
return true;
_flashError:
delete driver;
_messageScreen.setMessage(
MESSAGE_ERROR, _storageInfoScreen,
WSTR("App.flashHeaderWriteWorker.flashError"),
rom::getErrorString(error)
);
_workerStatus.setNextScreen(_messageScreen);
return false;
}
bool App::_flashHeaderEraseWorker(void) {
auto driver = rom::flash.newDriver(); auto driver = rom::flash.newDriver();
size_t sectorLength = driver->getChipSize().eraseSectorLength; size_t sectorLength = driver->getChipSize().eraseSectorLength;
@ -392,7 +362,7 @@ bool App::_flashHeaderEraseWorker(void) {
goto _unsupported; goto _unsupported;
_checksumScreen.valid = false; _checksumScreen.valid = false;
_workerStatus.update(0, 1, WSTR("App.flashHeaderEraseWorker.erase")); _workerStatus.update(0, 2, WSTR("App.flashHeaderWriteWorker.erase"));
// The flash can only be erased with sector granularity, so all data in the // The flash can only be erased with sector granularity, so all data in the
// first sector other than the header must be backed up and rewritten. // first sector other than the header must be backed up and rewritten.
@ -412,6 +382,27 @@ bool App::_flashHeaderEraseWorker(void) {
if (error) if (error)
goto _flashError; goto _flashError;
_workerStatus.update(1, 2, WSTR("App.flashHeaderWriteWorker.write"));
// Write the new header (if any).
if (!_romHeaderDump.isDataEmpty()) {
ptr = reinterpret_cast<const uint16_t *>(_romHeaderDump.data);
for (
uint32_t offset = rom::FLASH_HEADER_OFFSET;
offset < rom::FLASH_CRC_OFFSET; offset += 2
) {
auto value = *(ptr++);
driver->write(offset, value);
error = driver->flushWrite(offset, value);
if (error)
goto _flashError;
}
}
// Restore the rest of the sector that was erased.
ptr = reinterpret_cast<const uint16_t *>(&buffer[rom::FLASH_CRC_OFFSET]); ptr = reinterpret_cast<const uint16_t *>(&buffer[rom::FLASH_CRC_OFFSET]);
for ( for (
@ -439,7 +430,7 @@ _flashError:
_messageScreen.setMessage( _messageScreen.setMessage(
MESSAGE_ERROR, _storageInfoScreen, MESSAGE_ERROR, _storageInfoScreen,
WSTR("App.flashHeaderEraseWorker.flashError"), WSTR("App.flashHeaderWriteWorker.flashError"),
rom::getErrorString(error) rom::getErrorString(error)
); );
_workerStatus.setNextScreen(_messageScreen); _workerStatus.setNextScreen(_messageScreen);
@ -450,7 +441,7 @@ _unsupported:
_messageScreen.setMessage( _messageScreen.setMessage(
MESSAGE_ERROR, _storageInfoScreen, MESSAGE_ERROR, _storageInfoScreen,
WSTR("App.flashHeaderEraseWorker.unsupported") WSTR("App.flashHeaderWriteWorker.unsupported")
); );
_workerStatus.setNextScreen(_messageScreen); _workerStatus.setNextScreen(_messageScreen);
return false; return false;

View File

@ -152,7 +152,11 @@ size_t CartDump::toQRString(char *output) const {
bool ROMHeaderDump::isDataEmpty(void) const { bool ROMHeaderDump::isDataEmpty(void) const {
auto sum = util::sum(data, sizeof(data)); auto sum = util::sum(data, sizeof(data));
#if 0
return (!sum || (sum == (0xff * sizeof(data)))); return (!sum || (sum == (0xff * sizeof(data))));
#else
return (sum == (0xff * sizeof(data)));
#endif
} }
} }

View File

@ -17,7 +17,7 @@ enum ChipType : uint8_t {
ZS01 = 3 ZS01 = 3
}; };
enum CartDumpFlag : uint8_t { enum DumpFlag : uint8_t {
DUMP_HAS_SYSTEM_ID = 1 << 0, DUMP_HAS_SYSTEM_ID = 1 << 0,
DUMP_HAS_CART_ID = 1 << 1, DUMP_HAS_CART_ID = 1 << 1,
DUMP_CONFIG_OK = 1 << 2, DUMP_CONFIG_OK = 1 << 2,
@ -65,7 +65,8 @@ public:
/* Cartridge dump structure */ /* Cartridge dump structure */
static constexpr uint16_t DUMP_HEADER_MAGIC = 0x573d; static constexpr uint16_t CART_DUMP_HEADER_MAGIC = 0x573d;
static constexpr uint16_t ROM_HEADER_DUMP_HEADER_MAGIC = 0x573e;
struct ChipSize { struct ChipSize {
public: public:
@ -86,14 +87,14 @@ public:
uint8_t data[512]; uint8_t data[512];
inline CartDump(void) inline CartDump(void)
: magic(DUMP_HEADER_MAGIC), chipType(NONE), flags(0) {} : magic(CART_DUMP_HEADER_MAGIC), chipType(NONE), flags(0) {}
inline const ChipSize &getChipSize(void) const { inline const ChipSize &getChipSize(void) const {
return CHIP_SIZES[chipType]; return CHIP_SIZES[chipType];
} }
inline bool validateMagic(void) const { inline bool validateMagic(void) const {
return return
(magic == DUMP_HEADER_MAGIC) && (magic == CART_DUMP_HEADER_MAGIC) &&
(chipType > 0) && (chipType > 0) &&
(chipType < NUM_CHIP_TYPES); (chipType < NUM_CHIP_TYPES);
} }
@ -144,10 +145,23 @@ public:
class [[gnu::packed]] ROMHeaderDump { class [[gnu::packed]] ROMHeaderDump {
public: public:
uint16_t magic;
uint8_t _reserved, flags;
Identifier systemID; Identifier systemID;
uint8_t data[rom::FLASH_CRC_OFFSET - rom::FLASH_HEADER_OFFSET]; uint8_t data[rom::FLASH_CRC_OFFSET - rom::FLASH_HEADER_OFFSET];
inline ROMHeaderDump(void)
: magic(ROM_HEADER_DUMP_HEADER_MAGIC), _reserved(0), flags(0) {}
inline bool validateMagic(void) const {
return (magic == ROM_HEADER_DUMP_HEADER_MAGIC);
}
inline void clearData(void) {
__builtin_memset(data, 0xff, sizeof(data));
}
bool isDataEmpty(void) const; bool isDataEmpty(void) const;
}; };

View File

@ -339,7 +339,7 @@ bool ExtendedCartParser::validate(void) {
// Used alongside the system ID and the header itself to calculate the MD5 used // Used alongside the system ID and the header itself to calculate the MD5 used
// as a header signature. Seems to be the same in all games. // as a header signature. Seems to be the same in all games.
static const uint8_t _EXTENDED_HEADER_SIGNATURE_SALT[]{ static const uint8_t _SIGNATURE_SALT[]{
0xc1, 0xa2, 0x03, 0xd6, 0xab, 0x70, 0x85, 0x5e 0xc1, 0xa2, 0x03, 0xd6, 0xab, 0x70, 0x85, 0x5e
}; };
@ -366,9 +366,7 @@ void ExtendedROMHeaderParser::_calculateSignature(uint8_t *output) const {
md5.update( md5.update(
reinterpret_cast<const uint8_t *>(_getHeader()), sizeof(ExtendedHeader) reinterpret_cast<const uint8_t *>(_getHeader()), sizeof(ExtendedHeader)
); );
md5.update( md5.update(_SIGNATURE_SALT, sizeof(_SIGNATURE_SALT));
_EXTENDED_HEADER_SIGNATURE_SALT, sizeof(_EXTENDED_HEADER_SIGNATURE_SALT)
);
md5.digest(buffer); md5.digest(buffer);
for (int i = 0; i < 8; i++) for (int i = 0; i < 8; i++)
@ -549,6 +547,7 @@ static const KnownFormat _KNOWN_ROM_HEADER_FORMATS[]{
.name = "extended + MD5", .name = "extended + MD5",
.format = EXTENDED, .format = EXTENDED,
.flags = DATA_HAS_CODE_PREFIX | DATA_HAS_SYSTEM_ID .flags = DATA_HAS_CODE_PREFIX | DATA_HAS_SYSTEM_ID
| DATA_CHECKSUM_INVERTED
} }
}; };

View File

@ -297,7 +297,7 @@ public:
class [[gnu::packed]] ROMHeaderDBEntry { class [[gnu::packed]] ROMHeaderDBEntry {
public: public:
// TODO: define these flags FormatType formatType;
uint8_t flags; uint8_t flags;
uint16_t year; uint16_t year;

View File

@ -20,14 +20,14 @@ private:
util::Tween<int, util::QuadOutEasing> _buttonAnim; util::Tween<int, util::QuadOutEasing> _buttonAnim;
inline int _getButtonWidth(void) const { inline int _getButtonWidth(void) const {
return ((_width / 4) * 3) / _numButtons - BUTTON_SPACING; return ((_width / 5) * 4) / _numButtons - BUTTON_SPACING;
} }
protected: protected:
int _numButtons, _activeButton, _buttonIndexOffset; int _numButtons, _activeButton, _buttonIndexOffset;
bool _locked; bool _locked;
const char *_buttons[3]; const char *_buttons[4];
public: public:
MessageBoxScreen(void); MessageBoxScreen(void);

View File

@ -1,420 +0,0 @@
# -*- coding: utf-8 -*-
__version__ = "0.3.1"
__author__ = "spicyjpeg"
import re
from dataclasses import dataclass
from enum import IntEnum, IntFlag
from struct import Struct, unpack
from typing import Any, Iterable, Iterator, Mapping, Sequence
## Definitions
class ChipType(IntEnum):
NONE = 0
X76F041 = 1
X76F100 = 2
ZS01 = 3
class FormatType(IntEnum):
BLANK = 0
SIMPLE = 1
BASIC = 2
EXTENDED = 3
class TraceIDType(IntEnum):
TID_NONE = 0
TID_81 = 1
TID_82_BIG_ENDIAN = 2
TID_82_LITTLE_ENDIAN = 3
class DumpFlag(IntFlag):
DUMP_HAS_SYSTEM_ID = 1 << 0
DUMP_HAS_CART_ID = 1 << 1
DUMP_CONFIG_OK = 1 << 2
DUMP_SYSTEM_ID_OK = 1 << 3
DUMP_CART_ID_OK = 1 << 4
DUMP_ZS_ID_OK = 1 << 5
DUMP_PUBLIC_DATA_OK = 1 << 6
DUMP_PRIVATE_DATA_OK = 1 << 7
class DataFlag(IntFlag):
DATA_HAS_CODE_PREFIX = 1 << 0
DATA_HAS_TRACE_ID = 1 << 1
DATA_HAS_CART_ID = 1 << 2
DATA_HAS_INSTALL_ID = 1 << 3
DATA_HAS_SYSTEM_ID = 1 << 4
DATA_HAS_PUBLIC_SECTION = 1 << 5
DATA_CHECKSUM_INVERTED = 1 << 6
DATA_GX706_WORKAROUND = 1 << 7
# Character 0: always G
# Character 1: region related? (can be B, C, E, K, L, N, Q, U, X)
# Characters 2-4: identifier (700-999 or A00-A99 ~ D00-D99)
GAME_CODE_REGEX: re.Pattern = \
re.compile(rb"G[A-Z][0-9A-D][0-9][0-9]", re.IGNORECASE)
# Character 0: region (A=Asia?, E=Europe, J=Japan, K=Korea, S=?, U=US)
# Character 1: type/variant (A-F=regular, R-W=e-Amusement, X-Z=?)
# Characters 2-4: game revision (A-D or Z00-Z99, optional)
GAME_REGION_REGEX: re.Pattern = \
re.compile(rb"[AEJKSU][A-FR-WX-Z]([A-D]|Z[0-9][0-9])?", re.IGNORECASE)
SYSTEM_ID_IO_BOARDS: Sequence[str] = (
"GX700-PWB(K)", # Kick & Kick expansion board
"GX894-PWB(B)", # Digital I/O board
"GX921-PWB(B)", # DDR Karaoke Mix expansion board
"PWB0000073070" # GunMania expansion board
)
## Common data structures
@dataclass
class IdentifierSet:
traceID: bytes | None = None # aka TID
cartID: bytes | None = None # aka SID
installID: bytes | None = None # aka MID
systemID: bytes | None = None # aka XID
def __init__(self, data: bytes):
ids: list[bytes | None] = []
for offset in range(0, 32, 8):
_id: bytes = data[offset:offset + 8]
ids.append(_id if sum(_id) else None)
self.traceID, self.cartID, self.installID, self.systemID = ids
def getFlags(self) -> DataFlag:
flags: DataFlag = DataFlag(0)
if self.traceID:
flags |= DataFlag.DATA_HAS_TRACE_ID
if self.cartID:
flags |= DataFlag.DATA_HAS_CART_ID
if self.installID:
flags |= DataFlag.DATA_HAS_INSTALL_ID
if self.systemID:
flags |= DataFlag.DATA_HAS_SYSTEM_ID
return flags
def getCartIDChecksum(self, param: int) -> int:
if self.cartID is None:
return 0
checksum: int = 0
for i in range(6):
value: int = self.cartID[i + 1]
for j in range(i * 8, (i + 1) * 8):
if value & 1:
checksum ^= 1 << (j % param)
value >>= 1
return checksum & 0xffff
def getTraceIDType(self, param: int) -> TraceIDType:
if self.traceID is None:
return TraceIDType.TID_NONE
match self.traceID[0]:
case 0x81:
return TraceIDType.TID_81
case 0x82:
checksum: int = self.getCartIDChecksum(param)
big: int = unpack("> H", self.traceID[1:3])[0]
little: int = unpack("< H", self.traceID[1:3])[0]
if checksum == big:
return TraceIDType.TID_82_BIG_ENDIAN
elif checksum == little:
return TraceIDType.TID_82_LITTLE_ENDIAN
raise ValueError(f"trace ID mismatch, exp=0x{checksum:04x}, big=0x{big:04x}, little=0x{little:04x}")
case prefix:
raise ValueError(f"unknown trace ID prefix: 0x{prefix:02x}")
@dataclass
class PublicIdentifierSet:
installID: bytes | None = None # aka MID
systemID: bytes | None = None # aka XID
def __init__(self, data: bytes):
ids: list[bytes | None] = []
for offset in range(0, 16, 8):
_id: bytes = data[offset:offset + 8]
ids.append(_id if sum(_id) else None)
self.installID, self.systemID = ids
def getFlags(self) -> DataFlag:
flags: DataFlag = DataFlag(0)
if self.installID:
flags |= DataFlag.DATA_HAS_INSTALL_ID
if self.systemID:
flags |= DataFlag.DATA_HAS_SYSTEM_ID
return flags
## Cartridge dump structure
# Serialized dump header: magic, chip type, flags, then system ID, cart ID,
# ZS01 ID, data key and configuration (8 bytes each). See Dump.serialize().
_DUMP_HEADER_STRUCT: Struct = Struct("< H 2B 8s 8s 8s 8s 8s")
_DUMP_HEADER_MAGIC: int = 0x573d

# ( total data length, public section offset, public section length ) in
# bytes for each supported chip type; consumed by Dump.getChipSize() and
# _getPublicData().
_CHIP_SIZES: Mapping[ChipType, tuple[int, int, int]] = {
	ChipType.X76F041: ( 512, 384, 128 ),
	ChipType.X76F100: ( 112, 0, 0 ),
	ChipType.ZS01: ( 112, 0, 32 )
}
@dataclass
class Dump:
	"""Raw dump of a cartridge's data alongside its identifiers and key."""

	chipType: ChipType
	flags: DumpFlag
	systemID: bytes
	cartID: bytes
	zsID: bytes
	dataKey: bytes
	config: bytes
	data: bytes

	def getChipSize(self) -> tuple[int, int, int]:
		# Look up ( total length, public offset, public length ) for the
		# dump's chip type.
		return _CHIP_SIZES[self.chipType]

	def serialize(self) -> bytes:
		# Prepend the fixed-size header to the raw data section.
		header: bytes = _DUMP_HEADER_STRUCT.pack(
			_DUMP_HEADER_MAGIC, self.chipType, self.flags, self.systemID,
			self.cartID, self.zsID, self.dataKey, self.config
		)

		return header + self.data
def parseDump(data: bytes) -> Dump:
	"""Deserialize a dump produced by Dump.serialize().

	Raises ValueError if the magic number does not match.
	"""
	headerSize: int = _DUMP_HEADER_STRUCT.size
	magic, chipType, flags, systemID, cartID, zsID, dataKey, config = \
		_DUMP_HEADER_STRUCT.unpack(data[0:headerSize])

	if magic != _DUMP_HEADER_MAGIC:
		raise ValueError(f"invalid or unsupported dump format: 0x{magic:04x}")

	# Only as many bytes as the chip type holds are preserved; any trailing
	# data is discarded.
	dataLength, _, _ = _CHIP_SIZES[chipType]

	return Dump(
		chipType, flags, systemID, cartID, zsID, dataKey, config,
		data[headerSize:headerSize + dataLength]
	)
## Cartridge data parsers
# "Basic" header: 2-byte region, 2-byte code prefix, 8-bit checksum, padding.
_BASIC_HEADER_STRUCT: Struct = Struct("< 2s 2s B 3x")
# "Extended" header: 8-byte code, 16-bit year, 4-byte region, 16-bit checksum.
_EXTENDED_HEADER_STRUCT: Struct = Struct("< 8s H 4s H")

# The system and install IDs are excluded from validation as they may not be
# always present.
_IDENTIFIER_FLAG_MASK: DataFlag = \
	DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
def _checksum8(data: Iterable[int], invert: bool = False):
return (sum(data) & 0xff) ^ (0xff if invert else 0)
def _checksum16(data: Iterable[int], invert: bool = False):
it: Iterator = iter(data)
values: map[int] = map(lambda x: x[0] | (x[1] << 8), zip(it, it))
return (sum(values) & 0xffff) ^ (0xffff if invert else 0)
def _getPublicData(dump: Dump, flags: DataFlag, maxLength: int = 512) -> bytes:
	# Return up to maxLength bytes from the dump's public section when the
	# layout declares one, otherwise from the beginning of the data.
	if not (flags & DataFlag.DATA_HAS_PUBLIC_SECTION):
		return dump.data[0:maxLength]

	_, offset, length = dump.getChipSize()

	return dump.data[offset:offset + min(length, maxLength)]
class ParserError(Exception):
	"""Raised by parser constructors when a dump does not match their format.

	Derives from Exception (not BaseException): user-defined exceptions
	deriving from BaseException bypass generic "except Exception" handlers,
	which is reserved for interpreter-level exits such as KeyboardInterrupt.
	"""
	pass
@dataclass
class Parser:
	"""Base class for parsed cartridge data.

	Subclasses populate these fields from a Dump in their constructors and
	raise ParserError when the dump does not match their expected layout.
	"""
	formatType: FormatType
	flags: DataFlag
	identifiers: IdentifierSet
	publicIdentifiers: PublicIdentifierSet
	region: str | None = None # e.g. "JA" (None if not parsed)
	codePrefix: str | None = None # first two characters of game code
	code: str | None = None # full game code, e.g. "GX700"
	year: int | None = None
class SimpleParser(Parser):
	"""Parser for the simplest data format, holding only a region string."""

	def __init__(self, dump: Dump, flags: DataFlag):
		regionField: bytes = _getPublicData(dump, flags, 8).rstrip(b"\0")

		if GAME_REGION_REGEX.fullmatch(regionField) is None:
			raise ParserError(f"invalid game region: {regionField}")

		super().__init__(
			FormatType.SIMPLE, flags, IdentifierSet(b""),
			PublicIdentifierSet(b""), regionField.decode("ascii")
		)
class BasicParser(Parser):
	"""Parser for the "basic" data format.

	The header (see _BASIC_HEADER_STRUCT) holds a 2-byte region string, a
	2-byte game code prefix and an 8-bit checksum of the first 4 bytes;
	identifiers are stored immediately after it.
	"""

	def __init__(self, dump: Dump, flags: DataFlag):
		data: bytes = _getPublicData(dump, flags, _BASIC_HEADER_STRUCT.size)
		pri: IdentifierSet = IdentifierSet(dump.data[_BASIC_HEADER_STRUCT.size:])

		region, codePrefix, checksum = _BASIC_HEADER_STRUCT.unpack(data)
		codePrefix: bytes = codePrefix.rstrip(b"\0")

		# Validate the checksum, then ensure the parsed fields and the
		# identifiers actually present in the dump match the provided flags.
		value: int = _checksum8(
			data[0:4], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
		)

		if value != checksum:
			raise ParserError(f"invalid header checksum, exp=0x{value:02x}, got=0x{checksum:02x}")
		if GAME_REGION_REGEX.fullmatch(region) is None:
			raise ParserError(f"invalid game region: {region}")
		if bool(flags & DataFlag.DATA_HAS_CODE_PREFIX) != bool(codePrefix):
			raise ParserError(f"game code prefix should{' not' if codePrefix else ''} be present")
		if (pri.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
			raise ParserError("identifier flags do not match")

		super().__init__(
			FormatType.BASIC, flags, pri, PublicIdentifierSet(b""),
			region.decode("ascii"), codePrefix.decode("ascii") or None
		)
class ExtendedParser(Parser):
	"""Parser for the "extended" data format.

	The header (see _EXTENDED_HEADER_STRUCT) holds an 8-byte game code, a
	16-bit year, a 4-byte region string and a 16-bit checksum of the first
	14 bytes; a 16-byte public identifier area follows the header and the
	remaining identifiers come after that.
	"""

	def __init__(self, dump: Dump, flags: DataFlag):
		data: bytes = \
			_getPublicData(dump, flags, _EXTENDED_HEADER_STRUCT.size + 16)
		pri: IdentifierSet = \
			IdentifierSet(dump.data[_EXTENDED_HEADER_STRUCT.size + 16:])
		pub: PublicIdentifierSet = \
			PublicIdentifierSet(data[_EXTENDED_HEADER_STRUCT.size:])

		# Dumps flagged with the GX706 workaround need the second byte of the
		# header replaced with "X" before parsing and validation.
		if flags & DataFlag.DATA_GX706_WORKAROUND:
			data = data[0:1] + b"X" + data[2:]

		code, year, region, checksum = \
			_EXTENDED_HEADER_STRUCT.unpack(data[0:_EXTENDED_HEADER_STRUCT.size])
		code: bytes = code.rstrip(b"\0")
		region: bytes = region.rstrip(b"\0")

		# Validate the checksum, then ensure the parsed fields and the
		# identifiers actually present in the dump match the provided flags.
		value: int = _checksum16(
			data[0:14], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
		)

		if value != checksum:
			raise ParserError(f"invalid header checksum, exp=0x{value:04x}, got=0x{checksum:04x}")
		if GAME_CODE_REGEX.fullmatch(code) is None:
			raise ParserError(f"invalid game code: {code}")
		if GAME_REGION_REGEX.fullmatch(region) is None:
			raise ParserError(f"invalid game region: {region}")
		if (pri.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
			raise ParserError("identifier flags do not match")

		_code: str = code.decode("ascii")

		super().__init__(
			FormatType.EXTENDED, flags, pri, pub, region.decode("ascii"),
			_code[0:2], _code, year
		)
## Cartridge database
# Serialized DB entry layout; field order must match DBEntry.serialize().
DB_ENTRY_STRUCT: Struct = Struct("< 6B H 8s 8s 8s 96s")
# Candidate modulo parameters tried when heuristically matching a trace ID
# against the cartridge ID checksum (see DBEntry.__init__ and
# IdentifierSet.getCartIDChecksum).
TRACE_ID_PARAMS: Sequence[int] = 16, 14
@dataclass
class DBEntry:
	"""Single entry of a cartridge database.

	Bundles a game's metadata with the information needed to regenerate its
	cartridge contents (data key, trace ID parameters and flags).
	"""

	code: str
	region: str
	name: str
	dataKey: bytes
	chipType: ChipType
	formatType: FormatType
	traceIDType: TraceIDType
	flags: DataFlag

	traceIDParam: int = 0
	installIDPrefix: int = 0
	year: int = 0

	def __init__(
		self, code: str, region: str, name: str, dump: Dump, parser: Parser
	):
		# Find the correct parameters for the trace ID heuristically. Note
		# that the loop target is the attribute itself, so the successful
		# parameter value is left in traceIDParam when the loop breaks.
		_type: TraceIDType | None = None

		for self.traceIDParam in TRACE_ID_PARAMS:
			try:
				_type = parser.identifiers.getTraceIDType(self.traceIDParam)
			except ValueError:
				continue

			break

		if _type is None:
			raise RuntimeError("failed to determine trace ID parameters")

		self.code = code
		self.region = region
		self.name = name
		self.dataKey = dump.dataKey
		self.chipType = dump.chipType
		self.formatType = parser.formatType
		self.traceIDType = _type
		self.flags = parser.flags
		self.year = parser.year or 0

		# Prefer the install ID from the public section over the private one,
		# falling back to no prefix if neither is present.
		if parser.publicIdentifiers.installID is not None:
			self.installIDPrefix = parser.publicIdentifiers.installID[0]
		elif parser.identifiers.installID is not None:
			self.installIDPrefix = parser.identifiers.installID[0]
		else:
			self.installIDPrefix = 0

	# Implement the comparison overload so sorting will work. The 3-digit number
	# in the game code is used as a key.
	def __lt__(self, entry: Any) -> bool:
		return ( self.code[2:], self.code[0:2], self.region, self.name ) < \
			( entry.code[2:], entry.code[0:2], entry.region, entry.name )

	def requiresCartID(self) -> bool:
		# The cartridge ID is also required (even when not stored directly)
		# when the trace ID is derived from it, i.e. for 0x82-prefixed trace
		# IDs (see IdentifierSet.getTraceIDType).
		if self.flags & DataFlag.DATA_HAS_CART_ID:
			return True
		if (self.flags & DataFlag.DATA_HAS_TRACE_ID) and \
			(self.traceIDType >= TraceIDType.TID_82_BIG_ENDIAN):
			return True

		return False

	def serialize(self) -> bytes:
		# Field order must match DB_ENTRY_STRUCT.
		return DB_ENTRY_STRUCT.pack(
			self.chipType,
			self.formatType,
			self.traceIDType,
			self.flags,
			self.traceIDParam,
			self.installIDPrefix,
			self.year,
			self.dataKey,
			self.code.encode("ascii"),
			self.region.encode("ascii"),
			self.name.encode("ascii")
		)

View File

@ -1,109 +1,27 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
__version__ = "0.3.4" __version__ = "0.4.1"
__author__ = "spicyjpeg" __author__ = "spicyjpeg"
import json, logging, os, re import json, logging, os, re
from argparse import ArgumentParser, FileType, Namespace from argparse import ArgumentParser, FileType, Namespace
from collections import Counter, defaultdict from collections import Counter, defaultdict
from dataclasses import dataclass
from pathlib import Path from pathlib import Path
from struct import Struct from struct import Struct
from typing import Any, Generator, Iterable, Mapping, Sequence, TextIO, Type from typing import Any, Mapping, Sequence, TextIO
from _common import * from common.cart import CartDump, DumpFlag
from common.cartdata import *
from common.games import GameDB, GameDBEntry
## Game list (loaded from games.json) ## MAME NVRAM file parser
@dataclass
class GameEntry:
code: str
region: str
name: str
mameID: str | None = None
installCart: str | None = None
gameCart: str | None = None
ioBoard: str | None = None
lockedToIOBoard: bool = False
# Implement the comparison overload so sorting will work. The 3-digit number
# in the game code is used as a key.
def __lt__(self, entry: Any) -> bool:
return ( self.code[2:], self.code[0:2], self.region, self.name ) < \
( entry.code[2:], entry.code[0:2], entry.region, entry.name )
def __str__(self) -> str:
return f"{self.code} {self.region}"
def getFullName(self) -> str:
return f"{self.name} [{self.code} {self.region}]"
def hasCartID(self) -> bool:
return self.gameCart.endswith("DS2401")
def hasSystemID(self) -> bool:
return (self.ioBoard in SYSTEM_ID_IO_BOARDS)
class GameDB:
def __init__(self, entries: Iterable[Mapping[str, Any]] | None = None):
self._entries: defaultdict[str, list[GameEntry]] = defaultdict(list)
if entries:
for entry in entries:
self.addEntry(entry)
def addEntry(self, entryObj: Mapping[str, Any]):
code: str = entryObj["code"].strip().upper()
region: str = entryObj["region"].strip().upper()
name: str = entryObj["name"]
mameID: str | None = entryObj.get("id", None)
installCart: str | None = entryObj.get("installCart", None)
gameCart: str | None = entryObj.get("gameCart", None)
ioBoard: str | None = entryObj.get("ioBoard", None)
lockedToIOBoard: bool = entryObj.get("lockedToIOBoard", False)
if GAME_CODE_REGEX.fullmatch(code.encode("ascii")) is None:
raise ValueError(f"invalid game code: {code}")
if GAME_REGION_REGEX.fullmatch(region.encode("ascii")) is None:
raise ValueError(f"invalid game region: {region}")
entry: GameEntry = GameEntry(
code, region, name, mameID, installCart, gameCart, ioBoard,
lockedToIOBoard
)
# Store all entries indexed by their game code and first two characters
# of the region code. This allows for quick retrieval of all revisions
# of a game.
self._entries[code + region[0:2]].append(entry)
def lookup(
self, code: str, region: str
) -> Generator[GameEntry, None, None]:
_code: str = code.strip().upper()
_region: str = region.strip().upper()
# If only two characters of the region code are provided, match all
# entries whose region code starts with those two characters (even if
# longer).
for entry in self._entries[_code + _region[0:2]]:
if _region == entry.region[0:len(_region)]:
yield entry
## MAME dump parser
_MAME_X76F041_STRUCT: Struct = Struct("< 4x 8s 8s 8s 8s 512s") _MAME_X76F041_STRUCT: Struct = Struct("< 4x 8s 8s 8s 8s 512s")
_MAME_X76F100_STRUCT: Struct = Struct("< 4x 8s 8s 112s") _MAME_X76F100_STRUCT: Struct = Struct("< 4x 8s 8s 112s")
_MAME_ZS01_STRUCT: Struct = Struct("< 4x 8s 8s 8s 112s") _MAME_ZS01_STRUCT: Struct = Struct("< 4x 8s 8s 8s 112s")
_MAME_DUMP_SIZES: Sequence[int] = ( def parseMAMEDump(dump: bytes) -> CartDump:
_MAME_X76F041_STRUCT.size, _MAME_X76F100_STRUCT.size, _MAME_ZS01_STRUCT.size
)
def parseMAMEDump(dump: bytes) -> Dump:
systemID: bytes = bytes(8) systemID: bytes = bytes(8)
cartID: bytes = bytes(8) cartID: bytes = bytes(8)
zsID: bytes = bytes(8) zsID: bytes = bytes(8)
@ -124,123 +42,67 @@ def parseMAMEDump(dump: bytes) -> Dump:
dataKey, readKey, data = _MAME_X76F100_STRUCT.unpack(dump) dataKey, readKey, data = _MAME_X76F100_STRUCT.unpack(dump)
if dataKey != readKey: if dataKey != readKey:
raise RuntimeError(chipType, "X76F100 dumps with different read/write keys are not supported") raise RuntimeError(
chipType,
"X76F100 dumps with different read/write keys are not "
"supported"
)
case 0x5a530001: case 0x5a530001:
chipType: ChipType = ChipType.ZS01 chipType: ChipType = ChipType.ZS01
_, dataKey, config, data = _MAME_ZS01_STRUCT.unpack(dump) _, dataKey, config, data = _MAME_ZS01_STRUCT.unpack(dump)
#zsID = MAME_ZS_ID #zsID = _MAME_ZS_ID
flags |= DumpFlag.DUMP_CONFIG_OK | DumpFlag.DUMP_ZS_ID_OK flags |= DumpFlag.DUMP_CONFIG_OK | DumpFlag.DUMP_ZS_ID_OK
case _id: case _id:
raise RuntimeError(ChipType.NONE, f"unrecognized chip ID: 0x{_id:08x}") raise RuntimeError(
ChipType.NONE, f"unrecognized chip ID: 0x{_id:08x}"
)
#if data.find(MAME_CART_ID) >= 0: #if data.find(_MAME_CART_ID) >= 0:
#cartID = MAME_CART_ID #cartID = _MAME_CART_ID
#flags |= DumpFlag.DUMP_HAS_CART_ID | DumpFlag.DUMP_CART_ID_OK #flags |= DumpFlag.DUMP_HAS_CART_ID | DumpFlag.DUMP_CART_ID_OK
#if data.find(MAME_SYSTEM_ID) >= 0: #if data.find(_MAME_SYSTEM_ID) >= 0:
#systemID = MAME_SYSTEM_ID #systemID = _MAME_SYSTEM_ID
#flags |= DumpFlag.DUMP_HAS_SYSTEM_ID | DumpFlag.DUMP_SYSTEM_ID_OK #flags |= DumpFlag.DUMP_HAS_SYSTEM_ID | DumpFlag.DUMP_SYSTEM_ID_OK
return Dump(chipType, flags, systemID, cartID, zsID, dataKey, config, data) return CartDump(
chipType, flags, systemID, cartID, zsID, dataKey, config, data
## Data format identification
_KNOWN_FORMATS: Sequence[tuple[str, Type, DataFlag]] = (
(
# Used by GCB48 (and possibly other games?)
"region only",
SimpleParser,
DataFlag.DATA_HAS_PUBLIC_SECTION
), (
"basic (no IDs)",
BasicParser,
DataFlag.DATA_CHECKSUM_INVERTED
), (
"basic + TID",
BasicParser,
DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_CHECKSUM_INVERTED
), (
"basic + SID",
BasicParser,
DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_CHECKSUM_INVERTED
), (
"basic + TID, SID",
BasicParser,
DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
| DataFlag.DATA_CHECKSUM_INVERTED
), (
"basic + prefix, TID, SID",
BasicParser,
DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
| DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_CHECKSUM_INVERTED
), (
# Used by most pre-ZS01 Bemani games
"basic + prefix, all IDs",
BasicParser,
DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
| DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
| DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_CHECKSUM_INVERTED
), (
"extended (no IDs)",
ExtendedParser,
DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_CHECKSUM_INVERTED
), (
"extended (no IDs, alt)",
ExtendedParser,
DataFlag.DATA_HAS_CODE_PREFIX
), (
# Used by GX706
"extended (no IDs, GX706)",
ExtendedParser,
DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_GX706_WORKAROUND
), (
# Used by GE936/GK936 and all ZS01 Bemani games
"extended + all IDs",
ExtendedParser,
DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
| DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
| DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_HAS_PUBLIC_SECTION
| DataFlag.DATA_CHECKSUM_INVERTED
) )
)
def newCartParser(dump: Dump) -> Parser:
for name, constructor, flags in reversed(_KNOWN_FORMATS):
try:
parser: Any = constructor(dump, flags)
except ParserError:
continue
logging.debug(f"found known data format: {name}")
return parser
raise RuntimeError("no known data format found")
## Dump processing ## Dump processing
def processDump( def processDump(
dump: Dump, db: GameDB, nameHint: str = "", exportFile: TextIO | None = None dump: CartDump, gameDB: GameDB, nameHints: Sequence[str] = [],
) -> DBEntry: exportFile: TextIO | None = None
parser: Parser = newCartParser(dump) ) -> CartDBEntry:
parser: CartParser = newCartParser(dump)
# If the parser could not find a valid game code in the dump, attempt to # If the parser could not find a valid game code in the dump, attempt to
# parse it from the provided hint (filename). # parse it from the provided hints.
if parser.region is None: if parser.region is None:
raise RuntimeError("can't parse game region from dump") raise RuntimeError("can't parse game region from dump")
if parser.code is None: if parser.code is None:
for hint in nameHints:
code: re.Match | None = GAME_CODE_REGEX.search( code: re.Match | None = GAME_CODE_REGEX.search(
nameHint.upper().encode("ascii") hint.upper().encode("ascii")
) )
if code is None: if code is not None:
raise RuntimeError("can't parse game code from dump nor from filename")
else:
parser.code = code.group().decode("ascii") parser.code = code.group().decode("ascii")
break
matches: list[GameEntry] = sorted(db.lookup(parser.code, parser.region)) if parser.code is None:
raise RuntimeError(
"can't parse game code from dump nor from filename"
)
matches: list[GameDBEntry] = sorted(
gameDB.lookupByCode(parser.code, parser.region)
)
if exportFile: if exportFile:
_, flags = str(parser.flags).split(".", 1) _, flags = str(parser.flags).split(".", 1)
@ -249,14 +111,21 @@ def processDump(
) )
exportFile.write( exportFile.write(
f"{dump.chipType.name},{nameHint},{parser.code},{parser.region}," f"{dump.chipType.name},"
f"{matchList},{parser.formatType.name},{flags}\n" f"{' '.join(nameHints)},"
f"{parser.code},"
f"{parser.region},"
f"{matchList},"
f"{parser.getFormatType().name},"
f"{flags}\n"
) )
if not matches: if not matches:
raise RuntimeError(f"{parser.code} {parser.region} not found in game list") raise RuntimeError(
f"{parser.code} {parser.region} not found in game list"
)
# If more than one match is found, use the first result. # If more than one match is found, use the first result.
game: GameEntry = matches[0] game: GameDBEntry = matches[0]
if game.hasCartID(): if game.hasCartID():
if not (parser.flags & DataFlag.DATA_HAS_CART_ID): if not (parser.flags & DataFlag.DATA_HAS_CART_ID):
@ -265,7 +134,7 @@ def processDump(
if parser.flags & DataFlag.DATA_HAS_CART_ID: if parser.flags & DataFlag.DATA_HAS_CART_ID:
raise RuntimeError("dump has a cartridge ID but game does not") raise RuntimeError("dump has a cartridge ID but game does not")
if game.hasSystemID() and game.lockedToIOBoard: if game.hasSystemID() and game.cartLockedToIOBoard:
if not (parser.flags & DataFlag.DATA_HAS_SYSTEM_ID): if not (parser.flags & DataFlag.DATA_HAS_SYSTEM_ID):
raise RuntimeError("game has a system ID but dump does not") raise RuntimeError("game has a system ID but dump does not")
else: else:
@ -273,15 +142,21 @@ def processDump(
raise RuntimeError("dump has a system ID but game does not") raise RuntimeError("dump has a system ID but game does not")
logging.info(f"imported {dump.chipType.name}: {game.getFullName()}") logging.info(f"imported {dump.chipType.name}: {game.getFullName()}")
return DBEntry(parser.code, parser.region, game.name, dump, parser) return CartDBEntry(parser.code, parser.region, game.name, dump, parser)
## Main ## Main
_MAME_DUMP_SIZES: Sequence[int] = (
_MAME_X76F041_STRUCT.size,
_MAME_X76F100_STRUCT.size,
_MAME_ZS01_STRUCT.size
)
def createParser() -> ArgumentParser: def createParser() -> ArgumentParser:
parser = ArgumentParser( parser = ArgumentParser(
description = \ description = \
"Recursively scans a directory for MAME dumps of X76F041 and ZS01 " "Recursively scans a directory for MAME dumps of X76F041 and ZS01 "
"cartridges, analyzes them and generates .cartdb files.", "cartridges, analyzes them and generates .db files.",
add_help = False add_help = False
) )
@ -341,57 +216,73 @@ def main():
args: Namespace = parser.parse_args() args: Namespace = parser.parse_args()
setupLogger(args.verbose) setupLogger(args.verbose)
failures: Counter[ChipType] = Counter()
entries: defaultdict[ChipType, list[DBEntry]] = defaultdict(list)
with args.gameList.open("rt") as _file: with args.gameList.open("rt") as _file:
gameList: Sequence[Mapping[str, Any]] = json.load(_file) gameList: Sequence[Mapping[str, Any]] = json.load(_file)
db: GameDB = GameDB(gameList) gameDB: GameDB = GameDB(gameList)
failures: Counter[ChipType] = Counter()
entries: defaultdict[ChipType, list[CartDBEntry]] = defaultdict(list)
if args.export:
args.export.write(
"# chipType,nameHints,code,region,matchList,formatType,flags\n"
)
for inputPath in args.input: for inputPath in args.input:
for rootDir, _, files in os.walk(inputPath): for rootDir, _, files in os.walk(inputPath):
root: Path = Path(rootDir)
for dumpName in files: for dumpName in files:
path: Path = Path(rootDir, dumpName) path: Path = root / dumpName
size: int = os.stat(path).st_size
# Skip files whose size does not match any of the known dump # Skip files whose size does not match any of the known dump
# formats. # formats.
if os.stat(path).st_size not in _MAME_DUMP_SIZES: if size not in _MAME_DUMP_SIZES:
logging.warning(f"ignoring {dumpName}") logging.warning(f"ignoring: {dumpName}, invalid size")
continue continue
try:
with open(path, "rb") as _file: with open(path, "rb") as _file:
dump: Dump = parseMAMEDump(_file.read()) data: bytes = _file.read()
try:
dump: CartDump = parseMAMEDump(data)
except RuntimeError as exc: except RuntimeError as exc:
logging.error(f"failed to import: {path}, {exc}") logging.error(f"failed to parse: {path}, {exc}")
continue continue
hints: Sequence[str] = dumpName, root.name
try: try:
entries[dump.chipType].append( entries[dump.chipType].append(
processDump(dump, db, dumpName, args.export) processDump(dump, gameDB, hints, args.export)
) )
except RuntimeError as exc: except RuntimeError as exc:
logging.error(f"failed to import {dump.chipType.name}: {path}, {exc}") logging.error(
f"failed to import {dump.chipType.name}: {path}, {exc}"
)
failures[dump.chipType] += 1 failures[dump.chipType] += 1
if args.export: if args.export:
args.export.close() args.export.close()
# Sort all entries and generate the cartdb files. # Sort all entries and generate the .db files.
for chipType, dbEntries in entries.items(): for chipType, _entries in entries.items():
if not dbEntries: if not _entries:
logging.warning(f"DB for {chipType.name} is empty") logging.warning(f"no entries generated for {chipType.name}")
continue continue
dbEntries.sort() _entries.sort()
path: Path = args.output / f"{chipType.name.lower()}.cartdb"
with open(path, "wb") as _file: with open(args.output / f"{chipType.name.lower()}.db", "wb") as _file:
for entry in dbEntries: for entry in _entries:
_file.write(entry.serialize()) _file.write(entry.serialize())
logging.info(f"{chipType.name}: {len(dbEntries)} entries saved, {failures[chipType]} failures") logging.info(
f"{chipType.name}: {len(_entries)} entries saved, "
f"{failures[chipType]} failures"
)
if __name__ == "__main__": if __name__ == "__main__":
main() main()

244
tools/buildFlashDB.py Executable file
View File

@ -0,0 +1,244 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__version__ = "0.4.1"
__author__ = "spicyjpeg"
import json, logging, os, re
from argparse import ArgumentParser, FileType, Namespace
from pathlib import Path
from typing import ByteString, Mapping, TextIO
from common.cart import DumpFlag, ROMHeaderDump
from common.cartdata import *
from common.games import GameDB, GameDBEntry
from common.util import InterleavedFile
## Flash dump "parser"
# Number of bytes of each flash dump that are parsed (the executable header).
_ROM_HEADER_LENGTH: int = 0x20
# Fixed system ID assigned to every parsed MAME dump; presumably the DS2401
# ID emulated by MAME -- TODO confirm.
_MAME_SYSTEM_ID: bytes = bytes.fromhex("01 12 34 56 78 9a bc 3d")
def parseFlashDump(dump: bytes) -> ROMHeaderDump:
	"""Wrap the header of a raw flash dump into a ROMHeaderDump object."""
	header: bytes = dump[0:_ROM_HEADER_LENGTH]
	dumpFlags: DumpFlag = \
		DumpFlag.DUMP_HAS_SYSTEM_ID | DumpFlag.DUMP_SYSTEM_ID_OK

	return ROMHeaderDump(dumpFlags, _MAME_SYSTEM_ID, header)
## Dump processing
def processDump(
	dump: ROMHeaderDump, gameDB: GameDB, nameHints: Sequence[str] = (),
	exportFile: TextIO | None = None
) -> ROMHeaderDBEntry:
	"""Parse a flash header dump and match it against the game database.

	dump:       header dump obtained from parseFlashDump()
	gameDB:     database to match the parsed game code and region against
	nameHints:  strings (e.g. file and directory names) the game code is
	            extracted from when not present in the dump itself
	exportFile: optional CSV file a summary row is appended to
	Returns a database entry for the first matching game. Raises RuntimeError
	when parsing fails or no matching game is found.

	Note: the default for nameHints is an immutable empty tuple; a mutable
	default list would be shared across calls and is error-prone.
	"""
	parser: ROMHeaderParser = newROMHeaderParser(dump)

	# If the parser could not find a valid game code in the dump, attempt to
	# parse it from the provided hints.
	if parser.region is None:
		raise RuntimeError("can't parse game region from dump")

	if parser.code is None:
		for hint in nameHints:
			code: re.Match | None = GAME_CODE_REGEX.search(
				hint.upper().encode("ascii")
			)

			if code is not None:
				parser.code = code.group().decode("ascii")
				break

		if parser.code is None:
			raise RuntimeError(
				"can't parse game code from dump nor from filename"
			)

	matches: list[GameDBEntry] = sorted(
		gameDB.lookupByCode(parser.code, parser.region)
	)

	if exportFile:
		_, flags = str(parser.flags).split(".", 1)
		matchList: str = " ".join(
			(game.mameID or f"[{game}]") for game in matches
		)

		exportFile.write(
			f"{' '.join(nameHints)},"
			f"{parser.code},"
			f"{parser.region},"
			f"{matchList},"
			f"{parser.getFormatType().name},"
			f"{flags}\n"
		)

	if not matches:
		raise RuntimeError(
			f"{parser.code} {parser.region} not found in game list"
		)

	# If more than one match is found, use the first result.
	game: GameDBEntry = matches[0]

	# A dump's signature (system ID) must be present if and only if the game
	# both supports system IDs and locks its flash to the I/O board.
	if game.hasSystemID() and game.flashLockedToIOBoard:
		if not (parser.flags & DataFlag.DATA_HAS_SYSTEM_ID):
			raise RuntimeError("game has a system ID but dump has no signature")
	else:
		if parser.flags & DataFlag.DATA_HAS_SYSTEM_ID:
			raise RuntimeError("dump has a signature but game has no system ID")

	logging.info(f"imported: {game.getFullName()}")
	return ROMHeaderDBEntry(parser.code, parser.region, game.name, parser)
## Main
# Expected file sizes in bytes: a full flash dump is 16 MB, while each half
# of an even/odd (.31m/.27m) interleaved dump is 2 MB.
_FULL_DUMP_SIZE: int = 0x1000000
_EVEN_ODD_DUMP_SIZE: int = 0x200000
def createParser() -> ArgumentParser:
	"""Build the command line parser for this tool.

	Positional arguments: the game list JSON file and one or more input
	directories; options control the output directory, CSV export and
	logging verbosity.
	"""
	parser = ArgumentParser(
		description = \
			"Recursively scans a directory for subdirectories containing MAME "
			"flash dumps, analyzes them and generates .db files.",
		add_help = False
	)

	group = parser.add_argument_group("Tool options")
	group.add_argument(
		"-h", "--help",
		action = "help",
		help = "Show this help message and exit"
	)
	group.add_argument(
		"-v", "--verbose",
		action = "count",
		help = "Enable additional logging levels"
	)

	group = parser.add_argument_group("File paths")
	group.add_argument(
		"-o", "--output",
		type = Path,
		default = os.curdir,
		help = "Path to output directory (current directory by default)",
		metavar = "dir"
	)
	group.add_argument(
		"-e", "--export",
		type = FileType("wt"),
		help = "Export CSV table of all dumps parsed to specified path",
		metavar = "file"
	)
	group.add_argument(
		"gameList",
		type = Path,
		help = "Path to JSON file containing game list"
	)
	group.add_argument(
		"input",
		type = Path,
		nargs = "+",
		help = "Paths to input directories"
	)

	return parser
def setupLogger(level: int | None):
logging.basicConfig(
format = "[{levelname:8s}] {message}",
style = "{",
level = (
logging.WARNING,
logging.INFO,
logging.DEBUG
)[min(level or 0, 2)]
)
def main():
parser: ArgumentParser = createParser()
args: Namespace = parser.parse_args()
setupLogger(args.verbose)
with args.gameList.open("rt") as _file:
gameList: Sequence[Mapping[str, Any]] = json.load(_file)
gameDB: GameDB = GameDB(gameList)
failures: int = 0
entries: list[ROMHeaderDBEntry] = []
if args.export:
args.export.write(
"# nameHints,code,region,matchList,formatType,flags\n"
)
for inputPath in args.input:
for rootDir, _, files in os.walk(inputPath):
root: Path = Path(rootDir)
for dumpName in files:
path: Path = root / dumpName
size: int = os.stat(path).st_size
match path.suffix.lower():
case ".31m":
oddPath: Path = Path(rootDir, f"{path.stem}.27m")
if not oddPath.is_file():
logging.warning(f"ignoring: {path}, no .27m file")
continue
if size != _EVEN_ODD_DUMP_SIZE:
logging.warning(f"ignoring: {path}, invalid size")
continue
with \
open(path, "rb") as even, \
open(oddPath, "rb") as odd:
data: ByteString = InterleavedFile(even, odd) \
.read(_ROM_HEADER_LENGTH)
case ".27m":
evenPath: Path = Path(rootDir, f"{path.stem}.31m")
if not evenPath.is_file():
logging.warning(f"ignoring: {path}, no .31m file")
continue
case _:
if size != _FULL_DUMP_SIZE:
logging.warning(f"ignoring: {path}, invalid size")
continue
with open(path, "rb") as _file:
data: ByteString = _file.read(_ROM_HEADER_LENGTH)
dump: ROMHeaderDump = parseFlashDump(data)
hints: Sequence[str] = dumpName, root.name
try:
entries.append(
processDump(dump, gameDB, hints, args.export)
)
except RuntimeError as exc:
logging.error(f"failed to import: {path}, {exc}")
failures += 1
if args.export:
args.export.close()
# Sort all entries and generate the .db file.
if not entries:
logging.warning("no entries generated")
return
entries.sort()
with open(args.output / "flash.db", "wb") as _file:
for entry in entries:
_file.write(entry.serialize())
logging.info(f"{len(entries)} entries saved, {failures} failures")
if __name__ == "__main__":
main()

View File

@ -1,334 +1,19 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
__version__ = "0.3.5" __version__ = "0.4.1"
__author__ = "spicyjpeg" __author__ = "spicyjpeg"
import json, re import json
from argparse import ArgumentParser, FileType, Namespace from argparse import ArgumentParser, FileType, Namespace
from collections import defaultdict
from itertools import chain
from pathlib import Path from pathlib import Path
from struct import Struct from typing import ByteString
from typing import Any, ByteString, Generator, Mapping, Sequence
from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile from zipfile import ZIP_DEFLATED, ZIP_STORED, ZipFile
import lz4.block, numpy import lz4.block
from numpy import ndarray from common.assets import *
from PIL import Image from PIL import Image
## .TIM image converter
# .TIM file header (magic + mode) and section header (length in bytes, VRAM
# X/Y coordinates, width and height in 16-bit units).
TIM_HEADER_STRUCT: Struct = Struct("< 2I")
TIM_SECTION_STRUCT: Struct = Struct("< I 4H")
TIM_HEADER_VERSION: int = 0x10

# Alpha thresholds: above UPPER = fully opaque, below/at LOWER = fully
# transparent, in between = semitransparent.
LOWER_ALPHA_BOUND: int = 0x20
UPPER_ALPHA_BOUND: int = 0xe0

# Color 0x0000 is interpreted by the PS1 GPU as fully transparent, so black
# pixels must be changed to dark gray to prevent them from becoming transparent.
TRANSPARENT_COLOR: int = 0x0000
BLACK_COLOR: int = 0x0421

def convertRGBAto16(inputData: ndarray) -> ndarray:
	"""Convert an (h, w, 3) or (h, w, 4) array of 8-bit RGB(A) values to an
	(h, w) array of 16-bit RGB555 values.

	Fully black pixels are remapped to dark gray (see BLACK_COLOR above);
	when an alpha channel is present, low-alpha pixels become transparent and
	mid-alpha pixels get the semitransparency flag set.
	"""
	source: ndarray = inputData.astype("<H")

	# Reduce each 8-bit channel to 5 bits; (x * 249 + 1014) >> 11 is a
	# rounding approximation of x * 31 / 255.
	r: ndarray = ((source[:, :, 0] * 249) + 1014) >> 11
	g: ndarray = ((source[:, :, 1] * 249) + 1014) >> 11
	b: ndarray = ((source[:, :, 2] * 249) + 1014) >> 11

	data: ndarray = r | (g << 5) | (b << 10)
	data = numpy.where(data != TRANSPARENT_COLOR, data, BLACK_COLOR)

	if source.shape[2] == 4:
		alpha: ndarray = source[:, :, 3]

		data = numpy.select(
			(
				alpha > UPPER_ALPHA_BOUND, # Leave as-is
				alpha > LOWER_ALPHA_BOUND # Set semitransparency flag
			), (
				data,
				data | (1 << 15)
			),
			TRANSPARENT_COLOR
		)

	return data.reshape(source.shape[:-1])

def convertIndexedImage(imageObj: "Image.Image") -> tuple[ndarray, ndarray]:
	"""Convert a palettized PIL image into an ( imageData, clutData ) pair.

	The image data is 8bpp for palettes longer than 16 colors and 4bpp (two
	pixels per byte) otherwise; both arrays are padded to an even number of
	bytes per row and the palette is padded to 16 or 256 entries.
	"""
	# PIL/Pillow doesn't provide a proper way to get the number of colors in a
	# palette, so here's an extremely ugly hack.
	colorDepth: int = { "RGB": 3, "RGBA": 4 }[imageObj.palette.mode]
	clutData: bytes = imageObj.palette.tobytes()
	numColors: int = len(clutData) // colorDepth

	clut: ndarray = convertRGBAto16(
		numpy.frombuffer(clutData, "B").reshape(( 1, numColors, colorDepth ))
	)

	# Pad the palette to 16 or 256 colors. Note the padding must be created
	# as a 1-row 2D array: numpy.c_ turns a flat array into a *column* vector,
	# which would make the concatenation fail whenever more than one padding
	# entry is required.
	padAmount: int = (16 if (numColors <= 16) else 256) - numColors

	if padAmount:
		clut = numpy.c_[ clut, numpy.zeros(( 1, padAmount ), "<H") ]

	image: ndarray = numpy.asarray(imageObj, "B")
	if image.shape[1] % 2:
		image = numpy.c_[ image, numpy.zeros(image.shape[0], "B") ]

	# Pack two pixels into each byte for 4bpp images.
	if numColors <= 16:
		image = image[:, 0::2] | (image[:, 1::2] << 4)

		if image.shape[1] % 2:
			image = numpy.c_[ image, numpy.zeros(image.shape[0], "B") ]

	return image, clut
def generateIndexedTIM(
    imageObj: Image.Image, ix: int, iy: int, cx: int, cy: int
) -> bytearray:
    """
    Serialize a palettized PIL image into a complete .TIM file: a header
    followed by a CLUT section placed at VRAM (cx, cy) and an image section
    placed at (ix, iy).
    """
    if not ((0 <= ix <= 1023) and (0 <= iy <= 1023)):
        raise ValueError("image X/Y coordinates must be in 0-1023 range")
    if not ((0 <= cx <= 1023) and (0 <= cy <= 1023)):
        raise ValueError("palette X/Y coordinates must be in 0-1023 range")

    image, clut = convertIndexedImage(imageObj)

    # Mode 0x8 = 4bpp indexed, 0x9 = 8bpp indexed.
    mode: int = 0x9 if (clut.size > 16) else 0x8
    tim: bytearray = bytearray(TIM_HEADER_STRUCT.pack(TIM_HEADER_VERSION, mode))

    # Emit the CLUT section followed by the image section, each prefixed with
    # its total length and VRAM placement. Image VRAM width is measured in
    # 16-bit units, hence the halved pixel width.
    for section, sx, sy, width in (
        ( clut, cx, cy, clut.shape[1] ),
        ( image, ix, iy, image.shape[1] // 2 )
    ):
        tim.extend(TIM_SECTION_STRUCT.pack(
            TIM_SECTION_STRUCT.size + section.nbytes,
            sx, sy, width, section.shape[0]
        ))
        tim.extend(section)

    return tim
## Font metrics generator

# Header: space width, tab width and line height (one byte each plus padding).
# Entry: X, Y, then width/height/icon flag packed into a 16-bit field.
METRICS_HEADER_STRUCT: Struct = Struct("< 3B x")
METRICS_ENTRY_STRUCT: Struct = Struct("< 2B H")
def generateFontMetrics(
    metrics: Mapping[str, int | Mapping[str, Mapping[str, int | bool]]]
) -> bytearray:
    """
    Serialize a font metrics dictionary into a binary blob made up of a
    header (space/tab/line sizes) followed by a fixed 256-entry table of
    per-character size entries, indexed by character code.
    """
    blob: bytearray = bytearray(
        METRICS_HEADER_STRUCT.size + METRICS_ENTRY_STRUCT.size * 256
    )

    lineHeight: int = int(metrics["lineHeight"])
    METRICS_HEADER_STRUCT.pack_into(
        blob, 0,
        int(metrics["spaceWidth"]),
        int(metrics["tabWidth"]),
        lineHeight
    )

    for ch, entry in metrics["characterSizes"].items():
        index: int = ord(ch)

        if not (0 <= index <= 255):
            raise ValueError(f"extended character {index} is not supported")

        x: int = int(entry["x"])
        y: int = int(entry["y"])
        w: int = int(entry["width"])
        h: int = int(entry["height"])
        i: bool = bool(entry.get("icon", False))

        if not (0 <= x <= 255) or not (0 <= y <= 255):
            raise ValueError("all X/Y coordinates must be in 0-255 range")
        if not (0 <= w <= 127) or not (0 <= h <= 127):
            raise ValueError("all characters must be <=127x127 pixels")
        if h > lineHeight:
            raise ValueError("character height exceeds line height")

        # Bits 0-6: width, bits 7-13: height, bit 14: icon flag.
        METRICS_ENTRY_STRUCT.pack_into(
            blob,
            METRICS_HEADER_STRUCT.size + METRICS_ENTRY_STRUCT.size * index,
            x, y, w | (h << 7) | (i << 14)
        )

    return blob
## Color palette generator

# Accepts colors in "RRGGBB" hex format with an optional leading "#".
PALETTE_COLOR_REGEX: re.Pattern = re.compile(r"^#?([0-9A-Fa-f]{6})$")

# All entries that must be present in a palette, in serialization order.
PALETTE_COLORS: Sequence[str] = (
    "default",
    "shadow",
    "backdrop",
    "accent1",
    "accent2",
    "window1",
    "window2",
    "window3",
    "highlight1",
    "highlight2",
    "progress1",
    "progress2",
    "box1",
    "box2",
    "text1",
    "text2",
    "title",
    "subtitle"
)

# Each color is stored as 3 raw bytes plus one padding byte.
PALETTE_ENTRY_STRUCT: Struct = Struct("< 3s x")
def generateColorPalette(palette: Mapping[str, str]) -> bytearray:
    """
    Serialize a name-to-"RRGGBB" color mapping into a binary palette blob,
    emitting the entries listed in PALETTE_COLORS in order.
    """
    blob: bytearray = bytearray()

    for entry in PALETTE_COLORS:
        value: str | None = palette.get(entry, None)

        if value is None:
            raise ValueError(f"no entry found for {entry}")

        parsed: re.Match | None = PALETTE_COLOR_REGEX.match(value)

        if parsed is None:
            raise ValueError(f"invalid color value: {value}")

        blob += PALETTE_ENTRY_STRUCT.pack(bytes.fromhex(parsed.group(1)))

    return blob
## String table generator

# Hash table entry: full key hash, string blob offset, next-entry index.
TABLE_ENTRY_STRUCT: Struct = Struct("< I 2H")
TABLE_BUCKET_COUNT: int = 256
TABLE_STRING_ALIGN: int = 4

# Matches {NAME} or ${NAME} escape sequences within strings.
TABLE_ESCAPE_REGEX: re.Pattern = re.compile(rb"\$?\{(.+?)\}")

# Maps escape sequence names to custom character codes (0x80-0x99).
TABLE_ESCAPE_REPL: Mapping[bytes, bytes] = {
    b"UP_ARROW": b"\x80",
    b"DOWN_ARROW": b"\x81",
    b"LEFT_ARROW": b"\x82",
    b"RIGHT_ARROW": b"\x83",
    b"UP_ARROW_ALT": b"\x84",
    b"DOWN_ARROW_ALT": b"\x85",
    b"LEFT_ARROW_ALT": b"\x86",
    b"RIGHT_ARROW_ALT": b"\x87",
    b"LEFT_BUTTON": b"\x90",
    b"RIGHT_BUTTON": b"\x91",
    b"START_BUTTON": b"\x92",
    b"CLOSED_LOCK": b"\x93",
    b"OPEN_LOCK": b"\x94",
    b"DIR_ICON": b"\x95",
    b"PARENT_DIR_ICON": b"\x96",
    b"FILE_ICON": b"\x97",
    b"CHIP_ICON": b"\x98",
    b"CART_ICON": b"\x99"
}
def hashString(string: str) -> int:
    """
    Compute the 32-bit "sdbm" hash of an ASCII string. Each step is
    equivalent to value = value * 65599 + byte, truncated to 32 bits.
    """
    value: int = 0

    for byte in string.encode("ascii"):
        value = (value * 65599 + byte) & 0xffffffff

    return value
def convertString(string: str) -> bytes:
    """
    ASCII-encode a string, replacing {NAME} escape sequences with the custom
    character codes listed in TABLE_ESCAPE_REPL.
    """
    def replaceEscape(match: re.Match) -> bytes:
        return TABLE_ESCAPE_REPL[match.group(1).strip().upper()]

    return TABLE_ESCAPE_REGEX.sub(replaceEscape, string.encode("ascii"))
def prepareStrings(
    strings: Mapping[str, Any], prefix: str = ""
) -> Generator[tuple[int, bytes | None], None, None]:
    """
    Recursively walk a nested string tree, yielding a ( keyHash, string )
    tuple for each leaf; keys are flattened into dot-separated paths. Leaves
    set to None yield a None string.
    """
    for key, value in strings.items():
        fullKey: str = prefix + key

        if type(value) is str:
            yield hashString(fullKey), convertString(value)
        elif value is None:
            yield hashString(fullKey), None
        else:
            yield from prepareStrings(value, fullKey + ".")
def generateStringTable(strings: Mapping[str, Any]) -> bytearray:
    """
    Serialize a nested string tree into a binary hash table: a 256-bucket
    chained hash map keyed by the hash of each dotted key path, followed by a
    blob of null-terminated, deduplicated, aligned strings.
    """
    offsets: dict[bytes, int] = {}
    chains: defaultdict[int, list[tuple[int, int | None]]] = defaultdict(list)
    blob: bytearray = bytearray()

    for fullHash, string in prepareStrings(strings):
        if string is None:
            # Keys with no value are stored with blob offset 0.
            entry: tuple[int, int | None] = fullHash, 0
        else:
            # Deduplicate identical strings by reusing their blob offset.
            offset: int | None = offsets.get(string, None)

            if offset is None:
                offset = len(blob)
                offsets[string] = offset

                blob.extend(string)
                blob.append(0)

                while len(blob) % TABLE_STRING_ALIGN:
                    blob.append(0)

            entry: tuple[int, int | None] = fullHash, offset

        chains[fullHash % TABLE_BUCKET_COUNT].append(entry)

    # Build the bucket array and all chains of entries. The first entry of
    # each chain lives in the bucket array itself, while the rest are placed
    # in the chained entry area that follows it; chainIndex is computed
    # *before* the current entry is appended, so for chained entries the next
    # entry actually lands at chainIndex + 1 (hence the adjustment below).
    buckets: list[tuple[int, int | None, int]] = []
    chained: list[tuple[int, int | None, int]] = []

    for shortHash in range(TABLE_BUCKET_COUNT):
        entries: list[tuple[int, int | None]] = chains[shortHash]

        if not entries:
            buckets.append(( 0, None, 0 ))
            continue

        for index, entry in enumerate(entries):
            if index < (len(entries) - 1):
                chainIndex: int = TABLE_BUCKET_COUNT + len(chained)
            else:
                # NOTE(review): terminating entries in the bucket array store
                # chain index 0, but terminating *chained* entries store 1
                # (0 + 1 below) -- presumably the reader treats any index
                # below TABLE_BUCKET_COUNT as end-of-chain; confirm against
                # the lookup code.
                chainIndex: int = 0

            fullHash, offset = entry

            if index:
                chained.append(( fullHash, offset, chainIndex + 1 ))
            else:
                buckets.append(( fullHash, offset, chainIndex ))

    # Relocate the offsets (the blob follows all table entries) and serialize
    # the table.
    blobAddr: int = TABLE_ENTRY_STRUCT.size * (len(buckets) + len(chained))
    data: bytearray = bytearray()

    for fullHash, offset, chainIndex in chain(buckets, chained):
        absOffset: int = 0 if (offset is None) else (blobAddr + offset)

        if absOffset > 0xffff:
            raise RuntimeError("string table exceeds 64 KB size limit")

        data.extend(TABLE_ENTRY_STRUCT.pack( fullHash, absOffset, chainIndex ))

    data.extend(blob)
    return data
## Main ## Main
def createParser() -> ArgumentParser: def createParser() -> ArgumentParser:

12
tools/common/__init__.py Normal file
View File

@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-

# Package metadata for the common helper library used by the tools.
__version__ = "0.4.1"
__author__  = "spicyjpeg"

# Submodules exported by "from common import *".
__all__ = (
    "assets",
    "cart",
    "cartdata",
    "games",
    "util"
)

314
tools/common/assets.py Normal file
View File

@ -0,0 +1,314 @@
# -*- coding: utf-8 -*-
import re
from collections import defaultdict
from itertools import chain
from struct import Struct
from typing import Any, Generator, Mapping, Sequence
import numpy
from numpy import ndarray
from PIL import Image
from .util import hashData
## .TIM image converter

# Binary layouts of the .TIM file header and of each section (CLUT/image)
# header. All fields are little-endian.
_TIM_HEADER_STRUCT: Struct = Struct("< 2I")
_TIM_SECTION_STRUCT: Struct = Struct("< I 4H")
_TIM_HEADER_VERSION: int = 0x10

# 8-bit alpha thresholds: pixels above _UPPER_ALPHA_BOUND are stored as fully
# opaque, pixels below _LOWER_ALPHA_BOUND as fully transparent and anything in
# between with the semitransparency flag set (see _convertRGBAto16()).
_LOWER_ALPHA_BOUND: int = 0x20
_UPPER_ALPHA_BOUND: int = 0xe0

# Color 0x0000 is interpreted by the PS1 GPU as fully transparent, so black
# pixels must be changed to dark gray to prevent them from becoming transparent.
_TRANSPARENT_COLOR: int = 0x0000
_BLACK_COLOR: int = 0x0421
def _convertRGBAto16(inputData: ndarray) -> ndarray:
    """
    Convert an (H, W, 3) RGB or (H, W, 4) RGBA array of 8-bit channels into an
    (H, W) array of 16-bit color values (5 bits per channel, red in the lowest
    bits, semitransparency flag in bit 15).
    """
    source: ndarray = inputData.astype("<H")

    # Rescale each channel from 8 to 5 bits; (x * 249 + 1014) >> 11 is an
    # integer approximation of round(x * 31 / 255) that avoids division.
    r: ndarray = ((source[:, :, 0] * 249) + 1014) >> 11
    g: ndarray = ((source[:, :, 1] * 249) + 1014) >> 11
    b: ndarray = ((source[:, :, 2] * 249) + 1014) >> 11

    data: ndarray = r | (g << 5) | (b << 10)
    # Remap pure black to dark gray, as 0x0000 would be drawn as transparent.
    data = numpy.where(data != _TRANSPARENT_COLOR, data, _BLACK_COLOR)

    # For RGBA sources, threshold the alpha channel into three states: opaque
    # (leave as-is), semitransparent (set bit 15) or fully transparent.
    if source.shape[2] == 4:
        alpha: ndarray = source[:, :, 3]

        data = numpy.select(
            (
                alpha > _UPPER_ALPHA_BOUND, # Leave as-is
                alpha > _LOWER_ALPHA_BOUND  # Set semitransparency flag
            ), (
                data,
                data | (1 << 15)
            ),
            _TRANSPARENT_COLOR
        )

    return data.reshape(source.shape[:-1])
def convertIndexedImage(imageObj: Image.Image) -> tuple[ndarray, ndarray]:
    """
    Convert a palettized PIL image into an ( imageData, clutData ) pair of
    numpy arrays: a palette padded to 16 or 256 16-bit colors, plus 4bpp or
    8bpp pixel data with rows padded to a 16-bit boundary.
    """
    # PIL/Pillow doesn't provide a proper way to get the number of colors in a
    # palette, so here's an extremely ugly hack.
    colorDepth: int = { "RGB": 3, "RGBA": 4 }[imageObj.palette.mode]
    clutData: bytes = imageObj.palette.tobytes()
    numColors: int = len(clutData) // colorDepth

    clut: ndarray = _convertRGBAto16(
        numpy.frombuffer(clutData, "B").reshape(( 1, numColors, colorDepth ))
    )

    # Pad the palette to 16 or 256 colors. The padding must be a 2D single-row
    # array: a 1D array would be treated by numpy.c_ as a *column* vector and
    # fail to concatenate with the (1, numColors) CLUT whenever more than one
    # padding entry is required.
    padAmount: int = (16 if (numColors <= 16) else 256) - numColors

    if padAmount:
        clut = numpy.c_[ clut, numpy.zeros(( 1, padAmount ), "<H") ]

    image: ndarray = numpy.asarray(imageObj, "B")
    if image.shape[1] % 2:
        image = numpy.c_[ image, numpy.zeros(image.shape[0], "B") ]

    # Pack two pixels into each byte for 4bpp images.
    if numColors <= 16:
        image = image[:, 0::2] | (image[:, 1::2] << 4)

        if image.shape[1] % 2:
            image = numpy.c_[ image, numpy.zeros(image.shape[0], "B") ]

    return image, clut
def generateIndexedTIM(
    imageObj: Image.Image, ix: int, iy: int, cx: int, cy: int
) -> bytearray:
    """
    Serialize a palettized PIL image into a complete .TIM file: a header
    followed by a CLUT section placed at VRAM (cx, cy) and an image section
    placed at (ix, iy).
    """
    if not ((0 <= ix <= 1023) and (0 <= iy <= 1023)):
        raise ValueError("image X/Y coordinates must be in 0-1023 range")
    if not ((0 <= cx <= 1023) and (0 <= cy <= 1023)):
        raise ValueError("palette X/Y coordinates must be in 0-1023 range")

    image, clut = convertIndexedImage(imageObj)

    # Mode 0x8 = 4bpp indexed, 0x9 = 8bpp indexed.
    mode: int = 0x9 if (clut.size > 16) else 0x8
    out: bytearray = bytearray(
        _TIM_HEADER_STRUCT.pack(_TIM_HEADER_VERSION, mode)
    )

    # Emit the CLUT section followed by the image section, each prefixed with
    # its total length and VRAM placement. Image VRAM width is measured in
    # 16-bit units, hence the halved pixel width.
    for section, sx, sy, width in (
        ( clut, cx, cy, clut.shape[1] ),
        ( image, ix, iy, image.shape[1] // 2 )
    ):
        out.extend(_TIM_SECTION_STRUCT.pack(
            _TIM_SECTION_STRUCT.size + section.nbytes,
            sx, sy, width, section.shape[0]
        ))
        out.extend(section)

    return out
## Font metrics generator

# Header: space width, tab width and line height (one byte each plus padding).
# Entry: X, Y, then width/height/icon flag packed into a 16-bit field.
_METRICS_HEADER_STRUCT: Struct = Struct("< 3B x")
_METRICS_ENTRY_STRUCT: Struct = Struct("< 2B H")
def generateFontMetrics(metrics: Mapping[str, Any]) -> bytearray:
    """
    Serialize a font metrics dictionary into a binary blob made up of a
    header (space/tab/line sizes) followed by a fixed 256-entry table of
    per-character size entries, indexed by character code.
    """
    blob: bytearray = bytearray(
        _METRICS_HEADER_STRUCT.size + _METRICS_ENTRY_STRUCT.size * 256
    )

    lineHeight: int = int(metrics["lineHeight"])
    _METRICS_HEADER_STRUCT.pack_into(
        blob, 0,
        int(metrics["spaceWidth"]),
        int(metrics["tabWidth"]),
        lineHeight
    )

    for ch, entry in metrics["characterSizes"].items():
        index: int = ord(ch)

        if not (0 <= index <= 255):
            raise ValueError(f"extended character {index} is not supported")

        x: int = int(entry["x"])
        y: int = int(entry["y"])
        w: int = int(entry["width"])
        h: int = int(entry["height"])
        i: bool = bool(entry.get("icon", False))

        if not (0 <= x <= 255) or not (0 <= y <= 255):
            raise ValueError("all X/Y coordinates must be in 0-255 range")
        if not (0 <= w <= 127) or not (0 <= h <= 127):
            raise ValueError("all characters must be <=127x127 pixels")
        if h > lineHeight:
            raise ValueError("character height exceeds line height")

        # Bits 0-6: width, bits 7-13: height, bit 14: icon flag.
        _METRICS_ENTRY_STRUCT.pack_into(
            blob,
            _METRICS_HEADER_STRUCT.size + _METRICS_ENTRY_STRUCT.size * index,
            x, y, w | (h << 7) | (i << 14)
        )

    return blob
## Color palette generator

# Accepts colors in "RRGGBB" hex format with an optional leading "#".
_PALETTE_COLOR_REGEX: re.Pattern = re.compile(r"^#?([0-9A-Fa-f]{6})$")

# All entries that must be present in a palette, in serialization order.
_PALETTE_COLORS: Sequence[str] = (
    "default",
    "shadow",
    "backdrop",
    "accent1",
    "accent2",
    "window1",
    "window2",
    "window3",
    "highlight1",
    "highlight2",
    "progress1",
    "progress2",
    "box1",
    "box2",
    "text1",
    "text2",
    "title",
    "subtitle"
)

# Each color is stored as 3 raw bytes plus one padding byte.
_PALETTE_ENTRY_STRUCT: Struct = Struct("< 3s x")
def generateColorPalette(palette: Mapping[str, str]) -> bytearray:
    """
    Serialize a name-to-"RRGGBB" color mapping into a binary palette blob,
    emitting the entries listed in _PALETTE_COLORS in order.
    """
    blob: bytearray = bytearray()

    for entry in _PALETTE_COLORS:
        value: str | None = palette.get(entry, None)

        if value is None:
            raise ValueError(f"no entry found for {entry}")

        parsed: re.Match | None = _PALETTE_COLOR_REGEX.match(value)

        if parsed is None:
            raise ValueError(f"invalid color value: {value}")

        blob += _PALETTE_ENTRY_STRUCT.pack(bytes.fromhex(parsed.group(1)))

    return blob
## String table generator

# Hash table entry: full key hash, string blob offset, next-entry index.
_TABLE_ENTRY_STRUCT: Struct = Struct("< I 2H")
_TABLE_BUCKET_COUNT: int = 256
_TABLE_STRING_ALIGN: int = 4

# Matches {NAME} or ${NAME} escape sequences within strings.
_TABLE_ESCAPE_REGEX: re.Pattern = re.compile(rb"\$?\{(.+?)\}")

# Maps escape sequence names to custom character codes (0x80-0x99).
_TABLE_ESCAPE_REPL: Mapping[bytes, bytes] = {
    b"UP_ARROW": b"\x80",
    b"DOWN_ARROW": b"\x81",
    b"LEFT_ARROW": b"\x82",
    b"RIGHT_ARROW": b"\x83",
    b"UP_ARROW_ALT": b"\x84",
    b"DOWN_ARROW_ALT": b"\x85",
    b"LEFT_ARROW_ALT": b"\x86",
    b"RIGHT_ARROW_ALT": b"\x87",
    b"LEFT_BUTTON": b"\x90",
    b"RIGHT_BUTTON": b"\x91",
    b"START_BUTTON": b"\x92",
    b"CLOSED_LOCK": b"\x93",
    b"OPEN_LOCK": b"\x94",
    b"DIR_ICON": b"\x95",
    b"PARENT_DIR_ICON": b"\x96",
    b"FILE_ICON": b"\x97",
    b"CHIP_ICON": b"\x98",
    b"CART_ICON": b"\x99"
}
def _convertString(string: str) -> bytes:
    """
    ASCII-encode a string, replacing {NAME} escape sequences with the custom
    character codes listed in _TABLE_ESCAPE_REPL.
    """
    def replaceEscape(match: re.Match) -> bytes:
        return _TABLE_ESCAPE_REPL[match.group(1).strip().upper()]

    return _TABLE_ESCAPE_REGEX.sub(replaceEscape, string.encode("ascii"))
def _walkStringTree(
    strings: Mapping[str, Any], prefix: str = ""
) -> Generator[tuple[int, bytes | None], None, None]:
    """
    Recursively walk a nested string tree, yielding a ( keyHash, string )
    tuple for each leaf; keys are flattened into dot-separated paths. Leaves
    set to None yield a None string.
    """
    for key, value in strings.items():
        fullKey: str = prefix + key

        if type(value) is str:
            yield hashData(fullKey.encode("ascii")), _convertString(value)
        elif value is None:
            yield hashData(fullKey.encode("ascii")), None
        else:
            yield from _walkStringTree(value, fullKey + ".")
def generateStringTable(strings: Mapping[str, Any]) -> bytearray:
    """
    Serialize a nested string tree into a binary hash table: a 256-bucket
    chained hash map keyed by the hash of each dotted key path, followed by a
    blob of null-terminated, deduplicated, aligned strings.
    """
    offsets: dict[bytes, int] = {}
    chains: defaultdict[int, list[tuple[int, int | None]]] = defaultdict(list)
    blob: bytearray = bytearray()

    for fullHash, string in _walkStringTree(strings):
        if string is None:
            # Keys with no value are stored with blob offset 0.
            entry: tuple[int, int | None] = fullHash, 0
        else:
            # Deduplicate identical strings by reusing their blob offset.
            offset: int | None = offsets.get(string, None)

            if offset is None:
                offset = len(blob)
                offsets[string] = offset

                blob.extend(string)
                blob.append(0)

                while len(blob) % _TABLE_STRING_ALIGN:
                    blob.append(0)

            entry: tuple[int, int | None] = fullHash, offset

        chains[fullHash % _TABLE_BUCKET_COUNT].append(entry)

    # Build the bucket array and all chains of entries. The first entry of
    # each chain lives in the bucket array itself, while the rest are placed
    # in the chained entry area that follows it; chainIndex is computed
    # *before* the current entry is appended, so for chained entries the next
    # entry actually lands at chainIndex + 1 (hence the adjustment below).
    buckets: list[tuple[int, int | None, int]] = []
    chained: list[tuple[int, int | None, int]] = []

    for shortHash in range(_TABLE_BUCKET_COUNT):
        entries: list[tuple[int, int | None]] = chains[shortHash]

        if not entries:
            buckets.append(( 0, None, 0 ))
            continue

        for index, entry in enumerate(entries):
            if index < (len(entries) - 1):
                chainIndex: int = _TABLE_BUCKET_COUNT + len(chained)
            else:
                # NOTE(review): terminating entries in the bucket array store
                # chain index 0, but terminating *chained* entries store 1
                # (0 + 1 below) -- presumably the reader treats any index
                # below _TABLE_BUCKET_COUNT as end-of-chain; confirm against
                # the lookup code.
                chainIndex: int = 0

            fullHash, offset = entry

            if index:
                chained.append(( fullHash, offset, chainIndex + 1 ))
            else:
                buckets.append(( fullHash, offset, chainIndex ))

    # Relocate the offsets (the blob follows all table entries) and serialize
    # the table.
    totalLength: int = len(buckets) + len(chained)
    blobOffset: int = _TABLE_ENTRY_STRUCT.size * totalLength
    data: bytearray = bytearray()

    for fullHash, offset, chainIndex in chain(buckets, chained):
        absOffset: int = 0 if (offset is None) else (blobOffset + offset)

        if absOffset > 0xffff:
            raise RuntimeError("string table exceeds 64 KB size limit")

        data.extend(_TABLE_ENTRY_STRUCT.pack(fullHash, absOffset, chainIndex))

    data.extend(blob)
    return data

114
tools/common/cart.py Normal file
View File

@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
from dataclasses import dataclass
from enum import IntEnum, IntFlag
from struct import Struct
from typing import Mapping
from zlib import decompress
from .util import decodeBase41
## Definitions
class ChipType(IntEnum):
    """Security cartridge EEPROM chip types."""
    NONE    = 0
    X76F041 = 1
    X76F100 = 2
    ZS01    = 3
class DumpFlag(IntFlag):
    """
    Bitfield describing which identifiers and sections of a dump are present
    and were read or validated successfully.
    """
    DUMP_HAS_SYSTEM_ID   = 1 << 0
    DUMP_HAS_CART_ID     = 1 << 1
    DUMP_CONFIG_OK       = 1 << 2
    DUMP_SYSTEM_ID_OK    = 1 << 3
    DUMP_CART_ID_OK      = 1 << 4
    DUMP_ZS_ID_OK        = 1 << 5
    DUMP_PUBLIC_DATA_OK  = 1 << 6
    DUMP_PRIVATE_DATA_OK = 1 << 7
## Cartridge dump structure

# Dump header: magic, chip type, flags, then five 8-byte fields (system ID,
# cart ID, ZS01 ID, data key, config), followed by the raw EEPROM data.
_CART_DUMP_HEADER_STRUCT: Struct = Struct("< H 2B 8s 8s 8s 8s 8s")
_CART_DUMP_HEADER_MAGIC: int = 0x573d

# ( dataLength, publicSectionOffset, publicSectionLength ) for each chip type
# (see CartDump.getChipSize() and its users).
_CHIP_SIZES: Mapping[ChipType, tuple[int, int, int]] = {
    ChipType.X76F041: ( 512, 384, 128 ),
    ChipType.X76F100: ( 112, 0, 0 ),
    ChipType.ZS01: ( 112, 0, 32 )
}

# Delimiters of dumps encoded as QR code strings (see parseCartQRString()).
_QR_STRING_START: str = "573::"
_QR_STRING_END: str = "::"
@dataclass
class CartDump:
    """
    A raw dump of a security cartridge's EEPROM contents plus all identifiers
    and the data key read from the cartridge.
    """
    chipType: ChipType # EEPROM chip type, determines data/public section sizes
    flags: DumpFlag    # which IDs/sections are present and were read correctly
    systemID: bytes    # 8-byte system identifier
    cartID: bytes      # 8-byte cartridge identifier
    zsID: bytes        # 8-byte ZS01 identifier
    dataKey: bytes     # 8-byte data key
    config: bytes      # 8-byte configuration area
    data: bytes        # raw EEPROM data

    def getChipSize(self) -> tuple[int, int, int]:
        """
        Return ( dataLength, publicSectionOffset, publicSectionLength ) for
        this dump's chip type.
        """
        return _CHIP_SIZES[self.chipType]

    def serialize(self) -> bytes:
        """Pack the dump into its binary format (header + raw data)."""
        return _CART_DUMP_HEADER_STRUCT.pack(
            _CART_DUMP_HEADER_MAGIC,
            self.chipType,
            self.flags,
            self.systemID,
            self.cartID,
            self.zsID,
            self.dataKey,
            self.config
        ) + self.data
def parseCartDump(data: bytes) -> CartDump:
    """
    Deserialize a binary cartridge dump (header followed by raw EEPROM data)
    into a CartDump object.

    :raises ValueError: if the header's magic number is invalid
    """
    magic, chipType, flags, systemID, cartID, zsID, dataKey, config = \
        _CART_DUMP_HEADER_STRUCT.unpack(data[0:_CART_DUMP_HEADER_STRUCT.size])

    if magic != _CART_DUMP_HEADER_MAGIC:
        raise ValueError(f"invalid or unsupported dump format: 0x{magic:04x}")

    # Only the chip's actual data area is kept; any trailing bytes are
    # discarded.
    length, _, _ = _CHIP_SIZES[chipType]

    return CartDump(
        chipType, flags, systemID, cartID, zsID, dataKey, config,
        data[_CART_DUMP_HEADER_STRUCT.size:_CART_DUMP_HEADER_STRUCT.size + length]
    )
def parseCartQRString(data: str) -> CartDump:
    """
    Decode a dump encoded as a QR code string ("573::...::"): strip the
    delimiters, base41-decode and zlib-decompress the payload, then parse it
    as a binary dump.
    """
    normalized: str = data.strip().upper()

    if not normalized.startswith(_QR_STRING_START):
        raise ValueError(f"dump string does not begin with '{_QR_STRING_START}'")
    if not normalized.endswith(_QR_STRING_END):
        raise ValueError(f"dump string does not end with '{_QR_STRING_END}'")

    payload: str = normalized[len(_QR_STRING_START):-len(_QR_STRING_END)]
    return parseCartDump(decompress(decodeBase41(payload)))
## Flash and RTC header dump structure

# Dump header: magic, flags, 8-byte system ID, then the 32-byte header data.
_ROM_HEADER_DUMP_HEADER_STRUCT: Struct = Struct("< H x B 8s 32s")
_ROM_HEADER_DUMP_HEADER_MAGIC: int = 0x573e
@dataclass
class ROMHeaderDump:
    """A dump of a flash/RTC ROM header region plus the system identifier."""
    flags: DumpFlag # which identifiers are present and were read correctly
    systemID: bytes # 8-byte system identifier
    data: bytes     # 32-byte header data

    def serialize(self) -> bytes:
        """Pack the dump into its binary format."""
        return _ROM_HEADER_DUMP_HEADER_STRUCT.pack(
            _ROM_HEADER_DUMP_HEADER_MAGIC,
            self.flags,
            self.systemID,
            self.data
        )

578
tools/common/cartdata.py Normal file
View File

@ -0,0 +1,578 @@
# -*- coding: utf-8 -*-
import logging
from dataclasses import dataclass
from enum import IntEnum, IntFlag
from struct import Struct, unpack
from typing import Any, Sequence, Type
from .cart import CartDump, ChipType, ROMHeaderDump
from .games import GAME_CODE_REGEX, GAME_REGION_REGEX
from .util import checksum8, checksum16, shortenedMD5
## Definitions
class FormatType(IntEnum):
    """Cartridge/header data formats, from least to most structured."""
    BLANK    = 0
    SIMPLE   = 1
    BASIC    = 2
    EXTENDED = 3
class TraceIDType(IntEnum):
    """Trace ID (TID) formats, distinguished by the ID's first byte."""
    TID_NONE             = 0
    TID_81               = 1
    TID_82_BIG_ENDIAN    = 2
    TID_82_LITTLE_ENDIAN = 3
class DataFlag(IntFlag):
    """Bitfield describing a cartridge data format's features and contents."""
    DATA_HAS_CODE_PREFIX    = 1 << 0
    DATA_HAS_TRACE_ID       = 1 << 1
    DATA_HAS_CART_ID        = 1 << 2
    DATA_HAS_INSTALL_ID     = 1 << 3
    DATA_HAS_SYSTEM_ID      = 1 << 4
    DATA_HAS_PUBLIC_SECTION = 1 << 5
    DATA_CHECKSUM_INVERTED  = 1 << 6
    DATA_GX706_WORKAROUND   = 1 << 7
class ParserError(Exception):
    """
    Raised by cartridge and flash header parsers when a dump does not match
    the data format being attempted.

    Derived from Exception rather than BaseException: user-defined errors
    should not derive from BaseException, which is reserved for exit-like
    exceptions (SystemExit, KeyboardInterrupt).
    """
## Common data structures
@dataclass
class IdentifierSet:
    """
    The four 8-byte identifiers stored in a cartridge's private data section.
    Identifiers that are missing (i.e. stored as all zeroes) are set to None.
    """
    traceID: bytes | None = None # aka TID
    cartID: bytes | None = None # aka SID
    installID: bytes | None = None # aka MID
    systemID: bytes | None = None # aka XID

    def __init__(self, data: bytes):
        # Deserialize four consecutive 8-byte identifiers, treating all-zero
        # (or missing, i.e. empty slice) ones as absent.
        ids: list[bytes | None] = []

        for offset in range(0, 32, 8):
            _id: bytes = data[offset:offset + 8]

            ids.append(_id if sum(_id) else None)

        (
            self.traceID,
            self.cartID,
            self.installID,
            self.systemID
        ) = ids

    def getFlags(self) -> DataFlag:
        """Return the DATA_HAS_* flags matching the identifiers present."""
        flags: DataFlag = DataFlag(0)

        if self.traceID:
            flags |= DataFlag.DATA_HAS_TRACE_ID
        if self.cartID:
            flags |= DataFlag.DATA_HAS_CART_ID
        if self.installID:
            flags |= DataFlag.DATA_HAS_INSTALL_ID
        if self.systemID:
            flags |= DataFlag.DATA_HAS_SYSTEM_ID

        return flags

    def getCartIDChecksum(self, param: int) -> int:
        """
        Compute the 16-bit checksum of the cart ID used to validate 0x82-type
        trace IDs: each set bit of cart ID bytes 1-6 toggles checksum bit
        (bitIndex % param). Returns 0 if no cart ID is present.
        """
        if self.cartID is None:
            return 0

        checksum: int = 0

        for i in range(6):
            value: int = self.cartID[i + 1]

            for j in range(i * 8, (i + 1) * 8):
                if value & 1:
                    checksum ^= 1 << (j % param)

                value >>= 1

        return checksum & 0xffff

    def getTraceIDType(self, param: int) -> TraceIDType:
        """
        Determine the trace ID's format from its first byte. For 0x82-type
        IDs, endianness is detected by matching the embedded 16-bit checksum
        against the computed cart ID checksum.

        :raises ValueError: if the prefix is unknown or the checksum matches
            neither endianness
        """
        if self.traceID is None:
            return TraceIDType.TID_NONE

        match self.traceID[0]:
            case 0x81:
                return TraceIDType.TID_81

            case 0x82:
                checksum: int = self.getCartIDChecksum(param)
                big: int = unpack("> H", self.traceID[1:3])[0]
                little: int = unpack("< H", self.traceID[1:3])[0]

                if checksum == big:
                    return TraceIDType.TID_82_BIG_ENDIAN
                elif checksum == little:
                    return TraceIDType.TID_82_LITTLE_ENDIAN

                raise ValueError(
                    f"trace ID mismatch, exp=0x{checksum:04x}, "
                    f"big=0x{big:04x}, little=0x{little:04x}"
                )

            case prefix:
                raise ValueError(f"unknown trace ID prefix: 0x{prefix:02x}")
@dataclass
class PublicIdentifierSet:
    """
    The two 8-byte identifiers stored in a cartridge's public data section.
    Identifiers that are missing (i.e. stored as all zeroes) are set to None.
    """
    installID: bytes | None = None # aka MID
    systemID: bytes | None = None # aka XID

    def __init__(self, data: bytes):
        # Deserialize two consecutive 8-byte identifiers, treating all-zero
        # (or missing, i.e. empty slice) ones as absent.
        self.installID, self.systemID = (
            data[offset:offset + 8] if sum(data[offset:offset + 8]) else None
            for offset in range(0, 16, 8)
        )

    def getFlags(self) -> DataFlag:
        """Return the DATA_HAS_* flags matching the identifiers present."""
        flags: DataFlag = DataFlag(0)

        for identifier, flag in (
            ( self.installID, DataFlag.DATA_HAS_INSTALL_ID ),
            ( self.systemID, DataFlag.DATA_HAS_SYSTEM_ID )
        ):
            if identifier:
                flags |= flag

        return flags
## Cartridge data parsers

# "Basic" header: 2-byte region, 2-byte code prefix, 8-bit checksum.
_BASIC_HEADER_STRUCT: Struct = Struct("< 2s 2s B 3x")
# "Extended" header: 8-byte game code, year, 4-byte region, 16-bit checksum.
_EXTENDED_HEADER_STRUCT: Struct = Struct("< 8s H 4s H")

# The system and install IDs are excluded from validation as they may not be
# always present.
_IDENTIFIER_FLAG_MASK: DataFlag = \
    DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
def _getPublicData(
    dump: CartDump, flags: DataFlag, maxLength: int = 512
) -> bytes:
    """
    Return up to maxLength bytes of the dump's publicly readable data: the
    chip's dedicated public section when the format uses one, the beginning
    of the data area otherwise.
    """
    if not (flags & DataFlag.DATA_HAS_PUBLIC_SECTION):
        return dump.data[0:maxLength]

    _, offset, length = dump.getChipSize()
    return dump.data[offset:offset + min(length, maxLength)]
@dataclass
class CartParser:
    """
    Base class for parsed cartridge data. A bare CartParser represents an
    unformatted (blank) cartridge; subclasses decode the actual header
    formats.
    """
    flags: DataFlag                        # format flags the data matched
    identifiers: IdentifierSet             # IDs from the private section
    publicIdentifiers: PublicIdentifierSet # IDs from the public section

    region: str | None = None
    codePrefix: str | None = None
    code: str | None = None
    year: int | None = None

    def getFormatType(self) -> FormatType:
        return FormatType.BLANK
class SimpleCartParser(CartParser):
    """
    Parser for the simplest data format: a bare region string at the start of
    the public data, with no checksum and no identifiers.
    """
    def __init__(self, dump: CartDump, flags: DataFlag):
        region: bytes = _getPublicData(dump, flags, 8).rstrip(b"\0")

        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")

        super().__init__(
            flags,
            IdentifierSet(b""),
            PublicIdentifierSet(b""),
            region.decode("ascii")
        )

    def getFormatType(self) -> FormatType:
        return FormatType.SIMPLE
class BasicCartParser(CartParser):
    """
    Parser for the "basic" header format: region, game code prefix and 8-bit
    checksum, followed by the private identifiers.
    """
    def __init__(self, dump: CartDump, flags: DataFlag):
        data: bytes = _getPublicData(dump, flags, _BASIC_HEADER_STRUCT.size)
        pri: IdentifierSet = IdentifierSet(dump.data[_BASIC_HEADER_STRUCT.size:])

        region, codePrefix, checksum = _BASIC_HEADER_STRUCT.unpack(data)
        codePrefix: bytes = codePrefix.rstrip(b"\0")

        # The checksum covers the header's first 4 bytes (region + prefix).
        value: int = checksum8(
            data[0:4], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
        )

        if value != checksum:
            raise ParserError(
                f"invalid header checksum, exp=0x{value:02x}, "
                f"got=0x{checksum:02x}"
            )
        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")
        # The prefix must be present if and only if the format flags say so.
        if bool(flags & DataFlag.DATA_HAS_CODE_PREFIX) != bool(codePrefix):
            raise ParserError(
                f"game code prefix should{' not' if codePrefix else ''} be "
                f"present"
            )
        if (pri.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
            raise ParserError("identifier flags do not match")

        super().__init__(
            flags,
            pri,
            PublicIdentifierSet(b""),
            region.decode("ascii"),
            codePrefix.decode("ascii") or None
        )

    def getFormatType(self) -> FormatType:
        return FormatType.BASIC
class ExtendedCartParser(CartParser):
    """
    Parser for the "extended" header format: full game code, year and region
    with a 16-bit checksum, followed by the public and private identifiers.
    """
    def __init__(self, dump: CartDump, flags: DataFlag):
        data: bytes = \
            _getPublicData(dump, flags, _EXTENDED_HEADER_STRUCT.size + 16)
        idsPri: bytes = dump.data[_EXTENDED_HEADER_STRUCT.size + 16:]
        idsPub: bytes = dump.data[_EXTENDED_HEADER_STRUCT.size:]
        header: bytes = data[0:_EXTENDED_HEADER_STRUCT.size]

        pri: IdentifierSet = IdentifierSet(idsPri)
        pub: PublicIdentifierSet = PublicIdentifierSet(idsPub)

        # GX706 workaround: the second byte of the checksummed data is forced
        # to "X" before validation (see DATA_GX706_WORKAROUND).
        if flags & DataFlag.DATA_GX706_WORKAROUND:
            data = data[0:1] + b"X" + data[2:]

        code, year, region, checksum = _EXTENDED_HEADER_STRUCT.unpack(header)
        code: bytes = code.rstrip(b"\0")
        region: bytes = region.rstrip(b"\0")

        # The checksum covers the (possibly patched) header's first 14 bytes.
        value: int = checksum16(
            data[0:14], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
        )

        if value != checksum:
            raise ParserError(
                f"invalid header checksum, exp=0x{value:04x}, "
                f"got=0x{checksum:04x}"
            )
        if GAME_CODE_REGEX.fullmatch(code) is None:
            raise ParserError(f"invalid game code: {code}")
        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")
        if (pri.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
            raise ParserError("identifier flags do not match")

        _code: str = code.decode("ascii")

        super().__init__(
            flags,
            pri,
            pub,
            region.decode("ascii"),
            _code[0:2],
            _code,
            year
        )

    def getFormatType(self) -> FormatType:
        return FormatType.EXTENDED
## Flash and RTC header parsers/writers

# Used alongside the system ID and the header itself to calculate the MD5 used
# as a header signature. Seems to be the same in all games.
_SIGNATURE_SALT: bytes = bytes.fromhex("c1 a2 03 d6 ab 70 85 5e")
@dataclass
class ROMHeaderParser:
    """
    Base class for parsed flash/RTC ROM headers. A bare ROMHeaderParser
    represents a blank (unformatted) header.
    """
    flags: DataFlag # format flags the header matched

    signature: bytes | None = None
    region: str | None = None
    codePrefix: str | None = None
    code: str | None = None
    year: int | None = None

    def getFormatType(self) -> FormatType:
        return FormatType.BLANK
class ExtendedROMHeaderParser(ROMHeaderParser):
    """
    Parser for "extended" flash/RTC ROM headers: the same layout as the
    extended cartridge header, plus an optional 8-byte MD5-based signature.
    """
    def __init__(self, dump: ROMHeaderDump, flags: DataFlag):
        data: bytes = dump.data[0:_EXTENDED_HEADER_STRUCT.size + 8]
        header: bytes = data[0:_EXTENDED_HEADER_STRUCT.size]
        signature: bytes = data[_EXTENDED_HEADER_STRUCT.size:]

        # GX706 workaround: the second byte of the checksummed data is forced
        # to "X" before validation (see DATA_GX706_WORKAROUND).
        if flags & DataFlag.DATA_GX706_WORKAROUND:
            data = data[0:1] + b"X" + data[2:]

        code, year, region, checksum = _EXTENDED_HEADER_STRUCT.unpack(header)
        code: bytes = code.rstrip(b"\0")
        region: bytes = region.rstrip(b"\0")

        # The checksum covers the (possibly patched) header's first 14 bytes.
        value: int = checksum16(
            data[0:14], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
        )

        if value != checksum:
            raise ParserError(
                f"invalid header checksum, exp=0x{value:04x}, "
                f"got=0x{checksum:04x}"
            )
        if GAME_CODE_REGEX.fullmatch(code) is None:
            raise ParserError(f"invalid game code: {code}")
        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")

        if flags & DataFlag.DATA_HAS_SYSTEM_ID:
            # The signature is a shortened MD5 of system ID + header + salt.
            expected: bytearray = \
                shortenedMD5(dump.systemID + header + _SIGNATURE_SALT)

            if signature != expected:
                raise ParserError(
                    f"invalid signature, exp={expected.hex()}, "
                    f"got={signature.hex()}"
                )
        else:
            # With no system ID, the signature area must be blank (all 0x00
            # or all 0xff bytes).
            if sum(signature) not in ( 0, 0xff * 8 ):
                raise ParserError("unexpected signature present")

        _code: str = code.decode("ascii")

        super().__init__(
            flags, signature, region.decode("ascii"), _code[0:2], _code, year
        )

    def getFormatType(self) -> FormatType:
        return FormatType.EXTENDED
## Cartridge and flash header database

# Fixed-size database entry layouts (see serialize() in the classes below).
_CART_DB_ENTRY_STRUCT: Struct = Struct("< 6B H 8s 8s 8s 96s")
_ROM_HEADER_DB_ENTRY_STRUCT: Struct = Struct("< 2B H 8s 8s 96s")

# Candidate values of the modulo parameter used by getCartIDChecksum(), tried
# in order when building a database entry.
_TRACE_ID_PARAMS: Sequence[int] = 16, 14
@dataclass
class CartDBEntry:
    """
    A single cartridge database entry, combining game metadata with all
    parameters needed to reproduce the cartridge's data format.
    """
    code: str
    region: str
    name: str
    dataKey: bytes
    chipType: ChipType
    formatType: FormatType
    traceIDType: TraceIDType
    flags: DataFlag

    traceIDParam: int = 0
    installIDPrefix: int = 0
    year: int = 0

    def __init__(
        self, code: str, region: str, name: str, dump: CartDump,
        parser: CartParser
    ):
        # Find the correct parameters for the trace ID heuristically. Note
        # that the loop variable is self.traceIDParam itself, so the matching
        # parameter is retained once the loop breaks.
        _type: TraceIDType | None = None

        for self.traceIDParam in _TRACE_ID_PARAMS:
            try:
                _type = parser.identifiers.getTraceIDType(self.traceIDParam)
            except ValueError:
                continue

            break

        if _type is None:
            raise RuntimeError("failed to determine trace ID parameters")

        self.code = code
        self.region = region
        self.name = name
        self.dataKey = dump.dataKey
        self.chipType = dump.chipType
        self.formatType = parser.getFormatType()
        self.traceIDType = _type
        self.flags = parser.flags
        self.year = parser.year or 0

        # Prefer the public install ID over the private one when both exist.
        if parser.publicIdentifiers.installID is not None:
            self.installIDPrefix = parser.publicIdentifiers.installID[0]
        elif parser.identifiers.installID is not None:
            self.installIDPrefix = parser.identifiers.installID[0]
        else:
            self.installIDPrefix = 0

    # Implement the comparison overload so sorting will work. The 3-digit number
    # in the game code is used as a key.
    def __lt__(self, entry: Any) -> bool:
        return ( self.code[2:], self.code[0:2], self.region, self.name ) < \
            ( entry.code[2:], entry.code[0:2], entry.region, entry.name )

    def requiresCartID(self) -> bool:
        """
        Return whether the cartridge's format depends on a cart ID, either
        directly or through an 0x82-type trace ID (whose checksum is derived
        from the cart ID).
        """
        if self.flags & DataFlag.DATA_HAS_CART_ID:
            return True

        if (self.flags & DataFlag.DATA_HAS_TRACE_ID) and \
            (self.traceIDType >= TraceIDType.TID_82_BIG_ENDIAN):
            return True

        return False

    def serialize(self) -> bytes:
        """Pack the entry into its fixed-size binary database format."""
        return _CART_DB_ENTRY_STRUCT.pack(
            self.chipType,
            self.formatType,
            self.traceIDType,
            self.flags,
            self.traceIDParam,
            self.installIDPrefix,
            self.year,
            self.dataKey,
            self.code.encode("ascii"),
            self.region.encode("ascii"),
            self.name.encode("ascii")
        )
@dataclass
class ROMHeaderDBEntry:
    """
    A single flash/RTC header database entry: game metadata plus the header
    format parameters.
    """
    code: str
    region: str
    name: str
    formatType: FormatType
    flags: DataFlag

    year: int = 0

    def __init__(
        self, code: str, region: str, name: str, parser: ROMHeaderParser
    ):
        self.code = code
        self.region = region
        self.name = name
        self.formatType = parser.getFormatType()
        self.flags = parser.flags
        self.year = parser.year or 0

    # Implement the comparison overload so sorting will work. The 3-digit
    # number in the game code is used as a key.
    def __lt__(self, entry: Any) -> bool:
        return ( self.code[2:], self.code[0:2], self.region, self.name ) < \
            ( entry.code[2:], entry.code[0:2], entry.region, entry.name )

    def serialize(self) -> bytes:
        """Pack the entry into its fixed-size binary database format."""
        return _ROM_HEADER_DB_ENTRY_STRUCT.pack(
            self.formatType,
            self.flags,
            self.year,
            self.code.encode("ascii"),
            self.region.encode("ascii"),
            self.name.encode("ascii")
        )
## Data format identification

# All known cartridge data formats as ( name, parserClass, flags ) tuples,
# ordered from least to most specific; newCartParser() tries them in reverse
# order so the most specific matching format wins.
_KNOWN_CART_FORMATS: Sequence[tuple[str, Type, DataFlag]] = (
    (
        # Used by GCB48 (and possibly other games?)
        "region only",
        SimpleCartParser,
        DataFlag.DATA_HAS_PUBLIC_SECTION
    ), (
        "basic (no IDs)",
        BasicCartParser,
        DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + TID",
        BasicCartParser,
        DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + SID",
        BasicCartParser,
        DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + TID, SID",
        BasicCartParser,
        DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
            | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + prefix, TID, SID",
        BasicCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        # Used by most pre-ZS01 Bemani games
        "basic + prefix, all IDs",
        BasicCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
            | DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "extended (no IDs)",
        ExtendedCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "extended (no IDs, alt)",
        ExtendedCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX
    ), (
        # Used by GX706
        "extended (no IDs, GX706)",
        ExtendedCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_GX706_WORKAROUND
    ), (
        # Used by GE936/GK936 and all ZS01 Bemani games
        "extended + all IDs",
        ExtendedCartParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
            | DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_HAS_PUBLIC_SECTION
            | DataFlag.DATA_CHECKSUM_INVERTED
    )
)

# Same as above, for flash/RTC ROM header formats (newROMHeaderParser()).
_KNOWN_ROM_HEADER_FORMATS: Sequence[tuple[str, Type, DataFlag]] = (
    (
        "extended (no MD5)",
        ExtendedROMHeaderParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "extended (no MD5, alt)",
        ExtendedROMHeaderParser,
        DataFlag.DATA_HAS_CODE_PREFIX
    ), (
        # Used by GX706
        "extended (no MD5, GX706)",
        ExtendedROMHeaderParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_GX706_WORKAROUND
    ), (
        "extended + MD5",
        ExtendedROMHeaderParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_SYSTEM_ID
            | DataFlag.DATA_CHECKSUM_INVERTED
    )
)
def newCartParser(dump: CartDump) -> CartParser:
    """
    Try all known cartridge data formats, most specific first, and return a
    parser for the first one that matches the given dump.

    :raises RuntimeError: if no known format matches
    """
    for name, constructor, flags in reversed(_KNOWN_CART_FORMATS):
        try:
            parser: Any = constructor(dump, flags)
        except ParserError as exc:
            # Lazy %-style arguments avoid formatting the message when debug
            # logging is disabled.
            logging.debug("parsing as %s failed, %s", name, exc)
            continue

        return parser

    raise RuntimeError("no known data format found")
def newROMHeaderParser(dump: ROMHeaderDump) -> ROMHeaderParser:
    """
    Try all known flash/RTC ROM header formats, most specific first, and
    return a parser for the first one that matches the given dump.

    :raises RuntimeError: if no known format matches
    """
    for name, constructor, flags in reversed(_KNOWN_ROM_HEADER_FORMATS):
        try:
            parser: Any = constructor(dump, flags)
        except ParserError as exc:
            # Lazy %-style arguments avoid formatting the message when debug
            # logging is disabled.
            logging.debug("parsing as %s failed, %s", name, exc)
            continue

        return parser

    raise RuntimeError("no known data format found")

126
tools/common/games.py Normal file
View File

@ -0,0 +1,126 @@
# -*- coding: utf-8 -*-
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Any, Generator, Iterable, Mapping, Sequence
## Definitions

# Character 0: always G
# Character 1: region related? (can be B, C, E, K, L, N, Q, U, X or wildcard)
# Characters 2-4: identifier (700-999 or A00-A99 ~ D00-D99)
GAME_CODE_REGEX: re.Pattern = \
    re.compile(rb"G[A-Z*][0-9A-D][0-9][0-9]", re.IGNORECASE)

# Character 0: region (A=Asia?, E=Europe, J=Japan, K=Korea, S=?, U=US)
# Character 1: type/variant (A-F=regular, R-W=e-Amusement, X-Z=?)
# Characters 2-4: game revision (A-D or Z00-Z99, optional)
GAME_REGION_REGEX: re.Pattern = \
    re.compile(rb"[AEJKSU][A-FR-WX-Z]([A-D]|Z[0-9][0-9])?", re.IGNORECASE)

# Cartridge hardware configurations that include a DS2401 serial number chip
# (see GameDBEntry.hasCartID()).
_CARTS_WITH_ID: Sequence[str] = (
    "X76F041+DS2401",
    "ZS01+DS2401"
)

# I/O boards considered to provide a system identifier (see
# GameDBEntry.hasSystemID()).
_IO_BOARDS_WITH_ID: Sequence[str] = (
    "GX700-PWB(K)",  # Kick & Kick expansion board
    "GX894-PWB(B)",  # Digital I/O board
    "GX921-PWB(B)",  # DDR Karaoke Mix expansion board
    "PWB0000073070"  # GunMania expansion board
)
## Game list (loaded from games.json)
@dataclass
class GameDBEntry:
code: str
region: str
name: str
mameID: str | None = None
installCart: str | None = None
gameCart: str | None = None
ioBoard: str | None = None
cartLockedToIOBoard: bool = False
flashLockedToIOBoard: bool = False
# Implement the comparison overload so sorting will work. The 3-digit number
# in the game code is used as a key.
def __lt__(self, entry: Any) -> bool:
return ( self.code[2:], self.code[0:2], self.region, self.name ) < \
( entry.code[2:], entry.code[0:2], entry.region, entry.name )
def __str__(self) -> str:
return f"{self.code} {self.region}"
def getFullName(self) -> str:
return f"{self.name} [{self.code} {self.region}]"
def hasCartID(self) -> bool:
if self.gameCart is None:
return False
return (self.gameCart in _CARTS_WITH_ID)
def hasSystemID(self) -> bool:
return (self.ioBoard in _IO_BOARDS_WITH_ID)
class GameDB:
    """
    In-memory index of GameDBEntry objects, searchable either by MAME ID or
    by game code and region.
    """

    def __init__(self, entries: Iterable[Mapping[str, Any]] | None = None):
        """
        Create a new database, optionally populating it with the provided
        entry dictionaries (as loaded from games.json).
        """

        self._idIndex:   dict[str, GameDBEntry]             = {}
        self._codeIndex: defaultdict[str, list[GameDBEntry]] = \
            defaultdict(list)

        if entries:
            for entryObj in entries:
                self.addEntry(entryObj)

    def addEntry(self, entryObj: Mapping[str, Any]):
        """
        Validate a single entry dictionary, convert it to a GameDBEntry and
        add it to the database's indices. Raises ValueError if the game code
        or region is malformed.
        """

        code   = entryObj["code"].strip().upper()
        region = entryObj["region"].strip().upper()
        name   = entryObj["name"]

        mameID      = entryObj.get("id",          None)
        installCart = entryObj.get("installCart", None)
        gameCart    = entryObj.get("gameCart",    None)
        ioBoard     = entryObj.get("ioBoard",     None)

        cartLocked  = entryObj.get("cartLockedToIOBoard",  False)
        flashLocked = entryObj.get("flashLockedToIOBoard", False)

        if GAME_CODE_REGEX.fullmatch(code.encode("ascii")) is None:
            raise ValueError(f"invalid game code: {code}")
        if GAME_REGION_REGEX.fullmatch(region.encode("ascii")) is None:
            raise ValueError(f"invalid game region: {region}")

        entry = GameDBEntry(
            code, region, name, mameID, installCart, gameCart, ioBoard,
            cartLocked, flashLocked
        )

        if mameID is not None:
            self._idIndex[mameID.lower()] = entry

        # Store all entries indexed by their game code and first two
        # characters of the region code. This allows for quick retrieval of
        # all revisions of a game. A second key with a wildcard in place of
        # the code's second character is also added.
        self._codeIndex[f"{code}{region[0:2]}"].append(entry)
        self._codeIndex[f"{code[0]}*{code[2:]}{region[0:2]}"].append(entry)

    def lookupByID(self, mameID: str) -> GameDBEntry:
        """
        Return the entry with the given MAME ID, raising KeyError if none
        exists.
        """

        return self._idIndex[mameID.lower()]

    def lookupByCode(
        self, code: str, region: str
    ) -> Generator[GameDBEntry, None, None]:
        """
        Yield all entries matching the given game code and region. If only
        two characters of the region code are provided, all entries whose
        region code starts with those two characters (even if longer) are
        returned.
        """

        queryCode   = code.strip().upper()
        queryRegion = region.strip().upper()

        for entry in self._codeIndex[queryCode + queryRegion[0:2]]:
            if entry.region.startswith(queryRegion):
                yield entry

141
tools/common/util.py Normal file
View File

@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-
from hashlib import md5
from io import SEEK_SET, SEEK_END
from typing import BinaryIO, ByteString, Iterable, Iterator, Sequence, TextIO
## Misc. utilities
def signExtend(value: int, bitLength: int) -> int:
    """
    Reinterpret the lowest bitLength bits of value as a two's complement
    signed integer and return the result.
    """

    signBit = 1 << (bitLength - 1)

    # Keep everything below the sign bit, then subtract the sign bit's weight
    # if it is set.
    return (value & (signBit - 1)) - (value & signBit)
## String manipulation
# This encoding is similar to standard base45, but with some problematic
# characters (' ', '$', '%', '*') excluded. Each group of 3 characters encodes
# a 16-bit value, i.e. 2 bytes (see decodeBase41()).
_BASE41_CHARSET: str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ+-./:"
def toPrintableChar(value: int) -> str:
    """
    Return the character corresponding to the given byte value, or "." if the
    value is not a printable ASCII character.
    """

    return chr(value) if (0x20 <= value <= 0x7e) else "."
def hexdumpToFile(data: Sequence[int], output: TextIO, width: int = 16):
    """
    Write a hexadecimal dump of data to output, one row of up to width bytes
    per line, with a printable ASCII rendition alongside each row.
    """

    for offset in range(0, len(data), width):
        row = data[offset:offset + width]

        hexColumn   = " ".join(f"{byte:02x}" for byte in row)
        asciiColumn = "".join(
            # Non-printable bytes are rendered as dots.
            chr(byte) if (0x20 <= byte <= 0x7e) else "." for byte in row
        )

        output.write(
            f" {offset:04x}: {hexColumn.ljust(width * 3 - 1)}"
            f" |{asciiColumn.ljust(width)}|\n"
        )
def serialNumberToString(_id: ByteString) -> str:
    """
    Decode the little endian serial number stored in bytes 1-6 of the given
    identifier and format it as an 8-digit "xxxx-xxxx" string.
    """

    serial = int.from_bytes(_id[1:7], "little")

    # NOTE(review): displaying out-of-range serials as "xxxx-xxxx" is
    # currently disabled.
    #if serial >= 100000000:
        #return "xxxx-xxxx"

    high, low = divmod(serial, 10000)
    return f"{high % 10000:04d}-{low:04d}"
def decodeBase41(data: str) -> bytearray:
    """
    Decode a base41-encoded string (see _BASE41_CHARSET) into a byte array.
    Each group of 3 characters yields a 16-bit big endian value, i.e. 2
    decoded bytes. Raises ValueError if an invalid character is encountered.
    """

    digits = map(_BASE41_CHARSET.index, data)
    output = bytearray()

    # Pulling from the same iterator 3 times per loop consumes the input in
    # groups of 3 characters (least significant digit first).
    for low, mid, high in zip(digits, digits, digits):
        word = low + 41 * (mid + 41 * high)

        output.append(word >> 8)
        output.append(word & 0xff)

    return output
## Hashes and checksums
def hashData(data: Iterable[int]) -> int:
    """
    Compute a 32-bit hash of the given byte sequence. The mixing step is the
    classic "sdbm" recurrence, hash = byte + hash * 65599 (mod 2^32), since
    65599 = 2^16 + 2^6 - 1.
    """

    value = 0

    for byte in data:
        value = (byte + value * 65599) & 0xffffffff

    return value
def checksum8(data: Iterable[int], invert: bool = False) -> int:
    """
    Return the lowest 8 bits of the sum of all values in data, optionally
    bitwise-inverted.
    """

    checksum = sum(data) & 0xff
    return (checksum ^ 0xff) if invert else checksum
def checksum16(data: Iterable[int], invert: bool = False) -> int:
    """
    Return the lowest 16 bits of the sum of all byte pairs in data,
    interpreted as little endian 16-bit values, optionally bitwise-inverted.
    A trailing unpaired byte, if any, is ignored.
    """

    stream = iter(data)
    total  = 0

    # Pulling from the same iterator twice per loop consumes the input in
    # pairs.
    for low, high in zip(stream, stream):
        total += low | (high << 8)

    return (total & 0xffff) ^ (0xffff if invert else 0)
def shortenedMD5(data: ByteString) -> bytearray:
    """
    Compute the MD5 digest of data and fold it down to 8 bytes by XORing the
    two halves of the digest together.
    """

    digest = md5(data).digest()

    return bytearray(
        digest[index] ^ digest[index + 8] for index in range(8)
    )
## Odd/even interleaved file reader
class InterleavedFile(BinaryIO):
def __init__(self, even: BinaryIO, odd: BinaryIO):
self._even: BinaryIO = even
self._odd: BinaryIO = odd
self._offset: int = 0
# Determine the total size of the file ahead of time.
even.seek(0, SEEK_END)
odd.seek(0, SEEK_END)
self._length: int = even.tell()
if self._length != odd.tell():
raise RuntimeError("even and odd files must have the same size")
even.seek(0, SEEK_SET)
odd.seek(0, SEEK_SET)
def close(self):
self._even.close()
self._odd.close()
def seek(self, offset: int, mode: int = SEEK_SET):
match mode:
case 0:
self._offset = offset
case 1:
self._offset = min(self._offset + offset, self._length)
case 2:
self._offset = max(self._length - offset, 0)
self._even.seek((self._offset + 1) // 2)
self._odd.seek(self._offset // 2)
def tell(self) -> int:
return self._offset
def read(self, length: int) -> bytearray:
_length: int = min(length, self._length - self._offset)
output: bytearray = bytearray(_length)
if self._offset % 2:
output[0:_length:2] = self._odd.read((_length + 1) // 2)
output[1:_length:2] = self._even.read(_length // 2)
else:
output[0:_length:2] = self._even.read((_length + 1) // 2)
output[1:_length:2] = self._odd.read(_length // 2)
self._offset += _length
return output

View File

@ -9,7 +9,7 @@ customizing the region string (used by some emulators to determine whether they
should start in PAL or NTSC mode by default). Requires no external dependencies. should start in PAL or NTSC mode by default). Requires no external dependencies.
""" """
__version__ = "0.1.0" __version__ = "0.1.1"
__author__ = "spicyjpeg" __author__ = "spicyjpeg"
from argparse import ArgumentParser, FileType, Namespace from argparse import ArgumentParser, FileType, Namespace
@ -83,17 +83,28 @@ class ELF:
# Parse the file header and perform some minimal validation. # Parse the file header and perform some minimal validation.
_file.seek(0) _file.seek(0)
magic, wordSize, endianness, _, abi, _type, architecture, _, \ (
entryPoint, progHeaderOffset, secHeaderOffset, flags, elfHeaderSize, \ magic,
progHeaderSize, progHeaderCount, secHeaderSize, secHeaderCount, _ = \ wordSize,
endianness,
_,
abi,
elfType,
architecture,
_,
entryPoint,
progHeaderOffset,
secHeaderOffset,
flags,
elfHeaderSize,
progHeaderSize,
progHeaderCount,
secHeaderSize,
secHeaderCount,
_
) = \
parseStructFromFile(_file, ELF_HEADER_STRUCT) parseStructFromFile(_file, ELF_HEADER_STRUCT)
self.type: ELFType = ELFType(_type)
self.architecture: int = architecture
self.abi: int = abi
self.entryPoint: int = entryPoint
self.flags: int = flags
if magic != ELF_HEADER_MAGIC: if magic != ELF_HEADER_MAGIC:
raise RuntimeError("file is not a valid ELF") raise RuntimeError("file is not a valid ELF")
if wordSize != 1 or endianness != ELFEndianness.LITTLE: if wordSize != 1 or endianness != ELFEndianness.LITTLE:
@ -104,13 +115,26 @@ class ELF:
): ):
raise RuntimeError("unsupported ELF format") raise RuntimeError("unsupported ELF format")
self.type: ELFType = ELFType(elfType)
self.architecture: int = architecture
self.abi: int = abi
self.entryPoint: int = entryPoint
self.flags: int = flags
# Parse the program headers and extract all loadable segments. # Parse the program headers and extract all loadable segments.
self.segments: list[Segment] = [] self.segments: list[Segment] = []
_file.seek(progHeaderOffset) _file.seek(progHeaderOffset)
for ( for (
headerType, fileOffset, address, _, fileLength, length, flags, _ headerType,
fileOffset,
address,
_,
fileLength,
length,
flags,
_
) in parseStructsFromFile(_file, PROG_HEADER_STRUCT, progHeaderCount): ) in parseStructsFromFile(_file, PROG_HEADER_STRUCT, progHeaderCount):
if headerType != ProgHeaderType.LOAD: if headerType != ProgHeaderType.LOAD:
continue continue
@ -151,7 +175,7 @@ class ELF:
## Main ## Main
EXE_HEADER_STRUCT: Struct = Struct("< 16s 4I 16x 2I 20x 1972s") EXE_HEADER_STRUCT: Struct = Struct("< 8s 8x 4I 16x 2I 20x 1972s")
EXE_HEADER_MAGIC: bytes = b"PS-X EXE" EXE_HEADER_MAGIC: bytes = b"PS-X EXE"
EXE_ALIGNMENT: int = 2048 EXE_ALIGNMENT: int = 2048

View File

@ -1,63 +1,18 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
__version__ = "0.3.5" __version__ = "0.4.1"
__author__ = "spicyjpeg" __author__ = "spicyjpeg"
import sys import sys
from argparse import ArgumentParser, FileType, Namespace from argparse import ArgumentParser, FileType, Namespace
from typing import ByteString, Mapping, Sequence, TextIO from typing import Mapping, TextIO
from zlib import decompress
from _common import * from common.cart import *
from common.util import serialNumberToString, hexdumpToFile
## Utilities
# This encoding is similar to standard base45, but with some problematic
# characters (' ', '$', '%', '*') excluded.
_BASE41_CHARSET: str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ+-./:"
def decodeBase41(data: str) -> bytearray:
mapped: map[int] = map(_BASE41_CHARSET.index, data)
output: bytearray = bytearray()
for a, b, c in zip(mapped, mapped, mapped):
value: int = a + (b * 41) + (c * 1681)
output.append(value >> 8)
output.append(value & 0xff)
return output
def serialNumberToString(_id: ByteString) -> str:
value: int = int.from_bytes(_id[1:7], "little")
#if value >= 100000000:
#return "xxxx-xxxx"
return f"{(value // 10000) % 10000:04d}-{value % 10000:04d}"
def toPrintableChar(value: int):
if (value < 0x20) or (value > 0x7e):
return "."
return chr(value)
def hexdump(data: ByteString | Sequence[int], output: TextIO, width: int = 16):
for i in range(0, len(data), width):
hexBytes: map[str] = map(lambda value: f"{value:02x}", data[i:i + width])
hexLine: str = " ".join(hexBytes).ljust(width * 3 - 1)
asciiBytes: map[str] = map(toPrintableChar, data[i:i + width])
asciiLine: str = "".join(asciiBytes).ljust(width)
output.write(f" {i:04x}: {hexLine} |{asciiLine}|\n")
## Dump parser ## Dump parser
_DUMP_START: str = "573::"
_DUMP_END: str = "::"
_CHIP_NAMES: Mapping[ChipType, str] = { _CHIP_NAMES: Mapping[ChipType, str] = {
ChipType.NONE: "None", ChipType.NONE: "None",
ChipType.X76F041: "Xicor X76F041", ChipType.X76F041: "Xicor X76F041",
@ -65,21 +20,13 @@ _CHIP_NAMES: Mapping[ChipType, str] = {
ChipType.ZS01: "Konami ZS01 (PIC16CE625)" ChipType.ZS01: "Konami ZS01 (PIC16CE625)"
} }
def parseDumpString(data: str) -> Dump: def printDumpInfo(dump: CartDump, output: TextIO):
_data: str = data.strip().upper()
if not _data.startswith(_DUMP_START) or not _data.endswith(_DUMP_END):
raise ValueError(f"dump string does not begin with '{_DUMP_START}' and end with '{_DUMP_END}'")
_data = _data[len(_DUMP_START):-len(_DUMP_END)]
return parseDump(decompress(decodeBase41(_data)))
def printDumpInfo(dump: Dump, output: TextIO):
if dump.flags & DumpFlag.DUMP_SYSTEM_ID_OK: if dump.flags & DumpFlag.DUMP_SYSTEM_ID_OK:
output.write("Digital I/O board:\n") output.write("Digital I/O board:\n")
output.write(f" DS2401 ID: {dump.systemID.hex('-')}\n") output.write(f" DS2401 ID: {dump.systemID.hex('-')}\n")
output.write(f" Serial number: {serialNumberToString(dump.systemID)}\n\n") output.write(
f" Serial number: {serialNumberToString(dump.systemID)}\n\n"
)
output.write("Security cartridge:\n") output.write("Security cartridge:\n")
output.write(f" Chip type: {_CHIP_NAMES[dump.chipType]}\n") output.write(f" Chip type: {_CHIP_NAMES[dump.chipType]}\n")
@ -92,14 +39,16 @@ def printDumpInfo(dump: Dump, output: TextIO):
output.write(f" Configuration: {dump.config.hex('-')}\n") output.write(f" Configuration: {dump.config.hex('-')}\n")
output.write("\nEEPROM dump:\n") output.write("\nEEPROM dump:\n")
hexdump(dump.data, output) hexdumpToFile(dump.data, output)
output.write("\n") output.write("\n")
## Main ## Main
def createParser() -> ArgumentParser: def createParser() -> ArgumentParser:
parser = ArgumentParser( parser = ArgumentParser(
description = "Decodes the contents of a QR code generated by the tool.", description = \
"Decodes and displays or saves the contents of a QR code cartridge "
"dump generated by the tool.",
add_help = False add_help = False
) )
@ -150,11 +99,11 @@ def main():
data: bytes = _file.read() data: bytes = _file.read()
try: try:
dump: Dump = parseDump(data) dump: CartDump = parseCartDump(data)
except: except:
dump: Dump = parseDumpString(data.decode("ascii")) dump: CartDump = parseCartQRString(data.decode("ascii"))
elif args.data: elif args.data:
dump: Dump = parseDumpString(args.data) dump: CartDump = parseCartQRString(args.data)
else: else:
parser.error("a dump must be passed on the command line or using -i") parser.error("a dump must be passed on the command line or using -i")