mirror of
https://github.com/spicyjpeg/573in1.git
synced 2025-01-22 19:52:05 +01:00
Update tools, edit GE706 back to GX706
This commit is contained in:
parent
90651460eb
commit
664e656b14
@ -1,7 +1,7 @@
|
||||
[
|
||||
{
|
||||
"id": "darkhleg",
|
||||
"code": "GE706",
|
||||
"code": "GX706",
|
||||
"region": "JAA",
|
||||
"name": "Dark Horse Legend",
|
||||
"year": 1998,
|
||||
|
343
tools/_common.py
Normal file
343
tools/_common.py
Normal file
@ -0,0 +1,343 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__version__ = "0.3.1"
|
||||
__author__ = "spicyjpeg"
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from enum import IntEnum, IntFlag
|
||||
from struct import Struct
|
||||
from typing import Any, Iterable, Iterator, Mapping, Sequence
|
||||
|
||||
## Definitions
|
||||
|
||||
class ChipType(IntEnum):
    """Security cartridge EEPROM chip variants handled by the tools."""

    NONE    = 0
    X76F041 = 1
    X76F100 = 2
    ZS01    = 3
|
||||
|
||||
class FormatType(IntEnum):
    """Layout of a cartridge's internal data section."""

    BLANK    = 0
    SIMPLE   = 1
    BASIC    = 2
    EXTENDED = 3
|
||||
|
||||
class TraceIDType(IntEnum):
    """Encoding variants of the trace identifier (TID) stored in a cart."""

    TID_NONE             = 0
    TID_81               = 1
    TID_82_BIG_ENDIAN    = 2
    TID_82_LITTLE_ENDIAN = 3
|
||||
|
||||
class DumpFlag(IntFlag):
    """Bit flags describing which parts of a cartridge dump are present
    and were read back successfully.
    """

    DUMP_HAS_SYSTEM_ID   = 1 << 0
    DUMP_HAS_CART_ID     = 1 << 1
    DUMP_CONFIG_OK       = 1 << 2
    DUMP_SYSTEM_ID_OK    = 1 << 3
    DUMP_CART_ID_OK      = 1 << 4
    DUMP_ZS_ID_OK        = 1 << 5
    DUMP_PUBLIC_DATA_OK  = 1 << 6
    DUMP_PRIVATE_DATA_OK = 1 << 7
|
||||
|
||||
class DataFlag(IntFlag):
    """Bit flags describing the contents and checksum convention of a
    cartridge's internal data section.
    """

    DATA_HAS_CODE_PREFIX    = 1 << 0
    DATA_HAS_TRACE_ID       = 1 << 1
    DATA_HAS_CART_ID        = 1 << 2
    DATA_HAS_INSTALL_ID     = 1 << 3
    DATA_HAS_SYSTEM_ID      = 1 << 4
    DATA_HAS_PUBLIC_SECTION = 1 << 5
    DATA_CHECKSUM_INVERTED  = 1 << 6
|
||||
|
||||
# Game codes: character 0 is always "G", character 1 seems region related
# (B, C, E, K, L, N, Q, U or X have been observed) and characters 2-4 are a
# numeric identifier (nominally 700-999 or A00-A99 through D99, though the
# pattern also accepts lower numbers).
GAME_CODE_REGEX: re.Pattern = \
    re.compile(rb"G[A-Z][0-9A-D][0-9][0-9]", re.IGNORECASE)

# Region codes: character 0 is the region (A=Asia?, E=Europe, J=Japan,
# K=Korea, S=?, U=US), character 1 the type/variant (A-F=regular,
# R-W=e-Amusement, X-Z=?) and characters 2-4 an optional game revision
# (A-D or Z00-Z99).
GAME_REGION_REGEX: re.Pattern = \
    re.compile(rb"[AEJKSU][A-FR-WX-Z]([A-D]|Z[0-9][0-9])?", re.IGNORECASE)
|
||||
|
||||
# I/O and expansion boards known to carry a DS2401 system identifier chip.
SYSTEM_ID_IO_BOARDS: Sequence[str] = (
    "GX700-PWB(K)",  # Kick & Kick expansion board
    "GX894-PWB(B)",  # Digital I/O board
    "GX921-PWB(B)",  # DDR Karaoke Mix expansion board
    "PWB0000073070"  # GunMania expansion board
)
|
||||
|
||||
## Common data structures
|
||||
|
||||
@dataclass
|
||||
class IdentifierSet:
|
||||
traceID: bytes | None = None # aka TID
|
||||
cartID: bytes | None = None # aka SID
|
||||
installID: bytes | None = None # aka MID
|
||||
systemID: bytes | None = None # aka XID
|
||||
|
||||
def __init__(self, data: bytes):
|
||||
ids: list[bytes | None] = []
|
||||
|
||||
for offset in range(0, 32, 8):
|
||||
_id: bytes = data[offset:offset + 8]
|
||||
ids.append(_id if sum(_id) else None)
|
||||
|
||||
self.traceID, self.cartID, self.installID, self.systemID = ids
|
||||
|
||||
def getFlags(self) -> DataFlag:
|
||||
flags: DataFlag = DataFlag(0)
|
||||
|
||||
if self.traceID:
|
||||
flags |= DataFlag.DATA_HAS_TRACE_ID
|
||||
if self.cartID:
|
||||
flags |= DataFlag.DATA_HAS_CART_ID
|
||||
if self.installID:
|
||||
flags |= DataFlag.DATA_HAS_INSTALL_ID
|
||||
if self.systemID:
|
||||
flags |= DataFlag.DATA_HAS_SYSTEM_ID
|
||||
|
||||
return flags
|
||||
|
||||
def getTraceIDType(self) -> TraceIDType:
|
||||
if self.traceID is None:
|
||||
return TraceIDType.TID_NONE
|
||||
|
||||
match self.traceID[0]:
|
||||
case 0x81:
|
||||
return TraceIDType.TID_81
|
||||
|
||||
case 0x82:
|
||||
return TraceIDType.TID_82_BIG_ENDIAN # TODO
|
||||
|
||||
case prefix:
|
||||
raise ValueError(f"unknown trace ID prefix: 0x{prefix:02x}")
|
||||
|
||||
## Cartridge dump structure
|
||||
|
||||
# Header layout of a serialized dump: chip type, flags, then the five
# 8-byte identifier/key/config fields.
_DUMP_HEADER_STRUCT: Struct = Struct("< 2B 8s 8s 8s 8s 8s")

# Per-chip (total data length, public section offset, public section length)
# triplets.
_CHIP_SIZES: Mapping[ChipType, tuple[int, int, int]] = {
    ChipType.X76F041: ( 512, 384, 128 ),
    ChipType.X76F100: ( 112,   0,   0 ),
    ChipType.ZS01:    ( 112,   0,  32 )
}
|
||||
|
||||
@dataclass
class Dump:
    """Full dump of a cartridge: chip type, status flags, identifiers,
    data key, configuration and raw EEPROM contents.
    """

    chipType: ChipType
    flags:    DumpFlag

    systemID: bytes
    cartID:   bytes
    zsID:     bytes
    dataKey:  bytes
    config:   bytes
    data:     bytes

    def getChipSize(self) -> tuple[int, int, int]:
        """Returns the (length, public offset, public length) triplet for
        this dump's chip type.
        """
        return _CHIP_SIZES[self.chipType]

    def serialize(self) -> bytes:
        """Packs the dump into its binary representation (header followed
        by the raw data).
        """
        header: bytes = _DUMP_HEADER_STRUCT.pack(
            self.chipType,
            self.flags,
            self.systemID,
            self.cartID,
            self.zsID,
            self.dataKey,
            self.config
        )

        return header + self.data
|
||||
|
||||
def parseDump(data: bytes) -> Dump:
    """Deserializes a binary dump produced by Dump.serialize().

    Only as many data bytes as the chip's capacity are kept; any trailing
    bytes are ignored.
    """
    headerSize: int = _DUMP_HEADER_STRUCT.size

    chipType, flags, systemID, cartID, zsID, dataKey, config = \
        _DUMP_HEADER_STRUCT.unpack(data[0:headerSize])
    dataLength: int = _CHIP_SIZES[chipType][0]

    return Dump(
        chipType, flags, systemID, cartID, zsID, dataKey, config,
        data[headerSize:headerSize + dataLength]
    )
|
||||
|
||||
## Cartridge data parsers
|
||||
|
||||
# Header layouts: basic = region + code prefix + 8-bit checksum (padded),
# extended = full game code + year + region + 16-bit checksum.
_BASIC_HEADER_STRUCT:    Struct = Struct("< 2s 2s B 3x")
_EXTENDED_HEADER_STRUCT: Struct = Struct("< 8s H 4s H")

# Identifier flags cross-checked against the parsed data. The system and
# install IDs are excluded from validation as they may not always be
# present.
_IDENTIFIER_FLAG_MASK: DataFlag = \
    DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
|
||||
|
||||
def _checksum8(data: Iterable[int], invert: bool = False):
|
||||
return (sum(data) & 0xff) ^ (0xff if invert else 0)
|
||||
|
||||
def _checksum16(data: Iterable[int], invert: bool = False):
|
||||
it: Iterator = iter(data)
|
||||
values: map[int] = map(lambda x: x[0] | (x[1] << 8), zip(it, it))
|
||||
|
||||
return (sum(values) & 0xffff) ^ (0xffff if invert else 0)
|
||||
|
||||
def _getPublicData(dump: Dump, flags: DataFlag, maxLength: int = 512) -> bytes:
    """Returns up to maxLength bytes of the dump's publicly readable data.

    If the data has a dedicated public section, that section is returned;
    otherwise the data is assumed to start with the public fields.
    """
    if not (flags & DataFlag.DATA_HAS_PUBLIC_SECTION):
        return dump.data[0:maxLength]

    _, offset, length = dump.getChipSize()
    return dump.data[offset:offset + min(length, maxLength)]
|
||||
|
||||
class ParserError(Exception):
    """Raised by parsers when a dump does not match the attempted format.

    Derives from Exception rather than BaseException: BaseException is
    reserved for process-level signals (SystemExit, KeyboardInterrupt), and
    subclassing it would let ParserError slip through generic
    ``except Exception`` handlers. Explicit ``except ParserError`` handlers
    are unaffected by this change.
    """
|
||||
|
||||
@dataclass
class Parser:
    """Parsed view of a cartridge's data section.

    Subclasses fill in the optional fields from the raw dump; any field
    left as None could not be determined from the data alone.
    """

    formatType:  FormatType
    flags:       DataFlag
    identifiers: IdentifierSet

    region:     str | None = None
    codePrefix: str | None = None
    code:       str | None = None
    year:       int | None = None
|
||||
|
||||
class SimpleParser(Parser):
    """Parser for carts whose data holds nothing but a region string."""

    def __init__(self, dump: Dump, flags: DataFlag):
        rawRegion: bytes = _getPublicData(dump, flags, 8).rstrip(b"\0")

        if GAME_REGION_REGEX.fullmatch(rawRegion) is None:
            raise ParserError(f"invalid game region: {rawRegion}")

        super().__init__(
            FormatType.SIMPLE, flags, IdentifierSet(b""),
            rawRegion.decode("ascii")
        )
|
||||
|
||||
class BasicParser(Parser):
    """Parser for the "basic" header format: region, optional game code
    prefix and an 8-bit checksum, optionally followed by identifiers.
    """

    def __init__(self, dump: Dump, flags: DataFlag):
        header: bytes = \
            _getPublicData(dump, flags, _BASIC_HEADER_STRUCT.size)
        identifiers: IdentifierSet = \
            IdentifierSet(dump.data[_BASIC_HEADER_STRUCT.size:])

        region, codePrefix, checksum = _BASIC_HEADER_STRUCT.unpack(header)
        codePrefix = codePrefix.rstrip(b"\0")

        expected: int = _checksum8(
            header[0:4], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
        )

        # Validate the checksum, region, prefix presence and identifier
        # flags before accepting the format.
        if expected != checksum:
            raise ParserError(f"invalid header checksum, exp=0x{expected:02x}, got=0x{checksum:02x}")
        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")
        if bool(flags & DataFlag.DATA_HAS_CODE_PREFIX) != bool(codePrefix):
            raise ParserError(f"game code prefix should{' not' if codePrefix else ''} be present")
        if (identifiers.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
            raise ParserError("identifier flags do not match")

        super().__init__(
            FormatType.BASIC, flags, identifiers, region.decode("ascii"),
            codePrefix.decode("ascii") or None
        )
|
||||
|
||||
class ExtendedParser(Parser):
    """Parser for the "extended" header format: full game code, release
    year, region and a 16-bit checksum, optionally followed by identifiers.
    """

    def __init__(self, dump: Dump, flags: DataFlag):
        header: bytes = \
            _getPublicData(dump, flags, _EXTENDED_HEADER_STRUCT.size)
        identifiers: IdentifierSet = \
            IdentifierSet(dump.data[_EXTENDED_HEADER_STRUCT.size + 16:])

        code, year, region, checksum = _EXTENDED_HEADER_STRUCT.unpack(header)
        code   = code.rstrip(b"\0")
        region = region.rstrip(b"\0")

        expected: int = _checksum16(
            header[0:14], bool(flags & DataFlag.DATA_CHECKSUM_INVERTED)
        )

        # Validate the checksum, code, region and identifier flags before
        # accepting the format.
        if expected != checksum:
            raise ParserError(f"invalid header checksum, exp=0x{expected:04x}, got=0x{checksum:04x}")
        if GAME_CODE_REGEX.fullmatch(code) is None:
            raise ParserError(f"invalid game code: {code}")
        if GAME_REGION_REGEX.fullmatch(region) is None:
            raise ParserError(f"invalid game region: {region}")
        if (identifiers.getFlags() ^ flags) & _IDENTIFIER_FLAG_MASK:
            raise ParserError("identifier flags do not match")

        decodedCode: str = code.decode("ascii")
        super().__init__(
            FormatType.EXTENDED, flags, identifiers,
            region.decode("ascii"), decodedCode[0:2], decodedCode, year
        )
|
||||
|
||||
## Cartridge database
|
||||
|
||||
DB_ENTRY_STRUCT: Struct = Struct("< 6B H 8s 8s 8s 64s")
|
||||
|
||||
@dataclass
|
||||
class GameEntry:
|
||||
code: str
|
||||
region: str
|
||||
name: str
|
||||
|
||||
installCart: str | None = None
|
||||
gameCart: str | None = None
|
||||
ioBoard: str | None = None
|
||||
|
||||
# Implement the comparison overload so sorting will work.
|
||||
def __lt__(self, entry: Any) -> bool:
|
||||
return ( self.code, self.region, self.name ) < \
|
||||
( entry.code, entry.region, entry.name )
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.code} {self.region}"
|
||||
|
||||
def getFullName(self) -> str:
|
||||
return f"{self.name} [{self.code} {self.region}]"
|
||||
|
||||
def hasSystemID(self) -> bool:
|
||||
return (self.ioBoard in SYSTEM_ID_IO_BOARDS)
|
||||
|
||||
@dataclass
class DBEntry:
    """Entry of a cartdb file, tying a game to the information extracted
    from one of its cartridge dumps.
    """

    game:    GameEntry
    dataKey: bytes

    chipType:    ChipType
    formatType:  FormatType
    traceIDType: TraceIDType
    flags:       DataFlag

    traceIDParam:    int = 0
    installIDPrefix: int = 0
    year:            int = 0

    def __init__(self, game: GameEntry, dump: Dump, parser: Parser):
        self.game        = game
        self.dataKey     = dump.dataKey
        self.chipType    = dump.chipType
        self.formatType  = parser.formatType
        self.traceIDType = parser.identifiers.getTraceIDType()
        self.flags       = parser.flags
        self.year        = parser.year or 0

        # TODO: implement this properly
        self.traceIDParam = 16

        installID = parser.identifiers.installID
        self.installIDPrefix = installID[0] if installID else 0

    def __lt__(self, entry: Any) -> bool:
        # Sorting is delegated entirely to the underlying game entries.
        return (self.game < entry.game)

    def serialize(self) -> bytes:
        """Packs the entry into its binary cartdb representation."""
        return DB_ENTRY_STRUCT.pack(
            self.chipType,
            self.formatType,
            self.traceIDType,
            self.flags,
            self.traceIDParam,
            self.installIDPrefix,
            self.year,
            self.dataKey,
            self.game.code.encode("ascii"),
            self.game.region.encode("ascii"),
            self.game.name.encode("ascii")
        )
|
323
tools/buildCartDB.py
Executable file
323
tools/buildCartDB.py
Executable file
@ -0,0 +1,323 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__version__ = "0.3.1"
|
||||
__author__ = "spicyjpeg"
|
||||
|
||||
import json, logging, os, re
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from collections import Counter, defaultdict
|
||||
from operator import methodcaller
|
||||
from pathlib import Path
|
||||
from struct import Struct
|
||||
from typing import Any, Generator, Iterable, Mapping, Sequence, Type
|
||||
|
||||
from _common import *
|
||||
|
||||
## Game list (loaded from games.json)
|
||||
|
||||
class GameDB:
    """In-memory index of the game list loaded from games.json."""

    def __init__(self, entries: Iterable[Mapping[str, Any]] | None = None):
        self._entries: defaultdict[str, list[GameEntry]] = defaultdict(list)

        for entryObj in (entries or ()):
            self.addEntry(entryObj)

    def addEntry(self, entryObj: Mapping[str, Any]):
        """Validates a single game list object and adds it to the index.

        Raises ValueError if the game code or region is malformed.
        """
        code:   str = entryObj["code"].strip().upper()
        region: str = entryObj["region"].strip().upper()
        name:   str = entryObj["name"]

        if GAME_CODE_REGEX.fullmatch(code.encode("ascii")) is None:
            raise ValueError(f"invalid game code: {code}")
        if GAME_REGION_REGEX.fullmatch(region.encode("ascii")) is None:
            raise ValueError(f"invalid game region: {region}")

        entry: GameEntry = GameEntry(
            code, region, name,
            entryObj.get("installCart", None),
            entryObj.get("gameCart", None),
            entryObj.get("ioBoard", None)
        )

        # Index entries by game code plus the first two characters of the
        # region code, so all revisions of a game land in a single bucket
        # and can be retrieved quickly.
        self._entries[code + region[0:2]].append(entry)

    def lookup(
        self, code: str, region: str
    ) -> Generator[GameEntry, None, None]:
        """Yields all entries matching the given code and region.

        If only two characters of the region code are provided, all entries
        whose region code starts with those characters are matched (even if
        longer).
        """
        _code:   str = code.strip().upper()
        _region: str = region.strip().upper()

        for entry in self._entries[_code + _region[0:2]]:
            if entry.region.startswith(_region):
                yield entry
|
||||
|
||||
## MAME dump parser
|
||||
|
||||
_MAME_X76F041_STRUCT: Struct = Struct("< 4x 8s 8s 8s 8s 512s")
|
||||
_MAME_X76F100_STRUCT: Struct = Struct("< 4x 8s 8s 112s")
|
||||
_MAME_ZS01_STRUCT: Struct = Struct("< 4x 8s 8s 8s 112s")
|
||||
|
||||
_MAME_DUMP_SIZES: Sequence[int] = (
|
||||
_MAME_X76F041_STRUCT.size, _MAME_X76F100_STRUCT.size, _MAME_ZS01_STRUCT.size
|
||||
)
|
||||
|
||||
def parseMAMEDump(dump: bytes):
    """Parses a MAME NVRAM dump into a Dump object.

    The chip type is inferred from the dump's 4-byte big-endian magic
    number. Raises RuntimeError (with the chip type as first argument) for
    unknown magic numbers or unsupported key configurations.
    """
    # Identifiers MAME does not store are filled with placeholder zeroes.
    systemID: bytes = bytes(8)
    cartID:   bytes = bytes(8)
    zsID:     bytes = bytes(8)
    config:   bytes = bytes(8)

    flags: DumpFlag = \
        DumpFlag.DUMP_PUBLIC_DATA_OK | DumpFlag.DUMP_PRIVATE_DATA_OK

    magic: int = int.from_bytes(dump[0:4], "big")

    if magic == 0x1955aa55:
        chipType: ChipType = ChipType.X76F041
        _, _, dataKey, config, data = _MAME_X76F041_STRUCT.unpack(dump)

        flags |= DumpFlag.DUMP_CONFIG_OK
    elif magic == 0x1900aa55:
        chipType: ChipType = ChipType.X76F100
        dataKey, readKey, data = _MAME_X76F100_STRUCT.unpack(dump)

        if dataKey != readKey:
            raise RuntimeError(chipType, "X76F100 dumps with different read/write keys are not supported")
    elif magic == 0x5a530001:
        chipType: ChipType = ChipType.ZS01
        _, dataKey, config, data = _MAME_ZS01_STRUCT.unpack(dump)

        #zsID = MAME_ZS_ID
        flags |= DumpFlag.DUMP_CONFIG_OK | DumpFlag.DUMP_ZS_ID_OK
    else:
        raise RuntimeError(ChipType.NONE, f"unrecognized chip ID: 0x{magic:08x}")

    # NOTE(review): cart/system ID detection is stubbed out upstream; kept
    # here verbatim for reference.
    #if data.find(MAME_CART_ID) >= 0:
        #cartID = MAME_CART_ID
        #flags |= DumpFlag.DUMP_HAS_CART_ID | DumpFlag.DUMP_CART_ID_OK

    #if data.find(MAME_SYSTEM_ID) >= 0:
        #systemID = MAME_SYSTEM_ID
        #flags |= DumpFlag.DUMP_HAS_SYSTEM_ID | DumpFlag.DUMP_SYSTEM_ID_OK

    return Dump(chipType, flags, systemID, cartID, zsID, dataKey, config, data)
|
||||
|
||||
## Data format identification
|
||||
|
||||
# Known data section layouts, ordered from the least to the most specific
# (newCartParser() tries them in reverse). Each entry is a human-readable
# name, the parser class to instantiate and the flags to pass to it.
_KNOWN_FORMATS: Sequence[tuple[str, Type, DataFlag]] = (
    (
        # Used by GCB48 (and possibly other games?)
        "region only",
        SimpleParser,
        DataFlag.DATA_HAS_PUBLIC_SECTION
    ), (
        "basic (no IDs)",
        BasicParser,
        DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + TID",
        BasicParser,
        DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + TID, SID",
        BasicParser,
        DataFlag.DATA_HAS_TRACE_ID | DataFlag.DATA_HAS_CART_ID
            | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        "basic + prefix, TID, SID",
        BasicParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        # Used by most pre-ZS01 Bemani games
        "basic + prefix, all IDs",
        BasicParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
            | DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        # Used by early (pre-digital-I/O) Bemani games
        "extended (no IDs)",
        ExtendedParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_CHECKSUM_INVERTED
    ), (
        # Used by early (pre-digital-I/O) Bemani games
        "extended alt. (no IDs)",
        ExtendedParser,
        DataFlag.DATA_HAS_CODE_PREFIX
    ), (
        # Used by GE936/GK936 and all ZS01 Bemani games
        "extended + all IDs",
        ExtendedParser,
        DataFlag.DATA_HAS_CODE_PREFIX | DataFlag.DATA_HAS_TRACE_ID
            | DataFlag.DATA_HAS_CART_ID | DataFlag.DATA_HAS_INSTALL_ID
            | DataFlag.DATA_HAS_SYSTEM_ID | DataFlag.DATA_HAS_PUBLIC_SECTION
            | DataFlag.DATA_CHECKSUM_INVERTED
    )
)
|
||||
|
||||
def newCartParser(dump: Dump) -> Parser:
    """Returns a parser for the dump's data section, trying known formats
    from the most to the least specific.

    Raises RuntimeError if no known format matches.
    """
    for formatName, parserType, formatFlags in reversed(_KNOWN_FORMATS):
        try:
            parser: Any = parserType(dump, formatFlags)
        except ParserError:
            # Mismatch; move on to the next candidate format.
            continue

        logging.debug(f"found known data format: {formatName}")
        return parser

    raise RuntimeError("no known data format found")
|
||||
|
||||
## Dump processing
|
||||
|
||||
def processDump(
    dump: Dump, db: GameDB, nameHint: str = ""
) -> Generator[DBEntry, None, None]:
    """Parses a dump, matches it against the game list and yields one
    DBEntry per matching game revision.

    Raises RuntimeError if the region or code cannot be determined, or if
    no game list entry matches.
    """
    parser: Parser = newCartParser(dump)

    if parser.region is None:
        raise RuntimeError("can't parse game region from dump")

    # If the parser could not find a valid game code in the dump, attempt
    # to extract it from the provided hint (the dump's filename).
    if parser.code is None:
        hintMatch: re.Match | None = GAME_CODE_REGEX.search(
            nameHint.upper().encode("ascii")
        )

        if hintMatch is None:
            raise RuntimeError("can't parse game code from dump nor from filename")

        parser.code = hintMatch.group().decode("ascii")

    matches: list[GameEntry] = sorted(db.lookup(parser.code, parser.region))

    if not matches:
        raise RuntimeError(f"{parser.code} {parser.region} not found in game list")

    names: str = ", ".join(map(methodcaller("getFullName"), matches))
    logging.info(f"imported {dump.chipType.name}: {names}")

    for game in matches:
        # TODO: handle separate installation/game carts
        if game.hasSystemID():
            parser.flags |= DataFlag.DATA_HAS_SYSTEM_ID
        else:
            parser.flags &= ~DataFlag.DATA_HAS_SYSTEM_ID

        yield DBEntry(game, dump, parser)
|
||||
|
||||
## Main
|
||||
|
||||
# Output file name for each chip type's cartdb.
_CARTDB_PATHS: Mapping[ChipType, str] = {
    ChipType.X76F041: "x76f041.cartdb",
    ChipType.X76F100: "x76f100.cartdb",
    ChipType.ZS01:    "zs01.cartdb"
}
|
||||
|
||||
def createParser() -> ArgumentParser:
    """Builds and returns the command line argument parser for this tool."""
    argParser = ArgumentParser(
        description = "Recursively scans a directory for MAME dumps and generates cartdb files."
    )

    argParser.add_argument(
        "-v", "--verbose",
        action = "count",
        help   = "enable additional logging levels"
    )
    argParser.add_argument(
        "-o", "--output",
        type    = Path,
        default = os.curdir,
        help    = "path to output directory (current directory by default)",
        metavar = "dir"
    )

    argParser.add_argument(
        "gameList",
        type = Path,
        help = "path to JSON file containing game list"
    )
    argParser.add_argument(
        "input",
        type  = Path,
        nargs = "+",
        help  = "paths to input directories"
    )

    return argParser
|
||||
|
||||
def setupLogger(level: int | None):
    """Configures the root logger's format and verbosity.

    A level of 0 or None shows warnings only, 1 adds info messages and 2 or
    higher adds debug messages.
    """
    levels = ( logging.WARNING, logging.INFO, logging.DEBUG )

    logging.basicConfig(
        format = "[{levelname:8s}] {message}",
        style  = "{",
        level  = levels[min(level or 0, 2)]
    )
|
||||
|
||||
def main():
    """Entry point: loads the game list, scans the input directories for
    MAME dumps and writes one cartdb file per chip type.
    """
    parser: ArgumentParser = createParser()
    args:   Namespace      = parser.parse_args()
    setupLogger(args.verbose)

    failures: Counter[ChipType]                    = Counter()
    entries:  defaultdict[ChipType, list[DBEntry]] = defaultdict(list)

    with args.gameList.open("rt") as _file:
        gameList: Sequence[Mapping[str, Any]] = json.load(_file)

    db: GameDB = GameDB(gameList)

    for inputPath in args.input:
        for rootDir, _, files in os.walk(inputPath):
            for dumpName in files:
                path: Path        = Path(rootDir, dumpName)
                dump: Dump | None = None

                # Skip files whose size does not match any of the known
                # dump formats.
                if os.stat(path).st_size not in _MAME_DUMP_SIZES:
                    logging.warning(f"ignoring {dumpName}")
                    continue

                try:
                    with open(path, "rb") as _file:
                        dump = parseMAMEDump(_file.read())

                    entries[dump.chipType].extend(
                        processDump(dump, db, dumpName)
                    )
                except RuntimeError as exc:
                    # dump stays None if parseMAMEDump() itself failed.
                    if dump is None:
                        logging.error(f"failed to import: {path}, {exc}")
                    else:
                        logging.error(f"failed to import {dump.chipType.name}: {path}, {exc}")
                        failures[dump.chipType] += 1

    # Sort all entries and generate the cartdb files.
    for chipType, dbEntries in entries.items():
        if not dbEntries:
            logging.warning(f"DB for {chipType.name} is empty")
            continue

        dbEntries.sort()

        with open(args.output / _CARTDB_PATHS[chipType], "wb") as _file:
            _file.write(b"".join(entry.serialize() for entry in dbEntries))

        logging.info(f"{chipType.name}: {len(dbEntries)} entries saved, {failures[chipType]} failures")

if __name__ == "__main__":
    main()
|
@ -1,26 +1,29 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__version__ = "0.3.0"
|
||||
__version__ = "0.3.1"
|
||||
__author__ = "spicyjpeg"
|
||||
|
||||
import sys
|
||||
from argparse import ArgumentParser, FileType, Namespace
|
||||
from enum import IntEnum, IntFlag
|
||||
from struct import Struct
|
||||
from typing import BinaryIO, ByteString, Mapping, Sequence, TextIO
|
||||
from zlib import decompress
|
||||
|
||||
## Base45 decoder
|
||||
from _common import *
|
||||
|
||||
BASE45_CHARSET: str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:"
|
||||
## Utilities
|
||||
|
||||
def decodeBase45(data: str) -> bytearray:
|
||||
mapped: map = map(BASE45_CHARSET.index, data)
|
||||
# This encoding is similar to standard base45, but with some problematic
|
||||
# characters (' ', '$', '%', '*') excluded.
|
||||
_BASE41_CHARSET: str = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ+-./:"
|
||||
|
||||
def decodeBase41(data: str) -> bytearray:
|
||||
mapped: map[int] = map(_BASE41_CHARSET.index, data)
|
||||
output: bytearray = bytearray()
|
||||
|
||||
for a, b, c in zip(mapped, mapped, mapped):
|
||||
value: int = a + (b * 45) + (c * 2025)
|
||||
value: int = a + (b * 41) + (c * 1681)
|
||||
|
||||
output.append(value >> 8)
|
||||
output.append(value & 0xff)
|
||||
@ -35,43 +38,6 @@ def serialNumberToString(_id: ByteString) -> str:
|
||||
|
||||
return f"{(value // 10000) % 10000:04d}-{value % 10000:04d}"
|
||||
|
||||
## Dump parser
|
||||
|
||||
DUMP_START: str = "573::"
|
||||
DUMP_END: str = "::"
|
||||
DUMP_STRUCT: Struct = Struct("< 3B x 8s 8s 8s 8s 8s 512s")
|
||||
DUMP_VERSION: int = 1
|
||||
|
||||
class ChipType(IntEnum):
|
||||
TYPE_NONE = 0
|
||||
TYPE_X76F041 = 1
|
||||
TYPE_X76F100 = 2
|
||||
TYPE_ZS01 = 3
|
||||
|
||||
class CartFlag(IntFlag):
|
||||
HAS_DIGITAL_IO = 1 << 0
|
||||
HAS_DS2401 = 1 << 1
|
||||
CONFIG_OK = 1 << 2
|
||||
SYSTEM_ID_OK = 1 << 3
|
||||
CART_ID_OK = 1 << 4
|
||||
ZS_ID_OK = 1 << 5
|
||||
PUBLIC_DATA_OK = 1 << 6
|
||||
PRIVATE_DATA_OK = 1 << 7
|
||||
|
||||
CHIP_NAMES: Mapping[ChipType, str] = {
|
||||
ChipType.TYPE_NONE: "None",
|
||||
ChipType.TYPE_X76F041: "Xicor X76F041",
|
||||
ChipType.TYPE_X76F100: "Xicor X76F100",
|
||||
ChipType.TYPE_ZS01: "Konami ZS01 (PIC16CE625)"
|
||||
}
|
||||
|
||||
DATA_LENGTHS: Mapping[ChipType, int] = {
|
||||
ChipType.TYPE_NONE: 0,
|
||||
ChipType.TYPE_X76F041: 512,
|
||||
ChipType.TYPE_X76F100: 112,
|
||||
ChipType.TYPE_ZS01: 112
|
||||
}
|
||||
|
||||
def toPrintableChar(value: int):
|
||||
if (value < 0x20) or (value > 0x7e):
|
||||
return "."
|
||||
@ -80,58 +46,52 @@ def toPrintableChar(value: int):
|
||||
|
||||
def hexdump(data: ByteString | Sequence[int], output: TextIO, width: int = 16):
|
||||
for i in range(0, len(data), width):
|
||||
hexBytes: map = map(lambda value: f"{value:02x}", data[i:i + width])
|
||||
hexLine: str = " ".join(hexBytes).ljust(width * 3 - 1)
|
||||
hexBytes: map[str] = map(lambda value: f"{value:02x}", data[i:i + width])
|
||||
hexLine: str = " ".join(hexBytes).ljust(width * 3 - 1)
|
||||
|
||||
asciiBytes: map = map(toPrintableChar, data[i:i + width])
|
||||
asciiLine: str = "".join(asciiBytes).ljust(width)
|
||||
asciiBytes: map[str] = map(toPrintableChar, data[i:i + width])
|
||||
asciiLine: str = "".join(asciiBytes).ljust(width)
|
||||
|
||||
output.write(f"{i:04x}: {hexLine} |{asciiLine}|\n")
|
||||
output.write(f" {i:04x}: {hexLine} |{asciiLine}|\n")
|
||||
|
||||
def parseDump(
|
||||
dumpString: str, logOutput: TextIO | None = None,
|
||||
exportOutput: BinaryIO | None = None
|
||||
):
|
||||
_dumpString: str = dumpString.strip().upper()
|
||||
## Dump parser
|
||||
|
||||
if (
|
||||
not _dumpString.startswith(DUMP_START) or
|
||||
not _dumpString.endswith(DUMP_END)
|
||||
):
|
||||
raise ValueError(f"dump string does not begin with '{DUMP_START}' and end with '{DUMP_END}'")
|
||||
_DUMP_START: str = "573::"
|
||||
_DUMP_END: str = "::"
|
||||
|
||||
_dumpString = _dumpString[len(DUMP_START):-len(DUMP_END)]
|
||||
dump: bytes = decompress(decodeBase45(_dumpString))
|
||||
_CHIP_NAMES: Mapping[ChipType, str] = {
|
||||
ChipType.NONE: "None",
|
||||
ChipType.X76F041: "Xicor X76F041",
|
||||
ChipType.X76F100: "Xicor X76F100",
|
||||
ChipType.ZS01: "Konami ZS01 (PIC16CE625)"
|
||||
}
|
||||
|
||||
version, chipType, flags, dataKey, config, systemID, cartID, zsID, data = \
|
||||
DUMP_STRUCT.unpack(dump[0:DUMP_STRUCT.size])
|
||||
def parseDumpString(data: str) -> Dump:
|
||||
_data: str = data.strip().upper()
|
||||
|
||||
if version != DUMP_VERSION:
|
||||
raise ValueError(f"unsupported dump version {version}")
|
||||
if not _data.startswith(_DUMP_START) or not _data.endswith(_DUMP_END):
|
||||
raise ValueError(f"dump string does not begin with '{_DUMP_START}' and end with '{_DUMP_END}'")
|
||||
|
||||
chipType: ChipType = ChipType(chipType)
|
||||
flags: CartFlag = CartFlag(flags)
|
||||
data: bytes = data[0:DATA_LENGTHS[chipType]]
|
||||
_data = _data[len(_DUMP_START):-len(_DUMP_END)]
|
||||
|
||||
if logOutput:
|
||||
if flags & CartFlag.SYSTEM_ID_OK:
|
||||
logOutput.write(f"Digital I/O ID: {systemID.hex('-')}\n")
|
||||
logOutput.write(f"Serial number: {serialNumberToString(systemID)}\n")
|
||||
return parseDump(decompress(decodeBase41(_data)))
|
||||
|
||||
logOutput.write(f"Cartridge type: {CHIP_NAMES[chipType]}\n")
|
||||
if flags & CartFlag.CART_ID_OK:
|
||||
logOutput.write(f"DS2401 identifier: {cartID.hex('-')}\n")
|
||||
if flags & CartFlag.ZS_ID_OK:
|
||||
logOutput.write(f"ZS01 identifier: {zsID.hex('-')}\n")
|
||||
if flags & CartFlag.CONFIG_OK:
|
||||
logOutput.write(f"Configuration: {config.hex('-')}\n")
|
||||
def printDumpInfo(dump: Dump, output: TextIO):
|
||||
if dump.flags & DumpFlag.DUMP_SYSTEM_ID_OK:
|
||||
output.write(f"Digital I/O ID: {dump.systemID.hex('-')}\n")
|
||||
output.write(f"Serial number: {serialNumberToString(dump.systemID)}\n")
|
||||
|
||||
logOutput.write("\nEEPROM dump:\n")
|
||||
hexdump(data, logOutput)
|
||||
logOutput.write("\n")
|
||||
output.write(f"Cartridge type: {_CHIP_NAMES[dump.chipType]}\n")
|
||||
if dump.flags & DumpFlag.DUMP_CART_ID_OK:
|
||||
output.write(f"DS2401 identifier: {dump.cartID.hex('-')}\n")
|
||||
if dump.flags & DumpFlag.DUMP_ZS_ID_OK:
|
||||
output.write(f"ZS01 identifier: {dump.zsID.hex('-')}\n")
|
||||
if dump.flags & DumpFlag.DUMP_CONFIG_OK:
|
||||
output.write(f"Configuration: {dump.config.hex('-')}\n")
|
||||
|
||||
if exportOutput:
|
||||
pass # TODO: implement exporting
|
||||
output.write("\nEEPROM dump:\n")
|
||||
hexdump(dump.data, output)
|
||||
output.write("\n")
|
||||
|
||||
## Main
|
||||
|
||||
@ -162,15 +122,15 @@ def createParser() -> ArgumentParser:
|
||||
help = "log cartridge info to specified file (stdout by default)",
|
||||
metavar = "file"
|
||||
)
|
||||
group.add_argument(
|
||||
"-e", "--export",
|
||||
type = FileType("wb"),
|
||||
help = "export dump in MAME format to specified file",
|
||||
metavar = "file"
|
||||
)
|
||||
#group.add_argument(
|
||||
#"-e", "--export",
|
||||
#type = FileType("wb"),
|
||||
#help = "export dump in MAME format to specified file",
|
||||
#metavar = "file"
|
||||
#)
|
||||
|
||||
group.add_argument(
|
||||
"dump",
|
||||
"data",
|
||||
type = str,
|
||||
nargs = "?",
|
||||
help = "QR string to decode (if -i was not passed)"
|
||||
@ -184,13 +144,16 @@ def main():
|
||||
|
||||
if args.input:
|
||||
with args.input as _file:
|
||||
dump: str = _file.read()
|
||||
elif args.dump:
|
||||
dump: str = args.dump
|
||||
data: str = _file.read()
|
||||
elif args.data:
|
||||
data: str = args.data
|
||||
else:
|
||||
parser.error("a dump must be passed on the command line or using -i")
|
||||
|
||||
parseDump(dump, args.log, args.export)
|
||||
dump: Dump = parseDumpString(data)
|
||||
|
||||
if args.log:
|
||||
printDumpInfo(dump, args.log)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
Loading…
x
Reference in New Issue
Block a user