Initial commit
Adding base tools
This commit is contained in:
commit
a31e555ce7
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text=auto
|
155
.gitignore
vendored
Normal file
155
.gitignore
vendored
Normal file
@ -0,0 +1,155 @@
|
||||
Data/
|
||||
checks.json
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
63
README.md
Normal file
63
README.md
Normal file
@ -0,0 +1,63 @@
|
||||
# Nijiiro Toolset
|
||||
|
||||
A collection of various python scripts to help you edit and validate taiko nijiiro game files.
|
||||
|
||||
**This is meant to be placed straight in the game's folder**
|
||||
|
||||
```files
|
||||
.\Data\
|
||||
.\Executable\
|
||||
|
||||
extract here <----
|
||||
```
|
||||
|
||||
Please note that this works both on 08.18 and CHN.
|
||||
|
||||
## encryption.py
|
||||
|
||||
This script allows you to encrypt or decrypt both Datatables and Fumens
|
||||
|
||||
**You will need to provide your own aes keys for this script to work.**
|
||||
|
||||
```py
|
||||
class Keys(Enum):
|
||||
Datatable = "" # Add datatable key here
|
||||
Fumen = "" # Add Fumen key here
|
||||
```
|
||||
|
||||
you also need to install the pip module `cryptography`:
|
||||
> pip install cryptography
|
||||
|
||||
Here are some examples:
|
||||
|
||||
```py
|
||||
# Display the help message
|
||||
py .\encryption.py --help
|
||||
|
||||
# Decrypting a datatable :
|
||||
py .\encryption.py --input "data.bin" --output "data.json"
|
||||
|
||||
# Encrypting a datatable :
|
||||
py .\encryption.py --enc --input "data.json" --output "data.bin"
|
||||
|
||||
# Encrypting a fumen for use in CHN :
|
||||
py .\encryption.py --enc --fumen --input "data_e.bin" --output "data_e.bin"
|
||||
```
|
||||
|
||||
## checkDatatables.py
|
||||
|
||||
This script generates a comprehensive report of information about your datatable files. It is meant to be used for basic checks such as:
|
||||
|
||||
* Listing the amount of songs in your tables
|
||||
* Listing all vacant entries below 1599 to facilitate adding songs in
|
||||
* Checking their uniqueIds to make sure they don't exceed 1599
|
||||
* Listing all missing word entries for your songlist
|
||||
* Checking for duplicate entries (doublons) in various files
|
||||
* Checking for id and uniqueId mismatches in various files
|
||||
* Checking for missing sound and fumen files
|
||||
|
||||
To run this one you simply need to call it like so:
|
||||
|
||||
> py .\checkDatatables.py
|
||||
|
||||
The output will be written in a file named `checks.json`
|
475
checkDatatables.py
Normal file
475
checkDatatables.py
Normal file
@ -0,0 +1,475 @@
|
||||
from enum import Enum
|
||||
import gzip
|
||||
from encryption import decrypt_file
|
||||
import json
|
||||
import os
|
||||
|
||||
# "japaneseText"
|
||||
# "englishUsText"
|
||||
# "chineseTText"
|
||||
# "koreanText"
|
||||
# "chineseSText"
|
||||
language = "englishUsText"
|
||||
|
||||
isCHN = False
|
||||
|
||||
# region Loading files
|
||||
checkFile = {}
|
||||
|
||||
# Loading musicinfo.bin
|
||||
try:
|
||||
infos = json.load(gzip.open("./Data/x64/datatable/musicinfo.bin", "rb"))["items"]
|
||||
except:
|
||||
try:
|
||||
infos = json.loads(
|
||||
decrypt_file(input_file="./Data/x64/datatable/musicinfo.bin")
|
||||
)["items"]
|
||||
isCHN = True
|
||||
except:
|
||||
print("Couldn't load musicinfo.bin, exiting.")
|
||||
exit(0)
|
||||
|
||||
# Loading music_usbsetting.bin
|
||||
try:
|
||||
usb = (
|
||||
json.loads(
|
||||
decrypt_file(input_file="./Data/x64/datatable/music_usbsetting.bin")
|
||||
)["items"]
|
||||
if isCHN
|
||||
else None
|
||||
)
|
||||
except:
|
||||
usb = None
|
||||
|
||||
# Loading music_order.bin
|
||||
try:
|
||||
order = (
|
||||
json.loads(decrypt_file(input_file="./Data/x64/datatable/music_order.bin"))[
|
||||
"items"
|
||||
]
|
||||
if isCHN
|
||||
else json.load(gzip.open("./Data/x64/datatable/music_order.bin", "rb"))["items"]
|
||||
)
|
||||
except:
|
||||
order = None
|
||||
|
||||
# Loading music_attribute.bin
|
||||
try:
|
||||
attributes = (
|
||||
json.loads(decrypt_file(input_file="./Data/x64/datatable/music_attribute.bin"))[
|
||||
"items"
|
||||
]
|
||||
if isCHN
|
||||
else json.load(gzip.open("./Data/x64/datatable/music_attribute.bin", "rb"))[
|
||||
"items"
|
||||
]
|
||||
)
|
||||
except:
|
||||
attributes = None
|
||||
|
||||
# Loading wordlist.bin
|
||||
try:
|
||||
words = (
|
||||
json.loads(decrypt_file(input_file="./Data/x64/datatable/wordlist.bin"))[
|
||||
"items"
|
||||
]
|
||||
if isCHN
|
||||
else json.load(gzip.open("./Data/x64/datatable/wordlist.bin", "rb"))["items"]
|
||||
)
|
||||
except:
|
||||
words = None
|
||||
# endregion
|
||||
|
||||
# Forcing japanese language on 08.18 as this is what is usually used for omnimix.
|
||||
if isCHN:
|
||||
language = "japaneseText"
|
||||
|
||||
|
||||
# region Classes And Methods
|
||||
class Genres(Enum):
    """Genre numbers as used by musicinfo / music_order.

    Values differ per build: on CHN builds Variety and Classical are shifted
    up by one and there is no Custom member.
    NOTE(review): the CHN mapping leaves value 6 unnamed and the non-CHN
    mapping leaves value 8 unnamed -- presumably those slots belong to other
    genres; confirm against the game's datatables.
    """

    Unknown = -1
    Pop = 0
    Anime = 1
    Kids = 2
    Vocaloid = 3
    GameMusic = 4
    NamcoOriginal = 5
    Variety = 6 if not isCHN else 7
    Classical = 7 if not isCHN else 8
    # Custom genre only exists outside CHN builds.
    if not isCHN:
        Custom = 9

    @classmethod
    def _missing_(cls, value):
        # Any genre number not listed above maps to Unknown instead of
        # raising ValueError when calling Genres(n).
        return cls.Unknown
|
||||
|
||||
|
||||
def findKeyInList(list: list, key: str, keyValue, value=None):
    """Return the first dict in *list* whose *key* field equals *keyValue*.

    When *value* is given, return that field of the matching dict instead of
    the dict itself. On no match: "" when *value* was requested, else None.
    """
    for entry in list:
        if entry[key] != keyValue:
            continue
        return entry if value is None else entry[value]
    # Nothing matched: mirror the requested return shape.
    return None if value is None else ""
|
||||
|
||||
|
||||
def findAllObjects(list: list, key: str, keyValue):
    """Return every dict in *list* whose *key* field equals *keyValue*."""
    return [element for element in list if element[key] == keyValue]
|
||||
|
||||
|
||||
def findDoubloninList(list: list, key: str, keyValue):
    """Return True when *keyValue* appears more than once under *key* in *list*."""
    occurrences = findAllObjects(list=list, key=key, keyValue=keyValue)
    return len(occurrences) > 1
|
||||
|
||||
|
||||
def doesPathExist(path: str):
    """Return True when *path* exists on disk, False otherwise."""
    return os.path.exists(path)
|
||||
|
||||
|
||||
def initCheckFile():
    """Initialise the global ``checkFile`` report skeleton.

    The "musicinfo.json" and "GameFiles" sections are always present; one
    extra section is added per optional datatable that actually loaded
    (``attributes``, ``order``, ``usb``, ``words`` are None on load failure).
    Counters start at 0 and list fields start empty; the checking loops
    below fill them in.
    """
    global checkFile
    checkFile = {
        "musicinfo.json": {
            "TotalEntries": len(infos),
            # Highest uniqueId found in musicinfo; also drives the vacant-id scan.
            "MaxId": max(infos, key=lambda ev: ev["uniqueId"])["uniqueId"],
            "UniqueIdTooHigh": 0,
            "UniqueIdTooHighList": [],
            "UnusedUniqueIds": 0,
            "UnusedUniqueIdsList": [],
            "Doublons": 0,
            "DoublonsList": [],
            "GenreNoList": [],
        },
    }

    # music_attribute.bin loaded?
    if attributes is not None:
        checkFile["music_attribute.json"] = {
            "TotalEntries": len(attributes),
            "Missing": 0,
            "MissingList": [],
            "Mismatch": 0,
            "MismatchList": [],
            "Doublons": 0,
            "DoublonsList": [],
        }

    # music_order.bin loaded?
    if order is not None:
        checkFile["music_order.json"] = {
            "TotalEntries": len(order),
            "UniqueEntries": 0,
            "UniqueEntriesList": [],
            "GenreNoList": [],
            "Missing": 0,
            "MissingList": [],
            "Mismatch": 0,
            "MismatchList": [],
        }

    # music_usbsetting.bin loaded (CHN only)?
    if usb is not None:
        checkFile["music_usbsetting.json"] = {
            "TotalEntries": len(usb),
            "Missing": 0,
            "MissingList": [],
            "Mismatch": 0,
            "MismatchList": [],
            "Doublons": 0,
            "DoublonsList": [],
        }

    # wordlist.bin loaded?
    if words is not None:
        checkFile["wordlist.json"] = {
            "TotalEntries": len(words),
            "MissingSongName": 0,
            "MissingSongNameList": [],
            "MissingSongSub": 0,
            "MissingSongSubList": [],
            "MissingSongDetail": 0,
            "MissingSongDetailList": [],
            "Doublons": 0,
            "DoublonsList": [],
        }

    # Always check for missing sound/fumen files on disk.
    checkFile.update(
        {
            "GameFiles": {
                "MissingSound": 0,
                "MissingSoundList": [],
                "MissingFumen": 0,
                "MissingFumenList": [],
            },
        }
    )
|
||||
|
||||
|
||||
class Song:
    """One song entry from musicinfo, joined with its wordlist texts.

    The redundant class-level attribute defaults of the original were
    removed: they were always shadowed by the instance attributes set in
    __init__ and only invited accidental class-level access.
    """

    def __init__(self, id, uniqueId, genreNo, name, sub, detail):
        # Textual id, e.g. used in "song_<id>" wordlist keys and file names.
        self.id = id
        # Numeric id; must stay below 1600 (checked later).
        self.uniqueId = uniqueId
        self.genreNo = genreNo
        # Display texts pulled from wordlist (may be "" when missing).
        self.name = name
        self.sub = sub
        self.detail = detail
|
||||
|
||||
|
||||
# endregion
|
||||
|
||||
# Loading all songs from musicinfo in an array
songs = []

for song in infos:
    # Guard: wordlist.bin may have failed to load (words is None); the
    # original code crashed iterating None inside findKeyInList. Fall back
    # to "" -- the same value findKeyInList returns for a missing entry.
    if words is not None:
        name = findKeyInList(
            list=words,
            key="key",
            keyValue="song_" + song["id"],
            value=language,
        )
        sub = findKeyInList(
            list=words,
            key="key",
            keyValue="song_sub_" + song["id"],
            value=language,
        )
        detail = findKeyInList(
            list=words,
            key="key",
            keyValue="song_detail_" + song["id"],
            value=language,
        )
    else:
        name = sub = detail = ""

    songs.append(
        Song(
            id=song["id"],
            uniqueId=song["uniqueId"],
            genreNo=song["genreNo"],
            name=name,
            sub=sub,
            detail=detail,
        )
    )

# Preparing the json file containing the results of this checking script
initCheckFile()
|
||||
|
||||
# Checking...
# Walk every song once and fill the per-datatable sections of checkFile.
for song in songs:
    # musicinfo.json
    if infos is not None:
        # Checking for too high of an id (hard limit: uniqueIds must stay <= 1599)
        if song.uniqueId > 1599:
            checkFile["musicinfo.json"]["UniqueIdTooHigh"] += 1
            checkFile["musicinfo.json"]["UniqueIdTooHighList"].append(
                {
                    "id": song.id,
                    "uniqueId": song.uniqueId,
                }
            )
        # Listing genres and counting entries for each genre (one entry per
        # distinct genreNo, counted on first sight).
        genre = {
            "GenreNo": song.genreNo,
            "Name": Genres(song.genreNo).name,
            "NumberofSongs": 0,
        }
        if (
            findKeyInList(
                list=checkFile["musicinfo.json"]["GenreNoList"],
                key="GenreNo",
                keyValue=song.genreNo,
            )
            is None
        ):
            genre["NumberofSongs"] = len(
                findAllObjects(list=infos, key="genreNo", keyValue=song.genreNo)
            )
            checkFile["musicinfo.json"]["GenreNoList"].append(genre)
        # Search doublons (duplicate ids)
        if findDoubloninList(list=infos, key="id", keyValue=song.id):
            if song.id not in checkFile["musicinfo.json"]["DoublonsList"]:
                checkFile["musicinfo.json"]["Doublons"] += 1
                checkFile["musicinfo.json"]["DoublonsList"].append(song.id)

    # music_usbsetting.json
    if usb is not None:
        # Check for missing uniqueIds or id and uniqueId mismatches
        orderOccurences = findAllObjects(list=usb, key="id", keyValue=song.id)
        if len(orderOccurences) == 0:
            checkFile["music_usbsetting.json"]["Missing"] += 1
            checkFile["music_usbsetting.json"]["MissingList"].append(song.id)
        else:
            for occurence in orderOccurences:
                if not all(
                    [song.id == occurence["id"], song.uniqueId == occurence["uniqueId"]]
                ):
                    # Bug fix: MismatchList holds dicts, so the original
                    # `song.id not in MismatchList` was always True and could
                    # record the same song several times. Compare ids instead.
                    if not any(
                        m["id"] == song.id
                        for m in checkFile["music_usbsetting.json"]["MismatchList"]
                    ):
                        checkFile["music_usbsetting.json"]["Mismatch"] += 1
                        checkFile["music_usbsetting.json"]["MismatchList"].append(
                            {
                                "id": song.id,
                                "ExpectedUniqueId": song.uniqueId,
                                "CurrentUniqueId": occurence["uniqueId"],
                            }
                        )

        # Search doublons
        if findDoubloninList(list=usb, key="id", keyValue=song.id):
            if song.id not in checkFile["music_usbsetting.json"]["DoublonsList"]:
                checkFile["music_usbsetting.json"]["Doublons"] += 1
                checkFile["music_usbsetting.json"]["DoublonsList"].append(song.id)

    # music_attribute.json
    if attributes is not None:
        # Check for missing uniqueIds or id and uniqueId mismatches
        orderOccurences = findAllObjects(list=attributes, key="id", keyValue=song.id)
        if len(orderOccurences) == 0:
            checkFile["music_attribute.json"]["Missing"] += 1
            checkFile["music_attribute.json"]["MissingList"].append(song.id)
        else:
            for occurence in orderOccurences:
                if not all(
                    [song.id == occurence["id"], song.uniqueId == occurence["uniqueId"]]
                ):
                    # Bug fix: compare ids, not str-vs-dict (see usb section).
                    if not any(
                        m["id"] == song.id
                        for m in checkFile["music_attribute.json"]["MismatchList"]
                    ):
                        checkFile["music_attribute.json"]["Mismatch"] += 1
                        checkFile["music_attribute.json"]["MismatchList"].append(
                            {
                                "id": song.id,
                                "ExpectedUniqueId": song.uniqueId,
                                "CurrentUniqueId": occurence["uniqueId"],
                            }
                        )
        if findDoubloninList(list=attributes, key="id", keyValue=song.id):
            if song.id not in checkFile["music_attribute.json"]["DoublonsList"]:
                checkFile["music_attribute.json"]["Doublons"] += 1
                checkFile["music_attribute.json"]["DoublonsList"].append(song.id)

    # music_order.json
    if order is not None:
        # Check for missing uniqueIds or id and uniqueId mismatches.
        # A song legitimately appears several times in music_order (once per
        # genre it is listed under), so occurrences are not doublons here.
        orderOccurences = findAllObjects(list=order, key="id", keyValue=song.id)
        if len(orderOccurences) == 0:
            checkFile["music_order.json"]["Missing"] += 1
            checkFile["music_order.json"]["MissingList"].append(song.id)
        else:
            songGenres = []
            for occurence in orderOccurences:
                songGenres.append(occurence["genreNo"])
                if not all(
                    [song.id == occurence["id"], song.uniqueId == occurence["uniqueId"]]
                ):
                    # Bug fix: compare ids, not str-vs-dict (see usb section).
                    if not any(
                        m["id"] == song.id
                        for m in checkFile["music_order.json"]["MismatchList"]
                    ):
                        checkFile["music_order.json"]["Mismatch"] += 1
                        checkFile["music_order.json"]["MismatchList"].append(
                            {
                                "id": song.id,
                                "ExpectedUniqueId": song.uniqueId,
                                "CurrentUniqueId": occurence["uniqueId"],
                            }
                        )

            # Counting unique entries
            checkFile["music_order.json"]["UniqueEntries"] += 1
            checkFile["music_order.json"]["UniqueEntriesList"].append(
                {
                    song.id: songGenres,
                }
            )

    # wordlist.json -- flag songs with missing display texts.
    if words is not None:
        if song.name == "":
            checkFile["wordlist.json"]["MissingSongName"] += 1
            checkFile["wordlist.json"]["MissingSongNameList"].append(song.id)
        if song.sub == "":
            checkFile["wordlist.json"]["MissingSongSub"] += 1
            checkFile["wordlist.json"]["MissingSongSubList"].append(song.id)
        if song.detail == "":
            checkFile["wordlist.json"]["MissingSongDetail"] += 1
            checkFile["wordlist.json"]["MissingSongDetailList"].append(song.id)

    # Gamefiles -- verify the sound bank and fumen folder exist on disk.
    if not doesPathExist("./Data/x64/sound/" + "song_" + song.id + ".nus3bank"):
        checkFile["GameFiles"]["MissingSound"] += 1
        checkFile["GameFiles"]["MissingSoundList"].append(song.id)
    if not doesPathExist("./Data/x64/fumen/" + song.id):
        checkFile["GameFiles"]["MissingFumen"] += 1
        checkFile["GameFiles"]["MissingFumenList"].append(song.id)
|
||||
|
||||
# Checking for vacant uniqueIds
for i in range(max(checkFile["musicinfo.json"]["MaxId"], 1600)):
    key = findKeyInList(list=infos, key="uniqueId", keyValue=i)

    if key is not None:
        # Updating GenreNoList of music_order.json.
        # Bug fix: guard on order being loaded -- the original iterated
        # findAllObjects(list=None, ...) and read the absent
        # "music_order.json" section, crashing when music_order.bin
        # failed to load.
        if order is not None:
            for song in findAllObjects(
                list=order, key="uniqueId", keyValue=key["uniqueId"]
            ):
                genre = {
                    "GenreNo": song["genreNo"],
                    "Name": Genres(song["genreNo"]).name,
                    "NumberofSongs": 0,
                }
                if (
                    findKeyInList(
                        list=checkFile["music_order.json"]["GenreNoList"],
                        key="GenreNo",
                        keyValue=song["genreNo"],
                    )
                    is None
                ):
                    genre["NumberofSongs"] = len(
                        findAllObjects(
                            list=order, key="genreNo", keyValue=song["genreNo"]
                        )
                    )
                    checkFile["music_order.json"]["GenreNoList"].append(genre)
    else:
        # Finding unused ids below 1599
        if i < 1600:
            checkFile["musicinfo.json"]["UnusedUniqueIds"] += 1
            checkFile["musicinfo.json"]["UnusedUniqueIdsList"].append(i)
|
||||
|
||||
# Checking for doublons (duplicate keys) in wordlist.
# Count every key once so the scan is O(n) instead of the original
# O(n^2) full rescan per word; output order is unchanged because keys
# are still appended at their first occurrence.
if words is not None:
    keyCounts = {}
    for word in words:
        keyCounts[word["key"]] = keyCounts.get(word["key"], 0) + 1
    for word in words:
        if keyCounts[word["key"]] > 1:
            if word["key"] not in checkFile["wordlist.json"]["DoublonsList"]:
                checkFile["wordlist.json"]["Doublons"] += 1
                checkFile["wordlist.json"]["DoublonsList"].append(word["key"])
|
||||
|
||||
# Sorting some values for better readability
checkFile["musicinfo.json"]["GenreNoList"].sort(
    key=lambda x: x["GenreNo"], reverse=False
)
# Bug fix: the "music_order.json" section only exists when music_order.bin
# loaded; sorting it unconditionally raised a KeyError otherwise.
if "music_order.json" in checkFile:
    checkFile["music_order.json"]["GenreNoList"].sort(
        key=lambda x: x["GenreNo"], reverse=False
    )

# Writing everything to checks.json (tab-indented, non-ASCII kept readable)
json_object = json.dumps(checkFile, ensure_ascii=False, indent="\t")
with open("./checks.json", "w", encoding="utf8") as outfile:
    outfile.write(json_object)
print("Wrote checks.\n")
|
134
encryption.py
Normal file
134
encryption.py
Normal file
@ -0,0 +1,134 @@
|
||||
import gzip
|
||||
import os
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from argparse import ArgumentParser
|
||||
from enum import Enum
|
||||
import binascii
|
||||
|
||||
|
||||
class Keys(Enum):
    """AES key material per file family, as hex strings.

    Values are fed to binascii.unhexlify() by encrypt_file/decrypt_file,
    so they must be hex-encoded. Left empty on purpose: users supply
    their own keys (see README).
    """

    Datatable = "" # Add datatable key here
    Fumen = "" # Add Fumen key here
|
||||
|
||||
|
||||
def read_iv_from_file(file_path):
    """Return the 16-byte AES IV stored at the start of *file_path*.

    Raises ValueError when the file holds fewer than 16 bytes (i.e. is not
    a valid encrypted file). ValueError subclasses Exception, so callers
    that caught the previous generic ``Exception`` still work.
    """
    with open(file_path, "rb") as f:
        iv = f.read(16)
        if len(iv) != 16:
            raise ValueError(f"Invalid file: {file_path!r} is shorter than 16 bytes")
        return iv
|
||||
|
||||
|
||||
def pad_data(data):
    """Return *data* with PKCS#7 padding applied (128-bit / AES block size)."""
    pkcs7_padder = padding.PKCS7(128).padder()
    padded_body = pkcs7_padder.update(data)
    return padded_body + pkcs7_padder.finalize()
|
||||
|
||||
|
||||
def remove_pkcs7_padding(data):
    """Strip PKCS#7 padding (128-bit / AES block size) from *data*."""
    pkcs7_unpadder = padding.PKCS7(128).unpadder()
    body = pkcs7_unpadder.update(data)
    return body + pkcs7_unpadder.finalize()
|
||||
|
||||
|
||||
def decrypt_file(input_file, key_type: Keys = Keys.Datatable):
    """Decrypt *input_file* and return its payload as a str.

    File layout: 16-byte IV, then the AES-128-CBC ciphertext of a
    PKCS7-padded gzip stream. The default key is the Datatable key
    (the old default ``Keys(Keys.Datatable)`` resolved to the same member).
    """
    # Convert the selected key from hex to bytes (the old
    # Keys(key_type.value).value round-trip was a no-op).
    key = binascii.unhexlify(key_type.value)

    # Read the IV from the first 16 bytes of the input file
    iv = read_iv_from_file(input_file)

    # Create an AES cipher object with CBC mode
    cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
    decryptor = cipher.decryptor()

    with open(input_file, "rb") as infile:
        # Skip the IV in the input file
        infile.seek(16)

        # Decrypt the file. Bug fix: finalize() was missing, so a
        # non-block-aligned ciphertext went undetected instead of raising.
        decrypted_data = decryptor.update(infile.read()) + decryptor.finalize()

    # Remove PKCS7 padding
    unpadded_data = remove_pkcs7_padding(decrypted_data)

    # Gzip decompress the data and return it decoded as text
    decompressed_data = gzip.decompress(unpadded_data)
    return decompressed_data.decode()
|
||||
|
||||
|
||||
def encrypt_file(input_file, key_type: Keys = Keys.Datatable):
    """Gzip-compress then AES-128-CBC-encrypt *input_file*.

    Returns the random 16-byte IV followed by the PKCS7-padded ciphertext,
    i.e. exactly the layout decrypt_file() expects. The default key is the
    Datatable key (same member the old ``Keys(Keys.Datatable)`` resolved to).
    """
    # Convert the selected key from hex to bytes (the old
    # Keys(key_type.value).value round-trip was a no-op).
    key = binascii.unhexlify(key_type.value)

    # Generate a random 128-bit IV
    iv = os.urandom(16)

    # Create an AES cipher object with CBC mode
    cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
    encryptor = cipher.encryptor()

    with open(input_file, "rb") as infile:
        # Read the entire file into memory
        data = infile.read()

    # Gzip compress the data
    compressed_data = gzip.compress(data)

    # Pad the compressed data, encrypt it, and prepend the IV so the
    # decryptor can recover it.
    encrypted_data = (
        encryptor.update(pad_data(compressed_data)) + encryptor.finalize()
    )

    return iv + encrypted_data
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--input",
|
||||
help="Input file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
"--output",
|
||||
help="Output file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--enc",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Use this flag to encrypt a file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
"--type",
|
||||
default="Datatable",
|
||||
help="Datatable is default, you can also use Fumen",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.input:
|
||||
print("Missing input file, pass the argument --help for help")
|
||||
exit(0)
|
||||
|
||||
if not args.output:
|
||||
print("Missing output file, pass the argument --help for help")
|
||||
exit(0)
|
||||
|
||||
type = Keys.Datatable if args.type == "Datatable" else Keys.Fumen
|
||||
|
||||
if not args.enc:
|
||||
print("Encrypting " + args.input + " to " + args.output)
|
||||
file = decrypt_file(input_file=args.input, key_type=type)
|
||||
with open(args.output, "w") as outfile:
|
||||
outfile.write(file)
|
||||
else:
|
||||
print("Decrypting " + args.input + " to " + args.output)
|
||||
file = encrypt_file(input_file=args.input, key_type=type)
|
||||
with open(args.output, "wb") as outfile:
|
||||
outfile.write(file)
|
Loading…
Reference in New Issue
Block a user