# Taiko/tooling/makeDatabases.py
import glob
import json
import os
import shutil
from encryption import encrypt_file
from helpers import fetchKey, is_cjk
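# Note: "encryption" and "helpers" are assumed to be sibling modules of this script
# (they provide encrypt_file, fetchKey and is_cjk); they are not third-party packages.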
translationFixes = [
{
"key": "folder_event1",
"japaneseText": "東方Project特集",
"englishUsText": "Touhou Project",
},
{
"key": "folder_intro_event1",
"japaneseText": "東方Projectアレンジの曲をあつめたよ",
"englishUsText": "A collection of Touhou Project songs!",
},
{
"key": "folder_event2",
"japaneseText": "アイドルマスター特集",
"englishUsText": "The Idolmaster",
},
{
"key": "folder_intro_event2",
"japaneseText": "東方Projectアレンジの曲をあつめたよ",
"englishUsText": "A collection of songs from The Idolmaster!",
},
{
"key": "folder_event5",
"japaneseText": "スタジオジブリ特集",
"englishUsText": "Studio Ghibli",
},
{
"key": "folder_intro_event5",
"japaneseText": "東方Projectアレンジの曲をあつめたよ",
"englishUsText": "A collection of Studio Ghibli songs!",
},
{
"key": "folder_event6",
"japaneseText": "妖怪ウォッチ特集",
"englishUsText": "Yokai Watch",
},
{
"key": "folder_intro_event6",
"japaneseText": "東方Projectアレンジの曲をあつめたよ",
"englishUsText": "A collection of Yokai Watch songs!",
},
{
"key": "folder_event7",
"japaneseText": "UUUMクリエイター特集",
"englishUsText": "UUUM Creator Feature",
},
{
"key": "folder_intro_event7",
"japaneseText": "「#コンパス」の曲をあつめたよ!",
"englishUsText": "A collection of songs from UMMM!",
},
{
"key": "folder_event12",
"japaneseText": "#コンパス特集",
"englishUsText": "#Compass Creator Feature",
},
{
"key": "folder_intro_event12",
"japaneseText": "「#コンパス」の曲をあつめたよ!",
"englishUsText": "A collection of songs from the game #Compass!",
},
]
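# Only "key" and "englishUsText" from these fixes are applied to the wordlist (see the
# fetchKey loop near the end of the wordlist region); the "japaneseText" values here are
# informational and are never written back by this script.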
# region Loading json files
# Loading final song list ======================================================================
finalList = json.load(open("./temp/finalList.json", encoding="utf-8"))
# Loading wordlists ============================================================================
wordlist = json.load(open("./Data_decrypted/wordlist.json", encoding="utf-8"))
omni_wordlist_en = json.load(open("../08.18 & CHN/gamefiles/Omni/wordlist_en.json", encoding="utf-8"))
omni_wordlist_jp = json.load(open("../08.18 & CHN/gamefiles/Omni/wordlist_jp.json", encoding="utf-8"))
music_attributes = json.load(open("./Data_decrypted/music_attribute.json", encoding="utf-8"))
omni_music_attributes = json.load(open("../08.18 & CHN/gamefiles/Omni/music_attribute.json", encoding="utf-8"))
# Loading music_order ====================================================
music_orders = json.load(open("./Data_decrypted/music_order.json", encoding="utf-8"))
omni_music_orders = {"items": []}
for item in json.load(open("../08.18 & CHN/gamefiles/Omni/music_order.json", encoding="utf-8"))["items"]:
if item["genreNo"] >= 6:
item["genreNo"] -= 1
omni_music_orders["items"].append(item)
# Loading music_ai_section =============================================
music_ai_section = json.load(open("./Data_decrypted/music_ai_section.json", encoding="utf-8"))
# Loading musicinfo ====================================================
musicinfos = json.load(open("./Data_decrypted/musicinfo.json", encoding="utf-8"))
omni_musicinfos = {"items": []}
for item in json.load(open("../08.18 & CHN/gamefiles/Omni/musicinfo.json", encoding="utf-8"))["items"]:
if item["genreNo"] >= 6:
item["genreNo"] -= 1
omni_musicinfos["items"].append(item)
# endregion
# region Game files
###################
###################
#### endregion ####
# region musicinfo.json, music_usbsetting.json, music_attributes.json, music_ai_section.json.
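# Overview: every song in finalList is looked up in the stock 39.06 musicinfo; missing entries
# are pulled from the Omnimix datatables, uniqueIds above 1599 are remapped into unused slots
# below that cap, and the resulting musicinfo, music_usbsetting, music_attribute and
# music_ai_section tables are written out to Data_exported.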
for newentry in finalList["songs"]:
    # try to find the final-list entry in the stock 39.06 musicinfo file
entry = next((item for item in musicinfos["items"] if item["id"] == newentry["id"]), None)
# if we find nothing that means the song is from omnimix
if entry is None:
# we get the musicinfo entry from the omnimix files and append it to the 39.06 file.
omni_entry = next((item for item in omni_musicinfos["items"] if item["id"] == newentry["id"]), None)
omni_entry["spikeOnEasy"] = 0
omni_entry["spikeOnNormal"] = 0
omni_entry["spikeOnHard"] = 0
omni_entry["spikeOnOni"] = 0
omni_entry["spikeOnUra"] = 0
musicinfos["items"].append(omni_entry)
# we build the list of unused uniqueIds at or below 1599, scanning up to the highest uniqueId currently in use.
higher = 0
usedUniqueIds = []
for song in musicinfos["items"]:
uniqueId = song["uniqueId"]
    # track the highest uniqueId in use
usedUniqueIds.append(uniqueId)
if uniqueId >= higher:
higher = uniqueId
unusedList = []
overLimitList = []
for i in range(higher):
if all([i not in usedUniqueIds, i <= 1599]):
unusedList.append(i)
if all([i in usedUniqueIds, i > 1599]):
overLimitList.append(i)
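# unusedList now holds the free uniqueIds at or below 1599 (scanned up to the highest id in
# use), while overLimitList holds the in-use uniqueIds above 1599 that need remapping.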
# we then remap all songs above uniqueId 1599 using the list of unused uniqueIds below 1599.
unusedIndex = 0
remapJson = {"items": []}
for song in musicinfos["items"]:
if song["uniqueId"] > 1599:
if len(unusedList) > 0:
if unusedIndex < len(unusedList):
remapJson["items"].append(
{"id": song["id"], "uniqueIdOriginal": song["uniqueId"], "uniqueIdRemap": unusedList[unusedIndex]}
)
song["uniqueId"] = unusedList[unusedIndex]
unusedIndex += 1
music_ai_section["items"].append(
{
"id": song["id"],
"uniqueId": song["uniqueId"],
"easy": 5,
"normal": 5,
"hard": 5,
"oni": 5,
"ura": 3,
"oniLevel11": "",
"uraLevel11": "",
},
)
else:
print("Couldn't remap " + song["id"])
else:
print("Couldn't remap " + song["id"])
print("Remapped " + str(len(remapJson["items"])) + " songs")
json_object = json.dumps(remapJson["items"], indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/remap.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
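# remap.json records every moved song as {id, uniqueIdOriginal, uniqueIdRemap}; it is written
# alongside the other decrypted tables but skipped by the encryption pass below, presumably
# because it is consumed as plain JSON by other tooling.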
# we generate the music_ai_section file
music_ai_section["items"].sort(key=lambda x: x["uniqueId"], reverse=False)
json_object = json.dumps(music_ai_section, indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/music_ai_section.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
# we generate the music_usbsetting file
usbsettingjson = {"items": []}
musicinfos["items"].sort(key=lambda x: x["uniqueId"], reverse=False)
for song in musicinfos["items"]:
usbsetting = {"id": song["id"], "uniqueId": int(song["uniqueId"]), "usbVer": ""}
usbsettingjson["items"].append(usbsetting)
json_object = json.dumps(usbsettingjson, indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/music_usbsetting.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
# we generate the music_attribute file
for song in musicinfos["items"]:
entry = next((item for item in music_attributes["items"] if item["id"] == song["id"]), None)
# if we find nothing that means the song is from omnimix
if entry is None:
        # we get the music_attribute entry from the omnimix files and append it to the 39.06 file.
omni_entry = next((item for item in omni_music_attributes["items"] if item["id"] == song["id"]), None)
omni_entry["uniqueId"] = song["uniqueId"]
omni_entry["ensoPartsID1"] = 0
omni_entry["ensoPartsID2"] = 0
del omni_entry["canPlayUra"]
music_attributes["items"].append(omni_entry)
music_attributes["items"].sort(key=lambda x: x["uniqueId"], reverse=False)
json_object = json.dumps(music_attributes, indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/music_attribute.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
musicinfos["items"].sort(key=lambda x: x["uniqueId"], reverse=False)
json_object = json.dumps(musicinfos, indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/musicinfo.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
print("Wrote musicinfo, music_attribute and music_usbsetting.\n")
# endregion
# region wordlist.json
for song in finalList["songs"]:
songKey = "song_" + song["id"]
songSubKey = "song_sub_" + song["id"]
    songDetailKey = "song_detail_" + song["id"]
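    # Wordlist key convention: "song_<id>" holds the title, "song_sub_<id>" the subtitle and
    # "song_detail_<id>" the detail text; titles come from finalList, while empty sub/detail
    # entries are back-filled from the Omnimix English wordlist.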
# song entry
entry = next((item for item in wordlist["items"] if item["key"] == songKey), None)
if entry is not None:
if entry["japaneseText"] != "":
            pass
else:
print(songKey, "is already in the wordlist but has an empty string.")
entry["japaneseText"] = song["nameJp"]
entry["englishUsText"] = song["nameUs"]
entry["englishUsFontType"] = 1 if not is_cjk(song["nameUs"]) else 0
else:
print(songKey, "has been added to the wordlist.")
wordlist["items"].append(
{
"key": songKey,
"japaneseText": song["nameJp"],
"japaneseFontType": 0,
"englishUsText": song["nameUs"],
"englishUsFontType": 1 if not is_cjk(song["nameUs"]) else 0,
},
)
# song sub entry
entry = next((item for item in wordlist["items"] if item["key"] == songSubKey), None)
if entry is not None:
if entry["japaneseText"] != "":
            pass
else:
subentry = next(
(item for item in omni_wordlist_en["items"] if item["key"] == songSubKey),
{"japaneseText": ""},
)["japaneseText"]
if subentry != "":
print(songKey, "sub is already in the wordlist but has an empty string.")
entry["japaneseText"] = subentry
entry["englishUsText"] = subentry
entry["englishUsFontType"] = 1 if not is_cjk(subentry) else 0
else:
subentry = next(
(item for item in omni_wordlist_en["items"] if item["key"] == songSubKey),
{"japaneseText": ""},
)["japaneseText"]
wordlist["items"].append(
{
"key": songSubKey,
"japaneseText": subentry,
"japaneseFontType": 0,
"englishUsText": subentry,
"englishUsFontType": 1 if not is_cjk(subentry) else 0,
},
)
if subentry != "":
print(songSubKey, "has been added to the wordlist.")
# song detail entry
entry = next((item for item in wordlist["items"] if item["key"] == songDetailKey), None)
if entry is not None:
if entry["japaneseText"] != "":
            pass
else:
detailentry = next(
(item for item in omni_wordlist_en["items"] if item["key"] == songDetailKey),
{"japaneseText": ""},
)["japaneseText"]
if detailentry != "":
print(songKey, "detail is already in the wordlist but has an empty string.")
entry["japaneseText"] = detailentry
entry["englishUsText"] = detailentry
entry["englishUsFontType"] = 1 if not is_cjk(detailentry) else 0
else:
detailentry = next(
(item for item in omni_wordlist_en["items"] if item["key"] == songDetailKey),
{"japaneseText": ""},
)["japaneseText"]
if detailentry != "":
print(songDetailKey, "has been added to the wordlist.")
wordlist["items"].append(
{
"key": songDetailKey,
"japaneseText": detailentry,
"japaneseFontType": 0,
"englishUsText": detailentry,
"englishUsFontType": 1 if not is_cjk(detailentry) else 0,
},
)
print("Processed wordlist.\n")
# endregion
# region music_order.json
# closedisptype in music_order
# 1 to show subtitle
# 0 to show title
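# The englishUsText copied onto each music_order entry below is only used to sort entries by
# genre and English title; the commented-out block after the sort shows where it could be
# stripped again before writing.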
for song in music_attributes["items"]:
    # try to find this song in the stock 39.06 music_order file
entry = next((item for item in music_orders["items"] if item["id"] == song["id"]), None)
name = next((item for item in wordlist["items"] if item["key"] == "song_" + song["id"]), {"englishUsText": ""})
if name["englishUsText"] == "" and song["id"] != "tmap4":
name["englishUsText"] = name["japaneseText"]
name["englishUsFontType"] = 1 if not is_cjk(name["japaneseText"]) else 0
print("Missing title for", name["key"])
# if we find nothing that means the song is from omnimix
if entry is None:
if song["id"] != "tmap4":
for omniEntry in omni_music_orders["items"]:
if omniEntry["id"] == song["id"]:
omniEntry["uniqueId"] = song["uniqueId"]
omniEntry["englishUsText"] = name["englishUsText"]
music_orders["items"].append(omniEntry)
continue
else:
for entry in music_orders["items"]:
if entry["id"] == song["id"]:
entry["englishUsText"] = name["englishUsText"]
# Writing music_order ===============================================================================
# ordering music_order by genre and english name
music_orders["items"].sort(key=lambda x: (x["genreNo"], x["englishUsText"]))
# removing the names from the dict
# for items in music_orders["items"]:
# if "englishUsText" in items:
# del items["englishUsText"]
# writing the music order
json_object = json.dumps(music_orders, indent="\t", ensure_ascii=False)
with open("./Data_exported/Data_mods/x64/datatable/dec/music_order.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
print("Wrote music_order.\n")
# wordlist["items"].sort(key=lambda x: x["key"], reverse=False)
# removing unused languages from the dict
for items in wordlist["items"]:
if "koreanText" in items:
del items["koreanText"]
del items["koreanFontType"]
# # if "chineseTText" in items:
# # del items["chineseTText"]
# # del items["chineseTFontType"]
# Fixing missing translations for the event folder names
for entry in translationFixes:
key = fetchKey(key=entry["key"], wordlist=wordlist)
key["englishUsText"] = entry["englishUsText"]
key["englishUsFontType"] = 1
print(key["japaneseText"], "->", key["englishUsText"])
# exporting the wordlist.
json_object = json.dumps(wordlist, ensure_ascii=False, indent="\t")
with open("./Data_exported/Data_mods/x64/datatable/dec/wordlist.json", "w", encoding="utf8") as outfile:
outfile.write(json_object)
print("Wrote wordlist.\n")
# endregion
# region Encrypting databases
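# Every JSON in the dec/ folder is encrypted to a .bin in the parent datatable folder using the
# project's encrypt_file helper; remap.json is the only table deliberately left unencrypted.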
files = glob.glob("./Data_exported/Data_mods/x64/datatable/dec/*")
for f in files:
    out_name = os.path.splitext(os.path.basename(f))[0] + ".bin"
    out_path = os.path.join("./Data_exported/Data_mods/x64/datatable/", out_name)
    if out_name != "remap.bin":
        print("Encrypting " + f + " to " + out_path)
        encrypted = encrypt_file(input_file=f)
        with open(out_path, "wb") as outfile:
            outfile.write(encrypted)
print("Encrypted Datatables.\n")
# endregion
# region Writing server files
#############################
ServerFolderSongsPerType = 20
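# ServerFolderSongsPerType caps how many songs of each category (Omnimix / regular) are pushed
# into the server-side recommended folder below.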
######## endregion ##########
# region event_folder_data.json
playcounts = json.load(open(file="./temp/listPlays.json", encoding="utf-8"))
eventfolders = json.load(open(file="./Data_decrypted/Server/event_folder_data.json", encoding="utf-8"))
musicinfo = json.load(open(file="./Data_exported/Data_mods/x64/datatable/dec/musicinfo.json", encoding="utf-8"))
# The recommended-songs folder is eventfolders[2]; its song list is stored under "songNo".
MostPlayedList = []
for song in playcounts["Omnimix"]:
song["Omni"] = True
if song not in MostPlayedList:
MostPlayedList.append(song)
if len(MostPlayedList) > ServerFolderSongsPerType:
break
for song in playcounts["Regular"]:
song["Omni"] = False
if song not in MostPlayedList:
MostPlayedList.append(song)
if len(MostPlayedList) > ServerFolderSongsPerType * 2 - 1:
break
MostPlayedList = sorted(MostPlayedList, key=lambda item: item["plays"], reverse=True)
MostPlayedArray = []
for song in MostPlayedList:
    info = next((item for item in musicinfo["items"] if item["id"] == song["id"]), None)
    if info is None:
        print("Not found in musicinfo:", song["id"])
    else:
        print(str(song["plays"]).zfill(3), "=>", "O" if song["Omni"] else "R", "=>", song["id"], "=>", song["nameUs"])
        MostPlayedArray.append(info["uniqueId"])
print("Exported", len(MostPlayedArray), "songs")
eventfolders[2]["songNo"] = MostPlayedArray
eventfoldersDump = json.dumps(eventfolders, indent=4, ensure_ascii=False)
with open("./Data_exported/Server/wwwroot/data/event_folder_data.json", "w", encoding="utf8") as outfile:
outfile.write(eventfoldersDump)
print("Wrote event_folder_data.\n")
# endregion
# region shop_folder_data.json
with open("./Data_exported/Server/wwwroot/data/shop_folder_data.json", "w", encoding="utf8") as outfile:
outfile.write(json.dumps([], indent=4, ensure_ascii=False))
print("Wrote shop_folder_data.\n")
# endregion
# region movie_data.json
with open("./Data_exported/Server/wwwroot/data/movie_data.json", "w", encoding="utf8") as outfile:
outfile.write(json.dumps([{"movie_id": 20, "enable_days": 999}], indent=4, ensure_ascii=False))
print("Wrote movie_data.\n")
# endregion