mirror of
https://github.com/cainan-c/TaikoPythonTools.git
synced 2024-11-23 22:51:02 +01:00
Add Omnimix Tool
This commit is contained in:
parent
e18647c571
commit
b05fa1d3df
23
TaikoNijiiroOmnimixTool/README.md
Normal file
23
TaikoNijiiroOmnimixTool/README.md
Normal file
@ -0,0 +1,23 @@
|
||||
# Taiko no Tatsujin - Omnimix Creation Tool
|
||||
|
||||
(Not so) Simple Python 3 scripts that find and add back missing/removed songs to newer versions of Taiko Nijiiro
|
||||
|
||||
Setup:
|
||||
Extract/Dump/Decrypt the following `datatable` files from your newest build of the game:
|
||||
`music_ai_section`, `music_attribute`, `music_order`, `music_usbsetting`, `musicinfo` and `wordlist` to the folder called `datatable`
|
||||
|
||||
Do the same but for the versions you want to extract songs from, and place them in their designated folders.
|
||||
Example: `musicinfo.json` from JPN00 will go in the `musicinfo` folder with the prefix `_JPN00`
|
||||
`musicinfo/musicinfo_JPN00.json` etc etc.
|
||||
|
||||
Edit `config.toml` to specify the paths to the games you're adding entries from, along with an output folder.
|
||||
|
||||
Once everything is properly defined, run `_run.py`. If everything is properly set up, two folders should appear in your output folder:
|
||||
`sound` and `datatable`
|
||||
As `fumen` files are always present for removed songs, we do not need to worry about them.
|
||||
|
||||
Assuming this is for newer releases, this tool also automatically handles encryption, so all that's needed is to just drag and drop your output folders onto the game.
|
||||
|
||||
As always, make sure to backup your files before modification.
|
||||
|
||||
This should support every version of Taiko Nijiiro that uses encryption; it also handles adding `music_ai_section` entries to new songs.
|
34
TaikoNijiiroOmnimixTool/_run.py
Normal file
34
TaikoNijiiroOmnimixTool/_run.py
Normal file
@ -0,0 +1,34 @@
|
||||
import subprocess
import sys
|
||||
|
||||
def run_script(script_name):
    """Run `script_name` as a child process and fail loudly on error.

    Raises subprocess.CalledProcessError (after printing it) when the
    script exits non-zero, so the caller can abort remaining pipeline steps.
    """
    try:
        # sys.executable instead of the bare "python" string: works even
        # when the interpreter is not on PATH under that name (py launcher,
        # python3-only installs, virtualenvs).
        subprocess.run([sys.executable, script_name], check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error running {script_name}: {e}")
        raise
|
||||
|
||||
if __name__ == "__main__":
    # Ordered pipeline: (status message, script to run). Each step depends
    # on the artifacts of the previous one, so order matters.
    steps = [
        ("Merging musicinfo entries...", "musicinfo_merge.py"),
        ("Merging wordlist entries...", "wordlist_merge.py"),
        ("Copying audio to the specified output folder...", "copy.py"),
        ("Encrypting and copying merged datatable files...", "encrypt.py"),
    ]
    try:
        for message, script in steps:
            print(message)
            run_script(script)
        # Every step succeeded.
        print("Missing songs successfully added.\nPress Enter to Exit")
    except Exception as e:
        print(f"Error: {e}")

    input()  # Wait for user to press Enter before exiting
|
12
TaikoNijiiroOmnimixTool/config.toml
Normal file
12
TaikoNijiiroOmnimixTool/config.toml
Normal file
@ -0,0 +1,12 @@
|
||||
# Maps each game-version tag to the folder holding its extracted Data\x64 tree.
[game_origin_mapping]
JPN39 = "f:\\data\\S1210JPN39\\Data\\x64"
JPN08 = "f:\\data\\S1210JPN08\\Data\\x64"
JPN00 = "f:\\data\\S1210JPN00\\Data\\x64"
CHN00 = "f:\\data\\S1250CHN00\\Data\\x64"
# Add more mappings as needed

# Destination folder: the generated `sound` and `datatable` folders land here.
[output]
folder = "f:\\data\\out_3\\x64"

# AES key (hex string) used by encrypt.py for the datatable .bin files.
[key]
key = "3530304242323633353537423431384139353134383346433246464231354534"
|
48
TaikoNijiiroOmnimixTool/copy.py
Normal file
48
TaikoNijiiroOmnimixTool/copy.py
Normal file
@ -0,0 +1,48 @@
|
||||
import json
|
||||
import shutil
|
||||
import os
|
||||
import toml
|
||||
import logging
|
||||
|
||||
# Configure logging
# Timestamped INFO-level output used to report copied sound files.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Module-level logger shared by the functions below.
logger = logging.getLogger(__name__)
|
||||
|
||||
def copy_sound_file(song_id, source_folder, output_folder):
    """Copy sound/song_<id>.nus3bank from a source game dump to the output.

    Creates <output_folder>/sound on demand. Previously a missing source
    file was skipped silently; that is now logged as a warning so absent
    audio is visible to the user.
    """
    # Same logger object as the module-level `logger` (same module name).
    logger = logging.getLogger(__name__)

    # Source path for song_[id].nus3bank
    source_sound_file = os.path.join(source_folder, "sound", f"song_{song_id}.nus3bank")

    # Destination path in output_folder/sound
    destination_sound_file = os.path.join(output_folder, "sound", f"song_{song_id}.nus3bank")

    if os.path.exists(source_sound_file):
        os.makedirs(os.path.join(output_folder, "sound"), exist_ok=True)
        shutil.copy2(source_sound_file, destination_sound_file)

        # Log message based on game origin (last path component of the
        # source folder). NOTE(review): only JPN00/JPN08 copies are logged
        # in the original — presumably intentional; confirm.
        game_origin = os.path.basename(os.path.normpath(source_folder))
        if game_origin in ["JPN00", "JPN08"]:
            logger.info(f"Copied song_{song_id}.nus3bank from '{game_origin}'.")
    else:
        # Surface missing audio instead of silently skipping it.
        logger.warning(f"Missing sound file: {source_sound_file}")
|
||||
|
||||
def process_added_songs(json_file, config_file):
    """Copy the .nus3bank for every added song listed in `json_file`.

    `config_file` (TOML) supplies per-origin source folders under
    [game_origin_mapping] and the destination under [output].folder.
    Songs whose gameOrigin has no mapping are skipped.
    """
    with open(json_file, 'r') as handle:
        added_songs = json.load(handle)

    config = toml.load(config_file)
    destination = config['output']['folder']
    origin_mapping = config['game_origin_mapping']

    for song in added_songs:
        origin = song.get('gameOrigin')
        if origin not in origin_mapping:
            continue
        copy_sound_file(song.get('id'), origin_mapping[origin], destination)
|
||||
|
||||
# Specify the paths to your JSON and TOML files
json_file_path = 'added_songs.json'
config_file_path = 'config.toml'

# Guarded so importing this module no longer triggers the copy as a side
# effect; behavior when run as a script (the normal use via _run.py) is
# unchanged.
if __name__ == "__main__":
    # Call the function to process the added songs using the specified configuration
    process_added_songs(json_file_path, config_file_path)
|
73
TaikoNijiiroOmnimixTool/encrypt.py
Normal file
73
TaikoNijiiroOmnimixTool/encrypt.py
Normal file
@ -0,0 +1,73 @@
|
||||
import os
|
||||
import toml
|
||||
import gzip
|
||||
from Crypto.Cipher import AES
|
||||
from Crypto.Util.Padding import pad
|
||||
|
||||
def compress_file(input_file):
    """Gzip-compress `input_file` next to itself and return the .gz path.

    The output name replaces the original extension with '.gz'
    (e.g. musicinfo.json -> musicinfo.gz). Streams in 1 MiB chunks rather
    than reading the whole file into memory, so large datatables don't
    spike RAM; output is byte-for-byte a valid gzip of the input either way.
    """
    # Generate the output filename with .gz extension
    output_file = os.path.splitext(input_file)[0] + ".gz"

    # Compress the input file, chunk by chunk.
    with open(input_file, 'rb') as f_in, gzip.open(output_file, 'wb') as f_out:
        while chunk := f_in.read(1 << 20):
            f_out.write(chunk)

    print(f"Compression successful. Compressed file saved as: {output_file}")

    return output_file
|
||||
|
||||
def encrypt_file(input_file, output_folder, key, iv):
    """Compress `input_file`, AES-CBC-encrypt it, and write a .bin to `output_folder`.

    The output file is `iv` followed by the PKCS#7-padded ciphertext of the
    gzipped input; the intermediate .gz artifact is removed afterwards.
    """
    # Compress first; everything below operates on the .gz artifact.
    compressed_file = compress_file(input_file)

    with open(compressed_file, 'rb') as source:
        payload = source.read()

    # CBC mode with the caller-supplied IV; pad up to the AES block size.
    encryptor = AES.new(key, AES.MODE_CBC, iv)
    encrypted = encryptor.encrypt(pad(payload, AES.block_size))

    # <name>.gz -> <name>.bin in the output folder.
    base_name = os.path.splitext(os.path.basename(compressed_file))[0]
    output_path = os.path.join(output_folder, base_name + ".bin")

    # The game expects the IV prepended to the ciphertext.
    with open(output_path, 'wb') as target:
        target.write(iv + encrypted)

    print(f"Encryption successful. Encrypted file saved as: {output_path}")

    # Clean up the intermediate compressed file.
    os.remove(compressed_file)
    print(f"Removed the compressed file: {compressed_file}")
|
||||
|
||||
def main():
    """Encrypt every merged datatable JSON into <output>/datatable as .bin."""
    # Pull key and output settings from config.toml.
    with open("config.toml", "r") as handle:
        settings = toml.load(handle)

    # Key comes from the config as hex; the IV is fixed at sixteen 0xFF bytes.
    aes_key = bytes.fromhex(settings["key"]["key"])
    aes_iv = bytes.fromhex("FF" * 16)

    merged_folder = "datatable_merged"
    target_folder = os.path.join(settings["output"]["folder"], "datatable")

    # Make sure the destination exists before writing.
    os.makedirs(target_folder, exist_ok=True)

    # Encrypt each merged JSON datatable; skip anything else in the folder.
    for name in os.listdir(merged_folder):
        if not name.endswith(".json"):
            continue
        encrypt_file(os.path.join(merged_folder, name), target_folder, aes_key, aes_iv)


if __name__ == "__main__":
    main()
|
226
TaikoNijiiroOmnimixTool/musicinfo_merge.py
Normal file
226
TaikoNijiiroOmnimixTool/musicinfo_merge.py
Normal file
@ -0,0 +1,226 @@
|
||||
import os
|
||||
import json
|
||||
import glob
|
||||
from collections import OrderedDict
|
||||
|
||||
def load_json(file_path):
    """Load and return the JSON document at `file_path`.

    Reads with an explicit UTF-8 encoding so datatables containing
    non-ASCII song titles parse correctly regardless of the platform's
    default encoding (wordlist_merge.py already does this — consistency).
    """
    with open(file_path, 'r', encoding='utf-8') as file:
        return json.load(file)
|
||||
|
||||
def save_json(data, file_path):
    """Write `data` to `file_path` as pretty-printed (indent=4) JSON.

    Opens with an explicit UTF-8 encoding for cross-platform consistency
    (matches load_json and wordlist_merge.py). The json.dump default of
    ASCII-escaping is kept, so existing output bytes are unchanged.
    """
    with open(file_path, 'w', encoding='utf-8') as file:
        json.dump(data, file, indent=4)
|
||||
|
||||
def find_missing_items(original_items, newer_items):
    """Return items from `original_items` whose 'id' is absent from `newer_items`.

    Input order of `original_items` is preserved. Uses a set of ids — the
    previous dict kept whole items as values that were never read.
    """
    newer_item_ids = {item['id'] for item in newer_items}
    return [item for item in original_items if item['id'] not in newer_item_ids]
|
||||
|
||||
def remove_duplicate_entries(data):
    """Collapse entries sharing an 'id'.

    The LAST occurrence's data wins, positioned where the id FIRST
    appeared. A plain dict preserves insertion order on Python 3.7+, so
    OrderedDict is unnecessary.
    """
    seen = {}
    for entry in data:
        seen[entry['id']] = entry
    return list(seen.values())
|
||||
|
||||
def format_game_origin(source_file):
    """Derive the game-origin tag (e.g. 'JPN00') from a source file name.

    Relies on the convention that source file names end with a
    five-character origin code before the extension, e.g.
    'musicinfo_JPN00.json'.
    """
    stem, _ext = os.path.splitext(os.path.basename(source_file))
    return stem[-5:]
|
||||
|
||||
def merge_datasets(datatable_file, source_folder, output_folder):
    """Merge entries missing from `datatable_file` back in from older dumps.

    Loads the newest datatable JSON, walks every *.json in `source_folder`
    (in reversed glob order), appends any items whose 'id' is absent from
    the newest table, sorts by 'uniqueId', and writes the merged table
    into `output_folder` under the same file name.

    Returns a list of {'id', 'uniqueId', 'sourceFile'} dicts for every
    added entry (may contain duplicates across source files; the caller
    deduplicates).
    """
    try:
        newest_data = load_json(datatable_file)
        newer_items = newest_data.get('items', [])
    except Exception as e:
        print(f"Error loading data from {datatable_file}: {e}")
        return []

    source_files = glob.glob(os.path.join(source_folder, '*.json'))

    # Reverse the order of source_files
    # NOTE(review): glob order is filesystem/locale-dependent; the reverse
    # presumably prefers later version dumps first — confirm naming scheme.
    source_files.reverse()

    added_songs = []

    for source_file in source_files:
        try:
            original_data = load_json(source_file)
            original_items = original_data.get('items', [])
        except Exception as e:
            print(f"Error loading data from {source_file}: {e}")
            continue

        try:
            missing_items = find_missing_items(original_items, newer_items)
        except Exception as e:
            print(f"Error finding missing items: {e}")
            continue

        # Extending newer_items inside the loop means subsequent source
        # files only contribute ids still missing after this merge.
        newer_items.extend(missing_items)

        for item in missing_items:
            added_songs.append({
                "id": item['id'],
                "uniqueId": item['uniqueId'],
                "sourceFile": os.path.basename(source_file)
            })

    # Keep the table ordered by uniqueId, as the game data expects.
    newer_items.sort(key=lambda x: x.get('uniqueId', 0))

    newest_data['items'] = newer_items

    output_file_name = os.path.basename(datatable_file)
    output_file_path = os.path.join(output_folder, output_file_name)

    save_json(newest_data, output_file_path)

    # Report which ids were added (set: duplicates collapsed, order arbitrary).
    added_ids = {item['id'] for item in added_songs}
    if added_ids:
        print(f"Added Entries to {output_file_name}:")
        for entry_id in added_ids:
            print(entry_id)

    return added_songs
|
||||
|
||||
def update_music_ai_section(datatable_folder):
    """Ensure every song in musicinfo.json has a music_ai_section entry.

    For each (id, uniqueId) present in musicinfo.json but absent from
    music_ai_section.json, a default entry is synthesized from the star
    ratings; existing entries get the Level11 flags backfilled if missing.
    The file is re-sorted by uniqueId and saved in place.

    Returns the list of added (id, uniqueId) tuples, or [] on any error.
    """
    try:
        musicinfo_file = os.path.join(datatable_folder, 'musicinfo.json')
        music_ai_section_file = os.path.join(datatable_folder, 'music_ai_section.json')

        musicinfo_data = load_json(musicinfo_file)
        music_ai_section_data = load_json(music_ai_section_file)

        musicinfo_items = musicinfo_data.get('items', [])
        music_ai_section_items = music_ai_section_data.get('items', [])

        # Pairs already present, for O(1) membership checks below.
        existing_entries = {(item['id'], item['uniqueId']) for item in music_ai_section_items}

        added_entries = []

        for musicinfo_item in musicinfo_items:
            item_id = musicinfo_item['id']
            unique_id = musicinfo_item['uniqueId']

            if (item_id, unique_id) not in existing_entries:
                # Default AI-section values: 3 for charts below 6 stars,
                # 5 otherwise; the Level11 flags mark 10-star oni/ura
                # charts with "o".
                new_entry = {
                    "id": item_id,
                    "uniqueId": unique_id,
                    "easy": 3 if musicinfo_item.get('starEasy', 0) < 6 else 5,
                    "normal": 3 if musicinfo_item.get('starNormal', 0) < 6 else 5,
                    "hard": 3 if musicinfo_item.get('starHard', 0) < 6 else 5,
                    "oni": 3 if musicinfo_item.get('starMania', 0) < 6 else 5,
                    "ura": 3 if musicinfo_item.get('starUra', 0) < 6 else 5,
                    "oniLevel11": "o" if musicinfo_item.get('starMania', 0) == 10 else "",
                    "uraLevel11": "o" if musicinfo_item.get('starUra', 0) == 10 else ""
                }

                music_ai_section_items.append(new_entry)
                added_entries.append((item_id, unique_id))
            else:
                # Entry already exists: backfill Level11 flags if absent.
                # Linear scan per item — acceptable at datatable sizes.
                existing_entry = next(
                    (item for item in music_ai_section_items if item['id'] == item_id and item['uniqueId'] == unique_id),
                    None
                )
                if existing_entry:
                    if 'oniLevel11' not in existing_entry:
                        existing_entry['oniLevel11'] = "o" if musicinfo_item.get('starMania', 0) == 10 else ""
                    if 'uraLevel11' not in existing_entry:
                        existing_entry['uraLevel11'] = "o" if musicinfo_item.get('starUra', 0) == 10 else ""

        # Keep the table ordered by uniqueId, matching the other datatables.
        music_ai_section_items.sort(key=lambda x: x.get('uniqueId', 0))

        music_ai_section_data['items'] = music_ai_section_items

        save_json(music_ai_section_data, music_ai_section_file)

        if added_entries:
            print("Added Entries to music_ai_section.json:")
            for item_id, unique_id in added_entries:
                print(f"ID: {item_id}, UniqueID: {unique_id}")

        return added_entries

    except Exception as e:
        print(f"Error updating music_ai_section.json: {e}")
        return []
|
||||
|
||||
def update_music_usbsetting(datatable_merged_folder):
    """Ensure every song in the merged musicinfo.json has a music_usbsetting entry.

    Any (id, uniqueId) present in musicinfo.json but absent from
    music_usbsetting.json gets a stub entry with an empty usbVer. The file
    is re-sorted by uniqueId and saved in place.

    Returns the list of added (id, uniqueId) tuples, or [] on any error.
    """
    musicinfo_file_path = os.path.join(datatable_merged_folder, 'musicinfo.json')
    music_usbsetting_file_path = os.path.join(datatable_merged_folder, 'music_usbsetting.json')

    try:
        musicinfo_data = load_json(musicinfo_file_path)
        music_usbsetting_data = load_json(music_usbsetting_file_path)

        musicinfo_items = musicinfo_data.get('items', [])
        music_usbsetting_items = music_usbsetting_data.get('items', [])

        # Pairs already present, for O(1) membership checks below.
        existing_entries = {(item['id'], item['uniqueId']) for item in music_usbsetting_items}

        added_entries = []

        for musicinfo_item in musicinfo_items:
            item_id = musicinfo_item['id']
            unique_id = musicinfo_item['uniqueId']

            if (item_id, unique_id) not in existing_entries:
                # Stub entry; usbVer is left blank for songs added by this tool.
                new_entry = {
                    "id": item_id,
                    "uniqueId": unique_id,
                    "usbVer": ""
                }

                music_usbsetting_items.append(new_entry)
                added_entries.append((item_id, unique_id))

        # Keep the table ordered by uniqueId, matching the other datatables.
        music_usbsetting_items.sort(key=lambda x: x.get('uniqueId', 0))

        music_usbsetting_data['items'] = music_usbsetting_items

        save_json(music_usbsetting_data, music_usbsetting_file_path)

        if added_entries:
            print("Added Entries to music_usbsetting.json:")
            for item_id, unique_id in added_entries:
                print(f"ID: {item_id}, UniqueID: {unique_id}")

        return added_entries

    except Exception as e:
        print(f"Error updating music_usbsetting.json: {e}")
        return []
|
||||
|
||||
if __name__ == "__main__":
    # Folder holding the newest build's decrypted datatables.
    datatable_folder = 'datatable'
    # Each datatable name maps to the folder of older per-version dumps.
    source_folders = {
        'musicinfo': 'musicinfo',
        'music_order': 'music_order',
        'music_usbsetting': 'music_usbsetting',
        'music_attribute': 'music_attribute',
        'music_ai_section': 'music_ai_section'
    }
    output_folder = 'datatable_merged'
    added_songs_file = 'added_songs.json'

    os.makedirs(output_folder, exist_ok=True)

    all_added_songs = []

    for datatable_file, source_folder in source_folders.items():
        datatable_file_path = os.path.join(datatable_folder, f"{datatable_file}.json")

        added_songs = merge_datasets(datatable_file_path, source_folder, output_folder)
        all_added_songs.extend(added_songs)

    # Fix-ups on the merged output. The return values were never used, so
    # the previously dead `*_added` bindings are dropped.
    update_music_ai_section(output_folder)
    update_music_usbsetting(output_folder)

    # Remove duplicate entries and format gameOrigin
    all_added_songs_unique = remove_duplicate_entries(all_added_songs)
    for entry in all_added_songs_unique:
        entry['gameOrigin'] = format_game_origin(entry['sourceFile'])
        del entry['sourceFile']

    # os.path.join() with a single argument was a no-op; pass the path directly.
    save_json(all_added_songs_unique, added_songs_file)

    print(f"All added songs information saved to {added_songs_file}.")
|
74
TaikoNijiiroOmnimixTool/wordlist_merge.py
Normal file
74
TaikoNijiiroOmnimixTool/wordlist_merge.py
Normal file
@ -0,0 +1,74 @@
|
||||
import os
|
||||
import json
|
||||
|
||||
def load_json(file_path):
    """Read `file_path` as UTF-8 text and return the parsed JSON document."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
|
||||
|
||||
def save_json(data, file_path):
    """Write `data` to `file_path` as UTF-8 JSON.

    Pretty-printed with indent=4; non-ASCII characters (Japanese titles)
    are written literally rather than escaped.
    """
    with open(file_path, 'w', encoding='utf-8') as handle:
        json.dump(data, handle, indent=4, ensure_ascii=False)
|
||||
|
||||
def find_missing_items(original_items, newer_items):
    """Return items from `original_items` whose 'id' is absent from `newer_items`.

    Input order of `original_items` is preserved. Uses a set of ids — the
    previous dict kept whole items as values that were never read.
    (Kept identical to the musicinfo_merge.py twin for consistency.)
    """
    newer_item_ids = {item['id'] for item in newer_items}
    return [item for item in original_items if item['id'] not in newer_item_ids]
|
||||
|
||||
def remove_entries_with_keys(data, keys_to_remove):
    """Return `data` without entries whose 'key' is in `keys_to_remove`.

    `keys_to_remove` may be any iterable; it is materialized into a set so
    the filter is O(len(data)) instead of O(len(data) * len(keys)).
    """
    unwanted = set(keys_to_remove)
    return [entry for entry in data if entry['key'] not in unwanted]
|
||||
|
||||
def process_wordlist_files(wordlist_file, wordlist_folder, added_songs_file, output_folder):
    """Pull wordlist entries for every added song into the newest wordlist.

    For each song in `added_songs_file`: drop any stale song_* entries for
    that id from the newest wordlist, then copy the matching entries from
    wordlist_<gameOrigin>.json. The merged result is written to
    `output_folder`/wordlist.json.
    """
    try:
        added_songs_data = load_json(added_songs_file)
    except Exception as e:
        print(f"Error loading added songs data: {e}")
        return

    try:
        wordlist_data = load_json(wordlist_file)
    except Exception as e:
        print(f"Error loading wordlist data: {e}")
        return

    for added_song in added_songs_data:
        song_id = added_song['id']
        game_origin = added_song['gameOrigin']

        # Generate keys to identify entries to remove in wordlist.json
        # (title, subtitle and detail strings for this song id).
        keys_to_remove = [
            f"song_sub_{song_id}",
            f"song_detail_{song_id}",
            f"song_{song_id}"
        ]

        # Remove entries from wordlist.json based on keys
        wordlist_data['items'] = remove_entries_with_keys(wordlist_data['items'], keys_to_remove)

        # Load and process wordlist_[gameOrigin].json
        # NOTE(review): re-loaded once per added song; caching per origin
        # would avoid repeated parses of the same file.
        wordlist_game_file = os.path.join(wordlist_folder, f"wordlist_{game_origin}.json")
        try:
            wordlist_game_data = load_json(wordlist_game_file)
        except Exception as e:
            print(f"Error loading wordlist game data ({game_origin}): {e}")
            continue

        # Copy entries from wordlist_game_data to wordlist_data
        for entry in wordlist_game_data['items']:
            if entry['key'] in keys_to_remove:
                wordlist_data['items'].append(entry)

    # Save modified wordlist data to output folder
    output_wordlist_file = os.path.join(output_folder, 'wordlist.json')
    save_json(wordlist_data, output_wordlist_file)
    print(f"Modified wordlist saved to: {output_wordlist_file}")
|
||||
|
||||
if __name__ == "__main__":
    # Input/output locations, relative to the tool's working directory.
    datatable_folder = 'datatable'
    wordlist_folder = 'wordlist'
    added_songs_file = 'added_songs.json'
    output_folder = 'datatable_merged'

    # Make sure the merge destination exists before writing anything.
    os.makedirs(output_folder, exist_ok=True)

    process_wordlist_files(
        os.path.join(datatable_folder, 'wordlist.json'),
        wordlist_folder,
        added_songs_file,
        output_folder,
    )
|
Loading…
Reference in New Issue
Block a user