add back games, conform them to new title dispatch

parent f18e939dd0
commit 7e3396a7ff
@@ -1,4 +1,4 @@
-from twisted.web import resource
 from typing import Dict, Any
 import logging, coloredlogs
 from logging.handlers import TimedRotatingFileHandler
+from twisted.web.http import Request
@@ -13,6 +13,7 @@ class TitleServlet():
         self.config = core_cfg
         self.config_folder = cfg_folder
         self.data = Data(core_cfg)
+        self.title_registry: Dict[str, Any] = {}

         self.logger = logging.getLogger("title")
         if not hasattr(self.logger, "initialized"):
@@ -35,8 +36,31 @@ class TitleServlet():
         if "game_registry" not in globals():
             globals()["game_registry"] = Utils.get_all_titles()

-    def handle_GET(self, request: Request):
-        pass
+        for folder, mod in globals()["game_registry"].items():
+            if hasattr(mod, "game_codes") and hasattr(mod, "index"):
+                handler_cls = mod.index(self.config, self.config_folder)
+                if hasattr(handler_cls, "setup"):
+                    handler_cls.setup()

-    def handle_POST(self, request: Request):
-        pass
+                for code in mod.game_codes:
+                    self.title_registry[code] = handler_cls
+
+            else:
+                self.logger.error(f"{folder} missing game_code or index in __init__.py")
+
+        self.logger.info(f"Serving {len(globals()['game_registry'])} game codes")
+
+    def render_GET(self, request: Request, endpoints: dict) -> bytes:
+        print(endpoints)
+
+    def render_POST(self, request: Request, endpoints: dict) -> bytes:
+        print(endpoints)
+        code = endpoints["game"]
+        if code not in self.title_registry:
+            self.logger.warn(f"Unknown game code {code}")
+
+        index = self.title_registry[code]
+        if not hasattr(index, "render_POST"):
+            self.logger.warn(f"{code} does not dispatch on POST")
+
+        return index.render_POST(request, endpoints["version"], endpoints["endpoint"])
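As a quick reference for reviewers, here is a minimal sketch of what the registry loop above expects a title package's `__init__.py` to export; the `ExampleServlet` class and the `SDXX` game code are placeholders for illustration, not part of this commit:

```python
# titles/example/__init__.py - hypothetical module layout accepted by TitleServlet
from core.config import CoreConfig

class ExampleServlet:
    def __init__(self, core_cfg: CoreConfig, cfg_folder: str) -> None:
        self.core_cfg = core_cfg
        self.cfg_folder = cfg_folder

    def setup(self) -> None:
        # Optional; only called because TitleServlet checks hasattr(handler_cls, "setup").
        pass

    def render_POST(self, request, version: int, endpoint: str) -> bytes:
        # TitleServlet.render_POST forwards (request, version, endpoint) here.
        return b""

index = ExampleServlet    # instantiated as mod.index(self.config, self.config_folder)
game_codes = ["SDXX"]     # every code listed here becomes a key in title_registry
```

The chuni package added later in this commit (`titles/chuni/__init__.py`) follows exactly this shape.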
@@ -18,4 +18,5 @@ class Utils:

            except ImportError as e:
                print(f"{dir} - {e}")
                raise
        return ret
dbutils.py (47 lines changed)
@@ -1,6 +1,47 @@
 import yaml
 import argparse
 from core.config import CoreConfig
 from core.data import Data

-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="ARTEMiS main entry point")
-    parser.add_argument("--config", "-c", type=str, default="config", help="Configuration folder")
+if __name__=='__main__':
+    parser = argparse.ArgumentParser(description="Database utilities")
+    parser.add_argument("--config", "-c", type=str, help="Config folder to use", default="config")
+    parser.add_argument("--version", "-v", type=str, help="Version of the database to upgrade/rollback to")
+    parser.add_argument("--game", "-g", type=str, help="Game code of the game who's schema will be updated/rolled back. Ex. SDFE")
+    parser.add_argument("action", type=str, help="DB Action, create, recreate, upgrade, or rollback")
+    args = parser.parse_args()
+
+    cfg = CoreConfig()
+    cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
+    data = Data(cfg)
+
+    if args.action == "create":
+        data.create_database()
+
+    elif args.action == "recreate":
+        data.recreate_database()
+
+    elif args.action == "upgrade" or args.action == "rollback":
+        if args.version is None:
+            print("Must set game and version to migrate to")
+            exit(0)
+
+        if args.game is None:
+            print("No game set, upgrading core schema")
+            data.migrate_database("CORE", int(args.version))
+
+        else:
+            data.migrate_database(args.game, int(args.version), args.action)
+
+    elif args.action == "migrate":
+        print("Migrating from old schema to new schema")
+        data.restore_from_old_schema()
+
+    elif args.action == "dump":
+        print("Dumping old schema to migrate to new schema")
+        data.dump_db()
+
+    elif args.action == "generate":
+        pass
+
+    data.logger.info("Done")
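For context, the script above is a thin CLI over the core `Data` helpers; a hedged sketch of the equivalent direct calls is below (the `SDBT` code and version `3` are placeholder values, and the flags mirror the argparse definitions above, e.g. `python dbutils.py -c config upgrade -g SDBT -v 3`):

```python
# Rough equivalent of an "upgrade" run, assuming config/core.yaml exists
# and Data exposes the methods called by the script above.
import yaml

from core.config import CoreConfig
from core.data import Data

cfg = CoreConfig()
cfg.update(yaml.safe_load(open("config/core.yaml")))

data = Data(cfg)
data.migrate_database("SDBT", 3, "upgrade")  # per-game schema migration, as in the script
```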
example_config/chuni.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
server:
    enable: True
    loglevel: "info"

crypto:
    encrypted_only: False
example_config/cxb.yaml (new file, 9 lines)
@@ -0,0 +1,9 @@
server:
    enable: True
    loglevel: "info"
    hostname: "localhost"
    ssl_enable: False
    port: 8082
    port_secure: 443
    ssl_cert: "cert/title.crt"
    ssl_key: "cert/title.key"
example_config/diva.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
server:
    enable: True
    loglevel: "info"
example_config/mai2.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
server:
    enable: True
    loglevel: "info"
example_config/nginx_example.conf (new file, 60 lines)
@@ -0,0 +1,60 @@
# Allnet
server {
    listen 80;
    server_name naominet.jp;

    location / {
        proxy_pass http://localhost:8000/;
    }
}

# Non-SSL titles
server {
    listen 80;
    server_name your.hostname.here;

    location / {
        proxy_pass http://localhost:8080/;
    }
}

# SSL titles
server {
    listen 443 ssl default_server;
    listen [::]:443 ssl default_server;
    server_name your.hostname.here;

    ssl_certificate /path/to/cert/title.crt;
    ssl_certificate_key /path/to/cert/title.key;
    ssl_session_timeout 1d;
    ssl_session_cache shared:MozSSL:10m;
    ssl_session_tickets off;

    ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
    ssl_ciphers "ALL:@SECLEVEL=0";
    ssl_prefer_server_ciphers off;

    location / {
        proxy_pass http://localhost:8080/;
    }
}

# Billing
server {
    listen 8443 ssl;
    server_name ib.naominet.jp;

    ssl_certificate /path/to/cert/server.pem;
    ssl_certificate_key /path/to/cert/server.key;
    ssl_session_timeout 1d;
    ssl_session_cache shared:MozSSL:10m;
    ssl_session_tickets off;

    ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
    ssl_ciphers "ALL:@SECLEVEL=1";
    ssl_prefer_server_ciphers off;

    location / {
        proxy_pass http://localhost:8444/;
    }
}
example_config/ongeki.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
server:
    enable: True
    loglevel: "info"
example_config/wacca.yaml (new file, 32 lines)
@@ -0,0 +1,32 @@
server:
    enable: True
    loglevel: "info"

mods:
    always_vip: True
    infinite_tickets: True
    infinite_wp: True

gates:
    enabled_gates:
        - 1
        - 2
        - 3
        - 4
        - 5
        - 6
        - 7
        - 8
        - 9
        - 10
        - 11
        - 12
        - 13
        - 14
        - 15
        - 16
        - 17
        - 18
        - 19
        - 21
        - 22
index.py (87 lines changed)
@@ -4,9 +4,82 @@ import yaml
 from os import path, mkdir, access, W_OK
 from core import *

-from twisted.web import server
+from twisted.web import server, resource
 from twisted.internet import reactor, endpoints
-from txroutes import Dispatcher
+from twisted.web.http import Request
+from routes import Mapper
+
+class HttpDispatcher(resource.Resource):
+    def __init__(self, cfg: CoreConfig, config_dir: str):
+        super().__init__()
+        self.config = cfg
+        self.isLeaf = True
+        self.map_get = Mapper()
+        self.map_post = Mapper()
+
+        self.allnet = AllnetServlet(cfg, config_dir)
+        self.title = TitleServlet(cfg, config_dir)
+
+        self.map_post.connect('allnet_poweron', '/sys/servlet/PowerOn', controller="allnet", action='handle_poweron', conditions=dict(method=['POST']))
+        self.map_post.connect('allnet_downloadorder', '/sys/servlet/DownloadOrder', controller="allnet", action='handle_dlorder', conditions=dict(method=['POST']))
+        self.map_post.connect('allnet_billing', '/request', controller="allnet", action='handle_billing_request', conditions=dict(method=['POST']))
+
+        self.map_get.connect("title_get", "/{game}/{version}/{endpoint:.*?}", controller="title", action="render_GET", requirements=dict(game=R"S..."))
+        self.map_post.connect("title_post", "/{game}/{version}/{endpoint:.*?}", controller="title", action="render_POST", requirements=dict(game=R"S..."))
+
+    def render_GET(self, request: Request) -> bytes:
+        test = self.map_get.match(request.uri.decode())
+        if test is None:
+            return b""
+
+        controller = getattr(self, test["controller"], None)
+        if controller is None:
+            return b""
+
+        handler = getattr(controller, test["action"], None)
+        if handler is None:
+            return b""
+
+        url_vars = test
+        url_vars.pop("controller")
+        url_vars.pop("action")
+
+        if len(url_vars) > 0:
+            ret = handler(request, url_vars)
+        else:
+            ret = handler(request)
+
+        if type(ret) == str:
+            return ret.encode()
+        elif type(ret) == bytes:
+            return ret
+        else:
+            return b""
+
+    def render_POST(self, request: Request) -> bytes:
+        test = self.map_post.match(request.uri.decode())
+        if test is None:
+            return b""
+
+        controller = getattr(self, test["controller"], None)
+        if controller is None:
+            return b""
+
+        handler = getattr(controller, test["action"], None)
+        if handler is None:
+            return b""
+
+        url_vars = test
+        url_vars.pop("controller")
+        url_vars.pop("action")
+        ret = handler(request, url_vars)
+
+        if type(ret) == str:
+            return ret.encode()
+        elif type(ret) == bytes:
+            return ret
+        else:
+            return b""

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="ARTEMiS main entry point")
@@ -42,15 +115,7 @@ if __name__ == "__main__":
     billing_server_str = f"ssl:{cfg.billing.port}:interface={cfg.server.listen_address}"\
         f":privateKey={cfg.billing.ssl_key}:certKey={cfg.billing.ssl_cert}"

-    allnet_cls = AllnetServlet(cfg, args.config)
-    title_cls = TitleServlet(cfg, args.config)
-
-    dispatcher = Dispatcher()
-    dispatcher.connect('allnet_poweron', '/sys/servlet/PowerOn', allnet_cls, action='handle_poweron', conditions=dict(method=['POST']))
-    dispatcher.connect('allnet_downloadorder', '/sys/servlet/DownloadOrder', allnet_cls, action='handle_dlorder', conditions=dict(method=['POST']))
-    dispatcher.connect('allnet_billing', '/request', allnet_cls, action='handle_billing_request', conditions=dict(method=['POST']))
-    dispatcher.connect("title_get", "/{game}/{version}/{endpoint}", title_cls, action="handle_GET", conditions=dict(method=['GET']))
-    dispatcher.connect("title_post", "/{game}/{version}/{endpoint}", title_cls, action="handle_POST", conditions=dict(method=['POST']))
+    dispatcher = HttpDispatcher(cfg, args.config)

     endpoints.serverFromString(reactor, allnet_server_str).listen(server.Site(dispatcher))
     endpoints.serverFromString(reactor, adb_server_str).listen(AimedbFactory(cfg))
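To make the controller/action indirection concrete, here is a small hedged sketch of the `routes.Mapper` lookup that drives `HttpDispatcher`; the request path and the `SDBT` code are illustrative only:

```python
# Standalone illustration of the Mapper match used by HttpDispatcher above.
from routes import Mapper

map_post = Mapper()
map_post.connect("title_post", "/{game}/{version}/{endpoint:.*?}",
                 controller="title", action="render_POST", requirements=dict(game=R"S..."))

match = map_post.match("/SDBT/200/ChuniServlet/GetUserDataApi")
# match should come back roughly as:
# {"controller": "title", "action": "render_POST",
#  "game": "SDBT", "version": "200", "endpoint": "ChuniServlet/GetUserDataApi"}
# HttpDispatcher then resolves self.<controller>, pops "controller" and "action",
# and passes the remaining URL variables along, which is how TitleServlet.render_POST
# receives endpoints["game"], endpoints["version"], and endpoints["endpoint"].
```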
read.py (new file, 129 lines)
@@ -0,0 +1,129 @@
# vim: set fileencoding=utf-8
import argparse
import re
import os
import yaml
import importlib
import logging, coloredlogs

from logging.handlers import TimedRotatingFileHandler
from typing import List, Optional

from core import CoreConfig
from core.utils import Utils

class BaseReader():
    def __init__(self, config: CoreConfig, version: int, bin_dir: Optional[str], opt_dir: Optional[str], extra: Optional[str]) -> None:
        self.logger = logging.getLogger("reader")
        self.config = config
        self.bin_dir = bin_dir
        self.opt_dir = opt_dir
        self.version = version
        self.extra = extra

    def get_data_directories(self, directory: str) -> List[str]:
        ret: List[str] = []

        for root, dirs, files in os.walk(directory):
            for dir in dirs:
                if re.fullmatch("[A-Z0-9]{4,4}", dir) is not None:
                    ret.append(f"{root}/{dir}")

        return ret

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Import Game Information')
    parser.add_argument(
        '--series',
        action='store',
        type=str,
        required=True,
        help='The game series we are importing.',
    )
    parser.add_argument(
        '--version',
        dest='version',
        action='store',
        type=int,
        required=True,
        help='The game version we are importing.',
    )
    parser.add_argument(
        '--binfolder',
        dest='bin',
        action='store',
        type=str,
        help='Folder containing A000 base data',
    )
    parser.add_argument(
        '--optfolder',
        dest='opt',
        action='store',
        type=str,
        help='Folder containing Option data folders',
    )
    parser.add_argument(
        "--config",
        type=str,
        default="config",
        help="Folder containing the core configuration for importing to DB. Defaults to 'config'.",
    )
    parser.add_argument(
        "--extra",
        type=str,
        help="Any extra data that a reader might require.",
    )

    # Parse args, validate invariants.
    args = parser.parse_args()

    config = CoreConfig()
    config.update(yaml.safe_load(open(f"{args.config}/core.yaml")))

    log_fmt_str = "[%(asctime)s] Reader | %(levelname)s | %(message)s"
    log_fmt = logging.Formatter(log_fmt_str)
    logger = logging.getLogger("reader")

    fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(config.server.logs, "reader"), when="d", backupCount=10)
    fileHandler.setFormatter(log_fmt)

    consoleHandler = logging.StreamHandler()
    consoleHandler.setFormatter(log_fmt)

    logger.addHandler(fileHandler)
    logger.addHandler(consoleHandler)

    logger.setLevel(logging.INFO)
    coloredlogs.install(level=logging.INFO, logger=logger, fmt=log_fmt_str)

    if args.series is None or args.version is None:
        logger.error("Game or version not specified")
        parser.print_help()
        exit(1)

    if args.bin is None and args.opt is None:
        logger.error("Must specify either bin or opt directory")
        parser.print_help()
        exit(1)

    if args.bin is not None and (args.bin.endswith("\\") or args.bin.endswith("/")):
        bin_arg = args.bin[:-1]
    else:
        bin_arg = args.bin

    if args.opt is not None and (args.opt.endswith("\\") or args.opt.endswith("/")):
        opt_arg = args.opt[:-1]
    else:
        opt_arg = args.opt

    logger.info("Starting importer...")

    titles = Utils.get_all_titles()

    for dir, mod in titles.items():
        if args.series in mod.game_codes:
            handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
            handler.read()

    logger.info("Done")
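The `get_data_directories` helper above keys off the four-character upper-case folder convention used for A000 base data and option folders; a small sketch of that filter in isolation (the candidate names are examples only):

```python
# The same directory filter BaseReader.get_data_directories applies while walking.
import re

candidates = ["A000", "A001", "AMDaemon", "opt", "SDBT"]
matches = [d for d in candidates if re.fullmatch("[A-Z0-9]{4,4}", d)]
# matches -> ["A000", "A001", "SDBT"]; anything that is not exactly four
# upper-case alphanumerics (e.g. "AMDaemon", "opt") is skipped.
```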
requirements.txt (binary file, not shown)
titles/chuni/__init__.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from titles.chuni.index import ChuniServlet
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.read import ChuniReader

index = ChuniServlet
database = ChuniData
reader = ChuniReader

use_default_title = True
include_protocol = True
title_secure = False
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW]
trailing_slash = True
use_default_host = False
host = ""

current_schema_version = 1
titles/chuni/air.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from typing import Dict

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniAir(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_AIR

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.10.00"
        return ret
titles/chuni/airplus.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from typing import Dict

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniAirPlus(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_AIR_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.15.00"
        return ret
titles/chuni/amazon.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniAmazon(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_AMAZON

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.30.00"
        return ret
titles/chuni/amazonplus.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniAmazonPlus(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_AMAZON_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.35.00"
        return ret
titles/chuni/base.py (new file, 572 lines)
@@ -0,0 +1,572 @@
|
||||
import logging
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from time import strftime
|
||||
|
||||
import pytz
|
||||
from typing import Dict, Any
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
class ChuniBase():
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
self.core_cfg = core_cfg
|
||||
self.game_cfg = game_cfg
|
||||
self.data = ChuniData(core_cfg)
|
||||
self.date_time_format = "%Y-%m-%d %H:%M:%S"
|
||||
self.logger = logging.getLogger("chuni")
|
||||
self.game = ChuniConstants.GAME_CODE
|
||||
self.version = ChuniConstants.VER_CHUNITHM
|
||||
|
||||
def handle_game_login_api_request(self, data: Dict) -> Dict:
|
||||
#self.data.base.log_event("chuni", "login", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return { "returnCode": 1 }
|
||||
|
||||
def handle_game_logout_api_request(self, data: Dict) -> Dict:
|
||||
#self.data.base.log_event("chuni", "logout", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return { "returnCode": 1 }
|
||||
|
||||
def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
||||
game_charge_list = self.data.static.get_enabled_charges(self.version)
|
||||
|
||||
charges = []
|
||||
for x in range(len(game_charge_list)):
|
||||
charges.append({
|
||||
"orderId": x,
|
||||
"chargeId": game_charge_list[x]["chargeId"],
|
||||
"price": 1,
|
||||
"startDate": "2017-12-05 07:00:00.0",
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
"salePrice": 1,
|
||||
"saleStartDate": "2017-12-05 07:00:00.0",
|
||||
"saleEndDate": "2099-12-31 00:00:00.0"
|
||||
})
|
||||
return {
|
||||
"length": len(charges),
|
||||
"gameChargeList": charges
|
||||
}
|
||||
|
||||
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||
game_events = self.data.static.get_enabled_events(self.version)
|
||||
|
||||
event_list = []
|
||||
for evt_row in game_events:
|
||||
tmp = {}
|
||||
tmp["id"] = evt_row["eventId"]
|
||||
tmp["type"] = evt_row["type"]
|
||||
tmp["startDate"] = "2017-12-05 07:00:00.0"
|
||||
tmp["endDate"] = "2099-12-31 00:00:00.0"
|
||||
event_list.append(tmp)
|
||||
|
||||
return {
|
||||
"type": data["type"],
|
||||
"length": len(event_list),
|
||||
"gameEventList": event_list
|
||||
}
|
||||
|
||||
def handle_get_game_idlist_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": 0, "gameIdlistList": [] }
|
||||
|
||||
def handle_get_game_message_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": "0", "gameMessageList": [] }
|
||||
|
||||
def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "gameRankingList": [] }
|
||||
|
||||
def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
|
||||
return { "type": data["type"], "length": 0, "gameSaleList": [] }
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
reboot_start = datetime.strftime(datetime.now() - timedelta(hours=4), self.date_time_format)
|
||||
reboot_end = datetime.strftime(datetime.now() - timedelta(hours=3), self.date_time_format)
|
||||
return {
|
||||
"gameSetting": {
|
||||
"dataVersion": "1.00.00",
|
||||
"isMaintenance": "false",
|
||||
"requestInterval": 10,
|
||||
"rebootStartTime": reboot_start,
|
||||
"rebootEndTime": reboot_end,
|
||||
"isBackgroundDistribute": "false",
|
||||
"maxCountCharacter": 300,
|
||||
"maxCountItem": 300,
|
||||
"maxCountMusic": 300,
|
||||
},
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
}
|
||||
|
||||
def handle_get_user_activity_api_request(self, data: Dict) -> Dict:
|
||||
user_activity_list = self.data.profile.get_profile_activity(data["userId"], data["kind"])
|
||||
|
||||
activity_list = []
|
||||
|
||||
for activity in user_activity_list:
|
||||
tmp = activity._asdict()
|
||||
tmp.pop("user")
|
||||
tmp["id"] = tmp["activityId"]
|
||||
tmp.pop("activityId")
|
||||
activity_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(activity_list),
|
||||
"kind": data["kind"],
|
||||
"userActivityList": activity_list
|
||||
}
|
||||
|
||||
def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = self.data.item.get_characters(data["userId"])
|
||||
if characters is None: return {}
|
||||
next_idx = -1
|
||||
|
||||
characterList = []
|
||||
for x in range(int(data["nextIndex"]), len(characters)):
|
||||
tmp = characters[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
characterList.append(tmp)
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]):
|
||||
break
|
||||
|
||||
if len(characterList) >= int(data["maxCount"]) and len(characters) > int(data["maxCount"]) + int(data["nextIndex"]):
|
||||
next_idx = int(data["maxCount"]) + int(data["nextIndex"]) + 1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(characterList),
|
||||
"nextIndex": next_idx,
|
||||
"userCharacterList": characterList
|
||||
}
|
||||
|
||||
def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
user_charge_list = self.data.profile.get_profile_charge(data["userId"])
|
||||
|
||||
charge_list = []
|
||||
for charge in user_charge_list:
|
||||
tmp = charge._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
charge_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(charge_list),
|
||||
"userChargeList": charge_list
|
||||
}
|
||||
|
||||
def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_course_list = self.data.score.get_courses(data["userId"])
|
||||
if user_course_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCourseList": []
|
||||
}
|
||||
|
||||
course_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(user_course_list)):
|
||||
tmp = user_course_list[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
course_list.append(tmp)
|
||||
|
||||
if len(user_course_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(user_course_list) >= max_ct:
|
||||
next_idx = next_idx + max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(course_list),
|
||||
"nextIndex": next_idx,
|
||||
"userCourseList": course_list
|
||||
}
|
||||
|
||||
def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if p is None: return {}
|
||||
|
||||
profile = p._asdict()
|
||||
profile.pop("id")
|
||||
profile.pop("user")
|
||||
profile.pop("version")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userData": profile
|
||||
}
|
||||
|
||||
def handle_get_user_data_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data_ex(data["userId"], self.version)
|
||||
if p is None: return {}
|
||||
|
||||
profile = p._asdict()
|
||||
profile.pop("id")
|
||||
profile.pop("user")
|
||||
profile.pop("version")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userDataEx": profile
|
||||
}
|
||||
|
||||
def handle_get_user_duel_api_request(self, data: Dict) -> Dict:
|
||||
user_duel_list = self.data.item.get_duels(data["userId"])
|
||||
if user_duel_list is None: return {}
|
||||
|
||||
duel_list = []
|
||||
for duel in user_duel_list:
|
||||
tmp = duel._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
duel_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(duel_list),
|
||||
"userDuelList": duel_list
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"kind": data["kind"],
|
||||
"nextIndex": -1,
|
||||
"userFavoriteItemList": []
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_music_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
This is handled via the webui, which we don't have right now
|
||||
"""
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userFavoriteMusicList": []
|
||||
}
|
||||
|
||||
def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
kind = int(int(data["nextIndex"]) / 10000000000)
|
||||
next_idx = int(int(data["nextIndex"]) % 10000000000)
|
||||
user_item_list = self.data.item.get_items(data["userId"], kind)
|
||||
|
||||
if user_item_list is None or len(user_item_list) == 0:
|
||||
return {"userId": data["userId"], "nextIndex": -1, "itemKind": kind, "userItemList": []}
|
||||
|
||||
items: list[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
tmp = user_item_list[i]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
items.append(tmp)
|
||||
if len(items) >= int(data["maxCount"]):
|
||||
break
|
||||
|
||||
xout = kind * 10000000000 + next_idx + len(items)
|
||||
|
||||
if len(items) < int(data["maxCount"]): nextIndex = 0
|
||||
else: nextIndex = xout
|
||||
|
||||
return {"userId": data["userId"], "nextIndex": nextIndex, "itemKind": kind, "length": len(items), "userItemList": items}
|
||||
|
||||
def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Unsure how to get this to trigger...
|
||||
"""
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 2,
|
||||
"userLoginBonusList": [
|
||||
{
|
||||
"presetId": '10',
|
||||
"bonusCount": '0',
|
||||
"lastUpdateDate": "1970-01-01 09:00:00",
|
||||
"isWatched": "true"
|
||||
},
|
||||
{
|
||||
"presetId": '20',
|
||||
"bonusCount": '0',
|
||||
"lastUpdateDate": "1970-01-01 09:00:00",
|
||||
"isWatched": "true"
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
def handle_get_user_map_api_request(self, data: Dict) -> Dict:
|
||||
user_map_list = self.data.item.get_maps(data["userId"])
|
||||
if user_map_list is None: return {}
|
||||
|
||||
map_list = []
|
||||
for map in user_map_list:
|
||||
tmp = map._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
map_list.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(map_list),
|
||||
"userMapList": map_list
|
||||
}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
music_detail = self.data.score.get_scores(data["userId"])
|
||||
if music_detail is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userMusicList": [] #240
|
||||
}
|
||||
song_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(music_detail)):
|
||||
found = False
|
||||
tmp = music_detail[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
|
||||
for song in song_list:
|
||||
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
|
||||
found = True
|
||||
song["userMusicDetailList"].append(tmp)
|
||||
song["length"] = len(song["userMusicDetailList"])
|
||||
|
||||
if not found:
|
||||
song_list.append({
|
||||
"length": 1,
|
||||
"userMusicDetailList": [tmp]
|
||||
})
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = 0
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(song_list),
|
||||
"nextIndex": next_idx,
|
||||
"userMusicList": song_list #240
|
||||
}
|
||||
|
||||
def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option(data["userId"])
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
option.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userGameOption": option
|
||||
}
|
||||
|
||||
def handle_get_user_option_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option_ex(data["userId"])
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
option.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userGameOptionEx": option
|
||||
}
|
||||
|
||||
def read_wtf8(self, src):
|
||||
return bytes([ord(c) for c in src]).decode("utf-8")
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
if profile is None: return None
|
||||
profile_character = self.data.item.get_character(data["userId"], profile["characterId"])
|
||||
|
||||
if profile_character is None:
|
||||
chara = {}
|
||||
else:
|
||||
chara = profile_character._asdict()
|
||||
chara.pop("id")
|
||||
chara.pop("user")
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
# Current Login State
|
||||
"isLogin": False,
|
||||
"lastLoginDate": profile["lastPlayDate"],
|
||||
# User Profile
|
||||
"userName": profile["userName"],
|
||||
"reincarnationNum": profile["reincarnationNum"],
|
||||
"level": profile["level"],
|
||||
"exp": profile["exp"],
|
||||
"playerRating": profile["playerRating"],
|
||||
"lastGameId": profile["lastGameId"],
|
||||
"lastRomVersion": profile["lastRomVersion"],
|
||||
"lastDataVersion": profile["lastDataVersion"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"trophyId": profile["trophyId"],
|
||||
"nameplateId": profile["nameplateId"],
|
||||
# Current Selected Character
|
||||
"userCharacter": chara,
|
||||
# User Game Options
|
||||
"playerLevel": profile["playerLevel"],
|
||||
"rating": profile["rating"],
|
||||
"headphone": profile["headphone"],
|
||||
"chargeState": "1",
|
||||
"userNameEx": profile["userName"],
|
||||
}
|
||||
|
||||
def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
recet_rating_list = self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
if recet_rating_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userRecentRatingList": [],
|
||||
}
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(recet_rating_list["recentRating"]),
|
||||
"userRecentRatingList": recet_rating_list["recentRating"],
|
||||
}
|
||||
|
||||
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
# TODO: Region
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userRegionList": [],
|
||||
}
|
||||
|
||||
def handle_get_user_team_api_request(self, data: Dict) -> Dict:
|
||||
# TODO: Team
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"teamId": 0
|
||||
}
|
||||
|
||||
def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": 0,
|
||||
"teamCourseSettingList": [],
|
||||
}
|
||||
|
||||
def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"nextIndex": 0,
|
||||
"teamCourseRuleList": [],
|
||||
}
|
||||
|
||||
def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
|
||||
upsert = data["upsertUserAll"]
|
||||
user_id = data["userId"]
|
||||
|
||||
if "userData" in upsert:
|
||||
try:
|
||||
upsert["userData"][0]["userName"] = self.read_wtf8(upsert["userData"][0]["userName"])
|
||||
except: pass
|
||||
|
||||
self.data.profile.put_profile_data(user_id, self.version, upsert["userData"][0])
|
||||
if "userDataEx" in upsert:
|
||||
self.data.profile.put_profile_data_ex(user_id, self.version, upsert["userDataEx"][0])
|
||||
if "userGameOption" in upsert:
|
||||
self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
|
||||
if "userGameOptionEx" in upsert:
|
||||
self.data.profile.put_profile_option_ex(user_id, upsert["userGameOptionEx"][0])
|
||||
if "userRecentRatingList" in upsert:
|
||||
self.data.profile.put_profile_recent_rating(user_id, upsert["userRecentRatingList"])
|
||||
|
||||
if "userCharacterList" in upsert:
|
||||
for character in upsert["userCharacterList"]:
|
||||
self.data.item.put_character(user_id, character)
|
||||
|
||||
if "userMapList" in upsert:
|
||||
for map in upsert["userMapList"]:
|
||||
self.data.item.put_map(user_id, map)
|
||||
|
||||
if "userCourseList" in upsert:
|
||||
for course in upsert["userCourseList"]:
|
||||
self.data.score.put_course(user_id, course)
|
||||
|
||||
if "userDuelList" in upsert:
|
||||
for duel in upsert["userDuelList"]:
|
||||
self.data.item.put_duel(user_id, duel)
|
||||
|
||||
if "userItemList" in upsert:
|
||||
for item in upsert["userItemList"]:
|
||||
self.data.item.put_item(user_id, item)
|
||||
|
||||
if "userActivityList" in upsert:
|
||||
for activity in upsert["userActivityList"]:
|
||||
self.data.profile.put_profile_activity(user_id, activity)
|
||||
|
||||
if "userChargeList" in upsert:
|
||||
for charge in upsert["userChargeList"]:
|
||||
self.data.profile.put_profile_charge(user_id, charge)
|
||||
|
||||
if "userMusicDetailList" in upsert:
|
||||
for song in upsert["userMusicDetailList"]:
|
||||
self.data.score.put_score(user_id, song)
|
||||
|
||||
if "userPlaylogList" in upsert:
|
||||
for playlog in upsert["userPlaylogList"]:
|
||||
self.data.score.put_playlog(user_id, playlog)
|
||||
|
||||
if "userTeamPoint" in upsert:
|
||||
# TODO: team stuff
|
||||
pass
|
||||
|
||||
if "userMapAreaList" in upsert:
|
||||
for map_area in upsert["userMapAreaList"]:
|
||||
self.data.item.put_map_area(user_id, map_area)
|
||||
|
||||
if "userOverPowerList" in upsert:
|
||||
for overpower in upsert["userOverPowerList"]:
|
||||
self.data.profile.put_profile_overpower(user_id, overpower)
|
||||
|
||||
if "userEmoneyList" in upsert:
|
||||
for emoney in upsert["userEmoneyList"]:
|
||||
self.data.profile.put_profile_emoney(user_id, emoney)
|
||||
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_client_develop_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_client_error_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
titles/chuni/config.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from core.config import CoreConfig
from typing import Dict

class ChuniServerConfig():
    def __init__(self, parent_config: "ChuniConfig") -> None:
        self.__config = parent_config

    @property
    def enable(self) -> bool:
        return CoreConfig.get_config_field(self.__config, 'chuni', 'server', 'enable', default=True)

    @property
    def loglevel(self) -> int:
        return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'chuni', 'server', 'loglevel', default="info"))

class ChuniCryptoConfig():
    def __init__(self, parent_config: "ChuniConfig") -> None:
        self.__config = parent_config

    @property
    def keys(self) -> Dict:
        """
        in the form of:
        internal_version: [key, iv]
        all values are hex strings
        """
        return CoreConfig.get_config_field(self.__config, 'chuni', 'crypto', 'keys', default={})

    @property
    def encrypted_only(self) -> bool:
        return CoreConfig.get_config_field(self.__config, 'chuni', 'crypto', 'encrypted_only', default=False)

class ChuniConfig(dict):
    def __init__(self) -> None:
        self.server = ChuniServerConfig(self)
        self.crypto = ChuniCryptoConfig(self)
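Since the `keys` docstring above only describes the mapping shape, here is a hedged example of what `game_cfg.crypto.keys` would look like once `chuni.yaml` carries a crypto section; the internal version number and hex strings are placeholders, not real keys:

```python
# Hypothetical value of game_cfg.crypto.keys after chuni.yaml is loaded.
# ChuniServlet.render_POST indexes this dict with str(internal_ver).
keys = {
    "10": [  # internal version, e.g. VER_CHUNITHM_PARADISE
        "00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff",  # AES key, hex
        "000102030405060708090a0b0c0d0e0f",                                  # IV, hex
    ],
}
```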
titles/chuni/const.py (new file, 24 lines)
@@ -0,0 +1,24 @@
class ChuniConstants():
    GAME_CODE = "SDBT"
    GAME_CODE_NEW = "SDHD"

    VER_CHUNITHM = 0
    VER_CHUNITHM_PLUS = 1
    VER_CHUNITHM_AIR = 2
    VER_CHUNITHM_AIR_PLUS = 3
    VER_CHUNITHM_STAR = 4
    VER_CHUNITHM_STAR_PLUS = 5
    VER_CHUNITHM_AMAZON = 6
    VER_CHUNITHM_AMAZON_PLUS = 7
    VER_CHUNITHM_CRYSTAL = 8
    VER_CHUNITHM_CRYSTAL_PLUS = 9
    VER_CHUNITHM_PARADISE = 10
    VER_CHUNITHM_NEW = 11
    VER_CHUNITHM_NEW_PLUS = 12

    VERSION_NAMES = ["Chunithm", "Chunithm+", "Chunithm Air", "Chunithm Air+", "Chunithm Star", "Chunithm Star+", "Chunithm Amazon",
                     "Chunithm Amazon+", "Chunithm Crystal", "Chunithm Crystal+", "Chunithm Paradise", "Chunithm New!!", "Chunithm New!!+"]

    @classmethod
    def game_ver_to_string(cls, ver: int):
        return cls.VERSION_NAMES[ver]
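A quick usage note: `game_ver_to_string` simply indexes `VERSION_NAMES` with the internal version constant, for example:

```python
from titles.chuni.const import ChuniConstants

# VER_CHUNITHM_PARADISE is 10, so this prints "Chunithm Paradise".
print(ChuniConstants.game_ver_to_string(ChuniConstants.VER_CHUNITHM_PARADISE))
```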
titles/chuni/crystal.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniCrystal(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_CRYSTAL

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.40.00"
        return ret
titles/chuni/crystalplus.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniCrystalPlus(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.45.00"
        return ret
titles/chuni/database.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from core.data import Data
from core.config import CoreConfig
from titles.chuni.schema import *

class ChuniData(Data):
    def __init__(self, cfg: CoreConfig) -> None:
        super().__init__(cfg)

        self.item = ChuniItemData(cfg, self.session)
        self.profile = ChuniProfileData(cfg, self.session)
        self.score = ChuniScoreData(cfg, self.session)
        self.static = ChuniStaticData(cfg, self.session)
titles/chuni/index.py (new file, 172 lines)
@@ -0,0 +1,172 @@
from twisted.web.http import Request
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
import zlib
import yaml
import json
import inflection
import string
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad

from core import CoreConfig
from titles.chuni.config import ChuniConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.base import ChuniBase
from titles.chuni.plus import ChuniPlus
from titles.chuni.air import ChuniAir
from titles.chuni.airplus import ChuniAirPlus
from titles.chuni.star import ChuniStar
from titles.chuni.starplus import ChuniStarPlus
from titles.chuni.amazon import ChuniAmazon
from titles.chuni.amazonplus import ChuniAmazonPlus
from titles.chuni.crystal import ChuniCrystal
from titles.chuni.crystalplus import ChuniCrystalPlus
from titles.chuni.paradise import ChuniParadise
from titles.chuni.new import ChuniNew
from titles.chuni.newplus import ChuniNewPlus

class ChuniServlet():
    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        self.core_cfg = core_cfg
        self.game_cfg = ChuniConfig()
        self.game_cfg.update(yaml.safe_load(open(f"{cfg_dir}/chuni.yaml")))

        self.versions = [
            ChuniBase(core_cfg, self.game_cfg),
            ChuniPlus(core_cfg, self.game_cfg),
            ChuniAir(core_cfg, self.game_cfg),
            ChuniAirPlus(core_cfg, self.game_cfg),
            ChuniStar(core_cfg, self.game_cfg),
            ChuniStarPlus(core_cfg, self.game_cfg),
            ChuniAmazon(core_cfg, self.game_cfg),
            ChuniAmazonPlus(core_cfg, self.game_cfg),
            ChuniCrystal(core_cfg, self.game_cfg),
            ChuniCrystalPlus(core_cfg, self.game_cfg),
            ChuniParadise(core_cfg, self.game_cfg),
            ChuniNew(core_cfg, self.game_cfg),
            ChuniNewPlus(core_cfg, self.game_cfg),
        ]

        self.logger = logging.getLogger("chuni")

        if not hasattr(self.logger, "inited"):
            log_fmt_str = "[%(asctime)s] Chunithm | %(levelname)s | %(message)s"
            log_fmt = logging.Formatter(log_fmt_str)
            fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.core_cfg.server.log_dir, "chuni"), encoding='utf8',
                when="d", backupCount=10)

            fileHandler.setFormatter(log_fmt)

            consoleHandler = logging.StreamHandler()
            consoleHandler.setFormatter(log_fmt)

            self.logger.addHandler(fileHandler)
            self.logger.addHandler(consoleHandler)

            self.logger.setLevel(self.game_cfg.server.loglevel)
            coloredlogs.install(level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str)
            self.logger.inited = True

    def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
        req_raw = request.content.getvalue()
        url_split = url_path.split("/")
        encrtped = False
        internal_ver = 0
        endpoint = url_split[len(url_split) - 1]

        if version < 105: # 1.0
            internal_ver = ChuniConstants.VER_CHUNITHM
        elif version >= 105 and version < 110: # Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_PLUS
        elif version >= 110 and version < 115: # Air
            internal_ver = ChuniConstants.VER_CHUNITHM_AIR
        elif version >= 115 and version < 120: # Air Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_AIR_PLUS
        elif version >= 120 and version < 125: # Star
            internal_ver = ChuniConstants.VER_CHUNITHM_STAR
        elif version >= 125 and version < 130: # Star Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_STAR_PLUS
        elif version >= 130 and version < 135: # Amazon
            internal_ver = ChuniConstants.VER_CHUNITHM_AMAZON
        elif version >= 135 and version < 140: # Amazon Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_AMAZON_PLUS
        elif version >= 140 and version < 145: # Crystal
            internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL
        elif version >= 145 and version < 150: # Crystal Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
        elif version >= 150 and version < 200: # Paradise
            internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE
        elif version >= 200 and version < 205: # New
            internal_ver = ChuniConstants.VER_CHUNITHM_NEW
        elif version >= 205 and version < 210: # New Plus
            internal_ver = ChuniConstants.VER_CHUNITHM_NEW_PLUS

        if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
            # If we get a 32 character long hex string, it's a hash and we're
            # doing encrypted. The likelyhood of false positives is low but
            # technically not 0
            endpoint = request.getHeader("User-Agent").split("#")[0]
            try:
                crypt = AES.new(
                    bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][0]),
                    AES.MODE_CBC,
                    bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][1])
                )

                req_raw = crypt.decrypt(req_raw)

            except:
                self.logger.error(f"Failed to decrypt v{version} request to {endpoint} -> {req_raw}")
                return zlib.compress("{\"stat\": \"0\"}".encode("utf-8"))

            encrtped = True

        if not encrtped and self.game_cfg.crypto.encrypted_only:
            self.logger.error(f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}")
            return zlib.compress("{\"stat\": \"0\"}".encode("utf-8"))

        try:
            unzip = zlib.decompress(req_raw)

        except zlib.error as e:
            self.logger.error(f"Failed to decompress v{version} {endpoint} request -> {e}")
            return b""

        req_data = json.loads(unzip)

        self.logger.info(f"v{version} {endpoint} request - {req_data}")

        func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"

        try:
            handler = getattr(self.versions[internal_ver], func_to_find)
            resp = handler(req_data)

        except AttributeError as e:
            self.logger.warning(f"Unhandled v{version} request {endpoint} - {e}")
            return zlib.compress("{\"stat\": \"0\"}".encode("utf-8"))

        except Exception as e:
            self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
            return zlib.compress("{\"stat\": \"0\"}".encode("utf-8"))

        if resp == None:
            resp = {'returnCode': 1}

        self.logger.info(f"Response {resp}")

        zipped = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))

        if not encrtped:
            return zipped

        padded = pad(zipped, 16)

        crypt = AES.new(
            bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][0]),
            AES.MODE_CBC,
            bytes.fromhex(self.game_cfg.crypto.keys[str(internal_ver)][1])
        )

        return crypt.encrypt(padded)
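The `func_to_find` construction above is the whole routing convention for chuni endpoints: the camel-case endpoint name from the URL is snake-cased with `inflection.underscore` and wrapped in `handle_..._request`. A short sketch, with an endpoint name taken from the handlers in `base.py` as the example:

```python
import inflection

endpoint = "GetUserDataApi"
func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"
# func_to_find == "handle_get_user_data_api_request", which resolves via getattr
# on the selected version object to ChuniBase.handle_get_user_data_api_request.
```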
titles/chuni/new.py (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from typing import Dict
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.base import ChuniBase
|
||||
from titles.chuni.config import ChuniConfig
|
||||
|
||||
class ChuniNew(ChuniBase):
|
||||
|
||||
ITEM_TYPE = {
|
||||
"character": 20,
|
||||
"story": 21,
|
||||
"card": 22
|
||||
}
|
||||
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
self.core_cfg = core_cfg
|
||||
self.game_cfg = game_cfg
|
||||
self.data = ChuniData(core_cfg)
|
||||
self.date_time_format = "%Y-%m-%d %H:%M:%S"
|
||||
self.logger = logging.getLogger("chuni")
|
||||
self.game = ChuniConstants.GAME_CODE
|
||||
self.version = ChuniConstants.VER_CHUNITHM_NEW
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
match_start = datetime.strftime(datetime.now() - timedelta(hours=10), self.date_time_format)
|
||||
match_end = datetime.strftime(datetime.now() + timedelta(hours=10), self.date_time_format)
|
||||
reboot_start = datetime.strftime(datetime.now() - timedelta(hours=11), self.date_time_format)
|
||||
reboot_end = datetime.strftime(datetime.now() - timedelta(hours=10), self.date_time_format)
|
||||
return {
|
||||
"gameSetting": {
|
||||
"isMaintenance": "false",
|
||||
"requestInterval": 10,
|
||||
"rebootStartTime": reboot_start,
|
||||
"rebootEndTime": reboot_end,
|
||||
"isBackgroundDistribute": "false",
|
||||
"maxCountCharacter": 300,
|
||||
"maxCountItem": 300,
|
||||
"maxCountMusic": 300,
|
||||
"matchStartTime": match_start,
|
||||
"matchEndTime": match_end,
|
||||
"matchTimeLimit": 99,
|
||||
"matchErrorLimit": 9999,
|
||||
"romVersion": "2.00.00",
|
||||
"dataVersion": "2.00.00",
|
||||
"matchingUri": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
"matchingUriX": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
"udpHolePunchUri": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
"reflectorUri": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/200/ChuniServlet/",
|
||||
},
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
}
|
||||
|
||||
def handle_delete_token_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_create_token_api_request(self, data: Dict) -> Dict:
|
||||
return { "returnCode": "1" }
|
||||
|
||||
def handle_get_user_map_area_api_request(self, data: Dict) -> Dict:
|
||||
user_map_areas = self.data.item.get_map_areas(data["userId"])
|
||||
|
||||
map_areas = []
|
||||
for map_area in user_map_areas:
|
||||
tmp = map_area._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
map_areas.append(tmp)
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userMapAreaList": map_areas
|
||||
}
|
||||
|
||||
def handle_get_user_symbol_chat_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"symbolCharInfoList": []
|
||||
}
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
if profile is None: return None
|
||||
profile_character = self.data.item.get_character(data["userId"], profile["characterId"])
|
||||
|
||||
if profile_character is None:
|
||||
chara = {}
|
||||
else:
|
||||
chara = profile_character._asdict()
|
||||
chara.pop("id")
|
||||
chara.pop("user")
|
||||
|
||||
data1 = {
|
||||
"userId": data["userId"],
|
||||
# Current Login State
|
||||
"isLogin": False,
|
||||
"lastLoginDate": profile["lastPlayDate"],
|
||||
# User Profile
|
||||
"userName": profile["userName"],
|
||||
"reincarnationNum": profile["reincarnationNum"],
|
||||
"level": profile["level"],
|
||||
"exp": profile["exp"],
|
||||
"playerRating": profile["playerRating"],
|
||||
"lastGameId": profile["lastGameId"],
|
||||
"lastRomVersion": profile["lastRomVersion"],
|
||||
"lastDataVersion": profile["lastDataVersion"],
|
||||
"lastPlayDate": profile["lastPlayDate"],
|
||||
"emoneyBrandId": 0,
|
||||
"trophyId": profile["trophyId"],
|
||||
# Current Selected Character
|
||||
"userCharacter": chara,
|
||||
# User Game Options
|
||||
"playerLevel": profile["playerLevel"],
|
||||
"rating": profile["rating"],
|
||||
"headphone": profile["headphone"],
|
||||
"chargeState": 0,
|
||||
"userNameEx": "0",
|
||||
"banState": 0,
|
||||
"classEmblemMedal": profile["classEmblemMedal"],
|
||||
"classEmblemBase": profile["classEmblemBase"],
|
||||
"battleRankId": profile["battleRankId"],
|
||||
}
|
||||
return data1
|
23
titles/chuni/newplus.py
Normal file
@ -0,0 +1,23 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.new import ChuniNew
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniNewPlus(ChuniNew):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_NEW_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["romVersion"] = "2.05.00"
        ret["gameSetting"]["dataVersion"] = "2.05.00"
        ret["gameSetting"]["matchingUri"] = f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
        ret["gameSetting"]["matchingUriX"] = f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
        ret["gameSetting"]["udpHolePunchUri"] = f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
        ret["gameSetting"]["reflectorUri"] = f"http://{self.core_cfg.server.hostname}:{self.core_cfg.title.port}/SDHD/205/ChuniServlet/"
        return ret
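Each CHUNITHM version module in this commit follows the shape above: subclass the previous version's handler, pin `self.version`, and override `handle_get_game_setting_api_request` to report its own rom/data version (and, for the NEW family, its own servlet URIs). A hedged sketch of what a further version bump would look like under that pattern; the class name, version number, and version strings below are made up for illustration:

from typing import Dict

from core.config import CoreConfig
from titles.chuni.config import ChuniConfig
from titles.chuni.newplus import ChuniNewPlus

class ChuniExampleNext(ChuniNewPlus):  # hypothetical class, not part of the commit
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = 999  # placeholder; a real title would use a ChuniConstants value

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["romVersion"] = "9.99.00"   # made-up version strings
        ret["gameSetting"]["dataVersion"] = "9.99.00"
        return ret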
18
titles/chuni/paradise.py
Normal file
@ -0,0 +1,18 @@
from datetime import datetime, timedelta
from typing import Dict, Any
import pytz

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniParadise(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_PARADISE

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.50.00"
        return ret
16
titles/chuni/plus.py
Normal file
@ -0,0 +1,16 @@
from typing import Dict

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniPlus(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.05.00"
        return ret
157
titles/chuni/read.py
Normal file
@ -0,0 +1,157 @@
from typing import Optional
from os import walk, path
import xml.etree.ElementTree as ET
from read import BaseReader

from core.config import CoreConfig
from titles.chuni.database import ChuniData
from titles.chuni.const import ChuniConstants

class ChuniReader(BaseReader):
    def __init__(self, config: CoreConfig, version: int, bin_dir: Optional[str], opt_dir: Optional[str], extra: Optional[str]) -> None:
        super().__init__(config, version, bin_dir, opt_dir, extra)
        self.data = ChuniData(config)

        try:
            self.logger.info(f"Start importer for {ChuniConstants.game_ver_to_string(version)}")
        except IndexError:
            self.logger.error(f"Invalid chunithm version {version}")
            exit(1)

    def read(self) -> None:
        data_dirs = []
        if self.bin_dir is not None:
            data_dirs += self.get_data_directories(self.bin_dir)

        if self.opt_dir is not None:
            data_dirs += self.get_data_directories(self.opt_dir)

        for dir in data_dirs:
            self.logger.info(f"Read from {dir}")
            self.read_events(f"{dir}/event")
            self.read_music(f"{dir}/music")
            self.read_charges(f"{dir}/chargeItem")
            self.read_avatar(f"{dir}/avatarAccessory")

    def read_events(self, evt_dir: str) -> None:
        for root, dirs, files in walk(evt_dir):
            for dir in dirs:
                if path.exists(f"{root}/{dir}/Event.xml"):
                    with open(f"{root}/{dir}/Event.xml", 'rb') as fp:
                        bytedata = fp.read()
                        strdata = bytedata.decode('UTF-8')

                    xml_root = ET.fromstring(strdata)
                    for name in xml_root.findall('name'):
                        id = name.find('id').text
                        name = name.find('str').text
                    for substances in xml_root.findall('substances'):
                        event_type = substances.find('type').text

                    result = self.data.static.put_event(self.version, id, event_type, name)
                    if result is not None:
                        self.logger.info(f"Inserted event {id}")
                    else:
                        self.logger.warn(f"Failed to insert event {id}")

    def read_music(self, music_dir: str) -> None:
        for root, dirs, files in walk(music_dir):
            for dir in dirs:
                if path.exists(f"{root}/{dir}/Music.xml"):
                    with open(f"{root}/{dir}/Music.xml", 'rb') as fp:
                        bytedata = fp.read()
                        strdata = bytedata.decode('UTF-8')

                    xml_root = ET.fromstring(strdata)
                    for name in xml_root.findall('name'):
                        song_id = name.find('id').text
                        title = name.find('str').text

                    for artistName in xml_root.findall('artistName'):
                        artist = artistName.find('str').text

                    for genreNames in xml_root.findall('genreNames'):
                        for list_ in genreNames.findall('list'):
                            for StringID in list_.findall('StringID'):
                                genre = StringID.find('str').text

                    for jaketFile in xml_root.findall('jaketFile'): #nice typo, SEGA
                        jacket_path = jaketFile.find('path').text

                    for fumens in xml_root.findall('fumens'):
                        for MusicFumenData in fumens.findall('MusicFumenData'):
                            fumen_path = MusicFumenData.find('file').find("path")

                            if fumen_path.text is not None:
                                chart_id = MusicFumenData.find('type').find('id').text
                                if chart_id == "4":
                                    level = float(xml_root.find("starDifType").text)
                                    we_chara = xml_root.find("worldsEndTagName").find("str").text
                                else:
                                    level = float(f"{MusicFumenData.find('level').text}.{MusicFumenData.find('levelDecimal').text}")
                                    we_chara = None

                                result = self.data.static.put_music(
                                    self.version,
                                    song_id,
                                    chart_id,
                                    title,
                                    artist,
                                    level,
                                    genre,
                                    jacket_path,
                                    we_chara
                                )

                                if result is not None:
                                    self.logger.info(f"Inserted music {song_id} chart {chart_id}")
                                else:
                                    self.logger.warn(f"Failed to insert music {song_id} chart {chart_id}")

    def read_charges(self, charge_dir: str) -> None:
        for root, dirs, files in walk(charge_dir):
            for dir in dirs:
                if path.exists(f"{root}/{dir}/ChargeItem.xml"):
                    with open(f"{root}/{dir}/ChargeItem.xml", 'rb') as fp:
                        bytedata = fp.read()
                        strdata = bytedata.decode('UTF-8')

                    xml_root = ET.fromstring(strdata)
                    for name in xml_root.findall('name'):
                        id = name.find('id').text
                        name = name.find('str').text
                    expirationDays = xml_root.find('expirationDays').text
                    consumeType = xml_root.find('consumeType').text
                    sellingAppeal = bool(xml_root.find('sellingAppeal').text)

                    result = self.data.static.put_charge(self.version, id, name, expirationDays, consumeType, sellingAppeal)

                    if result is not None:
                        self.logger.info(f"Inserted charge {id}")
                    else:
                        self.logger.warn(f"Failed to insert charge {id}")

    def read_avatar(self, avatar_dir: str) -> None:
        for root, dirs, files in walk(avatar_dir):
            for dir in dirs:
                if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
                    with open(f"{root}/{dir}/AvatarAccessory.xml", 'rb') as fp:
                        bytedata = fp.read()
                        strdata = bytedata.decode('UTF-8')

                    xml_root = ET.fromstring(strdata)
                    for name in xml_root.findall('name'):
                        id = name.find('id').text
                        name = name.find('str').text
                    category = xml_root.find('category').text
                    for image in xml_root.findall('image'):
                        iconPath = image.find('path').text
                    for texture in xml_root.findall('texture'):
                        texturePath = texture.find('path').text

                    result = self.data.static.put_avatar(self.version, id, name, category, iconPath, texturePath)

                    if result is not None:
                        self.logger.info(f"Inserted avatarAccessory {id}")
                    else:
                        self.logger.warn(f"Failed to insert avatarAccessory {id}")
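read_events() pulls the event id and name out of the `<name>` block and the event type out of `<substances>`, then upserts them via put_event. A standalone sketch of that parse on a minimal, hand-written Event.xml fragment (the XML below is illustrative only, not real game data):

import xml.etree.ElementTree as ET

sample_event_xml = """
<EventData>
  <name>
    <id>14001</id>
    <str>Example Event</str>
  </name>
  <substances>
    <type>1</type>
  </substances>
</EventData>
"""

xml_root = ET.fromstring(sample_event_xml)
for name in xml_root.findall('name'):
    event_id = name.find('id').text
    event_name = name.find('str').text
for substances in xml_root.findall('substances'):
    event_type = substances.find('type').text

print(event_id, event_name, event_type)  # 14001 Example Event 1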
6
titles/chuni/schema/__init__.py
Normal file
@ -0,0 +1,6 @@
from titles.chuni.schema.profile import ChuniProfileData
from titles.chuni.schema.score import ChuniScoreData
from titles.chuni.schema.item import ChuniItemData
from titles.chuni.schema.static import ChuniStaticData

__all__ = ["ChuniProfileData", "ChuniScoreData", "ChuniItemData", "ChuniStaticData"]
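These re-exports let the rest of the title import every schema class from a single package path, which is how read.py above ends up reaching them through `self.data.static`; only the import shape is sketched here, since the composing ChuniData class is not part of this hunk:

from titles.chuni.schema import (
    ChuniProfileData, ChuniScoreData, ChuniItemData, ChuniStaticData
)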
207
titles/chuni/schema/item.py
Normal file
@ -0,0 +1,207 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row

from core.data.schema import BaseData, metadata

character = Table(
    "chuni_item_character",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
    Column("characterId", Integer),
    Column("level", Integer),
    Column("param1", Integer),
    Column("param2", Integer),
    Column("isValid", Boolean),
    Column("skillId", Integer),
    Column("isNewMark", Boolean),
    Column("playCount", Integer),
    Column("friendshipExp", Integer),
    Column("assignIllust", Integer),
    Column("exMaxLv", Integer),
    UniqueConstraint("user", "characterId", name="chuni_item_character_uk"),
    mysql_charset='utf8mb4'
)

item = Table(
    "chuni_item_item",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
    Column("itemId", Integer),
    Column("itemKind", Integer),
    Column("stock", Integer),
    Column("isValid", Boolean),
    UniqueConstraint("user", "itemId", "itemKind", name="chuni_item_item_uk"),
    mysql_charset='utf8mb4'
)

duel = Table(
    "chuni_item_duel",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
    Column("duelId", Integer),
    Column("progress", Integer),
    Column("point", Integer),
    Column("isClear", Boolean),
    Column("lastPlayDate", String(25)),
    Column("param1", Integer),
    Column("param2", Integer),
    Column("param3", Integer),
    Column("param4", Integer),
    UniqueConstraint("user", "duelId", name="chuni_item_duel_uk"),
    mysql_charset='utf8mb4'
)

map = Table(
    "chuni_item_map",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
    Column("mapId", Integer),
    Column("position", Integer),
    Column("isClear", Boolean),
    Column("areaId", Integer),
    Column("routeNumber", Integer),
    Column("eventId", Integer),
    Column("rate", Integer),
    Column("statusCount", Integer),
    Column("isValid", Boolean),
    UniqueConstraint("user", "mapId", name="chuni_item_map_uk"),
    mysql_charset='utf8mb4'
)

map_area = Table(
    "chuni_item_map_area",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
    Column("mapAreaId", Integer),
    Column("rate", Integer),
    Column("isClear", Boolean),
    Column("isLocked", Boolean),
    Column("position", Integer),
    Column("statusCount", Integer),
    Column("remainGridCount", Integer),
    UniqueConstraint("user", "mapAreaId", name="chuni_item_map_area_uk"),
    mysql_charset='utf8mb4'
)

class ChuniItemData(BaseData):
    def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
        character_data["user"] = user_id

        character_data = self.fix_bools(character_data)

        sql = insert(character).values(**character_data)
        conflict = sql.on_duplicate_key_update(**character_data)

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_character(self, user_id: int, character_id: int) -> Optional[Dict]:
        sql = select(character).where(and_(
            character.c.user == user_id,
            character.c.characterId == character_id
        ))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchone()

    def get_characters(self, user_id: int) -> Optional[List[Row]]:
        sql = select(character).where(character.c.user == user_id)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def put_item(self, user_id: int, item_data: Dict) -> Optional[int]:
        item_data["user"] = user_id

        item_data = self.fix_bools(item_data)

        sql = insert(item).values(**item_data)
        conflict = sql.on_duplicate_key_update(**item_data)

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
        if kind is None:
            sql = select(item).where(item.c.user == user_id)
        else:
            sql = select(item).where(and_(
                item.c.user == user_id,
                item.c.itemKind == kind
            ))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def put_duel(self, user_id: int, duel_data: Dict) -> Optional[int]:
        duel_data["user"] = user_id

        duel_data = self.fix_bools(duel_data)

        sql = insert(duel).values(**duel_data)
        conflict = sql.on_duplicate_key_update(**duel_data)

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_duels(self, user_id: int) -> Optional[List[Row]]:
        sql = select(duel).where(duel.c.user == user_id)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def put_map(self, user_id: int, map_data: Dict) -> Optional[int]:
        map_data["user"] = user_id

        map_data = self.fix_bools(map_data)

        sql = insert(map).values(**map_data)
        conflict = sql.on_duplicate_key_update(**map_data)

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_maps(self, user_id: int) -> Optional[List[Row]]:
        sql = select(map).where(map.c.user == user_id)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def put_map_area(self, user_id: int, map_area_data: Dict) -> Optional[int]:
        map_area_data["user"] = user_id

        map_area_data = self.fix_bools(map_area_data)

        sql = insert(map_area).values(**map_area_data)
        conflict = sql.on_duplicate_key_update(**map_area_data)

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_map_areas(self, user_id: int) -> Optional[List[Row]]:
        sql = select(map_area).where(map_area.c.user == user_id)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()
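Every put_* method above is an upsert: `insert(...).values(...)` plus MySQL's `on_duplicate_key_update`, keyed by the table's UniqueConstraint (for map areas, `(user, mapAreaId)`), so replaying the same save simply refreshes the row instead of inserting a duplicate. A hedged usage sketch, assuming a working ChuniItemData instance named `item_db` and an existing aime user id:

# Sketch only: item_db and user_id are assumed to exist already.
payload = {
    "mapAreaId": 2206101,
    "rate": 9850,
    "isClear": True,
    "isLocked": False,
    "position": 3,
    "statusCount": 1,
    "remainGridCount": 0,
}

row_id = item_db.put_map_area(user_id, dict(payload))   # first call inserts
row_id = item_db.put_map_area(user_id, dict(payload))   # same (user, mapAreaId) -> update, not a new row

for area in item_db.get_map_areas(user_id):
    print(area["mapAreaId"], area["rate"], area["isClear"])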
551
titles/chuni/schema/profile.py
Normal file
@ -0,0 +1,551 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
profile = Table(
|
||||
"chuni_profile_data",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("exp", Integer),
|
||||
Column("level", Integer),
|
||||
Column("point", Integer),
|
||||
Column("frameId", Integer),
|
||||
Column("isMaimai", Boolean),
|
||||
Column("trophyId", Integer),
|
||||
Column("userName", String(25)),
|
||||
Column("isWebJoin", Boolean),
|
||||
Column("playCount", Integer),
|
||||
Column("lastGameId", String(25)),
|
||||
Column("totalPoint", BigInteger),
|
||||
Column("characterId", Integer),
|
||||
Column("firstGameId", String(25)),
|
||||
Column("friendCount", Integer),
|
||||
Column("lastPlaceId", Integer),
|
||||
Column("nameplateId", Integer),
|
||||
Column("totalMapNum", Integer),
|
||||
Column("lastAllNetId", Integer),
|
||||
Column("lastClientId", String(25)),
|
||||
Column("lastPlayDate", String(25)),
|
||||
Column("lastRegionId", Integer),
|
||||
Column("playerRating", Integer),
|
||||
Column("totalHiScore", Integer),
|
||||
Column("webLimitDate", String(25)),
|
||||
Column("firstPlayDate", String(25)),
|
||||
Column("highestRating", Integer),
|
||||
Column("lastPlaceName", String(25)),
|
||||
Column("multiWinCount", Integer),
|
||||
Column("acceptResCount", Integer),
|
||||
Column("lastRegionName", String(25)),
|
||||
Column("lastRomVersion", String(25)),
|
||||
Column("multiPlayCount", Integer),
|
||||
Column("firstRomVersion", String(25)),
|
||||
Column("lastDataVersion", String(25)),
|
||||
Column("requestResCount", Integer),
|
||||
Column("successResCount", Integer),
|
||||
Column("eventWatchedDate", String(25)),
|
||||
Column("firstDataVersion", String(25)),
|
||||
Column("reincarnationNum", Integer),
|
||||
Column("playedTutorialBit", Integer),
|
||||
Column("totalBasicHighScore", Integer),
|
||||
Column("totalExpertHighScore", Integer),
|
||||
Column("totalMasterHighScore", Integer),
|
||||
Column("totalRepertoireCount", Integer),
|
||||
Column("firstTutorialCancelNum", Integer),
|
||||
Column("totalAdvancedHighScore", Integer),
|
||||
Column("masterTutorialCancelNum", Integer),
|
||||
Column("ext1", Integer), # Added in chunew
|
||||
Column("ext2", Integer),
|
||||
Column("ext3", Integer),
|
||||
Column("ext4", Integer),
|
||||
Column("ext5", Integer),
|
||||
Column("ext6", Integer),
|
||||
Column("ext7", Integer),
|
||||
Column("ext8", Integer),
|
||||
Column("ext9", Integer),
|
||||
Column("ext10", Integer),
|
||||
Column("extStr1", String(255)),
|
||||
Column("extStr2", String(255)),
|
||||
Column("extLong1", Integer),
|
||||
Column("extLong2", Integer),
|
||||
Column("mapIconId", Integer),
|
||||
Column("compatibleCmVersion", String(25)),
|
||||
Column("medal", Integer),
|
||||
Column("voiceId", Integer),
|
||||
Column("teamId", Integer, ForeignKey("chuni_profile_team.id", ondelete="SET NULL", onupdate="SET NULL")),
|
||||
Column("avatarBack", Integer, server_default="0"),
|
||||
Column("avatarFace", Integer, server_default="0"),
|
||||
Column("eliteRankPoint", Integer, server_default="0"),
|
||||
Column("stockedGridCount", Integer, server_default="0"),
|
||||
Column("netBattleLoseCount", Integer, server_default="0"),
|
||||
Column("netBattleHostErrCnt", Integer, server_default="0"),
|
||||
Column("netBattle4thCount", Integer, server_default="0"),
|
||||
Column("overPowerRate", Integer, server_default="0"),
|
||||
Column("battleRewardStatus", Integer, server_default="0"),
|
||||
Column("avatarPoint", Integer, server_default="0"),
|
||||
Column("netBattle1stCount", Integer, server_default="0"),
|
||||
Column("charaIllustId", Integer, server_default="0"),
|
||||
Column("avatarItem", Integer, server_default="0"),
|
||||
Column("userNameEx", String(8), server_default=""),
|
||||
Column("netBattleWinCount", Integer, server_default="0"),
|
||||
Column("netBattleCorrection", Integer, server_default="0"),
|
||||
Column("classEmblemMedal", Integer, server_default="0"),
|
||||
Column("overPowerPoint", Integer, server_default="0"),
|
||||
Column("netBattleErrCnt", Integer, server_default="0"),
|
||||
Column("battleRankId", Integer, server_default="0"),
|
||||
Column("netBattle3rdCount", Integer, server_default="0"),
|
||||
Column("netBattleConsecutiveWinCount", Integer, server_default="0"),
|
||||
Column("overPowerLowerRank", Integer, server_default="0"),
|
||||
Column("avatarWear", Integer, server_default="0"),
|
||||
Column("classEmblemBase", Integer, server_default="0"),
|
||||
Column("battleRankPoint", Integer, server_default="0"),
|
||||
Column("netBattle2ndCount", Integer, server_default="0"),
|
||||
Column("totalUltimaHighScore", Integer, server_default="0"),
|
||||
Column("skillId", Integer, server_default="0"),
|
||||
Column("lastCountryCode", String(5), server_default="JPN"),
|
||||
Column("isNetBattleHost", Boolean, server_default="0"),
|
||||
Column("avatarFront", Integer, server_default="0"),
|
||||
Column("avatarSkin", Integer, server_default="0"),
|
||||
Column("battleRewardCount", Integer, server_default="0"),
|
||||
Column("battleRewardIndex", Integer, server_default="0"),
|
||||
Column("netBattlePlayCount", Integer, server_default="0"),
|
||||
Column("exMapLoopCount", Integer, server_default="0"),
|
||||
Column("netBattleEndState", Integer, server_default="0"),
|
||||
Column("avatarHead", Integer, server_default="0"),
|
||||
UniqueConstraint("user", "version", name="chuni_profile_profile_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
profile_ex = Table(
|
||||
"chuni_profile_data_ex",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("ext1", Integer),
|
||||
Column("ext2", Integer),
|
||||
Column("ext3", Integer),
|
||||
Column("ext4", Integer),
|
||||
Column("ext5", Integer),
|
||||
Column("ext6", Integer),
|
||||
Column("ext7", Integer),
|
||||
Column("ext8", Integer),
|
||||
Column("ext9", Integer),
|
||||
Column("ext10", Integer),
|
||||
Column("ext11", Integer),
|
||||
Column("ext12", Integer),
|
||||
Column("ext13", Integer),
|
||||
Column("ext14", Integer),
|
||||
Column("ext15", Integer),
|
||||
Column("ext16", Integer),
|
||||
Column("ext17", Integer),
|
||||
Column("ext18", Integer),
|
||||
Column("ext19", Integer),
|
||||
Column("ext20", Integer),
|
||||
Column("medal", Integer),
|
||||
Column("extStr1", String(255)),
|
||||
Column("extStr2", String(255)),
|
||||
Column("extStr3", String(255)),
|
||||
Column("extStr4", String(255)),
|
||||
Column("extStr5", String(255)),
|
||||
Column("voiceId", Integer),
|
||||
Column("extLong1", Integer),
|
||||
Column("extLong2", Integer),
|
||||
Column("extLong3", Integer),
|
||||
Column("extLong4", Integer),
|
||||
Column("extLong5", Integer),
|
||||
Column("mapIconId", Integer),
|
||||
Column("compatibleCmVersion", String(25)),
|
||||
UniqueConstraint("user", "version", name="chuni_profile_data_ex_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
option = Table(
|
||||
"chuni_profile_option",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("speed", Integer),
|
||||
Column("bgInfo", Integer),
|
||||
Column("rating", Integer),
|
||||
Column("privacy", Integer),
|
||||
Column("judgePos", Integer),
|
||||
Column("matching", Integer),
|
||||
Column("guideLine", Integer),
|
||||
Column("headphone", Integer),
|
||||
Column("optionSet", Integer),
|
||||
Column("fieldColor", Integer),
|
||||
Column("guideSound", Integer),
|
||||
Column("successAir", Integer),
|
||||
Column("successTap", Integer),
|
||||
Column("judgeAttack", Integer),
|
||||
Column("playerLevel", Integer),
|
||||
Column("soundEffect", Integer),
|
||||
Column("judgeJustice", Integer),
|
||||
Column("successExTap", Integer),
|
||||
Column("successFlick", Integer),
|
||||
Column("successSkill", Integer),
|
||||
Column("successSlideHold", Integer),
|
||||
Column("successTapTimbre", Integer),
|
||||
Column("ext1", Integer), # Added in chunew
|
||||
Column("ext2", Integer),
|
||||
Column("ext3", Integer),
|
||||
Column("ext4", Integer),
|
||||
Column("ext5", Integer),
|
||||
Column("ext6", Integer),
|
||||
Column("ext7", Integer),
|
||||
Column("ext8", Integer),
|
||||
Column("ext9", Integer),
|
||||
Column("ext10", Integer),
|
||||
Column("categoryDetail", Integer, server_default="0"),
|
||||
Column("judgeTimingOffset_120", Integer, server_default="0"),
|
||||
Column("resultVoiceShort", Integer, server_default="0"),
|
||||
Column("judgeAppendSe", Integer, server_default="0"),
|
||||
Column("judgeCritical", Integer, server_default="0"),
|
||||
Column("trackSkip", Integer, server_default="0"),
|
||||
Column("selectMusicFilterLv", Integer, server_default="0"),
|
||||
Column("sortMusicFilterLv", Integer, server_default="0"),
|
||||
Column("sortMusicGenre", Integer, server_default="0"),
|
||||
Column("speed_120", Integer, server_default="0"),
|
||||
Column("judgeTimingOffset", Integer, server_default="0"),
|
||||
Column("mirrorFumen", Integer, server_default="0"),
|
||||
Column("playTimingOffset_120", Integer, server_default="0"),
|
||||
Column("hardJudge", Integer, server_default="0"),
|
||||
Column("notesThickness", Integer, server_default="0"),
|
||||
Column("fieldWallPosition", Integer, server_default="0"),
|
||||
Column("playTimingOffset", Integer, server_default="0"),
|
||||
Column("fieldWallPosition_120", Integer, server_default="0"),
|
||||
UniqueConstraint("user", name="chuni_profile_option_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
option_ex = Table(
|
||||
"chuni_profile_option_ex",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("ext1", Integer),
|
||||
Column("ext2", Integer),
|
||||
Column("ext3", Integer),
|
||||
Column("ext4", Integer),
|
||||
Column("ext5", Integer),
|
||||
Column("ext6", Integer),
|
||||
Column("ext7", Integer),
|
||||
Column("ext8", Integer),
|
||||
Column("ext9", Integer),
|
||||
Column("ext10", Integer),
|
||||
Column("ext11", Integer),
|
||||
Column("ext12", Integer),
|
||||
Column("ext13", Integer),
|
||||
Column("ext14", Integer),
|
||||
Column("ext15", Integer),
|
||||
Column("ext16", Integer),
|
||||
Column("ext17", Integer),
|
||||
Column("ext18", Integer),
|
||||
Column("ext19", Integer),
|
||||
Column("ext20", Integer),
|
||||
UniqueConstraint("user", name="chuni_profile_option_ex_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
recent_rating = Table(
|
||||
"chuni_profile_recent_rating",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("recentRating", JSON),
|
||||
UniqueConstraint("user", name="chuni_profile_recent_rating_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
region = Table(
|
||||
"chuni_profile_region",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("regionId", Integer),
|
||||
Column("playCount", Integer),
|
||||
UniqueConstraint("user", "regionId", name="chuni_profile_region_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
activity = Table(
|
||||
"chuni_profile_activity",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("kind", Integer),
|
||||
Column("activityId", Integer), # Reminder: Change this to ID in base.py or the game will be sad
|
||||
Column("sortNumber", Integer),
|
||||
Column("param1", Integer),
|
||||
Column("param2", Integer),
|
||||
Column("param3", Integer),
|
||||
Column("param4", Integer),
|
||||
UniqueConstraint("user", "kind", "activityId", name="chuni_profile_activity_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
charge = Table(
|
||||
"chuni_profile_charge",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("chargeId", Integer),
|
||||
Column("stock", Integer),
|
||||
Column("purchaseDate", String(25)),
|
||||
Column("validDate", String(25)),
|
||||
Column("param1", Integer),
|
||||
Column("param2", Integer),
|
||||
Column("paramDate", String(25)),
|
||||
UniqueConstraint("user", "chargeId", name="chuni_profile_charge_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
emoney = Table(
|
||||
"chuni_profile_emoney",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("ext1", Integer),
|
||||
Column("ext2", Integer),
|
||||
Column("ext3", Integer),
|
||||
Column("type", Integer),
|
||||
Column("emoneyBrand", Integer),
|
||||
Column("emoneyCredit", Integer),
|
||||
UniqueConstraint("user", "emoneyBrand", name="chuni_profile_emoney_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
overpower = Table(
|
||||
"chuni_profile_overpower",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("genreId", Integer),
|
||||
Column("difficulty", Integer),
|
||||
Column("rate", Integer),
|
||||
Column("point", Integer),
|
||||
UniqueConstraint("user", "genreId", "difficulty", name="chuni_profile_emoney_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
team = Table(
|
||||
"chuni_profile_team",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("teamName", String(255)),
|
||||
Column("teamPoint", Integer),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
class ChuniProfileData(BaseData):
|
||||
def put_profile_data(self, aime_id: int, version: int, profile_data: Dict) -> Optional[int]:
|
||||
profile_data["user"] = aime_id
|
||||
profile_data["version"] = version
|
||||
if "accessCode" in profile_data:
|
||||
profile_data.pop("accessCode")
|
||||
|
||||
profile_data = self.fix_bools(profile_data)
|
||||
|
||||
sql = insert(profile).values(**profile_data)
|
||||
conflict = sql.on_duplicate_key_update(**profile_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_data: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_preview(self, aime_id: int, version: int) -> Optional[Row]:
|
||||
sql = select([profile, option]).join(option, profile.c.user == option.c.user).filter(
|
||||
and_(profile.c.user == aime_id, profile.c.version == version)
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_profile_data(self, aime_id: int, version: int) -> Optional[Row]:
|
||||
sql = select(profile).where(and_(
|
||||
profile.c.user == aime_id,
|
||||
profile.c.version == version,
|
||||
))
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_data_ex(self, aime_id: int, version: int, profile_ex_data: Dict) -> Optional[int]:
|
||||
profile_ex_data["user"] = aime_id
|
||||
profile_ex_data["version"] = version
|
||||
if "accessCode" in profile_ex_data:
|
||||
profile_ex_data.pop("accessCode")
|
||||
|
||||
sql = insert(profile_ex).values(**profile_ex_data)
|
||||
conflict = sql.on_duplicate_key_update(**profile_ex_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_data_ex: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_data_ex(self, aime_id: int, version: int) -> Optional[Row]:
|
||||
sql = select(profile_ex).where(and_(
|
||||
profile_ex.c.user == aime_id,
|
||||
profile_ex.c.version == version,
|
||||
))
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_option(self, aime_id: int, option_data: Dict) -> Optional[int]:
|
||||
option_data["user"] = aime_id
|
||||
|
||||
sql = insert(option).values(**option_data)
|
||||
conflict = sql.on_duplicate_key_update(**option_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_option: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_option(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(option).where(option.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_option_ex(self, aime_id: int, option_ex_data: Dict) -> Optional[int]:
|
||||
option_ex_data["user"] = aime_id
|
||||
|
||||
sql = insert(option_ex).values(**option_ex_data)
|
||||
conflict = sql.on_duplicate_key_update(**option_ex_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_option_ex: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_option_ex(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(option_ex).where(option_ex.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_recent_rating(self, aime_id: int, recent_rating_data: List[Dict]) -> Optional[int]:
|
||||
sql = insert(recent_rating).values(
|
||||
user = aime_id,
|
||||
recentRating = recent_rating_data
|
||||
)
|
||||
conflict = sql.on_duplicate_key_update(recentRating = recent_rating_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_recent_rating: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_recent_rating(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(recent_rating).where(recent_rating.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_profile_activity(self, aime_id: int, activity_data: Dict) -> Optional[int]:
|
||||
# The game just uses "id" but we need to distinguish that from the db column "id"
|
||||
activity_data["user"] = aime_id
|
||||
activity_data["activityId"] = activity_data["id"]
|
||||
activity_data.pop("id")
|
||||
|
||||
sql = insert(activity).values(**activity_data)
|
||||
conflict = sql.on_duplicate_key_update(**activity_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_activity: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_activity(self, aime_id: int, kind: int) -> Optional[List[Row]]:
|
||||
sql = select(activity).where(and_(
|
||||
activity.c.user == aime_id,
|
||||
activity.c.kind == kind
|
||||
))
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_profile_charge(self, aime_id: int, charge_data: Dict) -> Optional[int]:
|
||||
charge_data["user"] = aime_id
|
||||
|
||||
sql = insert(charge).values(**charge_data)
|
||||
conflict = sql.on_duplicate_key_update(**charge_data)
|
||||
result = self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(f"put_profile_charge: Failed to update! aime_id: {aime_id}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_charge(self, aime_id: int) -> Optional[List[Row]]:
|
||||
sql = select(charge).where(charge.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def add_profile_region(self, aime_id: int, region_id: int) -> Optional[int]:
|
||||
pass
|
||||
|
||||
def get_profile_regions(self, aime_id: int) -> Optional[List[Row]]:
|
||||
pass
|
||||
|
||||
def put_profile_emoney(self, aime_id: int, emoney_data: Dict) -> Optional[int]:
|
||||
emoney_data["user"] = aime_id
|
||||
|
||||
sql = insert(emoney).values(**emoney_data)
|
||||
conflict = sql.on_duplicate_key_update(**emoney_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_emoney(self, aime_id: int) -> Optional[List[Row]]:
|
||||
sql = select(emoney).where(emoney.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_profile_overpower(self, aime_id: int, overpower_data: Dict) -> Optional[int]:
|
||||
overpower_data["user"] = aime_id
|
||||
|
||||
sql = insert(overpower).values(**overpower_data)
|
||||
conflict = sql.on_duplicate_key_update(**overpower_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_profile_overpower(self, aime_id: int) -> Optional[List[Row]]:
|
||||
sql = select(overpower).where(overpower.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
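ChuniProfileData follows the same upsert pattern as the item schema: put_profile_data injects `user` and `version`, drops the `accessCode` the game sends with the save, runs fix_bools, and upserts on the `(user, version)` unique key, so each player keeps one row per game version. A hedged calling sketch (the `profile_db` instance, aime id, and version value are placeholders):

# Sketch only: profile_db, the aime id, and the version number are assumed.
save = {
    "accessCode": "00000000000000000000",  # stripped by put_profile_data before the insert
    "userName": "PLAYER",
    "level": 12,
    "playerRating": 1450,
    "lastPlayDate": "2023-01-01 12:00:00.0",
}
profile_db.put_profile_data(10000, 11, dict(save))
row = profile_db.get_profile_data(10000, 11)
if row is not None:
    print(row["userName"], row["playerRating"])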
|
178
titles/chuni/schema/score.py
Normal file
@ -0,0 +1,178 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
course = Table(
|
||||
"chuni_score_course",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("courseId", Integer),
|
||||
Column("classId", Integer),
|
||||
Column("playCount", Integer),
|
||||
Column("scoreMax", Integer),
|
||||
Column("isFullCombo", Boolean),
|
||||
Column("isAllJustice", Boolean),
|
||||
Column("isSuccess", Boolean),
|
||||
Column("scoreRank", Integer),
|
||||
Column("eventId", Integer),
|
||||
Column("lastPlayDate", String(25)),
|
||||
Column("param1", Integer),
|
||||
Column("param2", Integer),
|
||||
Column("param3", Integer),
|
||||
Column("param4", Integer),
|
||||
Column("isClear", Boolean),
|
||||
UniqueConstraint("user", "courseId", name="chuni_score_course_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
best_score = Table(
|
||||
"chuni_score_best",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("musicId", Integer),
|
||||
Column("level", Integer),
|
||||
Column("playCount", Integer),
|
||||
Column("scoreMax", Integer),
|
||||
Column("resRequestCount", Integer),
|
||||
Column("resAcceptCount", Integer),
|
||||
Column("resSuccessCount", Integer),
|
||||
Column("missCount", Integer),
|
||||
Column("maxComboCount", Integer),
|
||||
Column("isFullCombo", Boolean),
|
||||
Column("isAllJustice", Boolean),
|
||||
Column("isSuccess", Boolean),
|
||||
Column("fullChain", Integer),
|
||||
Column("maxChain", Integer),
|
||||
Column("scoreRank", Integer),
|
||||
Column("isLock", Boolean),
|
||||
Column("ext1", Integer),
|
||||
Column("theoryCount", Integer),
|
||||
UniqueConstraint("user", "musicId", "level", name="chuni_score_best_uk"),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
playlog = Table(
|
||||
"chuni_score_playlog",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", ForeignKey("aime_user.id", ondelete="cascade", onupdate="cascade"), nullable=False),
|
||||
Column("orderId", Integer),
|
||||
Column("sortNumber", Integer),
|
||||
Column("placeId", Integer),
|
||||
Column("playDate", String(20)),
|
||||
Column("userPlayDate", String(20)),
|
||||
Column("musicId", Integer),
|
||||
Column("level", Integer),
|
||||
Column("customId", Integer),
|
||||
Column("playedUserId1", Integer),
|
||||
Column("playedUserId2", Integer),
|
||||
Column("playedUserId3", Integer),
|
||||
Column("playedUserName1", String(20)),
|
||||
Column("playedUserName2", String(20)),
|
||||
Column("playedUserName3", String(20)),
|
||||
Column("playedMusicLevel1", Integer),
|
||||
Column("playedMusicLevel2", Integer),
|
||||
Column("playedMusicLevel3", Integer),
|
||||
Column("playedCustom1", Integer),
|
||||
Column("playedCustom2", Integer),
|
||||
Column("playedCustom3", Integer),
|
||||
Column("track", Integer),
|
||||
Column("score", Integer),
|
||||
Column("rank", Integer),
|
||||
Column("maxCombo", Integer),
|
||||
Column("maxChain", Integer),
|
||||
Column("rateTap", Integer),
|
||||
Column("rateHold", Integer),
|
||||
Column("rateSlide", Integer),
|
||||
Column("rateAir", Integer),
|
||||
Column("rateFlick", Integer),
|
||||
Column("judgeGuilty", Integer),
|
||||
Column("judgeAttack", Integer),
|
||||
Column("judgeJustice", Integer),
|
||||
Column("judgeCritical", Integer),
|
||||
Column("eventId", Integer),
|
||||
Column("playerRating", Integer),
|
||||
Column("isNewRecord", Boolean),
|
||||
Column("isFullCombo", Boolean),
|
||||
Column("fullChainKind", Integer),
|
||||
Column("isAllJustice", Boolean),
|
||||
Column("isContinue", Boolean),
|
||||
Column("isFreeToPlay", Boolean),
|
||||
Column("characterId", Integer),
|
||||
Column("skillId", Integer),
|
||||
Column("playKind", Integer),
|
||||
Column("isClear", Boolean),
|
||||
Column("skillLevel", Integer),
|
||||
Column("skillEffect", Integer),
|
||||
Column("placeName", String(255)),
|
||||
Column("isMaimai", Boolean),
|
||||
Column("commonId", Integer),
|
||||
Column("charaIllustId", Integer),
|
||||
Column("romVersion", String(255)),
|
||||
Column("judgeHeaven", Integer),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
|
||||
class ChuniScoreData(BaseData):
|
||||
def get_courses(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(course).where(course.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_course(self, aime_id: int, course_data: Dict) -> Optional[int]:
|
||||
course_data["user"] = aime_id
|
||||
course_data = self.fix_bools(course_data)
|
||||
|
||||
sql = insert(course).values(**course_data)
|
||||
conflict = sql.on_duplicate_key_update(**course_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_scores(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(best_score).where(best_score.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_score(self, aime_id: int, score_data: Dict) -> Optional[int]:
|
||||
score_data["user"] = aime_id
|
||||
score_data = self.fix_bools(score_data)
|
||||
|
||||
sql = insert(best_score).values(**score_data)
|
||||
conflict = sql.on_duplicate_key_update(**score_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_playlogs(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(playlog).where(playlog.c.user == aime_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None: return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_playlog(self, aime_id: int, playlog_data: Dict) -> Optional[int]:
|
||||
playlog_data["user"] = aime_id
|
||||
playlog_data = self.fix_bools(playlog_data)
|
||||
|
||||
sql = insert(playlog).values(**playlog_data)
|
||||
conflict = sql.on_duplicate_key_update(**playlog_data)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None: return None
|
||||
return result.lastrowid
|
223
titles/chuni/schema/static.py
Normal file
@ -0,0 +1,223 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert

from core.data.schema import BaseData, metadata

events = Table(
    "chuni_static_events",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("version", Integer, nullable=False),
    Column("eventId", Integer),
    Column("type", Integer),
    Column("name", String(255)),
    Column("enabled", Boolean, server_default="1"),
    UniqueConstraint("version", "eventId", name="chuni_static_events_uk"),
    mysql_charset='utf8mb4'
)

music = Table(
    "chuni_static_music",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("version", Integer, nullable=False),
    Column("songId", Integer),
    Column("chartId", Integer),
    Column("title", String(255)),
    Column("artist", String(255)),
    Column("level", Float),
    Column("genre", String(255)),
    Column("jacketPath", String(255)),
    Column("worldsEndTag", String(20)),
    UniqueConstraint("version", "songId", "chartId", name="chuni_static_music_uk"),
    mysql_charset='utf8mb4'
)

charge = Table(
    "chuni_static_charge",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("version", Integer, nullable=False),
    Column("chargeId", Integer),
    Column("name", String(255)),
    Column("expirationDays", Integer),
    Column("consumeType", Integer),
    Column("sellingAppeal", Boolean),
    Column("enabled", Boolean, server_default="1"),
    UniqueConstraint("version", "chargeId", name="chuni_static_charge_uk"),
    mysql_charset='utf8mb4'
)

avatar = Table(
    "chuni_static_avatar",
    metadata,
    Column("id", Integer, primary_key=True, nullable=False),
    Column("version", Integer, nullable=False),
    Column("avatarAccessoryId", Integer),
    Column("name", String(255)),
    Column("category", Integer),
    Column("iconPath", String(255)),
    Column("texturePath", String(255)),
    UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
    mysql_charset='utf8mb4'
)

class ChuniStaticData(BaseData):
    def put_event(self, version: int, event_id: int, type: int, name: str) -> Optional[int]:
        sql = insert(events).values(
            version = version,
            eventId = event_id,
            type = type,
            name = name
        )

        conflict = sql.on_duplicate_key_update(
            name = name
        )

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def update_event(self, version: int, event_id: int, enabled: bool) -> Optional[bool]:
        sql = events.update(and_(events.c.version == version, events.c.eventId == event_id)).values(
            enabled = enabled
        )

        result = self.execute(sql)
        if result is None:
            self.logger.warn(f"update_event: failed to update event! version: {version}, event_id: {event_id}, enabled: {enabled}")
            return None

        event = self.get_event(version, event_id)
        if event is None:
            self.logger.warn(f"update_event: failed to fetch event {event_id} after updating")
            return None
        return event["enabled"]

    def get_event(self, version: int, event_id: int) -> Optional[Row]:
        sql = select(events).where(and_(events.c.version == version, events.c.eventId == event_id))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchone()

    def get_enabled_events(self, version: int) -> Optional[List[Row]]:
        sql = select(events).where(and_(events.c.version == version, events.c.enabled == True))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def get_events(self, version: int) -> Optional[List[Row]]:
        sql = select(events).where(events.c.version == version)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def put_music(self, version: int, song_id: int, chart_id: int, title: int, artist: str,
        level: float, genre: str, jacketPath: str, we_tag: str) -> Optional[int]:

        sql = insert(music).values(
            version = version,
            songId = song_id,
            chartId = chart_id,
            title = title,
            artist = artist,
            level = level,
            genre = genre,
            jacketPath = jacketPath,
            worldsEndTag = we_tag,
        )

        conflict = sql.on_duplicate_key_update(
            title = title,
            artist = artist,
            level = level,
            genre = genre,
            jacketPath = jacketPath,
            worldsEndTag = we_tag,
        )

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def put_charge(self, version: int, charge_id: int, name: str, expiration_days: int,
        consume_type: int, selling_appeal: bool) -> Optional[int]:
        sql = insert(charge).values(
            version = version,
            chargeId = charge_id,
            name = name,
            expirationDays = expiration_days,
            consumeType = consume_type,
            sellingAppeal = selling_appeal,
        )

        conflict = sql.on_duplicate_key_update(
            name = name,
            expirationDays = expiration_days,
            consumeType = consume_type,
            sellingAppeal = selling_appeal,
        )

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid

    def get_enabled_charges(self, version: int) -> Optional[List[Row]]:
        sql = select(charge).where(and_(
            charge.c.version == version,
            charge.c.enabled == True
        ))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def get_charges(self, version: int) -> Optional[List[Row]]:
        sql = select(charge).where(charge.c.version == version)

        result = self.execute(sql)
        if result is None: return None
        return result.fetchall()

    def get_music_chart(self, version: int, song_id: int, chart_id: int) -> Optional[List[Row]]:
        sql = select(music).where(and_(
            music.c.version == version,
            music.c.songId == song_id,
            music.c.chartId == chart_id
        ))

        result = self.execute(sql)
        if result is None: return None
        return result.fetchone()

    def put_avatar(self, version: int, avatarAccessoryId: int, name: str, category: int, iconPath: str, texturePath: str) -> Optional[int]:
        sql = insert(avatar).values(
            version = version,
            avatarAccessoryId = avatarAccessoryId,
            name = name,
            category = category,
            iconPath = iconPath,
            texturePath = texturePath,
        )

        conflict = sql.on_duplicate_key_update(
            name = name,
            category = category,
            iconPath = iconPath,
            texturePath = texturePath,
        )

        result = self.execute(conflict)
        if result is None: return None
        return result.lastrowid
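Every chuni_static_* table above is keyed on `version` plus a game-side id, so option data for several CHUNITHM releases can coexist in one database and the importer can be re-run per version without duplicating rows. A hedged sketch of seeding and reading back one chart (assumes a working ChuniStaticData instance named `static_db`; the ids and strings are placeholders):

version = 11  # placeholder for whichever ChuniConstants version is being imported
static_db.put_music(version, 2075, 3, "Example Song", "Example Artist",
                    13.7, "VARIETY", "CHU_UI_Jacket_2075.dds", None)

chart = static_db.get_music_chart(version, 2075, 3)
if chart is not None:
    print(chart["title"], chart["level"], chart["worldsEndTag"])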
16
titles/chuni/star.py
Normal file
@ -0,0 +1,16 @@
from typing import Dict

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniStar(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_STAR

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.20.00"
        return ret
16
titles/chuni/starplus.py
Normal file
@ -0,0 +1,16 @@
from typing import Dict

from core.config import CoreConfig
from titles.chuni.base import ChuniBase
from titles.chuni.const import ChuniConstants
from titles.chuni.config import ChuniConfig

class ChuniStarPlus(ChuniBase):
    def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
        super().__init__(core_cfg, game_cfg)
        self.version = ChuniConstants.VER_CHUNITHM_STAR_PLUS

    def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
        ret = super().handle_get_game_setting_api_request(data)
        ret["gameSetting"]["dataVersion"] = "1.25.00"
        return ret
21
titles/cxb/__init__.py
Normal file
@ -0,0 +1,21 @@
from titles.cxb.index import CxbServlet
from titles.cxb.const import CxbConstants
from titles.cxb.database import CxbData
from titles.cxb.read import CxbReader

index = CxbServlet
database = CxbData
reader = CxbReader

use_default_title = False
include_protocol = True
title_secure = True
game_codes = [CxbConstants.GAME_CODE]
trailing_slash = True
use_default_host = False

include_port = True
uri = "http://$h:$p/" # If you care about the allnet response you're probably running with no SSL
host = ""

current_schema_version = 1
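Module-level attributes like these are what the title loader inspects: `index` names the servlet class, `game_codes` lists the codes this package answers for, and the remaining flags (`uri`, `include_port`, `title_secure`, ...) describe how clients should be pointed at it. A hedged sketch of the same contract for a made-up title package (every module and name below is illustrative, not a real ARTEMiS title):

# titles/exampletitle/__init__.py -- hypothetical package, shown only to illustrate the contract
from titles.exampletitle.index import ExampleServlet      # assumed servlet class
from titles.exampletitle.database import ExampleData      # assumed database class

index = ExampleServlet
database = ExampleData

use_default_title = True
include_protocol = True
title_secure = False
game_codes = ["SXXX"]   # placeholder game code
trailing_slash = True
use_default_host = True
include_port = True
uri = "http://$h:$p/"   # same $h/$p placeholder scheme as the cxb module above
host = ""

current_schema_version = 1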
426
titles/cxb/base.py
Normal file
@ -0,0 +1,426 @@
import logging
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime

from core.config import CoreConfig
from titles.cxb.config import CxbConfig
from titles.cxb.const import CxbConstants
from titles.cxb.database import CxbData

class CxbBase():
    def __init__(self, cfg: CoreConfig, game_cfg: CxbConfig) -> None:
        self.config = cfg # Config file
        self.game_config = game_cfg
        self.data = CxbData(cfg) # Database
        self.game = CxbConstants.GAME_CODE
        self.logger = logging.getLogger("cxb")
        self.version = CxbConstants.VER_CROSSBEATS_REV

    def handle_action_rpreq_request(self, data: Dict) -> Dict:
        return({})

    def handle_action_hitreq_request(self, data: Dict) -> Dict:
        return({"data":[]})

    def handle_auth_usercheck_request(self, data: Dict) -> Dict:
        profile = self.data.profile.get_profile_index(0, data["usercheck"]["authid"], self.version)
        if profile is not None:
            self.logger.info(f"User {data['usercheck']['authid']} has CXB profile")
            return({"exist": "true", "logout": "true"})

        self.logger.info(f"No profile for aime id {data['usercheck']['authid']}")
        return({"exist": "false", "logout": "true"})

    def handle_auth_entry_request(self, data: Dict) -> Dict:
        self.logger.info(f"New profile for {data['entry']['authid']}")
        return({"token": data["entry"]["authid"], "uid": data["entry"]["authid"]})

    def handle_auth_login_request(self, data: Dict) -> Dict:
        profile = self.data.profile.get_profile_index(0, data["login"]["authid"], self.version)

        if profile is not None:
            self.logger.info(f"Login user {data['login']['authid']}")
            return({"token": data["login"]["authid"], "uid": data["login"]["authid"]})

        self.logger.warn(f"User {data['login']['authid']} does not have a profile")
        return({})

    def handle_action_loadrange_request(self, data: Dict) -> Dict:
        range_start = data['loadrange']['range'][0]
        range_end = data['loadrange']['range'][1]
        uid = data['loadrange']['uid']

        self.logger.info(f"Load data for {uid}")
        profile = self.data.profile.get_profile(uid, self.version)
        songs = self.data.score.get_best_scores(uid)

        data1 = []
        index = []
        versionindex = []

        for profile_index in profile:
            profile_data = profile_index["data"]

            if int(range_start) == 800000:
                return({"index":range_start, "data":[], "version":10400})

            if not ( int(range_start) <= int(profile_index[3]) <= int(range_end) ):
                continue
            #Prevent loading of the coupons within the profile to use the force unlock instead
            elif 500 <= int(profile_index[3]) <= 510:
                continue
            #Prevent loading of songs saved in the profile
            elif 100000 <= int(profile_index[3]) <= 110000:
                continue
            #Prevent loading of the shop list / unlocked titles & icons saved in the profile
            elif 200000 <= int(profile_index[3]) <= 210000:
                continue
            #Prevent loading of stories in the profile
            elif 900000 <= int(profile_index[3]) <= 900200:
                continue
            else:
                index.append(profile_index[3])
                data1.append(b64encode(bytes(json.dumps(profile_data, separators=(',', ':')), 'utf-8')).decode('utf-8'))

        '''
        100000 = Songs
        200000 = Shop
        300000 = Courses
        400000 = Events
        500000 = Challenges
        600000 = Bonuses
        700000 = rcLog
        800000 = Partners
        900000 = Stories
        '''

        # Coupons
        for i in range(500,510):
            index.append(str(i))
            couponid = int(i) - 500
            dataValue = [{
                "couponId":str(couponid),
                "couponNum":"1",
                "couponLog":[],
            }]
            data1.append(b64encode(bytes(json.dumps(dataValue[0], separators=(',', ':')), 'utf-8')).decode('utf-8'))

        # ShopList_Title
        for i in range(200000,201451):
            index.append(str(i))
            shopid = int(i) - 200000
            dataValue = [{
                "shopId":shopid,
                "shopState":"2",
                "isDisable":"t",
                "isDeleted":"f",
                "isSpecialFlag":"f"
            }]
            data1.append(b64encode(bytes(json.dumps(dataValue[0], separators=(',', ':')), 'utf-8')).decode('utf-8'))

        #ShopList_Icon
        for i in range(202000,202264):
            index.append(str(i))
            shopid = int(i) - 200000
            dataValue = [{
                "shopId":shopid,
                "shopState":"2",
                "isDisable":"t",
                "isDeleted":"f",
                "isSpecialFlag":"f"
            }]
            data1.append(b64encode(bytes(json.dumps(dataValue[0], separators=(',', ':')), 'utf-8')).decode('utf-8'))

        #Stories
        for i in range(900000,900003):
            index.append(str(i))
            storyid = int(i) - 900000
            dataValue = [{
                "storyId":storyid,
                "unlockState1":["t"] * 10,
                "unlockState2":["t"] * 10,
                "unlockState3":["t"] * 10,
                "unlockState4":["t"] * 10,
                "unlockState5":["t"] * 10,
                "unlockState6":["t"] * 10,
                "unlockState7":["t"] * 10,
                "unlockState8":["t"] * 10,
                "unlockState9":["t"] * 10,
                "unlockState10":["t"] * 10,
                "unlockState11":["t"] * 10,
                "unlockState12":["t"] * 10,
                "unlockState13":["t"] * 10,
                "unlockState14":["t"] * 10,
                "unlockState15":["t"] * 10,
                "unlockState16":["t"] * 10
            }]
            data1.append(b64encode(bytes(json.dumps(dataValue[0], separators=(',', ':')), 'utf-8')).decode('utf-8'))

        for song in songs:
            song_data = song["data"]
            songCode = []

            songCode.append({
                "mcode": song_data['mcode'],
                "musicState": song_data['musicState'],
                "playCount": song_data['playCount'],
                "totalScore": song_data['totalScore'],
                "highScore": song_data['highScore'],
                "everHighScore": song_data['everHighScore'] if 'everHighScore' in song_data else ["0","0","0","0","0"],
                "clearRate": song_data['clearRate'],
                "rankPoint": song_data['rankPoint'],
                "normalCR": song_data['normalCR'] if 'normalCR' in song_data else ["0","0","0","0","0"],
                "survivalCR": song_data['survivalCR'] if 'survivalCR' in song_data else ["0","0","0","0","0"],
                "ultimateCR": song_data['ultimateCR'] if 'ultimateCR' in song_data else ["0","0","0","0","0"],
                "nohopeCR": song_data['nohopeCR'] if 'nohopeCR' in song_data else ["0","0","0","0","0"],
                "combo": song_data['combo'],
                "coupleUserId": song_data['coupleUserId'],
                "difficulty": song_data['difficulty'],
                "isFullCombo": song_data['isFullCombo'],
                "clearGaugeType": song_data['clearGaugeType'],
                "fieldType": song_data['fieldType'],
                "gameType": song_data['gameType'],
                "grade": song_data['grade'],
                "unlockState": song_data['unlockState'],
                "extraState": song_data['extraState']
            })
            index.append(song_data['index'])
            data1.append(b64encode(bytes(json.dumps(songCode[0], separators=(',', ':')), 'utf-8')).decode('utf-8'))

        for v in index:
            try:
                v_profile = self.data.profile.get_profile_index(0, uid, self.version)
                v_profile_data = v_profile["data"]
                versionindex.append(int(v_profile_data["appVersion"]))
            except:
                versionindex.append('10400')

        return({"index":index, "data":data1, "version":versionindex})

    def handle_action_saveindex_request(self, data: Dict) -> Dict:
        save_data = data['saveindex']
        gameversion = "" # default so the version check below never hits an undefined name

        try:
            #REV Omnimix Version Fetcher
            gameversion = data['saveindex']['data'][0][2]
            self.logger.warning(f"Game Version is {gameversion}")
        except:
            pass

        if "10205" in gameversion:
            self.logger.info(f"Saving CrossBeats REV profile for {data['saveindex']['uid']}")
            #Alright.... time to bring the jank code

            for value in data['saveindex']['data']:

                if 'playedUserId' in value[1]:
                    self.data.profile.put_profile(data['saveindex']['uid'], self.version, value[0], value[1])
                if 'mcode' not in value[1]:
                    self.data.profile.put_profile(data['saveindex']['uid'], self.version, value[0], value[1])
                if 'shopId' in value:
                    continue
                if 'mcode' in value[1] and 'musicState' in value[1]:
                    song_json = json.loads(value[1])

                    songCode = []
                    songCode.append({
                        "mcode": song_json['mcode'],
                        "musicState": song_json['musicState'],
                        "playCount": song_json['playCount'],
                        "totalScore": song_json['totalScore'],
                        "highScore": song_json['highScore'],
                        "clearRate": song_json['clearRate'],
                        "rankPoint": song_json['rankPoint'],
                        "combo": song_json['combo'],
                        "coupleUserId": song_json['coupleUserId'],
                        "difficulty": song_json['difficulty'],
                        "isFullCombo": song_json['isFullCombo'],
                        "clearGaugeType": song_json['clearGaugeType'],
                        "fieldType": song_json['fieldType'],
                        "gameType": song_json['gameType'],
                        "grade": song_json['grade'],
                        "unlockState": song_json['unlockState'],
                        "extraState": song_json['extraState'],
                        "index": value[0]
                    })
                    self.data.score.put_best_score(data['saveindex']['uid'], song_json['mcode'], self.version, value[0], songCode[0])
            return({})
        else:
            self.logger.info(f"Saving CrossBeats REV Sunrise profile for {data['saveindex']['uid']}")

            #Sunrise
            try:
                profileIndex = save_data['index'].index('0')
            except:
                return({"data":""}) #Maybe

            profile = json.loads(save_data["data"][profileIndex])
            aimeId = profile["aimeId"]
            i = 0

            for index, value in enumerate(data["saveindex"]["data"]):
                if int(data["saveindex"]["index"][index]) == 101:
                    self.data.profile.put_profile(aimeId, self.version, data["saveindex"]["index"][index], value)
                if int(data["saveindex"]["index"][index]) >= 700000 and int(data["saveindex"]["index"][index]) <= 701000:
                    self.data.profile.put_profile(aimeId, self.version, data["saveindex"]["index"][index], value)
                if int(data["saveindex"]["index"][index]) >= 500 and int(data["saveindex"]["index"][index]) <= 510:
                    self.data.profile.put_profile(aimeId, self.version, data["saveindex"]["index"][index], value)
                if 'playedUserId' in value:
                    self.data.profile.put_profile(aimeId, self.version, data["saveindex"]["index"][index], json.loads(value))
                if 'mcode' not in value and "normalCR" not in value:
                    self.data.profile.put_profile(aimeId, self.version, data["saveindex"]["index"][index], json.loads(value))
                if 'shopId' in value:
                    continue

            # MusicList Index for the profile
            indexSongList = []
            for value in data["saveindex"]["index"]:
                if int(value) in range(100000,110000):
                    indexSongList.append(value)

            for index, value in enumerate(data["saveindex"]["data"]):
                if 'mcode' not in value:
                    continue
                if 'playedUserId' in value:
                    continue

                data1 = json.loads(value)

                songCode = []
                songCode.append({
                    "mcode": data1['mcode'],
                    "musicState": data1['musicState'],
                    "playCount": data1['playCount'],
                    "totalScore": data1['totalScore'],
                    "highScore": data1['highScore'],
                    "everHighScore": data1['everHighScore'],
                    "clearRate": data1['clearRate'],
                    "rankPoint": data1['rankPoint'],
                    "normalCR": data1['normalCR'],
                    "survivalCR": data1['survivalCR'],
                    "ultimateCR": data1['ultimateCR'],
                    "nohopeCR": data1['nohopeCR'],
                    "combo": data1['combo'],
                    "coupleUserId": data1['coupleUserId'],
                    "difficulty": data1['difficulty'],
                    "isFullCombo": data1['isFullCombo'],
                    "clearGaugeType": data1['clearGaugeType'],
                    "fieldType": data1['fieldType'],
                    "gameType": data1['gameType'],
                    "grade": data1['grade'],
                    "unlockState": data1['unlockState'],
                    "extraState": data1['extraState'],
                    "index": indexSongList[i]
                })

                self.data.score.put_best_score(aimeId, data1['mcode'], self.version, indexSongList[i], songCode[0])
                i += 1
            return({})

    def handle_action_sprankreq_request(self, data: Dict) -> Dict:
        uid = data['sprankreq']['uid']
        self.logger.info(f"Get best rankings for {uid}")
        p = self.data.score.get_best_rankings(uid)

        rankList: list[Dict[str, Any]] = []

        for rank in p:
            if rank["song_id"] is not None:
                rankList.append({
                    "sc": [rank["score"],rank["song_id"]],
                    "rid": rank["rev_id"],
                    "clear": rank["clear"]
                })
            else:
                rankList.append({
                    "sc": [rank["score"]],
                    "rid": rank["rev_id"],
                    "clear": rank["clear"]
                })

        return({
            "uid": data["sprankreq"]["uid"],
            "aid": data["sprankreq"]["aid"],
            "rank": rankList,
            "rankx":[1,1,1]
        })

    def handle_action_getadv_request(self, data: Dict) -> Dict:
        return({"data":[{"r":"1","i":"100300","c":"20"}]})

    def handle_action_getmsg_request(self, data: Dict) -> Dict:
        return({"msgs":[]})

    def handle_auth_logout_request(self, data: Dict) -> Dict:
        return({"auth":True})

    def handle_action_rankreg_request(self, data: Dict) -> Dict:
        uid = data['rankreg']['uid']
        self.logger.info(f"Put {len(data['rankreg']['data'])} rankings for {uid}")

        for rid in data['rankreg']['data']:
            #REV S2
            if "clear" in rid:
                try:
                    self.data.score.put_ranking(user_id=uid, rev_id=int(rid["rid"]), song_id=int(rid["sc"][1]), score=int(rid["sc"][0]), clear=rid["clear"])
                except:
                    self.data.score.put_ranking(user_id=uid, rev_id=int(rid["rid"]), song_id=0, score=int(rid["sc"][0]), clear=rid["clear"])
            #REV
            else:
                try:
                    self.data.score.put_ranking(user_id=uid, rev_id=int(rid["rid"]), song_id=int(rid["sc"][1]), score=int(rid["sc"][0]), clear=0)
                except:
                    self.data.score.put_ranking(user_id=uid, rev_id=int(rid["rid"]), song_id=0, score=int(rid["sc"][0]), clear=0)
        return({})

    def handle_action_addenergy_request(self, data: Dict) -> Dict:
        uid = data['addenergy']['uid']
        self.logger.info(f"Add energy to user {uid}")
        profile = self.data.profile.get_profile_index(0, uid, self.version)
        data1 = profile["data"]
        p = self.data.item.get_energy(uid)

        if not p:
            # No energy row yet for this user: create one and grant the first 5
            self.data.item.put_energy(uid, 5)

            return({
                "class": data1["myClass"],
                "granted": "5",
                "total": "5",
                "threshold": "1000"
            })

        energy = p["energy"]
        array = []

        newenergy = int(energy) + 5
        self.data.item.put_energy(uid, newenergy)

        if int(energy) <= 995:
            array.append({
                "class": data1["myClass"],
                "granted": "5",
                "total": str(energy),
                "threshold": "1000"
            })
        else:
            array.append({
                "class": data1["myClass"],
                "granted": "0",
                "total": str(energy),
                "threshold": "1000"
            })
        return array[0]

    def handle_action_eventreq_request(self, data: Dict) -> Dict:
        self.logger.info(data)
        return {"eventreq": ""}

    def handle_action_stampreq_request(self, data: Dict) -> Dict:
        self.logger.info(data)
        return {"stampreq": ""}
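A minimal, illustrative sketch (not part of the commit) of consuming a loadrange response from CxbBase.handle_action_loadrange_request above: each entry in "data" is a base64-encoded, compact JSON blob keyed by the matching numeric index in "index". The cxb instance, uid and range values here are assumed for the example.

import json
from base64 import b64decode

resp = cxb.handle_action_loadrange_request({"loadrange": {"uid": 1, "range": ["0", "1000000"]}})
for idx, blob in zip(resp["index"], resp["data"]):
    entry = json.loads(b64decode(blob))  # e.g. a coupon, shop, story or song dict
    print(idx, entry)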
41
titles/cxb/config.py
Normal file
@ -0,0 +1,41 @@
from core.config import CoreConfig

class CxbServerConfig():
    def __init__(self, parent_config: "CxbConfig"):
        self.__config = parent_config

    @property
    def enable(self) -> bool:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'enable', default=True)

    @property
    def loglevel(self) -> int:
        return CoreConfig.str_to_loglevel(CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'loglevel', default="info"))

    @property
    def hostname(self) -> str:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'hostname', default="localhost")

    @property
    def ssl_enable(self) -> bool:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'ssl_enable', default=False)

    @property
    def port(self) -> int:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'port', default=8082)

    @property
    def port_secure(self) -> int:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'port_secure', default=443)

    @property
    def ssl_cert(self) -> str:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'ssl_cert', default="cert/title.crt")

    @property
    def ssl_key(self) -> str:
        return CoreConfig.get_config_field(self.__config, 'cxb', 'server', 'ssl_key', default="cert/title.key")

class CxbConfig(dict):
    def __init__(self) -> None:
        self.server = CxbServerConfig(self)
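A small usage sketch (not part of the commit) of the config class above, mirroring what CxbServlet.__init__ does; it assumes a config/cxb.yaml laid out the way CoreConfig.get_config_field expects for the 'cxb', 'server' path.

import yaml
from titles.cxb.config import CxbConfig

game_cfg = CxbConfig()
game_cfg.update(yaml.safe_load(open("config/cxb.yaml")))  # config folder path is an assumption

# Each property falls back to the default baked into CxbServerConfig when its key is absent
print(game_cfg.server.enable, game_cfg.server.port, game_cfg.server.loglevel)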
15
titles/cxb/const.py
Normal file
@ -0,0 +1,15 @@
class CxbConstants():
    GAME_CODE = "SDCA"

    CONFIG_NAME = "cxb.yaml"

    VER_CROSSBEATS_REV = 0
    VER_CROSSBEATS_REV_SUNRISE_S1 = 1
    VER_CROSSBEATS_REV_SUNRISE_S2 = 2
    VER_CROSSBEATS_REV_SUNRISE_S2_OMNI = 3

    VERSION_NAMES = ("crossbeats REV.", "crossbeats REV. SUNRISE", "crossbeats REV. SUNRISE S2", "crossbeats REV. SUNRISE S2 Omnimix")

    @classmethod
    def game_ver_to_string(cls, ver: int):
        return cls.VERSION_NAMES[ver]
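A tiny illustration (not part of the commit) of the helper above; an out-of-range version raises IndexError, which is exactly what CxbReader.__init__ later catches to reject an invalid version argument.

from titles.cxb.const import CxbConstants

print(CxbConstants.game_ver_to_string(CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2))
# -> "crossbeats REV. SUNRISE S2"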
13
titles/cxb/database.py
Normal file
@ -0,0 +1,13 @@

from core.data import Data
from core.config import CoreConfig
from titles.cxb.schema import CxbProfileData, CxbScoreData, CxbItemData, CxbStaticData

class CxbData(Data):
    def __init__(self, cfg: CoreConfig) -> None:
        super().__init__(cfg)

        self.profile = CxbProfileData(self.config, self.session)
        self.score = CxbScoreData(self.config, self.session)
        self.item = CxbItemData(self.config, self.session)
        self.static = CxbStaticData(self.config, self.session)
145
titles/cxb/index.py
Normal file
@ -0,0 +1,145 @@
from twisted.web.http import Request
from twisted.web import resource, server
from twisted.internet import reactor, endpoints
import yaml
import json
import re
import inflection
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
from typing import Dict

from core.config import CoreConfig
from titles.cxb.config import CxbConfig
from titles.cxb.const import CxbConstants
from titles.cxb.rev import CxbRev
from titles.cxb.rss1 import CxbRevSunriseS1
from titles.cxb.rss2 import CxbRevSunriseS2

class CxbServlet(resource.Resource):
    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        self.isLeaf = True
        self.cfg_dir = cfg_dir
        self.core_cfg = core_cfg
        self.game_cfg = CxbConfig()
        self.game_cfg.update(yaml.safe_load(open(f"{cfg_dir}/cxb.yaml")))

        self.logger = logging.getLogger("cxb")
        log_fmt_str = "[%(asctime)s] CXB | %(levelname)s | %(message)s"
        log_fmt = logging.Formatter(log_fmt_str)
        fileHandler = TimedRotatingFileHandler("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), encoding='utf8',
            when="d", backupCount=10)

        fileHandler.setFormatter(log_fmt)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(log_fmt)

        self.logger.addHandler(fileHandler)
        self.logger.addHandler(consoleHandler)

        self.logger.setLevel(self.game_cfg.server.loglevel)
        coloredlogs.install(level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str)

        self.versions = [
            CxbRev(core_cfg, self.game_cfg),
            CxbRevSunriseS1(core_cfg, self.game_cfg),
            CxbRevSunriseS2(core_cfg, self.game_cfg),
        ]

    def setup(self):
        if self.game_cfg.server.enable:
            endpoints.serverFromString(reactor, f"tcp:{self.game_cfg.server.port}:interface={self.core_cfg.server.listen_address}")\
                .listen(server.Site(CxbServlet(self.core_cfg, self.cfg_dir)))

            if self.core_cfg.server.is_develop and self.game_cfg.server.ssl_enable:
                endpoints.serverFromString(reactor, f"ssl:{self.game_cfg.server.port_secure}"\
                    f":interface={self.core_cfg.server.listen_address}:privateKey={self.game_cfg.server.ssl_key}:"\
                    f"certKey={self.game_cfg.server.ssl_cert}")\
                    .listen(server.Site(CxbServlet(self.core_cfg, self.cfg_dir)))

                self.logger.info(f"Crossbeats title server ready on port {self.game_cfg.server.port} & {self.game_cfg.server.port_secure}")
            else:
                self.logger.info(f"Crossbeats title server ready on port {self.game_cfg.server.port}")

    def render_POST(self, request: Request):
        version = 0
        internal_ver = 0
        func_to_find = ""
        cmd = ""
        subcmd = ""
        req_url = request.uri.decode()
        url_split = req_url.split("/")
        req_bytes = request.content.getvalue()

        try:
            req_json: Dict = json.loads(req_bytes)

        except Exception as e:
            try:
                req_json: Dict = json.loads(req_bytes.decode().replace('"', '\\"').replace("'", '"'))

            except Exception as f:
                self.logger.warn(f"Error decoding json: {e} / {f} - {req_url} - {req_bytes}")
                return b""

        if req_json == {}:
            self.logger.warn(f"Empty json request to {req_url}")
            return b""

        cmd = url_split[len(url_split) - 1]
        subcmd = list(req_json.keys())[0]

        if subcmd == "dldate":
            if not type(req_json["dldate"]) is dict or "filetype" not in req_json["dldate"]:
                self.logger.warn(f"Malformed dldate request: {req_url} {req_json}")
                return b""

            filetype = req_json["dldate"]["filetype"]
            filetype_split = filetype.split("/")
            version = int(filetype_split[0])
            filetype_inflect_split = inflection.underscore(filetype).split("/")

            match = re.match(r"^([A-Za-z]*)(\d\d\d\d)$", filetype_split[len(filetype_split) - 1])
            if match:
                subcmd = f"{inflection.underscore(match.group(1))}xxxx"
            else:
                subcmd = f"{filetype_inflect_split[len(filetype_inflect_split) - 1]}"
        else:
            filetype = subcmd

        func_to_find = f"handle_{cmd}_{subcmd}_request"

        if version <= 10102:
            version_string = "Rev"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV

        elif version == 10113 or version == 10103:
            version_string = "Rev SunriseS1"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S1

        elif version >= 10114 or version == 10104:
            version_string = "Rev SunriseS2"
            internal_ver = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2

        else:
            version_string = "Base"

        self.logger.info(f"{version_string} Request {req_url} -> {filetype}")
        self.logger.debug(req_json)

        try:
            handler = getattr(self.versions[internal_ver], func_to_find)
            resp = handler(req_json)

        except AttributeError as e:
            self.logger.warning(f"Unhandled {version_string} request {req_url} - {e}")
            resp = {}

        except Exception as e:
            self.logger.error(f"Error handling {version_string} method {req_url} - {e}")
            raise

        self.logger.debug(f"{version_string} Response {resp}")
        return json.dumps(resp, ensure_ascii=False).encode("utf-8")
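An illustrative trace (not part of the commit) of the dldate dispatch in render_POST above: the last path component of filetype is either letters plus four digits (collapsed to an "xxxx" handler) or a CamelCase name (underscored). The filetype strings below are made-up examples chosen to land on handlers that exist in CxbRev.

import re
import inflection

def func_for(cmd: str, filetype: str) -> str:
    filetype_split = filetype.split("/")
    match = re.match(r"^([A-Za-z]*)(\d\d\d\d)$", filetype_split[-1])
    if match:
        subcmd = f"{inflection.underscore(match.group(1))}xxxx"
    else:
        subcmd = inflection.underscore(filetype).split("/")[-1]
    return f"handle_{cmd}_{subcmd}_request"

print(func_for("data", "10104/Course/Cs0001"))  # -> handle_data_csxxxx_request
print(func_for("data", "10104/MusicList"))      # -> handle_data_music_list_request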
62
titles/cxb/read.py
Normal file
@ -0,0 +1,62 @@
from typing import Optional, Dict, List
from os import walk, path
import urllib
import csv

from read import BaseReader
from core.config import CoreConfig
from titles.cxb.database import CxbData
from titles.cxb.const import CxbConstants

class CxbReader(BaseReader):
    def __init__(self, config: CoreConfig, version: int, bin_arg: Optional[str], opt_arg: Optional[str], extra: Optional[str]) -> None:
        super().__init__(config, version, bin_arg, opt_arg, extra)
        self.data = CxbData(config)

        try:
            self.logger.info(f"Start importer for {CxbConstants.game_ver_to_string(version)}")
        except IndexError:
            self.logger.error(f"Invalid project cxb version {version}")
            exit(1)

    def read(self) -> None:
        pull_bin_ram = True

        if not path.exists(f"{self.bin_dir}"):
            self.logger.warn(f"Couldn't find csv file in {self.bin_dir}, skipping")
            pull_bin_ram = False

        if pull_bin_ram:
            self.read_csv(f"{self.bin_dir}")

    def read_csv(self, bin_dir: str) -> None:
        self.logger.info(f"Read csv from {bin_dir}")

        try:
            fullPath = bin_dir + "/export.csv"
            with open(fullPath, encoding="UTF-8") as fp:
                reader = csv.DictReader(fp)
                for row in reader:
                    song_id = row["mcode"]
                    index = row["index"]
                    title = row["name"]
                    artist = row["artist"]
                    genre = row["category"]

                    if not "N/A" in row["standard"]:
                        self.logger.info(f"Added song {song_id} chart 0")
                        self.data.static.put_music(self.version, song_id, index, 0, title, artist, genre, int(row["standard"].replace("Standard ","").replace("N/A","0")))
                    if not "N/A" in row["hard"]:
                        self.logger.info(f"Added song {song_id} chart 1")
                        self.data.static.put_music(self.version, song_id, index, 1, title, artist, genre, int(row["hard"].replace("Hard ","").replace("N/A","0")))
                    if not "N/A" in row["master"]:
                        self.logger.info(f"Added song {song_id} chart 2")
                        self.data.static.put_music(self.version, song_id, index, 2, title, artist, genre, int(row["master"].replace("Master ","").replace("N/A","0")))
                    if not "N/A" in row["unlimited"]:
                        self.logger.info(f"Added song {song_id} chart 3")
                        self.data.static.put_music(self.version, song_id, index, 3, title, artist, genre, int(row["unlimited"].replace("Unlimited ","").replace("N/A","0")))
                    if not "N/A" in row["easy"]:
                        self.logger.info(f"Added song {song_id} chart 4")
                        self.data.static.put_music(self.version, song_id, index, 4, title, artist, genre, int(row["easy"].replace("Easy ","").replace("N/A","0")))
        except:
            self.logger.warn(f"Couldn't read csv file in {self.bin_dir}, skipping")
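For reference, a sketch (not part of the commit) of the export.csv layout read_csv above expects; the column names come from the row[...] lookups and the values are invented.

# mcode,index,name,artist,category,standard,hard,master,unlimited,easy
# examp1,42,Example Song,Example Artist,ORIGINAL,Standard 4,Hard 7,Master 9,N/A,Easy 2
#
# Each difficulty cell is either "<ChartName> <level>" or "N/A"; "N/A" charts are skipped,
# everything else is stripped down to the numeric level and stored via put_music().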
256
titles/cxb/rev.py
Normal file
@ -0,0 +1,256 @@
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime

from core.config import CoreConfig
from core.data import Data, cached
from titles.cxb.config import CxbConfig
from titles.cxb.base import CxbBase
from titles.cxb.const import CxbConstants

class CxbRev(CxbBase):
    def __init__(self, cfg: CoreConfig, game_cfg: CxbConfig) -> None:
        super().__init__(cfg, game_cfg)
        self.version = CxbConstants.VER_CROSSBEATS_REV

    def handle_data_path_list_request(self, data: Dict) -> Dict:
        return { "data": "" }

    def handle_data_putlog_request(self, data: Dict) -> Dict:
        if data["putlog"]["type"] == "ResultLog":
            score_data = json.loads(data["putlog"]["data"])
            userid = score_data['usid']

            self.data.score.put_playlog(userid, score_data['mcode'], score_data['difficulty'], score_data["score"], int(Decimal(score_data["clearrate"]) * 100), score_data["flawless"], score_data["super"], score_data["cool"], score_data["fast"], score_data["fast2"], score_data["slow"], score_data["slow2"], score_data["fail"], score_data["combo"])
            return({"data":True})
        return {"data": True }

    @cached(lifetime=86400)
    def handle_data_music_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/MusicArchiveList.csv") as music:
            lines = music.readlines()
            for line in lines:
                line_split = line.split(',')
                ret_str += f"{line_split[0]},{line_split[1]},{line_split[2]},{line_split[3]},{line_split[4]},{line_split[5]},{line_split[6]},{line_split[7]},{line_split[8]},{line_split[9]},{line_split[10]},{line_split[11]},{line_split[12]},{line_split[13]},{line_split[14]},\r\n"

        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_icon_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ItemListIcon\r\n"
        with open(r"titles/cxb/rev_data/Item/ItemArchiveList_Icon.csv", encoding="utf-8") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_skin_notes_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ItemListSkinNotes\r\n"
        with open(r"titles/cxb/rev_data/Item/ItemArchiveList_SkinNotes.csv", encoding="utf-8") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_skin_effect_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ItemListSkinEffect\r\n"
        with open(r"titles/cxb/rev_data/Item/ItemArchiveList_SkinEffect.csv", encoding="utf-8") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_skin_bg_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ItemListSkinBg\r\n"
        with open(r"titles/cxb/rev_data/Item/ItemArchiveList_SkinBg.csv", encoding="utf-8") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_title_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ItemListTitle\r\n"
        with open(r"titles/cxb/rev_data/Item/ItemList_Title.csv", encoding="shift-jis") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_shop_list_music_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ShopListMusic\r\n"
        with open(r"titles/cxb/rev_data/Shop/ShopList_Music.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_shop_list_icon_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ShopListIcon\r\n"
        with open(r"titles/cxb/rev_data/Shop/ShopList_Icon.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_shop_list_title_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ShopListTitle\r\n"
        with open(r"titles/cxb/rev_data/Shop/ShopList_Title.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_shop_list_skin_hud_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_shop_list_skin_arrow_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_shop_list_skin_hit_request(self, data: Dict) -> Dict:
        return({"data":""})

    @cached(lifetime=86400)
    def handle_data_shop_list_sale_request(self, data: Dict) -> Dict:
        ret_str = "\r\n#ShopListSale\r\n"
        with open(r"titles/cxb/rev_data/Shop/ShopList_Sale.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    @cached(lifetime=86400)
    def handle_data_exxxxx_request(self, data: Dict) -> Dict:
        extra_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""
        with open(fr"titles/cxb/rev_data/Ex000{extra_num}.csv", encoding="shift-jis") as stage:
            lines = stage.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_free_coupon_request(self, data: Dict) -> Dict:
        return({"data": ""})

    @cached(lifetime=86400)
    def handle_data_news_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/NewsList.csv", encoding="UTF-8") as news:
            lines = news.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_tips_request(self, data: Dict) -> Dict:
        return({"data":""})

    @cached(lifetime=86400)
    def handle_data_license_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/License_Offline.csv", encoding="UTF-8") as lic:
            lines = lic.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_course_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/Course/CourseList.csv", encoding="UTF-8") as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_csxxxx_request(self, data: Dict) -> Dict:
        # Removed the CSVs since the format isnt quite right
        extra_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""
        with open(fr"titles/cxb/rev_data/Course/Cs000{extra_num}.csv", encoding="shift-jis") as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_mission_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/MissionList.csv", encoding="shift-jis") as mission:
            lines = mission.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
        return({"data": ""})

    @cached(lifetime=86400)
    def handle_data_event_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/Event/EventArchiveList.csv", encoding="shift-jis") as mission:
            lines = mission.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_event_music_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_mission_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_achievement_single_high_score_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_achievement_single_accumulation_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_ranking_high_score_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_ranking_accumulation_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_ranking_stamp_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_ranking_store_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    def handle_data_event_ranking_area_list_request(self, data: Dict) -> Dict:
        return({"data": ""})

    @cached(lifetime=86400)
    def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rev_data/Event/EventStampList.csv", encoding="shift-jis") as event:
            lines = event.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
        return({"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"})

    def handle_data_server_state_request(self, data: Dict) -> Dict:
        return({"data": True})
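Illustrative only (not part of the commit): the ResultLog fields that handle_data_putlog_request above unpacks from the JSON string in data['putlog']['data']; the values are invented.

import json

example_putlog = {
    "putlog": {
        "type": "ResultLog",
        "data": json.dumps({
            "usid": 1, "mcode": "examp1", "difficulty": 2, "score": 987654,
            "clearrate": "98.76", "flawless": 500, "super": 20, "cool": 5,
            "fast": 3, "fast2": 1, "slow": 2, "slow2": 1, "fail": 0, "combo": 520
        })
    }
}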
New binary files (contents not shown):
BIN  titles/cxb/rev_data/Course/CourseList.csv
BIN  titles/cxb/rev_data/Course/Cs0000.csv
BIN  titles/cxb/rev_data/Course/Cs0001.csv
BIN  titles/cxb/rev_data/Course/Cs0002.csv
BIN  titles/cxb/rev_data/Course/Cs0003.csv
BIN  titles/cxb/rev_data/Course/Cs0004.csv
BIN  titles/cxb/rev_data/Course/Cs0005.csv
BIN  titles/cxb/rev_data/Course/Cs0006.csv
BIN  titles/cxb/rev_data/Course/Cs0007.csv
BIN  titles/cxb/rev_data/Course/Cs0008.csv
BIN  titles/cxb/rev_data/Course/Cs0009.csv
BIN  titles/cxb/rev_data/Event/EventArchiveList.csv
BIN  titles/cxb/rev_data/Event/EventStampList.csv
BIN  titles/cxb/rev_data/Ex0000.csv
BIN  titles/cxb/rev_data/Ex0001.csv
BIN  titles/cxb/rev_data/ExtraStageList.csv
BIN  titles/cxb/rev_data/Item/ItemArchiveList_Icon.csv
BIN  titles/cxb/rev_data/Item/ItemArchiveList_SkinBg.csv
BIN  titles/cxb/rev_data/Item/ItemArchiveList_SkinEffect.csv
BIN  titles/cxb/rev_data/Item/ItemArchiveList_SkinNotes.csv
BIN  titles/cxb/rev_data/Item/ItemList_Title.csv
BIN  titles/cxb/rev_data/License_Offline.csv
BIN  titles/cxb/rev_data/MissionList.csv
BIN  titles/cxb/rev_data/MusicArchiveList.csv
BIN  titles/cxb/rev_data/NewsList.csv
BIN  titles/cxb/rev_data/Shop/ShopList_Icon.csv
BIN  titles/cxb/rev_data/Shop/ShopList_Music.csv
BIN  titles/cxb/rev_data/Shop/ShopList_Sale.csv
BIN  titles/cxb/rev_data/Shop/ShopList_SkinBg.csv
BIN  titles/cxb/rev_data/Shop/ShopList_SkinEffect.csv
BIN  titles/cxb/rev_data/Shop/ShopList_SkinNotes.csv
BIN  titles/cxb/rev_data/Shop/ShopList_Title.csv
BIN  titles/cxb/rev_data/SkinArchiveList.csv
257
titles/cxb/rss1.py
Normal file
@ -0,0 +1,257 @@
import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime

from core.config import CoreConfig
from core.data import Data, cached
from titles.cxb.config import CxbConfig
from titles.cxb.base import CxbBase
from titles.cxb.const import CxbConstants

class CxbRevSunriseS1(CxbBase):
    def __init__(self, cfg: CoreConfig, game_cfg: CxbConfig) -> None:
        super().__init__(cfg, game_cfg)
        self.version = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S1

    def handle_data_path_list_request(self, data: Dict) -> Dict:
        return { "data": "" }

    @cached(lifetime=86400)
    def handle_data_music_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rss1_data/MusicArchiveList.csv") as music:
            lines = music.readlines()
            for line in lines:
                line_split = line.split(',')
                ret_str += f"{line_split[0]},{line_split[1]},{line_split[2]},{line_split[3]},{line_split[4]},{line_split[5]},{line_split[6]},{line_split[7]},{line_split[8]},{line_split[9]},{line_split[10]},{line_split[11]},{line_split[12]},{line_split[13]},{line_split[14]},\r\n"

        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
        #ItemListIcon load
        ret_str = "#ItemListIcon\r\n"
        with open(r"titles/cxb/rss1_data/Item/ItemList_Icon.csv", encoding="shift-jis") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ItemListTitle load
        ret_str += "\r\n#ItemListTitle\r\n"
        with open(r"titles/cxb/rss1_data/Item/ItemList_Title.csv", encoding="shift-jis") as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
        #ShopListIcon load
        ret_str = "#ShopListIcon\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_Icon.csv", encoding="utf-8") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListMusic load
        ret_str += "\r\n#ShopListMusic\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_Music.csv", encoding="utf-8") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListSale load
        ret_str += "\r\n#ShopListSale\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_Sale.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListSkinBg load
        ret_str += "\r\n#ShopListSkinBg\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_SkinBg.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListSkinEffect load
        ret_str += "\r\n#ShopListSkinEffect\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_SkinEffect.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListSkinNotes load
        ret_str += "\r\n#ShopListSkinNotes\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_SkinNotes.csv", encoding="shift-jis") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        #ShopListTitle load
        ret_str += "\r\n#ShopListTitle\r\n"
        with open(r"titles/cxb/rss1_data/Shop/ShopList_Title.csv", encoding="utf-8") as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_ex0001_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_oe0001_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_free_coupon_request(self, data: Dict) -> Dict:
        return({"data":""})

    @cached(lifetime=86400)
    def handle_data_news_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rss1_data/NewsList.csv", encoding="UTF-8") as news:
            lines = news.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_tips_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_release_info_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    @cached(lifetime=86400)
    def handle_data_random_music_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rss1_data/MusicArchiveList.csv") as music:
            lines = music.readlines()
            count = 0
            for line in lines:
                line_split = line.split(",")
                ret_str += str(count) + "," + line_split[0] + "," + line_split[0] + ",\r\n"

        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_license_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rss1_data/License.csv", encoding="UTF-8") as licenses:
            lines = licenses.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_course_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/rss1_data/Course/CourseList.csv", encoding="UTF-8") as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    @cached(lifetime=86400)
    def handle_data_csxxxx_request(self, data: Dict) -> Dict:
        extra_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""
        with open(fr"titles/cxb/rss1_data/Course/Cs{extra_num}.csv", encoding="shift-jis") as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data":ret_str})

    def handle_data_mission_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_partner_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        # Lord forgive me for the sins I am about to commit
        for i in range(0,10):
            ret_str += f"80000{i},{i},{i},0,10000,,\r\n"
            ret_str += f"80000{i},{i},{i},1,10500,,\r\n"
            ret_str += f"80000{i},{i},{i},2,10500,,\r\n"
        for i in range(10,13):
            ret_str += f"8000{i},{i},{i},0,10000,,\r\n"
            ret_str += f"8000{i},{i},{i},1,10500,,\r\n"
            ret_str += f"8000{i},{i},{i},2,10500,,\r\n"
        ret_str += "\r\n---\r\n0,150,100,100,100,100,\r\n"
        for i in range(1,130):
            ret_str += f"{i},100,100,100,100,100,\r\n"

        ret_str += "---\r\n"
        return({"data": ret_str})

    @cached(lifetime=86400)
    def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
        partner_num = int(data["dldate"]["filetype"][-4:])
        ret_str = f"{partner_num},,{partner_num},1,10000,\r\n"
        with open(r"titles/cxb/rss1_data/Partner0000.csv") as partner:
            lines = partner.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return({"data": ret_str})

    def handle_data_server_state_request(self, data: Dict) -> Dict:
        return({"data": True})

    def handle_data_settings_request(self, data: Dict) -> Dict:
        return({"data": "2,\r\n"})

    def handle_data_story_list_request(self, data: Dict) -> Dict:
        #story id, story name, game version, start time, end time, course arc, unlock flag, song mcode for menu
        ret_str = "\r\n"
        ret_str += f"st0000,RISING PURPLE,10104,1464370990,4096483201,Cs1000,-1,purple,\r\n"
        ret_str += f"st0001,REBEL YELL,10104,1467999790,4096483201,Cs1000,-1,chaset,\r\n"
        ret_str += f"st0002,REMNANT,10104,1502127790,4096483201,Cs1000,-1,overcl,\r\n"
        return({"data": ret_str})

    def handle_data_stxxxx_request(self, data: Dict) -> Dict:
        story_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""
        for i in range(1,11):
            ret_str += f"{i},st000{story_num}_{i-1},,,,,,,,,,,,,,,,1,,-1,1,\r\n"
        return({"data": ret_str})

    def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
        return({"data":"Cs1032,1,1,1,1,1,1,1,1,1,1,\r\n"})

    def handle_data_premium_list_request(self, data: Dict) -> Dict:
        return({"data": "1,,,,10,,,,,99,,,,,,,,,100,,\r\n"})

    def handle_data_event_list_request(self, data: Dict) -> Dict:
        return({"data":""})

    def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
        event_id = data["dldate"]["filetype"].split("/")[2]
        if "EventStampMapListCs1002" in event_id:
            return({"data":"1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"})
        elif "EventStampList" in event_id:
            return({"data":"Cs1002,1,1,1,1,1,1,1,1,1,1,\r\n"})
        else:
            return({"data":""})

    def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
        event_id = data["dldate"]["filetype"].split("/")[2]
        if "EventStampMapListCs1002" in event_id:
            return({"data":"1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"})
        else:
            return({"data":""})
New binary files (contents not shown):
BIN  titles/cxb/rss1_data/Course/CourseList.csv
BIN  titles/cxb/rss1_data/Course/Cs0000.csv
BIN  titles/cxb/rss1_data/Course/Cs0001.csv
BIN  titles/cxb/rss1_data/Course/Cs0002.csv
BIN  titles/cxb/rss1_data/Course/Cs0003.csv
BIN  titles/cxb/rss1_data/Course/Cs0004.csv
BIN  titles/cxb/rss1_data/Ex0000.csv
BIN  titles/cxb/rss1_data/ExtraStageList.csv
BIN  titles/cxb/rss1_data/Item/ItemList_Icon.csv
BIN  titles/cxb/rss1_data/Item/ItemList_Title.csv
BIN  titles/cxb/rss1_data/License.csv
BIN  titles/cxb/rss1_data/MissionList.csv
BIN  titles/cxb/rss1_data/MusicArchiveList.csv
BIN  titles/cxb/rss1_data/NewsList.csv
BIN  titles/cxb/rss1_data/Partner0000.csv
BIN  titles/cxb/rss1_data/Shop/ShopList_Icon.csv
BIN  titles/cxb/rss1_data/Shop/ShopList_Music.csv
BIN  titles/cxb/rss1_data/Shop/ShopList_Sale.csv
BIN  titles/cxb/rss1_data/Shop/ShopList_SkinBg.csv
BIN  titles/cxb/rss1_data/Shop/ShopList_SkinEffect.csv
Some files were not shown because too many files have changed in this diff.