Merge branch 'starlette_cleanup' into develop
This commit is contained in:
commit
df4c667adf
@ -1,6 +1,6 @@
|
||||
from core.config import CoreConfig
|
||||
from core.allnet import AllnetServlet
|
||||
from core.aimedb import AimedbFactory
|
||||
from core.allnet import AllnetServlet, BillingServlet
|
||||
from core.aimedb import AimedbServlette
|
||||
from core.title import TitleServlet
|
||||
from core.utils import Utils
|
||||
from core.mucha import MuchaServlet
|
||||
|
@ -102,7 +102,7 @@ class ADBHeader:
|
||||
magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id = struct.unpack_from("<5H6sI12s", data)
|
||||
head = cls(magic, protocol_ver, cmd, length, status, game_id, store_id, keychip_id)
|
||||
|
||||
if head.length != len(data):
|
||||
if head.length > len(data):
|
||||
raise ADBHeaderException(f"Length is incorrect! Expect {head.length}, got {len(data)}")
|
||||
|
||||
return head
|
||||
|
200
core/aimedb.py
200
core/aimedb.py
@ -1,9 +1,7 @@
|
||||
from twisted.internet.protocol import Factory, Protocol
|
||||
import logging, coloredlogs
|
||||
from Crypto.Cipher import AES
|
||||
import struct
|
||||
from typing import Dict, Tuple, Callable, Union
|
||||
from typing_extensions import Final
|
||||
from typing import Dict, Tuple, Callable, Union, Optional
|
||||
import asyncio
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
|
||||
from core.config import CoreConfig
|
||||
@ -11,15 +9,37 @@ from core.utils import create_sega_auth_key
|
||||
from core.data import Data
|
||||
from .adb_handlers import *
|
||||
|
||||
|
||||
class AimedbProtocol(Protocol):
|
||||
class AimedbServlette():
|
||||
request_list: Dict[int, Tuple[Callable[[bytes, int], Union[ADBBaseResponse, bytes]], int, str]] = {}
|
||||
|
||||
def __init__(self, core_cfg: CoreConfig) -> None:
|
||||
self.logger = logging.getLogger("aimedb")
|
||||
self.config = core_cfg
|
||||
def __init__(self, core_cfg: CoreConfig) -> None:
|
||||
self.config = core_cfg
|
||||
self.data = Data(core_cfg)
|
||||
if core_cfg.aimedb.key == "":
|
||||
|
||||
self.logger = logging.getLogger("aimedb")
|
||||
if not hasattr(self.logger, "initted"):
|
||||
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(self.config.aimedb.loglevel)
|
||||
coloredlogs.install(
|
||||
level=core_cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initted = True
|
||||
|
||||
if not core_cfg.aimedb.key:
|
||||
self.logger.error("!!!KEY NOT SET!!!")
|
||||
exit(1)
|
||||
|
||||
@ -40,27 +60,30 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
self.register_handler(0x13, 0x14, self.handle_log_ex, 'aime_log_ex')
|
||||
self.register_handler(0x64, 0x65, self.handle_hello, 'hello')
|
||||
self.register_handler(0x66, 0, self.handle_goodbye, 'goodbye')
|
||||
|
||||
|
||||
def register_handler(self, cmd: int, resp:int, handler: Callable[[bytes, int], Union[ADBBaseResponse, bytes]], name: str) -> None:
|
||||
self.request_list[cmd] = (handler, resp, name)
|
||||
|
||||
def start(self) -> None:
|
||||
self.logger.info(f"Start on port {self.config.aimedb.port}")
|
||||
asyncio.create_task(asyncio.start_server(self.dataReceived, self.config.server.listen_address, self.config.aimedb.port))
|
||||
|
||||
async def dataReceived(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
|
||||
self.logger.debug(f"Connection made from {writer.get_extra_info('peername')[0]}")
|
||||
while True:
|
||||
try:
|
||||
data: bytes = await reader.read(4096)
|
||||
if len(data) == 0:
|
||||
self.logger.debug("Connection closed")
|
||||
return
|
||||
await self.process_data(data, reader, writer)
|
||||
await writer.drain()
|
||||
except ConnectionResetError as e:
|
||||
self.logger.debug("Connection reset, disconnecting")
|
||||
return
|
||||
|
||||
def append_padding(self, data: bytes):
|
||||
"""Appends 0s to the end of the data until it's at the correct size"""
|
||||
length = struct.unpack_from("<H", data, 6)
|
||||
padding_size = length[0] - len(data)
|
||||
data += bytes(padding_size)
|
||||
return data
|
||||
|
||||
def connectionMade(self) -> None:
|
||||
self.logger.debug(f"{self.transport.getPeer().host} Connected")
|
||||
|
||||
def connectionLost(self, reason) -> None:
|
||||
self.logger.debug(
|
||||
f"{self.transport.getPeer().host} Disconnected - {reason.value}"
|
||||
)
|
||||
|
||||
def dataReceived(self, data: bytes) -> None:
|
||||
async def process_data(self, data: bytes, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> Optional[bytes]:
|
||||
addr = writer.get_extra_info('peername')[0]
|
||||
cipher = AES.new(self.config.aimedb.key.encode(), AES.MODE_ECB)
|
||||
|
||||
try:
|
||||
@ -68,9 +91,9 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to decrypt {data.hex()} because {e}")
|
||||
return None
|
||||
return
|
||||
|
||||
self.logger.debug(f"{self.transport.getPeer().host} wrote {decrypted.hex()}")
|
||||
self.logger.debug(f"{addr} wrote {decrypted.hex()}")
|
||||
|
||||
try:
|
||||
head = ADBHeader.from_data(decrypted)
|
||||
@ -79,7 +102,9 @@ class AimedbProtocol(Protocol):
|
||||
self.logger.error(f"Error parsing ADB header: {e}")
|
||||
try:
|
||||
encrypted = cipher.encrypt(ADBBaseResponse().make())
|
||||
self.transport.write(encrypted)
|
||||
writer.write(encrypted)
|
||||
await writer.drain()
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to encrypt default response because {e}")
|
||||
@ -89,46 +114,51 @@ class AimedbProtocol(Protocol):
|
||||
if head.keychip_id == "ABCD1234567" or head.store_id == 0xfff0:
|
||||
self.logger.warning(f"Request from uninitialized AMLib: {vars(head)}")
|
||||
|
||||
if head.cmd == 0x66:
|
||||
self.logger.info("Goodbye")
|
||||
writer.close()
|
||||
return
|
||||
|
||||
handler, resp_code, name = self.request_list.get(head.cmd, (self.handle_default, None, 'default'))
|
||||
|
||||
if resp_code is None:
|
||||
self.logger.warning(f"No handler for cmd {hex(head.cmd)}")
|
||||
|
||||
elif resp_code > 0:
|
||||
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {self.transport.getPeer().host}")
|
||||
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {addr}")
|
||||
|
||||
resp = handler(decrypted, resp_code)
|
||||
resp = await handler(decrypted, resp_code)
|
||||
|
||||
if type(resp) == ADBBaseResponse or issubclass(type(resp), ADBBaseResponse):
|
||||
resp_bytes = resp.make()
|
||||
if len(resp_bytes) != resp.head.length:
|
||||
resp_bytes = self.append_padding(resp_bytes)
|
||||
|
||||
elif type(resp) == bytes:
|
||||
resp_bytes = resp
|
||||
|
||||
elif resp is None: # Nothing to send, probably a goodbye
|
||||
self.logger.warn(f"None return by handler for {name}")
|
||||
return
|
||||
|
||||
else:
|
||||
self.logger.error(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
|
||||
raise TypeError(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
|
||||
|
||||
try:
|
||||
try:
|
||||
encrypted = cipher.encrypt(resp_bytes)
|
||||
self.logger.debug(f"Response {resp_bytes.hex()}")
|
||||
self.transport.write(encrypted)
|
||||
writer.write(encrypted)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to encrypt {resp_bytes.hex()} because {e}")
|
||||
|
||||
def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
|
||||
|
||||
async def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
|
||||
req = ADBHeader.from_data(data)
|
||||
return ADBBaseResponse(resp_code, length, 1, req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
|
||||
|
||||
def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
return self.handle_default(data, resp_code)
|
||||
async def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
return await self.handle_default(data, resp_code)
|
||||
|
||||
def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
async def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
h = ADBHeader.from_data(data)
|
||||
if h.protocol_ver >= 0x3030:
|
||||
req = h
|
||||
@ -143,12 +173,12 @@ class AimedbProtocol(Protocol):
|
||||
# We don't currently support campaigns
|
||||
return resp
|
||||
|
||||
def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
async def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
req = ADBLookupRequest(data)
|
||||
user_id = self.data.card.get_user_id_from_card(req.access_code)
|
||||
is_banned = self.data.card.get_card_banned(req.access_code)
|
||||
is_locked = self.data.card.get_card_locked(req.access_code)
|
||||
|
||||
user_id = await self.data.card.get_user_id_from_card(req.access_code)
|
||||
is_banned = await self.data.card.get_card_banned(req.access_code)
|
||||
is_locked = await self.data.card.get_card_locked(req.access_code)
|
||||
|
||||
ret = ADBLookupResponse.from_req(req.head, user_id)
|
||||
if is_banned and is_locked:
|
||||
ret.head.status = ADBStatus.BAN_SYS_USER
|
||||
@ -162,12 +192,12 @@ class AimedbProtocol(Protocol):
|
||||
)
|
||||
return ret
|
||||
|
||||
def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
req = ADBLookupRequest(data)
|
||||
user_id = self.data.card.get_user_id_from_card(req.access_code)
|
||||
user_id = await self.data.card.get_user_id_from_card(req.access_code)
|
||||
|
||||
is_banned = self.data.card.get_card_banned(req.access_code)
|
||||
is_locked = self.data.card.get_card_locked(req.access_code)
|
||||
is_banned = await self.data.card.get_card_banned(req.access_code)
|
||||
is_locked = await self.data.card.get_card_locked(req.access_code)
|
||||
|
||||
ret = ADBLookupExResponse.from_req(req.head, user_id)
|
||||
if is_banned and is_locked:
|
||||
@ -191,7 +221,7 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
return ret
|
||||
|
||||
def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
|
||||
"""
|
||||
On official, I think a card has to be registered for this to actually work, but
|
||||
I'm making the executive decision to not implement that and just kick back our
|
||||
@ -207,7 +237,7 @@ class AimedbProtocol(Protocol):
|
||||
)
|
||||
return ADBFelicaLookupResponse.from_req(req.head, ac)
|
||||
|
||||
def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
|
||||
"""
|
||||
I've never seen this used.
|
||||
"""
|
||||
@ -215,14 +245,14 @@ class AimedbProtocol(Protocol):
|
||||
ac = self.data.card.to_access_code(req.idm)
|
||||
|
||||
if self.config.server.allow_user_registration:
|
||||
user_id = self.data.user.create_user()
|
||||
user_id = await self.data.user.create_user()
|
||||
|
||||
if user_id is None:
|
||||
self.logger.error("Failed to register user!")
|
||||
user_id = -1
|
||||
|
||||
else:
|
||||
card_id = self.data.card.create_card(user_id, ac)
|
||||
card_id = await self.data.card.create_card(user_id, ac)
|
||||
|
||||
if card_id is None:
|
||||
self.logger.error("Failed to register card!")
|
||||
@ -239,10 +269,10 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
return ADBFelicaLookupResponse.from_req(req.head, ac)
|
||||
|
||||
def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
|
||||
req = ADBFelicaLookup2Request(data)
|
||||
access_code = self.data.card.to_access_code(req.idm)
|
||||
user_id = self.data.card.get_user_id_from_card(access_code=access_code)
|
||||
user_id = await self.data.card.get_user_id_from_card(access_code=access_code)
|
||||
|
||||
if user_id is None:
|
||||
user_id = -1
|
||||
@ -263,7 +293,7 @@ class AimedbProtocol(Protocol):
|
||||
|
||||
return resp
|
||||
|
||||
def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
async def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
||||
req = ADBCampaignClearRequest(data)
|
||||
|
||||
resp = ADBCampaignClearResponse.from_req(req.head)
|
||||
@ -271,19 +301,19 @@ class AimedbProtocol(Protocol):
|
||||
# We don't support campaign stuff
|
||||
return resp
|
||||
|
||||
def handle_register(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_register(self, data: bytes, resp_code: int) -> bytes:
|
||||
req = ADBLookupRequest(data)
|
||||
user_id = -1
|
||||
|
||||
if self.config.server.allow_user_registration:
|
||||
user_id = self.data.user.create_user()
|
||||
user_id = await self.data.user.create_user()
|
||||
|
||||
if user_id is None:
|
||||
self.logger.error("Failed to register user!")
|
||||
user_id = -1
|
||||
|
||||
else:
|
||||
card_id = self.data.card.create_card(user_id, req.access_code)
|
||||
card_id = await self.data.card.create_card(user_id, req.access_code)
|
||||
|
||||
if card_id is None:
|
||||
self.logger.error("Failed to register card!")
|
||||
@ -305,17 +335,17 @@ class AimedbProtocol(Protocol):
|
||||
return resp
|
||||
|
||||
# TODO: Save these in some capacity, as deemed relevant
|
||||
def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
|
||||
req = ADBStatusLogRequest(data)
|
||||
self.logger.info(f"User {req.aime_id} logged {req.status.name} event")
|
||||
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
|
||||
|
||||
def handle_log(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_log(self, data: bytes, resp_code: int) -> bytes:
|
||||
req = ADBLogRequest(data)
|
||||
self.logger.info(f"User {req.aime_id} logged {req.status.name} event, credit_ct: {req.credit_ct} bet_ct: {req.bet_ct} won_ct: {req.won_ct}")
|
||||
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
|
||||
|
||||
def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
|
||||
async def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
|
||||
req = ADBLogExRequest(data)
|
||||
strs = []
|
||||
self.logger.info(f"Recieved {req.num_logs} or {len(req.logs)} logs")
|
||||
@ -324,43 +354,3 @@ class AimedbProtocol(Protocol):
|
||||
self.logger.debug(f"User {req.logs[x].aime_id} logged {req.logs[x].status.name} event, credit_ct: {req.logs[x].credit_ct} bet_ct: {req.logs[x].bet_ct} won_ct: {req.logs[x].won_ct}")
|
||||
return ADBLogExResponse.from_req(req.head)
|
||||
|
||||
def handle_goodbye(self, data: bytes, resp_code: int) -> None:
|
||||
self.logger.info(f"goodbye from {self.transport.getPeer().host}")
|
||||
self.transport.loseConnection()
|
||||
return
|
||||
|
||||
class AimedbFactory(Factory):
|
||||
protocol = AimedbProtocol
|
||||
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
self.config = cfg
|
||||
log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
self.logger = logging.getLogger("aimedb")
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(self.config.aimedb.loglevel)
|
||||
coloredlogs.install(
|
||||
level=cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
|
||||
if self.config.aimedb.key == "":
|
||||
self.logger.error("Please set 'key' field in your config file.")
|
||||
exit(1)
|
||||
|
||||
self.logger.info(f"Ready on port {self.config.aimedb.port}")
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
return AimedbProtocol(self.config)
|
||||
|
443
core/allnet.py
443
core/allnet.py
@ -1,20 +1,24 @@
|
||||
from typing import Dict, List, Any, Optional, Tuple, Union, Final
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web.http import Request
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
import base64
|
||||
import zlib
|
||||
import json
|
||||
import yaml
|
||||
import logging
|
||||
import coloredlogs
|
||||
import urllib.parse
|
||||
import math
|
||||
from typing import Dict, List, Any, Optional, Union, Final
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import PlainTextResponse
|
||||
from starlette.applications import Starlette
|
||||
from starlette.routing import Route
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Hash import SHA
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
from time import strptime
|
||||
from os import path
|
||||
import urllib.parse
|
||||
import math
|
||||
from os import path, environ, mkdir, access, W_OK
|
||||
|
||||
from .config import CoreConfig
|
||||
from .utils import Utils
|
||||
@ -91,7 +95,6 @@ class DLI_STATUS(Enum):
|
||||
|
||||
class AllnetServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str):
|
||||
super().__init__()
|
||||
self.config = core_cfg
|
||||
self.config_folder = cfg_folder
|
||||
self.data = Data(core_cfg)
|
||||
@ -120,25 +123,21 @@ class AllnetServlet:
|
||||
)
|
||||
self.logger.initialized = True
|
||||
|
||||
plugins = Utils.get_all_titles()
|
||||
|
||||
if len(plugins) == 0:
|
||||
self.logger.error("No games detected!")
|
||||
|
||||
self.logger.info(
|
||||
f"Serving {len(TitleServlet.title_registry)} game codes port {core_cfg.allnet.port}"
|
||||
f"Ready on port {self.config.allnet.port if self.config.allnet.standalone else self.config.server.port}"
|
||||
)
|
||||
|
||||
def handle_poweron(self, request: Request, _: Dict):
|
||||
async def handle_poweron(self, request: Request):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
pragma_header = request.getHeader('Pragma')
|
||||
pragma_header = request.headers.get('Pragma', "")
|
||||
is_dfi = pragma_header is not None and pragma_header == "DFI"
|
||||
data = await request.body()
|
||||
|
||||
try:
|
||||
if is_dfi:
|
||||
req_urlencode = self.from_dfi(request.content.getvalue())
|
||||
req_urlencode = self.from_dfi(data)
|
||||
else:
|
||||
req_urlencode = request.content.getvalue().decode()
|
||||
req_urlencode = data
|
||||
|
||||
req_dict = self.allnet_req_to_dict(req_urlencode)
|
||||
if req_dict is None:
|
||||
@ -155,7 +154,7 @@ class AllnetServlet:
|
||||
except AllnetRequestException as e:
|
||||
if e.message != "":
|
||||
self.logger.error(e)
|
||||
return b""
|
||||
return PlainTextResponse()
|
||||
|
||||
if req.format_ver == 3:
|
||||
resp = AllnetPowerOnResponse3(req.token)
|
||||
@ -166,42 +165,42 @@ class AllnetServlet:
|
||||
|
||||
self.logger.debug(f"Allnet request: {vars(req)}")
|
||||
|
||||
machine = self.data.arcade.get_machine(req.serial)
|
||||
machine = await self.data.arcade.get_machine(req.serial)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.serial} attempted allnet auth from {request_ip}."
|
||||
self.data.base.log_event(
|
||||
await self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_UNKNOWN_SERIAL", logging.WARN, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_machine.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
if machine is not None:
|
||||
arcade = self.data.arcade.get_arcade(machine["arcade"])
|
||||
arcade = await self.data.arcade.get_arcade(machine["arcade"])
|
||||
if self.config.server.check_arcade_ip:
|
||||
if arcade["ip"] and arcade["ip"] is not None and arcade["ip"] != req.ip:
|
||||
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip} (expected {arcade['ip']})."
|
||||
self.data.base.log_event(
|
||||
await self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_BAD_IP", logging.ERROR, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_shop.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
elif (not arcade["ip"] or arcade["ip"] is None) and self.config.server.strict_ip_checking:
|
||||
msg = f"Serial {req.serial} attempted allnet auth from bad IP {req.ip}, but arcade {arcade['id']} has no IP set! (strict checking enabled)."
|
||||
self.data.base.log_event(
|
||||
await self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_NO_SHOP_IP", logging.ERROR, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_shop.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
|
||||
country = (
|
||||
@ -238,34 +237,34 @@ class AllnetServlet:
|
||||
if req.game_id not in TitleServlet.title_registry:
|
||||
if not self.config.server.is_develop:
|
||||
msg = f"Unrecognised game {req.game_id} attempted allnet auth from {request_ip}."
|
||||
self.data.base.log_event(
|
||||
await self.data.base.log_event(
|
||||
"allnet", "ALLNET_AUTH_UNKNOWN_GAME", logging.WARN, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_game.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return (urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n").encode("utf-8")
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
f"Allowed unknown game {req.game_id} v{req.ver} to authenticate from {request_ip} due to 'is_develop' being enabled. S/N: {req.serial}"
|
||||
)
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.title.hostname}:{self.config.title.port}"
|
||||
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.server.hostname}:{self.config.server.port}"
|
||||
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
|
||||
self.logger.debug(f"Allnet response: {resp_str}")
|
||||
return (resp_str + "\n").encode("utf-8")
|
||||
return PlainTextResponse(resp_str + "\n")
|
||||
|
||||
|
||||
int_ver = req.ver.replace(".", "")
|
||||
resp.uri, resp.host = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial)
|
||||
|
||||
msg = f"{req.serial} authenticated from {request_ip}: {req.game_id} v{req.ver}"
|
||||
self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
|
||||
await self.data.base.log_event("allnet", "ALLNET_AUTH_SUCCESS", logging.INFO, msg)
|
||||
self.logger.info(msg)
|
||||
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
@ -277,18 +276,19 @@ class AllnetServlet:
|
||||
request.responseHeaders.addRawHeader('Pragma', 'DFI')
|
||||
return self.to_dfi(resp_str)"""
|
||||
|
||||
return resp_str.encode("utf-8")
|
||||
return PlainTextResponse(resp_str)
|
||||
|
||||
def handle_dlorder(self, request: Request, _: Dict):
|
||||
async def handle_dlorder(self, request: Request):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
pragma_header = request.getHeader('Pragma')
|
||||
pragma_header = request.headers.get('Pragma', "")
|
||||
is_dfi = pragma_header is not None and pragma_header == "DFI"
|
||||
data = await request.body()
|
||||
|
||||
try:
|
||||
if is_dfi:
|
||||
req_urlencode = self.from_dfi(request.content.getvalue())
|
||||
req_urlencode = self.from_dfi(data)
|
||||
else:
|
||||
req_urlencode = request.content.getvalue().decode()
|
||||
req_urlencode = data.decode()
|
||||
|
||||
req_dict = self.allnet_req_to_dict(req_urlencode)
|
||||
if req_dict is None:
|
||||
@ -305,7 +305,7 @@ class AllnetServlet:
|
||||
except AllnetRequestException as e:
|
||||
if e.message != "":
|
||||
self.logger.error(e)
|
||||
return b""
|
||||
return PlainTextResponse()
|
||||
|
||||
self.logger.info(
|
||||
f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}"
|
||||
@ -316,54 +316,54 @@ class AllnetServlet:
|
||||
not self.config.allnet.allow_online_updates
|
||||
or not self.config.allnet.update_cfg_folder
|
||||
):
|
||||
return urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
|
||||
|
||||
else: # TODO: Keychip check
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
):
|
||||
resp.uri = f"http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
):
|
||||
resp.uri += f"|http://{self.config.title.hostname}:{self.config.title.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
resp.uri += f"|http://{self.config.server.hostname}:{self.config.server.port}/dl/ini/{req.game_id}-{req.ver.replace('.', '')}-opt.ini"
|
||||
|
||||
self.logger.debug(f"Sending download uri {resp.uri}")
|
||||
self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
|
||||
await self.data.base.log_event("allnet", "DLORDER_REQ_SUCCESS", logging.INFO, f"{Utils.get_ip_addr(request)} requested DL Order for {req.serial} {req.game_id} v{req.ver}")
|
||||
|
||||
res_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
"""if is_dfi:
|
||||
request.responseHeaders.addRawHeader('Pragma', 'DFI')
|
||||
return self.to_dfi(res_str)"""
|
||||
|
||||
return res_str
|
||||
return PlainTextResponse(res_str)
|
||||
|
||||
def handle_dlorder_ini(self, request: Request, match: Dict) -> bytes:
|
||||
if "file" not in match:
|
||||
return b""
|
||||
async def handle_dlorder_ini(self, request: Request) -> bytes:
|
||||
req_file = request.path_params.get("file", "").replace("%0A", "").replace("\n", "")
|
||||
|
||||
req_file = match["file"].replace("%0A", "")
|
||||
if not req_file:
|
||||
return PlainTextResponse(status_code=404)
|
||||
|
||||
if path.exists(f"{self.config.allnet.update_cfg_folder}/{req_file}"):
|
||||
self.logger.info(f"Request for DL INI file {req_file} from {Utils.get_ip_addr(request)} successful")
|
||||
self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
|
||||
await self.data.base.log_event("allnet", "DLORDER_INI_SENT", logging.INFO, f"{Utils.get_ip_addr(request)} successfully recieved {req_file}")
|
||||
|
||||
return open(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req_file}", "rb"
|
||||
).read()
|
||||
return PlainTextResponse(open(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req_file}", "r", encoding="utf-8"
|
||||
).read())
|
||||
|
||||
self.logger.info(f"DL INI File {req_file} not found")
|
||||
return b""
|
||||
return PlainTextResponse()
|
||||
|
||||
def handle_dlorder_report(self, request: Request, match: Dict) -> bytes:
|
||||
req_raw = request.content.getvalue()
|
||||
async def handle_dlorder_report(self, request: Request) -> bytes:
|
||||
req_raw = await request.body()
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
try:
|
||||
req_dict: Dict = json.loads(req_raw)
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Failed to parse DL Report: {e}")
|
||||
return "NG"
|
||||
return PlainTextResponse("NG")
|
||||
|
||||
dl_data_type = DLIMG_TYPE.app
|
||||
dl_data = req_dict.get("appimage", {})
|
||||
@ -374,24 +374,24 @@ class AllnetServlet:
|
||||
|
||||
if dl_data is None or not dl_data:
|
||||
self.logger.warning(f"Failed to parse DL Report: Invalid format - contains neither appimage nor optimage")
|
||||
return "NG"
|
||||
return PlainTextResponse("NG")
|
||||
|
||||
rep = DLReport(dl_data, dl_data_type)
|
||||
|
||||
if not rep.validate():
|
||||
self.logger.warning(f"Failed to parse DL Report: Invalid format - {rep.err}")
|
||||
return "NG"
|
||||
return PlainTextResponse("NG")
|
||||
|
||||
msg = f"{rep.serial} @ {client_ip} reported {rep.rep_type.name} download state {rep.rf_state.name} for {rep.gd} v{rep.dav}:"\
|
||||
f" {rep.tdsc}/{rep.tsc} segments downloaded for working files {rep.wfl} with {rep.dfl if rep.dfl else 'none'} complete."
|
||||
|
||||
self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
|
||||
await self.data.base.log_event("allnet", "DL_REPORT", logging.INFO, msg, dl_data)
|
||||
self.logger.info(msg)
|
||||
|
||||
return "OK"
|
||||
return PlainTextResponse("OK")
|
||||
|
||||
def handle_loaderstaterecorder(self, request: Request, match: Dict) -> bytes:
|
||||
req_data = request.content.getvalue()
|
||||
async def handle_loaderstaterecorder(self, request: Request) -> bytes:
|
||||
req_data = await request.body()
|
||||
sections = req_data.decode("utf-8").split("\r\n")
|
||||
|
||||
req_dict = dict(urllib.parse.parse_qsl(sections[0]))
|
||||
@ -403,130 +403,17 @@ class AllnetServlet:
|
||||
ip = Utils.get_ip_addr(request)
|
||||
|
||||
if serial is None or num_files_dld is None or num_files_to_dl is None or dl_state is None:
|
||||
return "NG".encode()
|
||||
return PlainTextResponse("NG")
|
||||
|
||||
self.logger.info(f"LoaderStateRecorder Request from {ip} {serial}: {num_files_dld}/{num_files_to_dl} Files download (State: {dl_state})")
|
||||
return "OK".encode()
|
||||
return PlainTextResponse("OK")
|
||||
|
||||
def handle_alive(self, request: Request, match: Dict) -> bytes:
|
||||
return "OK".encode()
|
||||
async def handle_alive(self, request: Request) -> bytes:
|
||||
return PlainTextResponse("OK")
|
||||
|
||||
def handle_billing_request(self, request: Request, _: Dict):
|
||||
req_raw = request.content.getvalue()
|
||||
|
||||
if request.getHeader('Content-Type') == "application/octet-stream":
|
||||
req_unzip = zlib.decompressobj(-zlib.MAX_WBITS).decompress(req_raw)
|
||||
else:
|
||||
req_unzip = req_raw
|
||||
|
||||
req_dict = self.billing_req_to_dict(req_unzip)
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(f"Failed to parse request {request.content.getvalue()}")
|
||||
return b""
|
||||
|
||||
self.logger.debug(f"request {req_dict}")
|
||||
|
||||
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
traces: List[TraceData] = []
|
||||
try:
|
||||
req = BillingInfo(req_dict[0])
|
||||
except KeyError as e:
|
||||
self.logger.error(f"Billing request failed to parse: {e}")
|
||||
return f"result=5&linelimit=&message=field is missing or formatting is incorrect\r\n".encode()
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
|
||||
traces.append(tmp)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warn(f"Tracelog failed to parse: {e}")
|
||||
|
||||
kc_serial_bytes = req.keychipid.encode()
|
||||
|
||||
|
||||
machine = self.data.arcade.get_machine(req.keychipid)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
|
||||
self.data.base.log_event(
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
return f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n".encode()
|
||||
|
||||
msg = (
|
||||
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
|
||||
)
|
||||
self.logger.info(msg)
|
||||
self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, msg)
|
||||
if req.traceleft > 0:
|
||||
self.logger.warn(f"{req.traceleft} unsent tracelogs")
|
||||
kc_playlimit = req.playlimit
|
||||
kc_nearfull = req.nearfull
|
||||
|
||||
while req.playcnt > req.playlimit:
|
||||
kc_playlimit += 1024
|
||||
kc_nearfull += 1024
|
||||
|
||||
playlimit = kc_playlimit
|
||||
nearfull = kc_nearfull + (req.billingtype.value * 0x00010000)
|
||||
|
||||
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
|
||||
playlimit_sig = signer.sign(digest).hex()
|
||||
|
||||
digest = SHA.new()
|
||||
digest.update(nearfull.to_bytes(4, "little") + kc_serial_bytes)
|
||||
nearfull_sig = signer.sign(digest).hex()
|
||||
|
||||
# TODO: playhistory
|
||||
|
||||
#resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig)
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
|
||||
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
|
||||
|
||||
self.logger.debug(f"response {vars(resp)}")
|
||||
if req.traceleft > 0:
|
||||
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
|
||||
return f"result=6&waittime=0&linelimit=20\r\n".encode()
|
||||
|
||||
return resp_str.encode("utf-8")
|
||||
|
||||
def handle_naomitest(self, request: Request, _: Dict) -> bytes:
|
||||
async def handle_naomitest(self, request: Request) -> bytes:
|
||||
self.logger.info(f"Ping from {Utils.get_ip_addr(request)}")
|
||||
return b"naomi ok"
|
||||
|
||||
def billing_req_to_dict(self, data: bytes):
|
||||
"""
|
||||
Parses an billing request string into a python dictionary
|
||||
"""
|
||||
try:
|
||||
sections = data.decode("ascii").split("\r\n")
|
||||
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
|
||||
return None
|
||||
return PlainTextResponse("naomi ok")
|
||||
|
||||
def allnet_req_to_dict(self, data: str) -> Optional[List[Dict[str, Any]]]:
|
||||
"""
|
||||
@ -554,6 +441,147 @@ class AllnetServlet:
|
||||
zipped = zlib.compress(unzipped)
|
||||
return base64.b64encode(zipped)
|
||||
|
||||
class BillingServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str) -> None:
|
||||
self.config = core_cfg
|
||||
self.config_folder = cfg_folder
|
||||
self.data = Data(core_cfg)
|
||||
|
||||
self.logger = logging.getLogger("billing")
|
||||
if not hasattr(self.logger, "initialized"):
|
||||
log_fmt_str = "[%(asctime)s] Billing | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "billing"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(core_cfg.allnet.loglevel)
|
||||
coloredlogs.install(
|
||||
level=core_cfg.billing.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initialized = True
|
||||
|
||||
def billing_req_to_dict(self, data: bytes):
|
||||
"""
|
||||
Parses an billing request string into a python dictionary
|
||||
"""
|
||||
try:
|
||||
sections = data.decode("ascii").split("\r\n")
|
||||
|
||||
ret = []
|
||||
for x in sections:
|
||||
ret.append(dict(urllib.parse.parse_qsl(x)))
|
||||
return ret
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"billing_req_to_dict: {e} while parsing {data}")
|
||||
return None
|
||||
|
||||
async def handle_billing_request(self, request: Request):
|
||||
req_raw = await request.body()
|
||||
|
||||
if request.headers.get('Content-Type', '') == "application/octet-stream":
|
||||
req_unzip = zlib.decompressobj(-zlib.MAX_WBITS).decompress(req_raw)
|
||||
else:
|
||||
req_unzip = req_raw
|
||||
|
||||
req_dict = self.billing_req_to_dict(req_unzip)
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if req_dict is None:
|
||||
self.logger.error(f"Failed to parse request {req_raw}")
|
||||
return PlainTextResponse()
|
||||
|
||||
self.logger.debug(f"request {req_dict}")
|
||||
|
||||
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
traces: List[TraceData] = []
|
||||
try:
|
||||
req = BillingInfo(req_dict[0])
|
||||
except KeyError as e:
|
||||
self.logger.error(f"Billing request failed to parse: {e}")
|
||||
return PlainTextResponse("result=5&linelimit=&message=field is missing or formatting is incorrect\r\n")
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
|
||||
traces.append(tmp)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warn(f"Tracelog failed to parse: {e}")
|
||||
|
||||
kc_serial_bytes = req.keychipid.encode()
|
||||
|
||||
|
||||
machine = await self.data.arcade.get_machine(req.keychipid)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
|
||||
await self.data.base.log_event(
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
return PlainTextResponse(f"result=1&requestno={req.requestno}&message=Keychip Serial bad\r\n")
|
||||
|
||||
msg = (
|
||||
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
|
||||
)
|
||||
self.logger.info(msg)
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, msg)
|
||||
if req.traceleft > 0:
|
||||
self.logger.warn(f"{req.traceleft} unsent tracelogs")
|
||||
kc_playlimit = req.playlimit
|
||||
kc_nearfull = req.nearfull
|
||||
|
||||
while req.playcnt > req.playlimit:
|
||||
kc_playlimit += 1024
|
||||
kc_nearfull += 1024
|
||||
|
||||
playlimit = kc_playlimit
|
||||
nearfull = kc_nearfull + (req.billingtype.value * 0x00010000)
|
||||
|
||||
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
|
||||
playlimit_sig = signer.sign(digest).hex()
|
||||
|
||||
digest = SHA.new()
|
||||
digest.update(nearfull.to_bytes(4, "little") + kc_serial_bytes)
|
||||
nearfull_sig = signer.sign(digest).hex()
|
||||
|
||||
# TODO: playhistory
|
||||
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
|
||||
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
|
||||
|
||||
self.logger.debug(f"response {vars(resp)}")
|
||||
if req.traceleft > 0:
|
||||
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
|
||||
return PlainTextResponse("result=6&waittime=0&linelimit=20\r\n")
|
||||
|
||||
return PlainTextResponse(resp_str)
|
||||
|
||||
class AllnetPowerOnRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
@ -613,7 +641,6 @@ class AllnetPowerOnResponse3(AllnetPowerOnResponse):
|
||||
self.minute = None
|
||||
self.second = None
|
||||
|
||||
|
||||
class AllnetPowerOnResponse2(AllnetPowerOnResponse):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
@ -623,7 +650,6 @@ class AllnetPowerOnResponse2(AllnetPowerOnResponse):
|
||||
self.timezone = "+09:00"
|
||||
self.res_class = "PowerOnResponseV2"
|
||||
|
||||
|
||||
class AllnetDownloadOrderRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
self.game_id = req.get("game_id", "")
|
||||
@ -631,7 +657,6 @@ class AllnetDownloadOrderRequest:
|
||||
self.serial = req.get("serial", "")
|
||||
self.encode = req.get("encode", "")
|
||||
|
||||
|
||||
class AllnetDownloadOrderResponse:
|
||||
def __init__(self, stat: int = 1, serial: str = "", uri: str = "") -> None:
|
||||
self.stat = stat
|
||||
@ -669,7 +694,7 @@ class BillingInfo:
|
||||
self.boardid = str(data.get("boardid", None))
|
||||
self.tenpoip = str(data.get("tenpoip", None))
|
||||
self.libalibver = float(data.get("libalibver", None))
|
||||
self.datamax = int(data.get("datamax", None))
|
||||
self.data.max = int(data.get("datamax", None))
|
||||
self.billingtype = BillingType(int(data.get("billingtype", None)))
|
||||
self.protocolver = float(data.get("protocolver", None))
|
||||
self.operatingfix = bool(data.get("operatingfix", None))
|
||||
@ -781,7 +806,6 @@ class BillingResponse:
|
||||
# playhistory -> YYYYMM/C:...
|
||||
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period
|
||||
|
||||
|
||||
class AllnetRequestException(Exception):
|
||||
def __init__(self, message="") -> None:
|
||||
self.message = message
|
||||
@ -849,3 +873,46 @@ class DLReport:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
cfg_dir = environ.get("DIANA_CFG_DIR", "config")
|
||||
cfg: CoreConfig = CoreConfig()
|
||||
if path.exists(f"{cfg_dir}/core.yaml"):
|
||||
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
mkdir(cfg.server.log_dir)
|
||||
|
||||
if not access(cfg.server.log_dir, W_OK):
|
||||
print(
|
||||
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
billing = BillingServlet(cfg, cfg_dir)
|
||||
app_billing = Starlette(
|
||||
cfg.server.is_develop,
|
||||
[
|
||||
Route("/request", billing.handle_billing_request, methods=["POST"]),
|
||||
Route("/request/", billing.handle_billing_request, methods=["POST"]),
|
||||
]
|
||||
)
|
||||
|
||||
allnet = AllnetServlet(cfg, cfg_dir)
|
||||
route_lst = [
|
||||
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
|
||||
Route("/naomitest.html", allnet.handle_naomitest),
|
||||
]
|
||||
|
||||
if cfg.allnet.allow_online_updates:
|
||||
route_lst += [
|
||||
Route("/report-api/Report", allnet.handle_dlorder_report, methods=["POST"]),
|
||||
Route("/dl/ini/{file:str}", allnet.handle_dlorder_ini),
|
||||
]
|
||||
|
||||
app_allnet = Starlette(
|
||||
cfg.server.is_develop,
|
||||
route_lst
|
||||
)
|
||||
|
94
core/app.py
Normal file
94
core/app.py
Normal file
@ -0,0 +1,94 @@
|
||||
import yaml
|
||||
import logging
|
||||
import coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from starlette.routing import Route
|
||||
from starlette.requests import Request
|
||||
from starlette.applications import Starlette
|
||||
from starlette.responses import PlainTextResponse
|
||||
from os import environ, path, mkdir, W_OK, access
|
||||
from typing import List
|
||||
|
||||
from core import CoreConfig, TitleServlet, MuchaServlet, AllnetServlet, BillingServlet, AimedbServlette
|
||||
from core.frontend import FrontendServlet
|
||||
|
||||
async def dummy_rt(request: Request):
|
||||
return PlainTextResponse("Service OK")
|
||||
|
||||
cfg_dir = environ.get("ARTEMIS_CFG_DIR", "config")
|
||||
cfg: CoreConfig = CoreConfig()
|
||||
if path.exists(f"{cfg_dir}/core.yaml"):
|
||||
cfg.update(yaml.safe_load(open(f"{cfg_dir}/core.yaml")))
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
mkdir(cfg.server.log_dir)
|
||||
|
||||
if not access(cfg.server.log_dir, W_OK):
|
||||
print(
|
||||
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
logger = logging.getLogger("core")
|
||||
log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(cfg.server.log_dir, "core"), when="d", backupCount=10
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
logger.addHandler(fileHandler)
|
||||
logger.addHandler(consoleHandler)
|
||||
|
||||
log_lv = logging.DEBUG if cfg.server.is_develop else logging.INFO
|
||||
logger.setLevel(log_lv)
|
||||
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
|
||||
|
||||
logger.info(f"Artemis starting in {'develop' if cfg.server.is_develop else 'production'} mode")
|
||||
|
||||
title = TitleServlet(cfg, cfg_dir) # This has to be loaded first to load plugins
|
||||
mucha = MuchaServlet(cfg, cfg_dir)
|
||||
|
||||
route_lst: List[Route] = [
|
||||
# Allnet
|
||||
|
||||
# Mucha
|
||||
Route("/mucha_front/boardauth.do", mucha.handle_boardauth, methods=["POST"]),
|
||||
Route("/mucha_front/updatacheck.do", mucha.handle_updatecheck, methods=["POST"]),
|
||||
Route("/mucha_front/downloadstate.do", mucha.handle_dlstate, methods=["POST"]),
|
||||
# General
|
||||
Route("/", dummy_rt),
|
||||
Route("/robots.txt", FrontendServlet.robots)
|
||||
]
|
||||
|
||||
if not cfg.billing.standalone:
|
||||
billing = BillingServlet(cfg, cfg_dir)
|
||||
route_lst += [
|
||||
Route("/request", billing.handle_billing_request, methods=["POST"]),
|
||||
Route("/request/", billing.handle_billing_request, methods=["POST"]),
|
||||
]
|
||||
|
||||
if not cfg.allnet.standalone:
|
||||
allnet = AllnetServlet(cfg, cfg_dir)
|
||||
route_lst += [
|
||||
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
|
||||
Route("/naomitest.html", allnet.handle_naomitest),
|
||||
]
|
||||
|
||||
if cfg.allnet.allow_online_updates:
|
||||
route_lst += [
|
||||
Route("/report-api/Report", allnet.handle_dlorder_report, methods=["POST"]),
|
||||
Route("/dl/ini/{file:str}", allnet.handle_dlorder_ini),
|
||||
]
|
||||
|
||||
for code, game in title.title_registry.items():
|
||||
route_lst += game.get_routes()
|
||||
|
||||
app = Starlette(cfg.server.is_develop, route_lst)
|
173
core/config.py
173
core/config.py
@ -1,16 +1,48 @@
|
||||
import logging, os
|
||||
from typing import Any
|
||||
|
||||
|
||||
class ServerConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def listen_address(self) -> str:
|
||||
"""
|
||||
Address Artemis will bind to and listen on
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "listen_address", default="127.0.0.1"
|
||||
)
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
"""
|
||||
Hostname sent to games
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "hostname", default="localhost"
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
"""
|
||||
Port the game will listen on
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "port", default=80
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_key(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "ssl_key", default="cert/title.key"
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cert(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "ssl_cert", default="cert/title.pem"
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_user_registration(self) -> bool:
|
||||
@ -43,9 +75,23 @@ class ServerConfig:
|
||||
)
|
||||
|
||||
@property
|
||||
def threading(self) -> bool:
|
||||
def proxy_port(self) -> int:
|
||||
"""
|
||||
What port the proxy is listening on. This will be sent instead of 'port' if
|
||||
is_using_proxy is True and this value is non-zero
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "server", "threading", default=False
|
||||
self.__config, "core", "title", "proxy_port", default=0
|
||||
)
|
||||
|
||||
@property
|
||||
def proxy_port_ssl(self) -> int:
|
||||
"""
|
||||
What port the proxy is listening for secure connections on. This will be sent
|
||||
instead of 'port' if is_using_proxy is True and this value is non-zero
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "proxy_port_ssl", default=0
|
||||
)
|
||||
|
||||
@property
|
||||
@ -66,7 +112,6 @@ class ServerConfig:
|
||||
self.__config, "core", "server", "strict_ip_checking", default=False
|
||||
)
|
||||
|
||||
|
||||
class TitleConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
@ -79,36 +124,6 @@ class TitleConfig:
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "hostname", default="localhost"
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "port", default=8080
|
||||
)
|
||||
|
||||
@property
|
||||
def port_ssl(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "port_ssl", default=0
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_key(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "ssl_key", default="cert/title.key"
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cert(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "title", "ssl_cert", default="cert/title.pem"
|
||||
)
|
||||
|
||||
@property
|
||||
def reboot_start_time(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
@ -121,7 +136,6 @@ class TitleConfig:
|
||||
self.__config, "core", "title", "reboot_end_time", default=""
|
||||
)
|
||||
|
||||
|
||||
class DatabaseConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
@ -176,16 +190,6 @@ class DatabaseConfig:
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def user_table_autoincrement_start(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config,
|
||||
"core",
|
||||
"database",
|
||||
"user_table_autoincrement_start",
|
||||
default=10000,
|
||||
)
|
||||
|
||||
@property
|
||||
def enable_memcached(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
@ -198,13 +202,12 @@ class DatabaseConfig:
|
||||
self.__config, "core", "database", "memcached_host", default="localhost"
|
||||
)
|
||||
|
||||
|
||||
class FrontendConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def enable(self) -> int:
|
||||
def enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "enable", default=False
|
||||
)
|
||||
@ -212,7 +215,7 @@ class FrontendConfig:
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "port", default=8090
|
||||
self.__config, "core", "frontend", "port", default=8080
|
||||
)
|
||||
|
||||
@property
|
||||
@ -222,20 +225,23 @@ class FrontendConfig:
|
||||
self.__config, "core", "frontend", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@property
|
||||
def secret(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "frontend", "secret", default=""
|
||||
)
|
||||
|
||||
class AllnetConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "loglevel", default="info"
|
||||
)
|
||||
def standalone(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "standalone", default=False
|
||||
)
|
||||
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
@ -243,9 +249,11 @@ class AllnetConfig:
|
||||
)
|
||||
|
||||
@property
|
||||
def ip_check(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "ip_check", default=False
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
@ -260,10 +268,23 @@ class AllnetConfig:
|
||||
self.__config, "core", "allnet", "update_cfg_folder", default=""
|
||||
)
|
||||
|
||||
|
||||
class BillingConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def standalone(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "standalone", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "billing", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
@ -289,11 +310,16 @@ class BillingConfig:
|
||||
self.__config, "core", "billing", "signing_key", default="cert/billing.key"
|
||||
)
|
||||
|
||||
|
||||
class AimedbConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "aimedb", "enable", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
@ -326,17 +352,10 @@ class AimedbConfig:
|
||||
self.__config, "core", "aimedb", "id_lifetime_seconds", default=86400
|
||||
)
|
||||
|
||||
|
||||
class MuchaConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def enable(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "mucha", "enable", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
@ -345,13 +364,6 @@ class MuchaConfig:
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "mucha", "hostname", default="localhost"
|
||||
)
|
||||
|
||||
|
||||
class CoreConfig(dict):
|
||||
def __init__(self) -> None:
|
||||
self.server = ServerConfig(self)
|
||||
@ -373,6 +385,19 @@ class CoreConfig(dict):
|
||||
return logging.DEBUG
|
||||
else:
|
||||
return logging.INFO
|
||||
|
||||
@classmethod
|
||||
def loglevel_to_str(cls, level: int) -> str:
|
||||
if level == logging.ERROR:
|
||||
return "error"
|
||||
elif level == logging.WARN:
|
||||
return "warn"
|
||||
elif level == logging.INFO:
|
||||
return "info"
|
||||
elif level == logging.DEBUG:
|
||||
return "debug"
|
||||
else:
|
||||
return "notset"
|
||||
|
||||
@classmethod
|
||||
def get_config_field(
|
||||
1 core/data/alembic/README Normal file
@ -0,0 +1 @@
Generic single-database configuration.
64 core/data/alembic/alembic.ini Normal file
@ -0,0 +1,64 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
script_location=.
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
#truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; this defaults
|
||||
# to migrations//versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path
|
||||
# version_locations = %(here)s/bar %(here)s/bat migrations//versions
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
81 core/data/alembic/env.py Normal file
@ -0,0 +1,81 @@
|
||||
from __future__ import with_statement
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from logging.config import fileConfig
|
||||
|
||||
from core.data.schema.base import metadata
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
raise Exception('Not implemented or configured!')
|
||||
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url, target_metadata=target_metadata, literal_binds=True)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
ini_section = config.get_section(config.config_ini_section)
|
||||
overrides = context.get_x_argument(as_dictionary=True)
|
||||
for override in overrides:
|
||||
ini_section[override] = overrides[override]
|
||||
|
||||
connectable = engine_from_config(
|
||||
ini_section,
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
24 core/data/alembic/script.py.mako Normal file
@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
24 core/data/alembic/versions/835b862f9bf0_initial_migration.py Normal file
@ -0,0 +1,24 @@
"""Initial Migration

Revision ID: 835b862f9bf0
Revises:
Create Date: 2024-01-09 13:06:10.787432

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '835b862f9bf0'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    pass


def downgrade():
    pass
@ -0,0 +1,29 @@
"""Remove old db mgmt system

Revision ID: d8950c7ce2fc
Revises: 835b862f9bf0
Create Date: 2024-01-09 13:43:51.381175

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'd8950c7ce2fc'
down_revision = '835b862f9bf0'
branch_labels = None
depends_on = None


def upgrade():
    op.drop_table("schema_versions")


def downgrade():
    op.create_table(
        "schema_versions",
        sa.Column("game", sa.String(4), primary_key=True, nullable=False),
        sa.Column("version", sa.Integer, nullable=False, server_default="1"),
        mysql_charset="utf8mb4",
    )
@ -1,13 +1,14 @@
|
||||
import logging, coloredlogs
|
||||
from typing import Optional, Dict, List
|
||||
from typing import Optional
|
||||
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy import create_engine
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
import importlib, os
|
||||
import os
|
||||
import secrets, string
|
||||
import bcrypt
|
||||
from hashlib import sha256
|
||||
import alembic.config
|
||||
import glob
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data.schema import *
|
||||
@ -15,7 +16,6 @@ from core.utils import Utils
|
||||
|
||||
|
||||
class Data:
|
||||
current_schema_version = 6
|
||||
engine = None
|
||||
session = None
|
||||
user = None
|
||||
@ -77,281 +77,144 @@ class Data:
|
||||
)
|
||||
self.logger.handler_set = True # type: ignore
|
||||
|
||||
def __alembic_cmd(self, command: str, *args: str) -> None:
|
||||
old_dir = os.path.abspath(os.path.curdir)
|
||||
base_dir = os.path.join(os.path.abspath(os.path.curdir), 'core', 'data', 'alembic')
|
||||
alembicArgs = [
|
||||
"-c",
|
||||
os.path.join(base_dir, "alembic.ini"),
|
||||
"-x",
|
||||
f"script_location={base_dir}",
|
||||
"-x",
|
||||
f"sqlalchemy.url={self.__url}",
|
||||
command,
|
||||
]
|
||||
alembicArgs.extend(args)
|
||||
os.chdir(base_dir)
|
||||
alembic.config.main(argv=alembicArgs)
|
||||
os.chdir(old_dir)
|
||||
|
||||
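A note on the helper above: __alembic_cmd drives Alembic in-process by handing a synthetic argv to alembic.config.main, with -x overrides that env.py later folds back into the ini section. A minimal standalone sketch of the same pattern follows; the database URL and base directory here are placeholder assumptions, not values from this commit.

import os
import alembic.config

def run_alembic(command: str, *args: str,
                base_dir: str = "core/data/alembic",
                db_url: str = "mysql+pymysql://user:pass@localhost/aime") -> None:
    # Build the same argv shape __alembic_cmd builds: config file, -x overrides, then the command.
    argv = [
        "-c", os.path.join(base_dir, "alembic.ini"),
        "-x", f"script_location={base_dir}",
        "-x", f"sqlalchemy.url={db_url}",
        command, *args,
    ]
    old_dir = os.path.abspath(os.path.curdir)
    os.chdir(base_dir)  # alembic.ini sets script_location=., so run from the alembic folder
    try:
        alembic.config.main(argv=argv)
    finally:
        os.chdir(old_dir)

# e.g. run_alembic("upgrade", "head") or run_alembic("stamp", "835b862f9bf0")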
def create_database(self):
|
||||
self.logger.info("Creating databases...")
|
||||
try:
|
||||
metadata.create_all(self.__engine.connect())
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"Failed to create databases! {e}")
|
||||
return
|
||||
|
||||
games = Utils.get_all_titles()
|
||||
for game_dir, game_mod in games.items():
|
||||
try:
|
||||
if hasattr(game_mod, "database") and hasattr(
|
||||
game_mod, "current_schema_version"
|
||||
):
|
||||
game_mod.database(self.config)
|
||||
metadata.create_all(self.__engine.connect())
|
||||
|
||||
self.base.touch_schema_ver(
|
||||
game_mod.current_schema_version, game_mod.game_codes[0]
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
f"Could not load database schema from {game_dir} - {e}"
|
||||
)
|
||||
|
||||
self.logger.info(f"Setting base_schema_ver to {self.current_schema_version}")
|
||||
self.base.set_schema_ver(self.current_schema_version)
|
||||
|
||||
self.logger.info(
|
||||
f"Setting user auto_incrememnt to {self.config.database.user_table_autoincrement_start}"
|
||||
)
|
||||
self.user.reset_autoincrement(
|
||||
self.config.database.user_table_autoincrement_start
|
||||
metadata.create_all(
|
||||
self.engine,
|
||||
checkfirst=True,
|
||||
)
|
||||
|
||||
def recreate_database(self):
|
||||
self.logger.info("Dropping all databases...")
|
||||
self.base.execute("SET FOREIGN_KEY_CHECKS=0")
|
||||
try:
|
||||
metadata.drop_all(self.__engine.connect())
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"Failed to drop databases! {e}")
|
||||
return
|
||||
for _, mod in Utils.get_all_titles().items():
|
||||
if hasattr(mod, "database"):
|
||||
mod.database(self.config)
|
||||
metadata.create_all(
|
||||
self.engine,
|
||||
checkfirst=True,
|
||||
)
|
||||
|
||||
for root, dirs, files in os.walk("./titles"):
|
||||
for dir in dirs:
|
||||
if not dir.startswith("__"):
|
||||
try:
|
||||
mod = importlib.import_module(f"titles.{dir}")
|
||||
# Stamp the end revision as if alembic had created it, so it can take off after this.
|
||||
self.__alembic_cmd(
|
||||
"stamp",
|
||||
"head",
|
||||
)
|
||||
|
||||
try:
|
||||
if hasattr(mod, "database"):
|
||||
mod.database(self.config)
|
||||
metadata.drop_all(self.__engine.connect())
|
||||
def schema_upgrade(self, ver: str = None):
|
||||
self.__alembic_cmd(
|
||||
"upgrade",
|
||||
"head",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(
|
||||
f"Could not load database schema from {dir} - {e}"
|
||||
)
|
||||
|
||||
except ImportError as e:
|
||||
self.logger.warning(
|
||||
f"Failed to load database schema dir {dir} - {e}"
|
||||
)
|
||||
break
|
||||
|
||||
self.base.execute("SET FOREIGN_KEY_CHECKS=1")
|
||||
|
||||
self.create_database()
|
||||
|
||||
def migrate_database(self, game: str, version: Optional[int], action: str) -> None:
|
||||
old_ver = self.base.get_schema_ver(game)
|
||||
sql = ""
|
||||
if version is None:
|
||||
if not game == "CORE":
|
||||
titles = Utils.get_all_titles()
|
||||
|
||||
for folder, mod in titles.items():
|
||||
if not mod.game_codes[0] == game:
|
||||
continue
|
||||
|
||||
if hasattr(mod, "current_schema_version"):
|
||||
version = mod.current_schema_version
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
f"current_schema_version not found for {folder}"
|
||||
)
|
||||
|
||||
else:
|
||||
version = self.current_schema_version
|
||||
|
||||
if version is None:
|
||||
self.logger.warning(
|
||||
f"Could not determine latest version for {game}, please specify --version"
|
||||
)
|
||||
|
||||
if old_ver is None:
|
||||
self.logger.error(
|
||||
f"Schema for game {game} does not exist, did you run the creation script?"
|
||||
)
|
||||
return
|
||||
|
||||
if old_ver == version:
|
||||
self.logger.info(
|
||||
f"Schema for game {game} is already version {old_ver}, nothing to do"
|
||||
)
|
||||
return
|
||||
|
||||
if action == "upgrade":
|
||||
for x in range(old_ver, version):
|
||||
if not os.path.exists(
|
||||
f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql"
|
||||
):
|
||||
self.logger.error(
|
||||
f"Could not find {action} script {game.upper()}_{x + 1}_{action}.sql in core/data/schema/versions folder"
|
||||
)
|
||||
return
|
||||
|
||||
with open(
|
||||
f"core/data/schema/versions/{game.upper()}_{x + 1}_{action}.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error("Error execuing sql script!")
|
||||
return None
|
||||
|
||||
else:
|
||||
for x in range(old_ver, version, -1):
|
||||
if not os.path.exists(
|
||||
f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql"
|
||||
):
|
||||
self.logger.error(
|
||||
f"Could not find {action} script {game.upper()}_{x - 1}_{action}.sql in core/data/schema/versions folder"
|
||||
)
|
||||
return
|
||||
|
||||
with open(
|
||||
f"core/data/schema/versions/{game.upper()}_{x - 1}_{action}.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error("Error execuing sql script!")
|
||||
return None
|
||||
|
||||
result = self.base.set_schema_ver(version, game)
|
||||
if result is None:
|
||||
self.logger.error("Error setting version in schema_version table!")
|
||||
return None
|
||||
|
||||
self.logger.info(f"Successfully migrated {game} to schema version {version}")
|
||||
|
||||
def create_owner(self, email: Optional[str] = None) -> None:
|
||||
async def create_owner(self, email: Optional[str] = None, code: Optional[str] = "00000000000000000000") -> None:
|
||||
pw = "".join(
|
||||
secrets.choice(string.ascii_letters + string.digits) for i in range(20)
|
||||
)
|
||||
hash = bcrypt.hashpw(pw.encode(), bcrypt.gensalt())
|
||||
|
||||
user_id = self.user.create_user(email=email, permission=255, password=hash)
|
||||
user_id = await self.user.create_user("sysowner", email, hash.decode(), 255)
|
||||
if user_id is None:
|
||||
self.logger.error(f"Failed to create owner with email {email}")
|
||||
return
|
||||
|
||||
card_id = self.card.create_card(user_id, "00000000000000000000")
|
||||
card_id = await self.card.create_card(user_id, code)
|
||||
if card_id is None:
|
||||
self.logger.error(f"Failed to create card for owner with id {user_id}")
|
||||
return
|
||||
|
||||
self.logger.warning(
|
||||
f"Successfully created owner with email {email}, access code 00000000000000000000, and password {pw} Make sure to change this password and assign a real card ASAP!"
|
||||
f"Successfully created owner with email {email}, access code {code}, and password {pw} Make sure to change this password and assign a real card ASAP!"
|
||||
)
|
||||
|
||||
def migrate_card(self, old_ac: str, new_ac: str, should_force: bool) -> None:
|
||||
if old_ac == new_ac:
|
||||
self.logger.error("Both access codes are the same!")
|
||||
return
|
||||
|
||||
new_card = self.card.get_card_by_access_code(new_ac)
|
||||
if new_card is None:
|
||||
self.card.update_access_code(old_ac, new_ac)
|
||||
return
|
||||
|
||||
if not should_force:
|
||||
self.logger.warning(
|
||||
f"Card already exists for access code {new_ac} (id {new_card['id']}). If you wish to continue, rerun with the '--force' flag."
|
||||
f" All exiting data on the target card {new_ac} will be perminently erased and replaced with data from card {old_ac}."
|
||||
)
|
||||
return
|
||||
|
||||
self.logger.info(
|
||||
f"All exiting data on the target card {new_ac} will be perminently erased and replaced with data from card {old_ac}."
|
||||
)
|
||||
self.card.delete_card(new_card["id"])
|
||||
self.card.update_access_code(old_ac, new_ac)
|
||||
|
||||
hanging_user = self.user.get_user(new_card["user"])
|
||||
if hanging_user["password"] is None:
|
||||
self.logger.info(f"Delete hanging user {hanging_user['id']}")
|
||||
self.user.delete_user(hanging_user["id"])
|
||||
|
||||
def delete_hanging_users(self) -> None:
|
||||
"""
|
||||
Finds and deletes users that have not registered for the webui and have no cards associated with them.
|
||||
"""
|
||||
unreg_users = self.user.get_unregistered_users()
|
||||
if unreg_users is None:
|
||||
self.logger.error("Error occoured finding unregistered users")
|
||||
|
||||
for user in unreg_users:
|
||||
cards = self.card.get_user_cards(user["id"])
|
||||
if cards is None:
|
||||
self.logger.error(f"Error getting cards for user {user['id']}")
|
||||
continue
|
||||
|
||||
if not cards:
|
||||
self.logger.info(f"Delete hanging user {user['id']}")
|
||||
self.user.delete_user(user["id"])
|
||||
|
||||
def autoupgrade(self) -> None:
|
||||
all_game_versions = self.base.get_all_schema_vers()
|
||||
if all_game_versions is None:
|
||||
self.logger.warning("Failed to get schema versions")
|
||||
return
|
||||
|
||||
all_games = Utils.get_all_titles()
|
||||
all_games_list: Dict[str, int] = {}
|
||||
for _, mod in all_games.items():
|
||||
if hasattr(mod, "current_schema_version"):
|
||||
all_games_list[mod.game_codes[0]] = mod.current_schema_version
|
||||
|
||||
for x in all_game_versions:
|
||||
failed = False
|
||||
game = x["game"].upper()
|
||||
update_ver = int(x["version"])
|
||||
latest_ver = all_games_list.get(game, 1)
|
||||
if game == "CORE":
|
||||
latest_ver = self.current_schema_version
|
||||
|
||||
if update_ver == latest_ver:
|
||||
self.logger.info(f"{game} is already latest version")
|
||||
continue
|
||||
|
||||
for y in range(update_ver + 1, latest_ver + 1):
|
||||
if os.path.exists(f"core/data/schema/versions/{game}_{y}_upgrade.sql"):
|
||||
with open(
|
||||
f"core/data/schema/versions/{game}_{y}_upgrade.sql",
|
||||
"r",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
sql = f.read()
|
||||
|
||||
result = self.base.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Error execuing sql script for game {game} v{y}!"
|
||||
)
|
||||
failed = True
|
||||
break
|
||||
else:
|
||||
self.logger.warning(f"Could not find script {game}_{y}_upgrade.sql")
|
||||
failed = True
|
||||
|
||||
if not failed:
|
||||
self.base.set_schema_ver(latest_ver, game)
|
||||
|
||||
def show_versions(self) -> None:
|
||||
all_game_versions = self.base.get_all_schema_vers()
|
||||
for ver in all_game_versions:
|
||||
self.logger.info(f"{ver['game']} -> v{ver['version']}")
|
||||
async def migrate(self) -> None:
|
||||
exist = await self.base.execute("SELECT * FROM alembic_version")
|
||||
if exist is not None:
|
||||
self.logger.warn("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
return
|
||||
|
||||
self.logger.info("Upgrading to latest with legacy system")
|
||||
if not await self.legacy_upgrade():
|
||||
self.logger.warn("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
return
|
||||
self.logger.info("Done")
|
||||
|
||||
self.logger.info("Stamp with initial revision")
|
||||
self.__alembic_cmd(
|
||||
"stamp",
|
||||
"835b862f9bf0",
|
||||
)
|
||||
|
||||
self.logger.info("Upgrade")
|
||||
self.__alembic_cmd(
|
||||
"upgrade",
|
||||
"head",
|
||||
)
|
||||
|
||||
async def legacy_upgrade(self) -> bool:
|
||||
vers = await self.base.execute("SELECT * FROM schema_versions")
|
||||
if vers is None:
|
||||
self.logger.warn("Cannot legacy upgrade, schema_versions table unavailable!")
|
||||
return False
|
||||
|
||||
db_vers = {}
|
||||
for x in vers:
|
||||
db_vers[x['game']] = x['version']
|
||||
|
||||
core_now_ver = int(db_vers['CORE']) + 1
|
||||
while os.path.exists(f"core/data/schema/versions/CORE_{core_now_ver}_upgrade.sql"):
|
||||
with open(f"core/data/schema/versions/CORE_{core_now_ver}_upgrade.sql", "r") as f:
|
||||
result = await self.base.execute(f.read())
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Invalid upgrade script CORE_{core_now_ver}_upgrade.sql")
|
||||
break
|
||||
|
||||
result = await self.base.execute(f"UPDATE schema_versions SET version = {core_now_ver} WHERE game = 'CORE'")
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update schema version for CORE to {core_now_ver}")
|
||||
break
|
||||
|
||||
self.logger.info(f"Upgrade CORE to version {core_now_ver}")
|
||||
core_now_ver += 1
|
||||
|
||||
for _, mod in Utils.get_all_titles().items():
|
||||
game_codes = getattr(mod, "game_codes", [])
|
||||
for game in game_codes:
|
||||
if game not in db_vers:
|
||||
self.logger.warn(f"{game} does not have an antry in schema_versions, skipping")
|
||||
continue
|
||||
|
||||
now_ver = int(db_vers[game]) + 1
|
||||
while os.path.exists(f"core/data/schema/versions/{game}_{now_ver}_upgrade.sql"):
|
||||
with open(f"core/data/schema/versions/{game}_{now_ver}_upgrade.sql", "r") as f:
|
||||
result = await self.base.execute(f.read())
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Invalid upgrade script {game}_{now_ver}_upgrade.sql")
|
||||
break
|
||||
|
||||
result = await self.base.execute(f"UPDATE schema_versions SET version = {now_ver} WHERE game = '{game}'")
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update schema version for {game} to {now_ver}")
|
||||
break
|
||||
|
||||
self.logger.info(f"Upgrade {game} to version {now_ver}")
|
||||
now_ver += 1
|
||||
|
||||
return True
|
||||
|
||||
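The new migrate() path above replays any remaining legacy *_upgrade.sql scripts, then stamps the initial Alembic revision (835b862f9bf0) and upgrades to head. Since the data helpers are now coroutines, a hedged sketch of how a management script might drive it (configuration loading is elided and assumed to happen elsewhere):

import asyncio
from core.config import CoreConfig
from core.data import Data

async def main() -> None:
    cfg = CoreConfig()          # assumption: the real tool loads core.yaml into this first
    data = Data(cfg)
    await data.migrate()        # legacy SQL upgrades -> alembic stamp -> alembic upgrade head

if __name__ == "__main__":
    asyncio.run(main())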
|
@ -69,7 +69,7 @@ arcade_owner = Table(
|
||||
|
||||
|
||||
class ArcadeData(BaseData):
|
||||
def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
|
||||
async def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
|
||||
if serial is not None:
|
||||
serial = serial.replace("-", "")
|
||||
if len(serial) == 11:
|
||||
@ -89,12 +89,12 @@ class ArcadeData(BaseData):
|
||||
self.logger.error(f"{__name__ }: Need either serial or ID to look up!")
|
||||
return None
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def put_machine(
|
||||
async def put_machine(
|
||||
self,
|
||||
arcade_id: int,
|
||||
serial: str = "",
|
||||
@ -110,13 +110,13 @@ class ArcadeData(BaseData):
|
||||
arcade=arcade_id, keychip=serial, board=board, game=game, is_cab=is_cab
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def set_machine_serial(self, machine_id: int, serial: str) -> None:
|
||||
result = self.execute(
|
||||
async def set_machine_serial(self, machine_id: int, serial: str) -> None:
|
||||
result = await self.execute(
|
||||
machine.update(machine.c.id == machine_id).values(keychip=serial)
|
||||
)
|
||||
if result is None:
|
||||
@ -125,8 +125,8 @@ class ArcadeData(BaseData):
|
||||
)
|
||||
return result.lastrowid
|
||||
|
||||
def set_machine_boardid(self, machine_id: int, boardid: str) -> None:
|
||||
result = self.execute(
|
||||
async def set_machine_boardid(self, machine_id: int, boardid: str) -> None:
|
||||
result = await self.execute(
|
||||
machine.update(machine.c.id == machine_id).values(board=boardid)
|
||||
)
|
||||
if result is None:
|
||||
@ -134,21 +134,21 @@ class ArcadeData(BaseData):
|
||||
f"Failed to update board id for machine {machine_id} -> {boardid}"
|
||||
)
|
||||
|
||||
def get_arcade(self, id: int) -> Optional[Row]:
|
||||
async def get_arcade(self, id: int) -> Optional[Row]:
|
||||
sql = arcade.select(arcade.c.id == id)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_arcade_machines(self, id: int) -> Optional[List[Row]]:
|
||||
async def get_arcade_machines(self, id: int) -> Optional[List[Row]]:
|
||||
sql = machine.select(machine.c.arcade == id)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def put_arcade(
|
||||
async def put_arcade(
|
||||
self,
|
||||
name: str,
|
||||
nickname: str = None,
|
||||
@ -171,42 +171,42 @@ class ArcadeData(BaseData):
|
||||
regional_id=regional_id,
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_arcades_managed_by_user(self, user_id: int) -> Optional[List[Row]]:
|
||||
async def get_arcades_managed_by_user(self, user_id: int) -> Optional[List[Row]]:
|
||||
sql = select(arcade).join(arcade_owner, arcade_owner.c.arcade == arcade.c.id).where(arcade_owner.c.user == user_id)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchall()
|
||||
|
||||
def get_manager_permissions(self, user_id: int, arcade_id: int) -> Optional[int]:
|
||||
async def get_manager_permissions(self, user_id: int, arcade_id: int) -> Optional[int]:
|
||||
sql = select(arcade_owner.c.permissions).where(and_(arcade_owner.c.user == user_id, arcade_owner.c.arcade == arcade_id))
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchone()
|
||||
|
||||
def get_arcade_owners(self, arcade_id: int) -> Optional[Row]:
|
||||
async def get_arcade_owners(self, arcade_id: int) -> Optional[Row]:
|
||||
sql = select(arcade_owner).where(arcade_owner.c.arcade == arcade_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def add_arcade_owner(self, arcade_id: int, user_id: int) -> None:
|
||||
async def add_arcade_owner(self, arcade_id: int, user_id: int) -> None:
|
||||
sql = insert(arcade_owner).values(arcade=arcade_id, user=user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def format_serial(
|
||||
async def format_serial(
|
||||
self, platform_code: str, platform_rev: int, serial_num: int, append: int = 4152
|
||||
) -> str:
|
||||
return f"{platform_code}{platform_rev:02d}A{serial_num:04d}{append:04d}" # 0x41 = A, 0x52 = R
|
||||
@ -217,16 +217,16 @@ class ArcadeData(BaseData):
|
||||
|
||||
return True
|
||||
|
||||
def get_arcade_by_name(self, name: str) -> Optional[List[Row]]:
|
||||
async def get_arcade_by_name(self, name: str) -> Optional[List[Row]]:
|
||||
sql = arcade.select(or_(arcade.c.name.like(f"%{name}%"), arcade.c.nickname.like(f"%{name}%")))
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_arcades_by_ip(self, ip: str) -> Optional[List[Row]]:
|
||||
async def get_arcades_by_ip(self, ip: str) -> Optional[List[Row]]:
|
||||
sql = arcade.select().where(arcade.c.ip == ip)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
@ -15,14 +15,6 @@ from core.config import CoreConfig
|
||||
|
||||
metadata = MetaData()
|
||||
|
||||
schema_ver = Table(
|
||||
"schema_versions",
|
||||
metadata,
|
||||
Column("game", String(4), primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False, server_default="1"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
event_log = Table(
|
||||
"event_log",
|
||||
metadata,
|
||||
@ -43,11 +35,11 @@ class BaseData:
|
||||
self.conn = conn
|
||||
self.logger = logging.getLogger("database")
|
||||
|
||||
def execute(self, sql: str, opts: Dict[str, Any] = {}) -> Optional[CursorResult]:
|
||||
async def execute(self, sql: str, opts: Dict[str, Any] = {}) -> Optional[CursorResult]:
|
||||
res = None
|
||||
|
||||
try:
|
||||
self.logger.info(f"SQL Execute: {''.join(str(sql).splitlines())}")
|
||||
self.logger.debug(f"SQL Execute: {''.join(str(sql).splitlines())}")
|
||||
res = self.conn.execute(text(sql), opts)
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
@ -82,52 +74,7 @@ class BaseData:
|
||||
"""
|
||||
return randrange(10000, 9999999)
|
||||
|
||||
def get_all_schema_vers(self) -> Optional[List[Row]]:
|
||||
sql = select(schema_ver)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def get_schema_ver(self, game: str) -> Optional[int]:
|
||||
sql = select(schema_ver).where(schema_ver.c.game == game)
|
||||
|
||||
result = self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
|
||||
row = result.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return row["version"]
|
||||
|
||||
def touch_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=schema_ver.c.version)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update schema version for game {game} (v{ver})"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def set_schema_ver(self, ver: int, game: str = "CORE") -> Optional[int]:
|
||||
sql = insert(schema_ver).values(game=game, version=ver)
|
||||
conflict = sql.on_duplicate_key_update(version=ver)
|
||||
|
||||
result = self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to update schema version for game {game} (v{ver})"
|
||||
)
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def log_event(
|
||||
async def log_event(
|
||||
self, system: str, type: str, severity: int, message: str, details: Dict = {}
|
||||
) -> Optional[int]:
|
||||
sql = event_log.insert().values(
|
||||
@ -137,7 +84,7 @@ class BaseData:
|
||||
message=message,
|
||||
details=json.dumps(details),
|
||||
)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
@ -147,9 +94,9 @@ class BaseData:
|
||||
|
||||
return result.lastrowid
|
||||
|
||||
def get_event_log(self, entries: int = 100) -> Optional[List[Dict]]:
|
||||
async def get_event_log(self, entries: int = 100) -> Optional[List[Dict]]:
|
||||
sql = event_log.select().limit(entries).all()
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
|
@ -27,87 +27,87 @@ aime_card = Table(
|
||||
|
||||
|
||||
class CardData(BaseData):
|
||||
def get_card_by_access_code(self, access_code: str) -> Optional[Row]:
|
||||
async def get_card_by_access_code(self, access_code: str) -> Optional[Row]:
|
||||
sql = aime_card.select(aime_card.c.access_code == access_code)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def get_card_by_id(self, card_id: int) -> Optional[Row]:
|
||||
async def get_card_by_id(self, card_id: int) -> Optional[Row]:
|
||||
sql = aime_card.select(aime_card.c.id == card_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
def update_access_code(self, old_ac: str, new_ac: str) -> None:
|
||||
async def update_access_code(self, old_ac: str, new_ac: str) -> None:
|
||||
sql = aime_card.update(aime_card.c.access_code == old_ac).values(
|
||||
access_code=new_ac
|
||||
)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(
|
||||
f"Failed to change card access code from {old_ac} to {new_ac}"
|
||||
)
|
||||
|
||||
def get_user_id_from_card(self, access_code: str) -> Optional[int]:
|
||||
async def get_user_id_from_card(self, access_code: str) -> Optional[int]:
|
||||
"""
|
||||
Given a 20 digit access code as a string, get the user id associated with that card
|
||||
"""
|
||||
card = self.get_card_by_access_code(access_code)
|
||||
card = await self.get_card_by_access_code(access_code)
|
||||
if card is None:
|
||||
return None
|
||||
|
||||
return int(card["user"])
|
||||
|
||||
def get_card_banned(self, access_code: str) -> Optional[bool]:
|
||||
async def get_card_banned(self, access_code: str) -> Optional[bool]:
|
||||
"""
|
||||
Given a 20 digit access code as a string, check if the card is banned
|
||||
"""
|
||||
card = self.get_card_by_access_code(access_code)
|
||||
card = await self.get_card_by_access_code(access_code)
|
||||
if card is None:
|
||||
return None
|
||||
if card["is_banned"]:
|
||||
return True
|
||||
return False
|
||||
def get_card_locked(self, access_code: str) -> Optional[bool]:
|
||||
async def get_card_locked(self, access_code: str) -> Optional[bool]:
|
||||
"""
|
||||
Given a 20 digit access code as a string, check if the card is locked
|
||||
"""
|
||||
card = self.get_card_by_access_code(access_code)
|
||||
card = await self.get_card_by_access_code(access_code)
|
||||
if card is None:
|
||||
return None
|
||||
if card["is_locked"]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def delete_card(self, card_id: int) -> None:
|
||||
async def delete_card(self, card_id: int) -> None:
|
||||
sql = aime_card.delete(aime_card.c.id == card_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to delete card with id {card_id}")
|
||||
|
||||
def get_user_cards(self, aime_id: int) -> Optional[List[Row]]:
|
||||
async def get_user_cards(self, aime_id: int) -> Optional[List[Row]]:
|
||||
"""
|
||||
Returns all cards owned by a user
|
||||
"""
|
||||
sql = aime_card.select(aime_card.c.user == aime_id)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def create_card(self, user_id: int, access_code: str) -> Optional[int]:
|
||||
async def create_card(self, user_id: int, access_code: str) -> Optional[int]:
|
||||
"""
|
||||
Given a aime_user id and a 20 digit access code as a string, create a card and return the ID if successful
|
||||
"""
|
||||
sql = aime_card.insert().values(user=user_id, access_code=access_code)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
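Because CardData's methods are now coroutines, every call site has to await them from an event loop; a synchronous helper that previously called create_card directly would need a wrapper along these lines (illustrative only, the user id and access code below are made up):

import asyncio
from core.config import CoreConfig
from core.data import Data

async def register_card(user_id: int, access_code: str) -> None:
    data = Data(CoreConfig())                       # assumption: config loaded elsewhere
    existing = await data.card.get_card_by_access_code(access_code)
    if existing is None:
        card_id = await data.card.create_card(user_id, access_code)
        print(f"created card {card_id} for user {user_id}")

asyncio.run(register_card(1, "00000000000000000001"))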
@ -1,4 +1,3 @@
|
||||
from enum import Enum
|
||||
from typing import Optional, List
|
||||
from sqlalchemy import Table, Column
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP
|
||||
@ -24,15 +23,8 @@ aime_user = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class PermissionBits(Enum):
|
||||
PermUser = 1
|
||||
PermMod = 2
|
||||
PermSysAdmin = 4
|
||||
|
||||
|
||||
class UserData(BaseData):
|
||||
def create_user(
|
||||
async def create_user(
|
||||
self,
|
||||
id: int = None,
|
||||
username: str = None,
|
||||
@ -60,20 +52,20 @@ class UserData(BaseData):
|
||||
username=username, email=email, password=password, permissions=permission
|
||||
)
|
||||
|
||||
result = self.execute(conflict)
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
def get_user(self, user_id: int) -> Optional[Row]:
|
||||
async def get_user(self, user_id: int) -> Optional[Row]:
|
||||
sql = select(aime_user).where(aime_user.c.id == user_id)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchone()
|
||||
|
||||
def check_password(self, user_id: int, passwd: bytes = None) -> bool:
|
||||
usr = self.get_user(user_id)
|
||||
async def check_password(self, user_id: int, passwd: bytes = None) -> bool:
|
||||
usr = await self.get_user(user_id)
|
||||
if usr is None:
|
||||
return False
|
||||
|
||||
@ -85,39 +77,34 @@ class UserData(BaseData):
|
||||
|
||||
return bcrypt.checkpw(passwd, usr["password"].encode())
|
||||
|
||||
def reset_autoincrement(self, ai_value: int) -> None:
|
||||
# ALTER TABLE isn't in sqlalchemy so we do this the ugly way
|
||||
sql = f"ALTER TABLE aime_user AUTO_INCREMENT={ai_value}"
|
||||
self.execute(sql)
|
||||
|
||||
def delete_user(self, user_id: int) -> None:
|
||||
async def delete_user(self, user_id: int) -> None:
|
||||
sql = aime_user.delete(aime_user.c.id == user_id)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to delete user with id {user_id}")
|
||||
|
||||
def get_unregistered_users(self) -> List[Row]:
|
||||
async def get_unregistered_users(self) -> List[Row]:
|
||||
"""
|
||||
Returns a list of users who have not registered with the webui. They may or may not have cards.
|
||||
"""
|
||||
sql = select(aime_user).where(aime_user.c.password == None)
|
||||
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
def find_user_by_email(self, email: str) -> Row:
|
||||
async def find_user_by_email(self, email: str) -> Row:
|
||||
sql = select(aime_user).where(aime_user.c.email == email)
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchone()
|
||||
|
||||
def find_user_by_username(self, username: str) -> List[Row]:
|
||||
async def find_user_by_username(self, username: str) -> List[Row]:
|
||||
sql = aime_user.select(aime_user.c.username.like(f"%{username}%"))
|
||||
result = self.execute(sql)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchall()
|
||||
|
953 core/frontend.py
File diff suppressed because it is too large
@ -1,4 +0,0 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>{{ arcade.name }}</h1>
|
||||
{% endblock content %}
|
@ -1,5 +0,0 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
{% include "core/frontend/widgets/err_banner.jinja" %}
|
||||
<h1>Machine Management</h1>
|
||||
{% endblock content %}
|
@ -1,103 +0,0 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>System Management</h1>
|
||||
|
||||
<div class="row" id="rowForm">
|
||||
{% if sesh.permissions >= 2 %}
|
||||
<div class="col-sm-6" style="max-width: 25%;">
|
||||
<form id="usrLookup" name="usrLookup" action="/sys/lookup.user" class="form-inline">
|
||||
<h3>User Search</h3>
|
||||
<div class="form-group">
|
||||
<label for="usrId">User ID</label>
|
||||
<input type="number" class="form-control" id="usrId" name="usrId">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="usrName">Username</label>
|
||||
<input type="text" class="form-control" id="usrName" name="usrName">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="usrEmail">Email address</label>
|
||||
<input type="email" class="form-control" id="usrEmail" name="usrEmail" aria-describedby="emailHelp">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if sesh.permissions >= 4 %}
|
||||
<div class="col-sm-6" style="max-width: 25%;">
|
||||
<form id="arcadeLookup" name="arcadeLookup" action="/sys/lookup.arcade" class="form-inline" >
|
||||
<h3>Arcade Search</h3>
|
||||
<div class="form-group">
|
||||
<label for="arcadeId">Arcade ID</label>
|
||||
<input type="number" class="form-control" id="arcadeId" name="arcadeId">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="arcadeName">Arcade Name</label>
|
||||
<input type="text" class="form-control" id="arcadeName" name="arcadeName">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="arcadeUser">Owner User ID</label>
|
||||
<input type="number" class="form-control" id="arcadeUser" name="arcadeUser">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="arcadeIp">Assigned IP Address</label>
|
||||
<input type="text" class="form-control" id="arcadeIp" name="arcadeIp">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
<div class="col-sm-6" style="max-width: 25%;">
|
||||
<form id="cabLookup" name="cabLookup" action="/sys/lookup.cab" class="form-inline" >
|
||||
<h3>Machine Search</h3>
|
||||
<div class="form-group">
|
||||
<label for="cabId">Machine ID</label>
|
||||
<input type="number" class="form-control" id="cabId" name="cabId">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="cabSerial">Machine Serial</label>
|
||||
<input type="text" class="form-control" id="cabSerial" name="cabSerial">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="cabAcId">Arcade ID</label>
|
||||
<input type="number" class="form-control" id="cabAcId" name="cabAcId">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="row" id="rowResult" style="margin: 10px;">
|
||||
{% if sesh.permissions >= 2 %}
|
||||
<div id="userSearchResult" class="col-sm-6" style="max-width: 25%;">
|
||||
{% for usr in usrlist %}
|
||||
<a href=/user/{{ usr.id }}><pre>{{ usr.id }} | {{ usr.username if usr.username != None else "<i>No Name Set</i>"}}</pre></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if sesh.permissions >= 4 %}
|
||||
<div id="arcadeSearchResult" class="col-sm-6" style="max-width: 25%;">
|
||||
{% for ac in aclist %}
|
||||
<pre><a href=/arcade/{{ ac.id }}>{{ ac.id }} | {{ ac.name if ac.name != None else "<i>No Name Set</i>" }} | {{ ac.ip if ac.ip != None else "<i>No IP Assigned</i>"}}</pre></a>
|
||||
{% endfor %}
|
||||
</div
|
||||
><div id="cabSearchResult" class="col-sm-6" style="max-width: 25%;">
|
||||
{% for cab in cablist %}
|
||||
<a href=/cab/{{ cab.id }}><pre>{{ cab.id }} | {{ cab.game if cab.game != None else "<i>ANY </i>" }} | {{ cab.serial }}</pre></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="row" id="rowAdd">
|
||||
|
||||
</div>
|
||||
{% endblock content %}
|
@ -1,41 +0,0 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Management for {{ username }}</h1>
|
||||
<h2>Cards <button class="btn btn-success" data-bs-toggle="modal" data-bs-target="#card_add">Add</button></h2>
|
||||
<ul style="font-size: 20px;">
|
||||
{% for c in cards %}
|
||||
<li>{{ c.access_code }}: {{ c.status }} {% if c.status == 'Active'%}<button class="btn-warning btn">Lock</button>{% elif c.status == 'Locked' %}<button class="btn-warning btn">Unlock</button>{% endif %} <button class="btn-danger btn">Delete</button></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
||||
{% if arcades is defined %}
|
||||
<h2>Arcades</h2>
|
||||
<ul style="font-size: 20px;">
|
||||
{% for a in arcades %}
|
||||
<li><a href=/arcade/{{ a.id }}>{{ a.name }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
<div class="modal fade" id="card_add" tabindex="-1" aria-labelledby="card_add_label" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h1 class="modal-title fs-5" id="card_add_label">Add Card</h1>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
HOW TO:<br>
|
||||
Scan your card on any networked game and press the "View Access Code" button (varies by game) and enter the 20 digit code below.<br>
|
||||
!!FOR AMUSEIC CARDS: DO NOT ENTER THE CODE SHOWN ON THE BACK OF THE CARD ITSELF OR IT WILL NOT WORK!!
|
||||
<p /><label for="card_add_frm_access_code">Access Code: </label><input id="card_add_frm_access_code" maxlength="20" type="text" required>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-primary">Add</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock content %}
|
@ -1,18 +0,0 @@
|
||||
{% if error > 0 %}
|
||||
<div class="err-banner">
|
||||
<h3>Error</h3>
|
||||
{% if error == 1 %}
|
||||
Card not registered, or wrong password
|
||||
{% elif error == 2 %}
|
||||
Missing or malformed access code
|
||||
{% elif error == 3 %}
|
||||
Failed to create user
|
||||
{% elif error == 4 %}
|
||||
Arcade not found
|
||||
{% elif error == 5 %}
|
||||
Machine not found
|
||||
{% else %}
|
||||
An unknown error occoured
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
@ -1,8 +1,7 @@
|
||||
from typing import Dict, Any, Optional, List
|
||||
from typing import Dict, Any, Optional
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web import resource
|
||||
from twisted.web.http import Request
|
||||
from starlette.requests import Request
|
||||
from datetime import datetime
|
||||
from Crypto.Cipher import Blowfish
|
||||
import pytz
|
||||
@ -12,7 +11,7 @@ from .utils import Utils
|
||||
from .title import TitleServlet
|
||||
|
||||
class MuchaServlet:
|
||||
mucha_registry: List[str] = []
|
||||
mucha_registry: Dict[str, str] = {}
|
||||
def __init__(self, cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
self.config = cfg
|
||||
self.config_dir = cfg_dir
|
||||
@ -39,11 +38,12 @@ class MuchaServlet:
|
||||
|
||||
for _, mod in TitleServlet.title_registry.items():
|
||||
if hasattr(mod, "get_mucha_info"):
|
||||
enabled, game_cd = mod.get_mucha_info(
|
||||
enabled, game_cds, netid_prefixes = mod.get_mucha_info(
|
||||
self.config, self.config_dir
|
||||
)
|
||||
if enabled:
|
||||
self.mucha_registry.append(game_cd)
|
||||
for x in range(len(game_cds)):
|
||||
self.mucha_registry[game_cds[x]] = netid_prefixes[x]
|
||||
|
||||
self.logger.info(f"Serving {len(self.mucha_registry)} games")
|
||||
|
||||
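The registry change above implies a new return contract for title modules: get_mucha_info now yields the enable flag plus parallel lists of game codes and NetID prefixes, which this loop zips into the mucha_registry dict. A hedged sketch of what a title servlet is expected to return under that contract (the class name and values are illustrative, not from this commit):

from typing import List, Tuple
from core.config import CoreConfig

class ExampleTitleServlet:  # hypothetical title module
    def get_mucha_info(
        self, core_cfg: CoreConfig, cfg_dir: str
    ) -> Tuple[bool, List[str], List[str]]:
        # enabled, game codes, and one NetID prefix per game code
        return True, ["SXXX"], ["ABCx"]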
@ -75,7 +75,7 @@ class MuchaServlet:
|
||||
self.logger.debug(f"Decrypt SN to {sn_decrypt.hex()}")
|
||||
|
||||
resp = MuchaAuthResponse(
|
||||
f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}"
|
||||
f"{self.config.server.hostname}{':' + str(self.config.server.port) if self.config.server.is_develop else ''}"
|
||||
)
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
@ -100,7 +100,7 @@ class MuchaServlet:
|
||||
self.logger.warning(f"Unknown gameCd {req.gameCd}")
|
||||
return b"RESULTS=000"
|
||||
|
||||
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.mucha.hostname}{':' + str(self.config.allnet.port) if self.config.server.is_develop else ''}")
|
||||
resp = MuchaUpdateResponse(req.gameVer, f"{self.config.server.hostname}{':' + str(self.config.server.port) if self.config.server.is_develop else ''}")
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
|
||||
|
19 core/templates/arcade/index.jinja Normal file
@ -0,0 +1,19 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
{% if arcade is defined %}
|
||||
<h1>{{ arcade.name }}</h1>
|
||||
<h2>PCBs assigned to this arcade <button class="btn btn-success" id="btn_add_cab" onclick="toggle_add_cab_form()">Add</button></h2>
|
||||
{% if success is defined and success == 3 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Cab added successfully
|
||||
</div>
|
||||
{% endif %}
|
||||
<ul style="font-size: 20px;">
|
||||
{% for c in arcade.cabs %}
|
||||
<li><a href="/cab/{{ c.id }}">{{ c.serial }} ({{ c.game }})</a> <button class="btn btn-secondary" onclick="prep_edit_form()">Edit</button> <button class="btn-danger btn">Delete</button></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% else %}
|
||||
<h3>Arcade Not Found</h3>
|
||||
{% endif %}
|
||||
{% endblock content %}
|
@ -1,4 +1,4 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Create User</h1>
|
||||
<form id="create" style="max-width: 240px; min-width: 10%;" action="/gate/gate.create" method="post">
|
@ -1,7 +1,7 @@
|
||||
{% extends "core/frontend/index.jinja" %}
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Gate</h1>
|
||||
{% include "core/frontend/widgets/err_banner.jinja" %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
<style>
|
||||
/* Chrome, Safari, Edge, Opera */
|
||||
input::-webkit-outer-spin-button,
|
@ -84,7 +84,7 @@
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{% include "core/frontend/widgets/topbar.jinja" %}
|
||||
{% include "core/templates/widgets/topbar.jinja" %}
|
||||
{% block content %}
|
||||
<h1>{{ server_name }}</h1>
|
||||
{% endblock content %}
|
4 core/templates/machine/index.jinja Normal file
@ -0,0 +1,4 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Machine Management</h1>
|
||||
{% endblock content %}
|
69 core/templates/sys/index.jinja Normal file
@ -0,0 +1,69 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>System Management</h1>
|
||||
{% if error is defined %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
{% endif %}
|
||||
<div class="row" id="rowForm">
|
||||
{% if "{:08b}".format(sesh.permissions)[6] == "1" %}
|
||||
<div class="col-sm-6" style="max-width: 25%;">
|
||||
<form id="usrLookup" name="usrLookup" action="/sys/lookup.user" class="form-inline">
|
||||
<h3>User Search</h3>
|
||||
<div class="form-group">
|
||||
<label for="usrId">User ID</label>
|
||||
<input type="number" class="form-control" id="usrId" name="usrId">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="usrName">Username</label>
|
||||
<input type="text" class="form-control" id="usrName" name="usrName">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="usrEmail">Email address</label>
|
||||
<input type="email" class="form-control" id="usrEmail" name="usrEmail" aria-describedby="emailHelp">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if "{:08b}".format(sesh.permissions)[5] == "1" %}
|
||||
<div class="col-sm-6" style="max-width: 25%;">
|
||||
<form id="shopLookup" name="shopLookup" action="/sys/lookup.shop" class="form-inline">
|
||||
<h3>Shop search</h3>
|
||||
<div class="form-group">
|
||||
<label for="shopId">Shop ID</label>
|
||||
<input type="number" class="form-control" id="shopId" name="shopId">
|
||||
</div>
|
||||
OR
|
||||
<div class="form-group">
|
||||
<label for="serialNum">Serial Number</label>
|
||||
<input type="text" class="form-control" id="serialNum" name="serialNum" maxlength="15">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Search</button>
|
||||
</form>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="row" id="rowResult" style="margin: 10px;">
|
||||
{% if "{:08b}".format(sesh.permissions)[6] == "1" %}
|
||||
<div id="userSearchResult" class="col-sm-6" style="max-width: 25%;">
|
||||
{% for usr in usrlist %}
|
||||
<a href=/user/{{ usr.id }}><pre>{{ usr.username if usr.username is not none else "<i>No Name Set</i>"}}</pre></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if "{:08b}".format(sesh.permissions)[5] == "1" %}
|
||||
<div id="shopSearchResult" class="col-sm-6" style="max-width: 25%;">
|
||||
{% for shop in shoplist %}
|
||||
<a href="/shop/{{ shop.id }}"><pre>{{ shop.name if shop.name else "<i>No Name Set</i>"}}</pre></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="row" id="rowAdd">
|
||||
|
||||
</div>
|
||||
{% endblock content %}
|
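For reference, the "{:08b}".format(sesh.permissions)[n] checks in the template above test individual permission bits: index 7 is the least-significant bit, so index 6 corresponds to value 2 (PermMod) and index 5 to value 4 (PermSysAdmin), matching the PermissionBits values shown in the user.py hunk earlier in this diff. An equivalent, purely illustrative check in Python:

PERM_USER, PERM_MOD, PERM_SYSADMIN = 1, 2, 4   # values from the PermissionBits enum in this diff

def has_perm(permissions: int, bit: int) -> bool:
    # Same test as the template's string indexing, without the formatting round-trip:
    # "{:08b}".format(p)[6] == "1"  <=>  p & 2 != 0, and index [5]  <=>  p & 4 != 0.
    return permissions & bit != 0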
175 core/templates/user/index.jinja Normal file
@ -0,0 +1,175 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<script type="text/javascript">
|
||||
function toggle_new_name_form() {
|
||||
let frm = document.getElementById("new_name_form");
|
||||
let btn = document.getElementById("btn_toggle_form");
|
||||
|
||||
if (frm.style['display'] != "") {
|
||||
frm.style['display'] = "";
|
||||
frm.style['max-height'] = "";
|
||||
btn.innerText = "Cancel";
|
||||
} else {
|
||||
frm.style['display'] = "none";
|
||||
frm.style['max-height'] = "0px";
|
||||
btn.innerText = "Edit";
|
||||
}
|
||||
}
|
||||
function toggle_add_card_form() {
|
||||
let btn = document.getElementById("btn_add_card");
|
||||
let dv = document.getElementById("add_card_container")
|
||||
|
||||
if (dv.style['display'] != "") {
|
||||
btn.innerText = "Cancel";
|
||||
dv.style['display'] = "";
|
||||
} else {
|
||||
btn.innerText = "Add";
|
||||
dv.style['display'] = "none";
|
||||
}
|
||||
}
|
||||
|
||||
function prep_edit_form(access_code, chip_id, idm, card_type, u_memo) {
|
||||
ac = document.getElementById("card_edit_frm_access_code");
|
||||
cid = document.getElementById("card_edit_frm_chip_id");
|
||||
fidm = document.getElementById("card_edit_frm_idm");
|
||||
memo = document.getElementById("card_edit_frm_memo");
|
||||
|
||||
if (chip_id == "None" || chip_id == undefined) {
|
||||
chip_id = ""
|
||||
}
|
||||
if (idm == "None" || idm == undefined) {
|
||||
idm = ""
|
||||
}
|
||||
if (u_memo == "None" || u_memo == undefined) {
|
||||
u_memo = ""
|
||||
}
|
||||
|
||||
ac.value = access_code;
|
||||
cid.value = chip_id;
|
||||
fidm.value = idm;
|
||||
memo.value = u_memo;
|
||||
|
||||
if (card_type == "AmusementIC") {
|
||||
cid.disabled = true;
|
||||
fidm.disabled = false;
|
||||
} else {
|
||||
cid.disabled = false;
|
||||
fidm.disabled = true;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<h1>Management for {{ username }} <button onclick="toggle_new_name_form()" class="btn btn-secondary" id="btn_toggle_form">Edit</button></h1>
|
||||
{% if error is defined %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
{% endif %}
|
||||
{% if success is defined and success == 2 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Update successful
|
||||
</div>
|
||||
{% endif %}
|
||||
<form style="max-width: 33%; display: none; max-height: 0px;" action="/user/update.name" method="post" id="new_name_form">
|
||||
<div class="mb-3">
|
||||
<label for="new_name" class="form-label">New Nickname</label>
|
||||
<input type="text" class="form-control" id="new_name" name="new_name" aria-describedby="new_name_help">
|
||||
<div id="new_name_help" class="form-text">Must be 10 characters or less</div>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary">Submit</button>
|
||||
</form>
|
||||
<p></p>
|
||||
<h2>Cards <button class="btn btn-success" id="btn_add_card" onclick="toggle_add_card_form()">Add</button></h2>
|
||||
{% if success is defined and success == 3 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Card added successfully
|
||||
</div>
|
||||
{% endif %}
|
||||
<div id="add_card_container" style="display: none; max-width: 33%;">
|
||||
<form action="/user/add.card" method="post", id="frm_add_card">
|
||||
<label class="form-label" for="card_add_frm_access_code">Access Code:</label>
|
||||
<input class="form-control" name="add_access_code" id="card_add_frm_access_code" maxlength="20" type="text" required aria-describedby="ac_help">
|
||||
<div id="ac_help" class="form-text">20 digit code on the back of the card.</div>
|
||||
<button type="submit" class="btn btn-primary">Add</button>
|
||||
</form>
|
||||
<br>
|
||||
</div>
|
||||
<ul style="font-size: 20px;">
|
||||
{% for c in cards %}
|
||||
<li>{{ c.access_code }} ({{ c.type}}): {{ c.status }} <button onclick="prep_edit_form('{{ c.access_code }}', '{{ c.chip_id}}', '{{ c.idm }}', '{{ c.type }}', '{{ c.memo }}')" data-bs-toggle="modal" data-bs-target="#card_edit" class="btn btn-secondary" id="btn_edit_card_{{ c.access_code }}">Edit</button> {% if c.status == 'Active'%}<button class="btn-warning btn">Lock</button>{% elif c.status == 'Locked' %}<button class="btn-warning btn">Unlock</button>{% endif %} <button class="btn-danger btn">Delete</button></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
|
||||
<h2>Reset Password</h2>
|
||||
{% if success is defined and success == 1 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Update successful
|
||||
</div>
|
||||
{% endif %}
|
||||
<form style="max-width: 33%;" action="/user/update.pw" method="post">
|
||||
<div class="mb-3">
|
||||
<label for="current_pw" class="form-label">Current Password</label>
|
||||
<input type="password" class="form-control" id="current_pw" name="current_pw">
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="password1" class="form-label">New Password</label>
|
||||
<input type="password" class="form-control" id="password1" name="password1" aria-describedby="password_help">
|
||||
<div id="password_help" class="form-text">Password must be at least 10 characters long, contain an upper and lowercase character, number, and special character</div>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="password2" class="form-label">Retype New Password</label>
|
||||
<input type="password" class="form-control" id="password2" name="password2">
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary">Submit</button>
|
||||
</form>
|
||||
|
||||
{% if arcades is defined and arcades|length > 0 %}
|
||||
<h2>Arcades</h2>
|
||||
<ul>
|
||||
{% for a in arcades %}
|
||||
<li><h3>{{ a.name }}</h3>
|
||||
{% if a.machines|length > 0 %}
|
||||
<table>
|
||||
<tr><th>Serial</th><th>Game</th><th>Last Seen</th></tr>
|
||||
{% for m in a.machines %}
|
||||
<tr><td>{{ m.serial }}</td><td>{{ m.game }}</td><td>{{ m.last_seen }}</td></tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
{% endif %}
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
<div class="modal fade" id="card_edit" tabindex="-1" aria-labelledby="card_edit_label" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h1 class="modal-title fs-5" id="card_edit_label">Edit Card</h1>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<form action="/user/edit.card" method="post" id="frm_edit_card">
|
||||
<label class="form-label" for="card_edit_frm_access_code">Access Code:</label>
|
||||
<input class="form-control" readonly name="add_access_code" id="card_edit_frm_access_code" maxlength="20" type="text" required aria-describedby="ac_help">
|
||||
<div id="ac_help" class="form-text">20 digit code on the back of the card. If this is incorrect, contact a sysadmin.</div>
|
||||
|
||||
<label class="form-label" for="card_edit_frm_memo" id="card_edit_frm_memo_lbl">Memo:</label>
|
||||
<input class="form-control" aria-describedby="memo_help" name="add_memo" id="card_edit_frm_memo" maxlength="16" type="text">
|
||||
<div id="memo_help" class="form-text">Must be 16 characters or less.</div>
|
||||
|
||||
<label class="form-label" for="card_edit_frm_idm" id="card_edit_frm_idm_lbl">FeliCa IDm:</label>
|
||||
<input class="form-control" aria-describedby="idm_help" name="add_felica_idm" id="card_edit_frm_idm" maxlength="16" type="text">
|
||||
<div id="idm_help" class="form-text">8 bytes that uniquly idenfites a FeliCa card. Obtained by reading the card with an NFC reader.</div>
|
||||
|
||||
<label class="form-label" for="card_edit_frm_chip_id" id="card_edit_frm_chip_id_lbl">Mifare UID:</label>
|
||||
<input class="form-control" aria-describedby="chip_id_help" name="add_mifare_chip_id" id="card_edit_frm_chip_id" maxlength="8" type="text">
|
||||
<div id="chip_id_help" class="form-text">4 byte integer that uniquly identifies a Mifare card. Obtained by reading the card with an NFC reader.</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="submit" class="btn btn-primary" form="frm_edit_card">Edit</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock content %}
|
29
core/templates/widgets/err_banner.jinja
Normal file
@ -0,0 +1,29 @@
|
||||
{% if error > 0 %}
|
||||
<div class="err-banner">
|
||||
<h3>Error</h3>
|
||||
{% if error == 1 %}
|
||||
Card not registered, or wrong password
|
||||
{% elif error == 2 %}
|
||||
Missing or malformed access code
|
||||
{% elif error == 3 %}
|
||||
Failed to create user
|
||||
{% elif error == 4 %}
|
||||
Required field not filled or invalid
|
||||
{% elif error == 5 %}
|
||||
Incorrect old password
|
||||
{% elif error == 6 %}
|
||||
Passwords don't match
|
||||
{% elif error == 7 %}
|
||||
New password not acceptable
|
||||
{% elif error == 8 %}
|
||||
New Nickname too long
|
||||
{% elif error == 9 %}
|
||||
You must be logged in to perform this action
|
||||
{% elif error == 10 %}
|
||||
Invalid serial number
|
||||
{% else %}
|
||||
An unknown error occurred
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
@ -3,19 +3,20 @@
|
||||
</div>
|
||||
<div style="background: #333; color: #f9f9f9; width: 80%; height: 50px; line-height: 50px; padding-left: 10px; float: left;">
|
||||
<a href=/><button class="btn btn-primary">Home</button></a>
|
||||
{% for game in game_list %}
|
||||
<a href=/game/{{ game.url }}><button class="btn btn-success">{{ game.name }}</button></a>
|
||||
{% for game, data in game_list|items %}
|
||||
<a href=/game{{ data.url }}/><button class="btn btn-success">{{ game }}</button></a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
<div style="background: #333; color: #f9f9f9; width: 10%; height: 50px; line-height: 50px; text-align: center; float: left;">
|
||||
{% if sesh is defined and sesh["permissions"] >= 2 %}
|
||||
<a href="/sys"><button class="btn btn-primary">System</button></a>
|
||||
<a href="/sys/"><button class="btn btn-primary">System</button></a>
|
||||
{% endif %}
|
||||
{% if sesh is defined and sesh["userId"] > 0 %}
|
||||
<a href="/user"><button class="btn btn-primary">Account</button></a>
|
||||
{% if sesh is defined and sesh["user_id"] > 0 %}
|
||||
<a href="/user/"><button class="btn btn-primary">Account</button></a>
|
||||
<a href="/user/logout"><button class="btn btn-danger">Logout</button></a>
|
||||
{% else %}
|
||||
<a href="/gate"><button class="btn btn-primary">Gate</button></a>
|
||||
<a href="/gate/"><button class="btn btn-primary">Gate</button></a>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
@ -1,12 +1,24 @@
|
||||
from typing import Dict, List, Tuple
|
||||
from typing import Dict, List, Tuple, Any
|
||||
import json
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web.http import Request
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
from starlette.routing import Route
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
from core.utils import Utils
|
||||
|
||||
class JSONResponseNoASCII(Response):
|
||||
media_type = "application/json"
|
||||
|
||||
def render(self, content: Any) -> bytes:
|
||||
return json.dumps(
|
||||
content,
|
||||
ensure_ascii=False,
|
||||
).encode("utf-8")
|
||||
|
||||
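For context, a minimal sketch of how a response class like this would typically be used from a Starlette endpoint; the route path and payload below are illustrative, not part of this commit:

```python
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Route

async def get_game_setting(request: Request):
    # JSONResponseNoASCII is the class defined above; ensure_ascii=False keeps
    # Japanese strings readable instead of turning them into \uXXXX escapes.
    return JSONResponseNoASCII({"gameSetting": {"romVersion": "2.00.00", "shopName": "ゲームセンター"}})

app = Starlette(routes=[Route("/Example/200/GetGameSettingApi", get_game_setting, methods=["POST"])])
```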
class BaseServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
self.core_cfg = core_cfg
|
||||
@ -28,18 +40,16 @@ class BaseServlet:
|
||||
"""
|
||||
return False
|
||||
|
||||
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
|
||||
def get_routes(self) -> List[Route]:
|
||||
"""Called during boot to get all matcher endpoints this title servlet handles
|
||||
|
||||
Returns:
|
||||
Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]: A 2-length tuple where offset 0 is GET and offset 1 is POST,
|
||||
containing a list of 3-length tuples where offset 0 is the name of the function in the handler that should be called, offset 1
|
||||
is the matching string, and offset 2 is a dict containing rules for the matcher.
|
||||
List[Route]: A list of Routes, WebSocketRoutes, or similar classes
|
||||
"""
|
||||
return (
|
||||
[("render_GET", "/{game}/{version}/{endpoint}", {'game': R'S...'})],
|
||||
[("render_POST", "/{game}/{version}/{endpoint}", {'game': R'S...'})]
|
||||
)
|
||||
return [
|
||||
Route("/{game}/{version}/{endpoint}", self.render_POST, methods=["POST"]),
|
||||
Route("/{game}/{version}/{endpoint}", self.render_GET, methods=["GET"]),
|
||||
]
|
||||
|
||||
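As a hedged illustration of the new contract (the servlet name and paths below are hypothetical, not from this commit), a game servlet now returns Starlette `Route` objects directly instead of matcher tuples:

```python
from typing import List
from starlette.routing import Route

class ExampleServlet(BaseServlet):
    def get_routes(self) -> List[Route]:
        # Starlette path converters replace the old regex-style matcher rules.
        return [
            Route("/SDXX/{version:int}/{endpoint}", self.render_POST, methods=["POST"]),
            Route("/SDXX/{version:int}/data/{file:path}", self.render_GET, methods=["GET"]),
        ]
```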
def setup(self) -> None:
|
||||
"""Called once during boot, should contain any additional setup the handler must do, such as starting any sub-services
|
||||
@ -58,11 +68,11 @@ class BaseServlet:
|
||||
Tuple[str, str]: A tuple where offset 0 is the allnet uri field, and offset 1 is the allnet host field
|
||||
"""
|
||||
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
|
||||
return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "")
|
||||
return (f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", "")
|
||||
|
||||
return (f"http://{self.core_cfg.title.hostname}/{game_code}/{game_ver}/", "")
|
||||
return (f"http://{self.core_cfg.server.hostname}/{game_code}/{game_ver}/", "")
|
||||
|
||||
def get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, str]:
|
||||
def get_mucha_info(self, core_cfg: CoreConfig, cfg_dir: str) -> Tuple[bool, List[str], List[str]]:
|
||||
"""Called once during boot to check if this game is a mucha game
|
||||
|
||||
Args:
|
||||
@ -72,15 +82,15 @@ class BaseServlet:
|
||||
Returns:
|
||||
Tuple[bool, str]: Tuple where offset 0 is true if the game is enabled, false otherwise, and offset 1 is the game CD
|
||||
"""
|
||||
return (False, "")
|
||||
return (False, [], [])
|
||||
|
||||
def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
self.logger.warn(f"{game_code} Does not dispatch POST")
|
||||
return None
|
||||
async def render_POST(self, request: Request) -> bytes:
|
||||
self.logger.warn(f"Game Does not dispatch POST")
|
||||
return Response()
|
||||
|
||||
def render_GET(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
self.logger.warn(f"{game_code} Does not dispatch GET")
|
||||
return None
|
||||
async def render_GET(self, request: Request) -> bytes:
|
||||
self.logger.warn(f"Game Does not dispatch GET")
|
||||
return Response()
|
||||
|
||||
class TitleServlet:
|
||||
title_registry: Dict[str, BaseServlet] = {}
|
||||
@ -136,7 +146,7 @@ class TitleServlet:
|
||||
self.logger.error(f"{folder} missing game_code or index in __init__.py, or is_game_enabled in index")
|
||||
|
||||
self.logger.info(
|
||||
f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.title.port) if core_cfg.title.port > 0 else ''}"
|
||||
f"Serving {len(self.title_registry)} game codes {'on port ' + str(core_cfg.server.port) if core_cfg.server.port > 0 else ''}"
|
||||
)
|
||||
|
||||
def render_GET(self, request: Request, endpoints: dict) -> bytes:
|
||||
|
@ -1,6 +1,6 @@
|
||||
from typing import Dict, Any, Optional
|
||||
from types import ModuleType
|
||||
from twisted.web.http import Request
|
||||
from starlette.requests import Request
|
||||
import logging
|
||||
import importlib
|
||||
from os import walk
|
||||
@ -34,33 +34,21 @@ class Utils:
|
||||
|
||||
@classmethod
|
||||
def get_ip_addr(cls, req: Request) -> str:
|
||||
return (
|
||||
req.getAllHeaders()[b"x-forwarded-for"].decode()
|
||||
if b"x-forwarded-for" in req.getAllHeaders()
|
||||
else req.getClientAddress().host
|
||||
)
|
||||
return req.headers.get("x-forwarded-for", req.client.host)
|
||||
|
||||
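One caveat worth noting (not addressed by this commit): behind chained proxies, `X-Forwarded-For` can carry a comma-separated list of hops, in which case the left-most entry is the original client. A defensive sketch of the same classmethod:

```python
@classmethod
def get_ip_addr(cls, req: Request) -> str:
    # "client, proxy1, proxy2" -> take the first hop; fall back to the socket peer
    forwarded = req.headers.get("x-forwarded-for")
    return forwarded.split(",")[0].strip() if forwarded else req.client.host
```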
@classmethod
|
||||
def get_title_port(cls, cfg: CoreConfig):
|
||||
if cls.real_title_port is not None: return cls.real_title_port
|
||||
|
||||
if cfg.title.port == 0:
|
||||
cls.real_title_port = cfg.allnet.port
|
||||
|
||||
else:
|
||||
cls.real_title_port = cfg.title.port
|
||||
cls.real_title_port = cfg.server.proxy_port if cfg.server.is_using_proxy and cfg.server.proxy_port else cfg.server.port
|
||||
|
||||
return cls.real_title_port
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_title_port_ssl(cls, cfg: CoreConfig):
|
||||
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
|
||||
|
||||
if cfg.title.port_ssl == 0:
|
||||
cls.real_title_port_ssl = 443
|
||||
|
||||
else:
|
||||
cls.real_title_port_ssl = cfg.title.port_ssl
|
||||
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else Utils.get_title_port(cfg)
|
||||
|
||||
return cls.real_title_port_ssl
|
||||
|
||||
|
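To make the proxy-aware port resolution above concrete, here is a small hypothetical helper (not part of the commit) showing how the resolved port typically feeds a client-facing URI, mirroring the logic of `get_allnet_info()`:

```python
def build_title_uri(cfg: CoreConfig, game_code: str, game_ver: int) -> str:
    """Sketch only: trace how Utils.get_title_port() ends up in a URI handed to games."""
    port = Utils.get_title_port(cfg)
    if not cfg.server.is_using_proxy and port != 80:
        # Direct setups expose the listen port explicitly.
        return f"http://{cfg.server.hostname}:{port}/{game_code}/{game_ver}/"
    # Behind a proxy (or on plain port 80) games get a bare hostname URI.
    return f"http://{cfg.server.hostname}/{game_code}/{game_ver}/"
```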
69
dbutils.py
@ -1,9 +1,12 @@
|
||||
import yaml
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import logging
|
||||
from core.config import CoreConfig
|
||||
from os import mkdir, path, access, W_OK
|
||||
import yaml
|
||||
import asyncio
|
||||
|
||||
from core.data import Data
|
||||
from os import path, mkdir, access, W_OK
|
||||
from core.config import CoreConfig
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Database utilities")
|
||||
@ -16,19 +19,9 @@ if __name__ == "__main__":
|
||||
type=str,
|
||||
help="Version of the database to upgrade/rollback to",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--game",
|
||||
"-g",
|
||||
type=str,
|
||||
help="Game code of the game who's schema will be updated/rolled back. Ex. SDFE",
|
||||
)
|
||||
parser.add_argument("--email", "-e", type=str, help="Email for the new user")
|
||||
parser.add_argument("--old_ac", "-o", type=str, help="Access code to transfer from")
|
||||
parser.add_argument("--new_ac", "-n", type=str, help="Access code to transfer to")
|
||||
parser.add_argument("--force", "-f", type=bool, help="Force the action to happen")
|
||||
parser.add_argument(
|
||||
"action", type=str, help="DB Action, create, recreate, upgrade, or rollback"
|
||||
)
|
||||
parser.add_argument("--access_code", "-a", type=str, help="Access code for new/transfer user", default="00000000000000000000")
|
||||
parser.add_argument("action", type=str, help="create, upgrade, create-owner")
|
||||
args = parser.parse_args()
|
||||
|
||||
cfg = CoreConfig()
|
||||
@ -50,42 +43,18 @@ if __name__ == "__main__":
|
||||
|
||||
if args.action == "create":
|
||||
data.create_database()
|
||||
|
||||
elif args.action == "recreate":
|
||||
data.recreate_database()
|
||||
|
||||
elif args.action == "upgrade" or args.action == "rollback":
|
||||
if args.version is None:
|
||||
data.logger.warning("No version set, upgrading to latest")
|
||||
|
||||
if args.game is None:
|
||||
data.logger.warning("No game set, upgrading core schema")
|
||||
data.migrate_database(
|
||||
"CORE",
|
||||
int(args.version) if args.version is not None else None,
|
||||
args.action,
|
||||
)
|
||||
|
||||
else:
|
||||
data.migrate_database(
|
||||
args.game,
|
||||
int(args.version) if args.version is not None else None,
|
||||
args.action,
|
||||
)
|
||||
|
||||
elif args.action == "autoupgrade":
|
||||
data.autoupgrade()
|
||||
|
||||
elif args.action == "upgrade":
|
||||
data.schema_upgrade(args.version)
|
||||
|
||||
elif args.action == "create-owner":
|
||||
data.create_owner(args.email)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(data.create_owner(args.email, args.access_code))
|
||||
data.schema_upgrade(args.version)
|
||||
|
||||
elif args.action == "migrate-card":
|
||||
data.migrate_card(args.old_ac, args.new_ac, args.force)
|
||||
elif args.action == "migrate":
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(data.migrate())
|
||||
|
||||
elif args.action == "cleanup":
|
||||
data.delete_hanging_users()
|
||||
|
||||
elif args.action == "version":
|
||||
data.show_versions()
|
||||
|
||||
data.logger.info("Done")
|
||||
else:
|
||||
logging.getLogger("database").info(f"Unknown action {args.action}")
|
||||
|
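Since the data layer is now coroutine-based, every CLI action has to be driven through an event loop. The commit uses `asyncio.get_event_loop().run_until_complete(...)`; an equivalent sketch with the more modern `asyncio.run()` (assuming the same `Data` API, with `cfg` and `args` coming from the surrounding script) would be:

```python
import asyncio

async def run_action(cfg: CoreConfig, action: str, email: str, access_code: str) -> None:
    data = Data(cfg)
    if action == "create-owner":
        await data.create_owner(email, access_code)
    elif action == "migrate":
        await data.migrate()

# asyncio.run() creates, runs, and closes the loop in one call.
asyncio.run(run_action(cfg, args.action, args.email, args.access_code))
```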
@ -1,23 +1,24 @@
|
||||
# ARTEMiS Configuration
|
||||
## Server
|
||||
- `listen_address`: IP Address or hostname that the server will listen for connections on. Set to 127.0.0.1 for local only, or 0.0.0.0 for all interfaces. Default `127.0.0.1`
|
||||
- `hostname`: Hostname that gets sent to clients to tell them where to connect. Games must be able to connect to your server via the hostname or IP you specify here. Note that most games will reject `localhost` or `127.0.0.1`. Default `localhost`
|
||||
- `port`: Port that the server will listen for connections on. Default `80`
|
||||
- `ssl_key`: Location of the ssl server key for the secure title server. Ignored if you don't use SSL. Default `cert/title.key`
|
||||
- `ssl_cert`: Location of the ssl server certificate for the secure title server. Must not be a self-signed SSL. Ignored if you don't use SSL. Default `cert/title.pem`
|
||||
- `allow_user_registration`: Allows users to register in-game via the AimeDB `register` function. Disable to be able to control who can use cards on your server. Default `True`
|
||||
- `allow_unregistered_serials`: Allows games that do not have registered keychips to connect and authenticate. Disable to restrict who can connect to your server. Recommended to disable for production setups. Default `True`
|
||||
- `name`: Name for the server, used by some games in their default MOTDs. Default `ARTEMiS`
|
||||
- `is_develop`: Flags that the server is a development instance without a proxy standing in front of it. Setting to `False` tells the server not to listen for SSL, because the proxy should be handling all SSL-related things, among other things. Default `True`
|
||||
- `threading`: Flags that `reactor.run` should be called via the `Thread` standard library. May provide a speed boost, but removes the ability to kill the server via `Ctrl + C`. Default: `False`
|
||||
- `check_arcade_ip`: Checks IPs against the `arcade` table in the database, if one is defined. Default `False`
|
||||
- `strict_ip_checking`: Rejects clients if there is no IP in the `arcade` table for the respective arcade
|
||||
- `is_using_proxy`: Flags that you'll be using some other software, such as nginx, to proxy requests, and to send `proxy_port` or `proxy_port_ssl` to games instead of `port`. Default `False`
|
||||
- `proxy_port`: Which port your front-facing proxy will be listening on. Ignored if `is_using_proxy` is `False` or if set to `0`. Default `0`
|
||||
- `proxy_port_ssl`: Which port your front-facing proxy will be listening for ssl connections on. Ignored if `is_using_proxy` is `False` or if set to `0`. Default `0`
|
||||
- `log_dir`: Directory to store logs. Server MUST have read and write permissions to this directory or you will have issues. Default `logs`
|
||||
- `check_arcade_ip`: Checks IPs against the `arcade` table in the database, if one is defined. Default `False`
|
||||
- `strict_ip_checking`: Rejects clients if there is no IP in the `arcade` table for the respective arcade. Default `False`
|
||||
## Title
|
||||
- `loglevel`: Logging level for the title server. Default `info`
|
||||
- `hostname`: Hostname that gets sent to clients to tell them where to connect. Games must be able to connect to your server via the hostname or IP you specify here. Note that most games will reject `localhost` or `127.0.0.1`. Default `localhost`
|
||||
- `port`: Port that the title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `8080`
|
||||
- `port_ssl`: Port that the secure title server will listen for connections on. Set to 0 to use the Allnet handler to reduce the port footprint. Default `0`
|
||||
- `ssl_key`: Location of the ssl server key for the secure title server. Ignored if `port_ssl` is set to `0` or `is_develop` set to `False`. Default `cert/title.key`
|
||||
- `ssl_cert`: Location of the ssl server certificate for the secure title server. Must not be a self-signed SSL. Ignored if `port_ssl` is set to `0` or `is_develop` is set to `False`. Default `cert/title.pem`
|
||||
- `reboot_start_time`: 24 hour JST time that clients will see as the start of maintenance period. Leave blank for no maintenance time. Default: ""
|
||||
- `reboot_end_time`: 24 hour JST time that clients will see as the end of maintenance period. Leave blank for no maintenance time. Default: ""
|
||||
- `reboot_start_time`: 24 hour JST time that clients will see as the start of maintenance period, ex `04:00`. Leave blank for no maintenance time. Default: `""`
|
||||
- `reboot_end_time`: 24 hour JST time that clients will see as the end of maintenance period, ex `05:00`. Leave blank for no maintenance time. Default: `""`
|
||||
## Database
|
||||
- `host`: Host of the database. Default `localhost`
|
||||
- `username`: Username of the account the server should connect to the database with. Default `aime`
|
||||
@ -26,23 +27,30 @@
|
||||
- `port`: Port the database server is listening on. Default `3306`
|
||||
- `protocol`: Protocol used in the connection string, e.g. `mysql` would result in `mysql://...`. Default `mysql`
|
||||
- `sha2_password`: Whether or not the password in the connection string should be hashed via SHA2. Default `False`
|
||||
- `loglevel`: Logging level for the database. Default `warn`
|
||||
- `user_table_autoincrement_start`: What the `aime_user` table ID autoincrement should start with. Default `10000`
|
||||
- `loglevel`: Logging level for the database. Default `info`
|
||||
- `memcached_host`: Host of the memcached server. Default `localhost`
|
||||
## Frontend
|
||||
- `enable`: Whether or not the frontend should be enabled. Default `False`
|
||||
- `port`: Port the frontend should listen for connections on. Default `8090`
|
||||
- `enable`: Whether or not the frontend should run. Default `False`
|
||||
- `port`: Port the frontend should listen on. Default `8080`
|
||||
- `loglevel`: Logging level for the frontend server. Default `info`
|
||||
- `secret`: Base64-encoded JWT secret for session cookies, generated by you. Default `""`
|
||||
## Allnet
|
||||
- `standalone`: Whether allnet should be launched as a standalone service on its own port.
|
||||
- `port`: Port the billing server should listen for connections on. Games are hardcoded to ask for port `80` so only change if you have a proxy redirecting properly. Default `80`
|
||||
- `loglevel`: Logging level for the allnet server. Default `info`
|
||||
- `port`: Port the allnet server should listen for connections on. Games are hardcoded to ask for port `80` so only change if you have a proxy redirecting properly. Default `80`
|
||||
- `allow_online_updates`: Allow allnet to distribute online updates via DownloadOrders. This system is currently non-functional, so leave it disabled. Default `False`
|
||||
- `update_cfg_folder`: Folder where delivery INI files will be checked for. Ignored if `allow_online_updates` is `False`. Default `""`
|
||||
## Billing
|
||||
- `port`: Port the billing server should listen for connections on. Games are hardcoded to ask for port `8443` so only change if you have a proxy redirecting properly. Set to 0 to use the allnet handler to reduce the number of ports the server eats up. Default `8443`
|
||||
- `ssl_key`: Location of the ssl server key for the billing server. Ignored if `port` is set to `0` or `is_develop` set to `False`. Default `cert/server.key`
|
||||
- `ssl_cert`: Location of the ssl server certificate for the billing server. Must match the CA distributed to users or the billing server will not connect. Ignored if `port` is set to `0` or `is_develop` is set to `False`. Default `cert/server.pem`
|
||||
- `standalone`: Whether or not the billing server should be launched as a standalone service on its own port. Setting this to `True` requires that you have `ssl_key` and `ssl_cert` set. Default `False`
|
||||
- `loglevel`: Logging level for the billing server. Default `info`
|
||||
- `port`: Port the billing server should listen for connections on. Games are hardcoded to ask for port `8443` so only change if you have a proxy redirecting properly. Ignored if `standalone` is `False`. Default `8443`
|
||||
- `ssl_key`: Location of the ssl server key for the billing server. Ignored if `standalone` is `False`. Default `cert/server.key`
|
||||
- `ssl_cert`: Location of the ssl server certificate for the billing server. Ignored if `standalone` is `False`. Must match the CA distributed to users or the billing server will not connect. Default `cert/server.pem`
|
||||
- `signing_key`: Location of the RSA Private key used to sign billing requests. Must match the public key distributed to users or the billing server will not connect. Default `cert/billing.key`
|
||||
## Aimedb
|
||||
- `enable`: Whether or not aimedb should run. Default `True`
|
||||
- `loglevel`: Logging level for the aimedb server. Default `info`
|
||||
- `port`: Port the aimedb server should listen for connections on. Games are hardcoded to ask for port `22345` so only change if you have a proxy redirecting properly. Default `22345`
|
||||
- `key`: Key to encrypt/decrypt aimedb requests and responses. MUST be set or the server will not start. If set incorrectly, your server will not properly handle aimedb requests. Default `""`
|
||||
- `id_secret`: Base64-encoded JWT secret for Sega Auth IDs. Leaving this blank disables this feature. Default `""`
|
||||
- `id_lifetime_seconds`: Number of seconds a generated JWT should be valid for. Default `86400` (1 day)
|
||||
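For the Base64-encoded secrets referenced above (`frontend.secret` and aimedb's `id_secret`), any sufficiently long random value works; a quick way to produce one with the Python standard library, shown as a sketch:

```python
import base64
import secrets

# 32 random bytes, Base64-encoded, suitable for pasting into core.yaml
print(base64.b64encode(secrets.token_bytes(32)).decode())
```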
|
63
docs/prod.md
@ -1,41 +1,30 @@
|
||||
# ARTEMiS Production mode
|
||||
Production mode is a configuration option that changes how the server listens to be more friendly to a production environment. This mode assumes that a proxy (for this guide, nginx) is standing in front of the server to handle port mapping and TLS. In order to activate production mode, simply change `is_develop` to `False` in `core.yaml`. Next time you start the server, you should see "Starting server in production mode".
|
||||
ARTEMiS is designed to run in one of two ways: development/local mode, which assumes you're just trying to set up something to save your scores, have patched your games to disable SSL, cert checks, encryption and the like, and just want the games to work; and production mode. In production mode, artemis assumes you have a proxy server, such as nginx or apache, standing in front of artemis doing HTTPS and port management. This document will cover how to properly set up a production instance of ARTEMiS.
|
||||
|
||||
## ARTEMiS configuration
|
||||
Step 1 is to edit your artemis configuration. Some recommended changes:
|
||||
### `server`
|
||||
- `listen_address` -> `127.0.0.1`
|
||||
- `is_develop` -> `False`
|
||||
- `is_using_proxy` -> `True`
|
||||
- `port` -> The port nginx will send proxied requests to. If you're using the example config, set this to 8080.
|
||||
- `proxy_port` -> The port your proxy will be accepting title server connections on. If you're using the example config, set this to 80.
|
||||
- `proxy_port_ssl` -> The port your proxy will be accepting secure title server connections on. If you're using the example config, set this to 443.
|
||||
- `allow_unregistered_serials` -> `False`
|
||||
### `billing`
|
||||
- `standalone` -> `False`
|
||||
### `frontend`
|
||||
- `enable` -> `True` if you want the frontend
|
||||
- `port` -> `8080` if you're using the default nginx config
|
||||
|
||||
If you plan to serve artemis behind a VPN, these additional settings are also recommended:
|
||||
- `check_arcade_ip` -> `True`
|
||||
- `strict_ip_checking` -> `True`
|
||||
|
||||
## Nginx Configuration
|
||||
### Port forwarding
|
||||
Artemis requires that the following ports be forwarded to allow internet traffic to access the server. This will not change regardless of what you set in the config, as many of these ports are hard-coded in the games.
|
||||
`tcp:80` all.net, non-ssl titles
|
||||
`tcp:8443` billing
|
||||
`tcp:22345` aimedb
|
||||
`tcp:443` frontend, SSL titles
|
||||
For most cases, the config in `example_config` will suffice. It makes the following assumptions:
|
||||
- ARTEMiS is running on port 8080
|
||||
- Billing is set to not be standalone
|
||||
- You're not using cloudflare in front of your frontend
|
||||
|
||||
### A note about external proxy services (cloudflare, etc)
|
||||
Due to the way that artemis functions, it is currently not possible to put the server behind something like Cloudflare. Cloudflare only proxies web traffic on the standard ports (80, 443) and, as shown above, this does not work with artemis. Server administrators should seek other means to protect their network (VPS hosting, VPN, etc)
|
||||
|
||||
### SSL Certificates
|
||||
You will need to generate SSL certificates for some games. The certificates vary in security and validity requirements. Please see the general guide below
|
||||
- General Title: The certificate for the general title server should be valid, not self-signed, and match the CN that the game will be reaching out to (i.e. if your games are reaching out to titles.hostname.here, your ssl certificate should be valid for titles.hostname.here or *.hostname.here)
|
||||
- CXB: Same requirements as the title server. It must not be self-signed, and the CN must match. Recommended to get a wildcard cert if possible, and use it for both Title and CXB
|
||||
- Pokken: Pokken can be self-signed, and the CN doesn't have to match, but it MUST use 2048-bit RSA. Due to the game's age, anything stronger than that will be rejected.
|
||||
|
||||
### Port mappings
|
||||
An example config is provided in the `config` folder called `nginx_example.conf`. It is set up for the following:
|
||||
`naominet.jp:tcp:80` -> `localhost:tcp:8000` for allnet
|
||||
`ib.naominet.jp:ssl:8443` -> `localhost:tcp:8444` for the billing server
|
||||
`your.hostname.here:ssl:443` -> `localhost:tcp:8080` for the SSL title server
|
||||
`your.hostname.here:tcp:80` -> `localhost:tcp:8080` for the non-SSL title server
|
||||
`cxb.hostname.here:ssl:443` -> `localhost:tcp:8080` for crossbeats (appends /SDCA/104/ to the request)
|
||||
`pokken.hostname.here:ssl:443` -> `localhost:tcp:8080` for pokken
|
||||
`frontend.hostname.here:ssl:443` -> `localhost:tcp:8090` for the frontend, includes https redirection
|
||||
|
||||
If you're using this as a guide, be sure to replace your.hostname.here with the hostname you specified in core.yaml under `titles->hostname`. Do *not* change naominet.jp, or allnet/billing will fail. Also remember to specify certificate paths correctly, as in the example they are simply placeholders.
|
||||
|
||||
### Multi-service ports
|
||||
It is possible to use nginx to redirect billing and title server requests to the same port that all.net uses. By setting `port` to 0 under billing and title server, you can change the nginx config to serve the following (entries not shown here should be the same)
|
||||
`ib.naominet.jp:ssl:8443` -> `localhost:tcp:8000` for the billing server
|
||||
`your.hostname.here:ssl:443` -> `localhost:tcp:8000` for the SSL title server
|
||||
`your.hostname.here:tcp:80` -> `localhost:tcp:8000` for the non-SSL title server
|
||||
`cxb.hostname.here:ssl:443` -> `localhost:tcp:8000` for crossbeats (appends /SDCA/104/ to the request)
|
||||
`pokken.hostname.here:ssl:443` -> `localhost:tcp:8000` for pokken
|
||||
|
||||
This will allow you to only use 3 ports locally, but you will still need to forward the same internet-facing ports as before.
|
||||
If this describes you, your only configuration needs are to edit the `server_name` and `certificate_*` directives. Otherwise, please see nginx configuration documentation to configure it to best suit your setup.
|
||||
|
@ -1,26 +1,25 @@
|
||||
server:
|
||||
listen_address: "127.0.0.1"
|
||||
listen_address: "127.0.0.1"
|
||||
hostname: "localhost"
|
||||
port: 80
|
||||
ssl_key: "cert/title.key"
|
||||
ssl_cert: "cert/title.crt"
|
||||
allow_user_registration: True
|
||||
allow_unregistered_serials: True
|
||||
name: "ARTEMiS"
|
||||
is_develop: True
|
||||
is_using_proxy: False
|
||||
threading: False
|
||||
proxy_port: 0
|
||||
proxy_port_ssl: 0
|
||||
log_dir: "logs"
|
||||
check_arcade_ip: False
|
||||
strict_ip_checking: False
|
||||
|
||||
title:
|
||||
loglevel: "info"
|
||||
hostname: "localhost"
|
||||
port: 8080
|
||||
port_ssl: 0
|
||||
ssl_cert: "cert/title.crt"
|
||||
ssl_key: "cert/title.key"
|
||||
reboot_start_time: "04:00"
|
||||
reboot_end_time: "05:00"
|
||||
|
||||
|
||||
database:
|
||||
host: "localhost"
|
||||
username: "aime"
|
||||
@ -29,30 +28,33 @@ database:
|
||||
port: 3306
|
||||
protocol: "mysql"
|
||||
sha2_password: False
|
||||
loglevel: "warn"
|
||||
user_table_autoincrement_start: 10000
|
||||
loglevel: "info"
|
||||
enable_memcached: True
|
||||
memcached_host: "localhost"
|
||||
|
||||
frontend:
|
||||
enable: False
|
||||
port: 8090
|
||||
enable: True
|
||||
port: 8080
|
||||
loglevel: "info"
|
||||
secret: ""
|
||||
|
||||
allnet:
|
||||
loglevel: "info"
|
||||
standalone: False
|
||||
port: 80
|
||||
ip_check: False
|
||||
loglevel: "info"
|
||||
allow_online_updates: False
|
||||
update_cfg_folder: ""
|
||||
|
||||
billing:
|
||||
standalone: True
|
||||
loglevel: "info"
|
||||
port: 8443
|
||||
ssl_key: "cert/server.key"
|
||||
ssl_cert: "cert/server.pem"
|
||||
signing_key: "cert/billing.key"
|
||||
|
||||
aimedb:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
port: 22345
|
||||
key: ""
|
||||
@ -60,6 +62,4 @@ aimedb:
|
||||
id_lifetime_seconds: 86400
|
||||
|
||||
mucha:
|
||||
enable: False
|
||||
hostname: "localhost"
|
||||
loglevel: "info"
|
||||
|
@ -1,3 +1,4 @@
|
||||
server:
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
loglevel: "info"
|
||||
use_https: True
|
@ -6,7 +6,7 @@ server {
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8000/;
|
||||
proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
|
||||
@ -42,7 +42,7 @@ server {
|
||||
}
|
||||
}
|
||||
|
||||
# Billing
|
||||
# Billing, comment this out if running billing standalone
|
||||
server {
|
||||
listen 8443 ssl;
|
||||
server_name ib.naominet.jp;
|
||||
@ -58,28 +58,6 @@ server {
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:8444/;
|
||||
}
|
||||
}
|
||||
|
||||
# Pokken, comment this out if you don't plan on serving pokken.
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name pokken.hostname.here;
|
||||
|
||||
ssl_certificate /path/to/cert/pokken.pem;
|
||||
ssl_certificate_key /path/to/cert/pokken.key;
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers "ALL:@SECLEVEL=0";
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
@ -91,12 +69,12 @@ server {
|
||||
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
# If you don't want https redirection, comment the line above and uncomment the line below
|
||||
# proxy_pass http://localhost:8090/;
|
||||
# If you don't want https redirection, or are using something like cloudflare to manage HTTPS, comment out the line above and uncomment the line below
|
||||
# proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
|
||||
# Frontend HTTPS. Comment out if you don't intend to use the frontend
|
||||
# Frontend HTTPS. Comment out if you don't intend to use the frontend, or have cloudflare or something managing https for you.
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name frontend.hostname.here;
|
||||
@ -118,6 +96,6 @@ server {
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://localhost:8090/;
|
||||
proxy_pass http://localhost:8080/;
|
||||
}
|
||||
}
|
||||
|
399
index.py
@ -1,335 +1,116 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from typing import Dict
|
||||
import yaml
|
||||
from os import path, mkdir, access, W_OK
|
||||
from core import *
|
||||
from os import path, environ
|
||||
import uvicorn
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from twisted.web import server, resource
|
||||
from twisted.internet import reactor, endpoints
|
||||
from twisted.web.http import Request
|
||||
from routes import Mapper
|
||||
from threading import Thread
|
||||
from core import CoreConfig, AimedbServlette
|
||||
|
||||
class HttpDispatcher(resource.Resource):
|
||||
def __init__(self, cfg: CoreConfig, config_dir: str):
|
||||
super().__init__()
|
||||
self.config = cfg
|
||||
self.isLeaf = True
|
||||
self.map_get = Mapper()
|
||||
self.map_post = Mapper()
|
||||
self.logger = logging.getLogger("core")
|
||||
|
||||
self.title = TitleServlet(cfg, config_dir)
|
||||
self.allnet = AllnetServlet(cfg, config_dir)
|
||||
self.mucha = MuchaServlet(cfg, config_dir)
|
||||
|
||||
self.map_get.connect(
|
||||
"allnet_downloadorder_ini",
|
||||
"/dl/ini/{file}",
|
||||
controller="allnet",
|
||||
action="handle_dlorder_ini",
|
||||
conditions=dict(method=["GET"]),
|
||||
async def launch_main(cfg: CoreConfig, ssl: bool) -> None:
|
||||
if ssl:
|
||||
server_cfg = uvicorn.Config(
|
||||
"core.app:app",
|
||||
host=cfg.server.listen_address,
|
||||
port=cfg.server.port if args.port == 0 else args.port,
|
||||
reload=cfg.server.is_develop,
|
||||
log_level="info" if cfg.server.is_develop else "critical",
|
||||
ssl_version=3,
|
||||
ssl_certfile=cfg.server.ssl_cert,
|
||||
ssl_keyfile=cfg.server.ssl_key
|
||||
)
|
||||
else:
|
||||
server_cfg = uvicorn.Config(
|
||||
"core.app:app",
|
||||
host=cfg.server.listen_address,
|
||||
port=cfg.server.port if args.port == 0 else args.port,
|
||||
reload=cfg.server.is_develop,
|
||||
log_level="info" if cfg.server.is_develop else "critical"
|
||||
)
|
||||
server = uvicorn.Server(server_cfg)
|
||||
await server.serve()
|
||||
|
||||
self.map_post.connect(
|
||||
"allnet_downloadorder_report",
|
||||
"/report-api/Report",
|
||||
controller="allnet",
|
||||
action="handle_dlorder_report",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
async def launch_billing(cfg: CoreConfig) -> None:
|
||||
server_cfg = uvicorn.Config(
|
||||
"core.allnet:app_billing",
|
||||
host=cfg.server.listen_address,
|
||||
port=cfg.billing.port,
|
||||
reload=cfg.server.is_develop,
|
||||
log_level="info" if cfg.server.is_develop else "critical",
|
||||
ssl_version=3,
|
||||
ssl_certfile=cfg.billing.ssl_cert,
|
||||
ssl_keyfile=cfg.billing.ssl_key
|
||||
)
|
||||
server = uvicorn.Server(server_cfg)
|
||||
await server.serve()
|
||||
|
||||
self.map_get.connect(
|
||||
"allnet_ping",
|
||||
"/naomitest.html",
|
||||
controller="allnet",
|
||||
action="handle_naomitest",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_poweron",
|
||||
"/sys/servlet/PowerOn",
|
||||
controller="allnet",
|
||||
action="handle_poweron",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_downloadorder",
|
||||
"/sys/servlet/DownloadOrder",
|
||||
controller="allnet",
|
||||
action="handle_dlorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_loaderstaterecorder",
|
||||
"/sys/servlet/LoaderStateRecorder",
|
||||
controller="allnet",
|
||||
action="handle_loaderstaterecorder",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_get.connect(
|
||||
"allnet_alive",
|
||||
"/sys/servlet/Alive",
|
||||
controller="allnet",
|
||||
action="handle_alive",
|
||||
conditions=dict(method=["GET"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_billing",
|
||||
"/request",
|
||||
controller="allnet",
|
||||
action="handle_billing_request",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"allnet_billing",
|
||||
"/request/",
|
||||
controller="allnet",
|
||||
action="handle_billing_request",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
async def launch_frontend(cfg: CoreConfig) -> None:
|
||||
server_cfg = uvicorn.Config(
|
||||
"core.frontend:app",
|
||||
host=cfg.server.listen_address,
|
||||
port=cfg.frontend.port,
|
||||
reload=cfg.server.is_develop,
|
||||
log_level="info" if cfg.server.is_develop else "critical",
|
||||
)
|
||||
server = uvicorn.Server(server_cfg)
|
||||
await server.serve()
|
||||
|
||||
# Maintain compatibility
|
||||
self.map_post.connect(
|
||||
"mucha_boardauth",
|
||||
"/mucha/boardauth.do",
|
||||
controller="mucha",
|
||||
action="handle_boardauth",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_updatacheck",
|
||||
"/mucha/updatacheck.do",
|
||||
controller="mucha",
|
||||
action="handle_updatecheck",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_dlstate",
|
||||
"/mucha/downloadstate.do",
|
||||
controller="mucha",
|
||||
action="handle_dlstate",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
async def launch_allnet(cfg: CoreConfig) -> None:
|
||||
server_cfg = uvicorn.Config(
|
||||
"core.allnet:app_allnet",
|
||||
host=cfg.server.listen_address,
|
||||
port=cfg.allnet.port,
|
||||
reload=cfg.server.is_develop,
|
||||
log_level="info" if cfg.server.is_develop else "critical",
|
||||
)
|
||||
server = uvicorn.Server(server_cfg)
|
||||
await server.serve()
|
||||
|
||||
self.map_post.connect(
|
||||
"mucha_boardauth",
|
||||
"/mucha_front/boardauth.do",
|
||||
controller="mucha",
|
||||
action="handle_boardauth",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_updatacheck",
|
||||
"/mucha_front/updatacheck.do",
|
||||
controller="mucha",
|
||||
action="handle_updatecheck",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
self.map_post.connect(
|
||||
"mucha_dlstate",
|
||||
"/mucha_front/downloadstate.do",
|
||||
controller="mucha",
|
||||
action="handle_dlstate",
|
||||
conditions=dict(method=["POST"]),
|
||||
)
|
||||
|
||||
for code, game in self.title.title_registry.items():
|
||||
get_matchers, post_matchers = game.get_endpoint_matchers()
|
||||
|
||||
for m in get_matchers:
|
||||
self.map_get.connect(
|
||||
"title_get",
|
||||
m[1],
|
||||
controller="title",
|
||||
action="render_GET",
|
||||
title=code,
|
||||
subaction=m[0],
|
||||
conditions=dict(method=["GET"]),
|
||||
requirements=m[2],
|
||||
)
|
||||
|
||||
for m in post_matchers:
|
||||
self.map_post.connect(
|
||||
"title_post",
|
||||
m[1],
|
||||
controller="title",
|
||||
action="render_POST",
|
||||
title=code,
|
||||
subaction=m[0],
|
||||
conditions=dict(method=["POST"]),
|
||||
requirements=m[2],
|
||||
)
|
||||
|
||||
def render_GET(self, request: Request) -> bytes:
|
||||
test = self.map_get.match(request.uri.decode())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if test is None:
|
||||
self.logger.debug(
|
||||
f"Unknown GET endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
return self.dispatch(test, request)
|
||||
|
||||
def render_POST(self, request: Request) -> bytes:
|
||||
test = self.map_post.match(request.uri.decode())
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if test is None:
|
||||
self.logger.debug(
|
||||
f"Unknown POST endpoint {request.uri.decode()} from {client_ip} to port {request.getHost().port}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
return self.dispatch(test, request)
|
||||
|
||||
def dispatch(self, matcher: Dict, request: Request) -> bytes:
|
||||
controller = getattr(self, matcher["controller"], None)
|
||||
if controller is None:
|
||||
self.logger.error(
|
||||
f"Controller {matcher['controller']} not found via endpoint {request.uri.decode()}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
handler = getattr(controller, matcher["action"], None)
|
||||
if handler is None:
|
||||
self.logger.error(
|
||||
f"Action {matcher['action']} not found in controller {matcher['controller']} via endpoint {request.uri.decode()}"
|
||||
)
|
||||
request.setResponseCode(404)
|
||||
return b"Endpoint not found."
|
||||
|
||||
url_vars = matcher
|
||||
url_vars.pop("controller")
|
||||
url_vars.pop("action")
|
||||
ret = handler(request, url_vars)
|
||||
|
||||
if type(ret) == str:
|
||||
return ret.encode()
|
||||
|
||||
elif type(ret) == bytes or type(ret) == tuple: # allow for bytes or tuple (data, response code) responses
|
||||
return ret
|
||||
|
||||
elif ret is None:
|
||||
self.logger.warning(f"None returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
else:
|
||||
self.logger.warning(f"Unknown data type returned by controller for {request.uri.decode()} endpoint")
|
||||
return b""
|
||||
|
||||
async def launcher(cfg: CoreConfig, ssl: bool) -> None:
|
||||
task_list = [asyncio.create_task(launch_main(cfg, ssl))]
|
||||
|
||||
if cfg.billing.standalone:
|
||||
task_list.append(asyncio.create_task(launch_billing(cfg)))
|
||||
if cfg.frontend.enable:
|
||||
task_list.append(asyncio.create_task(launch_frontend(cfg)))
|
||||
if cfg.allnet.standalone:
|
||||
task_list.append(asyncio.create_task(launch_allnet(cfg)))
|
||||
if cfg.aimedb.enable:
|
||||
AimedbServlette(cfg).start()
|
||||
|
||||
done, pending = await asyncio.wait(
|
||||
task_list,
|
||||
return_when=asyncio.FIRST_COMPLETED,
|
||||
)
|
||||
|
||||
logging.getLogger("core").info("Shutdown")
|
||||
for pending_task in pending:
|
||||
pending_task.cancel("Another service died, server is shutting down")
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="ARTEMiS main entry point")
|
||||
parser = argparse.ArgumentParser(description="Artemis main entry point")
|
||||
parser.add_argument(
|
||||
"--config", "-c", type=str, default="config", help="Configuration folder"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--port", "-p", type=int, default=0, help="Port override"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ssl", "-s", type=bool, help="Launch with SSL"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
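One thing to be aware of with the new `--ssl` flag: `argparse` with `type=bool` treats any non-empty string as truthy, so `--ssl False` still enables SSL. A sketch of the more conventional form (not what the commit does):

```python
parser.add_argument(
    "--ssl", "-s", action="store_true", help="Launch with SSL"
)
# args.ssl is now False unless the flag is passed, e.g. `python index.py --ssl`
```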
|
||||
if not path.exists(f"{args.config}/core.yaml"):
|
||||
print(
|
||||
f"The config folder you specified ({args.config}) does not exist or does not contain core.yaml.\nDid you copy the example folder?"
|
||||
f"The config folder you specified ({args.config}) does not exist or does not contain core.yaml. Defaults will be used.\nDid you copy the example folder?"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
|
||||
cfg: CoreConfig = CoreConfig()
|
||||
if path.exists(f"{args.config}/core.yaml"):
|
||||
cfg.update(yaml.safe_load(open(f"{args.config}/core.yaml")))
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
mkdir(cfg.server.log_dir)
|
||||
environ["ARTEMIS_CFG_DIR"] = args.config
|
||||
|
||||
if not access(cfg.server.log_dir, W_OK):
|
||||
print(
|
||||
f"Log directory {cfg.server.log_dir} NOT writable, please check permissions"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
logger = logging.getLogger("core")
|
||||
log_fmt_str = "[%(asctime)s] Core | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(cfg.server.log_dir, "core"), when="d", backupCount=10
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
logger.addHandler(fileHandler)
|
||||
logger.addHandler(consoleHandler)
|
||||
|
||||
log_lv = logging.DEBUG if cfg.server.is_develop else logging.INFO
|
||||
logger.setLevel(log_lv)
|
||||
coloredlogs.install(level=log_lv, logger=logger, fmt=log_fmt_str)
|
||||
|
||||
if not cfg.aimedb.key:
|
||||
logger.error("!!AIMEDB KEY BLANK, SET KEY IN CORE.YAML!!")
|
||||
exit(1)
|
||||
|
||||
logger.info(
|
||||
f"ARTEMiS starting in {'develop' if cfg.server.is_develop else 'production'} mode"
|
||||
)
|
||||
|
||||
allnet_server_str = f"tcp:{cfg.allnet.port}:interface={cfg.server.listen_address}"
|
||||
title_server_str = f"tcp:{cfg.title.port}:interface={cfg.server.listen_address}"
|
||||
title_https_server_str = f"ssl:{cfg.title.port_ssl}:interface={cfg.server.listen_address}:privateKey={cfg.title.ssl_key}:certKey={cfg.title.ssl_cert}"
|
||||
adb_server_str = f"tcp:{cfg.aimedb.port}:interface={cfg.server.listen_address}"
|
||||
frontend_server_str = (
|
||||
f"tcp:{cfg.frontend.port}:interface={cfg.server.listen_address}"
|
||||
)
|
||||
|
||||
billing_server_str = f"tcp:{cfg.billing.port}:interface={cfg.server.listen_address}"
|
||||
if cfg.server.is_develop:
|
||||
billing_server_str = (
|
||||
f"ssl:{cfg.billing.port}:interface={cfg.server.listen_address}"
|
||||
f":privateKey={cfg.billing.ssl_key}:certKey={cfg.billing.ssl_cert}"
|
||||
)
|
||||
|
||||
dispatcher = HttpDispatcher(cfg, args.config)
|
||||
|
||||
endpoints.serverFromString(reactor, allnet_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
endpoints.serverFromString(reactor, adb_server_str).listen(AimedbFactory(cfg))
|
||||
|
||||
if cfg.frontend.enable:
|
||||
endpoints.serverFromString(reactor, frontend_server_str).listen(
|
||||
server.Site(FrontendServlet(cfg, args.config))
|
||||
)
|
||||
|
||||
if cfg.billing.port > 0:
|
||||
endpoints.serverFromString(reactor, billing_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
if cfg.title.port > 0:
|
||||
endpoints.serverFromString(reactor, title_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
if cfg.title.port_ssl > 0:
|
||||
endpoints.serverFromString(reactor, title_https_server_str).listen(
|
||||
server.Site(dispatcher)
|
||||
)
|
||||
|
||||
if cfg.server.threading:
|
||||
Thread(target=reactor.run, args=(False,)).start()
|
||||
else:
|
||||
reactor.run()
|
||||
asyncio.run(launcher(cfg, args.ssl))
|
||||
|
10
read.py
@ -1,4 +1,4 @@
|
||||
# vim: set fileencoding=utf-8
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import re
|
||||
import os
|
||||
@ -6,6 +6,7 @@ import yaml
|
||||
from os import path
|
||||
import logging
|
||||
import coloredlogs
|
||||
import asyncio
|
||||
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from typing import List, Optional
|
||||
@ -38,6 +39,9 @@ class BaseReader:
|
||||
ret.append(f"{root}/{dir}")
|
||||
|
||||
return ret
|
||||
|
||||
async def read(self) -> None:
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@ -136,6 +140,8 @@ if __name__ == "__main__":
|
||||
for dir, mod in titles.items():
|
||||
if args.game in mod.game_codes:
|
||||
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
|
||||
handler.read()
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(handler.read())
|
||||
|
||||
|
||||
logger.info("Done")
|
||||
|
BIN
requirements.txt
Binary file not shown.
@ -7,4 +7,3 @@ index = ChuniServlet
|
||||
database = ChuniData
|
||||
reader = ChuniReader
|
||||
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW, ChuniConstants.GAME_CODE_INT]
|
||||
current_schema_version = 5
|
@ -11,7 +11,7 @@ class ChuniAir(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AIR
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.10.00"
|
||||
return ret
|
||||
|
@ -11,7 +11,7 @@ class ChuniAirPlus(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AIR_PLUS
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.15.00"
|
||||
return ret
|
||||
|
@ -13,7 +13,7 @@ class ChuniAmazon(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AMAZON
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.30.00"
|
||||
return ret
|
||||
|
@ -13,7 +13,7 @@ class ChuniAmazonPlus(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_AMAZON_PLUS
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.35.00"
|
||||
return ret
|
||||
|
@ -22,7 +22,7 @@ class ChuniBase:
|
||||
self.game = ChuniConstants.GAME_CODE
|
||||
self.version = ChuniConstants.VER_CHUNITHM
|
||||
|
||||
def handle_game_login_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_game_login_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Handles the login bonus logic, required for the game because
|
||||
getUserLoginBonus gets called after getUserItem and therefore the
|
||||
@ -38,20 +38,20 @@ class ChuniBase:
|
||||
return {"returnCode": 1}
|
||||
|
||||
user_id = data["userId"]
|
||||
login_bonus_presets = self.data.static.get_login_bonus_presets(self.version)
|
||||
login_bonus_presets = await self.data.static.get_login_bonus_presets(self.version)
|
||||
|
||||
for preset in login_bonus_presets:
|
||||
# check if a user already has some progress and if not add the
|
||||
# login bonus entry
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_login_bonus = await self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
if user_login_bonus is None:
|
||||
self.data.item.put_login_bonus(
|
||||
await self.data.item.put_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
# yeah i'm lazy
|
||||
user_login_bonus = self.data.item.get_login_bonus(
|
||||
user_login_bonus = await self.data.item.get_login_bonus(
|
||||
user_id, self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
@ -67,7 +67,7 @@ class ChuniBase:
|
||||
bonus_count = user_login_bonus["bonusCount"] + 1
|
||||
last_update_date = datetime.now()
|
||||
|
||||
all_login_boni = self.data.static.get_login_bonus(
|
||||
all_login_boni = await self.data.static.get_login_bonus(
|
||||
self.version, preset["presetId"]
|
||||
)
|
||||
|
||||
@ -91,13 +91,13 @@ class ChuniBase:
|
||||
is_finished = True
|
||||
|
||||
# grab the item for the corresponding day
|
||||
login_item = self.data.static.get_login_bonus_by_required_days(
|
||||
login_item = await self.data.static.get_login_bonus_by_required_days(
|
||||
self.version, preset["presetId"], bonus_count
|
||||
)
|
||||
if login_item is not None:
|
||||
# now add the present to the database so the
|
||||
# handle_get_user_item_api_request can grab them
|
||||
self.data.item.put_item(
|
||||
await self.data.item.put_item(
|
||||
user_id,
|
||||
{
|
||||
"itemId": login_item["presentId"],
|
||||
@ -107,7 +107,7 @@ class ChuniBase:
|
||||
},
|
||||
)
|
||||
|
||||
self.data.item.put_login_bonus(
|
||||
await self.data.item.put_login_bonus(
|
||||
user_id,
|
||||
self.version,
|
||||
preset["presetId"],
|
||||
@ -119,12 +119,12 @@ class ChuniBase:
|
||||
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_game_logout_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_game_logout_api_request(self, data: Dict) -> Dict:
|
||||
# self.data.base.log_event("chuni", "logout", logging.INFO, {"version": self.version, "user": data["userId"]})
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
||||
game_charge_list = self.data.static.get_enabled_charges(self.version)
|
||||
async def handle_get_game_charge_api_request(self, data: Dict) -> Dict:
|
||||
game_charge_list = await self.data.static.get_enabled_charges(self.version)
|
||||
|
||||
if game_charge_list is None or len(game_charge_list) == 0:
|
||||
return {"length": 0, "gameChargeList": []}
|
||||
@ -145,8 +145,8 @@ class ChuniBase:
|
||||
)
|
||||
return {"length": len(charges), "gameChargeList": charges}
|
||||
|
||||
def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||
game_events = self.data.static.get_enabled_events(self.version)
|
||||
async def handle_get_game_event_api_request(self, data: Dict) -> Dict:
|
||||
game_events = await self.data.static.get_enabled_events(self.version)
|
||||
|
||||
if game_events is None or len(game_events) == 0:
|
||||
self.logger.warning("No enabled events, did you run the reader?")
|
||||
@ -177,10 +177,10 @@ class ChuniBase:
|
||||
"gameEventList": event_list,
|
||||
}
|
||||
|
||||
def handle_get_game_idlist_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_idlist_api_request(self, data: Dict) -> Dict:
|
||||
return {"type": data["type"], "length": 0, "gameIdlistList": []}
|
||||
|
||||
def handle_get_game_message_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_message_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"type": data["type"],
|
||||
"length": 1,
|
||||
@ -193,14 +193,14 @@ class ChuniBase:
|
||||
}]
|
||||
}
|
||||
|
||||
def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
|
||||
rankings = self.data.score.get_rankings(self.version)
|
||||
async def handle_get_game_ranking_api_request(self, data: Dict) -> Dict:
|
||||
rankings = await self.data.score.get_rankings(self.version)
|
||||
return {"type": data["type"], "gameRankingList": rankings}
|
||||
|
||||
def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_sale_api_request(self, data: Dict) -> Dict:
|
||||
return {"type": data["type"], "length": 0, "gameSaleList": []}
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
# if reboot start/end time is not defined use the default behavior of being a few hours ago
|
||||
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
|
||||
reboot_start = datetime.strftime(
|
||||
@ -240,8 +240,8 @@ class ChuniBase:
|
||||
"isDumpUpload": "false",
|
||||
"isAou": "false",
|
||||
}
|
||||
def handle_get_user_activity_api_request(self, data: Dict) -> Dict:
|
||||
user_activity_list = self.data.profile.get_profile_activity(
|
||||
async def handle_get_user_activity_api_request(self, data: Dict) -> Dict:
|
||||
user_activity_list = await self.data.profile.get_profile_activity(
|
||||
data["userId"], data["kind"]
|
||||
)
|
||||
|
||||
@ -261,8 +261,8 @@ class ChuniBase:
|
||||
"userActivityList": activity_list,
|
||||
}
|
||||
|
||||
def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = self.data.item.get_characters(data["userId"])
|
||||
async def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = await self.data.item.get_characters(data["userId"])
|
||||
if characters is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -296,8 +296,8 @@ class ChuniBase:
|
||||
"userCharacterList": character_list,
|
||||
}
|
||||
|
||||
def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
user_charge_list = self.data.profile.get_profile_charge(data["userId"])
|
||||
async def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
|
||||
user_charge_list = await self.data.profile.get_profile_charge(data["userId"])
|
||||
|
||||
charge_list = []
|
||||
for charge in user_charge_list:
|
||||
@ -312,15 +312,15 @@ class ChuniBase:
|
||||
"userChargeList": charge_list,
|
||||
}
|
||||
|
||||
def handle_get_user_recent_player_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_recent_player_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
"userRecentPlayerList": [], # playUserId, playUserName, playDate, friendPoint
|
||||
}
|
||||
|
||||
def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_course_list = self.data.score.get_courses(data["userId"])
|
||||
async def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_course_list = await self.data.score.get_courses(data["userId"])
|
||||
if user_course_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -354,8 +354,8 @@ class ChuniBase:
|
||||
"userCourseList": course_list,
|
||||
}
|
||||
|
||||
def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
async def handle_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
@ -366,8 +366,8 @@ class ChuniBase:
|
||||
|
||||
return {"userId": data["userId"], "userData": profile}
|
||||
|
||||
def handle_get_user_data_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data_ex(data["userId"], self.version)
|
||||
async def handle_get_user_data_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_data_ex(data["userId"], self.version)
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
@ -378,8 +378,8 @@ class ChuniBase:
|
||||
|
||||
return {"userId": data["userId"], "userDataEx": profile}
|
||||
|
||||
def handle_get_user_duel_api_request(self, data: Dict) -> Dict:
|
||||
user_duel_list = self.data.item.get_duels(data["userId"])
|
||||
async def handle_get_user_duel_api_request(self, data: Dict) -> Dict:
|
||||
user_duel_list = await self.data.item.get_duels(data["userId"])
|
||||
if user_duel_list is None:
|
||||
return {}
|
||||
|
||||
@ -396,8 +396,8 @@ class ChuniBase:
|
||||
"userDuelList": duel_list,
|
||||
}
|
||||
|
||||
def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_rival(data["rivalId"])
|
||||
async def handle_get_user_rival_data_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_rival(data["rivalId"])
|
||||
if p is None:
|
||||
return {}
|
||||
userRivalData = {
|
||||
@ -409,14 +409,14 @@ class ChuniBase:
|
||||
"userRivalData": userRivalData
|
||||
}
|
||||
|
||||
def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
|
||||
rival_id = data["rivalId"]
|
||||
next_index = int(data["nextIndex"])
|
||||
max_count = int(data["maxCount"])
|
||||
user_rival_music_list = []
|
||||
|
||||
# Fetch all the rival music entries for the user
|
||||
all_entries = self.data.score.get_rival_music(rival_id)
|
||||
all_entries = await self.data.score.get_rival_music(rival_id)
|
||||
|
||||
# Process the entries based on max_count and nextIndex
|
||||
for music in all_entries:
|
||||
@ -462,12 +462,12 @@ class ChuniBase:
|
||||
return result
|
||||
|
||||
|
||||
def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
user_fav_item_list = []
|
||||
|
||||
# still needs to be implemented on WebUI
|
||||
# 1: Music, 2: User, 3: Character
|
||||
fav_list = self.data.item.get_all_favorites(
|
||||
fav_list = await self.data.item.get_all_favorites(
|
||||
data["userId"], self.version, fav_kind=int(data["kind"])
|
||||
)
|
||||
if fav_list is not None:
|
||||
@ -482,17 +482,17 @@ class ChuniBase:
|
||||
"userFavoriteItemList": user_fav_item_list,
|
||||
}
|
||||
|
||||
def handle_get_user_favorite_music_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_favorite_music_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
This is handled via the webui, which we don't have right now
|
||||
"""
|
||||
|
||||
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
|
||||
|
||||
def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
kind = int(int(data["nextIndex"]) / 10000000000)
|
||||
next_idx = int(int(data["nextIndex"]) % 10000000000)
|
||||
user_item_list = self.data.item.get_items(data["userId"], kind)
|
||||
user_item_list = await self.data.item.get_items(data["userId"], kind)
|
||||
|
||||
if user_item_list is None or len(user_item_list) == 0:
|
||||
return {
|
||||
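The kind/next_idx split above packs two values into the single nextIndex field the client sends. A small sketch of that encoding; the constant mirrors the one in the handler above, the helper names are illustrative only:

KIND_MULTIPLIER = 10_000_000_000  # same divisor used in handle_get_user_item_api_request

def pack_next_index(kind: int, index: int) -> int:
    # e.g. kind=2, index=300 -> 20000000300
    return kind * KIND_MULTIPLIER + index

def unpack_next_index(next_index: int) -> tuple[int, int]:
    return next_index // KIND_MULTIPLIER, next_index % KIND_MULTIPLIER

assert unpack_next_index(pack_next_index(2, 300)) == (2, 300)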
@ -526,9 +526,9 @@ class ChuniBase:
|
||||
"userItemList": items,
|
||||
}
|
||||
|
||||
def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
user_login_bonus = self.data.item.get_all_login_bonus(user_id, self.version)
|
||||
user_login_bonus = await self.data.item.get_all_login_bonus(user_id, self.version)
|
||||
# ignore the loginBonus request if it's disabled in config
|
||||
if user_login_bonus is None or not self.game_cfg.mods.use_login_bonus:
|
||||
return {"userId": user_id, "length": 0, "userLoginBonusList": []}
|
||||
@ -552,8 +552,8 @@ class ChuniBase:
|
||||
"userLoginBonusList": user_login_list,
|
||||
}
|
||||
|
||||
def handle_get_user_map_api_request(self, data: Dict) -> Dict:
|
||||
user_map_list = self.data.item.get_maps(data["userId"])
|
||||
async def handle_get_user_map_api_request(self, data: Dict) -> Dict:
|
||||
user_map_list = await self.data.item.get_maps(data["userId"])
|
||||
if user_map_list is None:
|
||||
return {}
|
||||
|
||||
@ -570,8 +570,8 @@ class ChuniBase:
|
||||
"userMapList": map_list,
|
||||
}
|
||||
|
||||
def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
music_detail = self.data.score.get_scores(data["userId"])
|
||||
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
music_detail = await self.data.score.get_scores(data["userId"])
|
||||
if music_detail is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -629,8 +629,8 @@ class ChuniBase:
|
||||
"userMusicList": song_list, # 240
|
||||
}
|
||||
|
||||
def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option(data["userId"])
|
||||
async def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_option(data["userId"])
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
@ -638,8 +638,8 @@ class ChuniBase:
|
||||
|
||||
return {"userId": data["userId"], "userGameOption": option}
|
||||
|
||||
def handle_get_user_option_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_option_ex(data["userId"])
|
||||
async def handle_get_user_option_ex_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_option_ex(data["userId"])
|
||||
|
||||
option = p._asdict()
|
||||
option.pop("id")
|
||||
@ -650,11 +650,11 @@ class ChuniBase:
|
||||
def read_wtf8(self, src):
|
||||
return bytes([ord(c) for c in src]).decode("utf-8")
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
async def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = await self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
if profile is None:
|
||||
return None
|
||||
profile_character = self.data.item.get_character(
|
||||
profile_character = await self.data.item.get_character(
|
||||
data["userId"], profile["characterId"]
|
||||
)
|
||||
|
||||
@ -692,8 +692,8 @@ class ChuniBase:
|
||||
"userNameEx": profile["userName"],
|
||||
}
|
||||
|
||||
def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
recent_rating_list = self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
async def handle_get_user_recent_rating_api_request(self, data: Dict) -> Dict:
|
||||
recent_rating_list = await self.data.profile.get_profile_recent_rating(data["userId"])
|
||||
if recent_rating_list is None:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -707,7 +707,7 @@ class ChuniBase:
|
||||
"userRecentRatingList": recent_rating_list["recentRating"],
|
||||
}
|
||||
|
||||
def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_region_api_request(self, data: Dict) -> Dict:
|
||||
# TODO: Region
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
@ -715,22 +715,22 @@ class ChuniBase:
|
||||
"userRegionList": [],
|
||||
}
|
||||
|
||||
def handle_get_user_team_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_team_api_request(self, data: Dict) -> Dict:
|
||||
# Default values
|
||||
team_id = 65535
|
||||
team_name = self.game_cfg.team.team_name
|
||||
team_rank = 0
|
||||
|
||||
# Get user profile
|
||||
profile = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
profile = await self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if profile and profile["teamId"]:
|
||||
# Get team by id
|
||||
team = self.data.profile.get_team_by_id(profile["teamId"])
|
||||
team = await self.data.profile.get_team_by_id(profile["teamId"])
|
||||
|
||||
if team:
|
||||
team_id = team["id"]
|
||||
team_name = team["teamName"]
|
||||
team_rank = self.data.profile.get_team_rank(team["id"])
|
||||
team_rank = await self.data.profile.get_team_rank(team["id"])
|
||||
|
||||
# Don't return anything if no team name has been defined for defaults and there is no team set for the player
|
||||
if not profile["teamId"] and team_name == "":
|
||||
@ -750,7 +750,7 @@ class ChuniBase:
|
||||
},
|
||||
}
|
||||
|
||||
def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_team_course_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
@ -758,7 +758,7 @@ class ChuniBase:
|
||||
"teamCourseSettingList": [],
|
||||
}
|
||||
|
||||
def handle_get_team_course_setting_api_request_proto(self, data: Dict) -> Dict:
|
||||
async def handle_get_team_course_setting_api_request_proto(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 1,
|
||||
@ -782,7 +782,7 @@ class ChuniBase:
|
||||
],
|
||||
}
|
||||
|
||||
def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_team_course_rule_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 0,
|
||||
@ -790,7 +790,7 @@ class ChuniBase:
|
||||
"teamCourseRuleList": []
|
||||
}
|
||||
|
||||
def handle_get_team_course_rule_api_request_proto(self, data: Dict) -> Dict:
|
||||
async def handle_get_team_course_rule_api_request_proto(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": 1,
|
||||
@ -807,7 +807,7 @@ class ChuniBase:
|
||||
],
|
||||
}
|
||||
|
||||
def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_user_all_api_request(self, data: Dict) -> Dict:
|
||||
upsert = data["upsertUserAll"]
|
||||
user_id = data["userId"]
|
||||
|
||||
@ -819,58 +819,58 @@ class ChuniBase:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
self.data.profile.put_profile_data(
|
||||
await self.data.profile.put_profile_data(
|
||||
user_id, self.version, upsert["userData"][0]
|
||||
)
|
||||
|
||||
if "userDataEx" in upsert:
|
||||
self.data.profile.put_profile_data_ex(
|
||||
await self.data.profile.put_profile_data_ex(
|
||||
user_id, self.version, upsert["userDataEx"][0]
|
||||
)
|
||||
|
||||
if "userGameOption" in upsert:
|
||||
self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
|
||||
await self.data.profile.put_profile_option(user_id, upsert["userGameOption"][0])
|
||||
|
||||
if "userGameOptionEx" in upsert:
|
||||
self.data.profile.put_profile_option_ex(
|
||||
await self.data.profile.put_profile_option_ex(
|
||||
user_id, upsert["userGameOptionEx"][0]
|
||||
)
|
||||
if "userRecentRatingList" in upsert:
|
||||
self.data.profile.put_profile_recent_rating(
|
||||
await self.data.profile.put_profile_recent_rating(
|
||||
user_id, upsert["userRecentRatingList"]
|
||||
)
|
||||
|
||||
if "userCharacterList" in upsert:
|
||||
for character in upsert["userCharacterList"]:
|
||||
self.data.item.put_character(user_id, character)
|
||||
await self.data.item.put_character(user_id, character)
|
||||
|
||||
if "userMapList" in upsert:
|
||||
for map in upsert["userMapList"]:
|
||||
self.data.item.put_map(user_id, map)
|
||||
await self.data.item.put_map(user_id, map)
|
||||
|
||||
if "userCourseList" in upsert:
|
||||
for course in upsert["userCourseList"]:
|
||||
self.data.score.put_course(user_id, course)
|
||||
await self.data.score.put_course(user_id, course)
|
||||
|
||||
if "userDuelList" in upsert:
|
||||
for duel in upsert["userDuelList"]:
|
||||
self.data.item.put_duel(user_id, duel)
|
||||
await self.data.item.put_duel(user_id, duel)
|
||||
|
||||
if "userItemList" in upsert:
|
||||
for item in upsert["userItemList"]:
|
||||
self.data.item.put_item(user_id, item)
|
||||
await self.data.item.put_item(user_id, item)
|
||||
|
||||
if "userActivityList" in upsert:
|
||||
for activity in upsert["userActivityList"]:
|
||||
self.data.profile.put_profile_activity(user_id, activity)
|
||||
await self.data.profile.put_profile_activity(user_id, activity)
|
||||
|
||||
if "userChargeList" in upsert:
|
||||
for charge in upsert["userChargeList"]:
|
||||
self.data.profile.put_profile_charge(user_id, charge)
|
||||
await self.data.profile.put_profile_charge(user_id, charge)
|
||||
|
||||
if "userMusicDetailList" in upsert:
|
||||
for song in upsert["userMusicDetailList"]:
|
||||
self.data.score.put_score(user_id, song)
|
||||
await self.data.score.put_score(user_id, song)
|
||||
|
||||
if "userPlaylogList" in upsert:
|
||||
for playlog in upsert["userPlaylogList"]:
|
||||
@ -881,7 +881,7 @@ class ChuniBase:
|
||||
playlog["playedUserName2"] = self.read_wtf8(playlog["playedUserName2"])
|
||||
if playlog["playedUserName3"] is not None:
|
||||
playlog["playedUserName3"] = self.read_wtf8(playlog["playedUserName3"])
|
||||
self.data.score.put_playlog(user_id, playlog, self.version)
|
||||
await self.data.score.put_playlog(user_id, playlog, self.version)
|
||||
|
||||
if "userTeamPoint" in upsert:
|
||||
team_points = upsert["userTeamPoint"]
|
||||
@ -889,7 +889,7 @@ class ChuniBase:
|
||||
for tp in team_points:
|
||||
if tp["teamId"] != '65535':
|
||||
# Fetch the current team data
|
||||
current_team = self.data.profile.get_team_by_id(tp["teamId"])
|
||||
current_team = await self.data.profile.get_team_by_id(tp["teamId"])
|
||||
|
||||
# Calculate the new teamPoint
|
||||
new_team_point = int(tp["teamPoint"]) + current_team["teamPoint"]
|
||||
@ -900,24 +900,24 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
# Update the team data
|
||||
self.data.profile.update_team(tp["teamId"], team_data)
|
||||
await self.data.profile.update_team(tp["teamId"], team_data)
|
||||
except:
|
||||
pass # Probably a better way to catch if the team is not set yet (new profiles), but let's just pass
|
||||
if "userMapAreaList" in upsert:
|
||||
for map_area in upsert["userMapAreaList"]:
|
||||
self.data.item.put_map_area(user_id, map_area)
|
||||
await self.data.item.put_map_area(user_id, map_area)
|
||||
|
||||
if "userOverPowerList" in upsert:
|
||||
for overpower in upsert["userOverPowerList"]:
|
||||
self.data.profile.put_profile_overpower(user_id, overpower)
|
||||
await self.data.profile.put_profile_overpower(user_id, overpower)
|
||||
|
||||
if "userEmoneyList" in upsert:
|
||||
for emoney in upsert["userEmoneyList"]:
|
||||
self.data.profile.put_profile_emoney(user_id, emoney)
|
||||
await self.data.profile.put_profile_emoney(user_id, emoney)
|
||||
|
||||
if "userLoginBonusList" in upsert:
|
||||
for login in upsert["userLoginBonusList"]:
|
||||
self.data.item.put_login_bonus(
|
||||
await self.data.item.put_login_bonus(
|
||||
user_id, self.version, login["presetId"], isWatched=True
|
||||
)
|
||||
|
||||
@ -927,28 +927,28 @@ class ChuniBase:
|
||||
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
# add tickets after they got bought, this makes sure the tickets are
|
||||
# still valid after an unsuccessful logout
|
||||
self.data.profile.put_profile_charge(data["userId"], data["userCharge"])
|
||||
await self.data.profile.put_profile_charge(data["userId"], data["userCharge"])
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_develop_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_client_develop_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_error_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_client_error_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_upsert_client_testmode_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_get_user_net_battle_data_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_net_battle_data_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userNetBattleData": {"recentNBSelectMusicList": []},
|
||||
|
@ -13,7 +13,7 @@ class ChuniCrystal(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_CRYSTAL
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.40.00"
|
||||
return ret
|
||||
|
@ -13,7 +13,7 @@ class ChuniCrystalPlus(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.45.00"
|
||||
return ret
|
||||
|
@ -1,4 +1,6 @@
|
||||
from twisted.web.http import Request
|
||||
from starlette.requests import Request
|
||||
from starlette.routing import Route
|
||||
from starlette.responses import Response
|
||||
import logging, coloredlogs
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
import zlib
|
||||
@ -33,7 +35,6 @@ from .newplus import ChuniNewPlus
|
||||
from .sun import ChuniSun
|
||||
from .sunplus import ChuniSunPlus
|
||||
|
||||
|
||||
class ChuniServlet(BaseServlet):
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
super().__init__(core_cfg, cfg_dir)
|
||||
@ -124,15 +125,6 @@ class ChuniServlet(BaseServlet):
|
||||
f"Hashed v{version} method {method_fixed} with {bytes.fromhex(keys[2])} to get {hash.hex()}"
|
||||
)
|
||||
|
||||
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
|
||||
return (
|
||||
[],
|
||||
[
|
||||
("render_POST", "/{game}/{version}/ChuniServlet/{endpoint}", {}),
|
||||
("render_POST", "/{game}/{version}/ChuniServlet/MatchingServer/{endpoint}", {})
|
||||
]
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def is_game_enabled(
|
||||
cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
|
||||
@ -150,19 +142,25 @@ class ChuniServlet(BaseServlet):
|
||||
|
||||
def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
|
||||
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
|
||||
return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", self.core_cfg.title.hostname)
|
||||
return (f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}/{game_code}/{game_ver}/", self.core_cfg.server.hostname)
|
||||
|
||||
return (f"http://{self.core_cfg.title.hostname}/{game_code}/{game_ver}/", self.core_cfg.title.hostname)
|
||||
return (f"http://{self.core_cfg.server.hostname}/{game_code}/{game_ver}/", self.core_cfg.server.hostname)
|
||||
|
||||
def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
endpoint = matchers['endpoint']
|
||||
version = int(matchers['version'])
|
||||
game_code = matchers['game']
|
||||
def get_routes(self) -> List[Route]:
|
||||
return [
|
||||
Route("/{game:str}/{version:int}/ChuniServlet/{endpoint:str}", self.render_POST, methods=['POST']),
|
||||
Route("/{game:str}/{version:int}/ChuniServlet/MatchingServer/{endpoint:str}", self.render_POST, methods=['POST']),
|
||||
]
|
||||
|
||||
async def render_POST(self, request: Request) -> bytes:
|
||||
endpoint: str = request.path_params.get('endpoint')
|
||||
version: int = request.path_params.get('version')
|
||||
game_code: str = request.path_params.get('game')
|
||||
|
||||
if endpoint.lower() == "ping":
|
||||
return zlib.compress(b'{"returnCode": "1"}')
|
||||
return Response(zlib.compress(b'{"returnCode": "1"}'))
|
||||
|
||||
req_raw = request.content.getvalue()
|
||||
req_raw = await request.body()
|
||||
|
||||
encrtped = False
|
||||
internal_ver = 0
|
||||
@ -201,7 +199,7 @@ class ChuniServlet(BaseServlet):
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN_PLUS
|
||||
elif game_code == "SDGS": # Int
|
||||
if version < 110: # SUPERSTAR
|
||||
internal_ver = ChuniConstants.PARADISE
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_PARADISE # FIXME: Not sure what was intended to go here? was just "PARADISE"
|
||||
elif version >= 110 and version < 115: # NEW
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_NEW
|
||||
elif version >= 115 and version < 120: # NEW PLUS!!
|
||||
@ -216,20 +214,20 @@ class ChuniServlet(BaseServlet):
|
||||
# doing encrypted. The likelihood of false positives is low but
|
||||
# technically not 0
|
||||
if internal_ver < ChuniConstants.VER_CHUNITHM_NEW:
|
||||
endpoint = request.getHeader("User-Agent").split("#")[0]
|
||||
endpoint = request.headers.get("User-Agent").split("#")[0]
|
||||
|
||||
else:
|
||||
if internal_ver not in self.hash_table:
|
||||
self.logger.error(
|
||||
f"v{version} does not support encryption or no keys entered"
|
||||
)
|
||||
return zlib.compress(b'{"stat": "0"}')
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
elif endpoint.lower() not in self.hash_table[internal_ver]:
|
||||
self.logger.error(
|
||||
f"No hash found for v{version} endpoint {endpoint}"
|
||||
)
|
||||
return zlib.compress(b'{"stat": "0"}')
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
endpoint = self.hash_table[internal_ver][endpoint.lower()]
|
||||
|
||||
@ -246,7 +244,7 @@ class ChuniServlet(BaseServlet):
|
||||
self.logger.error(
|
||||
f"Failed to decrypt v{version} request to {endpoint} -> {e}"
|
||||
)
|
||||
return zlib.compress(b'{"stat": "0"}')
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
encrtped = True
|
||||
|
||||
@ -258,7 +256,7 @@ class ChuniServlet(BaseServlet):
|
||||
self.logger.error(
|
||||
f"Unencrypted v{version} {endpoint} request, but config is set to encrypted only: {req_raw}"
|
||||
)
|
||||
return zlib.compress(b'{"stat": "0"}')
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
try:
|
||||
unzip = zlib.decompress(req_raw)
|
||||
@ -267,7 +265,7 @@ class ChuniServlet(BaseServlet):
|
||||
self.logger.error(
|
||||
f"Failed to decompress v{version} {endpoint} request -> {e}"
|
||||
)
|
||||
return b""
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
req_data = json.loads(unzip)
|
||||
|
||||
@ -285,11 +283,11 @@ class ChuniServlet(BaseServlet):
|
||||
else:
|
||||
try:
|
||||
handler = getattr(handler_cls, func_to_find)
|
||||
resp = handler(req_data)
|
||||
resp = await handler(req_data)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
|
||||
return zlib.compress(b'{"stat": "0"}')
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
if resp == None:
|
||||
resp = {"returnCode": 1}
|
||||
@ -299,7 +297,7 @@ class ChuniServlet(BaseServlet):
|
||||
zipped = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
|
||||
|
||||
if not encrtped:
|
||||
return zipped
|
||||
return Response(zipped)
|
||||
|
||||
padded = pad(zipped, 16)
|
||||
|
||||
@ -309,4 +307,4 @@ class ChuniServlet(BaseServlet):
|
||||
bytes.fromhex(self.game_cfg.crypto.keys[internal_ver][1]),
|
||||
)
|
||||
|
||||
return crypt.encrypt(padded)
|
||||
return Response(crypt.encrypt(padded))
|
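The servlet above swaps Twisted's endpoint-matcher tuples for Starlette Route objects and wraps every payload in a Response. A stripped-down sketch of that shape, assuming only the ping short-circuit; the placeholder error reply stands in for the real hash-lookup/decrypt/dispatch path:

import zlib
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Route

async def render_post(request: Request) -> Response:
    endpoint: str = request.path_params.get("endpoint")
    if endpoint.lower() == "ping":
        return Response(zlib.compress(b'{"returnCode": "1"}'))
    body = await request.body()  # raw (possibly compressed/encrypted) request
    return Response(zlib.compress(b'{"stat": "0"}'))  # placeholder error reply

app = Starlette(routes=[
    Route("/{game:str}/{version:int}/ChuniServlet/{endpoint:str}", render_post, methods=["POST"]),
])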
@ -33,7 +33,7 @@ class ChuniNew(ChuniBase):
|
||||
if self.version == ChuniConstants.VER_CHUNITHM_SUN_PLUS:
|
||||
return "215"
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
# use UTC time and convert it to JST time by adding +9
|
||||
# matching therefore starts one hour before and lasts for 8 hours
|
||||
match_start = datetime.strftime(
|
||||
@ -82,27 +82,27 @@ class ChuniNew(ChuniBase):
|
||||
"matchErrorLimit": self.game_cfg.matching.match_error_limit,
|
||||
"romVersion": self.game_cfg.version.version(self.version)["rom"],
|
||||
"dataVersion": self.game_cfg.version.version(self.version)["data"],
|
||||
"matchingUri": f"http://{self.core_cfg.title.hostname}:{t_port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"matchingUriX": f"http://{self.core_cfg.title.hostname}:{t_port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"matchingUri": f"http://{self.core_cfg.server.hostname}:{t_port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"matchingUriX": f"http://{self.core_cfg.server.hostname}:{t_port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
# might be really important for online battle to connect the cabs via UDP port 50201
|
||||
"udpHolePunchUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"reflectorUri": f"http://{self.core_cfg.title.hostname}:{self.core_cfg.title.port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"udpHolePunchUri": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.server.port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
"reflectorUri": f"http://{self.core_cfg.server.hostname}:{self.core_cfg.server.port}/SDHD/{self._interal_ver_to_intver()}/ChuniServlet/",
|
||||
},
|
||||
"isDumpUpload": False,
|
||||
"isAou": False,
|
||||
}
|
||||
|
||||
def handle_remove_token_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_remove_token_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_delete_token_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_delete_token_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_create_token_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_create_token_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_get_user_map_area_api_request(self, data: Dict) -> Dict:
|
||||
user_map_areas = self.data.item.get_map_areas(data["userId"])
|
||||
async def handle_get_user_map_area_api_request(self, data: Dict) -> Dict:
|
||||
user_map_areas = await self.data.item.get_map_areas(data["userId"])
|
||||
|
||||
map_areas = []
|
||||
for map_area in user_map_areas:
|
||||
@ -113,14 +113,14 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
return {"userId": data["userId"], "userMapAreaList": map_areas}
|
||||
|
||||
def handle_get_user_symbol_chat_setting_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_symbol_chat_setting_api_request(self, data: Dict) -> Dict:
|
||||
return {"userId": data["userId"], "symbolCharInfoList": []}
|
||||
|
||||
def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
async def handle_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
profile = await self.data.profile.get_profile_preview(data["userId"], self.version)
|
||||
if profile is None:
|
||||
return None
|
||||
profile_character = self.data.item.get_character(
|
||||
profile_character = await self.data.item.get_character(
|
||||
data["userId"], profile["characterId"]
|
||||
)
|
||||
|
||||
@ -164,8 +164,8 @@ class ChuniNew(ChuniBase):
|
||||
}
|
||||
return data1
|
||||
|
||||
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
@ -177,17 +177,17 @@ class ChuniNew(ChuniBase):
|
||||
"isLogin": False,
|
||||
}
|
||||
|
||||
def handle_printer_login_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_printer_login_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_printer_logout_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_printer_logout_api_request(self, data: Dict) -> Dict:
|
||||
return {"returnCode": 1}
|
||||
|
||||
def handle_get_game_gacha_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_gacha_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
returns all current active banners (gachas)
|
||||
"""
|
||||
game_gachas = self.data.static.get_gachas(self.version)
|
||||
game_gachas = await self.data.static.get_gachas(self.version)
|
||||
|
||||
# clean the database rows
|
||||
game_gacha_list = []
|
||||
@ -213,11 +213,11 @@ class ChuniNew(ChuniBase):
|
||||
"registIdList": [],
|
||||
}
|
||||
|
||||
def handle_get_game_gacha_card_by_id_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_game_gacha_card_by_id_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
returns all valid cards for a given gachaId
|
||||
"""
|
||||
game_gacha_cards = self.data.static.get_gacha_cards(data["gachaId"])
|
||||
game_gacha_cards = await self.data.static.get_gacha_cards(data["gachaId"])
|
||||
|
||||
game_gacha_card_list = []
|
||||
for gacha_card in game_gacha_cards:
|
||||
@ -237,8 +237,8 @@ class ChuniNew(ChuniBase):
|
||||
"ssrBookCalcList": [],
|
||||
}
|
||||
|
||||
def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
async def handle_cm_get_user_data_api_request(self, data: Dict) -> Dict:
|
||||
p = await self.data.profile.get_profile_data(data["userId"], self.version)
|
||||
if p is None:
|
||||
return {}
|
||||
|
||||
@ -262,8 +262,8 @@ class ChuniNew(ChuniBase):
|
||||
],
|
||||
}
|
||||
|
||||
def handle_get_user_gacha_api_request(self, data: Dict) -> Dict:
|
||||
user_gachas = self.data.item.get_user_gachas(data["userId"])
|
||||
async def handle_get_user_gacha_api_request(self, data: Dict) -> Dict:
|
||||
user_gachas = await self.data.item.get_user_gachas(data["userId"])
|
||||
if user_gachas is None:
|
||||
return {"userId": data["userId"], "length": 0, "userGachaList": []}
|
||||
|
||||
@ -281,8 +281,8 @@ class ChuniNew(ChuniBase):
|
||||
"userGachaList": user_gacha_list,
|
||||
}
|
||||
|
||||
def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
|
||||
user_print_list = self.data.item.get_user_print_states(
|
||||
async def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
|
||||
user_print_list = await self.data.item.get_user_print_states(
|
||||
data["userId"], has_completed=True
|
||||
)
|
||||
if user_print_list is None:
|
||||
@ -316,10 +316,10 @@ class ChuniNew(ChuniBase):
|
||||
"userPrintedCardList": print_list,
|
||||
}
|
||||
|
||||
def handle_get_user_card_print_error_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_card_print_error_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
|
||||
user_print_states = self.data.item.get_user_print_states(
|
||||
user_print_states = await self.data.item.get_user_print_states(
|
||||
user_id, has_completed=False
|
||||
)
|
||||
|
||||
@ -338,13 +338,13 @@ class ChuniNew(ChuniBase):
|
||||
"userCardPrintStateList": card_print_state_list,
|
||||
}
|
||||
|
||||
def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
return super().handle_get_user_character_api_request(data)
|
||||
|
||||
def handle_cm_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
return super().handle_get_user_item_api_request(data)
|
||||
|
||||
def handle_roll_gacha_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_roll_gacha_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Handle a gacha roll API request, with:
|
||||
gachaId: the gachaId where the cards should be pulled from
|
||||
@ -362,14 +362,14 @@ class ChuniNew(ChuniBase):
|
||||
# characterId should be returned
|
||||
if chara_id != -1:
|
||||
# get the
|
||||
card = self.data.static.get_gacha_card_by_character(gacha_id, chara_id)
|
||||
card = await self.data.static.get_gacha_card_by_character(gacha_id, chara_id)
|
||||
|
||||
tmp = card._asdict()
|
||||
tmp.pop("id")
|
||||
|
||||
rolled_cards.append(tmp)
|
||||
else:
|
||||
gacha_cards = self.data.static.get_gacha_cards(gacha_id)
|
||||
gacha_cards = await self.data.static.get_gacha_cards(gacha_id)
|
||||
|
||||
# get the card id for each roll
|
||||
for _ in range(num_rolls):
|
||||
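The roll loop above picks one card per roll from the banner's card pool. A hedged sketch of that selection step in isolation (uniform pick, since no weighting is visible in this hunk; the function name is illustrative):

import random

def roll_gacha_cards(gacha_cards: list[dict], num_rolls: int) -> list[dict]:
    # one card per roll; duplicates are allowed, as with a real banner
    return [random.choice(gacha_cards) for _ in range(num_rolls)]

rolled = roll_gacha_cards([{"cardId": 1}, {"cardId": 2}, {"cardId": 3}], num_rolls=5)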
@ -386,7 +386,7 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
return {"length": len(rolled_cards), "gameGachaCardList": rolled_cards}
|
||||
|
||||
def handle_cm_upsert_user_gacha_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_upsert_user_gacha_api_request(self, data: Dict) -> Dict:
|
||||
upsert = data["cmUpsertUserGacha"]
|
||||
user_id = data["userId"]
|
||||
place_id = data["placeId"]
|
||||
@ -396,7 +396,7 @@ class ChuniNew(ChuniBase):
|
||||
user_data.pop("rankUpChallengeResults")
|
||||
user_data.pop("userEmoney")
|
||||
|
||||
self.data.profile.put_profile_data(user_id, self.version, user_data)
|
||||
await self.data.profile.put_profile_data(user_id, self.version, user_data)
|
||||
|
||||
# save the user gacha
|
||||
user_gacha = upsert["userGacha"]
|
||||
@ -404,16 +404,16 @@ class ChuniNew(ChuniBase):
|
||||
user_gacha.pop("gachaId")
|
||||
user_gacha.pop("dailyGachaDate")
|
||||
|
||||
self.data.item.put_user_gacha(user_id, gacha_id, user_gacha)
|
||||
await self.data.item.put_user_gacha(user_id, gacha_id, user_gacha)
|
||||
|
||||
# save all user items
|
||||
if "userItemList" in upsert:
|
||||
for item in upsert["userItemList"]:
|
||||
self.data.item.put_item(user_id, item)
|
||||
await self.data.item.put_item(user_id, item)
|
||||
|
||||
# add every gamegachaCard to database
|
||||
for card in upsert["gameGachaCardList"]:
|
||||
self.data.item.put_user_print_state(
|
||||
await self.data.item.put_user_print_state(
|
||||
user_id,
|
||||
hasCompleted=False,
|
||||
placeId=place_id,
|
||||
@ -423,7 +423,7 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
# retrieve every game gacha card which has been added in order to get
|
||||
# the orderId for the next request
|
||||
user_print_states = self.data.item.get_user_print_states_by_gacha(
|
||||
user_print_states = await self.data.item.get_user_print_states_by_gacha(
|
||||
user_id, gacha_id, has_completed=False
|
||||
)
|
||||
card_print_state_list = []
|
||||
@ -441,7 +441,7 @@ class ChuniNew(ChuniBase):
|
||||
"userCardPrintStateList": card_print_state_list,
|
||||
}
|
||||
|
||||
def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_upsert_user_printlog_api_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"returnCode": 1,
|
||||
"orderId": 0,
|
||||
@ -449,7 +449,7 @@ class ChuniNew(ChuniBase):
|
||||
"apiName": "CMUpsertUserPrintlogApi",
|
||||
}
|
||||
|
||||
def handle_cm_upsert_user_print_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_upsert_user_print_api_request(self, data: Dict) -> Dict:
|
||||
user_print_detail = data["userPrintDetail"]
|
||||
user_id = data["userId"]
|
||||
|
||||
@ -465,7 +465,7 @@ class ChuniNew(ChuniBase):
|
||||
)
|
||||
|
||||
# add the entry to the user print table with the random serialId
|
||||
self.data.item.put_user_print_detail(user_id, serial_id, user_print_detail)
|
||||
await self.data.item.put_user_print_detail(user_id, serial_id, user_print_detail)
|
||||
|
||||
return {
|
||||
"returnCode": 1,
|
||||
@ -474,7 +474,7 @@ class ChuniNew(ChuniBase):
|
||||
"apiName": "CMUpsertUserPrintApi",
|
||||
}
|
||||
|
||||
def handle_cm_upsert_user_print_subtract_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_upsert_user_print_subtract_api_request(self, data: Dict) -> Dict:
|
||||
upsert = data["userCardPrintState"]
|
||||
user_id = data["userId"]
|
||||
place_id = data["placeId"]
|
||||
@ -482,37 +482,37 @@ class ChuniNew(ChuniBase):
|
||||
# save all user items
|
||||
if "userItemList" in data:
|
||||
for item in data["userItemList"]:
|
||||
self.data.item.put_item(user_id, item)
|
||||
await self.data.item.put_item(user_id, item)
|
||||
|
||||
# set the card print state to success and use the orderId as the key
|
||||
self.data.item.put_user_print_state(
|
||||
await self.data.item.put_user_print_state(
|
||||
user_id, id=upsert["orderId"], hasCompleted=True
|
||||
)
|
||||
|
||||
return {"returnCode": "1", "apiName": "CMUpsertUserPrintSubtractApi"}
|
||||
|
||||
def handle_cm_upsert_user_print_cancel_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_cm_upsert_user_print_cancel_api_request(self, data: Dict) -> Dict:
|
||||
order_ids = data["orderIdList"]
|
||||
user_id = data["userId"]
|
||||
|
||||
# set the card print state to success and use the orderId as the key
|
||||
for order_id in order_ids:
|
||||
self.data.item.put_user_print_state(user_id, id=order_id, hasCompleted=True)
|
||||
await self.data.item.put_user_print_state(user_id, id=order_id, hasCompleted=True)
|
||||
|
||||
return {"returnCode": "1", "apiName": "CMUpsertUserPrintCancelApi"}
|
||||
|
||||
def handle_ping_request(self, data: Dict) -> Dict:
|
||||
async def handle_ping_request(self, data: Dict) -> Dict:
|
||||
# matchmaking ping request
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_begin_matching_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_begin_matching_api_request(self, data: Dict) -> Dict:
|
||||
room_id = 1
|
||||
# check if there is a free matching room
|
||||
matching_room = self.data.item.get_oldest_free_matching(self.version)
|
||||
matching_room = await self.data.item.get_oldest_free_matching(self.version)
|
||||
|
||||
if matching_room is None:
|
||||
# grab the latest roomId and add 1 for the new room
|
||||
newest_matching = self.data.item.get_newest_matching(self.version)
|
||||
newest_matching = await self.data.item.get_newest_matching(self.version)
|
||||
if newest_matching is not None:
|
||||
room_id = newest_matching["roomId"] + 1
|
||||
|
||||
@ -522,12 +522,12 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
# create the new room with room_id and the current user id (host)
|
||||
# user id is required for the countdown later on
|
||||
self.data.item.put_matching(
|
||||
await self.data.item.put_matching(
|
||||
self.version, room_id, [new_member], user_id=new_member["userId"]
|
||||
)
|
||||
|
||||
# get the newly created matching room
|
||||
matching_room = self.data.item.get_matching(self.version, room_id)
|
||||
matching_room = await self.data.item.get_matching(self.version, room_id)
|
||||
else:
|
||||
# a room already exists, so just add the new member to it
|
||||
matching_member_list = matching_room["matchingMemberInfoList"]
|
||||
@ -537,7 +537,7 @@ class ChuniNew(ChuniBase):
|
||||
matching_member_list.append(new_member)
|
||||
|
||||
# add the updated room to the database, make sure to set isFull correctly!
|
||||
self.data.item.put_matching(
|
||||
await self.data.item.put_matching(
|
||||
self.version,
|
||||
matching_room["roomId"],
|
||||
matching_member_list,
|
||||
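begin_matching above either reuses the oldest non-full room or creates a new one with roomId = newest + 1. The same decision in isolation, with an in-memory stand-in for the matching table (names are illustrative, not the real data layer):

def pick_room(rooms: list[dict]) -> int:
    # rooms: [{"roomId": int, "isFull": bool}, ...] in any order
    free = sorted((r for r in rooms if not r["isFull"]), key=lambda r: r["roomId"])
    if free:
        return free[0]["roomId"]                      # oldest free room wins
    if rooms:
        return max(r["roomId"] for r in rooms) + 1    # latest roomId + 1
    return 1                                          # first room ever

assert pick_room([]) == 1
assert pick_room([{"roomId": 1, "isFull": True}]) == 2
assert pick_room([{"roomId": 1, "isFull": True}, {"roomId": 2, "isFull": False}]) == 2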
@ -554,8 +554,8 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
return {"roomId": 1, "matchingWaitState": matching_wait}
|
||||
|
||||
def handle_end_matching_api_request(self, data: Dict) -> Dict:
|
||||
matching_room = self.data.item.get_matching(self.version, data["roomId"])
|
||||
async def handle_end_matching_api_request(self, data: Dict) -> Dict:
|
||||
matching_room = await self.data.item.get_matching(self.version, data["roomId"])
|
||||
members = matching_room["matchingMemberInfoList"]
|
||||
|
||||
# only set the host user to role 1, every other member to 0?
|
||||
@ -564,7 +564,7 @@ class ChuniNew(ChuniBase):
|
||||
for m in members
|
||||
]
|
||||
|
||||
self.data.item.put_matching(
|
||||
await self.data.item.put_matching(
|
||||
self.version,
|
||||
matching_room["roomId"],
|
||||
members,
|
||||
@ -579,13 +579,13 @@ class ChuniNew(ChuniBase):
|
||||
# no idea, maybe to differentiate between CPUs and real players?
|
||||
"matchingMemberRoleList": role_list,
|
||||
# TCP/UDP connection?
|
||||
"reflectorUri": f"{self.core_cfg.title.hostname}",
|
||||
"reflectorUri": f"{self.core_cfg.server.hostname}",
|
||||
}
|
||||
|
||||
def handle_remove_matching_member_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_remove_matching_member_api_request(self, data: Dict) -> Dict:
|
||||
# get all matching rooms, because Chuni only returns the userId
|
||||
# not the actual roomId
|
||||
matching_rooms = self.data.item.get_all_matchings(self.version)
|
||||
matching_rooms = await self.data.item.get_all_matchings(self.version)
|
||||
if matching_rooms is None:
|
||||
return {"returnCode": "1"}
|
||||
|
||||
@ -599,10 +599,10 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
# if the last user got removed, delete the matching room
|
||||
if len(new_members) <= 0:
|
||||
self.data.item.delete_matching(self.version, room["roomId"])
|
||||
await self.data.item.delete_matching(self.version, room["roomId"])
|
||||
else:
|
||||
# remove the user from the room
|
||||
self.data.item.put_matching(
|
||||
await self.data.item.put_matching(
|
||||
self.version,
|
||||
room["roomId"],
|
||||
new_members,
|
||||
@ -612,10 +612,10 @@ class ChuniNew(ChuniBase):
|
||||
|
||||
return {"returnCode": "1"}
|
||||
|
||||
def handle_get_matching_state_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_matching_state_api_request(self, data: Dict) -> Dict:
|
||||
polling_interval = 1
|
||||
# get the current active room
|
||||
matching_room = self.data.item.get_matching(self.version, data["roomId"])
|
||||
matching_room = await self.data.item.get_matching(self.version, data["roomId"])
|
||||
members = matching_room["matchingMemberInfoList"]
|
||||
rest_sec = matching_room["restMSec"]
|
||||
|
||||
@ -638,7 +638,7 @@ class ChuniNew(ChuniBase):
|
||||
current_member["userName"] = self.read_wtf8(current_member["userName"])
|
||||
members[i] = current_member
|
||||
|
||||
self.data.item.put_matching(
|
||||
await self.data.item.put_matching(
|
||||
self.version,
|
||||
data["roomId"],
|
||||
members,
|
||||
|
@ -11,8 +11,8 @@ class ChuniNewPlus(ChuniNew):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_NEW_PLUS
|
||||
|
||||
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = super().handle_cm_get_user_preview_api_request(data)
|
||||
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = await super().handle_cm_get_user_preview_api_request(data)
|
||||
|
||||
# hardcode lastDataVersion for CardMaker 1.35 A028
|
||||
user_data["lastDataVersion"] = "2.05.00"
|
||||
|
@ -13,7 +13,7 @@ class ChuniParadise(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_PARADISE
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.50.00"
|
||||
return ret
|
||||
|
@ -11,7 +11,7 @@ class ChuniPlus(ChuniBase):
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_PLUS
|
||||
|
||||
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = super().handle_get_game_setting_api_request(data)
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
ret = await super().handle_get_game_setting_api_request(data)
|
||||
ret["gameSetting"]["dataVersion"] = "1.05.00"
|
||||
return ret
|
||||
|
@ -28,7 +28,7 @@ class ChuniReader(BaseReader):
|
||||
self.logger.error(f"Invalid chunithm version {version}")
|
||||
exit(1)
|
||||
|
||||
def read(self) -> None:
|
||||
async def read(self) -> None:
|
||||
data_dirs = []
|
||||
if self.bin_dir is not None:
|
||||
data_dirs += self.get_data_directories(self.bin_dir)
|
||||
@ -38,13 +38,13 @@ class ChuniReader(BaseReader):
|
||||
|
||||
for dir in data_dirs:
|
||||
self.logger.info(f"Read from {dir}")
|
||||
self.read_events(f"{dir}/event")
|
||||
self.read_music(f"{dir}/music")
|
||||
self.read_charges(f"{dir}/chargeItem")
|
||||
self.read_avatar(f"{dir}/avatarAccessory")
|
||||
self.read_login_bonus(f"{dir}/")
|
||||
await self.read_events(f"{dir}/event")
|
||||
await self.read_music(f"{dir}/music")
|
||||
await self.read_charges(f"{dir}/chargeItem")
|
||||
await self.read_avatar(f"{dir}/avatarAccessory")
|
||||
await self.read_login_bonus(f"{dir}/")
|
||||
|
||||
def read_login_bonus(self, root_dir: str) -> None:
async def read_login_bonus(self, root_dir: str) -> None:
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
for dir in dirs:
if path.exists(f"{root}/{dir}/LoginBonusPreset.xml"):
@ -60,7 +60,7 @@ class ChuniReader(BaseReader):
True if xml_root.find("disableFlag").text == "false" else False
)
result = self.data.static.put_login_bonus_preset(
result = await self.data.static.put_login_bonus_preset(
self.version, id, name, is_enabled
)
@ -98,7 +98,7 @@ class ChuniReader(BaseReader):
bonus_root.find("loginBonusCategoryType").text
)
result = self.data.static.put_login_bonus(
result = await self.data.static.put_login_bonus(
self.version,
id,
bonus_id,
@ -117,7 +117,7 @@ class ChuniReader(BaseReader):
f"Failed to insert login bonus {bonus_id}"
)
def read_events(self, evt_dir: str) -> None:
async def read_events(self, evt_dir: str) -> None:
for root, dirs, files in walk(evt_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Event.xml"):
@ -132,7 +132,7 @@ class ChuniReader(BaseReader):
for substances in xml_root.findall("substances"):
event_type = substances.find("type").text
result = self.data.static.put_event(
result = await self.data.static.put_event(
self.version, id, event_type, name
)
if result is not None:
@ -140,7 +140,7 @@ class ChuniReader(BaseReader):
else:
self.logger.warning(f"Failed to insert event {id}")
def read_music(self, music_dir: str) -> None:
async def read_music(self, music_dir: str) -> None:
for root, dirs, files in walk(music_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Music.xml"):
@ -185,7 +185,7 @@ class ChuniReader(BaseReader):
)
we_chara = None
result = self.data.static.put_music(
result = await self.data.static.put_music(
self.version,
song_id,
chart_id,
@ -206,7 +206,7 @@ class ChuniReader(BaseReader):
f"Failed to insert music {song_id} chart {chart_id}"
)
def read_charges(self, charge_dir: str) -> None:
async def read_charges(self, charge_dir: str) -> None:
for root, dirs, files in walk(charge_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/ChargeItem.xml"):
@ -222,7 +222,7 @@ class ChuniReader(BaseReader):
consumeType = xml_root.find("consumeType").text
sellingAppeal = bool(xml_root.find("sellingAppeal").text)
result = self.data.static.put_charge(
result = await self.data.static.put_charge(
self.version,
id,
name,
@ -236,7 +236,7 @@ class ChuniReader(BaseReader):
else:
self.logger.warning(f"Failed to insert charge {id}")
def read_avatar(self, avatar_dir: str) -> None:
async def read_avatar(self, avatar_dir: str) -> None:
for root, dirs, files in walk(avatar_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
@ -254,7 +254,7 @@ class ChuniReader(BaseReader):
for texture in xml_root.findall("texture"):
texturePath = texture.find("path").text
result = self.data.static.put_avatar(
result = await self.data.static.put_avatar(
self.version, id, name, category, iconPath, texturePath
)
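All of the read_* helpers above share the same shape: walk a directory, look for one specific XML file, parse it, then await a static-data upsert. A stripped-down sketch of the walking/parsing half, mirroring the LoginBonusPreset case (element names other than disableFlag are assumptions):

    from os import path, walk
    from xml.etree import ElementTree

    def iter_login_bonus_presets(root_dir: str):
        # Yield (preset_dir, is_enabled) for every LoginBonusPreset.xml under root_dir
        for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
            for dir in dirs:
                xml_path = f"{root}/{dir}/LoginBonusPreset.xml"
                if not path.exists(xml_path):
                    continue
                xml_root = ElementTree.parse(xml_path).getroot()
                is_enabled = xml_root.find("disableFlag").text == "false"
                yield dir, is_enabled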
@ -245,7 +245,7 @@ matching = Table(
class ChuniItemData(BaseData):
def get_oldest_free_matching(self, version: int) -> Optional[Row]:
async def get_oldest_free_matching(self, version: int) -> Optional[Row]:
sql = matching.select(
and_(
matching.c.version == version,
@ -253,46 +253,46 @@ class ChuniItemData(BaseData):
)
).order_by(matching.c.roomId.asc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
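Every query in this class now awaits self.execute, which means BaseData.execute itself must be a coroutine. That method is not part of this hunk; the following is only a sketch of what an awaitable execute could look like on top of SQLAlchemy's asyncio support (the DSN and the swallow-errors-into-None behaviour are assumptions based on how callers treat a None result):

    from typing import Any
    from sqlalchemy.ext.asyncio import create_async_engine

    class ExampleBaseData:
        def __init__(self, dsn: str) -> None:
            # e.g. "mysql+aiomysql://user:pass@host/db" -- hypothetical connection string
            self.engine = create_async_engine(dsn, pool_recycle=3600)

        async def execute(self, stmt: Any):
            # One statement per connection; async drivers buffer rows, so callers
            # can still fetchone()/fetchall() on the returned result.
            try:
                async with self.engine.begin() as conn:
                    return await conn.execute(stmt)
            except Exception:
                return None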
def get_newest_matching(self, version: int) -> Optional[Row]:
async def get_newest_matching(self, version: int) -> Optional[Row]:
sql = matching.select(
and_(
matching.c.version == version
)
).order_by(matching.c.roomId.desc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_all_matchings(self, version: int) -> Optional[List[Row]]:
async def get_all_matchings(self, version: int) -> Optional[List[Row]]:
sql = matching.select(
and_(
matching.c.version == version
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_matching(self, version: int, room_id: int) -> Optional[Row]:
async def get_matching(self, version: int, room_id: int) -> Optional[Row]:
sql = matching.select(
and_(matching.c.version == version, matching.c.roomId == room_id)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_matching(
async def put_matching(
self,
version: int,
room_id: int,
@ -314,22 +314,22 @@ class ChuniItemData(BaseData):
restMSec=rest_sec, matchingMemberInfoList=matching_member_info_list
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def delete_matching(self, version: int, room_id: int):
async def delete_matching(self, version: int, room_id: int):
sql = delete(matching).where(
and_(matching.c.roomId == room_id, matching.c.version == version)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.lastrowid
def get_all_favorites(
async def get_all_favorites(
self, user_id: int, version: int, fav_kind: int = 1
) -> Optional[List[Row]]:
sql = favorite.select(
@ -340,12 +340,12 @@ class ChuniItemData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_login_bonus(
async def put_login_bonus(
self, user_id: int, version: int, preset_id: int, **login_bonus_data
) -> Optional[int]:
sql = insert(login_bonus).values(
@ -354,12 +354,12 @@ class ChuniItemData(BaseData):
conflict = sql.on_duplicate_key_update(presetId=preset_id, **login_bonus_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
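The put_* methods all build the same MySQL upsert: a dialect-specific insert followed by on_duplicate_key_update with the same values, so a repeated write for the same key overwrites the row instead of failing. A self-contained sketch of that pattern against a toy table (table and column names are illustrative):

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.dialects.mysql import insert

    metadata = MetaData()
    example_item = Table(
        "example_item", metadata,
        Column("user", Integer, primary_key=True),
        Column("itemId", Integer, primary_key=True),
        Column("stock", Integer),
        Column("note", String(32)),
    )

    def build_upsert(user_id: int, item_data: dict):
        # INSERT ... ON DUPLICATE KEY UPDATE with identical values on both sides
        item_data = dict(item_data, user=user_id)
        stmt = insert(example_item).values(**item_data)
        return stmt.on_duplicate_key_update(**item_data)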
def get_all_login_bonus(
async def get_all_login_bonus(
self, user_id: int, version: int, is_finished: bool = False
) -> Optional[List[Row]]:
sql = login_bonus.select(
@ -370,12 +370,12 @@ class ChuniItemData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_login_bonus(
async def get_login_bonus(
self, user_id: int, version: int, preset_id: int
) -> Optional[Row]:
sql = login_bonus.select(
@ -386,12 +386,12 @@ class ChuniItemData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
async def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
character_data["user"] = user_id
character_data = self.fix_bools(character_data)
@ -399,30 +399,30 @@ class ChuniItemData(BaseData):
sql = insert(character).values(**character_data)
conflict = sql.on_duplicate_key_update(**character_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_character(self, user_id: int, character_id: int) -> Optional[Dict]:
async def get_character(self, user_id: int, character_id: int) -> Optional[Dict]:
sql = select(character).where(
and_(character.c.user == user_id, character.c.characterId == character_id)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_characters(self, user_id: int) -> Optional[List[Row]]:
async def get_characters(self, user_id: int) -> Optional[List[Row]]:
sql = select(character).where(character.c.user == user_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_item(self, user_id: int, item_data: Dict) -> Optional[int]:
async def put_item(self, user_id: int, item_data: Dict) -> Optional[int]:
item_data["user"] = user_id
item_data = self.fix_bools(item_data)
@ -430,12 +430,12 @@ class ChuniItemData(BaseData):
sql = insert(item).values(**item_data)
conflict = sql.on_duplicate_key_update(**item_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
async def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
if kind is None:
sql = select(item).where(item.c.user == user_id)
else:
@ -443,12 +443,12 @@ class ChuniItemData(BaseData):
and_(item.c.user == user_id, item.c.itemKind == kind)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_duel(self, user_id: int, duel_data: Dict) -> Optional[int]:
async def put_duel(self, user_id: int, duel_data: Dict) -> Optional[int]:
duel_data["user"] = user_id
duel_data = self.fix_bools(duel_data)
@ -456,20 +456,20 @@ class ChuniItemData(BaseData):
sql = insert(duel).values(**duel_data)
conflict = sql.on_duplicate_key_update(**duel_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_duels(self, user_id: int) -> Optional[List[Row]]:
async def get_duels(self, user_id: int) -> Optional[List[Row]]:
sql = select(duel).where(duel.c.user == user_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_map(self, user_id: int, map_data: Dict) -> Optional[int]:
async def put_map(self, user_id: int, map_data: Dict) -> Optional[int]:
map_data["user"] = user_id
map_data = self.fix_bools(map_data)
@ -477,20 +477,20 @@ class ChuniItemData(BaseData):
sql = insert(map).values(**map_data)
conflict = sql.on_duplicate_key_update(**map_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_maps(self, user_id: int) -> Optional[List[Row]]:
async def get_maps(self, user_id: int) -> Optional[List[Row]]:
sql = select(map).where(map.c.user == user_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_map_area(self, user_id: int, map_area_data: Dict) -> Optional[int]:
async def put_map_area(self, user_id: int, map_area_data: Dict) -> Optional[int]:
map_area_data["user"] = user_id
map_area_data = self.fix_bools(map_area_data)
@ -498,28 +498,28 @@ class ChuniItemData(BaseData):
sql = insert(map_area).values(**map_area_data)
conflict = sql.on_duplicate_key_update(**map_area_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_map_areas(self, user_id: int) -> Optional[List[Row]]:
async def get_map_areas(self, user_id: int) -> Optional[List[Row]]:
sql = select(map_area).where(map_area.c.user == user_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_user_gachas(self, aime_id: int) -> Optional[List[Row]]:
async def get_user_gachas(self, aime_id: int) -> Optional[List[Row]]:
sql = gacha.select(gacha.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_user_gacha(
async def put_user_gacha(
self, aime_id: int, gacha_id: int, gacha_data: Dict
) -> Optional[int]:
sql = insert(gacha).values(user=aime_id, gachaId=gacha_id, **gacha_data)
@ -527,14 +527,14 @@ class ChuniItemData(BaseData):
conflict = sql.on_duplicate_key_update(
user=aime_id, gachaId=gacha_id, **gacha_data
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(f"put_user_gacha: Failed to insert! aime_id: {aime_id}")
return None
return result.lastrowid
def get_user_print_states(
async def get_user_print_states(
self, aime_id: int, has_completed: bool = False
) -> Optional[List[Row]]:
sql = print_state.select(
@ -544,12 +544,12 @@ class ChuniItemData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_user_print_states_by_gacha(
async def get_user_print_states_by_gacha(
self, aime_id: int, gacha_id: int, has_completed: bool = False
) -> Optional[List[Row]]:
sql = print_state.select(
@ -560,16 +560,16 @@ class ChuniItemData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_user_print_state(self, aime_id: int, **print_data) -> Optional[int]:
async def put_user_print_state(self, aime_id: int, **print_data) -> Optional[int]:
sql = insert(print_state).values(user=aime_id, **print_data)
conflict = sql.on_duplicate_key_update(user=aime_id, **print_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -578,7 +578,7 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
def put_user_print_detail(
async def put_user_print_detail(
self, aime_id: int, serial_id: str, user_print_data: Dict
) -> Optional[int]:
sql = insert(print_detail).values(
@ -586,7 +586,7 @@ class ChuniItemData(BaseData):
)
conflict = sql.on_duplicate_key_update(user=aime_id, **user_print_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -395,7 +395,7 @@ team = Table(
class ChuniProfileData(BaseData):
def put_profile_data(
async def put_profile_data(
self, aime_id: int, version: int, profile_data: Dict
) -> Optional[int]:
profile_data["user"] = aime_id
@ -407,26 +407,26 @@ class ChuniProfileData(BaseData):
sql = insert(profile).values(**profile_data)
conflict = sql.on_duplicate_key_update(**profile_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(f"put_profile_data: Failed to update! aime_id: {aime_id}")
return None
return result.lastrowid
def get_profile_preview(self, aime_id: int, version: int) -> Optional[Row]:
async def get_profile_preview(self, aime_id: int, version: int) -> Optional[Row]:
sql = (
select([profile, option])
.join(option, profile.c.user == option.c.user)
.filter(and_(profile.c.user == aime_id, profile.c.version <= version))
).order_by(profile.c.version.desc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_profile_data(self, aime_id: int, version: int) -> Optional[Row]:
async def get_profile_data(self, aime_id: int, version: int) -> Optional[Row]:
sql = select(profile).where(
and_(
profile.c.user == aime_id,
@ -434,12 +434,12 @@ class ChuniProfileData(BaseData):
)
).order_by(profile.c.version.desc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_profile_data_ex(
async def put_profile_data_ex(
self, aime_id: int, version: int, profile_ex_data: Dict
) -> Optional[int]:
profile_ex_data["user"] = aime_id
@ -449,7 +449,7 @@ class ChuniProfileData(BaseData):
sql = insert(profile_ex).values(**profile_ex_data)
conflict = sql.on_duplicate_key_update(**profile_ex_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -458,7 +458,7 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_data_ex(self, aime_id: int, version: int) -> Optional[Row]:
async def get_profile_data_ex(self, aime_id: int, version: int) -> Optional[Row]:
sql = select(profile_ex).where(
and_(
profile_ex.c.user == aime_id,
@ -466,17 +466,17 @@ class ChuniProfileData(BaseData):
)
).order_by(profile_ex.c.version.desc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_profile_option(self, aime_id: int, option_data: Dict) -> Optional[int]:
async def put_profile_option(self, aime_id: int, option_data: Dict) -> Optional[int]:
option_data["user"] = aime_id
sql = insert(option).values(**option_data)
conflict = sql.on_duplicate_key_update(**option_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -485,22 +485,22 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_option(self, aime_id: int) -> Optional[Row]:
async def get_profile_option(self, aime_id: int) -> Optional[Row]:
sql = select(option).where(option.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_profile_option_ex(
async def put_profile_option_ex(
self, aime_id: int, option_ex_data: Dict
) -> Optional[int]:
option_ex_data["user"] = aime_id
sql = insert(option_ex).values(**option_ex_data)
conflict = sql.on_duplicate_key_update(**option_ex_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -509,15 +509,15 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_option_ex(self, aime_id: int) -> Optional[Row]:
async def get_profile_option_ex(self, aime_id: int) -> Optional[Row]:
sql = select(option_ex).where(option_ex.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_profile_recent_rating(
async def put_profile_recent_rating(
self, aime_id: int, recent_rating_data: List[Dict]
) -> Optional[int]:
sql = insert(recent_rating).values(
@ -525,7 +525,7 @@ class ChuniProfileData(BaseData):
)
conflict = sql.on_duplicate_key_update(recentRating=recent_rating_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
f"put_profile_recent_rating: Failed to update! aime_id: {aime_id}"
@ -533,15 +533,15 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_recent_rating(self, aime_id: int) -> Optional[Row]:
async def get_profile_recent_rating(self, aime_id: int) -> Optional[Row]:
sql = select(recent_rating).where(recent_rating.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_profile_activity(self, aime_id: int, activity_data: Dict) -> Optional[int]:
async def put_profile_activity(self, aime_id: int, activity_data: Dict) -> Optional[int]:
# The game just uses "id" but we need to distinguish that from the db column "id"
activity_data["user"] = aime_id
activity_data["activityId"] = activity_data["id"]
@ -549,7 +549,7 @@ class ChuniProfileData(BaseData):
sql = insert(activity).values(**activity_data)
conflict = sql.on_duplicate_key_update(**activity_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -558,24 +558,24 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_activity(self, aime_id: int, kind: int) -> Optional[List[Row]]:
async def get_profile_activity(self, aime_id: int, kind: int) -> Optional[List[Row]]:
sql = (
select(activity)
.where(and_(activity.c.user == aime_id, activity.c.kind == kind))
.order_by(activity.c.sortNumber.desc()) # to get the last played track
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_profile_charge(self, aime_id: int, charge_data: Dict) -> Optional[int]:
async def put_profile_charge(self, aime_id: int, charge_data: Dict) -> Optional[int]:
charge_data["user"] = aime_id
sql = insert(charge).values(**charge_data)
conflict = sql.on_duplicate_key_update(**charge_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(
@ -584,40 +584,40 @@ class ChuniProfileData(BaseData):
return None
return result.lastrowid
def get_profile_charge(self, aime_id: int) -> Optional[List[Row]]:
async def get_profile_charge(self, aime_id: int) -> Optional[List[Row]]:
sql = select(charge).where(charge.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def add_profile_region(self, aime_id: int, region_id: int) -> Optional[int]:
async def add_profile_region(self, aime_id: int, region_id: int) -> Optional[int]:
pass
def get_profile_regions(self, aime_id: int) -> Optional[List[Row]]:
async def get_profile_regions(self, aime_id: int) -> Optional[List[Row]]:
pass
def put_profile_emoney(self, aime_id: int, emoney_data: Dict) -> Optional[int]:
async def put_profile_emoney(self, aime_id: int, emoney_data: Dict) -> Optional[int]:
emoney_data["user"] = aime_id
sql = insert(emoney).values(**emoney_data)
conflict = sql.on_duplicate_key_update(**emoney_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_profile_emoney(self, aime_id: int) -> Optional[List[Row]]:
async def get_profile_emoney(self, aime_id: int) -> Optional[List[Row]]:
sql = select(emoney).where(emoney.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_profile_overpower(
async def put_profile_overpower(
self, aime_id: int, overpower_data: Dict
) -> Optional[int]:
overpower_data["user"] = aime_id
@ -625,31 +625,31 @@ class ChuniProfileData(BaseData):
sql = insert(overpower).values(**overpower_data)
conflict = sql.on_duplicate_key_update(**overpower_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_profile_overpower(self, aime_id: int) -> Optional[List[Row]]:
async def get_profile_overpower(self, aime_id: int) -> Optional[List[Row]]:
sql = select(overpower).where(overpower.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_team_by_id(self, team_id: int) -> Optional[Row]:
async def get_team_by_id(self, team_id: int) -> Optional[Row]:
sql = select(team).where(team.c.id == team_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_team_rank(self, team_id: int) -> int:
async def get_team_rank(self, team_id: int) -> int:
# Normal ranking system, likely the one used in the real servers
# Query all teams sorted by 'teamPoint'
result = self.execute(
result = await self.execute(
select(team.c.id).order_by(team.c.teamPoint.desc())
)
@ -666,13 +666,13 @@ class ChuniProfileData(BaseData):
# RIP scaled team ranking. Gone, but forgotten
# def get_team_rank_scaled(self, team_id: int) -> int:
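get_team_rank is mostly cut off by the hunk above, but the comments describe the approach: fetch every team id ordered by teamPoint and take the 1-based position of the requested team. A sketch of that post-processing step (the helper name and None-on-missing convention are assumptions):

    from typing import Optional, Sequence

    def rank_from_ordered_ids(team_ids_by_points: Sequence[int], team_id: int) -> Optional[int]:
        # team_ids_by_points is already sorted by teamPoint descending,
        # so the rank is simply the team's index plus one
        try:
            return team_ids_by_points.index(team_id) + 1
        except ValueError:
            return None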
def update_team(self, team_id: int, team_data: Dict) -> bool:
async def update_team(self, team_id: int, team_data: Dict) -> bool:
team_data["id"] = team_id
sql = insert(team).values(**team_data)
conflict = sql.on_duplicate_key_update(**team_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warn(
@ -680,16 +680,16 @@ class ChuniProfileData(BaseData):
)
return False
return True
def get_rival(self, rival_id: int) -> Optional[Row]:
async def get_rival(self, rival_id: int) -> Optional[Row]:
sql = select(profile).where(profile.c.user == rival_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_overview(self) -> Dict:
async def get_overview(self) -> Dict:
# Fetch and add up all the playcounts
playcount_sql = self.execute(select(profile.c.playCount))
playcount_sql = await self.execute(select(profile.c.playCount))
if playcount_sql is None:
self.logger.warn(
@ -142,55 +142,55 @@ playlog = Table(
class ChuniScoreData(BaseData):
def get_courses(self, aime_id: int) -> Optional[Row]:
async def get_courses(self, aime_id: int) -> Optional[Row]:
sql = select(course).where(course.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_course(self, aime_id: int, course_data: Dict) -> Optional[int]:
async def put_course(self, aime_id: int, course_data: Dict) -> Optional[int]:
course_data["user"] = aime_id
course_data = self.fix_bools(course_data)
sql = insert(course).values(**course_data)
conflict = sql.on_duplicate_key_update(**course_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_scores(self, aime_id: int) -> Optional[Row]:
async def get_scores(self, aime_id: int) -> Optional[Row]:
sql = select(best_score).where(best_score.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_score(self, aime_id: int, score_data: Dict) -> Optional[int]:
async def put_score(self, aime_id: int, score_data: Dict) -> Optional[int]:
score_data["user"] = aime_id
score_data = self.fix_bools(score_data)
sql = insert(best_score).values(**score_data)
conflict = sql.on_duplicate_key_update(**score_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_playlogs(self, aime_id: int) -> Optional[Row]:
async def get_playlogs(self, aime_id: int) -> Optional[Row]:
sql = select(playlog).where(playlog.c.user == aime_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
async def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
# Calculate the ROM version that should be inserted into the DB, based on the version of the game being inserted
# We only need this for Version 10 (Paradise Lost) and back, as newer versions include romVersion in their upsert
# This matters both for gameRankings, as well as a future DB update to keep version data separate
@ -216,12 +216,12 @@ class ChuniScoreData(BaseData):
sql = insert(playlog).values(**playlog_data)
conflict = sql.on_duplicate_key_update(**playlog_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_rankings(self, version: int) -> Optional[List[Dict]]:
async def get_rankings(self, version: int) -> Optional[List[Dict]]:
# Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved
# This prevents tracks that are not accessible in your version from counting towards the 10 results
romVer = {
@ -241,7 +241,7 @@ class ChuniScoreData(BaseData):
0: "1.00%"
}
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
@ -249,10 +249,10 @@ class ChuniScoreData(BaseData):
rows = result.fetchall()
return [dict(row) for row in rows]
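The romVer mapping in get_rankings is mostly elided by the hunk above; only the 0: "1.00%" entry is visible. The idea is a romVersion LIKE-prefix per game version, with a match-everything fallback, roughly:

    # Illustrative only -- the full mapping is not shown in this hunk
    ROM_VERSION_PREFIX = {
        0: "1.00%",  # the one entry visible above
    }

    def rom_version_pattern(version: int) -> str:
        # Unknown versions fall back to "%", i.e. no romVersion filtering at all
        return ROM_VERSION_PREFIX.get(version, "%")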
def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
async def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
sql = select(best_score).where(best_score.c.user == rival_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
@ -175,7 +175,7 @@ login_bonus = Table(
class ChuniStaticData(BaseData):
def put_login_bonus(
async def put_login_bonus(
self,
version: int,
preset_id: int,
@ -207,12 +207,12 @@ class ChuniStaticData(BaseData):
loginBonusCategoryType=login_bonus_category_type,
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_login_bonus(
async def get_login_bonus(
self,
version: int,
preset_id: int,
@ -224,12 +224,12 @@ class ChuniStaticData(BaseData):
)
).order_by(login_bonus.c.needLoginDayCount.desc())
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_login_bonus_by_required_days(
async def get_login_bonus_by_required_days(
self, version: int, preset_id: int, need_login_day_count: int
) -> Optional[Row]:
sql = login_bonus.select(
@ -240,12 +240,12 @@ class ChuniStaticData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_login_bonus_preset(
async def put_login_bonus_preset(
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
) -> Optional[int]:
sql = insert(login_bonus_preset).values(
@ -259,12 +259,12 @@ class ChuniStaticData(BaseData):
presetName=preset_name, isEnabled=is_enabled
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_login_bonus_presets(
async def get_login_bonus_presets(
self, version: int, is_enabled: bool = True
) -> Optional[List[Row]]:
sql = login_bonus_preset.select(
@ -274,12 +274,12 @@ class ChuniStaticData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_event(
async def put_event(
self, version: int, event_id: int, type: int, name: str
) -> Optional[int]:
sql = insert(events).values(
@ -288,19 +288,19 @@ class ChuniStaticData(BaseData):
conflict = sql.on_duplicate_key_update(name=name)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def update_event(
async def update_event(
self, version: int, event_id: int, enabled: bool
) -> Optional[bool]:
sql = events.update(
and_(events.c.version == version, events.c.eventId == event_id)
).values(enabled=enabled)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.warning(
f"update_event: failed to update event! version: {version}, event_id: {event_id}, enabled: {enabled}"
@ -315,35 +315,35 @@ class ChuniStaticData(BaseData):
return None
return event["enabled"]
def get_event(self, version: int, event_id: int) -> Optional[Row]:
async def get_event(self, version: int, event_id: int) -> Optional[Row]:
sql = select(events).where(
and_(events.c.version == version, events.c.eventId == event_id)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_enabled_events(self, version: int) -> Optional[List[Row]]:
async def get_enabled_events(self, version: int) -> Optional[List[Row]]:
sql = select(events).where(
and_(events.c.version == version, events.c.enabled == True)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_events(self, version: int) -> Optional[List[Row]]:
async def get_events(self, version: int) -> Optional[List[Row]]:
sql = select(events).where(events.c.version == version)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def put_music(
async def put_music(
self,
version: int,
song_id: int,
@ -376,12 +376,12 @@ class ChuniStaticData(BaseData):
worldsEndTag=we_tag,
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def put_charge(
async def put_charge(
self,
version: int,
charge_id: int,
@ -406,38 +406,38 @@ class ChuniStaticData(BaseData):
sellingAppeal=selling_appeal,
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def get_enabled_charges(self, version: int) -> Optional[List[Row]]:
async def get_enabled_charges(self, version: int) -> Optional[List[Row]]:
sql = select(charge).where(
and_(charge.c.version == version, charge.c.enabled == True)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_charges(self, version: int) -> Optional[List[Row]]:
async def get_charges(self, version: int) -> Optional[List[Row]]:
sql = select(charge).where(charge.c.version == version)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_music(self, version: int) -> Optional[List[Row]]:
async def get_music(self, version: int) -> Optional[List[Row]]:
sql = music.select(music.c.version <= version)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_music_chart(
async def get_music_chart(
self, version: int, song_id: int, chart_id: int
) -> Optional[List[Row]]:
sql = select(music).where(
@ -448,21 +448,21 @@ class ChuniStaticData(BaseData):
)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def get_song(self, music_id: int) -> Optional[Row]:
async def get_song(self, music_id: int) -> Optional[Row]:
sql = music.select(music.c.id == music_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_avatar(
async def put_avatar(
self,
version: int,
avatarAccessoryId: int,
@ -487,12 +487,12 @@ class ChuniStaticData(BaseData):
texturePath=texturePath,
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
def put_gacha(
async def put_gacha(
self,
version: int,
gacha_id: int,
@ -513,33 +513,33 @@ class ChuniStaticData(BaseData):
**gacha_data,
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert gacha! gacha_id {gacha_id}")
return None
return result.lastrowid
def get_gachas(self, version: int) -> Optional[List[Dict]]:
async def get_gachas(self, version: int) -> Optional[List[Dict]]:
sql = gachas.select(gachas.c.version <= version).order_by(
gachas.c.gachaId.asc()
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_gacha(self, version: int, gacha_id: int) -> Optional[Dict]:
async def get_gacha(self, version: int, gacha_id: int) -> Optional[Dict]:
sql = gachas.select(
and_(gachas.c.version <= version, gachas.c.gachaId == gacha_id)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_gacha_card(
async def put_gacha_card(
self, gacha_id: int, card_id: int, **gacha_card
) -> Optional[int]:
sql = insert(gacha_cards).values(gachaId=gacha_id, cardId=card_id, **gacha_card)
@ -548,21 +548,21 @@ class ChuniStaticData(BaseData):
gachaId=gacha_id, cardId=card_id, **gacha_card
)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert gacha card! gacha_id {gacha_id}")
return None
return result.lastrowid
def get_gacha_cards(self, gacha_id: int) -> Optional[List[Dict]]:
async def get_gacha_cards(self, gacha_id: int) -> Optional[List[Dict]]:
sql = gacha_cards.select(gacha_cards.c.gachaId == gacha_id)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
def get_gacha_card_by_character(
async def get_gacha_card_by_character(
self, gacha_id: int, chara_id: int
) -> Optional[Dict]:
sql_sub = (
@ -574,26 +574,26 @@ class ChuniStaticData(BaseData):
and_(gacha_cards.c.gachaId == gacha_id, gacha_cards.c.cardId == sql_sub)
)
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
conflict = sql.on_duplicate_key_update(**card_data)
result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert card! card_id {card_id}")
return None
return result.lastrowid
def get_card(self, version: int, card_id: int) -> Optional[Dict]:
async def get_card(self, version: int, card_id: int) -> Optional[Dict]:
sql = cards.select(and_(cards.c.version <= version, cards.c.cardId == card_id))
result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -11,7 +11,7 @@ class ChuniStar(ChuniBase):
super().__init__(core_cfg, game_cfg)
self.version = ChuniConstants.VER_CHUNITHM_STAR
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = super().handle_get_game_setting_api_request(data)
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = await super().handle_get_game_setting_api_request(data)
ret["gameSetting"]["dataVersion"] = "1.20.00"
return ret
@ -11,7 +11,7 @@ class ChuniStarPlus(ChuniBase):
super().__init__(core_cfg, game_cfg)
self.version = ChuniConstants.VER_CHUNITHM_STAR_PLUS
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = super().handle_get_game_setting_api_request(data)
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = await super().handle_get_game_setting_api_request(data)
ret["gameSetting"]["dataVersion"] = "1.25.00"
return ret
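The per-version handlers all get the same mechanical change: once the base handler is a coroutine, every override must be declared async and must await super(). A minimal standalone illustration (class names and the returned payload are made up):

    from typing import Dict

    class BaseHandlers:
        async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
            return {"gameSetting": {"dataVersion": "1.00.00"}}

    class StarHandlers(BaseHandlers):
        async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
            # Without the await this would assign a coroutine object, not a dict
            ret = await super().handle_get_game_setting_api_request(data)
            ret["gameSetting"]["dataVersion"] = "1.20.00"
            return ret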
@ -11,8 +11,8 @@ class ChuniSun(ChuniNewPlus):
super().__init__(core_cfg, game_cfg)
self.version = ChuniConstants.VER_CHUNITHM_SUN
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = super().handle_cm_get_user_preview_api_request(data)
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = await super().handle_cm_get_user_preview_api_request(data)
# hardcode lastDataVersion for CardMaker 1.35 A032
user_data["lastDataVersion"] = "2.10.00"
@ -11,8 +11,8 @@ class ChuniSunPlus(ChuniSun):
super().__init__(core_cfg, game_cfg)
self.version = ChuniConstants.VER_CHUNITHM_SUN_PLUS
def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = super().handle_cm_get_user_preview_api_request(data)
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
user_data = await super().handle_cm_get_user_preview_api_request(data)
# I don't know if lastDataVersion is going to matter, I don't think CardMaker 1.35 works this far up
user_data["lastDataVersion"] = "2.15.00"
@ -6,7 +6,4 @@ from titles.cm.database import CardMakerData
index = CardMakerServlet
reader = CardMakerReader
database = CardMakerData
game_codes = [CardMakerConstants.GAME_CODE]
current_schema_version = 1
@ -29,11 +29,11 @@ class CardMakerBase:
def _parse_int_ver(version: str) -> str:
return version.replace(".", "")[:3]
def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
async def handle_get_game_connect_api_request(self, data: Dict) -> Dict:
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
uri = f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}"
uri = f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}"
else:
uri = f"http://{self.core_cfg.title.hostname}"
uri = f"http://{self.core_cfg.server.hostname}"
# grab the dict with all games version numbers from user config
games_ver = self.game_cfg.version.version(self.version)
@ -62,7 +62,7 @@ class CardMakerBase:
],
}
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
# if reboot start/end time is not defined use the default behavior of being a few hours ago
if self.core_cfg.title.reboot_start_time == "" or self.core_cfg.title.reboot_end_time == "":
reboot_start = datetime.strftime(
@ -110,11 +110,11 @@ class CardMakerBase:
"isAou": False,
}
def handle_get_client_bookkeeping_api_request(self, data: Dict) -> Dict:
async def handle_get_client_bookkeeping_api_request(self, data: Dict) -> Dict:
return {"placeId": data["placeId"], "length": 0, "clientBookkeepingList": []}
def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
async def handle_upsert_client_setting_api_request(self, data: Dict) -> Dict:
return {"returnCode": 1, "apiName": "UpsertClientSettingApi"}
def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
async def handle_upsert_client_bookkeeping_api_request(self, data: Dict) -> Dict:
return {"returnCode": 1, "apiName": "UpsertClientBookkeepingApi"}
@ -12,7 +12,7 @@ class CardMaker135(CardMakerBase):
super().__init__(core_cfg, game_cfg)
self.version = CardMakerConstants.VER_CARD_MAKER_135
def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = super().handle_get_game_setting_api_request(data)
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
ret = await super().handle_get_game_setting_api_request(data)
ret["gameSetting"]["dataVersion"] = "1.35.00"
return ret
@ -5,10 +5,11 @@ import string
import logging
import coloredlogs
import zlib
from starlette.routing import Route
from starlette.responses import Response
from starlette.requests import Request
from os import path
from typing import Tuple, List, Dict
from twisted.web.http import Request
from typing import List
from logging.handlers import TimedRotatingFileHandler
from core.config import CoreConfig
@ -19,7 +20,6 @@ from .const import CardMakerConstants
from .base import CardMakerBase
from .cm135 import CardMaker135
class CardMakerServlet(BaseServlet):
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
super().__init__(core_cfg, cfg_dir)
@ -72,16 +72,15 @@ class CardMakerServlet(BaseServlet):
return True
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
return (
[],
[("render_POST", "/SDED/{version}/{endpoint}", {})]
)
def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
version = int(matchers['version'])
endpoint = matchers['endpoint']
req_raw = request.content.getvalue()
def get_routes(self) -> List[Route]:
return [
Route("/SDED/{version:int}/{endpoint:str}", self.render_POST)
]
async def render_POST(self, request: Request) -> bytes:
version: int = request.path_params.get('version')
endpoint: str = request.path_params.get('endpoint')
req_raw = await request.body()
internal_ver = 0
client_ip = Utils.get_ip_addr(request)
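get_endpoint_matchers is replaced by Starlette's own routing: get_routes returns Route objects whose path converters ({version:int}, {endpoint:str}) populate request.path_params, and the handler awaits request.body() instead of reading a Twisted request buffer. A standalone sketch of that request flow, assuming the same zlib-compressed JSON wire format (handler name and payload are illustrative):

    import json
    import zlib

    from starlette.applications import Starlette
    from starlette.requests import Request
    from starlette.responses import Response
    from starlette.routing import Route

    async def render_post(request: Request) -> Response:
        version: int = request.path_params["version"]    # already an int via :int
        endpoint: str = request.path_params["endpoint"]
        req_data = json.loads(zlib.decompress(await request.body()))
        resp = {"returnCode": 1, "apiName": f"{endpoint}Api", "version": version, "echo": req_data}
        # Responses go back as zlib-compressed JSON wrapped in a Starlette Response
        return Response(zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8")))

    app = Starlette(routes=[Route("/SDED/{version:int}/{endpoint:str}", render_post, methods=["POST"])])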
@ -103,7 +102,7 @@ class CardMakerServlet(BaseServlet):
self.logger.error(
f"Failed to decompress v{version} {endpoint} request -> {e}"
)
return zlib.compress(b'{"stat": "0"}')
return Response(zlib.compress(b'{"stat": "0"}'))
req_data = json.loads(unzip)
@ -114,7 +113,7 @ class CardMakerServlet(BaseServlet):
if not hasattr(self.versions[internal_ver], func_to_find):
self.logger.warning(f"Unhandled v{version} request {endpoint}")
return zlib.compress(b'{"returnCode": 1}')
return Response(zlib.compress(b'{"returnCode": 1}'))
try:
handler = getattr(self.versions[internal_ver], func_to_find)
@ -123,11 +122,11 @@ class CardMakerServlet(BaseServlet):
except Exception as e:
self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
raise
return zlib.compress(b'{"stat": "0"}')
return Response(zlib.compress(b'{"stat": "0"}'))
if resp is None:
resp = {"returnCode": 1}
self.logger.debug(f"Response {resp}")
return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
return Response(zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8")))
@ -50,7 +50,7 @@ class CardMakerReader(BaseReader):
):
return f"{root}/{dir}"
def read(self) -> None:
async def read(self) -> None:
static_datas = {
"static_gachas.csv": "read_ongeki_gacha_csv",
"static_gacha_cards.csv": "read_ongeki_gacha_card_csv",
@ -66,7 +66,7 @@ class CardMakerReader(BaseReader):
for file, func in static_datas.items():
if os.path.exists(f"{self.bin_dir}/MU3/{file}"):
read_csv = getattr(CardMakerReader, func)
read_csv(self, f"{self.bin_dir}/MU3/{file}")
await read_csv(self, f"{self.bin_dir}/MU3/{file}")
else:
self.logger.warning(
f"Couldn't find {file} file in {self.bin_dir}, skipping"
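Note the dispatch above: getattr pulls the coroutine function off the class, and calling it only builds a coroutine, which still has to be awaited for anything to run. A tiny standalone illustration (class and file names are made up):

    import asyncio

    class DemoReader:
        async def read_ongeki_gacha_csv(self, file_path: str) -> None:
            print(f"would parse {file_path}")

    async def dispatch(reader: DemoReader, func_name: str, file_path: str) -> None:
        read_csv = getattr(DemoReader, func_name)  # unbound coroutine function
        await read_csv(reader, file_path)          # pass the instance explicitly, then await

    asyncio.run(dispatch(DemoReader(), "read_ongeki_gacha_csv", "static_gachas.csv"))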
@ -78,12 +78,12 @@ class CardMakerReader(BaseReader):
# ONGEKI (MU3) cannot easily access the bin data (A000.pac)
# so only opt_dir will work for now
for dir in data_dirs:
self.read_chuni_card(f"{dir}/CHU/card")
self.read_chuni_gacha(f"{dir}/CHU/gacha")
self.read_mai2_card(f"{dir}/MAI/card")
self.read_ongeki_gacha(f"{dir}/MU3/gacha")
await self.read_chuni_card(f"{dir}/CHU/card")
await self.read_chuni_gacha(f"{dir}/CHU/gacha")
await self.read_mai2_card(f"{dir}/MAI/card")
await self.read_ongeki_gacha(f"{dir}/MU3/gacha")
def read_chuni_card(self, base_dir: str) -> None:
async def read_chuni_card(self, base_dir: str) -> None:
self.logger.info(f"Reading cards from {base_dir}...")
version_ids = {
@ -114,7 +114,7 @@ class CardMakerReader(BaseReader):
chain = int(troot.find("chain").text)
skill_name = troot.find("skillName").text
self.chuni_data.static.put_card(
await self.chuni_data.static.put_card(
version,
card_id,
charaName=chara_name,
@ -131,7 +131,7 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Added chuni card {card_id}")
def read_chuni_gacha(self, base_dir: str) -> None:
async def read_chuni_gacha(self, base_dir: str) -> None:
self.logger.info(f"Reading gachas from {base_dir}...")
version_ids = {
@ -158,7 +158,7 @@ class CardMakerReader(BaseReader):
True if troot.find("ceilingType").text == "1" else False
)
self.chuni_data.static.put_gacha(
await self.chuni_data.static.put_gacha(
version,
gacha_id,
name,
@ -181,7 +181,7 @@ class CardMakerReader(BaseReader):
True if gacha_card.find("pickup").text == "1" else False
)
self.chuni_data.static.put_gacha_card(
await self.chuni_data.static.put_gacha_card(
gacha_id,
card_id,
weight=weight,
@ -193,7 +193,7 @@ class CardMakerReader(BaseReader):
f"Added chuni card {card_id} to gacha {gacha_id}"
)
def read_mai2_card(self, base_dir: str) -> None:
async def read_mai2_card(self, base_dir: str) -> None:
self.logger.info(f"Reading cards from {base_dir}...")
version_ids = {
@ -231,18 +231,18 @@ class CardMakerReader(BaseReader):
False if re.search(r"\d{2}/\d{2}/\d{2}", name) else enabled
)
self.mai2_data.static.put_card(
await self.mai2_data.static.put_card(
version, card_id, name, enabled=enabled
)
self.logger.info(f"Added mai2 card {card_id}")
def read_ongeki_gacha_csv(self, file_path: str) -> None:
async def read_ongeki_gacha_csv(self, file_path: str) -> None:
self.logger.info(f"Reading gachas from {file_path}...")
with open(file_path, encoding="utf-8") as f:
reader = csv.DictReader(f)
for row in reader:
self.ongeki_data.static.put_gacha(
await self.ongeki_data.static.put_gacha(
row["version"],
row["gachaId"],
row["gachaName"],
@ -254,13 +254,13 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Added ongeki gacha {row['gachaId']}")
def read_ongeki_gacha_card_csv(self, file_path: str) -> None:
async def read_ongeki_gacha_card_csv(self, file_path: str) -> None:
self.logger.info(f"Reading gacha cards from {file_path}...")
with open(file_path, encoding="utf-8") as f:
reader = csv.DictReader(f)
for row in reader:
self.ongeki_data.static.put_gacha_card(
await self.ongeki_data.static.put_gacha_card(
row["gachaId"],
row["cardId"],
rarity=row["rarity"],
@ -271,7 +271,7 @@ class CardMakerReader(BaseReader):
self.logger.info(f"Added ongeki card {row['cardId']} to gacha")
def read_ongeki_gacha(self, base_dir: str) -> None:
async def read_ongeki_gacha(self, base_dir: str) -> None:
self.logger.info(f"Reading gachas from {base_dir}...")
# assuming some GachaKinds based on the GachaType
@ -294,7 +294,7 @@ class CardMakerReader(BaseReader):
# skip already existing gachas
if (
self.ongeki_data.static.get_gacha(
await self.ongeki_data.static.get_gacha(
OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY, gacha_id
)
is not None
@ -320,7 +320,7 @@ class CardMakerReader(BaseReader):
is_ceiling = 1
max_select_point = 33
self.ongeki_data.static.put_gacha(
await self.ongeki_data.static.put_gacha(
version,
gacha_id,
name,
@ -7,4 +7,3 @@ index = CxbServlet
database = CxbData
reader = CxbReader
game_codes = [CxbConstants.GAME_CODE]
current_schema_version = 1
@ -28,14 +28,14 @@ class CxbBase:
return []
def handle_action_rpreq_request(self, data: Dict) -> Dict:
async def handle_action_rpreq_request(self, data: Dict) -> Dict:
return {}
def handle_action_hitreq_request(self, data: Dict) -> Dict:
async def handle_action_hitreq_request(self, data: Dict) -> Dict:
return {"data": []}
def handle_auth_usercheck_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile_index(
async def handle_auth_usercheck_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile_index(
0, data["usercheck"]["authid"], self.version
)
if profile is not None:
@ -45,12 +45,12 @@ class CxbBase:
self.logger.info(f"No profile for aime id {data['usercheck']['authid']}")
return {"exist": "false", "logout": "true"}
def handle_auth_entry_request(self, data: Dict) -> Dict:
async def handle_auth_entry_request(self, data: Dict) -> Dict:
self.logger.info(f"New profile for {data['entry']['authid']}")
return {"token": data["entry"]["authid"], "uid": data["entry"]["authid"]}
def handle_auth_login_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile_index(
async def handle_auth_login_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile_index(
0, data["login"]["authid"], self.version
)
@ -198,14 +198,14 @@ class CxbBase:
).decode("utf-8")
)
def handle_action_loadrange_request(self, data: Dict) -> Dict:
async def handle_action_loadrange_request(self, data: Dict) -> Dict:
range_start = data["loadrange"]["range"][0]
range_end = data["loadrange"]["range"][1]
uid = data["loadrange"]["uid"]
self.logger.info(f"Load data for {uid}")
profile = self.data.profile.get_profile(uid, self.version)
songs = self.data.score.get_best_scores(uid)
profile = await self.data.profile.get_profile(uid, self.version)
songs = await self.data.score.get_best_scores(uid)
data1 = []
|
||||
index = []
|
||||
@ -271,7 +271,7 @@ class CxbBase:
|
||||
thread_ScoreData = Thread(target=CxbBase.task_generateScoreData(song, index, data1))
|
||||
thread_ScoreData.start()
|
||||
|
||||
v_profile = self.data.profile.get_profile_index(0, uid, self.version)
|
||||
v_profile = await self.data.profile.get_profile_index(0, uid, self.version)
|
||||
v_profile_data = v_profile["data"]
|
||||
|
||||
for _, data in enumerate(profile):
|
||||
@ -282,7 +282,7 @@ class CxbBase:
|
||||
|
||||
return {"index": index, "data": data1, "version": versionindex}
|
||||
|
||||
def handle_action_saveindex_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_saveindex_request(self, data: Dict) -> Dict:
|
||||
save_data = data["saveindex"]
|
||||
|
||||
try:
|
||||
@ -300,11 +300,11 @@ class CxbBase:
|
||||
|
||||
for value in data["saveindex"]["data"]:
|
||||
if "playedUserId" in value[1]:
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
data["saveindex"]["uid"], self.version, value[0], value[1]
|
||||
)
|
||||
if "mcode" not in value[1]:
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
data["saveindex"]["uid"], self.version, value[0], value[1]
|
||||
)
|
||||
if "shopId" in value:
|
||||
@ -335,7 +335,7 @@ class CxbBase:
|
||||
"index": value[0],
|
||||
}
|
||||
)
|
||||
self.data.score.put_best_score(
|
||||
await self.data.score.put_best_score(
|
||||
data["saveindex"]["uid"],
|
||||
song_json["mcode"],
|
||||
self.version,
|
||||
@ -360,32 +360,32 @@ class CxbBase:
|
||||
|
||||
for index, value in enumerate(data["saveindex"]["data"]):
|
||||
if int(data["saveindex"]["index"][index]) == 101:
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
aimeId, self.version, data["saveindex"]["index"][index], value
|
||||
)
|
||||
if (
|
||||
int(data["saveindex"]["index"][index]) >= 700000
|
||||
and int(data["saveindex"]["index"][index]) <= 701000
|
||||
):
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
aimeId, self.version, data["saveindex"]["index"][index], value
|
||||
)
|
||||
if (
|
||||
int(data["saveindex"]["index"][index]) >= 500
|
||||
and int(data["saveindex"]["index"][index]) <= 510
|
||||
):
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
aimeId, self.version, data["saveindex"]["index"][index], value
|
||||
)
|
||||
if "playedUserId" in value:
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
aimeId,
|
||||
self.version,
|
||||
data["saveindex"]["index"][index],
|
||||
json.loads(value),
|
||||
)
|
||||
if "mcode" not in value and "normalCR" not in value:
|
||||
self.data.profile.put_profile(
|
||||
await self.data.profile.put_profile(
|
||||
aimeId,
|
||||
self.version,
|
||||
data["saveindex"]["index"][index],
|
||||
@ -437,16 +437,16 @@ class CxbBase:
|
||||
}
|
||||
)
|
||||
|
||||
self.data.score.put_best_score(
|
||||
await self.data.score.put_best_score(
|
||||
aimeId, data1["mcode"], self.version, indexSongList[i], songCode[0]
|
||||
)
|
||||
i += 1
|
||||
return {}
|
||||
|
||||
def handle_action_sprankreq_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_sprankreq_request(self, data: Dict) -> Dict:
|
||||
uid = data["sprankreq"]["uid"]
|
||||
self.logger.info(f"Get best rankings for {uid}")
|
||||
p = self.data.score.get_best_rankings(uid)
|
||||
p = await self.data.score.get_best_rankings(uid)
|
||||
|
||||
rankList: List[Dict[str, Any]] = []
|
||||
|
||||
@ -475,16 +475,16 @@ class CxbBase:
|
||||
"rankx": [1, 1, 1],
|
||||
}
|
||||
|
||||
def handle_action_getadv_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_getadv_request(self, data: Dict) -> Dict:
|
||||
return {"data": [{"r": "1", "i": "100300", "c": "20"}]}
|
||||
|
||||
def handle_action_getmsg_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_getmsg_request(self, data: Dict) -> Dict:
|
||||
return {"msgs": []}
|
||||
|
||||
def handle_auth_logout_request(self, data: Dict) -> Dict:
|
||||
async def handle_auth_logout_request(self, data: Dict) -> Dict:
|
||||
return {"auth": True}
|
||||
|
||||
def handle_action_rankreg_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_rankreg_request(self, data: Dict) -> Dict:
|
||||
uid = data["rankreg"]["uid"]
|
||||
self.logger.info(f"Put {len(data['rankreg']['data'])} rankings for {uid}")
|
||||
|
||||
@ -492,7 +492,7 @@ class CxbBase:
|
||||
# REV S2
|
||||
if "clear" in rid:
|
||||
try:
|
||||
self.data.score.put_ranking(
|
||||
await self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
song_id=int(rid["sc"][1]),
|
||||
@ -500,7 +500,7 @@ class CxbBase:
|
||||
clear=rid["clear"],
|
||||
)
|
||||
except Exception:
|
||||
self.data.score.put_ranking(
|
||||
await self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
song_id=0,
|
||||
@ -510,7 +510,7 @@ class CxbBase:
|
||||
# REV
|
||||
else:
|
||||
try:
|
||||
self.data.score.put_ranking(
|
||||
await self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
song_id=int(rid["sc"][1]),
|
||||
@ -518,7 +518,7 @@ class CxbBase:
|
||||
clear=0,
|
||||
)
|
||||
except Exception:
|
||||
self.data.score.put_ranking(
|
||||
await self.data.score.put_ranking(
|
||||
user_id=uid,
|
||||
rev_id=int(rid["rid"]),
|
||||
song_id=0,
|
||||
@ -527,15 +527,15 @@ class CxbBase:
|
||||
)
|
||||
return {}
|
||||
|
||||
def handle_action_addenergy_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_addenergy_request(self, data: Dict) -> Dict:
|
||||
uid = data["addenergy"]["uid"]
|
||||
self.logger.info(f"Add energy to user {uid}")
|
||||
profile = self.data.profile.get_profile_index(0, uid, self.version)
|
||||
profile = await self.data.profile.get_profile_index(0, uid, self.version)
|
||||
data1 = profile["data"]
|
||||
p = self.data.item.get_energy(uid)
|
||||
p = await self.data.item.get_energy(uid)
|
||||
|
||||
if not p:
|
||||
self.data.item.put_energy(uid, 5)
|
||||
await self.data.item.put_energy(uid, 5)
|
||||
|
||||
return {
|
||||
"class": data1["myClass"],
|
||||
@ -548,7 +548,7 @@ class CxbBase:
|
||||
energy = p["energy"]
|
||||
|
||||
newenergy = int(energy) + 5
|
||||
self.data.item.put_energy(uid, newenergy)
|
||||
await self.data.item.put_energy(uid, newenergy)
|
||||
|
||||
if int(energy) <= 995:
|
||||
array.append(
|
||||
@ -570,10 +570,10 @@ class CxbBase:
|
||||
)
|
||||
return array[0]
|
||||
|
||||
def handle_action_eventreq_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_eventreq_request(self, data: Dict) -> Dict:
|
||||
self.logger.info(data)
|
||||
return {"eventreq": ""}
|
||||
|
||||
def handle_action_stampreq_request(self, data: Dict) -> Dict:
|
||||
async def handle_action_stampreq_request(self, data: Dict) -> Dict:
|
||||
self.logger.info(data)
|
||||
return {"stampreq": ""}
|
@ -18,6 +18,12 @@ class CxbServerConfig:
self.__config, "cxb", "server", "loglevel", default="info"
)
)

@property
def use_https(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "cxb", "server", "use_https", default=True
)


class CxbConfig(dict):
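Note: the new use_https flag is resolved through CoreConfig.get_config_field with a default of True. A minimal sketch of how such a nested lookup behaves (the helper below is an assumed stand-in, not the real CoreConfig implementation):

from typing import Any, Dict

def get_config_field(cfg: Dict, *path: str, default: Any = None) -> Any:
    # Walk nested keys ("cxb" -> "server" -> "use_https"), falling back to the default.
    node: Any = cfg
    for key in path:
        if not isinstance(node, dict) or key not in node:
            return default
        node = node[key]
    return node

# A cxb.yaml parsed into a dict that predates the new key still yields True:
cfg = {"cxb": {"server": {"enable": True, "loglevel": "info"}}}
assert get_config_field(cfg, "cxb", "server", "use_https", default=True) is True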
@ -1,4 +1,6 @@
from twisted.web.http import Request
from starlette.requests import Request
from starlette.routing import Route
from starlette.responses import Response, JSONResponse
import traceback
import sys
import yaml
@ -11,7 +13,7 @@ from typing import Dict, Tuple, List
from os import path

from core.config import CoreConfig
from core.title import BaseServlet
from core.title import BaseServlet, JSONResponseNoASCII
from core.utils import Utils
from .config import CxbConfig
from .const import CxbConstants
@ -62,6 +64,14 @@ class CxbServlet(BaseServlet):
CxbRevSunriseS2(core_cfg, self.game_cfg),
]

def get_routes(self) -> List[Route]:
return [
Route("/data", self.handle_data, methods=['POST']),
Route("/action", self.handle_action, methods=['POST']),
Route("/v2/action", self.handle_action, methods=['POST']),
Route("/auth", self.handle_auth, methods=['POST']),
]

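Note: get_routes now hands back plain Starlette Route objects, which the core title server presumably mounts into one application. A minimal sketch of that mounting step using only public Starlette APIs (the servlet below is a simplified stand-in, not the real CxbServlet):

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response
from starlette.routing import Route

class DummyServlet:
    # Stand-in for a title servlet exposing get_routes(), mirroring the hunk above.
    async def handle_data(self, request: Request) -> Response:
        body = await request.body()  # request bodies are awaited, as in the new preprocess()
        return PlainTextResponse(f"got {len(body)} bytes")

    def get_routes(self) -> list:
        return [Route("/data", self.handle_data, methods=["POST"])]

# Collect every servlet's routes into one ASGI app, served by e.g. uvicorn:
app = Starlette(routes=DummyServlet().get_routes())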
@classmethod
|
||||
def is_game_enabled(cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str) -> bool:
|
||||
game_cfg = CxbConfig()
|
||||
@ -79,35 +89,22 @@ class CxbServlet(BaseServlet):
|
||||
title_port_int = Utils.get_title_port(self.core_cfg)
|
||||
title_port_ssl_int = Utils.get_title_port_ssl(self.core_cfg)
|
||||
|
||||
proto = "https" if title_port_ssl_int != 443 else "http"
|
||||
proto = "https" if self.game_cfg.server.use_https else "http"
|
||||
|
||||
if proto == "https":
|
||||
t_port = f":{title_port_ssl_int}" if title_port_ssl_int and not self.core_cfg.server.is_using_proxy else ""
|
||||
t_port = f":{title_port_ssl_int}" if title_port_ssl_int != 443 else ""
|
||||
|
||||
else:
|
||||
t_port = f":{title_port_int}" if title_port_int and not self.core_cfg.server.is_using_proxy else ""
|
||||
t_port = f":{title_port_int}" if title_port_int != 80 else ""
|
||||
|
||||
return (
|
||||
f"{proto}://{self.core_cfg.title.hostname}{t_port}",
|
||||
"",
|
||||
)
|
||||
|
||||
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
|
||||
return (
|
||||
[],
|
||||
[
|
||||
("handle_data", "/data", {}),
|
||||
("handle_action", "/action", {}),
|
||||
("handle_action", "/v2/action", {}),
|
||||
("handle_auth", "/auth", {}),
|
||||
]
|
||||
)
|
||||
|
||||
def preprocess(self, req: Request) -> Dict:
|
||||
try:
|
||||
req_bytes = req.content.getvalue()
|
||||
except:
|
||||
req_bytes = req.content.read() # Can we just use this one?
|
||||
|
||||
async def preprocess(self, req: Request) -> Dict:
|
||||
req_bytes = await req.body()
|
||||
|
||||
try:
|
||||
req_json: Dict = json.loads(req_bytes)
|
||||
@ -126,8 +123,8 @@ class CxbServlet(BaseServlet):
|
||||
|
||||
return req_json
|
||||
|
||||
def handle_data(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
req_json = self.preprocess(request)
|
||||
async def handle_data(self, request: Request) -> bytes:
|
||||
req_json = await self.preprocess(request)
|
||||
func_to_find = "handle_data_"
|
||||
version_string = "Base"
|
||||
internal_ver = 0
|
||||
@ -135,7 +132,7 @@ class CxbServlet(BaseServlet):
|
||||
|
||||
if req_json == {}:
|
||||
self.logger.warning(f"Empty json request to /data")
|
||||
return b""
|
||||
return Response()
|
||||
|
||||
subcmd = list(req_json.keys())[0]
|
||||
if subcmd == "dldate":
|
||||
@ -145,14 +142,14 @@ class CxbServlet(BaseServlet):
|
||||
or "filetype" not in req_json["dldate"]
|
||||
):
|
||||
self.logger.warning(f"Malformed dldate request: {req_json}")
|
||||
return b""
|
||||
return Response()
|
||||
|
||||
filetype = req_json["dldate"]["filetype"]
|
||||
filetype_split = filetype.split("/")
|
||||
|
||||
if len(filetype_split) < 2 or not filetype_split[0].isnumeric():
|
||||
self.logger.warning(f"Malformed dldate request: {req_json}")
|
||||
return b""
|
||||
return Response()
|
||||
|
||||
version = int(filetype_split[0])
|
||||
filename = filetype_split[len(filetype_split) - 1]
|
||||
@ -184,7 +181,7 @@ class CxbServlet(BaseServlet):
|
||||
|
||||
if not hasattr(self.versions[internal_ver], func_to_find):
|
||||
self.logger.warn(f"{version_string} has no handler for filetype {filetype} / {func_to_find}")
|
||||
return({"data":""})
|
||||
return JSONResponse({"data":""})
|
||||
|
||||
self.logger.info(f"{version_string} request for filetype {filetype}")
|
||||
self.logger.debug(req_json)
|
||||
@ -192,7 +189,7 @@ class CxbServlet(BaseServlet):
|
||||
handler = getattr(self.versions[internal_ver], func_to_find)
|
||||
|
||||
try:
|
||||
resp = handler(req_json)
|
||||
resp = await handler(req_json)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error handling request for file {filetype} - {e}")
|
||||
@ -201,19 +198,19 @@ class CxbServlet(BaseServlet):
|
||||
traceback.print_exception(tp, val, tb, limit=1)
|
||||
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
|
||||
traceback.print_exception(tp, val, tb, limit=1, file=f)
|
||||
return ""
|
||||
return Response()
|
||||
|
||||
self.logger.debug(f"{version_string} Response {resp}")
|
||||
return json.dumps(resp, ensure_ascii=False).encode("utf-8")
|
||||
return JSONResponseNoASCII(resp)
|
||||
|
||||
def handle_action(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
req_json = self.preprocess(request)
|
||||
async def handle_action(self, request: Request) -> bytes:
|
||||
req_json = await self.preprocess(request)
|
||||
subcmd = list(req_json.keys())[0]
|
||||
func_to_find = f"handle_action_{subcmd}_request"
|
||||
|
||||
if not hasattr(self.versions[0], func_to_find):
|
||||
self.logger.warn(f"No handler for action {subcmd} request")
|
||||
return ""
|
||||
return Response()
|
||||
|
||||
self.logger.info(f"Action {subcmd} Request")
|
||||
self.logger.debug(req_json)
|
||||
@ -221,7 +218,7 @@ class CxbServlet(BaseServlet):
|
||||
handler = getattr(self.versions[0], func_to_find)
|
||||
|
||||
try:
|
||||
resp = handler(req_json)
|
||||
resp = await handler(req_json)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error handling action {subcmd} request - {e}")
|
||||
@ -230,19 +227,19 @@ class CxbServlet(BaseServlet):
|
||||
traceback.print_exception(tp, val, tb, limit=1)
|
||||
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
|
||||
traceback.print_exception(tp, val, tb, limit=1, file=f)
|
||||
return ""
|
||||
return Response()
|
||||
|
||||
self.logger.debug(f"Response {resp}")
|
||||
return json.dumps(resp, ensure_ascii=False).encode("utf-8")
|
||||
return JSONResponseNoASCII(resp)
|
||||
|
||||
def handle_auth(self, request: Request, game_code: str, matchers: Dict) -> bytes:
|
||||
req_json = self.preprocess(request)
|
||||
async def handle_auth(self, request: Request) -> bytes:
|
||||
req_json = await self.preprocess(request)
|
||||
subcmd = list(req_json.keys())[0]
|
||||
func_to_find = f"handle_auth_{subcmd}_request"
|
||||
|
||||
if not hasattr(self.versions[0], func_to_find):
|
||||
self.logger.warn(f"No handler for auth {subcmd} request")
|
||||
return ""
|
||||
return Response()
|
||||
|
||||
self.logger.info(f"Action {subcmd} Request")
|
||||
self.logger.debug(req_json)
|
||||
@ -250,7 +247,7 @@ class CxbServlet(BaseServlet):
|
||||
handler = getattr(self.versions[0], func_to_find)
|
||||
|
||||
try:
|
||||
resp = handler(req_json)
|
||||
resp = await handler(req_json)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error handling auth {subcmd} request - {e}")
|
||||
@ -259,7 +256,7 @@ class CxbServlet(BaseServlet):
|
||||
traceback.print_exception(tp, val, tb, limit=1)
|
||||
with open("{0}/{1}.log".format(self.core_cfg.server.log_dir, "cxb"), "a") as f:
|
||||
traceback.print_exception(tp, val, tb, limit=1, file=f)
|
||||
return ""
|
||||
return Response()
|
||||
|
||||
self.logger.debug(f"Response {resp}")
|
||||
return json.dumps(resp, ensure_ascii=False).encode("utf-8")
|
||||
return JSONResponseNoASCII(resp)
|
||||
|
@ -1,6 +1,5 @@
|
||||
from typing import Optional, Dict, List
|
||||
from os import walk, path
|
||||
import urllib
|
||||
from typing import Optional
|
||||
from os import path
|
||||
import csv
|
||||
|
||||
from read import BaseReader
|
||||
@ -8,7 +7,6 @@ from core.config import CoreConfig
|
||||
from titles.cxb.database import CxbData
|
||||
from titles.cxb.const import CxbConstants
|
||||
|
||||
|
||||
class CxbReader(BaseReader):
|
||||
def __init__(
|
||||
self,
|
||||
@ -29,17 +27,14 @@ class CxbReader(BaseReader):
|
||||
self.logger.error(f"Invalid project cxb version {version}")
|
||||
exit(1)
|
||||
|
||||
def read(self) -> None:
|
||||
pull_bin_ram = True
|
||||
async def read(self) -> None:
|
||||
if path.exists(self.bin_dir):
|
||||
await self.read_csv(self.bin_dir)
|
||||
|
||||
else:
|
||||
self.logger.warn(f"{self.bin_dir} does not exist, nothing to import")
|
||||
|
||||
if not path.exists(f"{self.bin_dir}"):
|
||||
self.logger.warning(f"Couldn't find csv file in {self.bin_dir}, skipping")
|
||||
pull_bin_ram = False
|
||||
|
||||
if pull_bin_ram:
|
||||
self.read_csv(f"{self.bin_dir}")
|
||||
|
||||
def read_csv(self, bin_dir: str) -> None:
|
||||
async def read_csv(self, bin_dir: str) -> None:
|
||||
self.logger.info(f"Read csv from {bin_dir}")
|
||||
|
||||
try:
|
||||
@ -55,7 +50,7 @@ class CxbReader(BaseReader):
|
||||
|
||||
if not "N/A" in row["standard"]:
|
||||
self.logger.info(f"Added song {song_id} chart 0")
|
||||
self.data.static.put_music(
|
||||
await self.data.static.put_music(
|
||||
self.version,
|
||||
song_id,
|
||||
index,
|
||||
@ -71,7 +66,7 @@ class CxbReader(BaseReader):
|
||||
)
|
||||
if not "N/A" in row["hard"]:
|
||||
self.logger.info(f"Added song {song_id} chart 1")
|
||||
self.data.static.put_music(
|
||||
await self.data.static.put_music(
|
||||
self.version,
|
||||
song_id,
|
||||
index,
|
||||
@ -83,7 +78,7 @@ class CxbReader(BaseReader):
|
||||
)
|
||||
if not "N/A" in row["master"]:
|
||||
self.logger.info(f"Added song {song_id} chart 2")
|
||||
self.data.static.put_music(
|
||||
await self.data.static.put_music(
|
||||
self.version,
|
||||
song_id,
|
||||
index,
|
||||
@ -97,7 +92,7 @@ class CxbReader(BaseReader):
|
||||
)
|
||||
if not "N/A" in row["unlimited"]:
|
||||
self.logger.info(f"Added song {song_id} chart 3")
|
||||
self.data.static.put_music(
|
||||
await self.data.static.put_music(
|
||||
self.version,
|
||||
song_id,
|
||||
index,
|
||||
@ -113,7 +108,7 @@ class CxbReader(BaseReader):
|
||||
)
|
||||
if not "N/A" in row["easy"]:
|
||||
self.logger.info(f"Added song {song_id} chart 4")
|
||||
self.data.static.put_music(
|
||||
await self.data.static.put_music(
|
||||
self.version,
|
||||
song_id,
|
||||
index,
|
||||
|
@ -17,15 +17,15 @@ class CxbRev(CxbBase):
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = CxbConstants.VER_CROSSBEATS_REV
|
||||
|
||||
def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_putlog_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_putlog_request(self, data: Dict) -> Dict:
|
||||
if data["putlog"]["type"] == "ResultLog":
|
||||
score_data = json.loads(data["putlog"]["data"])
|
||||
userid = score_data["usid"]
|
||||
|
||||
self.data.score.put_playlog(
|
||||
await self.data.score.put_playlog(
|
||||
userid,
|
||||
score_data["mcode"],
|
||||
score_data["difficulty"],
|
||||
@ -45,7 +45,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": True}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss/MusicArchiveList.csv") as music:
|
||||
lines = music.readlines()
|
||||
@ -56,7 +56,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_icon_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_icon_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ItemListIcon\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Item/ItemArchiveList_Icon.csv", encoding="utf-8"
|
||||
@ -67,7 +67,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_skin_notes_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_skin_notes_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ItemListSkinNotes\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Item/ItemArchiveList_SkinNotes.csv", encoding="utf-8"
|
||||
@ -78,7 +78,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_skin_effect_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_skin_effect_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ItemListSkinEffect\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Item/ItemArchiveList_SkinEffect.csv", encoding="utf-8"
|
||||
@ -89,7 +89,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_skin_bg_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_skin_bg_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ItemListSkinBg\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Item/ItemArchiveList_SkinBg.csv", encoding="utf-8"
|
||||
@ -100,7 +100,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_title_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_title_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ItemListTitle\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Item/ItemList_Title.csv", encoding="shift-jis"
|
||||
@ -111,7 +111,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_music_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_music_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ShopListMusic\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Shop/ShopList_Music.csv", encoding="shift-jis"
|
||||
@ -122,7 +122,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_icon_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_icon_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ShopListIcon\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Shop/ShopList_Icon.csv", encoding="shift-jis"
|
||||
@ -133,7 +133,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_title_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_title_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ShopListTitle\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Shop/ShopList_Title.csv", encoding="shift-jis"
|
||||
@ -143,17 +143,17 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_shop_list_skin_hud_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_skin_hud_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_shop_list_skin_arrow_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_skin_arrow_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_shop_list_skin_hit_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_skin_hit_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_sale_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_sale_request(self, data: Dict) -> Dict:
|
||||
ret_str = "\r\n#ShopListSale\r\n"
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Shop/ShopList_Sale.csv", encoding="shift-jis"
|
||||
@ -163,11 +163,11 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
extra_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
with open(
|
||||
@ -178,14 +178,14 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss/NewsList.csv", encoding="UTF-8") as news:
|
||||
lines = news.readlines()
|
||||
@ -193,11 +193,11 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss/License_Offline.csv", encoding="UTF-8") as lic:
|
||||
lines = lic.readlines()
|
||||
@ -206,7 +206,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Course/CourseList.csv", encoding="UTF-8"
|
||||
@ -217,7 +217,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
# Removed the CSVs since the format isnt quite right
|
||||
extra_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
@ -230,7 +230,7 @@ class CxbRev(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss/MissionList.csv", encoding="shift-jis"
|
||||
@ -240,14 +240,14 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Event/EventArchiveList.csv", encoding="shift-jis"
|
||||
@ -257,39 +257,39 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_event_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_music_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_mission_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_mission_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_achievement_single_high_score_list_request(
|
||||
async def handle_data_event_achievement_single_high_score_list_request(
|
||||
self, data: Dict
|
||||
) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_achievement_single_accumulation_request(
|
||||
async def handle_data_event_achievement_single_accumulation_request(
|
||||
self, data: Dict
|
||||
) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_ranking_high_score_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_ranking_high_score_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_ranking_accumulation_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_ranking_accumulation_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_ranking_stamp_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_ranking_stamp_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_ranking_store_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_ranking_store_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_ranking_area_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_ranking_area_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss/Event/EventStampList.csv", encoding="shift-jis"
|
||||
@ -299,8 +299,8 @@ class CxbRev(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
|
||||
|
||||
def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
return {"data": True}
|
||||
|
@ -17,11 +17,11 @@ class CxbRevSunriseS1(CxbBase):
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S1
|
||||
|
||||
def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss1/MusicArchiveList.csv") as music:
|
||||
lines = music.readlines()
|
||||
@ -32,7 +32,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
|
||||
# ItemListIcon load
|
||||
ret_str = "#ItemListIcon\r\n"
|
||||
with open(
|
||||
@ -54,7 +54,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
|
||||
# ShopListIcon load
|
||||
ret_str = "#ShopListIcon\r\n"
|
||||
with open(
|
||||
@ -119,26 +119,26 @@ class CxbRevSunriseS1(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_oexxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_oexxxx_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss1/NewsList.csv", encoding="UTF-8") as news:
|
||||
lines = news.readlines()
|
||||
@ -146,14 +146,14 @@ class CxbRevSunriseS1(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_release_info_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_release_info_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_random_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_random_music_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss1/MusicArchiveList.csv") as music:
|
||||
lines = music.readlines()
|
||||
@ -167,7 +167,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss1/License.csv", encoding="UTF-8") as licenses:
|
||||
lines = licenses.readlines()
|
||||
@ -176,7 +176,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss1/Course/CourseList.csv", encoding="UTF-8"
|
||||
@ -187,7 +187,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
extra_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
with open(
|
||||
@ -198,16 +198,16 @@ class CxbRevSunriseS1(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_partner_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_partner_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
# Lord forgive me for the sins I am about to commit
|
||||
for i in range(0, 10):
|
||||
@ -226,7 +226,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
|
||||
partner_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = f"{partner_num},,{partner_num},1,10000,\r\n"
|
||||
with open(r"titles/cxb/data/rss1/Partner0000.csv") as partner:
|
||||
@ -235,13 +235,13 @@ class CxbRevSunriseS1(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
return {"data": True}
|
||||
|
||||
def handle_data_settings_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_settings_request(self, data: Dict) -> Dict:
|
||||
return {"data": "2,\r\n"}
|
||||
|
||||
def handle_data_story_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_story_list_request(self, data: Dict) -> Dict:
|
||||
# story id, story name, game version, start time, end time, course arc, unlock flag, song mcode for menu
|
||||
ret_str = "\r\n"
|
||||
ret_str += (
|
||||
@ -253,23 +253,23 @@ class CxbRevSunriseS1(CxbBase):
|
||||
ret_str += f"st0002,REMNANT,10104,1502127790,4096483201,Cs1000,-1,overcl,\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_stxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_stxxxx_request(self, data: Dict) -> Dict:
|
||||
story_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
for i in range(1, 11):
|
||||
ret_str += f"{i},st000{story_num}_{i-1},,,,,,,,,,,,,,,,1,,-1,1,\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": "Cs1032,1,1,1,1,1,1,1,1,1,1,\r\n"}
|
||||
|
||||
def handle_data_premium_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_premium_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": "1,,,,10,,,,,99,,,,,,,,,100,,\r\n"}
|
||||
|
||||
def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
|
||||
event_id = data["dldate"]["filetype"].split("/")[2]
|
||||
if "EventStampMapListCs1002" in event_id:
|
||||
return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
|
||||
@ -278,7 +278,7 @@ class CxbRevSunriseS1(CxbBase):
|
||||
else:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
event_id = data["dldate"]["filetype"].split("/")[2]
|
||||
if "EventStampMapListCs1002" in event_id:
|
||||
return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
|
||||
|
@ -17,11 +17,11 @@ class CxbRevSunriseS2(CxbBase):
|
||||
super().__init__(cfg, game_cfg)
|
||||
self.version = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2_OMNI
|
||||
|
||||
def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_path_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_music_list_request(self, data: Dict) -> Dict:
|
||||
version = data["dldate"]["filetype"].split("/")[0]
|
||||
ret_str = ""
|
||||
|
||||
@ -41,7 +41,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
|
||||
# ItemListIcon load
|
||||
ret_str = "#ItemListIcon\r\n"
|
||||
with open(
|
||||
@ -63,7 +63,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
|
||||
# ShopListIcon load
|
||||
ret_str = "#ShopListIcon\r\n"
|
||||
with open(
|
||||
@ -128,7 +128,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
|
||||
ret_str=""
|
||||
with open(r"titles/cxb/data/rss2/ExtraStageList.csv") as extra:
|
||||
lines = extra.readlines()
|
||||
@ -136,19 +136,19 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return({"data":ret_str})
|
||||
|
||||
def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_exxxxx_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_oexxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_oexxxx_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_free_coupon_request(self, data: Dict) -> Dict:
|
||||
ret_str=""
|
||||
with open(r"titles/cxb/data/rss2/FreeCoupon.csv") as coupon:
|
||||
lines = coupon.readlines()
|
||||
@ -157,7 +157,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return({"data":ret_str})
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_news_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss2/NewsList.csv", encoding="UTF-8") as news:
|
||||
lines = news.readlines()
|
||||
@ -165,14 +165,14 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_tips_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_release_info_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_release_info_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_random_music_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_random_music_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss2/MusicArchiveList.csv") as music:
|
||||
lines = music.readlines()
|
||||
@ -186,7 +186,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_license_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(r"titles/cxb/data/rss2/License.csv", encoding="UTF-8") as licenses:
|
||||
lines = licenses.readlines()
|
||||
@ -195,7 +195,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_course_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
with open(
|
||||
r"titles/cxb/data/rss2/Course/CourseList.csv", encoding="UTF-8"
|
||||
@ -206,7 +206,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_csxxxx_request(self, data: Dict) -> Dict:
|
||||
extra_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
with open(
|
||||
@ -217,16 +217,16 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_partner_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_partner_list_request(self, data: Dict) -> Dict:
|
||||
ret_str = ""
|
||||
# Lord forgive me for the sins I am about to commit
|
||||
for i in range(0, 10):
|
||||
@ -245,7 +245,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
return {"data": ret_str}
|
||||
|
||||
@cached(lifetime=86400)
|
||||
def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
|
||||
partner_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = f"{partner_num},,{partner_num},1,10000,\r\n"
|
||||
with open(r"titles/cxb/data/rss2/Partner0000.csv") as partner:
|
||||
@ -254,13 +254,13 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{line[:-1]}\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_server_state_request(self, data: Dict) -> Dict:
|
||||
return {"data": True}
|
||||
|
||||
def handle_data_settings_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_settings_request(self, data: Dict) -> Dict:
|
||||
return {"data": "2,\r\n"}
|
||||
|
||||
def handle_data_story_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_story_list_request(self, data: Dict) -> Dict:
|
||||
# story id, story name, game version, start time, end time, course arc, unlock flag, song mcode for menu
|
||||
ret_str = "\r\n"
|
||||
ret_str += (
|
||||
@ -272,7 +272,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"st0002,REMNANT,10104,1502127790,4096483201,Cs1000,-1,overcl,\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_stxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_stxxxx_request(self, data: Dict) -> Dict:
|
||||
story_num = int(data["dldate"]["filetype"][-4:])
|
||||
ret_str = ""
|
||||
# Each stories appears to have 10 pieces based on the wiki but as on how they are set.... no clue
|
||||
@ -280,18 +280,18 @@ class CxbRevSunriseS2(CxbBase):
|
||||
ret_str += f"{i},st000{story_num}_{i-1},,,,,,,,,,,,,,,,1,,-1,1,\r\n"
|
||||
return {"data": ret_str}
|
||||
|
||||
def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": "Cs1002,1,1,1,1,1,1,1,1,1,1,\r\n"}
|
||||
|
||||
def handle_data_premium_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_premium_list_request(self, data: Dict) -> Dict:
|
||||
return {"data": "1,,,,10,,,,,99,,,,,,,,,100,,\r\n"}
|
||||
|
||||
def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_list_request(self, data: Dict) -> Dict:
|
||||
return {
|
||||
"data": "Cs4001,0,10000,1601510400,1604188799,1,nv2006,1,\r\nCs4005,0,10000,1609459200,1615766399,1,nv2006,1,\r\n"
|
||||
}
|
||||
|
||||
def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
|
||||
event_id = data["dldate"]["filetype"].split("/")[2]
|
||||
if "Cs4001" in event_id:
|
||||
return {
|
||||
@ -308,7 +308,7 @@ class CxbRevSunriseS2(CxbBase):
|
||||
else:
|
||||
return {"data": ""}
|
||||
|
||||
def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
async def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
|
||||
event_id = data["dldate"]["filetype"].split("/")[2]
|
||||
if "EventStampMapListCs1002" in event_id:
|
||||
return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
|
||||
|
@ -19,12 +19,12 @@ energy = Table(


class CxbItemData(BaseData):
def put_energy(self, user_id: int, rev_energy: int) -> Optional[int]:
async def put_energy(self, user_id: int, rev_energy: int) -> Optional[int]:
sql = insert(energy).values(user=user_id, energy=rev_energy)

conflict = sql.on_duplicate_key_update(energy=rev_energy)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} failed to insert item! user: {user_id}, energy: {rev_energy}"
@ -33,10 +33,10 @@ class CxbItemData(BaseData):

return result.lastrowid

def get_energy(self, user_id: int) -> Optional[Dict]:
async def get_energy(self, user_id: int) -> Optional[Dict]:
sql = energy.select(and_(energy.c.user == user_id))

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
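Note: across these data classes every query now goes through an awaited self.execute(...). BaseData.execute itself is not part of these hunks; purely as an illustration, an awaitable execute helper could be built on SQLAlchemy's asyncio extension roughly like this (the connection URL, driver, and error handling are assumptions):

from typing import Any, Optional

from sqlalchemy import text
from sqlalchemy.engine import CursorResult
from sqlalchemy.ext.asyncio import create_async_engine

class AsyncBaseData:
    def __init__(self, dsn: str = "mysql+aiomysql://user:pass@localhost/aime") -> None:
        # Any async-capable SQLAlchemy URL works; aiomysql is assumed here.
        self.engine = create_async_engine(dsn, pool_recycle=3600)

    async def execute(self, sql: Any) -> Optional[CursorResult]:
        # Mirrors the awaited self.execute(...) calls above: run the statement in its
        # own transaction and hand back the result, or None on failure.
        try:
            async with self.engine.begin() as conn:
                return await conn.execute(sql)
        except Exception:
            return None

# usage inside a coroutine:
#   result = await self.execute(text("SELECT 1"))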
@ -21,7 +21,7 @@ profile = Table(


class CxbProfileData(BaseData):
def put_profile(
async def put_profile(
self, user_id: int, version: int, index: int, data: JSON
) -> Optional[int]:
sql = insert(profile).values(
@ -30,7 +30,7 @@ class CxbProfileData(BaseData):

conflict = sql.on_duplicate_key_update(index=index, data=data)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} failed to update! user: {user_id}, index: {index}, data: {data}"
@ -39,7 +39,7 @@ class CxbProfileData(BaseData):

return result.lastrowid

def get_profile(self, aime_id: int, version: int) -> Optional[List[Dict]]:
async def get_profile(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""
Given a game version and either a profile or aime id, return the profile
"""
@ -47,12 +47,12 @@ class CxbProfileData(BaseData):
and_(profile.c.version == version, profile.c.user == aime_id)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_profile_index(
async def get_profile_index(
self, index: int, aime_id: int = None, version: int = None
) -> Optional[Dict]:
"""
@ -72,7 +72,7 @@ class CxbProfileData(BaseData):
)
return None

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -58,7 +58,7 @@ ranking = Table(


class CxbScoreData(BaseData):
def put_best_score(
async def put_best_score(
self,
user_id: int,
song_mcode: str,
@ -79,7 +79,7 @@ class CxbScoreData(BaseData):

conflict = sql.on_duplicate_key_update(data=sql.inserted.data)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} failed to insert best score! profile: {user_id}, song: {song_mcode}, data: {data}"
@ -88,7 +88,7 @@ class CxbScoreData(BaseData):

return result.lastrowid

def put_playlog(
async def put_playlog(
self,
user_id: int,
song_mcode: str,
@ -125,7 +125,7 @@ class CxbScoreData(BaseData):
combo=combo,
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.error(
f"{__name__} failed to insert playlog! profile: {user_id}, song: {song_mcode}, chart: {chart_id}"
@ -134,7 +134,7 @@ class CxbScoreData(BaseData):

return result.lastrowid

def put_ranking(
async def put_ranking(
self, user_id: int, rev_id: int, song_id: int, score: int, clear: int
) -> Optional[int]:
"""
@ -151,7 +151,7 @@ class CxbScoreData(BaseData):

conflict = sql.on_duplicate_key_update(score=score)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} failed to insert ranking log! profile: {user_id}, score: {score}, clear: {clear}"
@ -160,28 +160,28 @@ class CxbScoreData(BaseData):

return result.lastrowid

def get_best_score(self, user_id: int, song_mcode: int) -> Optional[Dict]:
async def get_best_score(self, user_id: int, song_mcode: int) -> Optional[Dict]:
sql = score.select(
and_(score.c.user == user_id, score.c.song_mcode == song_mcode)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

def get_best_scores(self, user_id: int) -> Optional[Dict]:
async def get_best_scores(self, user_id: int) -> Optional[Dict]:
sql = score.select(score.c.user == user_id)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_best_rankings(self, user_id: int) -> Optional[List[Dict]]:
async def get_best_rankings(self, user_id: int) -> Optional[List[Dict]]:
sql = ranking.select(ranking.c.user == user_id)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
@ -29,7 +29,7 @@ music = Table(


class CxbStaticData(BaseData):
def put_music(
async def put_music(
self,
version: int,
mcode: str,

@ -55,12 +55,12 @@ class CxbStaticData(BaseData):
title=title, artist=artist, category=category, level=level
)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid

def get_music(
async def get_music(
self, version: int, song_id: Optional[int] = None
) -> Optional[List[Row]]:
if song_id is None:

@ -73,12 +73,12 @@ class CxbStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_music_chart(
async def get_music_chart(
self, version: int, song_id: int, chart_id: int
) -> Optional[List[Row]]:
sql = select(music).where(

@ -89,7 +89,7 @@ class CxbStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
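The schema hunks above all follow one pattern: each data-access method becomes `async def` and every `self.execute(...)` call is awaited. A minimal sketch of that pattern, assuming an async SQLAlchemy engine comparable to what the project's `BaseData.execute` wraps (the `ExampleData` class, the DSN argument and the dict conversion are illustrative assumptions, not the repository's code):

from typing import Optional
from sqlalchemy import Table, select
from sqlalchemy.ext.asyncio import create_async_engine

class ExampleData:
    # Hypothetical stand-in for BaseData: only shows the await pattern.
    def __init__(self, dsn: str) -> None:
        self.engine = create_async_engine(dsn)

    async def execute(self, stmt):
        # open a connection, run the statement, commit, hand the result back
        async with self.engine.connect() as conn:
            result = await conn.execute(stmt)
            await conn.commit()
            return result

    async def get_profile(self, profile: Table, aime_id: int, version: int) -> Optional[dict]:
        sql = select(profile).where(
            (profile.c.version == version) & (profile.c.user == aime_id)
        )
        result = await self.execute(sql)  # awaited, as in the hunks above
        if result is None:
            return None
        row = result.fetchone()
        return dict(row._mapping) if row is not None else None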
@ -7,4 +7,3 @@ index = DivaServlet
database = DivaData
reader = DivaReader
game_codes = [DivaConstants.GAME_CODE]
current_schema_version = 6
@ -1,8 +1,7 @@
import datetime
from typing import Any, List, Dict
from typing import Dict
import logging
import json
import urllib
import urllib.parse
from threading import Thread

from core.config import CoreConfig

@ -24,13 +23,13 @@ class DivaBase:
dt = datetime.datetime.now()
self.time_lut = urllib.parse.quote(dt.strftime("%Y-%m-%d %H:%M:%S:16.0"))

def handle_test_request(self, data: Dict) -> Dict:
async def handle_test_request(self, data: Dict) -> Dict:
return ""

def handle_game_init_request(self, data: Dict) -> Dict:
async def handle_game_init_request(self, data: Dict) -> Dict:
return f""

def handle_attend_request(self, data: Dict) -> Dict:
async def handle_attend_request(self, data: Dict) -> Dict:
encoded = "&"
params = {
"atnd_prm1": "0,1,1,0,0,0,1,0,100,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,100",

@ -44,7 +43,7 @@ class DivaBase:

return encoded

def handle_ping_request(self, data: Dict) -> Dict:
async def handle_ping_request(self, data: Dict) -> Dict:
encoded = "&"
params = {
"ping_b_msg": f"Welcome to {self.core_cfg.server.name} network!",

@ -89,7 +88,7 @@ class DivaBase:

return encoded

def handle_pv_list_request(self, data: Dict) -> Dict:
async def handle_pv_list_request(self, data: Dict) -> Dict:
pvlist = ""
with open(r"titles/diva/data/PvList0.dat", encoding="utf-8") as shop:
lines = shop.readlines()

@ -126,10 +125,10 @@ class DivaBase:

return response

def handle_shop_catalog_request(self, data: Dict) -> Dict:
async def handle_shop_catalog_request(self, data: Dict) -> Dict:
catalog = ""

shopList = self.data.static.get_enabled_shops(self.version)
shopList = await self.data.static.get_enabled_shops(self.version)
if not shopList:
with open(r"titles/diva/data/ShopCatalog.dat", encoding="utf-8") as shop:
lines = shop.readlines()

@ -164,9 +163,9 @@ class DivaBase:

return response

def handle_buy_module_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
module = self.data.static.get_enabled_shop(self.version, int(data["mdl_id"]))
async def handle_buy_module_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)
module = await self.data.static.get_enabled_shop(self.version, int(data["mdl_id"]))

# make sure module is available to purchase
if not module:

@ -178,11 +177,11 @@ class DivaBase:

new_vcld_pts = profile["vcld_pts"] - int(data["mdl_price"])

self.data.profile.update_profile(profile["user"], vcld_pts=new_vcld_pts)
self.data.module.put_module(data["pd_id"], self.version, data["mdl_id"])
await self.data.profile.update_profile(profile["user"], vcld_pts=new_vcld_pts)
await self.data.module.put_module(data["pd_id"], self.version, data["mdl_id"])

# generate the mdl_have string
mdl_have = self.data.module.get_modules_have_string(data["pd_id"], self.version)
mdl_have = await self.data.module.get_modules_have_string(data["pd_id"], self.version)

response = "&shp_rslt=1"
response += f"&mdl_id={data['mdl_id']}"

@ -191,10 +190,10 @@ class DivaBase:

return response

def handle_cstmz_itm_ctlg_request(self, data: Dict) -> Dict:
async def handle_cstmz_itm_ctlg_request(self, data: Dict) -> Dict:
catalog = ""

itemList = self.data.static.get_enabled_items(self.version)
itemList = await self.data.static.get_enabled_items(self.version)
if not itemList:
with open(r"titles/diva/data/ItemCatalog.dat", encoding="utf-8") as item:
lines = item.readlines()

@ -229,9 +228,9 @@ class DivaBase:

return response

def handle_buy_cstmz_itm_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
item = self.data.static.get_enabled_item(
async def handle_buy_cstmz_itm_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)
item = await self.data.static.get_enabled_item(
self.version, int(data["cstmz_itm_id"])
)

@ -246,14 +245,14 @@ class DivaBase:
new_vcld_pts = profile["vcld_pts"] - int(data["cstmz_itm_price"])

# save new Vocaloid Points balance
self.data.profile.update_profile(profile["user"], vcld_pts=new_vcld_pts)
await self.data.profile.update_profile(profile["user"], vcld_pts=new_vcld_pts)

self.data.customize.put_customize_item(
await self.data.customize.put_customize_item(
data["pd_id"], self.version, data["cstmz_itm_id"]
)

# generate the cstmz_itm_have string
cstmz_itm_have = self.data.customize.get_customize_items_have_string(
cstmz_itm_have = await self.data.customize.get_customize_items_have_string(
data["pd_id"], self.version
)

@ -264,7 +263,7 @@ class DivaBase:

return response

def handle_festa_info_request(self, data: Dict) -> Dict:
async def handle_festa_info_request(self, data: Dict) -> Dict:
encoded = "&"
params = {
"fi_id": "1,2",

@ -287,7 +286,7 @@ class DivaBase:

return encoded

def handle_contest_info_request(self, data: Dict) -> Dict:
async def handle_contest_info_request(self, data: Dict) -> Dict:
response = ""

response += f"&ci_lut={self.time_lut}"

@ -295,10 +294,10 @@ class DivaBase:

return response

def handle_qst_inf_request(self, data: Dict) -> Dict:
async def handle_qst_inf_request(self, data: Dict) -> Dict:
quest = ""

questList = self.data.static.get_enabled_quests(self.version)
questList = await self.data.static.get_enabled_quests(self.version)
if not questList:
with open(r"titles/diva/data/QuestInfo.dat", encoding="utf-8") as shop:
lines = shop.readlines()

@ -345,45 +344,45 @@ class DivaBase:

return response

def handle_nv_ranking_request(self, data: Dict) -> Dict:
async def handle_nv_ranking_request(self, data: Dict) -> Dict:
return f""

def handle_ps_ranking_request(self, data: Dict) -> Dict:
async def handle_ps_ranking_request(self, data: Dict) -> Dict:
return f""

def handle_ng_word_request(self, data: Dict) -> Dict:
async def handle_ng_word_request(self, data: Dict) -> Dict:
return f""

def handle_rmt_wp_list_request(self, data: Dict) -> Dict:
async def handle_rmt_wp_list_request(self, data: Dict) -> Dict:
return f""

def handle_pv_def_chr_list_request(self, data: Dict) -> Dict:
async def handle_pv_def_chr_list_request(self, data: Dict) -> Dict:
return f""

def handle_pv_ng_mdl_list_request(self, data: Dict) -> Dict:
async def handle_pv_ng_mdl_list_request(self, data: Dict) -> Dict:
return f""

def handle_cstmz_itm_ng_mdl_lst_request(self, data: Dict) -> Dict:
async def handle_cstmz_itm_ng_mdl_lst_request(self, data: Dict) -> Dict:
return f""

def handle_banner_info_request(self, data: Dict) -> Dict:
async def handle_banner_info_request(self, data: Dict) -> Dict:
return f""

def handle_banner_data_request(self, data: Dict) -> Dict:
async def handle_banner_data_request(self, data: Dict) -> Dict:
return f""

def handle_cm_ply_info_request(self, data: Dict) -> Dict:
async def handle_cm_ply_info_request(self, data: Dict) -> Dict:
return f""

def handle_pstd_h_ctrl_request(self, data: Dict) -> Dict:
async def handle_pstd_h_ctrl_request(self, data: Dict) -> Dict:
return f""

def handle_pstd_item_ng_lst_request(self, data: Dict) -> Dict:
async def handle_pstd_item_ng_lst_request(self, data: Dict) -> Dict:
return f""

def handle_pre_start_request(self, data: Dict) -> str:
profile = self.data.profile.get_profile(data["aime_id"], self.version)
profile_shop = self.data.item.get_shop(data["aime_id"], self.version)
async def handle_pre_start_request(self, data: Dict) -> str:
profile = await self.data.profile.get_profile(data["aime_id"], self.version)
profile_shop = await self.data.item.get_shop(data["aime_id"], self.version)

if profile is None:
return f"&ps_result=-3"
@ -422,29 +421,29 @@ class DivaBase:

return response

def handle_registration_request(self, data: Dict) -> Dict:
self.data.profile.create_profile(
async def handle_registration_request(self, data: Dict) -> Dict:
await self.data.profile.create_profile(
self.version, data["aime_id"], data["player_name"]
)
return f"&cd_adm_result=1&pd_id={data['aime_id']}"

def handle_start_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
profile_shop = self.data.item.get_shop(data["pd_id"], self.version)
async def handle_start_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)
profile_shop = await self.data.item.get_shop(data["pd_id"], self.version)
if profile is None:
return

mdl_have = "F" * 250
# generate the mdl_have string if "unlock_all_modules" is disabled
if not self.game_config.mods.unlock_all_modules:
mdl_have = self.data.module.get_modules_have_string(
mdl_have = await self.data.module.get_modules_have_string(
data["pd_id"], self.version
)

cstmz_itm_have = "F" * 250
# generate the cstmz_itm_have string if "unlock_all_items" is disabled
if not self.game_config.mods.unlock_all_items:
cstmz_itm_have = self.data.customize.get_customize_items_have_string(
cstmz_itm_have = await self.data.customize.get_customize_items_have_string(
data["pd_id"], self.version
)

@ -525,7 +524,7 @@ class DivaBase:
}

# get clear status from user scores
pv_records = self.data.score.get_best_scores(data["pd_id"])
pv_records = await self.data.score.get_best_scores(data["pd_id"])
clear_status = "0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0"

if pv_records is not None:

@ -583,11 +582,11 @@ class DivaBase:

return response

def handle_pd_unlock_request(self, data: Dict) -> Dict:
async def handle_pd_unlock_request(self, data: Dict) -> Dict:
return f""

def handle_spend_credit_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
async def handle_spend_credit_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)
if profile is None:
return

@ -664,30 +663,30 @@ class DivaBase:

return pv_result

def task_generateScoreData(self, data: Dict, pd_by_pv_id, song):
async def task_generateScoreData(self, data: Dict, pd_by_pv_id, song):

if int(song) > 0:
# the request do not send a edition so just perform a query best score and ranking for each edition.
# 0=ORIGINAL, 1=EXTRA
pd_db_song_0 = self.data.score.get_best_user_score(
pd_db_song_0 = await self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=0
)
pd_db_song_1 = self.data.score.get_best_user_score(
pd_db_song_1 = await self.data.score.get_best_user_score(
data["pd_id"], int(song), data["difficulty"], edition=1
)

pd_db_ranking_0, pd_db_ranking_1 = None, None
if pd_db_song_0:
pd_db_ranking_0 = self.data.score.get_global_ranking(
pd_db_ranking_0 = await self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=0
)

if pd_db_song_1:
pd_db_ranking_1 = self.data.score.get_global_ranking(
pd_db_ranking_1 = await self.data.score.get_global_ranking(
data["pd_id"], int(song), data["difficulty"], edition=1
)

pd_db_customize = self.data.pv_customize.get_pv_customize(
pd_db_customize = await self.data.pv_customize.get_pv_customize(
data["pd_id"], int(song)
)

@ -705,7 +704,7 @@ class DivaBase:
pd_by_pv_id.append(urllib.parse.quote(f"{song}***"))
pd_by_pv_id.append(",")

def handle_get_pv_pd_request(self, data: Dict) -> Dict:
async def handle_get_pv_pd_request(self, data: Dict) -> Dict:
song_id = data["pd_pv_id_lst"].split(",")
pv = ""

@ -713,7 +712,7 @@ class DivaBase:
pd_by_pv_id = []

for song in song_id:
thread_ScoreData = Thread(target=self.task_generateScoreData(data, pd_by_pv_id, song))
thread_ScoreData = Thread(target=await self.task_generateScoreData(data, pd_by_pv_id, song))
threads.append(thread_ScoreData)

for x in threads:

@ -732,11 +731,11 @@ class DivaBase:

return response

def handle_stage_start_request(self, data: Dict) -> Dict:
async def handle_stage_start_request(self, data: Dict) -> Dict:
return f""

def handle_stage_result_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
async def handle_stage_result_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)

pd_song_list = data["stg_ply_pv_id"].split(",")
pd_song_difficulty = data["stg_difficulty"].split(",")
@ -754,14 +753,14 @@ class DivaBase:

for index, value in enumerate(pd_song_list):
if "-1" not in pd_song_list[index]:
profile_pd_db_song = self.data.score.get_best_user_score(
profile_pd_db_song = await self.data.score.get_best_user_score(
data["pd_id"],
pd_song_list[index],
pd_song_difficulty[index],
pd_song_edition[index],
)
if profile_pd_db_song is None:
self.data.score.put_best_score(
await self.data.score.put_best_score(
data["pd_id"],
self.version,
pd_song_list[index],

@ -778,7 +777,7 @@ class DivaBase:
pd_song_worst_cnt[index],
pd_song_max_combo[index],
)
self.data.score.put_playlog(
await self.data.score.put_playlog(
data["pd_id"],
self.version,
pd_song_list[index],

@ -796,7 +795,7 @@ class DivaBase:
pd_song_max_combo[index],
)
elif int(pd_song_max_score[index]) >= int(profile_pd_db_song["score"]):
self.data.score.put_best_score(
await self.data.score.put_best_score(
data["pd_id"],
self.version,
pd_song_list[index],

@ -813,7 +812,7 @@ class DivaBase:
pd_song_worst_cnt[index],
pd_song_max_combo[index],
)
self.data.score.put_playlog(
await self.data.score.put_playlog(
data["pd_id"],
self.version,
pd_song_list[index],

@ -831,7 +830,7 @@ class DivaBase:
pd_song_max_combo[index],
)
elif int(pd_song_max_score[index]) != int(profile_pd_db_song["score"]):
self.data.score.put_playlog(
await self.data.score.put_playlog(
data["pd_id"],
self.version,
pd_song_list[index],

@ -852,7 +851,7 @@ class DivaBase:
# Profile saving based on registration list

# Calculate new level
best_scores = self.data.score.get_best_scores(data["pd_id"])
best_scores = await self.data.score.get_best_scores(data["pd_id"])

total_atn_pnt = 0
for best_score in best_scores:

@ -866,7 +865,7 @@ class DivaBase:
response += f"&lv_pnt_old={int(profile['lv_pnt'])}"

# update the profile and commit changes to the db
self.data.profile.update_profile(
await self.data.profile.update_profile(
profile["user"],
lv_num=new_level,
lv_pnt=new_level_pnt,

@ -914,16 +913,16 @@ class DivaBase:

return response

def handle_end_request(self, data: Dict) -> Dict:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
async def handle_end_request(self, data: Dict) -> Dict:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)

self.data.profile.update_profile(
await self.data.profile.update_profile(
profile["user"], my_qst_id=data["my_qst_id"], my_qst_sts=data["my_qst_sts"]
)
return f""

def handle_shop_exit_request(self, data: Dict) -> Dict:
self.data.item.put_shop(
async def handle_shop_exit_request(self, data: Dict) -> Dict:
await self.data.item.put_shop(
data["pd_id"],
self.version,
data["mdl_eqp_cmn_ary"],

@ -931,7 +930,7 @@ class DivaBase:
data["ms_itm_flg_cmn_ary"],
)
if int(data["use_pv_mdl_eqp"]) == 1:
self.data.pv_customize.put_pv_customize(
await self.data.pv_customize.put_pv_customize(
data["pd_id"],
self.version,
data["ply_pv_id"],

@ -940,7 +939,7 @@ class DivaBase:
data["ms_itm_flg_pv_ary"],
)
else:
self.data.pv_customize.put_pv_customize(
await self.data.pv_customize.put_pv_customize(
data["pd_id"],
self.version,
data["ply_pv_id"],

@ -952,8 +951,8 @@ class DivaBase:
response = "&shp_rslt=1"
return response

def handle_card_procedure_request(self, data: Dict) -> str:
profile = self.data.profile.get_profile(data["aime_id"], self.version)
async def handle_card_procedure_request(self, data: Dict) -> str:
profile = await self.data.profile.get_profile(data["aime_id"], self.version)
if profile is None:
return "&cd_adm_result=0"

@ -972,8 +971,8 @@ class DivaBase:

return response

def handle_change_name_request(self, data: Dict) -> str:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
async def handle_change_name_request(self, data: Dict) -> str:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)

# make sure user has enough Vocaloid Points
if profile["vcld_pts"] < int(data["chg_name_price"]):

@ -981,7 +980,7 @@ class DivaBase:

# update the vocaloid points and player name
new_vcld_pts = profile["vcld_pts"] - int(data["chg_name_price"])
self.data.profile.update_profile(
await self.data.profile.update_profile(
profile["user"], player_name=data["player_name"], vcld_pts=new_vcld_pts
)

@ -992,15 +991,15 @@ class DivaBase:

return response

def handle_change_passwd_request(self, data: Dict) -> str:
profile = self.data.profile.get_profile(data["pd_id"], self.version)
async def handle_change_passwd_request(self, data: Dict) -> str:
profile = await self.data.profile.get_profile(data["pd_id"], self.version)

# TODO: return correct error number instead of 0
if data["passwd"] != profile["passwd"]:
return "&cd_adm_result=0"

# set password to true and update the saved password
self.data.profile.update_profile(
await self.data.profile.update_profile(
profile["user"], passwd_stat=1, passwd=data["new_passwd"]
)
@ -1,4 +1,6 @@
from twisted.web.http import Request
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route
import yaml
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler

@ -51,17 +53,16 @@ class DivaServlet(BaseServlet):
level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
)

def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
return (
[],
[("render_POST", "/DivaServlet/", {})]
)
def get_routes(self) -> List[Route]:
return [
Route("/DivaServlet/", self.render_POST, methods=['POST'])
]

def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
return (f"http://{self.core_cfg.title.hostname}:{Utils.get_title_port(self.core_cfg)}/DivaServlet/", self.core_cfg.title.hostname)
return (f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}/DivaServlet/", self.core_cfg.server.hostname)

return (f"http://{self.core_cfg.title.hostname}/DivaServlet/", self.core_cfg.title.hostname)
return (f"http://{self.core_cfg.server.hostname}/DivaServlet/", self.core_cfg.server.hostname)

@classmethod
def is_game_enabled(

@ -78,9 +79,9 @@ class DivaServlet(BaseServlet):

return True

def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
req_raw = request.content.getvalue()
url_header = request.getAllHeaders()
async def render_POST(self, request: Request, game_code: str, matchers: Dict) -> bytes:
req_raw = await request.body()
url_header = request.headers

# Ping Dispatch
if "THIS_STRING_SEPARATES" in str(url_header):

@ -103,9 +104,7 @@ class DivaServlet(BaseServlet):
self.logger.debug(
f"Response cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}"
)
return f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}".encode(
"utf-8"
)
return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}")

# Main Dispatch
json_string = json.dumps(

@ -122,7 +121,7 @@ class DivaServlet(BaseServlet):
) # Decompressing the gzip
except zlib.error as e:
self.logger.error(f"Failed to defalte! {e} -> {gz_string}")
return "stat=0"
return PlainTextResponse("stat=0")

req_kvp = urllib.parse.unquote(url_data)
req_data = {}

@ -141,27 +140,18 @@ class DivaServlet(BaseServlet):
# Load the requests
try:
handler = getattr(self.base, func_to_find)
resp = handler(req_data)
resp = await handler(req_data)

except AttributeError as e:
self.logger.warning(f"Unhandled {req_data['cmd']} request {e}")
return f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok".encode(
"utf-8"
)
return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok")

except Exception as e:
self.logger.error(f"Error handling method {func_to_find} {e}")
return f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok".encode(
"utf-8"
)
return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok")

request.responseHeaders.addRawHeader(b"content-type", b"text/plain")
self.logger.debug(
f"Response cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}"
)

return (
f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}".encode(
"utf-8"
)
)
return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}")
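The servlet hunk above swaps Twisted request handling for Starlette: the body is read with `await request.body()`, headers come from `request.headers`, plain-text replies are wrapped in `PlainTextResponse`, and endpoints are registered as `Route` objects instead of endpoint-matcher tuples. A minimal sketch of that shape (the standalone `Starlette` app and the stub handler body are assumptions for illustration; the route path matches the diff):

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route

async def render_POST(request: Request) -> PlainTextResponse:
    req_raw = await request.body()   # replaces request.content.getvalue()
    url_header = request.headers     # replaces request.getAllHeaders()
    # ... decompress, parse and dispatch the game request here ...
    return PlainTextResponse("cmd=0&req_id=0&stat=ok")

app = Starlette(routes=[Route("/DivaServlet/", render_POST, methods=["POST"])])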
@ -28,7 +28,7 @@ class DivaReader(BaseReader):
self.logger.error(f"Invalid project diva version {version}")
exit(1)

def read(self) -> None:
async def read(self) -> None:
pull_bin_ram = True
pull_bin_rom = True
pull_opt_rom = True

@ -48,14 +48,14 @@ class DivaReader(BaseReader):
self.logger.warning("No option directory specified, skipping")

if pull_bin_ram:
self.read_ram(f"{self.bin_dir}/ram")
await self.read_ram(f"{self.bin_dir}/ram")
if pull_bin_rom:
self.read_rom(f"{self.bin_dir}/rom")
await self.read_rom(f"{self.bin_dir}/rom")
if pull_opt_rom:
for dir in opt_dirs:
self.read_rom(f"{dir}/rom")
await self.read_rom(f"{dir}/rom")

def read_ram(self, ram_root_dir: str) -> None:
async def read_ram(self, ram_root_dir: str) -> None:
self.logger.info(f"Read RAM from {ram_root_dir}")

if path.exists(f"{ram_root_dir}/databank"):

@ -91,7 +91,7 @@ class DivaReader(BaseReader):
f"Added shop item {split[x+0]}"
)

self.data.static.put_shop(
await self.data.static.put_shop(
self.version,
split[x + 0],
split[x + 2],

@ -109,7 +109,7 @@ class DivaReader(BaseReader):
for x in range(0, len(split), 7):
self.logger.info(f"Added item {split[x+0]}")

self.data.static.put_items(
await self.data.static.put_items(
self.version,
split[x + 0],
split[x + 2],

@ -123,7 +123,7 @@ class DivaReader(BaseReader):
elif file.startswith("QuestInfo") and len(split) >= 9:
self.logger.info(f"Added quest {split[0]}")

self.data.static.put_quests(
await self.data.static.put_quests(
self.version,
split[0],
split[6],

@ -141,7 +141,7 @@ class DivaReader(BaseReader):
else:
self.logger.warning(f"Databank folder not found in {ram_root_dir}, skipping")

def read_rom(self, rom_root_dir: str) -> None:
async def read_rom(self, rom_root_dir: str) -> None:
self.logger.info(f"Read ROM from {rom_root_dir}")
pv_list: Dict[str, Dict] = {}

@ -199,7 +199,7 @@ class DivaReader(BaseReader):
diff = pv_data["difficulty"]["easy"]["0"]["level"].split("_")
self.logger.info(f"Added song {song_id} chart 0")

self.data.static.put_music(
await self.data.static.put_music(
self.version,
song_id,
0,

@ -220,7 +220,7 @@ class DivaReader(BaseReader):
diff = pv_data["difficulty"]["normal"]["0"]["level"].split("_")
self.logger.info(f"Added song {song_id} chart 1")

self.data.static.put_music(
await self.data.static.put_music(
self.version,
song_id,
1,

@ -238,7 +238,7 @@ class DivaReader(BaseReader):
diff = pv_data["difficulty"]["hard"]["0"]["level"].split("_")
self.logger.info(f"Added song {song_id} chart 2")

self.data.static.put_music(
await self.data.static.put_music(
self.version,
song_id,
2,

@ -257,7 +257,7 @@ class DivaReader(BaseReader):
diff = pv_data["difficulty"]["extreme"]["0"]["level"].split("_")
self.logger.info(f"Added song {song_id} chart 3")

self.data.static.put_music(
await self.data.static.put_music(
self.version,
song_id,
3,

@ -275,7 +275,7 @@ class DivaReader(BaseReader):
diff = pv_data["difficulty"]["extreme"]["1"]["level"].split("_")
self.logger.info(f"Added song {song_id} chart 4")

self.data.static.put_music(
await self.data.static.put_music(
self.version,
song_id,
4,
@ -25,10 +25,10 @@ customize = Table(


class DivaCustomizeItemData(BaseData):
def put_customize_item(self, aime_id: int, version: int, item_id: int) -> None:
async def put_customize_item(self, aime_id: int, version: int, item_id: int) -> None:
sql = insert(customize).values(version=version, user=aime_id, item_id=item_id)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.error(
f"{__name__} Failed to insert diva profile customize item! aime id: {aime_id} item: {item_id}"

@ -36,7 +36,7 @@ class DivaCustomizeItemData(BaseData):
return None
return result.lastrowid

def get_customize_items(self, aime_id: int, version: int) -> Optional[List[Dict]]:
async def get_customize_items(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""
Given a game version and an aime id, return all the customize items, not used directly
"""

@ -44,12 +44,12 @@ class DivaCustomizeItemData(BaseData):
and_(customize.c.version == version, customize.c.user == aime_id)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_customize_items_have_string(self, aime_id: int, version: int) -> str:
async def get_customize_items_have_string(self, aime_id: int, version: int) -> str:
"""
Given a game version and an aime id, return the cstmz_itm_have hex string
required for diva directly
@ -26,7 +26,7 @@ shop = Table(


class DivaItemData(BaseData):
def put_shop(
async def put_shop(
self,
aime_id: int,
version: int,

@ -48,7 +48,7 @@ class DivaItemData(BaseData):
ms_itm_flg_ary=ms_itm_flg_ary,
)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} Failed to insert diva profile! aime id: {aime_id} array: {mdl_eqp_ary}"

@ -56,13 +56,13 @@ class DivaItemData(BaseData):
return None
return result.lastrowid

def get_shop(self, aime_id: int, version: int) -> Optional[List[Dict]]:
async def get_shop(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""
Given a game version and either a profile or aime id, return the profile
"""
sql = shop.select(and_(shop.c.version == version, shop.c.user == aime_id))

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -23,10 +23,10 @@ module = Table(


class DivaModuleData(BaseData):
def put_module(self, aime_id: int, version: int, module_id: int) -> None:
async def put_module(self, aime_id: int, version: int, module_id: int) -> None:
sql = insert(module).values(version=version, user=aime_id, module_id=module_id)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.error(
f"{__name__} Failed to insert diva profile module! aime id: {aime_id} module: {module_id}"

@ -34,18 +34,18 @@ class DivaModuleData(BaseData):
return None
return result.lastrowid

def get_modules(self, aime_id: int, version: int) -> Optional[List[Dict]]:
async def get_modules(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""
Given a game version and an aime id, return all the modules, not used directly
"""
sql = module.select(and_(module.c.version == version, module.c.user == aime_id))

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_modules_have_string(self, aime_id: int, version: int) -> str:
async def get_modules_have_string(self, aime_id: int, version: int) -> str:
"""
Given a game version and an aime id, return the mdl_have hex string
required for diva directly
@ -70,7 +70,7 @@ profile = Table(


class DivaProfileData(BaseData):
def create_profile(
async def create_profile(
self, version: int, aime_id: int, player_name: str
) -> Optional[int]:
"""

@ -82,7 +82,7 @@ class DivaProfileData(BaseData):

conflict = sql.on_duplicate_key_update(player_name=sql.inserted.player_name)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} Failed to insert diva profile! aime id: {aime_id} username: {player_name}"

@ -90,21 +90,21 @@ class DivaProfileData(BaseData):
return None
return result.lastrowid

def update_profile(self, aime_id: int, **profile_args) -> None:
async def update_profile(self, aime_id: int, **profile_args) -> None:
"""
Given an aime_id update the profile corresponding to the arguments
which are the diva_profile Columns
"""
sql = profile.update(profile.c.user == aime_id).values(**profile_args)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.error(
f"update_profile: failed to update profile! profile: {aime_id}"
)
return None

def get_profile(self, aime_id: int, version: int) -> Optional[List[Dict]]:
async def get_profile(self, aime_id: int, version: int) -> Optional[List[Dict]]:
"""
Given a game version and either a profile or aime id, return the profile
"""

@ -112,7 +112,7 @@ class DivaProfileData(BaseData):
and_(profile.c.version == version, profile.c.user == aime_id)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -39,7 +39,7 @@ pv_customize = Table(


class DivaPvCustomizeData(BaseData):
def put_pv_customize(
async def put_pv_customize(
self,
aime_id: int,
version: int,

@ -64,7 +64,7 @@ class DivaPvCustomizeData(BaseData):
ms_itm_flg_ary=ms_itm_flg_ary,
)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} Failed to insert diva pv customize! aime id: {aime_id}"

@ -72,7 +72,7 @@ class DivaPvCustomizeData(BaseData):
return None
return result.lastrowid

def get_pv_customize(self, aime_id: int, pv_id: int) -> Optional[List[Dict]]:
async def get_pv_customize(self, aime_id: int, pv_id: int) -> Optional[List[Dict]]:
"""
Given either a profile or aime id, return a Pv Customize row
"""

@ -80,7 +80,7 @@ class DivaPvCustomizeData(BaseData):
and_(pv_customize.c.user == aime_id, pv_customize.c.pv_id == pv_id)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -57,7 +57,7 @@ playlog = Table(


class DivaScoreData(BaseData):
def put_best_score(
async def put_best_score(
self,
user_id: int,
game_version: int,

@ -109,7 +109,7 @@ class DivaScoreData(BaseData):
max_combo=max_combo,
)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
self.logger.error(
f"{__name__} failed to insert best score! profile: {user_id}, song: {song_id}"

@ -118,7 +118,7 @@ class DivaScoreData(BaseData):

return result.lastrowid

def put_playlog(
async def put_playlog(
self,
user_id: int,
game_version: int,

@ -157,7 +157,7 @@ class DivaScoreData(BaseData):
max_combo=max_combo,
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
self.logger.error(
f"{__name__} failed to insert playlog! profile: {user_id}, song: {song_id}, chart: {difficulty}"

@ -166,7 +166,7 @@ class DivaScoreData(BaseData):

return result.lastrowid

def get_best_user_score(
async def get_best_user_score(
self, user_id: int, pv_id: int, difficulty: int, edition: int
) -> Optional[Row]:
sql = score.select(

@ -178,12 +178,12 @@ class DivaScoreData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

def get_top3_scores(
async def get_top3_scores(
self, pv_id: int, difficulty: int, edition: int
) -> Optional[List[Row]]:
sql = (

@ -198,12 +198,12 @@ class DivaScoreData(BaseData):
.limit(3)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_global_ranking(
async def get_global_ranking(
self, user_id: int, pv_id: int, difficulty: int, edition: int
) -> Optional[List[Row]]:
# get the subquery max score of a user with pv_id, difficulty and

@ -227,15 +227,15 @@ class DivaScoreData(BaseData):
score.c.edition == edition,
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

def get_best_scores(self, user_id: int) -> Optional[List[Row]]:
async def get_best_scores(self, user_id: int) -> Optional[List[Row]]:
sql = score.select(score.c.user == user_id)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
@ -83,7 +83,7 @@ items = Table(


class DivaStaticData(BaseData):
def put_quests(
async def put_quests(
self,
version: int,
questId: int,

@ -111,22 +111,22 @@ class DivaStaticData(BaseData):

conflict = sql.on_duplicate_key_update(name=name)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid

def get_enabled_quests(self, version: int) -> Optional[List[Row]]:
async def get_enabled_quests(self, version: int) -> Optional[List[Row]]:
sql = select(quests).where(
and_(quests.c.version == version, quests.c.quest_enable == True)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def put_shop(
async def put_shop(
self,
version: int,
shopId: int,

@ -150,12 +150,12 @@ class DivaStaticData(BaseData):

conflict = sql.on_duplicate_key_update(name=name)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid

def get_enabled_shop(self, version: int, shopId: int) -> Optional[Row]:
async def get_enabled_shop(self, version: int, shopId: int) -> Optional[Row]:
sql = select(shop).where(
and_(
shop.c.version == version,

@ -164,22 +164,22 @@ class DivaStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

def get_enabled_shops(self, version: int) -> Optional[List[Row]]:
async def get_enabled_shops(self, version: int) -> Optional[List[Row]]:
sql = select(shop).where(
and_(shop.c.version == version, shop.c.enabled == True)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def put_items(
async def put_items(
self,
version: int,
itemId: int,

@ -203,12 +203,12 @@ class DivaStaticData(BaseData):

conflict = sql.on_duplicate_key_update(name=name)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid

def get_enabled_item(self, version: int, itemId: int) -> Optional[Row]:
async def get_enabled_item(self, version: int, itemId: int) -> Optional[Row]:
sql = select(items).where(
and_(
items.c.version == version,

@ -217,22 +217,22 @@ class DivaStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()

def get_enabled_items(self, version: int) -> Optional[List[Row]]:
async def get_enabled_items(self, version: int) -> Optional[List[Row]]:
sql = select(items).where(
and_(items.c.version == version, items.c.enabled == True)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def put_music(
async def put_music(
self,
version: int,
song: int,

@ -271,12 +271,12 @@ class DivaStaticData(BaseData):
date=date,
)

result = self.execute(conflict)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid

def get_music(
async def get_music(
self, version: int, song_id: Optional[int] = None
) -> Optional[List[Row]]:
if song_id is None:

@ -289,12 +289,12 @@ class DivaStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()

def get_music_chart(
async def get_music_chart(
self, version: int, song_id: int, chart_id: int
) -> Optional[List[Row]]:
sql = select(music).where(

@ -305,7 +305,7 @@ class DivaStaticData(BaseData):
)
)

result = self.execute(sql)
result = await self.execute(sql)
if result is None:
return None
return result.fetchone()
@ -9,4 +9,3 @@ database = IDACData
reader = IDACReader
frontend = IDACFrontend
game_codes = [IDACConstants.GAME_CODE]
current_schema_version = 1
@ -1,7 +1,6 @@
import logging
import socket

from twisted.internet.protocol import DatagramProtocol
from socketserver import BaseRequestHandler, TCPServer
from typing import Tuple

@ -10,19 +9,14 @@ from titles.idac.config import IDACConfig
from titles.idac.database import IDACData


class IDACEchoUDP(DatagramProtocol):
def __init__(self, cfg: CoreConfig, game_cfg: IDACConfig, port: int) -> None:
super().__init__()
self.port = port
self.core_config = cfg
self.game_config = game_cfg
self.logger = logging.getLogger("idac")
class IDACEchoUDP:
def connection_made(self, transport):
self.transport = transport

def datagram_received(self, data, addr):
logging.getLogger('idz').debug(f'Received echo from {addr}')
self.transport.sendto(data, addr)

def datagramReceived(self, data, addr):
self.logger.info(
f"UDP Ping from from {addr[0]}:{addr[1]} -> {self.port} - {data.hex()}"
)
self.transport.write(data, addr)


class IDACEchoTCP(BaseRequestHandler):
@ -1,12 +1,13 @@
import json
from typing import List
from starlette.routing import Route
from starlette.responses import Response, RedirectResponse
import yaml
import jinja2
from os import path
from twisted.web.util import redirectTo
from twisted.web.http import Request
from twisted.web.server import Session
from starlette.requests import Request

from core.frontend import FE_Base, IUserSession
from core.frontend import FE_Base, UserSession
from core.config import CoreConfig
from titles.idac.database import IDACData
from titles.idac.schema.profile import *

@ -26,7 +27,8 @@ class IDACFrontend(FE_Base):
self.game_cfg.update(
yaml.safe_load(open(f"{cfg_dir}/{IDACConstants.CONFIG_NAME}"))
)
self.nav_name = "頭文字D THE ARCADE"
#self.nav_name = "頭文字D THE ARCADE"
self.nav_name = "IDAC"
# TODO: Add version list
self.version = IDACConstants.VER_IDAC_SEASON_2

@ -36,8 +38,13 @@ class IDACFrontend(FE_Base):
25: "full_tune_tickets",
34: "full_tune_fragments",
}

def get_routes(self) -> List[Route]:
return [
Route("/", self.render_GET)
]

def generate_all_tables_json(self, user_id: int):
async def generate_all_tables_json(self, user_id: int):
json_export = {}

idac_tables = {

@ -73,7 +80,7 @@ class IDACFrontend(FE_Base):
sql = sql.where(table.c.version == self.version)

# lol use the profile connection for items, dirty hack
result = self.data.profile.execute(sql)
result = await self.data.profile.execute(sql)
data_list = result.fetchall()

# add the list to the json export with the correct table name

@ -86,49 +93,47 @@ class IDACFrontend(FE_Base):

return json.dumps(json_export, indent=4, default=str, ensure_ascii=False)

def render_GET(self, request: Request) -> bytes:
uri: str = request.uri.decode()
async def render_GET(self, request: Request) -> bytes:
uri: str = request.url.path

template = self.environment.get_template(
"titles/idac/frontend/idac_index.jinja"
"titles/idac/templates/idac_index.jinja"
)
sesh: Session = request.getSession()
usr_sesh = IUserSession(sesh)
user_id = usr_sesh.userId
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
user_id = usr_sesh.user_id
# user_id = usr_sesh.user_id

# profile export
if uri.startswith("/game/idac/export"):
if user_id == 0:
return redirectTo(b"/game/idac", request)
return RedirectResponse(b"/game/idac", request)

# set the file name, content type and size to download the json
content = self.generate_all_tables_json(user_id).encode("utf-8")
request.responseHeaders.addRawHeader(
b"content-type", b"application/octet-stream"
)
request.responseHeaders.addRawHeader(
b"content-disposition", b"attachment; filename=idac_profile.json"
)
request.responseHeaders.addRawHeader(
b"content-length", str(len(content)).encode("utf-8")
)
content = await self.generate_all_tables_json(user_id).encode("utf-8")

self.logger.info(f"User {user_id} exported their IDAC data")
return content
return Response(
content,
200,
{'content-disposition': 'attachment; filename=idac_profile.json'},
"application/octet-stream"
)

profile_data, tickets, rank = None, None, None
if user_id > 0:
profile_data = self.data.profile.get_profile(user_id, self.version)
ticket_data = self.data.item.get_tickets(user_id)
rank = self.data.profile.get_profile_rank(user_id, self.version)
profile_data = await self.data.profile.get_profile(user_id, self.version)
ticket_data = await self.data.item.get_tickets(user_id)
rank = await self.data.profile.get_profile_rank(user_id, self.version)

tickets = {
self.ticket_names[ticket["ticket_id"]]: ticket["ticket_cnt"]
for ticket in ticket_data
}
if ticket_data:
tickets = {
self.ticket_names[ticket["ticket_id"]]: ticket["ticket_cnt"]
for ticket in ticket_data
}

return template.render(
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
profile=profile_data,

@ -136,7 +141,4 @@ class IDACFrontend(FE_Base):
rank=rank,
sesh=vars(usr_sesh),
active_page="idac",
).encode("utf-16")

def render_POST(self, request: Request) -> bytes:
pass
))
@ -1,28 +1,26 @@
|
||||
import json
|
||||
import traceback
|
||||
import inflection
|
||||
from starlette.routing import Route
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import JSONResponse
|
||||
import yaml
|
||||
import logging
|
||||
import coloredlogs
|
||||
|
||||
from os import path
|
||||
from typing import Dict, List, Tuple
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from twisted.web import server
|
||||
from twisted.web.http import Request
|
||||
from twisted.internet import reactor, endpoints
|
||||
import asyncio
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.title import BaseServlet, JSONResponseNoASCII
|
||||
from core.utils import Utils
|
||||
from titles.idac.base import IDACBase
|
||||
from titles.idac.season2 import IDACSeason2
|
||||
from titles.idac.config import IDACConfig
|
||||
from titles.idac.const import IDACConstants
|
||||
from titles.idac.echo import IDACEchoUDP
|
||||
from titles.idac.matching import IDACMatching
|
||||
|
||||
|
||||
class IDACServlet:
|
||||
class IDACServlet(BaseServlet):
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
self.core_cfg = core_cfg
|
||||
self.game_cfg = IDACConfig()
|
||||
@ -72,12 +70,12 @@ class IDACServlet:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def get_endpoint_matchers(self) -> Tuple[List[Tuple[str, str, Dict]], List[Tuple[str, str, Dict]]]:
|
||||
return (
|
||||
[],
|
||||
[("render_POST", "/SDGT/{version}/initiald/{category}/{endpoint}", {})]
|
||||
)
|
||||
|
||||
def get_routes(self) -> List[Route]:
|
||||
return [
|
||||
Route("/{version:int}/initiald/{category:str}/{endpoint:str}", self.render_POST, methods=["POST"]),
|
||||
Route("/{version:int}/initiald-matching/{endpoint:str}", self.render_matching, methods=["POST"]),
|
||||
]
|
||||
|
||||
def get_allnet_info(
|
||||
self, game_code: str, game_ver: int, keychip: str
|
||||
@ -88,15 +86,15 @@ class IDACServlet:
|
||||
return (
|
||||
f"",
|
||||
# requires http or else it defaults to https
|
||||
f"http://{self.core_cfg.title.hostname}{t_port}/{game_code}/{game_ver}/",
|
||||
f"http://{self.core_cfg.server.hostname}{t_port}/{game_ver}/",
|
||||
)
|
||||
|
||||
def render_POST(self, request: Request, game_code: int, matchers: Dict) -> bytes:
|
||||
req_raw = request.content.getvalue()
|
||||
async def render_POST(self, request: Request) -> bytes:
|
||||
req_raw = await request.body()
|
||||
internal_ver = 0
|
||||
version = int(matchers['version'])
|
||||
category = matchers['category']
|
||||
endpoint = matchers['endpoint']
|
||||
version: int = request.path_params.get('version')
|
||||
category: str = request.path_params.get('category')
|
||||
endpoint: str = request.path_params.get('endpoint')
|
||||
client_ip = Utils.get_ip_addr(request)
|
||||
|
||||
if version >= 100 and version < 140: # IDAC Season 1
|
||||
@ -104,7 +102,7 @@ class IDACServlet:
|
||||
elif version >= 140 and version < 171: # IDAC Season 2
|
||||
internal_ver = IDACConstants.VER_IDAC_SEASON_2
|
||||
|
||||
header_application = self.decode_header(request.getAllHeaders())
|
||||
header_application = self.decode_header(request.headers.get("application", ""))
|
||||
|
||||
req_data = json.loads(req_raw)
|
||||
|
||||
@ -119,27 +117,61 @@ class IDACServlet:
|
||||
|
||||
if not hasattr(self.versions[internal_ver], func_to_find):
|
||||
self.logger.warning(f"Unhandled v{version} request {endpoint}")
|
||||
return '{"status_code": "0"}'.encode("utf-8")
|
||||
return JSONResponse('{"status_code": "0"}')
|
||||
|
||||
resp = None
|
||||
try:
|
||||
handler = getattr(self.versions[internal_ver], func_to_find)
|
||||
resp = handler(req_data, header_application)
|
||||
resp = await handler(req_data, header_application)
|
||||
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
|
||||
return '{"status_code": "0"}'.encode("utf-8")
|
||||
return JSONResponse('{"status_code": "0"}')
|
||||
|
||||
if resp is None:
|
||||
resp = {"status_code": "0"}
|
||||
|
||||
self.logger.debug(f"Response {resp}")
|
||||
return json.dumps(resp, ensure_ascii=False).encode("utf-8")
|
||||
return JSONResponseNoASCII(resp)
|
||||
|
||||
    async def render_matching(self, request: Request):
        url: str = request.path_params.get("endpoint")
        ver: int = request.path_params.get("version")
        client_ip = Utils.get_ip_addr(request)
        req_data = await request.json()
        header_application = self.decode_header(request.headers.get('application', ''))
        user_id = int(header_application["session"])

    def decode_header(self, data: Dict) -> Dict:
        app: str = data[b"application"].decode()
        # self.getMatchingStatus(user_id)

        self.logger.info(
            f"IDAC Matching request from {client_ip}: {url} - {req_data}"
        )

        resp = {"status_code": "0"}
        if url == "/regist":
            self.queue = self.queue + 1
        elif url == "/status":
            if req_data.get("cancel_flag"):
                self.queue = self.queue - 1
                self.logger.info(
                    f"IDAC Matching endpoint {client_ip} has quit"
                )

            resp = {
                "status_code": "0",
                # Only IPv4 is supported
                "host": self.game_config.server.matching_host,
                "port": self.game_config.server.matching_p2p,
                "room_name": "INDTA",
                "state": 1,
            }

        self.logger.debug(f"Response {resp}")
        return JSONResponseNoASCII(resp)
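For reference, a populated /status reply takes roughly this shape; host and port come from the game config (matching_host / matching_p2p), and the values below are made-up examples, not defaults from this repo:

resp = {
    "status_code": "0",
    "host": "192.0.2.10",  # example matching_host (IPv4 only)
    "port": 20000,         # example matching_p2p
    "room_name": "INDTA",
    "state": 1,
}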
    def decode_header(self, app: str) -> Dict:
        ret = {}

        for x in app.split(", "):
@ -150,18 +182,13 @@ class IDACServlet:
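The application header parsed here is a comma-separated list of key="value" pairs. Assuming an input of that shape (the value below is invented for illustration), decode_header behaves like:

# Hypothetical header value; real ones are supplied by the game client
decode_header('protocol_ver="1.000", session="12345"')
# -> {"protocol_ver": "1.000", "session": "12345"}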
    def setup(self):
        if self.game_cfg.server.enable:
            endpoints.serverFromString(
                reactor,
                f"tcp:{self.game_cfg.server.matching}:interface={self.core_cfg.server.listen_address}",
            ).listen(server.Site(IDACMatching(self.core_cfg, self.game_cfg)))

            reactor.listenUDP(
                self.game_cfg.server.echo1,
                IDACEchoUDP(self.core_cfg, self.game_cfg, self.game_cfg.server.echo1),
            )
            reactor.listenUDP(
                self.game_cfg.server.echo2,
                IDACEchoUDP(self.core_cfg, self.game_cfg, self.game_cfg.server.echo2),
            loop = asyncio.get_running_loop()
            asyncio.create_task(
                loop.create_datagram_endpoint(
                    lambda: IDACEchoUDP(),
                    local_addr=(self.core_cfg.server.listen_address, self.game_cfg.server.echo1)
                )
            )

            self.logger.info(f"Matching listening on {self.game_cfg.server.matching} with echos on {self.game_cfg.server.echo1} and {self.game_cfg.server.echo2}")
            self.logger.info(f"Matching listening on {self.game_cfg.server.matching} with echo on {self.game_cfg.server.echo1}")
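The setup hunk above swaps Twisted's reactor.listenUDP for asyncio's loop.create_datagram_endpoint, whose factory must return an asyncio.DatagramProtocol. A minimal echo protocol of the kind IDACEchoUDP presumably implements after this change (a sketch under that assumption, not the repo's class):

import asyncio


class EchoUDP(asyncio.DatagramProtocol):
    def connection_made(self, transport: asyncio.DatagramTransport) -> None:
        self.transport = transport

    def datagram_received(self, data: bytes, addr) -> None:
        # Send the datagram straight back to the sender
        self.transport.sendto(data, addr)


async def serve_echo(listen_address: str, port: int) -> None:
    loop = asyncio.get_running_loop()
    # Returns (transport, protocol); keep a reference if it must be closed later
    await loop.create_datagram_endpoint(
        lambda: EchoUDP(),
        local_addr=(listen_address, port),
    )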
@ -1,72 +0,0 @@
import json
import logging

from typing import Dict
from twisted.web import resource

from core import CoreConfig
from titles.idac.season2 import IDACBase
from titles.idac.config import IDACConfig


class IDACMatching(resource.Resource):
    isLeaf = True

    def __init__(self, cfg: CoreConfig, game_cfg: IDACConfig) -> None:
        self.core_config = cfg
        self.game_config = game_cfg
        self.base = IDACBase(cfg, game_cfg)
        self.logger = logging.getLogger("idac")

        self.queue = 0

    def get_matching_state(self):
        if self.queue >= 1:
            self.queue -= 1
            return 0
        else:
            return 1

    def render_POST(self, req) -> bytes:
        url = req.uri.decode()
        req_data = json.loads(req.content.getvalue().decode())
        header_application = self.decode_header(req.getAllHeaders())
        user_id = int(header_application["session"])

        # self.getMatchingStatus(user_id)

        self.logger.info(
            f"IDAC Matching request from {req.getClientIP()}: {url} - {req_data}"
        )

        resp = {"status_code": "0"}
        if url == "/regist":
            self.queue = self.queue + 1
        elif url == "/status":
            if req_data.get("cancel_flag"):
                self.queue = self.queue - 1
                self.logger.info(
                    f"IDAC Matching endpoint {req.getClientIP()} had quited"
                )

            resp = {
                "status_code": "0",
                # Only IPv4 is supported
                "host": self.game_config.server.matching_host,
                "port": self.game_config.server.matching_p2p,
                "room_name": "INDTA",
                "state": 1,
            }

        self.logger.debug(f"Response {resp}")
        return json.dumps(resp, ensure_ascii=False).encode("utf-8")

    def decode_header(self, data: Dict) -> Dict:
        app: str = data[b"application"].decode()
        ret = {}

        for x in app.split(", "):
            y = x.split("=")
            ret[y[0]] = y[1].replace('"', "")

        return ret
@ -33,7 +33,7 @@ class IDACReader(BaseReader):
            self.logger.error(f"Invalid Initial D THE ARCADE version {version}")
            exit(1)

    def read(self) -> None:
    async def read(self) -> None:
        if self.bin_dir is None and self.opt_dir is None:
            self.logger.error(
                (
@ -59,9 +59,9 @@ class IDACReader(BaseReader):
            )
            exit(1)

        self.read_idac_profile(self.opt_dir)
        await self.read_idac_profile(self.opt_dir)

    def read_idac_profile(self, file_path: str) -> None:
    async def read_idac_profile(self, file_path: str) -> None:
        self.logger.info(f"Reading profile from {file_path}...")

        # read it as binary to avoid encoding issues
@ -88,14 +88,14 @@ class IDACReader(BaseReader):
            self.logger.info("Exiting...")
            exit(0)

        user_id = self.data.user.create_user()
        user_id = await self.data.user.create_user()

        if user_id is None:
            self.logger.error("Failed to register user!")
            user_id = -1

        else:
            card_id = self.data.card.create_card(user_id, access_code)
            card_id = await self.data.card.create_card(user_id, access_code)

            if card_id is None:
                self.logger.error("Failed to register card!")
@ -150,7 +150,7 @@ class IDACReader(BaseReader):

        # lol use the profile connection for items, dirty hack
        conflict = sql.on_duplicate_key_update(**data)
        result = self.data.profile.execute(conflict)
        result = await self.data.profile.execute(conflict)

        if result is None:
            self.logger.error(f"Failed to insert data into table {name}")
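The reader hunks above switch every data-layer call to an awaited coroutine; sql.on_duplicate_key_update is the SQLAlchemy MySQL-dialect upsert. A rough, self-contained sketch of that pattern with an async engine (table, columns, and connection URL are invented for illustration; the repo's Data classes wrap this differently):

import asyncio

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.ext.asyncio import create_async_engine

metadata = MetaData()
profile = Table(
    "idac_profile_demo",  # hypothetical table
    metadata,
    Column("user", Integer, primary_key=True),
    Column("username", String(32)),
)


async def upsert_profile(engine, data: dict) -> None:
    sql = insert(profile).values(**data)
    conflict = sql.on_duplicate_key_update(**data)  # MySQL "ON DUPLICATE KEY UPDATE"
    async with engine.begin() as conn:
        await conn.execute(conflict)


async def main() -> None:
    engine = create_async_engine("mysql+aiomysql://user:pass@localhost/demo")
    async with engine.begin() as conn:
        await conn.run_sync(metadata.create_all)  # create the demo table if missing
    await upsert_profile(engine, {"user": 1, "username": "example"})


if __name__ == "__main__":
    asyncio.run(main())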
Some files were not shown because too many files have changed in this diff.