from twisted.web.http import Request

import json
import inflection
import yaml
import string
import logging
import coloredlogs
import zlib
from logging.handlers import TimedRotatingFileHandler
from os import path
from typing import Tuple

from core.config import CoreConfig
from core.utils import Utils

from titles.mai2.config import Mai2Config
from titles.mai2.const import Mai2Constants
from titles.mai2.base import Mai2Base
from titles.mai2.plus import Mai2Plus
from titles.mai2.splash import Mai2Splash
from titles.mai2.splashplus import Mai2SplashPlus
from titles.mai2.universe import Mai2Universe
from titles.mai2.universeplus import Mai2UniversePlus
from titles.mai2.festival import Mai2Festival


class Mai2Servlet:
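    """Twisted servlet for the maimai DX (Mai2) title server.

    Loads the game config, sets up logging, and routes incoming requests
    to the handler class matching the client's game version.
    """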
    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        self.core_cfg = core_cfg
        self.game_cfg = Mai2Config()
        if path.exists(f"{cfg_dir}/{Mai2Constants.CONFIG_NAME}"):
            self.game_cfg.update(
                yaml.safe_load(open(f"{cfg_dir}/{Mai2Constants.CONFIG_NAME}"))
            )
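
        # Handler classes indexed by internal version; the order mirrors the
        # Mai2Constants.VER_* values that render_POST uses to pick one.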
        self.versions = [
            Mai2Base,
            Mai2Plus,
            Mai2Splash,
            Mai2SplashPlus,
            Mai2Universe,
            Mai2UniversePlus,
            Mai2Festival,
        ]

        self.logger = logging.getLogger("mai2")
        log_fmt_str = "[%(asctime)s] Mai2 | %(levelname)s | %(message)s"
        log_fmt = logging.Formatter(log_fmt_str)
        fileHandler = TimedRotatingFileHandler(
            "{0}/{1}.log".format(self.core_cfg.server.log_dir, "mai2"),
            encoding="utf8",
            when="d",
            backupCount=10,
        )

        fileHandler.setFormatter(log_fmt)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(log_fmt)

        self.logger.addHandler(fileHandler)
        self.logger.addHandler(consoleHandler)

        self.logger.setLevel(self.game_cfg.server.loglevel)
        coloredlogs.install(
            level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
        )

    @classmethod
    def get_allnet_info(
        cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
    ) -> Tuple[bool, str, str]:
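        """Return (enabled, title server URI, host) for allnet registration.

        Reads the game config from cfg_dir; if server.enable is off, the
        title is reported as disabled.
        """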
        game_cfg = Mai2Config()

        if path.exists(f"{cfg_dir}/{Mai2Constants.CONFIG_NAME}"):
            game_cfg.update(
                yaml.safe_load(open(f"{cfg_dir}/{Mai2Constants.CONFIG_NAME}"))
            )

        if not game_cfg.server.enable:
            return (False, "", "")

        if core_cfg.server.is_develop:
            return (
                True,
                f"http://{core_cfg.title.hostname}:{core_cfg.title.port}/{game_code}/$v/",
                f"{core_cfg.title.hostname}:{core_cfg.title.port}/",
            )

        return (
            True,
            f"http://{core_cfg.title.hostname}/{game_code}/$v/",
            f"{core_cfg.title.hostname}/",
        )

    def render_POST(self, request: Request, version: int, url_path: str) -> bytes:
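        """Handle a POST from the game client: inflate the zlib request body,
        dispatch it to the matching handler method, and return the response
        as zlib-compressed JSON.
        """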
        if url_path.lower() == "ping":
            return zlib.compress(b'{"returnCode": "1"}')

        req_raw = request.content.getvalue()
        url = request.uri.decode()
        url_split = url_path.split("/")
        internal_ver = 0
        endpoint = url_split[len(url_split) - 1]
        client_ip = Utils.get_ip_addr(request)
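
        # Map the client's reported data version to the internal version index
        # used to select a handler class from self.versions.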
        if version < 105:  # 1.0
            internal_ver = Mai2Constants.VER_MAIMAI_DX
        elif version >= 105 and version < 110:  # Plus
            internal_ver = Mai2Constants.VER_MAIMAI_DX_PLUS
        elif version >= 110 and version < 115:  # Splash
            internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH
        elif version >= 115 and version < 120:  # Splash Plus
            internal_ver = Mai2Constants.VER_MAIMAI_DX_SPLASH_PLUS
        elif version >= 120 and version < 125:  # Universe
            internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE
        elif version >= 125 and version < 130:  # Universe Plus
            internal_ver = Mai2Constants.VER_MAIMAI_DX_UNIVERSE_PLUS
        elif version >= 130:  # Festival
            internal_ver = Mai2Constants.VER_MAIMAI_DX_FESTIVAL

        if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
            # If we get a 32-character hex string, it's a hash and the request
            # is encrypted. The likelihood of false positives is low, but
            # technically not zero.
            self.logger.error("Encryption not supported at this time")
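
        # Request bodies are zlib-deflated JSON; if decompression fails, report
        # a failed stat back to the client.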
        try:
            unzip = zlib.decompress(req_raw)

        except zlib.error as e:
            self.logger.error(
                f"Failed to decompress v{version} {endpoint} request -> {e}"
            )
            return zlib.compress(b'{"stat": "0"}')

        req_data = json.loads(unzip)

        self.logger.info(f"v{version} {endpoint} request from {client_ip}")
        self.logger.debug(req_data)
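
        # Build the handler method name: inflection.underscore converts the
        # CamelCase endpoint name to snake_case (handle_<endpoint>_request).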
        func_to_find = "handle_" + inflection.underscore(endpoint) + "_request"
        handler_cls = self.versions[internal_ver](self.core_cfg, self.game_cfg)

        if not hasattr(handler_cls, func_to_find):
            self.logger.warning(f"Unhandled v{version} request {endpoint}")
            resp = {"returnCode": 1}

        else:
            try:
                handler = getattr(handler_cls, func_to_find)
                resp = handler(req_data)

            except Exception as e:
                self.logger.error(f"Error handling v{version} method {endpoint} - {e}")
                return zlib.compress(b'{"stat": "0"}')

        if resp is None:
            resp = {"returnCode": 1}

        self.logger.debug(f"Response {resp}")

        return zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))