from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route
import yaml
import logging, coloredlogs
from logging.handlers import TimedRotatingFileHandler
import zlib
import json
import urllib.parse
import base64
from os import path
from typing import Tuple, Dict, List

from core.config import CoreConfig
from core.title import BaseServlet
from core.utils import Utils
from .config import DivaConfig
from .const import DivaConstants
from .base import DivaBase


class DivaServlet(BaseServlet):
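    """Title servlet for the "diva" game module; serves POST requests to /DivaServlet/."""
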
    def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
        super().__init__(core_cfg, cfg_dir)
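        # Overlay the per-game YAML config (DivaConstants.CONFIG_NAME under cfg_dir) on the
        # defaults; server.loglevel from that file drives the logger setup below.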
        self.game_cfg = DivaConfig()
        if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
            self.game_cfg.update(
                yaml.safe_load(open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"))
            )

        self.base = DivaBase(core_cfg, self.game_cfg)

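        # Per-module logging: a daily-rotating log file plus colorized console output.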
        self.logger = logging.getLogger("diva")
        log_fmt_str = "[%(asctime)s] Diva | %(levelname)s | %(message)s"
        log_fmt = logging.Formatter(log_fmt_str)
        fileHandler = TimedRotatingFileHandler(
            "{0}/{1}.log".format(self.core_cfg.server.log_dir, "diva"),
            encoding="utf8",
            when="d",
            backupCount=10,
        )

        fileHandler.setFormatter(log_fmt)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(log_fmt)

        self.logger.addHandler(fileHandler)
        self.logger.addHandler(consoleHandler)

        self.logger.setLevel(self.game_cfg.server.loglevel)
        coloredlogs.install(
            level=self.game_cfg.server.loglevel, logger=self.logger, fmt=log_fmt_str
        )

    def get_routes(self) -> List[Route]:
        return [
            Route("/DivaServlet/", self.render_POST, methods=["POST"])
        ]

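    # Returns the (title URI, title host) pair that allnet hands to the game client.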
    def get_allnet_info(self, game_code: str, game_ver: int, keychip: str) -> Tuple[str, str]:
        if not self.core_cfg.server.is_using_proxy and Utils.get_title_port(self.core_cfg) != 80:
            return (f"http://{self.core_cfg.server.hostname}:{Utils.get_title_port(self.core_cfg)}/DivaServlet/", self.core_cfg.server.hostname)

        return (f"http://{self.core_cfg.server.hostname}/DivaServlet/", self.core_cfg.server.hostname)

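    # Serving this title is gated on the server.enable flag in the per-game config.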
    @classmethod
    def is_game_enabled(
        cls, game_code: str, core_cfg: CoreConfig, cfg_dir: str
    ) -> bool:
        game_cfg = DivaConfig()
        if path.exists(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"):
            game_cfg.update(
                yaml.safe_load(open(f"{cfg_dir}/{DivaConstants.CONFIG_NAME}"))
            )

        if not game_cfg.server.enable:
            return False

        return True

    async def render_POST(self, request: Request) -> PlainTextResponse:
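        # Two request formats arrive here: a "ping" body (detected via the
        # "THIS_STRING_SEPARATES" marker in the headers) and the regular game protocol
        # body, which is base64-encoded, zlib-compressed, URL-encoded key=value data.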
        req_raw = await request.body()
        url_header = request.headers

        # Ping Dispatch
        if "THIS_STRING_SEPARATES" in str(url_header):
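            # The fourth line of the raw body carries the &-separated key=value command string.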
            binary_request = req_raw.splitlines()
            binary_cmd_decoded = binary_request[3].decode("utf-8")
            binary_array = binary_cmd_decoded.split("&")

            bin_req_data = {}

            for kvp in binary_array:
                split_bin = kvp.split("=")
                bin_req_data[split_bin[0]] = split_bin[1]

            self.logger.info(f"Binary {bin_req_data['cmd']} Request")
            self.logger.debug(bin_req_data)

            try:
                handler = getattr(self.base, f"handle_{bin_req_data['cmd']}_request")
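                # NOTE: unlike the main dispatch below, this call is not awaited, so the
                # ping-dispatch handlers are assumed to be synchronous here.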
                resp = handler(bin_req_data)

            except AttributeError as e:
                self.logger.warning(f"Unhandled {bin_req_data['cmd']} request {e}")
                return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok")

            except Exception as e:
                self.logger.error(f"Error handling {bin_req_data['cmd']} method {e}")
                return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok")

            self.logger.debug(
                f"Response cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}"
            )
            return PlainTextResponse(f"cmd={bin_req_data['cmd']}&req_id={bin_req_data['req_id']}&stat=ok{resp}")

        # Main Dispatch
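        # The client payload is zlib-compressed data wrapped in base64; normalize the newline
        # escaping, base64-decode, then inflate it before parsing the key=value pairs.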
        json_string = json.dumps(
            req_raw.decode("utf-8")
        )  # Decode the request body as UTF-8 and escape it as a JSON string
        b64string = json_string.replace(
            r"\n", "\n"
        )  # Turn the escaped \n sequences back into real newlines
        gz_string = base64.b64decode(b64string)  # Decode the base64 string

        try:
            url_data = zlib.decompress(gz_string).decode(
                "utf-8"
            )  # Inflate the zlib-compressed payload
        except zlib.error as e:
            self.logger.error(f"Failed to decompress! {e} -> {gz_string}")
            return PlainTextResponse("stat=0")

        req_kvp = urllib.parse.unquote(url_data)
        req_data = {}

        # Split the decoded string on & so each key=value pair can be loaded into the request dict
        splitted_request = req_kvp.split("&")
        for kvp in splitted_request:
            split = kvp.split("=")
            req_data[split[0]] = split[1]

        self.logger.info(f"{req_data['cmd']} Request")
        self.logger.debug(req_data)

        func_to_find = f"handle_{req_data['cmd']}_request"

        # Dispatch to the matching handler on DivaBase
        try:
            handler = getattr(self.base, func_to_find)
            resp = await handler(req_data)

        except AttributeError as e:
            self.logger.warning(f"Unhandled {req_data['cmd']} request {e}")
            return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok")

        except Exception as e:
            self.logger.error(f"Error handling method {func_to_find} {e}")
            return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok")

        self.logger.debug(
            f"Response cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}"
        )

        return PlainTextResponse(f"cmd={req_data['cmd']}&req_id={req_data['req_id']}&stat=ok{resp}")