2023-02-16 06:06:42 +01:00
|
|
|
import logging, coloredlogs
|
|
|
|
from Crypto.Cipher import AES
|
2024-01-09 09:07:04 +01:00
|
|
|
from typing import Dict, Tuple, Callable, Union, Optional
|
|
|
|
import asyncio
|
2023-02-16 06:06:42 +01:00
|
|
|
from logging.handlers import TimedRotatingFileHandler
|
|
|
|
|
|
|
|
from core.config import CoreConfig
|
2023-11-30 00:01:19 +01:00
|
|
|
from core.utils import create_sega_auth_key
|
2023-02-16 06:06:42 +01:00
|
|
|
from core.data import Data
|
2023-08-14 05:32:03 +02:00
|
|
|
from .adb_handlers import *
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
class AimedbServlette():
    """Async TCP servlet implementing the SEGA aimedb (Aime card DB) protocol."""

    # Dispatch table: ADB command code -> (handler coroutine, response code, log name).
    # NOTE: this is a class attribute, so it is shared by all instances;
    # it is (re)populated by the register_handler() calls in __init__.
    request_list: Dict[int, Tuple[Callable[[bytes, int], Union[ADBBaseResponse, bytes]], int, str]] = {}

    def __init__(self, core_cfg: CoreConfig) -> None:
        """Set up config, database access, logging, and the command dispatch table.

        Exits the process if no AES key is configured, since no aimedb
        traffic can be decrypted without it.
        """
        self.config = core_cfg
        self.data = Data(core_cfg)

        self.logger = logging.getLogger("aimedb")
        # Guard against double-adding handlers when multiple instances are
        # created: the "initted" marker attribute is set once per process.
        if not hasattr(self.logger, "initted"):
            log_fmt_str = "[%(asctime)s] Aimedb | %(levelname)s | %(message)s"
            log_fmt = logging.Formatter(log_fmt_str)

            # Daily-rotating file log, keeping 10 days of history.
            fileHandler = TimedRotatingFileHandler(
                "{0}/{1}.log".format(self.config.server.log_dir, "aimedb"),
                when="d",
                backupCount=10,
            )
            fileHandler.setFormatter(log_fmt)

            consoleHandler = logging.StreamHandler()
            consoleHandler.setFormatter(log_fmt)

            self.logger.addHandler(fileHandler)
            self.logger.addHandler(consoleHandler)

            self.logger.setLevel(self.config.aimedb.loglevel)
            # coloredlogs re-installs its own console handler with colors.
            coloredlogs.install(
                level=core_cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
            )
            self.logger.initted = True

        # Without the shared AES key the server cannot talk to any client.
        if not core_cfg.aimedb.key:
            self.logger.error("!!!KEY NOT SET!!!")
            exit(1)

        # Register all known ADB commands (request code, response code, handler, name).
        self.register_handler(0x01, 0x03, self.handle_felica_lookup, 'felica_lookup')
        self.register_handler(0x02, 0x03, self.handle_felica_register, 'felica_register')

        self.register_handler(0x04, 0x06, self.handle_lookup, 'lookup')
        self.register_handler(0x05, 0x06, self.handle_register, 'register')

        self.register_handler(0x07, 0x08, self.handle_status_log, 'status_log')
        self.register_handler(0x09, 0x0A, self.handle_log, 'aime_log')

        self.register_handler(0x0B, 0x0C, self.handle_campaign, 'campaign')
        self.register_handler(0x0D, 0x0E, self.handle_campaign_clear, 'campaign_clear')

        self.register_handler(0x0F, 0x10, self.handle_lookup_ex, 'lookup_ex')
        self.register_handler(0x11, 0x12, self.handle_felica_lookup_ex, 'felica_lookup_ex')

        self.register_handler(0x13, 0x14, self.handle_log_ex, 'aime_log_ex')
        self.register_handler(0x64, 0x65, self.handle_hello, 'hello')
|
2023-08-14 05:32:03 +02:00
|
|
|
def register_handler(self, cmd: int, resp:int, handler: Callable[[bytes, int], Union[ADBBaseResponse, bytes]], name: str) -> None:
|
|
|
|
self.request_list[cmd] = (handler, resp, name)
|
2024-01-09 09:07:04 +01:00
|
|
|
|
|
|
|
def start(self) -> None:
|
|
|
|
self.logger.info(f"Start on port {self.config.aimedb.port}")
|
2024-01-14 22:48:41 +01:00
|
|
|
addr = self.config.aimedb.listen_address if self.config.aimedb.listen_address else self.config.server.listen_address
|
|
|
|
asyncio.create_task(asyncio.start_server(self.dataReceived, addr, self.config.aimedb.port))
|
2024-01-09 09:07:04 +01:00
|
|
|
|
|
|
|
async def dataReceived(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
|
|
|
|
self.logger.debug(f"Connection made from {writer.get_extra_info('peername')[0]}")
|
|
|
|
while True:
|
|
|
|
try:
|
2024-01-09 09:16:31 +01:00
|
|
|
data: bytes = await reader.read(4096)
|
2024-01-09 09:07:04 +01:00
|
|
|
if len(data) == 0:
|
|
|
|
self.logger.debug("Connection closed")
|
|
|
|
return
|
|
|
|
await self.process_data(data, reader, writer)
|
|
|
|
await writer.drain()
|
|
|
|
except ConnectionResetError as e:
|
2024-01-09 09:17:23 +01:00
|
|
|
self.logger.debug("Connection reset, disconnecting")
|
2024-01-09 09:07:04 +01:00
|
|
|
return
|
|
|
|
|
|
|
|
async def process_data(self, data: bytes, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> Optional[bytes]:
|
|
|
|
addr = writer.get_extra_info('peername')[0]
|
2023-02-16 06:06:42 +01:00
|
|
|
cipher = AES.new(self.config.aimedb.key.encode(), AES.MODE_ECB)
|
|
|
|
|
|
|
|
try:
|
|
|
|
decrypted = cipher.decrypt(data)
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
self.logger.error(f"Failed to decrypt {data.hex()} because {e}")
|
2024-01-09 09:07:04 +01:00
|
|
|
return
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
self.logger.debug(f"{addr} wrote {decrypted.hex()}")
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
try:
|
|
|
|
head = ADBHeader.from_data(decrypted)
|
|
|
|
|
|
|
|
except ADBHeaderException as e:
|
|
|
|
self.logger.error(f"Error parsing ADB header: {e}")
|
|
|
|
try:
|
|
|
|
encrypted = cipher.encrypt(ADBBaseResponse().make())
|
2024-01-09 09:07:04 +01:00
|
|
|
writer.write(encrypted)
|
|
|
|
await writer.drain()
|
|
|
|
return
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
self.logger.error(f"Failed to encrypt default response because {e}")
|
|
|
|
|
|
|
|
return
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-28 01:18:22 +02:00
|
|
|
if head.keychip_id == "ABCD1234567" or head.store_id == 0xfff0:
|
|
|
|
self.logger.warning(f"Request from uninitialized AMLib: {vars(head)}")
|
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
if head.cmd == 0x66:
|
|
|
|
self.logger.info("Goodbye")
|
|
|
|
writer.close()
|
|
|
|
return
|
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
handler, resp_code, name = self.request_list.get(head.cmd, (self.handle_default, None, 'default'))
|
|
|
|
|
|
|
|
if resp_code is None:
|
|
|
|
self.logger.warning(f"No handler for cmd {hex(head.cmd)}")
|
|
|
|
|
|
|
|
elif resp_code > 0:
|
2024-01-09 09:07:04 +01:00
|
|
|
self.logger.info(f"{name} from {head.keychip_id} ({head.game_id}) @ {addr}")
|
2023-08-14 05:32:03 +02:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
resp = await handler(decrypted, resp_code)
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
if type(resp) == ADBBaseResponse or issubclass(type(resp), ADBBaseResponse):
|
|
|
|
resp_bytes = resp.make()
|
|
|
|
|
|
|
|
elif type(resp) == bytes:
|
|
|
|
resp_bytes = resp
|
|
|
|
|
|
|
|
elif resp is None: # Nothing to send, probably a goodbye
|
2024-01-09 09:07:04 +01:00
|
|
|
self.logger.warn(f"None return by handler for {name}")
|
2023-02-16 06:06:42 +01:00
|
|
|
return
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
else:
|
2024-01-09 09:07:04 +01:00
|
|
|
self.logger.error(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
|
2023-08-14 05:32:03 +02:00
|
|
|
raise TypeError(f"Unsupported type returned by ADB handler for {name}: {type(resp)}")
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
try:
|
2023-08-14 05:32:03 +02:00
|
|
|
encrypted = cipher.encrypt(resp_bytes)
|
|
|
|
self.logger.debug(f"Response {resp_bytes.hex()}")
|
2024-01-09 09:07:04 +01:00
|
|
|
writer.write(encrypted)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
except Exception as e:
|
|
|
|
self.logger.error(f"Failed to encrypt {resp_bytes.hex()} because {e}")
|
2024-01-09 09:07:04 +01:00
|
|
|
|
|
|
|
async def handle_default(self, data: bytes, resp_code: int, length: int = 0x20) -> ADBBaseResponse:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBHeader.from_data(data)
|
2023-08-19 07:35:37 +02:00
|
|
|
return ADBBaseResponse(resp_code, length, 1, req.game_id, req.store_id, req.keychip_id, req.protocol_ver)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_hello(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
|
|
|
return await self.handle_default(data, resp_code)
|
2023-03-09 17:38:58 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_campaign(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
2023-08-14 05:32:03 +02:00
|
|
|
h = ADBHeader.from_data(data)
|
|
|
|
if h.protocol_ver >= 0x3030:
|
|
|
|
req = h
|
2023-08-19 07:35:37 +02:00
|
|
|
resp = ADBCampaignResponse.from_req(req)
|
2023-03-09 17:38:58 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
else:
|
|
|
|
req = ADBOldCampaignRequest(data)
|
|
|
|
|
|
|
|
self.logger.info(f"Legacy campaign request for campaign {req.campaign_id} (protocol version {hex(h.protocol_ver)})")
|
2023-08-19 07:35:37 +02:00
|
|
|
resp = ADBOldCampaignResponse.from_req(req.head)
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
# We don't currently support campaigns
|
|
|
|
return resp
|
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBLookupRequest(data)
|
2024-01-09 20:42:17 +01:00
|
|
|
user_id = await self.data.card.get_user_id_from_card(req.access_code)
|
|
|
|
is_banned = await self.data.card.get_card_banned(req.access_code)
|
|
|
|
is_locked = await self.data.card.get_card_locked(req.access_code)
|
2024-01-09 09:07:04 +01:00
|
|
|
|
|
|
|
ret = ADBLookupResponse.from_req(req.head, user_id)
|
2023-10-20 05:25:12 +02:00
|
|
|
if is_banned and is_locked:
|
|
|
|
ret.head.status = ADBStatus.BAN_SYS_USER
|
|
|
|
elif is_banned:
|
|
|
|
ret.head.status = ADBStatus.BAN_SYS
|
|
|
|
elif is_locked:
|
|
|
|
ret.head.status = ADBStatus.LOCK_USER
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
self.logger.info(
|
|
|
|
f"access_code {req.access_code} -> user_id {ret.user_id}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
2024-02-12 21:53:28 +01:00
|
|
|
|
|
|
|
await self.data.card.update_card_last_login(req.access_code)
|
2023-08-14 05:32:03 +02:00
|
|
|
return ret
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBLookupRequest(data)
|
2024-01-09 20:42:17 +01:00
|
|
|
user_id = await self.data.card.get_user_id_from_card(req.access_code)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 20:42:17 +01:00
|
|
|
is_banned = await self.data.card.get_card_banned(req.access_code)
|
|
|
|
is_locked = await self.data.card.get_card_locked(req.access_code)
|
2023-10-20 05:25:12 +02:00
|
|
|
|
2023-08-19 07:35:37 +02:00
|
|
|
ret = ADBLookupExResponse.from_req(req.head, user_id)
|
2023-10-20 05:25:12 +02:00
|
|
|
if is_banned and is_locked:
|
|
|
|
ret.head.status = ADBStatus.BAN_SYS_USER
|
|
|
|
elif is_banned:
|
|
|
|
ret.head.status = ADBStatus.BAN_SYS
|
|
|
|
elif is_locked:
|
|
|
|
ret.head.status = ADBStatus.LOCK_USER
|
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
self.logger.info(
|
2023-08-14 05:32:03 +02:00
|
|
|
f"access_code {req.access_code} -> user_id {ret.user_id}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
2023-11-30 00:01:19 +01:00
|
|
|
|
2023-12-01 00:10:22 +01:00
|
|
|
if user_id and user_id > 0 and self.config.aimedb.id_secret:
|
2023-12-01 00:22:01 +01:00
|
|
|
auth_key = create_sega_auth_key(user_id, req.head.game_id, req.head.store_id, req.head.keychip_id, self.config.aimedb.id_secret, self.config.aimedb.id_lifetime_seconds)
|
2023-11-30 00:01:19 +01:00
|
|
|
if auth_key is not None:
|
|
|
|
auth_key_extra_len = 256 - len(auth_key)
|
2023-11-30 08:07:24 +01:00
|
|
|
auth_key_full = auth_key.encode() + (b"\0" * auth_key_extra_len)
|
2023-11-30 00:01:19 +01:00
|
|
|
self.logger.debug(f"Generated auth token {auth_key}")
|
2023-11-30 08:07:24 +01:00
|
|
|
ret.auth_key = auth_key_full
|
2023-11-30 00:01:19 +01:00
|
|
|
|
2024-02-12 21:53:28 +01:00
|
|
|
await self.data.card.update_card_last_login(req.access_code)
|
2023-08-14 05:32:03 +02:00
|
|
|
return ret
|
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_felica_lookup(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
"""
|
|
|
|
On official, I think a card has to be registered for this to actually work, but
|
|
|
|
I'm making the executive decision to not implement that and just kick back our
|
|
|
|
faux generated access code. The real felica IDm -> access code conversion is done
|
|
|
|
on the ADB server, which we do not and will not ever have access to. Because we can
|
|
|
|
assure that all IDms will be unique, this basic 0-padded hex -> int conversion will
|
|
|
|
be fine.
|
|
|
|
"""
|
|
|
|
req = ADBFelicaLookupRequest(data)
|
2024-01-10 01:10:39 +01:00
|
|
|
ac = self.data.card.to_access_code(req.idm)
|
2023-08-14 05:32:03 +02:00
|
|
|
self.logger.info(
|
|
|
|
f"idm {req.idm} ipm {req.pmm} -> access_code {ac}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
2023-08-19 07:35:37 +02:00
|
|
|
return ADBFelicaLookupResponse.from_req(req.head, ac)
|
2023-08-14 05:32:03 +02:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_felica_register(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
"""
|
|
|
|
I've never seen this used.
|
|
|
|
"""
|
|
|
|
req = ADBFelicaLookupRequest(data)
|
2024-01-10 01:10:39 +01:00
|
|
|
ac = self.data.card.to_access_code(req.idm)
|
2023-08-14 05:32:03 +02:00
|
|
|
|
|
|
|
if self.config.server.allow_user_registration:
|
2024-01-09 20:42:17 +01:00
|
|
|
user_id = await self.data.user.create_user()
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
if user_id is None:
|
|
|
|
self.logger.error("Failed to register user!")
|
|
|
|
user_id = -1
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
else:
|
2024-01-09 20:42:17 +01:00
|
|
|
card_id = await self.data.card.create_card(user_id, ac)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
if card_id is None:
|
|
|
|
self.logger.error("Failed to register card!")
|
|
|
|
user_id = -1
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
self.logger.info(
|
|
|
|
f"Register access code {ac} (IDm: {req.idm} PMm: {req.pmm}) -> user_id {user_id}"
|
|
|
|
)
|
2023-03-09 17:38:58 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
else:
|
|
|
|
self.logger.info(
|
|
|
|
f"Registration blocked!: access code {ac} (IDm: {req.idm} PMm: {req.pmm})"
|
|
|
|
)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-19 07:35:37 +02:00
|
|
|
return ADBFelicaLookupResponse.from_req(req.head, ac)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_felica_lookup_ex(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBFelicaLookup2Request(data)
|
2024-01-10 01:10:39 +01:00
|
|
|
access_code = self.data.card.to_access_code(req.idm)
|
2024-01-09 20:42:17 +01:00
|
|
|
user_id = await self.data.card.get_user_id_from_card(access_code=access_code)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
if user_id is None:
|
|
|
|
user_id = -1
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
self.logger.info(
|
2023-08-14 05:32:03 +02:00
|
|
|
f"idm {req.idm} ipm {req.pmm} -> access_code {access_code} user_id {user_id}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
|
|
|
|
2023-11-30 00:01:19 +01:00
|
|
|
resp = ADBFelicaLookup2Response.from_req(req.head, user_id, access_code)
|
|
|
|
|
2023-12-01 00:10:22 +01:00
|
|
|
if user_id and user_id > 0 and self.config.aimedb.id_secret:
|
2023-12-01 00:22:01 +01:00
|
|
|
auth_key = create_sega_auth_key(user_id, req.head.game_id, req.head.store_id, req.head.keychip_id, self.config.aimedb.id_secret, self.config.aimedb.id_lifetime_seconds)
|
2023-11-30 00:01:19 +01:00
|
|
|
if auth_key is not None:
|
|
|
|
auth_key_extra_len = 256 - len(auth_key)
|
2023-11-30 08:07:24 +01:00
|
|
|
auth_key_full = auth_key.encode() + (b"\0" * auth_key_extra_len)
|
2023-11-30 00:01:19 +01:00
|
|
|
self.logger.debug(f"Generated auth token {auth_key}")
|
2023-11-30 08:07:24 +01:00
|
|
|
resp.auth_key = auth_key_full
|
2024-02-12 21:53:28 +01:00
|
|
|
|
|
|
|
|
|
|
|
await self.data.card.update_card_last_login(access_code)
|
2023-11-30 00:01:19 +01:00
|
|
|
return resp
|
2023-03-09 17:38:58 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_campaign_clear(self, data: bytes, resp_code: int) -> ADBBaseResponse:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBCampaignClearRequest(data)
|
2023-03-09 17:38:58 +01:00
|
|
|
|
2023-08-19 07:35:37 +02:00
|
|
|
resp = ADBCampaignClearResponse.from_req(req.head)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
# We don't support campaign stuff
|
|
|
|
return resp
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_register(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBLookupRequest(data)
|
|
|
|
user_id = -1
|
2023-10-20 05:25:12 +02:00
|
|
|
|
2023-02-19 06:07:14 +01:00
|
|
|
if self.config.server.allow_user_registration:
|
2024-01-09 20:42:17 +01:00
|
|
|
user_id = await self.data.user.create_user()
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
if user_id is None:
|
2023-02-16 06:06:42 +01:00
|
|
|
self.logger.error("Failed to register user!")
|
2023-08-14 05:32:03 +02:00
|
|
|
user_id = -1
|
2023-02-16 06:06:42 +01:00
|
|
|
|
|
|
|
else:
|
2024-01-09 20:42:17 +01:00
|
|
|
card_id = await self.data.card.create_card(user_id, req.access_code)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
if card_id is None:
|
2023-02-16 06:06:42 +01:00
|
|
|
self.logger.error("Failed to register card!")
|
2023-08-14 05:32:03 +02:00
|
|
|
user_id = -1
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-03-09 17:38:58 +01:00
|
|
|
self.logger.info(
|
2023-08-14 05:32:03 +02:00
|
|
|
f"Register access code {req.access_code} -> user_id {user_id}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
|
|
|
|
2023-02-16 06:06:42 +01:00
|
|
|
else:
|
2023-03-09 17:38:58 +01:00
|
|
|
self.logger.info(
|
2023-08-14 05:32:03 +02:00
|
|
|
f"Registration blocked!: access code {req.access_code}"
|
2023-03-09 17:38:58 +01:00
|
|
|
)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-19 07:35:37 +02:00
|
|
|
resp = ADBLookupResponse.from_req(req.head, user_id)
|
2023-08-14 05:32:03 +02:00
|
|
|
if resp.user_id <= 0:
|
|
|
|
resp.head.status = ADBStatus.BAN_SYS # Closest we can get to a "You cannot register"
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
return resp
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2023-08-14 05:32:03 +02:00
|
|
|
# TODO: Save these in some capacity, as deemed relevant
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_status_log(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBStatusLogRequest(data)
|
|
|
|
self.logger.info(f"User {req.aime_id} logged {req.status.name} event")
|
2023-08-19 07:35:37 +02:00
|
|
|
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_log(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBLogRequest(data)
|
|
|
|
self.logger.info(f"User {req.aime_id} logged {req.status.name} event, credit_ct: {req.credit_ct} bet_ct: {req.bet_ct} won_ct: {req.won_ct}")
|
2023-08-19 07:35:37 +02:00
|
|
|
return ADBBaseResponse(resp_code, 0x20, 1, req.head.game_id, req.head.store_id, req.head.keychip_id, req.head.protocol_ver)
|
2023-02-16 06:06:42 +01:00
|
|
|
|
2024-01-09 09:07:04 +01:00
|
|
|
async def handle_log_ex(self, data: bytes, resp_code: int) -> bytes:
|
2023-08-14 05:32:03 +02:00
|
|
|
req = ADBLogExRequest(data)
|
2023-08-21 01:55:26 +02:00
|
|
|
strs = []
|
|
|
|
self.logger.info(f"Recieved {req.num_logs} or {len(req.logs)} logs")
|
|
|
|
|
|
|
|
for x in range(req.num_logs):
|
|
|
|
self.logger.debug(f"User {req.logs[x].aime_id} logged {req.logs[x].status.name} event, credit_ct: {req.logs[x].credit_ct} bet_ct: {req.logs[x].bet_ct} won_ct: {req.logs[x].won_ct}")
|
2023-08-21 01:56:16 +02:00
|
|
|
return ADBLogExResponse.from_req(req.head)
|
2023-02-16 06:06:42 +01:00
|
|
|
|