
Merge pull request 'develop' (#3) from Hay1tsme/artemis:develop into develop

Reviewed-on: https://gitea.tendokyu.moe/SoulGateKey/artemis/pulls/3
SoulGateKey 2024-11-20 19:25:36 +00:00
commit fe9a04ef8e
68 changed files with 4006 additions and 906 deletions

.gitignore

@ -145,6 +145,7 @@ dmypy.json
cython_debug/
.vscode/*
.vs/*
# Local History for Visual Studio Code
.history/


@ -1,5 +1,9 @@
import logging, os
from typing import Any
import logging
import os
import ssl
from typing import Any, Union
from typing_extensions import Optional
class ServerConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
@ -176,6 +180,60 @@ class DatabaseConfig:
self.__config, "core", "database", "protocol", default="mysql"
)
@property
def ssl_enabled(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_enabled", default=False
)
@property
def ssl_cafile(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_cafile", default=None
)
@property
def ssl_capath(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_capath", default=None
)
@property
def ssl_cert(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_cert", default=None
)
@property
def ssl_key(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_key", default=None
)
@property
def ssl_key_password(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_key_password", default=None
)
@property
def ssl_verify_identity(self) -> bool:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_verify_identity", default=True
)
@property
def ssl_verify_cert(self) -> Optional[Union[str, bool]]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_verify_cert", default=None
)
@property
def ssl_ciphers(self) -> Optional[str]:
return CoreConfig.get_config_field(
self.__config, "core", "database", "ssl_ciphers", default=None
)
@property
def sha2_password(self) -> bool:
return CoreConfig.get_config_field(
@ -202,6 +260,53 @@ class DatabaseConfig:
self.__config, "core", "database", "memcached_host", default="localhost"
)
def create_ssl_context_if_enabled(self):
if not self.ssl_enabled:
return
no_ca = (
self.ssl_cafile is None
and self.ssl_capath is None
)
ctx = ssl.create_default_context(
cafile=self.ssl_cafile,
capath=self.ssl_capath,
)
ctx.check_hostname = not no_ca and self.ssl_verify_identity
if self.ssl_verify_cert is None:
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
elif isinstance(self.ssl_verify_cert, bool):
ctx.verify_mode = (
ssl.CERT_REQUIRED
if self.ssl_verify_cert
else ssl.CERT_NONE
)
elif isinstance(self.ssl_verify_cert, str):
value = self.ssl_verify_cert.lower()
if value in ("none", "0", "false", "no"):
ctx.verify_mode = ssl.CERT_NONE
elif value == "optional":
ctx.verify_mode = ssl.CERT_OPTIONAL
elif value in ("required", "1", "true", "yes"):
ctx.verify_mode = ssl.CERT_REQUIRED
else:
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
if self.ssl_cert:
ctx.load_cert_chain(
self.ssl_cert,
self.ssl_key,
self.ssl_key_password,
)
if self.ssl_ciphers:
ctx.set_ciphers(self.ssl_ciphers)
return ctx
class FrontendConfig:
def __init__(self, parent_config: "CoreConfig") -> None:
self.__config = parent_config


@ -1,8 +1,18 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
import asyncio
import os
from pathlib import Path
import threading
from logging.config import fileConfig
import yaml
from alembic import context
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from core.config import CoreConfig
from core.data.schema.base import metadata
# this is the Alembic Config object, which provides
@ -37,20 +47,29 @@ def run_migrations_offline():
script output.
"""
raise Exception('Not implemented or configured!')
raise Exception("Not implemented or configured!")
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
def do_run_migrations(connection: Connection) -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
In this scenario we need to create an Engine
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
@ -59,21 +78,42 @@ def run_migrations_online():
for override in overrides:
ini_section[override] = overrides[override]
connectable = engine_from_config(
core_config = CoreConfig()
with (Path("../../..") / os.environ["ARTEMIS_CFG_DIR"] / "core.yaml").open(encoding="utf-8") as f:
core_config.update(yaml.safe_load(f))
connectable = async_engine_from_config(
ini_section,
prefix='sqlalchemy.',
poolclass=pool.NullPool)
poolclass=pool.NullPool,
connect_args={
"charset": "utf8mb4",
"ssl": core_config.database.create_ssl_context_if_enabled(),
}
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online():
try:
loop = asyncio.get_running_loop()
except RuntimeError:
# there's no event loop
asyncio.run(run_async_migrations())
else:
# there's currently an event loop and trying to wait for a coroutine
# to finish without using `await` is pretty wormy. nested event loops
# are explicitly forbidden by asyncio.
#
# take the easy way out, spawn it in another thread.
thread = threading.Thread(target=asyncio.run, args=(run_async_migrations(),))
thread.start()
thread.join()
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()


@ -0,0 +1,122 @@
"""chuni_ui_overhaul
Revision ID: 41f77ef50588
Revises: d8cd1fa04c2a
Create Date: 2024-11-02 13:27:45.839787
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '41f77ef50588'
down_revision = 'd8cd1fa04c2a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('chuni_static_avatar', sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True))
op.add_column('chuni_static_avatar', sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True))
op.add_column('chuni_static_avatar', sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True))
op.create_table('chuni_static_character',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('characterId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('worksName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('rareType', mysql.INTEGER(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('imagePath1', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath2', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath3', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_character_uk', 'chuni_static_character', ['version', 'characterId'], unique=True)
op.create_table('chuni_static_map_icon',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('mapIconId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('iconPath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_mapicon_uk', 'chuni_static_map_icon', ['version', 'mapIconId'], unique=True)
op.create_table('chuni_static_nameplate',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('nameplateId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('texturePath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_nameplate_uk', 'chuni_static_nameplate', ['version', 'nameplateId'], unique=True)
op.create_table('chuni_static_trophy',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('trophyId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('rareType', mysql.TINYINT(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_trophy_uk', 'chuni_static_trophy', ['version', 'trophyId'], unique=True)
op.create_table('chuni_static_system_voice',
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
sa.Column('voiceId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
sa.Column('imagePath', mysql.VARCHAR(length=255), nullable=True),
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_collate='utf8mb4_general_ci',
mysql_default_charset='utf8mb4',
mysql_engine='InnoDB'
)
op.create_index('chuni_static_systemvoice_uk', 'chuni_static_system_voice', ['version', 'voiceId'], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('chuni_static_systemvoice_uk', table_name='chuni_static_system_voice')
op.drop_table('chuni_static_system_voice')
op.drop_index('chuni_static_trophy_uk', table_name='chuni_static_trophy')
op.drop_table('chuni_static_trophy')
op.drop_index('chuni_static_nameplate_uk', table_name='chuni_static_nameplate')
op.drop_table('chuni_static_nameplate')
op.drop_index('chuni_static_mapicon_uk', table_name='chuni_static_map_icon')
op.drop_table('chuni_static_map_icon')
op.drop_index('chuni_static_character_uk', table_name='chuni_static_character')
op.drop_table('chuni_static_character')
op.drop_column('chuni_static_avatar', 'defaultHave')
op.drop_column('chuni_static_avatar', 'isEnabled')
op.drop_column('chuni_static_avatar', 'sortName')
# ### end Alembic commands ###


@ -1,54 +1,70 @@
import logging, coloredlogs
from typing import Optional
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine
from logging.handlers import TimedRotatingFileHandler
import logging
import os
import secrets, string
import bcrypt
import secrets
import ssl
import string
import warnings
from hashlib import sha256
from logging.handlers import TimedRotatingFileHandler
from typing import Any, ClassVar, Optional
import alembic.config
import glob
import bcrypt
import coloredlogs
import pymysql.err
from sqlalchemy.ext.asyncio import (
AsyncEngine,
AsyncSession,
create_async_engine,
)
from core.config import CoreConfig
from core.data.schema import *
from core.utils import Utils
from core.data.schema import ArcadeData, BaseData, CardData, UserData, metadata
from core.utils import MISSING, Utils
class Data:
engine = None
session = None
user = None
arcade = None
card = None
base = None
engine: ClassVar[AsyncEngine] = MISSING
session: ClassVar[AsyncSession] = MISSING
user: ClassVar[UserData] = MISSING
arcade: ClassVar[ArcadeData] = MISSING
card: ClassVar[CardData] = MISSING
base: ClassVar[BaseData] = MISSING
def __init__(self, cfg: CoreConfig) -> None:
self.config = cfg
if self.config.database.sha2_password:
passwd = sha256(self.config.database.password.encode()).digest()
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
else:
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
if Data.engine is None:
Data.engine = create_engine(self.__url, pool_recycle=3600)
if Data.engine is MISSING:
Data.engine = create_async_engine(
self.__url,
pool_recycle=3600,
isolation_level="AUTOCOMMIT",
connect_args={
"charset": "utf8mb4",
"ssl": self.config.database.create_ssl_context_if_enabled(),
},
)
self.__engine = Data.engine
if Data.session is None:
s = sessionmaker(bind=Data.engine, autoflush=True, autocommit=True)
Data.session = scoped_session(s)
if Data.session is MISSING:
Data.session = AsyncSession(Data.engine, expire_on_commit=False)
if Data.user is None:
if Data.user is MISSING:
Data.user = UserData(self.config, self.session)
if Data.arcade is None:
if Data.arcade is MISSING:
Data.arcade = ArcadeData(self.config, self.session)
if Data.card is None:
if Data.card is MISSING:
Data.card = CardData(self.config, self.session)
if Data.base is None:
if Data.base is MISSING:
Data.base = BaseData(self.config, self.session)
self.logger = logging.getLogger("database")
@ -94,40 +110,73 @@ class Data:
alembic.config.main(argv=alembicArgs)
os.chdir(old_dir)
def create_database(self):
async def create_database(self):
self.logger.info("Creating databases...")
metadata.create_all(
self.engine,
checkfirst=True,
)
for _, mod in Utils.get_all_titles().items():
if hasattr(mod, "database"):
mod.database(self.config)
metadata.create_all(
self.engine,
checkfirst=True,
)
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
# Stamp the end revision as if alembic had created it, so it can take off after this.
self.__alembic_cmd(
"stamp",
"head",
)
async with self.engine.begin() as conn:
await conn.run_sync(metadata.create_all, checkfirst=True)
def schema_upgrade(self, ver: str = None):
self.__alembic_cmd(
"upgrade",
"head" if not ver else ver,
)
for _, mod in Utils.get_all_titles().items():
if hasattr(mod, "database"):
mod.database(self.config)
await conn.run_sync(metadata.create_all, checkfirst=True)
# Stamp the end revision as if alembic had created it, so it can take off after this.
self.__alembic_cmd(
"stamp",
"head",
)
def schema_upgrade(self, ver: Optional[str] = None):
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
self.__alembic_cmd(
"upgrade",
"head" if not ver else ver,
)
def schema_downgrade(self, ver: str):
self.__alembic_cmd(
"downgrade",
ver,
)
with warnings.catch_warnings():
# SQLAlchemy will generate a nice primary key constraint name, but in
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
# custom primary key name is generated, a warning is emitted from pymysql,
# which we don't care about. Other warnings may be helpful though, don't
# suppress everything.
warnings.filterwarnings(
action="ignore",
message=r"Name '(.+)' ignored for PRIMARY key\.",
category=pymysql.err.Warning,
)
async def create_owner(self, email: Optional[str] = None, code: Optional[str] = "00000000000000000000") -> None:
self.__alembic_cmd(
"downgrade",
ver,
)
async def create_owner(self, email: Optional[str] = None, code: str = "00000000000000000000") -> None:
pw = "".join(
secrets.choice(string.ascii_letters + string.digits) for i in range(20)
)
@ -150,12 +199,12 @@ class Data:
async def migrate(self) -> None:
exist = await self.base.execute("SELECT * FROM alembic_version")
if exist is not None:
self.logger.warn("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
self.logger.warning("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
return
self.logger.info("Upgrading to latest with legacy system")
if not await self.legacy_upgrade():
self.logger.warn("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
self.logger.warning("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
return
self.logger.info("Done")


@ -1,16 +1,16 @@
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, and_, or_
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean, JSON
from sqlalchemy.sql import func, select
import re
from typing import List, Optional
from sqlalchemy import Column, Table, and_, or_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
import re
from sqlalchemy.sql import func, select
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
from sqlalchemy.types import JSON, Boolean, Integer, String
from core.data.schema.base import BaseData, metadata
from core.const import *
arcade = Table(
arcade: Table = Table(
"arcade",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -26,7 +26,7 @@ arcade = Table(
mysql_charset="utf8mb4",
)
machine = Table(
machine: Table = Table(
"machine",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -47,7 +47,7 @@ machine = Table(
mysql_charset="utf8mb4",
)
arcade_owner = Table(
arcade_owner: Table = Table(
"arcade_owner",
metadata,
Column(
@ -69,7 +69,7 @@ arcade_owner = Table(
class ArcadeData(BaseData):
async def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
async def get_machine(self, serial: Optional[str] = None, id: Optional[int] = None) -> Optional[Row]:
if serial is not None:
serial = serial.replace("-", "")
if len(serial) == 11:
@ -98,8 +98,8 @@ class ArcadeData(BaseData):
self,
arcade_id: int,
serial: str = "",
board: str = None,
game: str = None,
board: Optional[str] = None,
game: Optional[str] = None,
is_cab: bool = False,
) -> Optional[int]:
if not arcade_id:
@ -150,8 +150,8 @@ class ArcadeData(BaseData):
async def create_arcade(
self,
name: str = None,
nickname: str = None,
name: Optional[str] = None,
nickname: Optional[str] = None,
country: str = "JPN",
country_id: int = 1,
state: str = "",


@ -1,22 +1,23 @@
import asyncio
import json
import logging
from random import randrange
from typing import Any, Optional, Dict, List
from typing import Any, Dict, List, Optional
from sqlalchemy import Column, MetaData, Table
from sqlalchemy.engine import Row
from sqlalchemy.engine.cursor import CursorResult
from sqlalchemy.engine.base import Connection
from sqlalchemy.sql import text, func, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, INTEGER, TEXT
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.schema import ForeignKey
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, text
from sqlalchemy.types import INTEGER, JSON, TEXT, TIMESTAMP, Integer, String
from core.config import CoreConfig
metadata = MetaData()
event_log = Table(
event_log: Table = Table(
"event_log",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -37,7 +38,7 @@ event_log = Table(
class BaseData:
def __init__(self, cfg: CoreConfig, conn: Connection) -> None:
def __init__(self, cfg: CoreConfig, conn: AsyncSession) -> None:
self.config = cfg
self.conn = conn
self.logger = logging.getLogger("database")
@ -47,7 +48,7 @@ class BaseData:
try:
self.logger.debug(f"SQL Execute: {''.join(str(sql).splitlines())}")
res = self.conn.execute(text(sql), opts)
res = await self.conn.execute(text(sql), opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
@ -59,7 +60,7 @@ class BaseData:
except Exception:
try:
res = self.conn.execute(sql, opts)
res = await self.conn.execute(sql, opts)
except SQLAlchemyError as e:
self.logger.error(f"SQLAlchemy error {e}")
@ -83,7 +84,7 @@ class BaseData:
async def log_event(
self, system: str, type: str, severity: int, message: str, details: Dict = {}, user: int = None,
arcade: int = None, machine: int = None, ip: str = None, game: str = None, version: str = None
arcade: int = None, machine: int = None, ip: Optional[str] = None, game: Optional[str] = None, version: Optional[str] = None
) -> Optional[int]:
sql = event_log.insert().values(
system=system,


@ -1,13 +1,14 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP, BIGINT, VARCHAR
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.sql import func
from sqlalchemy import Column, Table, UniqueConstraint
from sqlalchemy.engine import Row
from sqlalchemy.sql import func
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.types import BIGINT, TIMESTAMP, VARCHAR, Boolean, Integer, String
from core.data.schema.base import BaseData, metadata
aime_card = Table(
aime_card: Table = Table(
"aime_card",
metadata,
Column("id", Integer, primary_key=True, nullable=False),


@ -1,15 +1,15 @@
from typing import Optional, List
from sqlalchemy import Table, Column
from sqlalchemy.types import Integer, String, TIMESTAMP
from sqlalchemy.sql import func
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from typing import List, Optional
import bcrypt
from sqlalchemy import Column, Table
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy.types import TIMESTAMP, Integer, String
from core.data.schema.base import BaseData, metadata
aime_user = Table(
aime_user: Table = Table(
"aime_user",
metadata,
Column("id", Integer, nullable=False, primary_key=True, autoincrement=True),
@ -26,10 +26,10 @@ aime_user = Table(
class UserData(BaseData):
async def create_user(
self,
id: int = None,
username: str = None,
email: str = None,
password: str = None,
id: Optional[int] = None,
username: Optional[str] = None,
email: Optional[str] = None,
password: Optional[str] = None,
permission: int = 1,
) -> Optional[int]:
if id is None:


@ -1,18 +1,47 @@
from typing import Dict, Any, Optional
from types import ModuleType
from starlette.requests import Request
import logging
import importlib
from os import walk
import jwt
import logging
from base64 import b64decode
from datetime import datetime, timezone
from os import walk
from types import ModuleType
from typing import Any, Dict, Optional
import jwt
from starlette.requests import Request
from .config import CoreConfig
class _MissingSentinel:
__slots__: tuple[str, ...] = ()
def __eq__(self, other) -> bool:
return False
def __bool__(self) -> bool:
return False
def __hash__(self) -> int:
return 0
def __repr__(self):
return "..."
MISSING: Any = _MissingSentinel()
"""This is different from `None` in that its type is `Any`, and so it can be used
as a placeholder for values that are *definitely* going to be initialized,
so they don't have to be typed as `T | None`, which makes type checkers
angry when an attribute is accessed.
This can also be used for when `None` has actual meaning as a value, and so a
separate value is needed to mean "unset"."""
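A brief aside (not part of this commit): the sentinel is falsy and never compares equal to anything, so callers are expected to test identity with `is MISSING` rather than equality. A minimal sketch, assuming `core.utils` is importable:

```python
# Illustrative sketch only; demonstrates the sentinel semantics documented above.
from core.utils import MISSING

assert not MISSING                      # falsy, like None
assert (MISSING == MISSING) is False    # __eq__ always returns False
assert MISSING != None                  # ...so it never "equals" None either

engine = MISSING                        # "will definitely be assigned later"
if engine is MISSING:                   # identity is the intended check
    print("engine has not been initialized yet")
```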
class Utils:
real_title_port = None
real_title_port_ssl = None
@classmethod
def get_all_titles(cls) -> Dict[str, ModuleType]:
ret: Dict[str, Any] = {}
@ -36,27 +65,56 @@ class Utils:
def get_ip_addr(cls, req: Request) -> str:
ip = req.headers.get("x-forwarded-for", req.client.host)
return ip.split(", ")[0]
@classmethod
def get_title_port(cls, cfg: CoreConfig):
if cls.real_title_port is not None: return cls.real_title_port
if cls.real_title_port is not None:
return cls.real_title_port
cls.real_title_port = (
cfg.server.proxy_port
if cfg.server.is_using_proxy and cfg.server.proxy_port
else cfg.server.port
)
cls.real_title_port = cfg.server.proxy_port if cfg.server.is_using_proxy and cfg.server.proxy_port else cfg.server.port
return cls.real_title_port
@classmethod
def get_title_port_ssl(cls, cfg: CoreConfig):
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
if cls.real_title_port_ssl is not None:
return cls.real_title_port_ssl
cls.real_title_port_ssl = (
cfg.server.proxy_port_ssl
if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl
else 443
)
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else 443
return cls.real_title_port_ssl
def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str, b64_secret: str, exp_seconds: int = 86400, err_logger: str = 'aimedb') -> Optional[str]:
def create_sega_auth_key(
aime_id: int,
game: str,
place_id: int,
keychip_id: str,
b64_secret: str,
exp_seconds: int = 86400,
err_logger: str = "aimedb",
) -> Optional[str]:
logger = logging.getLogger(err_logger)
try:
return jwt.encode({ "aime_id": aime_id, "game": game, "place_id": place_id, "keychip_id": keychip_id, "exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds }, b64decode(b64_secret), algorithm="HS256")
return jwt.encode(
{
"aime_id": aime_id,
"game": game,
"place_id": place_id,
"keychip_id": keychip_id,
"exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds,
},
b64decode(b64_secret),
algorithm="HS256",
)
except jwt.InvalidKeyError:
logger.error("Failed to encode Sega Auth Key because the secret is invalid!")
return None
@ -64,10 +122,19 @@ def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str
logger.error(f"Unknown exception occoured when encoding Sega Auth Key! {e}")
return None
def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb') -> Optional[Dict]:
def decode_sega_auth_key(
token: str, b64_secret: str, err_logger: str = "aimedb"
) -> Optional[Dict]:
logger = logging.getLogger(err_logger)
try:
return jwt.decode(token, "secret", b64decode(b64_secret), algorithms=["HS256"], options={"verify_signature": True})
return jwt.decode(
token,
"secret",
b64decode(b64_secret),
algorithms=["HS256"],
options={"verify_signature": True},
)
except jwt.ExpiredSignatureError:
logger.error("Sega Auth Key failed to validate due to an expired signature!")
return None
@ -83,4 +150,3 @@ def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb'
except Exception as e:
logger.error(f"Unknown exception occoured when decoding Sega Auth Key! {e}")
return None


@ -1,14 +1,15 @@
#!/usr/bin/env python3
import argparse
import logging
from os import mkdir, path, access, W_OK, environ
import yaml
import asyncio
import logging
from os import W_OK, access, environ, mkdir, path
import yaml
from core.data import Data
from core.config import CoreConfig
from core.data import Data
if __name__ == "__main__":
async def main():
parser = argparse.ArgumentParser(description="Database utilities")
parser.add_argument(
"--config", "-c", type=str, help="Config folder to use", default="config"
@ -43,10 +44,8 @@ if __name__ == "__main__":
data = Data(cfg)
loop = asyncio.get_event_loop()
if args.action == "create":
data.create_database()
await data.create_database()
elif args.action == "upgrade":
data.schema_upgrade(args.version)
@ -58,16 +57,20 @@ if __name__ == "__main__":
data.schema_downgrade(args.version)
elif args.action == "create-owner":
loop.run_until_complete(data.create_owner(args.email, args.access_code))
await data.create_owner(args.email, args.access_code)
elif args.action == "migrate":
loop.run_until_complete(data.migrate())
await data.migrate()
elif args.action == "create-revision":
loop.run_until_complete(data.create_revision(args.message))
await data.create_revision(args.message)
elif args.action == "create-autorevision":
loop.run_until_complete(data.create_revision_auto(args.message))
await data.create_revision_auto(args.message)
else:
logging.getLogger("database").info(f"Unknown action {args.action}")
if __name__ == "__main__":
asyncio.run(main())


@ -26,6 +26,7 @@
- `name`: Name of the database the server should expect. Default `aime`
- `port`: Port the database server is listening on. Default `3306`
- `protocol`: Protocol used in the connection string, e.g. `mysql` would result in `mysql://...`. Default `mysql`
- `ssl_enabled`: Enforce SSL to be used in the connection string. Default `False`
- `sha2_password`: Whether or not the password in the connection string should be hashed via SHA2. Default `False`
- `loglevel`: Logging level for the database. Default `info`
- `memcached_host`: Host of the memcached server. Default `localhost`
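For reference, a minimal sketch (not part of this change) of how the SSL options above are consumed: `create_ssl_context_if_enabled()` returns `None` unless `ssl_enabled` is set, and the resulting `ssl.SSLContext` is handed to the async engine through `connect_args`. The `config/core.yaml` path is an assumption.

```python
# Sketch: build an async engine using the database/SSL settings described above.
import yaml
from sqlalchemy.ext.asyncio import create_async_engine

from core.config import CoreConfig

cfg = CoreConfig()
with open("config/core.yaml", encoding="utf-8") as f:
    cfg.update(yaml.safe_load(f))

db = cfg.database
engine = create_async_engine(
    f"{db.protocol}+aiomysql://{db.username}:{db.password}"
    f"@{db.host}:{db.port}/{db.name}",
    connect_args={
        "charset": "utf8mb4",
        # None when ssl_enabled is False; an ssl.SSLContext otherwise.
        "ssl": db.create_ssl_context_if_enabled(),
    },
)
```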


@ -77,20 +77,21 @@ In order to use the importer locate your game installation folder and execute:
python read.py --game SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
```
The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accessories.
The importer for Chunithm will import: Events, Music, Charge Items, Avatar Accessories, Nameplates, Characters, Trophies, Map Icons, and System Voices.
### Config
Config file is located in `config/chuni.yaml`.
| Option | Info |
|------------------|---------------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this one by default. |
| `use_login_bonus`| This is used to enable the login bonuses |
| `stock_tickets` | If this is set, specifies tickets to auto-stock at login. Format is a comma-delimited list of IDs. Defaults to None |
| `stock_count` | Ignored if stock_tickets is not specified. Number to stock of each ticket. Defaults to 99 |
| `crypto` | This option is used to enable the TLS Encryption |
| Option | Info |
|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------|
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
| `name` | If this is set, all players that are not on a team will use this one by default. |
| `use_login_bonus` | This is used to enable the login bonuses |
| `stock_tickets` | If this is set, specifies tickets to auto-stock at login. Format is a comma-delimited list of IDs. Defaults to None |
| `stock_count` | Ignored if stock_tickets is not specified. Number to stock of each ticket. Defaults to 99 |
| `forced_item_unlocks` | Frontend UI customization overrides that allow all items of given types to be used (instead of just those unlocked/purchased by the user) |
| `crypto` | This option is used to enable the TLS Encryption |
If you would like to use network encryption, add the keys to the `keys` section under `crypto`, where the key
@ -153,12 +154,15 @@ INSERT INTO aime.chuni_profile_team (teamName) VALUES (<teamName>);
Team names can be regular ASCII, and they will be displayed ingame.
### Favorite songs
You can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
Favorites can be set through the Frontend Web UI for songs previously played. Alternatively, you can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
```sql
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user>, <version>, <songId>, 1);
```
The songId is based on the actual ID within your version of Chunithm.
### Profile Customization
The Frontend Web UI supports configuration of the userbox, avatar (NEW!! and newer), map icon (AMAZON and newer), and system voice (AMAZON and newer).
## crossbeats REV.


@ -12,7 +12,18 @@ mods:
# note: quantity is not refreshed on "continue" after set - only on subsequent login
stock_tickets:
stock_count: 99
# Allow use of all available customization items in frontend web ui
# note: This effectively makes every available item appear to be in the user's inventory. It does _not_ override the "disableFlag" setting on individual items
# warning: This can result in pushing a lot of data, especially the userbox items. Recommended for local network use only.
forced_item_unlocks:
map_icons: False
system_voices: False
avatar_accessories: False
nameplates: False
trophies: False
character_icons: False
version:
11:
rom: 2.00.00


@ -27,6 +27,7 @@ database:
name: "aime"
port: 3306
protocol: "mysql"
ssl_enabled: False
sha2_password: False
loglevel: "info"
enable_memcached: True

read.py

@ -1,16 +1,16 @@
#!/usr/bin/env python3
import argparse
import re
import os
import yaml
from os import path
import logging
import coloredlogs
import asyncio
import logging
import os
import re
from logging.handlers import TimedRotatingFileHandler
from os import path
from typing import List, Optional
import coloredlogs
import yaml
from core import CoreConfig, Utils
@ -44,7 +44,7 @@ class BaseReader:
pass
if __name__ == "__main__":
async def main():
parser = argparse.ArgumentParser(description="Import Game Information")
parser.add_argument(
"--game",
@ -140,8 +140,12 @@ if __name__ == "__main__":
for dir, mod in titles.items():
if args.game in mod.game_codes:
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
loop = asyncio.get_event_loop()
loop.run_until_complete(handler.read())
await handler.read()
logger.info("Done")
if __name__ == "__main__":
asyncio.run(main())


@ -3,7 +3,7 @@ wheel
pytz
pyyaml
sqlalchemy==1.4.46
mysqlclient
aiomysql
pyopenssl
service_identity
PyCryptodome
@ -21,4 +21,4 @@ starlette
asyncio
uvicorn
alembic
python-multipart
python-multipart


@ -1,16 +1,16 @@
import logging
import itertools
import json
import logging
from datetime import datetime, timedelta
from time import strftime
from typing import Any, Dict, List
import pytz
from typing import Dict, Any, List
from core.config import CoreConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.config import ChuniConfig
SCORE_BUFFER = {}
from titles.chuni.const import ChuniConstants, ItemKind
from titles.chuni.database import ChuniData
class ChuniBase:
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
@ -43,7 +43,7 @@ class ChuniBase:
user_id,
{
"itemId": ticket.strip(),
"itemKind": 5,
"itemKind": ItemKind.TICKET.value,
"stock": self.game_cfg.mods.stock_count,
"isValid": True,
},
@ -116,7 +116,7 @@ class ChuniBase:
user_id,
{
"itemId": login_item["presentId"],
"itemKind": 6,
"itemKind": ItemKind.PRESENT.value,
"stock": login_item["itemNum"],
"isValid": True,
},
@ -277,35 +277,39 @@ class ChuniBase:
}
async def handle_get_user_character_api_request(self, data: Dict) -> Dict:
characters = await self.data.item.get_characters(data["userId"])
if characters is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
# add one to the limit so we know if there's a next page of items
rows = await self.data.item.get_characters(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userCharacterList": [],
}
character_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(characters)):
tmp = characters[x]._asdict()
tmp.pop("user")
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("id")
tmp.pop("user")
character_list.append(tmp)
if len(character_list) >= max_ct:
break
if len(characters) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"length": len(character_list),
"nextIndex": next_idx,
"userCharacterList": character_list,
@ -335,29 +339,31 @@ class ChuniBase:
}
async def handle_get_user_course_api_request(self, data: Dict) -> Dict:
user_course_list = await self.data.score.get_courses(data["userId"])
if user_course_list is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.score.get_courses(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userCourseList": [],
}
course_list = []
next_idx = int(data.get("nextIndex", 0))
max_ct = int(data.get("maxCount", 300))
for x in range(next_idx, len(user_course_list)):
tmp = user_course_list[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
course_list.append(tmp)
if len(user_course_list) >= max_ct:
break
if len(user_course_list) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
@ -425,75 +431,94 @@ class ChuniBase:
}
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
rival_id = data["rivalId"]
next_index = int(data["nextIndex"])
max_count = int(data["maxCount"])
user_rival_music_list = []
user_id = int(data["userId"])
rival_id = int(data["rivalId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rival_levels = [int(x["level"]) for x in data["userRivalMusicLevelList"]]
# Fetch all the rival music entries for the user
all_entries = await self.data.score.get_rival_music(rival_id)
rows = await self.data.score.get_scores(
rival_id,
levels=rival_levels,
limit=max_ct + 1,
offset=next_idx,
)
# Process the entries based on max_count and nextIndex
for music in all_entries:
music_id = music["musicId"]
level = music["level"]
score = music["scoreMax"]
rank = music["scoreRank"]
if rows is None or len(rows) == 0:
return {
"userId": user_id,
"rivalId": rival_id,
"nextIndex": -1,
"userRivalMusicList": [],
}
# Create a music entry for the current music_id if it's unique
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
if music_entry is None:
music_entry = {
"musicId": music_id,
"length": 0,
"userRivalMusicDetailList": []
}
user_rival_music_list.append(music_entry)
music_details = [x._asdict() for x in rows]
returned_music_details_count = 0
music_list = []
# Create a level entry for the current level if it's unique or has a higher score
level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
if level_entry is None:
level_entry = {
"level": level,
"scoreMax": score,
"scoreRank": rank
}
music_entry["userRivalMusicDetailList"].append(level_entry)
elif score > level_entry["scoreMax"]:
level_entry["scoreMax"] = score
level_entry["scoreRank"] = rank
# note that itertools.groupby will only work on sorted keys, which is already sorted by
# the query in get_scores
for music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
details: list[dict[Any, Any]] = [
{"level": d["level"], "scoreMax": d["scoreMax"]}
for d in details_iter
]
# Calculate the length for each "musicId" by counting the unique levels
for music_entry in user_rival_music_list:
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
music_list.append({"musicId": music_id, "length": len(details), "userMusicDetailList": details})
returned_music_details_count += len(details)
# Prepare the result dictionary with user rival music data
result = {
"userId": data["userId"],
"rivalId": data["rivalId"],
"nextIndex": str(next_index + len(user_rival_music_list[next_index: next_index + max_count]) if max_count <= len(user_rival_music_list[next_index: next_index + max_count]) else -1),
"userRivalMusicList": user_rival_music_list[next_index: next_index + max_count]
if len(music_list) >= max_ct:
break
# if we returned fewer PBs than we originally asked for from the database, that means
# we queried for the PBs of max_ct + 1 songs.
if returned_music_details_count < len(rows):
next_idx += max_ct
else:
next_idx = -1
return {
"userId": user_id,
"rivalId": rival_id,
"length": len(music_list),
"nextIndex": next_idx,
"userRivalMusicList": music_list,
}
return result
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
kind = int(data["kind"])
is_all_favorite_item = str(data["isAllFavoriteItem"]) == "true"
user_fav_item_list = []
# still needs to be implemented on WebUI
# 1: Music, 2: User, 3: Character
fav_list = await self.data.item.get_all_favorites(
data["userId"], self.version, fav_kind=int(data["kind"])
rows = await self.data.item.get_all_favorites(
user_id,
self.version,
fav_kind=kind,
limit=max_ct + 1,
offset=next_idx,
)
if fav_list is not None:
for fav in fav_list:
if rows is not None:
for fav in rows[:max_ct]:
user_fav_item_list.append({"id": fav["favId"]})
if rows is None or len(rows) <= max_ct:
next_idx = -1
else:
next_idx += max_ct
return {
"userId": data["userId"],
"userId": user_id,
"length": len(user_fav_item_list),
"kind": data["kind"],
"nextIndex": -1,
"kind": kind,
"nextIndex": next_idx,
"userFavoriteItemList": user_fav_item_list,
}
@ -505,36 +530,39 @@ class ChuniBase:
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = int(int(data["nextIndex"]) / 10000000000)
next_idx = int(int(data["nextIndex"]) % 10000000000)
user_item_list = await self.data.item.get_items(data["userId"], kind)
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
if user_item_list is None or len(user_item_list) == 0:
kind = next_idx // 10000000000
next_idx = next_idx % 10000000000
rows = await self.data.item.get_items(
user_id, kind, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": -1,
"itemKind": kind,
"userItemList": [],
}
items: List[Dict[str, Any]] = []
for i in range(next_idx, len(user_item_list)):
tmp = user_item_list[i]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
if len(items) >= int(data["maxCount"]):
break
xout = kind * 10000000000 + next_idx + len(items)
if len(items) < int(data["maxCount"]):
next_idx = 0
if len(rows) > max_ct:
next_idx = kind * 10000000000 + next_idx + max_ct
else:
next_idx = xout
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"itemKind": kind,
"length": len(items),
@ -586,62 +614,55 @@ class ChuniBase:
}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
music_detail = await self.data.score.get_scores(data["userId"])
if music_detail is None:
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.score.get_scores(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userMusicList": [], # 240
}
song_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
music_details = [x._asdict() for x in rows]
returned_music_details_count = 0
music_list = []
for x in range(next_idx, len(music_detail)):
found = False
tmp = music_detail[x]._asdict()
tmp.pop("user")
tmp.pop("id")
# note that itertools.groupby will only work on sorted keys, which is already sorted by
# the query in get_scores
for _music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
details: list[dict[Any, Any]] = []
for song in song_list:
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
found = True
song["userMusicDetailList"].append(tmp)
song["length"] = len(song["userMusicDetailList"])
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
for d in details_iter:
d.pop("id")
d.pop("user")
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
if not found and tmp["musicId"] not in score_buf:
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
score_buf.append(tmp["musicId"])
SCORE_BUFFER[str(data["userId"])] = score_buf
details.append(d)
if len(song_list) >= max_ct:
music_list.append({"length": len(details), "userMusicDetailList": details})
returned_music_details_count += len(details)
if len(music_list) >= max_ct:
break
for songIdx in range(len(song_list)):
for recordIdx in range(x+1, len(music_detail)):
if song_list[songIdx]["userMusicDetailList"][0]["musicId"] == music_detail[recordIdx]["musicId"]:
music = music_detail[recordIdx]._asdict()
music.pop("user")
music.pop("id")
song_list[songIdx]["userMusicDetailList"].append(music)
song_list[songIdx]["length"] += 1
if len(song_list) >= max_ct:
next_idx += len(song_list)
# if we returned fewer PBs than we originally asked for from the database, that means
# we queried for the PBs of max_ct + 1 songs.
if returned_music_details_count < len(rows):
next_idx += max_ct
else:
next_idx = -1
SCORE_BUFFER[str(data["userId"])] = []
return {
"userId": data["userId"],
"length": len(song_list),
"userId": user_id,
"length": len(music_list),
"nextIndex": next_idx,
"userMusicList": song_list, # 240
"userMusicList": music_list,
}
async def handle_get_user_option_api_request(self, data: Dict) -> Dict:


@ -65,6 +65,17 @@ class ChuniModsConfig:
self.__config, "chuni", "mods", "stock_count", default=99
)
def forced_item_unlocks(self, item: str) -> bool:
forced_item_unlocks = CoreConfig.get_config_field(
self.__config, "chuni", "mods", "forced_item_unlocks", default={}
)
if item not in forced_item_unlocks.keys():
# default to no forced unlocks
return False
return forced_item_unlocks[item]
class ChuniVersionConfig:
def __init__(self, parent_config: "ChuniConfig") -> None:


@ -1,4 +1,4 @@
from enum import Enum
from enum import Enum, IntEnum
class ChuniConstants:
@ -81,13 +81,76 @@ class ChuniConstants:
return cls.VERSION_NAMES[ver]
class MapAreaConditionType(Enum):
UNLOCKED = 0
class MapAreaConditionType(IntEnum):
"""Condition types for the GetGameMapAreaConditionApi endpoint. Incomplete.
For the MAP_CLEARED/MAP_AREA_CLEARED/TROPHY_OBTAINED conditions, the conditionId
is the map/map area/trophy.
For the RANK_*/ALL_JUSTICE conditions, the conditionId is songId * 100 + difficultyId.
For example, Halcyon [ULTIMA] would be 173 * 100 + 4 = 17304.
"""
ALWAYS_UNLOCKED = 0
MAP_CLEARED = 1
MAP_AREA_CLEARED = 2
TROPHY_OBTAINED = 3
RANK_SSS = 19
RANK_SSP = 20
RANK_SS = 21
RANK_SP = 22
RANK_S = 23
ALL_JUSTICE = 28
class MapAreaConditionLogicalOperator(Enum):
AND = 1
OR = 2
class AvatarCategory(Enum):
WEAR = 1
HEAD = 2
FACE = 3
SKIN = 4
ITEM = 5
FRONT = 6
BACK = 7
class ItemKind(IntEnum):
NAMEPLATE = 1
FRAME = 2
"""
"Frame" is the background for the gauge/score/max combo display
shown during gameplay. This item cannot be equipped (as of LUMINOUS)
and is hardcoded to the current game's version.
"""
TROPHY = 3
SKILL = 4
TICKET = 5
"""A statue is also a ticket."""
PRESENT = 6
MUSIC_UNLOCK = 7
MAP_ICON = 8
SYSTEM_VOICE = 9
SYMBOL_CHAT = 10
AVATAR_ACCESSORY = 11
ULTIMA_UNLOCK = 12
"""This only applies to ULTIMA difficulties that are *not* unlocked by
SS-ing EXPERT+MASTER.
"""
class FavoriteItemKind(IntEnum):
MUSIC = 1
RIVAL = 2
CHARACTER = 3
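A quick illustration (not part of the diff) of the `conditionId` encoding documented in `MapAreaConditionType` above, using the docstring's own Halcyon [ULTIMA] example:

```python
# conditionId for RANK_*/ALL_JUSTICE conditions is songId * 100 + difficultyId.
from titles.chuni.const import MapAreaConditionType

HALCYON_SONG_ID = 173      # example IDs taken from the docstring above
ULTIMA_DIFFICULTY_ID = 4

condition_type = MapAreaConditionType.ALL_JUSTICE
condition_id = HALCYON_SONG_ID * 100 + ULTIMA_DIFFICULTY_ID
assert condition_id == 17304
```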


@ -3,6 +3,7 @@ from starlette.routing import Route, Mount
from starlette.requests import Request
from starlette.responses import Response, RedirectResponse
from starlette.staticfiles import StaticFiles
from sqlalchemy.engine import Row
from os import path
import yaml
import jinja2
@ -11,7 +12,7 @@ from core.frontend import FE_Base, UserSession
from core.config import CoreConfig
from .database import ChuniData
from .config import ChuniConfig
from .const import ChuniConstants
from .const import ChuniConstants, AvatarCategory, ItemKind
def pairwise(iterable):
@ -99,6 +100,12 @@ class ChuniFrontend(FE_Base):
Route("/{index}", self.render_GET_playlog, methods=['GET']),
]),
Route("/favorites", self.render_GET_favorites, methods=['GET']),
Route("/userbox", self.render_GET_userbox, methods=['GET']),
Route("/avatar", self.render_GET_avatar, methods=['GET']),
Route("/update.map-icon", self.update_map_icon, methods=['POST']),
Route("/update.system-voice", self.update_system_voice, methods=['POST']),
Route("/update.userbox", self.update_userbox, methods=['POST']),
Route("/update.avatar", self.update_avatar, methods=['POST']),
Route("/update.name", self.update_name, methods=['POST']),
Route("/update.favorite_music_playlog", self.update_favorite_music_playlog, methods=['POST']),
Route("/update.favorite_music_favorites", self.update_favorite_music_favorites, methods=['POST']),
@ -123,15 +130,28 @@ class ChuniFrontend(FE_Base):
usr_sesh.chunithm_version = versions[0]
profile = await self.data.profile.get_profile_data(usr_sesh.user_id, usr_sesh.chunithm_version)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# While map icons and system voices weren't present prior to AMAZON, we don't need to bother checking
# version here - it'll just end up being empty sets and the jinja will ignore the variables anyway.
map_icons, total_map_icons = await self.get_available_map_icons(version, profile)
system_voices, total_system_voices = await self.get_available_system_voices(version, profile)
resp = Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=usr_sesh.user_id,
user_id=user_id,
profile=profile,
version_list=ChuniConstants.VERSION_NAMES,
versions=versions,
cur_version=usr_sesh.chunithm_version
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
map_icons=map_icons,
system_voices=system_voices,
total_map_icons=total_map_icons,
total_system_voices=total_system_voices
), media_type="text/html; charset=utf-8")
if usr_sesh.chunithm_version >= 0:
@ -189,6 +209,8 @@ class ChuniFrontend(FE_Base):
profile=profile,
hot_list=hot_list,
base_list=base_list,
cur_version=usr_sesh.chunithm_version,
cur_version_name=ChuniConstants.game_ver_to_string(usr_sesh.chunithm_version)
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
@ -217,7 +239,9 @@ class ChuniFrontend(FE_Base):
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
playlog_count=0
playlog_count=0,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
playlog = await self.data.score.get_playlogs_limited(user_id, version, index, 20)
playlog_with_title = []
@ -257,6 +281,7 @@ class ChuniFrontend(FE_Base):
user_id=user_id,
playlog=playlog_with_title,
playlog_count=playlog_count,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
else:
@ -319,11 +344,354 @@ class ChuniFrontend(FE_Base):
user_id=user_id,
favorites_by_genre=favorites_by_genre,
favorites_count=favorites_count,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version)
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def get_available_map_icons(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_map_icons(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("map_icons")
user_map_icons = []
if not force_unlocked:
user_map_icons = await self.data.item.get_items(profile.user, ItemKind.MAP_ICON.value)
user_map_icons = [icon["itemId"] for icon in user_map_icons] + [profile.mapIconId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["mapIconId"] in user_map_icons:
item = dict()
item["id"] = row["mapIconId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
items[row["mapIconId"]] = item
return (items, len(rows))
async def get_available_system_voices(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_system_voices(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("system_voices")
user_system_voices = []
if not force_unlocked:
user_system_voices = await self.data.item.get_items(profile.user, ItemKind.SYSTEM_VOICE.value)
user_system_voices = [icon["itemId"] for icon in user_system_voices] + [profile.voiceId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["voiceId"] in user_system_voices:
item = dict()
item["id"] = row["voiceId"]
item["name"] = row["name"]
item["imagePath"] = path.splitext(row["imagePath"])[0] + ".png"
items[row["voiceId"]] = item
return (items, len(rows))
async def get_available_nameplates(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_nameplates(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("nameplates")
user_nameplates = []
if not force_unlocked:
user_nameplates = await self.data.item.get_items(profile.user, ItemKind.NAMEPLATE.value)
user_nameplates = [item["itemId"] for item in user_nameplates] + [profile.nameplateId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["nameplateId"] in user_nameplates:
item = dict()
item["id"] = row["nameplateId"]
item["name"] = row["name"]
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
items[row["nameplateId"]] = item
return (items, len(rows))
async def get_available_trophies(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_trophies(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("trophies")
user_trophies = []
if not force_unlocked:
user_trophies = await self.data.item.get_items(profile.user, ItemKind.TROPHY.value)
user_trophies = [item["itemId"] for item in user_trophies] + [profile.trophyId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["trophyId"] in user_trophies:
item = dict()
item["id"] = row["trophyId"]
item["name"] = row["name"]
item["rarity"] = row["rareType"]
items[row["trophyId"]] = item
return (items, len(rows))
async def get_available_characters(self, version: int, profile: Row) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_characters(version)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("character_icons")
user_characters = []
if not force_unlocked:
user_characters = await self.data.item.get_characters(profile.user)
user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId, profile.charaIllustId]
for row in rows:
if force_unlocked or row["defaultHave"] or row["characterId"] in user_characters:
item = dict()
item["id"] = row["characterId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["imagePath3"])[0] + ".png"
items[row["characterId"]] = item
return (items, len(rows))
async def get_available_avatar_items(self, version: int, category: AvatarCategory, user_unlocked_items: List[int]) -> (List[dict], int):
items = dict()
rows = await self.data.static.get_avatar_items(version, category.value)
if rows is None:
return (items, 0) # can only happen with old db
force_unlocked = self.game_cfg.mods.forced_item_unlocks("avatar_accessories")
for row in rows:
if force_unlocked or row["defaultHave"] or row["avatarAccessoryId"] in user_unlocked_items:
item = dict()
item["id"] = row["avatarAccessoryId"]
item["name"] = row["name"]
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
items[row["avatarAccessoryId"]] = item
return (items, len(rows))
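Each of the get_available_* helpers above follows the same shape: load the static rows for the selected version, keep a row when item unlocks are forced, when the row is owned by default, or when the player actually owns it, and hand back a dict keyed by item ID along with the total row count so the templates can render an owned/total figure. A condensed sketch of that shared pattern (hypothetical helper, not part of the class):

def filter_available(rows, force_unlocked: bool, owned_ids: set, id_col: str) -> tuple:
    # Generic form of the get_available_* helpers: keep a static row when unlocks are
    # forced, the row is owned by default, or the player owns the item, keyed by its ID.
    items = {}
    if rows is None:
        return items, 0  # old DB without the static table populated
    for row in rows:
        if force_unlocked or row["defaultHave"] or row[id_col] in owned_ids:
            items[row[id_col]] = {"id": row[id_col], "name": row["name"]}
    return items, len(rows)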
async def render_GET_userbox(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_userbox.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 0:
return RedirectResponse("/game/chuni/", 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# Get the user profile so we know how the userbox is currently configured
profile = await self.data.profile.get_profile_data(user_id, version)
# Build up lists of available userbox components
nameplates, total_nameplates = await self.get_available_nameplates(version, profile)
trophies, total_trophies = await self.get_available_trophies(version, profile)
characters, total_characters = await self.get_available_characters(version, profile)
# Get the user's team
team_name = "ARTEMiS"
if profile["teamId"]:
team = await self.data.profile.get_team_by_id(profile["teamId"])
team_name = team["teamName"]
# Figure out the rating color we should use (rank maps to the stylesheet)
rating = profile.playerRating / 100
rating_rank = 0
if rating >= 16:
rating_rank = 8
elif rating >= 15.25:
rating_rank = 7
elif rating >= 14.5:
rating_rank = 6
elif rating >= 13.25:
rating_rank = 5
elif rating >= 12:
rating_rank = 4
elif rating >= 10:
rating_rank = 3
elif rating >= 7:
rating_rank = 2
elif rating >= 4:
rating_rank = 1
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=user_id,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
profile=profile,
team_name=team_name,
rating_rank=rating_rank,
nameplates=nameplates,
trophies=trophies,
characters=characters,
total_nameplates=total_nameplates,
total_trophies=total_trophies,
total_characters=total_characters
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
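The rating_rank chain in render_GET_userbox above maps profile.playerRating / 100 onto the rank classes the stylesheet expects. The same mapping written as a threshold table, purely as an equivalent sketch (not code that exists in the handler):

# Thresholds copied from the chain above: 16.00+ maps to 8, down to 4.00+ mapping to 1, else 0.
RATING_RANK_THRESHOLDS = [
    (16.00, 8), (15.25, 7), (14.50, 6), (13.25, 5),
    (12.00, 4), (10.00, 3), (7.00, 2), (4.00, 1),
]

def rating_to_rank(rating: float) -> int:
    for threshold, rank in RATING_RANK_THRESHOLDS:
        if rating >= threshold:
            return rank
    return 0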
async def render_GET_avatar(self, request: Request) -> bytes:
template = self.environment.get_template(
"titles/chuni/templates/chuni_avatar.jinja"
)
usr_sesh = self.validate_session(request)
if not usr_sesh:
usr_sesh = UserSession()
if usr_sesh.user_id > 0:
if usr_sesh.chunithm_version < 11:
# Avatar configuration only for NEW!! and newer
return RedirectResponse("/game/chuni/", 303)
user_id = usr_sesh.user_id
version = usr_sesh.chunithm_version
# Get the user profile so we know what avatar items are currently in use
profile = await self.data.profile.get_profile_data(user_id, version)
# Get all the user avatar accessories so we know what to populate
user_accessories = await self.data.item.get_items(user_id, ItemKind.AVATAR_ACCESSORY.value)
user_accessories = [item["itemId"] for item in user_accessories] + \
[profile.avatarBack, profile.avatarItem, profile.avatarWear, \
profile.avatarFront, profile.avatarSkin, profile.avatarHead, profile.avatarFace]
# Build up available list of items for each avatar category
wears, total_wears = await self.get_available_avatar_items(version, AvatarCategory.WEAR, user_accessories)
faces, total_faces = await self.get_available_avatar_items(version, AvatarCategory.FACE, user_accessories)
heads, total_heads = await self.get_available_avatar_items(version, AvatarCategory.HEAD, user_accessories)
skins, total_skins = await self.get_available_avatar_items(version, AvatarCategory.SKIN, user_accessories)
items, total_items = await self.get_available_avatar_items(version, AvatarCategory.ITEM, user_accessories)
fronts, total_fronts = await self.get_available_avatar_items(version, AvatarCategory.FRONT, user_accessories)
backs, total_backs = await self.get_available_avatar_items(version, AvatarCategory.BACK, user_accessories)
return Response(template.render(
title=f"{self.core_config.server.name} | {self.nav_name}",
game_list=self.environment.globals["game_list"],
sesh=vars(usr_sesh),
user_id=user_id,
cur_version=version,
cur_version_name=ChuniConstants.game_ver_to_string(version),
profile=profile,
wears=wears,
faces=faces,
heads=heads,
skins=skins,
items=items,
fronts=fronts,
backs=backs,
total_wears=total_wears,
total_faces=total_faces,
total_heads=total_heads,
total_skins=total_skins,
total_items=total_items,
total_fronts=total_fronts,
total_backs=total_backs
), media_type="text/html; charset=utf-8")
else:
return RedirectResponse("/gate/", 303)
async def update_map_icon(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_map_icon: str = form_data.get("id")
if not new_map_icon:
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_map_icon(usr_sesh.user_id, usr_sesh.chunithm_version, new_map_icon):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/", 303)
async def update_system_voice(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_system_voice: str = form_data.get("id")
if not new_system_voice:
return RedirectResponse("/gate/?e=4", 303)
if not await self.data.profile.update_system_voice(usr_sesh.user_id, usr_sesh.chunithm_version, new_system_voice):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/", 303)
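update_map_icon and update_system_voice expect a form-encoded id field from an authenticated session and answer with a 303 redirect; the userbox template later in this commit posts to /game/chuni/update.map-icon and /game/chuni/update.system-voice via jQuery. A rough out-of-browser equivalent for testing, where the base URL and the session cookie name are deployment-specific assumptions:

import requests

# Hypothetical smoke test; ARTEMIS_SESSION is a placeholder cookie name, not the real one.
BASE_URL = "http://localhost:8080"
cookies = {"ARTEMIS_SESSION": "<token copied from a logged-in browser>"}

resp = requests.post(
    f"{BASE_URL}/game/chuni/update.map-icon",
    data={"id": 123},        # form field read by update_map_icon
    cookies=cookies,
    allow_redirects=False,   # the handler responds with a 303 redirect on success
)
print(resp.status_code, resp.headers.get("Location"))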
async def update_userbox(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_nameplate: str = form_data.get("nameplate")
new_trophy: str = form_data.get("trophy")
new_character: str = form_data.get("character")
if not new_nameplate or \
not new_trophy or \
not new_character:
return RedirectResponse("/game/chuni/userbox?e=4", 303)
if not await self.data.profile.update_userbox(usr_sesh.user_id, usr_sesh.chunithm_version, new_nameplate, new_trophy, new_character):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/userbox", 303)
async def update_avatar(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:
return RedirectResponse("/gate/", 303)
form_data = await request.form()
new_wear: str = form_data.get("wear")
new_face: str = form_data.get("face")
new_head: str = form_data.get("head")
new_skin: str = form_data.get("skin")
new_item: str = form_data.get("item")
new_front: str = form_data.get("front")
new_back: str = form_data.get("back")
if not new_wear or \
not new_face or \
not new_head or \
not new_skin or \
not new_item or \
not new_front or \
not new_back:
return RedirectResponse("/game/chuni/avatar?e=4", 303)
if not await self.data.profile.update_avatar(usr_sesh.user_id, usr_sesh.chunithm_version, new_wear, new_face, new_head, new_skin, new_item, new_front, new_back):
return RedirectResponse("/gate/?e=999", 303)
return RedirectResponse("/game/chuni/avatar", 303)
async def update_name(self, request: Request) -> bytes:
usr_sesh = self.validate_session(request)
if not usr_sesh:


4
titles/chuni/img/avatar/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore


4
titles/chuni/img/character/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

5
titles/chuni/img/jacket/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file and default unknown
!.gitignore
!unknown.png

4
titles/chuni/img/mapIcon/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

4
titles/chuni/img/nameplate/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore


View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -4,12 +4,14 @@ from random import randint
from typing import Dict
import pytz
from core.config import CoreConfig
from core.utils import Utils
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
from titles.chuni.base import ChuniBase
from titles.chuni.config import ChuniConfig
from titles.chuni.const import ChuniConstants
from titles.chuni.database import ChuniData
class ChuniNew(ChuniBase):
ITEM_TYPE = {"character": 20, "story": 21, "card": 22}
@ -285,35 +287,37 @@ class ChuniNew(ChuniBase):
}
async def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
user_print_list = await self.data.item.get_user_print_states(
data["userId"], has_completed=True
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
rows = await self.data.item.get_user_print_states(
user_id,
has_completed=True,
limit=max_ct + 1,
offset=next_idx,
)
if user_print_list is None:
if rows is None or len(rows) == 0:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": -1,
"userPrintedCardList": [],
}
print_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(user_print_list)):
tmp = user_print_list[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
print_list.append(tmp["cardId"])
if len(print_list) >= max_ct:
break
if len(print_list) >= max_ct:
next_idx = next_idx + max_ct
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
return {
"userId": data["userId"],
"userId": user_id,
"length": len(print_list),
"nextIndex": next_idx,
"userPrintedCardList": print_list,

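The reworked handler above requests max_ct + 1 rows so it can tell whether another page exists, and reports the end of the list with nextIndex = -1. A caller can therefore loop until that sentinel; a sketch using a hypothetical call_api coroutine for the title-server request:

async def fetch_all_printed_cards(call_api, user_id: int, page_size: int = 100) -> list:
    # Page through userPrintedCardList until the server signals the end with nextIndex == -1.
    cards, next_idx = [], 0
    while next_idx != -1:
        resp = await call_api("GetUserPrintedCardApi", {
            "userId": user_id,
            "nextIndex": next_idx,
            "maxCount": page_size,
        })
        cards.extend(resp["userPrintedCardList"])
        next_idx = resp["nextIndex"]
    return cards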
View File

@ -42,6 +42,12 @@ class ChuniReader(BaseReader):
if self.version >= ChuniConstants.VER_CHUNITHM_NEW:
we_diff = "5"
# character images could be stored anywhere across all the data dirs. Map them first
self.logger.info(f"Mapping DDS image files...")
dds_images = dict()
for dir in data_dirs:
self.map_dds_images(dds_images, f"{dir}/ddsImage")
for dir in data_dirs:
self.logger.info(f"Read from {dir}")
await self.read_events(f"{dir}/event")
@ -49,6 +55,11 @@ class ChuniReader(BaseReader):
await self.read_charges(f"{dir}/chargeItem")
await self.read_avatar(f"{dir}/avatarAccessory")
await self.read_login_bonus(f"{dir}/")
await self.read_nameplate(f"{dir}/namePlate")
await self.read_trophy(f"{dir}/trophy")
await self.read_character(f"{dir}/chara", dds_images)
await self.read_map_icon(f"{dir}/mapIcon")
await self.read_system_voice(f"{dir}/systemVoice")
async def read_login_bonus(self, root_dir: str) -> None:
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
@ -61,9 +72,8 @@ class ChuniReader(BaseReader):
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
is_enabled = (
True if xml_root.find("disableFlag").text == "false" else False
)
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_login_bonus_preset(
self.version, id, name, is_enabled
@ -175,16 +185,8 @@ class ChuniReader(BaseReader):
for jaketFile in xml_root.findall("jaketFile"): # nice typo, SEGA
jacket_path = jaketFile.find("path").text
# Convert the image to png and save it for use in the frontend
jacket_filename_src = f"{root}/{dir}/{jacket_path}"
(pre, ext) = path.splitext(jacket_path)
jacket_filename_dst = f"titles/chuni/img/jacket/{pre}.png"
if path.exists(jacket_filename_src) and not path.exists(jacket_filename_dst):
try:
im = Image.open(jacket_filename_src)
im.save(jacket_filename_dst)
except Exception:
self.logger.warning(f"Failed to convert {jacket_path} to png")
# Save off image for use in frontend
self.copy_image(jacket_path, f"{root}/{dir}", "titles/chuni/img/jacket/")
for fumens in xml_root.findall("fumens"):
for MusicFumenData in fumens.findall("MusicFumenData"):
@ -268,17 +270,212 @@ class ChuniReader(BaseReader):
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
category = xml_root.find("category").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
for image in xml_root.findall("image"):
iconPath = image.find("path").text
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/avatar/")
for texture in xml_root.findall("texture"):
texturePath = texture.find("path").text
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/avatar/")
result = await self.data.static.put_avatar(
self.version, id, name, category, iconPath, texturePath
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName
)
if result is not None:
self.logger.info(f"Inserted avatarAccessory {id}")
else:
self.logger.warning(f"Failed to insert avatarAccessory {id}")
async def read_nameplate(self, nameplate_dir: str) -> None:
for root, dirs, files in walk(nameplate_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/NamePlate.xml"):
with open(f"{root}/{dir}/NamePlate.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
for image in xml_root.findall("image"):
texturePath = image.find("path").text
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/nameplate/")
result = await self.data.static.put_nameplate(
self.version, id, name, texturePath, is_enabled, defaultHave, sortName
)
if result is not None:
self.logger.info(f"Inserted nameplate {id}")
else:
self.logger.warning(f"Failed to insert nameplate {id}")
async def read_trophy(self, trophy_dir: str) -> None:
for root, dirs, files in walk(trophy_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Trophy.xml"):
with open(f"{root}/{dir}/Trophy.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
rareType = xml_root.find("rareType").text
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
defaultHave = xml_root.find("defaultHave").text == 'true'
result = await self.data.static.put_trophy(
self.version, id, name, rareType, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted trophy {id}")
else:
self.logger.warning(f"Failed to insert trophy {id}")
async def read_character(self, chara_dir: str, dds_images: dict) -> None:
for root, dirs, files in walk(chara_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/Chara.xml"):
with open(f"{root}/{dir}/Chara.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for work in xml_root.findall("works"):
worksName = work.find("str").text
rareType = xml_root.find("rareType").text
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
# character images are not stored alongside
for image in xml_root.findall("defaultImages"):
imageKey = image.find("str").text
if imageKey in dds_images.keys():
(imageDir, imagePaths) = dds_images[imageKey]
imagePath1 = imagePaths[0] if len(imagePaths) > 0 else ""
imagePath2 = imagePaths[1] if len(imagePaths) > 1 else ""
imagePath3 = imagePaths[2] if len(imagePaths) > 2 else ""
# @note the third image is the image needed for the user box ui
if imagePath3:
self.copy_image(imagePath3, imageDir, "titles/chuni/img/character/")
else:
self.logger.warning(f"Character {id} only has {len(imagePaths)} images. Expected 3")
else:
self.logger.warning(f"Unable to locate character {id} images")
result = await self.data.static.put_character(
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted character {id}")
else:
self.logger.warning(f"Failed to insert character {id}")
async def read_map_icon(self, mapicon_dir: str) -> None:
for root, dirs, files in walk(mapicon_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/MapIcon.xml"):
with open(f"{root}/{dir}/MapIcon.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for image in xml_root.findall("image"):
iconPath = image.find("path").text
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/mapIcon/")
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_map_icon(
self.version, id, name, sortName, iconPath, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted map icon {id}")
else:
self.logger.warning(f"Failed to insert map icon {id}")
async def read_system_voice(self, voice_dir: str) -> None:
for root, dirs, files in walk(voice_dir):
for dir in dirs:
if path.exists(f"{root}/{dir}/SystemVoice.xml"):
with open(f"{root}/{dir}/SystemVoice.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
id = name.find("id").text
name = name.find("str").text
sortName = xml_root.find("sortName").text
for image in xml_root.findall("image"):
imagePath = image.find("path").text
self.copy_image(imagePath, f"{root}/{dir}", "titles/chuni/img/systemVoice/")
defaultHave = xml_root.find("defaultHave").text == 'true'
disableFlag = xml_root.find("disableFlag") # may not exist in older data
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
result = await self.data.static.put_system_voice(
self.version, id, name, sortName, imagePath, is_enabled, defaultHave
)
if result is not None:
self.logger.info(f"Inserted system voice {id}")
else:
self.logger.warning(f"Failed to insert system voice {id}")
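Every reader above repeats the same optional-flag check because disableFlag is absent from older data sets. A small shared helper could capture it; this is a sketch only, nothing like it exists in the reader:

import xml.etree.ElementTree as ET

def is_item_enabled(xml_root: ET.Element) -> bool:
    # disableFlag may not exist in older data; treat a missing flag as enabled.
    disable_flag = xml_root.find("disableFlag")
    return disable_flag is None or disable_flag.text == "false"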
def copy_image(self, filename: str, src_dir: str, dst_dir: str) -> None:
# Convert the image to png so we can easily display it in the frontend
file_src = path.join(src_dir, filename)
(basename, ext) = path.splitext(filename)
file_dst = path.join(dst_dir, basename) + ".png"
if path.exists(file_src) and not path.exists(file_dst):
try:
im = Image.open(file_src)
im.save(file_dst)
except Exception:
self.logger.warning(f"Failed to convert {filename} to png")
def map_dds_images(self, image_dict: dict, dds_dir: str) -> None:
for root, dirs, files in walk(dds_dir):
for dir in dirs:
directory = f"{root}/{dir}"
if path.exists(f"{directory}/DDSImage.xml"):
with open(f"{directory}/DDSImage.xml", "r", encoding='utf-8') as fp:
strdata = fp.read()
xml_root = ET.fromstring(strdata)
for name in xml_root.findall("name"):
name = name.find("str").text
images = []
i = 0
while xml_root.findall(f"ddsFile{i}"):
for ddsFile in xml_root.findall(f"ddsFile{i}"):
images += [ddsFile.find("path").text]
i += 1
image_dict[name] = (directory, images)

View File

@ -1,22 +1,22 @@
from typing import Dict, List, Optional
from sqlalchemy import (
Table,
Column,
UniqueConstraint,
PrimaryKeyConstraint,
Table,
UniqueConstraint,
and_,
delete,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import JSON, TIMESTAMP, Boolean, Integer, String
from core.data.schema import BaseData, metadata
character = Table(
character: Table = Table(
"chuni_item_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -40,7 +40,7 @@ character = Table(
mysql_charset="utf8mb4",
)
item = Table(
item: Table = Table(
"chuni_item_item",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -141,7 +141,7 @@ gacha = Table(
mysql_charset="utf8mb4",
)
print_state = Table(
print_state: Table = Table(
"chuni_item_print_state",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -210,7 +210,7 @@ login_bonus = Table(
mysql_charset="utf8mb4",
)
favorite = Table(
favorite: Table = Table(
"chuni_item_favorite",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -379,9 +379,14 @@ class ChuniItemData(BaseData):
return True if len(result.all()) else False
async def get_all_favorites(
self, user_id: int, version: int, fav_kind: int = 1
self,
user_id: int,
version: int,
fav_kind: int = 1,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = favorite.select(
sql = select(favorite).where(
and_(
favorite.c.version == version,
favorite.c.user == user_id,
@ -389,6 +394,13 @@ class ChuniItemData(BaseData):
)
)
if limit is not None or offset is not None:
sql = sql.order_by(favorite.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -488,9 +500,18 @@ class ChuniItemData(BaseData):
return None
return result.fetchone()
async def get_characters(self, user_id: int) -> Optional[List[Row]]:
async def get_characters(
self, user_id: int, limit: Optional[int] = None, offset: Optional[int] = None
) -> Optional[List[Row]]:
sql = select(character).where(character.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(character.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -509,13 +530,26 @@ class ChuniItemData(BaseData):
return None
return result.lastrowid
async def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
if kind is None:
sql = select(item).where(item.c.user == user_id)
else:
sql = select(item).where(
and_(item.c.user == user_id, item.c.itemKind == kind)
)
async def get_items(
self,
user_id: int,
kind: Optional[int] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
cond = item.c.user == user_id
if kind is not None:
cond &= item.c.itemKind == kind
sql = select(item).where(cond)
if limit is not None or offset is not None:
sql = sql.order_by(item.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -609,15 +643,26 @@ class ChuniItemData(BaseData):
return result.lastrowid
async def get_user_print_states(
self, aime_id: int, has_completed: bool = False
self,
aime_id: int,
has_completed: bool = False,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = print_state.select(
sql = select(print_state).where(
and_(
print_state.c.user == aime_id,
print_state.c.hasCompleted == has_completed,
)
)
if limit is not None or offset is not None:
sql = sql.order_by(print_state.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
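get_all_favorites, get_characters, get_items and get_user_print_states now share the same optional limit/offset pair, ordering by the row ID so pages stay stable between requests. A hypothetical caller inside an async handler, fetching the second page of a user's owned map icons twenty at a time:

# Fragment from inside an async method; ItemKind is the title's item-kind enum.
page = 1
icons = await self.data.item.get_items(
    user_id,
    kind=ItemKind.MAP_ICON.value,
    limit=20,
    offset=page * 20,
)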

View File

@ -439,6 +439,58 @@ class ChuniProfileData(BaseData):
return False
return True
async def update_map_icon(self, user_id: int, version: int, new_map_icon: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
mapIconId=new_map_icon
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} map icon")
return False
return True
async def update_system_voice(self, user_id: int, version: int, new_system_voice: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
voiceId=new_system_voice
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} system voice")
return False
return True
async def update_userbox(self, user_id: int, version: int, new_nameplate: int, new_trophy: int, new_character: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
nameplateId=new_nameplate,
trophyId=new_trophy,
charaIllustId=new_character
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} userbox")
return False
return True
async def update_avatar(self, user_id: int, version: int, new_wear: int, new_face: int, new_head: int, new_skin: int, new_item: int, new_front: int, new_back: int) -> bool:
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
avatarWear=new_wear,
avatarFace=new_face,
avatarHead=new_head,
avatarSkin=new_skin,
avatarItem=new_item,
avatarFront=new_front,
avatarBack=new_back
)
result = await self.execute(sql)
if result is None:
self.logger.warning(f"Failed to set user {user_id} avatar")
return False
return True
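The four update helpers above differ only in which profile columns they set, so they could share one generic writer. A sketch of that refactor (not present in the class):

async def _update_profile_columns(self, user_id: int, version: int, **values) -> bool:
    # Shared body of update_map_icon / update_system_voice / update_userbox / update_avatar.
    sql = profile.update(
        (profile.c.user == user_id) & (profile.c.version == version)
    ).values(**values)
    result = await self.execute(sql)
    if result is None:
        self.logger.warning(f"Failed to update profile columns {list(values)} for user {user_id}")
        return False
    return True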
async def put_profile_data(
self, aime_id: int, version: int, profile_data: Dict
) -> Optional[int]:

View File

@ -1,16 +1,17 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from sqlalchemy.engine.base import Connection
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from sqlalchemy import Column, Table, UniqueConstraint
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.expression import exists
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import Boolean, Integer, String
from core.data.schema import BaseData, metadata
from ..config import ChuniConfig
course = Table(
course: Table = Table(
"chuni_score_course",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -41,7 +42,7 @@ course = Table(
mysql_charset="utf8mb4",
)
best_score = Table(
best_score: Table = Table(
"chuni_score_best",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -229,9 +230,21 @@ class ChuniRomVersion():
return -1
class ChuniScoreData(BaseData):
async def get_courses(self, aime_id: int) -> Optional[Row]:
async def get_courses(
self,
aime_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(course).where(course.c.user == aime_id)
if limit is not None or offset is not None:
sql = sql.order_by(course.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -249,8 +262,45 @@ class ChuniScoreData(BaseData):
return None
return result.lastrowid
async def get_scores(self, aime_id: int) -> Optional[Row]:
sql = select(best_score).where(best_score.c.user == aime_id)
async def get_scores(
self,
aime_id: int,
levels: Optional[list[int]] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
condition = best_score.c.user == aime_id
if levels is not None:
condition &= best_score.c.level.in_(levels)
if limit is None and offset is None:
sql = (
select(best_score)
.where(condition)
.order_by(best_score.c.musicId.asc(), best_score.c.level.asc())
)
else:
subq = (
select(best_score.c.musicId)
.distinct()
.where(condition)
.order_by(best_score.c.musicId)
)
if limit is not None:
subq = subq.limit(limit)
if offset is not None:
subq = subq.offset(offset)
subq = subq.subquery()
sql = (
select(best_score)
.join(subq, best_score.c.musicId == subq.c.musicId)
.where(condition)
.order_by(best_score.c.musicId, best_score.c.level)
)
result = await self.execute(sql)
if result is None:
@ -360,11 +410,3 @@ class ChuniScoreData(BaseData):
rows = result.fetchall()
return [dict(row) for row in rows]
async def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
sql = select(best_score).where(best_score.c.user == rival_id)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
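When limit and offset are supplied, the reworked get_scores above pages over a DISTINCT musicId subquery rather than over raw rows, so every difficulty of a song lands on the same page. A hypothetical caller, assuming the usual 0-4 difficulty indices where 3 is MASTER and 4 is ULTIMA:

# Third page of ten songs, MASTER and ULTIMA charts only; each returned musicId
# arrives with all of its matching levels because paging is done per song.
rows = await self.data.score.get_scores(
    user_id,
    levels=[3, 4],
    limit=10,
    offset=20,
)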

View File

@ -73,10 +73,91 @@ avatar = Table(
Column("category", Integer),
Column("iconPath", String(255)),
Column("texturePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
Column("sortName", String(255)),
UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
mysql_charset="utf8mb4",
)
nameplate = Table(
"chuni_static_nameplate",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("nameplateId", Integer),
Column("name", String(255)),
Column("texturePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
Column("sortName", String(255)),
UniqueConstraint("version", "nameplateId", name="chuni_static_nameplate_uk"),
mysql_charset="utf8mb4",
)
character = Table(
"chuni_static_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("characterId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("worksName", String(255)),
Column("rareType", Integer),
Column("imagePath1", String(255)),
Column("imagePath2", String(255)),
Column("imagePath3", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "characterId", name="chuni_static_character_uk"),
mysql_charset="utf8mb4",
)
trophy = Table(
"chuni_static_trophy",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("trophyId", Integer),
Column("name", String(255)),
Column("rareType", Integer),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "trophyId", name="chuni_static_trophy_uk"),
mysql_charset="utf8mb4",
)
map_icon = Table(
"chuni_static_map_icon",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("mapIconId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("iconPath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "mapIconId", name="chuni_static_mapicon_uk"),
mysql_charset="utf8mb4",
)
system_voice = Table(
"chuni_static_system_voice",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("version", Integer, nullable=False),
Column("voiceId", Integer),
Column("name", String(255)),
Column("sortName", String(255)),
Column("imagePath", String(255)),
Column("isEnabled", Boolean, server_default="1"),
Column("defaultHave", Boolean, server_default="0"),
UniqueConstraint("version", "voiceId", name="chuni_static_systemvoice_uk"),
mysql_charset="utf8mb4",
)
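These columns and tables only exist after a database upgrade; the frontend falls back to a "Server DB needs upgraded" message when they are missing. Purely as an illustration, assuming the deployment manages its schema with Alembic, adding the new chuni_static_avatar columns would look roughly like the fragment below; the project's own upgrade scripts remain the authoritative source:

# Hypothetical Alembic-style migration fragment; column definitions mirror the table above.
import sqlalchemy as sa
from alembic import op

def upgrade():
    op.add_column("chuni_static_avatar", sa.Column("isEnabled", sa.Boolean(), server_default="1"))
    op.add_column("chuni_static_avatar", sa.Column("defaultHave", sa.Boolean(), server_default="0"))
    op.add_column("chuni_static_avatar", sa.Column("sortName", sa.String(255)))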
gachas = Table(
"chuni_static_gachas",
metadata,
@ -470,6 +551,9 @@ class ChuniStaticData(BaseData):
category: int,
iconPath: str,
texturePath: str,
isEnabled: int,
defaultHave: int,
sortName: str
) -> Optional[int]:
sql = insert(avatar).values(
version=version,
@ -478,6 +562,9 @@ class ChuniStaticData(BaseData):
category=category,
iconPath=iconPath,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
conflict = sql.on_duplicate_key_update(
@ -485,6 +572,9 @@ class ChuniStaticData(BaseData):
category=category,
iconPath=iconPath,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
result = await self.execute(conflict)
@ -492,6 +582,246 @@ class ChuniStaticData(BaseData):
return None
return result.lastrowid
async def get_avatar_items(self, version: int, category: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category) & (avatar.c.isEnabled)).order_by(avatar.c.sortName)
else:
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category)).order_by(avatar.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_nameplate(
self,
version: int,
nameplateId: int,
name: str,
texturePath: str,
isEnabled: int,
defaultHave: int,
sortName: str
) -> Optional[int]:
sql = insert(nameplate).values(
version=version,
nameplateId=nameplateId,
name=name,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
conflict = sql.on_duplicate_key_update(
name=name,
texturePath=texturePath,
isEnabled=isEnabled,
defaultHave=defaultHave,
sortName=sortName
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_nameplates(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(nameplate).where((nameplate.c.version == version) & (nameplate.c.isEnabled)).order_by(nameplate.c.sortName)
else:
sql = select(nameplate).where(nameplate.c.version == version).order_by(nameplate.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_trophy(
self,
version: int,
trophyId: int,
name: str,
rareType: int,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(trophy).values(
version=version,
trophyId=trophyId,
name=name,
rareType=rareType,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
rareType=rareType,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_trophies(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(trophy).where((trophy.c.version == version) & (trophy.c.isEnabled)).order_by(trophy.c.name)
else:
sql = select(trophy).where(trophy.c.version == version).order_by(trophy.c.name)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_map_icon(
self,
version: int,
mapIconId: int,
name: str,
sortName: str,
iconPath: str,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(map_icon).values(
version=version,
mapIconId=mapIconId,
name=name,
sortName=sortName,
iconPath=iconPath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
iconPath=iconPath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_map_icons(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(map_icon).where((map_icon.c.version == version) & (map_icon.c.isEnabled)).order_by(map_icon.c.sortName)
else:
sql = select(map_icon).where(map_icon.c.version == version).order_by(map_icon.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_system_voice(
self,
version: int,
voiceId: int,
name: str,
sortName: str,
imagePath: str,
isEnabled: int,
defaultHave: int,
) -> Optional[int]:
sql = insert(system_voice).values(
version=version,
voiceId=voiceId,
name=name,
sortName=sortName,
imagePath=imagePath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
imagePath=imagePath,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_system_voices(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(system_voice).where((system_voice.c.version == version) & (system_voice.c.isEnabled)).order_by(system_voice.c.sortName)
else:
sql = select(system_voice).where(system_voice.c.version == version).order_by(system_voice.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_character(
self,
version: int,
characterId: int,
name: str,
sortName: str,
worksName: str,
rareType: int,
imagePath1: str,
imagePath2: str,
imagePath3: str,
isEnabled: int,
defaultHave: int
) -> Optional[int]:
sql = insert(character).values(
version=version,
characterId=characterId,
name=name,
sortName=sortName,
worksName=worksName,
rareType=rareType,
imagePath1=imagePath1,
imagePath2=imagePath2,
imagePath3=imagePath3,
isEnabled=isEnabled,
defaultHave=defaultHave
)
conflict = sql.on_duplicate_key_update(
name=name,
sortName=sortName,
worksName=worksName,
rareType=rareType,
imagePath1=imagePath1,
imagePath2=imagePath2,
imagePath3=imagePath3,
isEnabled=isEnabled,
defaultHave=defaultHave
)
result = await self.execute(conflict)
if result is None:
return None
return result.lastrowid
async def get_characters(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
if enabled_only:
sql = select(character).where((character.c.version == version) & (character.c.isEnabled)).order_by(character.c.sortName)
else:
sql = select(character).where(character.c.version == version).order_by(character.c.sortName)
result = await self.execute(sql)
if result is None:
return None
return result.fetchall()
async def put_gacha(
self,
version: int,

View File

@ -0,0 +1,309 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
<!-- AVATAR PREVIEW -->
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">AVATAR</caption>
<tr><td style="height:340px; width:50%" rowspan=8>
<img class="avatar-preview avatar-preview-platform" src="img/avatar-platform.png">
<img id="preview1_back" class="avatar-preview avatar-preview-back" src="">
<img id="preview1_skin" class="avatar-preview avatar-preview-skin-rightfoot" src="">
<img id="preview2_skin" class="avatar-preview avatar-preview-skin-leftfoot" src="">
<img id="preview3_skin" class="avatar-preview avatar-preview-skin-body" src="">
<img id="preview1_wear" class="avatar-preview avatar-preview-wear" src="">
<img class="avatar-preview avatar-preview-common" src="img/avatar-common.png">
<img id="preview1_head" class="avatar-preview avatar-preview-head" src="">
<img id="preview1_face" class="avatar-preview avatar-preview-face" src="">
<img id="preview1_item" class="avatar-preview avatar-preview-item-righthand" src="">
<img id="preview2_item" class="avatar-preview avatar-preview-item-lefthand" src="">
<img id="preview1_front" class="avatar-preview avatar-preview-front" src="">
</td>
</tr>
<tr><td>Wear:</td><td><div id="name_wear"></div></td></tr>
<tr><td>Face:</td><td><div id="name_face"></div></td></tr>
<tr><td>Head:</td><td><div id="name_head"></div></td></tr>
<tr><td>Skin:</td><td><div id="name_skin"></div></td></tr>
<tr><td>Item:</td><td><div id="name_item"></div></td></tr>
<tr><td>Front:</td><td><div id="name_front"></div></td></tr>
<tr><td>Back:</td><td><div id="name_back"></div></td></tr>
<tr><td colspan=3 style="padding:8px 0px; text-align: center;">
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveAvatar()">SAVE</button>&nbsp;&nbsp;&nbsp;&nbsp;
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetAvatar()">RESET</button>
</td></tr>
</table>
</div>
</div>
</div>
<!-- ACCESSORY SELECTION -->
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
<!-- WEAR ACCESSORIES -->
<button class="collapsible">Wear:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ wears|length }}/{{ total_wears }} {{ "items" if total_wears > 1 else "item" }}</button>
<div id="scrollable-wear" class="collapsible-content">
{% for item in wears.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('wear', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="wear-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- FACE ACCESSORIES -->
<button class="collapsible">Face:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ faces|length }}/{{ total_faces }} {{ "items" if total_faces > 1 else "item" }}</button>
<div id="scrollable-face" class="collapsible-content">
{% for item in faces.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('face', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="face-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- HEAD ACCESSORIES -->
<button class="collapsible">Head:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ heads|length }}/{{ total_heads }} {{ "items" if total_heads > 1 else "item" }}</button>
<div id="scrollable-head" class="collapsible-content">
{% for item in heads.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('head', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="head-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- SKIN ACCESSORIES -->
<button class="collapsible">Skin:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ skins|length }}/{{ total_skins }} {{ "items" if total_skins > 1 else "item" }}</button>
<div id="scrollable-skin" class="collapsible-content">
{% for item in skins.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('skin', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="skin-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- ITEM ACCESSORIES -->
<button class="collapsible">Item:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ items|length }}/{{ total_items }} {{ "items" if total_items > 1 else "item" }}</button>
<div id="scrollable-item" class="collapsible-content">
{% for item in items.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('item', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="item-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- FRONT ACCESSORIES -->
<button class="collapsible">Front:&nbsp;&nbsp;&nbsp;&nbsp;{{ fronts|length }}/{{ total_fronts }} {{ "items" if total_fronts > 1 else "item" }}</button>
<div id="scrollable-front" class="collapsible-content">
{% for item in fronts.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('front', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="front-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- BACK ACCESSORIES -->
<button class="collapsible">Back:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ backs|length }}/{{ total_backs }} {{ "items" if total_backs > 1 else "item" }}</button>
<div id="scrollable-back" class="collapsible-content">
{% for item in backs.values() %}
<img id="{{ item["id"] }}" onclick="changeAccessory('back', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="back-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
</div>
{% if wears|length == 0 or faces|length == 0 or heads|length == 0 or skins|length == 0 or items|length == 0 or fronts|length == 0 or backs|length == 0 %}
<script>
// Server DB lacks the necessary info. Maybe the importer never got run for this version?
document.getElementById("name_wear").innerHTML = "Server DB needs upgraded or is not populated with necessary data";
</script>
{% else %}
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the avatar
///
total_items = 0;
orig_id = 1;
orig_name = 2;
orig_img = 3;
curr_id = 4;
curr_name = 5;
curr_img = 6;
accessories = {
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
"wear":["{{ wears|length }}",
"{{ profile.avatarWear }}",
"{{ wears[profile.avatarWear]["name"] }}",
"{{ wears[profile.avatarWear]["texturePath"] }}", "", "", "" ],
"face":["{{ faces|length }}",
"{{ profile.avatarFace }}",
"{{ faces[profile.avatarFace]["name"] }}",
"{{ faces[profile.avatarFace]["texturePath"] }}", "", "", "" ],
"head":["{{ heads|length }}",
"{{ profile.avatarHead }}",
"{{ heads[profile.avatarHead]["name"] }}",
"{{ heads[profile.avatarHead]["texturePath"] }}", "", "", "" ],
"skin":["{{ skins|length }}",
"{{ profile.avatarSkin }}",
"{{ skins[profile.avatarSkin]["name"] }}",
"{{ skins[profile.avatarSkin]["texturePath"] }}", "", "", "" ],
"item":["{{ items|length }}",
"{{ profile.avatarItem }}",
"{{ items[profile.avatarItem]["name"] }}",
"{{ items[profile.avatarItem]["texturePath"] }}", "", "", "" ],
"front":["{{ fronts|length }}",
"{{ profile.avatarFront }}",
"{{ fronts[profile.avatarFront]["name"] }}",
"{{ fronts[profile.avatarFront]["texturePath"] }}", "", "", "" ],
"back":["{{ backs|length }}",
"{{ profile.avatarBack }}",
"{{ backs[profile.avatarBack]["name"] }}",
"{{ backs[profile.avatarBack]["texturePath"] }}", "", "", "" ]
};
types = Object.keys(accessories);
function enableButtons(enabled) {
document.getElementById("reset-btn").disabled = !enabled;
document.getElementById("save-btn").disabled = !enabled;
}
function changeAccessory(type, id, name, img) {
// clear select style for old accessory
var element = document.getElementById(accessories[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new accessory
accessories[type][curr_id] = id;
accessories[type][curr_name] = name;
accessories[type][curr_img] = img;
// update select style for new accessory
element = document.getElementById(id);
if (element) {
element.style.backgroundColor="#5F5";
}
// Update the avatar preview and enable buttons
updatePreview();
if (id != accessories[type][orig_id]) {
enableButtons(true);
}
}
function resetAvatar() {
for (const type of types) {
changeAccessory(type, accessories[type][orig_id], accessories[type][orig_name], accessories[type][orig_img]);
}
// disable the save/reset buttons until something changes
enableButtons(false);
}
function getRandomInt(min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min + 1)) + min;
}
function updatePreview() {
for (const type of types) {
for (let i = 1; i <= 3; i++) {
var img = document.getElementById("preview" + i + "_" + type);
if (img) {
img.src = "img/avatar/" + accessories[type][curr_img];
}
}
document.getElementById("name_" + type).innerHTML = accessories[type][curr_name];
}
}
function saveAvatar() {
$.post("/game/chuni/update.avatar", { wear: accessories["wear"][curr_id],
face: accessories["face"][curr_id],
head: accessories["head"][curr_id],
skin: accessories["skin"][curr_id],
item: accessories["item"][curr_id],
front: accessories["front"][curr_id],
back: accessories["back"][curr_id] })
.done(function (data) {
// set the current as the original and disable buttons
for (const type of types) {
accessories[type][orig_id] = accessories[type][curr_id];
accessories[type][orig_name] = accessories[type][curr_name];
accessories[type][orig_img] = accessories[type][curr_img];
}
enableButtons(false);
})
.fail(function () {
alert("Failed to save avatar.");
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items to
// display. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 8 rows
numPerRow = Math.max(numPerRow, Math.ceil(accessories[type][total_items] / 8));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial preview for current avatar
resetAvatar();
// Initialize scroll on all current accessories so we can see the selected ones
for (const type of types) {
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(accessories[type][curr_id]).offsetLeft;
}
// Expand the first collapsible so the user can get the gist of it.
Collapsibles.expandFirst();
</script>
{% endif %}
{% endblock content %}

View File

@ -7,15 +7,10 @@
{% include 'titles/chuni/templates/chuni_header.jinja' %}
{% if favorites_by_genre is defined and favorites_by_genre is not none %}
<div class="row">
<h1 style="text-align: center;">{{ cur_version_name }}</h1>
<h4 style="text-align: center;">Favorite Count: {{ favorites_count }}</h4>
{% for key, genre in favorites_by_genre.items() %}
<h2 style="text-align: center; padding-top: 32px">{{ key }}</h2>
{% for favorite in genre %}
<form id="fav_{{ favorite.idx }}" action="/game/chuni/update.favorite_music_favorites" method="post" style="display: none;">
<input class="form-control" form="fav_{{ favorite.idx }}" id="musicId" name="musicId" type="hidden" value="{{ favorite.favId }}">
<input class="form-control" form="fav_{{ favorite.idx }}" id="isAdd" name="isAdd" type="hidden" value="0">
</form>
<div class="col-lg-6 mt-3">
<div class="card bg-card rounded card-hover">
<div class="card-body row">
@ -28,7 +23,7 @@
<h6 class="card-text"> {{ favorite.artist }} </h6>
<br><br>
<div style="text-align: right;">
<input type=submit class="btn btn-secondary btn-fav-remove" type="button" form="fav_{{ favorite.idx }}" value="Remove">
<button onclick="removeFavorite({{ favorite.favId }})" class="btn btn-secondary btn-fav-remove">Remove</button>
</div>
</div>
</div>
@ -51,5 +46,16 @@
}
});
});
// Remove Favorite
function removeFavorite(musicId) {
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: 0 })
.done(function (data) {
location.reload();
})
.fail(function () {
alert("Failed to remove favorite.");
});
}
</script>
{% endblock content %}

View File

@ -1,5 +1,5 @@
<div class="chuni-header">
<h1>Chunithm</h1>
<h1>{{ cur_version_name }}</h1>
<ul class="chuni-navi">
<li><a class="nav-link" href="/game/chuni">PROFILE</a></li>
<li><a class="nav-link" href="/game/chuni/rating">RATING</a></li>
@ -7,6 +7,9 @@
<li><a class="nav-link" href="/game/chuni/favorites">FAVORITES</a></li>
<li><a class="nav-link" href="/game/chuni/musics">MUSICS</a></li>
<li><a class="nav-link" href="/game/chuni/userbox">USER BOX</a></li>
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
<li><a class="nav-link" href="/game/chuni/avatar">AVATAR</a></li>
{% endif %}
</ul>
</div>
<script>
@ -22,6 +25,13 @@
$('.nav-link[href="/game/chuni/favorites"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/musics')) {
$('.nav-link[href="/game/chuni/musics"]').addClass('active');
} else if (currentPath.startsWith('/game/chuni/userbox')) {
$('.nav-link[href="/game/chuni/userbox"]').addClass('active');
}
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
else if (currentPath.startsWith('/game/chuni/avatar')) {
$('.nav-link[href="/game/chuni/avatar"]').addClass('active');
}
{% endif %}
});
</script>

View File

@ -69,9 +69,48 @@
<td>Last Play Date:</td>
<td>{{ profile.lastPlayDate }}</td>
</tr>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<tr>
<td>Map Icon:</td>
<td><div id="map-icon-name">{{ map_icons[profile.mapIconId]["name"] if map_icons|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
</tr>
<tr>
<td>System Voice:</td>
<td><div id="system-voice-name">{{ system_voices[profile.voiceId]["name"] if system_voices|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
</tr>
{% endif %}
</table>
</div>
</div>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<!-- MAP ICON SELECTION -->
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
<div class="card bg-card rounded">
<button class="collapsible">Map Icon:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ map_icons|length }}/{{ total_map_icons }}</button>
<div id="scrollable-map-icon" class="collapsible-content">
{% for item in map_icons.values() %}
<img id="map-icon-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('map-icon', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/mapIcon/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="map-icon-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
</div>
<!-- SYSTEM VOICE SELECTION -->
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
<div class="card bg-card rounded">
<button class="collapsible">System Voice:&nbsp;&nbsp;&nbsp;{{ system_voices|length }}/{{ total_system_voices }}</button>
<div id="scrollable-system-voice" class="collapsible-content">
{% for item in system_voices.values() %}
<img id="system-voice-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('system-voice', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/systemVoice/{{ item["imagePath"] }}" alt="{{ item["name"] }}">
<span id="system-voice-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
</div>
{% endif %}
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
@ -147,4 +186,93 @@
});
}
</script>
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the map icon and system voice
///
total_items = 0;
curr_id = 1;
items = {
// [total_items, curr_id]
"map-icon": ["{{ map_icons|length }}", "{{ profile.mapIconId }}"],
"system-voice":["{{ system_voices|length }}", "{{ profile.voiceId }}"]
};
types = Object.keys(items);
function changeItem(type, id, name) {
// clear select style for old selection
var element = document.getElementById(type + "-" + items[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new item
items[type][curr_id] = id;
document.getElementById(type + "-name").innerHTML = name;
// update select style for new accessory
element = document.getElementById(type + "-" + id);
if (element) {
element.style.backgroundColor="#5F5";
}
}
function saveItem(type, id, name) {
$.post("/game/chuni/update." + type, { id: id })
.done(function (data) {
changeItem(type, id, name);
})
.fail(function () {
alert("Failed to set " + type + " to " + name);
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items being
// displayed. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 6 rows
numPerRow = Math.max(numPerRow, Math.ceil(items[type][total_items] / 6));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial style for current and scroll to selected
for (const type of types) {
changeItem(type, items[type][curr_id], document.getElementById(type + "-name").innerHTML);
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + items[type][curr_id]).offsetLeft;
}
Collapsibles.expandAll();
</script>
{% endif %}
{% endblock content %}

View File

@ -7,20 +7,15 @@
{% include 'titles/chuni/templates/chuni_header.jinja' %}
{% if playlog is defined and playlog is not none %}
<div class="row">
<h1 style="text-align: center;">{{ cur_version_name }}</h1>
<h4 style="text-align: center;">Playlog Count: {{ playlog_count }}</h4>
{% set rankName = ['D', 'C', 'B', 'BB', 'BBB', 'A', 'AA', 'AAA', 'S', 'S+', 'SS', 'SS+', 'SSS', 'SSS+'] %}
{% set difficultyName = ['normal', 'hard', 'expert', 'master', 'ultimate'] %}
{% for record in playlog %}
<form id="fav_{{ record.idx }}" action="/game/chuni/update.favorite_music_playlog" method="post" style="display: none;">
<input class="form-control" form="fav_{{ record.idx }}" id="musicId" name="musicId" type="hidden" value="{{ record.musicId }}">
<input class="form-control" form="fav_{{ record.idx }}" id="isAdd" name="isAdd" type="hidden" value="{{ 0 if record.isFav else 1 }}">
</form>
<div class="col-lg-6 mt-3">
<div class="card bg-card rounded card-hover">
<div class="card-header row">
<div class="col-auto fav" title="{{ ('Remove' if record.isFav else 'Add') + ' Favorite'}}">
<h1><input type=submit class="fav {{ 'fav-set' if record.isFav else '' }}" type="button" form="fav_{{ record.idx }}" value="{{ '&#9733' if record.isFav else '&#9734' }} "></h1>
<h1><span id="{{ record.idx }}" class="fav {{ 'fav-set' if record.isFav else '' }}" onclick="updateFavorite({{ record.idx }}, {{ record.musicId }})">{{ '&#9733' if record.isFav else '&#9734' }}</span>
</div>
<div class="col scrolling-text">
<h5 class="card-text"> {{ record.title }} </h5>
@ -191,5 +186,23 @@
}
});
});
// Add/Remove Favorite
function updateFavorite(elementId, musicId) {
element = document.getElementById(elementId);
isAdd = 1;
if (element.classList.contains("fav-set"))
{
isAdd = 0;
}
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: isAdd })
.done(function (data) {
location.reload();
})
.fail(function () {
alert("Failed to update favorite.");
});
}
</script>
{% endblock content %}

View File

@ -0,0 +1,262 @@
{% extends "core/templates/index.jinja" %}
{% block content %}
<style>
{% include 'titles/chuni/templates/css/chuni_style.css' %}
</style>
<div class="container">
{% include 'titles/chuni/templates/chuni_header.jinja' %}
<!-- USER BOX PREVIEW -->
<div class="row">
<div class="col-lg-8 m-auto mt-3">
<div class="card bg-card rounded">
<table class="table-large table-rowdistinct">
<caption align="top">USER BOX</caption>
<tr><td colspan=2 style="height:240px;">
<!-- NAMEPLATE -->
<img id="preview_nameplate" class="userbox userbox-nameplate" src="">
<!-- TEAM -->
<img class="userbox userbox-teamframe" src="img/rank/team3.png">
<div class="userbox userbox-teamname">{{team_name}}</div>
<!-- TROPHY/TITLE -->
<img id="preview_trophy_rank" class="userbox userbox-trophy" src="">
<div id="preview_trophy_name" class="userbox userbox-trophy userbox-trophy-name"></div>
<!-- NAME/RATING -->
<img class="userbox userbox-ratingframe" src="img/rank/rating0.png">
<div class="userbox userbox-name">
<span class="userbox-name-level-label">Lv.</span>
{{ profile.level }}&nbsp;&nbsp;&nbsp;{{ profile.userName }}
</div>
<div class="userbox userbox-rating rating rating-rank{{ rating_rank }}">
<span class="userbox-rating-label">RATING</span>
&nbsp;&nbsp;{{ profile.playerRating/100 }}
</div>
<!-- CHARACTER -->
<img class="userbox userbox-charaframe" src="img/character-bg.png">
<img id="preview_character" class="userbox userbox-chara" src="">
</td></tr>
<tr><td>Nameplate:</td><td style="width: 80%;"><div id="name_nameplate"></div></td></tr>
<tr><td>Trophy:</td><td><div id="name_trophy">
<select name="trophy" id="trophy" onchange="changeTrophy()" style="width:100%;">
{% for item in trophies.values() %}
<option value="{{ item["id"] }}" class="trophy-rank{{ item["rarity"] }}">{{ item["name"] }}</option>
{% endfor %}
</select>
</div></td></tr>
<tr><td>Character:</td><td><div id="name_character"></div></td></tr>
<tr><td colspan=2 style="padding:8px 0px; text-align: center;">
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveUserbox()">SAVE</button>&nbsp;&nbsp;&nbsp;&nbsp;
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetUserbox()">RESET</button>
</td></tr>
</table>
</div>
</div>
</div>
<!-- USERBOX SELECTION -->
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
<!-- NAMEPLATE -->
<button class="collapsible">Nameplate:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ nameplates|length }}/{{ total_nameplates }}</button>
<div id="scrollable-nameplate" class="collapsible-content">
{% for item in nameplates.values() %}
<img id="nameplate-{{ item["id"] }}" style="padding: 8px 8px;" onclick="changeItem('nameplate', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/nameplate/{{ item["texturePath"] }}" alt="{{ item["name"] }}">
<span id="nameplate-br-{{ loop.index }}"></span>
{% endfor %}
</div>
<hr>
<!-- CHARACTER -->
<button class="collapsible">Character:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;{{ characters|length }}/{{ total_characters }}</button>
<div id="scrollable-character" class="collapsible-content">
{% for item in characters.values() %}
<img id="character-{{ item["id"] }}" onclick="changeItem('character', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["iconPath"] }}')" src="img/character/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
<span id="character-br-{{ loop.index }}"></span>
{% endfor %}
</div>
</div>
{% if error is defined %}
{% include "core/templates/widgets/err_banner.jinja" %}
{% endif %}
</div>
{% if nameplates|length == 0 or characters|length == 0 %}
<script>
// Server DB lacks necessary info. Maybe the importer was never run for this version?
document.getElementById("name_nameplate").innerHTML = "Server DB needs to be upgraded or is not populated with the necessary data";
</script>
{% else %}
<script>
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
///
/// This script handles all updates to the user box
///
total_items = 0;
orig_id = 1;
orig_name = 2;
orig_img = 3;
curr_id = 4;
curr_name = 5;
curr_img = 6;
userbox_components = {
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
"nameplate":["{{ nameplates|length }}",
"{{ profile.nameplateId }}",
"{{ nameplates[profile.nameplateId]["name"] }}",
"{{ nameplates[profile.nameplateId]["texturePath"] }}", "", "", ""],
"character":["{{ characters|length }}",
"{{ profile.charaIllustId }}",
"{{ characters[profile.charaIllustId]["name"] }}",
"{{ characters[profile.charaIllustId]["iconPath"] }}", "", "", ""]
};
types = Object.keys(userbox_components);
orig_trophy = curr_trophy = "{{ profile.trophyId }}";
curr_trophy_img = "";
function enableButtons(enabled) {
document.getElementById("reset-btn").disabled = !enabled;
document.getElementById("save-btn").disabled = !enabled;
}
function changeItem(type, id, name, img) {
// clear select style for old component
var element = document.getElementById(type + "-" + userbox_components[type][curr_id]);
if (element) {
element.style.backgroundColor="inherit";
}
// set new component
userbox_components[type][curr_id] = id;
userbox_components[type][curr_name] = name;
userbox_components[type][curr_img] = img;
// update select style for new accessory
element = document.getElementById(type + "-" + id);
if (element) {
element.style.backgroundColor="#5F5";
}
// Update the userbox preview and enable buttons
updatePreview();
if (id != userbox_components[type][orig_id]) {
enableButtons(true);
}
}
function getRankImage(selected_rank) {
for (const x of Array(12).keys()) {
if (selected_rank.classList.contains("trophy-rank" + x.toString())) {
return "rank" + x.toString() + ".png";
}
}
return "rank0.png"; // shouldnt ever happen
}
function changeTrophy() {
var trophy_element = document.getElementById("trophy");
curr_trophy = trophy_element.value;
curr_trophy_img = getRankImage(trophy_element[trophy_element.selectedIndex]);
updatePreview();
if (curr_trophy != orig_trophy) {
enableButtons(true);
}
}
function resetUserbox() {
for (const type of types) {
changeItem(type, userbox_components[type][orig_id], userbox_components[type][orig_name], userbox_components[type][orig_img]);
}
// reset trophy
document.getElementById("trophy").value = orig_trophy;
changeTrophy();
// disable the save/reset buttons until something changes
enableButtons(false);
}
function updatePreview() {
for (const type of types) {
document.getElementById("preview_" + type).src = "img/" + type + "/" + userbox_components[type][curr_img];
document.getElementById("name_" + type).innerHTML = userbox_components[type][curr_name];
}
document.getElementById("preview_trophy_rank").src = "img/rank/" + curr_trophy_img;
document.getElementById("preview_trophy_name").innerHTML = document.getElementById("trophy")[document.getElementById("trophy").selectedIndex].innerText;
}
function saveUserbox() {
$.post("/game/chuni/update.userbox", { nameplate: userbox_components["nameplate"][curr_id],
trophy: curr_trophy,
character: userbox_components["character"][curr_id] })
.done(function (data) {
// set the current as the original and disable buttons
for (const type of types) {
userbox_components[type][orig_id] = userbox_components[type][curr_id];
userbox_components[type][orig_name] = userbox_components[type][curr_name];
userbox_components[type][orig_img] = userbox_components[type][curr_img];
}
orig_trophy = curr_trophy;
enableButtons(false);
})
.fail(function () {
alert("Failed to save userbox.");
});
}
function resizePage() {
//
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
//
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
// when dealing with something like userbox characters where there are 1000s of possible items being
// displayed. This approach gives us full control over where items in the div wrap, allowing us to try
// to keep things presentable.
//
for (const type of types) {
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
// Don't put fewer than 4 per row
numPerRow = Math.max(numPerRow, 4);
// Don't populate more than 8 rows
numPerRow = Math.max(numPerRow, Math.ceil(userbox_components[type][total_items] / 8));
// update the locations of the <br>
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
var spanBr = document.getElementById(type + "-br-" + i);
if ( i % numPerRow == 0 ) {
spanBr.innerHTML = "<br>";
} else {
spanBr.innerHTML = "";
}
}
}
// update the max height for any currently visible containers
Collapsibles.updateAllHeights();
}
resizePage();
window.addEventListener('resize', resizePage);
// Set initial preview for current userbox
resetUserbox();
// Initialize scroll on all current items so we can see the selected ones
for (const type of types) {
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + userbox_components[type][curr_id]).offsetLeft;
}
Collapsibles.expandAll();
</script>
{% endif %}
{% endblock content %}

View File

@ -159,6 +159,45 @@ caption {
font-weight: bold;
}
.rating {
font-weight: bold;
-webkit-text-stroke-width: 1px;
-webkit-text-stroke-color: black;
}
.rating-rank0 {
color: #008000;
}
.rating-rank1 {
color: #ffa500;
}
.rating-rank2 {
color: #ff0000;
}
.rating-rank3 {
color: #800080;
}
.rating-rank4 {
color: #cd853f;
}
.rating-rank5 {
color: #c0c0c0;
}
.rating-rank6 {
color: #ffd700;
}
.rating-rank7 {
color: #a9a9a9;
}
.rating-rank8 {
background: linear-gradient(to right, red, yellow, lime, aqua, blue, fuchsia) 0 / 5em;
background-clip: text;
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
}
.scrolling-text {
overflow: hidden;
}
@ -194,6 +233,41 @@ caption {
}
}
/*
Styles to support collapsible boxes (used for browsing images)
*/
.collapsible {
background-color: #555;
cursor: pointer;
padding-bottom: 16px;
width: 100%;
border: none;
text-align: left;
outline: none;
font-family: monospace;
font-weight: bold;
}
.collapsible:after {
content: '[+]';
float: right;
}
.collapsible-active:after {
content: "[-]";
}
.collapsible-content {
max-height: 0px;
overflow: hidden;
opacity: 0;
transition: max-height 0.2s ease-out;
background-color: #DDD;
}
/*
Styles for favorites star in /playlog
*/
.fav {
padding: 0;
padding-left: 4px;
@ -206,7 +280,257 @@ caption {
color: gold;
}
/*
Styles for favorites in /favorites
*/
.btn-fav-remove {
padding:10px;
width:100%;
}
/*
Styles for userbox configuration
*/
.userbox {
position: absolute;
}
.userbox-nameplate {
top: 72px;
left: 32px;
}
.userbox-teamframe {
top: 74px;
left: 156px;
}
.userbox-teamname {
top: 72px;
left: 254px;
padding: 8px 20px;
font-size: 22px;
text-shadow: rgba(0,0,0,0.8) 2px 2px;
color: #DDD;
width: 588px;
text-align: left;
}
.userbox-trophy {
top: 170px;
left: 250px;
zoom: 0.70;
}
.userbox-trophy-name {
top: 170px;
left: 250px;
padding: 8px 20px;
font-size: 28px;
font-weight: bold;
color: #333;
width: 588px;
text-align: center;
}
.userbox-ratingframe {
top: 160px;
left: 175px;
}
.userbox-charaframe {
top: 267px;
left: 824px;
zoom: 0.61;
}
.userbox-chara {
top: 266px;
left: 814px;
zoom: 0.62;
}
.userbox-name {
top: 160px;
left: 162px;
padding: 8px 20px;
font-size: 32px;
font-weight: bold;
color: #333;
text-align: left;
}
.userbox-name-level-label {
font-size: 24px;
}
.userbox-rating {
top: 204px;
left: 166px;
padding: 8px 20px;
font-size: 24px;
text-align: left;
}
.userbox-rating-label {
font-size: 16px;
}
.trophy-rank0 {
color: #111;
background-color: #DDD;
}
.trophy-rank1 {
color: #111;
background-color: #D85;
}
.trophy-rank2 {
color: #111;
background-color: #ADF;
}
.trophy-rank3 {
color: #111;
background-color: #EB3;
}
.trophy-rank4 {
color: #111;
background-color: #EB3;
}
.trophy-rank5 {
color: #111;
background-color: #FFA;
}
.trophy-rank6 {
color: #111;
background-color: #FFA;
}
.trophy-rank7 {
color: #111;
background-color: #FCF;
}
.trophy-rank8 {
color: #111;
background-color: #FCF;
}
.trophy-rank9 {
color: #111;
background-color: #07C;
}
.trophy-rank10 {
color: #111;
background-color: #7FE;
}
.trophy-rank11 {
color: #111;
background-color: #8D7;
}
/*
Styles for scrollable divs (used for browsing images)
*/
.scrolling-lists {
table-layout: fixed;
}
.scrolling-lists div {
overflow: auto;
white-space: nowrap;
}
.scrolling-lists img {
width: 128px;
}
.scrolling-lists-lg {
table-layout: fixed;
width: 100%;
}
.scrolling-lists-lg div {
overflow: auto;
white-space: nowrap;
padding: 4px;
}
.scrolling-lists-lg img {
padding: 4px;
width: 128px;
}
/*
Styles for avatar configuration
*/
.avatar-preview {
position:absolute;
zoom:0.5;
}
.avatar-preview-wear {
top: 280px;
left: 60px;
}
.avatar-preview-face {
top: 262px;
left: 200px;
}
.avatar-preview-head {
top: 130px;
left: 120px;
}
.avatar-preview-skin-body {
top: 250px;
left: 190px;
height: 406px;
width: 256px;
object-fit: cover;
object-position: top;
}
.avatar-preview-skin-leftfoot {
top: 625px;
left: 340px;
object-position: -84px -406px;
}
.avatar-preview-skin-rightfoot {
top: 625px;
left: 40px;
object-position: 172px -406px;
}
.avatar-preview-common {
top: 250px;
left: 135px;
}
.avatar-preview-item-lefthand {
top: 180px;
left: 370px;
height: 544px;
width: 200px;
object-fit: cover;
object-position: right;
}
.avatar-preview-item-righthand {
top: 180px;
left: 65px;
height: 544px;
width: 200px;
object-fit: cover;
object-position: left;
}
.avatar-preview-back {
top: 140px;
left: 46px;
}
.avatar-preview-platform {
top: 310px;
left: 55px;
zoom: 1;
}

View File

@ -0,0 +1,66 @@
///
/// Handles the collapsible behavior of each of the scrollable containers
///
/// @note Intent is to include this file via jinja in the same <script> tag as
/// any page-specific logic that interacts with the collapsibles.
///
class Collapsibles {
static setHeight(content) {
// @note Add an extra 20% height buffer - the div will autosize but we
// want to make sure we don't clip due to the horizontal scroll.
content.style.maxHeight = (content.scrollHeight * 1.2) + "px";
}
static updateAllHeights() {
// Updates the height of all expanded collapsibles.
// Intended for use when resolution changes cause the contents within the collapsible to move around.
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
var content = coll[i].nextElementSibling;
if (content.style.maxHeight) {
// currently visible. update height
Collapsibles.setHeight(content);
}
}
}
static expandFirst() {
// Activate the first collapsible once loaded so the user can get an idea of how things work
window.addEventListener('load', function () {
var coll = document.getElementsByClassName("collapsible");
if (coll && coll.length > 0) {
coll[0].click();
}
})
}
static expandAll() {
// Activate all collapsibles so everything is visible immediately
window.addEventListener('load', function () {
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
coll[i].click();
}
})
}
}
//
// Initial Collapsible Setup
//
// Add an event listener for a click on any collapsible object. This will change the style to collapse/expand the content.
var coll = document.getElementsByClassName("collapsible");
for (var i = 0; i < coll.length; i++) {
coll[i].addEventListener("click", function () {
this.classList.toggle("collapsible-active");
var content = this.nextElementSibling;
if (content.style.maxHeight) {
content.style.maxHeight = null;
content.style.opacity = 0;
} else {
Collapsibles.setHeight(content);
content.style.opacity = 1;
}
});
}

View File

@ -1,16 +1,17 @@
from datetime import datetime, timedelta
from typing import Any, Dict, List
import itertools
import logging
from base64 import b64decode
from os import path, stat, remove, mkdir, access, W_OK
from PIL import ImageFile
from random import randint
from datetime import datetime, timedelta
from os import W_OK, access, mkdir, path
from typing import Any, Dict, List
import pytz
from core.config import CoreConfig
from core.utils import Utils
from .const import Mai2Constants
from .config import Mai2Config
from .const import Mai2Constants
from .database import Mai2Data
@ -444,23 +445,22 @@ class Mai2Base:
return {"userId": data["userId"], "userOption": options_dict}
async def handle_get_user_card_api_request(self, data: Dict) -> Dict:
user_cards = await self.data.item.get_cards(data["userId"])
if user_cards is None:
return {"userId": data["userId"], "nextIndex": 0, "userCardList": []}
user_id = int(data["userId"])
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
user_cards = await self.data.item.get_cards(
user_id, limit=max_ct + 1, offset=next_idx
)
if len(user_cards[start_idx:]) > max_ct:
next_idx += max_ct
else:
next_idx = 0
if user_cards is None or len(user_cards) == 0:
return {"userId": user_id, "nextIndex": 0, "userCardList": []}
card_list = []
for card in user_cards:
for card in user_cards[:max_ct]:
tmp = card._asdict()
tmp.pop("id")
tmp.pop("user")
tmp["startDate"] = datetime.strftime(
@ -469,12 +469,18 @@ class Mai2Base:
tmp["endDate"] = datetime.strftime(
tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
card_list.append(tmp)
if len(user_cards) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userCardList": card_list[start_idx:end_idx],
"userCardList": card_list,
}
async def handle_get_user_charge_api_request(self, data: Dict) -> Dict:
@ -536,28 +542,35 @@ class Mai2Base:
return { "userId": data.get("userId", 0), "userBossData": boss_lst}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = int(data["nextIndex"] / 10000000000)
next_idx = int(data["nextIndex"] % 10000000000)
user_item_list = await self.data.item.get_items(data["userId"], kind)
user_id: int = data["userId"]
kind: int = data["nextIndex"] // 10000000000
next_idx: int = data["nextIndex"] % 10000000000
max_ct: int = data["maxCount"]
rows = await self.data.item.get_items(user_id, kind, limit=max_ct, offset=next_idx)
if rows is None or len(rows) == 0:
return {
"userId": user_id,
"nextIndex": 0,
"itemKind": kind,
"userItemList": [],
}
items: List[Dict[str, Any]] = []
for i in range(next_idx, len(user_item_list)):
tmp = user_item_list[i]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
if len(items) >= int(data["maxCount"]):
break
xout = kind * 10000000000 + next_idx + len(items)
if len(items) < int(data["maxCount"]):
next_idx = 0
if len(rows) > max_ct:
next_idx = kind * 10000000000 + next_idx + max_ct
else:
next_idx = xout
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"itemKind": kind,
"userItemList": items,
@ -675,77 +688,90 @@ class Mai2Base:
return {"length": 0, "userPortraitList": []}
async def handle_get_user_friend_season_ranking_api_request(self, data: Dict) -> Dict:
friend_season_ranking = await self.data.item.get_friend_season_ranking(data["userId"])
if friend_season_ranking is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_friend_season_ranking(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": 0,
"userFriendSeasonRankingList": [],
}
friend_season_ranking_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(friend_season_ranking)):
tmp = friend_season_ranking[x]._asdict()
tmp.pop("user")
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("id")
tmp.pop("user")
tmp["recordDate"] = datetime.strftime(
tmp["recordDate"], f"{Mai2Constants.DATE_TIME_FORMAT}.0"
)
friend_season_ranking_list.append(tmp)
if len(friend_season_ranking_list) >= max_ct:
break
if len(friend_season_ranking) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userFriendSeasonRankingList": friend_season_ranking_list,
}
async def handle_get_user_map_api_request(self, data: Dict) -> Dict:
maps = await self.data.item.get_maps(data["userId"])
if maps is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_maps(
user_id, limit=max_ct + 1, offset=next_idx,
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": 0,
"userMapList": [],
}
map_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(maps)):
tmp = maps[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
map_list.append(tmp)
if len(map_list) >= max_ct:
break
if len(maps) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userMapList": map_list,
}
async def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
login_bonuses = await self.data.item.get_login_bonuses(data["userId"])
if login_bonuses is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_login_bonuses(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"nextIndex": 0,
@ -753,25 +779,20 @@ class Mai2Base:
}
login_bonus_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(login_bonuses)):
tmp = login_bonuses[x]._asdict()
for row in rows[:max_ct]:
tmp = row._asdict()
tmp.pop("user")
tmp.pop("id")
login_bonus_list.append(tmp)
if len(login_bonus_list) >= max_ct:
break
if len(login_bonuses) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userLoginBonusList": login_bonus_list,
}
@ -805,42 +826,54 @@ class Mai2Base:
return {"userId": data["userId"], "userGradeStatus": grade_stat, "length": 0, "userGradeList": []}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
next_index = data.get("nextIndex", 0)
max_ct = data.get("maxCount", 50)
upper_lim = next_index + max_ct
music_detail_list = []
user_id: int = data.get("userId", 0)
next_idx: int = data.get("nextIndex", 0)
max_ct: int = data.get("maxCount", 50)
if user_id <= 0:
self.logger.warning("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
return {}
songs = await self.data.score.get_best_scores(user_id, is_dx=False)
if songs is None:
rows = await self.data.score.get_best_scores(
user_id, is_dx=False, limit=max_ct + 1, offset=next_idx
)
if rows is None:
self.logger.debug("handle_get_user_music_api_request: get_best_scores returned None!")
return {
"userId": data["userId"],
"nextIndex": 0,
"userMusicList": [],
}
"userId": user_id,
"nextIndex": 0,
"userMusicList": [],
}
num_user_songs = len(songs)
music_details = [row._asdict() for row in rows]
returned_count = 0
music_list = []
for x in range(next_index, upper_lim):
if num_user_songs <= x:
for _music_id, details_iter in itertools.groupby(music_details, key=lambda d: d["musicId"]):
details: list[dict[Any, Any]] = []
for d in details_iter:
d.pop("id")
d.pop("user")
details.append(d)
music_list.append({"userMusicDetailList": details})
returned_count += len(details)
if len(music_list) >= max_ct:
break
if returned_count < len(rows):
next_idx += max_ct
else:
next_idx = 0
tmp = songs[x]._asdict()
tmp.pop("id")
tmp.pop("user")
music_detail_list.append(tmp)
next_index = 0 if len(music_detail_list) < max_ct or num_user_songs == upper_lim else upper_lim
self.logger.info(f"Send songs {next_index}-{upper_lim} ({len(music_detail_list)}) out of {num_user_songs} for user {user_id} (next idx {next_index})")
return {
"userId": data["userId"],
"nextIndex": next_index,
"userMusicList": [{"userMusicDetailList": music_detail_list}],
"userId": user_id,
"nextIndex": next_idx,
"userMusicList": music_list,
}
async def handle_upload_user_portrait_api_request(self, data: Dict) -> Dict:
@ -925,30 +958,52 @@ class Mai2Base:
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
kind = data.get("kind", 0) # 1 is fav music, 2 is rival user IDs
next_index = data.get("nextIndex", 0)
next_idx = data.get("nextIndex", 0)
max_ct = data.get("maxCount", 100) # always 100
is_all = data.get("isAllFavoriteItem", False) # always false
empty_resp = {
"userId": user_id,
"kind": kind,
"nextIndex": 0,
"userFavoriteItemList": [],
}
if not user_id or kind not in (1, 2):
return empty_resp
id_list: List[Dict] = []
if user_id:
if kind == 1:
fav_music = await self.data.item.get_fav_music(user_id)
if fav_music:
for fav in fav_music:
id_list.append({"orderId": fav["orderId"] or 0, "id": fav["musicId"]})
if len(id_list) >= 100: # Lazy but whatever
break
elif kind == 2:
rivals = await self.data.profile.get_rivals_game(user_id)
if rivals:
for rival in rivals:
id_list.append({"orderId": 0, "id": rival["rival"]})
if kind == 1:
rows = await self.data.item.get_fav_music(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return empty_resp
for row in rows[:max_ct]:
id_list.append({"orderId": row["orderId"] or 0, "id": row["musicId"]})
elif kind == 2:
rows = await self.data.profile.get_rivals_game(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return empty_resp
for row in rows[:max_ct]:
id_list.append({"orderId": 0, "id": row["rival"]})
if rows is None or len(rows) <= max_ct:
next_idx = 0
else:
next_idx += max_ct
return {
"userId": user_id,
"kind": kind,
"nextIndex": 0,
"nextIndex": next_idx,
"userFavoriteItemList": id_list,
}
@ -964,5 +1019,4 @@ class Mai2Base:
"""
return {"userId": data["userId"], "userRecommendSelectionMusicIdList": []}
async def handle_get_user_score_ranking_api_request(self, data: Dict) ->Dict:
return {"userId": data["userId"], "userScoreRanking": []}
return {"userId": data["userId"], "userScoreRanking": []}

View File

@ -1,8 +1,9 @@
from typing import Any, List, Dict
import itertools
from datetime import datetime, timedelta
import pytz
import json
from random import randint
from typing import Any, Dict, List
import pytz
from core.config import CoreConfig
from core.utils import Utils
@ -309,83 +310,112 @@ class Mai2DX(Mai2Base):
return {"userId": data["userId"], "userOption": options_dict}
async def handle_get_user_card_api_request(self, data: Dict) -> Dict:
user_cards = await self.data.item.get_cards(data["userId"])
if user_cards is None:
return {"userId": data["userId"], "nextIndex": 0, "userCardList": []}
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_cards(user_id, limit=max_ct + 1, offset=next_idx)
if rows is None:
return {"userId": user_id, "nextIndex": 0, "userCardList": []}
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
card_list = []
if len(user_cards[start_idx:]) > max_ct:
for row in rows[:max_ct]:
card = row._asdict()
card.pop("id")
card.pop("user")
card["startDate"] = datetime.strftime(
card["startDate"], Mai2Constants.DATE_TIME_FORMAT
)
card["endDate"] = datetime.strftime(
card["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
card_list.append(card)
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
card_list = []
for card in user_cards:
tmp = card._asdict()
tmp.pop("id")
tmp.pop("user")
tmp["startDate"] = datetime.strftime(
tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT
)
tmp["endDate"] = datetime.strftime(
tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
card_list.append(tmp)
return {
"userId": data["userId"],
"nextIndex": next_idx,
"userCardList": card_list[start_idx:end_idx],
"userCardList": card_list,
}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = data["nextIndex"] // 10000000000
next_idx = data["nextIndex"] % 10000000000
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
kind = next_idx // 10000000000
next_idx = next_idx % 10000000000
items: List[Dict[str, Any]] = []
if kind == 4: # presents
user_pres_list = await self.data.item.get_presents_by_version_user(self.version, data["userId"])
if user_pres_list:
self.logger.debug(f"Found {len(user_pres_list)} possible presents")
for present in user_pres_list:
if (present['startDate'] and present['startDate'].timestamp() > datetime.now().timestamp()):
self.logger.debug(f"Present {present['id']} distribution hasn't started yet (begins {present['startDate']})")
continue # present period hasn't started yet, move onto the next one
if (present['endDate'] and present['endDate'].timestamp() < datetime.now().timestamp()):
self.logger.warn(f"Present {present['id']} ended on {present['endDate']} and should be removed")
continue # present period ended, move onto the next one
test = await self.data.item.get_item(data["userId"], present['itemKind'], present['itemId'])
if not test: # Don't send presents for items the user already has
pres_id = present['itemKind'] * 1000000
pres_id += present['itemId']
items.append({"itemId": pres_id, "itemKind": 4, "stock": present['stock'], "isValid": True})
self.logger.info(f"Give user {data['userId']} {present['stock']}x item {present['itemId']} (kind {present['itemKind']}) as present")
rows = await self.data.item.get_presents_by_version_user(
version=self.version,
user_id=user_id,
exclude_owned=True,
exclude_not_in_present_period=True,
limit=max_ct + 1,
offset=next_idx,
)
if rows is None:
return {
"userId": user_id,
"nextIndex": 0,
"itemKind": kind,
"userItemList": [],
}
for row in rows[:max_ct]:
self.logger.info(
f"Give user {user_id} {row['stock']}x item {row['itemId']} (kind {row['itemKind']}) as present"
)
items.append(
{
"itemId": row["itemKind"] * 1000000 + row["itemId"],
"itemKind": kind,
"stock": row["stock"],
"isValid": True,
}
)
else:
user_item_list = await self.data.item.get_items(data["userId"], kind)
for i in range(next_idx, len(user_item_list)):
tmp = user_item_list[i]._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
if len(items) >= int(data["maxCount"]):
break
rows = await self.data.item.get_items(
user_id=user_id,
item_kind=kind,
limit=max_ct + 1,
offset=next_idx,
)
xout = kind * 10000000000 + next_idx + len(items)
if rows is None:
return {
"userId": user_id,
"nextIndex": 0,
"itemKind": kind,
"userItemList": [],
}
if len(items) < int(data["maxCount"]):
for row in rows[:max_ct]:
item = row._asdict()
item.pop("id")
item.pop("user")
items.append(item)
if len(rows) > max_ct:
next_idx = kind * 10000000000 + next_idx + max_ct
else:
next_idx = 0
else:
next_idx = xout
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"itemKind": kind,
"userItemList": items,
@ -491,103 +521,115 @@ class Mai2DX(Mai2Base):
return {"length": 0, "userPortraitList": []}
async def handle_get_user_friend_season_ranking_api_request(self, data: Dict) -> Dict:
friend_season_ranking = await self.data.item.get_friend_season_ranking(data["userId"])
if friend_season_ranking is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_friend_season_ranking(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": 0,
"userFriendSeasonRankingList": [],
}
friend_season_ranking_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(friend_season_ranking)):
tmp = friend_season_ranking[x]._asdict()
tmp.pop("user")
tmp.pop("id")
tmp["recordDate"] = datetime.strftime(
tmp["recordDate"], f"{Mai2Constants.DATE_TIME_FORMAT}.0"
for row in rows[:max_ct]:
friend_season_ranking = row._asdict()
friend_season_ranking.pop("user")
friend_season_ranking.pop("id")
friend_season_ranking["recordDate"] = datetime.strftime(
friend_season_ranking["recordDate"], f"{Mai2Constants.DATE_TIME_FORMAT}.0"
)
friend_season_ranking_list.append(tmp)
friend_season_ranking_list.append(friend_season_ranking)
if len(friend_season_ranking_list) >= max_ct:
break
if len(friend_season_ranking) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userFriendSeasonRankingList": friend_season_ranking_list,
}
async def handle_get_user_map_api_request(self, data: Dict) -> Dict:
maps = await self.data.item.get_maps(data["userId"])
if maps is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_maps(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": 0,
"userMapList": [],
}
map_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(maps)):
tmp = maps[x]._asdict()
tmp.pop("user")
tmp.pop("id")
map_list.append(tmp)
for row in rows[:max_ct]:
map = row._asdict()
map.pop("user")
map.pop("id")
map_list.append(map)
if len(map_list) >= max_ct:
break
if len(maps) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userMapList": map_list,
}
async def handle_get_user_login_bonus_api_request(self, data: Dict) -> Dict:
login_bonuses = await self.data.item.get_login_bonuses(data["userId"])
if login_bonuses is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_login_bonuses(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": 0,
"userLoginBonusList": [],
}
login_bonus_list = []
next_idx = int(data["nextIndex"])
max_ct = int(data["maxCount"])
for x in range(next_idx, len(login_bonuses)):
tmp = login_bonuses[x]._asdict()
tmp.pop("user")
tmp.pop("id")
login_bonus_list.append(tmp)
for row in rows[:max_ct]:
login_bonus = row._asdict()
login_bonus.pop("user")
login_bonus.pop("id")
login_bonus_list.append(login_bonus)
if len(login_bonus_list) >= max_ct:
break
if len(login_bonuses) >= next_idx + max_ct:
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"nextIndex": next_idx,
"userLoginBonusList": login_bonus_list,
}
@ -619,46 +661,62 @@ class Mai2DX(Mai2Base):
}
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
rival_id = data.get("rivalId", 0)
next_index = data.get("nextIndex", 0)
max_ct = 100
upper_lim = next_index + max_ct
rival_music_list: Dict[int, List] = {}
user_id: int = data["userId"]
rival_id: int = data["rivalId"]
next_idx: int = data["nextIndex"]
max_ct: int = 100
levels: list[int] = [x["level"] for x in data["userRivalMusicLevelList"]]
songs = await self.data.score.get_best_scores(rival_id)
if songs is None:
rows = await self.data.score.get_best_scores(
rival_id,
is_dx=True,
limit=max_ct + 1,
offset=next_idx,
levels=levels,
)
if rows is None:
self.logger.debug("handle_get_user_rival_music_api_request: get_best_scores returned None!")
return {
"userId": user_id,
"rivalId": rival_id,
"nextIndex": 0,
"userRivalMusicList": [] # musicId userRivalMusicDetailList -> level achievement deluxscoreMax
}
music_details = [x._asdict() for x in rows]
returned_count = 0
music_list = []
num_user_songs = len(songs)
for music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
details: list[dict[Any, Any]] = []
for x in range(next_index, upper_lim):
if x >= num_user_songs:
for d in details_iter:
details.append(
{
"level": d["level"],
"achievement": d["achievement"],
"deluxscoreMax": d["deluxscoreMax"],
}
)
music_list.append({"musicId": music_id, "userRivalMusicDetailList": details})
returned_count += len(details)
if len(music_list) >= max_ct:
break
tmp = songs[x]._asdict()
if tmp['musicId'] in rival_music_list:
rival_music_list[tmp['musicId']].append([{"level": tmp['level'], 'achievement': tmp['achievement'], 'deluxscoreMax': tmp['deluxscoreMax']}])
else:
if len(rival_music_list) >= max_ct:
break
rival_music_list[tmp['musicId']] = [{"level": tmp['level'], 'achievement': tmp['achievement'], 'deluxscoreMax': tmp['deluxscoreMax']}]
next_index = 0 if len(rival_music_list) < max_ct or num_user_songs == upper_lim else upper_lim
self.logger.info(f"Send rival {rival_id} songs {next_index}-{upper_lim} ({len(rival_music_list)}) out of {num_user_songs} for user {user_id} (next idx {next_index})")
if returned_count < len(rows):
next_idx += max_ct
else:
next_idx = 0
return {
"userId": user_id,
"rivalId": rival_id,
"nextIndex": next_index,
"userRivalMusicList": [{"musicId": x, "userRivalMusicDetailList": y} for x, y in rival_music_list.items()]
"nextIndex": next_idx,
"userRivalMusicList": music_list,
}
async def handle_get_user_new_item_api_request(self, data: Dict) -> Dict:
@ -674,42 +732,55 @@ class Mai2DX(Mai2Base):
}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
user_id = data.get("userId", 0)
next_index = data.get("nextIndex", 0)
max_ct = data.get("maxCount", 50)
upper_lim = next_index + max_ct
music_detail_list = []
user_id: int = data.get("userId", 0)
next_idx: int = data.get("nextIndex", 0)
max_ct: int = data.get("maxCount", 50)
if user_id <= 0:
self.logger.warning("handle_get_user_music_api_request: Could not find userid in data, or userId is 0")
return {}
songs = await self.data.score.get_best_scores(user_id)
if songs is None:
rows = await self.data.score.get_best_scores(
user_id, is_dx=True, limit=max_ct + 1, offset=next_idx
)
if rows is None:
self.logger.debug("handle_get_user_music_api_request: get_best_scores returned None!")
return {
"userId": data["userId"],
"nextIndex": 0,
"userMusicList": [],
}
"userId": user_id,
"nextIndex": 0,
"userMusicList": [],
}
num_user_songs = len(songs)
music_details = [row._asdict() for row in rows]
returned_count = 0
music_list = []
for x in range(next_index, upper_lim):
if num_user_songs <= x:
for _music_id, details_iter in itertools.groupby(music_details, key=lambda d: d["musicId"]):
details: list[dict[Any, Any]] = []
for d in details_iter:
d.pop("id")
d.pop("user")
details.append(d)
music_list.append({"userMusicDetailList": details})
returned_count += len(details)
if len(music_list) >= max_ct:
break
if returned_count < len(rows):
next_idx += max_ct
else:
next_idx = 0
tmp = songs[x]._asdict()
tmp.pop("id")
tmp.pop("user")
music_detail_list.append(tmp)
next_index = 0 if len(music_detail_list) < max_ct or num_user_songs == upper_lim else upper_lim
self.logger.info(f"Send songs {next_index}-{upper_lim} ({len(music_detail_list)}) out of {num_user_songs} for user {user_id} (next idx {next_index})")
return {
"userId": data["userId"],
"nextIndex": next_index,
"userMusicList": [{"userMusicDetailList": music_detail_list}],
"userId": user_id,
"nextIndex": next_idx,
"userMusicList": music_list,
}
async def handle_user_login_api_request(self, data: Dict) -> Dict:
@ -812,39 +883,43 @@ class Mai2DX(Mai2Base):
return {"length": len(selling_card_list), "sellingCardList": selling_card_list}
async def handle_cm_get_user_card_api_request(self, data: Dict) -> Dict:
user_cards = await self.data.item.get_cards(data["userId"])
if user_cards is None:
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.item.get_cards(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {"returnCode": 1, "length": 0, "nextIndex": 0, "userCardList": []}
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
card_list = []
if len(user_cards[start_idx:]) > max_ct:
for row in rows[:max_ct]:
card = row._asdict()
card.pop("id")
card.pop("user")
card["startDate"] = datetime.strftime(
card["startDate"], Mai2Constants.DATE_TIME_FORMAT
)
card["endDate"] = datetime.strftime(
card["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
card_list.append(card)
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = 0
card_list = []
for card in user_cards:
tmp = card._asdict()
tmp.pop("id")
tmp.pop("user")
tmp["startDate"] = datetime.strftime(
tmp["startDate"], Mai2Constants.DATE_TIME_FORMAT
)
tmp["endDate"] = datetime.strftime(
tmp["endDate"], Mai2Constants.DATE_TIME_FORMAT
)
card_list.append(tmp)
return {
"returnCode": 1,
"length": len(card_list[start_idx:end_idx]),
"length": len(card_list),
"nextIndex": next_idx,
"userCardList": card_list[start_idx:end_idx],
"userCardList": card_list,
}
async def handle_cm_get_user_item_api_request(self, data: Dict) -> Dict:
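handle_get_user_item (both here and in the base handler) packs the item kind and the row offset into a single nextIndex using a 10**10 stride, so the client keeps paging within one kind. A short worked example of encoding and decoding that cursor; the helper names and values are purely illustrative:

KIND_STRIDE = 10_000_000_000  # matches the // and % divisor used in handle_get_user_item

def split_cursor(next_index: int) -> tuple[int, int]:
    # Recover (item kind, row offset) from the packed cursor.
    return next_index // KIND_STRIDE, next_index % KIND_STRIDE

def pack_cursor(kind: int, offset: int) -> int:
    # Build the cursor for the next page of the same kind.
    return kind * KIND_STRIDE + offset

# e.g. kind 4 (presents) after one 50-row page:
cursor = pack_cursor(4, 50)            # -> 40000000050
assert split_cursor(cursor) == (4, 50)
# A returned nextIndex of 0 marks the end of the listing.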

View File

@ -1,15 +1,16 @@
from core.data.schema import BaseData, metadata
from datetime import datetime
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_, or_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BIGINT, INTEGER
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from typing import Dict, List, Optional
from sqlalchemy import Column, Table, UniqueConstraint, and_, or_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import BIGINT, INTEGER, JSON, TIMESTAMP, Boolean, Integer, String
character = Table(
from core.data.schema import BaseData, metadata
character: Table = Table(
"mai2_item_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -27,7 +28,7 @@ character = Table(
mysql_charset="utf8mb4",
)
card = Table(
card: Table = Table(
"mai2_item_card",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -46,7 +47,7 @@ card = Table(
mysql_charset="utf8mb4",
)
item = Table(
item: Table = Table(
"mai2_item_item",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -63,7 +64,7 @@ item = Table(
mysql_charset="utf8mb4",
)
map = Table(
map: Table = Table(
"mai2_item_map",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -81,7 +82,7 @@ map = Table(
mysql_charset="utf8mb4",
)
login_bonus = Table(
login_bonus: Table = Table(
"mai2_item_login_bonus",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -98,7 +99,7 @@ login_bonus = Table(
mysql_charset="utf8mb4",
)
friend_season_ranking = Table(
friend_season_ranking: Table = Table(
"mai2_item_friend_season_ranking",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -134,7 +135,7 @@ favorite = Table(
mysql_charset="utf8mb4",
)
fav_music = Table(
fav_music: Table = Table(
"mai2_item_favorite_music",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -199,7 +200,7 @@ print_detail = Table(
mysql_charset="utf8mb4",
)
present = Table(
present: Table = Table(
"mai2_item_present",
metadata,
Column('id', BIGINT, primary_key=True, nullable=False),
@ -239,13 +240,26 @@ class Mai2ItemData(BaseData):
return None
return result.lastrowid
async def get_items(self, user_id: int, item_kind: int = None) -> Optional[List[Row]]:
if item_kind is None:
sql = item.select(item.c.user == user_id)
else:
sql = item.select(
and_(item.c.user == user_id, item.c.itemKind == item_kind)
)
async def get_items(
self,
user_id: int,
item_kind: Optional[int] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
cond = item.c.user == user_id
if item_kind is not None:
cond &= item.c.itemKind == item_kind
sql = select(item).where(cond)
if limit is not None or offset is not None:
sql = sql.order_by(item.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -296,8 +310,20 @@ class Mai2ItemData(BaseData):
return None
return result.lastrowid
async def get_login_bonuses(self, user_id: int) -> Optional[List[Row]]:
sql = login_bonus.select(login_bonus.c.user == user_id)
async def get_login_bonuses(
self,
user_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(login_bonus).where(login_bonus.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(login_bonus.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -347,8 +373,20 @@ class Mai2ItemData(BaseData):
return None
return result.lastrowid
async def get_maps(self, user_id: int) -> Optional[List[Row]]:
sql = map.select(map.c.user == user_id)
async def get_maps(
self,
user_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(map).where(map.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(map.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -424,8 +462,20 @@ class Mai2ItemData(BaseData):
return None
return result.fetchone()
async def get_friend_season_ranking(self, user_id: int) -> Optional[Row]:
sql = friend_season_ranking.select(friend_season_ranking.c.user == user_id)
async def get_friend_season_ranking(
self,
user_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(friend_season_ranking).where(friend_season_ranking.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(friend_season_ranking.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -480,8 +530,23 @@ class Mai2ItemData(BaseData):
return None
return result.fetchall()
async def get_fav_music(self, user_id: int) -> Optional[List[Row]]:
result = await self.execute(fav_music.select(fav_music.c.user == user_id))
async def get_fav_music(
self,
user_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(fav_music).where(fav_music.c.user == user_id)
if limit is not None or offset is not None:
sql = sql.order_by(fav_music.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result:
return result.fetchall()
@ -537,13 +602,24 @@ class Mai2ItemData(BaseData):
return None
return result.lastrowid
async def get_cards(self, user_id: int, kind: int = None) -> Optional[Row]:
if kind is None:
sql = card.select(card.c.user == user_id)
else:
sql = card.select(and_(card.c.user == user_id, card.c.cardKind == kind))
async def get_cards(
self,
user_id: int,
kind: Optional[int] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
condition = card.c.user == user_id
sql = sql.order_by(card.c.startDate.desc())
if kind is not None:
condition &= card.c.cardKind == kind
sql = select(card).where(condition).order_by(card.c.startDate.desc(), card.c.id.asc())
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
@ -634,13 +710,46 @@ class Mai2ItemData(BaseData):
if result:
return result.fetchall()
async def get_presents_by_version_user(self, ver: int = None, user_id: int = None) -> Optional[List[Row]]:
result = await self.execute(present.select(
and_(
or_(present.c.user == user_id, present.c.user == None),
or_(present.c.version == ver, present.c.version == None)
async def get_presents_by_version_user(
self,
version: Optional[int] = None,
user_id: Optional[int] = None,
exclude_owned: bool = False,
exclude_not_in_present_period: bool = False,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(present)
condition = (
((present.c.user == user_id) | present.c.user.is_(None))
& ((present.c.version == version) | present.c.version.is_(None))
)
# Do an anti-join with the mai2_item_item table to exclude any
# items the users have already owned.
if exclude_owned:
sql = sql.join(
item,
(present.c.itemKind == item.c.itemKind)
& (present.c.itemId == item.c.itemId),
isouter=True,  # LEFT OUTER JOIN so unmatched presents survive the IS NULL anti-join check below
)
))
condition &= (item.c.itemKind.is_(None) & item.c.itemId.is_(None))
if exclude_not_in_present_period:
condition &= (present.c.startDate.is_(None) | (present.c.startDate <= func.now()))
condition &= (present.c.endDate.is_(None) | (present.c.endDate >= func.now()))
sql = sql.where(condition)
if limit is not None or offset is not None:
sql = sql.order_by(present.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result:
return result.fetchall()
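get_presents_by_version_user excludes already-owned items with an anti-join: presents are left-outer-joined to the item table on (itemKind, itemId), only the rows with no match (NULL item columns) survive, and the result is ordered by id so limit/offset paging stays stable. A condensed, self-contained SQLAlchemy sketch of that shape using toy tables rather than the real schema:

from sqlalchemy import Column, Integer, MetaData, Table, select

md = MetaData()
present_t = Table(
    "present", md,
    Column("id", Integer, primary_key=True),
    Column("itemKind", Integer),
    Column("itemId", Integer),
)
item_t = Table(
    "item", md,
    Column("id", Integer, primary_key=True),
    Column("user", Integer),
    Column("itemKind", Integer),
    Column("itemId", Integer),
)

def unowned_presents(user_id: int, limit: int, offset: int):
    # LEFT OUTER JOIN: presents with no matching owned item keep NULL item columns.
    return (
        select(present_t)
        .join(
            item_t,
            (present_t.c.itemKind == item_t.c.itemKind)
            & (present_t.c.itemId == item_t.c.itemId)
            & (item_t.c.user == user_id),  # restrict the match to this user's inventory (illustrative)
            isouter=True,
        )
        .where(item_t.c.id.is_(None))      # anti-join: keep only unmatched presents
        .order_by(present_t.c.id)          # deterministic order for stable paging
        .limit(limit)
        .offset(offset)
    )

The same optional order_by/limit/offset tail is what every other new DAO method in this file applies when the caller passes paging arguments.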

View File

@ -1,15 +1,26 @@
from core.data.schema import BaseData, metadata
from titles.mai2.const import Mai2Constants
from datetime import datetime
from typing import Dict, List, Optional
from uuid import uuid4
from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger, SmallInteger, VARCHAR, INTEGER
from sqlalchemy import Column, Table, UniqueConstraint, and_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
from datetime import datetime
from sqlalchemy.types import (
INTEGER,
JSON,
TIMESTAMP,
VARCHAR,
BigInteger,
Boolean,
Integer,
SmallInteger,
String,
)
from core.data.schema import BaseData, metadata
from titles.mai2.const import Mai2Constants
detail = Table(
"mai2_profile_detail",
@ -495,7 +506,7 @@ consec_logins = Table(
mysql_charset="utf8mb4",
)
rival = Table(
rival: Table = Table(
"mai2_user_rival",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -908,8 +919,23 @@ class Mai2ProfileData(BaseData):
if result:
return result.fetchall()
async def get_rivals_game(self, user_id: int) -> Optional[List[Row]]:
result = await self.execute(rival.select(and_(rival.c.user == user_id, rival.c.show == True)).limit(3))
async def get_rivals_game(
self,
user_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
sql = select(rival).where((rival.c.user == user_id) & rival.c.show.is_(True))
if limit is not None or offset is not None:
sql = sql.order_by(rival.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result:
return result.fetchall()

View File

@ -1,15 +1,15 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
from sqlalchemy import Column, Table, UniqueConstraint, and_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.types import JSON, BigInteger, Boolean, Integer, String
from core.data.schema import BaseData, metadata
from core.data import cached
best_score = Table(
best_score: Table = Table(
"mai2_score_best",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -272,7 +272,7 @@ playlog_old = Table(
mysql_charset="utf8mb4",
)
best_score_old = Table(
best_score_old: Table = Table(
"maimai_score_best",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -313,22 +313,55 @@ class Mai2ScoreData(BaseData):
return None
return result.lastrowid
@cached(2)
async def get_best_scores(self, user_id: int, song_id: int = None, is_dx: bool = True) -> Optional[List[Row]]:
async def get_best_scores(
self,
user_id: int,
song_id: Optional[int] = None,
is_dx: bool = True,
limit: Optional[int] = None,
offset: Optional[int] = None,
levels: Optional[list[int]] = None,
) -> Optional[List[Row]]:
if is_dx:
sql = best_score.select(
and_(
best_score.c.user == user_id,
(best_score.c.musicId == song_id) if song_id is not None else True,
)
).order_by(best_score.c.musicId).order_by(best_score.c.level)
table = best_score
else:
sql = best_score_old.select(
and_(
best_score_old.c.user == user_id,
(best_score_old.c.musicId == song_id) if song_id is not None else True,
)
).order_by(best_score.c.musicId).order_by(best_score.c.level)
table = best_score_old
cond = table.c.user == user_id
if song_id is not None:
cond &= table.c.musicId == song_id
if levels is not None:
cond &= table.c.level.in_(levels)
if limit is None and offset is None:
sql = (
select(table)
.where(cond)
.order_by(table.c.musicId, table.c.level)
)
else:
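# Paginate by song rather than by row: pick a page of distinct musicIds
# in a subquery, then join back so every difficulty for those songs is
# returned together.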
subq = (
select(table.c.musicId)
.distinct()
.where(cond)
.order_by(table.c.musicId)
)
if limit is not None:
subq = subq.limit(limit)
if offset is not None:
subq = subq.offset(offset)
subq = subq.subquery()
sql = (
select(table)
.join(subq, table.c.musicId == subq.c.musicId)
.where(cond)
.order_by(table.c.musicId, table.c.level)
)
result = await self.execute(sql)
if result is None:

View File

@ -1,16 +1,16 @@
from datetime import date, datetime, timedelta
from typing import Any, Dict, List
import itertools
import json
import logging
from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Dict, List
import pytz
from core.config import CoreConfig
from core.data.cache import cached
from titles.ongeki.config import OngekiConfig
from titles.ongeki.const import OngekiConstants
from titles.ongeki.config import OngekiConfig
from titles.ongeki.database import OngekiData
from titles.ongeki.config import OngekiConfig
class OngekiBattleGrade(Enum):
@ -500,57 +500,93 @@ class OngekiBase:
}
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
song_list = await self.util_generate_music_list(data["userId"])
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
if len(song_list[start_idx:]) > max_ct:
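# Fetch one row more than requested so we can tell whether another page
# exists without issuing a separate COUNT query.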
rows = await self.data.score.get_best_scores(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": user_id,
"length": 0,
"nextIndex": 0,
"userMusicList": [],
}
music_details = [row._asdict() for row in rows]
returned_count = 0
music_list = []
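# get_best_scores orders rows by musicId, so consecutive rows for the
# same song can be grouped into a single userMusicList entry.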
for _music_id, details_iter in itertools.groupby(music_details, key=lambda d: d["musicId"]):
details: list[dict[Any, Any]] = []
for d in details_iter:
d.pop("id")
d.pop("user")
details.append(d)
music_list.append({"length": len(details), "userMusicDetailList": details})
returned_count += len(details)
if len(music_list) >= max_ct:
break
if returned_count < len(rows):
next_idx += max_ct
else:
next_idx = -1
next_idx = 0
return {
"userId": data["userId"],
"length": len(song_list[start_idx:end_idx]),
"userId": user_id,
"length": len(music_list),
"nextIndex": next_idx,
"userMusicList": song_list[start_idx:end_idx],
"userMusicList": music_list,
}
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
kind = data["nextIndex"] / 10000000000
p = await self.data.item.get_items(data["userId"], kind)
user_id: int = data["userId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
if p is None:
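# nextIndex packs the item kind and the row offset together as
# kind * 10_000_000_000 + offset; unpack it before querying.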
kind = next_idx // 10000000000
next_idx = next_idx % 10000000000
rows = await self.data.item.get_items(
user_id, kind, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"nextIndex": -1,
"userId": user_id,
"nextIndex": 0,
"itemKind": kind,
"length": 0,
"userItemList": [],
}
items: List[Dict[str, Any]] = []
for i in range(data["nextIndex"] % 10000000000, len(p)):
if len(items) > data["maxCount"]:
break
tmp = p[i]._asdict()
tmp.pop("user")
tmp.pop("id")
items.append(tmp)
xout = kind * 10000000000 + (data["nextIndex"] % 10000000000) + len(items)
for row in rows[:max_ct]:
item = row._asdict()
item.pop("id")
item.pop("user")
items.append(item)
if len(items) < data["maxCount"] or data["maxCount"] == 0:
nextIndex = 0
if len(rows) > max_ct:
next_idx = kind * 10000000000 + next_idx + max_ct
else:
nextIndex = xout
next_idx = 0
return {
"userId": data["userId"],
"nextIndex": int(nextIndex),
"itemKind": int(kind),
"userId": user_id,
"nextIndex": next_idx,
"itemKind": kind,
"length": len(items),
"userItemList": items,
}
@ -1143,43 +1179,56 @@ class OngekiBase:
"""
Added in Bright
"""
rival_id = data["rivalUserId"]
next_idx = data["nextIndex"]
max_ct = data["maxCount"]
music = self.handle_get_user_music_api_request(
{"userId": rival_id, "nextIndex": next_idx, "maxCount": max_ct}
user_id: int = data["userId"]
rival_id: int = data["rivalUserId"]
next_idx: int = data["nextIndex"]
max_ct: int = data["maxCount"]
rows = await self.data.score.get_best_scores(
rival_id, limit=max_ct + 1, offset=next_idx
)
for song in music["userMusicList"]:
song["userRivalMusicDetailList"] = song["userMusicDetailList"]
song.pop("userMusicDetailList")
if rows is None:
return {
"userId": user_id,
"rivalUserId": rival_id,
"nextIndex": 0,
"length": 0,
"userRivalMusicList": [],
}
music_details = [row._asdict() for row in rows]
returned_count = 0
music_list = []
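# Group rows per song and drop columns the rival endpoint should not
# expose (play counts, lock/clear/story state).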
for _music_id, details_iter in itertools.groupby(music_details, key=lambda d: d["musicId"]):
details: list[dict[Any, Any]] = []
for d in details_iter:
d.pop("id")
d.pop("user")
d.pop("playCount")
d.pop("isLock")
d.pop("clearStatus")
d.pop("isStoryWatched")
details.append(d)
music_list.append({"length": len(details), "userRivalMusicDetailList": details})
returned_count += len(details)
if len(music_list) >= max_ct:
break
if returned_count < len(rows):
next_idx += max_ct
else:
next_idx = 0
return {
"userId": data["userId"],
"userId": user_id,
"rivalUserId": rival_id,
"length": music["length"],
"nextIndex": music["nextIndex"],
"userRivalMusicList": music["userMusicList"],
"nextIndex": next_idx,
"length": len(music_list),
"userRivalMusicList": music_list,
}
@cached(2)
async def util_generate_music_list(self, user_id: int) -> List:
music_detail = await self.data.score.get_best_scores(user_id)
song_list = []
for md in music_detail:
found = False
tmp = md._asdict()
tmp.pop("user")
tmp.pop("id")
for song in song_list:
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
found = True
song["userMusicDetailList"].append(tmp)
song["length"] = len(song["userMusicDetailList"])
break
if not found:
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
return song_list

View File

@ -1,13 +1,11 @@
from datetime import date, datetime, timedelta
from typing import Any, Dict
from datetime import datetime
from random import randint
import pytz
import json
from typing import Dict
from core.config import CoreConfig
from titles.ongeki.base import OngekiBase
from titles.ongeki.const import OngekiConstants
from titles.ongeki.config import OngekiConfig
from titles.ongeki.const import OngekiConstants
class OngekiBright(OngekiBase):
@ -62,66 +60,72 @@ class OngekiBright(OngekiBase):
return {"returnCode": 1}
async def handle_cm_get_user_card_api_request(self, data: Dict) -> Dict:
user_cards = await self.data.item.get_cards(data["userId"])
if user_cards is None:
user_id: int = data["userId"]
max_ct: int = data["maxCount"]
next_idx: int = data["nextIndex"]
rows = await self.data.item.get_cards(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {}
card_list = []
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
if len(user_cards[start_idx:]) > max_ct:
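# Only up to max_ct rows are returned; the extra row fetched above is
# just a signal that another page exists.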
for row in rows[:max_ct]:
card = row._asdict()
card.pop("id")
card.pop("user")
card_list.append(card)
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
card_list = []
for card in user_cards:
tmp = card._asdict()
tmp.pop("id")
tmp.pop("user")
card_list.append(tmp)
next_idx = 0
return {
"userId": data["userId"],
"length": len(card_list[start_idx:end_idx]),
"length": len(card_list),
"nextIndex": next_idx,
"userCardList": card_list[start_idx:end_idx],
"userCardList": card_list,
}
async def handle_cm_get_user_character_api_request(self, data: Dict) -> Dict:
user_characters = await self.data.item.get_characters(data["userId"])
if user_characters is None:
user_id: int = data["userId"]
max_ct: int = data["maxCount"]
next_idx: int = data["nextIndex"]
rows = await self.data.item.get_characters(
user_id, limit=max_ct + 1, offset=next_idx
)
if rows is None:
return {
"userId": data["userId"],
"userId": user_id,
"length": 0,
"nextIndex": 0,
"userCharacterList": [],
}
max_ct = data["maxCount"]
next_idx = data["nextIndex"]
start_idx = next_idx
end_idx = max_ct + start_idx
character_list = []
if len(user_characters[start_idx:]) > max_ct:
for row in rows[:max_ct]:
character = row._asdict()
character.pop("id")
character.pop("user")
character_list.append(character)
if len(rows) > max_ct:
next_idx += max_ct
else:
next_idx = -1
character_list = []
for character in user_characters:
tmp = character._asdict()
tmp.pop("id")
tmp.pop("user")
character_list.append(tmp)
next_idx = 0
return {
"userId": data["userId"],
"length": len(character_list[start_idx:end_idx]),
"length": len(character_list),
"nextIndex": next_idx,
"userCharacterList": character_list[start_idx:end_idx],
"userCharacterList": character_list,
}
async def handle_get_user_gacha_api_request(self, data: Dict) -> Dict:

View File

@ -1,15 +1,16 @@
from datetime import date, datetime, timedelta
from typing import Dict, Optional, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
from sqlalchemy.schema import ForeignKey
from sqlalchemy.engine import Row
from sqlalchemy.sql import func, select
from datetime import datetime
from typing import Dict, List, Optional
from sqlalchemy import Column, Table, UniqueConstraint, and_
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.types import TIMESTAMP, Boolean, Integer, String
from core.data.schema import BaseData, metadata
card = Table(
card: Table = Table(
"ongeki_user_card",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -45,7 +46,7 @@ deck = Table(
mysql_charset="utf8mb4",
)
character = Table(
character: Table = Table(
"ongeki_user_character",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -130,7 +131,7 @@ memorychapter = Table(
mysql_charset="utf8mb4",
)
item = Table(
item: Table = Table(
"ongeki_user_item",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -351,9 +352,18 @@ class OngekiItemData(BaseData):
return None
return result.lastrowid
async def get_cards(self, aime_id: int) -> Optional[List[Dict]]:
async def get_cards(
self, aime_id: int, limit: Optional[int] = None, offset: Optional[int] = None
) -> Optional[List[Row]]:
sql = select(card).where(card.c.user == aime_id)
if limit is not None or offset is not None:
sql = sql.order_by(card.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -371,9 +381,18 @@ class OngekiItemData(BaseData):
return None
return result.lastrowid
async def get_characters(self, aime_id: int) -> Optional[List[Dict]]:
async def get_characters(
self, aime_id: int, limit: Optional[int] = None, offset: Optional[int] = None
) -> Optional[List[Row]]:
sql = select(character).where(character.c.user == aime_id)
if limit is not None or offset is not None:
sql = sql.order_by(character.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:
return None
@ -479,13 +498,26 @@ class OngekiItemData(BaseData):
return None
return result.fetchone()
async def get_items(self, aime_id: int, item_kind: int = None) -> Optional[List[Dict]]:
if item_kind is None:
sql = select(item).where(item.c.user == aime_id)
else:
sql = select(item).where(
and_(item.c.user == aime_id, item.c.itemKind == item_kind)
)
async def get_items(
self,
aime_id: int,
item_kind: Optional[int] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
cond = item.c.user == aime_id
if item_kind is not None:
cond &= item.c.itemKind == item_kind
sql = select(item).where(cond)
if limit is not None or offset is not None:
sql = sql.order_by(item.c.id)
if limit is not None:
sql = sql.limit(limit)
if offset is not None:
sql = sql.offset(offset)
result = await self.execute(sql)
if result is None:

View File

@ -1,13 +1,15 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy import Column, Table, UniqueConstraint
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import select
from sqlalchemy.types import TIMESTAMP, Boolean, Float, Integer, String
from core.data.schema import BaseData, metadata
score_best = Table(
score_best: Table = Table(
"ongeki_score_best",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
@ -149,8 +151,41 @@ class OngekiScoreData(BaseData):
return None
return result.lastrowid
async def get_best_scores(self, aime_id: int) -> Optional[List[Dict]]:
sql = select(score_best).where(score_best.c.user == aime_id)
async def get_best_scores(
self,
aime_id: int,
limit: Optional[int] = None,
offset: Optional[int] = None,
) -> Optional[List[Row]]:
cond = score_best.c.user == aime_id
if limit is None and offset is None:
sql = (
select(score_best)
.where(cond)
.order_by(score_best.c.musicId, score_best.c.level)
)
else:
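# Page over whole songs: select a page of distinct musicIds first, then
# pull every difficulty row for those songs in the outer query.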
subq = (
select(score_best.c.musicId)
.distinct()
.where(cond)
.order_by(score_best.c.musicId)
)
if limit is not None:
subq = subq.limit(limit)
if offset is not None:
subq = subq.offset(offset)
subq = subq.subquery()
sql = (
select(score_best)
.join(subq, score_best.c.musicId == subq.c.musicId)
.where(cond)
.order_by(score_best.c.musicId, score_best.c.level)
)
result = await self.execute(sql)
if result is None:

View File

@ -98,9 +98,7 @@ class WaccaServlet(BaseServlet):
async def render_POST(self, request: Request) -> bytes:
def end(resp: Dict) -> bytes:
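# The MD5 of the serialized JSON body is echoed back to the client in
# the X-Wacca-Hash header.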
hash = md5(json.dumps(resp, ensure_ascii=False).encode()).digest()
j_Resp = Response(json.dumps(resp, ensure_ascii=False))
j_Resp.raw_headers.append((b"X-Wacca-Hash", hash.hex().encode()))
return j_Resp
return Response(content=json.dumps(resp, ensure_ascii=False), headers={"X-Wacca-Hash": hash.hex()})
api = request.path_params.get('api', '')
branch = request.path_params.get('branch', '')