import json
from decimal import Decimal
from base64 import b64encode
from typing import Any, Dict
from hashlib import md5
from datetime import datetime

from core.config import CoreConfig
from core.data import Data, cached

from .config import CxbConfig
from .base import CxbBase
from .const import CxbConstants


class CxbRevSunriseS2(CxbBase):
    def __init__(self, cfg: CoreConfig, game_cfg: CxbConfig) -> None:
        super().__init__(cfg, game_cfg)
        self.version = CxbConstants.VER_CROSSBEATS_REV_SUNRISE_S2_OMNI

    async def handle_data_path_list_request(self, data: Dict) -> Dict:
        return {"data": ""}
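
    # Every handle_data_*_request below answers with {"data": <payload>}, where the
    # payload is a CRLF-terminated CSV blob (or an empty string) that the client
    # treats as the requested download. The @cached(lifetime=86400) decorator
    # presumably keeps the rendered file-backed payloads for 24 hours.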

    @cached(lifetime=86400)
    async def handle_data_music_list_request(self, data: Dict) -> Dict:
        version = data["dldate"]["filetype"].split("/")[0]
        ret_str = ""

        if "10104" in version:
            self.logger.warning("Game Version is Season 2 Non-Omni")
            file = "titles/cxb/data/rss2/MusicArchiveList-NonOmni.csv"
        else:
            self.logger.warning("Game Version is Season 2 Omnimix")
            file = "titles/cxb/data/rss2/MusicArchiveList.csv"

        with open(file) as music:
            lines = music.readlines()
            for line in lines:
                line_split = line.split(",")
                ret_str += ",".join(line_split[:15]) + ",\r\n"

        return {"data": ret_str}

    @cached(lifetime=86400)
    async def handle_data_item_list_detail_request(self, data: Dict) -> Dict:
        # ItemListIcon load
        ret_str = "#ItemListIcon\r\n"
        with open(
            r"titles/cxb/data/rss2/Item/ItemList_Icon.csv", encoding="utf-8"
        ) as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ItemListTitle load
        ret_str += "\r\n#ItemListTitle\r\n"
        with open(
            r"titles/cxb/data/rss2/Item/ItemList_Title.csv", encoding="utf-8"
        ) as item:
            lines = item.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        return {"data": ret_str}

    @cached(lifetime=86400)
    async def handle_data_shop_list_detail_request(self, data: Dict) -> Dict:
        # ShopListIcon load
        ret_str = "#ShopListIcon\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_Icon.csv", encoding="utf-8"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListMusic load
        ret_str += "\r\n#ShopListMusic\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_Music.csv", encoding="utf-8"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListSale load
        ret_str += "\r\n#ShopListSale\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_Sale.csv", encoding="shift-jis"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListSkinBg load
        ret_str += "\r\n#ShopListSkinBg\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_SkinBg.csv", encoding="shift-jis"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListSkinEffect load
        ret_str += "\r\n#ShopListSkinEffect\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_SkinEffect.csv", encoding="shift-jis"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListSkinNotes load
        ret_str += "\r\n#ShopListSkinNotes\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_SkinNotes.csv", encoding="shift-jis"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        # ShopListTitle load
        ret_str += "\r\n#ShopListTitle\r\n"
        with open(
            r"titles/cxb/data/rss2/Shop/ShopList_Title.csv", encoding="utf-8"
        ) as shop:
            lines = shop.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"

        return {"data": ret_str}

    async def handle_data_extra_stage_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/data/rss2/ExtraStageList.csv") as extra:
            lines = extra.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    async def handle_data_exxxxx_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_one_more_extra_list_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_bonus_list10100_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_oexxxx_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_free_coupon_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/data/rss2/FreeCoupon.csv") as coupon:
            lines = coupon.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    @cached(lifetime=86400)
    async def handle_data_news_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/data/rss2/NewsList.csv", encoding="UTF-8") as news:
            lines = news.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    async def handle_data_tips_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_release_info_list_request(self, data: Dict) -> Dict:
        return {"data": ""}
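
    # The random music list is rebuilt from the omnimix MusicArchiveList.csv, one
    # row per song, repeating the first CSV column (presumably the song mcode).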

    @cached(lifetime=86400)
    async def handle_data_random_music_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/data/rss2/MusicArchiveList.csv") as music:
            lines = music.readlines()
            count = 0
            for line in lines:
                line_split = line.split(",")
                ret_str += f"{count},{line_split[0]},{line_split[0]},\r\n"
                count += 1
        return {"data": ret_str}

    @cached(lifetime=86400)
    async def handle_data_license_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(r"titles/cxb/data/rss2/License.csv", encoding="UTF-8") as licenses:
            lines = licenses.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    @cached(lifetime=86400)
    async def handle_data_course_list_request(self, data: Dict) -> Dict:
        ret_str = ""
        with open(
            r"titles/cxb/data/rss2/Course/CourseList.csv", encoding="UTF-8"
        ) as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}
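
    # Course detail requests encode the course number in the last four characters
    # of the requested filetype; the matching Course/Cs<number>.csv is assumed to
    # exist for every course id the game asks for.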

    @cached(lifetime=86400)
    async def handle_data_csxxxx_request(self, data: Dict) -> Dict:
        extra_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""
        with open(
            rf"titles/cxb/data/rss2/Course/Cs{extra_num}.csv", encoding="shift-jis"
        ) as course:
            lines = course.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    async def handle_data_mission_list_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_mission_bonus_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_unlimited_mission_request(self, data: Dict) -> Dict:
        return {"data": ""}

    async def handle_data_partner_list_request(self, data: Dict) -> Dict:
        ret_str = ""

        # Lord forgive me for the sins I am about to commit
        for i in range(0, 13):
            partner_id = 800000 + i
            ret_str += f"{partner_id},{i},{i},0,10000,,\r\n"
            ret_str += f"{partner_id},{i},{i},1,10500,,\r\n"
            ret_str += f"{partner_id},{i},{i},2,10500,,\r\n"

        ret_str += "\r\n---\r\n0,150,100,100,100,100,\r\n"
        for i in range(1, 130):
            ret_str += f"{i},100,100,100,100,100,\r\n"

        ret_str += "---\r\n"
        return {"data": ret_str}
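
    # Partner detail requests are all answered from the shared Partner0000.csv
    # template, prefixed with a row carrying the requested partner number.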

    @cached(lifetime=86400)
    async def handle_data_partnerxxxx_request(self, data: Dict) -> Dict:
        partner_num = int(data["dldate"]["filetype"][-4:])
        ret_str = f"{partner_num},,{partner_num},1,10000,\r\n"
        with open(r"titles/cxb/data/rss2/Partner0000.csv") as partner:
            lines = partner.readlines()
            for line in lines:
                ret_str += f"{line[:-1]}\r\n"
        return {"data": ret_str}

    async def handle_data_server_state_request(self, data: Dict) -> Dict:
        return {"data": True}

    async def handle_data_settings_request(self, data: Dict) -> Dict:
        return {"data": "2,\r\n"}

    async def handle_data_story_list_request(self, data: Dict) -> Dict:
        # story id, story name, game version, start time, end time, course arc, unlock flag, song mcode for menu
        ret_str = "\r\n"
        ret_str += "st0000,RISING PURPLE,10104,1464370990,4096483201,Cs1000,-1,purple,\r\n"
        ret_str += "st0001,REBEL YELL,10104,1467999790,4096483201,Cs1000,-1,chaset,\r\n"
        ret_str += "st0002,REMNANT,10104,1502127790,4096483201,Cs1000,-1,overcl,\r\n"
        return {"data": ret_str}

    async def handle_data_stxxxx_request(self, data: Dict) -> Dict:
        story_num = int(data["dldate"]["filetype"][-4:])
        ret_str = ""

        # Each story appears to have 10 pieces based on the wiki, but how they are assigned is unclear
        for i in range(1, 11):
            ret_str += f"{i},st{story_num:04d}_{i-1},,,,,,,,,,,,,,,,1,,-1,1,\r\n"
        return {"data": ret_str}

    async def handle_data_event_stamp_list_request(self, data: Dict) -> Dict:
        return {"data": "Cs1002,1,1,1,1,1,1,1,1,1,1,\r\n"}

    async def handle_data_premium_list_request(self, data: Dict) -> Dict:
        return {"data": "1,,,,10,,,,,99,,,,,,,,,100,,\r\n"}
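
    # Hard-coded event windows; the two large values in each row appear to be Unix
    # start/end timestamps (autumn 2020 and early 2021 events).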

    async def handle_data_event_list_request(self, data: Dict) -> Dict:
        return {
            "data": "Cs4001,0,10000,1601510400,1604188799,1,nv2006,1,\r\nCs4005,0,10000,1609459200,1615766399,1,nv2006,1,\r\n"
        }
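
    # The event id is taken from the third path segment of the requested filetype
    # and matched by substring, so EventStampMapList/EventStampList requests that
    # reach this endpoint are answered here as well.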

    async def handle_data_event_detail_list_request(self, data: Dict) -> Dict:
        event_id = data["dldate"]["filetype"].split("/")[2]

        if "Cs4001" in event_id:
            return {
                "data": "#EventMusicList\r\n1,zonzon2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n2,moonki,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n3,tricko,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n"
            }
        elif "Cs4005" in event_id:
            return {
                "data": "#EventMusicList\r\n2,firstl,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n2,valent,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n2,dazzli2,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\r\n"
            }
        elif "EventStampMapListCs1002" in event_id:
            return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
        elif "EventStampList" in event_id:
            return {"data": "Cs1002,1,1,1,1,1,1,1,1,1,1,\r\n"}
        else:
            return {"data": ""}

    async def handle_data_event_stamp_map_list_csxxxx_request(self, data: Dict) -> Dict:
        event_id = data["dldate"]["filetype"].split("/")[2]

        if "EventStampMapListCs1002" in event_id:
            return {"data": "1,2,1,1,2,3,9,5,6,7,8,9,10,\r\n"}
        else:
            return {"data": ""}