Mirror of https://github.com/beerpiss/saekawa.git, synced 2025-02-22 04:39:37 +01:00

feat: overhaul + pb import

This commit is contained in:
parent 96253e13bc
commit 8bcb664378

Cargo.lock (generated): 24 changed lines
@@ -61,6 +61,15 @@ version = "0.21.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
 
+[[package]]
+name = "binary-reader"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d173c51941d642588ed6a13d464617e3a9176b8fe00dc2de182434c36812a5e"
+dependencies = [
+ "byteorder",
+]
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -97,6 +106,12 @@ version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
 
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
 [[package]]
 name = "cbc"
 version = "0.1.2"
@@ -312,6 +327,12 @@ version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
 
+[[package]]
+name = "hex-literal"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
+
 [[package]]
 name = "hmac"
 version = "0.12.1"
@@ -702,12 +723,15 @@ version = "0.2.0"
 dependencies = [
  "aes",
  "anyhow",
+ "binary-reader",
  "cbc",
  "chrono",
  "confy",
+ "crc32fast",
  "env_logger",
  "faster-hex",
  "flate2",
+ "hex-literal",
  "lazy_static",
  "log",
  "num_enum",
Cargo.toml
@@ -18,12 +18,15 @@ panic = "abort"
 [dependencies]
 aes = "0.8.3"
 anyhow = "1.0.75"
+binary-reader = "0.4.5"
 cbc = "0.1.2"
 chrono = "0.4.31"
 confy = "0.5.1"
+crc32fast = "1.3.2"
 env_logger = "0.10.1"
 faster-hex = "0.8.1"
 flate2 = "1.0.28"
+hex-literal = "0.4.1"
 lazy_static = "1.4.0"
 log = "0.4.20"
 num_enum = "0.7.1"
saekawa.toml
@@ -3,15 +3,23 @@
 enable = true
 # Whether the hook should export your class medals and emblems or not.
 export_class = true
+# Whether the hook should export PBs. This should be used as a last resort, if
+# you cannot import scores from your network, since this provides less data
+# and sends only one pre-joined score per chart. Will only work once every session; you'll
+# need to restart the game to do it again.
+export_pbs = false
 # Whether FAILED should override FULL COMBO and ALL JUSTICE.
 fail_over_lamp = false
 # Timeout for web requests, in milliseconds
 timeout = 3000
 
 [cards]
+# **DOES NOT WORK FOR WHITELISTING PBS!!**
+#
 # Access codes that should be whitelisted
 # If this is empty, all cards will be whitelisted
 # There should be no whitespace between the digits
+#
 # example: whitelist = ["00001111222233334444"]
 whitelist = []
 
src/configuration.rs
@@ -27,6 +27,11 @@ impl Configuration {
         confy::load_path("saekawa.toml")
             .map_err(|err| anyhow::anyhow!("Could not load config: {}", err))
     }
+
+    pub fn update(cfg: Configuration) -> Result<()> {
+        confy::store_path("saekawa.toml", cfg)
+            .map_err(|err| anyhow::anyhow!("Could not update config: {}", err))
+    }
 }
 
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
@@ -37,6 +42,9 @@ pub struct GeneralConfiguration {
     #[serde(default = "default_true")]
     pub export_class: bool,
 
+    #[serde(default = "default_false")]
+    pub export_pbs: bool,
+
     #[serde(default = "default_false")]
     pub fail_over_lamp: bool,
 
src/handlers.rs (new file): 59 lines

use std::{fmt::Debug, sync::atomic::Ordering};

use log::{debug, error, info};
use serde::de::DeserializeOwned;

use crate::{
    helpers::execute_tachi_import, saekawa::GAME_MAJOR_VERSION, types::tachi::ToTachiImport,
    CONFIGURATION,
};

pub fn score_handler<T>(body: String, guard: impl Fn(&T) -> bool)
where
    T: Debug + DeserializeOwned + ToTachiImport,
{
    let data = match serde_json::from_str::<T>(body.as_ref()) {
        Ok(req) => req,
        Err(err) => {
            error!("Could not parse request body: {:#}", err);
            return;
        }
    };

    debug!("parsed request body: {:#?}", data);

    if !guard(&data) {
        return;
    }

    let import = data.to_tachi_import(
        GAME_MAJOR_VERSION.load(Ordering::SeqCst),
        CONFIGURATION.general.export_class,
        CONFIGURATION.general.fail_over_lamp,
    );

    if import.scores.is_empty() {
        if import.classes.is_none() {
            return;
        }

        if import
            .classes
            .clone()
            .is_some_and(|v| v.dan.is_none() && v.emblem.is_none())
        {
            return;
        }
    }

    info!(
        "Submitting {} scores from {} {}",
        import.scores.len(),
        data.displayed_id_type(),
        data.displayed_id(),
    );

    if let Err(err) = execute_tachi_import(import) {
        error!("{:#}", err);
    }
}
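Editor's note: score_handler is generic over the decoded request type, so each hooked endpoint can reuse it with its own guard closure. The sketch below is not part of the commit; it only illustrates how a call site could wire the handler up, reusing the card-whitelist guard that winhttpwritedata_hook_wrapper passes in further down in this diff. The function name handle_upsert_body is a placeholder.

    // Sketch only: hypothetical call site for score_handler.
    use crate::{handlers::score_handler, types::game::UpsertUserAllRequest, CONFIGURATION};

    fn handle_upsert_body(body: String) {
        // The guard decides whether the parsed request should be submitted at all.
        score_handler::<UpsertUserAllRequest>(body, |upsert_req| {
            let access_code = &upsert_req.upsert_user_all.user_data[0].access_code;
            CONFIGURATION.cards.whitelist.is_empty()
                || CONFIGURATION.cards.whitelist.contains(access_code)
        });
    }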
src/helpers.rs (deleted): 334 lines removed, split into the new src/helpers/ modules below. Its previous contents were:

use std::{fmt::Debug, io::Read, ptr};

use aes::cipher::{block_padding::Pkcs7, BlockDecryptMut, KeyIvInit};
use anyhow::{anyhow, Result};
use flate2::read::ZlibDecoder;
use log::{debug, error, info};
use serde::{Deserialize, Serialize};
use widestring::U16CString;
use winapi::{
    ctypes::c_void,
    shared::{minwindef::TRUE, winerror::ERROR_INSUFFICIENT_BUFFER},
    um::{
        errhandlingapi::GetLastError,
        winhttp::{
            WinHttpQueryHeaders, WinHttpQueryOption, HINTERNET, WINHTTP_OPTION_URL,
            WINHTTP_QUERY_FLAG_REQUEST_HEADERS, WINHTTP_QUERY_USER_AGENT,
        },
    },
};

use crate::{
    types::tachi::{Import, ImportDocument, ImportPollStatus, TachiResponse, ImportResponse},
    CONFIGURATION, TACHI_IMPORT_URL, UPSERT_USER_ALL_API_ENCRYPTED,
};

type Aes256CbcDec = cbc::Decryptor<aes::Aes256>;

pub fn request_agent() -> ureq::Agent {
    let timeout = CONFIGURATION.general.timeout;
    let timeout = if timeout > 10000 { 10000 } else { timeout };

    ureq::builder()
        .timeout(std::time::Duration::from_millis(timeout))
        .build()
}

fn request<T>(
    method: impl AsRef<str>,
    url: impl AsRef<str>,
    body: Option<T>,
) -> Result<ureq::Response>
where
    T: Serialize + Debug,
{
    let agent = request_agent();

    let method = method.as_ref();
    let url = url.as_ref();
    debug!("{} request to {} with body: {:#?}", method, url, body);

    let authorization = format!("Bearer {}", CONFIGURATION.tachi.api_key);
    let request = agent
        .request(method, url)
        .set("Authorization", authorization.as_str());
    let response = match body {
        Some(body) => request.send_json(body),
        None => request.call(),
    }
    .map_err(|err| anyhow::anyhow!("Could not reach Tachi API: {:#}", err))?;

    Ok(response)
}

pub fn request_tachi<T, R>(
    method: impl AsRef<str>,
    url: impl AsRef<str>,
    body: Option<T>,
) -> Result<R>
where
    T: Serialize + Debug,
    R: for<'de> Deserialize<'de> + Debug,
{
    let response = request(method, url, body)?;
    let response = response.into_json()?;
    debug!("Tachi API response: {:#?}", response);

    Ok(response)
}

/// Queries a HINTERNET handle for its URL, then return the result.
pub fn read_hinternet_url(handle: HINTERNET) -> Result<String> {
    let mut buf_length = 255;
    let mut buffer = [0u16; 255];
    let result = unsafe {
        WinHttpQueryOption(
            handle,
            WINHTTP_OPTION_URL,
            buffer.as_mut_ptr() as *mut c_void,
            &mut buf_length,
        )
    };

    if result == TRUE {
        let url_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return url_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    if ec == ERROR_INSUFFICIENT_BUFFER {
        let mut buffer = vec![0u16; buf_length as usize];
        let result = unsafe {
            WinHttpQueryOption(
                handle,
                WINHTTP_OPTION_URL,
                buffer.as_mut_ptr() as *mut c_void,
                &mut buf_length,
            )
        };

        if result != TRUE {
            let ec = unsafe { GetLastError() };
            return Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"));
        }

        let url_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return url_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"))
}

pub fn read_hinternet_user_agent(handle: HINTERNET) -> Result<String> {
    let mut buf_length = 255;
    let mut buffer = [0u16; 255];
    let result = unsafe {
        WinHttpQueryHeaders(
            handle,
            WINHTTP_QUERY_USER_AGENT | WINHTTP_QUERY_FLAG_REQUEST_HEADERS,
            ptr::null(),
            buffer.as_mut_ptr() as *mut c_void,
            &mut buf_length,
            ptr::null_mut(),
        )
    };

    if result == TRUE {
        let user_agent_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return user_agent_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    if ec == ERROR_INSUFFICIENT_BUFFER {
        let mut buffer = vec![0u16; buf_length as usize];
        let result = unsafe {
            WinHttpQueryHeaders(
                handle,
                WINHTTP_QUERY_USER_AGENT | WINHTTP_QUERY_FLAG_REQUEST_HEADERS,
                ptr::null(),
                buffer.as_mut_ptr() as *mut c_void,
                &mut buf_length,
                ptr::null_mut(),
            )
        };

        if result != TRUE {
            let ec = unsafe { GetLastError() };
            return Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"));
        }

        let user_agent_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return user_agent_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    Err(anyhow!(
        "Could not get User-Agent from HINTERNET handle: {ec}"
    ))
}

pub fn read_slice(buf: *const u8, len: usize) -> Result<Vec<u8>> {
    let mut slice = unsafe { std::slice::from_raw_parts(buf, len) };
    let mut ret = Vec::with_capacity(len);

    slice.read_to_end(&mut ret)?;

    Ok(ret)
}

pub fn read_maybe_compressed_buffer(buf: impl AsRef<[u8]>) -> Result<String> {
    let mut ret = String::new();

    let mut decoder = ZlibDecoder::new(buf.as_ref());
    let zlib_result = decoder.read_to_string(&mut ret);
    if zlib_result.is_ok() {
        return Ok(ret);
    }

    ret.clear();
    let result = buf.as_ref().read_to_string(&mut ret);
    if result.is_ok() {
        return Ok(ret);
    }

    Err(anyhow!(
        "Could not decode contents of buffer as both DEFLATE-compressed ({:#}) and plaintext ({:#}) UTF-8 string.",
        zlib_result.expect_err("This shouldn't happen, if Result was Ok the string should have been returned earlier."),
        result.expect_err("This shouldn't happen, if Result was Ok the string should have been returned earlier."),
    ))
}

/// Determine if we're looking at the UpsertUserAllApi endpoint,
/// which is the endpoint that contains our scores.
pub fn is_upsert_user_all_endpoint(endpoint: &str) -> bool {
    if endpoint == "UpsertUserAllApi" {
        return true;
    }

    if UPSERT_USER_ALL_API_ENCRYPTED
        .as_ref()
        .is_some_and(|v| v == endpoint)
    {
        return true;
    }

    false
}

/// Determine if it is an encrypted endpoint by checking if the endpoint
/// is exactly 32 characters long, and consists of all hex characters.
///
/// While this may trigger false positives, this should not happen as long
/// as CHUNITHM title APIs keep their `{method}{object}Api` endpoint
/// convention.
pub fn is_encrypted_endpoint(endpoint: &str) -> bool {
    if endpoint.len() != 32 {
        return false;
    }

    // Lazy way to check if all digits are hexadecimal
    if u128::from_str_radix(endpoint, 16).is_err() {
        return false;
    }

    true
}

pub fn decrypt_aes256_cbc(
    body: &mut [u8],
    key: impl AsRef<[u8]>,
    iv: impl AsRef<[u8]>,
) -> Result<Vec<u8>> {
    let cipher = Aes256CbcDec::new_from_slices(key.as_ref(), iv.as_ref())?;
    Ok(cipher
        .decrypt_padded_mut::<Pkcs7>(body)
        .map_err(|err| anyhow!(err))?
        .to_owned())
}

pub fn log_import(description: &str, import: ImportDocument) {
    info!(
        "{description} {} scores, {} sessions, {} errors",
        import.score_ids.len(),
        import.created_sessions.len(),
        import.errors.len()
    );

    for err in import.errors {
        error!("{}: {}", err.error_type, err.message);
    }
}

/// Executes a DIRECT-MANUAL import and logs some information on success.
///
/// ## Important
/// This function blocks until import has fully finished! It is best to call this in a separate thread.
pub fn execute_tachi_import(import: Import) -> Result<()> {
    let resp: TachiResponse<ImportResponse> =
        match request_tachi("POST", TACHI_IMPORT_URL.as_str(), Some(import)) {
            Err(err) => {
                return Err(anyhow!("Could not send scores to Tachi: {:#}", err));
            }
            Ok(resp) => resp,
        };

    let (body, description) = match resp {
        TachiResponse::Err(err) => {
            return Err(anyhow!(
                "Tachi API returned an error: {:#}",
                err.description
            ));
        }
        TachiResponse::Ok(resp) => (resp.body, resp.description),
    };

    let poll_url = match body {
        ImportResponse::Queued { url, import_id: _ } => {
            info!("Queued import for processing. Status URL: {}", url);
            url
        }
        ImportResponse::Finished(import) => {
            log_import(&description, import);
            return Ok(());
        }
    };

    loop {
        let resp: TachiResponse<ImportPollStatus> =
            match request_tachi("GET", &poll_url, None::<()>) {
                Ok(resp) => resp,
                Err(err) => {
                    error!("Could not poll import status: {:#}", err);
                    break;
                }
            };

        let (body, description) = match resp {
            TachiResponse::Ok(resp) => (resp.body, resp.description),
            TachiResponse::Err(err) => {
                return Err(anyhow!("Tachi API returned an error: {}", err.description));
            }
        };

        match body {
            ImportPollStatus::Completed { import } => {
                log_import(&description, import);
                return Ok(());
            }
            _ => {}
        }

        std::thread::sleep(std::time::Duration::from_secs(1));
    }

    Ok(())
}
src/helpers/crypto.rs (new file): 16 lines

use aes::cipher::{block_padding::Pkcs7, BlockDecryptMut, KeyIvInit};
use anyhow::{anyhow, Result};

type Aes256CbcDec = cbc::Decryptor<aes::Aes256>;

pub fn decrypt_aes256_cbc(
    body: &mut [u8],
    key: impl AsRef<[u8]>,
    iv: impl AsRef<[u8]>,
) -> Result<Vec<u8>> {
    let cipher = Aes256CbcDec::new_from_slices(key.as_ref(), iv.as_ref())?;
    Ok(cipher
        .decrypt_padded_mut::<Pkcs7>(body)
        .map_err(|err| anyhow!(err))?
        .to_owned())
}
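Editor's note: a minimal usage sketch for the helper above, not part of the commit. It assumes the 32-byte key and 16-byte IV that normally come from the [crypto] section of saekawa.toml; the all-zero values are placeholders, not the real keys.

    // Sketch only: decrypting a captured request body in place.
    use crate::helpers::decrypt_aes256_cbc;

    fn decrypt_body(mut body: Vec<u8>) -> anyhow::Result<Vec<u8>> {
        let key = [0u8; 32]; // placeholder key
        let iv = [0u8; 16]; // placeholder IV
        // Returns the PKCS#7-unpadded plaintext on success.
        decrypt_aes256_cbc(&mut body, key, iv)
    }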
src/helpers/endpoint.rs (new file): 58 lines

use log::debug;
use pbkdf2::pbkdf2_hmac_array;
use sha1::Sha1;

use crate::CONFIGURATION;

pub fn is_endpoint(
    endpoint: &str,
    unencrypted_variant: &str,
    encrypted_variant: &Option<String>,
) -> bool {
    if endpoint == unencrypted_variant {
        return true;
    }

    if encrypted_variant.as_ref().is_some_and(|v| v == endpoint) {
        return true;
    }

    return false;
}

/// Determine if it is an encrypted endpoint by checking if the endpoint
/// is exactly 32 characters long, and consists of all hex characters.
///
/// While this may trigger false positives, this should not happen as long
/// as CHUNITHM title APIs keep their `{method}{object}Api` endpoint
/// convention.
pub fn is_encrypted_endpoint(endpoint: &str) -> bool {
    if endpoint.len() != 32 {
        return false;
    }

    // Lazy way to check if all digits are hexadecimal
    if u128::from_str_radix(endpoint, 16).is_err() {
        return false;
    }

    true
}

pub fn hash_endpoint(endpoint: &str) -> Option<String> {
    if CONFIGURATION.crypto.salt.is_empty() {
        return None;
    }

    let key_bytes = pbkdf2_hmac_array::<Sha1, 16>(
        endpoint.as_bytes(),
        &CONFIGURATION.crypto.salt,
        CONFIGURATION.crypto.iterations,
    );

    let key = faster_hex::hex_string(&key_bytes);

    debug!("Running with encryption support: {endpoint} maps to {key}");

    Some(key)
}
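Editor's note: hash_endpoint derives a 16-byte PBKDF2-HMAC-SHA1 key from the endpoint name and hex-encodes it, which yields exactly the 32 hex characters that is_encrypted_endpoint tests for. The sketch below is not part of the commit; the salt and iteration count are placeholders standing in for the [crypto] values from saekawa.toml.

    // Sketch only: how a hashed endpoint name is produced.
    use pbkdf2::pbkdf2_hmac_array;
    use sha1::Sha1;

    fn hashed_endpoint_example() -> String {
        let salt = b"placeholder-salt"; // placeholder salt
        let iterations = 70; // placeholder iteration count
        let key_bytes = pbkdf2_hmac_array::<Sha1, 16>(b"UpsertUserAllApi", salt, iterations);
        let hashed = faster_hex::hex_string(&key_bytes);
        // 16 bytes become 32 hex characters, matching the length check in is_encrypted_endpoint.
        assert_eq!(hashed.len(), 32);
        hashed
    }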
src/helpers/hinternet.rs (new file): 114 lines

use std::ptr;

use anyhow::{anyhow, Result};
use widestring::U16CString;
use winapi::{
    ctypes::c_void,
    shared::{minwindef::TRUE, winerror::ERROR_INSUFFICIENT_BUFFER},
    um::{
        errhandlingapi::GetLastError,
        winhttp::{
            WinHttpQueryHeaders, WinHttpQueryOption, HINTERNET, WINHTTP_OPTION_URL,
            WINHTTP_QUERY_FLAG_REQUEST_HEADERS, WINHTTP_QUERY_USER_AGENT,
        },
    },
};

/// Queries a HINTERNET handle for its URL, then return the result.
pub fn read_hinternet_url(handle: HINTERNET) -> Result<String> {
    let mut buf_length = 255;
    let mut buffer = [0u16; 255];
    let result = unsafe {
        WinHttpQueryOption(
            handle,
            WINHTTP_OPTION_URL,
            buffer.as_mut_ptr() as *mut c_void,
            &mut buf_length,
        )
    };

    if result == TRUE {
        let url_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return url_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    if ec == ERROR_INSUFFICIENT_BUFFER {
        let mut buffer = vec![0u16; buf_length as usize];
        let result = unsafe {
            WinHttpQueryOption(
                handle,
                WINHTTP_OPTION_URL,
                buffer.as_mut_ptr() as *mut c_void,
                &mut buf_length,
            )
        };

        if result != TRUE {
            let ec = unsafe { GetLastError() };
            return Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"));
        }

        let url_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return url_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"))
}

pub fn read_hinternet_user_agent(handle: HINTERNET) -> Result<String> {
    let mut buf_length = 255;
    let mut buffer = [0u16; 255];
    let result = unsafe {
        WinHttpQueryHeaders(
            handle,
            WINHTTP_QUERY_USER_AGENT | WINHTTP_QUERY_FLAG_REQUEST_HEADERS,
            ptr::null(),
            buffer.as_mut_ptr() as *mut c_void,
            &mut buf_length,
            ptr::null_mut(),
        )
    };

    if result == TRUE {
        let user_agent_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return user_agent_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    if ec == ERROR_INSUFFICIENT_BUFFER {
        let mut buffer = vec![0u16; buf_length as usize];
        let result = unsafe {
            WinHttpQueryHeaders(
                handle,
                WINHTTP_QUERY_USER_AGENT | WINHTTP_QUERY_FLAG_REQUEST_HEADERS,
                ptr::null(),
                buffer.as_mut_ptr() as *mut c_void,
                &mut buf_length,
                ptr::null_mut(),
            )
        };

        if result != TRUE {
            let ec = unsafe { GetLastError() };
            return Err(anyhow!("Could not get URL from HINTERNET handle: {ec}"));
        }

        let user_agent_str = U16CString::from_vec_truncate(&buffer[..buf_length as usize]);
        return user_agent_str
            .to_string()
            .map_err(|err| anyhow!("Could not decode wide string: {:#}", err));
    }

    let ec = unsafe { GetLastError() };
    Err(anyhow!(
        "Could not get User-Agent from HINTERNET handle: {ec}"
    ))
}
src/helpers/io.rs (new file): 35 lines

use std::io::Read;

use anyhow::{anyhow, Result};
use flate2::read::ZlibDecoder;

pub fn read_slice(buf: *const u8, len: usize) -> Result<Vec<u8>> {
    let mut slice = unsafe { std::slice::from_raw_parts(buf, len) };
    let mut ret = Vec::with_capacity(len);

    slice.read_to_end(&mut ret)?;

    Ok(ret)
}

pub fn read_maybe_compressed_buffer(buf: impl AsRef<[u8]>) -> Result<String> {
    let mut ret = String::new();

    let mut decoder = ZlibDecoder::new(buf.as_ref());
    let zlib_result = decoder.read_to_string(&mut ret);
    if zlib_result.is_ok() {
        return Ok(ret);
    }

    ret.clear();
    let result = buf.as_ref().read_to_string(&mut ret);
    if result.is_ok() {
        return Ok(ret);
    }

    Err(anyhow!(
        "Could not decode contents of buffer as both DEFLATE-compressed ({:#}) and plaintext ({:#}) UTF-8 string.",
        zlib_result.expect_err("This shouldn't happen, if Result was Ok the string should have been returned earlier."),
        result.expect_err("This shouldn't happen, if Result was Ok the string should have been returned earlier."),
    ))
}
src/helpers/mod.rs (new file): 11 lines

mod crypto;
mod endpoint;
mod hinternet;
mod io;
mod net;

pub use crypto::*;
pub use endpoint::*;
pub use hinternet::*;
pub use io::*;
pub use net::*;
src/helpers/net.rs (new file): 140 lines

use std::fmt::Debug;

use anyhow::{anyhow, Result};
use log::{debug, error, info};
use serde::{Deserialize, Serialize};

use crate::{
    types::tachi::{Import, ImportDocument, ImportPollStatus, ImportResponse, TachiResponse},
    CONFIGURATION, TACHI_IMPORT_URL,
};

pub fn request_agent() -> ureq::Agent {
    let timeout = CONFIGURATION.general.timeout;
    let timeout = if timeout > 10000 { 10000 } else { timeout };

    ureq::builder()
        .timeout(std::time::Duration::from_millis(timeout))
        .build()
}

fn request<T>(
    method: impl AsRef<str>,
    url: impl AsRef<str>,
    body: Option<T>,
) -> Result<ureq::Response>
where
    T: Serialize + Debug,
{
    let agent = request_agent();

    let method = method.as_ref();
    let url = url.as_ref();
    debug!("{} request to {} with body: {:#?}", method, url, body);

    let authorization = format!("Bearer {}", CONFIGURATION.tachi.api_key);
    let request = agent
        .request(method, url)
        .set("Authorization", authorization.as_str());
    let response = match body {
        Some(body) => request.send_json(body),
        None => request.call(),
    }
    .map_err(|err| anyhow::anyhow!("Could not reach Tachi API: {:#}", err))?;

    Ok(response)
}

pub fn request_tachi<T, R>(
    method: impl AsRef<str>,
    url: impl AsRef<str>,
    body: Option<T>,
) -> Result<R>
where
    T: Serialize + Debug,
    R: for<'de> Deserialize<'de> + Debug,
{
    let response = request(method, url, body)?;
    let response = response.into_json()?;
    debug!("Tachi API response: {:#?}", response);

    Ok(response)
}

fn log_import(description: &str, import: ImportDocument) {
    info!(
        "{description} {} scores, {} sessions, {} errors",
        import.score_ids.len(),
        import.created_sessions.len(),
        import.errors.len()
    );

    for err in import.errors {
        error!("{}: {}", err.error_type, err.message);
    }
}

/// Executes a DIRECT-MANUAL import and logs some information on success.
///
/// ## Important
/// This function blocks until import has fully finished! It is best to call this in a separate thread.
pub fn execute_tachi_import(import: Import) -> Result<()> {
    let resp: TachiResponse<ImportResponse> =
        match request_tachi("POST", TACHI_IMPORT_URL.as_str(), Some(import)) {
            Err(err) => {
                return Err(anyhow!("Could not send scores to Tachi: {:#}", err));
            }
            Ok(resp) => resp,
        };

    let (body, description) = match resp {
        TachiResponse::Err(err) => {
            return Err(anyhow!(
                "Tachi API returned an error: {:#}",
                err.description
            ));
        }
        TachiResponse::Ok(resp) => (resp.body, resp.description),
    };

    let poll_url = match body {
        ImportResponse::Queued { url, import_id: _ } => {
            info!("Queued import for processing. Status URL: {}", url);
            url
        }
        ImportResponse::Finished(import) => {
            log_import(&description, import);
            return Ok(());
        }
    };

    loop {
        let resp: TachiResponse<ImportPollStatus> =
            match request_tachi("GET", &poll_url, None::<()>) {
                Ok(resp) => resp,
                Err(err) => {
                    error!("Could not poll import status: {:#}", err);
                    break;
                }
            };

        let (body, description) = match resp {
            TachiResponse::Ok(resp) => (resp.body, resp.description),
            TachiResponse::Err(err) => {
                return Err(anyhow!("Tachi API returned an error: {}", err.description));
            }
        };

        match body {
            ImportPollStatus::Completed { import } => {
                log_import(&description, import);
                return Ok(());
            }
            _ => {}
        }

        std::thread::sleep(std::time::Duration::from_secs(1));
    }

    Ok(())
}
src/icf.rs (new file): 234 lines

use std::fmt::Display;

use aes::cipher::{block_padding::NoPadding, BlockDecryptMut, KeyIvInit};
use anyhow::{anyhow, Result};
use binary_reader::{BinaryReader, Endian};
use chrono::{NaiveDate, NaiveDateTime};
use log::warn;

type Aes128CbcDec = cbc::Decryptor<aes::Aes128>;

const ICF_KEY: [u8; 16] = hex_literal::decode(&[env!("SAEKAWA_ICF_KEY").as_bytes()]);
const ICF_IV: [u8; 16] = hex_literal::decode(&[env!("SAEKAWA_ICF_IV").as_bytes()]);

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Version {
    pub major: u16,
    pub minor: u8,
    pub build: u8,
}

impl Display for Version {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{:0>2}.{:0>2}", self.major, self.minor, self.build)
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IcfInnerData {
    pub id: String,
    pub version: Version,
    pub required_system_version: Version,
    pub datetime: NaiveDateTime,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IcfPatchData {
    pub id: String,
    pub source_version: Version,
    pub target_version: Version,
    pub required_system_version: Version,
    pub datetime: NaiveDateTime,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum IcfData {
    System(IcfInnerData),
    App(IcfInnerData),
    Patch(IcfPatchData),
    Option(IcfInnerData),
}

fn decrypt_icf(data: &mut [u8], key: impl AsRef<[u8]>, iv: impl AsRef<[u8]>) -> Result<Vec<u8>> {
    let size = data.len();

    let mut decrypted = Vec::with_capacity(size);

    for i in (0..size).step_by(4096) {
        let from_start = i;

        let bufsz = std::cmp::min(4096, size - from_start);
        let buf = &data[i..i + bufsz];
        let mut decbuf = vec![0; bufsz];

        let cipher = Aes128CbcDec::new_from_slices(key.as_ref(), iv.as_ref())?;
        cipher
            .decrypt_padded_b2b_mut::<NoPadding>(buf, &mut decbuf)
            .map_err(|err| anyhow!(err))?;

        let xor1 = u64::from_le_bytes(decbuf[0..8].try_into()?) ^ (from_start as u64);
        let xor2 = u64::from_le_bytes(decbuf[8..16].try_into()?) ^ (from_start as u64);

        decrypted.extend(xor1.to_le_bytes());
        decrypted.extend(xor2.to_le_bytes());
        decrypted.extend(&decbuf[16..]);
    }

    Ok(decrypted)
}

pub fn decode_icf_container_data(
    rd: &mut BinaryReader,
) -> Result<(Version, NaiveDateTime, Version)> {
    let version = Version {
        build: rd.read_u8()?,
        minor: rd.read_u8()?,
        major: rd.read_u16()?,
    };

    let datetime = NaiveDate::from_ymd_opt(
        rd.read_i16()? as i32,
        rd.read_u8()? as u32,
        rd.read_u8()? as u32,
    )
    .ok_or(anyhow!("Invalid date"))?
    .and_hms_milli_opt(
        rd.read_u8()? as u32,
        rd.read_u8()? as u32,
        rd.read_u8()? as u32,
        rd.read_u8()? as u32,
    )
    .ok_or(anyhow!("Invalid time"))?;

    let required_system_version = Version {
        build: rd.read_u8()?,
        minor: rd.read_u8()?,
        major: rd.read_u16()?,
    };

    Ok((version, datetime, required_system_version))
}

pub fn decode_icf(data: &mut [u8]) -> Result<Vec<IcfData>> {
    let decrypted = decrypt_icf(data, ICF_KEY, ICF_IV)?;

    let mut rd = BinaryReader::from_vec(&decrypted);
    rd.endian = Endian::Little;

    let checksum = crc32fast::hash(&decrypted[4..]);
    let reported_crc = rd.read_u32()?;
    if reported_crc != checksum {
        return Err(anyhow!(
            "Reported CRC32 ({reported_crc:02X}) does not match actual checksum ({checksum:02X})"
        ));
    }

    let reported_size = rd.read_u32()? as usize;
    let actual_size = decrypted.len();
    if actual_size != reported_size {
        return Err(anyhow!(
            "Reported size {reported_size} does not match actual size {actual_size}"
        ));
    }

    let padding = rd.read_u64()?;
    if padding != 0 {
        return Err(anyhow!("Padding error. Expected 8 NULL bytes."));
    }

    let entry_count: usize = rd.read_u64()?.try_into()?;
    let expected_size = 0x40 * (entry_count + 1);
    if actual_size != expected_size {
        return Err(anyhow!("Expected size {expected_size} ({entry_count} entries) does not match actual size {actual_size}"));
    }

    let app_id = String::from_utf8(rd.read_bytes(4)?.to_vec())?;
    let platform_id = String::from_utf8(rd.read_bytes(3)?.to_vec())?;
    let _platform_generation = rd.read_u8()?;

    let reported_crc = rd.read_u32()?;
    let mut checksum = 0;
    for i in 1..=entry_count {
        let container = &decrypted[0x40 * i..0x40 * (i + 1)];
        if container[0] == 2 && container[1] == 1 {
            checksum ^= crc32fast::hash(container);
        }
    }

    if reported_crc != checksum {
        return Err(anyhow!("Reported container CRC32 ({reported_crc:02X}) does not match actual checksum ({checksum:02X})"));
    }

    for _ in 0..7 {
        if rd.read_u32()? != 0 {
            return Err(anyhow!("Padding error. Expected 28 NULL bytes."));
        }
    }

    let mut entries: Vec<IcfData> = Vec::with_capacity(entry_count);
    for _ in 0..entry_count {
        let sig = rd.read_bytes(4)?;
        if sig[0] != 2 || sig[1] != 1 {
            return Err(anyhow!("Container does not start with signature (0x0102)"));
        }

        let container_type = rd.read_u32()?;
        for _ in 0..3 {
            if rd.read_u64()? != 0 {
                return Err(anyhow!("Padding error. Expected 24 NULL bytes."));
            }
        }

        let (version, datetime, required_system_version) = decode_icf_container_data(&mut rd)?;

        let data: IcfData = match container_type {
            0x0000 | 0x0001 | 0x0002 => {
                for _ in 0..2 {
                    if rd.read_u64()? != 0 {
                        return Err(anyhow!("Padding error. Expected 16 NULL bytes."));
                    }
                }

                match container_type {
                    0x0000 => IcfData::System(IcfInnerData {
                        id: platform_id.clone(),
                        version,
                        datetime,
                        required_system_version,
                    }),
                    0x0001 => IcfData::App(IcfInnerData {
                        id: app_id.clone(),
                        version,
                        datetime,
                        required_system_version,
                    }),
                    0x0002 => IcfData::Option(IcfInnerData {
                        id: app_id.clone(),
                        version,
                        datetime,
                        required_system_version,
                    }),
                    _ => unreachable!(),
                }
            }
            0x0101 => {
                let (target_version, _, _) = decode_icf_container_data(&mut rd)?;
                IcfData::Patch(IcfPatchData {
                    id: app_id.clone(),
                    source_version: version,
                    target_version,
                    required_system_version,
                    datetime,
                })
            }
            _ => {
                warn!("Unknown ICF container type {container_type}");
                continue;
            }
        };

        entries.push(data);
    }

    Ok(entries)
}
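Editor's note: a sketch (not part of the commit) of how the parser above is consumed; hook_init further down in this diff does essentially this to discover the running game version. The function name game_major_version is a placeholder.

    // Sketch only: pull the App container's major version out of a decoded ICF1 blob.
    use crate::icf::{decode_icf, IcfData};

    fn game_major_version(mut icf1_bytes: Vec<u8>) -> anyhow::Result<Option<u16>> {
        // decode_icf verifies the CRC32s and sizes before returning the containers.
        let entries = decode_icf(&mut icf1_bytes)?;
        Ok(entries.into_iter().find_map(|entry| match entry {
            IcfData::App(app) => Some(app.version.major),
            _ => None,
        }))
    }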
src/lib.rs: 27 changed lines
@@ -1,18 +1,19 @@
 mod configuration;
+mod handlers;
 mod helpers;
+mod icf;
 mod log;
 mod saekawa;
 mod types;
 
-use ::log::{debug, error};
+use ::log::error;
 use lazy_static::lazy_static;
-use pbkdf2::pbkdf2_hmac_array;
-use sha1::Sha1;
 use url::Url;
 use winapi::shared::minwindef::{BOOL, DWORD, HINSTANCE, LPVOID, TRUE};
 use winapi::um::winnt::{DLL_PROCESS_ATTACH, DLL_PROCESS_DETACH};
 
 use crate::configuration::Configuration;
+use crate::helpers::hash_endpoint;
 use crate::log::Logger;
 use crate::saekawa::{hook_init, hook_release};
 
@@ -46,23 +47,9 @@ lazy_static! {
 
         result.unwrap().to_string()
     };
-    pub static ref UPSERT_USER_ALL_API_ENCRYPTED: Option<String> = {
-        if CONFIGURATION.crypto.salt.is_empty() {
-            return None;
-        }
-
-        let key_bytes = pbkdf2_hmac_array::<Sha1, 16>(
-            b"UpsertUserAllApi",
-            &CONFIGURATION.crypto.salt,
-            CONFIGURATION.crypto.iterations,
-        );
-
-        let key = faster_hex::hex_string(&key_bytes);
-
-        debug!("Running with encryption support: UpsertUserAllApi maps to {key}");
-
-        Some(key)
-    };
+    pub static ref UPSERT_USER_ALL_API_ENCRYPTED: Option<String> =
+        hash_endpoint("UpsertUserAllApi");
+    pub static ref GET_USER_MUSIC_API_ENCRYPTED: Option<String> = hash_endpoint("GetUserMusicApi");
 }
 
 fn init_logger() {
src/log.rs
@@ -22,7 +22,9 @@ impl Logger {
 impl Write for Logger {
     fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
         if let Ok(c_str) = CString::new(buf) {
-            unsafe { OutputDebugStringA(c_str.as_ptr()); }
+            unsafe {
+                OutputDebugStringA(c_str.as_ptr());
+            }
         }
 
         let _ = std::io::stdout().write(buf);
402
src/saekawa.rs
402
src/saekawa.rs
@ -1,37 +1,53 @@
|
|||||||
use std::ffi::CString;
|
use std::{
|
||||||
|
ffi::CString,
|
||||||
|
fmt::Debug,
|
||||||
|
fs::File,
|
||||||
|
io::Read,
|
||||||
|
path::Path,
|
||||||
|
sync::atomic::{AtomicBool, AtomicU16, Ordering},
|
||||||
|
};
|
||||||
|
|
||||||
use ::log::{debug, error, info};
|
use ::log::{debug, error, info};
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
|
use log::warn;
|
||||||
use retour::static_detour;
|
use retour::static_detour;
|
||||||
|
use serde::de::DeserializeOwned;
|
||||||
|
use widestring::U16CString;
|
||||||
use winapi::{
|
use winapi::{
|
||||||
ctypes::c_void,
|
ctypes::c_void,
|
||||||
shared::minwindef::{__some_function, BOOL, DWORD, LPCVOID, LPDWORD},
|
shared::minwindef::{__some_function, BOOL, DWORD, FALSE, LPCVOID, LPDWORD, LPVOID, MAX_PATH},
|
||||||
um::{
|
um::{
|
||||||
errhandlingapi::GetLastError,
|
errhandlingapi::GetLastError,
|
||||||
libloaderapi::{GetModuleHandleA, GetProcAddress},
|
libloaderapi::{GetModuleHandleA, GetProcAddress},
|
||||||
|
winbase::GetPrivateProfileStringW,
|
||||||
winhttp::HINTERNET,
|
winhttp::HINTERNET,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
configuration::{Configuration, GeneralConfiguration},
|
||||||
|
handlers::score_handler,
|
||||||
helpers::{
|
helpers::{
|
||||||
decrypt_aes256_cbc, execute_tachi_import, is_encrypted_endpoint,
|
decrypt_aes256_cbc, is_encrypted_endpoint, is_endpoint, read_hinternet_url,
|
||||||
is_upsert_user_all_endpoint, read_hinternet_url, read_hinternet_user_agent,
|
read_hinternet_user_agent, read_maybe_compressed_buffer, read_slice, request_tachi,
|
||||||
read_maybe_compressed_buffer, read_slice, request_tachi,
|
|
||||||
},
|
},
|
||||||
|
icf::{decode_icf, IcfData},
|
||||||
types::{
|
types::{
|
||||||
game::UpsertUserAllRequest,
|
game::{UpsertUserAllRequest, UserMusicResponse},
|
||||||
tachi::{
|
tachi::{StatusCheck, TachiResponse, ToTachiImport},
|
||||||
ClassEmblem, Difficulty, Import, ImportClasses, ImportScore, StatusCheck, TachiResponse,
|
|
||||||
},
|
},
|
||||||
},
|
CONFIGURATION, GET_USER_MUSIC_API_ENCRYPTED, TACHI_STATUS_URL, UPSERT_USER_ALL_API_ENCRYPTED,
|
||||||
CONFIGURATION, TACHI_STATUS_URL,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
pub static GAME_MAJOR_VERSION: AtomicU16 = AtomicU16::new(0);
|
||||||
|
pub static PB_IMPORTED: AtomicBool = AtomicBool::new(true);
|
||||||
|
|
||||||
type WinHttpWriteDataFunc = unsafe extern "system" fn(HINTERNET, LPCVOID, DWORD, LPDWORD) -> BOOL;
|
type WinHttpWriteDataFunc = unsafe extern "system" fn(HINTERNET, LPCVOID, DWORD, LPDWORD) -> BOOL;
|
||||||
|
type WinHttpReadDataFunc = unsafe extern "system" fn(HINTERNET, LPVOID, DWORD, LPDWORD) -> BOOL;
|
||||||
|
|
||||||
static_detour! {
|
static_detour! {
|
||||||
static DetourWriteData: unsafe extern "system" fn (HINTERNET, LPCVOID, DWORD, LPDWORD) -> BOOL;
|
static DetourWriteData: unsafe extern "system" fn (HINTERNET, LPCVOID, DWORD, LPDWORD) -> BOOL;
|
||||||
|
static DetourReadData: unsafe extern "system" fn(HINTERNET, LPVOID, DWORD, LPDWORD) -> BOOL;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hook_init() -> Result<()> {
|
pub fn hook_init() -> Result<()> {
|
||||||
@ -39,6 +55,75 @@ pub fn hook_init() -> Result<()> {
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if CONFIGURATION.general.export_pbs {
|
||||||
|
warn!("===============================================================================");
|
||||||
|
warn!("Exporting PBs is enabled. This should only be used once to sync up your scores!");
|
||||||
|
warn!("Leaving it on can make your profile messy! This will be automatically be turned off after exporting is finished.");
|
||||||
|
warn!("You can check when it's done by searching for the message 'Submitting x scores from user ID xxxxx'.");
|
||||||
|
warn!("===============================================================================");
|
||||||
|
|
||||||
|
PB_IMPORTED.store(false, Ordering::SeqCst);
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("Retrieving AMFS path from segatools.ini");
|
||||||
|
|
||||||
|
let mut buf = [0u16; MAX_PATH];
|
||||||
|
let amfs_cfg = unsafe {
|
||||||
|
let sz = GetPrivateProfileStringW(
|
||||||
|
U16CString::from_str_unchecked("vfs").as_ptr(),
|
||||||
|
U16CString::from_str_unchecked("amfs").as_ptr(),
|
||||||
|
U16CString::new().as_ptr(),
|
||||||
|
buf.as_mut_ptr(),
|
||||||
|
MAX_PATH as u32,
|
||||||
|
U16CString::from_str(".\\segatools.ini").unwrap().as_ptr(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if sz == 0 {
|
||||||
|
let ec = GetLastError();
|
||||||
|
return Err(anyhow!(
|
||||||
|
"AMFS path not specified in segatools.ini, error code {ec}"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
match U16CString::from_ptr(buf.as_ptr(), sz as usize) {
|
||||||
|
Ok(data) => data.to_string_lossy(),
|
||||||
|
Err(err) => {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"could not read AMFS path from segatools.ini: {:#}",
|
||||||
|
err
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let amfs_path = Path::new(&amfs_cfg);
|
||||||
|
let icf1_path = amfs_path.join("ICF1");
|
||||||
|
|
||||||
|
if !icf1_path.exists() {
|
||||||
|
return Err(anyhow!("Could not find ICF1 inside AMFS path. You will probably not be able to network without this file, so this hook will also be disabled."));
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("Reading ICF1 located at {:?}", icf1_path);
|
||||||
|
|
||||||
|
let mut icf1_buf = {
|
||||||
|
let mut icf1_file = File::open(icf1_path)?;
|
||||||
|
let mut icf1_buf = Vec::new();
|
||||||
|
icf1_file.read_to_end(&mut icf1_buf)?;
|
||||||
|
icf1_buf
|
||||||
|
};
|
||||||
|
let icf = decode_icf(&mut icf1_buf).map_err(|err| anyhow!("Reading ICF failed: {:#}", err))?;
|
||||||
|
|
||||||
|
for entry in icf {
|
||||||
|
match entry {
|
||||||
|
IcfData::App(app) => {
|
||||||
|
info!("Running on {} {}", app.id, app.version);
|
||||||
|
GAME_MAJOR_VERSION.store(app.version.major, Ordering::Relaxed);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("Pinging Tachi API for status check and token verification");
|
||||||
|
|
||||||
let resp: TachiResponse<StatusCheck> =
|
let resp: TachiResponse<StatusCheck> =
|
||||||
request_tachi("GET", TACHI_STATUS_URL.as_str(), None::<()>)?;
|
request_tachi("GET", TACHI_STATUS_URL.as_str(), None::<()>)?;
|
||||||
let user_id = match resp {
|
let user_id = match resp {
|
||||||
@ -64,15 +149,37 @@ pub fn hook_init() -> Result<()> {
|
|||||||
|
|
||||||
info!("Logged in to Tachi with userID {user_id}");
|
info!("Logged in to Tachi with userID {user_id}");
|
||||||
|
|
||||||
|
debug!("Acquring addresses");
|
||||||
|
|
||||||
let winhttpwritedata = unsafe {
|
let winhttpwritedata = unsafe {
|
||||||
let addr = get_proc_address("winhttp.dll", "WinHttpWriteData")
|
let addr = get_proc_address("winhttp.dll", "WinHttpWriteData")
|
||||||
.map_err(|err| anyhow!("{:#}", err))?;
|
.map_err(|err| anyhow!("{:#}", err))?;
|
||||||
|
|
||||||
|
debug!("WinHttpWriteData: winhttp.dll!{:p}", addr);
|
||||||
|
|
||||||
std::mem::transmute::<_, WinHttpWriteDataFunc>(addr)
|
std::mem::transmute::<_, WinHttpWriteDataFunc>(addr)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let winhttpreaddata = unsafe {
|
||||||
|
let addr = get_proc_address("winhttp.dll", "WinHttpReadData")
|
||||||
|
.map_err(|err| anyhow!("{:#}", err))?;
|
||||||
|
|
||||||
|
debug!("WinHttpReadData: winhttp.dll!{:p}", addr);
|
||||||
|
|
||||||
|
std::mem::transmute::<_, WinHttpReadDataFunc>(addr)
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("Initializing detours");
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
|
debug!("Initializing WinHttpWriteData detour");
|
||||||
DetourWriteData
|
DetourWriteData
|
||||||
.initialize(winhttpwritedata, winhttpwritedata_hook)?
|
.initialize(winhttpwritedata, winhttpwritedata_hook_wrapper)?
|
||||||
|
.enable()?;
|
||||||
|
|
||||||
|
debug!("Initializing WinHttpReadData detour");
|
||||||
|
DetourReadData
|
||||||
|
.initialize(winhttpreaddata, winhttpreaddata_hook_wrapper)?
|
||||||
.enable()?;
|
.enable()?;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -86,12 +193,79 @@ pub fn hook_release() -> Result<()> {
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if DetourWriteData.is_enabled() {
|
||||||
unsafe { DetourWriteData.disable()? };
|
unsafe { DetourWriteData.disable()? };
|
||||||
|
}
|
||||||
|
|
||||||
|
if DetourReadData.is_enabled() {
|
||||||
|
unsafe { DetourReadData.disable()? };
|
+    }
+
     Ok(())
 }
 
-fn winhttpwritedata_hook(
+fn winhttpreaddata_hook_wrapper(
+    h_request: HINTERNET,
+    lp_buffer: LPVOID,
+    dw_number_of_bytes_to_read: DWORD,
+    lpdw_number_of_bytes_read: LPDWORD,
+) -> BOOL {
+    debug!("hit winhttpreaddata");
+
+    let result = unsafe {
+        DetourReadData.call(
+            h_request,
+            lp_buffer,
+            dw_number_of_bytes_to_read,
+            lpdw_number_of_bytes_read,
+        )
+    };
+
+    if result == FALSE {
+        let ec = unsafe { GetLastError() };
+        error!("Calling original WinHttpReadData function failed: {ec}");
+        return result;
+    }
+
+    let pb_imported = PB_IMPORTED.load(Ordering::SeqCst);
+    if cfg!(not(debug_assertions)) && pb_imported {
+        return result;
+    }
+
+    if let Err(err) = winhttprwdata_hook::<UserMusicResponse>(
+        h_request,
+        lp_buffer,
+        dw_number_of_bytes_to_read,
+        "GetUserMusicApi",
+        &GET_USER_MUSIC_API_ENCRYPTED,
+        move |_| {
+            if pb_imported {
+                return false;
+            }
+
+            PB_IMPORTED.store(true, Ordering::Relaxed);
+            if let Err(err) = Configuration::update(Configuration {
+                general: GeneralConfiguration {
+                    export_pbs: false,
+                    ..CONFIGURATION.general
+                },
+                cards: CONFIGURATION.cards.clone(),
+                crypto: CONFIGURATION.crypto.clone(),
+                tachi: CONFIGURATION.tachi.clone(),
+            }) {
+                error!("Could not update configuration to disable exporting PBs: {err:?}");
+            }
+
+            true
+        },
+    ) {
+        error!("{err:?}");
+    }
+
+    result
+}
+
+fn winhttpwritedata_hook_wrapper(
     h_request: HINTERNET,
     lp_buffer: LPCVOID,
     dw_number_of_bytes_to_write: DWORD,
@@ -99,48 +273,62 @@ fn winhttpwritedata_hook(
 ) -> BOOL {
     debug!("hit winhttpwritedata");
 
-    let orig = || unsafe {
+    if let Err(err) = winhttprwdata_hook::<UpsertUserAllRequest>(
+        h_request,
+        lp_buffer,
+        dw_number_of_bytes_to_write,
+        "UpsertUserAllApi",
+        &UPSERT_USER_ALL_API_ENCRYPTED,
+        |upsert_req| {
+            let user_data = &upsert_req.upsert_user_all.user_data[0];
+            let access_code = &user_data.access_code;
+            if !CONFIGURATION.cards.whitelist.is_empty()
+                && !CONFIGURATION.cards.whitelist.contains(access_code)
+            {
+                info!("Card {access_code} is not whitelisted, skipping score submission");
+                return false;
+            }
+
+            true
+        },
+    ) {
+        error!("{err:?}");
+    }
+
+    unsafe {
         DetourWriteData.call(
             h_request,
             lp_buffer,
             dw_number_of_bytes_to_write,
             lpdw_number_of_bytes_written,
         )
-    };
-
-    let url = match read_hinternet_url(h_request) {
-        Ok(url) => url,
-        Err(err) => {
-            error!("There was an error reading the request URL: {:#}", err);
-            return orig();
-        }
-    };
-    let user_agent = match read_hinternet_user_agent(h_request) {
-        Ok(ua) => ua,
-        Err(err) => {
-            error!(
-                "There was an error reading the request's User-Agent: {:#}",
-                err
-            );
-            return orig();
-        }
-    };
-    debug!("request from user-agent {user_agent} with URL: {url}");
-
-    // Quick explanation for this rodeo:
-    //
-    // Since CRYSTAL+, requests are encrypted with a hardcoded key/IV pair, and endpoints
-    // are supposed to be HMAC'd with a salt, and then stored in the User-Agent in the format
-    // of `{hashed_endpoint}#{numbers}`, as well as mangling the URL.
-    //
-    // However, there was an oversight in the implementation for versions PARADISE LOST
-    // and older: the endpoint stored in the User-Agent was not hashed. A mangled URL
-    // still indicates encryption was used, however, so you still need to provide key/IV. Only
-    // the salt is not needed.
-    let Some(maybe_endpoint) = url.split('/').last() else {
-        error!("Could not get name of endpoint");
-        return orig();
-    };
+    }
+}
+
+/// Common hook for WinHttpWriteData/WinHttpReadData. The flow is similar for both
+/// hooks:
+/// - Read URL and User-Agent from the handle
+/// - Extract the API method from the URL, and exit if it's not the method we're
+///   looking for
+/// - Determine if the API is encrypted, and exit if it is and we don't have keys
+/// - Parse the body and convert it to Tachi's BATCH-MANUAL
+/// - Submit it off to Tachi, if our guard function (which takes the parsed body) allows so.
+fn winhttprwdata_hook<'a, T: Debug + DeserializeOwned + ToTachiImport + 'static>(
+    handle: HINTERNET,
+    buffer: *const c_void,
+    bufsz: DWORD,
+    unencrypted_endpoint: &str,
+    encrypted_endpoint: &Option<String>,
+    guard_fn: impl Fn(&T) -> bool + Send + 'static,
+) -> Result<()> {
+    let url = read_hinternet_url(handle)?;
+    let user_agent = read_hinternet_user_agent(handle)?;
+    debug!("user-agent {user_agent}, URL: {url}");
+
+    let maybe_endpoint = url
+        .split('/')
+        .last()
+        .ok_or(anyhow!("Could not extract last part of a split URL"))?;
+
     let is_encrypted = is_encrypted_endpoint(maybe_endpoint);
 
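A minimal, self-contained sketch of the one-shot flag pattern the new read hook above relies on (PB_IMPORTED): check an AtomicBool before doing any work, flip it once the import is allowed to proceed. The helper name maybe_import_pbs is hypothetical; the real code additionally rewrites saekawa.toml to turn export_pbs off after the first import.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// One-shot guard: PBs should only ever be imported once per process.
static PB_IMPORTED: AtomicBool = AtomicBool::new(false);

fn maybe_import_pbs(import: impl FnOnce()) {
    if PB_IMPORTED.load(Ordering::SeqCst) {
        return; // already imported earlier in this process
    }
    PB_IMPORTED.store(true, Ordering::Relaxed);
    import();
}

fn main() {
    maybe_import_pbs(|| println!("importing PBs"));
    maybe_import_pbs(|| println!("this never runs"));
}
```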
@@ -148,53 +336,50 @@ fn winhttpwritedata_hook(
         user_agent
             .split('#')
             .next()
-            .expect("there should be at least one item in the split")
+            .ok_or(anyhow!("there should be at least one item in the split"))?
     } else {
         maybe_endpoint
     };
 
+    let is_correct_endpoint = is_endpoint(endpoint, unencrypted_endpoint, encrypted_endpoint);
+    if cfg!(not(debug_assertions)) && !is_correct_endpoint {
+        return Ok(());
+    }
+
     if is_encrypted && (CONFIGURATION.crypto.key.is_empty() || CONFIGURATION.crypto.iv.is_empty()) {
-        error!("Communications with the server is encrypted, but no keys were provided. Fill in the keys by editing 'saekawa.toml'.");
-        return orig();
+        return Err(anyhow!("Communications with the server is encrypted, but no keys were provided. Fill in the keys by editing 'saekawa.toml'."));
     }
 
-    let is_upsert_user_all = is_upsert_user_all_endpoint(endpoint);
-    // Exit early if release mode and the endpoint is not what we're looking for
-    if cfg!(not(debug_assertions)) && !is_upsert_user_all {
-        return orig();
-    }
-
-    let mut raw_request_body =
-        match read_slice(lp_buffer as *const u8, dw_number_of_bytes_to_write as usize) {
+    let mut raw_body = match read_slice(buffer as *const u8, bufsz as usize) {
         Ok(data) => data,
         Err(err) => {
-            error!("There was an error reading the request body: {:#}", err);
-            return orig();
+            return Err(anyhow!(
+                "There was an error reading the response body: {:#}",
+                err
+            ));
         }
     };
 
-    debug!("raw request body: {}", faster_hex::hex_string(&raw_request_body));
+    debug!("raw body: {}", faster_hex::hex_string(&raw_body));
 
-    // Rest of the processing can be safely moved to a different thread, since we're not dealing
-    // with the hooked function's stuff anymore, probably.
     std::thread::spawn(move || {
-        let request_body_compressed = if is_encrypted {
+        let compressed_body = if is_encrypted {
             match decrypt_aes256_cbc(
-                &mut raw_request_body,
+                &mut raw_body,
                 &CONFIGURATION.crypto.key,
                 &CONFIGURATION.crypto.iv,
             ) {
                 Ok(res) => res,
                 Err(err) => {
-                    error!("Could not decrypt request: {:#}", err);
+                    error!("Could not decrypt response: {:#}", err);
                    return;
                }
            }
        } else {
-            raw_request_body
+            raw_body
        };
 
-        let request_body = match read_maybe_compressed_buffer(&request_body_compressed[..]) {
+        let body = match read_maybe_compressed_buffer(&compressed_body[..]) {
            Ok(data) => data,
            Err(err) => {
                error!("There was an error decoding the request body: {:#}", err);
@@ -202,90 +387,17 @@ fn winhttpwritedata_hook(
             }
         };
 
-        debug!("decoded request body: {request_body}");
+        debug!("decoded response body: {body}");
 
-        // Reached in debug mode
-        if !is_upsert_user_all {
+        // Hit in debug build
+        if !is_correct_endpoint {
             return;
         }
 
-        let upsert_req = match serde_json::from_str::<UpsertUserAllRequest>(&request_body) {
-            Ok(req) => req,
-            Err(err) => {
-                error!("Could not parse request body: {:#}", err);
-                return;
-            }
-        };
-
-        debug!("parsed request body: {:#?}", upsert_req);
-
-        let user_data = &upsert_req.upsert_user_all.user_data[0];
-        let access_code = &user_data.access_code;
-        if !CONFIGURATION.cards.whitelist.is_empty()
-            && !CONFIGURATION.cards.whitelist.contains(access_code)
-        {
-            info!("Card {access_code} is not whitelisted, skipping score submission");
-            return;
-        }
-
-        let classes = if CONFIGURATION.general.export_class {
-            Some(ImportClasses {
-                dan: ClassEmblem::try_from(user_data.class_emblem_medal).ok(),
-                emblem: ClassEmblem::try_from(user_data.class_emblem_base).ok(),
-            })
-        } else {
-            None
-        };
-
-        let scores = upsert_req
-            .upsert_user_all
-            .user_playlog_list
-            .into_iter()
-            .filter_map(|playlog| {
-                let result =
-                    ImportScore::try_from_playlog(playlog, CONFIGURATION.general.fail_over_lamp);
-                if result
-                    .as_ref()
-                    .is_ok_and(|v| v.difficulty != Difficulty::WorldsEnd)
-                {
-                    result.ok()
-                } else {
-                    None
-                }
-            })
-            .collect::<Vec<ImportScore>>();
-
-        if scores.is_empty() {
-            if classes.is_none() {
-                return;
-            }
-
-            if classes
-                .clone()
-                .is_some_and(|v| v.dan.is_none() && v.emblem.is_none())
-            {
-                return;
-            }
-        }
-
-        let import = Import {
-            classes,
-            scores,
-            ..Default::default()
-        };
-
-        info!(
-            "Submitting {} scores from access code {}",
-            import.scores.len(),
-            user_data.access_code
-        );
-
-        if let Err(err) = execute_tachi_import(import) {
-            error!("{:#}", err);
-        }
+        score_handler::<T>(body, guard_fn)
     });
 
-    orig()
+    Ok(())
 }
 
 fn get_proc_address(module: &str, function: &str) -> Result<*mut __some_function> {
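A self-contained sketch of the guard-closure pattern that winhttprwdata_hook is built around: the hook parses the body into some T: DeserializeOwned and only continues with submission if the caller-supplied closure approves. ExampleBody and handle_body here are hypothetical stand-ins for illustration, not the crate's API.

```rust
use anyhow::Result;
use serde::de::DeserializeOwned;

// Hypothetical stand-in for a parsed game payload; the real hook is generic
// over `T: DeserializeOwned + ToTachiImport`.
#[derive(serde::Deserialize, Debug)]
struct ExampleBody {
    user_id: String,
}

// Sketch of the guard-closure pattern: parse the body into T, then let the
// caller-supplied closure decide whether submission should continue.
fn handle_body<T: DeserializeOwned, F: Fn(&T) -> bool>(body: &str, guard_fn: F) -> Result<()> {
    let parsed: T = serde_json::from_str(body)?;
    if !guard_fn(&parsed) {
        // Guard rejected the payload (e.g. card not whitelisted, PBs already imported).
        return Ok(());
    }
    // ... converting to BATCH-MANUAL and submitting to Tachi would happen here ...
    Ok(())
}

fn main() -> Result<()> {
    handle_body::<ExampleBody, _>(r#"{"user_id":"1234"}"#, |b| {
        println!("guard saw user {}", b.user_id);
        true
    })
}
```

Keeping the guard as a closure is what lets the write hook check the card whitelist while the read hook flips the one-shot PB flag, without duplicating the URL/decrypt/decompress plumbing.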
@@ -5,12 +5,22 @@ fn deserialize_bool<'de, D>(deserializer: D) -> Result<bool, D::Error>
 where
     D: de::Deserializer<'de>,
 {
-    let s: &str = de::Deserialize::deserialize(deserializer)?;
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum StringOrBoolean {
+        String(String),
+        Bool(bool),
+    }
+
+    let s: StringOrBoolean = de::Deserialize::deserialize(deserializer)?;
 
     match s {
+        StringOrBoolean::String(s) => match s.as_str() {
         "true" => Ok(true),
         "false" => Ok(false),
-        _ => Err(de::Error::unknown_variant(s, &["true", "false"])),
+        _ => Err(de::Error::unknown_variant(&s, &["true", "false"])),
+        },
+        StringOrBoolean::Bool(b) => Ok(b),
     }
 }
 
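The same untagged-enum trick in isolation, runnable with serde and serde_json: a field that arrives either as the string "true"/"false" or as a real JSON boolean deserializes to the same bool. The Flags struct is hypothetical and only here for illustration.

```rust
use serde::{de, Deserialize};

// Accept either a stringly-typed boolean or a native one, mirroring the
// helper in the patch.
fn deserialize_bool<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
    D: de::Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum StringOrBoolean {
        String(String),
        Bool(bool),
    }

    match de::Deserialize::deserialize(deserializer)? {
        StringOrBoolean::String(s) => match s.as_str() {
            "true" => Ok(true),
            "false" => Ok(false),
            other => Err(de::Error::unknown_variant(other, &["true", "false"])),
        },
        StringOrBoolean::Bool(b) => Ok(b),
    }
}

#[derive(Deserialize, Debug)]
struct Flags {
    #[serde(deserialize_with = "deserialize_bool")]
    is_success: bool,
}

fn main() {
    // Both spellings deserialize to the same value.
    let a: Flags = serde_json::from_str(r#"{"is_success":"true"}"#).unwrap();
    let b: Flags = serde_json::from_str(r#"{"is_success":true}"#).unwrap();
    assert_eq!(a.is_success, b.is_success);
}
```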
@@ -80,6 +90,43 @@ fn default_judge_heaven() -> u32 {
     0
 }
 
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UserMusicDetail {
+    #[serde(deserialize_with = "deserialize_number_from_string")]
+    pub music_id: u32,
+
+    #[serde(deserialize_with = "deserialize_number_from_string")]
+    pub level: u32,
+
+    #[serde(deserialize_with = "deserialize_number_from_string")]
+    pub score_max: u32,
+
+    #[serde(deserialize_with = "deserialize_bool")]
+    pub is_all_justice: bool,
+
+    #[serde(deserialize_with = "deserialize_bool")]
+    pub is_full_combo: bool,
+
+    #[serde(deserialize_with = "deserialize_bool")]
+    pub is_success: bool,
+}
+
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UserMusicItem {
+    pub length: u32,
+    pub user_music_detail_list: Vec<UserMusicDetail>,
+}
+
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct UserMusicResponse {
+    pub user_id: String,
+    pub length: u32,
+    pub user_music_list: Vec<UserMusicItem>,
+}
+
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct UpsertUserAllBody {
@@ -4,7 +4,7 @@ use num_enum::TryFromPrimitive;
 use serde::{de, Deserialize, Deserializer, Serialize};
 use serde_json::{Map, Value};
 
-use super::game::UserPlaylog;
+use super::game::{UpsertUserAllRequest, UserMusicDetail, UserMusicResponse, UserPlaylog};
 
 #[derive(Debug, Clone)]
 pub enum TachiResponse<T> {
@@ -178,9 +178,15 @@ pub struct ImportScore {
     pub match_type: String,
     pub identifier: String,
     pub difficulty: Difficulty,
-    pub time_achieved: u128,
-    pub judgements: Judgements,
-    pub optional: OptionalMetrics,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub time_achieved: Option<u128>,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub judgements: Option<Judgements>,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub optional: Option<OptionalMetrics>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, TryFromPrimitive)]
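A small sketch of what the new Option fields buy: with skip_serializing_if = "Option::is_none", scores built from GetUserMusicApi (which carry no timestamp or judgement data) simply omit those keys from the serialized JSON instead of emitting null. ScoreSketch is a trimmed-down, hypothetical stand-in for ImportScore.

```rust
use serde::Serialize;

// Hypothetical, cut-down ImportScore to show the effect of
// `skip_serializing_if = "Option::is_none"` on the serialized output.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ScoreSketch {
    score: u32,
    #[serde(skip_serializing_if = "Option::is_none")]
    time_achieved: Option<u128>,
    #[serde(skip_serializing_if = "Option::is_none")]
    judgements: Option<u32>,
}

fn main() {
    let pb = ScoreSketch {
        score: 1_007_500,
        time_achieved: None,
        judgements: None,
    };
    // Prints {"score":1007500}: the None fields are omitted entirely.
    println!("{}", serde_json::to_string(&pb).unwrap());
}
```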
@@ -239,7 +245,11 @@ pub struct OptionalMetrics {
 }
 
 impl ImportScore {
-    pub fn try_from_playlog(p: UserPlaylog, fail_over_lamp: bool) -> Result<ImportScore> {
+    pub fn try_from_playlog(
+        p: &UserPlaylog,
+        major_version: u16,
+        fail_over_lamp: bool,
+    ) -> Result<ImportScore> {
         let lamp = if !p.is_clear && fail_over_lamp {
             TachiLamp::Failed
         } else if p.is_all_justice {
@@ -263,8 +273,7 @@ impl ImportScore {
             miss: p.judge_guilty,
         };
 
-        let rom_major_version = p.rom_version.split('.').next().unwrap_or("2");
-        let difficulty = if rom_major_version == "1" && p.level == 4 {
+        let difficulty = if major_version == 1 && p.level == 4 {
             Difficulty::WorldsEnd
         } else {
             Difficulty::try_from(p.level)?
@@ -279,13 +288,148 @@ impl ImportScore {
             score: p.score,
             lamp,
             match_type: "inGameID".to_string(),
-            identifier: p.music_id,
+            identifier: p.music_id.clone(),
             difficulty,
-            time_achieved: jst_time.timestamp_millis() as u128,
-            judgements,
-            optional: OptionalMetrics {
+            time_achieved: Some(jst_time.timestamp_millis() as u128),
+            judgements: Some(judgements),
+            optional: Some(OptionalMetrics {
                 max_combo: p.max_combo,
-            },
+            }),
+        })
+    }
+
+    fn try_from_music_detail(
+        d: &UserMusicDetail,
+        major_version: u16,
+        fail_over_lamp: bool,
+    ) -> Result<ImportScore> {
+        let lamp = if !d.is_success && fail_over_lamp {
+            TachiLamp::Failed
+        } else if d.is_all_justice {
+            TachiLamp::AllJustice
+        } else if d.is_full_combo {
+            TachiLamp::FullCombo
+        } else if d.is_success {
+            TachiLamp::Clear
+        } else {
+            TachiLamp::Failed
+        };
+
+        let difficulty = if major_version == 1 && d.level == 4 {
+            Difficulty::WorldsEnd
+        } else {
+            Difficulty::try_from(d.level)?
+        };
+
+        Ok(ImportScore {
+            score: d.score_max,
+            lamp,
+            match_type: "inGameID".to_string(),
+            identifier: d.music_id.to_string(),
+            difficulty,
+            time_achieved: None,
+            judgements: None,
+            optional: None,
         })
     }
 }
+
+pub trait ToTachiImport {
+    fn displayed_id(&self) -> &str;
+    fn displayed_id_type(&self) -> &str;
+    fn to_tachi_import(
+        &self,
+        major_version: u16,
+        export_class: bool,
+        fail_over_lamp: bool,
+    ) -> Import;
+}
+
+impl ToTachiImport for UserMusicResponse {
+    fn displayed_id(&self) -> &str {
+        &self.user_id
+    }
+
+    fn displayed_id_type(&self) -> &str {
+        "user ID"
+    }
+
+    fn to_tachi_import(&self, major_version: u16, _: bool, fail_over_lamp: bool) -> Import {
+        let scores = self
+            .user_music_list
+            .iter()
+            .flat_map(|item| {
+                item.user_music_detail_list.iter().filter_map(|d| {
+                    let result =
+                        ImportScore::try_from_music_detail(d, major_version, fail_over_lamp);
+                    if result
+                        .as_ref()
+                        .is_ok_and(|v| v.difficulty != Difficulty::WorldsEnd)
+                    {
+                        result.ok()
+                    } else {
+                        None
+                    }
+                })
+            })
+            .collect::<Vec<_>>();
+
+        Import {
+            scores,
+            ..Default::default()
+        }
+    }
+}
+
+impl ToTachiImport for UpsertUserAllRequest {
+    fn displayed_id(&self) -> &str {
+        let user_data = &self.upsert_user_all.user_data[0];
+
+        &user_data.access_code
+    }
+
+    fn displayed_id_type(&self) -> &str {
+        "access code"
+    }
+
+    fn to_tachi_import(
+        &self,
+        major_version: u16,
+        export_class: bool,
+        fail_over_lamp: bool,
+    ) -> Import {
+        let user_data = &self.upsert_user_all.user_data[0];
+
+        let classes = if export_class {
+            Some(ImportClasses {
+                dan: ClassEmblem::try_from(user_data.class_emblem_medal).ok(),
+                emblem: ClassEmblem::try_from(user_data.class_emblem_base).ok(),
+            })
+        } else {
+            None
+        };
+
+        let scores = self
+            .upsert_user_all
+            .user_playlog_list
+            .iter()
+            .filter_map(|playlog| {
+                let result = ImportScore::try_from_playlog(playlog, major_version, fail_over_lamp);
+                if result
+                    .as_ref()
+                    .is_ok_and(|v| v.difficulty != Difficulty::WorldsEnd)
+                {
+                    result.ok()
+                } else {
+                    None
+                }
+            })
+            .collect::<Vec<ImportScore>>();
+
+        Import {
+            classes,
+            scores,
+            ..Default::default()
+        }
+    }
+}
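A self-contained sketch (all types here are hypothetical mirrors, not the crate's own) of why the ToTachiImport trait is useful: both the playlog upsert and the PB response can flow through one generic submission path that only needs the trait bound.

```rust
// Hypothetical, minimal mirror of the Import/ToTachiImport shapes from the patch.
struct Import {
    scores: Vec<u32>,
}

trait ToTachiImport {
    fn displayed_id(&self) -> &str;
    fn displayed_id_type(&self) -> &str;
    fn to_tachi_import(&self, major_version: u16, export_class: bool, fail_over_lamp: bool) -> Import;
}

// Stand-in for one of the two payload types (e.g. the playlog upsert).
struct PlaylogLike {
    access_code: String,
}

impl ToTachiImport for PlaylogLike {
    fn displayed_id(&self) -> &str {
        &self.access_code
    }

    fn displayed_id_type(&self) -> &str {
        "access code"
    }

    fn to_tachi_import(&self, _major_version: u16, _export_class: bool, _fail_over_lamp: bool) -> Import {
        Import { scores: vec![1_005_000] }
    }
}

// The hook-side handler only needs the trait bound, not the concrete payload type.
fn submit<T: ToTachiImport>(body: &T, major_version: u16) {
    let import = body.to_tachi_import(major_version, false, false);
    println!(
        "Submitting {} scores from {} {}",
        import.scores.len(),
        body.displayed_id_type(),
        body.displayed_id()
    );
}

fn main() {
    let body = PlaylogLike { access_code: "0000000000000000".to_string() };
    submit(&body, 2);
}
```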