Add TXTH chunk_* config for chunked data and fix leaks [SF EX 3 (PS3)]

bnnm 2019-03-21 23:08:11 +01:00
parent cb80b198ce
commit 0aec721788
6 changed files with 330 additions and 29 deletions

View File

@ -316,13 +316,30 @@ name_size = (number)|(offset)|(field)
subfile_offset = (number)|(offset)|(field)
subfile_size = (number)|(offset)|(field)
subfile_extension = (string)
# CHUNK DEINTERLEAVING [OPTIONAL]
# Some files interleave raw data chunks, for example 3 stereo songs pasted together,
# alternating 0x10000 bytes of data from each. These settings let vgmstream play
# one of the chunks while ignoring the rest (read 0x10000 bytes, skip 0x10000*2).
# The file is first "dechunked" and then played using the other settings (start_offset
# then points within the internal "dechunked" file).
#
# You need to set:
# - start: where all chunk data starts (normally 0x00)
# - size: amount of data in a single chunk (ex. 0x10000)
# - count: total number of interleaved chunks (ex. 3 = 3 interleaved songs)
# - number: first chunk to play (ex. 1=0x00000, 2=0x10000, 3=0x20000...)
# If you set subsong_count first, chunk_number will be set automatically per subsong.
# A short example is shown right after this list.
chunk_start = (number)|(offset)|(field)
chunk_size = (number)|(offset)|(field)
chunk_count = (number)|(offset)|(field)
chunk_number = (number)|(offset)|(field)
```
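As a concrete illustration, the three-song layout described in the comments above could be dechunked with a minimal sketch like this (codec, interleave and sample rate are hypothetical placeholders, not from any real game):
```
# hypothetical sketch: 3 stereo songs interleaved in 0x10000 chunks
codec = PSX               # placeholder codec
interleave = 0x8000       # placeholder interleave
sample_rate = 44100
channels = 2
chunk_start = 0x00
chunk_size = 0x10000
chunk_count = 3
chunk_number = 1          # 1 = first song, 2 = second, 3 = third
# set after the chunk config, since sizes refer to the dechunked file
start_offset = 0x00
num_samples = data_size
```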
## Usages
### Temporary values
Most commands are evaluated and calculated immediately, every time they are found.
This is by design, as it can be used to adjust and tweak certain calculations.
Most commands are evaluated and calculated immediately, every time they are found. This is by design, as it can be used to adjust and tweak certain calculations.
It makes TXTHs a bit harder to follow, as they are order-dependent, but otherwise some things would be hard to accomplish or would become ambiguous.
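For instance, interleave = half_size is rejected unless channels has already been set (the value is derived from the current channel count), so the order below matters; a minimal sketch:
```
# channels must come before interleave = half_size,
# since the calculation needs the current channel count
channels = 2
interleave = half_size
```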
@ -482,4 +499,36 @@ num_samples = @0x10
loop_start_sample = @0x14
loop_end_sample = @0x18
```
Most fields can't be changed after parsing, since it doesn't make much sense technically, as the parsed subfile should supply them.
Most fields can't be changed after parsing, since it doesn't make much sense technically, as the parsed subfile should supply them. You can still supply them when using bytes-to-samples conversions, though.
```
# parses subfile at start
subfile_offset = 0x20
# force recalculation of num_samples
codec = PSX
start_offset = 0x40
num_samples = data_size
```
### Chunks
Chunks affect some values (padding size, data size, etc.) and are a bit order-sensitive at the moment, due to technical complexities:
```
# Street Fighter EX3
# base config is defined normally
codec = PSX
sample_rate = 44100
channels = 2
interleave = 0x8000
# use subsong_count instead of chunk_number to expose each chunk as a subsong
subsong_count = 26
#chunk_number = 1
chunk_start = 0
chunk_size = 0x10000
chunk_count = 26
# set after the chunk config (sizes change once 'dechunked')
start_offset = 0x00
padding_size = auto-empty
num_samples = data_size
```
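Note that in this example each chunk becomes one subsong: since subsong_count is set, the chunk to play is taken from the selected subsong rather than from chunk_number (which is why chunk_number is left commented out).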

View File

@ -292,6 +292,10 @@
RelativePath=".\meta\sqex_scd_streamfile.h"
>
</File>
<File
RelativePath=".\meta\txth_streamfile.h"
>
</File>
<File
RelativePath=".\meta\ubi_bao_streamfile.h"
>

View File

@ -115,6 +115,7 @@
<ClInclude Include="meta\opus_interleave_streamfile.h" />
<ClInclude Include="meta\sfh_streamfile.h" />
<ClInclude Include="meta\sqex_scd_streamfile.h" />
<ClInclude Include="meta\txth_streamfile.h" />
<ClInclude Include="meta\ubi_bao_streamfile.h" />
<ClInclude Include="meta\ubi_sb_streamfile.h" />
<ClInclude Include="meta\ubi_lyn_ogg_streamfile.h" />

View File

@ -116,6 +116,9 @@
<ClInclude Include="meta\sqex_scd_streamfile.h">
<Filter>meta\Header Files</Filter>
</ClInclude>
<ClInclude Include="meta\txth_streamfile.h">
<Filter>meta\Header Files</Filter>
</ClInclude>
<ClInclude Include="meta\ubi_bao_streamfile.h">
<Filter>meta\Header Files</Filter>
</ClInclude>

View File

@ -1,6 +1,7 @@
#include "meta.h"
#include "../coding/coding.h"
#include "../layout/layout.h"
#include "txth_streamfile.h"
#define TXT_LINE_MAX 0x2000
@ -57,6 +58,7 @@ typedef struct {
uint32_t data_size;
int data_size_set;
uint32_t start_offset;
uint32_t next_offset;
uint32_t padding_size;
int sample_type;
@ -93,6 +95,14 @@ typedef struct {
uint32_t subfile_size;
char subfile_extension[32];
uint32_t chunk_number;
uint32_t chunk_start;
uint32_t chunk_size;
uint32_t chunk_count;
int chunk_start_set;
int chunk_size_set;
int chunk_count_set;
/* original STREAMFILE and its type (may be an unsupported "base" file or a .txth) */
STREAMFILE *streamFile;
int streamfile_is_txth;
@ -107,8 +117,9 @@ typedef struct {
} txth_header;
static STREAMFILE * open_txth(STREAMFILE * streamFile);
static VGMSTREAM *init_subfile(txth_header * txth);
static STREAMFILE * open_txth(STREAMFILE * streamFile);
static void clean_txth(txth_header * txth);
static int parse_txth(txth_header * txth);
@ -134,8 +145,8 @@ VGMSTREAM * init_vgmstream_txth(STREAMFILE *streamFile) {
txth.streambody_opened = 0;
}
else {
/* accept base file (no need for ID or ext checks --if a companion .TXTH exists all is good)
* (player still needs to accept the streamfile's ext, so at worst rename to .vgmstream) */
/* accept base file (no need for ID or ext checks --if a companion .TXTH exists all is good).
* player still needs to accept the streamfile's ext, so at worst rename to .vgmstream */
STREAMFILE * streamText = open_txth(streamFile);
if (!streamText) goto fail;
@ -156,7 +167,9 @@ VGMSTREAM * init_vgmstream_txth(STREAMFILE *streamFile) {
/* special case of parsing subfiles */
if (txth.subfile_set) {
return init_subfile(&txth);
VGMSTREAM *subfile_vgmstream = init_subfile(&txth);
clean_txth(&txth);
return subfile_vgmstream;
}
@ -495,15 +508,11 @@ VGMSTREAM * init_vgmstream_txth(STREAMFILE *streamFile) {
if ( !vgmstream_open_stream(vgmstream,txth.streamBody,txth.start_offset) )
goto fail;
if (txth.streamtext_opened) close_streamfile(txth.streamText);
if (txth.streamhead_opened) close_streamfile(txth.streamHead);
if (txth.streambody_opened) close_streamfile(txth.streamBody);
clean_txth(&txth);
return vgmstream;
fail:
if (txth.streamtext_opened) close_streamfile(txth.streamText);
if (txth.streamhead_opened) close_streamfile(txth.streamHead);
if (txth.streambody_opened) close_streamfile(txth.streamBody);
clean_txth(&txth);
close_vgmstream(vgmstream);
return NULL;
}
@ -608,8 +617,58 @@ static STREAMFILE * open_txth(STREAMFILE * streamFile) {
return NULL;
}
static void clean_txth(txth_header * txth) {
/* close stuff manually opened during parse */
if (txth->streamtext_opened) close_streamfile(txth->streamText);
if (txth->streamhead_opened) close_streamfile(txth->streamHead);
if (txth->streambody_opened) close_streamfile(txth->streamBody);
}
/* ****************************************************************** */
static void set_body_chunk(txth_header * txth) {
STREAMFILE *temp_streamFile = NULL;
/* sets body "chunk" if all needed values are set
* (done inline for padding/get_samples/etc calculators to work) */
if (!txth->chunk_start_set || !txth->chunk_size_set || !txth->chunk_count_set)
return;
if (txth->chunk_size == 0 || txth->chunk_start > txth->data_size || txth->chunk_count == 0)
return;
if (!txth->streamBody)
return;
/* treat chunks as subsongs */
if (txth->subsong_count > 1)
txth->chunk_number = txth->target_subsong;
if (txth->chunk_number == 0)
txth->chunk_number = 1;
if (txth->chunk_number > txth->chunk_count)
return;
temp_streamFile = setup_txth_streamfile(txth->streamBody, txth->chunk_start, txth->chunk_size, txth->chunk_count, txth->chunk_number - 1);
if (!temp_streamFile) return;
if (txth->streambody_opened) {
close_streamfile(txth->streamBody);
txth->streamBody = NULL;
txth->streambody_opened = 0;
}
txth->streamBody = temp_streamFile;
txth->streambody_opened = 1;
/* cancel values once set, to avoid weirdness and possibly allow chunks-in-chunks? */
txth->chunk_start_set = 0;
txth->chunk_size_set = 0;
txth->chunk_count_set = 0;
/* re-apply */
if (!txth->data_size_set) {
txth->data_size = get_streamfile_size(txth->streamBody);
}
}
static int parse_keyval(STREAMFILE * streamFile, txth_header * txth, const char * key, char * val);
static int parse_num(STREAMFILE * streamFile, txth_header * txth, const char * val, uint32_t * out_value);
static int parse_string(STREAMFILE * streamFile, txth_header * txth, const char * val, char * str);
@ -668,7 +727,7 @@ static int parse_txth(txth_header * txth) {
if (!txth->streamBody)
goto fail;
if (txth->data_size > get_streamfile_size(txth->streamBody) - txth->start_offset || txth->data_size == 0)
if (txth->data_size > get_streamfile_size(txth->streamBody) - txth->start_offset || txth->data_size <= 0)
txth->data_size = get_streamfile_size(txth->streamBody) - txth->start_offset;
return 1;
@ -679,6 +738,7 @@ fail:
static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char * key, char * val) {
//;VGM_LOG("TXTH: key=%s, val=%s\n", key, val);
/* CODEC */
if (is_string(key,"codec")) {
if (is_string(val,"PSX")) txth->codec = PSX;
else if (is_string(val,"XBOX")) txth->codec = XBOX;
@ -732,6 +792,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
else if (is_string(key,"codec_mode")) {
if (!parse_num(txth->streamHead,txth,val, &txth->codec_mode)) goto fail;
}
/* VALUE MODIFIERS */
else if (is_string(key,"value_mul") || is_string(key,"value_*")) {
if (!parse_num(txth->streamHead,txth,val, &txth->value_mul)) goto fail;
}
@ -744,6 +806,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
else if (is_string(key,"value_sub") || is_string(key,"value_-")) {
if (!parse_num(txth->streamHead,txth,val, &txth->value_sub)) goto fail;
}
/* ID VALUES */
else if (is_string(key,"id_value")) {
if (!parse_num(txth->streamHead,txth,val, &txth->id_value)) goto fail;
}
@ -752,6 +816,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
if (txth->id_value != txth->id_offset) /* evaluate current ID */
goto fail;
}
/* INTERLEAVE / FRAME SIZE */
else if (is_string(key,"interleave")) {
if (is_string(val,"half_size")) {
if (txth->channels == 0) goto fail;
@ -770,33 +836,38 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
if (!parse_num(txth->streamHead,txth,val, &txth->interleave_last)) goto fail;
}
}
/* BASE CONFIG */
else if (is_string(key,"channels")) {
if (!parse_num(txth->streamHead,txth,val, &txth->channels)) goto fail;
}
else if (is_string(key,"sample_rate")) {
if (!parse_num(txth->streamHead,txth,val, &txth->sample_rate)) goto fail;
}
/* DATA CONFIG */
else if (is_string(key,"start_offset")) {
if (!parse_num(txth->streamHead,txth,val, &txth->start_offset)) goto fail;
/* apply */
if (!txth->data_size_set) {
uint32_t body_size = !txth->streamBody ? 0 : get_streamfile_size(txth->streamBody);
/* with subsongs we want to clamp body_size from this subsong start to next subsong start */
/* with subsongs we want to clamp data_size from this subsong start to next subsong start */
txth->next_offset = txth->data_size;
if (txth->subsong_count > 1 && txth->target_subsong < txth->subsong_count) {
uint32_t next_offset;
/* temp move to next start_offset and move back*/
txth->target_subsong++;
parse_num(txth->streamHead,txth,val, &next_offset);
parse_num(txth->streamHead,txth,val, &txth->next_offset);
txth->target_subsong--;
if (next_offset > txth->start_offset) {
body_size = next_offset;
}
if (txth->next_offset < txth->start_offset)
txth->next_offset = 0;
}
if (body_size && body_size > txth->start_offset)
txth->data_size = body_size - txth->start_offset; /* re-evaluate */
if (txth->data_size && txth->data_size > txth->next_offset && txth->next_offset)
txth->data_size = txth->next_offset;
if (txth->data_size && txth->data_size > txth->start_offset)
txth->data_size -= txth->start_offset;
}
}
else if (is_string(key,"padding_size")) {
@ -812,7 +883,7 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
/* apply */
if (!txth->data_size_set) {
if (txth->padding_size < txth->data_size)
if (txth->data_size && txth->data_size > txth->padding_size)
txth->data_size -= txth->padding_size;
}
}
@ -820,6 +891,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
if (!parse_num(txth->streamHead,txth,val, &txth->data_size)) goto fail;
txth->data_size_set = 1;
}
/* SAMPLES */
else if (is_string(key,"sample_type")) {
if (is_string(val,"samples")) txth->sample_type = 0;
else if (is_string(val,"bytes")) txth->sample_type = 1;
@ -889,6 +962,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
}
}
}
/* COEFS */
else if (is_string(key,"coef_offset")) {
if (!parse_num(txth->streamHead,txth,val, &txth->coef_offset)) goto fail;
}
@ -909,9 +984,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
if (!parse_coef_table(txth->streamHead,txth,val, txth->coef_table, sizeof(txth->coef_table))) goto fail;
txth->coef_table_set = 1;
}
else if (is_string(key,"psx_loops")) {
if (!parse_num(txth->streamHead,txth,val, &txth->coef_mode)) goto fail;
}
/* SUBSONGS */
else if (is_string(key,"subsong_count")) {
if (!parse_num(txth->streamHead,txth,val, &txth->subsong_count)) goto fail;
}
@ -928,6 +1002,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
else if (is_string(key,"name_size")) {
if (!parse_num(txth->streamHead,txth,val, &txth->name_size)) goto fail;
}
/* SUBFILES */
else if (is_string(key,"subfile_offset")) {
if (!parse_num(txth->streamHead,txth,val, &txth->subfile_offset)) goto fail;
txth->subfile_set = 1;
@ -940,6 +1016,8 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
if (!parse_string(txth->streamHead,txth,val, txth->subfile_extension)) goto fail;
txth->subfile_set = 1;
}
/* HEADER/BODY CONFIG */
else if (is_string(key,"header_file")) {
if (txth->streamhead_opened) {
close_streamfile(txth->streamHead);
@ -995,14 +1073,48 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
txth->streamHead = txth->streamBody;
}
txth->data_size = !txth->streamBody ? 0 :
get_streamfile_size(txth->streamBody) - txth->start_offset; /* re-evaluate */
/* re-apply */
if (!txth->data_size_set) {
txth->data_size = get_streamfile_size(txth->streamBody);
/* maybe should be manually set again? */
if (txth->data_size && txth->data_size > txth->next_offset && txth->next_offset)
txth->data_size = txth->next_offset;
if (txth->data_size && txth->data_size > txth->start_offset)
txth->data_size -= txth->start_offset;
if (txth->data_size && txth->data_size > txth->padding_size)
txth->data_size -= txth->padding_size;
}
}
/* CHUNKS */
else if (is_string(key,"chunk_number")) {
if (!parse_num(txth->streamHead,txth,val, &txth->chunk_number)) goto fail;
}
else if (is_string(key,"chunk_start")) {
if (!parse_num(txth->streamHead,txth,val, &txth->chunk_start)) goto fail;
txth->chunk_start_set = 1;
set_body_chunk(txth);
}
else if (is_string(key,"chunk_size")) {
if (!parse_num(txth->streamHead,txth,val, &txth->chunk_size)) goto fail;
txth->chunk_size_set = 1;
set_body_chunk(txth);
}
else if (is_string(key,"chunk_count")) {
if (!parse_num(txth->streamHead,txth,val, &txth->chunk_count)) goto fail;
txth->chunk_count_set = 1;
set_body_chunk(txth);
}
/* DEFAULT */
else {
VGM_LOG("TXTH: unknown key=%s, val=%s\n", key,val);
goto fail;
}
//;VGM_LOG("TXTH: data_size=%x, start=%x, next=%x, padding=%x\n", txth->data_size, txth->start_offset, txth->next_offset, txth->padding_size);
return 1;
fail:
return 0;

src/meta/txth_streamfile.h (new file, 132 lines)
View File

@ -0,0 +1,132 @@
#ifndef _TXTH_STREAMFILE_H_
#define _TXTH_STREAMFILE_H_
#include "../streamfile.h"
typedef struct {
/* config */
off_t stream_offset;
size_t stream_size;
size_t chunk_size;
int chunk_count;
int chunk_number;
/* state */
off_t logical_offset; /* fake offset */
off_t physical_offset; /* actual offset */
size_t block_size; /* current size */
size_t skip_size; /* size from block start to reach data */
size_t data_size; /* usable size in a block */
size_t logical_size;
} txth_io_data;
static size_t txth_io_read(STREAMFILE *streamfile, uint8_t *dest, off_t offset, size_t length, txth_io_data* data) {
size_t total_read = 0;
/* restart when seeking to a previous offset (can't map logical<>physical offsets) */
if (data->logical_offset < 0 || offset < data->logical_offset) {
data->physical_offset = data->stream_offset;
data->logical_offset = 0x00;
data->data_size = 0;
data->skip_size = 0;
}
/* read blocks */
while (length > 0) {
/* ignore EOF */
if (offset < 0 || data->physical_offset >= data->stream_offset + data->stream_size) {
break;
}
/* process new block */
if (data->data_size == 0) {
data->block_size = data->chunk_size * data->chunk_count;
data->skip_size = data->chunk_size * data->chunk_number;
data->data_size = data->chunk_size;
}
/* move to next block */
if (data->data_size == 0 || offset >= data->logical_offset + data->data_size) {
data->physical_offset += data->block_size;
data->logical_offset += data->data_size;
data->data_size = 0;
continue;
}
/* read data */
{
size_t bytes_consumed, bytes_done, to_read;
bytes_consumed = offset - data->logical_offset;
to_read = data->data_size - bytes_consumed;
if (to_read > length)
to_read = length;
bytes_done = read_streamfile(dest, data->physical_offset + data->skip_size + bytes_consumed, to_read, streamfile);
total_read += bytes_done;
dest += bytes_done;
offset += bytes_done;
length -= bytes_done;
if (bytes_done != to_read || bytes_done == 0) {
break; /* error/EOF */
}
}
}
return total_read;
}
static size_t txth_io_size(STREAMFILE *streamfile, txth_io_data* data) {
uint8_t buf[1];
if (data->logical_size)
return data->logical_size;
/* force a fake read at max offset, to get max logical_offset (will be reset next read) */
txth_io_read(streamfile, buf, 0x7FFFFFFF, 1, data);
data->logical_size = data->logical_offset;
return data->logical_size;
}
/* Handles deinterleaving of generic chunked streams */
static STREAMFILE* setup_txth_streamfile(STREAMFILE *streamFile, off_t chunk_start, size_t chunk_size, int chunk_count, int chunk_number) {
STREAMFILE *temp_streamFile = NULL, *new_streamFile = NULL;
txth_io_data io_data = {0};
size_t io_data_size = sizeof(txth_io_data);
io_data.stream_offset = chunk_start;
io_data.stream_size = (get_streamfile_size(streamFile) - chunk_start);
io_data.chunk_size = chunk_size;
io_data.chunk_count = chunk_count;
io_data.chunk_number = chunk_number;
io_data.logical_size = io_data.stream_size / chunk_count;
io_data.logical_offset = -1; /* force phys offset reset */
/* setup subfile */
new_streamFile = open_wrap_streamfile(streamFile);
if (!new_streamFile) goto fail;
temp_streamFile = new_streamFile;
new_streamFile = open_io_streamfile(new_streamFile, &io_data,io_data_size, txth_io_read,txth_io_size);
if (!new_streamFile) goto fail;
temp_streamFile = new_streamFile;
//new_streamFile = open_buffer_streamfile(new_streamFile,0);
//if (!new_streamFile) goto fail;
//temp_streamFile = new_streamFile;
return temp_streamFile;
fail:
close_streamfile(temp_streamFile);
return NULL;
}
#endif /* _TXTH_STREAMFILE_H_ */