Add TXTH chunk_header_size/chunk_data_size for padding and fix fields

bnnm 2019-06-15 13:03:30 +02:00
parent 42edcf7524
commit 2be24a4d6e
3 changed files with 180 additions and 27 deletions

View File

@ -318,25 +318,36 @@ subfile_size = (number)|(offset)|(field)
subfile_extension = (string)

# CHUNK DEINTERLEAVING [OPTIONAL]
-# Some files interleave raw data chunks, for example 3 stereo songs pasted together,
+# Some files interleave data chunks, for example 3 stereo songs pasted together,
# alternating 0x10000 bytes of data each. These settings allow vgmstream to play
# one of the chunks while ignoring the rest (read 0x10000 data, skip 0x10000*2).
# File is first "dechunked" then played using the other settings (start_offset
# would point within the internal "dechunked" file).
#
# You need to set:
-# - start: where all chunk data start (normally 0x00)
-# - size: amount of data in a single chunk (ex. 0x10000).
-# - count: total number of interleaved chunks (ex. 3=3 interleaved songs)
-# - number: first chunk to start (ex. 1=0x00000, 2=0x10000, 3=0x20000...)
-# If you set subsong_count first chunk_number will be a set per subsong.
-chunk_start = (number)|(offset)|(field)
-chunk_size = (number)|(offset)|(field)
+# - chunk_count: total number of interleaved chunks (ex. 3=3 interleaved songs)
+# - chunk_number: first chunk to start (ex. 1=0x00000, 2=0x10000, 3=0x20000...)
+#   If you set subsong_count first, chunk_number will be auto-set per subsong
+#   (subsong 1 starts from chunk number 1, subsong 2 from chunk 2, etc).
+# - chunk_start: absolute offset where chunks start (normally 0x00)
+# - chunk_size: amount of data in a single chunk (ex. 0x10000).
+# For fine-tuning you can optionally set (before chunk_size, since they only
+# take effect once chunk_size is read):
+# - chunk_header_size: header to skip before chunk data (part of chunk_size)
+# - chunk_data_size: actual data size (part of chunk_size, rest is header/padding)
+# So, if you set chunk_size to 0x1000 and chunk_header_size to 0x100, chunk_data_size
+# is implicitly 0xF00; or if chunk_size is 0x1000 and chunk_data_size is 0x800,
+# the last 0x800 is ignored padding.
+#
+# Use combinations of the above to make vgmstream "see" only actual codec data.
+#
chunk_count = (number)|(offset)|(field)
chunk_number = (number)|(offset)|(field)
+chunk_start = (number)|(offset)|(field)
+chunk_header_size = (number)|(offset)|(field)
+chunk_data_size = (number)|(offset)|(field)
+chunk_size = (number)|(offset)|(field)
```
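For illustration, a minimal .txth sketch of the header-skipping case described above (values are hypothetical, just restating the 0x1000/0x100 example; the usual codec/sample rate fields are omitted):
```
# hypothetical: one stream cut into 0x1000 blocks, each starting with a 0x100 header
chunk_start = 0x00
chunk_count = 1
chunk_number = 1
chunk_header_size = 0x100   # skipped in every block (part of chunk_size)
chunk_size = 0x1000         # so 0xF00 of each block is actual codec data
```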
-## Usages
+## Complex usages

### Temporary values
Most commands are evaluated and calculated immediately, every time they are found. This is by design, as it can be used to adjust values and pull off certain calculation tricks.
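As a hypothetical sketch of that idea (not from the original document), a field can be read, used in a calculation, then overwritten with its real value:
```
channels = @0x04              # temporarily holds a value read from the header
interleave = channels * 0x800 # calculation uses the temporary value
channels = 2                  # real value, used for decoding from here on
```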
@ -548,3 +559,97 @@ subfile_offset = 0
subfile_size = @0x04 + 0x08 #RIFF size
subfile_extension = at3
```
Chunk settings can also be used to make blocks with padding playable:
```
# Mortal Kombat: Deception (PS2)
codec = PSX
interleave = 0x3F40
sample_rate = 32000
channels = 2
chunk_number = 1
chunk_count = 1
chunk_start = 0x00
chunk_data_size = interleave * channels
chunk_size = 0x8000
num_samples = data_size
```
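For reference, chunk_data_size here resolves to interleave * channels = 0x3F40 * 2 = 0x7E80, so the last 0x8000 - 0x7E80 = 0x180 of every 0x8000 block is treated as padding and skipped.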
## Examples
**Colin McRae DiRT (PC) .wip.txth**
```
id_value = 0x00000000
id_offset = @0x00
codec = PCM16LE
channels = 2
sample_rate = 32000
start_offset = 0x04
num_samples = data_size
loop_start_sample = 0
loop_end_sample = data_size
```
**Kim Possible: What's the Switch (PS2) .str.txth**
```
codec = PSX
interleave = 0x2000
channels = 2
sample_rate = 48000
num_samples = data_size
interleave_last = auto
```
**Manhunt (Xbox) .rib.txth**
```
codec = XBOX
codec_mode = 1 #interleaved XBOX
interleave = 0xD800
channels = 12
sample_rate = 44100
start_offset = 0x00
num_samples = data_size
```
**Pitfall The Lost Expedition (PC) .txth**
```
codec = DVI_IMA
interleave = 0x80
start_offset = 0x00
channels = 2
sample_rate = 44100
num_samples = data_size
```
**Spy Hunter (GC) .pcm.txth**
```
codec = PCM8
sample_rate = 32000
channels = 1
start_offset = 0
num_samples = data_size
```
**Ultimate Board Game Collection (Wii) .dsp.txth**
```
codec = NGC_DSP
interleave = 0x10000
channels = 2
start_offset = 0x00
num_samples = @0x00:BE
sample_rate = @0x08:BE
loop_flag = @0x0C:BE$2
sample_type = bytes
loop_start_sample = @0x10:BE
loop_end_sample = @0x14:BE
coef_offset = 0x1c
coef_spacing = 0x10000
coef_endianness = BE
```
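For comparison, a purely hypothetical .txth (not from this commit) for the "3 songs alternating 0x10000 bytes" case described in the chunk deinterleaving notes, playing the first song:
```
# hypothetical: three stereo songs interleaved in 0x10000 chunks
codec = PCM16LE
channels = 2
sample_rate = 44100
start_offset = 0x00
chunk_start = 0x00
chunk_count = 3         # 3 songs pasted together
chunk_number = 1        # play the first (use 2 or 3 for the others)
chunk_size = 0x10000
num_samples = data_size
```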

View File

@ -99,6 +99,8 @@ typedef struct {
    uint32_t chunk_start;
    uint32_t chunk_size;
    uint32_t chunk_count;
+   uint32_t chunk_header_size;
+   uint32_t chunk_data_size;
    int chunk_start_set;
    int chunk_size_set;
    int chunk_count_set;
@ -637,6 +639,7 @@ static void set_body_chunk(txth_header * txth) {
    /* sets body "chunk" if all needed values are set
     * (done inline for padding/get_samples/etc calculators to work) */
+   //todo maybe should only be done once, or have some count to retrigger to simplify?
    if (!txth->chunk_start_set || !txth->chunk_size_set || !txth->chunk_count_set)
        return;
    if (txth->chunk_size == 0 || txth->chunk_start > txth->data_size || txth->chunk_count == 0)
@ -652,8 +655,20 @@ static void set_body_chunk(txth_header * txth) {
    if (txth->chunk_number > txth->chunk_count)
        return;

-   temp_streamFile = setup_txth_streamfile(txth->streamBody, txth->chunk_start, txth->chunk_size, txth->chunk_count, txth->chunk_number - 1, txth->streambody_opened);
-   if (!temp_streamFile) return;
+   {
+       txth_io_config_data cfg = {0};
+
+       cfg.chunk_start = txth->chunk_start;
+       cfg.chunk_header_size = txth->chunk_header_size;
+       cfg.chunk_data_size = txth->chunk_data_size;
+       cfg.chunk_size = txth->chunk_size;
+       cfg.chunk_count = txth->chunk_count;
+       cfg.chunk_number = txth->chunk_number - 1; /* 1-index to 0-index */
+
+       temp_streamFile = setup_txth_streamfile(txth->streamBody, cfg, txth->streambody_opened);
+       if (!temp_streamFile) return;
+   }

    /* closing is handled by temp_streamFile */
    //if (txth->streambody_opened) {
@ -1103,6 +1118,16 @@ static int parse_keyval(STREAMFILE * streamFile_, txth_header * txth, const char
        txth->chunk_start_set = 1;
        set_body_chunk(txth);
    }
+   else if (is_string(key,"chunk_header_size")) {
+       if (!parse_num(txth->streamHead,txth,val, &txth->chunk_header_size)) goto fail;
+       //txth->chunk_header_size_set = 1;
+       //set_body_chunk(txth); /* optional and should go before chunk_size */
+   }
+   else if (is_string(key,"chunk_data_size")) {
+       if (!parse_num(txth->streamHead,txth,val, &txth->chunk_data_size)) goto fail;
+       //txth->chunk_data_size_set = 1;
+       //set_body_chunk(txth); /* optional and should go before chunk_size */
+   }
    else if (is_string(key,"chunk_size")) {
        if (!parse_num(txth->streamHead,txth,val, &txth->chunk_size)) goto fail;
        txth->chunk_size_set = 1;
@ -1282,7 +1307,7 @@ static int parse_num(STREAMFILE * streamFile, txth_header * txth, const char * v
    }
    else { /* known field */
        if ((n = is_substring(val,"interleave"))) value = txth->interleave;
-       if ((n = is_substring(val,"interleave_last"))) value = txth->interleave_last;
+       else if ((n = is_substring(val,"interleave_last"))) value = txth->interleave_last;
        else if ((n = is_substring(val,"channels"))) value = txth->channels;
        else if ((n = is_substring(val,"sample_rate"))) value = txth->sample_rate;
        else if ((n = is_substring(val,"start_offset"))) value = txth->start_offset;

View File

@ -4,12 +4,18 @@
typedef struct {
-   /* config */
-   off_t stream_offset;
-   size_t stream_size;
+   off_t chunk_start;
    size_t chunk_size;
+   size_t chunk_header_size;
+   size_t chunk_data_size;
    int chunk_count;
    int chunk_number;
+} txth_io_config_data;
+
+typedef struct {
+   /* config */
+   txth_io_config_data cfg;
+   size_t stream_size;

    /* state */
    off_t logical_offset; /* fake offset */
@ -28,7 +34,7 @@ static size_t txth_io_read(STREAMFILE *streamfile, uint8_t *dest, off_t offset,
    /* re-start when previous offset (can't map logical<>physical offsets) */
    if (data->logical_offset < 0 || offset < data->logical_offset) {
-       data->physical_offset = data->stream_offset;
+       data->physical_offset = data->cfg.chunk_start;
        data->logical_offset = 0x00;
        data->data_size = 0;
        data->skip_size = 0;
@ -38,15 +44,35 @@ static size_t txth_io_read(STREAMFILE *streamfile, uint8_t *dest, off_t offset,
    while (length > 0) {

        /* ignore EOF */
-       if (offset < 0 || data->physical_offset >= data->stream_offset + data->stream_size) {
+       if (offset < 0 || data->physical_offset >= data->cfg.chunk_start + data->stream_size) {
            break;
        }

        /* process new block */
        if (data->data_size == 0) {
-           data->block_size = data->chunk_size * data->chunk_count;
-           data->skip_size = data->chunk_size * data->chunk_number;
-           data->data_size = data->chunk_size;
+           /* base sizes */
+           data->block_size = data->cfg.chunk_size * data->cfg.chunk_count;
+           data->skip_size = data->cfg.chunk_size * data->cfg.chunk_number;
+           data->data_size = data->cfg.chunk_size;
+
+           /* chunk size modifiers */
+           if (data->cfg.chunk_header_size) {
+               data->skip_size += data->cfg.chunk_header_size;
+               data->data_size -= data->cfg.chunk_header_size;
+           }
+           if (data->cfg.chunk_data_size) {
+               data->data_size = data->cfg.chunk_data_size;
+           }
+
+           /* clamp for games where last block is smaller */ //todo not correct for all cases
+           if (data->physical_offset + data->block_size > data->cfg.chunk_start + data->stream_size) {
+               data->block_size = (data->cfg.chunk_start + data->stream_size) - data->physical_offset;
+               data->skip_size = (data->block_size / data->cfg.chunk_count) * data->cfg.chunk_number;
+           }
+           if (data->physical_offset + data->data_size > data->cfg.chunk_start + data->stream_size) {
+               data->data_size = (data->cfg.chunk_start + data->stream_size) - data->physical_offset;
+           }
        }

        /* move to next block */
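To make the size juggling above easier to follow, here is a standalone C sketch (not vgmstream code) that applies the same base-size and modifier rules to a single hypothetical block, using chunk_size 0x8000 and chunk_data_size 0x7E80 as in the TXTH.md padding example:
```
#include <stdio.h>
#include <stdint.h>

/* hypothetical config values, mirroring txth_io_config_data */
typedef struct {
    uint32_t chunk_start, chunk_size, chunk_header_size, chunk_data_size;
    int chunk_count, chunk_number; /* chunk_number 0-based, as in the streamfile */
} cfg_t;

int main(void) {
    cfg_t cfg = { 0x00, 0x8000, 0x00, 0x7E80, 1, 0 };

    /* base sizes: a "block" spans one chunk of every interleaved stream */
    uint32_t block_size = cfg.chunk_size * cfg.chunk_count;  /* 0x8000 */
    uint32_t skip_size  = cfg.chunk_size * cfg.chunk_number; /* 0x0000 */
    uint32_t data_size  = cfg.chunk_size;                    /* 0x8000 */

    /* modifiers: skip the per-chunk header, then keep only actual data */
    if (cfg.chunk_header_size) {
        skip_size += cfg.chunk_header_size;
        data_size -= cfg.chunk_header_size;
    }
    if (cfg.chunk_data_size)
        data_size = cfg.chunk_data_size; /* 0x7E80, last 0x180 is padding */

    printf("block=0x%X skip=0x%X data=0x%X per-chunk padding=0x%X\n",
            (unsigned)block_size, (unsigned)skip_size, (unsigned)data_size,
            (unsigned)(cfg.chunk_size - cfg.chunk_header_size - data_size));
    return 0;
}
```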
@ -95,18 +121,15 @@ static size_t txth_io_size(STREAMFILE *streamfile, txth_io_data* data) {
}

/* Handles deinterleaving of generic chunked streams */
-static STREAMFILE* setup_txth_streamfile(STREAMFILE *streamFile, off_t chunk_start, size_t chunk_size, int chunk_count, int chunk_number, int is_opened_streamfile) {
+static STREAMFILE* setup_txth_streamfile(STREAMFILE *streamFile, txth_io_config_data cfg, int is_opened_streamfile) {
    STREAMFILE *temp_streamFile = NULL, *new_streamFile = NULL;
    txth_io_data io_data = {0};
    size_t io_data_size = sizeof(txth_io_data);

-   io_data.stream_offset = chunk_start;
-   io_data.stream_size = (get_streamfile_size(streamFile) - chunk_start);
-   io_data.chunk_size = chunk_size;
-   io_data.chunk_count = chunk_count;
-   io_data.chunk_number = chunk_number;
-   io_data.logical_size = io_data.stream_size / chunk_count;
+   io_data.cfg = cfg; /* memcpy */
+   io_data.stream_size = (get_streamfile_size(streamFile) - cfg.chunk_start);
    io_data.logical_offset = -1; /* force phys offset reset */
+   //io_data.logical_size = io_data.stream_size / cfg.chunk_count; //todo would help with performance but not ok if data_size is set

    new_streamFile = streamFile;