diff --git a/src/base/decode.c b/src/base/decode.c
index 374eb59a..35784a8b 100644
--- a/src/base/decode.c
+++ b/src/base/decode.c
@@ -49,6 +49,10 @@ void decode_free(VGMSTREAM* vgmstream) {
         free_imuse(vgmstream->codec_data);
     }
 
+    if (vgmstream->coding_type == coding_ONGAKUKAN_ADPCM) {
+        free_ongakukan_adp(vgmstream->codec_data);
+    }
+
     if (vgmstream->coding_type == coding_COMPRESSWAVE) {
         free_compresswave(vgmstream->codec_data);
     }
@@ -149,6 +153,10 @@ void decode_seek(VGMSTREAM* vgmstream) {
         seek_imuse(vgmstream->codec_data, vgmstream->loop_current_sample);
     }
 
+    if (vgmstream->coding_type == coding_ONGAKUKAN_ADPCM) {
+        seek_ongakukan_adp(vgmstream->codec_data, vgmstream->loop_current_sample);
+    }
+
     if (vgmstream->coding_type == coding_COMPRESSWAVE) {
         seek_compresswave(vgmstream->codec_data, vgmstream->loop_current_sample);
     }
@@ -254,6 +262,10 @@ void decode_reset(VGMSTREAM* vgmstream) {
         reset_imuse(vgmstream->codec_data);
     }
 
+    if (vgmstream->coding_type == coding_ONGAKUKAN_ADPCM) {
+        reset_ongakukan_adp(vgmstream->codec_data);
+    }
+
     if (vgmstream->coding_type == coding_COMPRESSWAVE) {
         reset_compresswave(vgmstream->codec_data);
     }
@@ -522,6 +534,8 @@ int decode_get_samples_per_frame(VGMSTREAM* vgmstream) {
             return 0; /* varies per mode */
         case coding_IMUSE:
            return 0; /* varies per frame */
+        case coding_ONGAKUKAN_ADPCM:
+            return 0; /* actually 1 */
         case coding_COMPRESSWAVE:
            return 0; /* multiple of 2 */
         case coding_EA_MT:
@@ -1529,6 +1543,10 @@ void decode_vgmstream(VGMSTREAM* vgmstream, int samples_written, int samples_to_
             decode_imuse(vgmstream, buffer, samples_to_do);
             break;
 
+        case coding_ONGAKUKAN_ADPCM:
+            decode_ongakukan_adp(vgmstream, buffer, samples_to_do);
+            break;
+
         case coding_COMPRESSWAVE:
             decode_compresswave(vgmstream->codec_data, buffer, samples_to_do);
             break;
diff --git a/src/coding/coding.h b/src/coding/coding.h
index a2d47108..edbf6256 100644
--- a/src/coding/coding.h
+++ b/src/coding/coding.h
@@ -294,6 +294,16 @@ void reset_imuse(imuse_codec_data* data);
 void seek_imuse(imuse_codec_data* data, int32_t num_sample);
 void free_imuse(imuse_codec_data* data);
 
+/* ongakukan_adp_decoder */
+typedef struct ongakukan_adp_data ongakukan_adp_data;
+
+ongakukan_adp_data* init_ongakukan_adp(STREAMFILE* sf, int32_t data_offset, int32_t data_size,
+    bool sound_is_adpcm);
+void decode_ongakukan_adp(VGMSTREAM* vgmstream, sample_t* outbuf, int32_t samples_to_do);
+void reset_ongakukan_adp(ongakukan_adp_data* data);
+void seek_ongakukan_adp(ongakukan_adp_data* data, int32_t current_sample);
+void free_ongakukan_adp(ongakukan_adp_data* data);
+int32_t ongakukan_adp_get_samples(ongakukan_adp_data* data);
 
 /* compresswave_decoder */
 typedef struct compresswave_codec_data compresswave_codec_data;
diff --git a/src/coding/libs/ongakukan_adp_lib.c b/src/coding/libs/ongakukan_adp_lib.c
new file mode 100644
index 00000000..87fd07ab
--- /dev/null
+++ b/src/coding/libs/ongakukan_adp_lib.c
@@ -0,0 +1,221 @@
+
+/* Decodes Ongakukan ADPCM, found in their PS2 and PSP games.
+ * Basically their take on ADPCM, with some companding and quantization involved.
+ *
+ * The original decoder is a mix of COP0 and VU1 code, however PS2 floats aren't actually used (if at all)
+ * when it comes to converting the encoded sample data (a single byte holding two 4-bit nibbles) to PCM16.
+ *
+ * The decoder you see here is a hand-crafted, faithful C adaptation of the original MIPS R5900 (PS2) and R4000 (PSP) code, from various executables of their games.
+ * As a consequence of all this, a new, entirely custom decoder had to be designed from the ground up for vgmstream. No info surrounding this codec was available. */
+
+/* Additional notes:
+ * - This code does not support PCM16 sound data, in any way, shape, or form.
+ * -- Ongakukan's internal sound engine from their PS2 and PSP games allows for only two codecs: signed PCM16 and their own take on ADPCM.
+ * -- However, much of that support relies on a flag that's set to one of the two codecs depending on the opened file extension.
+ *    Basically, how it works is: if sound data is "PCM16" (available to "wav" and "ads" files), set the flag to 0.
+ *    If sound data is "ADPCM" (available to "adp" files), set it to 1.
+ *    Code handles this flag as a boolean var; 0 is "false" and 1 is "true".
+ * -- However, as vgmstream has built-in support for the former codec (and the many metas that use it), and despite it being fairly easy to add here,
+ *    re-implementing it from scratch would be a wasted effort; it is consequently not included. */
+
+#include <stdlib.h>
+#include "../../util/reader_sf.h"
+#include "ongakukan_adp_lib.h"
+
+/* the struct that oversees everything. */
+
+struct ongakukan_adp_t
+{
+    STREAMFILE* sf; /* streamfile var. */
+
+    long int data_offset; /* current offset of the data being read. */
+    long int start_offset; /* base offset of the encoded sound data. */
+    long int data_size; /* sound data size, basically the ADP size minus the 44-byte header. */
+    long int sample_work; /* total number of samples, calc'd using data_size as a base. */
+    long int alt_sample_work1; /* current number of samples decoded so far. */
+    long int alt_sample_work2; /* how many samples are left to go through. */
+    long int samples_filled; /* how many samples were filled to the vgmstream buffer. */
+    long int samples_consumed; /* how many samples the vgmstream buffer had to consume. */
+
+    bool sound_is_adpcm; /* false = no (see "additional notes" above), true = yes */
+    bool sample_startpoint_present; /* false = failed to make startpoint, true = startpoint present */
+    char sample_mode; /* 0 = create decoding setup, 1 = continue decoding with the setup in place */
+    bool sample_pair_is_decoded; /* false = no, true = yes */
+
+    unsigned char base_pair; /* a byte read from the ADPCM data, holding two 4-bit nibbles. */
+    long int base_scale; /* how loud this sample should be. */
+    short int sample_hist[2]; /* the two most recently decoded signed 16-bit samples. */
+};
+
+/* filter table consisting of 16 entries. */
+
+const short int ongakukan_adpcm_filter[16] = { 233, 549, 453, 375, 310, 233, 233, 233, 233, 233, 233, 233, 310, 375, 453, 549 };
+
+/* streamfile read function declaration, more may be added in the future. */
+
+static uint8_t read_u8_wrapper(ongakukan_adp_t* handle);
+
+/* function declarations for the inner workings of the codec data. */
+
+static bool set_up_sample_startpoint(ongakukan_adp_t* handle);
+static void decode_ongakukan_adpcm_samples(ongakukan_adp_t* handle);
+
+/* codec management functions, meant to oversee and supervise ADP data from the top down.
+ * in layman's terms, they control how ADP data should be handled and when. */
+
+ongakukan_adp_t* init_ongakukan_adpcm(STREAMFILE* sf, long int data_offset, long int data_size,
+    bool sound_is_adpcm)
+{
+    ongakukan_adp_t* handle = NULL;
+
+    /* allocate handle using malloc. */
+    handle = malloc(sizeof(ongakukan_adp_t));
+    if (!handle) goto fail;
+
+    /* now, to set up the rest of the handle with the data we have... */
+    handle->sf = sf;
+    handle->data_offset = data_offset;
+    handle->start_offset = data_offset;
+    handle->data_size = data_size;
+    handle->sample_mode = 0;
+    handle->sound_is_adpcm = sound_is_adpcm;
+    handle->sample_startpoint_present = set_up_sample_startpoint(handle);
+    /* if we failed to set up the decode startpoint, throw in the towel and bail. */
+    if (handle->sample_startpoint_present == false) { goto fail; }
+
+    return handle;
+fail:
+    ongakukan_adpcm_free(handle);
+    return NULL;
+}
+
+void ongakukan_adpcm_free(ongakukan_adp_t* handle)
+{
+    if (!handle) return;
+    free(handle);
+}
+
+void ongakukan_adpcm_reset(ongakukan_adp_t* handle)
+{
+    if (!handle) return;
+
+    /* wipe out certain values from the handle so we can start over. */
+    handle->data_offset = handle->start_offset;
+    handle->sample_pair_is_decoded = false;
+    handle->sample_mode = 0;
+    handle->alt_sample_work1 = 0;
+    handle->alt_sample_work2 = handle->sample_work;
+}
+
+void ongakukan_adpcm_seek(ongakukan_adp_t* handle, long int target_sample)
+{
+    if (!handle) return;
+
+    char ts_modulus = 0; /* ensures target_sample gets rounded down to a multiple of 2. */
+    long int ts_data_offset = 0; /* data_offset for the target: target_sample shifted by 1 (left if PCM, two bytes per sample; right if ADPCM, two samples per byte). */
+    ts_data_offset = target_sample >> 1;
+    ts_modulus = target_sample % 2;
+    target_sample = target_sample - ts_modulus;
+    /* if ADPCM, right-shift target_sample first, then have ts_modulus take the remainder of target_sample by 2 and subtract it,
+     * so target_sample lands on an even sample (e.g. target_sample 12345 maps to byte offset 6172 and is rounded down to 12344).
+     * this keeps the decoder's two counters happy: one adds 2 and the other subtracts 2 per decoded pair
+     * (and in that order, too; though they're fairly useless ATM, you pretty much want to leave them alone). */
+
+    /* anyway, the decoder is told that target_sample wants to go somewhere right now,
+     * so reposition data_offset to where the sound data for that sample ought to be
+     * and (as of now) reset basically all decode state up to this point, so we can continue to decode all sample pairs without issue. */
+    handle->data_offset = handle->start_offset + ts_data_offset;
+    handle->sample_pair_is_decoded = false;
+    handle->sample_mode = 0;
+    handle->alt_sample_work1 = target_sample;
+    handle->alt_sample_work2 = handle->sample_work - target_sample;
+
+    /* for now, this just does what ongakukan_adpcm_reset does, but for the target sample instead of literally everything.
+     * it is a bit more involved than the above, but works. */
+}
+
+long int get_num_samples_from_ongakukan_adpcm(ongakukan_adp_t* handle)
+{
+    if (!handle) return 0;
+    return handle->sample_work;
+}
+
+void* get_sample_hist_from_ongakukan_adpcm(ongakukan_adp_t* handle)
+{
+    if (!handle) return NULL;
+    return &handle->sample_hist;
+}
+
+/* function definitions for the inner workings of the codec data. */
+
+static bool set_up_sample_startpoint(ongakukan_adp_t* handle)
+{
+    /* make the decoder fail hard if the streamfile object isn't opened or is downright useless. */
+    if (!handle->sf) return false;
+
+    if (handle->sound_is_adpcm == 0) { return false; }
+    else { /* num_samples for Ongakukan ADPCM sound data: two samples per byte. */ handle->sample_work = handle->data_size << 1; }
+    /* set "beginning" and "end" sample vars and send a "message" that we haven't gone through any sample yet. */
+    handle->alt_sample_work1 = 0;
+    handle->alt_sample_work2 = handle->sample_work;
+    handle->sample_pair_is_decoded = false;
+
+    return true;
+}
+
+void decode_ongakukan_adpcm_data(ongakukan_adp_t* handle)
+{
+    /* have our decoder go through the next samples available in the sound data. */
+    decode_ongakukan_adpcm_samples(handle);
+    /* if setup is established for further decoding, switch gears and have the decoder use that setup for as long as possible. */
+    /* if a sample pair is decoded, advance to the next byte, tell our handle that we went through 2 samples, and let the decoder go through the next available data again. */
+    if (handle->sample_pair_is_decoded == true)
+    {
+        handle->data_offset++;
+        handle->alt_sample_work1 += 2;
+        handle->alt_sample_work2 -= 2;
+        handle->sample_pair_is_decoded = false;
+    }
+}
+
+static void decode_ongakukan_adpcm_samples(ongakukan_adp_t* handle)
+{
+    unsigned char nibble1 = 0, nibble2 = 0; /* two chars, each holding a 4-bit nibble. */
+    long int nibble1_1 = 0, nibble2_1 = 0; /* two long ints holding the pure sample data. */
+
+    if (handle->sample_pair_is_decoded == false)
+    {
+        /* sample_mode being 0 means we can just do a setup for future sample decoding so we have nothing to worry about later. */
+        if (handle->sample_mode == 0)
+        {
+            /* set "base scale", the two "sample hist"s, and "base pair", respectively. */
+            handle->base_scale = 0x10;
+            handle->sample_hist[0] = 0;
+            handle->sample_hist[1] = 0;
+            handle->base_pair = 0;
+            handle->sample_mode = 1; /* indicates we have the setup we need to decode samples. */
+        }
+        handle->base_pair = (uint8_t)read_u8_wrapper(handle);
+
+        nibble1 = handle->base_pair & 0xf;
+        nibble1_1 = nibble1 - 8;
+        nibble2 = (handle->base_pair >> 4) & 0xf;
+        nibble2_1 = nibble2 - 8;
+        nibble2_1 = nibble2_1 * handle->base_scale;
+        handle->sample_hist[0] = handle->sample_hist[1] + nibble2_1;
+        handle->base_scale = (handle->base_scale * (ongakukan_adpcm_filter[nibble2])) >> 8;
+        nibble1_1 = nibble1_1 * handle->base_scale;
+        handle->sample_hist[1] = handle->sample_hist[0] + nibble1_1;
+        handle->base_scale = (handle->base_scale * (ongakukan_adpcm_filter[nibble1])) >> 8;
+        handle->sample_pair_is_decoded = true;
+    }
+}
+
+/* streamfile read function definitions at the very bottom. */
+
+static uint8_t read_u8_wrapper(ongakukan_adp_t* handle)
+{
+    if ((handle->data_offset - handle->start_offset) > handle->data_size) return 0;
+    if ((handle->data_offset - handle->start_offset) < 0) return 0;
+    return read_u8((off_t)(handle->data_offset), handle->sf);
+}
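For reference, the per-byte step implemented by decode_ongakukan_adpcm_samples() above can be followed with a small stand-alone sketch (illustrative only, not part of the patch; the input byte and starting state are made up): one byte yields two PCM16 samples, first from the high nibble and then from the low nibble, with the scale adapting through the filter table after each sample.

    #include <stdio.h>
    #include <stdint.h>

    static const short filter[16] = { 233, 549, 453, 375, 310, 233, 233, 233, 233, 233, 233, 233, 310, 375, 453, 549 };

    int main(void) {
        uint8_t pair = 0x3A;      /* made-up input byte: high nibble 0x3, low nibble 0xA */
        long scale = 0x10;        /* initial scale, as set when sample_mode == 0 */
        short hist[2] = { 0, 0 };

        int lo = (pair & 0xf) - 8;        /* low nibble, re-centered to -8..+7 */
        int hi = ((pair >> 4) & 0xf) - 8; /* high nibble, re-centered to -8..+7 */

        hist[0] = hist[1] + hi * scale;                   /* 1st output sample (high nibble) */
        scale = (scale * filter[(pair >> 4) & 0xf]) >> 8; /* adapt scale via the filter table */
        hist[1] = hist[0] + lo * scale;                   /* 2nd output sample (low nibble) */
        scale = (scale * filter[pair & 0xf]) >> 8;

        printf("samples: %d %d, next scale: %ld\n", hist[0], hist[1], scale); /* -80 -34, 20 */
        return 0;
    }

Note that, as in the code above, each new sample is predicted from the previous one (the running history) and there is no clamping; the scale simply grows or shrinks through the filter table.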
diff --git a/src/coding/libs/ongakukan_adp_lib.h b/src/coding/libs/ongakukan_adp_lib.h
new file mode 100644
index 00000000..f127c85c
--- /dev/null
+++ b/src/coding/libs/ongakukan_adp_lib.h
@@ -0,0 +1,27 @@
+#ifndef _ONGAKUKAN_ADP_LIB_H_
+#define _ONGAKUKAN_ADP_LIB_H_
+
+/* Ongakukan ADPCM codec, found in PS2 and PSP games. */
+
+#include "../../util/reader_sf.h"
+
+/* typedef struct */
+typedef struct ongakukan_adp_t ongakukan_adp_t;
+
+/* function declaration for when we need to set up the codec data. */
+ongakukan_adp_t* init_ongakukan_adpcm(STREAMFILE* sf, long int data_offset, long int data_size,
+    bool sound_is_adpcm);
+
+/* function declarations for freeing, resetting, and seeking the ongakukan_adp_t handle. */
+void ongakukan_adpcm_free(ongakukan_adp_t* handle);
+void ongakukan_adpcm_reset(ongakukan_adp_t* handle);
+void ongakukan_adpcm_seek(ongakukan_adp_t* handle, long int target_sample);
+
+/* function declarations for when we need to get (and send) certain values from the ongakukan_adp_t handle. */
+long int get_num_samples_from_ongakukan_adpcm(ongakukan_adp_t* handle);
+void* get_sample_hist_from_ongakukan_adpcm(ongakukan_adp_t* handle);
+
+/* function declaration for actually decoding samples; decodes one sample pair per call. */
+void decode_ongakukan_adpcm_data(ongakukan_adp_t* handle);
+
+#endif // _ONGAKUKAN_ADP_LIB_H_
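As a quick orientation before the wrapper in ongakukan_adp_decoder.c below, this is roughly how the API above is meant to be driven. It is a hypothetical fragment (it assumes an already-opened STREAMFILE, valid offsets, and in-tree include paths), not a definitive usage: each decode_ongakukan_adpcm_data() call consumes one byte and produces one sample pair, which the caller then fetches through the history getter.

    #include <stdbool.h>
    #include "ongakukan_adp_lib.h"

    static void drain_ongakukan_adp(STREAMFILE* sf, long data_offset, long data_size) {
        ongakukan_adp_t* handle = init_ongakukan_adpcm(sf, data_offset, data_size, /*sound_is_adpcm*/ true);
        if (!handle) return;

        long total = get_num_samples_from_ongakukan_adpcm(handle);
        for (long done = 0; done < total; done += 2) {
            decode_ongakukan_adpcm_data(handle); /* decodes the next byte into a sample pair */
            short* pair = get_sample_hist_from_ongakukan_adpcm(handle);
            (void)pair; /* pair[0] and pair[1] now hold the two newest PCM16 samples */
        }
        ongakukan_adpcm_free(handle);
    }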
diff --git a/src/coding/ongakukan_adp_decoder.c b/src/coding/ongakukan_adp_decoder.c
new file mode 100644
index 00000000..39172851
--- /dev/null
+++ b/src/coding/ongakukan_adp_decoder.c
@@ -0,0 +1,89 @@
+#include <string.h>
+#include "coding.h"
+#include "libs/ongakukan_adp_lib.h"
+
+struct ongakukan_adp_data
+{
+    void* handle;
+    int16_t* samples;
+    int32_t samples_done;
+    bool samples_filled; /* false = no, true = yes */
+    int32_t getting_samples; /* set to 2 in decode_ongakukan_adp; we literally get two decoded samples at a time. */
+    STREAMFILE* sf;
+};
+
+ongakukan_adp_data* init_ongakukan_adp(STREAMFILE* sf, int32_t data_offset, int32_t data_size,
+    bool sound_is_adpcm)
+{
+    ongakukan_adp_data* data = NULL;
+
+    data = calloc(1, sizeof(ongakukan_adp_data));
+    if (!data) goto fail;
+
+    /* reopen the STREAMFILE from here, then pass it as an argument to our init function. */
+    data->sf = reopen_streamfile(sf, 0);
+    if (!data->sf) goto fail;
+    data->handle = init_ongakukan_adpcm(data->sf, (long int)(data_offset), (long int)(data_size),
+        sound_is_adpcm);
+    if (!data->handle) goto fail;
+
+    return data;
+fail:
+    free_ongakukan_adp(data);
+    return NULL;
+}
+
+void decode_ongakukan_adp(VGMSTREAM* vgmstream, sample_t* outbuf, int32_t samples_to_do)
+{
+    ongakukan_adp_data* data = vgmstream->codec_data;
+
+    data->getting_samples = 2;
+    data->samples_done = 0;
+    data->samples_filled = false;
+    /* ^ samples_filled is a boolean here to keep decoding simple:
+     * rather than making samples_filled a long int counter,
+     * we make it a flag and let data->samples_done shine as the counter,
+     * so the decoder can do its job without worry. */
+
+    while (data->samples_done < samples_to_do)
+    {
+        if (data->samples_filled)
+        {
+            memcpy(outbuf + data->samples_done,
+                data->samples,
+                data->getting_samples * sizeof(int16_t));
+            data->samples_done += data->getting_samples;
+
+            data->samples_filled = false;
+        }
+        else
+        {
+            decode_ongakukan_adpcm_data(data->handle);
+            data->samples_filled = true;
+            data->samples = (int16_t*)get_sample_hist_from_ongakukan_adpcm(data->handle);
+        }
+    }
+}
+
+void reset_ongakukan_adp(ongakukan_adp_data* data)
+{
+    if (!data) return;
+    ongakukan_adpcm_reset(data->handle);
+}
+
+void seek_ongakukan_adp(ongakukan_adp_data* data, int32_t current_sample)
+{
+    if (!data) return;
+    ongakukan_adpcm_seek(data->handle, current_sample);
+}
+
+void free_ongakukan_adp(ongakukan_adp_data* data)
+{
+    if (!data) return;
+    close_streamfile(data->sf);
+    ongakukan_adpcm_free(data->handle);
+    free(data);
+}
+
+int32_t ongakukan_adp_get_samples(ongakukan_adp_data* data)
+{
+    if (!data) return 0;
+    return (int32_t)get_num_samples_from_ongakukan_adpcm(data->handle);
+}
diff --git a/src/formats.c b/src/formats.c
index f201257e..3808767d 100644
--- a/src/formats.c
+++ b/src/formats.c
@@ -893,6 +893,7 @@ static const coding_info coding_info_list[] = {
         {coding_ACM, "InterPlay ACM"},
         {coding_CIRCUS_ADPCM, "Circus 8-bit ADPCM"},
         {coding_UBI_ADPCM, "Ubisoft 4/6-bit ADPCM"},
+        {coding_ONGAKUKAN_ADPCM, "Ongakukan 4-bit ADPCM"},
         {coding_EA_MT, "Electronic Arts MicroTalk"},
 
         {coding_CIRCUS_VQ, "Circus VQ"},
@@ -1444,6 +1445,7 @@ static const meta_info meta_info_list[] = {
         {meta_VAS_ROCKSTAR, "Rockstar .VAS header"},
         {meta_EA_SBK, "Electronic Arts SBK header"},
         {meta_DSP_ASURA, "Rebellion DSP header"},
+        {meta_ONGAKUKAN_RIFF_ADP, "Ongakukan RIFF WAVE header"},
 };
diff --git a/src/libvgmstream.vcxproj b/src/libvgmstream.vcxproj
index 02164430..8912df0e 100644
--- a/src/libvgmstream.vcxproj
+++ b/src/libvgmstream.vcxproj
@@ -1,4 +1,4 @@
-
+
@@ -85,6 +85,7 @@
+
@@ -206,7 +207,10 @@
+
+
+
diff --git a/src/libvgmstream.vcxproj.filters b/src/libvgmstream.vcxproj.filters
index e877e9b5..58efecb3 100644
--- a/src/libvgmstream.vcxproj.filters
+++ b/src/libvgmstream.vcxproj.filters
@@ -63,6 +63,15 @@
       {20824073-8817-41CF-8A21-D54294A56050}
+      {bda8b035-f70b-4601-b479-4403b1071fe4}
+      {eec1739b-a5e2-4202-98ff-e9fa2bd6ed00}
+      {5d980902-91bd-4e75-aa07-7da27edf5a99}
@@ -437,6 +446,57 @@
       util\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
+      coding\libs\Header Files
@@ -2230,5 +2290,53 @@
       util\Source Files
+      meta\Source Files
+      coding\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
+      coding\libs\Source Files
\ No newline at end of file
diff --git a/src/meta/adp_ongakukan.c b/src/meta/adp_ongakukan.c
new file mode 100644
index 00000000..9069c358
--- /dev/null
+++ b/src/meta/adp_ongakukan.c
@@ -0,0 +1,103 @@
+#include "meta.h"
+#include "../coding/coding.h"
+
+/* Ongakukan RIFF with "ADP" extension [Train Simulator - Midousuji-sen (PS2)] */
+VGMSTREAM* init_vgmstream_ongakukan_adp(STREAMFILE* sf)
+{
+    VGMSTREAM* vgmstream = NULL;
+    off_t start_offset;
+    size_t file_size;
+    bool has_data_chunk = false, has_fact_chunk = false;
+    int loop_flag = 0;
+    int riff_wave_header_size = 0x2c;
+    /* ^ where sound data begins; as a consequence, their tools couldn't even write a full RIFF WAVE header beyond that point. */
+    bool sound_is_adpcm = false;
+    int32_t supposed_size, fmt_size, fmt_offset, offset_of_supposed_last_chunk;
+    int32_t sample_rate, data_size;
+    int16_t num_channels, block_size;
+
+    /* RIFF+WAVE checks */
+    if (!is_id32be(0x00, sf, "RIFF")) goto fail;
+    if (!is_id32be(0x08, sf, "WAVE")) goto fail;
+    /* WAVE "fmt " check */
+    if (!is_id32be(0x0c, sf, "fmt ")) goto fail;
+    /* "adp" extension check (literally the only one) */
+    if (!check_extensions(sf, "adp")) goto fail;
+
+    /* catch the adp file size here and use it whenever needed. */
+    file_size = get_streamfile_size(sf);
+
+    /* the RIFF size from an adp file can go beyond the actual size (e.g. reported 10MB vs 2MB). do quick calcs around this. */
+    supposed_size = ((read_s32le(0x04, sf) - 0x24) >> 2) + 0x2c;
+    if (file_size != supposed_size) goto fail;
+
+    /* read the entire WAVE "fmt " chunk. we start by reading fmt_size and setting fmt_offset. */
+    fmt_size = read_s32le(0x10, sf);
+    fmt_offset = 0x14;
+    if ((fmt_size >= 0x10) && (fmt_size <= 0x12)) /* depending on the adp, fmt_size varies between 0x10 and 0x12 */
+    {
+        if (read_s16le(fmt_offset + 0, sf) != 1) goto fail; /* chunk reports the codec number as signed little-endian PCM, couldn't be more wrong. */
+        num_channels = read_s16le(fmt_offset + 2, sf);
+        sample_rate = read_s32le(fmt_offset + 4, sf);
+        if (read_s16le(fmt_offset + 14, sf) != 0x10) goto fail; /* bit depth as the chunk reports it. */
+        /* the rest of the fmt header is the usual header for 16-bit PCM wav files: bitrate, block size, and the like (see riff.c). */
+        /* if fmt_size == 0x12 there is an additional s16 field that's always zero. */
+    }
+    else {
+        goto fail;
+    }
+
+    /* now calc the var so we can read either the "data" or "fact" chunk;
+     * we need to get to the last WAVE chunk manually, and that means the calc below. */
+    offset_of_supposed_last_chunk = fmt_offset + fmt_size;
+    if (is_id32be(offset_of_supposed_last_chunk + 0, sf, "data")) has_data_chunk = true;
+    if (is_id32be(offset_of_supposed_last_chunk + 0, sf, "fact")) has_fact_chunk = true;
+
+    /* and because sound data *must* start at 0x2c, they had to bork both chunks too, so they're now essentially useless.
+     * they're basically leftovers from the original (lossless) WAV files at this point. */
+    if (has_data_chunk)
+    {
+        /* RIFF adp files have a leftover "data" chunk size... that does NOT match the ADP file size at hand. */
+        supposed_size = (read_s32le(offset_of_supposed_last_chunk + 4, sf) >> 2) + 0x2c;
+        if (file_size != supposed_size) goto fail;
+    }
+
+    if (has_fact_chunk)
+    {
+        /* RIFF adp files also have a cut-off "fact" chunk, so we're just left with a useless number now. */
+        if (read_s16le(offset_of_supposed_last_chunk + 4, sf) != 4) goto fail;
+    }
+
+    /* set start_offset to riff_wave_header_size and calculate data_size by ourselves, basically how Ongakukan does it also. */
+    start_offset = riff_wave_header_size;
+    data_size = (int32_t)(file_size) - riff_wave_header_size;
+
+    /* Ongakukan games using this format just read it by checking for the "ADP" extension in a file name of the programmer's own choosing,
+     * and if the extension is there, they just read the reported "number of samples" and "sample_rate" vars
+     * from the RIFF WAVE "fmt " chunk of an already-opened file with that same name.
+     * they don't even read the RIFF chunks; they just pick those two vars and that's basically it. */
+
+    /* our custom decoder needs at least one flag set. */
+    sound_is_adpcm = true;
+
+    /* build the VGMSTREAM */
+    vgmstream = allocate_vgmstream(num_channels, loop_flag);
+    if (!vgmstream) goto fail;
+
+    vgmstream->meta_type = meta_ONGAKUKAN_RIFF_ADP;
+    vgmstream->sample_rate = sample_rate;
+    vgmstream->codec_data = init_ongakukan_adp(sf, start_offset, data_size, sound_is_adpcm);
+    if (!vgmstream->codec_data) goto fail;
+    vgmstream->coding_type = coding_ONGAKUKAN_ADPCM;
+    vgmstream->layout_type = layout_none;
+    vgmstream->num_samples = ongakukan_adp_get_samples(vgmstream->codec_data);
+
+    if (!vgmstream_open_stream(vgmstream, sf, start_offset))
+        goto fail;
+    return vgmstream;
+fail:
+    close_vgmstream(vgmstream);
+    return NULL;
+}
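To make the two ">> 2" sanity checks above concrete, here is a stand-alone illustration with made-up numbers (not part of the patch). The leftover RIFF/data sizes appear to describe the original 16-bit PCM data, which is four times larger than the ADPCM payload (two samples per ADPCM byte, two bytes per PCM16 sample), so dividing the reported size by 4 and adding the fixed 0x2c header should land back on the actual .adp file size:

    #include <stdio.h>

    int main(void) {
        long file_size = 0x200000;                      /* hypothetical .adp size on disk */
        long adpcm_payload = file_size - 0x2c;          /* sound data after the 0x2c header */
        long riff_reported = adpcm_payload * 4 + 0x24;  /* what the leftover RIFF size field would then claim */
        long supposed_size = ((riff_reported - 0x24) >> 2) + 0x2c; /* the meta's check, as above */

        printf("%s\n", (supposed_size == file_size) ? "match" : "mismatch"); /* prints "match" */
        return 0;
    }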
diff --git a/src/meta/meta.h b/src/meta/meta.h
index 75898d56..cc53fc1d 100644
--- a/src/meta/meta.h
+++ b/src/meta/meta.h
@@ -1018,4 +1018,6 @@ VGMSTREAM* init_vgmstream_ea_sbk(STREAMFILE* sf);
 
 VGMSTREAM* init_vgmstream_dsp_asura_sfx(STREAMFILE* sf);
 
+VGMSTREAM* init_vgmstream_ongakukan_adp(STREAMFILE* sf);
+
 #endif /*_META_H*/
diff --git a/src/meta/riff.c b/src/meta/riff.c
index 609e85dc..8bb5e62c 100644
--- a/src/meta/riff.c
+++ b/src/meta/riff.c
@@ -473,7 +473,8 @@ VGMSTREAM* init_vgmstream_riff(STREAMFILE* sf) {
             file_size -= 0x40; /* [Megami no Etsubo (PSP)] (has extra padding in all files) */
         else if (codec == 0x0011 && file_size - riff_size - 0x08 <= 0x900 && is_id32be(riff_size + 0x08, sf, "cont"))
-            riff_size = file_size - 0x08; /* [Shin Megami Tensei: Imagine (PC)] (extra "cont" info 0x800/0x900 chunk) */
+            riff_size = file_size - 0x08; /* [Shin Megami Tensei: Imagine (PC)] (extra "cont" info 0x800/0x900 chunk) */
+    }
 
     /* check for truncated RIFF */
@@ -1194,7 +1195,7 @@
                 /* end must add +1, but check in case of faulty tools */
                 if (vgmstream->loop_end_sample - 1 == vgmstream->num_samples)
                     vgmstream->loop_end_sample--;
-
+
                 vgmstream->meta_type = meta_RIFX_WAVE_smpl;
             }
         }
diff --git a/src/vgmstream.c b/src/vgmstream.c
index c173d7f5..c01a45c3 100644
--- a/src/vgmstream.c
+++ b/src/vgmstream.c
@@ -533,6 +533,7 @@ init_vgmstream_t init_vgmstream_functions[] = {
     init_vgmstream_dsp_asura_ds2,
     init_vgmstream_dsp_asura_ttss,
     init_vgmstream_dsp_asura_sfx,
+    init_vgmstream_ongakukan_adp,
 
     /* lower priority metas (no clean header identity, somewhat ambiguous, or need extension/companion file to identify) */
     init_vgmstream_agsc,
diff --git a/src/vgmstream_types.h b/src/vgmstream_types.h
index 871a66b5..23a65ba4 100644
--- a/src/vgmstream_types.h
+++ b/src/vgmstream_types.h
@@ -136,6 +136,7 @@ typedef enum {
     coding_ACM,             /* InterPlay ACM */
     coding_CIRCUS_ADPCM,    /* Circus 8-bit ADPCM */
     coding_UBI_ADPCM,       /* Ubisoft 4/6-bit ADPCM */
+    coding_ONGAKUKAN_ADPCM, /* Ongakukan 4-bit ADPCM */
     coding_EA_MT,           /* Electronic Arts MicroTalk (linear-predictive speech codec) */
 
     coding_CIRCUS_VQ,       /* Circus VQ */
@@ -710,6 +711,7 @@ typedef enum {
     meta_VAS_ROCKSTAR,
     meta_EA_SBK,
     meta_DSP_ASURA,
+    meta_ONGAKUKAN_RIFF_ADP,
 } meta_t;