Mirror of https://github.com/mon/ifstools.git (synced 2025-02-20 03:51:14 +01:00)

Support v1 IFS files, add convenience flags
Commit 471e825c61, parent 21488cc3c0

README.md | 32
@@ -17,24 +17,32 @@ Install Python, then:
 ## Usage
 
 ```
-usage: ifstools [-h] [-y] [-o OUT_DIR] [--tex-only] [--nocache] [-s] [-r]
-                file.ifs|folder_ifs [file.ifs|folder_ifs ...]
+usage: ifstools [-h] [-e] [-y] [-o OUT_DIR] [--tex-only] [--nocache] [-m] [-s]
+                [-r]
+                file_to_unpack.ifs|folder_to_repack_ifs
+                [file_to_unpack.ifs|folder_to_repack_ifs ...]
 
 Unpack/pack IFS files and textures
 
 positional arguments:
-  file.ifs|folder_ifs   files/folders to process. Files will be unpacked,
-                        folders will be repacked
+  file_to_unpack.ifs|folder_to_repack_ifs
+                        files/folders to process. Files will be unpacked,
+                        folders will be repacked
 
 optional arguments:
   -h, --help            show this help message and exit
-  -y                    don't prompt for file/folder overwrite
-  -o OUT_DIR            output directory
-  --tex-only            only extract textures
-  --nocache             ignore texture cache, recompress all
-  -s, --silent          don't display files as they are processed
-  -r, --norecurse       if file contains another IFS, don't extract its
-                        contents
+  -e, --extract-folders
+                        do not repack folders, instead unpack any IFS files
+                        inside them
+  -y                    don't prompt for file/folder overwrite
+  -o OUT_DIR            output directory
+  --tex-only            only extract textures
+  --nocache             ignore texture cache, recompress all
+  -m, --extract-manifest
+                        extract the IFS manifest for inspection
+  -s, --silent          don't display files as they are processed
+  -r, --norecurse       if file contains another IFS, don't extract its
+                        contents
 ```
 
 Notes:
@@ -60,6 +60,10 @@ class GenericFile(Node):
         # offset, size, timestamp
         elem.text = '{} {} {}'.format(len(data_blob.getvalue()), len(data), self.time)
         data_blob.write(data)
+        # 16 byte alignment
+        align = len(data) % 16
+        if align:
+            data_blob.write(b'\0' * (16-align))
 
     @property
     def disk_path(self):
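The padding added here (and repeated in the ImageFile hunk below) keeps each file's data so that the next entry starts on a 16-byte boundary. A minimal standalone sketch of that rule, not the project's actual helper:

```
import io

def write_aligned(blob, data, boundary=16):
    """Append data, then zero-pad so the next write starts on a boundary."""
    blob.write(data)
    remainder = len(data) % boundary
    if remainder:
        blob.write(b'\0' * (boundary - remainder))

blob = io.BytesIO()
write_aligned(blob, b'abc')             # 3 data bytes + 13 bytes of padding
assert len(blob.getvalue()) == 16
```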
@@ -1,5 +1,6 @@
 from itertools import chain
 from os.path import getmtime, basename, join
+from collections import OrderedDict
 
 import lxml.etree as etree
 
@@ -24,7 +25,7 @@ class GenericFolder(Node):
         if element.text:
             self.time = int(element.text)
 
-        self.files = {}
+        self.files = OrderedDict()
         self.folders = {}
         for child in element.iterchildren(tag=etree.Element):
             filename = Node.fix_name(child.tag)
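`OrderedDict` keeps child files in the order they appear in the manifest; plain `dict` only guarantees insertion order from CPython 3.7 onward, and this package still supports Python 2 (see the `future` requirement in setup.py). A tiny illustration with made-up child names:

```
from collections import OrderedDict

files = OrderedDict()
for name in ('0.png', 'texturelist.xml', '1.png'):   # made-up child names
    files[name] = object()                           # placeholder for a file node

# iteration follows manifest order on every supported interpreter
assert list(files) == ['0.png', 'texturelist.xml', '1.png']
```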
@@ -103,6 +103,10 @@ class ImageFile(GenericFile):
         elem.attrib['__type'] = '3s32'
         elem.text = '{} {} {}'.format(len(data_blob.getvalue()), len(data), self.time)
         data_blob.write(data)
+        # 16 byte alignment
+        align = len(data) % 16
+        if align:
+            data_blob.write(b'\0' * (16-align))
 
     def _load_im(self):
         data = self.load()
@@ -15,7 +15,6 @@ from .handlers import GenericFolder, MD5Folder, ImageFile
 from . import utils
 
 SIGNATURE = 0x6CAD8F89
-HEADER_SIZE = 36
 
 FILE_VERSION = 3
 
@@ -62,9 +61,12 @@ class IFS:
         ifs_tree_size = file.get_u32()
         manifest_end = file.get_u32()
         self.data_blob = bytes(file.data[manifest_end:])
-        # 16 bytes for manifest md5, unchecked
 
-        self.manifest = KBinXML(file.data[HEADER_SIZE:])
+        if self.file_version > 1:
+            # md5 of manifest, unchecked
+            file.offset += 16
+
+        self.manifest = KBinXML(file.data[file.offset:])
         self.tree = GenericFolder(self.data_blob, self.manifest.xml_doc)
 
         # IFS files repacked with other tools usually have wrong values - don't validate this
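Reading now skips the 16-byte manifest MD5 only for version 2 and later archives. A rough sketch of the header layout this implies, pieced together from this hunk and the repack hunk further down; `struct` stands in for the project's ByteBuffer and big-endian byte order is an assumption:

```
import struct

IFS_SIGNATURE = 0x6CAD8F89

def manifest_offset(header):
    """Offset of the KBinXML manifest, per the layout implied by this diff.

    Fixed fields (big-endian assumed):
      u32 signature, u16 version, u16 version ^ 0xFFFF,
      u32 pack time, u32 tree size, u32 manifest end offset.
    """
    sig, version, not_version, _time, _tree, _end = struct.unpack_from('>IHHIII', header, 0)
    assert sig == IFS_SIGNATURE and version == not_version ^ 0xFFFF
    offset = struct.calcsize('>IHHIII')   # 20 bytes of fixed header
    if version > 1:
        offset += 16                      # md5 of manifest, unchecked
    return offset
```

The 20 fixed bytes plus the 16-byte MD5 add up to the removed `HEADER_SIZE = 36` constant, which is why version 1 files (which have no MD5 block) could not be parsed before this change.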
@@ -111,7 +113,8 @@ class IFS:
     def __str__(self):
         return str(self.tree)
 
-    def extract(self, progress = True, use_cache = True, recurse = True, tex_only = False, path = None):
+    def extract(self, progress = True, use_cache = True, recurse = True,
+            tex_only = False, extract_manifest = False, path = None):
         if path is None:
             path = self.folder_out
         if tex_only and 'tex' not in self.tree.folders:
@@ -119,7 +122,7 @@ class IFS:
         utils.mkdir_silent(path)
         utime(path, (self.time, self.time))
 
-        if self.manifest and not tex_only:
+        if extract_manifest and self.manifest and not tex_only:
             with open(join(path, 'ifs_manifest.xml'), 'wb') as f:
                 f.write(self.manifest.to_text().encode('utf8'))
 
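With the new keyword argument, dumping the manifest is opt-in from the Python API as well as the CLI. A hedged usage sketch; the import path and input file name are assumptions:

```
from ifstools.ifs import IFS   # import path assumed from the hunk context

ifs = IFS('song_data.ifs')                            # hypothetical input file
ifs.extract(progress=False, extract_manifest=True)    # also writes ifs_manifest.xml
```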
@@ -141,7 +144,8 @@ class IFS:
             if recurse and f.name.endswith('.ifs'):
                 rpath = join(path, f.full_path)
                 i = IFS(rpath)
-                i.extract(progress, use_cache, recurse, tex_only, rpath.replace('.ifs','_ifs'))
+                i.extract(progress=progress, use_cache=use_cache, recurse=recurse,
+                    tex_only=tex_only, extract_manifest=extract_manifest, path=rpath.replace('.ifs','_ifs'))
 
         ''' If you can get shared memory for IFS.data_blob working, this will
             be a lot faster. As it is, it gets pickled for every file, and
@@ -179,7 +183,6 @@ class IFS:
         data_size.text = str(len(data))
 
         manifest_bin = self.manifest.to_binary()
-        manifest_end = HEADER_SIZE + len(manifest_bin)
         manifest_hash = hashlib.md5(manifest_bin).digest()
 
         head = ByteBuffer()
@@ -188,10 +191,17 @@ class IFS:
         head.append_u16(self.file_version ^ 0xFFFF)
         head.append_u32(int(unixtime()))
         head.append_u32(self.manifest.mem_size)
 
+        manifest_end = len(manifest_bin) + head.offset + 4
+        if self.file_version > 1:
+            manifest_end += 16
+
         head.append_u32(manifest_end)
 
+        if self.file_version > 1:
+            head.append_bytes(manifest_hash)
+
         ifs_file.write(head.data)
-        ifs_file.write(manifest_hash)
         ifs_file.write(manifest_bin)
         ifs_file.write(data)
 
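On repack, `manifest_end` is now derived from the bytes actually written rather than the removed `HEADER_SIZE` constant. A small worked check of that arithmetic (values taken from this hunk, not code from the project):

```
def manifest_end_offset(manifest_len, file_version):
    # fields already in the header buffer when manifest_end is computed:
    # u32 signature + u16 version + u16 ~version + u32 time + u32 tree size = 16 bytes
    head_offset = 16
    end = manifest_len + head_offset + 4   # +4 for the manifest_end field itself
    if file_version > 1:
        end += 16                          # md5 of the manifest
    return end

# for v2+ this reproduces the old HEADER_SIZE = 36 plus the manifest length
assert manifest_end_offset(100, 2) == 36 + 100
assert manifest_end_offset(100, 1) == 20 + 100
```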
@@ -21,14 +21,28 @@ def get_choice(prompt):
         else:
             print('Please answer y/n')
 
+def extract(i, args, path):
+    if args.progress:
+        print('Extracting...')
+    i.extract(progress = args.progress, use_cache = args.use_cache,
+        recurse = args.recurse, tex_only = args.tex_only, path = path,
+        extract_manifest = args.extract_manifest)
+
+def repack(i, args, path):
+    if args.progress:
+        print('Repacking...')
+    i.repack(progress = args.progress, use_cache = args.use_cache, path = path)
+
 def main():
     parser = argparse.ArgumentParser(description='Unpack/pack IFS files and textures')
-    parser.add_argument('files', metavar='file.ifs|folder_ifs', type=str, nargs='+',
+    parser.add_argument('files', metavar='file_to_unpack.ifs|folder_to_repack_ifs', type=str, nargs='+',
         help='files/folders to process. Files will be unpacked, folders will be repacked')
+    parser.add_argument('-e', '--extract-folders', action='store_true', help='do not repack folders, instead unpack any IFS files inside them', dest='extract_folders')
     parser.add_argument('-y', action='store_true', help='don\'t prompt for file/folder overwrite', dest='overwrite')
     parser.add_argument('-o', default='.', help='output directory', dest='out_dir')
     parser.add_argument('--tex-only', action='store_true', help='only extract textures', dest='tex_only')
     parser.add_argument('--nocache', action='store_false', help='ignore texture cache, recompress all', dest='use_cache')
+    parser.add_argument('-m', '--extract-manifest', action='store_true', help='extract the IFS manifest for inspection', dest='extract_manifest')
     parser.add_argument('-s', '--silent', action='store_false', dest='progress',
         help='don\'t display files as they are processed')
     parser.add_argument('-r', '--norecurse', action='store_false', dest='recurse',
@@ -36,6 +50,14 @@ def main():
 
     args = parser.parse_args()
 
+    if args.extract_folders:
+        dirs = [f for f in args.files if os.path.isdir(f)]
+        # prune
+        args.files = [f for f in args.files if not os.path.isdir(f)]
+        # add the extras
+        for d in dirs:
+            args.files.extend((os.path.join(d,f) for f in os.listdir(d) if f.lower().endswith('.ifs')))
+
     for f in args.files:
         if args.progress:
             print(f)
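The new `-e` handling swaps directory arguments for the `.ifs` files directly inside them instead of treating the directories as repack sources. A minimal sketch of the same expansion in isolation, with made-up paths in the comment:

```
import os

def expand_ifs_dirs(paths):
    """Replace directories with the .ifs files found directly inside them."""
    dirs = [p for p in paths if os.path.isdir(p)]
    files = [p for p in paths if not os.path.isdir(p)]
    for d in dirs:
        files.extend(os.path.join(d, f) for f in os.listdir(d)
                     if f.lower().endswith('.ifs'))
    return files

# e.g. expand_ifs_dirs(['data/graphics', 'single.ifs']) might return
# ['single.ifs', 'data/graphics/a.ifs', 'data/graphics/b.ifs']
```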
@@ -52,13 +74,9 @@ def main():
             continue
 
         if i.is_file:
-            if args.progress:
-                print('Extracting...')
-            i.extract(args.progress, args.use_cache, args.recurse, args.tex_only, path)
+            extract(i, args, path)
         else:
-            if args.progress:
-                print('Repacking...')
-            i.repack(args.progress, args.use_cache, path)
+            repack(i, args, path)
 
 
 if __name__ == '__main__':
setup.py | 4

@@ -6,12 +6,12 @@ requires = [
     'lxml',
     'tqdm',
     'pillow',
-    'kbinxml>=1.2',
+    'kbinxml>=1.4',
 ]
 if sys.version_info < (3,0):
     requires.append('future')
 
-version = '1.2'
+version = '1.3'
 setup(
     name='ifstools',
     description='Extractor/repacker for Konmai IFS files',