mirror of
https://github.com/mon/ifstools.git
synced 2024-11-24 01:50:10 +01:00
argb4444 support
This commit is contained in:
parent
e8bf6c13ee
commit
ac7ff78353
58
ifstools/handlers/ImageDecoders.py
Normal file
58
ifstools/handlers/ImageDecoders.py
Normal file
@ -0,0 +1,58 @@
|
||||
from io import BytesIO
|
||||
from struct import unpack, pack
|
||||
|
||||
from PIL import Image
|
||||
from tqdm import tqdm
|
||||
|
||||
# Header for a standard DDS file with DXT5 compression and RGBA pixels.
# The 8-byte height/width pair is inserted between `dxt5_start` and
# `dxt5_end` by the decoder (see decode_dxt5).
# Built field-by-field from the DDS_HEADER layout instead of one opaque blob.
dxt5_start = (
    b'DDS '                      # magic
    + pack('<2I', 124, 0x1007)   # dwSize, dwFlags = CAPS|HEIGHT|WIDTH|PIXELFORMAT
)
dxt5_end = (
    pack('<3I', 0, 0, 0)          # dwPitchOrLinearSize, dwDepth, dwMipMapCount
    + b'\x00' * 44                # dwReserved1[11]
    + pack('<2I', 32, 0x4)        # ddspf.dwSize, ddspf.dwFlags = DDPF_FOURCC
    + b'DXT5'                     # ddspf.dwFourCC
    + pack('<5I', 0, 0, 0, 0, 0)  # ddspf RGB bit count + 4 channel masks (unused)
    + pack('<I', 0x1000)          # dwCaps = DDSCAPS_TEXTURE
    + b'\x00' * 16                # dwCaps2..4, dwReserved2
)
def check_size(ifs_img, data, bytes_per_pixel):
    """Ensure `data` covers the full image; zero-pad it if it is short.

    `ifs_img` supplies `img_size` (width, height) and `name` (for the
    warning). Returns `data` unchanged when it is already long enough,
    otherwise a copy padded with trailing zero bytes.
    """
    expected = ifs_img.img_size[0] * ifs_img.img_size[1] * bytes_per_pixel
    if len(data) >= expected:
        return data
    # tqdm.write keeps the warning from mangling any active progress bar
    tqdm.write('WARNING: Not enough image data for {}, padding'.format(ifs_img.name))
    return data + b'\x00' * (expected - len(data))
def decode_argb8888rev(ifs_img, data):
    """Decode raw little-endian ARGB (i.e. BGRA byte order) pixels to an RGBA image."""
    padded = check_size(ifs_img, data, 4)
    return Image.frombytes('RGBA', ifs_img.img_size, padded, 'raw', 'BGRA')
def encode_argb8888rev(ifs_img, image):
    """Encode an RGBA PIL image back to raw BGRA bytes (inverse of the decoder)."""
    # ifs_img is unused but kept for a uniform encoder signature
    return image.tobytes('raw', 'BGRA')
def decode_argb4444(ifs_img, data):
    """Decode 16-bit ARGB4444 pixel data to an RGBA image.

    Pillow has an 'RGBA;4B' raw mode but no BGRA equivalent, so decode
    as RGBA;4B first, then swap the red and blue channels.
    """
    padded = check_size(ifs_img, data, 2)
    swapped = Image.frombytes('RGBA', ifs_img.img_size, padded, 'raw', 'RGBA;4B')
    r, g, b, a = swapped.split()
    return Image.merge('RGBA', (b, g, r, a))
def decode_dxt5(ifs_img, data):
    """Decode DXT5-compressed texture data by wrapping it in a DDS container.

    A minimal DDS header (with the image height/width spliced in) is
    prepended so Pillow's DDS plugin can do the actual decompression.
    """
    # the IFS payload stores every 16-bit word big-endian; DDS wants little
    word_count = len(data) // 2
    words = unpack('>{}H'.format(word_count), data)
    byteswapped = pack('<{}H'.format(word_count), *words)

    dds = BytesIO()
    dds.write(dxt5_start)
    dds.write(pack('<2I', ifs_img.img_size[1], ifs_img.img_size[0]))
    dds.write(dxt5_end)
    dds.write(byteswapped)
    return Image.open(dds)
# Registry of supported texture formats. Each entry has a 'decoder'
# (IFS bytes -> PIL image) and an 'encoder' (PIL image -> IFS bytes),
# where encoder is None for formats we can only read.
image_formats = {
    'argb8888rev': {'decoder': decode_argb8888rev, 'encoder': encode_argb8888rev},
    'argb4444':    {'decoder': decode_argb4444,    'encoder': None},
    'dxt5':        {'decoder': decode_dxt5,        'encoder': None},
}

# only formats that can be re-encoded are safe to round-trip through a cache
cachable_formats = [fmt for fmt, handlers in image_formats.items()
                    if handlers['encoder'] is not None]
@ -10,19 +10,9 @@ from kbinxml import KBinXML
|
||||
|
||||
from . import GenericFile
|
||||
from . import lz77
|
||||
from .ImageDecoders import image_formats, cachable_formats
|
||||
from .. import utils
|
||||
|
||||
# header for a standard DDS with DXT5 compression and RGBA pixels
|
||||
# gap placed for image height/width insertion
|
||||
dxt5_start = b'DDS |\x00\x00\x00\x07\x10\x00\x00'
|
||||
dxt5_end = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + \
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + \
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + \
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x04' + \
|
||||
b'\x00\x00\x00DXT5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + \
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00' + \
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
|
||||
class ImageFile(GenericFile):
|
||||
def __init__(self, ifs_data, obj, parent = None, path = '', name = ''):
|
||||
raise Exception('ImageFile must be instantiated from existing GenericFile with ImageFile.upgrade_generic')
|
||||
@ -45,7 +35,7 @@ class ImageFile(GenericFile):
|
||||
def extract(self, base, use_cache = True):
|
||||
GenericFile.extract(self, base)
|
||||
|
||||
if use_cache and self.compress and self.from_ifs and self.format == 'argb8888rev':
|
||||
if use_cache and self.compress and self.from_ifs and self.format in cachable_formats:
|
||||
self.write_cache(GenericFile._load_from_ifs(self), base)
|
||||
|
||||
def _load_from_ifs(self, convert_kbin = False):
|
||||
@ -64,23 +54,9 @@ class ImageFile(GenericFile):
|
||||
else:
|
||||
data = data[8:] + data[:8]
|
||||
|
||||
if self.format == 'argb8888rev':
|
||||
need = self.img_size[0] * self.img_size[1] * 4
|
||||
if len(data) < need:
|
||||
print('WARNING: Not enough image data for {}, padding'.format(self.name))
|
||||
data += b'\x00' * (need-len(data))
|
||||
im = Image.frombytes('RGBA', self.img_size, data, 'raw', 'BGRA')
|
||||
elif self.format == 'dxt5':
|
||||
b = BytesIO()
|
||||
b.write(dxt5_start)
|
||||
b.write(pack('<2I', self.img_size[1], self.img_size[0]))
|
||||
b.write(dxt5_end)
|
||||
# the data has swapped endianness for every WORD
|
||||
l = len(data)//2
|
||||
big = unpack('>{}H'.format(l), data)
|
||||
little = pack('<{}H'.format(l), *big)
|
||||
b.write(little)
|
||||
im = Image.open(b)
|
||||
if self.format in image_formats:
|
||||
decoder = image_formats[self.format]['decoder']
|
||||
im = decoder(self, data)
|
||||
else:
|
||||
raise NotImplementedError('Unknown format {}'.format(self.format))
|
||||
|
||||
@ -114,9 +90,13 @@ class ImageFile(GenericFile):
|
||||
im = Image.open(BytesIO(data))
|
||||
if im.mode != 'RGBA':
|
||||
im = im.convert('RGBA')
|
||||
# we translate dxt5 to arb since dxt5 is lossy and not in python
|
||||
if self.format == 'argb8888rev' or self.format == 'dxt5':
|
||||
data = im.tobytes('raw', 'BGRA')
|
||||
|
||||
if self.format in image_formats:
|
||||
encoder = image_formats[self.format]['encoder']
|
||||
if encoder is None:
|
||||
# everything else becomes argb8888rev
|
||||
encoder = image_formats['argb8888rev']['encoder']
|
||||
data = encoder(self, im)
|
||||
else:
|
||||
raise NotImplementedError('Unknown format {}'.format(self.format))
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user