-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtexturesToTpf.py
More file actions
2337 lines (1860 loc) · 89.9 KB
/
texturesToTpf.py
File metadata and controls
2337 lines (1860 loc) · 89.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
Combined script to scan directory for PNG texture files, extract hexadecimal IDs,
and create TPF (TexMod Package File) format directly without intermediate log files.
Operates entirely in-memory using a dictionary to map hash IDs to texture paths.
"""
# Standard library imports (always available, no dependency check needed)
import sys
import subprocess
import importlib.util
import os
import re
import io
import time
import struct
import shutil
import configparser
import zlib
from pathlib import Path
from typing import Optional, Dict, Tuple, List
from zipfile import ZIP_DEFLATED
try:
import msvcrt
except ImportError:
msvcrt = None
# ============================================================================
# Configuration Loading
# ============================================================================
def load_config() -> Dict:
    """
    Load configuration from config.ini next to this script, or fall back to defaults.

    Returns:
        Dict with keys: 'input_formats' (list of lowercase, dot-prefixed
        extensions), 'generate_mipmaps' (bool), 'channel_variance_threshold'
        (float), 'normal_variance_threshold' (float), 'enable_compression' (bool).
    """
    # Default values
    defaults = {
        'input_formats': ['.png', '.jpg', '.jpeg', '.bmp'],
        'generate_mipmaps': True,
        'channel_variance_threshold': 0.001,
        'normal_variance_threshold': 0.01,
        'enable_compression': True
    }
    # Get the directory where this script is located
    script_dir = Path(__file__).parent
    config_path = script_dir / 'config.ini'
    # Check if config.ini exists
    if not config_path.exists():
        print("No config.ini detected, using defaults")
        return defaults
    # Parse config file
    config = configparser.ConfigParser()
    try:
        # FIX: pass an explicit encoding; without it config.read() uses the
        # locale-preferred encoding and non-ASCII config values parse
        # differently per platform.
        config.read(config_path, encoding='utf-8')
        result = defaults.copy()
        # Load FileFormats section
        if config.has_section('FileFormats'):
            if config.has_option('FileFormats', 'input_formats'):
                formats_str = config.get('FileFormats', 'input_formats')
                # Normalize each comma-separated entry: strip whitespace,
                # lowercase, ensure a leading dot, drop empties from stray commas.
                result['input_formats'] = [
                    f if f.startswith('.') else '.' + f
                    for f in (part.strip().lower() for part in formats_str.split(','))
                    if f
                ]
        # Load DDS section
        if config.has_section('DDS'):
            if config.has_option('DDS', 'generate_mipmaps'):
                result['generate_mipmaps'] = config.getboolean('DDS', 'generate_mipmaps')
            if config.has_option('DDS', 'channel_variance_threshold'):
                result['channel_variance_threshold'] = config.getfloat('DDS', 'channel_variance_threshold')
            if config.has_option('DDS', 'normal_variance_threshold'):
                result['normal_variance_threshold'] = config.getfloat('DDS', 'normal_variance_threshold')
            if config.has_option('DDS', 'enable_compression'):
                result['enable_compression'] = config.getboolean('DDS', 'enable_compression')
        return result
    except Exception as e:
        # Any parse error falls back to the built-in defaults rather than aborting.
        print(f"Error reading config.ini: {e}, using defaults")
        return defaults
# Load configuration once at module level so every constant below is resolved
# exactly once, at import time.
_config = load_config()
# ============================================================================
# Configuration Constants (User-configurable)
# ============================================================================
# File format settings
INPUT_FORMATS = _config['input_formats']  # Lowercase, dot-prefixed extensions accepted as texture input
# DDS compression settings
GENERATE_MIPMAPS = _config['generate_mipmaps']  # Whether mipmaps are generated during DDS conversion
CHANNEL_VARIANCE_THRESHOLD = _config['channel_variance_threshold'] # Threshold for alpha channel variance (below this = uniform/unused)
NORMAL_VARIANCE_THRESHOLD = _config['normal_variance_threshold'] # Threshold for normal map channel detection (10x higher, below this = unused channel)
ENABLE_COMPRESSION = _config['enable_compression'] # Whether to compress textures to DDS format
# ============================================================================
# Dependency Checking and Installation
# ============================================================================
def _print_colored(message: str, color: str = ''):
"""Print message with optional color (works even without colorama)."""
print(f"{color}{message}\033[0m" if color else message)
def check_and_install_package(package_name: str, import_name: str = None, pip_name: str = None) -> bool:
    """
    Ensure a Python package is importable, installing it via pip when missing.

    Args:
        package_name: Display name used in console messages.
        import_name: Module name probed with importlib (defaults to package_name).
        pip_name: Name handed to ``pip install`` (defaults to package_name).

    Returns:
        True when the package can be imported (possibly after installing it),
        False when installation failed or the installed package still cannot
        be imported.
    """
    if import_name is None:
        import_name = package_name
    if pip_name is None:
        pip_name = package_name
    # Fast path: already importable.
    if importlib.util.find_spec(import_name) is not None:
        return True
    _print_colored(f"\n[Checking] {package_name} is not installed.", '\033[93m')  # Yellow
    _print_colored(f"[Installing] Attempting to install {package_name} via pip...", '\033[96m')  # Cyan
    try:
        # Install through the current interpreter's pip so the package lands
        # in the environment this script is running in.
        subprocess.run(
            [sys.executable, "-m", "pip", "install", pip_name],
            capture_output=True,
            text=True,
            check=True
        )
    except subprocess.CalledProcessError as e:
        _print_colored(f"[Error] Failed to install {package_name}: {e.stderr}", '\033[91m')  # Red
        _print_colored(f"[Manual] Please install manually with: pip install {pip_name}", '\033[93m')  # Yellow
        return False
    except Exception as e:
        _print_colored(f"[Error] Unexpected error installing {package_name}: {e}", '\033[91m')  # Red
        return False
    _print_colored(f"[Success] {package_name} installed successfully!", '\033[92m')  # Green
    # Confirm the freshly installed package is actually importable now.
    if importlib.util.find_spec(import_name) is not None:
        return True
    _print_colored(f"[Warning] {package_name} was installed but cannot be imported.", '\033[93m')  # Yellow
    return False
def check_imagemagick() -> bool:
    """
    Detect whether the ImageMagick ``magick`` CLI is reachable on PATH.

    Returns:
        True when ``magick -version`` runs and exits with status 0,
        False on a non-zero exit, a missing binary, a timeout, or any
        other subprocess failure.
    """
    try:
        probe = subprocess.run(
            ['magick', '-version'],
            capture_output=True,
            text=True,
            timeout=5
        )
        return probe.returncode == 0
    except Exception:
        # Missing executable, timeout, or anything else: treat as unavailable.
        return False
def check_dependencies():
    """
    Check all required dependencies and install missing pip packages.

    Required packages that cannot be installed abort the script via
    sys.exit(1) after an "Press Enter" prompt; optional packages and
    ImageMagick only produce advisory console messages.
    """
    _print_colored("\n" + "="*60, '\033[96m') # Cyan
    _print_colored("Checking Dependencies", '\033[96m') # Cyan
    _print_colored("="*60, '\033[96m') # Cyan
    # Required pip packages as (display name, import name, pip name) triples.
    required_packages = [
        ("Pillow", "PIL", "Pillow"),
        ("numpy", "numpy", "numpy"),
        ("colorama", "colorama", "colorama"),
    ]
    # Optional but recommended packages
    optional_packages = [
        ("numba", "numba", "numba"),
    ]
    all_ok = True
    # Check and install required packages
    _print_colored("\n[Required Packages]", '\033[96m') # Cyan
    for display_name, import_name, pip_name in required_packages:
        if not check_and_install_package(display_name, import_name, pip_name):
            _print_colored(f"[Critical] {display_name} is required but could not be installed!", '\033[91m') # Red
            all_ok = False
    # Check and install optional packages (failures here do not flip all_ok)
    _print_colored("\n[Optional Packages]", '\033[96m') # Cyan
    for display_name, import_name, pip_name in optional_packages:
        if not check_and_install_package(display_name, import_name, pip_name):
            _print_colored(f"[Info] {display_name} is optional but recommended for better performance.", '\033[93m') # Yellow
    # Check ImageMagick (optional, only needed for DDS compression)
    _print_colored("\n[External Tools]", '\033[96m') # Cyan
    if check_imagemagick():
        _print_colored("[Found] ImageMagick is installed and available.", '\033[92m') # Green
    else:
        _print_colored("[Missing] ImageMagick is not found in your system PATH.", '\033[93m') # Yellow
        _print_colored("\nImageMagick is required for optional DDS compression feature.", '\033[93m') # Yellow
        _print_colored("If you want to use DDS compression, please install ImageMagick:", '\033[93m') # Yellow
        _print_colored("  1. Download from: https://imagemagick.org/script/download.php", '\033[96m') # Cyan
        _print_colored("  2. Install ImageMagick on your system", '\033[96m') # Cyan
        _print_colored("  3. Make sure 'magick' command is in your system PATH", '\033[96m') # Cyan
        _print_colored("  4. Verify installation by running: magick -version", '\033[96m') # Cyan
        _print_colored("\n[Note] You can still use the script without ImageMagick,", '\033[93m') # Yellow
        _print_colored("       but DDS compression will not be available.", '\033[93m') # Yellow
    _print_colored("\n" + "="*60, '\033[96m') # Cyan
    if not all_ok:
        _print_colored("\n[Error] Some required dependencies are missing!", '\033[91m') # Red
        _print_colored("Please install the missing packages and try again.", '\033[91m') # Red
        input("\nPress Enter to exit...")
        sys.exit(1)
    _print_colored("[Success] All required dependencies are available!", '\033[92m') # Green
    _print_colored("", '') # Empty line
# Run dependency checks before importing third-party packages that might fail
check_dependencies()
# Import colorama (should be available now after dependency check)
try:
    from colorama import init, Fore, Style
    init(autoreset=True)
except ImportError:
    # Fallback if colorama is not installed (shouldn't happen after check):
    # empty-string stand-ins so f-strings using Fore/Style degrade to plain text.
    class Fore:
        GREEN = ''
        YELLOW = ''
        CYAN = ''
        RED = ''
        BLUE = ''
        MAGENTA = ''
        WHITE = ''
        RESET = ''
    class Style:
        RESET_ALL = ''
    def init(**kwargs):
        pass
# Import Pillow (should be available after dependency check)
try:
    from PIL import Image
except ImportError:
    print(f"{Fore.RED}[Error] Pillow package is required but not available.{Style.RESET_ALL}")
    print(f"{Fore.YELLOW}This should not happen if dependency check passed.{Style.RESET_ALL}")
    input("\nPress Enter to exit...")
    sys.exit(1)
# Import numpy (should be available after dependency check)
try:
    import numpy as np
except ImportError:
    print(f"{Fore.RED}[Error] numpy package is required but not available.{Style.RESET_ALL}")
    print(f"{Fore.YELLOW}This should not happen if dependency check passed.{Style.RESET_ALL}")
    input("\nPress Enter to exit...")
    sys.exit(1)
# Try to use Numba-optimized ZipCrypto, fallback to zipencrypt if Numba unavailable
NUMBA_AVAILABLE = False
try:
    from numba import jit # noqa: F401
    NUMBA_AVAILABLE = True
except ImportError:
    NUMBA_AVAILABLE = False
if NUMBA_AVAILABLE:
    # We'll define NumbaZipFile class below, set ZipFile to it
    ZipFile = None # Will be set after class definition
else:
    # Fallback to zipencrypt
    try:
        from zipencrypt import ZipFile, ZIP_DEFLATED
        # Print warning about numba not being available (non-blocking)
        print(f"{Fore.YELLOW}[Warning] Numba is not installed. Using zipencrypt fallback.{Style.RESET_ALL}")
        print(f"{Fore.YELLOW}[Info] For better performance, consider installing numba: pip install numba{Style.RESET_ALL}")
        print(f"{Fore.YELLOW}[Info] The script will continue using zipencrypt, which works but is slower.{Style.RESET_ALL}\n")
    except ImportError:
        # Neither encryption backend exists: the TPF output cannot be built at all.
        raise ImportError(
            "Either numba or zipencrypt package is required for ZipCrypto encryption. "
            "Install with: pip install numba (preferred) or pip install zipencrypt"
        )
# ============================================================================
# Hardcoded Implementation Constants (Not user-configurable)
# ============================================================================
# ZipCrypto algorithm constants (PKWARE traditional PKZIP encryption)
KEY0_INIT = 0x12345678
KEY1_INIT = 0x23456789
KEY2_INIT = 0x34567890
LCG_MULTIPLIER = 134775813  # Multiplier of the linear-congruential key1 update
CRC32_POLY = 0xEDB88320  # Reflected IEEE 802.3 CRC-32 polynomial
# ZipCrypto password: 42-byte hardcoded password from TexMod specification
ZIPCRYPTO_PASSWORD = bytes([
    0x73, 0x2A, 0x63, 0x7D, 0x5F, 0x0A, 0xA6, 0xBD, 0x7D, 0x65,
    0x7E, 0x67, 0x61, 0x2A, 0x7F, 0x7F, 0x74, 0x61, 0x67, 0x5B,
    0x60, 0x70, 0x45, 0x74, 0x5C, 0x22, 0x74, 0x5D, 0x6E, 0x6A,
    0x73, 0x41, 0x77, 0x6E, 0x46, 0x47, 0x77, 0x49, 0x0C, 0x4B,
    0x46, 0x6F
])
# XOR obfuscation key
XOR_KEY = 0x3FA43FA4
# Pattern: _0X or _0x followed by hex digits, must end at end of filename (before extension);
# the single capture group is the hex ID itself.
ID_PATTERN = re.compile(r'_0[xX]([0-9A-Fa-f]+)$')
# ============================================================================
# Numba-Optimized ZipCrypto Implementation
# ============================================================================
if NUMBA_AVAILABLE:
    # Generate CRC32 table for polynomial 0xEDB88320 (IEEE 802.3).
    # This table is used by ZipCrypto for key initialization and updates;
    # each entry is the CRC of a single input byte value 0..255.
    _crc32_table_list = []
    for i in range(256):
        crc = np.uint32(i)
        for _ in range(8):
            if crc & 1:
                crc = (crc >> 1) ^ 0xEDB88320
            else:
                crc >>= 1
        _crc32_table_list.append(crc)
    # Stored as a uint32 ndarray so the Numba-jitted functions below can
    # index it natively inside nopython mode.
    _CRC32_TABLE = np.array(_crc32_table_list, dtype=np.uint32)
@jit(nopython=True, cache=True)
def _zipcrypto_init_keys(password_bytes):
    """
    Initialize ZipCrypto keys from password bytes.

    Args:
        password_bytes: uint8 array of password bytes

    Returns:
        uint32 array of [key0, key1, key2]
    """
    # Standard ZipCrypto initial key values (PKWARE APPNOTE, traditional
    # encryption section).
    k0 = np.uint32(0x12345678)
    k1 = np.uint32(0x23456789)
    k2 = np.uint32(0x34567890)
    # Feed every password byte through the key schedule, same update rule
    # as encryption uses per plaintext byte.
    for i in range(len(password_bytes)):
        byte = password_bytes[i]
        # Update k0 using CRC32
        idx0 = (k0 ^ byte) & 0xFF
        k0 = (k0 >> 8) ^ _CRC32_TABLE[idx0]
        # Update k1: linear congruential step (multiplier 134775813, increment 1)
        low_k0 = k0 & 0xFF
        k1 = (k1 + low_k0) & 0xFFFFFFFF
        k1 = (k1 * 134775813) & 0xFFFFFFFF
        k1 = (k1 + 1) & 0xFFFFFFFF
        # Update k2 using CRC32 of the high byte of k1
        idx2 = (k2 ^ (k1 >> 24)) & 0xFF
        k2 = (k2 >> 8) ^ _CRC32_TABLE[idx2]
    return np.array([k0, k1, k2], dtype=np.uint32)
@jit(nopython=True, cache=True)
def _zipcrypto_process_chunk(data, keys):
    """
    Encrypt a data chunk using ZipCrypto stream cipher.

    Args:
        data: uint8 array of plaintext data
        keys: uint32 array of [key0, key1, key2]

    Returns:
        Tuple of (encrypted_data, updated_keys) — the updated keys let the
        caller continue the stream across chunks.
    """
    k0, k1, k2 = keys[0], keys[1], keys[2]
    n = len(data)
    ciphertext = np.empty(n, dtype=np.uint8)
    for i in range(n):
        byte = data[i] # This is the PLAINTEXT byte
        # Generate keystream byte from k2; the (| 2) guarantees temp and
        # temp^1 differ only in the low bits, per the ZipCrypto spec.
        temp = (k2 | 2) & 0xFFFFFFFF
        keystream = (((temp * (temp ^ 1)) & 0xFFFFFFFF) >> 8) & 0xFF
        # Encrypt byte
        cipher_byte = byte ^ keystream
        ciphertext[i] = cipher_byte
        # Update keys
        # FIXED: Use 'byte' (plaintext), not 'cipher_byte' — when encrypting,
        # the key schedule consumes the plaintext byte.
        # k0 = crc32(k0, plaintext_byte)
        idx0 = (k0 ^ byte) & 0xFF
        k0 = (k0 >> 8) ^ _CRC32_TABLE[idx0]
        # k1 = (k1 + (k0 & 0xFF)) * 134775813 + 1
        low_k0 = k0 & 0xFF
        k1 = (k1 + low_k0) & 0xFFFFFFFF
        k1 = (k1 * 134775813) & 0xFFFFFFFF
        k1 = (k1 + 1) & 0xFFFFFFFF
        # k2 = crc32(k2, k1 >> 24)
        idx2 = (k2 ^ (k1 >> 24)) & 0xFF
        k2 = (k2 >> 8) ^ _CRC32_TABLE[idx2]
    return ciphertext, np.array([k0, k1, k2], dtype=np.uint32)
class NumbaZipFile:
    """
    High-performance ZipCrypto-enabled ZIP file writer using Numba-optimized encryption.

    Provides a minimal API compatible with zipencrypt.ZipFile for the
    writestr() and write() methods. Writes a streaming ZIP: local file
    headers + data as entries arrive, then the central directory on close().
    """
    def __init__(self, fileobj, mode='w', compression=ZIP_DEFLATED):
        """
        Initialize ZIP file writer.

        Args:
            fileobj: File-like object (e.g., BytesIO) to write ZIP data to
            mode: Must be 'w' for writing
            compression: Compression method (ZIP_DEFLATED or ZIP_STORED)
        """
        if mode != 'w':
            raise ValueError("Only 'w' mode is supported")
        self.fileobj = fileobj
        self.compression = compression
        self.entries = [] # List of (name, crc, size_uncompressed, size_compressed, offset, flags)
        # Running byte offset of the next write into fileobj; used for the
        # central directory's relative local-header offsets.
        self.offset = 0
        self.closed = False
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always finalize the archive, even when the with-body raised.
        self.close()
        return False
    def close(self):
        """Write Central Directory and End of Central Directory records, then close."""
        if self.closed:
            return
        # Write Central Directory
        cd_offset = self.offset
        for name, crc, size_uncomp, size_comp, offset, flags in self.entries:
            self._write_central_directory_entry(name, crc, size_uncomp, size_comp, offset, flags)
        cd_size = self.offset - cd_offset
        # Write End of Central Directory
        self._write_end_of_central_directory(len(self.entries), cd_size, cd_offset)
        self.closed = True
    def writestr(self, filename, data, pwd=None):
        """
        Write string/bytes data to ZIP archive with optional ZipCrypto encryption.

        Args:
            filename: Name of file in archive
            data: String or bytes data to write
            pwd: Password bytes for encryption (None = no encryption)
        """
        if isinstance(data, str):
            data = data.encode('utf-8')
        # Calculate CRC32 of the UNCOMPRESSED data (also feeds the
        # encryption header's check byte below).
        crc = zlib.crc32(data) & 0xFFFFFFFF
        # Compress data if needed; wbits=-15 emits a raw deflate stream
        # (no zlib wrapper), as the ZIP format requires.
        if self.compression == ZIP_DEFLATED:
            compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
            compressed_data = compressor.compress(data) + compressor.flush()
        else:
            compressed_data = data
        # Encrypt if password provided
        flags = 0
        if pwd is not None:
            flags |= 0x1 # Set encryption flag
            compressed_data = self._encrypt_data(compressed_data, pwd, crc)
        # Write Local File Header
        lfh_offset = self.offset
        self._write_local_file_header(filename, len(compressed_data), len(data), crc, flags)
        # Write file data
        self.fileobj.write(compressed_data)
        self.offset += len(compressed_data)
        # Record entry for Central Directory
        self.entries.append((filename, crc, len(data), len(compressed_data), lfh_offset, flags))
    def write(self, filepath, arcname=None, pwd=None):
        """
        Write file from disk to ZIP archive with optional ZipCrypto encryption.

        Args:
            filepath: Path to file on disk (Path or str)
            arcname: Name of file in archive (defaults to filepath.name)
            pwd: Password bytes for encryption (None = no encryption)
        """
        filepath = Path(filepath)
        if arcname is None:
            arcname = filepath.name
        # Read file data (whole file in memory; entries are texture-sized)
        with open(filepath, 'rb') as f:
            data = f.read()
        # Use writestr to handle the rest
        self.writestr(arcname, data, pwd)
    def _encrypt_data(self, data, password, crc):
        """
        Encrypt data using ZipCrypto algorithm.

        Args:
            data: Bytes to encrypt
            password: Password bytes
            crc: CRC32 of uncompressed data

        Returns:
            Encrypted bytes (12-byte header + encrypted body)
        """
        # Initialize keys from password
        pwd_array = np.frombuffer(password, dtype=np.uint8)
        keys = _zipcrypto_init_keys(pwd_array)
        # Generate 12-byte encryption header
        # First 11 bytes are random, last byte is CRC check byte (high byte
        # of the CRC), letting readers verify the password cheaply.
        header_nonce = os.urandom(11)
        check_byte = (crc >> 24) & 0xFF
        header = header_nonce + bytes([check_byte])
        # Encrypt header (this updates the keys)
        header_array = np.frombuffer(header, dtype=np.uint8)
        enc_header, keys = _zipcrypto_process_chunk(header_array, keys)
        # Encrypt body in chunks for better performance; keys are threaded
        # through so the stream cipher state is continuous.
        chunk_size = 256 * 1024 # 256KB chunks
        encrypted_chunks = []
        data_array = np.frombuffer(data, dtype=np.uint8)
        for i in range(0, len(data_array), chunk_size):
            chunk = data_array[i:i + chunk_size]
            enc_chunk, keys = _zipcrypto_process_chunk(chunk, keys)
            encrypted_chunks.append(enc_chunk.tobytes())
        # Combine header and body
        return enc_header.tobytes() + b''.join(encrypted_chunks)
    def _write_local_file_header(self, filename, compressed_size, uncompressed_size, crc, flags):
        """Write Local File Header (LFH) record."""
        filename_bytes = filename.encode('utf-8')
        filename_len = len(filename_bytes)
        # Local File Header structure (30 bytes + filename)
        # Signature (I), version (H), flags (H), compression (H), mod time (H), mod date (H),
        # CRC-32 (I), compressed size (I), uncompressed size (I), filename len (H), extra len (H)
        header = struct.pack('<IHHHHHIIIHH',
            0x04034b50, # Local file header signature (4 bytes)
            20, # Version needed to extract (2.0)
            flags, # General purpose bit flag
            8 if self.compression == ZIP_DEFLATED else 0, # Compression method
            0, # Last mod file time
            0, # Last mod file date
            crc, # CRC-32
            compressed_size, # Compressed size
            uncompressed_size, # Uncompressed size
            filename_len, # Filename length
            0 # Extra field length
        )
        self.fileobj.write(header)
        self.fileobj.write(filename_bytes)
        self.offset += 30 + filename_len
    def _write_central_directory_entry(self, filename, crc, uncompressed_size, compressed_size, offset, flags):
        """Write Central Directory File Header."""
        filename_bytes = filename.encode('utf-8')
        filename_len = len(filename_bytes)
        # Central Directory File Header structure (46 bytes + filename)
        # Signature (I), version made by (H), version needed (H), flags (H), compression (H),
        # mod time (H), mod date (H), CRC-32 (I), compressed size (I), uncompressed size (I),
        # filename len (H), extra len (H), comment len (H), disk num (H), internal attr (H),
        # external attr (I), offset (I)
        header = struct.pack('<IHHHHHHIIIHHHHHII',
            0x02014b50, # Central file header signature (4 bytes)
            20, # Version made by
            20, # Version needed to extract
            flags, # General purpose bit flag
            8 if self.compression == ZIP_DEFLATED else 0, # Compression method
            0, # Last mod file time
            0, # Last mod file date
            crc, # CRC-32
            compressed_size, # Compressed size
            uncompressed_size, # Uncompressed size
            filename_len, # Filename length
            0, # Extra field length
            0, # File comment length
            0, # Disk number start
            0, # Internal file attributes
            0, # External file attributes (4 bytes)
            offset # Relative offset of local header
        )
        self.fileobj.write(header)
        self.fileobj.write(filename_bytes)
        self.offset += 46 + filename_len
    def _write_end_of_central_directory(self, num_entries, cd_size, cd_offset):
        """Write End of Central Directory Record."""
        # Signature (I), disk num (H), disk with CD (H), entries on disk (H), total entries (H),
        # CD size (I), CD offset (I), comment length (H)
        eocd = struct.pack('<IHHHHIIH',
            0x06054b50, # End of central dir signature (4 bytes)
            0, # Number of this disk
            0, # Number of disk with start of central directory
            num_entries, # Total number of entries in central directory on this disk
            num_entries, # Total number of entries in central directory
            cd_size, # Size of central directory
            cd_offset, # Offset of start of central directory
            0 # ZIP file comment length
        )
        self.fileobj.write(eocd)
        self.offset += 22
# Set ZipFile to use NumbaZipFile (we're already inside NUMBA_AVAILABLE block)
ZipFile = NumbaZipFile
# ============================================================================
# Cache Functions
# ============================================================================
def calculate_file_hash(file_path: Path) -> str:
    """
    Compute the MD5 digest of a file's contents.

    Args:
        file_path: File to hash.

    Returns:
        Hex digest string, or "" when the file cannot be read.
    """
    import hashlib
    digest = hashlib.md5()
    try:
        with open(file_path, "rb") as handle:
            # Stream in 4 KiB pieces so large textures never load whole.
            piece = handle.read(4096)
            while piece:
                digest.update(piece)
                piece = handle.read(4096)
        return digest.hexdigest()
    except (OSError, IOError) as e:
        print(f"{Fore.YELLOW}Warning: Could not hash file {file_path.name}: {e}{Style.RESET_ALL}")
        return ""
def load_cache(cache_path: Path) -> Dict[str, dict]:
    """
    Load cache data from a JSON file.

    Args:
        cache_path: Path to the cache.json file.

    Returns:
        Parsed cache dictionary; empty dict when the file is absent,
        unreadable, or not valid JSON.
    """
    import json
    if not cache_path.exists():
        return {}
    try:
        return json.loads(cache_path.read_text(encoding='utf-8'))
    except (json.JSONDecodeError, OSError, IOError) as e:
        print(f"{Fore.YELLOW}Warning: Could not load cache file {cache_path}: {e}{Style.RESET_ALL}")
        return {}
def save_cache(cache_path: Path, cache_data: Dict[str, dict]) -> None:
    """
    Persist cache data as pretty-printed JSON.

    Args:
        cache_path: Destination cache.json path (parents created as needed).
        cache_data: Cache dictionary to serialize.
    """
    import json
    try:
        # Ensure the destination directory tree exists before writing.
        cache_path.parent.mkdir(parents=True, exist_ok=True)
        payload = json.dumps(cache_data, indent=2, ensure_ascii=False)
        cache_path.write_text(payload, encoding='utf-8')
    except (OSError, IOError) as e:
        print(f"{Fore.YELLOW}Warning: Could not save cache file {cache_path}: {e}{Style.RESET_ALL}")
def compare_with_cache(texture_dict: Dict[str, Path], cache_data: Dict[str, dict], texture_dir: Path) -> Dict[str, str]:
    """
    Classify each texture against the cache as "new", "changed", or "unchanged".

    Args:
        texture_dict: Hash-ID -> texture file path mapping.
        cache_data: Previously saved cache (relative-path keyed).
        texture_dir: Base directory used to build relative cache keys.

    Returns:
        Hash-ID -> status string mapping.
    """
    statuses: Dict[str, str] = {}
    for hash_id, texture_path in texture_dict.items():
        # Cache keys are paths relative to texture_dir; fall back to the
        # absolute path when the file lies outside that directory.
        try:
            cache_key = str(texture_path.relative_to(texture_dir))
        except ValueError:
            cache_key = str(texture_path)
        current_hash = calculate_file_hash(texture_path)
        if not current_hash:
            # Unhashable file: safest to treat it as new so it gets processed.
            statuses[hash_id] = "new"
            continue
        if cache_key in cache_data:
            cached_hash = cache_data[cache_key].get('hash', '')
            statuses[hash_id] = "unchanged" if current_hash == cached_hash else "changed"
        else:
            statuses[hash_id] = "new"
    return statuses
# ============================================================================
# Validation Wrapper Functions
# ============================================================================
def is_valid_texture_directory(directory: Path) -> bool:
    """
    Check that *directory* is an existing, readable directory containing at
    least one input-format file whose filename carries a hex texture ID.

    Args:
        directory: Directory path to validate.

    Returns:
        True when at least one matching texture file is found, else False.
    """
    if not directory.exists() or not directory.is_dir():
        return False
    if not os.access(directory, os.R_OK):
        return False
    try:
        with os.scandir(directory) as entries:
            for entry in entries:
                if not entry.is_file():
                    continue
                candidate = Path(entry)
                if candidate.suffix.lower() not in INPUT_FORMATS:
                    continue
                # A single file with an extractable ID makes the directory valid.
                if extract_id_from_filename(candidate):
                    return True
    except (PermissionError, OSError):
        return False
    return False
def validate_texture_file(path: Path) -> bool:
    """
    Validate that a texture file exists, is readable, and has an accepted
    extension (one of the configured input formats, or '.dds' for
    pre-compressed textures).

    Args:
        path: Path to the texture file.

    Returns:
        True when the file can be processed, else False.
    """
    if not path.exists() or not path.is_file():
        return False
    if not os.access(path, os.R_OK):
        return False
    ext = path.suffix.lower()
    return ext in INPUT_FORMATS or ext == '.dds'
def is_valid_target_directory(directory: Path) -> Tuple[bool, str]:
    """
    Check whether *directory* can receive the TPF output.

    Args:
        directory: Directory path to validate.

    Returns:
        (is_valid, message) — message holds the rejection reason when
        invalid, a low-disk-space warning when valid but nearly full,
        or "" otherwise.
    """
    if not directory.exists():
        return False, f"Directory does not exist: {directory}"
    if not directory.is_dir():
        return False, f"Path is not a directory: {directory}"
    if not os.access(directory, os.W_OK):
        return False, f"Directory is not writable: {directory}"
    # Warn (but do not fail) when free space is under 100 MB.
    try:
        free_mb = shutil.disk_usage(directory).free / (1024 * 1024)
        if free_mb < 100:
            return True, f"Warning: Low disk space ({free_mb:.1f} MB free)"
    except OSError:
        pass  # Disk-space probe failed; proceed without a warning.
    return True, ""
# ============================================================================
# Path Resolution Functions
# ============================================================================
def detect_texture_directory(script_dir: Path) -> Optional[Path]:
    """
    Auto-detect the texture directory by probing the script directory first,
    then its parent.

    Args:
        script_dir: Directory the script lives in.

    Returns:
        The first candidate containing valid texture files, or None.
    """
    for candidate in (script_dir, script_dir.parent):
        if candidate.exists() and is_valid_texture_directory(candidate):
            return candidate
    return None
def prompt_directory_selection(start_dir: Path) -> Path:
    """
    Interactive folder selection dialog (console input loop).

    Args:
        start_dir: Starting directory; relative input is resolved against it.

    Returns:
        Selected and validated Path object.

    Raises:
        SystemExit: If the user cancels (empty input or Ctrl-C).
    """
    print(f"\n{Fore.YELLOW}Please select the directory containing texture files.{Style.RESET_ALL}")
    print(f"{Fore.YELLOW}Current directory: {start_dir}{Style.RESET_ALL}")
    # Loop until a directory passes all checks; every failure re-prompts.
    while True:
        try:
            path_input = input(f"\n{Fore.YELLOW}Enter directory path (relative or absolute, or press Enter to cancel): {Style.RESET_ALL}").strip()
            if not path_input:
                print(f"{Fore.YELLOW}Operation cancelled.{Style.RESET_ALL}")
                raise SystemExit(0)
            # Try as absolute path first
            selected_path = Path(path_input)
            if not selected_path.is_absolute():
                # Resolve relative to start directory
                selected_path = start_dir / path_input
            selected_path = selected_path.resolve()
            if not selected_path.exists():
                print(f"{Fore.RED}Directory not found: {selected_path}{Style.RESET_ALL}")
                continue
            if not selected_path.is_dir():
                print(f"{Fore.RED}Path is not a directory: {selected_path}{Style.RESET_ALL}")
                continue
            # Validate it contains texture files
            if not is_valid_texture_directory(selected_path):
                print(f"{Fore.RED}Directory does not contain valid texture files: {selected_path}{Style.RESET_ALL}")
                print(f"{Fore.YELLOW}Please select a directory containing PNG files matching pattern *_0X*.png{Style.RESET_ALL}")
                continue
            print(f"{Fore.GREEN}Selected directory: {selected_path}{Style.RESET_ALL}")
            return selected_path
        except KeyboardInterrupt:
            print(f"\n{Fore.YELLOW}Operation cancelled.{Style.RESET_ALL}")
            raise SystemExit(0)
        except Exception as e:
            # Unexpected errors (e.g. resolve() failures) re-prompt rather than abort.
            print(f"{Fore.RED}Error: {e}{Style.RESET_ALL}")
            continue
def resolve_texture_directory(script_dir: Path) -> Path:
    """
    Resolve the texture directory: try auto-detection first, then fall back
    to the interactive prompt.

    Args:
        script_dir: Directory the script lives in.

    Returns:
        Validated Path object to the texture directory.
    """
    detected_dir = detect_texture_directory(script_dir)
    if detected_dir is not None:
        print(f"{Fore.GREEN}[Auto-detected] Found texture directory: {detected_dir}{Style.RESET_ALL}")
        return detected_dir
    print(f"{Fore.YELLOW}[Auto-detection] No texture files found in script directory or parent.{Style.RESET_ALL}")
    return prompt_directory_selection(script_dir)
def prompt_auto_compress() -> bool:
"""
Prompt user if they want to auto-compress textures to DDS format.
Checks for ImageMagick availability first.
Returns:
True if user wants compression, False otherwise