Extensions: change the constant for the complete status
[blender-addons-contrib.git] / io_scene_max / import_max.py
blob2f88ecefbdc4f882f80a894fed7694ac29b065a3
1 # SPDX-FileCopyrightText: 2023-2024 Sebastian Schrand
2 # 2017-2022 Jens M. Plonka
3 # 2005-2018 Philippe Lagadec
5 # SPDX-License-Identifier: GPL-2.0-or-later
7 # Import is based on using information from `olefile` IO source-code
8 # and the FreeCAD Autodesk 3DS Max importer ImportMAX.
10 # `olefile` (formerly OleFileIO_PL) is copyright Philippe Lagadec.
11 # (https://www.decalage.info)
13 # ImportMAX is copyright Jens M. Plonka.
14 # (https://www.github.com/jmplonka/Importer3D)
16 import io
17 import os
18 import re
19 import sys
20 import bpy
21 import math
22 import zlib
23 import array
24 import struct
25 import mathutils
27 from bpy_extras.node_shader_utils import PrincipledBSDFWrapper
30 ###################
31 # DATA STRUCTURES #
32 ###################
34 MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1'
35 WORD_CLSID = "00020900-0000-0000-C000-000000000046"
37 MIN_FILE_SIZE = 1536
38 UNKNOWN_SIZE = 0x7FFFFFFF
39 MAXFILE_SIZE = 0x7FFFFFFFFFFFFFFF
40 MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT
41 DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT
42 FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT
43 ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain
44 FREESECT = 0xFFFFFFFF # (-1) unallocated sector
45 MAX_STREAM = 2 # element is a stream object
46 ROOT_STORE = 5 # element is a root storage
48 TYP_NAME = 0x0962
49 INVALID_NAME = re.compile('^[0-9].*')
50 UNPACK_BOX_DATA = struct.Struct('<HIHHBff').unpack_from # Index, int, 2short, byte, 2float
52 FLOAT_POINT = 0x71F11549498702E7 # Float Wire
53 MATRIX_POS = 0xFFEE238A118F7E02 # Position XYZ
54 MATRIX_ROT = 0x3A90416731381913 # Rotation Wire
55 MATRIX_SCL = 0xFEEE238B118F7C01 # Scale XYZ
56 EDIT_MESH = 0x00000000E44F10B3 # Editable Mesh
57 EDIT_POLY = 0x192F60981BF8338D # Editable Poly
58 CORO_MTL = 0x448931dd70be6506 # CoronaMtl
59 ARCH_MTL = 0x4A16365470B05735 # ArchMtl
60 VRAY_MTL = 0x7034695C37BF3F2F # VRayMtl
62 SKIPPABLE = {
63 0x0000000000001002: 'Camera',
64 0x0000000000001011: 'Omni',
65 0x0000000000001013: 'Free Direct',
66 0x0000000000001020: 'Camera Target',
67 0x0000000000001040: 'Line',
68 0x0000000000001065: 'Rectangle',
69 0x0000000000001097: 'Ellipse',
70 0x0000000000001999: 'Circle',
71 0x0000000000002013: 'Point',
72 0x0000000000009125: 'Biped Object',
73 0x0000000000876234: 'Dummy',
74 0x05622B0D69011E82: 'Compass',
75 0x12A822FB76A11646: 'CV Surface',
76 0x1EB3430074F93B07: 'Particle View',
77 0x2ECCA84028BF6E8D: 'Bone',
78 0x3BDB0E0C628140F6: 'VRayPlane',
79 0x4E9B599047DB14EF: 'Slider',
80 0x522E47057BF61478: 'Sky',
81 0x5FD602DF3C5575A1: 'VRayLight',
82 0x77566F65081F1DFC: 'Plane',
85 CONFIG = []
86 CLS_DATA = []
87 DLL_DIR_LIST = []
88 CLS_DIR3_LIST = []
89 VID_PST_QUE = []
90 SCENE_LIST = []
def get_valid_name(name):
    """Return *name* made valid as an identifier-like label.

    Names beginning with a digit get a leading underscore.  This fixes a
    Python-2 leftover: ``"%s" % name.encode('utf8')`` produced literal
    "b'...'" strings under Python 3.
    """
    if INVALID_NAME.match(name):
        return "_%s" % name
    return name
def i8(data):
    """Return *data* unchanged if it is already an int, else its first byte."""
    if data.__class__ is int:
        return data
    return data[0]
def i16(data, offset=0):
    """Read a little-endian unsigned 16-bit integer at *offset*."""
    return struct.unpack_from("<H", data, offset)[0]
def i32(data, offset=0):
    """Read a little-endian unsigned 32-bit integer at *offset*."""
    return struct.unpack_from("<I", data, offset)[0]
def get_byte(data, offset=0):
    """Read one unsigned byte; return (value, next_offset)."""
    end = offset + 1
    (value,) = struct.unpack('<B', data[offset:end])
    return value, end
def get_short(data, offset=0):
    """Read a little-endian uint16; return (value, next_offset)."""
    end = offset + 2
    (value,) = struct.unpack('<H', data[offset:end])
    return value, end
def get_long(data, offset=0):
    """Read a little-endian uint32; return (value, next_offset)."""
    end = offset + 4
    (value,) = struct.unpack('<I', data[offset:end])
    return value, end
def get_float(data, offset=0):
    """Read a little-endian 32-bit float; return (value, next_offset)."""
    end = offset + 4
    (value,) = struct.unpack('<f', data[offset:end])
    return value, end
def get_bytes(data, offset=0, count=1):
    """Read *count* unsigned bytes; return (tuple_of_values, next_offset)."""
    end = offset + count
    return struct.unpack('<%dB' % count, data[offset:end]), end
def get_shorts(data, offset=0, count=1):
    """Read *count* little-endian uint16; return (tuple, next_offset)."""
    end = offset + 2 * count
    return struct.unpack('<%dH' % count, data[offset:end]), end
def get_longs(data, offset=0, count=1):
    """Read *count* little-endian uint32; return (tuple, next_offset)."""
    end = offset + 4 * count
    return struct.unpack('<%dI' % count, data[offset:end]), end
def get_floats(data, offset=0, count=1):
    """Read *count* little-endian 32-bit floats; return (tuple, next_offset)."""
    end = offset + 4 * count
    return struct.unpack('<%df' % count, data[offset:end]), end
def _clsid(clsid):
    """Convert a raw 16-byte CLSID into its canonical display string."""
    assert len(clsid) == 16
    # An all-zero CLSID is shown as the empty string.
    if not clsid.strip(b"\0"):
        return ""
    fields = (i32(clsid, 0), i16(clsid, 4), i16(clsid, 6)) + tuple(map(i8, clsid[8:16]))
    return ("%08X-%04X-%04X-%02X%02X-" + "%02X" * 6) % fields
169 ###############
170 # DATA IMPORT #
171 ###############
def is_maxfile(filename):
    """Test if *filename* (path, bytes blob, or file object) is a MAX OLE2 container."""
    if hasattr(filename, 'read'):
        # Already-open file object: peek at the magic and rewind.
        header = filename.read(len(MAGIC))
        filename.seek(0)
    elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE:
        # Raw file contents passed directly.
        header = filename[:len(MAGIC)]
    else:
        with open(filename, 'rb') as fp:
            header = fp.read(len(MAGIC))
    return header == MAGIC
class MaxStream(io.BytesIO):
    """Read-only file object over one sector chain of an OLE container.

    Follows the chain starting at *sect* through *fat*, concatenates the
    sectors into a single buffer and exposes it through BytesIO.
    """

    def __init__(self, fp, sect, size, offset, sectorsize, fat, filesize):
        if size == UNKNOWN_SIZE:
            # Unknown stream size: upper-bound it by the whole FAT.
            size = len(fat) * sectorsize
        nb_sectors = (size + (sectorsize - 1)) // sectorsize

        data = []
        for i in range(nb_sectors):
            try:
                fp.seek(offset + sectorsize * sect)
            except (OSError, ValueError):
                # Sector index points outside the file: keep what we have.
                # (Was a bare except, which also swallowed KeyboardInterrupt.)
                break
            data.append(fp.read(sectorsize))
            try:
                # Mask with FREESECT to drop sign-extension of special values.
                sect = fat[sect] & FREESECT
            except IndexError:
                # Chain runs off the end of the FAT: truncated container.
                break
        data = b"".join(data)
        if len(data) >= size:
            data = data[:size]
            self.size = size
        else:
            self.size = len(data)
        io.BytesIO.__init__(self, data)
class MaxFileDirEntry:
    """Directory Entry for a stream or storage in the OLE container."""
    # Layout per [MS-CFB]: name, name length, type, color, sibling/child SIDs,
    # CLSID, user flags, create/modify times, start sector, size low/high.
    STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII'
    DIRENTRY_SIZE = 128
    assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE

    def __init__(self, entry, sid, maxfile):
        self.sid = sid
        self.maxfile = maxfile
        self.kids = []
        self.kids_dict = {}
        self.used = False
        (
            self.name_raw,
            self.namelength,
            self.entry_type,
            self.color,
            self.sid_left,
            self.sid_right,
            self.sid_child,
            clsid,
            self.dwUserFlags,
            self.createTime,
            self.modifyTime,
            self.isectStart,
            self.sizeLow,
            self.sizeHigh
        ) = struct.unpack(MaxFileDirEntry.STRUCT_DIRENTRY, entry)

        if self.namelength > 64:
            self.namelength = 64
        # Name length includes the UTF-16 NUL terminator (2 bytes).
        self.name_utf16 = self.name_raw[:(self.namelength - 2)]
        self.name = maxfile._decode_utf16_str(self.name_utf16)
        # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name)))
        if maxfile.sectorsize == 512:
            # 512-byte-sector files only use the low 32 bits of the size.
            self.size = self.sizeLow
        else:
            self.size = self.sizeLow + (int(self.sizeHigh) << 32)
        self.clsid = _clsid(clsid)
        self.is_minifat = False
        if self.entry_type in (ROOT_STORE, MAX_STREAM) and self.size > 0:
            if self.size < maxfile.minisectorcutoff \
                    and self.entry_type == MAX_STREAM:  # only streams can be in MiniFAT
                self.is_minifat = True
            else:
                self.is_minifat = False
            maxfile._check_duplicate_stream(self.isectStart, self.is_minifat)
        self.sect_chain = None

    def build_sect_chain(self, maxfile):
        """Materialize the sector chain of this entry (lazily, once)."""
        if self.sect_chain:
            return
        if self.entry_type not in (ROOT_STORE, MAX_STREAM) or self.size == 0:
            return
        self.sect_chain = list()
        if self.is_minifat and not maxfile.minifat:
            maxfile.loadminifat()
        next_sect = self.isectStart
        while next_sect != ENDOFCHAIN:
            self.sect_chain.append(next_sect)
            if self.is_minifat:
                next_sect = maxfile.minifat[next_sect]
            else:
                next_sect = maxfile.fat[next_sect]

    def build_storage_tree(self):
        """Recursively attach child entries and sort them by name."""
        if self.sid_child != FREESECT:
            self.append_kids(self.sid_child)
            self.kids.sort()

    def append_kids(self, child_sid):
        """Walk the red-black sibling tree rooted at *child_sid*."""
        if child_sid == FREESECT:
            return
        else:
            child = self.maxfile._load_direntry(child_sid)
            if child.used:
                # Cycle guard: entry already linked into the tree.
                return
            child.used = True
            self.append_kids(child.sid_left)
            name_lower = child.name.lower()
            self.kids.append(child)
            self.kids_dict[name_lower] = child
            self.append_kids(child.sid_right)
            child.build_storage_tree()

    # Entries order and compare by name only.
    def __eq__(self, other):
        return self.name == other.name

    def __lt__(self, other):
        return self.name < other.name

    def __ne__(self, other):
        return not self.__eq__(other)

    def __le__(self, other):
        return self.__eq__(other) or self.__lt__(other)
class ImportMaxFile:
    """Representing an interface for importing .max files (OLE2 compound files)."""

    def __init__(self, filename=None):
        # All header fields are populated by open().
        self._filesize = None
        self.byte_order = None
        self.directory_fp = None
        self.direntries = None
        self.dll_version = None
        self.fat = None
        self.first_difat_sector = None
        self.first_dir_sector = None
        self.first_mini_fat_sector = None
        self.fp = None
        self.header_clsid = None
        self.header_signature = None
        self.mini_sector_shift = None
        self.mini_sector_size = None
        self.mini_stream_cutoff_size = None
        self.minifat = None
        self.minifatsect = None
        self.minisectorcutoff = None
        self.minisectorsize = None
        self.ministream = None
        self.minor_version = None
        self.nb_sect = None
        self.num_difat_sectors = None
        self.num_dir_sectors = None
        self.num_fat_sectors = None
        self.num_mini_fat_sectors = None
        self.reserved1 = None
        self.reserved2 = None
        self.root = None
        self.sector_shift = None
        self.sector_size = None
        self.transaction_signature_number = None
        if filename:
            self.open(filename)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def _decode_utf16_str(self, utf16_str, errors='replace'):
        """Decode a UTF-16LE byte string, replacing undecodable characters."""
        unicode_str = utf16_str.decode('UTF-16LE', errors)
        return unicode_str

    def open(self, filename):
        """Open *filename* (path, bytes blob, or file object) and parse the header/FATs."""
        if hasattr(filename, 'read'):
            self.fp = filename
        elif isinstance(filename, bytes) and len(filename) >= MIN_FILE_SIZE:
            self.fp = io.BytesIO(filename)
        else:
            self.fp = open(filename, 'rb')
        filesize = 0
        self.fp.seek(0, os.SEEK_END)
        try:
            filesize = self.fp.tell()
        finally:
            self.fp.seek(0)
        self._filesize = filesize
        self._used_streams_fat = []
        self._used_streams_minifat = []
        header = self.fp.read(512)
        fmt_header = '<8s16sHHHHHHLLLLLLLLLL'
        header_size = struct.calcsize(fmt_header)
        header1 = header[:header_size]
        (
            self.header_signature,
            self.header_clsid,
            self.minor_version,
            self.dll_version,
            self.byte_order,
            self.sector_shift,
            self.mini_sector_shift,
            self.reserved1,
            self.reserved2,
            self.num_dir_sectors,
            self.num_fat_sectors,
            self.first_dir_sector,
            self.transaction_signature_number,
            self.mini_stream_cutoff_size,
            self.first_mini_fat_sector,
            self.num_mini_fat_sectors,
            self.first_difat_sector,
            self.num_difat_sectors
        ) = struct.unpack(fmt_header, header1)

        self.sector_size = 2**self.sector_shift
        self.mini_sector_size = 2**self.mini_sector_shift
        if self.mini_stream_cutoff_size != 0x1000:
            # Spec-mandated value; normalize malformed headers.
            self.mini_stream_cutoff_size = 0x1000
        self.nb_sect = ((filesize + self.sector_size - 1) // self.sector_size) - 1

        # file clsid
        self.header_clsid = _clsid(header[8:24])
        self.sectorsize = self.sector_size  # i16(header, 30)
        self.minisectorsize = self.mini_sector_size  # i16(header, 32)
        self.minisectorcutoff = self.mini_stream_cutoff_size  # i32(header, 56)
        self._check_duplicate_stream(self.first_dir_sector)
        if self.num_mini_fat_sectors:
            self._check_duplicate_stream(self.first_mini_fat_sector)
        if self.num_difat_sectors:
            self._check_duplicate_stream(self.first_difat_sector)

        # Load file allocation tables
        self.loadfat(header)
        self.loaddirectory(self.first_dir_sector)
        self.minifatsect = self.first_mini_fat_sector

    def close(self):
        self.fp.close()

    def _check_duplicate_stream(self, first_sect, minifat=False):
        """Record *first_sect* as a seen stream start (FAT or MiniFAT scope)."""
        if minifat:
            used_streams = self._used_streams_minifat
        else:
            if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT):
                return
            used_streams = self._used_streams_fat
        if first_sect not in used_streams:
            used_streams.append(first_sect)

    def sector_array(self, sect):
        """Return *sect* bytes as a native uint32 array (little-endian data)."""
        ary = array.array('I', sect)
        if sys.byteorder == 'big':
            ary.byteswap()
        return ary

    def loadfat_sect(self, sect):
        """Append all FAT sectors listed in DIFAT block *sect* to self.fat."""
        if isinstance(sect, array.array):
            fat1 = sect
        else:
            fat1 = self.sector_array(sect)
        isect = None
        for isect in fat1:
            isect = isect & FREESECT
            if isect == ENDOFCHAIN or isect == FREESECT:
                break
            sector = self.getsect(isect)
            nextfat = self.sector_array(sector)
            self.fat = self.fat + nextfat
        return isect

    def loadfat(self, header):
        """Build the FAT from the header DIFAT plus any extra DIFAT sectors."""
        sect = header[76:512]
        self.fat = array.array('I')
        self.loadfat_sect(sect)
        if self.num_difat_sectors != 0:
            # 109 FAT sector slots fit in the header; the rest chain through
            # DIFAT sectors whose last entry points to the next DIFAT sector.
            nb_difat_sectors = (self.sectorsize // 4) - 1
            nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors
            isect_difat = self.first_difat_sector
            for i in range(nb_difat):
                sector_difat = self.getsect(isect_difat)
                difat = self.sector_array(sector_difat)
                self.loadfat_sect(difat[:nb_difat_sectors])
                isect_difat = difat[nb_difat_sectors]
        if len(self.fat) > self.nb_sect:
            # FAT sectors may carry unused trailing entries.
            self.fat = self.fat[:self.nb_sect]

    def loadminifat(self):
        """Load the MiniFAT, truncated to the mini-sectors the root stream holds."""
        stream_size = self.num_mini_fat_sectors * self.sector_size
        nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size
        sect = self._open(self.minifatsect, stream_size, force_FAT=True).read()
        self.minifat = self.sector_array(sect)
        self.minifat = self.minifat[:nb_minisectors]

    def getsect(self, sect):
        """Read and return one raw sector (sector 0 lives after the header)."""
        try:
            self.fp.seek(self.sectorsize * (sect + 1))
        except (OSError, ValueError):
            # Was a bare except; narrowed to the errors file seeks can raise.
            print('IndexError: Sector index out of range')
        sector = self.fp.read(self.sectorsize)
        return sector

    def loaddirectory(self, sect):
        """Parse the directory stream starting at *sect* into direntries."""
        self.directory_fp = self._open(sect, force_FAT=True)
        max_entries = self.directory_fp.size // 128
        self.direntries = [None] * max_entries
        root_entry = self._load_direntry(0)
        self.root = self.direntries[0]
        self.root.build_storage_tree()

    def _load_direntry(self, sid):
        """Return (and cache) the directory entry with index *sid*."""
        if self.direntries[sid] is not None:
            return self.direntries[sid]
        self.directory_fp.seek(sid * 128)
        entry = self.directory_fp.read(128)
        self.direntries[sid] = MaxFileDirEntry(entry, sid, self)
        return self.direntries[sid]

    def _open(self, start, size=UNKNOWN_SIZE, force_FAT=False):
        """Return a MaxStream over the chain at *start* (FAT or MiniFAT)."""
        if size < self.minisectorcutoff and not force_FAT:
            # Small streams live inside the root entry's ministream.
            if not self.ministream:
                self.loadminifat()
                size_ministream = self.root.size
                self.ministream = self._open(self.root.isectStart,
                                             size_ministream, force_FAT=True)
            return MaxStream(fp=self.ministream, sect=start, size=size,
                             offset=0, sectorsize=self.minisectorsize,
                             fat=self.minifat, filesize=self.ministream.size)
        else:
            return MaxStream(fp=self.fp, sect=start, size=size,
                             offset=self.sectorsize, sectorsize=self.sectorsize,
                             fat=self.fat, filesize=self._filesize)

    def _find(self, filename):
        """Resolve a '/'-separated storage path to a directory entry SID."""
        if isinstance(filename, str):
            filename = filename.split('/')
        node = self.root
        for name in filename:
            for kid in node.kids:
                if kid.name.lower() == name.lower():
                    break
            node = kid
        return node.sid

    def openstream(self, filename):
        """Open the named stream and return a read-only file object for it."""
        sid = self._find(filename)
        entry = self.direntries[sid]
        return self._open(entry.isectStart, entry.size)
544 ###################
545 # DATA PROCESSING #
546 ###################
class MaxChunk():
    """A single chunk of a .max stream: type id, size, nesting level, index."""

    def __init__(self, types, size, level, number):
        # Identity and position of the chunk within its stream.
        self.types = types
        self.size = size
        self.level = level
        self.number = number
        # Links filled in by the reader; payload set via set_data().
        self.parent = None
        self.previous = None
        self.next = None
        self.data = None

    def __str__(self):
        indent = "" * self.level  # NOTE: empty string repeated is always ""
        return "%s[%4x]%04X:%s" % (indent, self.number, self.types, self.data)
class ByteArrayChunk(MaxChunk):
    """A byte array of a .max chunk."""

    def __init__(self, types, data, level, number):
        MaxChunk.__init__(self, types, data, level, number)

    def set(self, data, fmt, start, end):
        """Unpack data[start:end] with *fmt*; keep the raw bytes on failure."""
        try:
            self.data = struct.unpack(fmt, data[start:end])
        except struct.error:
            # Malformed payload: fall back to the raw bytes.
            self.data = data

    def set_string(self, data):
        """Decode *data* as UTF-16LE text; keep the raw bytes on failure."""
        try:
            self.data = data.decode('UTF-16LE')
        except UnicodeDecodeError:
            self.data = data

    def set_data(self, data):
        """Interpret *data* according to the chunk's type id."""
        if self.types in (0x0340, 0x4001, 0x0456, 0x0962):
            self.set_string(data)  # name strings
        elif self.types in (0x2034, 0x2035):
            self.set(data, '<' + 'I' * (len(data) // 4), 0, len(data))  # uint32 arrays
        elif self.types in (0x2501, 0x2503, 0x2504, 0x2505, 0x2511):
            self.set(data, '<' + 'f' * (len(data) // 4), 0, len(data))  # float arrays
        elif self.types == 0x2510:
            # Floats followed by a trailing uint32.
            self.set(data, '<' + 'f' * (len(data) // 4 - 1) + 'I', 0, len(data))
        elif self.types == 0x0100:
            self.set(data, '<f', 0, len(data))
        else:
            self.data = data
class ClassIDChunk(ByteArrayChunk):
    """The class ID subchunk of a .max chunk."""

    def __init__(self, types, data, level, number):
        MaxChunk.__init__(self, types, data, level, number)
        # DLL directory entry, resolved later by read_class_directory().
        self.dll = None

    def set_data(self, data):
        """Interpret class-directory payloads by type id."""
        if self.types == 0x2042:
            self.set_string(data)  # ClsName
        elif self.types == 0x2060:
            self.set(data, '<IQI', 0, 16)  # DllIndex, ID, SuperID
        else:
            # Unknown payload: keep a colon-separated hex dump.
            self.data = ":".join(format(c, "02x") for c in data)
class DirectoryChunk(ByteArrayChunk):
    """The directory chunk of a .max file."""

    def __init__(self, types, data, level, number):
        MaxChunk.__init__(self, types, data, level, number)

    def set_data(self, data):
        """Decode directory name payloads (both type ids are UTF-16 strings)."""
        # Merged the two identical branches of the original if/elif.
        if self.types in (0x2037, 0x2039):
            self.set_string(data)
class ContainerChunk(MaxChunk):
    """A container chunk in a .max file which includes byte arrays."""

    def __init__(self, types, data, level, number, primReader=ByteArrayChunk):
        MaxChunk.__init__(self, types, data, level, number)
        self.primReader = primReader

    def __str__(self):
        return "%s[%4x]%04X" % ("" * self.level, self.number, self.types)

    def get_first(self, types):
        """Return the first child with the given type id, or None."""
        for child in self.children:
            if child.types == types:
                return child
        return None

    def set_data(self, data):
        """Parse *data* into child chunks one nesting level deeper."""
        # Removed unused locals 'previous'/'next' ('next' shadowed the builtin).
        reader = ChunkReader()
        self.children = reader.get_chunks(data, self.level + 1, ContainerChunk, self.primReader)
class SceneChunk(ContainerChunk):
    """The scene chunk of a .max file which includes the relevant data for blender."""

    def __init__(self, types, data, level, number, primReader=ByteArrayChunk):
        MaxChunk.__init__(self, types, data, level, number)
        self.primReader = primReader
        self.matrix = None

    def __str__(self):
        return "%s[%4x]%s" % ("" * self.level, self.number, get_cls_name(self))

    def set_data(self, data):
        """Parse the scene byte stream into child chunks."""
        # Removed unused locals 'previous'/'next' ('next' shadowed the builtin).
        # print('Scene', "%s\n" %(self))
        reader = ChunkReader()
        self.children = reader.get_chunks(data, self.level + 1,
                                          SceneChunk, ByteArrayChunk)
class ChunkReader():
    """The chunk reader class for decoding the byte arrays."""

    def __init__(self, name=None):
        self.name = name

    def get_chunks(self, data, level, conReader, primReader):
        """Split *data* into a list of chunks, inflating gzip at top level.

        Renamed the reused/shadowing locals of the original ('short' held a
        long, 'long' shadowed nothing useful) and dropped the unused 'old'.
        """
        chunks = []
        offset = 0
        if level == 0:
            # Top-level streams may be gzip-compressed: sniff magic + flags.
            magic, step = get_short(data, 0)
            _, step = get_long(data, step)
            if magic == 0x8B1F:
                compression, step = get_long(data, step)
                if compression in (0xB000000, 0xA040000):
                    data = zlib.decompress(data, zlib.MAX_WBITS | 32)
            print(" reading '%s'..." % self.name, len(data))
        while offset < len(data):
            offset, chunk = self.get_next_chunk(data, offset, level,
                                                len(chunks), conReader, primReader)
            chunks.append(chunk)
        return chunks

    def get_next_chunk(self, data, offset, level, number, conReader, primReader):
        """Decode one chunk header at *offset*; return (next_offset, chunk)."""
        header = 6
        typ, siz, = struct.unpack("<Hi", data[offset:offset + header])
        chunksize = siz & UNKNOWN_SIZE
        if siz == 0:
            # Zero size flags a 64-bit length field following the header.
            siz, = struct.unpack("<q", data[offset + header:offset + header + 8])
            header += 8
            chunksize = siz & MAXFILE_SIZE
        if siz < 0:
            # Negative size marks a container holding child chunks.
            chunk = conReader(typ, chunksize, level, number, primReader)
        else:
            chunk = primReader(typ, chunksize, level, number)
        chunk.set_data(data[offset + header:offset + chunksize])
        return offset + chunksize, chunk
class Point3d():
    """Representing a three dimensional vector plus pointflag."""

    def __init__(self):
        self.points = None
        self.flags = 0
        self.fH = 0
        self.f1 = 0
        self.f2 = 0
        self.fA = []

    def __str__(self):
        pts = '/'.join("%d" % p for p in self.points)
        flag_list = ','.join("%X" % f for f in self.fA)
        return "[%s]-%X,%X,%X,[%s]" % (pts, self.fH, self.f1, self.f2, flag_list)
class Material():
    """Representing a material chunk of a scene chunk."""

    def __init__(self):
        # Property name -> value storage.
        self.data = {}

    def set(self, name, value):
        self.data[name] = value

    def get(self, name, default=None):
        """Return the stored value for *name*; *default* if absent or None."""
        value = self.data.get(name)
        return default if value is None else value
def get_node(index):
    """Return the scene child chunk at *index*, or None when out of range."""
    if isinstance(index, tuple):
        # Reference entries may arrive as 1-tuples from struct unpacking.
        index = index[0]
    children = SCENE_LIST[0].children
    return children[index] if index < len(children) else None
def get_node_parent(node):
    """Return the parent node referenced via chunk 0x0960, or None."""
    if not node:
        return None
    chunk = node.get_first(0x0960)
    if chunk is None:
        return None
    idx, offset = get_long(chunk.data, 0)
    return get_node(idx)
def get_node_name(node):
    """Return the display name stored on *node*, or None."""
    if not node:
        return None
    name = node.get_first(TYP_NAME)
    return name.data if name else None
def get_class(chunk):
    """Return the class-directory entry for the chunk's type id, or None."""
    if chunk.types < len(CLS_DIR3_LIST):
        return CLS_DIR3_LIST[chunk.types]
    return None
def get_dll(chunk):
    """Return the DLL directory entry referenced by the chunk, or None."""
    idx = chunk.get_first(0x2060).data[0]
    return DLL_DIR_LIST[idx] if idx < len(DLL_DIR_LIST) else None
def get_guid(chunk):
    """Return the class GUID of the chunk, falling back to its type id."""
    clid = get_class(chunk)
    return clid.get_first(0x2060).data[1] if clid else chunk.types
def get_super_id(chunk):
    """Return the super-class id of the chunk, or None when unknown."""
    clid = get_class(chunk)
    return clid.get_first(0x2060).data[2] if clid else None
def get_cls_name(chunk):
    """Return a printable class name for the chunk (quoted, or hex type id)."""
    clid = get_class(chunk)
    if not clid:
        return u"%04X" % (chunk.types)
    cls_name = clid.get_first(0x2042).data
    try:
        return "'%s'" % (cls_name)
    except Exception:
        # Was a bare except; narrowed so Ctrl-C is not swallowed.
        return "'%r'" % (cls_name)
def get_references(chunk):
    """Return the list of nodes referenced via chunk 0x2034.

    Returns an empty list when the chunk has no reference table, so callers
    can iterate unconditionally (the original implicitly returned None).
    """
    refs = chunk.get_first(0x2034)
    if refs is None:
        return []
    return [get_node(idx) for idx in refs.data]
def get_reference(chunk):
    """Return a {key: node} mapping built from reference chunk 0x2035."""
    references = {}
    refs = chunk.get_first(0x2035)
    if refs:
        entries = refs.data
        # Entry 0 is a header value; the rest are (key, index) pairs.
        offset = 1
        while offset < len(entries):
            key = entries[offset]
            idx = entries[offset + 1]
            offset += 2
            references[key] = get_node(idx)
    return references
def read_chunks(maxfile, name, filename, conReader=ContainerChunk, primReader=ByteArrayChunk):
    """Read stream *name* from *maxfile* and decode it into a chunk list."""
    with maxfile.openstream(name) as file:
        stream = file.read()
    return ChunkReader(name).get_chunks(stream, 0, conReader, primReader)
def read_class_data(maxfile, filename):
    """Load the 'ClassData' stream into the CLS_DATA global."""
    global CLS_DATA
    CLS_DATA = read_chunks(maxfile, 'ClassData', filename + '.ClsDat.bin')
def read_class_directory(maxfile, filename):
    """Load the class directory and resolve each entry's DLL reference.

    Falls back to the legacy 'ClassDirectory' stream name for older files.
    """
    global CLS_DIR3_LIST
    try:
        CLS_DIR3_LIST = read_chunks(maxfile, 'ClassDirectory3',
                                    filename + '.ClsDir3.bin', ContainerChunk, ClassIDChunk)
    except Exception:
        # Was a bare except; narrowed so Ctrl-C is not swallowed.
        CLS_DIR3_LIST = read_chunks(maxfile, 'ClassDirectory',
                                    filename + '.ClsDir.bin', ContainerChunk, ClassIDChunk)
    for clsdir in CLS_DIR3_LIST:
        clsdir.dll = get_dll(clsdir)
def read_config(maxfile, filename):
    """Load the 'Config' stream into the CONFIG global."""
    global CONFIG
    CONFIG = read_chunks(maxfile, 'Config', filename + '.Cnf.bin')
def read_directory(maxfile, filename):
    """Load the 'DllDirectory' stream into the DLL_DIR_LIST global."""
    global DLL_DIR_LIST
    DLL_DIR_LIST = read_chunks(maxfile, 'DllDirectory',
                               filename + '.DllDir.bin', ContainerChunk, DirectoryChunk)
def read_video_postqueue(maxfile, filename):
    """Load the 'VideoPostQueue' stream into the VID_PST_QUE global."""
    global VID_PST_QUE
    VID_PST_QUE = read_chunks(maxfile, 'VideoPostQueue', filename + '.VidPstQue.bin')
def get_point(floatval, default=0.0):
    """Return the float value held by a controller chunk, or *default*."""
    uid = get_guid(floatval)
    if uid == 0x2007:  # Bezier-Float
        flv = floatval.get_first(0x7127)
        if flv:
            try:
                return flv.get_first(0x2501).data[0]
            except Exception:
                # Was a bare except; narrowed so Ctrl-C is not swallowed.
                print("SyntaxError: %s - assuming 0.0!\n" % (floatval))
        return default
    if uid == FLOAT_POINT:  # Float Wire
        flv = get_references(floatval)[0]
        return get_point(flv)
    else:
        return default
def get_point_3d(chunk, default=0.0):
    """Collect the float values of every controller referenced by *chunk*."""
    floats = []
    if chunk:
        refs = get_references(chunk)
        for ref in refs:
            flt = get_point(ref, default)
            # Guard the computed value; the original tested the reference
            # instead, which is always non-None once get_point() returned.
            if flt is not None:
                floats.append(flt)
    return floats
def get_position(pos):
    """Build a translation matrix from a position controller chunk."""
    position = None
    mtx = mathutils.Matrix.Identity(4)
    if pos:
        uid = get_guid(pos)
        if uid == MATRIX_POS:  # Position XYZ
            position = get_point_3d(pos)
        elif uid in (0x442312, 0x2008):  # TCB Position / Bezier Position
            position = pos.get_first(0x2503).data
        if position:
            mtx = mathutils.Matrix.Translation(position)
    return mtx
def get_rotation(pos):
    """Build a rotation matrix from a rotation controller chunk."""
    rotation = None
    mtx = mathutils.Matrix.Identity(4)
    if pos:
        uid = get_guid(pos)
        if uid == 0x2012:  # Euler XYZ
            rot = get_point_3d(pos)
            rotation = mathutils.Euler((rot[2], rot[1], rot[0])).to_quaternion()
        elif uid == 0x442313:  # TCB Rotation
            rot = pos.get_first(0x2504).data
            rotation = mathutils.Quaternion((rot[0], rot[1], rot[2], rot[3]))
        elif uid == 0x4B4B1003:  # Rotation List
            refs = get_references(pos)
            if len(refs) > 3:
                return get_rotation(refs[0])
        elif uid == MATRIX_ROT:  # Rotation Wire
            return get_rotation(get_references(pos)[0])
        if rotation:
            mtx = mathutils.Matrix.Rotation(rotation.angle, 4, rotation.axis)
    return mtx
def get_scale(pos):
    """Build a scale matrix from a scale controller chunk."""
    mtx = mathutils.Matrix.Identity(4)
    if not pos:
        return mtx
    uid = get_guid(pos)
    if uid in (0x2010, 0x442315):  # Bezier Scale / TCB Zoom
        scale = pos.get_first(0x2501)
        if scale is None:
            scale = pos.get_first(0x2505)
        pos = scale.data
    elif uid == MATRIX_SCL:  # ScaleXYZ
        pos = get_point_3d(pos, 1.0)
    else:
        return mtx
    return mathutils.Matrix.Diagonal(pos[:3]).to_4x4()
def create_matrix(prc):
    """Compose a world matrix from position/rotation/scale controllers."""
    mtx = mathutils.Matrix.Identity(4)
    pos = rot = scl = None
    uid = get_guid(prc)
    if uid == 0x2005:  # Position/Rotation/Scale
        refs = get_references(prc)
        pos = get_position(refs[0])
        rot = get_rotation(refs[1])
        scl = get_scale(refs[2])
    elif uid == 0x9154:  # BipSlave Control
        biped_sub_anim = get_references(prc)[2]
        refs = get_references(biped_sub_anim)
        scl = get_scale(get_references(refs[1])[0])
        rot = get_rotation(get_references(refs[2])[0])
        pos = get_position(get_references(refs[3])[0])
    # Apply in the original order: position, then rotation, then scale.
    for part in (pos, rot, scl):
        if part is not None:
            mtx = part @ mtx
    return mtx
def get_matrix_mesh_material(node):
    """Return the (prs, mesh, material, layer) references of a scene node."""
    refs = get_reference(node)
    if refs:
        # Keyed reference table (chunk 0x2035).
        prs = refs.get(0, None)
        msh = refs.get(1, None)
        mat = refs.get(3, None)
        lyr = refs.get(6, None)
    else:
        # Positional reference list (chunk 0x2034).
        refs = get_references(node)
        prs, msh, mat = refs[0], refs[1], refs[3]
        lyr = refs[6] if len(refs) > 6 else None
    return prs, msh, mat, lyr
def get_property(properties, idx):
    """Return the first parameter child whose leading short equals *idx*."""
    for child in properties.children:
        # Parameter-carrying chunk types share bits with mask 0x100E.
        if (child.types & 0x100E) and get_short(child.data, 0)[0] == idx:
            return child
    return None
def get_color(colors, idx):
    """Return the (r, g, b) float triple stored in property *idx*, or None."""
    prop = get_property(colors, idx)
    if prop is None:
        return None
    # The three floats sit at the end of the property payload.
    col, offset = get_floats(prop.data, len(prop.data) - 12, 3)
    return (col[0], col[1], col[2])
def get_value(colors, idx):
    """Return the float stored in property *idx*, or None."""
    prop = get_property(colors, idx)
    if prop is None:
        return None
    # The float sits at the end of the property payload.
    val, offset = get_float(prop.data, len(prop.data) - 4)
    return val
def get_parameter(colors, fmt):
    """Return a color triple (fmt == 0x1) or a single float from *colors*."""
    if fmt == 0x1:
        para, offset = get_floats(colors.data, len(colors.data) - 12, 3)
    else:
        para, offset = get_float(colors.data, len(colors.data) - 4)
    return para
def get_standard_material(refs):
    """Extract a Material from a Standard material's reference list, or None."""
    material = None
    try:
        if len(refs) > 2:
            colors = refs[2]
            parameters = get_references(colors)[0]
            material = Material()
            material.set('ambient', get_color(parameters, 0x00))
            material.set('diffuse', get_color(parameters, 0x01))
            material.set('specular', get_color(parameters, 0x02))
            material.set('emissive', get_color(parameters, 0x08))
            material.set('shinines', get_value(parameters, 0x0B))
            parablock = refs[4]  # ParameterBlock2
            material.set('glossines', get_value(parablock, 0x02))
            material.set('metallic', get_value(parablock, 0x05))
    except Exception:
        # Best-effort parse; was a bare except which also caught Ctrl-C.
        pass
    return material
def get_vray_material(vry):
    """Extract a Material from a VRayMtl parameter chunk (best effort)."""
    material = Material()
    try:
        material.set('diffuse', get_color(vry, 0x01))
        material.set('specular', get_color(vry, 0x02))
        material.set('shinines', get_value(vry, 0x03))
        material.set('refraction', get_value(vry, 0x09))
        material.set('emissive', get_color(vry, 0x17))
        material.set('glossines', get_value(vry, 0x18))
        material.set('metallic', get_value(vry, 0x19))
    except Exception:
        # Best-effort parse; was a bare except which also caught Ctrl-C.
        pass
    return material
def get_corona_material(mtl):
    """Extract a Material from a CoronaMtl chunk (best effort)."""
    material = Material()
    try:
        cor = mtl.children
        material.set('diffuse', get_parameter(cor[3], 0x1))
        material.set('specular', get_parameter(cor[4], 0x1))
        material.set('emissive', get_parameter(cor[8], 0x1))
        material.set('glossines', get_parameter(cor[9], 0x2))
    except Exception:
        # Best-effort parse; was a bare except which also caught Ctrl-C.
        pass
    return material
def get_arch_material(ad):
    """Extract a Material from an ArchMtl chunk (best effort)."""
    material = Material()
    try:
        material.set('diffuse', get_color(ad, 0x1A))
        material.set('specular', get_color(ad, 0x05))
        material.set('shinines', get_value(ad, 0x0B))
    except Exception:
        # Best-effort parse; was a bare except which also caught Ctrl-C.
        pass
    return material
def adjust_material(obj, mat):
    """Create and assign a Blender material for *obj* from a .max material chunk."""
    material = None
    if mat is not None:
        uid = get_guid(mat)
        if uid == 0x0002:  # Standard
            material = get_standard_material(get_references(mat))
        elif uid == 0x0200:  # Multi/Sub-Object
            material = adjust_material(obj, get_references(mat)[-1])
        elif uid == VRAY_MTL:  # VRayMtl
            material = get_vray_material(get_reference(mat)[1])
        elif uid == CORO_MTL:  # CoronaMtl
            material = get_corona_material(get_references(mat)[0])
        elif uid == ARCH_MTL:  # Arch
            material = get_arch_material(get_references(mat)[0])
    if (obj is not None) and (material is not None):
        objMaterial = bpy.data.materials.new(get_cls_name(mat))
        obj.data.materials.append(objMaterial)
        matShader = PrincipledBSDFWrapper(objMaterial, is_readonly=False, use_nodes=True)
        # Mirror the parsed values onto both viewport and node shader settings.
        matShader.base_color = objMaterial.diffuse_color[:3] = material.get('diffuse', (0.8, 0.8, 0.8))
        matShader.specular_tint = objMaterial.specular_color[:3] = material.get('specular', (1, 1, 1))
        matShader.specular = objMaterial.specular_intensity = material.get('glossines', 0.5)
        matShader.roughness = objMaterial.roughness = 1.0 - material.get('shinines', 0.6)
        matShader.metallic = objMaterial.metallic = material.get('metallic', 0)
        matShader.emission_color = material.get('emissive', (0, 0, 0))
        matShader.ior = material.get('refraction', 1.45)
def adjust_matrix(obj, node):
    """Apply the node's transform to obj and return the matrix used."""
    mtx = create_matrix(node).flatten()
    # NOTE(review): mathutils.Matrix expects a sequence of rows; unpacking a
    # flattened 16-float vector as separate arguments looks suspicious --
    # confirm against a file that actually exercises this path.
    plc = mathutils.Matrix(*mtx)
    obj.matrix_world = plc
    return plc
def create_shape(context, pts, indices, node, key, mtx, mat, umt):
    """Create one Blender mesh object from flat coordinates and face lists.

    pts: flat [x, y, z, x, y, z, ...] coordinate list (may be empty/None).
    indices: list of per-face vertex index sequences (ngons).
    node: scene node chunk; its TYP_NAME chunk provides the object name.
    key: optional sub-mesh id appended to the object name.
    mtx: world matrix assigned to the new object.
    mat: material chunk, applied only when umt is set.
    Returns True in all cases, even when no object was linked.
    """
    name = node.get_first(TYP_NAME).data
    shape = bpy.data.meshes.new(name)
    if (key is not None):
        name = "%s_%d" % (name, key)
    data = []
    if (pts):
        loopstart = []
        looplines = loop = 0
        nb_faces = len(indices)
        # First pass: count loops so the mesh arrays can be sized up front.
        for fid in range(nb_faces):
            polyface = indices[fid]
            looplines += len(polyface)
        shape.vertices.add(len(pts) // 3)
        shape.loops.add(looplines)
        shape.polygons.add(nb_faces)
        shape.vertices.foreach_set("co", pts)
        # Second pass: flatten the index lists and record loop starts.
        for vtx in indices:
            loopstart.append(loop)
            data.extend(vtx)
            loop += len(vtx)
        shape.polygons.foreach_set("loop_start", loopstart)
        shape.loops.foreach_set("vertex_index", data)

    # Only link an object when at least one face index was produced.
    if (len(data) > 0):
        shape.validate()
        shape.update()
        obj = bpy.data.objects.new(name, shape)
        context.view_layer.active_layer_collection.collection.objects.link(obj)
        obj.matrix_world = mtx
        if (umt):
            adjust_material(obj, mat)
        return True
    return True
def calc_point(data):
    """Collect flat 3D coordinates from a stream of (long, x, y, z) records."""
    coordinates = []
    _, pos = get_long(data, 0)  # leading long (record count); not used as bound
    size = len(data)
    while pos < size:
        _, pos = get_long(data, pos)  # per-record long, skipped
        triple, pos = get_floats(data, pos, 3)
        coordinates.extend(triple)
    return coordinates
def calc_point_float(data):
    """Collect flat 3D coordinates from a stream of float triples."""
    coordinates = []
    _, pos = get_long(data, 0)  # leading long (record count); not used as bound
    size = len(data)
    while pos < size:
        triple, pos = get_floats(data, pos, 3)
        coordinates.extend(triple)
    return coordinates
def calc_point_3d(chunk):
    """Decode a chunk into Point3d records (index list plus flagged extras).

    Each record is: a long count, that many longs (stored in pt.points),
    then a short flag word whose bits select optional trailing fields.
    Records with an empty point list are dropped.  On a malformed buffer
    the records parsed so far are returned after printing the error.
    """
    data = chunk.data
    count, offset = get_long(data, 0)  # leading record count; loop runs on offset
    pointlist = []
    try:
        while (offset < len(data)):
            pt = Point3d()
            long, offset = get_long(data, offset)
            pt.points, offset = get_longs(data, offset, long)
            pt.flags, offset = get_short(data, offset)
            # Flag bits gate optional per-record payloads:
            if ((pt.flags & 0x01) != 0):
                pt.f1, offset = get_long(data, offset)
            if ((pt.flags & 0x08) != 0):
                pt.fH, offset = get_short(data, offset)
            if ((pt.flags & 0x10) != 0):
                pt.f2, offset = get_long(data, offset)
            if ((pt.flags & 0x20) != 0):
                # Variable-length tail sized from the record's own count.
                pt.fA, offset = get_longs(data, offset, 2 * (long - 3))
            if (len(pt.points) > 0):
                pointlist.append(pt)
    except Exception as exc:
        print('ArrayError:\n', "%s: offset = %d\n" % (exc, offset))
    return pointlist
def get_point_array(values):
    """Decode a counted array of float triples into a flat vertex list."""
    vertices = []
    if len(values) >= 4:
        total, pos = get_long(values, 0)
        for _ in range(total):
            triple, pos = get_floats(values, pos, 3)
            vertices.extend(triple)
    return vertices
def get_poly_4p(points):
    """Group polygon index lists by their fH key.

    points: iterable of Point3d-like records with .points (the ngon's
    vertex indices) and .fH (the sub-mesh key, e.g. a material id).
    Returns a dict mapping each key to the list of ngons carrying it,
    in input order.  Empty input yields an empty dict.
    """
    vertex = {}
    for point in points:
        # setdefault replaces the manual "if key not in vertex" dance.
        vertex.setdefault(point.fH, []).append(point.points)
    return vertex
def get_poly_5p(data):
    """Read a counted list of 3-long face records (8 trailing bytes skipped)."""
    total, pos = get_long(data, 0)
    faces = []
    for _ in range(total):
        triple, pos = get_longs(data, pos, 3)
        pos += 8  # skip the two unused longs of each record
        faces.append(triple)
    return faces
def get_poly_6p(data):
    """Read 6-long face records, trimming trailing negative indices."""
    polylist = []
    pos = get_long(data, 0)[1]  # skip the leading count; loop runs on offset
    size = len(data)
    while pos < size:
        record, pos = get_longs(data, pos, 6)
        last = 5
        # Drop negative sentinel entries from the tail (down to index 4).
        while last > 3 and record[last] < 0:
            last -= 1
        if last > 2:
            polylist.append(record[1:last])
    return polylist
def get_poly_data(chunk):
    """Read variable-length polygons: each a long count then that many longs."""
    polylist = []
    buffer = chunk.data
    pos = 0
    size = len(buffer)
    while pos < size:
        length, pos = get_long(buffer, pos)
        indices, pos = get_longs(buffer, pos, length)
        polylist.append(indices)
    return polylist
def create_editable_poly(context, node, msh, mat, mtx, umt, uvm):
    """Build Blender meshes from an 'Editable Poly' chunk (type 0x08FE).

    Scans the poly chunk's children by type code, collecting coordinates
    and one of three face encodings, then creates shapes for whichever
    encoding was present.  When uvm is set, additional shapes are built
    from the per-index coordinate/polygon lists gathered alongside.
    Returns True when at least one shape was created.
    """
    coords = point4i = point6i = pointNi = None
    poly = msh.get_first(0x08FE)
    created = False
    lidx = []  # keys from 0x0124 chunks
    lcrd = []  # coordinate lists from 0x0128 chunks
    lply = []  # polygon lists from 0x012B chunks
    if (poly):
        for child in poly.children:
            if (child.types == 0x0100):
                coords = calc_point(child.data)          # vertex coordinates
            elif (child.types == 0x0108):
                point6i = child.data                     # 6-long face records
            elif (child.types == 0x011A):
                point4i = calc_point_3d(child)           # flagged Point3d records
            elif (child.types == 0x0310):
                pointNi = child.data                     # counted 5-long face records
            elif (child.types == 0x0124):
                lidx.append(get_long(child.data, 0)[0])
            elif (child.types == 0x0128):
                lcrd.append(calc_point_float(child.data))
            elif (child.types == 0x012B):
                lply.append(get_poly_data(child))
    # Prefer the Point3d encoding, then 6-long, then 5-long records.
    if (point4i is not None):
        vertex = get_poly_4p(point4i)
        if (len(vertex) > 0):
            # One shape per fH key (sub-mesh).
            for key, ngons in vertex.items():
                created |= create_shape(context, coords, ngons,
                                        node, key, mtx, mat, umt)
        else:
            created = True
    elif (point6i is not None):
        ngons = get_poly_6p(point6i)
        created = create_shape(context, coords, ngons, node,
                               None, mtx, mat, umt)
    elif (pointNi is not None):
        ngons = get_poly_5p(pointNi)
        created = create_shape(context, coords, ngons, node,
                               None, mtx, mat, umt)
    # lidx/lcrd/lply are parallel lists collected above, one entry per chunk.
    if (uvm and len(lidx) > 0):
        for i in range(len(lidx)):
            created |= create_shape(context, lcrd[i], lply[i],
                                    node, lidx[i], mtx, mat, umt)
    return created
def create_editable_mesh(context, node, msh, mat, mtx, umt):
    """Build a Blender mesh from an 'Editable Mesh' chunk (type 0x08FE)."""
    poly = msh.get_first(0x08FE)
    if not poly:
        return False
    vertex_chunk = poly.get_first(0x0914)
    clsid_chunk = poly.get_first(0x0912)
    coordinates = get_point_array(vertex_chunk.data)
    faces = get_poly_5p(clsid_chunk.data)
    return create_shape(context, coordinates, faces, node, None, mtx, mat, umt)
def create_shell(context, node, shell, mat, mtx, umt, uvm):
    """Resolve a shell modifier to its baked mesh and build that mesh."""
    baked = get_references(shell)[-1]
    if (get_cls_name(baked) == "'Editable Poly'"):
        return create_editable_poly(context, node, baked, mat, mtx, umt, uvm)
    return create_editable_mesh(context, node, baked, mat, mtx, umt)
def create_skipable(context, node, skip):
    """Log a node type the importer intentionally ignores; counts as handled."""
    label = node.get_first(TYP_NAME).data
    print(" skipping %s '%s'... " % (skip, label))
    return True
def create_mesh(context, node, msh, mtx, mat, umt, uvm):
    """Dispatch geometry creation on the mesh chunk's class GUID.

    Returns (created, uid): whether a shape was built (or knowingly
    skipped) and the GUID that drove the dispatch.
    """
    uid = get_guid(msh)
    msh.geometry = None
    if (uid == EDIT_MESH):
        return create_editable_mesh(context, node, msh, mat, mtx, umt), uid
    if (uid == EDIT_POLY):
        return create_editable_poly(context, node, msh, mat, mtx, umt, uvm), uid
    if (uid in {0x2032, 0x2033}):
        return create_shell(context, node, msh, mat, mtx, umt, uvm), uid
    skip = SKIPPABLE.get(uid)
    if (skip is not None):
        return create_skipable(context, node, skip), uid
    return False, uid
def create_object(context, node, mscale, usemat, uvmesh, transform):
    """Build the Blender object for one scene node.

    Accumulates ancestor transforms up to the scene root (GUID 0x02),
    then creates the node's mesh with either the full transform (when
    transform is set) or only the global scale matrix.
    """
    parent = get_node_parent(node)
    node.parent = parent
    prs, msh, mat, lyr = get_matrix_mesh_material(node)
    # Multiply in each ancestor's matrix until the scene root is reached.
    while ((parent is not None) and (get_guid(parent) != 0x02)):
        parent_mtx = parent.matrix
        if (parent_mtx):
            prs = prs.dot(parent_mtx)
        parent = get_node_parent(parent)
    if (transform):
        mtx = create_matrix(prs) @ mscale
    else:
        mtx = mscale
    created, uid = create_mesh(context, node, msh, mtx, mat, usemat, uvmesh)
def make_scene(context, mscale, usemat, uvmesh, transform, parent):
    """Instantiate every scene-node chunk under parent as a Blender object.

    Only SceneChunk children whose GUID and super-id are both 0x01 are
    node chunks; failures on a single node are logged and do not abort
    the rest of the import.
    """
    for chunk in parent.children:
        if not isinstance(chunk, SceneChunk):
            continue
        if (get_guid(chunk) != 0x01) or (get_super_id(chunk) != 0x01):
            continue
        try:
            create_object(context, chunk, mscale, usemat, uvmesh, transform)
        except Exception as exc:
            print('ImportError:', exc, chunk)
def read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform):
    """Parse the 'Scene' stream into SCENE_LIST and build its first tree."""
    global SCENE_LIST
    # Chunks are cached module-wide so other readers can reference them.
    SCENE_LIST = read_chunks(maxfile, 'Scene', filename + '.Scn.bin', conReader=SceneChunk)
    make_scene(context, mscale, usemat, uvmesh, transform, SCENE_LIST[0])
def read(context, filename, mscale, usemat, uvmesh, transform):
    """Validate a .max file and import its scene into the current context."""
    if not is_maxfile(filename):
        print("File seems to be no 3D Studio Max file!")
        return
    maxfile = ImportMaxFile(filename)
    # Parse the auxiliary streams first, then the scene itself.
    read_class_data(maxfile, filename)
    read_config(maxfile, filename)
    read_directory(maxfile, filename)
    read_class_directory(maxfile, filename)
    read_video_postqueue(maxfile, filename)
    read_scene(context, maxfile, filename, mscale, usemat, uvmesh, transform)
def load(operator, context, files=None, directory="", filepath="", scale_objects=1.0, use_material=True,
         use_uv_mesh=False, use_collection=False, use_apply_matrix=False, global_matrix=None):
    """Operator entry point: import one or more .max files.

    files/directory come from the file-select operator; each file may be
    imported into its own collection (use_collection).  scale_objects and
    global_matrix combine into the import-space transform.  Returns
    {'FINISHED'} for the operator protocol.
    """
    context.window.cursor_set('WAIT')
    mscale = mathutils.Matrix.Scale(scale_objects, 4)
    if global_matrix is not None:
        mscale = global_matrix @ mscale

    # Remember the active collection so it can be restored afterwards.
    default_layer = context.view_layer.active_layer_collection.collection
    for fl in files:
        if use_collection:
            # One collection per file, named after the file's base name.
            collection = bpy.data.collections.new(fl.name.split(".")[0])
            context.scene.collection.children.link(collection)
            context.view_layer.active_layer_collection = context.view_layer.layer_collection.children[collection.name]
        read(context, os.path.join(directory, fl.name), mscale, usemat=use_material, uvmesh=use_uv_mesh, transform=use_apply_matrix)

    active = context.view_layer.layer_collection.children.get(default_layer.name)
    if active is not None:
        context.view_layer.active_layer_collection = active

    context.window.cursor_set('DEFAULT')

    return {'FINISHED'}