# SPDX-FileCopyrightText: 2013 Campbell Barton
#
# SPDX-License-Identifier: GPL-2.0-or-later

try:
    from . import data_types
except:
    import data_types

from struct import pack
import array
import numpy as np
import zlib

_BLOCK_SENTINEL_LENGTH = ...
_BLOCK_SENTINEL_DATA = ...
_ELEM_META_FORMAT = ...
_ELEM_META_SIZE = ...
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'

# fbx has very strict CRC rules, all based on file timestamp
# until we figure these out, write files at a fixed time. (workaround!)

# Assumes: CreationTime
_TIME_ID = b'1970-01-01 10:00:00:000'
_FILE_ID = b'\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1'
_FOOT_ID = b'\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e'
# Awful exceptions: those "classes" of elements seem to need a block sentinel even when they have no children but do have props.
_ELEMS_ID_ALWAYS_BLOCK_SENTINEL = {b"AnimationStack", b"AnimationLayer"}


class FBXElem:
    __slots__ = (
        "id",
        "props",
        "props_type",
        "elems",

        "_props_length",  # combined length of props
        "_end_offset",  # byte offset from the start of the file.
    )

    def __init__(self, id):
        assert(len(id) < 256)  # length must fit in a uint8
        self.id = id
        self.props = []
        self.props_type = bytearray()
        self.elems = []
        self._end_offset = -1
        self._props_length = -1

    def add_bool(self, data):
        assert(isinstance(data, bool))
        data = pack('?', data)

        self.props_type.append(data_types.BOOL)
        self.props.append(data)

    def add_char(self, data):
        assert(isinstance(data, bytes))
        assert(len(data) == 1)
        data = pack('<c', data)

        self.props_type.append(data_types.CHAR)
        self.props.append(data)

    def add_int8(self, data):
        assert(isinstance(data, int))
        data = pack('<b', data)

        self.props_type.append(data_types.INT8)
        self.props.append(data)

    def add_int16(self, data):
        assert(isinstance(data, int))
        data = pack('<h', data)

        self.props_type.append(data_types.INT16)
        self.props.append(data)

    def add_int32(self, data):
        assert(isinstance(data, int))
        data = pack('<i', data)

        self.props_type.append(data_types.INT32)
        self.props.append(data)

    def add_int64(self, data):
        assert(isinstance(data, int))
        data = pack('<q', data)

        self.props_type.append(data_types.INT64)
        self.props.append(data)

    def add_float32(self, data):
        assert(isinstance(data, float))
        data = pack('<f', data)

        self.props_type.append(data_types.FLOAT32)
        self.props.append(data)

    def add_float64(self, data):
        assert(isinstance(data, float))
        data = pack('<d', data)

        self.props_type.append(data_types.FLOAT64)
        self.props.append(data)

    def add_bytes(self, data):
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.BYTES)
        self.props.append(data)

    def add_string(self, data):
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def add_string_unicode(self, data):
        assert(isinstance(data, str))
        data = data.encode('utf8')
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def _add_array_helper(self, data, prop_type, length):
        # mimic behavior of fbxconverter (also common sense)
        # we could make this configurable.
        encoding = 0 if len(data) <= 128 else 1
        if encoding == 0:
            pass
        elif encoding == 1:
            data = zlib.compress(data, 1)

        comp_len = len(data)

        data = pack('<3I', length, encoding, comp_len) + data

        self.props_type.append(prop_type)
        self.props.append(data)
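
    # For reference, the array payload built by _add_array_helper() is:
    #     <length: uint32> <encoding: uint32> <compressed_length: uint32> <data>
    # where encoding 0 stores the raw little-endian element bytes and encoding 1
    # stores the same bytes zlib-compressed (level 1).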

    def _add_parray_helper(self, data, array_type, prop_type):
        assert (isinstance(data, array.array))
        assert (data.typecode == array_type)

        length = len(data)

        if _IS_BIG_ENDIAN:
            data = data[:]
            data.byteswap()
        data = data.tobytes()

        self._add_array_helper(data, prop_type, length)

    def _add_ndarray_helper(self, data, dtype, prop_type):
        assert (isinstance(data, np.ndarray))
        assert (data.dtype == dtype)

        length = data.size

        if _IS_BIG_ENDIAN and data.dtype.isnative:
            data = data.byteswap()
        data = data.tobytes()

        self._add_array_helper(data, prop_type, length)

    def add_int32_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.int32, data_types.INT32_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_INT32, data)
            self._add_parray_helper(data, data_types.ARRAY_INT32, data_types.INT32_ARRAY)

    def add_int64_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.int64, data_types.INT64_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_INT64, data)
            self._add_parray_helper(data, data_types.ARRAY_INT64, data_types.INT64_ARRAY)

    def add_float32_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.float32, data_types.FLOAT32_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_FLOAT32, data)
            self._add_parray_helper(data, data_types.ARRAY_FLOAT32, data_types.FLOAT32_ARRAY)

    def add_float64_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.float64, data_types.FLOAT64_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_FLOAT64, data)
            self._add_parray_helper(data, data_types.ARRAY_FLOAT64, data_types.FLOAT64_ARRAY)

    def add_bool_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, bool, data_types.BOOL_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_BOOL, data)
            self._add_parray_helper(data, data_types.ARRAY_BOOL, data_types.BOOL_ARRAY)

    def add_byte_array(self, data):
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.byte, data_types.BYTE_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_BYTE, data)
            self._add_parray_helper(data, data_types.ARRAY_BYTE, data_types.BYTE_ARRAY)

    # -------------------------
    # internal helper functions

    def _calc_offsets(self, offset, is_last):
        """
        Call before writing, calculates fixed offsets.
        """
        assert(self._end_offset == -1)
        assert(self._props_length == -1)

        offset += _ELEM_META_SIZE  # 3 uints (or 3 ulonglongs for FBX 7500 and later)
        offset += 1 + len(self.id)  # len + idname

        props_length = 0
        for data in self.props:
            # 1 byte for the prop type
            props_length += 1 + len(data)
        self._props_length = props_length
        offset += props_length

        offset = self._calc_offsets_children(offset, is_last)

        self._end_offset = offset
        return offset
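
    # The offsets computed above match the record layout emitted by _write():
    # element metadata (_ELEM_META_SIZE bytes: end_offset, prop_count, props_length),
    # a uint8 id length plus the id bytes, one type byte plus packed data per prop,
    # then any child records followed by an optional block sentinel.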

    def _calc_offsets_children(self, offset, is_last):
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                offset = elem._calc_offsets(offset, (elem is elem_last))
            offset += _BLOCK_SENTINEL_LENGTH
        elif (not self.props and not is_last) or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            offset += _BLOCK_SENTINEL_LENGTH

        return offset

    def _write(self, write, tell, is_last):
        assert(self._end_offset != -1)
        assert(self._props_length != -1)

        write(pack(_ELEM_META_FORMAT, self._end_offset, len(self.props), self._props_length))

        write(bytes((len(self.id),)))
        write(self.id)

        for i, data in enumerate(self.props):
            write(bytes((self.props_type[i],)))
            write(data)

        self._write_children(write, tell, is_last)

        if tell() != self._end_offset:
            raise IOError("scope length not reached, "
                          "something is wrong (%d)" % (self._end_offset - tell()))

    def _write_children(self, write, tell, is_last):
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                assert(elem.id != b'')
                elem._write(write, tell, (elem is elem_last))
            write(_BLOCK_SENTINEL_DATA)
        elif (not self.props and not is_last) or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            write(_BLOCK_SENTINEL_DATA)


def _write_timedate_hack(elem_root):
    # perform 2 changes
    # - set the FileID
    # - set the CreationTime

    ok = 0
    for elem in elem_root.elems:
        if elem.id == b'FileId':
            assert(elem.props_type[0] == b'R'[0])
            assert(len(elem.props_type) == 1)
            elem.props.clear()
            elem.props_type.clear()

            elem.add_bytes(_FILE_ID)
            ok += 1
        elif elem.id == b'CreationTime':
            assert(elem.props_type[0] == b'S'[0])
            assert(len(elem.props_type) == 1)
            elem.props.clear()
            elem.props_type.clear()

            elem.add_string(_TIME_ID)
            ok += 1

        if ok == 2:
            break

    if ok != 2:
        print("Missing fields!")


# FBX 7500 (aka FBX2016) introduces incompatible changes at binary level:
# * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
# * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
def init_version(fbx_version):
    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, _ELEM_META_FORMAT, _ELEM_META_SIZE

    _BLOCK_SENTINEL_LENGTH = ...
    _BLOCK_SENTINEL_DATA = ...
    _ELEM_META_FORMAT = ...
    _ELEM_META_SIZE = ...

    if fbx_version < 7500:
        _ELEM_META_FORMAT = '<3I'
        _ELEM_META_SIZE = 12
    else:
        _ELEM_META_FORMAT = '<3Q'
        _ELEM_META_SIZE = 24
    _BLOCK_SENTINEL_LENGTH = _ELEM_META_SIZE + 1
    _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
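
# For example, init_version(7400) selects 12-byte element metadata ('<3I') and a
# 13-byte NULL sentinel, while init_version(7500) selects 24-byte metadata ('<3Q')
# and a 25-byte sentinel, matching the FBX2016 changes described above.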


def write(fn, elem_root, version):
    assert(elem_root.id == b'')

    with open(fn, 'wb') as f:
        write = f.write
        tell = f.tell

        init_version(version)

        write(_HEAD_MAGIC)
        write(pack('<I', version))

        # hack since we don't decode time.
        # ideally we would _not_ modify this data.
        _write_timedate_hack(elem_root)

        elem_root._calc_offsets_children(tell(), False)
        elem_root._write_children(write, tell, False)

        write(_FOOT_ID)
        write(b'\x00' * 4)

        # padding for alignment (values between 1 & 16 observed)
        # if already aligned to 16, add a full 16 bytes padding.
        ofs = tell()
        pad = ((ofs + 15) & ~15) - ofs
        if pad == 0:
            pad = 16
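
        # Worked example: ofs == 45 rounds up to 48, so pad == 3;
        # ofs == 48 is already aligned and gets the full 16 bytes.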
        write(b'\0' * pad)

        write(pack('<I', version))

        # unknown magic (always the same)
        write(b'\0' * 120)
        write(b'\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b')
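

# Minimal stand-alone usage sketch (not part of Blender's export path); it
# assumes this module can be run directly with data_types importable, and it
# only exercises the encoder: the resulting file is not a complete FBX document.
if __name__ == "__main__":
    import os
    import tempfile

    root = FBXElem(b'')  # the root element must use an empty id (see write())
    creator = FBXElem(b'Creator')
    creator.add_string(b'encode_bin.py stand-alone test')
    root.elems.append(creator)

    path = os.path.join(tempfile.gettempdir(), "encode_bin_test.fbx")
    # Prints "Missing fields!" because FileId/CreationTime elements are absent.
    write(path, root, 7400)
    print("wrote", path, os.path.getsize(path), "bytes")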