FBX Export: Base patch for numpy speedup
[blender-addons.git] / io_scene_fbx / encode_bin.py
blob8433134a75fbf89c752563c6402df894658fd514
1 # SPDX-License-Identifier: GPL-2.0-or-later
3 # Script copyright (C) 2013 Campbell Barton
# When loaded as part of the add-on package use a relative import; fall back
# to a plain import so the module also works when run stand-alone.
# Fix: catch only ImportError instead of a bare ``except:`` that would also
# swallow unrelated errors (KeyboardInterrupt, SyntaxError in data_types, ...).
try:
    from . import data_types
except ImportError:
    import data_types
10 from struct import pack
11 import array
12 import numpy as np
13 import zlib
# Size (in bytes) of the all-zero sentinel that terminates a nested
# element (child) list in a binary FBX file.
_BLOCK_SENTINEL_LENGTH = 13
_BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)
# FBX payloads are always little-endian; on big-endian hosts array data
# must be byte-swapped before writing.
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
# Magic bytes every binary FBX file starts with.
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'

# fbx has very strict CRC rules, all based on file timestamp
# until we figure these out, write files at a fixed time. (workaround!)

# Assumes: CreationTime
_TIME_ID = b'1970-01-01 10:00:00:000'
# Fixed FileId / footer bytes matching the fixed _TIME_ID above.
_FILE_ID = b'\x28\xb3\x2a\xeb\xb6\x24\xcc\xc2\xbf\xc8\xb0\x2a\xa9\x2b\xfc\xf1'
_FOOT_ID = b'\xfa\xbc\xab\x09\xd0\xc8\xd4\x66\xb1\x76\xfb\x83\x1c\xf7\x26\x7e'

# Awful exceptions: those "classes" of elements seem to need block sentinel even when having no children and some props.
_ELEMS_ID_ALWAYS_BLOCK_SENTINEL = {b"AnimationStack", b"AnimationLayer"}
class FBXElem:
    """One element (node) of a binary FBX document tree.

    An element holds a short bytes id, a list of already-packed property
    payloads (with a parallel type-code byte per property in
    ``props_type``) and a list of child elements.  Build the tree with the
    ``add_*`` methods, then the module-level ``write()`` drives
    ``_calc_offsets()`` followed by ``_write()`` to serialize it.
    """
    __slots__ = (
        "id",          # bytes id of the element (must fit in a uint8 length).
        "props",       # list of packed property payloads (bytes).
        "props_type",  # bytearray, one FBX type-code byte per property.
        "elems",       # list of child FBXElem instances.

        "_props_length",  # combined length of props (-1 until computed).
        "_end_offset",    # byte offset from the start of the file (-1 until computed).
    )

    def __init__(self, id):
        assert(len(id) < 256)  # length must fit in a uint8
        self.id = id
        self.props = []
        self.props_type = bytearray()
        self.elems = []
        self._end_offset = -1
        self._props_length = -1

    def add_bool(self, data):
        """Append a single boolean property."""
        assert(isinstance(data, bool))
        data = pack('?', data)

        self.props_type.append(data_types.BOOL)
        self.props.append(data)

    def add_int16(self, data):
        """Append a single little-endian int16 property."""
        assert(isinstance(data, int))
        data = pack('<h', data)

        self.props_type.append(data_types.INT16)
        self.props.append(data)

    def add_int32(self, data):
        """Append a single little-endian int32 property."""
        assert(isinstance(data, int))
        data = pack('<i', data)

        self.props_type.append(data_types.INT32)
        self.props.append(data)

    def add_int64(self, data):
        """Append a single little-endian int64 property."""
        assert(isinstance(data, int))
        data = pack('<q', data)

        self.props_type.append(data_types.INT64)
        self.props.append(data)

    def add_float32(self, data):
        """Append a single little-endian float32 property."""
        assert(isinstance(data, float))
        data = pack('<f', data)

        self.props_type.append(data_types.FLOAT32)
        self.props.append(data)

    def add_float64(self, data):
        """Append a single little-endian float64 property."""
        assert(isinstance(data, float))
        data = pack('<d', data)

        self.props_type.append(data_types.FLOAT64)
        self.props.append(data)

    def add_bytes(self, data):
        """Append a raw bytes property (uint32 length prefix + payload)."""
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.BYTES)
        self.props.append(data)

    def add_string(self, data):
        """Append a string property from already-encoded bytes."""
        assert(isinstance(data, bytes))
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def add_string_unicode(self, data):
        """Append a string property, encoding the given str as UTF-8."""
        assert(isinstance(data, str))
        data = data.encode('utf8')
        data = pack('<I', len(data)) + data

        self.props_type.append(data_types.STRING)
        self.props.append(data)

    def _add_array_helper(self, data, prop_type, length):
        """Append an array property from its raw (little-endian) bytes.

        :arg data: raw array payload.
        :arg prop_type: FBX type-code byte for the array property.
        :arg length: number of array items (not bytes).
        """
        # mimic behavior of fbxconverter (also common sense)
        # we could make this configurable.
        encoding = 0 if len(data) <= 128 else 1
        if encoding == 0:
            pass
        elif encoding == 1:
            data = zlib.compress(data, 1)

        comp_len = len(data)

        # Array header: item count, encoding flag, encoded byte length.
        data = pack('<3I', length, encoding, comp_len) + data

        self.props_type.append(prop_type)
        self.props.append(data)

    def _add_parray_helper(self, data, array_type, prop_type):
        """Append an array property from an ``array.array``."""
        assert (isinstance(data, array.array))
        assert (data.typecode == array_type)

        length = len(data)

        if _IS_BIG_ENDIAN:
            # FBX data is little-endian; swap a copy so the caller's
            # array is left untouched.
            data = data[:]
            data.byteswap()
        data = data.tobytes()

        self._add_array_helper(data, prop_type, length)

    def _add_ndarray_helper(self, data, dtype, prop_type):
        """Append an array property from a ``numpy.ndarray``."""
        assert (isinstance(data, np.ndarray))
        assert (data.dtype == dtype)

        length = data.size

        if _IS_BIG_ENDIAN and data.dtype.isnative:
            # ndarray.byteswap() returns a swapped copy by default.
            data = data.byteswap()
        data = data.tobytes()

        self._add_array_helper(data, prop_type, length)

    def add_int32_array(self, data):
        """Append an int32 array property (ndarray or iterable of ints)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.int32, data_types.INT32_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_INT32, data)
            self._add_parray_helper(data, data_types.ARRAY_INT32, data_types.INT32_ARRAY)

    def add_int64_array(self, data):
        """Append an int64 array property (ndarray or iterable of ints)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.int64, data_types.INT64_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_INT64, data)
            self._add_parray_helper(data, data_types.ARRAY_INT64, data_types.INT64_ARRAY)

    def add_float32_array(self, data):
        """Append a float32 array property (ndarray or iterable of floats)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.float32, data_types.FLOAT32_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_FLOAT32, data)
            self._add_parray_helper(data, data_types.ARRAY_FLOAT32, data_types.FLOAT32_ARRAY)

    def add_float64_array(self, data):
        """Append a float64 array property (ndarray or iterable of floats)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.float64, data_types.FLOAT64_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_FLOAT64, data)
            self._add_parray_helper(data, data_types.ARRAY_FLOAT64, data_types.FLOAT64_ARRAY)

    def add_bool_array(self, data):
        """Append a boolean array property (ndarray or iterable of bools)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, bool, data_types.BOOL_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_BOOL, data)
            self._add_parray_helper(data, data_types.ARRAY_BOOL, data_types.BOOL_ARRAY)

    def add_byte_array(self, data):
        """Append a byte array property (ndarray or iterable of ints)."""
        if isinstance(data, np.ndarray):
            self._add_ndarray_helper(data, np.byte, data_types.BYTE_ARRAY)
        else:
            if not isinstance(data, array.array):
                data = array.array(data_types.ARRAY_BYTE, data)
            self._add_parray_helper(data, data_types.ARRAY_BYTE, data_types.BYTE_ARRAY)

    # -------------------------
    # internal helper functions

    def _calc_offsets(self, offset, is_last):
        """Call before writing, calculates fixed offsets.

        :arg offset: byte offset of this element from the start of the file.
        :arg is_last: whether this element is the last child of its parent.
        :return: the byte offset just past this element and its children.
        """
        assert(self._end_offset == -1)
        assert(self._props_length == -1)

        offset += 12  # 3 uints
        offset += 1 + len(self.id)  # len + idname

        props_length = 0
        for data in self.props:
            # 1 byte for the prop type
            props_length += 1 + len(data)
        self._props_length = props_length
        offset += props_length

        offset = self._calc_offsets_children(offset, is_last)

        self._end_offset = offset
        return offset

    def _calc_offsets_children(self, offset, is_last):
        """Account for child elements (and any trailing block sentinel)."""
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                offset = elem._calc_offsets(offset, (elem is elem_last))
            offset += _BLOCK_SENTINEL_LENGTH
        elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            if not is_last:
                offset += _BLOCK_SENTINEL_LENGTH

        return offset

    def _write(self, write, tell, is_last):
        """Serialize this element: header, properties, then children.

        :arg write: a file-like ``write`` callable.
        :arg tell: a file-like ``tell`` callable, used to verify offsets.
        :arg is_last: whether this element is the last child of its parent.
        :raises IOError: when the bytes written do not match the offsets
            previously computed by ``_calc_offsets()``.
        """
        assert(self._end_offset != -1)
        assert(self._props_length != -1)

        write(pack('<3I', self._end_offset, len(self.props), self._props_length))

        write(bytes((len(self.id),)))
        write(self.id)

        for i, data in enumerate(self.props):
            write(bytes((self.props_type[i],)))
            write(data)

        self._write_children(write, tell, is_last)

        if tell() != self._end_offset:
            # Fix: the message previously referenced an undefined local
            # ``end_offset``, turning this intended IOError into a NameError.
            raise IOError("scope length not reached, "
                          "something is wrong (%d)" % (self._end_offset - tell()))

    def _write_children(self, write, tell, is_last):
        """Serialize child elements, terminated by a block sentinel."""
        if self.elems:
            elem_last = self.elems[-1]
            for elem in self.elems:
                assert(elem.id != b'')
                elem._write(write, tell, (elem is elem_last))
            write(_BLOCK_SENTINEL_DATA)
        elif not self.props or self.id in _ELEMS_ID_ALWAYS_BLOCK_SENTINEL:
            if not is_last:
                write(_BLOCK_SENTINEL_DATA)
def _write_timedate_hack(elem_root):
    """Overwrite the FileId and CreationTime elements with fixed values.

    FBX embeds CRCs derived from the file timestamp; until those are
    reverse-engineered, a constant time / id pair is written (workaround).
    Prints a warning when either element is missing from the root.
    """
    # Map element id -> (expected single prop type code, replacement writer).
    replacements = {
        b'FileId': (b'R'[0], lambda e: e.add_bytes(_FILE_ID)),
        b'CreationTime': (b'S'[0], lambda e: e.add_string(_TIME_ID)),
    }

    patched = 0
    for child in elem_root.elems:
        entry = replacements.get(child.id)
        if entry is None:
            continue
        expected_type, apply_replacement = entry
        assert(child.props_type[0] == expected_type)
        assert(len(child.props_type) == 1)
        child.props.clear()
        child.props_type.clear()

        apply_replacement(child)
        patched += 1
        if patched == 2:
            break

    if patched != 2:
        print("Missing fields!")
def write(fn, elem_root, version):
    """Serialize *elem_root* (the nameless root element) to *fn* as a
    binary FBX file of the given *version*.
    """
    assert(elem_root.id == b'')

    with open(fn, 'wb') as f:
        fw = f.write
        tell = f.tell

        fw(_HEAD_MAGIC)
        fw(pack('<I', version))

        # hack since we don't decode time.
        # ideally we would _not_ modify this data.
        _write_timedate_hack(elem_root)

        elem_root._calc_offsets_children(tell(), False)
        elem_root._write_children(fw, tell, False)

        fw(_FOOT_ID)
        fw(b'\x00' * 4)

        # Pad up to the next 16-byte boundary; when already aligned a full
        # 16 bytes of padding is emitted (values between 1 & 16 observed).
        pos = tell()
        fw(b'\0' * (16 - (pos % 16)))

        fw(pack('<I', version))

        # unknown magic (always the same)
        fw(b'\0' * 120)
        fw(b'\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b')