# SPDX-FileCopyrightText: 2013-2023 Blender Foundation

# SPDX-License-Identifier: GPL-2.0-or-later

# FBX 7.1.0 -> 7.4.0 loader for Blender

# Not totally pep8 compliant.
# pep8 import_fbx.py --ignore=E501,E123,E702,E125

if "bpy" in locals():
    import importlib
    if "parse_fbx" in locals():
        importlib.reload(parse_fbx)
    if "fbx_utils" in locals():
        importlib.reload(fbx_utils)

import bpy
from bpy.app.translations import pgettext_tip as tip_
from mathutils import Matrix, Euler, Vector, Quaternion

# Also imported in .fbx_utils, so importing here is unlikely to further affect Blender startup time.
import numpy as np

# -----
# Utils
from . import parse_fbx, fbx_utils

from .parse_fbx import (
    data_types,
    FBXElem,
)
from .fbx_utils import (
    PerfMon,
    units_blender_to_fbx_factor,
    units_convertor_iter,
    array_to_matrix4,
    similar_values,
    similar_values_iter,
    FBXImportSettings,
    vcos_transformed,
    nors_transformed,
    parray_as_ndarray,
    astype_view_signedness,
    MESH_ATTRIBUTE_MATERIAL_INDEX,
    MESH_ATTRIBUTE_POSITION,
    MESH_ATTRIBUTE_EDGE_VERTS,
    MESH_ATTRIBUTE_CORNER_VERT,
    MESH_ATTRIBUTE_SHARP_FACE,
    MESH_ATTRIBUTE_SHARP_EDGE,
    expand_shape_key_range,
)

LINEAR_INTERPOLATION_VALUE = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value

# global singleton, assign on execution
fbx_elem_nil = None

# Units converters...
convert_deg_to_rad_iter = units_convertor_iter("degree", "radian")

MAT_CONVERT_BONE = fbx_utils.MAT_CONVERT_BONE.inverted()
MAT_CONVERT_LIGHT = fbx_utils.MAT_CONVERT_LIGHT.inverted()
MAT_CONVERT_CAMERA = fbx_utils.MAT_CONVERT_CAMERA.inverted()
66 def validate_blend_names(name):
67 assert(type(name) == bytes)
68 # Blender typically does not accept names over 63 bytes...
69 if len(name) > 63:
70 import hashlib
71 h = hashlib.sha1(name).hexdigest()
72 n = 55
73 name_utf8 = name[:n].decode('utf-8', 'replace') + "_" + h[:7]
74 while len(name_utf8.encode()) > 63:
75 n -= 1
76 name_utf8 = name[:n].decode('utf-8', 'replace') + "_" + h[:7]
77 return name_utf8
78 else:
79 # We use 'replace' even though FBX 'specs' say it should always be utf8, see T53841.
80 return name.decode('utf-8', 'replace')
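
# Editorial note: illustrative sketch only, not called by the importer. It shows the behaviour of
# validate_blend_names() for a name longer than Blender's 63-byte limit; the input bytes are made up.
def _example_validate_blend_names():
    long_name = b"A" * 100
    short_name = validate_blend_names(long_name)
    # The result is the truncated name plus "_" and the first 7 hex digits of the SHA1 of the
    # full original name, shortened further if needed so that it always fits in 63 bytes.
    assert len(short_name.encode()) <= 63 and "_" in short_name
    return short_name
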
83 def elem_find_first(elem, id_search, default=None):
84 for fbx_item in elem.elems:
85 if fbx_item.id == id_search:
86 return fbx_item
87 return default
90 def elem_find_iter(elem, id_search):
91 for fbx_item in elem.elems:
92 if fbx_item.id == id_search:
93 yield fbx_item
96 def elem_find_first_string(elem, id_search):
97 fbx_item = elem_find_first(elem, id_search)
98 if fbx_item is not None and fbx_item.props: # Do not error on complete empty properties (see T45291).
99 assert(len(fbx_item.props) == 1)
100 assert(fbx_item.props_type[0] == data_types.STRING)
101 return fbx_item.props[0].decode('utf-8', 'replace')
102 return None
105 def elem_find_first_string_as_bytes(elem, id_search):
106 fbx_item = elem_find_first(elem, id_search)
107 if fbx_item is not None and fbx_item.props: # Do not error on complete empty properties (see T45291).
108 assert(len(fbx_item.props) == 1)
109 assert(fbx_item.props_type[0] == data_types.STRING)
110 return fbx_item.props[0] # Keep it as bytes as requested...
111 return None
114 def elem_find_first_bytes(elem, id_search, decode=True):
115 fbx_item = elem_find_first(elem, id_search)
116 if fbx_item is not None and fbx_item.props: # Do not error on complete empty properties (see T45291).
117 assert(len(fbx_item.props) == 1)
118 assert(fbx_item.props_type[0] == data_types.BYTES)
119 return fbx_item.props[0]
120 return None
def elem_repr(elem):
    return "%s: props[%d=%r], elems=(%r)" % (
        elem.id,
        len(elem.props),
        ", ".join([repr(p) for p in elem.props]),
        # elem.props_type,
        b", ".join([e.id for e in elem.elems]),
        )
133 def elem_split_name_class(elem):
134 assert(elem.props_type[-2] == data_types.STRING)
135 elem_name, elem_class = elem.props[-2].split(b'\x00\x01')
136 return elem_name, elem_class
139 def elem_name_ensure_class(elem, clss=...):
140 elem_name, elem_class = elem_split_name_class(elem)
141 if clss is not ...:
142 assert(elem_class == clss)
143 return validate_blend_names(elem_name)
146 def elem_name_ensure_classes(elem, clss=...):
147 elem_name, elem_class = elem_split_name_class(elem)
148 if clss is not ...:
149 assert(elem_class in clss)
150 return validate_blend_names(elem_name)
153 def elem_split_name_class_nodeattr(elem):
154 assert(elem.props_type[-2] == data_types.STRING)
155 elem_name, elem_class = elem.props[-2].split(b'\x00\x01')
156 assert(elem_class == b'NodeAttribute')
157 assert(elem.props_type[-1] == data_types.STRING)
158 elem_class = elem.props[-1]
159 return elem_name, elem_class
162 def elem_uuid(elem):
163 assert(elem.props_type[0] == data_types.INT64)
164 return elem.props[0]
167 def elem_prop_first(elem, default=None):
168 return elem.props[0] if (elem is not None) and elem.props else default
171 # ----
172 # Support for
173 # Properties70: { ... P:
174 # Custom properties ("user properties" in FBX) are ignored here and get handled separately (see #104773).
175 def elem_props_find_first(elem, elem_prop_id):
176 if elem is None:
177 # When properties are not found... Should never happen, but happens - as usual.
178 return None
179 # support for templates (tuple of elems)
180 if type(elem) is not FBXElem:
181 assert(type(elem) is tuple)
182 for e in elem:
183 result = elem_props_find_first(e, elem_prop_id)
184 if result is not None:
185 return result
186 assert(len(elem) > 0)
187 return None
189 for subelem in elem.elems:
190 assert(subelem.id == b'P')
191 # 'U' flag indicates that the property has been defined by the user.
192 if subelem.props[0] == elem_prop_id and b'U' not in subelem.props[3]:
193 return subelem
194 return None
197 def elem_props_get_color_rgb(elem, elem_prop_id, default=None):
198 elem_prop = elem_props_find_first(elem, elem_prop_id)
199 if elem_prop is not None:
200 assert(elem_prop.props[0] == elem_prop_id)
201 if elem_prop.props[1] == b'Color':
202 # FBX version 7300
203 assert(elem_prop.props[1] == b'Color')
204 assert(elem_prop.props[2] == b'')
205 else:
206 assert(elem_prop.props[1] == b'ColorRGB')
207 assert(elem_prop.props[2] == b'Color')
208 assert(elem_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
209 return elem_prop.props[4:7]
210 return default
213 def elem_props_get_vector_3d(elem, elem_prop_id, default=None):
214 elem_prop = elem_props_find_first(elem, elem_prop_id)
215 if elem_prop is not None:
216 assert(elem_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
217 return elem_prop.props[4:7]
218 return default
221 def elem_props_get_number(elem, elem_prop_id, default=None):
222 elem_prop = elem_props_find_first(elem, elem_prop_id)
223 if elem_prop is not None:
224 assert(elem_prop.props[0] == elem_prop_id)
225 if elem_prop.props[1] == b'double':
226 assert(elem_prop.props[1] == b'double')
227 assert(elem_prop.props[2] == b'Number')
228 else:
229 assert(elem_prop.props[1] == b'Number')
230 assert(elem_prop.props[2] == b'')
232 # we could allow other number types
233 assert(elem_prop.props_type[4] == data_types.FLOAT64)
235 return elem_prop.props[4]
236 return default
239 def elem_props_get_integer(elem, elem_prop_id, default=None):
240 elem_prop = elem_props_find_first(elem, elem_prop_id)
241 if elem_prop is not None:
242 assert(elem_prop.props[0] == elem_prop_id)
243 if elem_prop.props[1] == b'int':
244 assert(elem_prop.props[1] == b'int')
245 assert(elem_prop.props[2] == b'Integer')
246 elif elem_prop.props[1] == b'ULongLong':
247 assert(elem_prop.props[1] == b'ULongLong')
248 assert(elem_prop.props[2] == b'')
250 # we could allow other number types
251 assert(elem_prop.props_type[4] in {data_types.INT32, data_types.INT64})
253 return elem_prop.props[4]
254 return default
257 def elem_props_get_bool(elem, elem_prop_id, default=None):
258 elem_prop = elem_props_find_first(elem, elem_prop_id)
259 if elem_prop is not None:
260 assert(elem_prop.props[0] == elem_prop_id)
261 # b'Bool' with a capital seems to be used for animated property... go figure...
262 assert(elem_prop.props[1] in {b'bool', b'Bool'})
263 assert(elem_prop.props[2] == b'')
265 # we could allow other number types
266 assert(elem_prop.props_type[4] == data_types.INT32)
267 assert(elem_prop.props[4] in {0, 1})
269 return bool(elem_prop.props[4])
270 return default
273 def elem_props_get_enum(elem, elem_prop_id, default=None):
274 elem_prop = elem_props_find_first(elem, elem_prop_id)
275 if elem_prop is not None:
276 assert(elem_prop.props[0] == elem_prop_id)
277 assert(elem_prop.props[1] == b'enum')
278 assert(elem_prop.props[2] == b'')
279 assert(elem_prop.props[3] == b'')
281 # we could allow other number types
282 assert(elem_prop.props_type[4] == data_types.INT32)
284 return elem_prop.props[4]
285 return default
288 def elem_props_get_visibility(elem, elem_prop_id, default=None):
289 elem_prop = elem_props_find_first(elem, elem_prop_id)
290 if elem_prop is not None:
291 assert(elem_prop.props[0] == elem_prop_id)
292 assert(elem_prop.props[1] == b'Visibility')
293 assert(elem_prop.props[2] == b'')
295 # we could allow other number types
296 assert(elem_prop.props_type[4] == data_types.FLOAT64)
298 return elem_prop.props[4]
299 return default
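
# Editorial note: illustrative sketch only (hypothetical FBX element, made-up defaults), showing how
# the Properties70 getters above are typically combined when reading a model node.
def _example_read_model_properties(fbx_model_elem):
    fbx_props = elem_find_first(fbx_model_elem, b'Properties70')
    loc = elem_props_get_vector_3d(fbx_props, b'Lcl Translation', (0.0, 0.0, 0.0))
    rot_order = elem_props_get_enum(fbx_props, b'RotationOrder', 0)
    visibility = elem_props_get_visibility(fbx_props, b'Visibility', 1.0)
    return loc, rot_order, visibility
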
302 # ----------------------------------------------------------------------------
303 # Blender
305 # ------
306 # Object
307 from collections import namedtuple
FBXTransformData = namedtuple("FBXTransformData", (
    "loc", "geom_loc",
    "rot", "rot_ofs", "rot_piv", "pre_rot", "pst_rot", "rot_ord", "rot_alt_mat", "geom_rot",
    "sca", "sca_ofs", "sca_piv", "geom_sca",
))
317 def blen_read_custom_properties(fbx_obj, blen_obj, settings):
318 # There doesn't seem to be a way to put user properties into templates, so this only get the object properties:
319 fbx_obj_props = elem_find_first(fbx_obj, b'Properties70')
320 if fbx_obj_props:
321 for fbx_prop in fbx_obj_props.elems:
322 assert(fbx_prop.id == b'P')
324 if b'U' in fbx_prop.props[3]:
325 if fbx_prop.props[0] == b'UDP3DSMAX':
326 # Special case for 3DS Max user properties:
327 assert(fbx_prop.props[1] == b'KString')
328 assert(fbx_prop.props_type[4] == data_types.STRING)
329 items = fbx_prop.props[4].decode('utf-8', 'replace')
330 for item in items.split('\r\n'):
331 if item:
332 split_item = item.split('=', 1)
333 if len(split_item) != 2:
334 split_item = item.split(':', 1)
335 if len(split_item) != 2:
336 print("cannot parse UDP3DSMAX custom property '%s', ignoring..." % item)
337 else:
338 prop_name, prop_value = split_item
339 prop_name = validate_blend_names(prop_name.strip().encode('utf-8'))
340 blen_obj[prop_name] = prop_value.strip()
341 else:
342 prop_name = validate_blend_names(fbx_prop.props[0])
343 prop_type = fbx_prop.props[1]
344 if prop_type in {b'Vector', b'Vector3D', b'Color', b'ColorRGB'}:
345 assert(fbx_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
346 blen_obj[prop_name] = fbx_prop.props[4:7]
347 elif prop_type in {b'Vector4', b'ColorRGBA'}:
348 assert(fbx_prop.props_type[4:8] == bytes((data_types.FLOAT64,)) * 4)
349 blen_obj[prop_name] = fbx_prop.props[4:8]
350 elif prop_type == b'Vector2D':
351 assert(fbx_prop.props_type[4:6] == bytes((data_types.FLOAT64,)) * 2)
352 blen_obj[prop_name] = fbx_prop.props[4:6]
353 elif prop_type in {b'Integer', b'int'}:
354 assert(fbx_prop.props_type[4] == data_types.INT32)
355 blen_obj[prop_name] = fbx_prop.props[4]
356 elif prop_type == b'KString':
357 assert(fbx_prop.props_type[4] == data_types.STRING)
358 blen_obj[prop_name] = fbx_prop.props[4].decode('utf-8', 'replace')
359 elif prop_type in {b'Number', b'double', b'Double'}:
360 assert(fbx_prop.props_type[4] == data_types.FLOAT64)
361 blen_obj[prop_name] = fbx_prop.props[4]
362 elif prop_type in {b'Float', b'float'}:
363 assert(fbx_prop.props_type[4] == data_types.FLOAT32)
364 blen_obj[prop_name] = fbx_prop.props[4]
365 elif prop_type in {b'Bool', b'bool'}:
366 assert(fbx_prop.props_type[4] == data_types.INT32)
367 blen_obj[prop_name] = fbx_prop.props[4] != 0
368 elif prop_type in {b'Enum', b'enum'}:
369 assert(fbx_prop.props_type[4:6] == bytes((data_types.INT32, data_types.STRING)))
370 val = fbx_prop.props[4]
371 if settings.use_custom_props_enum_as_string and fbx_prop.props[5]:
372 enum_items = fbx_prop.props[5].decode('utf-8', 'replace').split('~')
373 if val >= 0 and val < len(enum_items):
374 blen_obj[prop_name] = enum_items[val]
375 else:
                                print("WARNING: User property '%s' has wrong enum value, skipped" % prop_name)
377 else:
378 blen_obj[prop_name] = val
379 else:
                        print("WARNING: User property type '%s' is not supported" % prop_type.decode('utf-8', 'replace'))
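
# Editorial note: illustrative sketch only (example string is made up), mirroring the UDP3DSMAX
# parsing above: each '\r\n'-separated line is either "name=value" or "name:value" and becomes one
# custom property.
def _example_udp3dsmax_split():
    items = "MaxHandle=42\r\nOwner: Layout Team"
    parsed = {}
    for item in items.split('\r\n'):
        split_item = item.split('=', 1)
        if len(split_item) != 2:
            split_item = item.split(':', 1)
        if len(split_item) == 2:
            parsed[split_item[0].strip()] = split_item[1].strip()
    return parsed  # {'MaxHandle': '42', 'Owner': 'Layout Team'}
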
383 def blen_read_object_transform_do(transform_data):
384 # This is a nightmare. FBX SDK uses Maya way to compute the transformation matrix of a node - utterly simple:
386 # WorldTransform = ParentWorldTransform @ T @ Roff @ Rp @ Rpre @ R @ Rpost-1 @ Rp-1 @ Soff @ Sp @ S @ Sp-1
388 # Where all those terms are 4 x 4 matrices that contain:
389 # WorldTransform: Transformation matrix of the node in global space.
390 # ParentWorldTransform: Transformation matrix of the parent node in global space.
391 # T: Translation
392 # Roff: Rotation offset
393 # Rp: Rotation pivot
394 # Rpre: Pre-rotation
395 # R: Rotation
396 # Rpost-1: Inverse of the post-rotation (FBX 2011 documentation incorrectly specifies this without inversion)
397 # Rp-1: Inverse of the rotation pivot
398 # Soff: Scaling offset
399 # Sp: Scaling pivot
400 # S: Scaling
401 # Sp-1: Inverse of the scaling pivot
403 # But it was still too simple, and FBX notion of compatibility is... quite specific. So we also have to
404 # support 3DSMax way:
406 # WorldTransform = ParentWorldTransform @ T @ R @ S @ OT @ OR @ OS
408 # Where all those terms are 4 x 4 matrices that contain:
409 # WorldTransform: Transformation matrix of the node in global space
410 # ParentWorldTransform: Transformation matrix of the parent node in global space
411 # T: Translation
412 # R: Rotation
413 # S: Scaling
414 # OT: Geometric transform translation
415 # OR: Geometric transform rotation
416 # OS: Geometric transform scale
418 # Notes:
419 # Geometric transformations ***are not inherited***: ParentWorldTransform does not contain the OT, OR, OS
420 # of WorldTransform's parent node.
421 # The R matrix takes into account the rotation order. Other rotation matrices are always 'XYZ' order.
423 # Taken from https://help.autodesk.com/view/FBX/2020/ENU/
424 # ?guid=FBX_Developer_Help_nodes_and_scene_graph_fbx_nodes_computing_transformation_matrix_html
426 # translation
427 lcl_translation = Matrix.Translation(transform_data.loc)
428 geom_loc = Matrix.Translation(transform_data.geom_loc)
430 # rotation
431 to_rot = lambda rot, rot_ord: Euler(convert_deg_to_rad_iter(rot), rot_ord).to_matrix().to_4x4()
432 lcl_rot = to_rot(transform_data.rot, transform_data.rot_ord) @ transform_data.rot_alt_mat
433 pre_rot = to_rot(transform_data.pre_rot, 'XYZ')
434 pst_rot = to_rot(transform_data.pst_rot, 'XYZ')
435 geom_rot = to_rot(transform_data.geom_rot, 'XYZ')
437 rot_ofs = Matrix.Translation(transform_data.rot_ofs)
438 rot_piv = Matrix.Translation(transform_data.rot_piv)
439 sca_ofs = Matrix.Translation(transform_data.sca_ofs)
440 sca_piv = Matrix.Translation(transform_data.sca_piv)
    # scale
    lcl_scale = Matrix()
    lcl_scale[0][0], lcl_scale[1][1], lcl_scale[2][2] = transform_data.sca
    geom_scale = Matrix()
    geom_scale[0][0], geom_scale[1][1], geom_scale[2][2] = transform_data.geom_sca

    base_mat = (
        lcl_translation @
        rot_ofs @
        rot_piv @
        pre_rot @
        lcl_rot @
        pst_rot.inverted_safe() @
        rot_piv.inverted_safe() @
        sca_ofs @
        sca_piv @
        lcl_scale @
        sca_piv.inverted_safe()
    )
    geom_mat = geom_loc @ geom_rot @ geom_scale
    # We return mat without 'geometric transforms' too, because it is to be used for children, sigh...
    return (base_mat @ geom_mat, base_mat, geom_mat)
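
# Editorial note: illustrative sketch only (made-up values, never called by the importer), showing
# how blen_read_object_transform_do() is fed. Rotations are given in degrees, as in FBX, and are
# converted to radians inside the function.
def _example_transform_do():
    identity = Matrix()
    tdata = FBXTransformData(
        loc=(1.0, 2.0, 3.0), geom_loc=(0.0, 0.0, 0.0),
        rot=(0.0, 0.0, 90.0), rot_ofs=(0.0, 0.0, 0.0), rot_piv=(0.0, 0.0, 0.0),
        pre_rot=(0.0, 0.0, 0.0), pst_rot=(0.0, 0.0, 0.0), rot_ord='XYZ',
        rot_alt_mat=identity, geom_rot=(0.0, 0.0, 0.0),
        sca=(1.0, 1.0, 1.0), sca_ofs=(0.0, 0.0, 0.0), sca_piv=(0.0, 0.0, 0.0),
        geom_sca=(1.0, 1.0, 1.0))
    mat_with_geom, base_mat, geom_mat = blen_read_object_transform_do(tdata)
    # With all offsets/pivots at zero, base_mat reduces to T @ R @ S, so its translation is (1, 2, 3).
    return base_mat.to_translation()
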
466 # XXX This might be weak, now that we can add vgroups from both bones and shapes, name collisions become
467 # more likely, will have to make this more robust!!!
468 def add_vgroup_to_objects(vg_indices, vg_weights, vg_name, objects):
469 assert(len(vg_indices) == len(vg_weights))
470 if vg_indices:
471 for obj in objects:
472 # We replace/override here...
473 vg = obj.vertex_groups.get(vg_name)
474 if vg is None:
475 vg = obj.vertex_groups.new(name=vg_name)
476 vg_add = vg.add
477 for i, w in zip(vg_indices, vg_weights):
478 vg_add((i,), w, 'REPLACE')
481 def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_prepost_rot):
482 # This is quite involved, 'fbxRNode.cpp' from openscenegraph used as a reference
483 const_vector_zero_3d = 0.0, 0.0, 0.0
484 const_vector_one_3d = 1.0, 1.0, 1.0
486 loc = list(elem_props_get_vector_3d(fbx_props, b'Lcl Translation', const_vector_zero_3d))
487 rot = list(elem_props_get_vector_3d(fbx_props, b'Lcl Rotation', const_vector_zero_3d))
488 sca = list(elem_props_get_vector_3d(fbx_props, b'Lcl Scaling', const_vector_one_3d))
490 geom_loc = list(elem_props_get_vector_3d(fbx_props, b'GeometricTranslation', const_vector_zero_3d))
491 geom_rot = list(elem_props_get_vector_3d(fbx_props, b'GeometricRotation', const_vector_zero_3d))
492 geom_sca = list(elem_props_get_vector_3d(fbx_props, b'GeometricScaling', const_vector_one_3d))
494 rot_ofs = elem_props_get_vector_3d(fbx_props, b'RotationOffset', const_vector_zero_3d)
495 rot_piv = elem_props_get_vector_3d(fbx_props, b'RotationPivot', const_vector_zero_3d)
496 sca_ofs = elem_props_get_vector_3d(fbx_props, b'ScalingOffset', const_vector_zero_3d)
497 sca_piv = elem_props_get_vector_3d(fbx_props, b'ScalingPivot', const_vector_zero_3d)
499 is_rot_act = elem_props_get_bool(fbx_props, b'RotationActive', False)
501 if is_rot_act:
502 if use_prepost_rot:
503 pre_rot = elem_props_get_vector_3d(fbx_props, b'PreRotation', const_vector_zero_3d)
504 pst_rot = elem_props_get_vector_3d(fbx_props, b'PostRotation', const_vector_zero_3d)
505 else:
506 pre_rot = const_vector_zero_3d
507 pst_rot = const_vector_zero_3d
508 rot_ord = {
509 0: 'XYZ',
510 1: 'XZY',
511 2: 'YZX',
512 3: 'YXZ',
513 4: 'ZXY',
514 5: 'ZYX',
515 6: 'XYZ', # XXX eSphericXYZ, not really supported...
516 }.get(elem_props_get_enum(fbx_props, b'RotationOrder', 0))
517 else:
518 pre_rot = const_vector_zero_3d
519 pst_rot = const_vector_zero_3d
520 rot_ord = 'XYZ'
522 return FBXTransformData(loc, geom_loc,
523 rot, rot_ofs, rot_piv, pre_rot, pst_rot, rot_ord, rot_alt_mat, geom_rot,
524 sca, sca_ofs, sca_piv, geom_sca)
527 # ---------
528 # Animation
529 def _blen_read_object_transform_do_anim(transform_data, lcl_translation_mat, lcl_rot_euler, lcl_scale_mat,
530 extra_pre_matrix, extra_post_matrix):
531 """Specialized version of blen_read_object_transform_do for animation that pre-calculates the non-animated matrices
532 and returns a function that calculates (base_mat @ geom_mat). See the comments in blen_read_object_transform_do for
533 a full description of what this function is doing.
535 The lcl_translation_mat, lcl_rot_euler and lcl_scale_mat arguments should have their values updated each frame and
536 then calling the returned function will calculate the matrix for the current frame.
538 extra_pre_matrix and extra_post_matrix are any extra matrices to multiply first/last."""
539 # Translation
540 geom_loc = Matrix.Translation(transform_data.geom_loc)
542 # Rotation
543 def to_rot_xyz(rot):
544 # All the rotations that can be precalculated have a fixed XYZ order.
545 return Euler(convert_deg_to_rad_iter(rot), 'XYZ').to_matrix().to_4x4()
546 pre_rot = to_rot_xyz(transform_data.pre_rot)
547 pst_rot_inv = to_rot_xyz(transform_data.pst_rot).inverted_safe()
548 geom_rot = to_rot_xyz(transform_data.geom_rot)
550 # Offsets and pivots
551 rot_ofs = Matrix.Translation(transform_data.rot_ofs)
552 rot_piv = Matrix.Translation(transform_data.rot_piv)
553 rot_piv_inv = rot_piv.inverted_safe()
554 sca_ofs = Matrix.Translation(transform_data.sca_ofs)
555 sca_piv = Matrix.Translation(transform_data.sca_piv)
556 sca_piv_inv = sca_piv.inverted_safe()
558 # Scale
559 geom_scale = Matrix()
560 geom_scale[0][0], geom_scale[1][1], geom_scale[2][2] = transform_data.geom_sca
562 # Some matrices can be combined in advance, using the associative property of matrix multiplication, so that less
563 # matrix multiplication is required each frame.
564 geom_mat = geom_loc @ geom_rot @ geom_scale
565 post_lcl_translation = rot_ofs @ rot_piv @ pre_rot
566 post_lcl_rotation = transform_data.rot_alt_mat @ pst_rot_inv @ rot_piv_inv @ sca_ofs @ sca_piv
567 post_lcl_scaling = sca_piv_inv @ geom_mat @ extra_post_matrix
569 # Get the bound to_matrix method to avoid re-binding it on each call.
570 lcl_rot_euler_to_matrix_3x3 = lcl_rot_euler.to_matrix
571 # Get the unbound Matrix.to_4x4 method to avoid having to look it up again on each call.
572 matrix_to_4x4 = Matrix.to_4x4
574 if extra_pre_matrix == Matrix():
575 # There aren't any other matrices that must be multiplied before lcl_translation_mat that extra_pre_matrix can
576 # be combined with, so skip extra_pre_matrix when it's the identity matrix.
577 return lambda: (lcl_translation_mat @
578 post_lcl_translation @
579 matrix_to_4x4(lcl_rot_euler_to_matrix_3x3()) @
580 post_lcl_rotation @
581 lcl_scale_mat @
582 post_lcl_scaling)
583 else:
584 return lambda: (extra_pre_matrix @
585 lcl_translation_mat @
586 post_lcl_translation @
587 matrix_to_4x4(lcl_rot_euler_to_matrix_3x3()) @
588 post_lcl_rotation @
589 lcl_scale_mat @
590 post_lcl_scaling)
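
# Editorial note: illustrative sketch only (not called by the importer), showing the intended usage
# pattern of _blen_read_object_transform_do_anim(): the lcl_* objects are mutated in place for each
# frame and the returned callable picks up the new values. `transform_data` is assumed to be an
# FBXTransformData as built by blen_read_object_transform_preprocess().
def _example_anim_matrix_update(transform_data):
    lcl_translation_mat = Matrix.Translation(transform_data.loc)
    lcl_rotation_eul = Euler(transform_data.rot, transform_data.rot_ord)
    lcl_scaling_mat = Matrix()
    calc_mat = _blen_read_object_transform_do_anim(transform_data,
                                                   lcl_translation_mat, lcl_rotation_eul, lcl_scaling_mat,
                                                   Matrix(), Matrix())
    # Pretend an animation curve set a new 'Lcl Translation' X value for the current frame.
    lcl_translation_mat.translation[0] = 5.0
    return calc_mat()
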
593 def _transformation_curves_gen(item, values_arrays, channel_keys):
594 """Yields flattened location/rotation/scaling values for imported PoseBone/Object Lcl Translation/Rotation/Scaling
595 animation curve values.
597 The value arrays must have the same lengths, where each index of each array corresponds to a single keyframe.
599 Each value array must have a corresponding channel key tuple that identifies the fbx property
600 (b'Lcl Translation'/b'Lcl Rotation'/b'Lcl Scaling') and the channel (x/y/z as 0/1/2) of that property."""
601 from operator import setitem
602 from functools import partial
604 if item.is_bone:
605 bl_obj = item.bl_obj.pose.bones[item.bl_bone]
606 else:
607 bl_obj = item.bl_obj
609 rot_mode = bl_obj.rotation_mode
610 transform_data = item.fbx_transform_data
611 rot_eul_prev = bl_obj.rotation_euler.copy()
612 rot_quat_prev = bl_obj.rotation_quaternion.copy()
614 # Pre-compute combined pre-matrix
615 # Remove that rest pose matrix from current matrix (also in parent space) by computing the inverted local rest
616 # matrix of the bone, if relevant.
617 combined_pre_matrix = item.get_bind_matrix().inverted_safe() if item.is_bone else Matrix()
618 # item.pre_matrix will contain any correction for a parent's correction matrix or the global matrix
619 if item.pre_matrix:
620 combined_pre_matrix @= item.pre_matrix
622 # Pre-compute combined post-matrix
623 # Compensate for changes in the local matrix during processing
624 combined_post_matrix = item.anim_compensation_matrix.copy() if item.anim_compensation_matrix else Matrix()
625 # item.post_matrix will contain any correction for lights, camera and bone orientation
626 if item.post_matrix:
627 combined_post_matrix @= item.post_matrix
629 # Create matrices/euler from the initial transformation values of this item.
630 # These variables will be updated in-place as we iterate through each frame.
631 lcl_translation_mat = Matrix.Translation(transform_data.loc)
632 lcl_rotation_eul = Euler(transform_data.rot, transform_data.rot_ord)
633 lcl_scaling_mat = Matrix()
634 lcl_scaling_mat[0][0], lcl_scaling_mat[1][1], lcl_scaling_mat[2][2] = transform_data.sca
636 # Create setters into lcl_translation_mat, lcl_rotation_eul and lcl_scaling_mat for each values_array and convert
637 # any rotation values into radians.
638 lcl_setters = []
639 values_arrays_converted = []
640 for values_array, (fbx_prop, channel) in zip(values_arrays, channel_keys):
641 if fbx_prop == b'Lcl Translation':
642 # lcl_translation_mat.translation[channel] = value
643 setter = partial(setitem, lcl_translation_mat.translation, channel)
644 elif fbx_prop == b'Lcl Rotation':
645 # FBX rotations are in degrees, but Blender uses radians, so convert all rotation values in advance.
646 values_array = np.deg2rad(values_array)
647 # lcl_rotation_eul[channel] = value
648 setter = partial(setitem, lcl_rotation_eul, channel)
649 else:
650 assert(fbx_prop == b'Lcl Scaling')
651 # lcl_scaling_mat[channel][channel] = value
652 setter = partial(setitem, lcl_scaling_mat[channel], channel)
653 lcl_setters.append(setter)
654 values_arrays_converted.append(values_array)
656 # Create an iterator that gets one value from each array. Each iterated tuple will be all the imported
657 # Lcl Translation/Lcl Rotation/Lcl Scaling values for a single frame, in that order.
658 # Note that an FBX animation does not have to animate all the channels, so only the animated channels of each
659 # property will be present.
660 # .data, the memoryview of an np.ndarray, is faster to iterate than the ndarray itself.
661 frame_values_it = zip(*(arr.data for arr in values_arrays_converted))
663 # Getting the unbound methods in advance avoids having to look them up again on each call within the loop.
664 mat_decompose = Matrix.decompose
665 quat_to_axis_angle = Quaternion.to_axis_angle
666 quat_to_euler = Quaternion.to_euler
667 quat_dot = Quaternion.dot
669 calc_mat = _blen_read_object_transform_do_anim(transform_data,
670 lcl_translation_mat, lcl_rotation_eul, lcl_scaling_mat,
671 combined_pre_matrix, combined_post_matrix)
673 # Iterate through the values for each frame.
674 for frame_values in frame_values_it:
675 # Set each value into its corresponding lcl matrix/euler.
676 for lcl_setter, value in zip(lcl_setters, frame_values):
677 lcl_setter(value)
679 # Calculate the updated matrix for this frame.
680 mat = calc_mat()
682 # Now we have a virtual matrix of transform from AnimCurves, we can yield keyframe values!
683 loc, rot, sca = mat_decompose(mat)
684 if rot_mode == 'QUATERNION':
685 if quat_dot(rot_quat_prev, rot) < 0.0:
686 rot = -rot
687 rot_quat_prev = rot
688 elif rot_mode == 'AXIS_ANGLE':
689 vec, ang = quat_to_axis_angle(rot)
690 rot = ang, vec.x, vec.y, vec.z
691 else: # Euler
692 rot = quat_to_euler(rot, rot_mode, rot_eul_prev)
693 rot_eul_prev = rot
695 # Yield order matches the order that the location/rotation/scale FCurves are created in.
696 yield from loc
697 yield from rot
698 yield from sca
701 def _combine_curve_keyframe_times(times_and_values_tuples, initial_values):
702 """Combine multiple parsed animation curves, that affect different channels, such that every animation curve
703 contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each
704 curve.
706 Currently, linear interpolation is assumed, but FBX does store how keyframes should be interpolated, so correctly
707 interpolating the keyframe values is a TODO."""
708 if len(times_and_values_tuples) == 1:
709 # Nothing to do when there is only a single curve.
710 times, values = times_and_values_tuples[0]
711 return times, [values]
713 all_times = [t[0] for t in times_and_values_tuples]
715 # Get the combined sorted unique times of all the curves.
716 sorted_all_times = np.unique(np.concatenate(all_times))
718 values_arrays = []
719 for (times, values), initial_value in zip(times_and_values_tuples, initial_values):
720 if sorted_all_times.size == times.size:
721 # `sorted_all_times` will always contain all values in `times` and both `times` and `sorted_all_times` must
722 # be strictly increasing, so if both arrays have the same size, they must be identical.
723 extended_values = values
724 else:
725 # For now, linear interpolation is assumed. NumPy conveniently has a fast C-compiled function for this.
726 # Efficiently implementing other FBX supported interpolation will most likely be much more complicated.
727 extended_values = np.interp(sorted_all_times, times, values, left=initial_value)
728 values_arrays.append(extended_values)
729 return sorted_all_times, values_arrays
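
# Editorial note: tiny numeric sketch only (made-up keyframes), showing what the combination above
# produces: curves with different keyframe times are resampled onto the union of all times, with
# missing values linearly interpolated.
def _example_combine_curve_keyframe_times():
    curve_a = (np.array([0, 10]), np.array([0.0, 1.0]))           # (times, values)
    curve_b = (np.array([0, 5, 10]), np.array([2.0, 3.0, 4.0]))
    times, (values_a, values_b) = _combine_curve_keyframe_times([curve_a, curve_b], [0.0, 2.0])
    # times    -> [0, 5, 10]
    # values_a -> [0.0, 0.5, 1.0]  (the value at t=5 is linearly interpolated)
    # values_b -> [2.0, 3.0, 4.0]  (already keyed at every combined time)
    return times, values_a, values_b
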
732 def blen_read_invalid_animation_curve(key_times, key_values):
733 """FBX will parse animation curves even when their keyframe times are invalid (not strictly increasing). It's
734 unclear exactly how FBX handles invalid curves, but this matches in some cases and is how the FBX IO addon has been
735 handling invalid keyframe times for a long time.
737 Notably, this function will also correctly parse valid animation curves, though is much slower than the trivial,
738 regular way.
740 The returned keyframe times are guaranteed to be strictly increasing."""
741 sorted_unique_times = np.unique(key_times)
743 # Unsure if this can be vectorized with numpy, so using iteration for now.
744 def index_gen():
745 idx = 0
746 key_times_data = key_times.data
747 key_times_len = len(key_times)
748 # Iterating .data, the memoryview of the array, is faster than iterating the array directly.
749 for curr_fbxktime in sorted_unique_times.data:
750 if key_times_data[idx] < curr_fbxktime:
751 if idx >= 0:
752 idx += 1
753 if idx >= key_times_len:
754 # We have reached our last element for this curve, stay on it from now on...
755 idx = -1
756 yield idx
758 indices = np.fromiter(index_gen(), dtype=np.int64, count=len(sorted_unique_times))
759 indexed_times = key_times[indices]
760 indexed_values = key_values[indices]
762 # Linear interpolate the value for each time in sorted_unique_times according to the times and values at each index
763 # and the previous index.
764 interpolated_values = np.empty_like(indexed_values)
766 # Where the index is 0, there's no previous value to interpolate from, so we set the value without interpolating.
767 # Because the indices are in increasing order, all zeroes must be at the start, so we can find the index of the last
768 # zero and use that to index with a slice instead of a boolean array for performance.
769 # Equivalent to, but as a slice:
770 # idx_zero_mask = indices == 0
771 # idx_nonzero_mask = ~idx_zero_mask
772 first_nonzero_idx = np.searchsorted(indices, 0, side='right')
773 idx_zero_slice = slice(0, first_nonzero_idx) # [:first_nonzero_idx]
774 idx_nonzero_slice = slice(first_nonzero_idx, None) # [first_nonzero_idx:]
776 interpolated_values[idx_zero_slice] = indexed_values[idx_zero_slice]
778 indexed_times_nonzero_idx = indexed_times[idx_nonzero_slice]
779 indexed_values_nonzero_idx = indexed_values[idx_nonzero_slice]
780 indices_nonzero = indices[idx_nonzero_slice]
782 prev_indices_nonzero = indices_nonzero - 1
783 prev_indexed_times_nonzero_idx = key_times[prev_indices_nonzero]
784 prev_indexed_values_nonzero_idx = key_values[prev_indices_nonzero]
786 ifac_a = sorted_unique_times[idx_nonzero_slice] - prev_indexed_times_nonzero_idx
787 ifac_b = indexed_times_nonzero_idx - prev_indexed_times_nonzero_idx
788 # If key_times contains two (or more) duplicate times in a row, then values in `ifac_b` can be zero which would
789 # result in division by zero.
790 # Use the `np.errstate` context manager to suppress printing the RuntimeWarning to the system console.
791 with np.errstate(divide='ignore'):
792 ifac = ifac_a / ifac_b
793 interpolated_values[idx_nonzero_slice] = ((indexed_values_nonzero_idx - prev_indexed_values_nonzero_idx) * ifac
794 + prev_indexed_values_nonzero_idx)
796 # If the time to interpolate at is larger than the time in indexed_times, then the value has been extrapolated.
797 # Extrapolated values are excluded.
798 valid_mask = indexed_times >= sorted_unique_times
800 key_times = sorted_unique_times[valid_mask]
801 key_values = interpolated_values[valid_mask]
803 return key_times, key_values
806 def _convert_fbx_time_to_blender_time(key_times, blen_start_offset, fbx_start_offset, fps):
807 from .fbx_utils import FBX_KTIME
808 timefac = fps / FBX_KTIME
810 # Convert from FBX timing to Blender timing.
811 # Cannot subtract in-place because key_times could be read directly from FBX and could be used by multiple Actions.
812 key_times = key_times - fbx_start_offset
813 # FBX times are integers and timefac is a Python float, so the new array will be a np.float64 array.
814 key_times = key_times * timefac
816 key_times += blen_start_offset
818 return key_times
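
# Editorial note: illustrative sketch only (made-up offsets), showing the conversion above: FBX key
# times are integer 'ktime' ticks, FBX_KTIME of them per second, so one second of FBX time advances
# the Blender time by `fps` frames.
def _example_fbx_time_to_blender_time():
    from .fbx_utils import FBX_KTIME
    key_times = np.array([0, FBX_KTIME])  # t = 0s and t = 1s in FBX ticks
    return _convert_fbx_time_to_blender_time(key_times, blen_start_offset=1.0, fbx_start_offset=0, fps=24.0)
    # -> array([ 1., 25.]), i.e. frame 1 plus 24 frames
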
821 def blen_read_animation_curve(fbx_curve):
822 """Read an animation curve from FBX data.
824 The parsed keyframe times are guaranteed to be strictly increasing."""
825 key_times = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyTime')))
826 key_values = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyValueFloat')))
828 assert(len(key_values) == len(key_times))
830 # The FBX SDK specifies that only one key per time is allowed and that the keys are sorted in time order.
831 # https://help.autodesk.com/view/FBX/2020/ENU/?guid=FBX_Developer_Help_cpp_ref_class_fbx_anim_curve_html
832 all_times_strictly_increasing = (key_times[1:] > key_times[:-1]).all()
834 if all_times_strictly_increasing:
835 return key_times, key_values
836 else:
837 # FBX will still read animation curves even if they are invalid.
838 return blen_read_invalid_animation_curve(key_times, key_values)
841 def blen_store_keyframes(fbx_key_times, blen_fcurve, key_values, blen_start_offset, fps, fbx_start_offset=0):
842 """Set all keyframe times and values for a newly created FCurve.
843 Linear interpolation is currently assumed.
845 This is a convenience function for calling blen_store_keyframes_multi with only a single fcurve and values array."""
846 blen_store_keyframes_multi(fbx_key_times, [(blen_fcurve, key_values)], blen_start_offset, fps, fbx_start_offset)
849 def blen_store_keyframes_multi(fbx_key_times, fcurve_and_key_values_pairs, blen_start_offset, fps, fbx_start_offset=0):
850 """Set all keyframe times and values for multiple pairs of newly created FCurves and keyframe values arrays, where
851 each pair has the same keyframe times.
852 Linear interpolation is currently assumed."""
853 bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, blen_start_offset, fbx_start_offset, fps)
854 num_keys = len(bl_key_times)
856 # Compatible with C float type
857 bl_keyframe_dtype = np.single
858 # Compatible with C char type
859 bl_enum_dtype = np.byte
861 # The keyframe_points 'co' are accessed as flattened pairs of (time, value).
862 # The key times are the same for each (blen_fcurve, key_values) pair, so only the values need to be updated for each
863 # array of values.
864 keyframe_points_co = np.empty(len(bl_key_times) * 2, dtype=bl_keyframe_dtype)
865 # Even indices are times.
866 keyframe_points_co[0::2] = bl_key_times
868 interpolation_array = np.full(num_keys, LINEAR_INTERPOLATION_VALUE, dtype=bl_enum_dtype)
870 for blen_fcurve, key_values in fcurve_and_key_values_pairs:
871 # The fcurve must be newly created and thus have no keyframe_points.
872 assert(len(blen_fcurve.keyframe_points) == 0)
874 # Odd indices are values.
875 keyframe_points_co[1::2] = key_values
877 # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point.
878 blen_fcurve.keyframe_points.add(num_keys)
879 blen_fcurve.keyframe_points.foreach_set('co', keyframe_points_co)
880 blen_fcurve.keyframe_points.foreach_set('interpolation', interpolation_array)
882 # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now.
883 blen_fcurve.update()
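
# Editorial note: illustrative sketch only (no real FCurve involved, made-up keys), showing the
# flattened 'co' layout used above: keyframe_points store (time, value) pairs, so even indices of
# the flat buffer are times and odd indices are values.
def _example_keyframe_co_layout():
    bl_key_times = np.array([1.0, 2.0, 3.0])
    key_values = np.array([0.5, 0.25, 0.125])
    keyframe_points_co = np.empty(len(bl_key_times) * 2, dtype=np.single)
    keyframe_points_co[0::2] = bl_key_times
    keyframe_points_co[1::2] = key_values
    return keyframe_points_co  # [1.0, 0.5, 2.0, 0.25, 3.0, 0.125]
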
def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, global_scale, shape_key_deforms):
    """
    'Bake' loc/rot/scale into the action,
    taking any pre_ and post_ matrix into account to transform from fbx into blender space.
    """
    from bpy.types import Object, PoseBone, ShapeKey, Material, Camera
893 fbx_curves: dict[bytes, dict[int, FBXElem]] = {}
894 for curves, fbxprop in cnodes.values():
895 channels_dict = fbx_curves.setdefault(fbxprop, {})
896 for (fbx_acdata, _blen_data), channel in curves.values():
897 if channel in channels_dict:
898 # Ignore extra curves when one has already been found for this channel because FBX's default animation
899 # system implementation only uses the first curve assigned to a channel.
900 # Additional curves per channel are allowed by the FBX specification, but the handling of these curves
901 # is considered the responsibility of the application that created them. Note that each curve node is
902 # expected to have a unique set of channels, so these additional curves with the same channel would have
903 # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode.
904 continue
905 channels_dict[channel] = fbx_acdata
907 # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0).
908 if len(fbx_curves) == 0:
909 return
911 if isinstance(item, Material):
912 grpname = item.name
913 props = [("diffuse_color", 3, grpname or "Diffuse Color")]
914 elif isinstance(item, ShapeKey):
915 props = [(item.path_from_id("value"), 1, "Key")]
916 elif isinstance(item, Camera):
917 props = [(item.path_from_id("lens"), 1, "Camera"), (item.dof.path_from_id("focus_distance"), 1, "Camera")]
918 else: # Object or PoseBone:
919 if item.is_bone:
920 bl_obj = item.bl_obj.pose.bones[item.bl_bone]
921 else:
922 bl_obj = item.bl_obj
924 # We want to create actions for objects, but for bones we 'reuse' armatures' actions!
925 grpname = bl_obj.name
927 # Since we might get other channels animated in the end, due to all FBX transform magic,
928 # we need to add curves for whole loc/rot/scale in any case.
929 props = [(bl_obj.path_from_id("location"), 3, grpname or "Location"),
930 None,
931 (bl_obj.path_from_id("scale"), 3, grpname or "Scale")]
932 rot_mode = bl_obj.rotation_mode
933 if rot_mode == 'QUATERNION':
934 props[1] = (bl_obj.path_from_id("rotation_quaternion"), 4, grpname or "Quaternion Rotation")
935 elif rot_mode == 'AXIS_ANGLE':
936 props[1] = (bl_obj.path_from_id("rotation_axis_angle"), 4, grpname or "Axis Angle Rotation")
937 else: # Euler
938 props[1] = (bl_obj.path_from_id("rotation_euler"), 3, grpname or "Euler Rotation")
940 blen_curves = [action.fcurves.new(prop, index=channel, action_group=grpname)
941 for prop, nbr_channels, grpname in props for channel in range(nbr_channels)]
943 if isinstance(item, Material):
944 for fbxprop, channel_to_curve in fbx_curves.items():
945 assert(fbxprop == b'DiffuseColor')
946 for channel, curve in channel_to_curve.items():
947 assert(channel in {0, 1, 2})
948 blen_curve = blen_curves[channel]
949 fbx_key_times, values = blen_read_animation_curve(curve)
950 blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps)
952 elif isinstance(item, ShapeKey):
953 deform_values = shape_key_deforms.setdefault(item, [])
954 for fbxprop, channel_to_curve in fbx_curves.items():
955 assert(fbxprop == b'DeformPercent')
956 for channel, curve in channel_to_curve.items():
957 assert(channel == 0)
958 blen_curve = blen_curves[channel]
960 fbx_key_times, values = blen_read_animation_curve(curve)
961 # A fully activated shape key in FBX DeformPercent is 100.0 whereas it is 1.0 in Blender.
962 values = values / 100.0
963 blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps)
965 # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded
966 # if necessary after reading all animations.
967 deform_values.append(values.min())
968 deform_values.append(values.max())
970 elif isinstance(item, Camera):
971 for fbxprop, channel_to_curve in fbx_curves.items():
972 is_focus_distance = fbxprop == b'FocusDistance'
973 assert(fbxprop == b'FocalLength' or is_focus_distance)
974 for channel, curve in channel_to_curve.items():
975 assert(channel == 0)
976 # The indices are determined by the creation of the `props` list above.
977 blen_curve = blen_curves[1 if is_focus_distance else 0]
979 fbx_key_times, values = blen_read_animation_curve(curve)
980 if is_focus_distance:
981 # Remap the imported values from FBX to Blender.
982 values = values / 1000.0
983 values *= global_scale
984 blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps)
986 else: # Object or PoseBone:
987 transform_data = item.fbx_transform_data
989 # Each transformation curve needs to have keyframes at the times of every other transformation curve
990 # (interpolating missing values), so that we can construct a matrix at every keyframe.
        transform_prop_to_attr = {
            b'Lcl Translation': transform_data.loc,
            b'Lcl Rotation': transform_data.rot,
            b'Lcl Scaling': transform_data.sca,
        }

        times_and_values_tuples = []
998 initial_values = []
999 channel_keys = []
1000 for fbxprop, channel_to_curve in fbx_curves.items():
1001 if fbxprop not in transform_prop_to_attr:
1002 # Currently, we only care about transformation curves.
1003 continue
1004 for channel, curve in channel_to_curve.items():
1005 assert(channel in {0, 1, 2})
1006 fbx_key_times, values = blen_read_animation_curve(curve)
1008 channel_keys.append((fbxprop, channel))
1010 initial_values.append(transform_prop_to_attr[fbxprop][channel])
1012 times_and_values_tuples.append((fbx_key_times, values))
1013 if not times_and_values_tuples:
1014 # If `times_and_values_tuples` is empty, all the imported animation curves are for properties other than
1015 # transformation (e.g. animated custom properties), so there is nothing to do until support for those other
1016 # properties is added.
1017 return
1019 # Combine the keyframe times of all the transformation curves so that each curve has a value at every time.
1020 combined_fbx_times, values_arrays = _combine_curve_keyframe_times(times_and_values_tuples, initial_values)
1022 # Convert from FBX Lcl Translation/Lcl Rotation/Lcl Scaling to the Blender location/rotation/scaling properties
1023 # of this Object/PoseBone.
1024 # The number of fcurves for the Blender properties varies depending on the rotation mode.
1025 num_loc_channels = 3
1026 num_rot_channels = 4 if rot_mode in {'QUATERNION', 'AXIS_ANGLE'} else 3 # Variations of EULER are all 3
1027 num_sca_channels = 3
1028 num_channels = num_loc_channels + num_rot_channels + num_sca_channels
1029 num_frames = len(combined_fbx_times)
1030 full_length = num_channels * num_frames
1032 # Do the conversion.
1033 flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys)
1034 flattened_channel_values = np.fromiter(flattened_channel_values_gen, dtype=np.single, count=full_length)
1036 # Reshape to one row per frame and then view the transpose so that each row corresponds to a single channel.
1037 # e.g.
1038 # loc_channels = channel_values[:num_loc_channels]
1039 # rot_channels = channel_values[num_loc_channels:num_loc_channels + num_rot_channels]
1040 # sca_channels = channel_values[num_loc_channels + num_rot_channels:]
1041 channel_values = flattened_channel_values.reshape(num_frames, num_channels).T
1043 # Each channel has the same keyframe times, so the combined times can be passed once along with all the curves
1044 # and values arrays.
1045 blen_store_keyframes_multi(combined_fbx_times, zip(blen_curves, channel_values), anim_offset, fps)
def blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, anim_offset, global_scale):
    """
    Recreate an action per stack/layer/object combinations.
    Only the first found action is linked to objects, more complex setups are not handled,
    it's up to user to reproduce them!
    """
    from bpy.types import ShapeKey, Material, Camera
1056 shape_key_values = {}
1057 actions = {}
1058 for as_uuid, ((fbx_asdata, _blen_data), alayers) in stacks.items():
1059 stack_name = elem_name_ensure_class(fbx_asdata, b'AnimStack')
1060 for al_uuid, ((fbx_aldata, _blen_data), items) in alayers.items():
1061 layer_name = elem_name_ensure_class(fbx_aldata, b'AnimLayer')
1062 for item, cnodes in items.items():
1063 if isinstance(item, Material):
1064 id_data = item
1065 elif isinstance(item, ShapeKey):
1066 id_data = item.id_data
1067 elif isinstance(item, Camera):
1068 id_data = item
1069 else:
1070 id_data = item.bl_obj
1071 # XXX Ignore rigged mesh animations - those are a nightmare to handle, see note about it in
1072 # FbxImportHelperNode class definition.
1073 if id_data and id_data.type == 'MESH' and id_data.parent and id_data.parent.type == 'ARMATURE':
1074 continue
1075 if id_data is None:
1076 continue
1078 # Create new action if needed (should always be needed, except for keyblocks from shapekeys cases).
1079 key = (as_uuid, al_uuid, id_data)
1080 action = actions.get(key)
1081 if action is None:
1082 if stack_name == layer_name:
1083 action_name = "|".join((id_data.name, stack_name))
1084 else:
1085 action_name = "|".join((id_data.name, stack_name, layer_name))
1086 actions[key] = action = bpy.data.actions.new(action_name)
1087 action.use_fake_user = True
1088 # If none yet assigned, assign this action to id_data.
1089 if not id_data.animation_data:
1090 id_data.animation_data_create()
1091 if not id_data.animation_data.action:
1092 id_data.animation_data.action = action
1093 # And actually populate the action!
1094 blen_read_animations_action_item(action, item, cnodes, scene.render.fps, anim_offset, global_scale,
1095 shape_key_values)
1097 # If the minimum/maximum animated value is outside the slider range of the shape key, attempt to expand the slider
1098 # range until the animated range fits and has extra room to be decreased or increased further.
1099 # Shape key slider_min and slider_max have hard min/max values, if an imported animation uses a value outside that
1100 # range, a warning message will be printed to the console and the slider_min/slider_max values will end up clamped.
1101 shape_key_values_in_range = True
1102 for shape_key, deform_values in shape_key_values.items():
1103 min_animated_deform = min(deform_values)
1104 max_animated_deform = max(deform_values)
1105 shape_key_values_in_range &= expand_shape_key_range(shape_key, min_animated_deform)
1106 shape_key_values_in_range &= expand_shape_key_range(shape_key, max_animated_deform)
1107 if not shape_key_values_in_range:
1108 print("WARNING: The imported animated Value of a Shape Key is beyond the minimum/maximum allowed and will be"
1109 " clamped during playback.")
1112 # ----
1113 # Mesh
def blen_read_geom_layerinfo(fbx_layer):
    return (
        validate_blend_names(elem_find_first_string_as_bytes(fbx_layer, b'Name')),
        elem_find_first_string_as_bytes(fbx_layer, b'MappingInformationType'),
        elem_find_first_string_as_bytes(fbx_layer, b'ReferenceInformationType'),
    )
1123 def blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size):
1124 """Validate blen_data when it's not a bpy_prop_collection.
1125 Returns whether blen_data is a bpy_prop_collection"""
1126 blen_data_is_collection = isinstance(blen_data, bpy.types.bpy_prop_collection)
1127 if not blen_data_is_collection:
1128 if item_size > 1:
1129 assert(len(blen_data.shape) == 2)
1130 assert(blen_data.shape[1] == item_size)
1131 assert(blen_data.dtype == blen_dtype)
1132 return blen_data_is_collection
1135 def blen_read_geom_parse_fbx_data(fbx_data, stride, item_size):
1136 """Parse fbx_data as an array.array into a 2d np.ndarray that shares the same memory, where each row is a single
1137 item"""
1138 # Technically stride < item_size could be supported, but there's probably not a use case for it since it would
1139 # result in a view of the data with self-overlapping memory.
1140 assert(stride >= item_size)
1141 # View the array.array as an np.ndarray.
1142 fbx_data_np = parray_as_ndarray(fbx_data)
1144 if stride == item_size:
1145 if item_size > 1:
1146 # Need to make sure fbx_data_np has a whole number of items to be able to view item_size elements per row.
1147 items_remainder = len(fbx_data_np) % item_size
1148 if items_remainder:
1149 print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
1150 fbx_data_np = fbx_data_np[:-items_remainder]
1151 fbx_data_np = fbx_data_np.reshape(-1, item_size)
1152 else:
1153 # Create a view of fbx_data_np that is only the first item_size elements of each stride. Note that the view will
1154 # not be C-contiguous.
1155 stride_remainder = len(fbx_data_np) % stride
1156 if stride_remainder:
1157 if stride_remainder < item_size:
1158 print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
1159 # Not enough in the remainder for a full item, so cut off the partial stride
1160 fbx_data_np = fbx_data_np[:-stride_remainder]
1161 # Reshape to one stride per row and then create a view that includes only the first item_size elements
1162 # of each stride.
1163 fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
1164 else:
1165 print("ERROR: not a whole number of strides in this FBX layer! There are a whole number of items, but"
1166 " this could indicate an error!")
1167 # There is not a whole number of strides, but there is a whole number of items.
1168 # This is a pain to deal with because fbx_data_np.reshape(-1, stride) is not possible.
1169 # A view of just the items can be created using stride_tricks.as_strided by specifying the shape and
1170 # strides of the view manually.
1171 # Extreme care must be taken when using stride_tricks.as_strided because improper usage can result in
1172 # a view that gives access to memory outside the array.
1173 from numpy.lib import stride_tricks
1175 # fbx_data_np should always start off as flat and C-contiguous.
1176 assert(fbx_data_np.strides == (fbx_data_np.itemsize,))
1178 num_whole_strides = len(fbx_data_np) // stride
1179 # Plus the one partial stride that is enough elements for a complete item.
1180 num_items = num_whole_strides + 1
1181 shape = (num_items, item_size)
1183 # strides are the number of bytes to step to get to the next element, for each axis.
1184 step_per_item = fbx_data_np.itemsize * stride
1185 step_per_item_element = fbx_data_np.itemsize
1186 strides = (step_per_item, step_per_item_element)
1188 fbx_data_np = stride_tricks.as_strided(fbx_data_np, shape, strides)
1189 else:
1190 # There's a whole number of strides, so first reshape to one stride per row and then create a view that
1191 # includes only the first item_size elements of each stride.
1192 fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
1194 return fbx_data_np
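
# Editorial note: small numeric sketch only (array contents are made up), showing the stride
# handling above: e.g. data stored with stride 3 but item_size 2 becomes a 2-column view of the
# first two values of each stride.
def _example_parse_strided_fbx_data():
    import array
    fbx_data = array.array('d', [0.0, 0.1, 9.9,
                                 0.2, 0.3, 9.9])  # 2 items, stride 3, item_size 2
    items = blen_read_geom_parse_fbx_data(fbx_data, stride=3, item_size=2)
    return items  # [[0.0, 0.1], [0.2, 0.3]]
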
1197 def blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np, is_indices=False):
1198 """Check that there are the same number of items in blen_data and fbx_data_np.
1200 Returns a tuple of two elements:
1201 0: fbx_data_np or, if fbx_data_np contains more items than blen_data, a view of fbx_data_np with the excess
1202 items removed
1203 1: Whether the returned fbx_data_np contains enough items to completely fill blen_data"""
1204 bl_num_items = len(blen_data)
1205 fbx_num_items = len(fbx_data_np)
1206 enough_data = fbx_num_items >= bl_num_items
1207 if not enough_data:
1208 if is_indices:
1209 print("ERROR: not enough indices in this FBX layer, missing data will be left as default!")
1210 else:
1211 print("ERROR: not enough data in this FBX layer, missing data will be left as default!")
1212 elif fbx_num_items > bl_num_items:
1213 if is_indices:
1214 print("ERROR: too many indices in this FBX layer, skipping excess!")
1215 else:
1216 print("ERROR: too much data in this FBX layer, skipping excess!")
1217 fbx_data_np = fbx_data_np[:bl_num_items]
1219 return fbx_data_np, enough_data
1222 def blen_read_geom_xform(fbx_data_np, xform):
1223 """xform is either None, or a function that takes fbx_data_np as its only positional argument and returns an
1224 np.ndarray with the same total number of elements as fbx_data_np.
1225 It is acceptable for xform to return an array with a different dtype to fbx_data_np.
1227 Returns xform(fbx_data_np) when xform is not None and ensures the result of xform(fbx_data_np) has the same shape as
1228 fbx_data_np before returning it.
1229 When xform is None, fbx_data_np is returned as is."""
1230 if xform is not None:
1231 item_size = fbx_data_np.shape[1]
1232 fbx_total_data = fbx_data_np.size
1233 fbx_data_np = xform(fbx_data_np)
1234 # The amount of data should not be changed by xform
1235 assert(fbx_data_np.size == fbx_total_data)
1236 # Ensure fbx_data_np is still item_size elements per row
1237 if len(fbx_data_np.shape) != 2 or fbx_data_np.shape[1] != item_size:
1238 fbx_data_np = fbx_data_np.reshape(-1, item_size)
1239 return fbx_data_np
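
# Editorial note: illustrative sketch only (made-up data), showing an 'xform' callback as used
# above, e.g. negating all values while the per-item shape is preserved by blen_read_geom_xform().
def _example_geom_xform():
    fbx_data_np = np.arange(6.0).reshape(-1, 3)
    return blen_read_geom_xform(fbx_data_np, lambda a: -a)  # same shape, negated values
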
1242 def blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
1243 xform):
1244 """Generic fbx_layer to blen_data foreach setter for Direct layers.
1245 blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
1246 fbx_data must be an array.array."""
1247 fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
1248 fbx_data_np, enough_data = blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np)
1249 fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
1251 blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
1253 if blen_data_is_collection:
1254 if not enough_data:
1255 blen_total_data = len(blen_data) * item_size
1256 buffer = np.empty(blen_total_data, dtype=blen_dtype)
1257 # It's not clear what values should be used for the missing data, so read the current values into a buffer.
1258 blen_data.foreach_get(blen_attr, buffer)
1260 # Change the buffer shape to one item per row
1261 buffer.shape = (-1, item_size)
1263 # Copy the fbx data into the start of the buffer
1264 buffer[:len(fbx_data_np)] = fbx_data_np
1265 else:
1266 # Convert the buffer to the Blender C type of blen_attr
1267 buffer = astype_view_signedness(fbx_data_np, blen_dtype)
1269 # Set blen_attr of blen_data. The buffer must be flat and C-contiguous, which ravel() ensures
1270 blen_data.foreach_set(blen_attr, buffer.ravel())
1271 else:
1272 assert(blen_data.size % item_size == 0)
1273 blen_data = blen_data.view()
1274 blen_data.shape = (-1, item_size)
1275 blen_data[:len(fbx_data_np)] = fbx_data_np
1278 def blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, fbx_layer_index, stride,
1279 item_size, descr, xform):
1280 """Generic fbx_layer to blen_data foreach setter for IndexToDirect layers.
1281 blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
1282 fbx_data must be an array.array or a 1d np.ndarray."""
1283 fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
1284 fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
1286 # fbx_layer_index is allowed to be a 1d np.ndarray for use with blen_read_geom_array_foreach_set_looptovert.
1287 if not isinstance(fbx_layer_index, np.ndarray):
1288 fbx_layer_index = parray_as_ndarray(fbx_layer_index)
1290 fbx_layer_index, enough_indices = blen_read_geom_check_fbx_data_length(blen_data, fbx_layer_index, is_indices=True)
1292 blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
1294 blen_data_items_len = len(blen_data)
1295 blen_data_len = blen_data_items_len * item_size
1296 fbx_num_items = len(fbx_data_np)
1298 # Find all indices that are out of bounds of fbx_data_np.
1299 min_index_inclusive = -fbx_num_items
1300 max_index_inclusive = fbx_num_items - 1
1301 valid_index_mask = np.equal(fbx_layer_index, fbx_layer_index.clip(min_index_inclusive, max_index_inclusive))
1302 indices_invalid = not valid_index_mask.all()
1304 fbx_data_items = fbx_data_np.reshape(-1, item_size)
1306 if indices_invalid or not enough_indices:
1307 if blen_data_is_collection:
1308 buffer = np.empty(blen_data_len, dtype=blen_dtype)
1309 buffer_item_view = buffer.view()
1310 buffer_item_view.shape = (-1, item_size)
1311 # Since we don't know what the default values should be for the missing data, read the current values into a
1312 # buffer.
1313 blen_data.foreach_get(blen_attr, buffer)
1314 else:
1315 buffer_item_view = blen_data
1317 if not enough_indices:
1318 # Reduce the length of the view to the same length as the number of indices.
1319 buffer_item_view = buffer_item_view[:len(fbx_layer_index)]
1321 # Copy the result of indexing fbx_data_items by each element in fbx_layer_index into the buffer.
1322 if indices_invalid:
1323 print("ERROR: indices in this FBX layer out of bounds of the FBX data, skipping invalid indices!")
1324 buffer_item_view[valid_index_mask] = fbx_data_items[fbx_layer_index[valid_index_mask]]
1325 else:
1326 buffer_item_view[:] = fbx_data_items[fbx_layer_index]
1328 if blen_data_is_collection:
1329 blen_data.foreach_set(blen_attr, buffer.ravel())
1330 else:
1331 if blen_data_is_collection:
1332 # Cast the buffer to the Blender C type of blen_attr
1333 fbx_data_items = astype_view_signedness(fbx_data_items, blen_dtype)
1334 buffer_items = fbx_data_items[fbx_layer_index]
1335 blen_data.foreach_set(blen_attr, buffer_items.ravel())
1336 else:
1337 blen_data[:] = fbx_data_items[fbx_layer_index]
1340 def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
1341 xform):
1342 """Generic fbx_layer to blen_data foreach setter for AllSame layers.
1343 blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
1344 fbx_data must be an array.array."""
1345 fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
1346 fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
1347 blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
1348 fbx_items_len = len(fbx_data_np)
1349 blen_items_len = len(blen_data)
1351 if fbx_items_len < 1:
1352 print("ERROR: not enough data in this FBX layer, skipping!")
1353 return
1355 if blen_data_is_collection:
1356 # Create an array filled with the value from fbx_data_np
1357 buffer = np.full((blen_items_len, item_size), fbx_data_np[0], dtype=blen_dtype)
1359 blen_data.foreach_set(blen_attr, buffer.ravel())
1360 else:
1361 blen_data[:] = fbx_data_np[0]
1364 def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
1365 descr, xform):
1366 """Generic fbx_layer to blen_data foreach setter for polyloop ByVertice layers.
1367 blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
1368 fbx_data must be an array.array"""
1369 # The fbx_data is mapped to vertices. To expand fbx_data to polygon loops, get an array of the vertex index of each
1370 # polygon loop that will then be used to index fbx_data
1371 loop_vertex_indices = np.empty(len(mesh.loops), dtype=np.uintc)
1372 mesh.loops.foreach_get("vertex_index", loop_vertex_indices)
1373 blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, loop_vertex_indices, stride,
1374 item_size, descr, xform)
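# Illustration (hypothetical values): for loops whose "vertex_index" values are [0, 2, 1] and per-vertex
# fbx_data items (a, b, c), the indexed setter above effectively writes (a, c, b) to the three loops.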
1377 # generic error printers.
1378 def blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet=False):
1379 if not quiet:
1380 print("warning layer %r mapping type unsupported: %r" % (descr, fbx_layer_mapping))
1383 def blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet=False):
1384 if not quiet:
1385 print("warning layer %r ref type unsupported: %r" % (descr, fbx_layer_ref))
1388 def blen_read_geom_array_mapped_vert(
1389 mesh, blen_data, blen_attr, blen_dtype,
1390 fbx_layer_data, fbx_layer_index,
1391 fbx_layer_mapping, fbx_layer_ref,
1392 stride, item_size, descr,
1393 xform=None, quiet=False,
1394 ):
1395 if fbx_layer_mapping == b'ByVertice':
1396 if fbx_layer_ref == b'Direct':
1397 assert(fbx_layer_index is None)
1398 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
1399 descr, xform)
1400 return True
1401 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1402 elif fbx_layer_mapping == b'AllSame':
1403 if fbx_layer_ref == b'IndexToDirect':
1404 assert(fbx_layer_index is None)
1405 blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1406 item_size, descr, xform)
1407 return True
1408 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1409 else:
1410 blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)
1412 return False
1415 def blen_read_geom_array_mapped_edge(
1416 mesh, blen_data, blen_attr, blen_dtype,
1417 fbx_layer_data, fbx_layer_index,
1418 fbx_layer_mapping, fbx_layer_ref,
1419 stride, item_size, descr,
1420 xform=None, quiet=False,
1421 ):
1422 if fbx_layer_mapping == b'ByEdge':
1423 if fbx_layer_ref == b'Direct':
1424 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
1425 descr, xform)
1426 return True
1427 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1428 elif fbx_layer_mapping == b'AllSame':
1429 if fbx_layer_ref == b'IndexToDirect':
1430 assert(fbx_layer_index is None)
1431 blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1432 item_size, descr, xform)
1433 return True
1434 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1435 else:
1436 blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)
1438 return False
1441 def blen_read_geom_array_mapped_polygon(
1442 mesh, blen_data, blen_attr, blen_dtype,
1443 fbx_layer_data, fbx_layer_index,
1444 fbx_layer_mapping, fbx_layer_ref,
1445 stride, item_size, descr,
1446 xform=None, quiet=False,
1447 ):
1448 if fbx_layer_mapping == b'ByPolygon':
1449 if fbx_layer_ref == b'IndexToDirect':
1450 # XXX Looks like we often get no fbx_layer_index in this case; it should not happen, but it does...
1451 # We fall back to 'Direct' mapping in this case.
1452 #~ assert(fbx_layer_index is not None)
1453 if fbx_layer_index is None:
1454 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1455 item_size, descr, xform)
1456 else:
1457 blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
1458 fbx_layer_index, stride, item_size, descr, xform)
1459 return True
1460 elif fbx_layer_ref == b'Direct':
1461 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
1462 descr, xform)
1463 return True
1464 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1465 elif fbx_layer_mapping == b'AllSame':
1466 if fbx_layer_ref == b'IndexToDirect':
1467 assert(fbx_layer_index is None)
1468 blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1469 item_size, descr, xform)
1470 return True
1471 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1472 else:
1473 blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)
1475 return False
1478 def blen_read_geom_array_mapped_polyloop(
1479 mesh, blen_data, blen_attr, blen_dtype,
1480 fbx_layer_data, fbx_layer_index,
1481 fbx_layer_mapping, fbx_layer_ref,
1482 stride, item_size, descr,
1483 xform=None, quiet=False,
1484 ):
1485 if fbx_layer_mapping == b'ByPolygonVertex':
1486 if fbx_layer_ref == b'IndexToDirect':
1487 # XXX Looks like we often get no fbx_layer_index in this case; it should not happen, but it does...
1488 # We fall back to 'Direct' mapping in this case.
1489 #~ assert(fbx_layer_index is not None)
1490 if fbx_layer_index is None:
1491 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1492 item_size, descr, xform)
1493 else:
1494 blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
1495 fbx_layer_index, stride, item_size, descr, xform)
1496 return True
1497 elif fbx_layer_ref == b'Direct':
1498 blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
1499 descr, xform)
1500 return True
1501 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1502 elif fbx_layer_mapping == b'ByVertice':
1503 if fbx_layer_ref == b'Direct':
1504 assert(fbx_layer_index is None)
1505 blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1506 item_size, descr, xform)
1507 return True
1508 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1509 elif fbx_layer_mapping == b'AllSame':
1510 if fbx_layer_ref == b'IndexToDirect':
1511 assert(fbx_layer_index is None)
1512 blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
1513 item_size, descr, xform)
1514 return True
1515 blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
1516 else:
1517 blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)
1519 return False
1522 def blen_read_geom_layer_material(fbx_obj, mesh):
1523 fbx_layer = elem_find_first(fbx_obj, b'LayerElementMaterial')
1525 if fbx_layer is None:
1526 return
1528 (fbx_layer_name,
1529 fbx_layer_mapping,
1530 fbx_layer_ref,
1531 ) = blen_read_geom_layerinfo(fbx_layer)
1533 layer_id = b'Materials'
1534 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
1536 blen_data = MESH_ATTRIBUTE_MATERIAL_INDEX.ensure(mesh.attributes).data
1537 fbx_item_size = 1
1538 assert(fbx_item_size == MESH_ATTRIBUTE_MATERIAL_INDEX.item_size)
1539 blen_read_geom_array_mapped_polygon(
1540 mesh, blen_data, MESH_ATTRIBUTE_MATERIAL_INDEX.foreach_attribute, MESH_ATTRIBUTE_MATERIAL_INDEX.dtype,
1541 fbx_layer_data, None,
1542 fbx_layer_mapping, fbx_layer_ref,
1543 1, fbx_item_size, layer_id,
1544 )
1547 def blen_read_geom_layer_uv(fbx_obj, mesh):
1548 for layer_id in (b'LayerElementUV',):
1549 for fbx_layer in elem_find_iter(fbx_obj, layer_id):
1550 # all should be valid
1551 (fbx_layer_name,
1552 fbx_layer_mapping,
1553 fbx_layer_ref,
1554 ) = blen_read_geom_layerinfo(fbx_layer)
1556 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'UV'))
1557 fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'UVIndex'))
1559 # Always init our new layers with (0, 0) UVs.
1560 uv_lay = mesh.uv_layers.new(name=fbx_layer_name, do_init=False)
1561 if uv_lay is None:
1562 print("Failed to add {%r %r} UVLayer to %r (probably too many of them?)"
1563 "" % (layer_id, fbx_layer_name, mesh.name))
1564 continue
1566 blen_data = uv_lay.uv
1568 # some valid files omit this data
1569 if fbx_layer_data is None:
1570 print("%r %r missing data" % (layer_id, fbx_layer_name))
1571 continue
1573 blen_read_geom_array_mapped_polyloop(
1574 mesh, blen_data, "vector", np.single,
1575 fbx_layer_data, fbx_layer_index,
1576 fbx_layer_mapping, fbx_layer_ref,
1577 2, 2, layer_id,
1578 )
1581 def blen_read_geom_layer_color(fbx_obj, mesh, colors_type):
1582 if colors_type == 'NONE':
1583 return
1584 use_srgb = colors_type == 'SRGB'
1585 layer_type = 'BYTE_COLOR' if use_srgb else 'FLOAT_COLOR'
1586 color_prop_name = "color_srgb" if use_srgb else "color"
1587 # almost same as UVs
1588 for layer_id in (b'LayerElementColor',):
1589 for fbx_layer in elem_find_iter(fbx_obj, layer_id):
1590 # all should be valid
1591 (fbx_layer_name,
1592 fbx_layer_mapping,
1593 fbx_layer_ref,
1594 ) = blen_read_geom_layerinfo(fbx_layer)
1596 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'Colors'))
1597 fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'ColorIndex'))
1599 color_lay = mesh.color_attributes.new(name=fbx_layer_name, type=layer_type, domain='CORNER')
1601 if color_lay is None:
1602 print("Failed to add {%r %r} vertex color layer to %r (probably too many of them?)"
1603 "" % (layer_id, fbx_layer_name, mesh.name))
1604 continue
1606 blen_data = color_lay.data
1608 # some valid files omit this data
1609 if fbx_layer_data is None:
1610 print("%r %r missing data" % (layer_id, fbx_layer_name))
1611 continue
1613 blen_read_geom_array_mapped_polyloop(
1614 mesh, blen_data, color_prop_name, np.single,
1615 fbx_layer_data, fbx_layer_index,
1616 fbx_layer_mapping, fbx_layer_ref,
1617 4, 4, layer_id,
1618 )
1621 def blen_read_geom_layer_smooth(fbx_obj, mesh):
1622 fbx_layer = elem_find_first(fbx_obj, b'LayerElementSmoothing')
1624 if fbx_layer is None:
1625 return False
1627 # all should be valid
1628 (fbx_layer_name,
1629 fbx_layer_mapping,
1630 fbx_layer_ref,
1631 ) = blen_read_geom_layerinfo(fbx_layer)
1633 layer_id = b'Smoothing'
1634 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
1636 # UDK exports 'Direct' mapping with no Smoothing data, not sure why, but ignore these
1637 if fbx_layer_data is None:
1638 return False
1640 if fbx_layer_mapping == b'ByEdge':
1641 # some models have bad edge data, we can't use this info...
1642 if not mesh.edges:
1643 print("warning skipping sharp edges data, no valid edges...")
1644 return False
1646 blen_data = MESH_ATTRIBUTE_SHARP_EDGE.ensure(mesh.attributes).data
1647 fbx_item_size = 1
1648 assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_EDGE.item_size)
1649 blen_read_geom_array_mapped_edge(
1650 mesh, blen_data, MESH_ATTRIBUTE_SHARP_EDGE.foreach_attribute, MESH_ATTRIBUTE_SHARP_EDGE.dtype,
1651 fbx_layer_data, None,
1652 fbx_layer_mapping, fbx_layer_ref,
1653 1, fbx_item_size, layer_id,
1654 xform=np.logical_not, # in FBX, 0 (False) is sharp, but in Blender True is sharp.
1655 )
1656 # We only set sharp edges here, not face smoothing itself...
1657 mesh.use_auto_smooth = True
1658 return False
1659 elif fbx_layer_mapping == b'ByPolygon':
1660 blen_data = MESH_ATTRIBUTE_SHARP_FACE.ensure(mesh.attributes).data
1661 fbx_item_size = 1
1662 assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_FACE.item_size)
1663 return blen_read_geom_array_mapped_polygon(
1664 mesh, blen_data, MESH_ATTRIBUTE_SHARP_FACE.foreach_attribute, MESH_ATTRIBUTE_SHARP_FACE.dtype,
1665 fbx_layer_data, None,
1666 fbx_layer_mapping, fbx_layer_ref,
1667 1, fbx_item_size, layer_id,
1668 xform=lambda s: (s == 0), # smoothgroup bitflags, treat as booleans for now
1669 )
1670 else:
1671 print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
1672 return False
1674 def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
1675 fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')
1677 if fbx_layer is None:
1678 return False
1680 # all should be valid
1681 (fbx_layer_name,
1682 fbx_layer_mapping,
1683 fbx_layer_ref,
1684 ) = blen_read_geom_layerinfo(fbx_layer)
1686 if fbx_layer_mapping != b'ByEdge':
1687 return False
1689 layer_id = b'EdgeCrease'
1690 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
1692 # some models have bad edge data, we can't use this info...
1693 if not mesh.edges:
1694 print("warning skipping edge crease data, no valid edges...")
1695 return False
1703 blen_data = mesh.edge_creases_ensure().data
1704 return blen_read_geom_array_mapped_edge(
1705 mesh, blen_data, "value", np.single,
1706 fbx_layer_data, None,
1707 fbx_layer_mapping, fbx_layer_ref,
1708 1, 1, layer_id,
1709 # Blender squares those values before sending them to OpenSubdiv, while other software doesn't,
1710 # so we need to compensate for that to get similar results through FBX...
1711 xform=np.sqrt,
1712 )
1717 def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
1718 fbx_layer = elem_find_first(fbx_obj, b'LayerElementNormal')
1720 if fbx_layer is None:
1721 return False
1723 (fbx_layer_name,
1724 fbx_layer_mapping,
1725 fbx_layer_ref,
1726 ) = blen_read_geom_layerinfo(fbx_layer)
1728 layer_id = b'Normals'
1729 fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
1730 fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'NormalsIndex'))
1732 if fbx_layer_data is None:
1733 print("warning %r %r missing data" % (layer_id, fbx_layer_name))
1734 return False
1736 # Normals are temporarily set here so that they can be retrieved again after a call to Mesh.validate().
1737 bl_norm_dtype = np.single
1738 item_size = 3
1739 # try loops, then polygons, then vertices.
1740 tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop),
1741 (mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
1742 (mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
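# Note: "is_fake" marks the per-polygon and per-vertex cases, where a temporary numpy buffer is filled
# instead of writing to the Blender collection directly, because the data still has to be expanded to
# per-loop normals below.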
1743 for blen_data, blen_data_type, is_fake, func in tries:
1744 bdata = np.zeros((len(blen_data), item_size), dtype=bl_norm_dtype) if is_fake else blen_data
1745 if func(mesh, bdata, "normal", bl_norm_dtype,
1746 fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, item_size, layer_id, xform, True):
1747 if blen_data_type == "Polygons":
1748 # To expand to per-loop normals, repeat each per-polygon normal by the number of loops of each polygon.
1749 poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
1750 mesh.polygons.foreach_get("loop_total", poly_loop_totals)
1751 loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
1752 mesh.loops.foreach_set("normal", loop_normals.ravel())
1753 elif blen_data_type == "Vertices":
1754 # We have to copy vnors to lnors! Far from elegant, but simple.
1755 loop_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
1756 mesh.loops.foreach_set("normal", bdata[loop_vertex_indices].ravel())
1757 return True
1759 blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
1760 blen_read_geom_array_error_ref("normal", fbx_layer_ref)
1761 return False
1764 def blen_read_geom(fbx_tmpl, fbx_obj, settings):
1765 # Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
1766 # global matrix, so we need to apply the global matrix to the vertices to get the correct result.
1767 geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
1768 # We need to apply the inverse transpose of the global matrix when transforming normals.
1769 geom_mat_no = Matrix(settings.global_matrix_inv_transposed) if settings.bake_space_transform else None
1770 if geom_mat_no is not None:
1771 # Remove translation & scaling!
1772 geom_mat_no.translation = Vector()
1773 geom_mat_no.normalize()
1775 # TODO, use 'fbx_tmpl'
1776 elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Geometry')
1778 fbx_verts = elem_prop_first(elem_find_first(fbx_obj, b'Vertices'))
1779 fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
1780 fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))
1782 # The dtypes when empty don't matter, but are set to what the fbx arrays are expected to be.
1783 fbx_verts = parray_as_ndarray(fbx_verts) if fbx_verts else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
1784 fbx_polys = parray_as_ndarray(fbx_polys) if fbx_polys else np.empty(0, dtype=data_types.ARRAY_INT32)
1785 fbx_edges = parray_as_ndarray(fbx_edges) if fbx_edges else np.empty(0, dtype=data_types.ARRAY_INT32)
1787 # Each vert is a 3d vector so is made of 3 components.
1788 tot_verts = len(fbx_verts) // 3
1789 if tot_verts * 3 != len(fbx_verts):
1790 print("ERROR: Not a whole number of vertices. Ignoring the partial vertex!")
1791 # Remove any remainder.
1792 fbx_verts = fbx_verts[:tot_verts * 3]
1794 tot_loops = len(fbx_polys)
1795 tot_edges = len(fbx_edges)
1797 mesh = bpy.data.meshes.new(name=elem_name_utf8)
1798 attributes = mesh.attributes
1800 if tot_verts:
1801 if geom_mat_co is not None:
1802 fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, MESH_ATTRIBUTE_POSITION.dtype)
1803 else:
1804 fbx_verts = fbx_verts.astype(MESH_ATTRIBUTE_POSITION.dtype, copy=False)
1806 mesh.vertices.add(tot_verts)
1807 MESH_ATTRIBUTE_POSITION.foreach_set(attributes, fbx_verts.ravel())
1809 if tot_loops:
1810 bl_loop_start_dtype = np.uintc
1812 mesh.loops.add(tot_loops)
1813 # The end of each polygon is specified by an inverted index.
1814 fbx_loop_end_idx = np.flatnonzero(fbx_polys < 0)
1816 tot_polys = len(fbx_loop_end_idx)
1818 # Un-invert the loop ends.
1819 fbx_polys[fbx_loop_end_idx] ^= -1
1820 # Set loop vertex indices, casting to the Blender C type first for performance.
1821 MESH_ATTRIBUTE_CORNER_VERT.foreach_set(
1822 attributes, astype_view_signedness(fbx_polys, MESH_ATTRIBUTE_CORNER_VERT.dtype))
1824 poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
1825 # The first loop is always a loop start.
1826 poly_loop_starts[0] = 0
1827 # Ignoring the last loop end, the indices after every loop end are the remaining loop starts.
1828 poly_loop_starts[1:] = fbx_loop_end_idx[:-1] + 1
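# Illustration (hypothetical values): fbx_polys == [0, 1, -3, 2, 3, -5] encodes two triangles. The loop
# ends are at indices [2, 5], the XOR with -1 recovers the vertex indices [2, 4] (-3 ^ -1 == 2), and the
# loop starts come out as [0, 3].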
1830 mesh.polygons.add(tot_polys)
1831 mesh.polygons.foreach_set("loop_start", poly_loop_starts)
1833 blen_read_geom_layer_material(fbx_obj, mesh)
1834 blen_read_geom_layer_uv(fbx_obj, mesh)
1835 blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type)
1837 if tot_edges:
1838 # The FBX 'Edges' array indexes into PolygonVertexIndex (the polygon loops), NOT into the vertices.
1840 # The first vertex index of each edge is the vertex index of the corresponding loop in fbx_polys.
1841 edges_a = fbx_polys[fbx_edges]
1843 # The second vertex index of each edge is the vertex index of the next loop in the same polygon. The
1844 # complexity here is that if the first vertex index was the last loop of that polygon in fbx_polys, the next
1845 # loop in the polygon is the first loop of that polygon, which is not the next loop in fbx_polys.
1847 # Copy fbx_polys, but rolled backwards by 1 so that indexing the result by [fbx_edges] will get the next
1848 # loop of the same polygon unless the first vertex index was the last loop of the polygon.
1849 fbx_polys_next = np.roll(fbx_polys, -1)
1850 # Get the first loop of each polygon and set them into fbx_polys_next at the same indices as the last loop
1851 # of each polygon in fbx_polys.
1852 fbx_polys_next[fbx_loop_end_idx] = fbx_polys[poly_loop_starts]
1854 # Indexing fbx_polys_next by fbx_edges now gets the vertex index of the next loop in fbx_polys.
1855 edges_b = fbx_polys_next[fbx_edges]
1857 # edges_a and edges_b need to be combined so that the first vertex index of each edge is immediately
1858 # followed by the second vertex index of that same edge.
1859 # Stack edges_a and edges_b as individual columns like np.column_stack((edges_a, edges_b)).
1860 # np.concatenate is used because np.column_stack doesn't allow specifying the dtype of the returned array.
1861 edges_conv = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
1862 axis=1, dtype=MESH_ATTRIBUTE_EDGE_VERTS.dtype, casting='unsafe')
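# Illustration (hypothetical values): with un-inverted fbx_polys == [0, 1, 2, 3] (a single quad) and
# fbx_edges == [0, 3], edges_a == [0, 3] and edges_b == [1, 0], producing the edges (0, 1) and (3, 0).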
1864 # Add the edges and set their vertex indices.
1865 mesh.edges.add(len(edges_conv))
1866 # ravel() because edges_conv must be flat and C-contiguous when passed to foreach_set.
1867 MESH_ATTRIBUTE_EDGE_VERTS.foreach_set(attributes, edges_conv.ravel())
1868 elif tot_edges:
1869 print("ERROR: No polygons, but edges exist. Ignoring the edges!")
1871 # must be after edge, face loading.
1872 ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh)
1874 blen_read_geom_layer_edge_crease(fbx_obj, mesh)
1876 ok_normals = False
1877 if settings.use_custom_normals:
1878 # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
1879 # we can only set custom lnors *after* calling it.
1880 mesh.create_normals_split()
1881 if geom_mat_no is None:
1882 ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
1883 else:
1884 ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh,
1885 lambda v_array: nors_transformed(v_array, geom_mat_no))
1887 mesh.validate(clean_customdata=False) # *Very* important to not remove lnors here!
1889 if ok_normals:
1890 bl_nors_dtype = np.single
1891 clnors = np.empty(len(mesh.loops) * 3, dtype=bl_nors_dtype)
1892 mesh.loops.foreach_get("normal", clnors)
1894 if not ok_smooth:
1895 sharp_face = MESH_ATTRIBUTE_SHARP_FACE.get(attributes)
1896 if sharp_face:
1897 attributes.remove(sharp_face)
1898 ok_smooth = True
1900 # Iterating clnors into a nested tuple first is faster than passing clnors.reshape(-1, 3) directly into
1901 # normals_split_custom_set. We use clnors.data since it is a memoryview, which is faster to iterate than clnors.
1902 mesh.normals_split_custom_set(tuple(zip(*(iter(clnors.data),) * 3)))
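# The zip/iter idiom above groups the flat (x, y, z, x, y, z, ...) memoryview into consecutive triples,
# e.g. (hypothetical values) [1.0, 0.0, 0.0, 0.0, 1.0, 0.0] -> ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0)).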
1903 mesh.use_auto_smooth = True
1905 if settings.use_custom_normals:
1906 mesh.free_normals_split()
1908 if not ok_smooth:
1909 sharp_face = MESH_ATTRIBUTE_SHARP_FACE.get(attributes)
1910 if sharp_face:
1911 attributes.remove(sharp_face)
1913 if settings.use_custom_props:
1914 blen_read_custom_properties(fbx_obj, mesh, settings)
1916 return mesh
1919 def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
1920 if not fbx_data:
1921 # No shape key data. Nothing to do.
1922 return
1924 me_vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
1925 me_vcos_vector_view = me_vcos.reshape(-1, 3)
1927 objects = list({node.bl_obj for node in objects})
1928 assert(objects)
1930 # Blender has a hard minimum and maximum shape key Value. If an imported shape key has a value outside this range it
1931 # will be clamped, and we'll print a warning message to the console.
1932 shape_key_values_in_range = True
1933 bc_uuid_to_keyblocks = {}
1934 for bc_uuid, fbx_sdata, fbx_bcdata in fbx_data:
1935 elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry')
1936 indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'))
1937 dvcos = elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'))
1939 indices = parray_as_ndarray(indices) if indices else np.empty(0, dtype=data_types.ARRAY_INT32)
1940 dvcos = parray_as_ndarray(dvcos) if dvcos else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
1942 # If there's not a whole number of vectors, trim off the remainder.
1943 # 3 components per vector.
1944 remainder = len(dvcos) % 3
1945 if remainder:
1946 dvcos = dvcos[:-remainder]
1947 dvcos = dvcos.reshape(-1, 3)
1949 # We completely ignore normals here!
1950 weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
1952 vgweights = elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'))
1953 vgweights = parray_as_ndarray(vgweights) if vgweights else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
1954 # Not doing the division in-place, in case it's possible for FBX shape keys to be used by more than one mesh.
1955 vgweights = vgweights / 100.0
1957 create_vg = (vgweights != 1.0).any()
1959 # Special case: when all weights are the same, FullWeights can contain a single element - *sigh!*
1960 nbr_indices = len(indices)
1961 if len(vgweights) == 1 and nbr_indices > 1:
1962 vgweights = np.full_like(indices, vgweights[0], dtype=vgweights.dtype)
1964 assert(len(vgweights) == nbr_indices == len(dvcos))
1966 # To add shape keys to the mesh, an Object using the mesh is needed.
1967 if me.shape_keys is None:
1968 objects[0].shape_key_add(name="Basis", from_mix=False)
1969 kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False)
1970 me.shape_keys.use_relative = True # Should already be set as such.
1972 # Only need to set the shape key co if there are any non-zero dvcos.
1973 if dvcos.any():
1974 shape_cos = me_vcos_vector_view.copy()
1975 shape_cos[indices] += dvcos
1976 kb.data.foreach_set("co", shape_cos.ravel())
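# Illustration (hypothetical values): with indices == [2] and dvcos == [[0.0, 0.0, 1.0]], only vertex 2
# of the new shape key is offset, by one unit along Z relative to the base mesh coordinates.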
1978 shape_key_values_in_range &= expand_shape_key_range(kb, weight)
1980 kb.value = weight
1982 # Add vgroup if necessary.
1983 if create_vg:
1984 # VertexGroup.add only allows sequences of int indices, but iterating the indices array directly would
1985 # produce numpy scalars of types such as np.int32. The underlying memoryview of the indices array, however,
1986 # does produce standard Python ints when iterated, so pass indices.data to add_vgroup_to_objects instead of
1987 # indices.
1988 # memoryviews tend to be faster to iterate than numpy arrays anyway, so vgweights.data is passed too.
1989 add_vgroup_to_objects(indices.data, vgweights.data, kb.name, objects)
1990 kb.vertex_group = kb.name
1992 bc_uuid_to_keyblocks.setdefault(bc_uuid, []).append(kb)
1994 if not shape_key_values_in_range:
1995 print("WARNING: The imported Value of a Shape Key on the Mesh '%s' is beyond the minimum/maximum allowed and"
1996 " has been clamped." % me.name)
1998 return bc_uuid_to_keyblocks
2001 # --------
2002 # Material
2004 def blen_read_material(fbx_tmpl, fbx_obj, settings):
2005 from bpy_extras import node_shader_utils
2006 from math import sqrt
2008 elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Material')
2010 nodal_material_wrap_map = settings.nodal_material_wrap_map
2011 ma = bpy.data.materials.new(name=elem_name_utf8)
2013 const_color_white = 1.0, 1.0, 1.0
2014 const_color_black = 0.0, 0.0, 0.0
2016 fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
2017 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
2018 fbx_props_no_template = (fbx_props[0], fbx_elem_nil)
2020 ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False, use_nodes=True)
2021 ma_wrap.base_color = elem_props_get_color_rgb(fbx_props, b'DiffuseColor', const_color_white)
2022 # No specular color in Principled BSDF shader, assumed to be either white or take some tint from diffuse one...
2023 # TODO: add way to handle tint option (guesstimate from spec color + intensity...)?
2024 ma_wrap.specular = elem_props_get_number(fbx_props, b'SpecularFactor', 0.25) * 2.0
2025 # XXX Totally empirical conversion, trying to adapt it (and protect against invalid negative values, see T96076):
2026 # Maps the FBX [0.0 - 100.0] shininess range back onto the Principled BSDF [1.0 - 0.0] roughness range...
2027 fbx_shininess = max(elem_props_get_number(fbx_props, b'Shininess', 20.0), 0.0)
2028 ma_wrap.roughness = 1.0 - (sqrt(fbx_shininess) / 10.0)
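# A few sample values for the empirical mapping above (derived from the formula, not from any FBX spec):
# Shininess 100.0 -> roughness 0.0, Shininess 25.0 -> roughness 0.5, Shininess 0.0 -> roughness 1.0.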
2029 # Sweetness... Looks like we are not the only ones to not know exactly how FBX is supposed to work (see T59850).
2030 # According to one of its developers, Unity uses that formula to extract alpha value:
2032 # alpha = 1 - TransparencyFactor
2033 # if (alpha == 1 or alpha == 0):
2034 # alpha = 1 - TransparentColor.r
2036 # Until further info, let's assume this is correct way to do, hence the following code for TransparentColor.
2037 # However, there are some cases (from 3DSMax, see T65065), where we do have TransparencyFactor only defined
2038 # in the template to 0.0, and then materials defining TransparentColor to pure white (1.0, 1.0, 1.0),
2039 # and setting alpha value in Opacity... try to cope with that too. :((((
2040 alpha = 1.0 - elem_props_get_number(fbx_props, b'TransparencyFactor', 0.0)
2041 if (alpha == 1.0 or alpha == 0.0):
2042 alpha = elem_props_get_number(fbx_props_no_template, b'Opacity', None)
2043 if alpha is None:
2044 alpha = 1.0 - elem_props_get_color_rgb(fbx_props, b'TransparentColor', const_color_black)[0]
2045 ma_wrap.alpha = alpha
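# Summary of the fallback chain above: TransparencyFactor first; if that yields exactly 0.0 or 1.0, try
# the material's own (non-templated) Opacity; failing that, derive alpha from TransparentColor's red channel.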
2046 ma_wrap.metallic = elem_props_get_number(fbx_props, b'ReflectionFactor', 0.0)
2047 # We have no metallic (a.k.a. reflection) color...
2048 # elem_props_get_color_rgb(fbx_props, b'ReflectionColor', const_color_white)
2049 ma_wrap.normalmap_strength = elem_props_get_number(fbx_props, b'BumpFactor', 1.0)
2050 # Emission strength and color
2051 ma_wrap.emission_strength = elem_props_get_number(fbx_props, b'EmissiveFactor', 1.0)
2052 ma_wrap.emission_color = elem_props_get_color_rgb(fbx_props, b'EmissiveColor', const_color_black)
2054 nodal_material_wrap_map[ma] = ma_wrap
2056 if settings.use_custom_props:
2057 blen_read_custom_properties(fbx_obj, ma, settings)
2059 return ma
2062 # -------
2063 # Image & Texture
2065 def blen_read_texture_image(fbx_tmpl, fbx_obj, basedir, settings):
2066 import os
2067 from bpy_extras import image_utils
2069 def pack_data_from_content(image, fbx_obj):
2070 data = elem_find_first_bytes(fbx_obj, b'Content')
2071 if (data):
2072 data_len = len(data)
2073 if (data_len):
2074 image.pack(data=data, data_len=data_len)
2076 elem_name_utf8 = elem_name_ensure_classes(fbx_obj, {b'Texture', b'Video'})
2078 image_cache = settings.image_cache
2080 # Yet another beautiful logic demonstration by Master FBX:
2081 # * RelativeFilename in both Video and Texture nodes.
2082 # * FileName in texture nodes.
2083 # * Filename in video nodes.
2084 # Aaaaaaaarrrrrrrrgggggggggggg!!!!!!!!!!!!!!
2085 filepath = elem_find_first_string(fbx_obj, b'RelativeFilename')
2086 if filepath:
2087 # Make sure we do handle a relative path, and not an absolute one (see D5143).
2088 filepath = filepath.lstrip(os.path.sep).lstrip(os.path.altsep)
2089 filepath = os.path.join(basedir, filepath)
2090 else:
2091 filepath = elem_find_first_string(fbx_obj, b'FileName')
2092 if not filepath:
2093 filepath = elem_find_first_string(fbx_obj, b'Filename')
2094 if not filepath:
2095 print("Error, could not find any file path in ", fbx_obj)
2096 print(" Falling back to: ", elem_name_utf8)
2097 filepath = elem_name_utf8
2098 else:
2099 filepath = filepath.replace('\\', '/') if (os.sep == '/') else filepath.replace('/', '\\')
2101 image = image_cache.get(filepath)
2102 if image is not None:
2103 # Data is only embedded once, we may have already created the image but still be missing its data!
2104 if not image.has_data:
2105 pack_data_from_content(image, fbx_obj)
2106 return image
2108 image = image_utils.load_image(
2109 filepath,
2110 dirname=basedir,
2111 place_holder=True,
2112 recursive=settings.use_image_search,
2113 )
2115 # Try to use embedded data, if available!
2116 pack_data_from_content(image, fbx_obj)
2118 image_cache[filepath] = image
2119 # name can be ../a/b/c
2120 image.name = os.path.basename(elem_name_utf8)
2122 if settings.use_custom_props:
2123 blen_read_custom_properties(fbx_obj, image, settings)
2125 return image
2128 def blen_read_camera(fbx_tmpl, fbx_obj, settings):
2129 # millimeters to inches (FBX film sizes are in inches, Blender sensor sizes in millimeters)
2130 M2I = 0.0393700787
2132 global_scale = settings.global_scale
2134 elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')
2136 fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
2137 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
2139 camera = bpy.data.cameras.new(name=elem_name_utf8)
2141 camera.type = 'ORTHO' if elem_props_get_enum(fbx_props, b'CameraProjectionType', 0) == 1 else 'PERSP'
2143 camera.dof.focus_distance = elem_props_get_number(fbx_props, b'FocusDistance', 10 * 1000) / 1000 * global_scale
2144 if (elem_props_get_bool(fbx_props, b'UseDepthOfField', False)):
2145 camera.dof.use_dof = True
2147 camera.lens = elem_props_get_number(fbx_props, b'FocalLength', 35.0)
2148 camera.sensor_width = elem_props_get_number(fbx_props, b'FilmWidth', 32.0 * M2I) / M2I
2149 camera.sensor_height = elem_props_get_number(fbx_props, b'FilmHeight', 32.0 * M2I) / M2I
2151 camera.ortho_scale = elem_props_get_number(fbx_props, b'OrthoZoom', 1.0)
2153 filmaspect = camera.sensor_width / camera.sensor_height
2154 # film offset
2155 camera.shift_x = elem_props_get_number(fbx_props, b'FilmOffsetX', 0.0) / (M2I * camera.sensor_width)
2156 camera.shift_y = elem_props_get_number(fbx_props, b'FilmOffsetY', 0.0) / (M2I * camera.sensor_height * filmaspect)
2158 camera.clip_start = elem_props_get_number(fbx_props, b'NearPlane', 0.01) * global_scale
2159 camera.clip_end = elem_props_get_number(fbx_props, b'FarPlane', 100.0) * global_scale
2161 if settings.use_custom_props:
2162 blen_read_custom_properties(fbx_obj, camera, settings)
2164 return camera
2167 def blen_read_light(fbx_tmpl, fbx_obj, settings):
2168 import math
2169 elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')
2171 fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
2172 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
2174 light_type = {
2175 0: 'POINT',
2176 1: 'SUN',
2177 2: 'SPOT'}.get(elem_props_get_enum(fbx_props, b'LightType', 0), 'POINT')
2179 lamp = bpy.data.lights.new(name=elem_name_utf8, type=light_type)
2181 if light_type == 'SPOT':
2182 spot_size = elem_props_get_number(fbx_props, b'OuterAngle', None)
2183 if spot_size is None:
2184 # Deprecated.
2185 spot_size = elem_props_get_number(fbx_props, b'Cone angle', 45.0)
2186 lamp.spot_size = math.radians(spot_size)
2188 spot_blend = elem_props_get_number(fbx_props, b'InnerAngle', None)
2189 if spot_blend is None:
2190 # Deprecated.
2191 spot_blend = elem_props_get_number(fbx_props, b'HotSpot', 45.0)
2192 lamp.spot_blend = 1.0 - (spot_blend / spot_size)
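# Illustration (hypothetical values): OuterAngle 90.0 and InnerAngle 45.0 give spot_size == radians(90)
# and spot_blend == 1.0 - (45.0 / 90.0) == 0.5.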
2194 # TODO, cycles nodes???
2195 lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
2196 lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
2197 lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
2198 if hasattr(lamp, "cycles"):
2199 lamp.cycles.cast_shadow = lamp.use_shadow
2200 # Keeping this for now, but this is not used nor exposed anymore afaik...
2201 lamp.shadow_color = elem_props_get_color_rgb(fbx_props, b'ShadowColor', (0.0, 0.0, 0.0))
2203 if settings.use_custom_props:
2204 blen_read_custom_properties(fbx_obj, lamp, settings)
2206 return lamp
2209 # ### Import Utility class
2210 class FbxImportHelperNode:
2211 """
2212 Temporary helper node to store a hierarchy of fbxNode objects before building Objects, Armatures and Bones.
2213 It tries to keep the correction data in one place so it can be applied consistently to the imported data.
2214 """
2216 __slots__ = (
2217 '_parent', 'anim_compensation_matrix', 'is_global_animation', 'armature_setup', 'armature', 'bind_matrix',
2218 'bl_bone', 'bl_data', 'bl_obj', 'bone_child_matrix', 'children', 'clusters',
2219 'fbx_elem', 'fbx_data_elem', 'fbx_name', 'fbx_transform_data', 'fbx_type',
2220 'is_armature', 'has_bone_children', 'is_bone', 'is_root', 'is_leaf',
2221 'matrix', 'matrix_as_parent', 'matrix_geom', 'meshes', 'post_matrix', 'pre_matrix')
2223 def __init__(self, fbx_elem, bl_data, fbx_transform_data, is_bone):
2224 self.fbx_name = elem_name_ensure_class(fbx_elem, b'Model') if fbx_elem else 'Unknown'
2225 self.fbx_type = fbx_elem.props[2] if fbx_elem else None
2226 self.fbx_elem = fbx_elem
2227 self.fbx_data_elem = None # FBX elem of a connected NodeAttribute/Geometry for helpers whose bl_data does not exist or is yet to be created.
2228 self.bl_obj = None
2229 self.bl_data = bl_data
2230 self.bl_bone = None # Name of bone if this is a bone (this may be different to fbx_name if there was a name conflict in Blender!)
2231 self.fbx_transform_data = fbx_transform_data
2232 self.is_root = False
2233 self.is_bone = is_bone
2234 self.is_armature = False
2235 self.armature = None # For bones only, relevant armature node.
2236 self.has_bone_children = False # True if the hierarchy below this node contains bones, important to support mixed hierarchies.
2237 self.is_leaf = False # True for leaf-bones added to the end of some bone chains to set the lengths.
2238 self.pre_matrix = None # correction matrix that needs to be applied before the FBX transform
2239 self.bind_matrix = None # for bones this is the matrix used to bind to the skin
2240 if fbx_transform_data:
2241 self.matrix, self.matrix_as_parent, self.matrix_geom = blen_read_object_transform_do(fbx_transform_data)
2242 else:
2243 self.matrix, self.matrix_as_parent, self.matrix_geom = (None, None, None)
2244 self.post_matrix = None # correction matrix that needs to be applied after the FBX transform
2245 self.bone_child_matrix = None # Objects are attached to a bone's end, not its beginning; this matrix corrects for that.
2247 # XXX Those two are to handle the fact that rigged meshes are not linked to their armature in FBX, which implies
2248 # that their animation is in global space (afaik...).
2249 # This is actually not really solvable currently, since anim_compensation_matrix is not valid if armature
2250 # itself is animated (we'd have to recompute global-to-local anim_compensation_matrix for each frame,
2251 # and for each armature action... beyond being an insane amount of work).
2252 # Solution for now: do not read rigged meshes animations at all! sic...
2253 self.anim_compensation_matrix = None # a mesh moved in the hierarchy may have a different local matrix. This compensates animations for this.
2254 self.is_global_animation = False
2256 self.meshes = None # List of meshes influenced by this bone.
2257 self.clusters = [] # Deformer Cluster nodes
2258 self.armature_setup = {} # mesh and armature matrix when the mesh was bound
2260 self._parent = None
2261 self.children = []
2263 @property
2264 def parent(self):
2265 return self._parent
2267 @parent.setter
2268 def parent(self, value):
2269 if self._parent is not None:
2270 self._parent.children.remove(self)
2271 self._parent = value
2272 if self._parent is not None:
2273 self._parent.children.append(self)
2275 @property
2276 def ignore(self):
2277 # Separating leaf status from ignore status itself.
2278 # Currently they are equivalent, but this may change in future.
2279 return self.is_leaf
2281 def __repr__(self):
2282 if self.fbx_elem:
2283 return self.fbx_elem.props[1].decode()
2284 else:
2285 return "None"
2287 def print_info(self, indent=0):
2288 print(" " * indent + (self.fbx_name if self.fbx_name else "(Null)")
2289 + ("[root]" if self.is_root else "")
2290 + ("[leaf]" if self.is_leaf else "")
2291 + ("[ignore]" if self.ignore else "")
2292 + ("[armature]" if self.is_armature else "")
2293 + ("[bone]" if self.is_bone else "")
2294 + ("[HBC]" if self.has_bone_children else "")
2296 for c in self.children:
2297 c.print_info(indent + 1)
2299 def mark_leaf_bones(self):
2300 if self.is_bone and len(self.children) == 1:
2301 child = self.children[0]
2302 if child.is_bone and len(child.children) == 0:
2303 child.is_leaf = True
2304 for child in self.children:
2305 child.mark_leaf_bones()
2307 def do_bake_transform(self, settings):
2308 return (settings.bake_space_transform and self.fbx_type in (b'Mesh', b'Null') and
2309 not self.is_armature and not self.is_bone)
2311 def find_correction_matrix(self, settings, parent_correction_inv=None):
2312 from bpy_extras.io_utils import axis_conversion
2314 if self.parent and (self.parent.is_root or self.parent.do_bake_transform(settings)):
2315 self.pre_matrix = settings.global_matrix
2317 if parent_correction_inv:
2318 self.pre_matrix = parent_correction_inv @ (self.pre_matrix if self.pre_matrix else Matrix())
2320 correction_matrix = None
2322 if self.is_bone:
2323 if settings.automatic_bone_orientation:
2324 # find best orientation to align bone with
2325 bone_children = tuple(child for child in self.children if child.is_bone)
2326 if len(bone_children) == 0:
2327 # no children, inherit the correction from parent (if possible)
2328 if self.parent and self.parent.is_bone:
2329 correction_matrix = parent_correction_inv.inverted() if parent_correction_inv else None
2330 else:
2331 # else find how best to rotate the bone to align the Y axis with the children
2332 best_axis = (1, 0, 0)
2333 if len(bone_children) == 1:
2334 vec = bone_children[0].get_bind_matrix().to_translation()
2335 best_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
2336 if abs(vec[0]) > abs(vec[1]):
2337 if abs(vec[0]) > abs(vec[2]):
2338 best_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
2339 elif abs(vec[1]) > abs(vec[2]):
2340 best_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
2341 else:
2342 # get the child directions once because they may be checked several times
2343 child_locs = (child.get_bind_matrix().to_translation() for child in bone_children)
2344 child_locs = tuple(loc.normalized() for loc in child_locs if loc.magnitude > 0.0)
2346 # I'm not sure which one I like better...
2347 if False:
2348 best_angle = -1.0
2349 for i in range(6):
2350 a = i // 2
2351 s = -1 if i % 2 == 1 else 1
2352 test_axis = Vector((s if a == 0 else 0, s if a == 1 else 0, s if a == 2 else 0))
2354 # find max angle to children
2355 max_angle = 1.0
2356 for loc in child_locs:
2357 max_angle = min(max_angle, test_axis.dot(loc))
2359 # is it better than the last one?
2360 if best_angle < max_angle:
2361 best_angle = max_angle
2362 best_axis = test_axis
2363 else:
2364 best_angle = -1.0
2365 for vec in child_locs:
2366 test_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
2367 if abs(vec[0]) > abs(vec[1]):
2368 if abs(vec[0]) > abs(vec[2]):
2369 test_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
2370 elif abs(vec[1]) > abs(vec[2]):
2371 test_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
2373 # find max angle to children
2374 max_angle = 1.0
2375 for loc in child_locs:
2376 max_angle = min(max_angle, test_axis.dot(loc))
2378 # is it better than the last one?
2379 if best_angle < max_angle:
2380 best_angle = max_angle
2381 best_axis = test_axis
2383 # convert best_axis to axis string
2384 to_up = 'Z' if best_axis[2] >= 0 else '-Z'
2385 if abs(best_axis[0]) > abs(best_axis[1]):
2386 if abs(best_axis[0]) > abs(best_axis[2]):
2387 to_up = 'X' if best_axis[0] >= 0 else '-X'
2388 elif abs(best_axis[1]) > abs(best_axis[2]):
2389 to_up = 'Y' if best_axis[1] >= 0 else '-Y'
2390 to_forward = 'X' if to_up not in {'X', '-X'} else 'Y'
2392 # Build correction matrix
2393 if (to_up, to_forward) != ('Y', 'X'):
2394 correction_matrix = axis_conversion(from_forward='X',
2395 from_up='Y',
2396 to_forward=to_forward,
2397 to_up=to_up,
2398 ).to_4x4()
2399 else:
2400 correction_matrix = settings.bone_correction_matrix
2401 else:
2402 # camera and light can be hard wired
2403 if self.fbx_type == b'Camera':
2404 correction_matrix = MAT_CONVERT_CAMERA
2405 elif self.fbx_type == b'Light':
2406 correction_matrix = MAT_CONVERT_LIGHT
2408 self.post_matrix = correction_matrix
2410 if self.do_bake_transform(settings):
2411 self.post_matrix = settings.global_matrix_inv @ (self.post_matrix if self.post_matrix else Matrix())
2413 # process children
2414 correction_matrix_inv = correction_matrix.inverted_safe() if correction_matrix else None
2415 for child in self.children:
2416 child.find_correction_matrix(settings, correction_matrix_inv)
2418 def find_armature_bones(self, armature):
2419 for child in self.children:
2420 if child.is_bone:
2421 child.armature = armature
2422 child.find_armature_bones(armature)
2424 def find_armatures(self):
2425 needs_armature = False
2426 for child in self.children:
2427 if child.is_bone:
2428 needs_armature = True
2429 break
2430 if needs_armature:
2431 if self.fbx_type in {b'Null', b'Root'}:
2432 # if empty then convert into armature
2433 self.is_armature = True
2434 armature = self
2435 else:
2436 # otherwise insert a new node
2437 # XXX Maybe in case self is virtual FBX root node, we should instead add one armature per bone child?
2438 armature = FbxImportHelperNode(None, None, None, False)
2439 armature.fbx_name = "Armature"
2440 armature.is_armature = True
2442 for child in tuple(self.children):
2443 if child.is_bone:
2444 child.parent = armature
2446 armature.parent = self
2448 armature.find_armature_bones(armature)
2450 for child in self.children:
2451 if child.is_armature or child.is_bone:
2452 continue
2453 child.find_armatures()
2455 def find_bone_children(self):
2456 has_bone_children = False
2457 for child in self.children:
2458 has_bone_children |= child.find_bone_children()
2459 self.has_bone_children = has_bone_children
2460 return self.is_bone or has_bone_children
2462 def find_fake_bones(self, in_armature=False):
2463 if in_armature and not self.is_bone and self.has_bone_children:
2464 self.is_bone = True
2465 # if we are not a null node we need an intermediate node for the data
2466 if self.fbx_type not in {b'Null', b'Root'}:
2467 node = FbxImportHelperNode(self.fbx_elem, self.bl_data, None, False)
2468 self.fbx_elem = None
2469 self.bl_data = None
2471 # transfer children
2472 for child in self.children:
2473 if child.is_bone or child.has_bone_children:
2474 continue
2475 child.parent = node
2477 # attach to parent
2478 node.parent = self
2480 if self.is_armature:
2481 in_armature = True
2482 for child in self.children:
2483 child.find_fake_bones(in_armature)
2485 def get_world_matrix_as_parent(self):
2486 matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
2487 if self.matrix_as_parent:
2488 matrix = matrix @ self.matrix_as_parent
2489 return matrix
2491 def get_world_matrix(self):
2492 matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
2493 if self.matrix:
2494 matrix = matrix @ self.matrix
2495 return matrix
2497 def get_matrix(self):
2498 matrix = self.matrix if self.matrix else Matrix()
2499 if self.pre_matrix:
2500 matrix = self.pre_matrix @ matrix
2501 if self.post_matrix:
2502 matrix = matrix @ self.post_matrix
2503 return matrix
2505 def get_bind_matrix(self):
2506 matrix = self.bind_matrix if self.bind_matrix else Matrix()
2507 if self.pre_matrix:
2508 matrix = self.pre_matrix @ matrix
2509 if self.post_matrix:
2510 matrix = matrix @ self.post_matrix
2511 return matrix
2513 def make_bind_pose_local(self, parent_matrix=None):
2514 if parent_matrix is None:
2515 parent_matrix = Matrix()
2517 if self.bind_matrix:
2518 bind_matrix = parent_matrix.inverted_safe() @ self.bind_matrix
2519 else:
2520 bind_matrix = self.matrix.copy() if self.matrix else None
2522 self.bind_matrix = bind_matrix
2523 if bind_matrix:
2524 parent_matrix = parent_matrix @ bind_matrix
2526 for child in self.children:
2527 child.make_bind_pose_local(parent_matrix)
2529 def collect_skeleton_meshes(self, meshes):
2530 for _, m in self.clusters:
2531 meshes.update(m)
2532 for child in self.children:
2533 if not child.meshes:
2534 child.collect_skeleton_meshes(meshes)
2536 def collect_armature_meshes(self):
2537 if self.is_armature:
2538 armature_matrix_inv = self.get_world_matrix().inverted_safe()
2540 meshes = set()
2541 for child in self.children:
2542 # Children meshes may be linked to children armatures, in which case we do not want to link them
2543 # to a parent one. See T70244.
2544 child.collect_armature_meshes()
2545 if not child.meshes:
2546 child.collect_skeleton_meshes(meshes)
2547 for m in meshes:
2548 old_matrix = m.matrix
2549 m.matrix = armature_matrix_inv @ m.get_world_matrix()
2550 m.anim_compensation_matrix = old_matrix.inverted_safe() @ m.matrix
2551 m.is_global_animation = True
2552 m.parent = self
2553 self.meshes = meshes
2554 else:
2555 for child in self.children:
2556 child.collect_armature_meshes()
2558 def build_skeleton(self, arm, parent_matrix, settings, parent_bone_size=1):
2559 def child_connect(par_bone, child_bone, child_head, connect_ctx):
2560 # child_bone or child_head may be None.
2561 force_connect_children, connected = connect_ctx
2562 if child_bone is not None:
2563 child_bone.parent = par_bone
2564 child_head = child_bone.head
2566 if similar_values_iter(par_bone.tail, child_head):
2567 if child_bone is not None:
2568 child_bone.use_connect = True
2569 # Disallow any force-connection at this level from now on, since that child was 'really'
2570 # connected, and we do not want to move the current bone's tail anymore!
2571 connected = None
2572 elif force_connect_children and connected is not None:
2573 # We only store position where tail of par_bone should be in the end.
2574 # Actual tail moving and force connection of compatible child bones will happen
2575 # once all have been checked.
2576 if connected is ...:
2577 connected = ([child_head.copy(), 1], [child_bone] if child_bone is not None else [])
2578 else:
2579 connected[0][0] += child_head
2580 connected[0][1] += 1
2581 if child_bone is not None:
2582 connected[1].append(child_bone)
2583 connect_ctx[1] = connected
2585 def child_connect_finalize(par_bone, connect_ctx):
2586 force_connect_children, connected = connect_ctx
2587 # Do nothing if force connection is not enabled!
2588 if force_connect_children and connected is not None and connected is not ...:
2589 # Here again we have to be wary about zero-length bones!!!
2590 par_tail = connected[0][0] / connected[0][1]
2591 if (par_tail - par_bone.head).magnitude < 1e-2:
2592 par_bone_vec = (par_bone.tail - par_bone.head).normalized()
2593 par_tail = par_bone.head + par_bone_vec * 0.01
2594 par_bone.tail = par_tail
2595 for child_bone in connected[1]:
2596 if similar_values_iter(par_tail, child_bone.head):
2597 child_bone.use_connect = True
2599 # Create the (edit)bone.
2600 bone = arm.bl_data.edit_bones.new(name=self.fbx_name)
2601 bone.select = True
2602 self.bl_obj = arm.bl_obj
2603 self.bl_data = arm.bl_data
2604 self.bl_bone = bone.name # Could be different from the FBX name!
2605 # Read EditBone custom props from the NodeAttribute
2606 if settings.use_custom_props and self.fbx_data_elem:
2607 blen_read_custom_properties(self.fbx_data_elem, bone, settings)
2609 # get average distance to children
2610 bone_size = 0.0
2611 bone_count = 0
2612 for child in self.children:
2613 if child.is_bone:
2614 bone_size += child.get_bind_matrix().to_translation().magnitude
2615 bone_count += 1
2616 if bone_count > 0:
2617 bone_size /= bone_count
2618 else:
2619 bone_size = parent_bone_size
2621 # So that our bone gets its final length, but still Y-aligned in armature space.
2622 # 0-length bones are automatically collapsed into their parent when you leave edit mode,
2623 # so this enforces a minimum length.
2624 bone_tail = Vector((0.0, 1.0, 0.0)) * max(0.01, bone_size)
2625 bone.tail = bone_tail
2627 # And rotate/move it to its final "rest pose".
2628 bone_matrix = parent_matrix @ self.get_bind_matrix().normalized()
2630 bone.matrix = bone_matrix
2632 # Correction for children attached to a bone. FBX expects to attach to the head of a bone,
2633 # while Blender attaches to the tail.
2634 self.bone_child_matrix = Matrix.Translation(-bone_tail)
2636 force_connect_children = settings.force_connect_children
2637 connect_ctx = [force_connect_children, ...]
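# Descriptive note: `...` (Ellipsis) is the "nothing accumulated yet" sentinel. While force-connecting,
# child_connect() replaces it with ([summed_child_head, count], [candidate child bones]), or with None once
# a child is 'really' connected; child_connect_finalize() then averages the summed heads into the new tail.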
2638 for child in self.children:
2639 if child.is_leaf and force_connect_children:
2640 # Arggggggggggggggggg! We do not want to create this bone, but we need its 'virtual head' location
2641 # to orient current one!!!
2642 child_head = (bone_matrix @ child.get_bind_matrix().normalized()).translation
2643 child_connect(bone, None, child_head, connect_ctx)
2644 elif child.is_bone and not child.ignore:
2645 child_bone = child.build_skeleton(arm, bone_matrix, settings, bone_size)
2646 # Connection to parent.
2647 child_connect(bone, child_bone, None, connect_ctx)
2649 child_connect_finalize(bone, connect_ctx)
2650 return bone
2652 def build_node_obj(self, fbx_tmpl, settings):
2653 if self.bl_obj:
2654 return self.bl_obj
2656 if self.is_bone or not self.fbx_elem:
2657 return None
2659 # create when linking since we need object data
2660 elem_name_utf8 = self.fbx_name
2662 # Object data must be created already
2663 self.bl_obj = obj = bpy.data.objects.new(name=elem_name_utf8, object_data=self.bl_data)
2665 fbx_props = (elem_find_first(self.fbx_elem, b'Properties70'),
2666 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
2668 # ----
2669 # Misc Attributes
2671 obj.color[0:3] = elem_props_get_color_rgb(fbx_props, b'Color', (0.8, 0.8, 0.8))
2672 obj.hide_viewport = not bool(elem_props_get_visibility(fbx_props, b'Visibility', 1.0))
2674 obj.matrix_basis = self.get_matrix()
2676 if settings.use_custom_props:
2677 blen_read_custom_properties(self.fbx_elem, obj, settings)
2679 return obj
2681 def build_skeleton_children(self, fbx_tmpl, settings, scene, view_layer):
2682 if self.is_bone:
2683 for child in self.children:
2684 if child.ignore:
2685 continue
2686 child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)
2687 return None
2688 else:
2689 # child is not a bone
2690 obj = self.build_node_obj(fbx_tmpl, settings)
2692 if obj is None:
2693 return None
2695 for child in self.children:
2696 if child.ignore:
2697 continue
2698 child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)
2700 # instance in scene
2701 view_layer.active_layer_collection.collection.objects.link(obj)
2702 obj.select_set(True)
2704 return obj
2706 def link_skeleton_children(self, fbx_tmpl, settings, scene):
2707 if self.is_bone:
2708 for child in self.children:
2709 if child.ignore:
2710 continue
2711 child_obj = child.bl_obj
2712 if child_obj and child_obj != self.bl_obj:
2713 child_obj.parent = self.bl_obj # get the armature the bone belongs to
2714 child_obj.parent_bone = self.bl_bone
2715 child_obj.parent_type = 'BONE'
2716 child_obj.matrix_parent_inverse = Matrix()
2718 # Blender attaches to the end of a bone, while FBX attaches to the start.
2719 # bone_child_matrix corrects for that.
2720 if child.pre_matrix:
2721 child.pre_matrix = self.bone_child_matrix @ child.pre_matrix
2722 else:
2723 child.pre_matrix = self.bone_child_matrix
2725 child_obj.matrix_basis = child.get_matrix()
2726 child.link_skeleton_children(fbx_tmpl, settings, scene)
2727 return None
2728 else:
2729 obj = self.bl_obj
2731 for child in self.children:
2732 if child.ignore:
2733 continue
2734 child_obj = child.link_skeleton_children(fbx_tmpl, settings, scene)
2735 if child_obj:
2736 child_obj.parent = obj
2738 return obj
2740 def set_pose_matrix_and_custom_props(self, arm, settings):
2741 pose_bone = arm.bl_obj.pose.bones[self.bl_bone]
2742 pose_bone.matrix_basis = self.get_bind_matrix().inverted_safe() @ self.get_matrix()
2744 if settings.use_custom_props:
2745 blen_read_custom_properties(self.fbx_elem, pose_bone, settings)
2747 for child in self.children:
2748 if child.ignore:
2749 continue
2750 if child.is_bone:
2751 child.set_pose_matrix_and_custom_props(arm, settings)
2753 def merge_weights(self, combined_weights, fbx_cluster):
2754 indices = elem_prop_first(elem_find_first(fbx_cluster, b'Indexes', default=None), default=())
2755 weights = elem_prop_first(elem_find_first(fbx_cluster, b'Weights', default=None), default=())
2757 for index, weight in zip(indices, weights):
2758 w = combined_weights.get(index)
2759 if w is None:
2760 combined_weights[index] = [weight]
2761 else:
2762 w.append(weight)
2764 def set_bone_weights(self):
2765 ignored_children = tuple(child for child in self.children
2766 if child.is_bone and child.ignore and len(child.clusters) > 0)
2768 if len(ignored_children) > 0:
2769 # If we have ignored child bones, we need to merge their weights into the current bone's weights.
2770 # This can happen both intentionally and accidentally when skinning a model. Either way, those
2771 # weights need to be moved into a parent bone or they cause animation glitches.
2772 for fbx_cluster, meshes in self.clusters:
2773 combined_weights = {}
2774 self.merge_weights(combined_weights, fbx_cluster)
2776 for child in ignored_children:
2777 for child_cluster, child_meshes in child.clusters:
2778 if not meshes.isdisjoint(child_meshes):
2779 self.merge_weights(combined_weights, child_cluster)
2781 # combine child weights
2782 indices = []
2783 weights = []
2784 for i, w in combined_weights.items():
2785 indices.append(i)
2786 if len(w) > 1:
2787 # Add ignored child weights to the current bone's weight.
2788 # XXX - Weights that sum to more than 1.0 get clamped to 1.0 when set in the vertex group.
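# (e.g. 0.6 from this bone plus 0.7 from an ignored child is stored as 1.3 and ends up as 1.0.)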
2789 weights.append(sum(w))
2790 else:
2791 weights.append(w[0])
2793 add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in meshes])
2795 # Ignored children's clusters that only drive meshes not covered by this bone's own clusters need no merging.
2796 all_meshes = set().union(*[meshes for _, meshes in self.clusters])
2797 for child in ignored_children:
2798 for child_cluster, child_meshes in child.clusters:
2799 if all_meshes.isdisjoint(child_meshes):
2800 indices = elem_prop_first(elem_find_first(child_cluster, b'Indexes', default=None), default=())
2801 weights = elem_prop_first(elem_find_first(child_cluster, b'Weights', default=None), default=())
2802 add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in child_meshes])
2803 else:
2804 # set the vertex weights on meshes
2805 for fbx_cluster, meshes in self.clusters:
2806 indices = elem_prop_first(elem_find_first(fbx_cluster, b'Indexes', default=None), default=())
2807 weights = elem_prop_first(elem_find_first(fbx_cluster, b'Weights', default=None), default=())
2808 add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in meshes])
2810 for child in self.children:
2811 if child.is_bone and not child.ignore:
2812 child.set_bone_weights()
2814 def build_hierarchy(self, fbx_tmpl, settings, scene, view_layer):
2815 if self.is_armature:
2816 # create when linking since we need object data
2817 elem_name_utf8 = self.fbx_name
2819 self.bl_data = arm_data = bpy.data.armatures.new(name=elem_name_utf8)
2821 # Object data must be created already
2822 self.bl_obj = arm = bpy.data.objects.new(name=elem_name_utf8, object_data=arm_data)
2824 arm.matrix_basis = self.get_matrix()
2826 if self.fbx_elem:
2827 fbx_props = (elem_find_first(self.fbx_elem, b'Properties70'),
2828 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
2830 if settings.use_custom_props:
2831 # Read Armature Object custom props from the Node
2832 blen_read_custom_properties(self.fbx_elem, arm, settings)
2834 if self.fbx_data_elem:
2835 # Read Armature Data custom props from the NodeAttribute
2836 blen_read_custom_properties(self.fbx_data_elem, arm_data, settings)
2838 # instance in scene
2839 view_layer.active_layer_collection.collection.objects.link(arm)
2840 arm.select_set(True)
2842 # Add bones:
2844 # Switch to Edit mode.
2845 view_layer.objects.active = arm
2846 is_hidden = arm.hide_viewport
2847 arm.hide_viewport = False # Can't switch hidden objects to Edit mode...
2848 bpy.ops.object.mode_set(mode='EDIT')
2850 for child in self.children:
2851 if child.ignore:
2852 continue
2853 if child.is_bone:
2854 child.build_skeleton(self, Matrix(), settings)
2856 bpy.ops.object.mode_set(mode='OBJECT')
2858 arm.hide_viewport = is_hidden
2860 # Set pose matrix and PoseBone custom properties
2861 for child in self.children:
2862 if child.ignore:
2863 continue
2864 if child.is_bone:
2865 child.set_pose_matrix_and_custom_props(self, settings)
2867 # Add bone children:
2868 for child in self.children:
2869 if child.ignore:
2870 continue
2871 child_obj = child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)
2873 return arm
2874 elif self.fbx_elem and not self.is_bone:
2875 obj = self.build_node_obj(fbx_tmpl, settings)
2877 # walk through children
2878 for child in self.children:
2879 child.build_hierarchy(fbx_tmpl, settings, scene, view_layer)
2881 # instance in scene
2882 view_layer.active_layer_collection.collection.objects.link(obj)
2883 obj.select_set(True)
2885 return obj
2886 else:
2887 for child in self.children:
2888 child.build_hierarchy(fbx_tmpl, settings, scene, view_layer)
2890 return None
2892 def link_hierarchy(self, fbx_tmpl, settings, scene):
2893 if self.is_armature:
2894 arm = self.bl_obj
2896 # Link bone children:
2897 for child in self.children:
2898 if child.ignore:
2899 continue
2900 child_obj = child.link_skeleton_children(fbx_tmpl, settings, scene)
2901 if child_obj:
2902 child_obj.parent = arm
2904 # Add armature modifiers to the meshes
2905 if self.meshes:
2906 for mesh in self.meshes:
2907 (mmat, amat) = mesh.armature_setup[self]
2908 me_obj = mesh.bl_obj
2910 # bring global armature & mesh matrices into *Blender* global space.
2911 # Note: Usage of matrix_geom (local 'diff' transform) here is quite brittle.
2912 # Among other things, why in hell isn't it taken into account by bindpose & co???
2913 # Probably because org app (max) handles it completely aside from any parenting stuff,
2914 # which we obviously cannot do in Blender. :/
2915 if amat is None:
2916 amat = self.bind_matrix
2917 amat = settings.global_matrix @ (Matrix() if amat is None else amat)
2918 if self.matrix_geom:
2919 amat = amat @ self.matrix_geom
2920 mmat = settings.global_matrix @ mmat
2921 if mesh.matrix_geom:
2922 mmat = mmat @ mesh.matrix_geom
2924 # Now that we have armature and mesh in their (global) bind 'state' (matrix),
2925 # we can compute inverse parenting matrix of the mesh.
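# With obj.matrix_world == parent_world @ matrix_parent_inverse @ matrix_basis, asking the mesh to
# sit at mmat while its armature parent sits at amat gives the assignment below:
# matrix_parent_inverse = amat^-1 @ mmat @ matrix_basis^-1.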
2926 me_obj.matrix_parent_inverse = amat.inverted_safe() @ mmat @ me_obj.matrix_basis.inverted_safe()
2928 mod = mesh.bl_obj.modifiers.new(arm.name, 'ARMATURE')
2929 mod.object = arm
2931 # Add bone weights to the deformers
2932 for child in self.children:
2933 if child.ignore:
2934 continue
2935 if child.is_bone:
2936 child.set_bone_weights()
2938 return arm
2939 elif self.bl_obj:
2940 obj = self.bl_obj
2942 # walk through children
2943 for child in self.children:
2944 child_obj = child.link_hierarchy(fbx_tmpl, settings, scene)
2945 if child_obj:
2946 child_obj.parent = obj
2948 return obj
2949 else:
2950 for child in self.children:
2951 child.link_hierarchy(fbx_tmpl, settings, scene)
2953 return None
2956 def load(operator, context, filepath="",
2957 use_manual_orientation=False,
2958 axis_forward='-Z',
2959 axis_up='Y',
2960 global_scale=1.0,
2961 bake_space_transform=False,
2962 use_custom_normals=True,
2963 use_image_search=False,
2964 use_alpha_decals=False,
2965 decal_offset=0.0,
2966 use_anim=True,
2967 anim_offset=1.0,
2968 use_subsurf=False,
2969 use_custom_props=True,
2970 use_custom_props_enum_as_string=True,
2971 ignore_leaf_bones=False,
2972 force_connect_children=False,
2973 automatic_bone_orientation=False,
2974 primary_bone_axis='Y',
2975 secondary_bone_axis='X',
2976 use_prepost_rot=True,
2977 colors_type='SRGB'):
2979 global fbx_elem_nil
2980 fbx_elem_nil = FBXElem('', (), (), ())
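# fbx_elem_nil is a harmless stand-in used as a default wherever a template or property
# element may be missing (see the elem_find_first(..., fbx_elem_nil) calls below).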
2982 import os
2983 import time
2984 from bpy_extras.io_utils import axis_conversion
2986 from . import parse_fbx
2987 from .fbx_utils import RIGHT_HAND_AXES, FBX_FRAMERATES
2989 start_time_proc = time.process_time()
2990 start_time_sys = time.time()
2992 perfmon = PerfMon()
2993 perfmon.level_up()
2994 perfmon.step("FBX Import: start importing %s" % filepath)
2995 perfmon.level_up()
2997 # Detect ASCII files.
2999 # Typically it's bad practice to fail silently on any error;
3000 # however, the file may fail to read for many reasons,
3001 # and that situation is handled later in the code.
3002 # Right now we only want to know whether the file reads successfully as ASCII.
3003 try:
3004 with open(filepath, 'r', encoding="utf-8") as fh:
3005 fh.read(24)
3006 is_ascii = True
3007 except Exception:
3008 is_ascii = False
3010 if is_ascii:
3011 operator.report({'ERROR'}, tip_("ASCII FBX files are not supported %r") % filepath)
3012 return {'CANCELLED'}
3013 del is_ascii
3014 # End ascii detection.
3016 try:
3017 elem_root, version = parse_fbx.parse(filepath)
3018 except Exception as e:
3019 import traceback
3020 traceback.print_exc()
3022 operator.report({'ERROR'}, tip_("Couldn't open file %r (%s)") % (filepath, e))
3023 return {'CANCELLED'}
3025 if version < 7100:
3026 operator.report({'ERROR'}, tip_("Version %r unsupported, must be %r or later") % (version, 7100))
3027 return {'CANCELLED'}
3029 print("FBX version: %r" % version)
3031 if bpy.ops.object.mode_set.poll():
3032 bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
3034 # deselect all
3035 if bpy.ops.object.select_all.poll():
3036 bpy.ops.object.select_all(action='DESELECT')
3038 basedir = os.path.dirname(filepath)
3040 nodal_material_wrap_map = {}
3041 image_cache = {}
3043 # Tables: (FBX_byte_id -> [FBX_data, None or Blender_datablock])
3044 fbx_table_nodes = {}
3046 if use_alpha_decals:
3047 material_decals = set()
3048 else:
3049 material_decals = None
3051 scene = context.scene
3052 view_layer = context.view_layer
3054 # #### Get some info from GlobalSettings.
3056 perfmon.step("FBX import: Prepare...")
3058 fbx_settings = elem_find_first(elem_root, b'GlobalSettings')
3059 fbx_settings_props = elem_find_first(fbx_settings, b'Properties70')
3060 if fbx_settings is None or fbx_settings_props is None:
3061 operator.report({'ERROR'}, tip_("No 'GlobalSettings' found in file %r") % filepath)
3062 return {'CANCELLED'}
3064 # FBX default base unit seems to be the centimeter, while raw Blender Unit is equivalent to the meter...
3065 unit_scale = elem_props_get_number(fbx_settings_props, b'UnitScaleFactor', 1.0)
3066 unit_scale_org = elem_props_get_number(fbx_settings_props, b'OriginalUnitScaleFactor', 1.0)
3067 global_scale *= (unit_scale / units_blender_to_fbx_factor(context.scene))
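# (e.g. for a default metric Blender scene the factor is typically 100.0 (FBX works in
# centimeters), so with the common UnitScaleFactor of 1.0 an extra 0.01 ends up in global_scale
# and 100 FBX units come out as 1 Blender meter.)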
3068 # Compute global matrix and scale.
3069 if not use_manual_orientation:
3070 axis_forward = (elem_props_get_integer(fbx_settings_props, b'FrontAxis', 1),
3071 elem_props_get_integer(fbx_settings_props, b'FrontAxisSign', 1))
3072 axis_up = (elem_props_get_integer(fbx_settings_props, b'UpAxis', 2),
3073 elem_props_get_integer(fbx_settings_props, b'UpAxisSign', 1))
3074 axis_coord = (elem_props_get_integer(fbx_settings_props, b'CoordAxis', 0),
3075 elem_props_get_integer(fbx_settings_props, b'CoordAxisSign', 1))
3076 axis_key = (axis_up, axis_forward, axis_coord)
3077 axis_up, axis_forward = {v: k for k, v in RIGHT_HAND_AXES.items()}.get(axis_key, ('Z', 'Y'))
3078 global_matrix = (Matrix.Scale(global_scale, 4) @
3079 axis_conversion(from_forward=axis_forward, from_up=axis_up).to_4x4())
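# (RIGHT_HAND_AXES maps Blender-style (up, forward) letters to FBX axis/sign triplets; the
# reversed lookup above recovers those letters from the file's GlobalSettings, falling back to
# ('Z', 'Y') for unrecognized combinations.)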
3081 # To cancel out unwanted rotation/scale on nodes.
3082 global_matrix_inv = global_matrix.inverted()
3083 # For transforming mesh normals.
3084 global_matrix_inv_transposed = global_matrix_inv.transposed()
3086 # Compute bone correction matrix
3087 bone_correction_matrix = None # None means no correction/identity
3088 if not automatic_bone_orientation:
3089 if (primary_bone_axis, secondary_bone_axis) != ('Y', 'X'):
3090 bone_correction_matrix = axis_conversion(from_forward='X',
3091 from_up='Y',
3092 to_forward=secondary_bone_axis,
3093 to_up=primary_bone_axis,
3094 ).to_4x4()
3096 # Compute framerate settings.
3097 custom_fps = elem_props_get_number(fbx_settings_props, b'CustomFrameRate', 25.0)
3098 time_mode = elem_props_get_enum(fbx_settings_props, b'TimeMode')
3099 real_fps = {eid: val for val, eid in FBX_FRAMERATES[1:]}.get(time_mode, custom_fps)
3100 if real_fps <= 0.0:
3101 real_fps = 25.0
3102 scene.render.fps = round(real_fps)
3103 scene.render.fps_base = scene.render.fps / real_fps
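# (e.g. an NTSC-style file reporting 29.97 fps becomes fps=30 with fps_base ~1.001, so the
# effective rate fps / fps_base is still ~29.97.)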
3105 # store global settings that need to be accessed during conversion
3106 settings = FBXImportSettings(
3107 operator.report, (axis_up, axis_forward), global_matrix, global_scale,
3108 bake_space_transform, global_matrix_inv, global_matrix_inv_transposed,
3109 use_custom_normals, use_image_search,
3110 use_alpha_decals, decal_offset,
3111 use_anim, anim_offset,
3112 use_subsurf,
3113 use_custom_props, use_custom_props_enum_as_string,
3114 nodal_material_wrap_map, image_cache,
3115 ignore_leaf_bones, force_connect_children, automatic_bone_orientation, bone_correction_matrix,
3116 use_prepost_rot, colors_type,
3117 )
3119 # #### And now, the "real" data.
3121 perfmon.step("FBX import: Templates...")
3123 fbx_defs = elem_find_first(elem_root, b'Definitions') # can be None
3124 fbx_nodes = elem_find_first(elem_root, b'Objects')
3125 fbx_connections = elem_find_first(elem_root, b'Connections')
3127 if fbx_nodes is None:
3128 operator.report({'ERROR'}, tip_("No 'Objects' found in file %r") % filepath)
3129 return {'CANCELLED'}
3130 if fbx_connections is None:
3131 operator.report({'ERROR'}, tip_("No 'Connections' found in file %r") % filepath)
3132 return {'CANCELLED'}
3134 # ----
3135 # First load property templates
3136 # Load 'PropertyTemplate' values.
3137 # Key is a tuple, (ObjectType, FBXNodeType)
3138 # eg, (b'Texture', b'KFbxFileTexture')
3139 # (b'Geometry', b'KFbxMesh')
3140 fbx_templates = {}
3142 def _():
3143 if fbx_defs is not None:
3144 for fbx_def in fbx_defs.elems:
3145 if fbx_def.id == b'ObjectType':
3146 for fbx_subdef in fbx_def.elems:
3147 if fbx_subdef.id == b'PropertyTemplate':
3148 assert(fbx_def.props_type == b'S')
3149 assert(fbx_subdef.props_type == b'S')
3150 # (b'Texture', b'KFbxFileTexture') - eg.
3151 key = fbx_def.props[0], fbx_subdef.props[0]
3152 fbx_templates[key] = fbx_subdef
3153 _(); del _
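# fbx_template_get() retries without the legacy 'K' prefix, e.g. a lookup for
# (b'Geometry', b'KFbxMesh') falls back to (b'Geometry', b'FbxMesh') for newer files.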
3155 def fbx_template_get(key):
3156 ret = fbx_templates.get(key, fbx_elem_nil)
3157 if ret is fbx_elem_nil:
3158 # Newest FBX (7.4 and above) no longer use the 'K' prefix in their type names...
3159 key = (key[0], key[1][1:])
3160 return fbx_templates.get(key, fbx_elem_nil)
3161 return ret
3163 perfmon.step("FBX import: Nodes...")
3165 # ----
3166 # Build FBX node-table
3167 def _():
3168 for fbx_obj in fbx_nodes.elems:
3169 # TODO, investigate what other items after first 3 may be
3170 assert(fbx_obj.props_type[:3] == b'LSS')
3171 fbx_uuid = elem_uuid(fbx_obj)
3172 fbx_table_nodes[fbx_uuid] = [fbx_obj, None]
3173 _(); del _
3175 # ----
3176 # Load in the data
3177 # http://download.autodesk.com/us/fbx/20112/FBX_SDK_HELP/index.html?url=
3178 # WS73099cc142f487551fea285e1221e4f9ff8-7fda.htm,topicNumber=d0e6388
3180 perfmon.step("FBX import: Connections...")
3182 fbx_connection_map = {}
3183 fbx_connection_map_reverse = {}
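# Both maps are uuid -> list of (other uuid, connection element): fbx_connection_map is keyed by
# the connection source (child), fbx_connection_map_reverse by the destination (parent).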
3185 def _():
3186 for fbx_link in fbx_connections.elems:
3187 c_type = fbx_link.props[0]
3188 if fbx_link.props_type[1:3] == b'LL':
3189 c_src, c_dst = fbx_link.props[1:3]
3190 fbx_connection_map.setdefault(c_src, []).append((c_dst, fbx_link))
3191 fbx_connection_map_reverse.setdefault(c_dst, []).append((c_src, fbx_link))
3192 _(); del _
3194 perfmon.step("FBX import: Meshes...")
3196 # ----
3197 # Load mesh data
3198 def _():
3199 fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxMesh'))
3201 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3202 fbx_obj, blen_data = fbx_item
3203 if fbx_obj.id != b'Geometry':
3204 continue
3205 if fbx_obj.props[-1] == b'Mesh':
3206 assert(blen_data is None)
3207 fbx_item[1] = blen_read_geom(fbx_tmpl, fbx_obj, settings)
3208 _(); del _
3210 perfmon.step("FBX import: Materials & Textures...")
3212 # ----
3213 # Load material data
3214 def _():
3215 fbx_tmpl = fbx_template_get((b'Material', b'KFbxSurfacePhong'))
3216 # b'KFbxSurfaceLambert'
3218 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3219 fbx_obj, blen_data = fbx_item
3220 if fbx_obj.id != b'Material':
3221 continue
3222 assert(blen_data is None)
3223 fbx_item[1] = blen_read_material(fbx_tmpl, fbx_obj, settings)
3224 _(); del _
3226 # ----
3227 # Load image & textures data
3228 def _():
3229 fbx_tmpl_tex = fbx_template_get((b'Texture', b'KFbxFileTexture'))
3230 fbx_tmpl_img = fbx_template_get((b'Video', b'KFbxVideo'))
3232 # Important to run all 'Video' ones first, embedded images are stored in those nodes.
3233 # XXX Note we simplify things here, assuming both matching Video and Texture will use the same file path;
3234 # this may be a bit weak, if issues arise we'll fall back to plain connection stuff...
3235 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3236 fbx_obj, blen_data = fbx_item
3237 if fbx_obj.id != b'Video':
3238 continue
3239 fbx_item[1] = blen_read_texture_image(fbx_tmpl_img, fbx_obj, basedir, settings)
3240 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3241 fbx_obj, blen_data = fbx_item
3242 if fbx_obj.id != b'Texture':
3243 continue
3244 fbx_item[1] = blen_read_texture_image(fbx_tmpl_tex, fbx_obj, basedir, settings)
3245 _(); del _
3247 perfmon.step("FBX import: Cameras & Lamps...")
3249 # ----
3250 # Load camera data
3251 def _():
3252 fbx_tmpl = fbx_template_get((b'NodeAttribute', b'KFbxCamera'))
3254 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3255 fbx_obj, blen_data = fbx_item
3256 if fbx_obj.id != b'NodeAttribute':
3257 continue
3258 if fbx_obj.props[-1] == b'Camera':
3259 assert(blen_data is None)
3260 fbx_item[1] = blen_read_camera(fbx_tmpl, fbx_obj, settings)
3261 _(); del _
3263 # ----
3264 # Load lamp data
3265 def _():
3266 fbx_tmpl = fbx_template_get((b'NodeAttribute', b'KFbxLight'))
3268 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3269 fbx_obj, blen_data = fbx_item
3270 if fbx_obj.id != b'NodeAttribute':
3271 continue
3272 if fbx_obj.props[-1] == b'Light':
3273 assert(blen_data is None)
3274 fbx_item[1] = blen_read_light(fbx_tmpl, fbx_obj, settings)
3275 _(); del _
3277 # ----
3278 # Connections
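# connection_filter_ex() returns, for every node connected to fbx_uuid in the given map, a
# (FBX element, Blender datablock or None, connection element) triple, optionally restricted
# to elements whose id matches fbx_id.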
3279 def connection_filter_ex(fbx_uuid, fbx_id, dct):
3280 return [(c_found[0], c_found[1], c_type)
3281 for (c_uuid, c_type) in dct.get(fbx_uuid, ())
3282 # 0 is used for the root node, which isn't in fbx_table_nodes
3283 for c_found in (() if c_uuid == 0 else (fbx_table_nodes.get(c_uuid, (None, None)),))
3284 if (fbx_id is None) or (c_found[0] and c_found[0].id == fbx_id)]
3286 def connection_filter_forward(fbx_uuid, fbx_id):
3287 return connection_filter_ex(fbx_uuid, fbx_id, fbx_connection_map)
3289 def connection_filter_reverse(fbx_uuid, fbx_id):
3290 return connection_filter_ex(fbx_uuid, fbx_id, fbx_connection_map_reverse)
3292 perfmon.step("FBX import: Objects & Armatures...")
3294 # -- temporary helper hierarchy to build armatures and objects from
3295 # lookup from uuid to helper node. Used to build parent-child relations and later to look up animated nodes.
3296 fbx_helper_nodes = {}
3298 def _():
3299 # We build an intermediate hierarchy used to:
3300 # - Calculate and store bone orientation correction matrices. The same matrices will be reused for animation.
3301 # - Find/insert armature nodes.
3302 # - Filter leaf bones.
3304 # create scene root
3305 fbx_helper_nodes[0] = root_helper = FbxImportHelperNode(None, None, None, False)
3306 root_helper.is_root = True
3308 # add fbx nodes
3309 fbx_tmpl = fbx_template_get((b'Model', b'KFbxNode'))
3310 for a_uuid, a_item in fbx_table_nodes.items():
3311 fbx_obj, bl_data = a_item
3312 if fbx_obj is None or fbx_obj.id != b'Model':
3313 continue
3315 fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
3316 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
3318 transform_data = blen_read_object_transform_preprocess(fbx_props, fbx_obj, Matrix(), use_prepost_rot)
3319 # Note: 'Root' "bones" are handled as (armature) objects.
3320 # Note: See T46912 for first FBX file I ever saw with 'Limb' bones - thought those were totally deprecated.
3321 is_bone = fbx_obj.props[2] in {b'LimbNode', b'Limb'}
3322 fbx_helper_nodes[a_uuid] = FbxImportHelperNode(fbx_obj, bl_data, transform_data, is_bone)
3324 # add parent-child relations and add blender data to the node
3325 for fbx_link in fbx_connections.elems:
3326 if fbx_link.props[0] != b'OO':
3327 continue
3328 if fbx_link.props_type[1:3] == b'LL':
3329 c_src, c_dst = fbx_link.props[1:3]
3330 parent = fbx_helper_nodes.get(c_dst)
3331 if parent is None:
3332 continue
3334 child = fbx_helper_nodes.get(c_src)
3335 if child is None:
3336 # add blender data (meshes, lights, cameras, etc.) to a helper node
3337 fbx_sdata, bl_data = p_item = fbx_table_nodes.get(c_src, (None, None))
3338 if fbx_sdata is None:
3339 continue
3340 if fbx_sdata.id not in {b'Geometry', b'NodeAttribute'}:
3341 continue
3342 parent.bl_data = bl_data
3343 if bl_data is None:
3344 # If there's no bl_data, add the fbx_sdata so that it can be read when creating the bl_data/bone
3345 parent.fbx_data_elem = fbx_sdata
3346 else:
3347 # set parent
3348 child.parent = parent
3350 # find armatures (either an empty below a bone or a new node inserted at the bone)
3351 root_helper.find_armatures()
3353 # mark nodes that have bone children
3354 root_helper.find_bone_children()
3356 # mark nodes that need a bone to attach child-bones to
3357 root_helper.find_fake_bones()
3359 # mark leaf nodes that are only required to mark the end of their parent bone
3360 if settings.ignore_leaf_bones:
3361 root_helper.mark_leaf_bones()
3363 # What a mess! Some bones have several BindPoses, some have none, clusters contain a bind pose as well,
3364 # and you can have several clusters per bone!
3365 # Maybe some conversion can be applied to put them all into the same frame of reference?
3367 # get the bind pose from pose elements
3368 for a_uuid, a_item in fbx_table_nodes.items():
3369 fbx_obj, bl_data = a_item
3370 if fbx_obj is None:
3371 continue
3372 if fbx_obj.id != b'Pose':
3373 continue
3374 if fbx_obj.props[2] != b'BindPose':
3375 continue
3376 for fbx_pose_node in fbx_obj.elems:
3377 if fbx_pose_node.id != b'PoseNode':
3378 continue
3379 node_elem = elem_find_first(fbx_pose_node, b'Node')
3380 node = elem_uuid(node_elem)
3381 matrix_elem = elem_find_first(fbx_pose_node, b'Matrix')
3382 matrix = array_to_matrix4(matrix_elem.props[0]) if matrix_elem else None
3383 bone = fbx_helper_nodes.get(node)
3384 if bone and matrix:
3385 # Store the matrix in the helper node.
3386 # There may be several bind pose matrices for the same node, but in tests they seem to be identical.
3387 bone.bind_matrix = matrix # global space
3389 # get clusters and bind pose
3390 for helper_uuid, helper_node in fbx_helper_nodes.items():
3391 if not helper_node.is_bone:
3392 continue
3393 for cluster_uuid, cluster_link in fbx_connection_map.get(helper_uuid, ()):
3394 if cluster_link.props[0] != b'OO':
3395 continue
3396 fbx_cluster, _ = fbx_table_nodes.get(cluster_uuid, (None, None))
3397 if fbx_cluster is None or fbx_cluster.id != b'Deformer' or fbx_cluster.props[2] != b'Cluster':
3398 continue
3400 # Get the bind pose from the cluster:
3401 tx_mesh_elem = elem_find_first(fbx_cluster, b'Transform', default=None)
3402 tx_mesh = array_to_matrix4(tx_mesh_elem.props[0]) if tx_mesh_elem else Matrix()
3404 tx_bone_elem = elem_find_first(fbx_cluster, b'TransformLink', default=None)
3405 tx_bone = array_to_matrix4(tx_bone_elem.props[0]) if tx_bone_elem else None
3407 tx_arm_elem = elem_find_first(fbx_cluster, b'TransformAssociateModel', default=None)
3408 tx_arm = array_to_matrix4(tx_arm_elem.props[0]) if tx_arm_elem else None
3410 mesh_matrix = tx_mesh
3411 armature_matrix = tx_arm
3413 if tx_bone:
3414 mesh_matrix = tx_bone @ mesh_matrix
3415 helper_node.bind_matrix = tx_bone # overwrite the bind matrix
3417 # Get the meshes driven by this cluster: (Shouldn't that be only one?)
3418 meshes = set()
3419 for skin_uuid, skin_link in fbx_connection_map.get(cluster_uuid):
3420 if skin_link.props[0] != b'OO':
3421 continue
3422 fbx_skin, _ = fbx_table_nodes.get(skin_uuid, (None, None))
3423 if fbx_skin is None or fbx_skin.id != b'Deformer' or fbx_skin.props[2] != b'Skin':
3424 continue
3425 for mesh_uuid, mesh_link in fbx_connection_map.get(skin_uuid):
3426 if mesh_link.props[0] != b'OO':
3427 continue
3428 fbx_mesh, _ = fbx_table_nodes.get(mesh_uuid, (None, None))
3429 if fbx_mesh is None or fbx_mesh.id != b'Geometry' or fbx_mesh.props[2] != b'Mesh':
3430 continue
3431 for object_uuid, object_link in fbx_connection_map.get(mesh_uuid):
3432 if object_link.props[0] != b'OO':
3433 continue
3434 mesh_node = fbx_helper_nodes[object_uuid]
3435 if mesh_node:
3436 # ----
3437 # If we get a valid mesh matrix (in bone space), store the armature and
3438 # mesh global matrices; we need them to compute the mesh's matrix_parent_inverse
3439 # when actually binding them via the modifier.
3440 # Note we assume all bones were bound with the same mesh/armature (global) matrix;
3441 # we do not support anything else in Blender anyway!
3442 mesh_node.armature_setup[helper_node.armature] = (mesh_matrix, armature_matrix)
3443 meshes.add(mesh_node)
3445 helper_node.clusters.append((fbx_cluster, meshes))
3447 # convert bind poses from global space into local space
3448 root_helper.make_bind_pose_local()
3450 # collect armature meshes
3451 root_helper.collect_armature_meshes()
3453 # find the correction matrices to align FBX objects with their Blender equivalent
3454 root_helper.find_correction_matrix(settings)
3456 # build the Object/Armature/Bone hierarchy
3457 root_helper.build_hierarchy(fbx_tmpl, settings, scene, view_layer)
3459 # Link the Object/Armature/Bone hierarchy
3460 root_helper.link_hierarchy(fbx_tmpl, settings, scene)
3462 # root_helper.print_info(0)
3463 _(); del _
3465 perfmon.step("FBX import: ShapeKeys...")
3467 # We can handle shapes.
3468 blend_shape_channels = {} # We do not need Shapes themselves, but keyblocks, for anim.
3470 def _():
3471 fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape'))
3473 # - FBX | - Blender equivalent
3474 # Mesh | `Mesh`
3475 # BlendShape | `Key`
3476 # BlendShapeChannel | `ShapeKey`, but without its `.data`.
3477 # Shape | `ShapeKey.data`, but also includes normals and the values are relative to the base Mesh
3478 # | instead of being absolute. The data is sparse, so each Shape has an "Indexes" array too.
3479 # | FBX 2020 introduced 'Modern Style' Shapes that also support tangents, binormals, vertex
3480 # | colors and UVs, and can be absolute values instead of relative, but 'Modern Style' Shapes
3481 # | are not currently supported.
3483 # The FBX connections between Shapes and Meshes form multiple many-many relationships:
3484 # Mesh >-< BlendShape >-< BlendShapeChannel >-< Shape
3485 # In practice, the relationships are almost never many-many and are more typically 1-many or 1-1:
3486 # Mesh --- BlendShape:
3487 # usually 1-1 and the FBX SDK might enforce that each BlendShape is connected to at most one Mesh.
3488 # BlendShape --< BlendShapeChannel:
3489 # usually 1-many.
3490 # BlendShapeChannel --- or uncommonly --< Shape:
3491 # usually 1-1, but 1-many is a documented feature.
3493 def connections_gen(c_src_uuid, fbx_id, fbx_type):
3494 """Helper to reduce duplicate code"""
3495 # Rarely, an imported FBX file will have duplicate connections. For Shape Key related connections, FBX
3496 # appears to ignore the duplicates, or overwrite the existing duplicates such that the end result is the
3497 # same as ignoring them, so keep a set of the seen connections and ignore any duplicates.
3498 seen_connections = set()
3499 for c_dst_uuid, ctype in fbx_connection_map.get(c_src_uuid, ()):
3500 if ctype.props[0] != b'OO':
3501 # 'Object-Object' connections only.
3502 continue
3503 fbx_data, bl_data = fbx_table_nodes.get(c_dst_uuid, (None, None))
3504 if fbx_data is None or fbx_data.id != fbx_id or fbx_data.props[2] != fbx_type:
3505 # Either `c_dst_uuid` doesn't exist, or it has a different id or type.
3506 continue
3507 connection_key = (c_src_uuid, c_dst_uuid)
3508 if connection_key in seen_connections:
3509 # The connection is a duplicate, skip it.
3510 continue
3511 seen_connections.add(connection_key)
3512 yield c_dst_uuid, fbx_data, bl_data
3514 mesh_to_shapes = {}
3515 for s_uuid, (fbx_sdata, _bl_sdata) in fbx_table_nodes.items():
3516 if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape':
3517 continue
3519 # shape -> blendshapechannel -> blendshape -> mesh.
3520 for bc_uuid, fbx_bcdata, _bl_bcdata in connections_gen(s_uuid, b'Deformer', b'BlendShapeChannel'):
3521 for bs_uuid, _fbx_bsdata, _bl_bsdata in connections_gen(bc_uuid, b'Deformer', b'BlendShape'):
3522 for m_uuid, _fbx_mdata, bl_mdata in connections_gen(bs_uuid, b'Geometry', b'Mesh'):
3523 # Blender meshes are assumed to be already created at this point!
3524 assert(isinstance(bl_mdata, bpy.types.Mesh))
3525 # Group shapes by mesh so that each mesh only needs to be processed once for all of its shape
3526 # keys.
3527 if bl_mdata not in mesh_to_shapes:
3528 # And we have to find all objects using this mesh!
3529 objects = []
3530 for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()):
3531 if o_ctype.props[0] != b'OO':
3532 continue
3533 node = fbx_helper_nodes[o_uuid]
3534 if node:
3535 objects.append(node)
3536 shapes_list = []
3537 mesh_to_shapes[bl_mdata] = (objects, shapes_list)
3538 else:
3539 shapes_list = mesh_to_shapes[bl_mdata][1]
3540 shapes_list.append((bc_uuid, fbx_sdata, fbx_bcdata))
3541 # BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do.
3543 # Iterate through each mesh and create its shape keys
3544 for bl_mdata, (objects, shapes) in mesh_to_shapes.items():
3545 for bc_uuid, keyblocks in blen_read_shapes(fbx_tmpl, shapes, objects, bl_mdata, scene).items():
3546 # keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation.
3547 blend_shape_channels.setdefault(bc_uuid, []).extend(keyblocks)
3548 _(); del _
3550 if settings.use_subsurf:
3551 perfmon.step("FBX import: Subdivision surfaces")
3553 # Look through connections for subsurf in meshes and add it to the parent object
3554 def _():
3555 for fbx_link in fbx_connections.elems:
3556 if fbx_link.props[0] != b'OO':
3557 continue
3558 if fbx_link.props_type[1:3] == b'LL':
3559 c_src, c_dst = fbx_link.props[1:3]
3560 parent = fbx_helper_nodes.get(c_dst)
3561 if parent is None:
3562 continue
3564 child = fbx_helper_nodes.get(c_src)
3565 if child is None:
3566 fbx_sdata, bl_data = fbx_table_nodes.get(c_src, (None, None))
3567 if fbx_sdata.id != b'Geometry':
3568 continue
3570 preview_levels = elem_prop_first(elem_find_first(fbx_sdata, b'PreviewDivisionLevels'))
3571 render_levels = elem_prop_first(elem_find_first(fbx_sdata, b'RenderDivisionLevels'))
3572 if isinstance(preview_levels, int) and isinstance(render_levels, int):
3573 mod = parent.bl_obj.modifiers.new('subsurf', 'SUBSURF')
3574 mod.levels = preview_levels
3575 mod.render_levels = render_levels
3576 boundary_rule = elem_prop_first(elem_find_first(fbx_sdata, b'BoundaryRule'), default=1)
3577 if boundary_rule == 1:
3578 mod.boundary_smooth = "PRESERVE_CORNERS"
3579 else:
3580 mod.boundary_smooth = "ALL"
3582 _(); del _
3584 if use_anim:
3585 perfmon.step("FBX import: Animations...")
3587 # Animation!
3588 def _():
3589 fbx_tmpl_astack = fbx_template_get((b'AnimationStack', b'FbxAnimStack'))
3590 fbx_tmpl_alayer = fbx_template_get((b'AnimationLayer', b'FbxAnimLayer'))
3591 stacks = {}
3593 # AnimationStacks.
3594 for as_uuid, fbx_asitem in fbx_table_nodes.items():
3595 fbx_asdata, _blen_data = fbx_asitem
3596 if fbx_asdata.id != b'AnimationStack' or fbx_asdata.props[2] != b'':
3597 continue
3598 stacks[as_uuid] = (fbx_asitem, {})
3600 # AnimationLayers
3601 # (mixing is completely ignored for now, each layer results in an independent set of actions).
3602 def get_astacks_from_alayer(al_uuid):
3603 for as_uuid, as_ctype in fbx_connection_map.get(al_uuid, ()):
3604 if as_ctype.props[0] != b'OO':
3605 continue
3606 fbx_asdata, _bl_asdata = fbx_table_nodes.get(as_uuid, (None, None))
3607 if (fbx_asdata is None or fbx_asdata.id != b'AnimationStack' or
3608 fbx_asdata.props[2] != b'' or as_uuid not in stacks):
3609 continue
3610 yield as_uuid
3611 for al_uuid, fbx_alitem in fbx_table_nodes.items():
3612 fbx_aldata, _blen_data = fbx_alitem
3613 if fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
3614 continue
3615 for as_uuid in get_astacks_from_alayer(al_uuid):
3616 _fbx_asitem, alayers = stacks[as_uuid]
3617 alayers[al_uuid] = (fbx_alitem, {})
3619 # AnimationCurveNodes (also the ones linked to actual animated data!).
3620 curvenodes = {}
3621 for acn_uuid, fbx_acnitem in fbx_table_nodes.items():
3622 fbx_acndata, _blen_data = fbx_acnitem
3623 if fbx_acndata.id != b'AnimationCurveNode' or fbx_acndata.props[2] != b'':
3624 continue
3625 cnode = curvenodes[acn_uuid] = {}
3626 items = []
3627 for n_uuid, n_ctype in fbx_connection_map.get(acn_uuid, ()):
3628 if n_ctype.props[0] != b'OP':
3629 continue
3630 lnk_prop = n_ctype.props[3]
3631 if lnk_prop in {b'Lcl Translation', b'Lcl Rotation', b'Lcl Scaling'}:
3632 # n_uuid can (????) be linked to root '0' node, instead of a mere object node... See T41712.
3633 ob = fbx_helper_nodes.get(n_uuid, None)
3634 if ob is None or ob.is_root:
3635 continue
3636 items.append((ob, lnk_prop))
3637 elif lnk_prop == b'DeformPercent': # Shape keys.
3638 keyblocks = blend_shape_channels.get(n_uuid, None)
3639 if keyblocks is None:
3640 continue
3641 items += [(kb, lnk_prop) for kb in keyblocks]
3642 elif lnk_prop == b'FocalLength': # Camera lens.
3643 from bpy.types import Camera
3644 fbx_item = fbx_table_nodes.get(n_uuid, None)
3645 if fbx_item is None or not isinstance(fbx_item[1], Camera):
3646 continue
3647 cam = fbx_item[1]
3648 items.append((cam, lnk_prop))
3649 elif lnk_prop == b'FocusDistance': # Camera focus.
3650 from bpy.types import Camera
3651 fbx_item = fbx_table_nodes.get(n_uuid, None)
3652 if fbx_item is None or not isinstance(fbx_item[1], Camera):
3653 continue
3654 cam = fbx_item[1]
3655 items.append((cam, lnk_prop))
3656 elif lnk_prop == b'DiffuseColor':
3657 from bpy.types import Material
3658 fbx_item = fbx_table_nodes.get(n_uuid, None)
3659 if fbx_item is None or not isinstance(fbx_item[1], Material):
3660 continue
3661 mat = fbx_item[1]
3662 items.append((mat, lnk_prop))
3663 print("WARNING! Importing material's animation is not supported for Nodal materials...")
3664 for al_uuid, al_ctype in fbx_connection_map.get(acn_uuid, ()):
3665 if al_ctype.props[0] != b'OO':
3666 continue
3667 fbx_aldata, _blen_aldata = fbx_alitem = fbx_table_nodes.get(al_uuid, (None, None))
3668 if fbx_aldata is None or fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
3669 continue
3670 for as_uuid in get_astacks_from_alayer(al_uuid):
3671 _fbx_alitem, anim_items = stacks[as_uuid][1][al_uuid]
3672 assert(_fbx_alitem == fbx_alitem)
3673 for item, item_prop in items:
3674 # No need to keep curvenode FBX data here, it contains nothing useful for us.
3675 anim_items.setdefault(item, {})[acn_uuid] = (cnode, item_prop)
3677 # AnimationCurves (real animation data).
3678 for ac_uuid, fbx_acitem in fbx_table_nodes.items():
3679 fbx_acdata, _blen_data = fbx_acitem
3680 if fbx_acdata.id != b'AnimationCurve' or fbx_acdata.props[2] != b'':
3681 continue
3682 for acn_uuid, acn_ctype in fbx_connection_map.get(ac_uuid, ()):
3683 if acn_ctype.props[0] != b'OP':
3684 continue
3685 fbx_acndata, _bl_acndata = fbx_table_nodes.get(acn_uuid, (None, None))
3686 if (fbx_acndata is None or fbx_acndata.id != b'AnimationCurveNode' or
3687 fbx_acndata.props[2] != b'' or acn_uuid not in curvenodes):
3688 continue
3689 # Note this is an infamous simplification of the compound props stuff;
3690 # it seems to be standard naming, but we'll probably have to be smarter to handle more exotic files.
3691 channel = {
3692 b'd|X': 0, b'd|Y': 1, b'd|Z': 2,
3693 b'd|DeformPercent': 0,
3694 b'd|FocalLength': 0,
3695 b'd|FocusDistance': 0
3696 }.get(acn_ctype.props[3], None)
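# e.g. a curve connected with property b'd|Y' to a 'Lcl Translation' curve node drives array
# index 1 (the Y component); single-value properties such as b'd|DeformPercent' always use index 0.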
3697 if channel is None:
3698 continue
3699 curvenodes[acn_uuid][ac_uuid] = (fbx_acitem, channel)
3701 # And now that we have sorted all this, apply animations!
3702 blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, settings.anim_offset, global_scale)
3704 _(); del _
3706 perfmon.step("FBX import: Assign materials...")
3708 def _():
3709 # link Materials to Geometry (via Models)
3710 processed_meshes = set()
3711 for helper_uuid, helper_node in fbx_helper_nodes.items():
3712 obj = helper_node.bl_obj
3713 if not obj or obj.type != 'MESH':
3714 continue
3716 # Get the Mesh corresponding to the Geometry used by this Model.
3717 mesh = obj.data
3718 processed_meshes.add(mesh)
3720 # Get the Materials from the Model's connections.
3721 material_connections = connection_filter_reverse(helper_uuid, b'Material')
3722 if not material_connections:
3723 continue
3725 mesh_mats = mesh.materials
3726 num_mesh_mats = len(mesh_mats)
3728 if num_mesh_mats == 0:
3729 # This is the first (or only) model to use this Geometry. This is the most common case when importing.
3730 # All the Materials can trivially be appended to the Mesh's Materials.
3731 mats_to_append = material_connections
3732 mats_to_compare = ()
3733 elif num_mesh_mats == len(material_connections):
3734 # Another Model uses the same Geometry and has already appended its Materials to the Mesh. This is the
3735 # second most common case when importing.
3736 # It's also possible that a Model could share the same Geometry and have the same number of Materials,
3737 # but have different Materials, though this is less common.
3738 # The Model Materials will need to be compared with the Mesh Materials at the same indices to check if
3739 # they are different.
3740 mats_to_append = ()
3741 mats_to_compare = material_connections
3742 else:
3743 # Under the assumption that only used Materials are connected to the Model, the number of Materials of
3744 # each Model using a specific Geometry should be the same, otherwise the Material Indices of the
3745 # Geometry will be out-of-bounds of the Materials of at least one of the Models using that Geometry.
3746 # We wouldn't expect this case to happen, but there's nothing to say it can't.
3747 # We'll handle a differing number of Materials by appending any extra Materials and comparing the rest.
3748 mats_to_append = material_connections[num_mesh_mats:]
3749 mats_to_compare = material_connections[:num_mesh_mats]
3751 for _fbx_lnk_material, material, _fbx_lnk_material_type in mats_to_append:
3752 mesh_mats.append(material)
3754 mats_to_compare_and_slots = zip(mats_to_compare, obj.material_slots)
3755 for (_fbx_lnk_material, material, _fbx_lnk_material_type), mat_slot in mats_to_compare_and_slots:
3756 if material != mat_slot.material:
3757 # Material Slots default to being linked to the Mesh, so a previously processed Object is also using
3758 # this Mesh, but the Mesh uses a different Material for this Material Slot.
3759 # To have a different Material for this Material Slot on this Object only, the Material Slot must be
3760 # linked to the Object rather than the Mesh.
3761 # TODO: add an option to link all materials to objects in Blender instead?
3762 mat_slot.link = 'OBJECT'
3763 mat_slot.material = material
3765 # We have to validate mesh polygons' ma_idx, see #41015!
3766 # Some FBX files seem to have an extra 'default' material which is not defined in the FBX file.
3767 for mesh in processed_meshes:
3768 if mesh.validate_material_indices():
3769 print("WARNING: mesh '%s' had invalid material indices, those were reset to first material" % mesh.name)
3770 _(); del _
3772 perfmon.step("FBX import: Assign textures...")
3774 def _():
3775 material_images = {}
3777 fbx_tmpl = fbx_template_get((b'Material', b'KFbxSurfacePhong'))
3778 # b'KFbxSurfaceLambert'
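# texture_mapping_set() copies the FBX texture placement onto the shader's mapping settings:
# rotation is negated and scaling inverted (the mapping transforms coordinates, not the image),
# and any non-repeating wrap mode is treated as clamping via the 'EXTEND' image extension.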
3780 def texture_mapping_set(fbx_obj, node_texture):
3781 assert(fbx_obj.id == b'Texture')
3783 fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
3784 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
3785 loc = elem_props_get_vector_3d(fbx_props, b'Translation', (0.0, 0.0, 0.0))
3786 rot = tuple(-r for r in elem_props_get_vector_3d(fbx_props, b'Rotation', (0.0, 0.0, 0.0)))
3787 scale = tuple(((1.0 / s) if s != 0.0 else 1.0)
3788 for s in elem_props_get_vector_3d(fbx_props, b'Scaling', (1.0, 1.0, 1.0)))
3789 clamp = (bool(elem_props_get_enum(fbx_props, b'WrapModeU', 0)) or
3790 bool(elem_props_get_enum(fbx_props, b'WrapModeV', 0)))
3792 if (loc == (0.0, 0.0, 0.0) and
3793 rot == (0.0, 0.0, 0.0) and
3794 scale == (1.0, 1.0, 1.0) and
3795 clamp == False):
3796 return
3798 node_texture.translation = loc
3799 node_texture.rotation = rot
3800 node_texture.scale = scale
3801 if clamp:
3802 node_texture.extension = 'EXTEND'
3804 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3805 fbx_obj, blen_data = fbx_item
3806 if fbx_obj.id != b'Material':
3807 continue
3809 material = fbx_table_nodes.get(fbx_uuid, (None, None))[1]
3810 for (fbx_lnk,
3811 image,
3812 fbx_lnk_type) in connection_filter_reverse(fbx_uuid, b'Texture'):
3814 if fbx_lnk_type.props[0] == b'OP':
3815 lnk_type = fbx_lnk_type.props[3]
3817 ma_wrap = nodal_material_wrap_map[material]
3819 if lnk_type in {b'DiffuseColor', b'3dsMax|maps|texmap_diffuse'}:
3820 ma_wrap.base_color_texture.image = image
3821 texture_mapping_set(fbx_lnk, ma_wrap.base_color_texture)
3822 elif lnk_type in {b'SpecularColor', b'SpecularFactor'}:
3823 # Intensity actually, not color...
3824 ma_wrap.specular_texture.image = image
3825 texture_mapping_set(fbx_lnk, ma_wrap.specular_texture)
3826 elif lnk_type in {b'ReflectionColor', b'ReflectionFactor', b'3dsMax|maps|texmap_reflection'}:
3827 # Intensity actually, not color...
3828 ma_wrap.metallic_texture.image = image
3829 texture_mapping_set(fbx_lnk, ma_wrap.metallic_texture)
3830 elif lnk_type in {b'TransparentColor', b'TransparencyFactor'}:
3831 ma_wrap.alpha_texture.image = image
3832 texture_mapping_set(fbx_lnk, ma_wrap.alpha_texture)
3833 if use_alpha_decals:
3834 material_decals.add(material)
3835 elif lnk_type == b'ShininessExponent':
3836 # That is probably reversed compared to expected results? TODO...
3837 ma_wrap.roughness_texture.image = image
3838 texture_mapping_set(fbx_lnk, ma_wrap.roughness_texture)
3839 # XXX, applications abuse bump!
3840 elif lnk_type in {b'NormalMap', b'Bump', b'3dsMax|maps|texmap_bump'}:
3841 ma_wrap.normalmap_texture.image = image
3842 texture_mapping_set(fbx_lnk, ma_wrap.normalmap_texture)
3843 """
3844 elif lnk_type == b'Bump':
3845 # TODO displacement...
3846 """
3847 elif lnk_type in {b'EmissiveColor'}:
3848 ma_wrap.emission_color_texture.image = image
3849 texture_mapping_set(fbx_lnk, ma_wrap.emission_color_texture)
3850 elif lnk_type in {b'EmissiveFactor'}:
3851 ma_wrap.emission_strength_texture.image = image
3852 texture_mapping_set(fbx_lnk, ma_wrap.emission_strength_texture)
3853 else:
3854 print("WARNING: material link %r ignored" % lnk_type)
3856 material_images.setdefault(material, {})[lnk_type] = image
3858 # Check if the diffuse image has an alpha channel,
3859 # if so, use the alpha channel.
3861 # Note: this could be made optional since images may have alpha but be entirely opaque
3862 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3863 fbx_obj, blen_data = fbx_item
3864 if fbx_obj.id != b'Material':
3865 continue
3866 material = fbx_table_nodes.get(fbx_uuid, (None, None))[1]
3867 image = material_images.get(material, {}).get(b'DiffuseColor', None)
3868 # do we have alpha?
3869 if image and image.depth == 32:
3870 if use_alpha_decals:
3871 material_decals.add(material)
3873 ma_wrap = nodal_material_wrap_map[material]
3874 ma_wrap.alpha_texture.use_alpha = True
3875 ma_wrap.alpha_texture.copy_from(ma_wrap.base_color_texture)
3877 # Propagate mapping from diffuse to all other channels which have none defined.
3878 # XXX Commenting for now, I do not really understand the logic here, why should diffuse mapping
3879 # be applied to all others if not defined for them???
3880 # ~ ma_wrap = nodal_material_wrap_map[material]
3881 # ~ ma_wrap.mapping_set_from_diffuse()
3883 _(); del _
3885 perfmon.step("FBX import: Cycles z-offset workaround...")
3887 def _():
3888 # Annoying workaround for cycles having no z-offset
3889 if material_decals and use_alpha_decals:
3890 for fbx_uuid, fbx_item in fbx_table_nodes.items():
3891 fbx_obj, blen_data = fbx_item
3892 if fbx_obj.id != b'Geometry':
3893 continue
3894 if fbx_obj.props[-1] == b'Mesh':
3895 mesh = fbx_item[1]
3897 num_verts = len(mesh.vertices)
3898 if decal_offset != 0.0 and num_verts > 0:
3899 for material in mesh.materials:
3900 if material in material_decals:
3901 blen_norm_dtype = np.single
3902 vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(mesh.attributes)
3903 vnorm = np.empty(num_verts * 3, dtype=blen_norm_dtype)
3904 mesh.vertex_normals.foreach_get("vector", vnorm)
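# Push each vertex outwards along its normal by decal_offset so decal meshes do not
# z-fight with the surface they sit on.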
3906 vcos += vnorm * decal_offset
3908 MESH_ATTRIBUTE_POSITION.foreach_set(mesh.attributes, vcos)
3909 break
3911 for obj in (obj for obj in bpy.data.objects if obj.data == mesh):
3912 obj.visible_shadow = False
3913 _(); del _
3915 perfmon.level_down()
3917 perfmon.level_down("Import finished.")
3918 return {'FINISHED'}