1 # SPDX-License-Identifier: GPL-2.0-or-later
3 # Script copyright (C) Campbell Barton
4 # Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
7 This script imports Wavefront OBJ files into Blender.
10 Run this script from "File->Import" menu and then load the desired OBJ file.
11 Note, This loads mesh objects and materials only, nurbs and curves are not supported.
13 http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
22 from bpy_extras
.io_utils
import unpack_list
23 from bpy_extras
.image_utils
import load_image
24 from bpy_extras
.wm_utils
.progress_report
import ProgressReport
def line_value(line_split):
    """Return the value part of an already-split OBJ/MTL line as one bytes token.

    :param line_split: the raw line split on whitespace (list of bytes tokens,
        first token is the keyword, e.g. b'usemtl').
    :return: None when there is only the keyword and no value; otherwise the
        remaining words re-joined with single spaces (OBJ values such as
        material names may legitimately contain spaces).
    """
    length = len(line_split)
    if length == 1:
        return None
    # b' '.join() of a single token returns that token unchanged, so this
    # covers both the one-word-value and multi-word-value cases.
    return b' '.join(line_split[1:])
# NOTE(review): this block is whitespace-mangled and several original source
# lines are missing from this view (the embedded line numbering skips, e.g.
# 51 -> 54 -> 57). Code below is preserved byte-for-byte; only comments added.
# Purpose (from the docstring and visible fragments): generator that yields the
# individual filenames (bytes) found on a material-library line, handling both
# plain space-separated names ending in `ext` and names wrapped in '"' quotes.
43 def filenames_group_by_ext(line
, ext
):
45 Splits material libraries supporting spaces, so:
46 b'foo bar.mtl baz spam.MTL' -> (b'foo bar.mtl', b'baz spam.MTL')
47 Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
49 # Note that we assume that if there are some " in that line,
50 # then all filenames are properly enclosed within those...
# Quoted-filenames path: scan for '"'-delimited spans. The loop/branch lines
# that drive this scan are missing from this view — TODO confirm against the
# upstream file before editing.
51 start
= line
.find(b
'"') + 1
54 end
= line
.find(b
'"', start
)
57 start
= line
.find(b
'"', end
+ 1) + 1
# Unquoted path: find successive occurrences of `ext` (case-insensitive via
# the lowered copy) and yield each stripped span between matches.
62 line_lower
= line
.lower()
64 while i_prev
!= -1 and i_prev
< len(line
):
65 i
= line_lower
.find(ext
, i_prev
)
# presumably `i` is advanced past the extension when found, and `i_prev`
# updated after the yield — those lines are not visible here; verify.
68 yield line
[i_prev
:i
].strip()
# NOTE(review): whitespace-mangled block with missing original lines (embedded
# numbering skips, e.g. 92 -> 94, 100 -> 104). Code preserved byte-for-byte;
# comments only. Purpose (from docstring + fragments): resolve an image file
# referenced on an MTL map_* line, trying progressively longer space-joined
# tails of the line, caching results in context_imagepath_map, and falling
# back to a placeholder image when nothing loads.
72 def obj_image_load(img_data
, context_imagepath_map
, line
, DIR
, recursive
, relpath
):
74 Mainly uses comprehensiveImageLoad
75 But we try all space-separated items from current line when file is not found with last one
76 (users keep generating/using image files with spaces in a format that does not support them, sigh...)
77 Also tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
78 Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
79 Also corrects img_data (in case filenames with spaces have been split up in multiple entries, see T72148).
81 filepath_parts
= line
.split(b
' ')
# If the line contains '"'-quoted text, treat the quoted span as the single
# filename (the guarding conditional is not visible in this view).
83 start
= line
.find(b
'"') + 1
85 end
= line
.find(b
'"', start
)
87 filepath_parts
= (line
[start
:end
],)
# Try progressively longer suffixes of the split parts as one joined path.
90 for i
in range(-1, -len(filepath_parts
), -1):
91 imagepath
= os
.fsdecode(b
" ".join(filepath_parts
[i
:]))
# Ellipsis is used as the cache-miss sentinel (None is a valid cached value).
92 image
= context_imagepath_map
.get(imagepath
, ...)
94 image
= load_image(imagepath
, DIR
, recursive
=recursive
, relpath
=relpath
)
# Max's exporter replaces spaces with underscores; retry with them restored.
95 if image
is None and "_" in imagepath
:
96 image
= load_image(imagepath
.replace("_", " "), DIR
, recursive
=recursive
, relpath
=relpath
)
98 context_imagepath_map
[imagepath
] = image
100 img_data
.append(imagepath
)
104 img_data
.append(imagepath
)
# Fallback: load the last token as a placeholder so the material keeps a slot.
108 imagepath
= os
.fsdecode(filepath_parts
[-1])
109 image
= load_image(imagepath
, DIR
, recursive
=recursive
, place_holder
=True, relpath
=relpath
)
110 context_imagepath_map
[imagepath
] = image
# NOTE(review): very large whitespace-mangled block; many original lines are
# missing from this view (embedded numbering skips throughout, e.g. the illum
# dispatch and several loop/branch headers). Code preserved byte-for-byte;
# comments only. Purpose (from docstring + fragments): create one Blender
# material per entry in unique_materials, then parse every referenced .mtl
# library and apply colors/textures via node_shader_utils.PrincipledBSDFWrapper.
115 def create_materials(filepath
, relpath
,
116 material_libs
, unique_materials
,
117 use_image_search
, float_func
):
119 Create all the used materials in this obj,
120 assign colors and images to the materials from all referenced material libs
122 from math
import sqrt
123 from bpy_extras
import node_shader_utils
125 DIR
= os
.path
.dirname(filepath
)
126 context_material_vars
= set()
128 # Don't load the same image multiple times
129 context_imagepath_map
= {}
131 nodal_material_wrap_map
= {}
# Nested helper: applies one map_* texture line from the .mtl to the wrapped
# material; `type` is the map kind tag ('Kd', 'Ks', 'Bump', ...).
133 def load_material_image(blender_material
, mat_wrap
, context_material_name
, img_data
, line
, type):
135 Set textures defined in .mtl file.
139 # Absolute path - c:\.. etc would work here
140 image
= obj_image_load(img_data
, context_imagepath_map
, line
, DIR
, use_image_search
, relpath
)
# Parse '-xxx value...' map options from the tokens preceding the filename.
# (Option-accumulation branch lines are partly missing in this view.)
143 for token
in img_data
[:-1]:
144 if token
.startswith(b
'-') and token
[1:].isalpha():
146 map_options
[curr_token
[0]] = curr_token
[1:]
148 curr_token
.append(token
)
150 map_options
[curr_token
[0]] = curr_token
[1:]
152 map_offset
= map_options
.get(b
'-o')
153 map_scale
= map_options
.get(b
'-s')
154 if map_offset
is not None:
155 map_offset
= tuple(map(float_func
, map_offset
))
156 if map_scale
is not None:
157 map_scale
= tuple(map(float_func
, map_scale
))
# Tiny helper: assign image/texcoords/offset/scale onto a wrapper tex node.
159 def _generic_tex_set(nodetex
, image
, texcoords
, translation
, scale
):
160 nodetex
.image
= image
161 nodetex
.texcoords
= texcoords
162 if translation
is not None:
163 nodetex
.translation
= translation
164 if scale
is not None:
165 nodetex
.scale
= scale
167 # Adds textures for materials (rendering)
169 _generic_tex_set(mat_wrap
.base_color_texture
, image
, 'UV', map_offset
, map_scale
)
173 print("WARNING, currently unsupported ambient texture, skipped.")
176 _generic_tex_set(mat_wrap
.specular_texture
, image
, 'UV', map_offset
, map_scale
)
179 _generic_tex_set(mat_wrap
.emission_color_texture
, image
, 'UV', map_offset
, map_scale
)
180 mat_wrap
.emission_strength
= 1.0
183 bump_mult
= map_options
.get(b
'-bm')
184 bump_mult
= float(bump_mult
[0]) if (bump_mult
and len(bump_mult
[0]) > 1) else 1.0
185 mat_wrap
.normalmap_strength_set(bump_mult
)
187 _generic_tex_set(mat_wrap
.normalmap_texture
, image
, 'UV', map_offset
, map_scale
)
190 _generic_tex_set(mat_wrap
.alpha_texture
, image
, 'UV', map_offset
, map_scale
)
194 print("WARNING, currently unsupported displacement texture, skipped.")
195 # ~ mat_wrap.bump_image_set(image)
196 # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
199 map_type
= map_options
.get(b
'-type')
200 if map_type
and map_type
!= [b
'sphere']:
201 print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
202 "" % ' '.join(i
.decode() for i
in map_type
))
204 _generic_tex_set(mat_wrap
.base_color_texture
, image
, 'Reflection', map_offset
, map_scale
)
205 mat_wrap
.base_color_texture
.projection
= 'SPHERE'
208 raise Exception("invalid type %r" % type)
# Nested helper: once a material's MTL lines are fully read, translate the
# accumulated flags (do_highlight/do_reflection/...) and spec color into the
# Principled BSDF parameters, honoring values explicitly set via
# context_material_vars. Several of its branch headers are missing here.
210 def finalize_material(context_material
, context_material_vars
, spec_colors
,
211 do_highlight
, do_reflection
, do_transparency
, do_glass
):
212 # Finalize previous mat, if any.
214 if "specular" in context_material_vars
:
215 # XXX This is highly approximated, not sure whether we can do better...
216 # TODO: Find a way to guesstimate best value from diffuse color...
217 # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
218 # from some grey), and apply the the proportion between those two as tint factor?
219 spec
= sum(spec_colors
) / 3.0
220 # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
221 # ~ diff = sum(context_mat_wrap.base_color) / 3.0
222 # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
223 # ~ tint = min(1.0, spec_var / diff_var)
224 context_mat_wrap
.specular
= spec
225 context_mat_wrap
.specular_tint
= 0.0
226 if "roughness" not in context_material_vars
:
227 context_mat_wrap
.roughness
= 0.0
229 # FIXME, how else to use this?
231 if "specular" not in context_material_vars
:
232 context_mat_wrap
.specular
= 1.0
233 if "roughness" not in context_material_vars
:
234 context_mat_wrap
.roughness
= 0.0
236 if "specular" not in context_material_vars
:
237 context_mat_wrap
.specular
= 0.0
238 if "roughness" not in context_material_vars
:
239 context_mat_wrap
.roughness
= 1.0
242 if "metallic" not in context_material_vars
:
243 context_mat_wrap
.metallic
= 1.0
245 # since we are (ab)using ambient term for metallic (which can be non-zero)
246 context_mat_wrap
.metallic
= 0.0
249 if "ior" not in context_material_vars
:
250 context_mat_wrap
.ior
= 1.0
251 if "alpha" not in context_material_vars
:
252 context_mat_wrap
.alpha
= 1.0
254 context_material
.blend_method
= 'BLEND'
257 if "ior" not in context_material_vars
:
258 context_mat_wrap
.ior
= 1.5
260 # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
261 temp_mtl
= os
.path
.splitext((os
.path
.basename(filepath
)))[0] + ".mtl"
262 if os
.path
.exists(os
.path
.join(DIR
, temp_mtl
)):
263 material_libs
.add(temp_mtl
)
266 # Create new materials
267 for name
in unique_materials
: # .keys()
268 ma_name
= "Default OBJ" if name
is None else name
.decode('utf-8', "replace")
269 ma
= unique_materials
[name
] = bpy
.data
.materials
.new(ma_name
)
270 ma_wrap
= node_shader_utils
.PrincipledBSDFWrapper(ma
, is_readonly
=False)
271 nodal_material_wrap_map
[ma
] = ma_wrap
272 ma_wrap
.use_nodes
= True
# Parse each referenced .mtl library in deterministic (sorted) order.
274 for libname
in sorted(material_libs
):
276 mtlpath
= os
.path
.join(DIR
, libname
)
277 if not os
.path
.exists(mtlpath
):
278 print("\tMaterial not found MTL: %r" % mtlpath
)
280 # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
281 # (i.e. automatically controlled by other parameters).
283 do_reflection
= False
284 do_transparency
= False
286 spec_colors
= [0.0, 0.0, 0.0]
288 # print('\t\tloading mtl: %e' % mtlpath)
289 context_material
= None
290 context_mat_wrap
= None
# NOTE(review): file handle opened without `with`; the matching close() is not
# visible in this view — confirm it is closed after the parse loop.
291 mtl
= open(mtlpath
, 'rb')
292 for line
in mtl
: # .readlines():
294 if not line
or line
.startswith(b
'#'):
297 line_split
= line
.split()
298 line_id
= line_split
[0].lower()
300 if line_id
== b
'newmtl':
301 # Finalize previous mat, if any.
302 finalize_material(context_material
, context_material_vars
, spec_colors
,
303 do_highlight
, do_reflection
, do_transparency
, do_glass
)
305 context_material_name
= line_value(line_split
)
306 context_material
= unique_materials
.get(context_material_name
)
307 if context_material
is not None:
308 context_mat_wrap
= nodal_material_wrap_map
[context_material
]
309 context_material_vars
.clear()
311 spec_colors
[:] = [0.0, 0.0, 0.0]
313 do_reflection
= False
314 do_transparency
= False
318 elif context_material
:
# Nested helper: OBJ color values may have 1, 2 or 3 components; normalize to
# an RGB triple (the dispatching length checks are missing from this view).
319 def _get_colors(line_split
):
320 # OBJ 'allows' one or two components values, treat single component as greyscale, and two as blue = 0.0.
323 return [float_func(line_split
[1])] * 3
325 return [float_func(line_split
[1]), float_func(line_split
[2]), 0.0]
327 return [float_func(line_split
[1]), float_func(line_split
[2]), float_func(line_split
[3])]
329 # we need to make a material to assign properties to it.
331 refl
= sum(_get_colors(line_split
)) / 3.0
332 context_mat_wrap
.metallic
= refl
333 context_material_vars
.add("metallic")
334 elif line_id
== b
'kd':
335 context_mat_wrap
.base_color
= _get_colors(line_split
)
336 elif line_id
== b
'ks':
337 spec_colors
[:] = _get_colors(line_split
)
338 context_material_vars
.add("specular")
339 elif line_id
== b
'ke':
340 # We cannot set context_material.emit right now, we need final diffuse color as well for this.
341 # XXX Unsupported currently
342 context_mat_wrap
.emission_color
= _get_colors(line_split
)
343 context_mat_wrap
.emission_strength
= 1.0
344 elif line_id
== b
'ns':
345 # XXX Totally empirical conversion, trying to adapt it
346 # (from 0.0 - 1000.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
347 val
= max(0.0, min(1000.0, float_func(line_split
[1])))
348 context_mat_wrap
.roughness
= 1.0 - (sqrt(val
/ 1000))
349 context_material_vars
.add("roughness")
350 elif line_id
== b
'ni': # Refraction index (between 0.001 and 10).
351 context_mat_wrap
.ior
= float_func(line_split
[1])
352 context_material_vars
.add("ior")
353 elif line_id
== b
'd': # dissolve (transparency)
354 context_mat_wrap
.alpha
= float_func(line_split
[1])
355 context_material_vars
.add("alpha")
356 elif line_id
== b
'tr': # translucency
357 print("WARNING, currently unsupported 'tr' translucency option, skipped.")
358 elif line_id
== b
'tf':
359 # rgb, filter color, blender has no support for this.
360 print("WARNING, currently unsupported 'tf' filter color option, skipped.")
361 elif line_id
== b
'illum':
362 # Some MTL files incorrectly use a float for this value, see T60135.
363 illum
= any_number_as_int(line_split
[1])
# NOTE(review): the illum-mode dispatch (the per-value if/elif headers) is
# missing from this view; only the mode comments and flag assignments remain.
365 # inline comments are from the spec, v4.2
367 # Color on and Ambient off
368 print("WARNING, Principled BSDF shader does not support illumination 0 mode "
369 "(colors with no ambient), skipped.")
371 # Color on and Ambient on
377 # Reflection on and Ray trace on
380 # Transparency: Glass on
381 # Reflection: Ray trace on
382 do_transparency
= True
386 # Reflection: Fresnel on and Ray trace on
389 # Transparency: Refraction on
390 # Reflection: Fresnel off and Ray trace on
391 do_transparency
= True
394 # Transparency: Refraction on
395 # Reflection: Fresnel on and Ray trace on
396 do_transparency
= True
399 # Reflection on and Ray trace off
402 # Transparency: Glass on
403 # Reflection: Ray trace off
404 do_transparency
= True
405 do_reflection
= False
408 # Casts shadows onto invisible surfaces
409 print("WARNING, Principled BSDF shader does not support illumination 10 mode "
410 "(cast shadows on invisible surfaces), skipped.")
413 elif line_id
== b
'map_ka':
414 img_data
= line
.split()[1:]
416 load_material_image(context_material
, context_mat_wrap
,
417 context_material_name
, img_data
, line
, 'Ka')
418 elif line_id
== b
'map_ks':
419 img_data
= line
.split()[1:]
421 load_material_image(context_material
, context_mat_wrap
,
422 context_material_name
, img_data
, line
, 'Ks')
423 elif line_id
== b
'map_kd':
424 img_data
= line
.split()[1:]
426 load_material_image(context_material
, context_mat_wrap
,
427 context_material_name
, img_data
, line
, 'Kd')
428 elif line_id
== b
'map_ke':
429 img_data
= line
.split()[1:]
431 load_material_image(context_material
, context_mat_wrap
,
432 context_material_name
, img_data
, line
, 'Ke')
433 elif line_id
in {b
'map_bump', b
'bump'}: # 'bump' is incorrect but some files use it.
434 img_data
= line
.split()[1:]
436 load_material_image(context_material
, context_mat_wrap
,
437 context_material_name
, img_data
, line
, 'Bump')
438 elif line_id
in {b
'map_d', b
'map_tr'}: # Alpha map - Dissolve
439 img_data
= line
.split()[1:]
441 load_material_image(context_material
, context_mat_wrap
,
442 context_material_name
, img_data
, line
, 'D')
444 elif line_id
in {b
'map_disp', b
'disp'}: # displacementmap
445 img_data
= line
.split()[1:]
447 load_material_image(context_material
, context_mat_wrap
,
448 context_material_name
, img_data
, line
, 'disp')
450 elif line_id
in {b
'map_refl', b
'refl'}: # reflectionmap
451 img_data
= line
.split()[1:]
453 load_material_image(context_material
, context_mat_wrap
,
454 context_material_name
, img_data
, line
, 'refl')
456 print("WARNING: %r:%r (ignored)" % (filepath
, line
))
458 # Finalize last mat, if any.
459 finalize_material(context_material
, context_material_vars
, spec_colors
,
460 do_highlight
, do_reflection
, do_transparency
, do_glass
)
def face_is_edge(face):
    """Return True when this (temp, working) face datum is an edge/polyline, not a real polygon."""
    loc_indices, nor_indices = face[0], face[1]
    # A polyline carries exactly one normal index; a plain edge has two vertex indices.
    return len(nor_indices) == 1 or len(loc_indices) == 2
# NOTE(review): whitespace-mangled block; several original lines are missing
# from this view (e.g. the per-face loop header and the key-selection lines
# around 494-510). Code preserved byte-for-byte; comments only.
# Purpose (from docstring + fragments): partition the parsed OBJ data into
# per-object/per-group chunks, remapping global vertex indices to local ones.
471 def split_mesh(verts_loc
, faces
, unique_materials
, filepath
, SPLIT_OB_OR_GROUP
):
473 Takes vert_loc and faces, and separates into multiple sets of
474 (verts_loc, faces, unique_materials, dataname)
477 filename
= os
.path
.splitext((os
.path
.basename(filepath
)))[0]
# No splitting requested (or no faces): return everything as a single chunk.
479 if not SPLIT_OB_OR_GROUP
or not faces
:
480 use_verts_nor
= any(f
[1] for f
in faces
)
481 use_verts_tex
= any(f
[2] for f
in faces
)
482 # use the filename for the object name since we aren't chopping up the mesh.
483 return [(verts_loc
, faces
, unique_materials
, filename
, use_verts_nor
, use_verts_tex
)]
# Nested helper: turn a split key (None / bytes / tuple of bytes) into a name.
485 def key_to_name(key
):
486 # if the key is a tuple, join it to make a string
488 return filename
# assume its a string. make sure this is true if the splitting code is changed
489 elif isinstance(key
, bytes
):
490 return key
.decode('utf-8', 'replace')
492 return "_".join(k
.decode('utf-8', 'replace') for k
in key
)
494 # Return a key that makes the faces unique.
497 oldkey
= -1 # initialize to a value that will never match the key
# Per-face loop (its `for face in faces:` header and the unpacking tail are
# missing from this view); each face tuple is destructured below.
500 (face_vert_loc_indices
,
501 face_vert_nor_indices
,
502 face_vert_tex_indices
,
504 _context_smooth_group
,
506 _face_invalid_blenpoly
,
508 key
= context_object_key
511 # Check the key has changed.
512 (verts_split
, faces_split
, unique_materials_split
, vert_remap
,
513 use_verts_nor
, use_verts_tex
) = face_split_dict
.setdefault(key
, ([], [], {}, {}, [], []))
516 if not face_is_edge(face
):
517 if not use_verts_nor
and face_vert_nor_indices
:
518 use_verts_nor
.append(True)
520 if not use_verts_tex
and face_vert_tex_indices
:
521 use_verts_tex
.append(True)
523 # Remap verts to new vert list and add where needed
524 for loop_idx
, vert_idx
in enumerate(face_vert_loc_indices
):
525 map_index
= vert_remap
.get(vert_idx
)
526 if map_index
is None:
527 map_index
= len(verts_split
)
528 vert_remap
[vert_idx
] = map_index
# set the new remapped index so we only add once and can reference next time.
529 verts_split
.append(verts_loc
[vert_idx
]) # add the vert to the local verts
531 face_vert_loc_indices
[loop_idx
] = map_index
# remap to the local index
533 if context_material
not in unique_materials_split
:
534 unique_materials_split
[context_material
] = unique_materials
[context_material
]
536 faces_split
.append(face
)
538 # remove one of the items and reorder
539 return [(verts_split
, faces_split
, unique_materials_split
, key_to_name(key
), bool(use_vnor
), bool(use_vtex
))
540 for key
, (verts_split
, faces_split
, unique_materials_split
, _
, use_vnor
, use_vtex
)
541 in face_split_dict
.items()]
# NOTE(review): very large whitespace-mangled block; the signature tail and
# many interior lines are missing from this view (embedded numbering skips,
# e.g. 544 -> 551 -> 556, and the bmesh un-tessellation setup around 730-737).
# Code preserved byte-for-byte; comments only. Purpose (from docstring +
# fragments): build a bpy mesh from the parsed data — faces, edges, smooth
# groups, ngon tessellation/un-tessellation, materials, UVs, custom normals,
# vertex groups — and append the new object to new_objects.
544 def create_mesh(new_objects
,
551 unique_smooth_groups
,
556 Takes all the data gathered and generates a mesh, adding the new object to new_objects
557 deals with ngons, sharp edges and assigning materials
560 if unique_smooth_groups
:
562 smooth_group_users
= {context_smooth_group
: {} for context_smooth_group
in unique_smooth_groups
.keys()}
563 context_smooth_group_old
= -1
565 fgon_edges
= set() # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
569 context_object_key
= None
571 # reverse loop through face indices
572 for f_idx
in range(len(faces
) - 1, -1, -1):
575 (face_vert_loc_indices
,
576 face_vert_nor_indices
,
577 face_vert_tex_indices
,
579 context_smooth_group
,
581 face_invalid_blenpoly
,
584 len_face_vert_loc_indices
= len(face_vert_loc_indices
)
586 if len_face_vert_loc_indices
== 1:
587 faces
.pop(f_idx
) # can't add single vert faces
589 # Face with a single item in face_vert_nor_indices is actually a polyline!
590 elif face_is_edge(face
):
592 edges
.extend((face_vert_loc_indices
[i
], face_vert_loc_indices
[i
+ 1])
593 for i
in range(len_face_vert_loc_indices
- 1))
# Count how many faces use each edge inside the current smooth group; edges
# used exactly once later become sharp-group boundaries.
598 if unique_smooth_groups
and context_smooth_group
:
599 # Is a part of of a smooth group and is a face
600 if context_smooth_group_old
is not context_smooth_group
:
601 edge_dict
= smooth_group_users
[context_smooth_group
]
602 context_smooth_group_old
= context_smooth_group
604 prev_vidx
= face_vert_loc_indices
[-1]
605 for vidx
in face_vert_loc_indices
:
606 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
608 edge_dict
[edge_key
] = edge_dict
.get(edge_key
, 0) + 1
610 # NGons into triangles
611 if face_invalid_blenpoly
:
612 # ignore triangles with invalid indices
613 if len(face_vert_loc_indices
) > 3:
614 from bpy_extras
.mesh_utils
import ngon_tessellate
615 ngon_face_indices
= ngon_tessellate(verts_loc
, face_vert_loc_indices
, debug_print
=bpy
.app
.debug
)
616 faces
.extend([([face_vert_loc_indices
[ngon
[0]],
617 face_vert_loc_indices
[ngon
[1]],
618 face_vert_loc_indices
[ngon
[2]],
620 [face_vert_nor_indices
[ngon
[0]],
621 face_vert_nor_indices
[ngon
[1]],
622 face_vert_nor_indices
[ngon
[2]],
623 ] if face_vert_nor_indices
else [],
624 [face_vert_tex_indices
[ngon
[0]],
625 face_vert_tex_indices
[ngon
[1]],
626 face_vert_tex_indices
[ngon
[2]],
627 ] if face_vert_tex_indices
else [],
629 context_smooth_group
,
633 for ngon
in ngon_face_indices
]
635 tot_loops
+= 3 * len(ngon_face_indices
)
637 # edges to make ngons
638 if len(ngon_face_indices
) > 1:
640 for ngon
in ngon_face_indices
:
641 prev_vidx
= face_vert_loc_indices
[ngon
[-1]]
643 vidx
= face_vert_loc_indices
[ngidx
]
644 if vidx
== prev_vidx
:
645 continue # broken OBJ... Just skip.
646 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
648 if edge_key
in edge_users
:
649 fgon_edges
.add(edge_key
)
651 edge_users
.add(edge_key
)
655 tot_loops
+= len_face_vert_loc_indices
658 if unique_smooth_groups
:
659 for edge_dict
in smooth_group_users
.values():
660 for key
, users
in edge_dict
.items():
661 if users
== 1: # This edge is on the boundary of a group
664 # map the material names to an index
665 material_mapping
= {name
: i
for i
, name
in enumerate(unique_materials
)} # enumerate over unique_materials keys()
667 materials
= [None] * len(unique_materials
)
669 for name
, index
in material_mapping
.items():
670 materials
[index
] = unique_materials
[name
]
672 me
= bpy
.data
.meshes
.new(dataname
)
674 # make sure the list isn't too big
675 for material
in materials
:
676 me
.materials
.append(material
)
# Bulk-fill mesh geometry via foreach_set for speed.
678 me
.vertices
.add(len(verts_loc
))
679 me
.loops
.add(tot_loops
)
680 me
.polygons
.add(len(faces
))
682 # verts_loc is a list of (x, y, z) tuples
683 me
.vertices
.foreach_set("co", unpack_list(verts_loc
))
685 loops_vert_idx
= tuple(vidx
for (face_vert_loc_indices
, _
, _
, _
, _
, _
, _
) in faces
for vidx
in face_vert_loc_indices
)
686 faces_loop_start
= []
689 face_vert_loc_indices
= f
[0]
690 nbr_vidx
= len(face_vert_loc_indices
)
691 faces_loop_start
.append(lidx
)
694 me
.loops
.foreach_set("vertex_index", loops_vert_idx
)
695 me
.polygons
.foreach_set("loop_start", faces_loop_start
)
697 faces_ma_index
= tuple(material_mapping
[context_material
] for (_
, _
, _
, context_material
, _
, _
, _
) in faces
)
698 me
.polygons
.foreach_set("material_index", faces_ma_index
)
700 faces_use_smooth
= tuple(bool(context_smooth_group
) for (_
, _
, _
, _
, context_smooth_group
, _
, _
) in faces
)
701 me
.polygons
.foreach_set("use_smooth", faces_use_smooth
)
703 if verts_nor
and me
.loops
:
704 # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
705 # we can only set custom lnors *after* calling it.
706 me
.create_normals_split()
707 loops_nor
= tuple(no
for (_
, face_vert_nor_indices
, _
, _
, _
, _
, _
) in faces
708 for face_noidx
in face_vert_nor_indices
709 for no
in verts_nor
[face_noidx
])
710 me
.loops
.foreach_set("normal", loops_nor
)
712 if verts_tex
and me
.polygons
:
713 # Some files Do not explicitly write the 'v' value when it's 0.0, see T68249...
714 verts_tex
= [uv
if len(uv
) == 2 else uv
+ [0.0] for uv
in verts_tex
]
715 me
.uv_layers
.new(do_init
=False)
716 loops_uv
= tuple(uv
for (_
, _
, face_vert_tex_indices
, _
, _
, _
, _
) in faces
717 for face_uvidx
in face_vert_tex_indices
718 for uv
in verts_tex
[face_uvidx
])
719 me
.uv_layers
[0].data
.foreach_set("uv", loops_uv
)
721 use_edges
= use_edges
and bool(edges
)
723 me
.edges
.add(len(edges
))
724 # edges should be a list of (a, b) tuples
725 me
.edges
.foreach_set("vertices", unpack_list(edges
))
727 me
.validate(clean_customdata
=False) # *Very* important to not remove lnors here!
728 me
.update(calc_edges
=use_edges
, calc_edges_loose
=use_edges
)
730 # Un-tessellate as much as possible, in case we had to triangulate some ngons...
# NOTE(review): the bmesh setup (bm creation, `verts`, `get` binding) for this
# dissolve step is missing from this view — confirm against upstream.
737 edges
= [get((verts
[vidx1
], verts
[vidx2
])) for vidx1
, vidx2
in fgon_edges
]
739 bmesh
.ops
.dissolve_edges(bm
, edges
=edges
, use_verts
=False)
741 # Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
743 traceback
.print_exc()
748 # XXX If validate changes the geometry, this is likely to be broken...
749 if unique_smooth_groups
and sharp_edges
:
751 if e
.key
in sharp_edges
:
752 e
.use_edge_sharp
= True
# Re-apply the temporary loop normals gathered above as real custom split
# normals, now that validate() has run.
755 clnors
= array
.array('f', [0.0] * (len(me
.loops
) * 3))
756 me
.loops
.foreach_get("normal", clnors
)
758 if not unique_smooth_groups
:
759 me
.polygons
.foreach_set("use_smooth", [True] * len(me
.polygons
))
761 me
.normals_split_custom_set(tuple(zip(*(iter(clnors
),) * 3)))
762 me
.use_auto_smooth
= True
764 ob
= bpy
.data
.objects
.new(me
.name
, me
)
765 new_objects
.append(ob
)
767 # Create the vertex groups. No need to have the flag passed here since we test for the
768 # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
769 # the following test will never run
770 for group_name
, group_indices
in vertex_groups
.items():
771 group
= ob
.vertex_groups
.new(name
=group_name
.decode('utf-8', "replace"))
772 group
.add(group_indices
, 1.0, 'REPLACE')
# NOTE(review): whitespace-mangled block with missing interior lines (embedded
# numbering skips, e.g. 785 -> 788, 826 -> 831). Code preserved byte-for-byte;
# comments only. Purpose (from docstring + fragments): build a Blender NURBS
# curve object from the parsed `curv`/`parm`/`deg` OBJ statements and append
# it to new_objects. Only bspline curves (not surfaces) are supported.
775 def create_nurbs(context_nurbs
, vert_loc
, new_objects
):
777 Add nurbs object to blender, only support one type at the moment
779 deg
= context_nurbs
.get(b
'deg', (3,))
780 curv_range
= context_nurbs
.get(b
'curv_range')
781 curv_idx
= context_nurbs
.get(b
'curv_idx', [])
782 parm_u
= context_nurbs
.get(b
'parm_u', [])
783 parm_v
= context_nurbs
.get(b
'parm_v', [])
784 name
= context_nurbs
.get(b
'name', b
'ObjNurb')
785 cstype
= context_nurbs
.get(b
'cstype')
788 print('\tWarning, cstype not found')
790 if cstype
!= b
'bspline':
791 print('\tWarning, cstype is not supported (only bspline)')
794 print('\tWarning, curv argument empty or not set')
796 if len(deg
) > 1 or parm_v
:
797 print('\tWarning, surfaces not supported')
800 cu
= bpy
.data
.curves
.new(name
.decode('utf-8', "replace"), 'CURVE')
803 nu
= cu
.splines
.new('NURBS')
804 nu
.points
.add(len(curv_idx
) - 1) # a point is added to start with
# Control points are (x, y, z, w); weight is hard-coded to 1.0 here.
805 nu
.points
.foreach_set("co", [co_axis
for vt_idx
in curv_idx
for co_axis
in (vert_loc
[vt_idx
] + [1.0])])
807 nu
.order_u
= deg
[0] + 1
809 # get for endpoint flag from the weighting
810 if curv_range
and len(parm_u
) > deg
[0] + 1:
812 for i
in range(deg
[0] + 1):
814 if abs(parm_u
[i
] - curv_range
[0]) > 0.0001:
818 if abs(parm_u
[-(i
+ 1)] - curv_range
[1]) > 0.0001:
826 nu
.use_endpoint_u
= True
# NOTE(review): the fragment below is legacy Python-2 style code (`xrange`,
# `print` statement in a comment) — in the upstream file this is a
# commented-out cyclic-detection block; confirm before treating it as live.
831 if len(parm_u) > deg[0]+1:
832 for i in xrange(deg[0]+1):
833 #print curv_idx[i], curv_idx[-(i+1)]
835 if curv_idx[i]==curv_idx[-(i+1)]:
840 nu.use_cyclic_u = True
843 ob
= bpy
.data
.objects
.new(name
.decode('utf-8', "replace"), cu
)
845 new_objects
.append(ob
)
def strip_slash(line_split):
    """Handle OBJ line continuations: strip a trailing backslash in place.

    :param line_split: the whitespace-split line (list of bytes tokens);
        mutated in place when a continuation backslash is found.
    :return: True when the line ends with '\\' (i.e. continues on the next
        physical line), False otherwise.

    NOTE: the visible source was missing the else-branch and return
    statements; they are restored here so both mutation paths report the
    continuation correctly.
    """
    if line_split[-1][-1] == 92:  # 92 == ord('\\')
        if len(line_split[-1]) == 1:
            line_split.pop()  # the backslash was its own token: drop it
        else:
            # The backslash is glued to the last token: trim it off.
            line_split[-1] = line_split[-1][:-1]
        return True
    return False
def get_float_func(filepath):
    """Pick the bytes->float conversion function for this OBJ file.

    Some exporters write decimal commas instead of decimal points. Scan the
    file for the first vertex-ish line ('v', 'vn', 'vt'): if it contains a
    comma, return a wrapper that replaces b',' with b'.' before float();
    if it contains a dot (or no such line is found at all, i.e. all values
    are ints), plain float() is sufficient.

    Fix over the visible original: the file handle was opened without a
    context manager and could leak on the early-return paths; `with` now
    guarantees it is closed.
    """
    with open(filepath, 'rb') as f:
        for line in f:
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda s: float(s.replace(b',', b'.'))
                elif b'.' in line:
                    return float
    # In case all vert values were ints.
    return float
def any_number_as_int(svalue):
    """Parse a bytes value that may be an int or a float — possibly with a
    decimal comma — into an int, truncating any fractional part.

    Some MTL files incorrectly write a float where an int is expected
    (e.g. the 'illum' value, see T60135), and some locales emit commas.

    :param svalue: the raw bytes token from the file.
    :return: the truncated integer value.
    """
    # Normalize decimal commas; replace() is a harmless no-op when absent,
    # so no guarding membership test is needed.
    svalue = svalue.replace(b',', b'.')
    return int(float(svalue))
888 global_clamp_size
=0.0,
889 use_smooth_groups
=True,
891 use_split_objects
=True,
892 use_split_groups
=False,
893 use_image_search
=True,
894 use_groups_as_vgroups
=False,
899 Called by the user interface or another script.
900 load_obj(path) - should give acceptable results.
901 This function passes the file and sends the data off
902 to be split into objects and then converted into mesh objects
904 def unique_name(existing_names
, name_orig
):
906 if name_orig
is None:
907 name_orig
= b
"ObjObject"
909 while name
in existing_names
:
910 name
= b
"%s.%03d" % (name_orig
, i
)
912 existing_names
.add(name
)
915 def handle_vec(line_start
, context_multi_line
, line_split
, tag
, data
, vec
, vec_len
):
916 ret_context_multi_line
= tag
if strip_slash(line_split
) else b
''
917 if line_start
== tag
:
918 vec
[:] = [float_func(v
) for v
in line_split
[1:]]
919 elif context_multi_line
== tag
:
920 vec
+= [float_func(v
) for v
in line_split
]
921 if not ret_context_multi_line
:
922 data
.append(tuple(vec
[:vec_len
]))
923 return ret_context_multi_line
925 def create_face(context_material
, context_smooth_group
, context_object_key
):
926 face_vert_loc_indices
= []
927 face_vert_nor_indices
= []
928 face_vert_tex_indices
= []
930 face_vert_loc_indices
,
931 face_vert_nor_indices
,
932 face_vert_tex_indices
,
934 context_smooth_group
,
936 [], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
939 with
ProgressReport(context
.window_manager
) as progress
:
940 progress
.enter_substeps(1, "Importing OBJ %r..." % filepath
)
942 if global_matrix
is None:
943 global_matrix
= mathutils
.Matrix()
945 if use_split_objects
or use_split_groups
:
946 use_groups_as_vgroups
= False
951 faces
= [] # tuples of the faces
952 material_libs
= set() # filenames to material libs this OBJ uses
953 vertex_groups
= {} # when use_groups_as_vgroups is true
955 # Get the string to float conversion func for this file- is 'float' for almost all files.
956 float_func
= get_float_func(filepath
)
959 context_material
= None
960 context_smooth_group
= None
961 context_object_key
= None
962 context_object_obpart
= None
963 context_vgroup
= None
965 objects_names
= set()
970 context_parm
= b
'' # used by nurbs too but could be used elsewhere
972 # Until we can use sets
973 use_default_material
= False
974 unique_materials
= {}
975 unique_smooth_groups
= {}
976 # unique_obects= {} - no use for this variable since the objects are stored in the face.
978 # when there are faces that end with \
979 # it means they are multiline-
980 # since we use xreadline we can't skip to the next line
981 # so we need to know whether
982 context_multi_line
= b
''
984 # Per-face handling data.
985 face_vert_loc_indices
= None
986 face_vert_nor_indices
= None
987 face_vert_tex_indices
= None
988 verts_loc_len
= verts_nor_len
= verts_tex_len
= 0
989 face_items_usage
= set()
990 face_invalid_blenpoly
= None
995 quick_vert_failures
= 0
996 skip_quick_vert
= False
998 progress
.enter_substeps(3, "Parsing OBJ file...")
999 with
open(filepath
, 'rb') as f
:
1001 line_split
= line
.split()
1006 line_start
= line_split
[0] # we compare with this a _lot_
1008 if len(line_split
) == 1 and not context_multi_line
and line_start
!= b
'end':
1009 print("WARNING, skipping malformatted line: %s" % line
.decode('UTF-8', 'replace').rstrip())
1012 # Handling vertex data are pretty similar, factorize that.
1013 # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
1014 # and only fallback to full multi-line parsing when needed, this gives significant speed-up
1015 # (~40% on affected code).
1016 if line_start
== b
'v':
1017 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, not skip_quick_vert
1018 elif line_start
== b
'vn':
1019 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, not skip_quick_vert
1020 elif line_start
== b
'vt':
1021 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, not skip_quick_vert
1022 elif context_multi_line
== b
'v':
1023 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, False
1024 elif context_multi_line
== b
'vn':
1025 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, False
1026 elif context_multi_line
== b
'vt':
1027 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, False
1034 vdata
.append(list(map(float_func
, line_split
[1:vdata_len
+ 1])))
1036 do_quick_vert
= False
1037 # In case we get too many failures on quick parsing, force fallback to full multi-line one.
1038 # Exception handling can become costly...
1039 quick_vert_failures
+= 1
1040 if quick_vert_failures
> 10000:
1041 skip_quick_vert
= True
1042 if not do_quick_vert
:
1043 context_multi_line
= handle_vec(line_start
, context_multi_line
, line_split
,
1044 context_multi_line
or line_start
,
1045 vdata
, vec
, vdata_len
)
1047 elif line_start
== b
'f' or context_multi_line
== b
'f':
1048 if not context_multi_line
:
1049 line_split
= line_split
[1:]
1050 # Instantiate a face
1051 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1052 (face_vert_loc_indices
, face_vert_nor_indices
, face_vert_tex_indices
,
1053 _1
, _2
, _3
, face_invalid_blenpoly
) = face
1055 face_items_usage
.clear()
1056 verts_loc_len
= len(verts_loc
)
1057 verts_nor_len
= len(verts_nor
)
1058 verts_tex_len
= len(verts_tex
)
1059 if context_material
is None:
1060 use_default_material
= True
1061 # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
1063 context_multi_line
= b
'f' if strip_slash(line_split
) else b
''
1065 for v
in line_split
:
1066 obj_vert
= v
.split(b
'/')
1067 idx
= int(obj_vert
[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
1068 vert_loc_index
= (idx
+ verts_loc_len
) if (idx
< 1) else idx
- 1
1069 # Add the vertex to the current group
1070 # *warning*, this wont work for files that have groups defined around verts
1071 if use_groups_as_vgroups
and context_vgroup
:
1072 vertex_groups
[context_vgroup
].append(vert_loc_index
)
1073 # This a first round to quick-detect ngons that *may* use a same edge more than once.
1074 # Potential candidate will be re-checked once we have done parsing the whole face.
1075 if not face_invalid_blenpoly
:
1076 # If we use more than once a same vertex, invalid ngon is suspected.
1077 if vert_loc_index
in face_items_usage
:
1078 face_invalid_blenpoly
.append(True)
1080 face_items_usage
.add(vert_loc_index
)
1081 face_vert_loc_indices
.append(vert_loc_index
)
1083 # formatting for faces with normals and textures is
1084 # loc_index/tex_index/nor_index
1085 if len(obj_vert
) > 1 and obj_vert
[1] and obj_vert
[1] != b
'0':
1086 idx
= int(obj_vert
[1])
1087 face_vert_tex_indices
.append((idx
+ verts_tex_len
) if (idx
< 1) else idx
- 1)
1089 face_vert_tex_indices
.append(0)
1091 if len(obj_vert
) > 2 and obj_vert
[2] and obj_vert
[2] != b
'0':
1092 idx
= int(obj_vert
[2])
1093 face_vert_nor_indices
.append((idx
+ verts_nor_len
) if (idx
< 1) else idx
- 1)
1095 face_vert_nor_indices
.append(0)
1097 if not context_multi_line
:
1098 # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1099 if face_invalid_blenpoly
:
1100 face_invalid_blenpoly
.clear()
1101 face_items_usage
.clear()
1102 prev_vidx
= face_vert_loc_indices
[-1]
1103 for vidx
in face_vert_loc_indices
:
1104 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
1105 if edge_key
in face_items_usage
:
1106 face_invalid_blenpoly
.append(True)
1108 face_items_usage
.add(edge_key
)
1111 elif use_edges
and (line_start
== b
'l' or context_multi_line
== b
'l'):
1112 # very similar to the face load function above with some parts removed
1113 if not context_multi_line
:
1114 line_split
= line_split
[1:]
1115 # Instantiate a face
1116 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1117 face_vert_loc_indices
= face
[0]
1118 # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1119 # as a polyline, and not a regular face...
1122 if context_material
is None:
1123 use_default_material
= True
1124 # Else, use face_vert_loc_indices previously defined and used the obj_face
1126 context_multi_line
= b
'l' if strip_slash(line_split
) else b
''
1128 for v
in line_split
:
1129 obj_vert
= v
.split(b
'/')
1130 idx
= int(obj_vert
[0]) - 1
1131 face_vert_loc_indices
.append((idx
+ len(verts_loc
) + 1) if (idx
< 0) else idx
)
1133 elif line_start
== b
's':
1134 if use_smooth_groups
:
1135 context_smooth_group
= line_value(line_split
)
1136 if context_smooth_group
== b
'off':
1137 context_smooth_group
= None
1138 elif context_smooth_group
: # is not None
1139 unique_smooth_groups
[context_smooth_group
] = None
1141 elif line_start
== b
'o':
1142 if use_split_objects
:
1143 context_object_key
= unique_name(objects_names
, line_value(line_split
))
1144 context_object_obpart
= context_object_key
1145 # unique_objects[context_object_key]= None
1147 elif line_start
== b
'g':
1148 if use_split_groups
:
1149 grppart
= line_value(line_split
)
1150 context_object_key
= (context_object_obpart
, grppart
) if context_object_obpart
else grppart
1151 # print 'context_object_key', context_object_key
1152 # unique_objects[context_object_key]= None
1153 elif use_groups_as_vgroups
:
1154 context_vgroup
= line_value(line
.split())
1155 if context_vgroup
and context_vgroup
!= b
'(null)':
1156 vertex_groups
.setdefault(context_vgroup
, [])
1158 context_vgroup
= None # dont assign a vgroup
1160 elif line_start
== b
'usemtl':
1161 context_material
= line_value(line
.split())
1162 unique_materials
[context_material
] = None
1163 elif line_start
== b
'mtllib': # usemap or usemat
1164 # can have multiple mtllib filenames per line, mtllib can appear more than once,
1165 # so make sure only occurrence of material exists
1166 material_libs |
= {os
.fsdecode(f
) for f
in filenames_group_by_ext(line
.lstrip()[7:].strip(), b
'.mtl')
1170 elif line_start
== b
'cstype':
1171 context_nurbs
[b
'cstype'] = line_value(line
.split()) # 'rat bspline' / 'bspline'
1172 elif line_start
== b
'curv' or context_multi_line
== b
'curv':
1173 curv_idx
= context_nurbs
[b
'curv_idx'] = context_nurbs
.get(b
'curv_idx', []) # in case were multiline
1175 if not context_multi_line
:
1176 context_nurbs
[b
'curv_range'] = float_func(line_split
[1]), float_func(line_split
[2])
1177 line_split
[0:3] = [] # remove first 3 items
1179 if strip_slash(line_split
):
1180 context_multi_line
= b
'curv'
1182 context_multi_line
= b
''
1184 for i
in line_split
:
1185 vert_loc_index
= int(i
) - 1
1187 if vert_loc_index
< 0:
1188 vert_loc_index
= len(verts_loc
) + vert_loc_index
+ 1
1190 curv_idx
.append(vert_loc_index
)
1192 elif line_start
== b
'parm' or context_multi_line
== b
'parm':
1193 if context_multi_line
:
1194 context_multi_line
= b
''
1196 context_parm
= line_split
[1]
1197 line_split
[0:2] = [] # remove first 2
1199 if strip_slash(line_split
):
1200 context_multi_line
= b
'parm'
1202 context_multi_line
= b
''
1204 if context_parm
.lower() == b
'u':
1205 context_nurbs
.setdefault(b
'parm_u', []).extend([float_func(f
) for f
in line_split
])
1206 elif context_parm
.lower() == b
'v': # surfaces not supported yet
1207 context_nurbs
.setdefault(b
'parm_v', []).extend([float_func(f
) for f
in line_split
])
1208 # else: # may want to support other parm's ?
1210 elif line_start
== b
'deg':
1211 context_nurbs
[b
'deg'] = [int(i
) for i
in line
.split()[1:]]
1212 elif line_start
== b
'end':
1213 # Add the nurbs curve
1214 if context_object_key
:
1215 context_nurbs
[b
'name'] = context_object_key
1216 nurbs
.append(context_nurbs
)
1220 ''' # How to use usemap? deprecated?
1221 elif line_start == b'usema': # usemap or usemat
1222 context_image= line_value(line_split)
1225 progress
.step("Done, loading materials and images...")
1227 if use_default_material
:
1228 unique_materials
[None] = None
1229 create_materials(filepath
, relpath
, material_libs
, unique_materials
,
1230 use_image_search
, float_func
)
1232 progress
.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1233 (len(verts_loc
), len(faces
), len(unique_materials
), len(unique_smooth_groups
)))
1236 if bpy
.ops
.object.select_all
.poll():
1237 bpy
.ops
.object.select_all(action
='DESELECT')
1239 new_objects
= [] # put new objects here
1241 # Split the mesh by objects/materials, may
1242 SPLIT_OB_OR_GROUP
= bool(use_split_objects
or use_split_groups
)
1244 for data
in split_mesh(verts_loc
, faces
, unique_materials
, filepath
, SPLIT_OB_OR_GROUP
):
1245 verts_loc_split
, faces_split
, unique_materials_split
, dataname
, use_vnor
, use_vtex
= data
1246 # Create meshes from the data, warning 'vertex_groups' wont support splitting
1247 #~ print(dataname, use_vnor, use_vtex)
1248 create_mesh(new_objects
,
1251 verts_nor
if use_vnor
else [],
1252 verts_tex
if use_vtex
else [],
1254 unique_materials_split
,
1255 unique_smooth_groups
,
1261 for context_nurbs
in nurbs
:
1262 create_nurbs(context_nurbs
, verts_loc
, new_objects
)
1264 view_layer
= context
.view_layer
1265 collection
= view_layer
.active_layer_collection
.collection
1268 for obj
in new_objects
:
1269 collection
.objects
.link(obj
)
1270 obj
.select_set(True)
1272 # we could apply this anywhere before scaling.
1273 obj
.matrix_world
= global_matrix
1277 axis_min
= [1000000000] * 3
1278 axis_max
= [-1000000000] * 3
1280 if global_clamp_size
:
1281 # Get all object bounds
1282 for ob
in new_objects
:
1283 for v
in ob
.bound_box
:
1284 for axis
, value
in enumerate(v
):
1285 if axis_min
[axis
] > value
:
1286 axis_min
[axis
] = value
1287 if axis_max
[axis
] < value
:
1288 axis_max
[axis
] = value
1291 max_axis
= max(axis_max
[0] - axis_min
[0], axis_max
[1] - axis_min
[1], axis_max
[2] - axis_min
[2])
1294 while global_clamp_size
< max_axis
* scale
:
1295 scale
= scale
/ 10.0
1297 for obj
in new_objects
:
1298 obj
.scale
= scale
, scale
, scale
1300 progress
.leave_substeps("Done.")
1301 progress
.leave_substeps("Finished importing: %r" % filepath
)