1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
21 # Script copyright (C) Campbell Barton
22 # Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
25 This script imports Wavefront OBJ files into Blender.
28 Run this script from "File->Import" menu and then load the desired OBJ file.
29 Note: this loads mesh objects and materials only; NURBS and curves are not supported.
31 http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
40 from bpy_extras
.io_utils
import unpack_list
41 from bpy_extras
.image_utils
import load_image
42 from bpy_extras
.wm_utils
.progress_report
import ProgressReport
def line_value(line_split):
    """
    Return the value part of a split OBJ/MTL line as one bytes string.

    line_split is the already-split bytes line, e.g. [b'usemtl', b'Mat'].
    Returns None when only the keyword is present, the single value when
    there is exactly one, and the space-joined remainder otherwise.
    """
    # NOTE(review): reconstructed from extraction-damaged source; the visible
    # fragments were the len() call, the docstring contract and the join().
    length = len(line_split)
    if length == 1:
        return None
    elif length == 2:
        return line_split[1]
    elif length > 2:
        return b' '.join(line_split[1:])
def obj_image_load(context_imagepath_map, line, DIR, recursive, relpath):
    """
    Load (or fetch from cache) the image referenced at the end of an MTL line.

    Tries progressively longer space-joined suffixes of the current line when
    the file is not found with the last one (users keep generating/using image
    files with spaces in a format that does not support them, sigh...).
    Also tries replacing '_' with ' ' because Max's exporter replaces spaces
    with underscores. As a last resort a placeholder image is created, so
    each path is only attempted once.

    context_imagepath_map caches imagepath -> image (None when loading failed).
    NOTE(review): reconstructed from extraction-damaged source — the sentinel
    check and early-return lines were missing; confirm against upstream.
    """
    filepath_parts = line.split(b' ')

    image = None
    for i in range(-1, -len(filepath_parts), -1):
        imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
        image = context_imagepath_map.get(imagepath, ...)  # ... is the "not tried yet" sentinel
        if image is ...:
            image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
            if image is None and "_" in imagepath:
                image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
            context_imagepath_map[imagepath] = image
        if image is not None:
            return image

    # Definitely failed to load: cache and return a placeholder for the last candidate.
    imagepath = os.fsdecode(filepath_parts[-1])
    image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
    context_imagepath_map[imagepath] = image
    return image
# create_materials: builds Blender materials for an imported OBJ, parsing the
# referenced .mtl libraries (colors, Principled BSDF parameters, texture maps)
# via bpy_extras.node_shader_utils.PrincipledBSDFWrapper.
# NOTE(review): this block is extraction-damaged — statements are split across
# physical lines, original line numbers are fused into the text, and many
# interior lines are missing. Preserved verbatim below for manual restoration
# against the upstream file.
89 def create_materials(filepath
, relpath
,
90 material_libs
, unique_materials
,
91 use_image_search
, float_func
):
93 Create all the used materials in this obj,
94 assign colors and images to the materials from all referenced material libs
97 from bpy_extras
import node_shader_utils
99 DIR
= os
.path
.dirname(filepath
)
100 context_material_vars
= set()
102 # Don't load the same image multiple times
103 context_imagepath_map
= {}
105 nodal_material_wrap_map
= {}
# load_material_image: nested helper applying one MTL texture map line
# ('map_kd', 'map_bump', ...) to the material wrapper.
107 def load_material_image(blender_material
, mat_wrap
, context_material_name
, img_data
, line
, type):
109 Set textures defined in .mtl file.
# NOTE(review): initialization of map_options/curr_token (original lines
# ~110-113) is missing from this extraction.
114 for token
in img_data
[:-1]:
115 if token
.startswith(b
'-') and token
[1:].isalpha():
117 map_options
[curr_token
[0]] = curr_token
[1:]
119 curr_token
.append(token
)
121 map_options
[curr_token
[0]] = curr_token
[1:]
123 # Absolute path - c:\.. etc would work here
124 image
= obj_image_load(context_imagepath_map
, line
, DIR
, use_image_search
, relpath
)
126 map_offset
= map_options
.get(b
'-o')
127 map_scale
= map_options
.get(b
'-s')
128 if map_offset
is not None:
129 map_offset
= tuple(map(float_func
, map_offset
))
130 if map_scale
is not None:
131 map_scale
= tuple(map(float_func
, map_scale
))
# _generic_tex_set: nested helper assigning image/coords/offset/scale on a
# wrapper texture slot.
133 def _generic_tex_set(nodetex
, image
, texcoords
, translation
, scale
):
134 nodetex
.image
= image
135 nodetex
.texcoords
= texcoords
136 if translation
is not None:
137 nodetex
.translation
= translation
138 if scale
is not None:
139 nodetex
.scale
= scale
141 # Adds textures for materials (rendering)
143 _generic_tex_set(mat_wrap
.base_color_texture
, image
, 'UV', map_offset
, map_scale
)
147 print("WARNING, currently unsupported ambient texture, skipped.")
150 _generic_tex_set(mat_wrap
.specular_texture
, image
, 'UV', map_offset
, map_scale
)
154 print("WARNING, currently unsupported emit texture, skipped.")
157 bump_mult
= map_options
.get(b
'-bm')
158 bump_mult
= float(bump_mult
[0]) if (bump_mult
and len(bump_mult
[0]) > 1) else 1.0
159 mat_wrap
.normalmap_strength_set(bump_mult
)
161 _generic_tex_set(mat_wrap
.normalmap_texture
, image
, 'UV', map_offset
, map_scale
)
164 _generic_tex_set(mat_wrap
.transmission_texture
, image
, 'UV', map_offset
, map_scale
)
168 print("WARNING, currently unsupported displacement texture, skipped.")
169 # ~ mat_wrap.bump_image_set(image)
170 # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
173 map_type
= map_options
.get(b
'-type')
174 if map_type
and map_type
!= [b
'sphere']:
175 print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
176 "" % ' '.join(i
.decode() for i
in map_type
))
178 _generic_tex_set(mat_wrap
.base_color_texture
, image
, 'Reflection', map_offset
, map_scale
)
179 mat_wrap
.base_color_texture
.projection
= 'SPHERE'
182 raise Exception("invalid type %r" % type)
# finalize_material: nested helper flushing the accumulated spec/emit colors
# and illum-derived flags onto the previous material, once parsing moves on.
184 def finalize_material(context_material
, context_material_vars
, spec_colors
, emit_colors
,
185 do_highlight
, do_reflection
, do_transparency
, do_glass
):
186 # Finalize previous mat, if any.
188 if "specular" in context_material_vars
:
189 # XXX This is highly approximated, not sure whether we can do better...
190 # TODO: Find a way to guesstimate best value from diffuse color...
191 # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
192 # from some grey), and apply the the proportion between those two as tint factor?
193 spec
= sum(spec_colors
) / 3.0
194 # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
195 # ~ diff = sum(context_mat_wrap.base_color) / 3.0
196 # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
197 # ~ tint = min(1.0, spec_var / diff_var)
198 context_mat_wrap
.specular
= spec
199 context_mat_wrap
.specular_tint
= 0.0
200 if "roughness" not in context_material_vars
:
201 context_mat_wrap
.roughness
= 0.0
204 emit_value
= sum(emit_colors
) / 3.0
205 if emit_value
> 1e-6:
206 print("WARNING, emit value unsupported by Principled BSDF shader, skipped.")
207 # We have to adapt it to diffuse color too...
208 emit_value
/= sum(tuple(context_material
.diffuse_color
)[:3]) / 3.0
209 # ~ context_material.emit = emit_value
211 # FIXME, how else to use this?
213 if "specular" not in context_material_vars
:
214 context_mat_wrap
.specular
= 1.0
215 if "roughness" not in context_material_vars
:
216 context_mat_wrap
.roughness
= 0.0
218 if "specular" not in context_material_vars
:
219 context_mat_wrap
.specular
= 0.0
220 if "roughness" not in context_material_vars
:
221 context_mat_wrap
.roughness
= 1.0
224 if "metallic" not in context_material_vars
:
225 context_mat_wrap
.metallic
= 1.0
227 # since we are (ab)using ambient term for metallic (which can be non-zero)
228 context_mat_wrap
.metallic
= 0.0
231 if "ior" not in context_material_vars
:
232 context_mat_wrap
.ior
= 1.0
233 if "transmission" not in context_material_vars
:
234 context_mat_wrap
.transmission
= 1.0
236 context_material
.blend_method
= 'BLEND'
239 if "ior" not in context_material_vars
:
240 context_mat_wrap
.ior
= 1.5
242 # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
243 temp_mtl
= os
.path
.splitext((os
.path
.basename(filepath
)))[0] + ".mtl"
244 if os
.path
.exists(os
.path
.join(DIR
, temp_mtl
)):
245 material_libs
.add(temp_mtl
)
# Create a material datablock (and a Principled BSDF wrapper) for every
# material name referenced by the OBJ.
248 # Create new materials
249 for name
in unique_materials
: # .keys()
250 ma_name
= "Default OBJ" if name
is None else name
.decode('utf-8', "replace")
251 ma
= unique_materials
[name
] = bpy
.data
.materials
.new(ma_name
)
252 ma_wrap
= node_shader_utils
.PrincipledBSDFWrapper(ma
, is_readonly
=False)
253 nodal_material_wrap_map
[ma
] = ma_wrap
254 ma_wrap
.use_nodes
= True
# Parse each referenced .mtl library, line by line.
256 for libname
in sorted(material_libs
):
258 mtlpath
= os
.path
.join(DIR
, libname
)
259 if not os
.path
.exists(mtlpath
):
260 print("\tMaterial not found MTL: %r" % mtlpath
)
262 # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
263 # (i.e. automatically controlled by other parameters).
265 do_reflection
= False
266 do_transparency
= False
268 spec_colors
= [0.0, 0.0, 0.0]
269 emit_colors
= [0.0, 0.0, 0.0]
271 # print('\t\tloading mtl: %e' % mtlpath)
272 context_material
= None
273 context_mat_wrap
= None
274 mtl
= open(mtlpath
, 'rb')
275 for line
in mtl
: # .readlines():
277 if not line
or line
.startswith(b
'#'):
280 line_split
= line
.split()
281 line_id
= line_split
[0].lower()
283 if line_id
== b
'newmtl':
284 # Finalize previous mat, if any.
285 finalize_material(context_material
, context_material_vars
, spec_colors
, emit_colors
,
286 do_highlight
, do_reflection
, do_transparency
, do_glass
)
288 context_material_name
= line_value(line_split
)
289 context_material
= unique_materials
.get(context_material_name
)
290 if context_material
is not None:
291 context_mat_wrap
= nodal_material_wrap_map
[context_material
]
292 context_material_vars
.clear()
294 spec_colors
= [0.0, 0.0, 0.0]
295 emit_colors
[:] = [0.0, 0.0, 0.0]
297 do_reflection
= False
298 do_transparency
= False
302 elif context_material
:
303 # we need to make a material to assign properties to it.
305 refl
= (float_func(line_split
[1]) + float_func(line_split
[2]) + float_func(line_split
[3])) / 3.0
306 context_mat_wrap
.metallic
= refl
307 context_material_vars
.add("metallic")
308 elif line_id
== b
'kd':
309 col
= (float_func(line_split
[1]), float_func(line_split
[2]), float_func(line_split
[3]))
310 context_mat_wrap
.base_color
= col
311 elif line_id
== b
'ks':
313 float_func(line_split
[1]), float_func(line_split
[2]), float_func(line_split
[3])]
314 context_material_vars
.add("specular")
315 elif line_id
== b
'ke':
316 # We cannot set context_material.emit right now, we need final diffuse color as well for this.
317 # XXX Unsupported currently
319 float_func(line_split
[1]), float_func(line_split
[2]), float_func(line_split
[3])]
320 elif line_id
== b
'ns':
321 # XXX Totally empirical conversion, trying to adapt it
322 # (from 0.0 - 900.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
323 context_mat_wrap
.roughness
= 1.0 - (sqrt(float_func(line_split
[1])) / 30)
324 context_material_vars
.add("roughness")
325 elif line_id
== b
'ni': # Refraction index (between 0.001 and 10).
326 context_mat_wrap
.ior
= float_func(line_split
[1])
327 context_material_vars
.add("ior")
328 elif line_id
== b
'd': # dissolve (transparency)
329 context_mat_wrap
.transmission
= 1.0 - float_func(line_split
[1])
330 context_material_vars
.add("transmission")
331 elif line_id
== b
'tr': # translucency
332 print("WARNING, currently unsupported 'tr' translucency option, skipped.")
333 elif line_id
== b
'tf':
334 # rgb, filter color, blender has no support for this.
335 print("WARNING, currently unsupported 'tf' filter color option, skipped.")
336 elif line_id
== b
'illum':
337 # Some MTL files incorrectly use a float for this value, see T60135.
338 illum
= any_number_as_int(line_split
[1])
# NOTE(review): the if/elif dispatch on 'illum' values (original lines
# 339-386) survives only as its comments and a few flag assignments below.
340 # inline comments are from the spec, v4.2
342 # Color on and Ambient off
343 print("WARNING, Principled BSDF shader does not support illumination 0 mode "
344 "(colors with no ambient), skipped.")
346 # Color on and Ambient on
352 # Reflection on and Ray trace on
355 # Transparency: Glass on
356 # Reflection: Ray trace on
357 do_transparency
= True
361 # Reflection: Fresnel on and Ray trace on
364 # Transparency: Refraction on
365 # Reflection: Fresnel off and Ray trace on
366 do_transparency
= True
369 # Transparency: Refraction on
370 # Reflection: Fresnel on and Ray trace on
371 do_transparency
= True
374 # Reflection on and Ray trace off
377 # Transparency: Glass on
378 # Reflection: Ray trace off
379 do_transparency
= True
380 do_reflection
= False
383 # Casts shadows onto invisible surfaces
384 print("WARNING, Principled BSDF shader does not support illumination 10 mode "
385 "(cast shadows on invisible surfaces), skipped.")
388 elif line_id
== b
'map_ka':
389 img_data
= line
.split()[1:]
391 load_material_image(context_material
, context_mat_wrap
,
392 context_material_name
, img_data
, line
, 'Ka')
393 elif line_id
== b
'map_ks':
394 img_data
= line
.split()[1:]
396 load_material_image(context_material
, context_mat_wrap
,
397 context_material_name
, img_data
, line
, 'Ks')
398 elif line_id
== b
'map_kd':
399 img_data
= line
.split()[1:]
401 load_material_image(context_material
, context_mat_wrap
,
402 context_material_name
, img_data
, line
, 'Kd')
403 elif line_id
== b
'map_ke':
404 img_data
= line
.split()[1:]
406 load_material_image(context_material
, context_mat_wrap
,
407 context_material_name
, img_data
, line
, 'Ke')
408 elif line_id
in {b
'map_bump', b
'bump'}: # 'bump' is incorrect but some files use it.
409 img_data
= line
.split()[1:]
411 load_material_image(context_material
, context_mat_wrap
,
412 context_material_name
, img_data
, line
, 'Bump')
413 elif line_id
in {b
'map_d', b
'map_tr'}: # Alpha map - Dissolve
414 img_data
= line
.split()[1:]
416 load_material_image(context_material
, context_mat_wrap
,
417 context_material_name
, img_data
, line
, 'D')
419 elif line_id
in {b
'map_disp', b
'disp'}: # displacementmap
420 img_data
= line
.split()[1:]
422 load_material_image(context_material
, context_mat_wrap
,
423 context_material_name
, img_data
, line
, 'disp')
425 elif line_id
in {b
'map_refl', b
'refl'}: # reflectionmap
426 img_data
= line
.split()[1:]
428 load_material_image(context_material
, context_mat_wrap
,
429 context_material_name
, img_data
, line
, 'refl')
431 print("WARNING: %r:%r (ignored)" % (filepath
, line
))
433 # Finalize last mat, if any.
434 finalize_material(context_material
, context_material_vars
, spec_colors
, emit_colors
,
435 do_highlight
, do_reflection
, do_transparency
, do_glass
)
# split_mesh: separates the parsed (verts_loc, faces, unique_materials) into
# per-object/per-group sets, remapping vertex indices into each new local list.
# NOTE(review): extraction-damaged block — statements split across lines,
# original line numbers fused into the text, several interior lines missing
# (notably the 'for face in faces:' loop header and part of the face-tuple
# unpacking, original lines ~466-478). Preserved verbatim for restoration.
439 def split_mesh(verts_loc
, faces
, unique_materials
, filepath
, SPLIT_OB_OR_GROUP
):
441 Takes vert_loc and faces, and separates into multiple sets of
442 (verts_loc, faces, unique_materials, dataname)
445 filename
= os
.path
.splitext((os
.path
.basename(filepath
)))[0]
447 if not SPLIT_OB_OR_GROUP
or not faces
:
448 use_verts_nor
= any(f
[1] for f
in faces
)
449 use_verts_tex
= any(f
[2] for f
in faces
)
450 # use the filename for the object name since we aren't chopping up the mesh.
451 return [(verts_loc
, faces
, unique_materials
, filename
, use_verts_nor
, use_verts_tex
)]
# key_to_name: nested helper turning a split key (None/bytes/tuple of bytes)
# into a printable object name.
453 def key_to_name(key
):
454 # if the key is a tuple, join it to make a string
456 return filename
# assume its a string. make sure this is true if the splitting code is changed
457 elif isinstance(key
, bytes
):
458 return key
.decode('utf-8', 'replace')
460 return "_".join(k
.decode('utf-8', 'replace') for k
in key
)
462 # Return a key that makes the faces unique.
465 oldkey
= -1 # initialize to a value that will never match the key
# NOTE(review): the per-face loop header and remaining tuple fields
# (original lines ~466-478) are missing from this extraction.
468 (face_vert_loc_indices
,
469 face_vert_nor_indices
,
470 face_vert_tex_indices
,
472 context_smooth_group
,
474 face_invalid_blenpoly
,
476 key
= context_object_key
479 # Check the key has changed.
480 (verts_split
, faces_split
, unique_materials_split
, vert_remap
,
481 use_verts_nor
, use_verts_tex
) = face_split_dict
.setdefault(key
, ([], [], {}, {}, [], []))
485 if not use_verts_nor
and face_vert_nor_indices
:
486 use_verts_nor
.append(True)
488 if not use_verts_tex
and face_vert_tex_indices
:
489 use_verts_tex
.append(True)
491 # Remap verts to new vert list and add where needed
492 for loop_idx
, vert_idx
in enumerate(face_vert_loc_indices
):
493 map_index
= vert_remap
.get(vert_idx
)
494 if map_index
is None:
495 map_index
= len(verts_split
)
496 vert_remap
[vert_idx
] = map_index
# set the new remapped index so we only add once and can reference next time.
497 verts_split
.append(verts_loc
[vert_idx
]) # add the vert to the local verts
499 face_vert_loc_indices
[loop_idx
] = map_index
# remap to the local index
501 if context_material
not in unique_materials_split
:
502 unique_materials_split
[context_material
] = unique_materials
[context_material
]
504 faces_split
.append(face
)
506 # remove one of the items and reorder
507 return [(verts_split
, faces_split
, unique_materials_split
, key_to_name(key
), bool(use_vnor
), bool(use_vtex
))
508 for key
, (verts_split
, faces_split
, unique_materials_split
, _
, use_vnor
, use_vtex
)
509 in face_split_dict
.items()]
# create_mesh: turns the gathered verts/faces/materials into a bpy mesh
# datablock — handles polylines, smooth groups, ngon tessellation and later
# un-tessellation (fgon edge dissolve), sharp edges, custom split normals,
# UVs and vertex groups — then appends the new object to new_objects.
# NOTE(review): extraction-damaged block — statements split across lines,
# original line numbers fused into the text, many interior lines missing
# (parameter list, loop headers, several conditionals). Preserved verbatim
# below for manual restoration against the upstream file.
512 def create_mesh(new_objects
,
519 unique_smooth_groups
,
524 Takes all the data gathered and generates a mesh, adding the new object to new_objects
525 deals with ngons, sharp edges and assigning materials
528 if unique_smooth_groups
:
530 smooth_group_users
= {context_smooth_group
: {} for context_smooth_group
in unique_smooth_groups
.keys()}
531 context_smooth_group_old
= -1
533 fgon_edges
= set() # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
537 context_object_key
= None
539 # reverse loop through face indices
540 for f_idx
in range(len(faces
) - 1, -1, -1):
541 (face_vert_loc_indices
,
542 face_vert_nor_indices
,
543 face_vert_tex_indices
,
545 context_smooth_group
,
547 face_invalid_blenpoly
,
550 len_face_vert_loc_indices
= len(face_vert_loc_indices
)
552 if len_face_vert_loc_indices
== 1:
553 faces
.pop(f_idx
) # cant add single vert faces
555 # Face with a single item in face_vert_nor_indices is actually a polyline!
556 elif len(face_vert_nor_indices
) == 1 or len_face_vert_loc_indices
== 2:
558 edges
.extend((face_vert_loc_indices
[i
], face_vert_loc_indices
[i
+ 1])
559 for i
in range(len_face_vert_loc_indices
- 1))
564 if unique_smooth_groups
and context_smooth_group
:
565 # Is a part of of a smooth group and is a face
566 if context_smooth_group_old
is not context_smooth_group
:
567 edge_dict
= smooth_group_users
[context_smooth_group
]
568 context_smooth_group_old
= context_smooth_group
570 prev_vidx
= face_vert_loc_indices
[-1]
571 for vidx
in face_vert_loc_indices
:
572 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
574 edge_dict
[edge_key
] = edge_dict
.get(edge_key
, 0) + 1
576 # NGons into triangles
577 if face_invalid_blenpoly
:
578 # ignore triangles with invalid indices
579 if len(face_vert_loc_indices
) > 3:
580 from bpy_extras
.mesh_utils
import ngon_tessellate
581 ngon_face_indices
= ngon_tessellate(verts_loc
, face_vert_loc_indices
, debug_print
=bpy
.app
.debug
)
582 faces
.extend([([face_vert_loc_indices
[ngon
[0]],
583 face_vert_loc_indices
[ngon
[1]],
584 face_vert_loc_indices
[ngon
[2]],
586 [face_vert_nor_indices
[ngon
[0]],
587 face_vert_nor_indices
[ngon
[1]],
588 face_vert_nor_indices
[ngon
[2]],
589 ] if face_vert_nor_indices
else [],
590 [face_vert_tex_indices
[ngon
[0]],
591 face_vert_tex_indices
[ngon
[1]],
592 face_vert_tex_indices
[ngon
[2]],
593 ] if face_vert_tex_indices
else [],
595 context_smooth_group
,
599 for ngon
in ngon_face_indices
]
601 tot_loops
+= 3 * len(ngon_face_indices
)
603 # edges to make ngons
604 if len(ngon_face_indices
) > 1:
606 for ngon
in ngon_face_indices
:
607 prev_vidx
= face_vert_loc_indices
[ngon
[-1]]
# NOTE(review): the inner 'for ngidx in ngon:' loop header (original line
# 608) is missing here — 'ngidx' below is otherwise unbound.
609 vidx
= face_vert_loc_indices
[ngidx
]
610 if vidx
== prev_vidx
:
611 continue # broken OBJ... Just skip.
612 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
614 if edge_key
in edge_users
:
615 fgon_edges
.add(edge_key
)
617 edge_users
.add(edge_key
)
621 tot_loops
+= len_face_vert_loc_indices
624 if unique_smooth_groups
:
625 for edge_dict
in smooth_group_users
.values():
626 for key
, users
in edge_dict
.items():
627 if users
== 1: # This edge is on the boundary of a group
630 # map the material names to an index
631 material_mapping
= {name
: i
for i
, name
in enumerate(unique_materials
)} # enumerate over unique_materials keys()
633 materials
= [None] * len(unique_materials
)
635 for name
, index
in material_mapping
.items():
636 materials
[index
] = unique_materials
[name
]
638 me
= bpy
.data
.meshes
.new(dataname
)
640 # make sure the list isnt too big
641 for material
in materials
:
642 me
.materials
.append(material
)
644 me
.vertices
.add(len(verts_loc
))
645 me
.loops
.add(tot_loops
)
646 me
.polygons
.add(len(faces
))
648 # verts_loc is a list of (x, y, z) tuples
649 me
.vertices
.foreach_set("co", unpack_list(verts_loc
))
651 loops_vert_idx
= tuple(vidx
for (face_vert_loc_indices
, _
, _
, _
, _
, _
, _
) in faces
for vidx
in face_vert_loc_indices
)
652 faces_loop_start
= []
655 face_vert_loc_indices
= f
[0]
656 nbr_vidx
= len(face_vert_loc_indices
)
657 faces_loop_start
.append(lidx
)
659 faces_loop_total
= tuple(len(face_vert_loc_indices
) for (face_vert_loc_indices
, _
, _
, _
, _
, _
, _
) in faces
)
661 me
.loops
.foreach_set("vertex_index", loops_vert_idx
)
662 me
.polygons
.foreach_set("loop_start", faces_loop_start
)
663 me
.polygons
.foreach_set("loop_total", faces_loop_total
)
665 faces_ma_index
= tuple(material_mapping
[context_material
] for (_
, _
, _
, context_material
, _
, _
, _
) in faces
)
666 me
.polygons
.foreach_set("material_index", faces_ma_index
)
668 faces_use_smooth
= tuple(bool(context_smooth_group
) for (_
, _
, _
, _
, context_smooth_group
, _
, _
) in faces
)
669 me
.polygons
.foreach_set("use_smooth", faces_use_smooth
)
671 if verts_nor
and me
.loops
:
672 # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
673 # we can only set custom lnors *after* calling it.
674 me
.create_normals_split()
675 loops_nor
= tuple(no
for (_
, face_vert_nor_indices
, _
, _
, _
, _
, _
) in faces
for face_noidx
in face_vert_nor_indices
for no
in verts_nor
[face_noidx
])
676 me
.loops
.foreach_set("normal", loops_nor
)
678 if verts_tex
and me
.polygons
:
680 loops_uv
= tuple(uv
for (_
, _
, face_vert_tex_indices
, _
, _
, _
, _
) in faces
for face_uvidx
in face_vert_tex_indices
for uv
in verts_tex
[face_uvidx
])
681 me
.uv_layers
[0].data
.foreach_set("uv", loops_uv
)
683 use_edges
= use_edges
and bool(edges
)
685 me
.edges
.add(len(edges
))
686 # edges should be a list of (a, b) tuples
687 me
.edges
.foreach_set("vertices", unpack_list(edges
))
689 me
.validate(clean_customdata
=False) # *Very* important to not remove lnors here!
690 me
.update(calc_edges
=use_edges
)
692 # Un-tessellate as much as possible, in case we had to triangulate some ngons...
699 edges
= [get((verts
[vidx1
], verts
[vidx2
])) for vidx1
, vidx2
in fgon_edges
]
701 bmesh
.ops
.dissolve_edges(bm
, edges
=edges
, use_verts
=False)
703 # Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
705 traceback
.print_exc()
710 # XXX If validate changes the geometry, this is likely to be broken...
711 if unique_smooth_groups
and sharp_edges
:
713 if e
.key
in sharp_edges
:
714 e
.use_edge_sharp
= True
717 clnors
= array
.array('f', [0.0] * (len(me
.loops
) * 3))
718 me
.loops
.foreach_get("normal", clnors
)
720 if not unique_smooth_groups
:
721 me
.polygons
.foreach_set("use_smooth", [True] * len(me
.polygons
))
723 me
.normals_split_custom_set(tuple(zip(*(iter(clnors
),) * 3)))
724 me
.use_auto_smooth
= True
726 ob
= bpy
.data
.objects
.new(me
.name
, me
)
727 new_objects
.append(ob
)
729 # Create the vertex groups. No need to have the flag passed here since we test for the
730 # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
731 # the following test will never run
732 for group_name
, group_indices
in vertex_groups
.items():
733 group
= ob
.vertex_groups
.new(name
=group_name
.decode('utf-8', "replace"))
734 group
.add(group_indices
, 1.0, 'REPLACE')
# create_nurbs: builds a Blender NURBS curve object from a parsed OBJ 'curv'
# element (only bspline curves, not surfaces) and appends it to new_objects.
# NOTE(review): extraction-damaged block — statements split across lines,
# original line numbers fused into the text, several interior lines missing.
# Preserved verbatim below for manual restoration.
737 def create_nurbs(context_nurbs
, vert_loc
, new_objects
):
739 Add nurbs object to blender, only support one type at the moment
741 deg
= context_nurbs
.get(b
'deg', (3,))
742 curv_range
= context_nurbs
.get(b
'curv_range')
743 curv_idx
= context_nurbs
.get(b
'curv_idx', [])
744 parm_u
= context_nurbs
.get(b
'parm_u', [])
745 parm_v
= context_nurbs
.get(b
'parm_v', [])
746 name
= context_nurbs
.get(b
'name', b
'ObjNurb')
747 cstype
= context_nurbs
.get(b
'cstype')
750 print('\tWarning, cstype not found')
752 if cstype
!= b
'bspline':
753 print('\tWarning, cstype is not supported (only bspline)')
756 print('\tWarning, curv argument empty or not set')
758 if len(deg
) > 1 or parm_v
:
759 print('\tWarning, surfaces not supported')
762 cu
= bpy
.data
.curves
.new(name
.decode('utf-8', "replace"), 'CURVE')
765 nu
= cu
.splines
.new('NURBS')
766 nu
.points
.add(len(curv_idx
) - 1) # a point is added to start with
767 nu
.points
.foreach_set("co", [co_axis
for vt_idx
in curv_idx
for co_axis
in (vert_loc
[vt_idx
] + (1.0,))])
769 nu
.order_u
= deg
[0] + 1
771 # get for endpoint flag from the weighting
772 if curv_range
and len(parm_u
) > deg
[0] + 1:
774 for i
in range(deg
[0] + 1):
776 if abs(parm_u
[i
] - curv_range
[0]) > 0.0001:
780 if abs(parm_u
[-(i
+ 1)] - curv_range
[1]) > 0.0001:
788 nu
.use_endpoint_u
= True
# NOTE(review): the following lines (original 793-802) look like a legacy
# commented-out Python-2 'cyclic' block ('xrange', '#print') whose leading
# '#' markers were lost in extraction — presumably not live code; verify
# against the upstream file.
793 if len(parm_u) > deg[0]+1:
794 for i in xrange(deg[0]+1):
795 #print curv_idx[i], curv_idx[-(i+1)]
797 if curv_idx[i]==curv_idx[-(i+1)]:
802 nu.use_cyclic_u = True
805 ob
= bpy
.data
.objects
.new(name
.decode('utf-8', "replace"), cu
)
807 new_objects
.append(ob
)
def strip_slash(line_split):
    """
    Strip a trailing '\\' line-continuation marker from the last item of
    line_split, in place.

    Returns True when a continuation marker was found (the logical OBJ line
    continues on the next physical line), False otherwise. The truthy return
    is relied upon by the parser to keep its multi-line context.
    """
    # NOTE(review): the else/return lines were missing from the extraction;
    # reconstructed from the visible branches and from call sites of the form
    # `tag if strip_slash(line_split) else b''`.
    if line_split[-1][-1] == 92:  # 92 == ord('\\')
        if len(line_split[-1]) == 1:
            line_split.pop()  # lone backslash: remove the whole item
        else:
            line_split[-1] = line_split[-1][:-1]  # remove the '\\' from the end of the last number
        return True
    return False
def get_float_func(filepath):
    """
    Find the string->float conversion function for this OBJ file.

    Probes vertex lines ('v', 'vn', 'vt'): if decimal commas are present,
    return a parser that replaces b',' with b'.' first; if a plain dot is
    found, return the builtin float. Falls back to float in case all vertex
    values were ints.
    """
    # 'with' guarantees the probe handle is closed on every return path
    # (the original opened the file without closing it on all paths).
    with open(filepath, 'rb') as fh:
        for line in fh:  # .readlines():
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda f: float(f.replace(b',', b'.'))
                elif b'.' in line:
                    return float
    # in case all vert values were ints
    return float
def any_number_as_int(svalue):
    """
    Parse bytes svalue as an int, tolerating float notation and comma decimal
    separators (some MTL files incorrectly use a float for 'illum', see
    T60135).
    """
    svalue = svalue.replace(b',', b'.')
    return int(float(svalue))
850 global_clight_size
=0.0,
851 use_smooth_groups
=True,
853 use_split_objects
=True,
854 use_split_groups
=False,
855 use_image_search
=True,
856 use_groups_as_vgroups
=False,
861 Called by the user interface or another script.
862 load_obj(path) - should give acceptable results.
863 This function passes the file and sends the data off
864 to be split into objects and then converted into mesh objects
def unique_name(existing_names, name_orig):
    """
    Return name_orig (bytes) made unique against existing_names, appending
    b'.000', b'.001', ... while a collision remains; record and return the
    chosen name.
    """
    # NOTE(review): the counter initialization and return were missing from
    # the extraction; reconstructed around the visible while-loop and
    # b"%s.%03d" formatting.
    i = 0
    name = name_orig
    while name in existing_names:
        name = b"%s.%03d" % (name_orig, i)
        i += 1
    existing_names.add(name)
    return name
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
    """
    Parse one (possibly '\\'-continued) vertex-data directive ('v'/'vn'/'vt').

    Accumulates float components into vec and, once the logical line ends,
    appends tuple(vec[:vec_len]) to data. Returns the tag when the physical
    line ends with a continuation backslash (parsing must keep this context
    on the next line), else b''.

    NOTE(review): relies on float_func and strip_slash from the enclosing
    scope of the loader.
    """
    ret_context_multi_line = tag if strip_slash(line_split) else b''
    if line_start == tag:
        # First physical line: the keyword is at index 0, components follow.
        vec[:] = [float_func(v) for v in line_split[1:]]
    elif context_multi_line == tag:
        # Continuation line: every remaining item is a component.
        vec += [float_func(v) for v in line_split]
    if not ret_context_multi_line:
        data.append(tuple(vec[:vec_len]))
    return ret_context_multi_line
def create_face(context_material, context_smooth_group, context_object_key):
    """
    Return a fresh 7-tuple face record:
    (loc_indices, nor_indices, tex_indices, material, smooth_group,
     object_key, invalid_blenpoly_list).

    The three index lists are new, independent lists that the parser fills
    in afterwards. The trailing list is the 'Blender-invalid ngon' flag: if
    non-empty, that face is a Blender-invalid ngon (holes...) — a mutable
    object is needed so it can be flagged after creation.
    """
    face_vert_loc_indices = []
    face_vert_nor_indices = []
    face_vert_tex_indices = []
    return (
        face_vert_loc_indices,
        face_vert_nor_indices,
        face_vert_tex_indices,
        context_material,
        context_smooth_group,
        context_object_key,
        [],  # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
    )
899 with
ProgressReport(context
.window_manager
) as progress
:
900 progress
.enter_substeps(1, "Importing OBJ %r..." % filepath
)
902 if global_matrix
is None:
903 global_matrix
= mathutils
.Matrix()
905 if use_split_objects
or use_split_groups
:
906 use_groups_as_vgroups
= False
908 time_main
= time
.time()
913 faces
= [] # tuples of the faces
914 material_libs
= set() # filenames to material libs this OBJ uses
915 vertex_groups
= {} # when use_groups_as_vgroups is true
917 # Get the string to float conversion func for this file- is 'float' for almost all files.
918 float_func
= get_float_func(filepath
)
921 context_material
= None
922 context_smooth_group
= None
923 context_object_key
= None
924 context_object_obpart
= None
925 context_vgroup
= None
927 objects_names
= set()
932 context_parm
= b
'' # used by nurbs too but could be used elsewhere
934 # Until we can use sets
935 use_default_material
= False
936 unique_materials
= {}
937 unique_smooth_groups
= {}
938 # unique_obects= {} - no use for this variable since the objects are stored in the face.
940 # when there are faces that end with \
941 # it means they are multiline-
942 # since we use xreadline we cant skip to the next line
943 # so we need to know whether
944 context_multi_line
= b
''
946 # Per-face handling data.
947 face_vert_loc_indices
= None
948 face_vert_nor_indices
= None
949 face_vert_tex_indices
= None
950 verts_loc_len
= verts_nor_len
= verts_tex_len
= 0
951 face_items_usage
= set()
952 face_invalid_blenpoly
= None
957 quick_vert_failures
= 0
958 skip_quick_vert
= False
960 progress
.enter_substeps(3, "Parsing OBJ file...")
961 with
open(filepath
, 'rb') as f
:
963 line_split
= line
.split()
968 line_start
= line_split
[0] # we compare with this a _lot_
970 # Handling vertex data are pretty similar, factorize that.
971 # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
972 # and only fallback to full multi-line parsing when needed, this gives significant speed-up
973 # (~40% on affected code).
974 if line_start
== b
'v':
975 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, not skip_quick_vert
976 elif line_start
== b
'vn':
977 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, not skip_quick_vert
978 elif line_start
== b
'vt':
979 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, not skip_quick_vert
980 elif context_multi_line
== b
'v':
981 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, False
982 elif context_multi_line
== b
'vn':
983 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, False
984 elif context_multi_line
== b
'vt':
985 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, False
992 vdata
.append(tuple(map(float_func
, line_split
[1:vdata_len
+ 1])))
994 do_quick_vert
= False
995 # In case we get too many failures on quick parsing, force fallback to full multi-line one.
996 # Exception handling can become costly...
997 quick_vert_failures
+= 1
998 if quick_vert_failures
> 10000:
999 skip_quick_vert
= True
1000 if not do_quick_vert
:
1001 context_multi_line
= handle_vec(line_start
, context_multi_line
, line_split
,
1002 context_multi_line
or line_start
, vdata
, vec
, vdata_len
)
1004 elif line_start
== b
'f' or context_multi_line
== b
'f':
1005 if not context_multi_line
:
1006 line_split
= line_split
[1:]
1007 # Instantiate a face
1008 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1009 (face_vert_loc_indices
, face_vert_nor_indices
, face_vert_tex_indices
,
1010 _1
, _2
, _3
, face_invalid_blenpoly
) = face
1012 face_items_usage
.clear()
1013 verts_loc_len
= len(verts_loc
)
1014 verts_nor_len
= len(verts_nor
)
1015 verts_tex_len
= len(verts_tex
)
1016 if context_material
is None:
1017 use_default_material
= True
1018 # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
1020 context_multi_line
= b
'f' if strip_slash(line_split
) else b
''
1022 for v
in line_split
:
1023 obj_vert
= v
.split(b
'/')
1024 idx
= int(obj_vert
[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
1025 vert_loc_index
= (idx
+ verts_loc_len
) if (idx
< 1) else idx
- 1
1026 # Add the vertex to the current group
1027 # *warning*, this wont work for files that have groups defined around verts
1028 if use_groups_as_vgroups
and context_vgroup
:
1029 vertex_groups
[context_vgroup
].append(vert_loc_index
)
1030 # This a first round to quick-detect ngons that *may* use a same edge more than once.
1031 # Potential candidate will be re-checked once we have done parsing the whole face.
1032 if not face_invalid_blenpoly
:
1033 # If we use more than once a same vertex, invalid ngon is suspected.
1034 if vert_loc_index
in face_items_usage
:
1035 face_invalid_blenpoly
.append(True)
1037 face_items_usage
.add(vert_loc_index
)
1038 face_vert_loc_indices
.append(vert_loc_index
)
1040 # formatting for faces with normals and textures is
1041 # loc_index/tex_index/nor_index
1042 if len(obj_vert
) > 1 and obj_vert
[1] and obj_vert
[1] != b
'0':
1043 idx
= int(obj_vert
[1])
1044 face_vert_tex_indices
.append((idx
+ verts_tex_len
) if (idx
< 1) else idx
- 1)
1046 face_vert_tex_indices
.append(0)
1048 if len(obj_vert
) > 2 and obj_vert
[2] and obj_vert
[2] != b
'0':
1049 idx
= int(obj_vert
[2])
1050 face_vert_nor_indices
.append((idx
+ verts_nor_len
) if (idx
< 1) else idx
- 1)
1052 face_vert_nor_indices
.append(0)
1054 if not context_multi_line
:
1055 # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1056 if face_invalid_blenpoly
:
1057 face_invalid_blenpoly
.clear()
1058 face_items_usage
.clear()
1059 prev_vidx
= face_vert_loc_indices
[-1]
1060 for vidx
in face_vert_loc_indices
:
1061 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
1062 if edge_key
in face_items_usage
:
1063 face_invalid_blenpoly
.append(True)
1065 face_items_usage
.add(edge_key
)
1068 elif use_edges
and (line_start
== b
'l' or context_multi_line
== b
'l'):
1069 # very similar to the face load function above with some parts removed
1070 if not context_multi_line
:
1071 line_split
= line_split
[1:]
1072 # Instantiate a face
1073 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1074 face_vert_loc_indices
= face
[0]
1075 # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1076 # as a polyline, and not a regular face...
1079 # Else, use face_vert_loc_indices previously defined and used the obj_face
1081 context_multi_line
= b
'l' if strip_slash(line_split
) else b
''
1083 for v
in line_split
:
1084 obj_vert
= v
.split(b
'/')
1085 idx
= int(obj_vert
[0]) - 1
1086 face_vert_loc_indices
.append((idx
+ len(verts_loc
) + 1) if (idx
< 0) else idx
)
1088 elif line_start
== b
's':
1089 if use_smooth_groups
:
1090 context_smooth_group
= line_value(line_split
)
1091 if context_smooth_group
== b
'off':
1092 context_smooth_group
= None
1093 elif context_smooth_group
: # is not None
1094 unique_smooth_groups
[context_smooth_group
] = None
1096 elif line_start
== b
'o':
1097 if use_split_objects
:
1098 context_object_key
= unique_name(objects_names
, line_value(line_split
))
1099 context_object_obpart
= context_object_key
1100 # unique_objects[context_object_key]= None
1102 elif line_start
== b
'g':
1103 if use_split_groups
:
1104 grppart
= line_value(line_split
)
1105 context_object_key
= (context_object_obpart
, grppart
) if context_object_obpart
else grppart
1106 # print 'context_object_key', context_object_key
1107 # unique_objects[context_object_key]= None
1108 elif use_groups_as_vgroups
:
1109 context_vgroup
= line_value(line
.split())
1110 if context_vgroup
and context_vgroup
!= b
'(null)':
1111 vertex_groups
.setdefault(context_vgroup
, [])
1113 context_vgroup
= None # dont assign a vgroup
1115 elif line_start
== b
'usemtl':
1116 context_material
= line_value(line
.split())
1117 unique_materials
[context_material
] = None
1118 elif line_start
== b
'mtllib': # usemap or usemat
1119 # can have multiple mtllib filenames per line, mtllib can appear more than once,
1120 # so make sure only occurrence of material exists
1121 material_libs |
= {os
.fsdecode(f
) for f
in line
.split()[1:]}
1124 elif line_start
== b
'cstype':
1125 context_nurbs
[b
'cstype'] = line_value(line
.split()) # 'rat bspline' / 'bspline'
1126 elif line_start
== b
'curv' or context_multi_line
== b
'curv':
1127 curv_idx
= context_nurbs
[b
'curv_idx'] = context_nurbs
.get(b
'curv_idx', []) # in case were multiline
1129 if not context_multi_line
:
1130 context_nurbs
[b
'curv_range'] = float_func(line_split
[1]), float_func(line_split
[2])
1131 line_split
[0:3] = [] # remove first 3 items
1133 if strip_slash(line_split
):
1134 context_multi_line
= b
'curv'
1136 context_multi_line
= b
''
1138 for i
in line_split
:
1139 vert_loc_index
= int(i
) - 1
1141 if vert_loc_index
< 0:
1142 vert_loc_index
= len(verts_loc
) + vert_loc_index
+ 1
1144 curv_idx
.append(vert_loc_index
)
1146 elif line_start
== b
'parm' or context_multi_line
== b
'parm':
1147 if context_multi_line
:
1148 context_multi_line
= b
''
1150 context_parm
= line_split
[1]
1151 line_split
[0:2] = [] # remove first 2
1153 if strip_slash(line_split
):
1154 context_multi_line
= b
'parm'
1156 context_multi_line
= b
''
1158 if context_parm
.lower() == b
'u':
1159 context_nurbs
.setdefault(b
'parm_u', []).extend([float_func(f
) for f
in line_split
])
1160 elif context_parm
.lower() == b
'v': # surfaces not supported yet
1161 context_nurbs
.setdefault(b
'parm_v', []).extend([float_func(f
) for f
in line_split
])
1162 # else: # may want to support other parm's ?
1164 elif line_start
== b
'deg':
1165 context_nurbs
[b
'deg'] = [int(i
) for i
in line
.split()[1:]]
1166 elif line_start
== b
'end':
1167 # Add the nurbs curve
1168 if context_object_key
:
1169 context_nurbs
[b
'name'] = context_object_key
1170 nurbs
.append(context_nurbs
)
1174 ''' # How to use usemap? deprecated?
1175 elif line_start == b'usema': # usemap or usemat
1176 context_image= line_value(line_split)
1179 progress
.step("Done, loading materials and images...")
1181 if use_default_material
:
1182 unique_materials
[None] = None
1183 create_materials(filepath
, relpath
, material_libs
, unique_materials
,
1184 use_image_search
, float_func
)
1186 progress
.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1187 (len(verts_loc
), len(faces
), len(unique_materials
), len(unique_smooth_groups
)))
1190 if bpy
.ops
.object.select_all
.poll():
1191 bpy
.ops
.object.select_all(action
='DESELECT')
1193 scene
= context
.scene
1194 new_objects
= [] # put new objects here
1196 # Split the mesh by objects/materials, may
1197 SPLIT_OB_OR_GROUP
= bool(use_split_objects
or use_split_groups
)
1199 for data
in split_mesh(verts_loc
, faces
, unique_materials
, filepath
, SPLIT_OB_OR_GROUP
):
1200 verts_loc_split
, faces_split
, unique_materials_split
, dataname
, use_vnor
, use_vtex
= data
1201 # Create meshes from the data, warning 'vertex_groups' wont support splitting
1202 #~ print(dataname, use_vnor, use_vtex)
1203 create_mesh(new_objects
,
1206 verts_nor
if use_vnor
else [],
1207 verts_tex
if use_vtex
else [],
1209 unique_materials_split
,
1210 unique_smooth_groups
,
1216 for context_nurbs
in nurbs
:
1217 create_nurbs(context_nurbs
, verts_loc
, new_objects
)
1219 view_layer
= context
.view_layer
1220 collection
= view_layer
.active_layer_collection
.collection
1223 for obj
in new_objects
:
1224 collection
.objects
.link(obj
)
1225 obj
.select_set(True)
1227 # we could apply this anywhere before scaling.
1228 obj
.matrix_world
= global_matrix
1232 axis_min
= [1000000000] * 3
1233 axis_max
= [-1000000000] * 3
1235 if global_clight_size
:
1236 # Get all object bounds
1237 for ob
in new_objects
:
1238 for v
in ob
.bound_box
:
1239 for axis
, value
in enumerate(v
):
1240 if axis_min
[axis
] > value
:
1241 axis_min
[axis
] = value
1242 if axis_max
[axis
] < value
:
1243 axis_max
[axis
] = value
1246 max_axis
= max(axis_max
[0] - axis_min
[0], axis_max
[1] - axis_min
[1], axis_max
[2] - axis_min
[2])
1249 while global_clight_size
< max_axis
* scale
:
1250 scale
= scale
/ 10.0
1252 for obj
in new_objects
:
1253 obj
.scale
= scale
, scale
, scale
1255 progress
.leave_substeps("Done.")
1256 progress
.leave_substeps("Finished importing: %r" % filepath
)