1 # SPDX-License-Identifier: GPL-2.0-or-later
5 # Script copyright (C) Campbell Barton
6 # Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
9 This script imports a Wavefront OBJ files to Blender.
12 Run this script from "File->Import" menu and then load the desired OBJ file.
13 Note, This loads mesh objects and materials only, nurbs and curves are not supported.
15 http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
24 from bpy_extras
.io_utils
import unpack_list
25 from bpy_extras
.image_utils
import load_image
26 from bpy_extras
.wm_utils
.progress_report
import ProgressReport
def line_value(line_split):
    """
    Returns 1 string representing the value for this line
    None will be returned if there's only 1 word
    """
    length = len(line_split)
    if length == 1:
        # Keyword with no value (e.g. a bare b'usemtl').
        return None
    elif length == 2:
        return line_split[1]
    elif length > 2:
        # Value contained spaces: re-join everything after the keyword.
        return b' '.join(line_split[1:])
def filenames_group_by_ext(line, ext):
    """
    Splits material libraries supporting spaces, so:
    b'foo bar.mtl baz spam.MTL' -> (b'foo bar.mtl', b'baz spam.MTL')
    Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).

    :param line: raw bytes value part of the OBJ/MTL statement.
    :param ext: lowercase bytes extension (e.g. b'.mtl') used to split the space case.
    :return: generator of bytes filenames.
    """
    # Note that we assume that if there are some " in that line,
    # then all filenames are properly enclosed within those...
    start = line.find(b'"') + 1
    if start != 0:
        while start != 0:
            end = line.find(b'"', start)
            if end != -1:
                yield line[start:end]
                start = line.find(b'"', end + 1) + 1
            else:
                break
        return

    # No quotes: split on occurrences of the (lowercased) extension,
    # so spaces inside a single filename are preserved.
    line_lower = line.lower()
    i_prev = 0
    while i_prev != -1 and i_prev < len(line):
        i = line_lower.find(ext, i_prev)
        if i != -1:
            i += len(ext)
        yield line[i_prev:i].strip()
        i_prev = i
def obj_image_load(img_data, context_imagepath_map, line, DIR, recursive, relpath):
    """
    Mainly uses comprehensiveImageLoad
    But we try all space-separated items from current line when file is not found with last one
    (users keep generating/using image files with spaces in a format that does not support them, sigh...)
    Also tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
    Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
    Also corrects img_data (in case filenames with spaces have been split up in multiple entries, see T72148).
    """
    filepath_parts = line.split(b' ')

    # If the path is protected by quotes, use exactly that span as the single filename.
    start = line.find(b'"') + 1
    if start != 0:
        end = line.find(b'"', start)
        if end != -1:
            filepath_parts = (line[start:end],)

    image = None
    # Try longer and longer suffixes of the split tokens as a single filename.
    for i in range(-1, -len(filepath_parts), -1):
        imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
        # `...` is used as a "not cached" sentinel (None is a valid cached failure).
        image = context_imagepath_map.get(imagepath, ...)
        if image is ...:
            image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
            if image is None and "_" in imagepath:
                # Max's exporter replaces spaces with underscores — try undoing that.
                image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
            if image is not None:
                context_imagepath_map[imagepath] = image
                # Fix img_data in case the filename with spaces was split into several entries.
                del img_data[i:]
                img_data.append(imagepath)
                break
        else:
            if image is not None:
                del img_data[i:]
                img_data.append(imagepath)
                break

    if image is None:
        # Nothing found: create a placeholder from the last token so the material keeps a slot.
        imagepath = os.fsdecode(filepath_parts[-1])
        image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
        context_imagepath_map[imagepath] = image

    return image
def create_materials(filepath, relpath,
                     material_libs, unique_materials,
                     use_image_search, float_func):
    """
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs
    """
    from math import sqrt
    from bpy_extras import node_shader_utils

    DIR = os.path.dirname(filepath)
    context_material_vars = set()

    # Don't load the same image multiple times
    context_imagepath_map = {}

    # Material datablock -> PrincipledBSDFWrapper, so MTL parsing can set node values.
    nodal_material_wrap_map = {}

    def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
        """
        Set textures defined in .mtl file.
        """
        map_options = {}
        curr_token = []

        # Absolute path - c:\.. etc would work here
        image = obj_image_load(img_data, context_imagepath_map, line, DIR, use_image_search, relpath)

        # Parse map options (tokens starting with '-' followed by letters), last token is the filename.
        for token in img_data[:-1]:
            if token.startswith(b'-') and token[1:].isalpha():
                if curr_token:
                    map_options[curr_token[0]] = curr_token[1:]
                curr_token = []
            curr_token.append(token)
        if curr_token:
            map_options[curr_token[0]] = curr_token[1:]

        map_offset = map_options.get(b'-o')
        map_scale = map_options.get(b'-s')
        if map_offset is not None:
            map_offset = tuple(map(float_func, map_offset))
        if map_scale is not None:
            map_scale = tuple(map(float_func, map_scale))

        def _generic_tex_set(nodetex, image, texcoords, translation, scale):
            nodetex.image = image
            nodetex.texcoords = texcoords
            if translation is not None:
                nodetex.translation = translation
            if scale is not None:
                nodetex.scale = scale

        # Adds textures for materials (rendering)
        if type == 'Kd':
            _generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)

        elif type == 'Ka':
            print("WARNING, currently unsupported ambient texture, skipped.")

        elif type == 'Ks':
            _generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)

        elif type == 'Ke':
            _generic_tex_set(mat_wrap.emission_color_texture, image, 'UV', map_offset, map_scale)
            mat_wrap.emission_strength = 1.0

        elif type == 'Bump':
            bump_mult = map_options.get(b'-bm')
            bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
            mat_wrap.normalmap_strength_set(bump_mult)

            _generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)

        elif type == 'D':
            _generic_tex_set(mat_wrap.alpha_texture, image, 'UV', map_offset, map_scale)

        elif type == 'disp':
            print("WARNING, currently unsupported displacement texture, skipped.")
            # ~ mat_wrap.bump_image_set(image)
            # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)

        elif type == 'refl':
            map_type = map_options.get(b'-type')
            if map_type and map_type != [b'sphere']:
                print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
                      "" % ' '.join(i.decode() for i in map_type))

            _generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
            mat_wrap.base_color_texture.projection = 'SPHERE'
        else:
            raise Exception("invalid type %r" % type)

    def finalize_material(context_material, context_material_vars, spec_colors,
                          do_highlight, do_reflection, do_transparency, do_glass):
        # Finalize previous mat, if any.
        if context_material:
            if "specular" in context_material_vars:
                # XXX This is highly approximated, not sure whether we can do better...
                # TODO: Find a way to guesstimate best value from diffuse color...
                # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
                #       from some grey), and apply the the proportion between those two as tint factor?
                spec = sum(spec_colors) / 3.0
                # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
                # ~ diff = sum(context_mat_wrap.base_color) / 3.0
                # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
                # ~ tint = min(1.0, spec_var / diff_var)
                context_mat_wrap.specular = spec
                context_mat_wrap.specular_tint = 0.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 0.0

            # FIXME, how else to use this?
            if do_highlight:
                if "specular" not in context_material_vars:
                    context_mat_wrap.specular = 1.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 0.0
            else:
                if "specular" not in context_material_vars:
                    context_mat_wrap.specular = 0.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 1.0

            if do_reflection:
                if "metallic" not in context_material_vars:
                    context_mat_wrap.metallic = 1.0
            else:
                # since we are (ab)using ambient term for metallic (which can be non-zero)
                context_mat_wrap.metallic = 0.0

            if do_transparency:
                if "ior" not in context_material_vars:
                    context_mat_wrap.ior = 1.0
                if "alpha" not in context_material_vars:
                    context_mat_wrap.alpha = 1.0
                # EEVEE only
                context_material.blend_method = 'BLEND'

            if do_glass:
                if "ior" not in context_material_vars:
                    context_mat_wrap.ior = 1.5

    # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
    temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
    if os.path.exists(os.path.join(DIR, temp_mtl)):
        material_libs.add(temp_mtl)

    # Create new materials
    for name in unique_materials:  # .keys()
        ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
        ma = unique_materials[name] = bpy.data.materials.new(ma_name)
        ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
        nodal_material_wrap_map[ma] = ma_wrap
        ma_wrap.use_nodes = True

    for libname in sorted(material_libs):
        # print(libname)
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
            print("\tMaterial not found MTL: %r" % mtlpath)
        else:
            # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
            # (i.e. automatically controlled by other parameters).
            do_highlight = False
            do_reflection = False
            do_transparency = False
            do_glass = False
            spec_colors = [0.0, 0.0, 0.0]

            # print('\t\tloading mtl: %e' % mtlpath)
            context_material = None
            context_mat_wrap = None
            mtl = open(mtlpath, 'rb')
            for line in mtl:  # .readlines():
                line = line.strip()
                if not line or line.startswith(b'#'):
                    continue

                line_split = line.split()
                line_id = line_split[0].lower()

                if line_id == b'newmtl':
                    # Finalize previous mat, if any.
                    finalize_material(context_material, context_material_vars, spec_colors,
                                      do_highlight, do_reflection, do_transparency, do_glass)

                    context_material_name = line_value(line_split)
                    context_material = unique_materials.get(context_material_name)
                    if context_material is not None:
                        context_mat_wrap = nodal_material_wrap_map[context_material]
                    context_material_vars.clear()

                    spec_colors[:] = [0.0, 0.0, 0.0]
                    do_highlight = False
                    do_reflection = False
                    do_transparency = False
                    do_glass = False

                elif context_material:
                    def _get_colors(line_split):
                        # OBJ 'allows' one or two components values, treat single component as greyscale, and two as blue = 0.0.
                        ln = len(line_split)
                        if ln == 2:
                            return [float_func(line_split[1])] * 3
                        elif ln == 3:
                            return [float_func(line_split[1]), float_func(line_split[2]), 0.0]
                        else:
                            return [float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]

                    # we need to make a material to assign properties to it.
                    if line_id == b'ka':
                        refl = sum(_get_colors(line_split)) / 3.0
                        context_mat_wrap.metallic = refl
                        context_material_vars.add("metallic")
                    elif line_id == b'kd':
                        context_mat_wrap.base_color = _get_colors(line_split)
                    elif line_id == b'ks':
                        spec_colors[:] = _get_colors(line_split)
                        context_material_vars.add("specular")
                    elif line_id == b'ke':
                        # We cannot set context_material.emit right now, we need final diffuse color as well for this.
                        # XXX Unsupported currently
                        context_mat_wrap.emission_color = _get_colors(line_split)
                        context_mat_wrap.emission_strength = 1.0
                    elif line_id == b'ns':
                        # XXX Totally empirical conversion, trying to adapt it
                        #     (from 0.0 - 1000.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
                        val = max(0.0, min(1000.0, float_func(line_split[1])))
                        context_mat_wrap.roughness = 1.0 - (sqrt(val / 1000))
                        context_material_vars.add("roughness")
                    elif line_id == b'ni':  # Refraction index (between 0.001 and 10).
                        context_mat_wrap.ior = float_func(line_split[1])
                        context_material_vars.add("ior")
                    elif line_id == b'd':  # dissolve (transparency)
                        context_mat_wrap.alpha = float_func(line_split[1])
                        context_material_vars.add("alpha")
                    elif line_id == b'tr':  # translucency
                        print("WARNING, currently unsupported 'tr' translucency option, skipped.")
                    elif line_id == b'tf':
                        # rgb, filter color, blender has no support for this.
                        print("WARNING, currently unsupported 'tf' filter color option, skipped.")
                    elif line_id == b'illum':
                        # Some MTL files incorrectly use a float for this value, see T60135.
                        illum = any_number_as_int(line_split[1])

                        # inline comments are from the spec, v4.2
                        if illum == 0:
                            # Color on and Ambient off
                            print("WARNING, Principled BSDF shader does not support illumination 0 mode "
                                  "(colors with no ambient), skipped.")
                        elif illum == 1:
                            # Color on and Ambient on
                            pass
                        elif illum == 2:
                            # Highlight on
                            do_highlight = True
                        elif illum == 3:
                            # Reflection on and Ray trace on
                            do_reflection = True
                        elif illum == 4:
                            # Transparency: Glass on
                            # Reflection: Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                        elif illum == 5:
                            # Reflection: Fresnel on and Ray trace on
                            do_reflection = True
                        elif illum == 6:
                            # Transparency: Refraction on
                            # Reflection: Fresnel off and Ray trace on
                            do_transparency = True
                            do_reflection = True
                        elif illum == 7:
                            # Transparency: Refraction on
                            # Reflection: Fresnel on and Ray trace on
                            do_transparency = True
                            do_reflection = True
                        elif illum == 8:
                            # Reflection on and Ray trace off
                            do_reflection = True
                        elif illum == 9:
                            # Transparency: Glass on
                            # Reflection: Ray trace off
                            do_transparency = True
                            do_reflection = False
                            do_glass = True
                        elif illum == 10:
                            # Casts shadows onto invisible surfaces
                            print("WARNING, Principled BSDF shader does not support illumination 10 mode "
                                  "(cast shadows on invisible surfaces), skipped.")

                    elif line_id == b'map_ka':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ka')
                    elif line_id == b'map_ks':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ks')
                    elif line_id == b'map_kd':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Kd')
                    elif line_id == b'map_ke':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ke')
                    elif line_id in {b'map_bump', b'bump'}:  # 'bump' is incorrect but some files use it.
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Bump')
                    elif line_id in {b'map_d', b'map_tr'}:  # Alpha map - Dissolve
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'D')
                    elif line_id in {b'map_disp', b'disp'}:  # displacementmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'disp')
                    elif line_id in {b'map_refl', b'refl'}:  # reflectionmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'refl')
                    else:
                        print("WARNING: %r:%r (ignored)" % (filepath, line))
            mtl.close()

            # Finalize last mat, if any.
            finalize_material(context_material, context_material_vars, spec_colors,
                              do_highlight, do_reflection, do_transparency, do_glass)
def face_is_edge(face):
    """Simple check to test whether given (temp, working) data is an edge, and not a real face."""
    loc_indices = face[0]
    nor_indices = face[1]
    # A polyline stores a single normal index; a 2-vert "face" is also just an edge.
    return len(nor_indices) == 1 or len(loc_indices) == 2
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    """
    Takes vert_loc and faces, and separates into multiple sets of
    (verts_loc, faces, unique_materials, dataname)
    """

    filename = os.path.splitext((os.path.basename(filepath)))[0]

    if not SPLIT_OB_OR_GROUP or not faces:
        use_verts_nor = any(f[1] for f in faces)
        use_verts_tex = any(f[2] for f in faces)
        # use the filename for the object name since we aren't chopping up the mesh.
        return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]

    def key_to_name(key):
        # if the key is a tuple, join it to make a string
        if not key:
            return filename  # assume its a string. make sure this is true if the splitting code is changed
        elif isinstance(key, bytes):
            return key.decode('utf-8', 'replace')
        else:
            return "_".join(k.decode('utf-8', 'replace') for k in key)

    # Return a key that makes the faces unique.
    face_split_dict = {}

    oldkey = -1  # initialize to a value that will never match the key

    for face in faces:
        (face_vert_loc_indices,
         face_vert_nor_indices,
         face_vert_tex_indices,
         context_material,
         _context_smooth_group,
         context_object_key,
         _face_invalid_blenpoly,
         ) = face
        key = context_object_key

        if oldkey != key:
            # Check the key has changed.
            (verts_split, faces_split, unique_materials_split, vert_remap,
             use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
            oldkey = key

        if not face_is_edge(face):
            if not use_verts_nor and face_vert_nor_indices:
                use_verts_nor.append(True)

            if not use_verts_tex and face_vert_tex_indices:
                use_verts_tex.append(True)

        # Remap verts to new vert list and add where needed
        for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
            map_index = vert_remap.get(vert_idx)
            if map_index is None:
                map_index = len(verts_split)
                # set the new remapped index so we only add once and can reference next time.
                vert_remap[vert_idx] = map_index
                verts_split.append(verts_loc[vert_idx])  # add the vert to the local verts

            face_vert_loc_indices[loop_idx] = map_index  # remap to the local index

        if context_material not in unique_materials_split:
            unique_materials_split[context_material] = unique_materials[context_material]

        faces_split.append(face)

    # remove one of the items and reorder
    return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
            for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
            in face_split_dict.items()]
def create_mesh(new_objects,
                use_edges,
                verts_loc,
                verts_nor,
                verts_tex,
                faces,
                unique_materials,
                unique_smooth_groups,
                vertex_groups,
                dataname,
                ):
    """
    Takes all the data gathered and generates a mesh, adding the new object to new_objects
    deals with ngons, sharp edges and assigning materials
    """

    if unique_smooth_groups:
        sharp_edges = set()
        smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
        context_smooth_group_old = -1

    fgon_edges = set()  # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
    edges = []
    tot_loops = 0

    context_object_key = None

    # reverse loop through face indices
    for f_idx in range(len(faces) - 1, -1, -1):
        face = faces[f_idx]

        (face_vert_loc_indices,
         face_vert_nor_indices,
         face_vert_tex_indices,
         context_material,
         context_smooth_group,
         context_object_key,
         face_invalid_blenpoly,
         ) = face

        len_face_vert_loc_indices = len(face_vert_loc_indices)

        if len_face_vert_loc_indices == 1:
            faces.pop(f_idx)  # cant add single vert faces

        # Face with a single item in face_vert_nor_indices is actually a polyline!
        elif face_is_edge(face):
            if use_edges:
                edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
                             for i in range(len_face_vert_loc_indices - 1))
            faces.pop(f_idx)

        else:
            # Smooth Group
            if unique_smooth_groups and context_smooth_group:
                # Is a part of of a smooth group and is a face
                if context_smooth_group_old is not context_smooth_group:
                    edge_dict = smooth_group_users[context_smooth_group]
                    context_smooth_group_old = context_smooth_group

                prev_vidx = face_vert_loc_indices[-1]
                for vidx in face_vert_loc_indices:
                    edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
                    prev_vidx = vidx
                    edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1

            # NGons into triangles
            if face_invalid_blenpoly:
                # ignore triangles with invalid indices
                if len(face_vert_loc_indices) > 3:
                    from bpy_extras.mesh_utils import ngon_tessellate
                    ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
                    faces.extend([([face_vert_loc_indices[ngon[0]],
                                    face_vert_loc_indices[ngon[1]],
                                    face_vert_loc_indices[ngon[2]],
                                    ],
                                   [face_vert_nor_indices[ngon[0]],
                                    face_vert_nor_indices[ngon[1]],
                                    face_vert_nor_indices[ngon[2]],
                                    ] if face_vert_nor_indices else [],
                                   [face_vert_tex_indices[ngon[0]],
                                    face_vert_tex_indices[ngon[1]],
                                    face_vert_tex_indices[ngon[2]],
                                    ] if face_vert_tex_indices else [],
                                   context_material,
                                   context_smooth_group,
                                   context_object_key,
                                   [],
                                   )
                                  for ngon in ngon_face_indices]
                                 )
                    tot_loops += 3 * len(ngon_face_indices)

                    # edges to make ngons
                    if len(ngon_face_indices) > 1:
                        edge_users = set()
                        for ngon in ngon_face_indices:
                            prev_vidx = face_vert_loc_indices[ngon[-1]]
                            for ngidx in ngon:
                                vidx = face_vert_loc_indices[ngidx]
                                if vidx == prev_vidx:
                                    continue  # broken OBJ... Just skip.
                                edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
                                prev_vidx = vidx
                                if edge_key in edge_users:
                                    fgon_edges.add(edge_key)
                                else:
                                    edge_users.add(edge_key)

                faces.pop(f_idx)
            else:
                tot_loops += len_face_vert_loc_indices

    # Build sharp edges
    if unique_smooth_groups:
        for edge_dict in smooth_group_users.values():
            for key, users in edge_dict.items():
                if users == 1:  # This edge is on the boundary of a group
                    sharp_edges.add(key)

    # map the material names to an index
    material_mapping = {name: i for i, name in enumerate(unique_materials)}  # enumerate over unique_materials keys()

    materials = [None] * len(unique_materials)

    for name, index in material_mapping.items():
        materials[index] = unique_materials[name]

    me = bpy.data.meshes.new(dataname)

    # make sure the list isn't too big
    for material in materials:
        me.materials.append(material)

    me.vertices.add(len(verts_loc))
    me.loops.add(tot_loops)
    me.polygons.add(len(faces))

    # verts_loc is a list of (x, y, z) tuples
    me.vertices.foreach_set("co", unpack_list(verts_loc))

    loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces
                                for vidx in face_vert_loc_indices)
    faces_loop_start = []
    lidx = 0
    for f in faces:
        face_vert_loc_indices = f[0]
        nbr_vidx = len(face_vert_loc_indices)
        faces_loop_start.append(lidx)
        lidx += nbr_vidx
    faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)

    me.loops.foreach_set("vertex_index", loops_vert_idx)
    me.polygons.foreach_set("loop_start", faces_loop_start)
    me.polygons.foreach_set("loop_total", faces_loop_total)

    faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
    me.polygons.foreach_set("material_index", faces_ma_index)

    faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
    me.polygons.foreach_set("use_smooth", faces_use_smooth)

    if verts_nor and me.loops:
        # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
        #       we can only set custom lnors *after* calling it.
        me.create_normals_split()
        loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces
                             for face_noidx in face_vert_nor_indices
                             for no in verts_nor[face_noidx])
        me.loops.foreach_set("normal", loops_nor)

    if verts_tex and me.polygons:
        # Some files Do not explicitly write the 'v' value when it's 0.0, see T68249...
        verts_tex = [uv if len(uv) == 2 else uv + [0.0] for uv in verts_tex]
        me.uv_layers.new(do_init=False)
        loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces
                            for face_uvidx in face_vert_tex_indices
                            for uv in verts_tex[face_uvidx])
        me.uv_layers[0].data.foreach_set("uv", loops_uv)

    use_edges = use_edges and bool(edges)
    if use_edges:
        me.edges.add(len(edges))
        # edges should be a list of (a, b) tuples
        me.edges.foreach_set("vertices", unpack_list(edges))

    me.validate(clean_customdata=False)  # *Very* important to not remove lnors here!
    me.update(calc_edges=use_edges, calc_edges_loose=use_edges)

    # Un-tessellate as much as possible, in case we had to triangulate some ngons...
    if fgon_edges:
        import bmesh
        bm = bmesh.new()
        bm.from_mesh(me)
        verts = bm.verts[:]
        get = bm.edges.get
        edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
        try:
            bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
        except Exception:
            # Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
            import traceback
            traceback.print_exc()

        bm.to_mesh(me)
        bm.free()

    # XXX If validate changes the geometry, this is likely to be broken...
    if unique_smooth_groups and sharp_edges:
        for e in me.edges:
            if e.key in sharp_edges:
                e.use_edge_sharp = True

    if verts_nor:
        clnors = array.array('f', [0.0] * (len(me.loops) * 3))
        me.loops.foreach_get("normal", clnors)

        if not unique_smooth_groups:
            me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))

        me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
        me.use_auto_smooth = True

    ob = bpy.data.objects.new(me.name, me)
    new_objects.append(ob)

    # Create the vertex groups. No need to have the flag passed here since we test for the
    # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
    # the following test will never run
    for group_name, group_indices in vertex_groups.items():
        group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
        group.add(group_indices, 1.0, 'REPLACE')
def create_nurbs(context_nurbs, vert_loc, new_objects):
    """
    Add nurbs object to blender, only support one type at the moment
    """
    deg = context_nurbs.get(b'deg', (3,))
    curv_range = context_nurbs.get(b'curv_range')
    curv_idx = context_nurbs.get(b'curv_idx', [])
    parm_u = context_nurbs.get(b'parm_u', [])
    parm_v = context_nurbs.get(b'parm_v', [])
    name = context_nurbs.get(b'name', b'ObjNurb')
    cstype = context_nurbs.get(b'cstype')

    if cstype is None:
        print('\tWarning, cstype not found')
        return
    if cstype != b'bspline':
        print('\tWarning, cstype is not supported (only bspline)')
        return
    if not curv_idx:
        print('\tWarning, curv argument empty or not set')
        return
    if len(deg) > 1 or parm_v:
        print('\tWarning, surfaces not supported')
        return

    cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
    cu.dimensions = '3D'

    nu = cu.splines.new('NURBS')
    nu.points.add(len(curv_idx) - 1)  # a point is added to start with
    nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + [1.0])])

    nu.order_u = deg[0] + 1

    # get for endpoint flag from the weighting
    if curv_range and len(parm_u) > deg[0] + 1:
        do_endpoints = True
        for i in range(deg[0] + 1):

            if abs(parm_u[i] - curv_range[0]) > 0.0001:
                do_endpoints = False
                break

            if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
                do_endpoints = False
                break

    else:
        do_endpoints = False

    if do_endpoints:
        nu.use_endpoint_u = True

    # close
    '''
    do_closed = False
    if len(parm_u) > deg[0]+1:
        for i in xrange(deg[0]+1):
            #print curv_idx[i], curv_idx[-(i+1)]

            if curv_idx[i]==curv_idx[-(i+1)]:
                do_closed = True
                break

    if do_closed:
        nu.use_cyclic_u = True
    '''

    ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)

    new_objects.append(ob)
def strip_slash(line_split):
    """Strip a trailing line-continuation backslash from the last token, in place.

    Returns True when a continuation backslash was found (the logical line
    continues on the next physical line), False otherwise.
    """
    if line_split[-1][-1] == 92:  # '\' char
        if len(line_split[-1]) == 1:
            line_split.pop()  # remove the \ item
        else:
            line_split[-1] = line_split[-1][:-1]  # remove the \ from the end last number
        return True
    return False
def get_float_func(filepath):
    """
    find the float function for this obj file
    - whether to replace commas or not

    Scans the first vertex-ish line (v/vn/vt): if it uses ',' as decimal
    separator, return a converter that swaps ',' for '.', else plain float.
    """
    # 'with' ensures the probe handle is closed on every return path.
    with open(filepath, 'rb') as file:
        for line in file:  # .readlines():
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda f: float(f.replace(b',', b'.'))
                elif b'.' in line:
                    return float

    # in case all vert values were ints
    return float
def any_number_as_int(svalue):
    """Parse *svalue* (bytes) as an int, tolerating float notation and ',' decimals.

    Some MTL files incorrectly use a float (or comma decimals) where an int is
    expected; go through float() so both b'3', b'2.9' and b'2,9' are accepted.
    """
    normalized = svalue.replace(b',', b'.')
    return int(float(normalized))
892 global_clamp_size
=0.0,
893 use_smooth_groups
=True,
895 use_split_objects
=True,
896 use_split_groups
=False,
897 use_image_search
=True,
898 use_groups_as_vgroups
=False,
903 Called by the user interface or another script.
904 load_obj(path) - should give acceptable results.
905 This function passes the file and sends the data off
906 to be split into objects and then converted into mesh objects
def unique_name(existing_names, name_orig):
    """Return *name_orig* made unique against *existing_names* (and record it).

    None gets the default b"ObjObject" base name; clashes get a b".NNN" suffix.
    """
    i = 0
    if name_orig is None:
        name_orig = b"ObjObject"
    name = name_orig
    while name in existing_names:
        name = b"%s.%03d" % (name_orig, i)
        i += 1
    existing_names.add(name)
    return name
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
    """Accumulate one v/vn/vt vector, handling '\\' line continuations.

    Returns the tag while the logical line continues, else b'' (and then the
    completed vector, truncated to vec_len, is appended to data).
    """
    # strip_slash() both detects and removes a trailing continuation backslash.
    continues = strip_slash(line_split)
    ret_context_multi_line = tag if continues else b''

    if line_start == tag:
        # Fresh vector: first token is the tag itself, skip it.
        vec[:] = [float_func(v) for v in line_split[1:]]
    elif context_multi_line == tag:
        # Continuation line: every token is a component.
        vec += [float_func(v) for v in line_split]

    if not ret_context_multi_line:
        data.append(tuple(vec[:vec_len]))
    return ret_context_multi_line
def create_face(context_material, context_smooth_group, context_object_key):
    """Return a fresh, mutable 7-tuple face record for the parser."""
    face_vert_loc_indices = []
    face_vert_nor_indices = []
    face_vert_tex_indices = []
    return (
        face_vert_loc_indices,
        face_vert_nor_indices,
        face_vert_tex_indices,
        context_material,
        context_smooth_group,
        context_object_key,
        [],  # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
    )
943 with
ProgressReport(context
.window_manager
) as progress
:
944 progress
.enter_substeps(1, "Importing OBJ %r..." % filepath
)
946 if global_matrix
is None:
947 global_matrix
= mathutils
.Matrix()
949 if use_split_objects
or use_split_groups
:
950 use_groups_as_vgroups
= False
955 faces
= [] # tuples of the faces
956 material_libs
= set() # filenames to material libs this OBJ uses
957 vertex_groups
= {} # when use_groups_as_vgroups is true
959 # Get the string to float conversion func for this file- is 'float' for almost all files.
960 float_func
= get_float_func(filepath
)
963 context_material
= None
964 context_smooth_group
= None
965 context_object_key
= None
966 context_object_obpart
= None
967 context_vgroup
= None
969 objects_names
= set()
974 context_parm
= b
'' # used by nurbs too but could be used elsewhere
976 # Until we can use sets
977 use_default_material
= False
978 unique_materials
= {}
979 unique_smooth_groups
= {}
980 # unique_obects= {} - no use for this variable since the objects are stored in the face.
982 # when there are faces that end with \
983 # it means they are multiline-
984 # since we use xreadline we cant skip to the next line
985 # so we need to know whether
986 context_multi_line
= b
''
988 # Per-face handling data.
989 face_vert_loc_indices
= None
990 face_vert_nor_indices
= None
991 face_vert_tex_indices
= None
992 verts_loc_len
= verts_nor_len
= verts_tex_len
= 0
993 face_items_usage
= set()
994 face_invalid_blenpoly
= None
999 quick_vert_failures
= 0
1000 skip_quick_vert
= False
1002 progress
.enter_substeps(3, "Parsing OBJ file...")
1003 with
open(filepath
, 'rb') as f
:
1005 line_split
= line
.split()
1010 line_start
= line_split
[0] # we compare with this a _lot_
1012 if len(line_split
) == 1 and not context_multi_line
and line_start
!= b
'end':
1013 print("WARNING, skipping malformatted line: %s" % line
.decode('UTF-8', 'replace').rstrip())
1016 # Handling vertex data are pretty similar, factorize that.
1017 # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
1018 # and only fallback to full multi-line parsing when needed, this gives significant speed-up
1019 # (~40% on affected code).
1020 if line_start
== b
'v':
1021 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, not skip_quick_vert
1022 elif line_start
== b
'vn':
1023 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, not skip_quick_vert
1024 elif line_start
== b
'vt':
1025 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, not skip_quick_vert
1026 elif context_multi_line
== b
'v':
1027 vdata
, vdata_len
, do_quick_vert
= verts_loc
, 3, False
1028 elif context_multi_line
== b
'vn':
1029 vdata
, vdata_len
, do_quick_vert
= verts_nor
, 3, False
1030 elif context_multi_line
== b
'vt':
1031 vdata
, vdata_len
, do_quick_vert
= verts_tex
, 2, False
1038 vdata
.append(list(map(float_func
, line_split
[1:vdata_len
+ 1])))
1040 do_quick_vert
= False
1041 # In case we get too many failures on quick parsing, force fallback to full multi-line one.
1042 # Exception handling can become costly...
1043 quick_vert_failures
+= 1
1044 if quick_vert_failures
> 10000:
1045 skip_quick_vert
= True
1046 if not do_quick_vert
:
1047 context_multi_line
= handle_vec(line_start
, context_multi_line
, line_split
,
1048 context_multi_line
or line_start
,
1049 vdata
, vec
, vdata_len
)
1051 elif line_start
== b
'f' or context_multi_line
== b
'f':
1052 if not context_multi_line
:
1053 line_split
= line_split
[1:]
1054 # Instantiate a face
1055 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1056 (face_vert_loc_indices
, face_vert_nor_indices
, face_vert_tex_indices
,
1057 _1
, _2
, _3
, face_invalid_blenpoly
) = face
1059 face_items_usage
.clear()
1060 verts_loc_len
= len(verts_loc
)
1061 verts_nor_len
= len(verts_nor
)
1062 verts_tex_len
= len(verts_tex
)
1063 if context_material
is None:
1064 use_default_material
= True
1065 # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
1067 context_multi_line
= b
'f' if strip_slash(line_split
) else b
''
1069 for v
in line_split
:
1070 obj_vert
= v
.split(b
'/')
1071 idx
= int(obj_vert
[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
1072 vert_loc_index
= (idx
+ verts_loc_len
) if (idx
< 1) else idx
- 1
1073 # Add the vertex to the current group
1074 # *warning*, this wont work for files that have groups defined around verts
1075 if use_groups_as_vgroups
and context_vgroup
:
1076 vertex_groups
[context_vgroup
].append(vert_loc_index
)
1077 # This a first round to quick-detect ngons that *may* use a same edge more than once.
1078 # Potential candidate will be re-checked once we have done parsing the whole face.
1079 if not face_invalid_blenpoly
:
1080 # If we use more than once a same vertex, invalid ngon is suspected.
1081 if vert_loc_index
in face_items_usage
:
1082 face_invalid_blenpoly
.append(True)
1084 face_items_usage
.add(vert_loc_index
)
1085 face_vert_loc_indices
.append(vert_loc_index
)
1087 # formatting for faces with normals and textures is
1088 # loc_index/tex_index/nor_index
1089 if len(obj_vert
) > 1 and obj_vert
[1] and obj_vert
[1] != b
'0':
1090 idx
= int(obj_vert
[1])
1091 face_vert_tex_indices
.append((idx
+ verts_tex_len
) if (idx
< 1) else idx
- 1)
1093 face_vert_tex_indices
.append(0)
1095 if len(obj_vert
) > 2 and obj_vert
[2] and obj_vert
[2] != b
'0':
1096 idx
= int(obj_vert
[2])
1097 face_vert_nor_indices
.append((idx
+ verts_nor_len
) if (idx
< 1) else idx
- 1)
1099 face_vert_nor_indices
.append(0)
1101 if not context_multi_line
:
1102 # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1103 if face_invalid_blenpoly
:
1104 face_invalid_blenpoly
.clear()
1105 face_items_usage
.clear()
1106 prev_vidx
= face_vert_loc_indices
[-1]
1107 for vidx
in face_vert_loc_indices
:
1108 edge_key
= (prev_vidx
, vidx
) if (prev_vidx
< vidx
) else (vidx
, prev_vidx
)
1109 if edge_key
in face_items_usage
:
1110 face_invalid_blenpoly
.append(True)
1112 face_items_usage
.add(edge_key
)
1115 elif use_edges
and (line_start
== b
'l' or context_multi_line
== b
'l'):
1116 # very similar to the face load function above with some parts removed
1117 if not context_multi_line
:
1118 line_split
= line_split
[1:]
1119 # Instantiate a face
1120 face
= create_face(context_material
, context_smooth_group
, context_object_key
)
1121 face_vert_loc_indices
= face
[0]
1122 # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1123 # as a polyline, and not a regular face...
1126 if context_material
is None:
1127 use_default_material
= True
1128 # Else, use face_vert_loc_indices previously defined and used the obj_face
1130 context_multi_line
= b
'l' if strip_slash(line_split
) else b
''
1132 for v
in line_split
:
1133 obj_vert
= v
.split(b
'/')
1134 idx
= int(obj_vert
[0]) - 1
1135 face_vert_loc_indices
.append((idx
+ len(verts_loc
) + 1) if (idx
< 0) else idx
)
1137 elif line_start
== b
's':
1138 if use_smooth_groups
:
1139 context_smooth_group
= line_value(line_split
)
1140 if context_smooth_group
== b
'off':
1141 context_smooth_group
= None
1142 elif context_smooth_group
: # is not None
1143 unique_smooth_groups
[context_smooth_group
] = None
1145 elif line_start
== b
'o':
1146 if use_split_objects
:
1147 context_object_key
= unique_name(objects_names
, line_value(line_split
))
1148 context_object_obpart
= context_object_key
1149 # unique_objects[context_object_key]= None
1151 elif line_start
== b
'g':
1152 if use_split_groups
:
1153 grppart
= line_value(line_split
)
1154 context_object_key
= (context_object_obpart
, grppart
) if context_object_obpart
else grppart
1155 # print 'context_object_key', context_object_key
1156 # unique_objects[context_object_key]= None
1157 elif use_groups_as_vgroups
:
1158 context_vgroup
= line_value(line
.split())
1159 if context_vgroup
and context_vgroup
!= b
'(null)':
1160 vertex_groups
.setdefault(context_vgroup
, [])
1162 context_vgroup
= None # dont assign a vgroup
1164 elif line_start
== b
'usemtl':
1165 context_material
= line_value(line
.split())
1166 unique_materials
[context_material
] = None
1167 elif line_start
== b
'mtllib': # usemap or usemat
1168 # can have multiple mtllib filenames per line, mtllib can appear more than once,
1169 # so make sure only occurrence of material exists
1170 material_libs |
= {os
.fsdecode(f
) for f
in filenames_group_by_ext(line
.lstrip()[7:].strip(), b
'.mtl')
1174 elif line_start
== b
'cstype':
1175 context_nurbs
[b
'cstype'] = line_value(line
.split()) # 'rat bspline' / 'bspline'
1176 elif line_start
== b
'curv' or context_multi_line
== b
'curv':
1177 curv_idx
= context_nurbs
[b
'curv_idx'] = context_nurbs
.get(b
'curv_idx', []) # in case were multiline
1179 if not context_multi_line
:
1180 context_nurbs
[b
'curv_range'] = float_func(line_split
[1]), float_func(line_split
[2])
1181 line_split
[0:3] = [] # remove first 3 items
1183 if strip_slash(line_split
):
1184 context_multi_line
= b
'curv'
1186 context_multi_line
= b
''
1188 for i
in line_split
:
1189 vert_loc_index
= int(i
) - 1
1191 if vert_loc_index
< 0:
1192 vert_loc_index
= len(verts_loc
) + vert_loc_index
+ 1
1194 curv_idx
.append(vert_loc_index
)
1196 elif line_start
== b
'parm' or context_multi_line
== b
'parm':
1197 if context_multi_line
:
1198 context_multi_line
= b
''
1200 context_parm
= line_split
[1]
1201 line_split
[0:2] = [] # remove first 2
1203 if strip_slash(line_split
):
1204 context_multi_line
= b
'parm'
1206 context_multi_line
= b
''
1208 if context_parm
.lower() == b
'u':
1209 context_nurbs
.setdefault(b
'parm_u', []).extend([float_func(f
) for f
in line_split
])
1210 elif context_parm
.lower() == b
'v': # surfaces not supported yet
1211 context_nurbs
.setdefault(b
'parm_v', []).extend([float_func(f
) for f
in line_split
])
1212 # else: # may want to support other parm's ?
1214 elif line_start
== b
'deg':
1215 context_nurbs
[b
'deg'] = [int(i
) for i
in line
.split()[1:]]
1216 elif line_start
== b
'end':
1217 # Add the nurbs curve
1218 if context_object_key
:
1219 context_nurbs
[b
'name'] = context_object_key
1220 nurbs
.append(context_nurbs
)
1224 ''' # How to use usemap? deprecated?
1225 elif line_start == b'usema': # usemap or usemat
1226 context_image= line_value(line_split)
1229 progress
.step("Done, loading materials and images...")
1231 if use_default_material
:
1232 unique_materials
[None] = None
1233 create_materials(filepath
, relpath
, material_libs
, unique_materials
,
1234 use_image_search
, float_func
)
1236 progress
.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1237 (len(verts_loc
), len(faces
), len(unique_materials
), len(unique_smooth_groups
)))
1240 if bpy
.ops
.object.select_all
.poll():
1241 bpy
.ops
.object.select_all(action
='DESELECT')
1243 new_objects
= [] # put new objects here
1245 # Split the mesh by objects/materials, may
1246 SPLIT_OB_OR_GROUP
= bool(use_split_objects
or use_split_groups
)
1248 for data
in split_mesh(verts_loc
, faces
, unique_materials
, filepath
, SPLIT_OB_OR_GROUP
):
1249 verts_loc_split
, faces_split
, unique_materials_split
, dataname
, use_vnor
, use_vtex
= data
1250 # Create meshes from the data, warning 'vertex_groups' wont support splitting
1251 #~ print(dataname, use_vnor, use_vtex)
1252 create_mesh(new_objects
,
1255 verts_nor
if use_vnor
else [],
1256 verts_tex
if use_vtex
else [],
1258 unique_materials_split
,
1259 unique_smooth_groups
,
1265 for context_nurbs
in nurbs
:
1266 create_nurbs(context_nurbs
, verts_loc
, new_objects
)
1268 view_layer
= context
.view_layer
1269 collection
= view_layer
.active_layer_collection
.collection
1272 for obj
in new_objects
:
1273 collection
.objects
.link(obj
)
1274 obj
.select_set(True)
1276 # we could apply this anywhere before scaling.
1277 obj
.matrix_world
= global_matrix
1281 axis_min
= [1000000000] * 3
1282 axis_max
= [-1000000000] * 3
1284 if global_clamp_size
:
1285 # Get all object bounds
1286 for ob
in new_objects
:
1287 for v
in ob
.bound_box
:
1288 for axis
, value
in enumerate(v
):
1289 if axis_min
[axis
] > value
:
1290 axis_min
[axis
] = value
1291 if axis_max
[axis
] < value
:
1292 axis_max
[axis
] = value
1295 max_axis
= max(axis_max
[0] - axis_min
[0], axis_max
[1] - axis_min
[1], axis_max
[2] - axis_min
[2])
1298 while global_clamp_size
< max_axis
* scale
:
1299 scale
= scale
/ 10.0
1301 for obj
in new_objects
:
1302 obj
.scale
= scale
, scale
, scale
1304 progress
.leave_substeps("Done.")
1305 progress
.leave_substeps("Finished importing: %r" % filepath
)