# Source: blender-addons.git / io_scene_obj / import_obj.py
# Commit: "Cleanup: io_scene_obj, unused imports, vars"
# Blob: 356dd43d18497cee408a9816b67a7c12ed415303
1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
19 # <pep8 compliant>
21 # Script copyright (C) Campbell Barton
22 # Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
24 """
25 This script imports a Wavefront OBJ files to Blender.
27 Usage:
28 Run this script from "File->Import" menu and then load the desired OBJ file.
29 Note, This loads mesh objects and materials only, nurbs and curves are not supported.
31 http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
32 """
34 import array
35 import os
36 import time
37 import bpy
38 import mathutils
40 from bpy_extras.io_utils import unpack_list
41 from bpy_extras.image_utils import load_image
42 from bpy_extras.wm_utils.progress_report import ProgressReport
def line_value(line_split):
    """
    Return the "value" part of an already-split OBJ/MTL line.

    Given the whitespace-split byte tokens of one line (keyword first),
    return None when there is no value token, the single value token when
    there is exactly one, or all value tokens re-joined with single spaces
    otherwise.
    """
    n_tokens = len(line_split)
    if n_tokens == 2:
        return line_split[1]
    if n_tokens > 2:
        return b' '.join(line_split[1:])
    # Lone keyword (or empty split): no value on this line.
    return None
def filenames_group_by_ext(line, ext):
    """
    Splits material libraries supporting spaces, so:
    b'foo bar.mtl baz spam.MTL' -> (b'foo bar.mtl', b'baz spam.MTL')
    Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).

    Generator: yields one filename (bytes) at a time.  `ext` must be the
    lower-case extension (e.g. b'.mtl') used to split unquoted lists.
    """
    # Note that we assume that if there are some " in that line,
    # then all filenames are properly enclosed within those...
    start = line.find(b'"') + 1
    if start != 0:
        # Quoted mode: yield each "..." enclosed span verbatim.
        while start != 0:
            end = line.find(b'"', start)
            if end != -1:
                yield line[start:end]
                start = line.find(b'"', end + 1) + 1
            else:
                # Unterminated quote: give up on the rest of the line.
                break
        return

    # Unquoted mode: split on each case-insensitive occurrence of `ext`,
    # so spaces inside filenames survive.
    line_lower = line.lower()
    i_prev = 0
    while i_prev != -1 and i_prev < len(line):
        i = line_lower.find(ext, i_prev)
        if i != -1:
            i += len(ext)
        # NOTE(review): when `ext` is not found, i == -1 and this yields
        # line[i_prev:-1] (last byte dropped) before the loop terminates —
        # looks intentional-by-accident upstream; confirm before changing.
        yield line[i_prev:i].strip()
        i_prev = i
def obj_image_load(context_imagepath_map, line, DIR, recursive, relpath):
    """
    Mainly uses comprehensiveImageLoad
    But we try all space-separated items from current line when file is not found with last one
    (users keep generating/using image files with spaces in a format that does not support them, sigh...)
    Also tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
    Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).

    `context_imagepath_map` caches already-loaded images keyed by the decoded
    path, so the same texture is never loaded twice.  Always returns an image
    datablock (a placeholder one when nothing could be found).
    """
    filepath_parts = line.split(b' ')

    # If the path is quoted, trust the quoted span as the one filename.
    start = line.find(b'"') + 1
    if start != 0:
        end = line.find(b'"', start)
        # NOTE(review): `end != 0` cannot distinguish a missing closing quote
        # (find returns -1, silently truncating the last byte) — presumably
        # should be `end != -1`; confirm against upstream before changing.
        if end != 0:
            filepath_parts = (line[start:end],)

    image = None
    # Try progressively longer suffixes of the space-split tokens, starting
    # from the last token alone (handles filenames containing spaces).
    for i in range(-1, -len(filepath_parts), -1):
        imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
        # `...` (Ellipsis) is used as a "not cached" sentinel since None is a
        # possible cached value.
        image = context_imagepath_map.get(imagepath, ...)
        if image is ...:
            image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
            if image is None and "_" in imagepath:
                # Max's exporter replaces spaces with underscores; retry.
                image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
        if image is not None:
            context_imagepath_map[imagepath] = image
            break;

    if image is None:
        # Nothing found: load a placeholder for the last token so the
        # material keeps a texture slot the user can fix up later.
        imagepath = os.fsdecode(filepath_parts[-1])
        image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
        context_imagepath_map[imagepath] = image

    return image
def create_materials(filepath, relpath,
                     material_libs, unique_materials,
                     use_image_search, float_func):
    """
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs.

    `material_libs` is a set of MTL filenames (may be extended here with an
    implicit sibling .mtl); `unique_materials` maps material name (bytes, or
    None for the default) to the created bpy material datablock (filled in
    here).  `float_func` is the bytes->float parser picked for this file.
    """
    from math import sqrt
    from bpy_extras import node_shader_utils

    DIR = os.path.dirname(filepath)
    # Names of material properties explicitly set by the current MTL entry,
    # so finalize_material() only fills in the ones left untouched.
    context_material_vars = set()

    # Don't load the same image multiple times
    context_imagepath_map = {}

    # Material datablock -> PrincipledBSDFWrapper, shared by the helpers below.
    nodal_material_wrap_map = {}

    def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
        """
        Set textures defined in .mtl file.

        `img_data` is the token list after the map_* keyword; leading '-xxx'
        option tokens (e.g. -o, -s, -bm, -type) are parsed into `map_options`,
        the rest of `line` is resolved to an image via obj_image_load().
        """
        map_options = {}

        # Group tokens into {option: [args...]}; the last token is always
        # (part of) the filename, hence img_data[:-1].
        curr_token = []
        for token in img_data[:-1]:
            if token.startswith(b'-') and token[1:].isalpha():
                if curr_token:
                    map_options[curr_token[0]] = curr_token[1:]
                curr_token[:] = []
            curr_token.append(token)
        if curr_token:
            map_options[curr_token[0]] = curr_token[1:]

        # Absolute path - c:\.. etc would work here
        image = obj_image_load(context_imagepath_map, line, DIR, use_image_search, relpath)

        map_offset = map_options.get(b'-o')
        map_scale = map_options.get(b'-s')
        if map_offset is not None:
            map_offset = tuple(map(float_func, map_offset))
        if map_scale is not None:
            map_scale = tuple(map(float_func, map_scale))

        def _generic_tex_set(nodetex, image, texcoords, translation, scale):
            # Common setup for one texture slot of the Principled wrapper.
            nodetex.image = image
            nodetex.texcoords = texcoords
            if translation is not None:
                nodetex.translation = translation
            if scale is not None:
                nodetex.scale = scale

        # Adds textures for materials (rendering)
        if type == 'Kd':
            _generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)

        elif type == 'Ka':
            # XXX Not supported?
            print("WARNING, currently unsupported ambient texture, skipped.")

        elif type == 'Ks':
            _generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)

        elif type == 'Ke':
            _generic_tex_set(mat_wrap.emission_color_texture, image, 'UV', map_offset, map_scale)

        elif type == 'Bump':
            # -bm multiplier is only honored when it has more than one char
            # (skips degenerate single-char values).
            bump_mult = map_options.get(b'-bm')
            bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
            mat_wrap.normalmap_strength_set(bump_mult)

            _generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)

        elif type == 'D':
            _generic_tex_set(mat_wrap.alpha_texture, image, 'UV', map_offset, map_scale)

        elif type == 'disp':
            # XXX Not supported?
            print("WARNING, currently unsupported displacement texture, skipped.")
            # ~ mat_wrap.bump_image_set(image)
            # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)

        elif type == 'refl':
            map_type = map_options.get(b'-type')
            if map_type and map_type != [b'sphere']:
                print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
                      "" % ' '.join(i.decode() for i in map_type))

            _generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
            mat_wrap.base_color_texture.projection = 'SPHERE'

        else:
            raise Exception("invalid type %r" % type)

    def finalize_material(context_material, context_material_vars, spec_colors,
                          do_highlight, do_reflection, do_transparency, do_glass):
        """Apply the accumulated illum/color state to the just-finished material.

        NOTE: reads `context_mat_wrap` from the enclosing scope (set in the
        MTL parsing loop below), not from a parameter.
        """
        # Finalize previous mat, if any.
        if context_material:
            if "specular" in context_material_vars:
                # XXX This is highly approximated, not sure whether we can do better...
                # TODO: Find a way to guesstimate best value from diffuse color...
                # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
                #       from some grey), and apply the the proportion between those two as tint factor?
                spec = sum(spec_colors) / 3.0
                # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
                # ~ diff = sum(context_mat_wrap.base_color) / 3.0
                # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
                # ~ tint = min(1.0, spec_var / diff_var)
                context_mat_wrap.specular = spec
                context_mat_wrap.specular_tint = 0.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 0.0

            # FIXME, how else to use this?
            if do_highlight:
                if "specular" not in context_material_vars:
                    context_mat_wrap.specular = 1.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 0.0
            else:
                if "specular" not in context_material_vars:
                    context_mat_wrap.specular = 0.0
                if "roughness" not in context_material_vars:
                    context_mat_wrap.roughness = 1.0

            if do_reflection:
                if "metallic" not in context_material_vars:
                    context_mat_wrap.metallic = 1.0
            else:
                # since we are (ab)using ambient term for metallic (which can be non-zero)
                context_mat_wrap.metallic = 0.0

            if do_transparency:
                if "ior" not in context_material_vars:
                    context_mat_wrap.ior = 1.0
                if "alpha" not in context_material_vars:
                    context_mat_wrap.alpha = 1.0
                # EEVEE only
                context_material.blend_method = 'BLEND'

            if do_glass:
                if "ior" not in context_material_vars:
                    context_mat_wrap.ior = 1.5

    # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
    temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
    if os.path.exists(os.path.join(DIR, temp_mtl)):
        material_libs.add(temp_mtl)
    del temp_mtl

    # Create new materials
    for name in unique_materials:  # .keys()
        ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
        ma = unique_materials[name] = bpy.data.materials.new(ma_name)
        ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
        nodal_material_wrap_map[ma] = ma_wrap
        ma_wrap.use_nodes = True

    for libname in sorted(material_libs):
        # print(libname)
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
            print("\tMaterial not found MTL: %r" % mtlpath)
        else:
            # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
            # (i.e. automatically controlled by other parameters).
            do_highlight = False
            do_reflection = False
            do_transparency = False
            do_glass = False
            spec_colors = [0.0, 0.0, 0.0]

            # print('\t\tloading mtl: %e' % mtlpath)
            context_material = None
            context_mat_wrap = None
            mtl = open(mtlpath, 'rb')
            for line in mtl:  # .readlines():
                line = line.strip()
                if not line or line.startswith(b'#'):
                    continue

                line_split = line.split()
                line_id = line_split[0].lower()

                if line_id == b'newmtl':
                    # Finalize previous mat, if any.
                    finalize_material(context_material, context_material_vars, spec_colors,
                                      do_highlight, do_reflection, do_transparency, do_glass)

                    context_material_name = line_value(line_split)
                    context_material = unique_materials.get(context_material_name)
                    if context_material is not None:
                        context_mat_wrap = nodal_material_wrap_map[context_material]
                    context_material_vars.clear()

                    # Reset per-material parsing state.
                    spec_colors[:] = [0.0, 0.0, 0.0]
                    do_highlight = False
                    do_reflection = False
                    do_transparency = False
                    do_glass = False

                elif context_material:
                    def _get_colors(line_split):
                        # OBJ 'allows' one or two components values, treat single component as greyscale, and two as blue = 0.0.
                        ln = len(line_split)
                        if ln == 2:
                            return [float_func(line_split[1])] * 3
                        elif ln == 3:
                            return [float_func(line_split[1]), float_func(line_split[2]), 0.0]
                        else:
                            return [float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]

                    # we need to make a material to assign properties to it.
                    if line_id == b'ka':
                        # Ambient is (ab)used as metallic, see finalize_material().
                        refl = sum(_get_colors(line_split)) / 3.0
                        context_mat_wrap.metallic = refl
                        context_material_vars.add("metallic")
                    elif line_id == b'kd':
                        context_mat_wrap.base_color = _get_colors(line_split)
                    elif line_id == b'ks':
                        spec_colors[:] = _get_colors(line_split)
                        context_material_vars.add("specular")
                    elif line_id == b'ke':
                        # We cannot set context_material.emit right now, we need final diffuse color as well for this.
                        # XXX Unsupported currently
                        context_mat_wrap.emission_color = _get_colors(line_split)
                    elif line_id == b'ns':
                        # XXX Totally empirical conversion, trying to adapt it
                        #     (from 0.0 - 900.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
                        val = max(0.0, min(900.0, float_func(line_split[1])))
                        context_mat_wrap.roughness = 1.0 - (sqrt(val) / 30)
                        context_material_vars.add("roughness")
                    elif line_id == b'ni':  # Refraction index (between 0.001 and 10).
                        context_mat_wrap.ior = float_func(line_split[1])
                        context_material_vars.add("ior")
                    elif line_id == b'd':  # dissolve (transparency)
                        context_mat_wrap.alpha = float_func(line_split[1])
                        context_material_vars.add("alpha")
                    elif line_id == b'tr':  # translucency
                        print("WARNING, currently unsupported 'tr' translucency option, skipped.")
                    elif line_id == b'tf':
                        # rgb, filter color, blender has no support for this.
                        print("WARNING, currently unsupported 'tf' filter color option, skipped.")
                    elif line_id == b'illum':
                        # Some MTL files incorrectly use a float for this value, see T60135.
                        illum = any_number_as_int(line_split[1])

                        # inline comments are from the spec, v4.2
                        if illum == 0:
                            # Color on and Ambient off
                            print("WARNING, Principled BSDF shader does not support illumination 0 mode "
                                  "(colors with no ambient), skipped.")
                        elif illum == 1:
                            # Color on and Ambient on
                            pass
                        elif illum == 2:
                            # Highlight on
                            do_highlight = True
                        elif illum == 3:
                            # Reflection on and Ray trace on
                            do_reflection = True
                        elif illum == 4:
                            # Transparency: Glass on
                            # Reflection: Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                        elif illum == 5:
                            # Reflection: Fresnel on and Ray trace on
                            do_reflection = True
                        elif illum == 6:
                            # Transparency: Refraction on
                            # Reflection: Fresnel off and Ray trace on
                            do_transparency = True
                            do_reflection = True
                        elif illum == 7:
                            # Transparency: Refraction on
                            # Reflection: Fresnel on and Ray trace on
                            do_transparency = True
                            do_reflection = True
                        elif illum == 8:
                            # Reflection on and Ray trace off
                            do_reflection = True
                        elif illum == 9:
                            # Transparency: Glass on
                            # Reflection: Ray trace off
                            do_transparency = True
                            do_reflection = False
                            do_glass = True
                        elif illum == 10:
                            # Casts shadows onto invisible surfaces
                            print("WARNING, Principled BSDF shader does not support illumination 10 mode "
                                  "(cast shadows on invisible surfaces), skipped.")
                            pass

                    elif line_id == b'map_ka':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ka')
                    elif line_id == b'map_ks':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ks')
                    elif line_id == b'map_kd':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Kd')
                    elif line_id == b'map_ke':
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Ke')
                    elif line_id in {b'map_bump', b'bump'}:  # 'bump' is incorrect but some files use it.
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'Bump')
                    elif line_id in {b'map_d', b'map_tr'}:  # Alpha map - Dissolve
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'D')

                    elif line_id in {b'map_disp', b'disp'}:  # displacementmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'disp')

                    elif line_id in {b'map_refl', b'refl'}:  # reflectionmap
                        img_data = line.split()[1:]
                        if img_data:
                            load_material_image(context_material, context_mat_wrap,
                                                context_material_name, img_data, line, 'refl')
                    else:
                        print("WARNING: %r:%r (ignored)" % (filepath, line))

            # Finalize last mat, if any.
            finalize_material(context_material, context_material_vars, spec_colors,
                              do_highlight, do_reflection, do_transparency, do_glass)
            mtl.close()
def face_is_edge(face):
    """Return True when this (temp, working) face datum is really an edge, not a face.

    An entry whose vertex-location list holds exactly two indices is an edge;
    one whose normal-index list was collapsed to a single entry marks a
    polyline (also treated as edge data).
    """
    loc_indices, nor_indices = face[0], face[1]
    return len(loc_indices) == 2 or len(nor_indices) == 1
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    """
    Takes vert_loc and faces, and separates into multiple sets of
    (verts_loc, faces, unique_materials, dataname)

    Returns a list of 6-tuples:
    (verts, faces, materials, dataname, use_vnor, use_vtex).
    WARNING: mutates the faces' vertex-location index lists in place
    (remapping global vertex indices to per-split local ones).
    """
    filename = os.path.splitext((os.path.basename(filepath)))[0]

    if not SPLIT_OB_OR_GROUP or not faces:
        use_verts_nor = any(f[1] for f in faces)
        use_verts_tex = any(f[2] for f in faces)
        # use the filename for the object name since we aren't chopping up the mesh.
        return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]

    def key_to_name(key):
        # if the key is a tuple, join it to make a string
        if not key:
            return filename  # assume its a string. make sure this is true if the splitting code is changed
        elif isinstance(key, bytes):
            return key.decode('utf-8', 'replace')
        else:
            return "_".join(k.decode('utf-8', 'replace') for k in key)

    # Return a key that makes the faces unique.
    # Maps object key -> ([verts], [faces], {materials}, {vert remap}, use_nor flag, use_tex flag).
    face_split_dict = {}

    oldkey = -1  # initialize to a value that will never match the key

    for face in faces:
        (face_vert_loc_indices,
         face_vert_nor_indices,
         face_vert_tex_indices,
         context_material,
         _context_smooth_group,
         context_object_key,
         _face_invalid_blenpoly,
         ) = face
        key = context_object_key

        if oldkey != key:
            # Check the key has changed.  Faces with the same key are
            # contiguous, so per-key accumulators are only re-fetched on change.
            (verts_split, faces_split, unique_materials_split, vert_remap,
             use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
            oldkey = key

        if not face_is_edge(face):
            # Lists are used as mutable booleans here (shared via face_split_dict).
            if not use_verts_nor and face_vert_nor_indices:
                use_verts_nor.append(True)

            if not use_verts_tex and face_vert_tex_indices:
                use_verts_tex.append(True)

        # Remap verts to new vert list and add where needed
        for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
            map_index = vert_remap.get(vert_idx)
            if map_index is None:
                map_index = len(verts_split)
                vert_remap[vert_idx] = map_index  # set the new remapped index so we only add once and can reference next time.
                verts_split.append(verts_loc[vert_idx])  # add the vert to the local verts

            face_vert_loc_indices[loop_idx] = map_index  # remap to the local index

        if context_material not in unique_materials_split:
            unique_materials_split[context_material] = unique_materials[context_material]

        faces_split.append(face)

    # remove one of the items and reorder
    return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
            for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
            in face_split_dict.items()]
def create_mesh(new_objects,
                use_edges,
                verts_loc,
                verts_nor,
                verts_tex,
                faces,
                unique_materials,
                unique_smooth_groups,
                vertex_groups,
                dataname,
                ):
    """
    Takes all the data gathered and generates a mesh, adding the new object to new_objects
    deals with ngons, sharp edges and assigning materials

    Side effects: mutates `faces` (pops edges/degenerate entries, appends
    tessellated triangles) and appends the created object to `new_objects`.
    """
    if unique_smooth_groups:
        sharp_edges = set()
        # smooth group -> {edge_key: user count}, used to find group boundaries.
        smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
        context_smooth_group_old = -1

    fgon_edges = set()  # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
    edges = []
    tot_loops = 0

    context_object_key = None

    # reverse loop through face indices (so pop(f_idx) doesn't shift unvisited entries)
    for f_idx in range(len(faces) - 1, -1, -1):
        face = faces[f_idx]

        (face_vert_loc_indices,
         face_vert_nor_indices,
         face_vert_tex_indices,
         context_material,
         context_smooth_group,
         context_object_key,
         face_invalid_blenpoly,
         ) = face

        len_face_vert_loc_indices = len(face_vert_loc_indices)

        if len_face_vert_loc_indices == 1:
            faces.pop(f_idx)  # cant add single vert faces

        # Face with a single item in face_vert_nor_indices is actually a polyline!
        elif face_is_edge(face):
            if use_edges:
                edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
                             for i in range(len_face_vert_loc_indices - 1))
            faces.pop(f_idx)

        else:
            # Smooth Group
            if unique_smooth_groups and context_smooth_group:
                # Is a part of of a smooth group and is a face
                if context_smooth_group_old is not context_smooth_group:
                    edge_dict = smooth_group_users[context_smooth_group]
                    context_smooth_group_old = context_smooth_group

                # Count users of every edge of this face within its group.
                prev_vidx = face_vert_loc_indices[-1]
                for vidx in face_vert_loc_indices:
                    edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
                    prev_vidx = vidx
                    edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1

            # NGons into triangles
            if face_invalid_blenpoly:
                # ignore triangles with invalid indices
                if len(face_vert_loc_indices) > 3:
                    from bpy_extras.mesh_utils import ngon_tessellate
                    ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
                    # Replace the invalid ngon with one working-face tuple per triangle.
                    faces.extend([([face_vert_loc_indices[ngon[0]],
                                    face_vert_loc_indices[ngon[1]],
                                    face_vert_loc_indices[ngon[2]],
                                    ],
                                   [face_vert_nor_indices[ngon[0]],
                                    face_vert_nor_indices[ngon[1]],
                                    face_vert_nor_indices[ngon[2]],
                                    ] if face_vert_nor_indices else [],
                                   [face_vert_tex_indices[ngon[0]],
                                    face_vert_tex_indices[ngon[1]],
                                    face_vert_tex_indices[ngon[2]],
                                    ] if face_vert_tex_indices else [],
                                   context_material,
                                   context_smooth_group,
                                   context_object_key,
                                   [],
                                   )
                                  for ngon in ngon_face_indices]
                                 )
                    tot_loops += 3 * len(ngon_face_indices)

                    # edges to make ngons
                    # Any edge shared by two of the new triangles is internal to
                    # the original ngon; remember it so it can be dissolved later.
                    if len(ngon_face_indices) > 1:
                        edge_users = set()
                        for ngon in ngon_face_indices:
                            prev_vidx = face_vert_loc_indices[ngon[-1]]
                            for ngidx in ngon:
                                vidx = face_vert_loc_indices[ngidx]
                                if vidx == prev_vidx:
                                    continue  # broken OBJ... Just skip.
                                edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
                                prev_vidx = vidx
                                if edge_key in edge_users:
                                    fgon_edges.add(edge_key)
                                else:
                                    edge_users.add(edge_key)

                faces.pop(f_idx)
            else:
                tot_loops += len_face_vert_loc_indices

    # Build sharp edges
    if unique_smooth_groups:
        for edge_dict in smooth_group_users.values():
            for key, users in edge_dict.items():
                if users == 1:  # This edge is on the boundary of a group
                    sharp_edges.add(key)

    # map the material names to an index
    material_mapping = {name: i for i, name in enumerate(unique_materials)}  # enumerate over unique_materials keys()

    materials = [None] * len(unique_materials)

    for name, index in material_mapping.items():
        materials[index] = unique_materials[name]

    me = bpy.data.meshes.new(dataname)

    # make sure the list isnt too big
    for material in materials:
        me.materials.append(material)

    me.vertices.add(len(verts_loc))
    me.loops.add(tot_loops)
    me.polygons.add(len(faces))

    # verts_loc is a list of (x, y, z) tuples
    me.vertices.foreach_set("co", unpack_list(verts_loc))

    loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces for vidx in face_vert_loc_indices)
    faces_loop_start = []
    lidx = 0
    # Loop starts are running offsets into the flat loop array.
    for f in faces:
        face_vert_loc_indices = f[0]
        nbr_vidx = len(face_vert_loc_indices)
        faces_loop_start.append(lidx)
        lidx += nbr_vidx
    faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)

    me.loops.foreach_set("vertex_index", loops_vert_idx)
    me.polygons.foreach_set("loop_start", faces_loop_start)
    me.polygons.foreach_set("loop_total", faces_loop_total)

    faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
    me.polygons.foreach_set("material_index", faces_ma_index)

    faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
    me.polygons.foreach_set("use_smooth", faces_use_smooth)

    if verts_nor and me.loops:
        # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
        #       we can only set custom lnors *after* calling it.
        me.create_normals_split()
        loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces
                             for face_noidx in face_vert_nor_indices
                             for no in verts_nor[face_noidx])
        me.loops.foreach_set("normal", loops_nor)

    if verts_tex and me.polygons:
        # Some files Do not explicitely write the 'v' value when it's 0.0, see T68249...
        verts_tex = [uv if len(uv) == 2 else uv + [0.0] for uv in verts_tex]
        me.uv_layers.new(do_init=False)
        loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces
                            for face_uvidx in face_vert_tex_indices
                            for uv in verts_tex[face_uvidx])
        me.uv_layers[0].data.foreach_set("uv", loops_uv)

    use_edges = use_edges and bool(edges)
    if use_edges:
        me.edges.add(len(edges))
        # edges should be a list of (a, b) tuples
        me.edges.foreach_set("vertices", unpack_list(edges))

    me.validate(clean_customdata=False)  # *Very* important to not remove lnors here!
    me.update(calc_edges=use_edges, calc_edges_loose=use_edges)

    # Un-tessellate as much as possible, in case we had to triangulate some ngons...
    if fgon_edges:
        import bmesh
        bm = bmesh.new()
        bm.from_mesh(me)
        verts = bm.verts[:]
        get = bm.edges.get
        edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
        try:
            bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
        except:
            # Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
            import traceback
            traceback.print_exc()

        bm.to_mesh(me)
        bm.free()

    # XXX If validate changes the geometry, this is likely to be broken...
    if unique_smooth_groups and sharp_edges:
        for e in me.edges:
            if e.key in sharp_edges:
                e.use_edge_sharp = True

    if verts_nor:
        # Read the temp normals back and commit them as custom split normals.
        clnors = array.array('f', [0.0] * (len(me.loops) * 3))
        me.loops.foreach_get("normal", clnors)

        if not unique_smooth_groups:
            me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))

        me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
        me.use_auto_smooth = True

    ob = bpy.data.objects.new(me.name, me)
    new_objects.append(ob)

    # Create the vertex groups. No need to have the flag passed here since we test for the
    # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
    # the following test will never run
    for group_name, group_indices in vertex_groups.items():
        group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
        group.add(group_indices, 1.0, 'REPLACE')
def create_nurbs(context_nurbs, vert_loc, new_objects):
    """
    Add nurbs object to blender, only support one type at the moment.

    `context_nurbs` is the dict of parsed curve keywords (bytes keys such as
    b'deg', b'curv_idx', b'parm_u', ...); `vert_loc` holds the global vertex
    coordinate tuples; the created object is appended to `new_objects`.
    Prints a warning and returns None for unsupported curve data.
    """
    deg = context_nurbs.get(b'deg', (3,))
    curv_range = context_nurbs.get(b'curv_range')
    curv_idx = context_nurbs.get(b'curv_idx', [])
    parm_u = context_nurbs.get(b'parm_u', [])
    parm_v = context_nurbs.get(b'parm_v', [])
    name = context_nurbs.get(b'name', b'ObjNurb')
    cstype = context_nurbs.get(b'cstype')

    if cstype is None:
        print('\tWarning, cstype not found')
        return
    if cstype != b'bspline':
        print('\tWarning, cstype is not supported (only bspline)')
        return
    if not curv_idx:
        print('\tWarning, curv argument empty or not set')
        return
    if len(deg) > 1 or parm_v:
        print('\tWarning, surfaces not supported')
        return

    cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
    cu.dimensions = '3D'

    nu = cu.splines.new('NURBS')
    nu.points.add(len(curv_idx) - 1)  # a point is added to start with
    nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])

    nu.order_u = deg[0] + 1

    # get for endpoint flag from the weighting: endpoint curves have their
    # first/last (degree + 1) knots clamped to the parameter range ends.
    if curv_range and len(parm_u) > deg[0] + 1:
        do_endpoints = True
        for i in range(deg[0] + 1):

            if abs(parm_u[i] - curv_range[0]) > 0.0001:
                do_endpoints = False
                break

            if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
                do_endpoints = False
                break

    else:
        do_endpoints = False

    if do_endpoints:
        nu.use_endpoint_u = True

    # close: treat the curve as cyclic when leading/trailing control point
    # indices wrap around onto each other.
    do_closed = False
    if len(parm_u) > deg[0] + 1:
        # NOTE: was `xrange` (Python 2) — a NameError on Python 3, so closed
        # curves could never import; fixed to `range`.
        for i in range(deg[0] + 1):
            # print(curv_idx[i], curv_idx[-(i + 1)])

            if curv_idx[i] == curv_idx[-(i + 1)]:
                do_closed = True
                break

    if do_closed:
        nu.use_cyclic_u = True

    ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)

    new_objects.append(ob)
def strip_slash(line_split):
    """Strip a trailing b'\\' line-continuation marker from a split OBJ line.

    Mutates `line_split` in place.  Returns True when the last token ended
    with a backslash (the logical line continues on the next physical line),
    False otherwise.
    """
    last_tok = line_split[-1]
    if last_tok[-1] != 92:  # 92 == ord('\\')
        return False
    if len(last_tok) == 1:
        # The backslash was a token of its own: drop it entirely.
        line_split.pop()
    else:
        # Trim the backslash off the end of the last token.
        line_split[-1] = last_tok[:-1]
    return True
def get_float_func(filepath):
    """
    find the float function for this obj file
    - whether to replace commas or not

    Scans the first vertex-ish line (v/vn/vt) of the OBJ: if it uses commas
    as decimal separators, return a parser that swaps them for dots,
    otherwise return the plain `float` builtin.
    """
    # Use a context manager so the handle is closed even if decoding raises
    # mid-scan (the original closed it manually on each return path).
    with open(filepath, 'rb') as f:
        for line in f:
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda s: float(s.replace(b',', b'.'))
                elif b'.' in line:
                    return float

    # in case all vert values were ints
    return float
def any_number_as_int(svalue):
    """Parse a bytes numeric token to int, tolerating float syntax and comma decimals.

    Some MTL files incorrectly write floats (possibly with ',' as the decimal
    separator) where an int is expected; go through float() and truncate.
    """
    # Replacing b',' is a no-op when there is none, so do it unconditionally.
    return int(float(svalue.replace(b',', b'.')))
896 def load(context,
897 filepath,
899 global_clight_size=0.0,
900 use_smooth_groups=True,
901 use_edges=True,
902 use_split_objects=True,
903 use_split_groups=False,
904 use_image_search=True,
905 use_groups_as_vgroups=False,
906 relpath=None,
907 global_matrix=None
910 Called by the user interface or another script.
911 load_obj(path) - should give acceptable results.
912 This function passes the file and sends the data off
913 to be split into objects and then converted into mesh objects
def unique_name(existing_names, name_orig):
    """Return a variant of *name_orig* not yet in *existing_names*.

    None falls back to b"ObjObject".  Collisions get a b".NNN" suffix
    (b"Name.000", b"Name.001", ...).  The chosen name is added to
    *existing_names* before returning, so it is reserved for the caller.
    """
    if name_orig is None:
        name_orig = b"ObjObject"
    candidate = name_orig
    suffix = 0
    while candidate in existing_names:
        candidate = b"%s.%03d" % (name_orig, suffix)
        suffix += 1
    existing_names.add(candidate)
    return candidate
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
    """Accumulate one (possibly backslash-continued) vector line into `data`.

    `vec` is a reusable scratch list: a fresh `tag` line resets it, a
    continuation line (context_multi_line == tag) extends it.  Once the
    line no longer ends in '\\', the first `vec_len` components are
    appended to `data` as a tuple.  Returns the tag to use as the new
    context_multi_line state (b'' when the vector is complete).
    NOTE: relies on `float_func` and `strip_slash` from the enclosing scope.
    """
    ret_context_multi_line = tag if strip_slash(line_split) else b''
    if line_start == tag:
        vec[:] = [float_func(v) for v in line_split[1:]]
    elif context_multi_line == tag:
        vec += [float_func(v) for v in line_split]
    if not ret_context_multi_line:
        data.append(tuple(vec[:vec_len]))
    return ret_context_multi_line
def create_face(context_material, context_smooth_group, context_object_key):
    """Return a fresh working-face tuple with empty index lists.

    Layout: (loc_indices, nor_indices, tex_indices, material, smooth_group,
    object_key, invalid_blenpoly_marker).  Each list is a distinct object so
    the parser can append to them independently.
    """
    return (
        [],  # face_vert_loc_indices
        [],  # face_vert_nor_indices
        [],  # face_vert_tex_indices
        context_material,
        context_smooth_group,
        context_object_key,
        [],  # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
    )
950 with ProgressReport(context.window_manager) as progress:
951 progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
953 if global_matrix is None:
954 global_matrix = mathutils.Matrix()
956 if use_split_objects or use_split_groups:
957 use_groups_as_vgroups = False
959 verts_loc = []
960 verts_nor = []
961 verts_tex = []
962 faces = [] # tuples of the faces
963 material_libs = set() # filenames to material libs this OBJ uses
964 vertex_groups = {} # when use_groups_as_vgroups is true
966 # Get the string to float conversion func for this file- is 'float' for almost all files.
967 float_func = get_float_func(filepath)
969 # Context variables
970 context_material = None
971 context_smooth_group = None
972 context_object_key = None
973 context_object_obpart = None
974 context_vgroup = None
976 objects_names = set()
978 # Nurbs
979 context_nurbs = {}
980 nurbs = []
981 context_parm = b'' # used by nurbs too but could be used elsewhere
983 # Until we can use sets
984 use_default_material = False
985 unique_materials = {}
986 unique_smooth_groups = {}
987 # unique_obects= {} - no use for this variable since the objects are stored in the face.
989 # when there are faces that end with \
990 # it means they are multiline-
991 # since we use xreadline we cant skip to the next line
992 # so we need to know whether
993 context_multi_line = b''
995 # Per-face handling data.
996 face_vert_loc_indices = None
997 face_vert_nor_indices = None
998 face_vert_tex_indices = None
999 verts_loc_len = verts_nor_len = verts_tex_len = 0
1000 face_items_usage = set()
1001 face_invalid_blenpoly = None
1002 prev_vidx = None
1003 face = None
1004 vec = []
1006 quick_vert_failures = 0
1007 skip_quick_vert = False
1009 progress.enter_substeps(3, "Parsing OBJ file...")
1010 with open(filepath, 'rb') as f:
1011 for line in f:
1012 line_split = line.split()
1014 if not line_split:
1015 continue
1017 line_start = line_split[0] # we compare with this a _lot_
1019 # Handling vertex data are pretty similar, factorize that.
1020 # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
1021 # and only fallback to full multi-line parsing when needed, this gives significant speed-up
1022 # (~40% on affected code).
1023 if line_start == b'v':
1024 vdata, vdata_len, do_quick_vert = verts_loc, 3, not skip_quick_vert
1025 elif line_start == b'vn':
1026 vdata, vdata_len, do_quick_vert = verts_nor, 3, not skip_quick_vert
1027 elif line_start == b'vt':
1028 vdata, vdata_len, do_quick_vert = verts_tex, 2, not skip_quick_vert
1029 elif context_multi_line == b'v':
1030 vdata, vdata_len, do_quick_vert = verts_loc, 3, False
1031 elif context_multi_line == b'vn':
1032 vdata, vdata_len, do_quick_vert = verts_nor, 3, False
1033 elif context_multi_line == b'vt':
1034 vdata, vdata_len, do_quick_vert = verts_tex, 2, False
1035 else:
1036 vdata_len = 0
1038 if vdata_len:
1039 if do_quick_vert:
1040 try:
1041 vdata.append(list(map(float_func, line_split[1:vdata_len + 1])))
1042 except:
1043 do_quick_vert = False
1044 # In case we get too many failures on quick parsing, force fallback to full multi-line one.
1045 # Exception handling can become costly...
1046 quick_vert_failures += 1
1047 if quick_vert_failures > 10000:
1048 skip_quick_vert = True
1049 if not do_quick_vert:
1050 context_multi_line = handle_vec(line_start, context_multi_line, line_split,
1051 context_multi_line or line_start,
1052 vdata, vec, vdata_len)
1054 elif line_start == b'f' or context_multi_line == b'f':
1055 if not context_multi_line:
1056 line_split = line_split[1:]
1057 # Instantiate a face
1058 face = create_face(context_material, context_smooth_group, context_object_key)
1059 (face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
1060 _1, _2, _3, face_invalid_blenpoly) = face
1061 faces.append(face)
1062 face_items_usage.clear()
1063 verts_loc_len = len(verts_loc)
1064 verts_nor_len = len(verts_nor)
1065 verts_tex_len = len(verts_tex)
1066 if context_material is None:
1067 use_default_material = True
1068 # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
1070 context_multi_line = b'f' if strip_slash(line_split) else b''
1072 for v in line_split:
1073 obj_vert = v.split(b'/')
1074 idx = int(obj_vert[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
1075 vert_loc_index = (idx + verts_loc_len) if (idx < 1) else idx - 1
1076 # Add the vertex to the current group
1077 # *warning*, this wont work for files that have groups defined around verts
1078 if use_groups_as_vgroups and context_vgroup:
1079 vertex_groups[context_vgroup].append(vert_loc_index)
1080 # This a first round to quick-detect ngons that *may* use a same edge more than once.
1081 # Potential candidate will be re-checked once we have done parsing the whole face.
1082 if not face_invalid_blenpoly:
1083 # If we use more than once a same vertex, invalid ngon is suspected.
1084 if vert_loc_index in face_items_usage:
1085 face_invalid_blenpoly.append(True)
1086 else:
1087 face_items_usage.add(vert_loc_index)
1088 face_vert_loc_indices.append(vert_loc_index)
1090 # formatting for faces with normals and textures is
1091 # loc_index/tex_index/nor_index
1092 if len(obj_vert) > 1 and obj_vert[1] and obj_vert[1] != b'0':
1093 idx = int(obj_vert[1])
1094 face_vert_tex_indices.append((idx + verts_tex_len) if (idx < 1) else idx - 1)
1095 else:
1096 face_vert_tex_indices.append(0)
1098 if len(obj_vert) > 2 and obj_vert[2] and obj_vert[2] != b'0':
1099 idx = int(obj_vert[2])
1100 face_vert_nor_indices.append((idx + verts_nor_len) if (idx < 1) else idx - 1)
1101 else:
1102 face_vert_nor_indices.append(0)
1104 if not context_multi_line:
1105 # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1106 if face_invalid_blenpoly:
1107 face_invalid_blenpoly.clear()
1108 face_items_usage.clear()
1109 prev_vidx = face_vert_loc_indices[-1]
1110 for vidx in face_vert_loc_indices:
1111 edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
1112 if edge_key in face_items_usage:
1113 face_invalid_blenpoly.append(True)
1114 break
1115 face_items_usage.add(edge_key)
1116 prev_vidx = vidx
1118 elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
1119 # very similar to the face load function above with some parts removed
1120 if not context_multi_line:
1121 line_split = line_split[1:]
1122 # Instantiate a face
1123 face = create_face(context_material, context_smooth_group, context_object_key)
1124 face_vert_loc_indices = face[0]
1125 # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1126 # as a polyline, and not a regular face...
1127 face[1][:] = [True]
1128 faces.append(face)
1129 if context_material is None:
1130 use_default_material = True
1131 # Else, use face_vert_loc_indices previously defined and used the obj_face
1133 context_multi_line = b'l' if strip_slash(line_split) else b''
1135 for v in line_split:
1136 obj_vert = v.split(b'/')
1137 idx = int(obj_vert[0]) - 1
1138 face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)
1140 elif line_start == b's':
1141 if use_smooth_groups:
1142 context_smooth_group = line_value(line_split)
1143 if context_smooth_group == b'off':
1144 context_smooth_group = None
1145 elif context_smooth_group: # is not None
1146 unique_smooth_groups[context_smooth_group] = None
1148 elif line_start == b'o':
1149 if use_split_objects:
1150 context_object_key = unique_name(objects_names, line_value(line_split))
1151 context_object_obpart = context_object_key
1152 # unique_objects[context_object_key]= None
1154 elif line_start == b'g':
1155 if use_split_groups:
1156 grppart = line_value(line_split)
1157 context_object_key = (context_object_obpart, grppart) if context_object_obpart else grppart
1158 # print 'context_object_key', context_object_key
1159 # unique_objects[context_object_key]= None
1160 elif use_groups_as_vgroups:
1161 context_vgroup = line_value(line.split())
1162 if context_vgroup and context_vgroup != b'(null)':
1163 vertex_groups.setdefault(context_vgroup, [])
1164 else:
1165 context_vgroup = None # dont assign a vgroup
1167 elif line_start == b'usemtl':
1168 context_material = line_value(line.split())
1169 unique_materials[context_material] = None
1170 elif line_start == b'mtllib': # usemap or usemat
1171 # can have multiple mtllib filenames per line, mtllib can appear more than once,
1172 # so make sure only occurrence of material exists
1173 material_libs |= {os.fsdecode(f) for f in filenames_group_by_ext(line.lstrip()[7:].strip(), b'.mtl')
1176 # Nurbs support
1177 elif line_start == b'cstype':
1178 context_nurbs[b'cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'
1179 elif line_start == b'curv' or context_multi_line == b'curv':
1180 curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', []) # in case were multiline
1182 if not context_multi_line:
1183 context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
1184 line_split[0:3] = [] # remove first 3 items
1186 if strip_slash(line_split):
1187 context_multi_line = b'curv'
1188 else:
1189 context_multi_line = b''
1191 for i in line_split:
1192 vert_loc_index = int(i) - 1
1194 if vert_loc_index < 0:
1195 vert_loc_index = len(verts_loc) + vert_loc_index + 1
1197 curv_idx.append(vert_loc_index)
1199 elif line_start == b'parm' or context_multi_line == b'parm':
1200 if context_multi_line:
1201 context_multi_line = b''
1202 else:
1203 context_parm = line_split[1]
1204 line_split[0:2] = [] # remove first 2
1206 if strip_slash(line_split):
1207 context_multi_line = b'parm'
1208 else:
1209 context_multi_line = b''
1211 if context_parm.lower() == b'u':
1212 context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
1213 elif context_parm.lower() == b'v': # surfaces not supported yet
1214 context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
1215 # else: # may want to support other parm's ?
1217 elif line_start == b'deg':
1218 context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]
1219 elif line_start == b'end':
1220 # Add the nurbs curve
1221 if context_object_key:
1222 context_nurbs[b'name'] = context_object_key
1223 nurbs.append(context_nurbs)
1224 context_nurbs = {}
1225 context_parm = b''
1227 ''' # How to use usemap? deprecated?
1228 elif line_start == b'usema': # usemap or usemat
1229 context_image= line_value(line_split)
1232 progress.step("Done, loading materials and images...")
1234 if use_default_material:
1235 unique_materials[None] = None
1236 create_materials(filepath, relpath, material_libs, unique_materials,
1237 use_image_search, float_func)
1239 progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1240 (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
1242 # deselect all
1243 if bpy.ops.object.select_all.poll():
1244 bpy.ops.object.select_all(action='DESELECT')
1246 new_objects = [] # put new objects here
1248 # Split the mesh by objects/materials, may
1249 SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)
1251 for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
1252 verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex = data
1253 # Create meshes from the data, warning 'vertex_groups' wont support splitting
1254 #~ print(dataname, use_vnor, use_vtex)
1255 create_mesh(new_objects,
1256 use_edges,
1257 verts_loc_split,
1258 verts_nor if use_vnor else [],
1259 verts_tex if use_vtex else [],
1260 faces_split,
1261 unique_materials_split,
1262 unique_smooth_groups,
1263 vertex_groups,
1264 dataname,
1267 # nurbs support
1268 for context_nurbs in nurbs:
1269 create_nurbs(context_nurbs, verts_loc, new_objects)
1271 view_layer = context.view_layer
1272 collection = view_layer.active_layer_collection.collection
1274 # Create new obj
1275 for obj in new_objects:
1276 collection.objects.link(obj)
1277 obj.select_set(True)
1279 # we could apply this anywhere before scaling.
1280 obj.matrix_world = global_matrix
1282 view_layer.update()
1284 axis_min = [1000000000] * 3
1285 axis_max = [-1000000000] * 3
1287 if global_clight_size:
1288 # Get all object bounds
1289 for ob in new_objects:
1290 for v in ob.bound_box:
1291 for axis, value in enumerate(v):
1292 if axis_min[axis] > value:
1293 axis_min[axis] = value
1294 if axis_max[axis] < value:
1295 axis_max[axis] = value
1297 # Scale objects
1298 max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
1299 scale = 1.0
1301 while global_clight_size < max_axis * scale:
1302 scale = scale / 10.0
1304 for obj in new_objects:
1305 obj.scale = scale, scale, scale
1307 progress.leave_substeps("Done.")
1308 progress.leave_substeps("Finished importing: %r" % filepath)
1310 return {'FINISHED'}