[blender-addons.git] / io_scene_obj / import_obj.py
1 # SPDX-License-Identifier: GPL-2.0-or-later
3 # <pep8 compliant>
5 # Script copyright (C) Campbell Barton
6 # Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
8 """
9 This script imports Wavefront OBJ files into Blender.
11 Usage:
12 Run this script from "File->Import" menu and then load the desired OBJ file.
13 Note: this loads mesh objects and materials only; nurbs and curves are not supported.
15 http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
16 """
18 import array
19 import os
20 import time
21 import bpy
22 import mathutils
24 from bpy_extras.io_utils import unpack_list
25 from bpy_extras.image_utils import load_image
26 from bpy_extras.wm_utils.progress_report import ProgressReport
29 def line_value(line_split):
30 """
31 Returns 1 string representing the value for this line
32 None will be returned if there's only 1 word
33 """
34 length = len(line_split)
35 if length == 1:
36 return None
38 elif length == 2:
39 return line_split[1]
41 elif length > 2:
42 return b' '.join(line_split[1:])
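# Hedged usage sketch (hypothetical lines), assuming the caller already split the raw OBJ line on whitespace:
#   line_value(b'usemtl dark wood'.split())  ->  b'dark wood'
#   line_value(b'usemtl'.split())            ->  None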
45 def filenames_group_by_ext(line, ext):
46 """
47 Splits material libraries supporting spaces, so:
48 b'foo bar.mtl baz spam.MTL' -> (b'foo bar.mtl', b'baz spam.MTL')
49 Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
50 """
51 # Note that we assume that if there are some " in that line,
52 # then all filenames are properly enclosed within those...
53 start = line.find(b'"') + 1
54 if start != 0:
55 while start != 0:
56 end = line.find(b'"', start)
57 if end != -1:
58 yield line[start:end]
59 start = line.find(b'"', end + 1) + 1
60 else:
61 break
62 return
64 line_lower = line.lower()
65 i_prev = 0
66 while i_prev != -1 and i_prev < len(line):
67 i = line_lower.find(ext, i_prev)
68 if i != -1:
69 i += len(ext)
70 yield line[i_prev:i].strip()
71 i_prev = i
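# Illustrative sketch (hypothetical inputs): without quotes the extension drives the split,
#   list(filenames_group_by_ext(b'foo bar.mtl baz spam.MTL', b'.mtl')) -> [b'foo bar.mtl', b'baz spam.MTL']
# while a quoted line yields the enclosed names verbatim:
#   list(filenames_group_by_ext(b'"foo bar.mtl" "baz.mtl"', b'.mtl')) -> [b'foo bar.mtl', b'baz.mtl']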
74 def obj_image_load(img_data, context_imagepath_map, line, DIR, recursive, relpath):
75 """
76 Mainly uses comprehensiveImageLoad
77 But we try all space-separated items from the current line when the file is not found using the last one
78 (users keep generating/using image files with spaces in a format that does not support them, sigh...)
79 Also tries to replace '_' with ' ', since Max's exporter replaces spaces with underscores.
80 Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
81 Also corrects img_data (in case filenames with spaces have been split up in multiple entries, see T72148).
82 """
83 filepath_parts = line.split(b' ')
85 start = line.find(b'"') + 1
86 if start != 0:
87 end = line.find(b'"', start)
88 if end != 0:
89 filepath_parts = (line[start:end],)
91 image = None
92 for i in range(-1, -len(filepath_parts), -1):
93 imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
94 image = context_imagepath_map.get(imagepath, ...)
95 if image is ...:
96 image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
97 if image is None and "_" in imagepath:
98 image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
99 if image is not None:
100 context_imagepath_map[imagepath] = image
101 del img_data[i:]
102 img_data.append(imagepath)
103 break
104 else:
105 del img_data[i:]
106 img_data.append(imagepath)
107 break
109 if image is None:
110 imagepath = os.fsdecode(filepath_parts[-1])
111 image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
112 context_imagepath_map[imagepath] = image
114 return image
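# Rough illustration (hypothetical MTL line): for line == b'map_Kd my texture file.png'
# the loop above tries b'file.png', then b'texture file.png', then b'my texture file.png';
# if none of those resolve, a placeholder image is created from the last token.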
117 def create_materials(filepath, relpath,
118 material_libs, unique_materials,
119 use_image_search, float_func):
120 """
121 Create all the used materials in this obj,
122 assign colors and images to the materials from all referenced material libs
123 """
124 from math import sqrt
125 from bpy_extras import node_shader_utils
127 DIR = os.path.dirname(filepath)
128 context_material_vars = set()
130 # Don't load the same image multiple times
131 context_imagepath_map = {}
133 nodal_material_wrap_map = {}
135 def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
136 """
137 Set textures defined in .mtl file.
138 """
139 map_options = {}
141 # Absolute path - c:\.. etc would work here
142 image = obj_image_load(img_data, context_imagepath_map, line, DIR, use_image_search, relpath)
144 curr_token = []
145 for token in img_data[:-1]:
146 if token.startswith(b'-') and token[1:].isalpha():
147 if curr_token:
148 map_options[curr_token[0]] = curr_token[1:]
149 curr_token[:] = []
150 curr_token.append(token)
151 if curr_token:
152 map_options[curr_token[0]] = curr_token[1:]
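# Hedged sketch of the option parsing above (hypothetical MTL line): for
#   b'map_Bump -bm 2.0 -o 1 1 1 bump.png'
# the tokens preceding the (already resolved) filename yield
#   map_options == {b'-bm': [b'2.0'], b'-o': [b'1', b'1', b'1']}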
154 map_offset = map_options.get(b'-o')
155 map_scale = map_options.get(b'-s')
156 if map_offset is not None:
157 map_offset = tuple(map(float_func, map_offset))
158 if map_scale is not None:
159 map_scale = tuple(map(float_func, map_scale))
161 def _generic_tex_set(nodetex, image, texcoords, translation, scale):
162 nodetex.image = image
163 nodetex.texcoords = texcoords
164 if translation is not None:
165 nodetex.translation = translation
166 if scale is not None:
167 nodetex.scale = scale
169 # Adds textures for materials (rendering)
170 if type == 'Kd':
171 _generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)
173 elif type == 'Ka':
174 # XXX Not supported?
175 print("WARNING, currently unsupported ambient texture, skipped.")
177 elif type == 'Ks':
178 _generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)
180 elif type == 'Ke':
181 _generic_tex_set(mat_wrap.emission_color_texture, image, 'UV', map_offset, map_scale)
182 mat_wrap.emission_strength = 1.0
184 elif type == 'Bump':
185 bump_mult = map_options.get(b'-bm')
186 bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
187 mat_wrap.normalmap_strength_set(bump_mult)
189 _generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)
191 elif type == 'D':
192 _generic_tex_set(mat_wrap.alpha_texture, image, 'UV', map_offset, map_scale)
194 elif type == 'disp':
195 # XXX Not supported?
196 print("WARNING, currently unsupported displacement texture, skipped.")
197 # ~ mat_wrap.bump_image_set(image)
198 # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
200 elif type == 'refl':
201 map_type = map_options.get(b'-type')
202 if map_type and map_type != [b'sphere']:
203 print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
204 "" % ' '.join(i.decode() for i in map_type))
206 _generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
207 mat_wrap.base_color_texture.projection = 'SPHERE'
209 else:
210 raise Exception("invalid type %r" % type)
212 def finalize_material(context_material, context_material_vars, spec_colors,
213 do_highlight, do_reflection, do_transparency, do_glass):
214 # Finalize previous mat, if any.
215 if context_material:
216 if "specular" in context_material_vars:
217 # XXX This is highly approximated, not sure whether we can do better...
218 # TODO: Find a way to guesstimate best value from diffuse color...
219 # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
220 # from some grey), and apply the proportion between those two as tint factor?
221 spec = sum(spec_colors) / 3.0
222 # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
223 # ~ diff = sum(context_mat_wrap.base_color) / 3.0
224 # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
225 # ~ tint = min(1.0, spec_var / diff_var)
226 context_mat_wrap.specular = spec
227 context_mat_wrap.specular_tint = 0.0
228 if "roughness" not in context_material_vars:
229 context_mat_wrap.roughness = 0.0
231 # FIXME, how else to use this?
232 if do_highlight:
233 if "specular" not in context_material_vars:
234 context_mat_wrap.specular = 1.0
235 if "roughness" not in context_material_vars:
236 context_mat_wrap.roughness = 0.0
237 else:
238 if "specular" not in context_material_vars:
239 context_mat_wrap.specular = 0.0
240 if "roughness" not in context_material_vars:
241 context_mat_wrap.roughness = 1.0
243 if do_reflection:
244 if "metallic" not in context_material_vars:
245 context_mat_wrap.metallic = 1.0
246 else:
247 # since we are (ab)using ambient term for metallic (which can be non-zero)
248 context_mat_wrap.metallic = 0.0
250 if do_transparency:
251 if "ior" not in context_material_vars:
252 context_mat_wrap.ior = 1.0
253 if "alpha" not in context_material_vars:
254 context_mat_wrap.alpha = 1.0
255 # EEVEE only
256 context_material.blend_method = 'BLEND'
258 if do_glass:
259 if "ior" not in context_material_vars:
260 context_mat_wrap.ior = 1.5
262 # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
263 temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
264 if os.path.exists(os.path.join(DIR, temp_mtl)):
265 material_libs.add(temp_mtl)
266 del temp_mtl
268 # Create new materials
269 for name in unique_materials: # .keys()
270 ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
271 ma = unique_materials[name] = bpy.data.materials.new(ma_name)
272 ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
273 nodal_material_wrap_map[ma] = ma_wrap
274 ma_wrap.use_nodes = True
276 for libname in sorted(material_libs):
277 # print(libname)
278 mtlpath = os.path.join(DIR, libname)
279 if not os.path.exists(mtlpath):
280 print("\tMaterial not found MTL: %r" % mtlpath)
281 else:
282 # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
283 # (i.e. automatically controlled by other parameters).
284 do_highlight = False
285 do_reflection = False
286 do_transparency = False
287 do_glass = False
288 spec_colors = [0.0, 0.0, 0.0]
290 # print('\t\tloading mtl: %e' % mtlpath)
291 context_material = None
292 context_mat_wrap = None
293 mtl = open(mtlpath, 'rb')
294 for line in mtl: # .readlines():
295 line = line.strip()
296 if not line or line.startswith(b'#'):
297 continue
299 line_split = line.split()
300 line_id = line_split[0].lower()
302 if line_id == b'newmtl':
303 # Finalize previous mat, if any.
304 finalize_material(context_material, context_material_vars, spec_colors,
305 do_highlight, do_reflection, do_transparency, do_glass)
307 context_material_name = line_value(line_split)
308 context_material = unique_materials.get(context_material_name)
309 if context_material is not None:
310 context_mat_wrap = nodal_material_wrap_map[context_material]
311 context_material_vars.clear()
313 spec_colors[:] = [0.0, 0.0, 0.0]
314 do_highlight = False
315 do_reflection = False
316 do_transparency = False
317 do_glass = False
320 elif context_material:
321 def _get_colors(line_split):
322 # OBJ 'allows' one- or two-component values; treat a single component as greyscale, and two components as blue = 0.0.
323 ln = len(line_split)
324 if ln == 2:
325 return [float_func(line_split[1])] * 3
326 elif ln == 3:
327 return [float_func(line_split[1]), float_func(line_split[2]), 0.0]
328 else:
329 return [float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
331 # we need to make a material to assign properties to it.
332 if line_id == b'ka':
333 refl = sum(_get_colors(line_split)) / 3.0
334 context_mat_wrap.metallic = refl
335 context_material_vars.add("metallic")
336 elif line_id == b'kd':
337 context_mat_wrap.base_color = _get_colors(line_split)
338 elif line_id == b'ks':
339 spec_colors[:] = _get_colors(line_split)
340 context_material_vars.add("specular")
341 elif line_id == b'ke':
342 # We cannot set context_material.emit right now, we need final diffuse color as well for this.
343 # XXX Unsupported currently
344 context_mat_wrap.emission_color = _get_colors(line_split)
345 context_mat_wrap.emission_strength = 1.0
346 elif line_id == b'ns':
347 # XXX Totally empirical conversion, trying to adapt it
348 # (from 0.0 - 1000.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
349 val = max(0.0, min(1000.0, float_func(line_split[1])))
350 context_mat_wrap.roughness = 1.0 - (sqrt(val / 1000))
351 context_material_vars.add("roughness")
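# Hedged illustration of the empirical mapping above:
#   Ns 0 -> roughness 1.0, Ns 250 -> 0.5, Ns 1000 -> 0.0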
352 elif line_id == b'ni': # Refraction index (between 0.001 and 10).
353 context_mat_wrap.ior = float_func(line_split[1])
354 context_material_vars.add("ior")
355 elif line_id == b'd': # dissolve (transparency)
356 context_mat_wrap.alpha = float_func(line_split[1])
357 context_material_vars.add("alpha")
358 elif line_id == b'tr': # translucency
359 print("WARNING, currently unsupported 'tr' translucency option, skipped.")
360 elif line_id == b'tf':
361 # rgb, filter color, blender has no support for this.
362 print("WARNING, currently unsupported 'tf' filter color option, skipped.")
363 elif line_id == b'illum':
364 # Some MTL files incorrectly use a float for this value, see T60135.
365 illum = any_number_as_int(line_split[1])
367 # inline comments are from the spec, v4.2
368 if illum == 0:
369 # Color on and Ambient off
370 print("WARNING, Principled BSDF shader does not support illumination 0 mode "
371 "(colors with no ambient), skipped.")
372 elif illum == 1:
373 # Color on and Ambient on
374 pass
375 elif illum == 2:
376 # Highlight on
377 do_highlight = True
378 elif illum == 3:
379 # Reflection on and Ray trace on
380 do_reflection = True
381 elif illum == 4:
382 # Transparency: Glass on
383 # Reflection: Ray trace on
384 do_transparency = True
385 do_reflection = True
386 do_glass = True
387 elif illum == 5:
388 # Reflection: Fresnel on and Ray trace on
389 do_reflection = True
390 elif illum == 6:
391 # Transparency: Refraction on
392 # Reflection: Fresnel off and Ray trace on
393 do_transparency = True
394 do_reflection = True
395 elif illum == 7:
396 # Transparency: Refraction on
397 # Reflection: Fresnel on and Ray trace on
398 do_transparency = True
399 do_reflection = True
400 elif illum == 8:
401 # Reflection on and Ray trace off
402 do_reflection = True
403 elif illum == 9:
404 # Transparency: Glass on
405 # Reflection: Ray trace off
406 do_transparency = True
407 do_reflection = False
408 do_glass = True
409 elif illum == 10:
410 # Casts shadows onto invisible surfaces
411 print("WARNING, Principled BSDF shader does not support illumination 10 mode "
412 "(cast shadows on invisible surfaces), skipped.")
413 pass
415 elif line_id == b'map_ka':
416 img_data = line.split()[1:]
417 if img_data:
418 load_material_image(context_material, context_mat_wrap,
419 context_material_name, img_data, line, 'Ka')
420 elif line_id == b'map_ks':
421 img_data = line.split()[1:]
422 if img_data:
423 load_material_image(context_material, context_mat_wrap,
424 context_material_name, img_data, line, 'Ks')
425 elif line_id == b'map_kd':
426 img_data = line.split()[1:]
427 if img_data:
428 load_material_image(context_material, context_mat_wrap,
429 context_material_name, img_data, line, 'Kd')
430 elif line_id == b'map_ke':
431 img_data = line.split()[1:]
432 if img_data:
433 load_material_image(context_material, context_mat_wrap,
434 context_material_name, img_data, line, 'Ke')
435 elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
436 img_data = line.split()[1:]
437 if img_data:
438 load_material_image(context_material, context_mat_wrap,
439 context_material_name, img_data, line, 'Bump')
440 elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
441 img_data = line.split()[1:]
442 if img_data:
443 load_material_image(context_material, context_mat_wrap,
444 context_material_name, img_data, line, 'D')
446 elif line_id in {b'map_disp', b'disp'}: # displacementmap
447 img_data = line.split()[1:]
448 if img_data:
449 load_material_image(context_material, context_mat_wrap,
450 context_material_name, img_data, line, 'disp')
452 elif line_id in {b'map_refl', b'refl'}: # reflectionmap
453 img_data = line.split()[1:]
454 if img_data:
455 load_material_image(context_material, context_mat_wrap,
456 context_material_name, img_data, line, 'refl')
457 else:
458 print("WARNING: %r:%r (ignored)" % (filepath, line))
460 # Finalize last mat, if any.
461 finalize_material(context_material, context_material_vars, spec_colors,
462 do_highlight, do_reflection, do_transparency, do_glass)
463 mtl.close()
466 def face_is_edge(face):
467 """Simple check to test whether given (temp, working) data is an edge, and not a real face."""
468 face_vert_loc_indices = face[0]
469 face_vert_nor_indices = face[1]
470 return len(face_vert_nor_indices) == 1 or len(face_vert_loc_indices) == 2
473 def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
474 """
475 Takes vert_loc and faces, and separates into multiple sets of
476 (verts_loc, faces, unique_materials, dataname)
477 """
479 filename = os.path.splitext((os.path.basename(filepath)))[0]
481 if not SPLIT_OB_OR_GROUP or not faces:
482 use_verts_nor = any(f[1] for f in faces)
483 use_verts_tex = any(f[2] for f in faces)
484 # use the filename for the object name since we aren't chopping up the mesh.
485 return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]
487 def key_to_name(key):
488 # if the key is a tuple, join it to make a string
489 if not key:
490 return filename # assume it's a string; make sure this is true if the splitting code is changed
491 elif isinstance(key, bytes):
492 return key.decode('utf-8', 'replace')
493 else:
494 return "_".join(k.decode('utf-8', 'replace') for k in key)
496 # Return a key that makes the faces unique.
497 face_split_dict = {}
499 oldkey = -1 # initialize to a value that will never match the key
501 for face in faces:
502 (face_vert_loc_indices,
503 face_vert_nor_indices,
504 face_vert_tex_indices,
505 context_material,
506 _context_smooth_group,
507 context_object_key,
508 _face_invalid_blenpoly,
509 ) = face
510 key = context_object_key
512 if oldkey != key:
513 # Check the key has changed.
514 (verts_split, faces_split, unique_materials_split, vert_remap,
515 use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
516 oldkey = key
518 if not face_is_edge(face):
519 if not use_verts_nor and face_vert_nor_indices:
520 use_verts_nor.append(True)
522 if not use_verts_tex and face_vert_tex_indices:
523 use_verts_tex.append(True)
525 # Remap verts to new vert list and add where needed
526 for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
527 map_index = vert_remap.get(vert_idx)
528 if map_index is None:
529 map_index = len(verts_split)
530 vert_remap[vert_idx] = map_index # set the new remapped index so we only add once and can reference next time.
531 verts_split.append(verts_loc[vert_idx]) # add the vert to the local verts
533 face_vert_loc_indices[loop_idx] = map_index # remap to the local index
535 if context_material not in unique_materials_split:
536 unique_materials_split[context_material] = unique_materials[context_material]
538 faces_split.append(face)
540 # remove one of the items and reorder
541 return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
542 for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
543 in face_split_dict.items()]
546 def create_mesh(new_objects,
547 use_edges,
548 verts_loc,
549 verts_nor,
550 verts_tex,
551 faces,
552 unique_materials,
553 unique_smooth_groups,
554 vertex_groups,
555 dataname,
556 ):
557 """
558 Takes all the data gathered and generates a mesh, adding the new object to new_objects
559 deals with ngons, sharp edges and assigning materials
560 """
562 if unique_smooth_groups:
563 sharp_edges = set()
564 smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
565 context_smooth_group_old = -1
567 fgon_edges = set() # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
568 edges = []
569 tot_loops = 0
571 context_object_key = None
573 # reverse loop through face indices
574 for f_idx in range(len(faces) - 1, -1, -1):
575 face = faces[f_idx]
577 (face_vert_loc_indices,
578 face_vert_nor_indices,
579 face_vert_tex_indices,
580 context_material,
581 context_smooth_group,
582 context_object_key,
583 face_invalid_blenpoly,
584 ) = face
586 len_face_vert_loc_indices = len(face_vert_loc_indices)
588 if len_face_vert_loc_indices == 1:
589 faces.pop(f_idx) # can't add single-vert faces
591 # Face with a single item in face_vert_nor_indices is actually a polyline!
592 elif face_is_edge(face):
593 if use_edges:
594 edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
595 for i in range(len_face_vert_loc_indices - 1))
596 faces.pop(f_idx)
598 else:
599 # Smooth Group
600 if unique_smooth_groups and context_smooth_group:
601 # Is part of a smooth group and is a face
602 if context_smooth_group_old is not context_smooth_group:
603 edge_dict = smooth_group_users[context_smooth_group]
604 context_smooth_group_old = context_smooth_group
606 prev_vidx = face_vert_loc_indices[-1]
607 for vidx in face_vert_loc_indices:
608 edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
609 prev_vidx = vidx
610 edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1
612 # NGons into triangles
613 if face_invalid_blenpoly:
614 # ignore triangles with invalid indices
615 if len(face_vert_loc_indices) > 3:
616 from bpy_extras.mesh_utils import ngon_tessellate
617 ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
618 faces.extend([([face_vert_loc_indices[ngon[0]],
619 face_vert_loc_indices[ngon[1]],
620 face_vert_loc_indices[ngon[2]],
621 ],
622 [face_vert_nor_indices[ngon[0]],
623 face_vert_nor_indices[ngon[1]],
624 face_vert_nor_indices[ngon[2]],
625 ] if face_vert_nor_indices else [],
626 [face_vert_tex_indices[ngon[0]],
627 face_vert_tex_indices[ngon[1]],
628 face_vert_tex_indices[ngon[2]],
629 ] if face_vert_tex_indices else [],
630 context_material,
631 context_smooth_group,
632 context_object_key,
633 [],
634 )
635 for ngon in ngon_face_indices]
636 )
637 tot_loops += 3 * len(ngon_face_indices)
639 # edges to make ngons
640 if len(ngon_face_indices) > 1:
641 edge_users = set()
642 for ngon in ngon_face_indices:
643 prev_vidx = face_vert_loc_indices[ngon[-1]]
644 for ngidx in ngon:
645 vidx = face_vert_loc_indices[ngidx]
646 if vidx == prev_vidx:
647 continue # broken OBJ... Just skip.
648 edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
649 prev_vidx = vidx
650 if edge_key in edge_users:
651 fgon_edges.add(edge_key)
652 else:
653 edge_users.add(edge_key)
655 faces.pop(f_idx)
656 else:
657 tot_loops += len_face_vert_loc_indices
659 # Build sharp edges
660 if unique_smooth_groups:
661 for edge_dict in smooth_group_users.values():
662 for key, users in edge_dict.items():
663 if users == 1: # This edge is on the boundary of a group
664 sharp_edges.add(key)
666 # map the material names to an index
667 material_mapping = {name: i for i, name in enumerate(unique_materials)} # enumerate over unique_materials keys()
669 materials = [None] * len(unique_materials)
671 for name, index in material_mapping.items():
672 materials[index] = unique_materials[name]
674 me = bpy.data.meshes.new(dataname)
676 # make sure the list isn't too big
677 for material in materials:
678 me.materials.append(material)
680 me.vertices.add(len(verts_loc))
681 me.loops.add(tot_loops)
682 me.polygons.add(len(faces))
684 # verts_loc is a list of (x, y, z) tuples
685 me.vertices.foreach_set("co", unpack_list(verts_loc))
687 loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces for vidx in face_vert_loc_indices)
688 faces_loop_start = []
689 lidx = 0
690 for f in faces:
691 face_vert_loc_indices = f[0]
692 nbr_vidx = len(face_vert_loc_indices)
693 faces_loop_start.append(lidx)
694 lidx += nbr_vidx
695 faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)
697 me.loops.foreach_set("vertex_index", loops_vert_idx)
698 me.polygons.foreach_set("loop_start", faces_loop_start)
699 me.polygons.foreach_set("loop_total", faces_loop_total)
701 faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
702 me.polygons.foreach_set("material_index", faces_ma_index)
704 faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
705 me.polygons.foreach_set("use_smooth", faces_use_smooth)
707 if verts_nor and me.loops:
708 # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
709 # we can only set custom lnors *after* calling it.
710 me.create_normals_split()
711 loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces
712 for face_noidx in face_vert_nor_indices
713 for no in verts_nor[face_noidx])
714 me.loops.foreach_set("normal", loops_nor)
716 if verts_tex and me.polygons:
717 # Some files do not explicitly write the 'v' value when it's 0.0, see T68249...
718 verts_tex = [uv if len(uv) == 2 else uv + [0.0] for uv in verts_tex]
719 me.uv_layers.new(do_init=False)
720 loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces
721 for face_uvidx in face_vert_tex_indices
722 for uv in verts_tex[face_uvidx])
723 me.uv_layers[0].data.foreach_set("uv", loops_uv)
725 use_edges = use_edges and bool(edges)
726 if use_edges:
727 me.edges.add(len(edges))
728 # edges should be a list of (a, b) tuples
729 me.edges.foreach_set("vertices", unpack_list(edges))
731 me.validate(clean_customdata=False) # *Very* important to not remove lnors here!
732 me.update(calc_edges=use_edges, calc_edges_loose=use_edges)
734 # Un-tessellate as much as possible, in case we had to triangulate some ngons...
735 if fgon_edges:
736 import bmesh
737 bm = bmesh.new()
738 bm.from_mesh(me)
739 verts = bm.verts[:]
740 get = bm.edges.get
741 edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
742 try:
743 bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
744 except:
745 # Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
746 import traceback
747 traceback.print_exc()
749 bm.to_mesh(me)
750 bm.free()
752 # XXX If validate changes the geometry, this is likely to be broken...
753 if unique_smooth_groups and sharp_edges:
754 for e in me.edges:
755 if e.key in sharp_edges:
756 e.use_edge_sharp = True
758 if verts_nor:
759 clnors = array.array('f', [0.0] * (len(me.loops) * 3))
760 me.loops.foreach_get("normal", clnors)
762 if not unique_smooth_groups:
763 me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))
765 me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
766 me.use_auto_smooth = True
768 ob = bpy.data.objects.new(me.name, me)
769 new_objects.append(ob)
771 # Create the vertex groups. No need to have the flag passed here since we test for the
772 # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
773 # the following test will never run
774 for group_name, group_indices in vertex_groups.items():
775 group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
776 group.add(group_indices, 1.0, 'REPLACE')
779 def create_nurbs(context_nurbs, vert_loc, new_objects):
780 """
781 Add nurbs object to blender, only support one type at the moment
782 """
783 deg = context_nurbs.get(b'deg', (3,))
784 curv_range = context_nurbs.get(b'curv_range')
785 curv_idx = context_nurbs.get(b'curv_idx', [])
786 parm_u = context_nurbs.get(b'parm_u', [])
787 parm_v = context_nurbs.get(b'parm_v', [])
788 name = context_nurbs.get(b'name', b'ObjNurb')
789 cstype = context_nurbs.get(b'cstype')
791 if cstype is None:
792 print('\tWarning, cstype not found')
793 return
794 if cstype != b'bspline':
795 print('\tWarning, cstype is not supported (only bspline)')
796 return
797 if not curv_idx:
798 print('\tWarning, curv argument empty or not set')
799 return
800 if len(deg) > 1 or parm_v:
801 print('\tWarning, surfaces not supported')
802 return
804 cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
805 cu.dimensions = '3D'
807 nu = cu.splines.new('NURBS')
808 nu.points.add(len(curv_idx) - 1) # a point is added to start with
809 nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + [1.0])])
811 nu.order_u = deg[0] + 1
813 # get the endpoint flag from the weighting
814 if curv_range and len(parm_u) > deg[0] + 1:
815 do_endpoints = True
816 for i in range(deg[0] + 1):
818 if abs(parm_u[i] - curv_range[0]) > 0.0001:
819 do_endpoints = False
820 break
822 if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
823 do_endpoints = False
824 break
826 else:
827 do_endpoints = False
829 if do_endpoints:
830 nu.use_endpoint_u = True
832 # close
834 do_closed = False
835 if len(parm_u) > deg[0] + 1:
836 for i in range(deg[0] + 1):
837 # print(curv_idx[i], curv_idx[-(i + 1)])
839 if curv_idx[i] == curv_idx[-(i + 1)]:
840 do_closed = True
841 break
843 if do_closed:
844 nu.use_cyclic_u = True
847 ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)
849 new_objects.append(ob)
852 def strip_slash(line_split):
853 if line_split[-1][-1] == 92: # '\' char
854 if len(line_split[-1]) == 1:
855 line_split.pop() # remove the \ item
856 else:
857 line_split[-1] = line_split[-1][:-1] # remove the \ from the end last number
858 return True
859 return False
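# Minimal sketch (hypothetical splits): both continuation forms are handled,
#   ls = b'f 1 2 \\'.split(); strip_slash(ls) -> True,  ls == [b'f', b'1', b'2']
#   ls = b'f 1 2\\'.split();  strip_slash(ls) -> True,  ls == [b'f', b'1', b'2']
#   ls = b'f 1 2 3'.split();  strip_slash(ls) -> False, ls unchanged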
862 def get_float_func(filepath):
863 """
864 find the float function for this obj file
865 - whether to replace commas or not
866 """
867 file = open(filepath, 'rb')
868 for line in file: # .readlines():
869 line = line.lstrip()
870 if line.startswith(b'v'): # vn vt v
871 if b',' in line:
872 file.close()
873 return lambda f: float(f.replace(b',', b'.'))
874 elif b'.' in line:
875 file.close()
876 return float
878 file.close()
879 # in case all vert values were ints
880 return float
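# Hedged illustration: a file whose vertex lines look like b'v 1,0 2,5 0,0' gets the
# comma-aware converter (so float_func(b'1,5') == 1.5), while b'v 1.0 2.5 0.0' (or files
# with integer-only coordinates) keeps plain float().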
883 def any_number_as_int(svalue):
884 if b',' in svalue:
885 svalue = svalue.replace(b',', b'.')
886 return int(float(svalue))
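# Sketch of the tolerated inputs (hypothetical values, see T60135):
#   any_number_as_int(b'2') -> 2,  any_number_as_int(b'2.0') -> 2,  any_number_as_int(b'2,0') -> 2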
889 def load(context,
890 filepath,
892 global_clamp_size=0.0,
893 use_smooth_groups=True,
894 use_edges=True,
895 use_split_objects=True,
896 use_split_groups=False,
897 use_image_search=True,
898 use_groups_as_vgroups=False,
899 relpath=None,
900 global_matrix=None
901 ):
902 """
903 Called by the user interface or another script.
904 load_obj(path) - should give acceptable results.
905 This function parses the file and sends the data off
906 to be split into objects and then converted into mesh objects
907 """
908 def unique_name(existing_names, name_orig):
909 i = 0
910 if name_orig is None:
911 name_orig = b"ObjObject"
912 name = name_orig
913 while name in existing_names:
914 name = b"%s.%03d" % (name_orig, i)
915 i += 1
916 existing_names.add(name)
917 return name
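# Hedged sketch (hypothetical names): registering b'Cube' repeatedly yields
#   b'Cube', b'Cube.000', b'Cube.001', ... as the names collide.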
919 def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
920 ret_context_multi_line = tag if strip_slash(line_split) else b''
921 if line_start == tag:
922 vec[:] = [float_func(v) for v in line_split[1:]]
923 elif context_multi_line == tag:
924 vec += [float_func(v) for v in line_split]
925 if not ret_context_multi_line:
926 data.append(tuple(vec[:vec_len]))
927 return ret_context_multi_line
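# Hedged sketch of the multi-line handling (hypothetical lines): for
#   b'v 1.0 2.0 \\'  followed by  b'3.0'
# the first call returns b'v' (continuation pending), the second one appends
# (1.0, 2.0, 3.0) to data and returns b''.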
929 def create_face(context_material, context_smooth_group, context_object_key):
930 face_vert_loc_indices = []
931 face_vert_nor_indices = []
932 face_vert_tex_indices = []
933 return (
934 face_vert_loc_indices,
935 face_vert_nor_indices,
936 face_vert_tex_indices,
937 context_material,
938 context_smooth_group,
939 context_object_key,
940 [], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
941 )
943 with ProgressReport(context.window_manager) as progress:
944 progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
946 if global_matrix is None:
947 global_matrix = mathutils.Matrix()
949 if use_split_objects or use_split_groups:
950 use_groups_as_vgroups = False
952 verts_loc = []
953 verts_nor = []
954 verts_tex = []
955 faces = [] # tuples of the faces
956 material_libs = set() # filenames to material libs this OBJ uses
957 vertex_groups = {} # when use_groups_as_vgroups is true
959 # Get the string-to-float conversion func for this file - it is plain 'float' for almost all files.
960 float_func = get_float_func(filepath)
962 # Context variables
963 context_material = None
964 context_smooth_group = None
965 context_object_key = None
966 context_object_obpart = None
967 context_vgroup = None
969 objects_names = set()
971 # Nurbs
972 context_nurbs = {}
973 nurbs = []
974 context_parm = b'' # used by nurbs too but could be used elsewhere
976 # Until we can use sets
977 use_default_material = False
978 unique_materials = {}
979 unique_smooth_groups = {}
980 # unique_obects= {} - no use for this variable since the objects are stored in the face.
982 # When there are faces that end with \
983 # it means they are multi-line;
984 # since we read the file line by line we can't skip to the next line,
985 # so we need to know whether we are in a multi-line context.
986 context_multi_line = b''
988 # Per-face handling data.
989 face_vert_loc_indices = None
990 face_vert_nor_indices = None
991 face_vert_tex_indices = None
992 verts_loc_len = verts_nor_len = verts_tex_len = 0
993 face_items_usage = set()
994 face_invalid_blenpoly = None
995 prev_vidx = None
996 face = None
997 vec = []
999 quick_vert_failures = 0
1000 skip_quick_vert = False
1002 progress.enter_substeps(3, "Parsing OBJ file...")
1003 with open(filepath, 'rb') as f:
1004 for line in f:
1005 line_split = line.split()
1007 if not line_split:
1008 continue
1010 line_start = line_split[0] # we compare with this a _lot_
1012 if len(line_split) == 1 and not context_multi_line and line_start != b'end':
1013 print("WARNING, skipping malformatted line: %s" % line.decode('UTF-8', 'replace').rstrip())
1014 continue
1016 # Handling of the different vertex data types is pretty similar, so factorize that.
1017 # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
1018 # and only fall back to full multi-line parsing when needed; this gives a significant speed-up
1019 # (~40% on affected code).
1020 if line_start == b'v':
1021 vdata, vdata_len, do_quick_vert = verts_loc, 3, not skip_quick_vert
1022 elif line_start == b'vn':
1023 vdata, vdata_len, do_quick_vert = verts_nor, 3, not skip_quick_vert
1024 elif line_start == b'vt':
1025 vdata, vdata_len, do_quick_vert = verts_tex, 2, not skip_quick_vert
1026 elif context_multi_line == b'v':
1027 vdata, vdata_len, do_quick_vert = verts_loc, 3, False
1028 elif context_multi_line == b'vn':
1029 vdata, vdata_len, do_quick_vert = verts_nor, 3, False
1030 elif context_multi_line == b'vt':
1031 vdata, vdata_len, do_quick_vert = verts_tex, 2, False
1032 else:
1033 vdata_len = 0
1035 if vdata_len:
1036 if do_quick_vert:
1037 try:
1038 vdata.append(list(map(float_func, line_split[1:vdata_len + 1])))
1039 except:
1040 do_quick_vert = False
1041 # In case we get too many failures on quick parsing, force fallback to full multi-line one.
1042 # Exception handling can become costly...
1043 quick_vert_failures += 1
1044 if quick_vert_failures > 10000:
1045 skip_quick_vert = True
1046 if not do_quick_vert:
1047 context_multi_line = handle_vec(line_start, context_multi_line, line_split,
1048 context_multi_line or line_start,
1049 vdata, vec, vdata_len)
1051 elif line_start == b'f' or context_multi_line == b'f':
1052 if not context_multi_line:
1053 line_split = line_split[1:]
1054 # Instantiate a face
1055 face = create_face(context_material, context_smooth_group, context_object_key)
1056 (face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
1057 _1, _2, _3, face_invalid_blenpoly) = face
1058 faces.append(face)
1059 face_items_usage.clear()
1060 verts_loc_len = len(verts_loc)
1061 verts_nor_len = len(verts_nor)
1062 verts_tex_len = len(verts_tex)
1063 if context_material is None:
1064 use_default_material = True
1065 # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined for this obj face
1067 context_multi_line = b'f' if strip_slash(line_split) else b''
1069 for v in line_split:
1070 obj_vert = v.split(b'/')
1071 idx = int(obj_vert[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
1072 vert_loc_index = (idx + verts_loc_len) if (idx < 1) else idx - 1
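# OBJ indices are 1-based; negative values are relative to the end of the list parsed so far.
# Hedged example: with 8 verts already read, b'f -1 -2 -3' maps to local indices 7, 6, 5
# while b'f 1 2 3' maps to 0, 1, 2.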
1073 # Add the vertex to the current group
1074 # *warning*, this wont work for files that have groups defined around verts
1075 if use_groups_as_vgroups and context_vgroup:
1076 vertex_groups[context_vgroup].append(vert_loc_index)
1077 # This is a first round to quick-detect ngons that *may* use the same edge more than once.
1078 # Potential candidates will be re-checked once we are done parsing the whole face.
1079 if not face_invalid_blenpoly:
1080 # If we use the same vertex more than once, an invalid ngon is suspected.
1081 if vert_loc_index in face_items_usage:
1082 face_invalid_blenpoly.append(True)
1083 else:
1084 face_items_usage.add(vert_loc_index)
1085 face_vert_loc_indices.append(vert_loc_index)
1087 # formatting for faces with normals and textures is
1088 # loc_index/tex_index/nor_index
1089 if len(obj_vert) > 1 and obj_vert[1] and obj_vert[1] != b'0':
1090 idx = int(obj_vert[1])
1091 face_vert_tex_indices.append((idx + verts_tex_len) if (idx < 1) else idx - 1)
1092 else:
1093 face_vert_tex_indices.append(0)
1095 if len(obj_vert) > 2 and obj_vert[2] and obj_vert[2] != b'0':
1096 idx = int(obj_vert[2])
1097 face_vert_nor_indices.append((idx + verts_nor_len) if (idx < 1) else idx - 1)
1098 else:
1099 face_vert_nor_indices.append(0)
1101 if not context_multi_line:
1102 # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
1103 if face_invalid_blenpoly:
1104 face_invalid_blenpoly.clear()
1105 face_items_usage.clear()
1106 prev_vidx = face_vert_loc_indices[-1]
1107 for vidx in face_vert_loc_indices:
1108 edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
1109 if edge_key in face_items_usage:
1110 face_invalid_blenpoly.append(True)
1111 break
1112 face_items_usage.add(edge_key)
1113 prev_vidx = vidx
1115 elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
1116 # very similar to the face load function above with some parts removed
1117 if not context_multi_line:
1118 line_split = line_split[1:]
1119 # Instantiate a face
1120 face = create_face(context_material, context_smooth_group, context_object_key)
1121 face_vert_loc_indices = face[0]
1122 # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
1123 # as a polyline, and not a regular face...
1124 face[1][:] = [True]
1125 faces.append(face)
1126 if context_material is None:
1127 use_default_material = True
1128 # Else, use face_vert_loc_indices previously defined for this obj face
1130 context_multi_line = b'l' if strip_slash(line_split) else b''
1132 for v in line_split:
1133 obj_vert = v.split(b'/')
1134 idx = int(obj_vert[0]) - 1
1135 face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)
1137 elif line_start == b's':
1138 if use_smooth_groups:
1139 context_smooth_group = line_value(line_split)
1140 if context_smooth_group == b'off':
1141 context_smooth_group = None
1142 elif context_smooth_group: # is not None
1143 unique_smooth_groups[context_smooth_group] = None
1145 elif line_start == b'o':
1146 if use_split_objects:
1147 context_object_key = unique_name(objects_names, line_value(line_split))
1148 context_object_obpart = context_object_key
1149 # unique_objects[context_object_key]= None
1151 elif line_start == b'g':
1152 if use_split_groups:
1153 grppart = line_value(line_split)
1154 context_object_key = (context_object_obpart, grppart) if context_object_obpart else grppart
1155 # print 'context_object_key', context_object_key
1156 # unique_objects[context_object_key]= None
1157 elif use_groups_as_vgroups:
1158 context_vgroup = line_value(line.split())
1159 if context_vgroup and context_vgroup != b'(null)':
1160 vertex_groups.setdefault(context_vgroup, [])
1161 else:
1162 context_vgroup = None # dont assign a vgroup
1164 elif line_start == b'usemtl':
1165 context_material = line_value(line.split())
1166 unique_materials[context_material] = None
1167 elif line_start == b'mtllib': # usemap or usemat
1168 # can have multiple mtllib filenames per line, mtllib can appear more than once,
1169 # so make sure only one occurrence of each material lib exists
1170 material_libs |= {os.fsdecode(f) for f in filenames_group_by_ext(line.lstrip()[7:].strip(), b'.mtl')
1171 }
1173 # Nurbs support
1174 elif line_start == b'cstype':
1175 context_nurbs[b'cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'
1176 elif line_start == b'curv' or context_multi_line == b'curv':
1177 curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', []) # in case were multiline
1179 if not context_multi_line:
1180 context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
1181 line_split[0:3] = [] # remove first 3 items
1183 if strip_slash(line_split):
1184 context_multi_line = b'curv'
1185 else:
1186 context_multi_line = b''
1188 for i in line_split:
1189 vert_loc_index = int(i) - 1
1191 if vert_loc_index < 0:
1192 vert_loc_index = len(verts_loc) + vert_loc_index + 1
1194 curv_idx.append(vert_loc_index)
1196 elif line_start == b'parm' or context_multi_line == b'parm':
1197 if context_multi_line:
1198 context_multi_line = b''
1199 else:
1200 context_parm = line_split[1]
1201 line_split[0:2] = [] # remove first 2
1203 if strip_slash(line_split):
1204 context_multi_line = b'parm'
1205 else:
1206 context_multi_line = b''
1208 if context_parm.lower() == b'u':
1209 context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
1210 elif context_parm.lower() == b'v': # surfaces not supported yet
1211 context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
1212 # else: # may want to support other parm's ?
1214 elif line_start == b'deg':
1215 context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]
1216 elif line_start == b'end':
1217 # Add the nurbs curve
1218 if context_object_key:
1219 context_nurbs[b'name'] = context_object_key
1220 nurbs.append(context_nurbs)
1221 context_nurbs = {}
1222 context_parm = b''
1224 ''' # How to use usemap? deprecated?
1225 elif line_start == b'usema': # usemap or usemat
1226 context_image = line_value(line_split)
1227 '''
1229 progress.step("Done, loading materials and images...")
1231 if use_default_material:
1232 unique_materials[None] = None
1233 create_materials(filepath, relpath, material_libs, unique_materials,
1234 use_image_search, float_func)
1236 progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
1237 (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
1239 # deselect all
1240 if bpy.ops.object.select_all.poll():
1241 bpy.ops.object.select_all(action='DESELECT')
1243 new_objects = [] # put new objects here
1246 # Split the mesh by objects/materials.
1246 SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)
1248 for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
1249 verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex = data
1250 # Create meshes from the data, warning 'vertex_groups' wont support splitting
1251 #~ print(dataname, use_vnor, use_vtex)
1252 create_mesh(new_objects,
1253 use_edges,
1254 verts_loc_split,
1255 verts_nor if use_vnor else [],
1256 verts_tex if use_vtex else [],
1257 faces_split,
1258 unique_materials_split,
1259 unique_smooth_groups,
1260 vertex_groups,
1261 dataname,
1264 # nurbs support
1265 for context_nurbs in nurbs:
1266 create_nurbs(context_nurbs, verts_loc, new_objects)
1268 view_layer = context.view_layer
1269 collection = view_layer.active_layer_collection.collection
1271 # Create new obj
1272 for obj in new_objects:
1273 collection.objects.link(obj)
1274 obj.select_set(True)
1276 # we could apply this anywhere before scaling.
1277 obj.matrix_world = global_matrix
1279 view_layer.update()
1281 axis_min = [1000000000] * 3
1282 axis_max = [-1000000000] * 3
1284 if global_clamp_size:
1285 # Get all object bounds
1286 for ob in new_objects:
1287 for v in ob.bound_box:
1288 for axis, value in enumerate(v):
1289 if axis_min[axis] > value:
1290 axis_min[axis] = value
1291 if axis_max[axis] < value:
1292 axis_max[axis] = value
1294 # Scale objects
1295 max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
1296 scale = 1.0
1298 while global_clamp_size < max_axis * scale:
1299 scale = scale / 10.0
1301 for obj in new_objects:
1302 obj.scale = scale, scale, scale
1304 progress.leave_substeps("Done.")
1305 progress.leave_substeps("Finished importing: %r" % filepath)
1307 return {'FINISHED'}