Sun Position: Fix crash when Blender was started in background
[blender-addons.git] / io_scene_x3d / import_x3d.py
blob6b2c7afc385877863ef453c12112694e01d6758a
1 # SPDX-License-Identifier: GPL-2.0-or-later
3 DEBUG = False
5 # This should work without a blender at all
6 import os
7 import shlex
8 import math
9 from math import sin, cos, pi
10 from itertools import chain
12 texture_cache = {}
13 material_cache = {}
15 EPSILON = 0.0000001 # Very crude.
def imageConvertCompat(path):
    """Convert unsupported image formats (currently GIF) to PNG.

    Returns the path of the converted PNG when the conversion succeeded,
    otherwise returns *path* unchanged.  On Windows no conversion is
    attempted at all.
    """
    if os.sep == '\\':
        return path  # assume win32 has quicktime, dont convert

    if path.lower().endswith('.gif'):
        path_to = path[:-3] + 'png'

        '''
        if exists(path_to):
            return path_to
        '''
        # print('\n'+path+'\n'+path_to+'\n')
        # Use an argument list (not a shell string) so paths containing
        # quotes or shell metacharacters cannot break or inject into the
        # command; for now just hope we have image magick.
        import subprocess
        try:
            subprocess.call(('convert', path, path_to))
        except OSError:
            # ImageMagick not installed: behave like the old os.system()
            # call and silently fall through to return the original path.
            pass

        if os.path.exists(path_to):
            return path_to

    return path
38 # notes
39 # transforms are relative
40 # order doesn't matter for loc/size/rot
41 # right handed rotation
42 # angles are in radians
43 # rotation first defines axis then amount in radians
46 # =============================== VRML Specific
def vrml_split_fields(value):
    """
    key 0.0 otherkey 1,2,3 opt1 opt1 0.0
    -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
    """
    def _looks_like_key(tok):
        # A key starts with a letter, is not quoted and is not a boolean.
        return (tok[0] != '"' and
                tok[0].isalpha() and
                tok.upper() not in {'TRUE', 'FALSE'})

    groups = []
    current = []

    for tok in value:
        if not _looks_like_key(tok):
            # Plain value token: attach it to the current field.
            current.append(tok)
            continue

        if not current:
            # Nothing collected yet: start the first field.
            current.append(tok)
            continue

        if len(current) > 2 and current[-2] in {'DEF', 'USE'}:
            # Identifiers following DEF/USE stay with their field,
            # they may contain arbitrary characters.
            current.append(tok)
        elif (not _looks_like_key(current[-1])) or (len(current) == 3 and current[1] == 'IS'):
            # Previous token was a value (or the field was a complete
            # 'x IS y' mapping): this key begins a new field.
            groups.append(current)
            current = [tok]
        else:
            # Consecutive keys are legal in some cases, keep accumulating.
            current.append(tok)

    if current:
        groups.append(current)

    return groups
def vrmlFormat(data):
    """
    Keep this as a valid vrml file, but format in a way we can predict.

    Returns the file as a list of normalized lines: comments stripped,
    one brace/bracket or one field per line, strings preserved.
    """
    # Strip all comments - # not in strings - warning multiline strings are ignored.
    def strip_comment(l):
        #l = ' '.join(l.split())
        l = l.strip()

        if l.startswith('#'):
            return ''

        i = l.find('#')

        if i == -1:
            return l

        # Most cases accounted for! if we have a comment at the end of the line do this...
        #j = l.find('url "')
        j = l.find('"')

        if j == -1:  # simple no strings
            return l[:i].strip()

        # Track quote state so a '#' inside a string is kept.
        q = False
        for i, c in enumerate(l):
            if c == '"':
                q = not q  # invert

            elif c == '#':
                if q is False:
                    return l[:i - 1]

        return l

    data = '\n'.join([strip_comment(l) for l in data.split('\n')])  # remove all whitespace

    EXTRACT_STRINGS = True  # only needed when strings or filename contains ,[]{} chars :/

    if EXTRACT_STRINGS:

        # We need this so we can detect URL's
        data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

        string_ls = []

        #search = 'url "'
        search = '"'

        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search, last_i)
            if i != -1:

                start = i + len(search)  # first char after end of search
                end = data.find('"', start)
                if end != -1:
                    # Pull the string out, leaving an empty "" placeholder.
                    item = data[start:end]
                    string_ls.append(item)
                    data = data[:start] + data[end:]
                    ok = True  # keep looking

                    last_i = (end - len(item)) + 1
                    # print(last_i, item, '|' + data[last_i] + '|')

    # done with messy extracting strings part

    # Bad, dont take strings into account
    # NOTE(review): upstream wraps the next two statements in a dead '''
    # string block; the quote lines were lost in this dump so they appear
    # live here - confirm against the original file.
    data = data.replace('#', '\n#')
    data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')])  # remove all whitespace

    data = data.replace('{', '\n{\n')
    data = data.replace('}', '\n}\n')
    data = data.replace('[', '\n[\n')
    data = data.replace(']', '\n]\n')
    data = data.replace(',', ' , ')  # make sure comma's separate

    # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
    # See T45195 for details.
    data = '\n'.join([' '.join(value) for l in data.split('\n') for value in vrml_split_fields(l.split())])

    if EXTRACT_STRINGS:
        # add strings back in

        search = '"'  # fill in these empty strings

        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search + '"', last_i)
            # print(i)
            if i != -1:
                start = i + len(search)  # first char after end of search
                item = string_ls.pop(0)
                # print(item)
                data = data[:start] + item + data[start:]

                last_i = start + len(item) + 1

                ok = True

    # More annoying obscure cases where USE or DEF are placed on a newline
    # data = data.replace('\nDEF ', ' DEF ')
    # data = data.replace('\nUSE ', ' USE ')

    data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

    # Better to parse the file accounting for multiline arrays

    data = data.replace(',\n', ' , ')  # remove line endings with commas
    data = data.replace(']', '\n]\n')  # very very annoying - but some comma's are at the end of the list, must run this again.

    return [l for l in data.split('\n') if l]
# Node kinds used throughout the parser.
NODE_NORMAL = 1  # {}
NODE_ARRAY = 2  # []
NODE_REFERENCE = 3  # USE foobar
# NODE_PROTO = 4 #

# Global buffer of the pre-formatted lines of the file currently being
# parsed; temporarily swapped out while Inline files are loaded.
lines = []
def getNodePreText(i, words):
    # Accumulate tokens from lines[] starting at index i until we can tell
    # what kind of node follows.  Fills 'words' in place and returns
    # (node_type, next_line_index), or (0, -1) on failure.
    # print(lines[i])
    use_node = False
    while len(words) < 5:

        if i >= len(lines):
            break
        # NOTE(review): upstream keeps this PROTO branch inside a dead
        # string literal (quotes lost in this dump); NODE_PROTO is commented
        # out above, so taking this branch would raise NameError - confirm.
        elif lines[i].startswith('PROTO'):
            return NODE_PROTO, i+1
        elif lines[i] == '{':
            # words.append(lines[i])  # no need
            # print("OK")
            return NODE_NORMAL, i + 1
        elif lines[i].count('"') % 2 != 0:  # odd number of quotes? - part of a string.
            # print('ISSTRING')
            break
        else:
            new_words = lines[i].split()
            if 'USE' in new_words:
                use_node = True

            words.extend(new_words)
            i += 1

    # Check for USE node - no {
    # USE #id - should always be on the same line.
    if use_node:
        # print('LINE', i, words[:words.index('USE')+2])
        words[:] = words[:words.index('USE') + 2]
        if lines[i] == '{' and lines[i + 1] == '}':
            # USE sometimes has {} after it anyway
            i += 2
        return NODE_REFERENCE, i

    # print("error value!!!", words)
    return 0, -1
def is_nodeline(i, words):
    # Decide whether lines[i] begins a node.  Fills 'words' in place and
    # returns (node_type, next_line_index), or (0, 0) when this is not a
    # node line.

    if not lines[i][0].isalpha():
        return 0, 0

    #if lines[i].startswith('field'):
    #   return 0, 0

    # Is this a prototype??
    if lines[i].startswith('PROTO'):
        words[:] = lines[i].split()
        return NODE_NORMAL, i + 1  # TODO - assumes the next line is a '[\n', skip that
    if lines[i].startswith('EXTERNPROTO'):
        words[:] = lines[i].split()
        return NODE_ARRAY, i + 1  # TODO - assumes the next line is a '[\n', skip that

    # NOTE(review): upstream keeps the next three lines in a dead string
    # literal (quotes lost in this dump); 'is_protoline' and
    # 'proto_field_defs' are not defined in this file as shown, so this
    # call would raise NameError - confirm against the original file.
    proto_type, new_i = is_protoline(i, words, proto_field_defs)
    if new_i != -1:
        return proto_type, new_i

    # Simple "var [" type
    if lines[i + 1] == '[':
        if lines[i].count('"') % 2 == 0:
            words[:] = lines[i].split()
            return NODE_ARRAY, i + 2

    node_type, new_i = getNodePreText(i, words)

    if not node_type:
        if DEBUG:
            print("not node_type", lines[i])
        return 0, 0

    # Ok, we have a { after some values
    # Check the values are not fields
    for i, val in enumerate(words):
        if i != 0 and words[i - 1] in {'DEF', 'USE'}:
            # ignore anything after DEF, it is a ID and can contain any chars.
            pass
        elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}:
            pass
        else:
            # There is a number in one of the values, therefor we are not a node.
            return 0, 0

    #if node_type==NODE_REFERENCE:
    #   print(words, "REF_!!!!!!!")
    return node_type, new_i
def is_numline(i):
    """Return True when lines[i] starts with a number (int or float).

    After vrmlFormat(), array lines begin either with a number or with
    ', ' followed by a number; anything else is a tag/node line.

    NOTE: this dump showed a stripped docstring plus an upstream-disabled
    slower implementation ahead of the live logic; only the live logic is
    kept here.
    """
    l = lines[i]

    line_start = 0
    if l.startswith(', '):
        # Continuation line: skip the leading comma separator.
        line_start += 2

    # comma's always have a space before them, so only the span up to the
    # first space needs testing.
    line_end = len(l) - 1
    line_end_new = l.find(' ', line_start)
    if line_end_new != -1:
        line_end = line_end_new

    try:
        float(l[line_start:line_end])  # works for a float or int
        return True
    except ValueError:
        # float() on a str slice can only raise ValueError.
        return False
class vrmlNode(object):
    """A single node of the parsed VRML/X3D tree.

    Also represents [] array nodes and USE references (see node_type).
    __slots__ keeps per-instance memory down, since files can contain a
    very large number of nodes.  NOTE: 'parent' was listed twice here;
    the harmless duplicate has been removed.
    """
    __slots__ = ('id',
                 'fields',
                 'proto_node',
                 'proto_field_defs',
                 'proto_fields',
                 'node_type',
                 'parent',
                 'children',
                 'array_data',
                 'reference',
                 'lineno',
                 'filename',
                 'blendObject',
                 'blendData',
                 'DEF_NAMESPACE',
                 'ROUTE_IPO_NAMESPACE',
                 'PROTO_NAMESPACE',
                 'x3dNode',
                 'parsed')
370 def __init__(self, parent, node_type, lineno):
371 self.id = None
372 self.node_type = node_type
373 self.parent = parent
374 self.blendObject = None
375 self.blendData = None
376 self.x3dNode = None # for x3d import only
377 self.parsed = None # We try to reuse objects in a smart way
378 if parent:
379 parent.children.append(self)
381 self.lineno = lineno
383 # This is only set from the root nodes.
384 # Having a filename also denotes a root node
385 self.filename = None
386 self.proto_node = None # proto field definition eg: "field SFColor seatColor .6 .6 .1"
388 # Store in the root node because each inline file needs its own root node and its own namespace
389 self.DEF_NAMESPACE = None
390 self.ROUTE_IPO_NAMESPACE = None
392 self.FIELD_NAMESPACE = None
395 self.PROTO_NAMESPACE = None
397 self.reference = None
399 if node_type == NODE_REFERENCE:
400 # For references, only the parent and ID are needed
401 # the reference its self is assigned on parsing
402 return
404 self.fields = [] # fields have no order, in some cases rool level values are not unique so dont use a dict
406 self.proto_field_defs = [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
407 self.proto_fields = [] # proto field usage "diffuseColor IS seatColor"
408 self.children = []
409 self.array_data = [] # use for arrays of data - should only be for NODE_ARRAY types
    # Only available from the root node
    # NOTE(review): FIELD_NAMESPACE is not declared in __slots__ and is
    # never assigned, so calling this would raise AttributeError; it looks
    # like dead/vestigial code - confirm before relying on it.
    def getFieldDict(self):
        if self.FIELD_NAMESPACE is not None:
            return self.FIELD_NAMESPACE
        else:
            return self.parent.getFieldDict()
419 def getProtoDict(self):
420 if self.PROTO_NAMESPACE is not None:
421 return self.PROTO_NAMESPACE
422 else:
423 return self.parent.getProtoDict()
425 def getDefDict(self):
426 if self.DEF_NAMESPACE is not None:
427 return self.DEF_NAMESPACE
428 else:
429 return self.parent.getDefDict()
431 def getRouteIpoDict(self):
432 if self.ROUTE_IPO_NAMESPACE is not None:
433 return self.ROUTE_IPO_NAMESPACE
434 else:
435 return self.parent.getRouteIpoDict()
437 def setRoot(self, filename):
438 self.filename = filename
439 # self.FIELD_NAMESPACE = {}
440 self.DEF_NAMESPACE = {}
441 self.ROUTE_IPO_NAMESPACE = {}
442 self.PROTO_NAMESPACE = {}
444 def isRoot(self):
445 if self.filename is None:
446 return False
447 else:
448 return True
450 def getFilename(self):
451 if self.filename:
452 return self.filename
453 elif self.parent:
454 return self.parent.getFilename()
455 else:
456 return None
458 def getRealNode(self):
459 if self.reference:
460 return self.reference
461 else:
462 return self
    def getSpec(self):
        # Return the node's spec (the last element of its id tuple),
        # eg 'Transform' for an id of ('DEF', 'blah', 'Transform').
        self_real = self.getRealNode()
        try:
            return self_real.id[-1]  # its possible this node has no spec
        except:
            # id may be None (TypeError) or empty (IndexError).
            return None
    def findSpecRecursive(self, spec):
        # Depth-first search for a node whose spec matches 'spec'.
        # NOTE(review): when the match is deeper in the tree this returns
        # the immediate child whose subtree contains it, not the matching
        # node itself - appears intended for EXTERNPROTO lookup, confirm.
        self_real = self.getRealNode()
        if spec == self_real.getSpec():
            return self

        for child in self_real.children:
            if child.findSpecRecursive(spec):
                return child

        return None
482 def getPrefix(self):
483 if self.id:
484 return self.id[0]
485 return None
    def getSpecialTypeName(self, typename):
        # Return the id token immediately following 'typename',
        # eg getSpecialTypeName('DEF') -> the DEF name; None if absent.
        self_real = self.getRealNode()
        try:
            return self_real.id[list(self_real.id).index(typename) + 1]
        except:
            # typename missing (ValueError), nothing after it (IndexError),
            # or id is None (TypeError).
            return None
    def getDefName(self):
        """Return this node's DEF name, or None."""
        return self.getSpecialTypeName('DEF')

    def getProtoName(self):
        """Return this node's PROTO name, or None."""
        return self.getSpecialTypeName('PROTO')

    def getExternprotoName(self):
        """Return this node's EXTERNPROTO name, or None."""
        return self.getSpecialTypeName('EXTERNPROTO')
503 def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
504 self_real = self.getRealNode()
505 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
506 if type(node_spec) == str:
507 return [child for child in self_real.children if child.getSpec() == node_spec]
508 else:
509 # Check inside a list of optional types
510 return [child for child in self_real.children if child.getSpec() in node_spec]
    def getChildrenBySpecCondition(self, cond):  # spec could be Transform, Shape, Appearance
        # Return all children for which cond(child_spec) is truthy.
        self_real = self.getRealNode()
        # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
        return [child for child in self_real.children if cond(child.getSpec())]
517 def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
518 # Use in cases where there is only ever 1 child of this type
519 ls = self.getChildrenBySpec(node_spec)
520 if ls:
521 return ls[0]
522 else:
523 return None
525 def getChildBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
526 # Use in cases where there is only ever 1 child of this type
527 ls = self.getChildrenBySpecCondition(cond)
528 if ls:
529 return ls[0]
530 else:
531 return None
    def getChildrenByName(self, node_name):  # type could be geometry, children, appearance
        # Children whose id's first token equals node_name
        # (the double 'if' guards against child.id being None).
        self_real = self.getRealNode()
        return [child for child in self_real.children if child.id if child.id[0] == node_name]
    def getChildByName(self, node_name):
        # First child whose id's first token equals node_name;
        # implicitly returns None when there is no match.
        self_real = self.getRealNode()
        for child in self_real.children:
            if child.id and child.id[0] == node_name:  # and child.id[-1]==node_spec:
                return child
    def getSerialized(self, results, ancestry):
        """Return this node and all its children in a flat list.

        results  -- list the (node, ancestry_tuple) pairs are appended to.
        ancestry -- list of ancestor nodes; used to avoid recursing into
                    PROTO definitions except when instanced by this node.
        """
        ancestry = ancestry[:]  # always use a copy

        # self_real = self.getRealNode()

        results.append((self, tuple(ancestry)))
        ancestry.append(self)
        for child in self.getRealNode().children:
            if child not in ancestry:
                # We dont want to load proto's, they are only references
                # We could enforce this elsewhere

                # Only add this in a very special case
                # where the parent of this object is not the real parent
                # - In this case we have added the proto as a child to a node instancing it.
                # This is a bit arbitrary, but its how Proto's are done with this importer.
                if child.getProtoName() is None and child.getExternprotoName() is None:
                    child.getSerialized(results, ancestry)
                else:

                    if DEBUG:
                        print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec())

                    self_spec = self.getSpec()

                    if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec:
                        #if DEBUG:
                        #    "FoundProto!"
                        child.getSerialized(results, ancestry)

        return results
576 def searchNodeTypeID(self, node_spec, results):
577 self_real = self.getRealNode()
578 # print(self.lineno, self.id)
579 if self_real.id and self_real.id[-1] == node_spec: # use last element, could also be only element
580 results.append(self_real)
581 for child in self_real.children:
582 child.searchNodeTypeID(node_spec, results)
583 return results
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        """Return the value tokens for 'field' on this node, or None.

        Resolves PROTO 'IS' mappings by walking 'ancestry' from the leaf
        upward, taking the PROTO default and any instance override.  With
        AS_CHILD=True a child node is returned instead of a token list.
        NOTE(review): SPLIT_COMMAS is accepted but unused here - confirm.
        """
        self_real = self.getRealNode()  # in case we're an instance

        for f in self_real.fields:
            # print(f)
            if f and f[0] == field:
                # print('\tfound field', f)

                if len(f) >= 3 and f[1] == 'IS':  # eg: 'diffuseColor IS legColor'
                    field_id = f[2]

                    # print("\n\n\n\n\n\nFOND IS!!!")
                    f_proto_lookup = None
                    f_proto_child_lookup = None
                    i = len(ancestry)
                    while i:
                        i -= 1
                        node = ancestry[i]
                        node = node.getRealNode()

                        # proto settings are stored in "self.proto_node"
                        if node.proto_node:
                            # Get the default value from the proto, this can be overwritten by the proto instance
                            # 'field SFColor legColor .8 .4 .7'
                            if AS_CHILD:
                                for child in node.proto_node.children:
                                    #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
                                    if child.id and ('point' in child.id or 'points' in child.id):
                                        f_proto_child_lookup = child
                            else:
                                for f_def in node.proto_node.proto_field_defs:
                                    if len(f_def) >= 4:
                                        if f_def[0] == 'field' and f_def[2] == field_id:
                                            f_proto_lookup = f_def[3:]

                        # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
                        # This is the setting as defined by the instance, including this setting is optional,
                        # and will override the default PROTO value
                        # eg: 'legColor 1 0 0'
                        if AS_CHILD:
                            for child in node.children:
                                if child.id and child.id[0] == field_id:
                                    f_proto_child_lookup = child
                        else:
                            for f_def in node.fields:
                                if len(f_def) >= 2:
                                    if f_def[0] == field_id:
                                        if DEBUG:
                                            print("getFieldName(), found proto", f_def)
                                        f_proto_lookup = f_def[1:]

                    if AS_CHILD:
                        if f_proto_child_lookup:
                            if DEBUG:
                                print("getFieldName() - AS_CHILD=True, child found")
                                print(f_proto_child_lookup)
                        return f_proto_child_lookup
                    else:
                        return f_proto_lookup
                else:
                    if AS_CHILD:
                        return None
                    else:
                        # Not using a proto
                        return f[1:]
        # print('\tfield not found', field)

        # See if this is a proto name
        if AS_CHILD:
            for child in self_real.children:
                if child.id and len(child.id) == 1 and child.id[0] == field:
                    return child

        return None
661 def getFieldAsInt(self, field, default, ancestry):
662 self_real = self.getRealNode() # in case we're an instance
664 f = self_real.getFieldName(field, ancestry)
665 if f is None:
666 return default
667 if ',' in f:
668 f = f[:f.index(',')] # strip after the comma
670 if len(f) != 1:
671 print('\t"%s" wrong length for int conversion for field "%s"' % (f, field))
672 return default
674 try:
675 return int(f[0])
676 except:
677 print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field))
678 return default
680 def getFieldAsFloat(self, field, default, ancestry):
681 self_real = self.getRealNode() # in case we're an instance
683 f = self_real.getFieldName(field, ancestry)
684 if f is None:
685 return default
686 if ',' in f:
687 f = f[:f.index(',')] # strip after the comma
689 if len(f) != 1:
690 print('\t"%s" wrong length for float conversion for field "%s"' % (f, field))
691 return default
693 try:
694 return float(f[0])
695 except:
696 print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field))
697 return default
    def getFieldAsFloatTuple(self, field, default, ancestry):
        # Return the field as a list of floats, or 'default' on failure.
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        # if ',' in f: f = f[:f.index(',')]  # strip after the comma

        if len(f) < 1:
            print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field))
            return default

        ret = []
        for v in f:
            if v != ',':
                try:
                    ret.append(float(v))
                except:
                    break  # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
        # print(ret)

        if ret:
            return ret
        if not ret:
            print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field))
            return default
726 def getFieldAsBool(self, field, default, ancestry):
727 self_real = self.getRealNode() # in case we're an instance
729 f = self_real.getFieldName(field, ancestry)
730 if f is None:
731 return default
732 if ',' in f:
733 f = f[:f.index(',')] # strip after the comma
735 if len(f) != 1:
736 print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field))
737 return default
739 if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE':
740 return True
741 elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE':
742 return False
743 else:
744 print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field))
745 return default
    def getFieldAsString(self, field, default, ancestry):
        # Return the field value as a str (surrounding quotes stripped for
        # vrml; returned as-is for x3d), or 'default' on failure.
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if len(f) < 1:
            print('\t"%s" wrong length for string conversion for field "%s"' % (f, field))
            return default

        if len(f) > 1:
            # String may contain spaces
            st = ' '.join(f)
        else:
            st = f[0]

        # X3D HACK
        if self.x3dNode:
            return st

        # NOTE(review): an empty st would raise IndexError here - confirm
        # that is impossible given the length checks above.
        if st[0] == '"' and st[-1] == '"':
            return st[1:-1]
        else:
            print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field))
            return default
    def getFieldAsArray(self, field, group, ancestry):
        """
        For this parser arrays are children

        Returns the field as a flat list of numbers, or regrouped into
        sub-lists of length 'group'; group == -1 returns the data as-is.
        """
        def array_as_number(array_string):
            # Convert tokens to ints (base auto-detected), else floats, else [].
            array_data = []
            try:
                array_data = [int(val, 0) for val in array_string]
            except:
                try:
                    array_data = [float(val) for val in array_string]
                except:
                    print('\tWarning, could not parse array data from field')

            return array_data

        self_real = self.getRealNode()  # in case we're an instance

        child_array = self_real.getFieldName(field, ancestry, True, SPLIT_COMMAS=True)

        #if type(child_array)==list:  # happens occasionally
        #    array_data = child_array

        if child_array is None:
            # For x3d, should work ok with vrml too
            # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
            data_split = self.getFieldName(field, ancestry, SPLIT_COMMAS=True)
            if not data_split:
                return []

            array_data = array_as_number(data_split)

        elif type(child_array) == list:
            # x3d creates these
            array_data = array_as_number(child_array)
        else:
            # print(child_array)
            # Normal vrml
            array_data = child_array.array_data

        # print('array_data', array_data)
        if group == -1 or len(array_data) == 0:
            return array_data

        # We want a flat list
        flat = True
        for item in array_data:
            if type(item) == list:
                flat = False
                break

        # make a flat array
        if flat:
            flat_array = array_data  # we are already flat.
        else:
            flat_array = []

            def extend_flat(ls):
                # Recursively flatten nested lists into flat_array.
                for item in ls:
                    if type(item) == list:
                        extend_flat(item)
                    else:
                        flat_array.append(item)

            extend_flat(array_data)

        # We requested a flat array
        if group == 0:
            return flat_array

        new_array = []
        sub_array = []

        for item in flat_array:
            sub_array.append(item)
            if len(sub_array) == group:
                new_array.append(sub_array)
                sub_array = []

        if sub_array:
            print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array)

        return new_array
    def getFieldAsStringArray(self, field, ancestry):
        """
        Get a list of strings

        Returns [] when the field child is missing or malformed.
        """
        self_real = self.getRealNode()  # in case we're an instance

        child_array = None
        for child in self_real.children:
            if child.id and len(child.id) == 1 and child.id[0] == field:
                child_array = child
                break
        if not child_array:
            return []

        # each string gets its own list, remove ""'s
        try:
            new_array = [f[0][1:-1] for f in child_array.fields]
        except:
            print('\twarning, string array could not be made')
            new_array = []

        return new_array
881 def getLevel(self):
882 # Ignore self_real
883 level = 0
884 p = self.parent
885 while p:
886 level += 1
887 p = p.parent
888 if not p:
889 break
891 return level
    def __repr__(self):
        # Indented, recursive text dump of the node subtree.  Primarily a
        # debugging aid; also the basis of the desc() cache keys.
        level = self.getLevel()
        ind = ' ' * level
        if self.node_type == NODE_REFERENCE:
            brackets = ''
        elif self.node_type == NODE_NORMAL:
            brackets = '{}'
        else:
            brackets = '[]'

        if brackets:
            text = ind + brackets[0] + '\n'
        else:
            text = ''

        text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno)

        if self.node_type == NODE_REFERENCE:
            # Reference nodes have no fields/children of their own.
            text += ind + "(reference node)\n"
            return text

        if self.proto_node:
            text += ind + 'PROTO NODE...\n'
            text += str(self.proto_node)
            text += ind + 'PROTO NODE_DONE\n'

        text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'

        for i, item in enumerate(self.fields):
            text += ind + 'FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n'

        for i, item in enumerate(self.proto_field_defs):
            text += ind + 'PROTO_FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n'
        #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'

        text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n'
        for i, child in enumerate(self.children):
            text += ind + ('CHILD%d:\n' % i)
            text += str(child)

        text += '\n' + ind + brackets[1]

        return text
    def parse(self, i, IS_PROTO_DATA=False):
        # Parse this node starting at lines[i], then resolve Inline files
        # and EXTERNPROTO URLs by recursively loading and parsing the
        # referenced files.  Returns the index of the first line after
        # this node.
        new_i = self.__parse(i, IS_PROTO_DATA)

        # print(self.id, self.getFilename())

        # Check if this node was an inline or externproto

        url_ls = []

        if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
            ancestry = []  # Warning! - PROTO's using this wont work at all.
            url = self.getFieldAsString('url', None, ancestry)
            if url:
                url_ls = [(url, None)]
            del ancestry

        elif self.getExternprotoName():
            # externproto
            url_ls = []
            for f in self.fields:

                if type(f) == str:
                    f = [f]

                for ff in f:
                    for f_split in ff.split('"'):
                        # print(f_split)
                        # "someextern.vrml#SomeID"
                        if '#' in f_split:

                            f_split, f_split_id = f_split.split('#')  # there should only be 1 # anyway

                            url_ls.append((f_split, f_split_id))
                        else:
                            url_ls.append((f_split, None))

        # Was either an Inline or an EXTERNPROTO
        if url_ls:

            # print(url_ls)

            for url, extern_key in url_ls:
                print(url)
                urls = []
                urls.append(url)
                # NOTE(review): 'bpy' is not imported in this module as
                # shown - confirm where it comes from before running this
                # outside Blender.
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), url))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url)))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                try:
                    url = [url for url in urls if os.path.exists(url)][0]
                    url_found = True
                except:
                    # No candidate path exists (IndexError).
                    url_found = False

                if not url_found:
                    print('\tWarning: Inline URL could not be found:', url)
                else:
                    if url == self.getFilename():
                        print('\tWarning: can\'t Inline yourself recursively:', url)
                    else:

                        try:
                            data = gzipOpen(url)
                        except:
                            print('\tWarning: can\'t open the file:', url)
                            data = None

                        if data:
                            # Tricky - inline another VRML
                            print('\tLoading Inline:"%s"...' % url)

                            # Watch it! - backup lines
                            lines_old = lines[:]

                            lines[:] = vrmlFormat(data)

                            lines.insert(0, '{')
                            lines.insert(0, 'root_node____')
                            lines.append('}')

                            # NOTE(review): upstream keeps this debug dump in
                            # a dead string literal (quotes lost in this
                            # dump); as shown it writes /tmp/test.txt on
                            # every Inline load - confirm.
                            ff = open('/tmp/test.txt', 'w')
                            ff.writelines([l+'\n' for l in lines])

                            child = vrmlNode(self, NODE_NORMAL, -1)
                            child.setRoot(url)  # initialized dicts
                            child.parse(0)

                            # if self.getExternprotoName():
                            if self.getExternprotoName():
                                if not extern_key:  # if none is specified - use the name
                                    extern_key = self.getSpec()

                                if extern_key:

                                    self.children.remove(child)
                                    child.parent = None

                                    extern_child = child.findSpecRecursive(extern_key)

                                    if extern_child:
                                        self.children.append(extern_child)
                                        extern_child.parent = self

                                        if DEBUG:
                                            print("\tEXTERNPROTO ID found!:", extern_key)
                                    else:
                                        print("\tEXTERNPROTO ID not found!:", extern_key)

                            # Watch it! - restore lines
                            lines[:] = lines_old

        return new_i
    def __parse(self, i, IS_PROTO_DATA=False):
        # Core recursive-descent parser: reads lines[] from index i, fills
        # in this node's id/fields/children/array_data and returns the
        # index of the first line after this node.
        # NOTE(review): upstream wraps these two prints in a dead string
        # literal (quotes lost in this dump) - confirm.
        print('parsing at', i, end="")
        print(i, self.id, self.lineno)

        l = lines[i]

        if l == '[':
            # An anonymous list
            self.id = None
            i += 1
        else:
            words = []

            node_type, new_i = is_nodeline(i, words)
            if not node_type:  # fail for parsing new node.
                print("Failed to parse new node")
                raise ValueError

            if self.node_type == NODE_REFERENCE:
                # Only assign the reference and quit
                key = words[words.index('USE') + 1]
                self.id = (words[0],)

                self.reference = self.getDefDict()[key]
                return new_i

            self.id = tuple(words)

            # fill in DEF/USE
            key = self.getDefName()
            if key is not None:
                self.getDefDict()[key] = self

            key = self.getProtoName()
            if not key:
                key = self.getExternprotoName()

            proto_dict = self.getProtoDict()
            if key is not None:
                proto_dict[key] = self

                # Parse the proto nodes fields
                self.proto_node = vrmlNode(self, NODE_ARRAY, new_i)
                new_i = self.proto_node.parse(new_i)

                # The proto definition is not a regular child.
                self.children.remove(self.proto_node)

                # print(self.proto_node)

                new_i += 1  # skip past the {

            else:  # If we're a proto instance, add the proto node as our child.
                spec = self.getSpec()
                try:
                    self.children.append(proto_dict[spec])
                    #pass
                except:
                    # Not a proto instance (KeyError); nothing to attach.
                    pass

                del spec

            del proto_dict, key

            i = new_i

        # print(self.id)
        ok = True
        while ok:
            if i >= len(lines):
                return len(lines) - 1

            l = lines[i]
            # print('\tDEBUG:', i, self.node_type, l)
            if l == '':
                i += 1
                continue

            if l == '}':
                if self.node_type != NODE_NORMAL:  # also ends proto nodes, we may want a type for these too.
                    print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1
            if l == ']':
                if self.node_type != NODE_ARRAY:
                    print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1

            node_type, new_i = is_nodeline(i, [])
            if node_type:  # check text\n{
                child = vrmlNode(self, node_type, i)
                i = child.parse(i)

            elif l == '[':  # some files have these anonymous lists
                child = vrmlNode(self, NODE_ARRAY, i)
                i = child.parse(i)

            elif is_numline(i):
                l_split = l.split(',')

                values = None
                # See if each item is a float?

                for num_type in (int, float):
                    try:
                        values = [num_type(v) for v in l_split]
                        break
                    except:
                        pass

                    # Comma-separated segments of space-separated numbers.
                    try:
                        values = [[num_type(v) for v in segment.split()] for segment in l_split]
                        break
                    except:
                        pass

                if values is None:  # dont parse
                    values = l_split

                # This should not extend over multiple lines however it is possible
                # print(self.array_data)
                if values:
                    self.array_data.extend(values)
                i += 1
            else:
                words = l.split()
                if len(words) > 2 and words[1] == 'USE':
                    vrmlNode(self, NODE_REFERENCE, i)
                else:

                    # print("FIELD", i, l)

                    #words = l.split()
                    ### print('\t\ttag', i)
                    # this is a tag/
                    # print(words, i, l)
                    value = l
                    # print(i)
                    # javastrips can exist as values.
                    quote_count = l.count('"')
                    if quote_count % 2:  # odd number?
                        # print('MULTILINE')
                        while 1:
                            i += 1
                            l = lines[i]
                            quote_count = l.count('"')
                            if quote_count % 2:  # odd number?
                                value += '\n' + l[:l.rfind('"')]
                                break  # assume
                            else:
                                value += '\n' + l

                    # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
                    value_all = shlex.split(value, posix=False)

                    for value in vrml_split_fields(value_all):
                        # Split

                        if value[0] == 'field':
                            # field SFFloat creaseAngle 4
                            self.proto_field_defs.append(value)
                        else:
                            self.fields.append(value)
                i += 1
    # This is a prerequisite for DEF/USE-based material caching
    def canHaveReferences(self):
        # Only named ({} nodes with a DEF) can be shared via USE.
        return self.node_type == NODE_NORMAL and self.getDefName()
1236 # This is a prerequisite for raw XML-based material caching.
1237 # NOTE - crude, but working implementation for
1238 # material and texture caching, based on __repr__.
1239 # Doesn't do any XML, but is better than nothing.
1240 def desc(self):
1241 if "material" in self.id or "texture" in self.id:
1242 node = self.reference if self.node_type == NODE_REFERENCE else self
1243 return frozenset(line.strip() for line in repr(node).strip().split("\n"))
1244 else:
1245 return None
def gzipOpen(path):
    """Read a possibly gzip-compressed text file.

    Tries gzip first; on any gzip/IO failure falls back to reading the file
    as plain text.  Either way the result is a str decoded as utf-8 with
    ``surrogateescape`` (so arbitrary bytes survive a round trip), or None
    if the file could not be read at all (a traceback is printed then).
    """
    import gzip
    import zlib

    data = None
    try:
        # Mode 'r' yields bytes from gzip; decoded below on success.
        with gzip.open(path, 'r') as gz:
            data = gz.read()
    except (OSError, EOFError, zlib.error):
        # Not a gzip file, truncated, or unreadable - fall back to plain text.
        pass

    if data is None:
        try:
            with open(path, 'r', encoding='utf-8', errors='surrogateescape') as filehandle:
                data = filehandle.read()
        except OSError:
            import traceback
            traceback.print_exc()
    else:
        data = data.decode(encoding='utf-8', errors='surrogateescape')

    return data
def vrml_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (vrmlNode, '') or (None, 'Error String')
    """
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Stripped above
    # NOTE: 'lines' is the module-level token buffer shared with the vrmlNode
    # parser; it is rewritten in place for each file.
    lines[:] = vrmlFormat(data)

    # Wrap everything in a dummy node so the parser always sees one root.
    lines.insert(0, '{')
    lines.insert(0, 'dymmy_node')
    lines.append('}')
    # Use for testing our parsed output, so we can check on line numbers.
    '''
    ff = open('/tmp/test.txt', 'w')
    ff.writelines([l+'\n' for l in lines])
    ff.close()
    '''

    # Now evaluate it
    node_type, new_i = is_nodeline(0, [])
    if not node_type:
        return None, 'Error: VRML file has no starting Node'

    # Trick to make sure we get all root nodes.
    lines.insert(0, '{')
    lines.insert(0, 'root_node____')  # important the name starts with an ascii char
    lines.append('}')

    root = vrmlNode(None, NODE_NORMAL, -1)
    root.setRoot(path)  # we need to set the root so we have a namespace and know the path in case of inlineing

    # Parse recursively
    root.parse(0)

    # This prints a load of text
    if DEBUG:
        print(root)

    return root, ''
1318 # ====================== END VRML
1320 # ====================== X3d Support
# Same as vrml but replace the parser
class x3dNode(vrmlNode):
    """vrmlNode variant whose data comes from an xml.dom.minidom element
    instead of tokenized VRML text.

    Only the parsing/field-access layer differs; the generic tree and field
    utility methods are inherited from vrmlNode.
    """

    def __init__(self, parent, node_type, x3dNode):
        vrmlNode.__init__(self, parent, node_type, -1)
        self.x3dNode = x3dNode  # the wrapped minidom element

    def parse(self, IS_PROTO_DATA=False):
        """Recursively wrap this element's DOM children in x3dNode instances,
        registering DEF names and resolving USE references along the way.

        IS_PROTO_DATA is unused here; kept for interface parity with
        vrmlNode.parse.
        """
        # print(self.x3dNode.tagName)
        # parse_position was stashed on the DOM element by the SAX hook
        # installed in x3d_parse().
        self.lineno = self.x3dNode.parse_position[0]

        define = self.x3dNode.getAttributeNode('DEF')
        if define:
            self.getDefDict()[define.value] = self
        else:
            use = self.x3dNode.getAttributeNode('USE')
            if use:
                try:
                    self.reference = self.getDefDict()[use.value]
                    self.node_type = NODE_REFERENCE
                except:
                    # Unresolvable USE: warn and drop this node from the tree.
                    print('\tWarning: reference', use.value, 'not found')
                    self.parent.children.remove(self)

                return

        for x3dChildNode in self.x3dNode.childNodes:
            # Only real elements become children; skip text/comments/CDATA.
            if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}:
                continue

            node_type = NODE_NORMAL
            # print(x3dChildNode, dir(x3dChildNode))
            if x3dChildNode.getAttributeNode('USE'):
                node_type = NODE_REFERENCE

            child = x3dNode(self, node_type, x3dChildNode)
            child.parse()

        # TODO - x3d Inline

    def getSpec(self):
        """Return the node's type name (the XML tag name)."""
        return self.x3dNode.tagName  # should match vrml spec

    # Used to retain object identifiers from X3D to Blender
    def getDefName(self):
        """Return the DEF name, a "USE_"-prefixed USE name, or None."""
        node_id = self.x3dNode.getAttributeNode('DEF')
        if node_id:
            return node_id.value
        node_id = self.x3dNode.getAttributeNode('USE')
        if node_id:
            return "USE_" + node_id.value
        return None

    # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
    # getFieldAsArray getFieldAsBool etc
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        """Return the whitespace-split value of attribute *field*, or None if absent."""
        # ancestry and AS_CHILD are ignored, only used for VRML now

        # NOTE(review): self_real is unused below - looks vestigial.
        self_real = self.getRealNode()  # in case we're an instance

        field_xml = self.x3dNode.getAttributeNode(field)
        if field_xml:
            value = field_xml.value

            # We may want to edit. for x3d specific stuff
            # Sucks a bit to return the field name in the list but vrml expects this :/
            if SPLIT_COMMAS:
                value = value.replace(",", " ")
            return value.split()
        else:
            return None

    # Prerequisite for DEF/USE-based caching (see vrmlNode.canHaveReferences).
    def canHaveReferences(self):
        """Only DEF'd elements may be referenced/cached (truthy attribute node or None)."""
        return self.x3dNode.getAttributeNode('DEF')

    def desc(self):
        """Cache key: the raw XML of the (dereferenced) element."""
        return self.getRealNode().x3dNode.toxml()
def x3d_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (x3dNode, '') or (None, 'Error String')
    """
    import xml.dom.minidom
    import xml.sax
    from xml.sax import handler

    # Historical, pre-line-number-reporting version, kept for reference:
    # try:    doc = xml.dom.minidom.parse(path)
    # except: return None, 'Could not parse this X3D file, XML error'

    # Could add a try/except here, but a console error is more useful.
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Enable line number reporting in the parser - kinda brittle
    def set_content_handler(dom_handler):
        def startElementNS(name, tagName, attrs):
            orig_start_cb(name, tagName, attrs)
            cur_elem = dom_handler.elementStack[-1]
            # Stash the source position on the DOM element; x3dNode.parse()
            # reads it back for error reporting.
            cur_elem.parse_position = (parser._parser.CurrentLineNumber, parser._parser.CurrentColumnNumber)

        orig_start_cb = dom_handler.startElementNS
        dom_handler.startElementNS = startElementNS
        orig_set_content_handler(dom_handler)

    parser = xml.sax.make_parser()
    orig_set_content_handler = parser.setContentHandler
    # Never fetch external entities (also avoids network access on hostile files).
    parser.setFeature(handler.feature_external_ges, False)
    parser.setFeature(handler.feature_external_pes, False)
    parser.setContentHandler = set_content_handler

    doc = xml.dom.minidom.parseString(data, parser)

    try:
        x3dnode = doc.getElementsByTagName('X3D')[0]
    except IndexError:
        # No <X3D> root element at all.  (Was a bare except; only the [0]
        # subscript can fail here.)
        return None, 'Not a valid x3d document, cannot import'

    # NOTE(review): this bpy usage contradicts the "NO BLENDER CODE ABOVE THIS
    # LINE" marker below, and operator poll may fail when Blender runs in
    # background mode - TODO confirm and consider moving it to load_web3d().
    bpy.ops.object.select_all(action='DESELECT')

    root = x3dNode(None, NODE_NORMAL, x3dnode)
    root.setRoot(path)  # so images and Inline's we load have a relative path
    root.parse()

    return root, ''
1451 ## f = open('/_Cylinder.wrl', 'r')
1452 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1453 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1454 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
1456 import os
1457 files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
1458 files.sort()
1459 tot = len(files)
1460 for i, f in enumerate(files):
1461 #if i < 801:
1462 # continue
1464 f = f.strip()
1465 print(f, i, tot)
1466 vrml_parse(f)
1469 # NO BLENDER CODE ABOVE THIS LINE.
1470 # -----------------------------------------------------------------------------------
1471 import bpy
1472 from bpy_extras import image_utils, node_shader_utils
1473 from mathutils import Vector, Matrix, Quaternion
# Import-wide tunables.  CIRCLE_DETAIL presumably sets the segment count used
# when tessellating circular primitives elsewhere in this file - TODO confirm.
GLOBALS = {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Convert an X3D axis-angle (x, y, z, angle in radians) to a 4x4 rotation Matrix."""
    angle = rot[3]
    axis = Vector(rot[:3])
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 matrix carrying the given per-axis scale on its diagonal."""
    mat = Matrix()  # 4x4 identity by default
    for axis in range(3):
        mat[axis][axis] = sca[axis]
    return mat
def translateTransform(node, ancestry):
    """Build the 4x4 matrix for an X3D/VRML Transform node.

    Per the VRML97 spec the composition is T * C * R * SR * S * -SR * -C
    (translation, center, rotation, scaleOrientation, scale, then the
    inverses of scaleOrientation and center).  Absent fields contribute
    nothing - their matrices stay None and are skipped below.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    if cent:
        cent_mat = Matrix.Translation(cent)
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    if rot:
        rot_mat = translateRotation(rot)
    else:
        rot_mat = None

    if sca:
        sca_mat = translateScale(sca)
    else:
        sca_mat = None

    if scaori:
        scaori_mat = translateRotation(scaori)
        scaori_imat = scaori_mat.inverted()
    else:
        scaori_mat = scaori_imat = None

    if tx:
        tx_mat = Matrix.Translation(tx)
    else:
        tx_mat = None

    new_mat = Matrix()

    # Spec composition order; None entries (absent fields) are skipped.
    mats = [tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat]
    for mtx in mats:
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Build the 4x4 matrix for an X3D TextureTransform (2D UV transform).

    Composition is -C * S * R * C * T, as specified in the VRML97 docs.
    2D fields are lifted to 3D with Vector.to_3d(); rotation is around Z.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    if cent:
        # cent is at a corner by default
        cent_mat = Matrix.Translation(Vector(cent).to_3d())
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    if rot:
        rot_mat = Matrix.Rotation(rot, 4, 'Z')  # translateRotation(rot)
    else:
        rot_mat = None

    if sca:
        # NOTE(review): the third component is 0.0, zeroing the Z axis which is
        # unused for 2D UVs - confirm this is intentional.
        sca_mat = translateScale((sca[0], sca[1], 0.0))
    else:
        sca_mat = None

    if tx:
        tx_mat = Matrix.Translation(Vector(tx).to_3d())
    else:
        tx_mat = None

    new_mat = Matrix()

    # as specified in VRML97 docs
    mats = [cent_imat, sca_mat, rot_mat, cent_mat, tx_mat]

    for mtx in mats:
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate every Transform ancestor of *node* (plus *node* itself if it
    is a Transform) into a single world-space matrix, then apply the importer's
    global matrix."""
    transform_nodes = [n for n in ancestry if n.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)

    if mtx is None:
        mtx = Matrix()

    # Deepest transform is applied first: walk the chain outward.
    for node_tx in reversed(transform_nodes):
        mtx = translateTransform(node_tx, ancestry) @ mtx

    # Into worldspace.
    return global_matrix @ mtx
1594 # -----------------------------------------------------------------------------------
1595 # Mesh import utilities
1597 # Assumes that the mesh has polygons.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Copy a Color/ColorRGBA child node into a fresh vertex-color layer.

    Colors may be supplied per vertex or per loop; any other count is
    skipped with a console warning.  Assumes the mesh has polygons.
    """
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        lcol_layer = bpymesh.vertex_colors.new()

        if len(rgb) == len(bpymesh.vertices):
            # Per-vertex colors: expand to per-loop via each loop's vertex index.
            rgb = [rgb[l.vertex_index] for l in bpymesh.loops]
            rgb = tuple(chain(*rgb))
        elif len(rgb) == len(bpymesh.loops):
            rgb = tuple(chain(*rgb))
        else:
            print(
                "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
                (len(rgb), len(bpymesh.vertices), len(bpymesh.loops))
            )
            return

        lcol_layer.data.foreach_set("color", rgb)
1623 # Assumes that the vertices have not been rearranged compared to the
1624 # source file order # or in the order assumed by the spec (e. g. in
1625 # Elevation, in rows by x).
1626 # Assumes polygons have been set.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Copy a Normal child node onto the mesh, per vertex or per polygon.

    Assumes vertices are still in source-file order and polygons are set.
    No-op when the geometry has no Normal child.
    """
    normals = geom.getChildBySpec('Normal')
    if not normals:
        return

    per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    # NOTE(review): in recent Blender versions mesh normals are derived data,
    # so foreach_set("normal", ...) may be silently ignored - TODO confirm.
    if per_vertex:
        bpymesh.vertices.foreach_set("normal", vectors)
    else:
        bpymesh.polygons.foreach_set("normal", vectors)
1640 # Reads the standard Coordinate object - common for all mesh elements
1641 # Feeds the vertices in the mesh.
1642 # Rearranging the vertex order is a bad idea - other elements
1643 # in X3D might rely on it, if you need to rearrange, please play with
1644 # vertex indices in the polygons instead.
1646 # Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
1647 # brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Feed the Coordinate child's points straight into bpymesh.vertices.

    Vertex order is preserved exactly as in the source file - other X3D
    elements may rely on it.
    """
    # We want points here as a flat array, but the caching logic in
    # IndexedFaceSet presumes a 2D one; the case for caching is stronger there.
    coord_node = geom.getChildBySpec('Coordinate')
    flat_points = coord_node.getFieldAsArray('point', 0, ancestry)
    bpymesh.vertices.add(len(flat_points) // 3)
    bpymesh.vertices.foreach_set("co", flat_points)
1658 # Assumes that the order of vertices matches the source file.
1659 # Relies upon texture coordinates in the X3D node; if a coordinate generation
1660 # algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
1661 # to be implemented by the geometry handler.
1663 # Texture transform is applied in ProcessObject.
def importMesh_ApplyUVs(bpymesh, geom, ancestry):
    """Apply per-vertex texture coordinates from a TextureCoordinate child.

    No-op when the child or its points are missing.  The texture transform
    itself is applied later, in ProcessObject.
    """
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if not tex_coord:
        return

    uv_points = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not uv_points:
        return

    layer_data = bpymesh.uv_layers.new().data
    # Expand per-vertex UVs into a flat per-loop array, in polygon loop order.
    flat_uvs = [component
                for poly in bpymesh.polygons
                for vidx in poly.vertices
                for component in uv_points[vidx]]
    layer_data.foreach_set('uv', flat_uvs)
1680 # Common steps for all triangle meshes once the geometry has been set:
1681 # normals, vertex colors, and UVs.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry):
    """Common tail for every triangle mesh once geometry is set:
    normals, vertex colors and UVs, then validate/update."""
    for apply_step in (importMesh_ApplyNormals,
                       importMesh_ApplyColors,
                       importMesh_ApplyUVs):
        apply_step(bpymesh, geom, ancestry)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
1691 # Assumes that the mesh is stored as polygons and loops, and the premade array
1692 # of texture coordinates follows the loop array.
1693 # The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Assign a premade flat per-loop UV array to a fresh UV layer.

    The array must follow the mesh's loop order.
    """
    bpymesh.uv_layers.new().data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return the sequence unchanged for counter-clockwise winding,
    or reversed to flip the winding otherwise."""
    if ccw:
        return r
    return r[::-1]
1702 # -----------------------------------------------------------------------------------
1703 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry):
    """Import an X3D IndexedTriangleSet: 'index' lists vertex indices, three per triangle.

    solid is ignored; colorPerVertex is always true for this node.
    """
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # Fix: the mesh datablock used to be named the placeholder "XXX"; name it
    # after the node type like every other importer in this file.
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    num_polys = len(index) // 3
    if not ccw:
        # Swap the first two corners of every triangle to flip the winding.
        index = [index[3 * i + j] for i in range(num_polys) for j in (1, 0, 2)]

    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleStripSet(geom, ancestry):
    """Import an X3D IndexedTriangleStripSet; strips in 'index' are separated by -1."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    # A strip of n vertices yields n - 2 triangles; each -1 separator costs
    # its own entry plus the two "lost" triangles at the next strip's start.
    ngaps = sum(1 for i in index if i == -1)
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # i: base of the current triangle; odd: alternates winding within a
        # strip so all triangles keep the same facing.
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                # Skip the separator and reset the winding parity.
                i += 3
                odd = cw
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleFanSet(geom, ancestry):
    """Import an X3D IndexedTriangleFanSet; fans in 'index' are separated by -1.

    solid is ignored; colorPerVertex is always true for this node.
    """
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    # A fan of n vertices yields n - 2 triangles; each -1 separator costs its
    # own entry plus the two "lost" triangles at the next fan's start.
    ngaps = sum(1 for i in index if i == -1)
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # i: position of the current fan's hub vertex; j: edge offset in the fan.
        i = 0
        j = 1
        while True:
            yield index[i]  # fan hub
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                # Fix: advance past this fan AND its -1 separator relative to
                # the current base.  The previous 'i = j + 2' dropped the
                # accumulated base, mis-indexing the 3rd and later fans.
                i += j + 2
                j = 1
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleSet(geom, ancestry):
    """Import an X3D TriangleSet: each consecutive vertex triple is one triangle."""
    # solid is ignored; colorPerVertex is always true for this node.
    winding_ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    vert_count = len(bpymesh.vertices)
    tri_count = vert_count // 3
    bpymesh.loops.add(tri_count * 3)
    bpymesh.polygons.add(tri_count)
    bpymesh.polygons.foreach_set("loop_start", range(0, tri_count * 3, 3))

    if winding_ccw:
        corner_order = list(range(vert_count))
    else:
        # Swap the first two corners of each triangle to flip the winding.
        corner_order = [3 * tri + corner
                        for tri in range(vert_count // 3)
                        for corner in (1, 0, 2)]
    bpymesh.polygons.foreach_set("vertices", corner_order)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleStripSet(geom, ancestry):
    """Import an X3D TriangleStripSet; 'stripCount' gives vertices per strip."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    # A strip of n vertices yields n - 2 triangles.
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # b: base vertex of the current strip; (j + cw) % 2 alternates the
        # winding within a strip so all triangles keep the same facing.
        b = 0
        for i in range(0, len(counts)):
            for j in range(0, counts[i] - 2):
                yield b + j + (j + cw) % 2
                yield b + j + 1 - (j + cw) % 2
                yield b + j + 2
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleFanSet(geom, ancestry):
    """Import an X3D TriangleFanSet; 'fanCount' gives vertices per fan.

    solid is ignored; colorPerVertex is always true for this node.
    """
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # Fix: the datablock was mis-named "TriangleStripSet" (copy-paste from the
    # strip importer above); name it after the actual node type.
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    # A fan of n vertices yields n - 2 triangles.
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # b: hub vertex of the current fan.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b  # fan hub
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedFaceSet(geom, ancestry):
    """Import an X3D IndexedFaceSet: arbitrary polygons via 'coordIndex',
    with optional per-vertex/per-face normals, colors and UVs, and with
    vertex culling when the referenced Coordinate set is much larger than
    this mesh's index."""
    # Saw the following structure in X3Ds: the first mesh has a huge set
    # of vertices and a reasonably sized index. The rest of the meshes
    # reference the Coordinate node from the first one, and have their
    # own reasonably sized indices.
    #
    # In Blender, to the best of my knowledge, there's no way to reuse
    # the vertex set between meshes. So we have culling logic instead -
    # for each mesh, only leave vertices that are used for faces.

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Drop trailing face separators.
    while index and index[-1] == -1:
        del index[-1]

    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
        new_index = 0
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
                face = []
        else:
            if cull is not None:
                if not(i in cull):
                    culled_points.append(points[i])
                    cull[i] = new_index
                    uncull.append(i)
                    i = new_index
                    new_index += 1
                else:
                    i = cull[i]
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        """Map a flat per-vertex attribute index into per-face index lists,
        falling back to the (unculled) face lists when no index is given."""
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f
                  for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces)
                  for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f
                   for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture coordinates (UVs)
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
        tex_index = processPerVertexIndex(tex_index)
        loops = [co for f in tex_index
                 for v in f
                 for co in tex_coord_points[v]]
    else:
        # No explicit UVs: generate them by projecting onto the two largest
        # axes of the used-vertex bounding box (spec's default algorithm).
        x_min = y_min = z_min = math.inf
        x_max = y_max = z_max = -math.inf
        for f in faces:
            # Unused vertices don't participate in size; X3DOM does so
            for v in f:
                (x, y, z) = points[v]
                x_min = min(x_min, x)
                x_max = max(x_max, x)
                y_min = min(y_min, y)
                y_max = max(y_max, y)
                z_min = min(z_min, z)
                z_max = max(z_max, z)

        mins = (x_min, y_min, z_min)
        deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
        axes = [0, 1, 2]
        axes.sort(key=lambda a: (-deltas[a], a))
        # Tuple comparison breaks ties
        (s_axis, t_axis) = axes[0:2]
        s_min = mins[s_axis]
        ds = deltas[s_axis]
        t_min = mins[t_axis]
        dt = deltas[t_axis]

        # Avoid divide by zero T76303.
        if not (ds > 0.0):
            ds = 1.0
        if not (dt > 0.0):
            dt = 1.0

        def generatePointCoords(pt):
            return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
        loops = [co for f in faces
                 for v in f
                 for co in generatePointCoords(points[v])]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry):
    """Import an X3D ElevationGrid: a heightfield of x_dim * z_dim samples
    turned into (x_dim - 1) * (z_dim - 1) quads, with optional per-vertex or
    per-face colors and UVs.

    Fix: the final UV assignment referenced the undefined name 'uv' instead
    of 'uvs', raising NameError whenever this geometry type was imported.
    """
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    # NOTE(review): vertices are generated x-outer/z-inner here while the
    # faces below index them as z * x_dim + x; for non-square grids that looks
    # transposed - TODO confirm against the X3D spec before changing.
    co = [w for x in range(x_dim) for z in range(z_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    num_polys = (x_dim - 1) * (z_dim - 1)
    bpymesh.loops.add(num_polys * 4)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it was important that the final vertex index was not 0
    # (Blender treated it as a triangle then).
    # So simply reversing the face was not an option.
    # With bmesh polygons, this has no importance anymore, but keep existing code for now.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.polygons.foreach_set("vertices", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
            # Array of arrays; no need to flatten
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.vertex_colors.new().data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * x_dim + x,
                                            z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                                            (z + 1) * x_dim + x + 1,
                                            (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)
                            for c in rgb[rgb_idx]])
        else:  # Coloring per face
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * (x_dim - 1) + x,) * 4
                            for c in rgb[rgb_idx]])

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    else:
        # NOTE(review): divides by zero when x_dim or z_dim is 1 - TODO confirm
        # whether degenerate grids can reach this point.
        uvs = [(i / (x_dim - 1), j / (z_dim - 1))
               for i in range(x_dim)
               for j in range(z_dim)]

    d = bpymesh.uv_layers.new().data
    # Rather than repeat the face/vertex algorithm from above, we read
    # the vertex index back from polygon. Might be suboptimal.
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    d.foreach_set('uv', uvs)  # Fix: was foreach_set('uv', uv) - NameError

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2130 def importMesh_Extrusion(geom, ancestry):
2131 # Interestingly, the spec doesn't allow for vertex/face colors in this
2132 # element, nor for normals.
2133 # Since coloring and normals are not supported here, and also large
2134 # polygons for caps might be required, we shall use from_pydata().
2136 ccw = geom.getFieldAsBool('ccw', True, ancestry)
2137 begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
2138 end_cap = geom.getFieldAsBool('endCap', True, ancestry)
2139 cross = geom.getFieldAsArray('crossSection', 2, ancestry)
2140 if not cross:
2141 cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2142 spine = geom.getFieldAsArray('spine', 3, ancestry)
2143 if not spine:
2144 spine = ((0, 0, 0), (0, 1, 0))
2145 orient = geom.getFieldAsArray('orientation', 4, ancestry)
2146 if orient:
2147 orient = [Quaternion(o[:3], o[3]).to_matrix()
2148 if o[3] else None for o in orient]
2149 scale = geom.getFieldAsArray('scale', 2, ancestry)
2150 if scale:
2151 scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
2152 if s[0] != 1 or s[1] != 1 else None for s in scale]
2154 # Special treatment for the closed spine and cross section.
2155 # Let's save some memory by not creating identical but distinct vertices;
2156 # later we'll introduce conditional logic to link the last vertex with
2157 # the first one where necessary.
2158 cross_closed = cross[0] == cross[-1]
2159 if cross_closed:
2160 cross = cross[:-1]
2161 nc = len(cross)
2162 cross = [Vector((c[0], 0, c[1])) for c in cross]
2163 ncf = nc if cross_closed else nc - 1
2164 # Face count along the cross; for closed cross, it's the same as the
2165 # respective vertex count
2167 spine_closed = spine[0] == spine[-1]
2168 if spine_closed:
2169 spine = spine[:-1]
2170 ns = len(spine)
2171 spine = [Vector(s) for s in spine]
2172 nsf = ns if spine_closed else ns - 1
2174 # This will be used for fallback, where the current spine point joins
2175 # two collinear spine segments. No need to recheck the case of the
2176 # closed spine/last-to-first point juncture; if there's an angle there,
2177 # it would kick in on the first iteration of the main loop by spine.
2178 def findFirstAngleNormal():
2179 for i in range(1, ns - 1):
2180 spt = spine[i]
2181 z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
2182 if z.length > EPSILON:
2183 return z
2184 # All the spines are collinear. Fallback to the rotated source
2185 # XZ plane.
2186 # TODO: handle the situation where the first two spine points match
2187 v = spine[1] - spine[0]
2188 orig_y = Vector((0, 1, 0))
2189 orig_z = Vector((0, 0, 1))
2190 if v.cross(orig_y).length >= EPSILON:
2191 # Spine at angle with global y - rotate the z accordingly
2192 orig_z.rotate(orig_y.rotation_difference(v))
2193 return orig_z
2195 verts = []
2196 z = None
2197 for i, spt in enumerate(spine):
2198 if (i > 0 and i < ns - 1) or spine_closed:
2199 snext = spine[(i + 1) % ns]
2200 sprev = spine[(i - 1 + ns) % ns]
2201 y = snext - sprev
2202 vnext = snext - spt
2203 vprev = sprev - spt
2204 try_z = vnext.cross(vprev)
2205 # Might be zero, then all kinds of fallback
2206 if try_z.length > EPSILON:
2207 if z is not None and try_z.dot(z) < 0:
2208 try_z.negate()
2209 z = try_z
2210 elif not z: # No z, and no previous z.
2211 # Look ahead, see if there's at least one point where
2212 # spines are not collinear.
2213 z = findFirstAngleNormal()
2214 elif i == 0: # And non-crossed
2215 snext = spine[i + 1]
2216 y = snext - spt
2217 z = findFirstAngleNormal()
2218 else: # last point and not crossed
2219 sprev = spine[i - 1]
2220 y = spt - sprev
2221 # If there's more than one point in the spine, z is already set.
2222 # One point in the spline is an error anyway.
2224 x = y.cross(z)
2225 m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
2226 # Columns are the unit vectors for the xz plane for the cross-section
2227 m.normalize()
2228 if orient:
2229 mrot = orient[i] if len(orient) > 1 else orient[0]
2230 if mrot:
2231 m @= mrot # Not sure about this. Counterexample???
2232 if scale:
2233 mscale = scale[i] if len(scale) > 1 else scale[0]
2234 if mscale:
2235 m @= mscale
2236 # First the cross-section 2-vector is scaled,
2237 # then applied to the xz plane unit vectors
2238 for cpt in cross:
2239 verts.append((spt + m @ cpt).to_tuple())
2240 # Could've done this with a single 4x4 matrix... Oh well
2242 # The method from_pydata() treats correctly quads with final vertex
2243 # index being zero.
2244 # So we just flip the vertices if ccw is off.
2246 faces = []
2247 if begin_cap:
2248 faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
2250 # Order of edges in the face: forward along cross, forward along spine,
2251 # backward along cross, backward along spine, flipped if now ccw.
2252 # This order is assumed later in the texture coordinate assignment;
2253 # please don't change without syncing.
2255 faces += [flip((
2256 s * nc + c,
2257 s * nc + (c + 1) % nc,
2258 (s + 1) * nc + (c + 1) % nc,
2259 (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
2261 if spine_closed:
2262 # The faces between the last and the first spine points
2263 b = (ns - 1) * nc
2264 faces += [flip((
2265 b + c,
2266 b + (c + 1) % nc,
2267 (c + 1) % nc,
2268 c), ccw) for c in range(ncf)]
2270 if end_cap:
2271 faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
2273 bpymesh = bpy.data.meshes.new(name="Extrusion")
2274 bpymesh.from_pydata(verts, [], faces)
2276 # The way we deal with textures in triangular meshes doesn't apply.
2277 # The structure of the loop array goes: cap, side, cap
2278 if begin_cap or end_cap: # Need dimensions
2279 x_min = x_max = z_min = z_max = None
2280 for c in cross:
2281 (x, z) = (c.x, c.z)
2282 if x_min is None or x < x_min:
2283 x_min = x
2284 if x_max is None or x > x_max:
2285 x_max = x
2286 if z_min is None or z < z_min:
2287 z_min = z
2288 if z_max is None or z > z_max:
2289 z_max = z
2290 dx = x_max - x_min
2291 dz = z_max - z_min
2292 cap_scale = dz if dz > dx else dx
2294 # Takes an index in the cross array, returns scaled
2295 # texture coords for cap texturing purposes
2296 def scaledLoopVertex(i):
2297 c = cross[i]
2298 return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
2300 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2302 loops = []
2303 mloops = bpymesh.loops
2304 if begin_cap: # vertex indices match the indices in cross
2305 # Rely on the loops in the mesh; don't repeat the face
2306 # generation logic here
2307 loops += [co for i in range(nc)
2308 for co in scaledLoopVertex(mloops[i].vertex_index)]
2310 # Sides
2311 # Same order of vertices as in face generation
2312 # We don't rely on the loops in the mesh; instead,
2313 # we repeat the face generation logic.
2314 loops += [co for s in range(nsf)
2315 for c in range(ncf)
2316 for v in flip(((c / ncf, s / nsf),
2317 ((c + 1) / ncf, s / nsf),
2318 ((c + 1) / ncf, (s + 1) / nsf),
2319 (c / ncf, (s + 1) / nsf)), ccw) for co in v]
2321 if end_cap:
2322 # Base loop index for end cap
2323 lb = ncf * nsf * 4 + (nc if begin_cap else 0)
2324 # Rely on the loops here too.
2325 loops += [co for i in range(nc) for co
2326 in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
2327 importMesh_ApplyTextureToLoops(bpymesh, loops)
2329 bpymesh.validate()
2330 bpymesh.update()
2331 return bpymesh
2334 # -----------------------------------------------------------------------------------
2335 # Line and point sets
def importMesh_LineSet(geom, ancestry):
    """Build a poly-curve datablock from an X3D LineSet node.

    Line display properties and per-vertex color are ignored (TODO).
    """
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)

    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'

    base = 0
    for count in counts:
        spline = bpycurve.splines.new('POLY')
        # A freshly created spline already owns one point.
        spline.points.add(count - 1)
        # Source points are xyz triples; spline coords are xyzw quads.
        flat = [axis
                for pt in src_points[base:base + count]
                for axis in (pt[0], pt[1], pt[2], 0)]
        spline.points.foreach_set('co', flat)
        base += count
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry):
    """Build a poly-curve datablock from a VRML/X3D IndexedLineSet node.

    Per-vertex color is ignored (Blender curves have no equivalent).
    Returns None when the node carries no coordinates.
    """
    # getChildBySpec('Coordinate') resolves both the X3D and VRML forms.
    coord = geom.getChildBySpec('Coordinate')
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Split the flat index stream into polylines at each -1 sentinel.
    lines = []
    current = []
    for idx in ils_lines:
        if idx == -1:
            lines.append(current)
            current = []
        else:
            current.append(int(idx))
    lines.append(current)

    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for line in lines:
        if not line:
            continue
        spline = bpycurve.splines.new('POLY')
        spline.points.add(len(line) - 1)  # a new spline starts with 1 point
        for idx, pt in zip(line, spline.points):
            pt.co[0:3] = points[idx]

    return bpycurve
def importMesh_PointSet(geom, ancestry):
    """Build a vertex-only mesh from a VRML/X3D PointSet node.

    Per-vertex color is ignored (no Blender equivalent here).
    """
    coord = geom.getChildBySpec('Coordinate')  # X3D and VRML alike
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    bpymesh.vertices.foreach_set("co", [axis for pt in points for axis in pt])

    # A bare vertex cloud cannot be invalid, so validate() is skipped.
    bpymesh.update()
    return bpymesh
2427 # -----------------------------------------------------------------------------------
2428 # Primitives
2429 # SA: they used to use bpy.ops for primitive creation. That was
2430 # unbelievably slow on complex scenes. I rewrote to generate meshes
2431 # by hand.
2434 GLOBALS['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry):
    """Build a UV-sphere mesh with texture coordinates from an X3D Sphere.

    'solid' is ignored.  The nonstandard X3DOM attribute
    subdivision="n m" chooses the ring and segment counts.
    """
    # solid is ignored.
    # Extra field 'subdivision="n m"' attribute, specifying how many
    # rings and segments to use (X3DOM).
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    # Two pole vertices plus (nr - 1) rings of ns vertices each.
    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    # Caps are triangle fans (3 loops per face); side rings are quads.
    num_poly = ns * nr
    num_tri = ns * 2
    num_quad = num_poly - num_tri
    num_loop = num_quad * 4 + num_tri * 3
    tf = bpymesh.polygons
    tf.add(num_poly)
    bpymesh.loops.add(num_loop)
    # Loop layout: top-cap triangles, then ring quads, then bottom-cap
    # triangles - hence the three stride patterns below.
    bpymesh.polygons.foreach_set("loop_start",
                                 tuple(range(0, ns * 3, 3)) +
                                 tuple(range(ns * 3, num_loop - ns * 3, 4)) +
                                 tuple(range(num_loop - ns * 3, num_loop, 3)))

    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    tex = bpymesh.uv_layers.new().data

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        for lidx, uv in zip(tf[seg].loop_indices,
                            (((seg + 0.5) / ns, 1),
                             (seg / ns, 1 - 1 / nr),
                             ((seg + 1) / ns, 1 - 1 / nr))):
            tex[lidx].uv = uv
        for lidx, uv in zip(tf[fb + seg].loop_indices,
                            (((seg + 0.5) / ns, 0),
                             ((seg + 1) / ns, 1 / nr),
                             (seg / ns, 1 / nr))):
            tex[lidx].uv = uv

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            for lidx, uv in zip(tf[rfb + seg].loop_indices,
                                ((seg / ns, 1 - (ring + 1) / nr),
                                 (seg / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 1) / nr))):
                tex[lidx].uv = uv

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry):
    """Build a cylinder mesh with UVs from an X3D Cylinder node.

    'solid' is ignored; there is no 'ccw' on this element.  The
    nonstandard subdivision="n" attribute sets the side face count.
    """
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    nn = n * 2
    angle = 2 * pi / n
    half = height / 2

    # Vertices pair up around the rim: even index = top, odd = bottom.
    # The seam is at x=0, z=-r; the rim runs ccw (+x, -z, -x, back to -z).
    verts = []
    for i in range(n):
        x = -radius * sin(angle * i)
        z = -radius * cos(angle * i)
        verts.append((x, half, z))
        verts.append((x, -half, z))

    faces = []
    if side:
        # Edge order within each side quad: up, left, down, right.
        # The texture-coordinate code below depends on this order.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)]
        faces.append((1, 0, nn - 2, nn - 1))  # quad closing the seam
    if top:
        faces.append(list(range(0, nn, 2)))
    if bottom:
        faces.append(list(range(nn - 1, -1, -2)))

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Building polygons/loops/edges by hand instead was measured to make
    # no real difference on Blender 2.74 (Win64).

    bpymesh.validate()

    # Loop coordinates, in the same order the faces were added:
    # sides, then top cap, then bottom cap.
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]
    if top:
        loops += [0.5 + co / 2 for i in range(n)
                  for co in (-sin(angle * i), cos(angle * i))]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Build a cone mesh with UVs from an X3D Cone node.

    'solid' is ignored.  The nonstandard subdivision="n" attribute sets
    the number of side faces.
    """
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Vertex 0 is the apex; the base rim follows, ccw from the seam.
    # Fix: the base-rim tuples were missing their y component (-d),
    # yielding 2-tuples next to the 3-tuple apex and breaking from_pydata().
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]
    faces = []

    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate()
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]
    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Build an axis-aligned box mesh with per-face UVs from an X3D Box.

    'solid' is ignored; there is no 'ccw' on this element.
    """
    # Solid is ignored
    # No ccw in this element
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    # 'size' is the full extent; vertices sit at +/- half of it.
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    # 6 quad faces, 4 loops each.
    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    # One UV quad per face; the +y/-y faces use a rotated layout.
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2673 # -----------------------------------------------------------------------------------
2674 # Utilities for importShape
2677 # Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    """Create a Blender material from an X3D Material node.

    Texture is applied later, in appearance_Create().  All field values
    are 0.0-1.0 with defaults from the VRML docs.  Returns a
    PrincipledBSDFWrapper around the new material.
    """
    mat_name = mat.getDefName()
    bpymat = bpy.data.materials.new(mat_name if mat_name else vrmlname)
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    # TODO: handle 'ambientIntensity'.
    #ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)

    diff_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
    bpymat_wrap.base_color = diff_color

    emit_color = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat_wrap.emission_color = emit_color

    # NOTE - 'shininess' is being handled as 1 - roughness for now.
    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat_wrap.roughness = 1.0 - shininess

    #bpymat.specular_hardness = int(1 + (510 * shininess))
    # 0-1 -> 1-511
    # TODO: handle 'specularColor'.
    #specular_color = mat.getFieldAsFloatTuple('specularColor',
    #                                          [0.0, 0.0, 0.0], ancestry)

    # Transparent materials need alpha blending to render correctly.
    alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    bpymat_wrap.alpha = alpha
    if alpha < 1.0:
        bpymat.blend_method = "BLEND"
        bpymat.shadow_method = "HASHED"

    # NOTE - leaving this disabled for now
    if False and is_vcol:
        node_vertex_color = bpymat.node_tree.nodes.new("ShaderNodeVertexColor")
        node_vertex_color.location = (-200, 300)

        # Fix: this links.new(...) call was missing its closing
        # parenthesis, which left the module unparseable.
        bpymat.node_tree.links.new(
            bpymat_wrap.node_principled_bsdf.inputs["Base Color"],
            node_vertex_color.outputs["Color"]
        )

    return bpymat_wrap
def appearance_CreateDefaultMaterial():
    """Create a Blender material carrying the X3D default appearance.

    Used for shapes without an explicit Material definition
    (though possibly with a texture).  Returns a PrincipledBSDFWrapper.
    """
    default_mat = bpy.data.materials.new("Material")
    wrap = node_shader_utils.PrincipledBSDFWrapper(default_mat, is_readonly=False)

    # X3D defaults: 0.8 grey diffuse, fully opaque.
    wrap.base_color = (0.8, 0.8, 0.8)
    wrap.roughness = 0.8
    wrap.alpha = 1.0

    # TODO: handle 'shininess' and 'specularColor'.
    return wrap
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each URL in ima_urls until one loads; return the image or None.

    node supplies the source filename whose directory is used for
    resolving relative URLs.
    """
    # The base directory is loop-invariant; compute it once.
    dirname = os.path.dirname(node.getFilename())
    for f in ima_urls:
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            return bpyima
    return None
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Load the image referenced by an ImageTexture node; return it or None.

    The 'url' field may arrive as a single string, a quoted multi-string
    ('"foo" "bar"'), or an MFString array; all forms are normalized to a
    list of candidate URLs that are tried in order.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            # in some cases we get a list of images.
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
        except Exception:  # was a bare 'except:' - don't swallow SystemExit etc.
            ima_urls = None
    elif '" "' in ima_urls:
        # '"foo" "bar"' --> ['foo', 'bar']
        ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
    else:
        ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None

    bpyima = appearance_LoadImageTextureFile(ima_urls, node)

    if not bpyima:
        print("ImportX3D warning: unable to load texture", ima_urls)
    else:
        # KNOWN BUG; PNGs with a transparent color are not perceived
        # as transparent. Need alpha channel.
        if bpyima.depth not in {32, 128}:
            bpyima.alpha_mode = 'NONE'
    return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load (or fetch from cache) the image for an Image/PixelTexture node.

    Two caches are consulted: USE-based (the parsed image stored on the
    referenced node) and description-based (module-level texture_cache,
    keyed by the node's desc).  The desc key might misfire on multifile
    models where the same desc means different things in different
    files - TODO: move caches to file level.
    """
    # USE reference: the real node already carries the parsed image.
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    desc = tex_node.desc()
    cached = texture_cache.get(desc) if desc else None
    if cached is not None:
        if tex_node.canHaveReferences():
            tex_node.parsed = cached
        return cached

    # Cache miss - actually load the texture.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    # Loading can still fail; only populate the caches on success.
    if bpyima:
        if desc:
            texture_cache[desc] = bpyima
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    """Expand a cached material into the (material, image, has_alpha) triple.

    The legacy texture_slots path (pre-2.8 Blender) was permanently
    disabled behind an 'if 0 and ...' guard; that dead branch has been
    removed.  Cached materials therefore always expand with no image
    and no alpha.
    """
    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (mat_desc, tex_desc) key for description-based material
    caching, or None when desc-based caching must stay off.

    A missing node contributes "Default".  A present node whose desc()
    is None (VRML - TODO: serialize VRML nodes!!!) disables caching,
    except that the null material (both nodes absent) is always cached.
    """
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    tex_blocks = tex_node and tex_desc is None
    mat_blocks = material and mat_desc is None
    if not tex_blocks and not mat_blocks:
        return (mat_desc, tex_desc)
    if material or tex_node:
        return None  # Desc-based caching is off
    # Even for VRML, we cache the null material
    return ("Default", "Default")
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Create a Blender material from an X3D appearance.

    Returns (material, image, tex_has_alpha); image is None when there
    is no texture node or loading failed.
    """
    wrap = (appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
            if material else appearance_CreateDefaultMaterial())

    # Texture caching happens inside appearance_LoadTexture().
    bpyima = appearance_LoadTexture(tex_node, ancestry, node) if tex_node else None
    tex_has_alpha = False

    if bpyima:
        wrap.base_color_texture.image = bpyima

        # NOTE - not possible to handle x and y tiling individually.
        repeatS = tex_node.getFieldAsBool('repeatS', True, ancestry)
        repeatT = tex_node.getFieldAsBool('repeatT', True, ancestry)
        extension = "REPEAT" if repeatS or repeatT else "CLIP"
        wrap.base_color_texture.extension = extension

        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        if tex_has_alpha:
            wrap.alpha_texture.image = bpyima
            wrap.alpha_texture.extension = extension

    return (wrap.material, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Create a Blender image from an X3D/VRML PixelTexture SFImage field.

    The 'image' field is (width, height, plane_count, pixel, pixel, ...)
    with each pixel's components packed into a single integer.
    """
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Conditional above the loop, for performance
    # Blender wants a flat list of RGBA floats in 0..1; unpack each
    # packed integer into four channel bytes, padding alpha with 255
    # where the source has none.
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
2999 # Called from importShape to insert a data object (typically a mesh)
3000 # into the scene
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap a freshly built datablock in an object and link it to the scene.

    Applies the material, crease-angle smoothing, optional
    texture-transform UV remapping, and the node's final world matrix.
    """
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.auto_smooth_angle = creaseAngle
            bpydata.use_auto_smooth = True

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'
                bpymat.shadow_method = 'HASHED'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]
                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        # Tag the object with the source line for debugging bad files.
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Build a Blender FONT curve from an X3D Text node.

    Only the FontStyle 'size' field and the 'string' body are honored.
    """
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    # Robustness: a Text node without a 'string' field used to crash on
    # body.split(); treat it as empty text instead.
    lines = [] if body is None else [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(lines)
    bpytext.size = size
    return bpytext
3067 # -----------------------------------------------------------------------------------
# Dispatch table: X3D/VRML geometry node spec -> importer function.
# Fix: the dict literal was missing its closing brace, leaving it
# unterminated.
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
}
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import a Shape node: instance a cached object or build a new one.

    A Shape may contain only Appearance, Metadata* and one geometry node.
    """
    # Already imported via USE?  Just link a copy of the cached object.
    bpyob = node.getRealNode().blendObject
    if bpyob is not None:
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Transform the object, not the data, so the data stays instanced.
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName() or 'Shape'

    # Anything that is neither Appearance nor Metadata* is the geometry.
    geom = node.getChildBySpecCondition(
        lambda spec: spec != "Appearance" and not spec.startswith("Metadata"))
    if not geom:
        return  # Shape without geometry - nothing to do

    appr = node.getChildBySpec('Appearance')
    bpymat = bpyima = texmtx = None
    tex_has_alpha = False
    is_vcol = geom.getChildBySpec(['Color', 'ColorRGBA']) is not None

    if appr:
        bpymat, bpyima, tex_has_alpha = importShape_LoadAppearance(
            vrmlname, appr, ancestry, node, is_vcol)
        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    geom_spec = geom.getSpec()
    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is None:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
        return

    bpydata = geom_fn(geom, ancestry)
    # There are no geometry importers that can legally return no object;
    # it's either a bpy object or an exception.
    importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec,
        node, bpymat, tex_has_alpha, texmtx,
        ancestry, global_matrix)
3152 # -----------------------------------------------------------------------------------
3153 # Lighting
def importLamp_PointLight(node, ancestry):
    """Create a point light datablock and its placement matrix.

    Returns (light, matrix).  ambientIntensity, attenuation and 'on'
    are still TODO.
    """
    vrmlname = node.getDefName() or 'PointLight'

    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # max intensity is documented to be 1.0 but some files have higher.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    return bpylamp, Matrix.Translation(Vector(location))
def importLamp_DirectionalLight(node, ancestry):
    """Build a Blender SUN light from a VRML/X3D DirectionalLight node.

    Returns a (light datablock, orientation matrix) pair; directional
    lights carry a direction but no position.
    """
    name = node.getDefName() or 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0)  # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # spec caps this at 1.0, but real files exceed it
    # is_on = node.getFieldAsBool('on', True, ancestry)  # TODO

    bpylamp = bpy.data.lights.new(name, 'SUN')
    bpylamp.energy = intensity
    bpylamp.color = color

    # Blender lights shine along their local -Z with +Y up, so aim -Z
    # along the VRML direction vector.
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
# looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.


def importLamp_SpotLight(node, ancestry):
    """Build a Blender SPOT light from a VRML/X3D SpotLight node.

    Returns a (light datablock, placement matrix) pair; the matrix combines
    the node's location with its aim direction.
    """
    name = node.getDefName() or 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry)  # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry)  # TODO
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # X3D's cutOffAngle is the half-angle; Blender's spot_size spans the full cone.
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # spec caps this at 1.0, but real files exceed it
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry)  # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(name, 'SPOT')
    bpylamp.energy = intensity
    # NOTE(review): Light.distance is a legacy attribute — confirm it still
    # exists on current bpy.types.Light builds.
    bpylamp.distance = radius
    bpylamp.color = color
    bpylamp.spot_size = cutOffAngle

    # Map the beam/cone ratio onto Blender's spot_blend (0 = hard edge).
    if beamWidth > cutOffAngle:
        bpylamp.spot_blend = 0.0
    elif cutOffAngle == 0.0:  # this should never happen!
        bpylamp.spot_blend = 0.5
    else:
        bpylamp.spot_blend = beamWidth / cutOffAngle

    # Convert: lights aim down -Z with y == up.
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Dispatch a VRML/X3D light node to its importer, create the object,
    link it into *bpycollection*, and place it in the scene.

    Raises ValueError when *spec* is not a known light type.
    """
    lamp_importers = {
        'PointLight': importLamp_PointLight,
        'DirectionalLight': importLamp_DirectionalLight,
        'SpotLight': importLamp_SpotLight,
    }
    importer = lamp_importers.get(spec)
    if importer is None:
        print("Error, not a lamp")
        raise ValueError

    bpylamp, mtx = importer(node, ancestry)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3257 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Create a camera object for a VRML/X3D Viewpoint node and link it
    into *bpycollection* at the viewpoint's position/orientation.
    """
    name = node.getDefName() or 'Viewpoint'

    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)  # parsed but currently unused

    bpycam = bpy.data.cameras.new(name)
    bpycam.angle = fieldOfView

    # Translation first, then the axis/angle orientation of the viewpoint.
    mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Create an empty object standing in for a VRML/X3D Transform node
    (used only when importing with a real object hierarchy).
    """
    name = node.getDefName() or 'Transform'

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # Keep the placeholder empties small so they are not too annoying.
    bpyob.empty_display_type = 'PLAIN_AXES'
    bpyob.empty_display_size = 0.2
3299 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the F-Curve on *action* matching (data_path, array_index),
    creating a new one when no match exists.
    """
    existing = next(
        (fcu for fcu in action.fcurves
         if fcu.data_path == data_path and fcu.array_index == array_index),
        None,
    )
    if existing is not None:
        return existing

    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Convert a VRML/X3D PositionInterpolator into location F-Curves on *action*.

    'key' holds the keyframe times and 'keyValue' the matching (x, y, z)
    triples; keys without a well-formed triple are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    loc_x = action_fcurve_ensure(action, "location", 0)
    loc_y = action_fcurve_ensure(action, "location", 1)
    loc_z = action_fcurve_ensure(action, "location", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Was a bare except: only missing or malformed triples are expected.
            continue

        loc_x.keyframe_points.insert(time, x)
        loc_y.keyframe_points.insert(time, y)
        loc_z.keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear, not Blender's default bezier.
    for fcu in (loc_x, loc_y, loc_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Convert a VRML/X3D OrientationInterpolator into rotation_euler F-Curves.

    'keyValue' holds (x, y, z, w) axis-angle rotations; each is converted
    to a matrix via translateRotation() and keyed as Euler angles.
    Keys without a well-formed 4-tuple are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
    rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
    rot_z = action_fcurve_ensure(action, "rotation_euler", 2)

    for i, time in enumerate(key):
        try:
            x, y, z, w = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Was a bare except: only missing or malformed tuples are expected.
            continue

        mtx = translateRotation((x, y, z, w))
        eul = mtx.to_euler()
        rot_x.keyframe_points.insert(time, eul.x)
        rot_y.keyframe_points.insert(time, eul.y)
        rot_z.keyframe_points.insert(time, eul.z)

    # VRML interpolators are piecewise linear, not Blender's default bezier.
    for fcu in (rot_x, rot_y, rot_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
# Previously marked "Untested!" — see the fixes documented in the docstring.
def translateScalarInterpolator(node, action, ancestry):
    """Convert a scale-targeted interpolator into scale F-Curves on *action*.

    Fixes over the old version: 'keyValue' is read as triples (group size 3)
    to match the (x, y, z) unpack below — it was read 4-wide, so every key
    failed to unpack and was silently skipped; keyframes are added with
    keyframe_points.insert() (FCurveKeyframePoints has no .new() method);
    and LINEAR interpolation is applied for consistency with the
    position/orientation translators.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)  # was 4: values are xyz triples

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Only missing or malformed triples are expected here.
            continue

        sca_x.keyframe_points.insert(time, x)
        sca_y.keyframe_points.insert(time, y)
        sca_z.keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear, not Blender's default bezier.
    for fcu in (sca_x, sca_y, sca_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """
    # XXX25 TODO — the 2.5+ animation port of this function was never done;
    # bail out unconditionally so the legacy code below never runs.
    if 1:
        return

    # NOTE(review): everything from here down is unreachable legacy
    # Blender 2.4x Ipo code (action.addCurve / the 'Blender' module no
    # longer exist) kept only as a reference for a future port.
    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    # cycleInterval, when present, overrides the explicit stop time.
    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment.

    Handles routing nodes to each other, e.g.:

        ROUTE vpPI.value_changed TO champFly001.set_position
        ROUTE vpOI.value_changed TO champFly001.set_orientation
        ROUTE vpTs.fraction_changed TO vpPI.set_fraction
        ROUTE vpTs.fraction_changed TO vpOI.set_fraction
        ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    # Only nodes that carry raw fields (the file/proto roots) hold ROUTEs.
    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # One action per route target, created lazily on first use.
        try:
            action = routeIpoDict[act_id]
        except KeyError:  # was a bare except; only a missing key is expected
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except (IndexError, ValueError):  # was a bare except; malformed ROUTE statement
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
def load_web3d(
        bpycontext,
        filepath,
        PREF_FLAT=False,
        PREF_CIRCLE_DIV=16,
        global_matrix=None,
        HELPER_FUNC=None,
        ):
    """Parse a VRML/X3D file and build the corresponding Blender scene data.

    bpycontext: Blender context supplying the collection/view layer to fill.
    filepath: '.x3d' files go through the XML parser, anything else is
        treated as VRML.
    PREF_FLAT: when True, skip Transform empties and object parenting.
    PREF_CIRCLE_DIV: tessellation detail used when adding primitives.
    global_matrix: optional root transform (identity when None).
    HELPER_FUNC: optional callback run on every node before the built-in
        handlers (lets external scripts extend the importer; gets first pick).
    """
    # Used when adding blender primitives
    GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV

    # NOTE - reset material cache
    # (otherwise we might get "StructRNA of type Material has been removed" errors)
    global material_cache
    material_cache = {}

    bpycollection = bpycontext.collection
    #root_node = vrml_parse('/_Cylinder.wrl')
    if filepath.lower().endswith('.x3d'):
        root_node, msg = x3d_parse(filepath)
    else:
        root_node, msg = vrml_parse(filepath)

    if not root_node:
        print(msg)
        return

    if global_matrix is None:
        global_matrix = Matrix()

    # fill with tuples - (node, [parents-parent, parent])
    all_nodes = root_node.getSerialized([], [])

    for node, ancestry in all_nodes:
        #if 'castle.wrl' not in node.getFilename():
        #    continue

        spec = node.getSpec()

        # PROTO declarations are definitions only; never instantiate them here.
        if node.getPrefix() == 'PROTO':
            continue

        if HELPER_FUNC and HELPER_FUNC(node, ancestry):
            # External extension hook handled this node first.
            pass
        if spec == 'Shape':
            importShape(bpycollection, node, ancestry, global_matrix)
        elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
            importLamp(bpycollection, node, spec, ancestry, global_matrix)
        elif spec == 'Viewpoint':
            importViewpoint(bpycollection, node, ancestry, global_matrix)
        elif spec == 'Transform':
            # Only use transform nodes when we are not importing a flat object hierarchy
            if not PREF_FLAT:
                importTransform(bpycollection, node, ancestry, global_matrix)
        # Interpolators are dealt with later within importRoute. The legacy
        # branch below used the removed 2.4x 'bpy.data.ipos' API and called
        # translatePositionInterpolator without 'ancestry', so it is disabled:
        # elif spec == 'PositionInterpolator':
        #     action = bpy.data.ipos.new('web3d_ipo', 'Object')
        #     translatePositionInterpolator(node, action)

    # After we import all nodes, route events - anim paths
    for node, ancestry in all_nodes:
        importRoute(node, ancestry)

    for node, ancestry in all_nodes:
        if node.isRoot():
            # we know that all nodes referenced from will be in
            # routeIpoDict so no need to run node.getDefDict() for every node.
            routeIpoDict = node.getRouteIpoDict()
            defDict = node.getDefDict()

            for key, action in routeIpoDict.items():
                # Assign anim curves
                node = defDict[key]
                if node.blendData is None:  # Add an object if we need one for animation
                    bpyob = node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None)
                    bpycollection.objects.link(bpyob)
                    bpyob.select_set(True)

                if node.blendData.animation_data is None:
                    node.blendData.animation_data_create()

                node.blendData.animation_data.action = action

    # Add in hierarchy
    if not PREF_FLAT:
        child_dict = {}
        for node, ancestry in all_nodes:
            if node.blendObject:
                blendObject = None

                # Get the nearest ancestor that produced a Blender object.
                i = len(ancestry)
                while i:
                    i -= 1
                    blendObject = ancestry[i].blendObject
                    if blendObject:
                        break

                if blendObject:
                    # Group children per parent, then set parents in one pass
                    # below - much faster than parenting one at a time.
                    child_dict.setdefault(blendObject, []).append(node.blendObject)

        # Parent
        for parent, children in child_dict.items():
            for c in children:
                c.parent = parent

        # update deps
        bpycontext.view_layer.update()
        del child_dict
def load_with_profiler(
        context,
        filepath,
        global_matrix=None
        ):
    """Run load_web3d under cProfile and print the hottest 10% of entries,
    sorted by internal time (debugging aid).
    """
    import cProfile
    import pstats

    profiler = cProfile.Profile()
    profiler.runctx(
        "load_web3d(context, filepath, PREF_FLAT=True, "
        "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
        globals(), locals())

    stats = pstats.Stats(profiler)
    stats.sort_stats("time")
    stats.print_stats(0.1)
    # stats.print_callers(0.1)
def load(context,
         filepath,
         global_matrix=None
         ):
    """Operator entry point: import *filepath* as a flat hierarchy and
    report completion to Blender.
    """
    # Swap in load_with_profiler(...) here when profiling an import.
    load_web3d(context, filepath,
               PREF_FLAT=True,
               PREF_CIRCLE_DIV=16,
               global_matrix=global_matrix,
               )

    return {'FINISHED'}