# Fix T91113: printf in X3D Importer causes NameError
# [blender-addons.git] / io_scene_x3d / import_x3d.py
# blob 9218937244f2787e689118ac9b8d000ae531d61c
1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
19 # <pep8 compliant>
DEBUG = False

# This should work without a blender at all
import os
import shlex
import math
from math import sin, cos, pi
from itertools import chain

# Caches shared across an import session.
texture_cache = {}
material_cache = {}

EPSILON = 0.0000001  # Very crude.
def imageConvertCompat(path):
    """Convert a GIF image to PNG via ImageMagick, returning the new path.

    On Windows the path is returned untouched (QuickTime is assumed to be
    able to load it).  If the conversion fails, the original path is
    returned unchanged.
    """
    # win32: assume the platform can load the image natively, don't convert.
    if os.sep == '\\':
        return path

    if not path.lower().endswith('.gif'):
        return path

    converted = path[:-3] + 'png'
    # Best effort: just hope ImageMagick's `convert` is on the PATH.
    os.system('convert "%s" "%s"' % (path, converted))
    return converted if os.path.exists(converted) else path
56 # notes
57 # transform are relative
58 # order doesn't matter for loc/size/rot
59 # right handed rotation
60 # angles are in radians
61 # rotation first defines axis then amount in radians
64 # =============================== VRML Spesific
def vrml_split_fields(value):
    """
    key 0.0 otherkey 1,2,3 opt1 opt1 0.0
    -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
    """
    def _is_key(tok):
        # A key is an unquoted alphabetic token that is not a boolean literal.
        return tok[0] != '"' and tok[0].isalpha() and tok.upper() not in {'TRUE', 'FALSE'}

    fields = []
    current = []

    for tok in value:
        if not _is_key(tok):
            # A value: always belongs to the field being collected.
            current.append(tok)
            continue

        if not current:
            # Empty context: this key starts the first field.
            current.append(tok)
            continue

        n = len(current)
        if n > 2 and current[-2] in {'DEF', 'USE'}:
            # Anything after DEF/USE is an identifier, not a new field.
            current.append(tok)
        elif (not _is_key(current[-1])) or (n == 3 and current[1] == 'IS'):
            # Previous token was a value (or an 'IS' mapping just completed),
            # so this key begins a new field.
            fields.append(current)
            current = [tok]
        else:
            # Consecutive keys: some fields are made of several keywords.
            current.append(tok)

    if current:
        fields.append(current)

    return fields
def vrmlFormat(data):
    """
    Keep this as a valid vrml file, but format in a way we can predict.
    """
    # Strip all comments - # not in strings - warning multiline strings are ignored.
    def strip_comment(l):
        l = l.strip()

        if l.startswith('#'):
            return ''

        i = l.find('#')
        if i == -1:
            return l

        # Most cases accounted for! if we have a comment at the end of the line do this...
        j = l.find('"')
        if j == -1:  # simple case: no strings on this line
            return l[:i].strip()

        # Walk the line tracking quote state so a '#' inside a string survives.
        q = False
        for i, c in enumerate(l):
            if c == '"':
                q = not q  # invert
            elif c == '#':
                if q is False:
                    return l[:i - 1]

        return l

    data = '\n'.join([strip_comment(l) for l in data.split('\n')])

    EXTRACT_STRINGS = True  # only needed when strings or filename contains ,[]{} chars :/

    if EXTRACT_STRINGS:
        # We need this so we can detect URL's
        data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

        # Pull every quoted string out (leaving '""') so the bracket/comma
        # splitting below cannot mangle string contents.
        string_ls = []
        search = '"'
        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search, last_i)
            if i != -1:
                start = i + len(search)  # first char after end of search
                end = data.find('"', start)
                if end != -1:
                    item = data[start:end]
                    string_ls.append(item)
                    data = data[:start] + data[end:]
                    ok = True  # keep looking
                    last_i = (end - len(item)) + 1

    # Bad, dont take strings into account
    data = data.replace('#', '\n#')
    data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')])

    data = data.replace('{', '\n{\n')
    data = data.replace('}', '\n}\n')
    data = data.replace('[', '\n[\n')
    data = data.replace(']', '\n]\n')
    data = data.replace(',', ' , ')  # make sure comma's separate

    # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
    # See T45195 for details.
    data = '\n'.join([' '.join(value) for l in data.split('\n') for value in vrml_split_fields(l.split())])

    if EXTRACT_STRINGS:
        # Put the extracted strings back into the (now empty) '""' slots,
        # consuming them in the original order.
        search = '"'
        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search + '"', last_i)
            if i != -1:
                start = i + len(search)  # first char after end of search
                item = string_ls.pop(0)
                data = data[:start] + item + data[start:]
                last_i = start + len(item) + 1
                ok = True

    # More annoying obscure cases where USE or DEF are placed on a newline
    # data = data.replace('\nDEF ', ' DEF ')
    # data = data.replace('\nUSE ', ' USE ')

    data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

    # Better to parse the file accounting for multiline arrays
    data = data.replace(',\n', ' , ')  # remove line endings with commas
    data = data.replace(']', '\n]\n')  # very very annoying - but some comma's are at the end of the list, must run this again.

    return [l for l in data.split('\n') if l]
NODE_NORMAL = 1  # {}
NODE_ARRAY = 2  # []
NODE_REFERENCE = 3  # USE foobar
# BUGFIX: this constant used to be commented out, but getNodePreText()
# returns it when it meets a 'PROTO' line, raising a NameError at runtime
# (same family of bug as T91113).
NODE_PROTO = 4

lines = []
def getNodePreText(i, words):
    """Scan forward from line *i*, collecting tokens into *words* until a
    node opening is found.

    Returns (node_type, next_line_index), or (0, -1) on failure.
    """
    use_node = False
    while len(words) < 5:
        if i >= len(lines):
            break
        elif lines[i].startswith('PROTO'):
            # BUGFIX: this used to `return NODE_PROTO, i + 1` while NODE_PROTO
            # was commented out at module level -> NameError.  Use the
            # literal value so this function stands on its own.
            return 4, i + 1  # NODE_PROTO
        elif lines[i] == '{':
            return NODE_NORMAL, i + 1
        elif lines[i].count('"') % 2 != 0:
            # Odd number of quotes? - part of a string, stop scanning.
            break
        else:
            new_words = lines[i].split()
            if 'USE' in new_words:
                use_node = True
            words.extend(new_words)
            i += 1

    # Check for USE node - no {
    # USE #id - should always be on the same line.
    if use_node:
        words[:] = words[:words.index('USE') + 2]
        if lines[i] == '{' and lines[i + 1] == '}':
            # USE sometimes has {} after it anyway
            i += 2
        return NODE_REFERENCE, i

    return 0, -1
def is_nodeline(i, words):
    """Decide whether line *i* begins a new node.

    Fills *words* with the node's leading tokens and returns
    (node_type, next_line_index); (0, 0) when this is not a node line.
    """
    if not lines[i][0].isalpha():
        return 0, 0

    # Prototype declarations are nodes of their own.
    if lines[i].startswith('PROTO'):
        words[:] = lines[i].split()
        return NODE_NORMAL, i + 1  # TODO - assumes the next line is a '[\n', skip that
    if lines[i].startswith('EXTERNPROTO'):
        words[:] = lines[i].split()
        return NODE_ARRAY, i + 1  # TODO - assumes the next line is a '[\n', skip that

    # NOTE(review): `is_protoline` and `proto_field_defs` are resolved
    # elsewhere in the full source file -- confirm against the complete file.
    proto_type, new_i = is_protoline(i, words, proto_field_defs)
    if new_i != -1:
        return proto_type, new_i

    # Simple "var [" type
    if lines[i + 1] == '[':
        if lines[i].count('"') % 2 == 0:
            words[:] = lines[i].split()
            return NODE_ARRAY, i + 2

    node_type, new_i = getNodePreText(i, words)
    if not node_type:
        if DEBUG:
            print("not node_type", lines[i])
        return 0, 0

    # We have a '{' after some values; verify none of the tokens is a raw
    # value, otherwise this is a field line rather than a node.
    for w_i, val in enumerate(words):
        if w_i != 0 and words[w_i - 1] in {'DEF', 'USE'}:
            # Anything after DEF/USE is an ID and can contain any chars.
            pass
        elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}:
            pass
        else:
            # There is a number in one of the values, therefore not a node.
            return 0, 0

    return node_type, new_i
def is_numline(i):
    """Does line *i* start with a number (possibly after a leading ', ')?

    Only the first token is tested; a slower all-token variant existed as a
    disabled string block in the original source.
    """
    line = lines[i]

    start = 2 if line.startswith(', ') else 0

    # Comma's always have a space before them, so the first token ends at
    # the first space; otherwise at the last character (quirk preserved).
    end = line.find(' ', start)
    if end == -1:
        end = len(line) - 1

    try:
        float(line[start:end])  # works for a float or int
        return True
    except:
        return False
class vrmlNode(object):
    """A node in the parsed VRML/X3D scene-graph tree."""
    # BUGFIX: 'parent' was listed twice while 'FIELD_NAMESPACE' was missing;
    # __init__ assigns self.FIELD_NAMESPACE, which raises AttributeError on
    # a __slots__ class that does not declare the name.
    __slots__ = ('id',
                 'fields',
                 'proto_node',
                 'proto_field_defs',
                 'proto_fields',
                 'node_type',
                 'parent',
                 'children',
                 'array_data',
                 'reference',
                 'lineno',
                 'filename',
                 'blendObject',
                 'blendData',
                 'DEF_NAMESPACE',
                 'ROUTE_IPO_NAMESPACE',
                 'FIELD_NAMESPACE',
                 'PROTO_NAMESPACE',
                 'x3dNode',
                 'parsed')
388 def __init__(self, parent, node_type, lineno):
389 self.id = None
390 self.node_type = node_type
391 self.parent = parent
392 self.blendObject = None
393 self.blendData = None
394 self.x3dNode = None # for x3d import only
395 self.parsed = None # We try to reuse objects in a smart way
396 if parent:
397 parent.children.append(self)
399 self.lineno = lineno
401 # This is only set from the root nodes.
402 # Having a filename also denotes a root node
403 self.filename = None
404 self.proto_node = None # proto field definition eg: "field SFColor seatColor .6 .6 .1"
406 # Store in the root node because each inline file needs its own root node and its own namespace
407 self.DEF_NAMESPACE = None
408 self.ROUTE_IPO_NAMESPACE = None
410 self.FIELD_NAMESPACE = None
413 self.PROTO_NAMESPACE = None
415 self.reference = None
417 if node_type == NODE_REFERENCE:
418 # For references, only the parent and ID are needed
419 # the reference its self is assigned on parsing
420 return
422 self.fields = [] # fields have no order, in some cases rool level values are not unique so dont use a dict
424 self.proto_field_defs = [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
425 self.proto_fields = [] # proto field usage "diffuseColor IS seatColor"
426 self.children = []
427 self.array_data = [] # use for arrays of data - should only be for NODE_ARRAY types
429 # Only available from the root node
431 def getFieldDict(self):
432 if self.FIELD_NAMESPACE is not None:
433 return self.FIELD_NAMESPACE
434 else:
435 return self.parent.getFieldDict()
437 def getProtoDict(self):
438 if self.PROTO_NAMESPACE is not None:
439 return self.PROTO_NAMESPACE
440 else:
441 return self.parent.getProtoDict()
443 def getDefDict(self):
444 if self.DEF_NAMESPACE is not None:
445 return self.DEF_NAMESPACE
446 else:
447 return self.parent.getDefDict()
449 def getRouteIpoDict(self):
450 if self.ROUTE_IPO_NAMESPACE is not None:
451 return self.ROUTE_IPO_NAMESPACE
452 else:
453 return self.parent.getRouteIpoDict()
455 def setRoot(self, filename):
456 self.filename = filename
457 # self.FIELD_NAMESPACE = {}
458 self.DEF_NAMESPACE = {}
459 self.ROUTE_IPO_NAMESPACE = {}
460 self.PROTO_NAMESPACE = {}
462 def isRoot(self):
463 if self.filename is None:
464 return False
465 else:
466 return True
468 def getFilename(self):
469 if self.filename:
470 return self.filename
471 elif self.parent:
472 return self.parent.getFilename()
473 else:
474 return None
476 def getRealNode(self):
477 if self.reference:
478 return self.reference
479 else:
480 return self
482 def getSpec(self):
483 self_real = self.getRealNode()
484 try:
485 return self_real.id[-1] # its possible this node has no spec
486 except:
487 return None
489 def findSpecRecursive(self, spec):
490 self_real = self.getRealNode()
491 if spec == self_real.getSpec():
492 return self
494 for child in self_real.children:
495 if child.findSpecRecursive(spec):
496 return child
498 return None
500 def getPrefix(self):
501 if self.id:
502 return self.id[0]
503 return None
505 def getSpecialTypeName(self, typename):
506 self_real = self.getRealNode()
507 try:
508 return self_real.id[list(self_real.id).index(typename) + 1]
509 except:
510 return None
512 def getDefName(self):
513 return self.getSpecialTypeName('DEF')
515 def getProtoName(self):
516 return self.getSpecialTypeName('PROTO')
518 def getExternprotoName(self):
519 return self.getSpecialTypeName('EXTERNPROTO')
521 def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
522 self_real = self.getRealNode()
523 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
524 if type(node_spec) == str:
525 return [child for child in self_real.children if child.getSpec() == node_spec]
526 else:
527 # Check inside a list of optional types
528 return [child for child in self_real.children if child.getSpec() in node_spec]
530 def getChildrenBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
531 self_real = self.getRealNode()
532 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
533 return [child for child in self_real.children if cond(child.getSpec())]
535 def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
536 # Use in cases where there is only ever 1 child of this type
537 ls = self.getChildrenBySpec(node_spec)
538 if ls:
539 return ls[0]
540 else:
541 return None
543 def getChildBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
544 # Use in cases where there is only ever 1 child of this type
545 ls = self.getChildrenBySpecCondition(cond)
546 if ls:
547 return ls[0]
548 else:
549 return None
551 def getChildrenByName(self, node_name): # type could be geometry, children, appearance
552 self_real = self.getRealNode()
553 return [child for child in self_real.children if child.id if child.id[0] == node_name]
555 def getChildByName(self, node_name):
556 self_real = self.getRealNode()
557 for child in self_real.children:
558 if child.id and child.id[0] == node_name: # and child.id[-1]==node_spec:
559 return child
561 def getSerialized(self, results, ancestry):
562 """ Return this node and all its children in a flat list """
563 ancestry = ancestry[:] # always use a copy
565 # self_real = self.getRealNode()
567 results.append((self, tuple(ancestry)))
568 ancestry.append(self)
569 for child in self.getRealNode().children:
570 if child not in ancestry:
571 # We dont want to load proto's, they are only references
572 # We could enforce this elsewhere
574 # Only add this in a very special case
575 # where the parent of this object is not the real parent
576 # - In this case we have added the proto as a child to a node instancing it.
577 # This is a bit arbitrary, but its how Proto's are done with this importer.
578 if child.getProtoName() is None and child.getExternprotoName() is None:
579 child.getSerialized(results, ancestry)
580 else:
582 if DEBUG:
583 print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec())
585 self_spec = self.getSpec()
587 if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec:
588 #if DEBUG:
589 # "FoundProto!"
590 child.getSerialized(results, ancestry)
592 return results
594 def searchNodeTypeID(self, node_spec, results):
595 self_real = self.getRealNode()
596 # print(self.lineno, self.id)
597 if self_real.id and self_real.id[-1] == node_spec: # use last element, could also be only element
598 results.append(self_real)
599 for child in self_real.children:
600 child.searchNodeTypeID(node_spec, results)
601 return results
603 def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
604 self_real = self.getRealNode() # in case we're an instance
606 for f in self_real.fields:
607 # print(f)
608 if f and f[0] == field:
609 # print('\tfound field', f)
611 if len(f) >= 3 and f[1] == 'IS': # eg: 'diffuseColor IS legColor'
612 field_id = f[2]
614 # print("\n\n\n\n\n\nFOND IS!!!")
615 f_proto_lookup = None
616 f_proto_child_lookup = None
617 i = len(ancestry)
618 while i:
619 i -= 1
620 node = ancestry[i]
621 node = node.getRealNode()
623 # proto settings are stored in "self.proto_node"
624 if node.proto_node:
625 # Get the default value from the proto, this can be overwritten by the proto instance
626 # 'field SFColor legColor .8 .4 .7'
627 if AS_CHILD:
628 for child in node.proto_node.children:
629 #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
630 if child.id and ('point' in child.id or 'points' in child.id):
631 f_proto_child_lookup = child
633 else:
634 for f_def in node.proto_node.proto_field_defs:
635 if len(f_def) >= 4:
636 if f_def[0] == 'field' and f_def[2] == field_id:
637 f_proto_lookup = f_def[3:]
639 # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
640 # This is the setting as defined by the instance, including this setting is optional,
641 # and will override the default PROTO value
642 # eg: 'legColor 1 0 0'
643 if AS_CHILD:
644 for child in node.children:
645 if child.id and child.id[0] == field_id:
646 f_proto_child_lookup = child
647 else:
648 for f_def in node.fields:
649 if len(f_def) >= 2:
650 if f_def[0] == field_id:
651 if DEBUG:
652 print("getFieldName(), found proto", f_def)
653 f_proto_lookup = f_def[1:]
655 if AS_CHILD:
656 if f_proto_child_lookup:
657 if DEBUG:
658 print("getFieldName() - AS_CHILD=True, child found")
659 print(f_proto_child_lookup)
660 return f_proto_child_lookup
661 else:
662 return f_proto_lookup
663 else:
664 if AS_CHILD:
665 return None
666 else:
667 # Not using a proto
668 return f[1:]
669 # print('\tfield not found', field)
671 # See if this is a proto name
672 if AS_CHILD:
673 for child in self_real.children:
674 if child.id and len(child.id) == 1 and child.id[0] == field:
675 return child
677 return None
679 def getFieldAsInt(self, field, default, ancestry):
680 self_real = self.getRealNode() # in case we're an instance
682 f = self_real.getFieldName(field, ancestry)
683 if f is None:
684 return default
685 if ',' in f:
686 f = f[:f.index(',')] # strip after the comma
688 if len(f) != 1:
689 print('\t"%s" wrong length for int conversion for field "%s"' % (f, field))
690 return default
692 try:
693 return int(f[0])
694 except:
695 print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field))
696 return default
698 def getFieldAsFloat(self, field, default, ancestry):
699 self_real = self.getRealNode() # in case we're an instance
701 f = self_real.getFieldName(field, ancestry)
702 if f is None:
703 return default
704 if ',' in f:
705 f = f[:f.index(',')] # strip after the comma
707 if len(f) != 1:
708 print('\t"%s" wrong length for float conversion for field "%s"' % (f, field))
709 return default
711 try:
712 return float(f[0])
713 except:
714 print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field))
715 return default
717 def getFieldAsFloatTuple(self, field, default, ancestry):
718 self_real = self.getRealNode() # in case we're an instance
720 f = self_real.getFieldName(field, ancestry)
721 if f is None:
722 return default
723 # if ',' in f: f = f[:f.index(',')] # strip after the comma
725 if len(f) < 1:
726 print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field))
727 return default
729 ret = []
730 for v in f:
731 if v != ',':
732 try:
733 ret.append(float(v))
734 except:
735 break # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
736 # print(ret)
738 if ret:
739 return ret
740 if not ret:
741 print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field))
742 return default
744 def getFieldAsBool(self, field, default, ancestry):
745 self_real = self.getRealNode() # in case we're an instance
747 f = self_real.getFieldName(field, ancestry)
748 if f is None:
749 return default
750 if ',' in f:
751 f = f[:f.index(',')] # strip after the comma
753 if len(f) != 1:
754 print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field))
755 return default
757 if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE':
758 return True
759 elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE':
760 return False
761 else:
762 print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field))
763 return default
765 def getFieldAsString(self, field, default, ancestry):
766 self_real = self.getRealNode() # in case we're an instance
768 f = self_real.getFieldName(field, ancestry)
769 if f is None:
770 return default
771 if len(f) < 1:
772 print('\t"%s" wrong length for string conversion for field "%s"' % (f, field))
773 return default
775 if len(f) > 1:
776 # String may contain spaces
777 st = ' '.join(f)
778 else:
779 st = f[0]
781 # X3D HACK
782 if self.x3dNode:
783 return st
785 if st[0] == '"' and st[-1] == '"':
786 return st[1:-1]
787 else:
788 print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field))
789 return default
791 def getFieldAsArray(self, field, group, ancestry):
793 For this parser arrays are children
796 def array_as_number(array_string):
797 array_data = []
798 try:
799 array_data = [int(val, 0) for val in array_string]
800 except:
801 try:
802 array_data = [float(val) for val in array_string]
803 except:
804 print('\tWarning, could not parse array data from field')
806 return array_data
808 self_real = self.getRealNode() # in case we're an instance
810 child_array = self_real.getFieldName(field, ancestry, True, SPLIT_COMMAS=True)
812 #if type(child_array)==list: # happens occasionally
813 # array_data = child_array
815 if child_array is None:
816 # For x3d, should work ok with vrml too
817 # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
818 data_split = self.getFieldName(field, ancestry, SPLIT_COMMAS=True)
819 if not data_split:
820 return []
822 array_data = array_as_number(data_split)
824 elif type(child_array) == list:
825 # x3d creates these
826 array_data = array_as_number(child_array)
827 else:
828 # print(child_array)
829 # Normal vrml
830 array_data = child_array.array_data
832 # print('array_data', array_data)
833 if group == -1 or len(array_data) == 0:
834 return array_data
836 # We want a flat list
837 flat = True
838 for item in array_data:
839 if type(item) == list:
840 flat = False
841 break
843 # make a flat array
844 if flat:
845 flat_array = array_data # we are already flat.
846 else:
847 flat_array = []
849 def extend_flat(ls):
850 for item in ls:
851 if type(item) == list:
852 extend_flat(item)
853 else:
854 flat_array.append(item)
856 extend_flat(array_data)
858 # We requested a flat array
859 if group == 0:
860 return flat_array
862 new_array = []
863 sub_array = []
865 for item in flat_array:
866 sub_array.append(item)
867 if len(sub_array) == group:
868 new_array.append(sub_array)
869 sub_array = []
871 if sub_array:
872 print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array)
874 return new_array
876 def getFieldAsStringArray(self, field, ancestry):
878 Get a list of strings
880 self_real = self.getRealNode() # in case we're an instance
882 child_array = None
883 for child in self_real.children:
884 if child.id and len(child.id) == 1 and child.id[0] == field:
885 child_array = child
886 break
887 if not child_array:
888 return []
890 # each string gets its own list, remove ""'s
891 try:
892 new_array = [f[0][1:-1] for f in child_array.fields]
893 except:
894 print('\twarning, string array could not be made')
895 new_array = []
897 return new_array
899 def getLevel(self):
900 # Ignore self_real
901 level = 0
902 p = self.parent
903 while p:
904 level += 1
905 p = p.parent
906 if not p:
907 break
909 return level
911 def __repr__(self):
912 level = self.getLevel()
913 ind = ' ' * level
914 if self.node_type == NODE_REFERENCE:
915 brackets = ''
916 elif self.node_type == NODE_NORMAL:
917 brackets = '{}'
918 else:
919 brackets = '[]'
921 if brackets:
922 text = ind + brackets[0] + '\n'
923 else:
924 text = ''
926 text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno)
928 if self.node_type == NODE_REFERENCE:
929 text += ind + "(reference node)\n"
930 return text
932 if self.proto_node:
933 text += ind + 'PROTO NODE...\n'
934 text += str(self.proto_node)
935 text += ind + 'PROTO NODE_DONE\n'
937 text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'
939 for i, item in enumerate(self.fields):
940 text += ind + 'FIELD:\n'
941 text += ind + str(item) + '\n'
943 text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n'
945 for i, item in enumerate(self.proto_field_defs):
946 text += ind + 'PROTO_FIELD:\n'
947 text += ind + str(item) + '\n'
949 text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n'
950 #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'
952 text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n'
953 for i, child in enumerate(self.children):
954 text += ind + ('CHILD%d:\n' % i)
955 text += str(child)
957 text += '\n' + ind + brackets[1]
959 return text
961 def parse(self, i, IS_PROTO_DATA=False):
962 new_i = self.__parse(i, IS_PROTO_DATA)
964 # print(self.id, self.getFilename())
966 # Check if this node was an inline or externproto
968 url_ls = []
970 if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
971 ancestry = [] # Warning! - PROTO's using this wont work at all.
972 url = self.getFieldAsString('url', None, ancestry)
973 if url:
974 url_ls = [(url, None)]
975 del ancestry
977 elif self.getExternprotoName():
978 # externproto
979 url_ls = []
980 for f in self.fields:
982 if type(f) == str:
983 f = [f]
985 for ff in f:
986 for f_split in ff.split('"'):
987 # print(f_split)
988 # "someextern.vrml#SomeID"
989 if '#' in f_split:
991 f_split, f_split_id = f_split.split('#') # there should only be 1 # anyway
993 url_ls.append((f_split, f_split_id))
994 else:
995 url_ls.append((f_split, None))
997 # Was either an Inline or an EXTERNPROTO
998 if url_ls:
1000 # print(url_ls)
1002 for url, extern_key in url_ls:
1003 print(url)
1004 urls = []
1005 urls.append(url)
1006 urls.append(bpy.path.resolve_ncase(urls[-1]))
1008 urls.append(os.path.join(os.path.dirname(self.getFilename()), url))
1009 urls.append(bpy.path.resolve_ncase(urls[-1]))
1011 urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url)))
1012 urls.append(bpy.path.resolve_ncase(urls[-1]))
1014 try:
1015 url = [url for url in urls if os.path.exists(url)][0]
1016 url_found = True
1017 except:
1018 url_found = False
1020 if not url_found:
1021 print('\tWarning: Inline URL could not be found:', url)
1022 else:
1023 if url == self.getFilename():
1024 print('\tWarning: cant Inline yourself recursively:', url)
1025 else:
1027 try:
1028 data = gzipOpen(url)
1029 except:
1030 print('\tWarning: cant open the file:', url)
1031 data = None
1033 if data:
1034 # Tricky - inline another VRML
1035 print('\tLoading Inline:"%s"...' % url)
1037 # Watch it! - backup lines
1038 lines_old = lines[:]
1040 lines[:] = vrmlFormat(data)
1042 lines.insert(0, '{')
1043 lines.insert(0, 'root_node____')
1044 lines.append('}')
1046 ff = open('/tmp/test.txt', 'w')
1047 ff.writelines([l+'\n' for l in lines])
1050 child = vrmlNode(self, NODE_NORMAL, -1)
1051 child.setRoot(url) # initialized dicts
1052 child.parse(0)
1054 # if self.getExternprotoName():
1055 if self.getExternprotoName():
1056 if not extern_key: # if none is specified - use the name
1057 extern_key = self.getSpec()
1059 if extern_key:
1061 self.children.remove(child)
1062 child.parent = None
1064 extern_child = child.findSpecRecursive(extern_key)
1066 if extern_child:
1067 self.children.append(extern_child)
1068 extern_child.parent = self
1070 if DEBUG:
1071 print("\tEXTERNPROTO ID found!:", extern_key)
1072 else:
1073 print("\tEXTERNPROTO ID not found!:", extern_key)
1075 # Watch it! - restore lines
1076 lines[:] = lines_old
1078 return new_i
1080 def __parse(self, i, IS_PROTO_DATA=False):
1082 print('parsing at', i, end="")
1083 print(i, self.id, self.lineno)
1085 l = lines[i]
1087 if l == '[':
1088 # An anonymous list
1089 self.id = None
1090 i += 1
1091 else:
1092 words = []
1094 node_type, new_i = is_nodeline(i, words)
1095 if not node_type: # fail for parsing new node.
1096 print("Failed to parse new node")
1097 raise ValueError
1099 if self.node_type == NODE_REFERENCE:
1100 # Only assign the reference and quit
1101 key = words[words.index('USE') + 1]
1102 self.id = (words[0],)
1104 self.reference = self.getDefDict()[key]
1105 return new_i
1107 self.id = tuple(words)
1109 # fill in DEF/USE
1110 key = self.getDefName()
1111 if key is not None:
1112 self.getDefDict()[key] = self
1114 key = self.getProtoName()
1115 if not key:
1116 key = self.getExternprotoName()
1118 proto_dict = self.getProtoDict()
1119 if key is not None:
1120 proto_dict[key] = self
1122 # Parse the proto nodes fields
1123 self.proto_node = vrmlNode(self, NODE_ARRAY, new_i)
1124 new_i = self.proto_node.parse(new_i)
1126 self.children.remove(self.proto_node)
1128 # print(self.proto_node)
1130 new_i += 1 # skip past the {
1132 else: # If we're a proto instance, add the proto node as our child.
1133 spec = self.getSpec()
1134 try:
1135 self.children.append(proto_dict[spec])
1136 #pass
1137 except:
1138 pass
1140 del spec
1142 del proto_dict, key
1144 i = new_i
1146 # print(self.id)
1147 ok = True
1148 while ok:
1149 if i >= len(lines):
1150 return len(lines) - 1
1152 l = lines[i]
1153 # print('\tDEBUG:', i, self.node_type, l)
1154 if l == '':
1155 i += 1
1156 continue
1158 if l == '}':
1159 if self.node_type != NODE_NORMAL: # also ends proto nodes, we may want a type for these too.
1160 print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type))
1161 if DEBUG:
1162 raise ValueError
1163 ### print("returning", i)
1164 return i + 1
1165 if l == ']':
1166 if self.node_type != NODE_ARRAY:
1167 print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type))
1168 if DEBUG:
1169 raise ValueError
1170 ### print("returning", i)
1171 return i + 1
1173 node_type, new_i = is_nodeline(i, [])
1174 if node_type: # check text\n{
1175 child = vrmlNode(self, node_type, i)
1176 i = child.parse(i)
1178 elif l == '[': # some files have these anonymous lists
1179 child = vrmlNode(self, NODE_ARRAY, i)
1180 i = child.parse(i)
1182 elif is_numline(i):
1183 l_split = l.split(',')
1185 values = None
1186 # See if each item is a float?
1188 for num_type in (int, float):
1189 try:
1190 values = [num_type(v) for v in l_split]
1191 break
1192 except:
1193 pass
1195 try:
1196 values = [[num_type(v) for v in segment.split()] for segment in l_split]
1197 break
1198 except:
1199 pass
1201 if values is None: # dont parse
1202 values = l_split
1204 # This should not extend over multiple lines however it is possible
1205 # print(self.array_data)
1206 if values:
1207 self.array_data.extend(values)
1208 i += 1
1209 else:
1210 words = l.split()
1211 if len(words) > 2 and words[1] == 'USE':
1212 vrmlNode(self, NODE_REFERENCE, i)
1213 else:
1215 # print("FIELD", i, l)
1217 #words = l.split()
1218 ### print('\t\ttag', i)
1219 # this is a tag/
1220 # print(words, i, l)
1221 value = l
1222 # print(i)
1223 # javastrips can exist as values.
1224 quote_count = l.count('"')
1225 if quote_count % 2: # odd number?
1226 # print('MULTILINE')
1227 while 1:
1228 i += 1
1229 l = lines[i]
1230 quote_count = l.count('"')
1231 if quote_count % 2: # odd number?
1232 value += '\n' + l[:l.rfind('"')]
1233 break # assume
1234 else:
1235 value += '\n' + l
1237 # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
1238 value_all = shlex.split(value, posix=False)
1240 for value in vrml_split_fields(value_all):
1241 # Split
1243 if value[0] == 'field':
1244 # field SFFloat creaseAngle 4
1245 self.proto_field_defs.append(value)
1246 else:
1247 self.fields.append(value)
1248 i += 1
1250 # This is a prerequisite for DEF/USE-based material caching
1251 def canHaveReferences(self):
1252 return self.node_type == NODE_NORMAL and self.getDefName()
1254 # This is a prerequisite for raw XML-based material caching. For now, only for X3D
1255 def desc(self):
1256 return None
def gzipOpen(path):
    """Read a possibly gzip-compressed text file.

    Tries *path* as a gzip archive first; on failure falls back to plain
    text. Returns the contents as a str decoded as UTF-8 (undecodable
    bytes preserved via surrogateescape), or None if unreadable.
    """
    import gzip

    data = None
    try:
        # Close the handle deterministically (the old code leaked it).
        with gzip.open(path, 'r') as gz:
            data = gz.read()
    except Exception:
        # Not gzip (BadGzipFile), truncated (EOFError), missing, etc.
        # - deliberately best-effort, fall through to plain text.
        pass

    if data is None:
        try:
            # 'rU' was removed in Python 3.11; universal newlines are the
            # default in text mode anyway, so plain 'r' is equivalent.
            with open(path, 'r', encoding='utf-8', errors='surrogateescape') as filehandle:
                data = filehandle.read()
        except Exception:
            import traceback
            traceback.print_exc()
    else:
        # gzip gave us bytes; decode the same way the text path reads.
        data = data.decode(encoding='utf-8', errors='surrogateescape')

    return data
def vrml_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (vrmlNode, '') or (None, 'Error String')
    """
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Stripped above; `lines` is the module-level scratch list the parser reads.
    lines[:] = vrmlFormat(data)

    lines.insert(0, '{')
    lines.insert(0, 'dymmy_node')
    lines.append('}')
    # Use for testing our parsed output, so we can check on line numbers.
    # Guarded by DEBUG - an importer must not write /tmp files unconditionally.
    if DEBUG:
        ff = open('/tmp/test.txt', 'w')
        ff.writelines([l + '\n' for l in lines])
        ff.close()

    # Now evaluate it
    node_type, new_i = is_nodeline(0, [])
    if not node_type:
        return None, 'Error: VRML file has no starting Node'

    # Trick to make sure we get all root nodes.
    lines.insert(0, '{')
    lines.insert(0, 'root_node____')  # important the name starts with an ascii char
    lines.append('}')

    root = vrmlNode(None, NODE_NORMAL, -1)
    root.setRoot(path)  # we need to set the root so we have a namespace and know the path in case of inlineing

    # Parse recursively
    root.parse(0)

    # This prints a load of text
    if DEBUG:
        print(root)

    return root, ''
1329 # ====================== END VRML
1331 # ====================== X3d Support
# Same as vrml but replace the parser
class x3dNode(vrmlNode):
    """XML (X3D) flavour of vrmlNode: same public interface, but fields and
    children are read from an xml.dom.minidom element instead of the VRML
    text parser."""

    def __init__(self, parent, node_type, x3dNode):
        vrmlNode.__init__(self, parent, node_type, -1)
        self.x3dNode = x3dNode  # the backing minidom element

    def parse(self, IS_PROTO_DATA=False):
        # print(self.x3dNode.tagName)
        # parse_position is attached to every element by the SAX hook
        # installed in x3d_parse() below.
        self.lineno = self.x3dNode.parse_position[0]

        define = self.x3dNode.getAttributeNode('DEF')
        if define:
            # Register this node so later USE attributes can resolve to it.
            self.getDefDict()[define.value] = self
        else:
            use = self.x3dNode.getAttributeNode('USE')
            if use:
                try:
                    self.reference = self.getDefDict()[use.value]
                    self.node_type = NODE_REFERENCE
                except:
                    # Unresolvable USE: warn and detach ourselves from the tree.
                    print('\tWarning: reference', use.value, 'not found')
                    self.parent.children.remove(self)

                return

        for x3dChildNode in self.x3dNode.childNodes:
            # Skip non-element XML nodes (text/comments/CDATA).
            if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}:
                continue

            node_type = NODE_NORMAL
            # print(x3dChildNode, dir(x3dChildNode))
            if x3dChildNode.getAttributeNode('USE'):
                node_type = NODE_REFERENCE

            child = x3dNode(self, node_type, x3dChildNode)
            child.parse()

        # TODO - x3d Inline

    def getSpec(self):
        return self.x3dNode.tagName  # should match vrml spec

    # Used to retain object identifiers from X3D to Blender
    def getDefName(self):
        node_id = self.x3dNode.getAttributeNode('DEF')
        if node_id:
            return node_id.value
        node_id = self.x3dNode.getAttributeNode('USE')
        if node_id:
            return "USE_" + node_id.value
        return None

    # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
    # getFieldAsArray getFieldAsBool etc
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        # ancestry and AS_CHILD are ignored, only used for VRML now

        self_real = self.getRealNode()  # in case we're an instance
        # NOTE(review): self_real is never used below; the lookup arguably
        # should go through self_real.x3dNode -- confirm before changing.
        field_xml = self.x3dNode.getAttributeNode(field)
        if field_xml:
            value = field_xml.value

            # We may want to edit. for x3d specific stuff
            # Sucks a bit to return the field name in the list but vrml expects this :/
            if SPLIT_COMMAS:
                value = value.replace(",", " ")
            return value.split()
        else:
            return None

    def canHaveReferences(self):
        return self.x3dNode.getAttributeNode('DEF')

    def desc(self):
        return self.getRealNode().x3dNode.toxml()
def x3d_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (x3dNode, '') or (None, 'Error String')
    """
    import xml.dom.minidom
    import xml.sax
    from xml.sax import handler

    # Leftover pre-validation parse, disabled: the file is parsed again below
    # with the line-number-aware parser, and a console error there is more
    # useful than parsing the whole document twice.
    # try: doc = xml.dom.minidom.parse(path)
    # except: return None, 'Could not parse this X3D file, XML error'

    # Could add a try/except here, but a console error is more useful.
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Enable line number reporting in the parser - kinda brittle
    def set_content_handler(dom_handler):
        def startElementNS(name, tagName, attrs):
            orig_start_cb(name, tagName, attrs)
            cur_elem = dom_handler.elementStack[-1]
            # Record the expat position so x3dNode.parse() can report it.
            cur_elem.parse_position = (parser._parser.CurrentLineNumber, parser._parser.CurrentColumnNumber)

        orig_start_cb = dom_handler.startElementNS
        dom_handler.startElementNS = startElementNS
        orig_set_content_handler(dom_handler)

    parser = xml.sax.make_parser()
    orig_set_content_handler = parser.setContentHandler
    # Don't fetch external general/parameter entities (also a security measure).
    parser.setFeature(handler.feature_external_ges, False)
    parser.setFeature(handler.feature_external_pes, False)
    parser.setContentHandler = set_content_handler

    doc = xml.dom.minidom.parseString(data, parser)

    try:
        x3dnode = doc.getElementsByTagName('X3D')[0]
    except IndexError:  # no X3D root element
        return None, 'Not a valid x3d document, cannot import'

    bpy.ops.object.select_all(action='DESELECT')

    root = x3dNode(None, NODE_NORMAL, x3dnode)
    root.setRoot(path)  # so images and Inline's we load have a relative path
    root.parse()

    return root, ''
1462 ## f = open('/_Cylinder.wrl', 'r')
1463 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1464 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1465 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
# Developer smoke-test left over from the original author. Running a shell
# `find` and parsing every .wrl on disk at import time is never wanted in
# the add-on, so it is kept here disabled, for manual testing only.
# import os
# files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
# files.sort()
# tot = len(files)
# for i, f in enumerate(files):
#     #if i < 801:
#     #    continue
#
#     f = f.strip()
#     print(f, i, tot)
#     vrml_parse(f)
1480 # NO BLENDER CODE ABOVE THIS LINE.
1481 # -----------------------------------------------------------------------------------
1482 import bpy
1483 from bpy_extras import image_utils
1484 from mathutils import Vector, Matrix, Quaternion
1486 GLOBALS = {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Convert a VRML axis/angle rotation (x, y, z, angle) to a 4x4 Matrix."""
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 matrix carrying the given per-axis scale on its diagonal."""
    mat = Matrix()  # identity, 4x4 by default
    for axis in range(3):
        mat[axis][axis] = sca[axis]
    return mat
def translateTransform(node, ancestry):
    """Compose a Transform node's fields into one 4x4 matrix.

    Order per the VRML spec: T * C * R * SR * S * -SR * -C.
    Absent fields simply drop out of the product.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    if cent:
        cent_mat = Matrix.Translation(cent)
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    rot_mat = translateRotation(rot) if rot else None
    sca_mat = translateScale(sca) if sca else None

    if scaori:
        scaori_mat = translateRotation(scaori)
        scaori_imat = scaori_mat.inverted()
    else:
        scaori_mat = scaori_imat = None

    tx_mat = Matrix.Translation(tx) if tx else None

    new_mat = Matrix()
    for mtx in (tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Compose a TextureTransform node's 2D fields into one 4x4 matrix."""
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    if cent:
        # cent is at a corner by default
        cent_mat = Matrix.Translation(Vector(cent).to_3d())
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    # 2D rotation in UV space is a rotation about Z.
    rot_mat = Matrix.Rotation(rot, 4, 'Z') if rot else None
    sca_mat = translateScale((sca[0], sca[1], 0.0)) if sca else None
    tx_mat = Matrix.Translation(Vector(tx).to_3d()) if tx else None

    new_mat = Matrix()

    # Composition order as specified in the VRML97 docs.
    for mtx in (cent_imat, sca_mat, rot_mat, cent_mat, tx_mat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate every Transform ancestor (and node itself, if it is one)
    into a single world-space matrix, pre-multiplied by global_matrix."""
    transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)

    if mtx is None:
        mtx = Matrix()

    # Innermost transform applies first, hence the reversed walk.
    for node_tx in reversed(transform_nodes):
        mtx = translateTransform(node_tx, ancestry) @ mtx

    # worldspace matrix
    return global_matrix @ mtx
1605 # -----------------------------------------------------------------------------------
1606 # Mesh import utilities
1608 # Assumes that the mesh has polygons.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Apply a Color/ColorRGBA child of geom as a vertex color layer.

    Colors may be supplied per vertex or per loop; any other count is
    reported on the console and skipped.
    """
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]
        lcol_layer = bpymesh.vertex_colors.new()

        if len(rgb) == len(bpymesh.vertices):
            # Per-vertex colors: expand to per-loop through the vertex index.
            rgb = [rgb[l.vertex_index] for l in bpymesh.loops]
            rgb = tuple(chain(*rgb))
        elif len(rgb) == len(bpymesh.loops):
            rgb = tuple(chain(*rgb))
        else:
            # The closing parenthesis of this call was missing - SyntaxError.
            print(
                "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
                (len(rgb), len(bpymesh.vertices), len(bpymesh.loops))
            )
            return

        lcol_layer.data.foreach_set("color", rgb)
1634 # Assumes that the vertices have not been rearranged compared to the
# source file order, or in the order assumed by the spec (e. g. in
1636 # Elevation, in rows by x).
1637 # Assumes polygons have been set.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Feed a Normal child of geom into the mesh, per vertex or per polygon."""
    normals = geom.getChildBySpec('Normal')
    if not normals:
        return

    per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    target = bpymesh.vertices if per_vertex else bpymesh.polygons
    target.foreach_set("normal", vectors)
1651 # Reads the standard Coordinate object - common for all mesh elements
1652 # Feeds the vertices in the mesh.
1653 # Rearranging the vertex order is a bad idea - other elements
1654 # in X3D might rely on it, if you need to rearrange, please play with
1655 # vertex indices in the polygons instead.
1657 # Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
1658 # brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Read geom's Coordinate child and fill the mesh vertex array.

    Vertex order is kept exactly as in the source file - other X3D
    elements may rely on the indices.
    """
    # We want points here as a flat array, but the caching logic in
    # IndexedFaceSet presumes a 2D one.
    # The case for caching is stronger over there.
    points = geom.getChildBySpec('Coordinate').getFieldAsArray('point', 0, ancestry)
    bpymesh.vertices.add(len(points) // 3)
    bpymesh.vertices.foreach_set("co", points)
1669 # Assumes that the order of vertices matches the source file.
1670 # Relies upon texture coordinates in the X3D node; if a coordinate generation
1671 # algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
1672 # to be implemented by the geometry handler.
1674 # Texture transform is applied in ProcessObject.
def importMesh_ApplyUVs(bpymesh, geom, ancestry):
    """Apply a TextureCoordinate child of geom as a UV layer.

    Assumes the mesh vertex order matches the source file; UVs are looked
    up via each polygon's vertex indices.
    """
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if not tex_coord:
        return

    uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not uvs:
        return

    layer_data = bpymesh.uv_layers.new().data
    flat_uvs = [component
                for poly in bpymesh.polygons
                for vidx in poly.vertices
                for component in uvs[vidx]]
    layer_data.foreach_set('uv', flat_uvs)
1691 # Common steps for all triangle meshes once the geometry has been set:
1692 # normals, vertex colors, and UVs.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry):
    """Common finishing steps for triangle meshes: normals, vertex colors,
    UVs, then validate/update. Returns the mesh for chaining."""
    for apply_step in (importMesh_ApplyNormals,
                       importMesh_ApplyColors,
                       importMesh_ApplyUVs):
        apply_step(bpymesh, geom, ancestry)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
1702 # Assumes that the mesh is stored as polygons and loops, and the premade array
1703 # of texture coordinates follows the loop array.
1704 # The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Write a flat, loop-ordered UV coordinate array into a fresh UV layer."""
    uv_data = bpymesh.uv_layers.new().data
    uv_data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return the sequence unchanged when ccw (counter-clockwise), else reversed."""
    if ccw:
        return r
    return r[::-1]
1713 # -----------------------------------------------------------------------------------
1714 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry):
    """Build a mesh from an IndexedTriangleSet (index = flat triangle list)."""
    # Ignoring solid
    # colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    bpymesh = bpy.data.meshes.new(name="XXX")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    num_polys = len(index) // 3
    if not ccw:
        # Swap the first two corners of each triangle to flip the winding.
        index = [index[3 * tri + corner]
                 for tri in range(num_polys)
                 for corner in (1, 0, 2)]

    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
    bpymesh.polygons.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleStripSet(geom, ancestry):
    """Build a mesh from an IndexedTriangleStripSet (strips separated by -1
    entries in the index array)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # A strip of k entries yields k-2 triangles; each -1 separator costs
    # 3 triangles relative to one continuous strip.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # Yields vertex indices, three per triangle. `odd` alternates the
        # winding within a strip, as triangle strips require.
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                # Strip separator: skip past the -1 and reset the winding.
                i += 3
                odd = cw
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleFanSet(geom, ancestry):
    """Build a mesh from an IndexedTriangleFanSet (fans separated by -1
    entries in the index array)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # A fan of k entries yields k-2 triangles; each -1 separator costs 3.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # i is the absolute index of the current fan's hub; j walks its rim.
        i = 0
        j = 1
        while True:
            yield index[i]
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                # Next fan starts just past the -1 separator. The old code
                # did `i = j + 2`, which is only correct while i == 0, so any
                # third (and later) fan re-read the previous fan's vertices.
                i += j + 2
                j = 1
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleSet(geom, ancestry):
    """Build a mesh from a TriangleSet: every three vertices form a triangle."""
    # Ignoring solid
    # colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    n = len(bpymesh.vertices)
    num_polys = n // 3
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    if ccw:
        fv = list(range(n))
    else:
        # Swap the first two corners of each triangle to flip the winding.
        fv = [3 * tri + corner for tri in range(n // 3) for corner in (1, 0, 2)]
    bpymesh.polygons.foreach_set("vertices", fv)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleStripSet(geom, ancestry):
    """Build a mesh from a TriangleStripSet (stripCount lists strip lengths)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    num_polys = sum(count - 2 for count in counts)
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def strip_corners():
        # Alternate the winding within each strip, as triangle strips do.
        base = 0
        for count in counts:
            for j in range(count - 2):
                yield base + j + (j + cw) % 2
                yield base + j + 1 - (j + cw) % 2
                yield base + j + 2
            base += count

    bpymesh.polygons.foreach_set("vertices", list(strip_corners()))

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleFanSet(geom, ancestry):
    """Build a mesh from a TriangleFanSet (fanCount lists fan lengths)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # Was name="TriangleStripSet" - a copy-paste slip from the strip importer.
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # b is the hub vertex of the current fan; j walks around the rim.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedFaceSet(geom, ancestry):
    """Build a mesh from an IndexedFaceSet, with vertex culling for shared
    Coordinate nodes and per-vertex/per-face normals, colors and UVs."""
    # Saw the following structure in X3Ds: the first mesh has a huge set
    # of vertices and a reasonably sized index. The rest of the meshes
    # reference the Coordinate node from the first one, and have their
    # own reasonably sized indices.
    #
    # In Blender, to the best of my knowledge, there's no way to reuse
    # the vertex set between meshes. So we have culling logic instead -
    # for each mesh, only leave vertices that are used for faces.

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Strip trailing face separators.
    while index and index[-1] == -1:
        del index[-1]

    # Heuristic: cull only when the vertex set is much larger than the index.
    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
        new_index = 0
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
                face = []
        else:
            if cull is not None:
                if not(i in cull):
                    culled_points.append(points[i])
                    cull[i] = new_index
                    uncull.append(i)
                    i = new_index
                    new_index += 1
                else:
                    i = cull[i]
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        # Returns a per-face list of per-vertex attribute indices, matching
        # the winding applied to `faces` above.
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            # Culled meshes need the original indices back for attr lookup.
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f
                  for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces)
                  for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f
                   for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture coordinates (UVs)
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
        tex_index = processPerVertexIndex(tex_index)
        loops = [co for f in tex_index
                 for v in f
                 for co in tex_coord_points[v]]
    else:
        # No explicit UVs: generate them from the bounding box, projecting
        # onto the two largest extents, as the spec's default algorithm does.
        x_min = x_max = y_min = y_max = z_min = z_max = None
        for f in faces:
            # Unused vertices don't participate in size; X3DOM does so
            for v in f:
                (x, y, z) = points[v]
                if x_min is None or x < x_min:
                    x_min = x
                if x_max is None or x > x_max:
                    x_max = x
                if y_min is None or y < y_min:
                    y_min = y
                if y_max is None or y > y_max:
                    y_max = y
                if z_min is None or z < z_min:
                    z_min = z
                if z_max is None or z > z_max:
                    z_max = z

        mins = (x_min, y_min, z_min)
        deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
        axes = [0, 1, 2]
        axes.sort(key=lambda a: (-deltas[a], a))
        # Tuple comparison breaks ties
        (s_axis, t_axis) = axes[0:2]
        s_min = mins[s_axis]
        ds = deltas[s_axis]
        t_min = mins[t_axis]
        dt = deltas[t_axis]

        # Avoid divide by zero T76303.
        if not (ds > 0.0):
            ds = 1.0
        if not (dt > 0.0):
            dt = 1.0

        def generatePointCoords(pt):
            return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
        loops = [co for f in faces
                 for v in f
                 for co in generatePointCoords(points[v])]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry):
    """Build a mesh from an ElevationGrid: a regular grid of quads in the
    XZ plane whose Y values come from the height field."""
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    co = [w for x in range(x_dim) for z in range(z_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    num_polys = (x_dim - 1) * (z_dim - 1)
    bpymesh.loops.add(num_polys * 4)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
    bpymesh.polygons.foreach_set("loop_total", (4,) * num_polys)
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it was important that the final vertex index was not 0
    # (Blender treated it as a triangle then).
    # So simply reversing the face was not an option.
    # With bmesh polygons, this has no importance anymore, but keep existing code for now.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.polygons.foreach_set("vertices", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
            # Array of arrays; no need to flatten
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.vertex_colors.new().data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * x_dim + x,
                                            z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                                            (z + 1) * x_dim + x + 1,
                                            (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)
                            for c in rgb[rgb_idx]])
        else:  # Coloring per face
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * (x_dim - 1) + x,) * 4
                            for c in rgb[rgb_idx]])

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    else:
        uvs = [(i / (x_dim - 1), j / (z_dim - 1))
               for i in range(x_dim)
               for j in range(z_dim)]

    d = bpymesh.uv_layers.new().data
    # Rather than repeat the face/vertex algorithm from above, we read
    # the vertex index back from polygon. Might be suboptimal.
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    d.foreach_set('uv', uvs)  # was `uv` - an undefined name (NameError, T91113)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2153 def importMesh_Extrusion(geom, ancestry):
2154 # Interestingly, the spec doesn't allow for vertex/face colors in this
2155 # element, nor for normals.
2156 # Since coloring and normals are not supported here, and also large
2157 # polygons for caps might be required, we shall use from_pydata().
2159 ccw = geom.getFieldAsBool('ccw', True, ancestry)
2160 begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
2161 end_cap = geom.getFieldAsBool('endCap', True, ancestry)
2162 cross = geom.getFieldAsArray('crossSection', 2, ancestry)
2163 if not cross:
2164 cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2165 spine = geom.getFieldAsArray('spine', 3, ancestry)
2166 if not spine:
2167 spine = ((0, 0, 0), (0, 1, 0))
2168 orient = geom.getFieldAsArray('orientation', 4, ancestry)
2169 if orient:
2170 orient = [Quaternion(o[:3], o[3]).to_matrix()
2171 if o[3] else None for o in orient]
2172 scale = geom.getFieldAsArray('scale', 2, ancestry)
2173 if scale:
2174 scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
2175 if s[0] != 1 or s[1] != 1 else None for s in scale]
2177 # Special treatment for the closed spine and cross section.
2178 # Let's save some memory by not creating identical but distinct vertices;
2179 # later we'll introduce conditional logic to link the last vertex with
2180 # the first one where necessary.
2181 cross_closed = cross[0] == cross[-1]
2182 if cross_closed:
2183 cross = cross[:-1]
2184 nc = len(cross)
2185 cross = [Vector((c[0], 0, c[1])) for c in cross]
2186 ncf = nc if cross_closed else nc - 1
2187 # Face count along the cross; for closed cross, it's the same as the
2188 # respective vertex count
2190 spine_closed = spine[0] == spine[-1]
2191 if spine_closed:
2192 spine = spine[:-1]
2193 ns = len(spine)
2194 spine = [Vector(s) for s in spine]
2195 nsf = ns if spine_closed else ns - 1
2197 # This will be used for fallback, where the current spine point joins
2198 # two collinear spine segments. No need to recheck the case of the
2199 # closed spine/last-to-first point juncture; if there's an angle there,
2200 # it would kick in on the first iteration of the main loop by spine.
2201 def findFirstAngleNormal():
2202 for i in range(1, ns - 1):
2203 spt = spine[i]
2204 z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
2205 if z.length > EPSILON:
2206 return z
2207 # All the spines are collinear. Fallback to the rotated source
2208 # XZ plane.
2209 # TODO: handle the situation where the first two spine points match
2210 v = spine[1] - spine[0]
2211 orig_y = Vector((0, 1, 0))
2212 orig_z = Vector((0, 0, 1))
2213 if v.cross(orig_y).length >= EPSILON:
2214 # Spine at angle with global y - rotate the z accordingly
2215 orig_z.rotate(orig_y.rotation_difference(v))
2216 return orig_z
2218 verts = []
2219 z = None
2220 for i, spt in enumerate(spine):
2221 if (i > 0 and i < ns - 1) or spine_closed:
2222 snext = spine[(i + 1) % ns]
2223 sprev = spine[(i - 1 + ns) % ns]
2224 y = snext - sprev
2225 vnext = snext - spt
2226 vprev = sprev - spt
2227 try_z = vnext.cross(vprev)
2228 # Might be zero, then all kinds of fallback
2229 if try_z.length > EPSILON:
2230 if z is not None and try_z.dot(z) < 0:
2231 try_z.negate()
2232 z = try_z
2233 elif not z: # No z, and no previous z.
2234 # Look ahead, see if there's at least one point where
2235 # spines are not collinear.
2236 z = findFirstAngleNormal()
2237 elif i == 0: # And non-crossed
2238 snext = spine[i + 1]
2239 y = snext - spt
2240 z = findFirstAngleNormal()
2241 else: # last point and not crossed
2242 sprev = spine[i - 1]
2243 y = spt - sprev
2244 # If there's more than one point in the spine, z is already set.
2245 # One point in the spline is an error anyway.
2247 x = y.cross(z)
2248 m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
2249 # Columns are the unit vectors for the xz plane for the cross-section
2250 m.normalize()
2251 if orient:
2252 mrot = orient[i] if len(orient) > 1 else orient[0]
2253 if mrot:
2254 m @= mrot # Not sure about this. Counterexample???
2255 if scale:
2256 mscale = scale[i] if len(scale) > 1 else scale[0]
2257 if mscale:
2258 m @= mscale
2259 # First the cross-section 2-vector is scaled,
2260 # then applied to the xz plane unit vectors
2261 for cpt in cross:
2262 verts.append((spt + m @ cpt).to_tuple())
2263 # Could've done this with a single 4x4 matrix... Oh well
2265 # The method from_pydata() treats correctly quads with final vertex
2266 # index being zero.
2267 # So we just flip the vertices if ccw is off.
2269 faces = []
2270 if begin_cap:
2271 faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
2273 # Order of edges in the face: forward along cross, forward along spine,
2274 # backward along cross, backward along spine, flipped if now ccw.
2275 # This order is assumed later in the texture coordinate assignment;
2276 # please don't change without syncing.
2278 faces += [flip((
2279 s * nc + c,
2280 s * nc + (c + 1) % nc,
2281 (s + 1) * nc + (c + 1) % nc,
2282 (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
2284 if spine_closed:
2285 # The faces between the last and the first spine points
2286 b = (ns - 1) * nc
2287 faces += [flip((
2288 b + c,
2289 b + (c + 1) % nc,
2290 (c + 1) % nc,
2291 c), ccw) for c in range(ncf)]
2293 if end_cap:
2294 faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
2296 bpymesh = bpy.data.meshes.new(name="Extrusion")
2297 bpymesh.from_pydata(verts, [], faces)
2299 # The way we deal with textures in triangular meshes doesn't apply.
2300 # The structure of the loop array goes: cap, side, cap
2301 if begin_cap or end_cap: # Need dimensions
2302 x_min = x_max = z_min = z_max = None
2303 for c in cross:
2304 (x, z) = (c.x, c.z)
2305 if x_min is None or x < x_min:
2306 x_min = x
2307 if x_max is None or x > x_max:
2308 x_max = x
2309 if z_min is None or z < z_min:
2310 z_min = z
2311 if z_max is None or z > z_max:
2312 z_max = z
2313 dx = x_max - x_min
2314 dz = z_max - z_min
2315 cap_scale = dz if dz > dx else dx
2317 # Takes an index in the cross array, returns scaled
2318 # texture coords for cap texturing purposes
2319 def scaledLoopVertex(i):
2320 c = cross[i]
2321 return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
2323 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2325 loops = []
2326 mloops = bpymesh.loops
2327 if begin_cap: # vertex indices match the indices in cross
2328 # Rely on the loops in the mesh; don't repeat the face
2329 # generation logic here
2330 loops += [co for i in range(nc)
2331 for co in scaledLoopVertex(mloops[i].vertex_index)]
2333 # Sides
2334 # Same order of vertices as in face generation
2335 # We don't rely on the loops in the mesh; instead,
2336 # we repeat the face generation logic.
2337 loops += [co for s in range(nsf)
2338 for c in range(ncf)
2339 for v in flip(((c / ncf, s / nsf),
2340 ((c + 1) / ncf, s / nsf),
2341 ((c + 1) / ncf, (s + 1) / nsf),
2342 (c / ncf, (s + 1) / nsf)), ccw) for co in v]
2344 if end_cap:
2345 # Base loop index for end cap
2346 lb = ncf * nsf * 4 + (nc if begin_cap else 0)
2347 # Rely on the loops here too.
2348 loops += [co for i in range(nc) for co
2349 in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
2350 importMesh_ApplyTextureToLoops(bpymesh, loops)
2352 bpymesh.validate()
2353 bpymesh.update()
2354 return bpymesh
2357 # -----------------------------------------------------------------------------------
2358 # Line and point sets
def importMesh_LineSet(geom, ancestry):
    """Build a Blender 3D poly-curve from an X3D LineSet node."""
    # TODO: line display properties are ignored
    # Per-vertex color is ignored
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    # Source points are triples; Blender spline points are 4D (x, y, z, w)
    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)
    begin = 0
    for count in counts:
        spline = bpycurve.splines.new('POLY')
        spline.points.add(count - 1)  # a fresh spline already has one point
        flat_co = [axis
                   for pt in src_points[begin:begin + count]
                   for axis in (pt[0], pt[1], pt[2], 0)]
        spline.points.foreach_set('co', flat_co)
        begin += count
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry):
    """Build a Blender 3D poly-curve from an IndexedLineSet node.

    Returns None (with a warning) when the node carries no points.
    """
    # VRML not x3d
    # coord = geom.getChildByName('coord') # 'Coordinate'
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    # Split the flat coordIndex list on -1 separators into polylines
    polylines = []
    current = []
    for raw_index in geom.getFieldAsArray('coordIndex', 0, ancestry):
        if raw_index == -1:
            polylines.append(current)
            current = []
        else:
            current.append(int(raw_index))
    polylines.append(current)

    # vcolor = geom.getChildByName('color')
    # blender doesn't have per vertex color

    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for polyline in polylines:
        if not polyline:
            continue
        spline = bpycurve.splines.new('POLY')
        spline.points.add(len(polyline) - 1)  # new spline has 1 point already
        for vert_index, spline_pt in zip(polyline, spline.points):
            spline_pt.co[0:3] = points[vert_index]

    return bpycurve
def importMesh_PointSet(geom, ancestry):
    """Build a vertex-only Blender mesh from a PointSet node."""
    # works for both x3d and vrml
    coord = geom.getChildBySpec('Coordinate')
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    # vcolor = geom.getChildByName('color')
    # blender doesn't have per vertex color

    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    bpymesh.vertices.foreach_set("co", list(chain.from_iterable(points)))

    # No need to validate
    bpymesh.update()
    return bpymesh
2450 # -----------------------------------------------------------------------------------
2451 # Primitives
2452 # SA: they used to use bpy.ops for primitive creation. That was
2453 # unbelievably slow on complex scenes. I rewrote to generate meshes
2454 # by hand.
2457 GLOBALS['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry):
    """Build a UV-sphere mesh (with texture coordinates) from a Sphere node."""
    # solid is ignored.
    # Extra field 'subdivision="n m"' attribute, specifying how many
    # rings and segments to use (X3DOM).
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    # 2 poles plus (nr - 1) rings of ns vertices each
    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    # Caps are triangle fans (ns tris each), sides are quads
    num_poly = ns * nr
    num_tri = ns * 2
    num_quad = num_poly - num_tri
    num_loop = num_quad * 4 + num_tri * 3
    tf = bpymesh.polygons
    tf.add(num_poly)
    bpymesh.loops.add(num_loop)
    # Loop layout: ns tris (top cap), num_quad quads (sides), ns tris (bottom)
    bpymesh.polygons.foreach_set("loop_start",
                                 tuple(range(0, ns * 3, 3)) +
                                 tuple(range(ns * 3, num_loop - ns * 3, 4)) +
                                 tuple(range(num_loop - ns * 3, num_loop, 3)))
    bpymesh.polygons.foreach_set("loop_total", (3,) * ns + (4,) * num_quad + (3,) * ns)

    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    tex = bpymesh.uv_layers.new().data

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        for lidx, uv in zip(tf[seg].loop_indices,
                            (((seg + 0.5) / ns, 1),
                             (seg / ns, 1 - 1 / nr),
                             ((seg + 1) / ns, 1 - 1 / nr))):
            tex[lidx].uv = uv
        for lidx, uv in zip(tf[fb + seg].loop_indices,
                            (((seg + 0.5) / ns, 0),
                             ((seg + 1) / ns, 1 / nr),
                             (seg / ns, 1 / nr))):
            tex[lidx].uv = uv

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            for lidx, uv in zip(tf[rfb + seg].loop_indices,
                                ((seg / ns, 1 - (ring + 1) / nr),
                                 (seg / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 1) / nr))):
                tex[lidx].uv = uv

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry):
    """Build a cylinder mesh (y axis, optional caps) from a Cylinder node."""
    # solid is ignored
    # no ccw in this element
    # Extra parameter subdivision="n" - how many faces to use
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2  # total vertex count: top and bottom vertex per segment
    yvalues = (height / 2, -height / 2)
    angle = 2 * pi / n

    # The seam is at x=0, z=-r, vertices go ccw -
    # to pos x, to neg z, to neg x, back to neg z
    # Vertices interleave: even indices on the top rim, odd on the bottom.
    verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
             for i in range(n) for y in yvalues]
    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
    if top:
        faces += [[x for x in range(0, nn, 2)]]
    if bottom:
        faces += [[x for x in range(nn - 1, -1, -2)]]

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.

    bpymesh.validate()

    # The structure of the loop array goes: cap, side, cap.
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]

    if top:
        # Cap UVs: project the rim circle into the unit square
        loops += [0.5 + co / 2 for i in range(n)
                  for co in (-sin(angle * i), cos(angle * i))]

    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Build a cone mesh (apex at +y, base at -y) from a Cone node."""
    # Solid ignored
    # Extra parameter subdivision="n" - how many faces to use
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    verts = [(0, d, 0)]  # apex
    # Base-ring vertices sit at y = -d; the y component must be present,
    # otherwise these are 2-tuples and from_pydata() fails.
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]
    faces = []

    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate()
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        # Base UVs: project the base circle into the unit square
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]
    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Build an axis-aligned box mesh (with per-face UVs) from a Box node."""
    # Solid is ignored
    # No ccw in this element
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    # 'size' is the full extent; work with half-extents below
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    # 6 quad faces, 4 loops each
    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    # One unit UV square per face
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2697 # -----------------------------------------------------------------------------------
2698 # Utilities for importShape
2701 # Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    # Given an X3D material, creates a Blender material.
    # texture is applied later, in appearance_Create().
    # All values between 0.0 and 1.0, defaults from VRML docs.
    bpymat = bpy.data.materials.new(vrmlname)
    # NOTE: everything below the bare return is unreachable legacy (2.7x)
    # property setup; the function currently returns None, so shapes get
    # no material assigned. The fresh material datablock is still created.
    return  # XXX For now...
    bpymat.ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)
    diff_color = mat.getFieldAsFloatTuple('diffuseColor',
                                          [0.8, 0.8, 0.8],
                                          ancestry)
    bpymat.diffuse_color = diff_color

    # NOTE - blender doesn't support emmisive color
    # Store in mirror color and approximate with emit.
    emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat.mirror_color = emit
    bpymat.emit = (emit[0] + emit[1] + emit[2]) / 3.0

    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat.specular_hardness = int(1 + (510 * shininess))
    # 0-1 -> 1-511
    bpymat.specular_color = mat.getFieldAsFloatTuple('specularColor',
                                                     [0.0, 0.0, 0.0], ancestry)
    bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    if bpymat.alpha < 0.999:
        bpymat.use_transparency = True
    if False and is_vcol:
        bpymat.use_vertex_color_paint = True
    return bpymat
def appearance_CreateDefaultMaterial():
    # Just applies the X3D defaults. Used for shapes
    # without explicit material definition
    # (but possibly with a texture).

    bpymat = bpy.data.materials.new("Material")
    # NOTE: code below the bare return is unreachable legacy (2.7x)
    # property setup; the function currently returns None.
    return  # XXX For now...
    bpymat.ambient = 0.2
    bpymat.diffuse_color = [0.8, 0.8, 0.8]
    bpymat.mirror_color = (0, 0, 0)
    bpymat.emit = 0

    bpymat.specular_hardness = 103
    # 0-1 -> 1-511
    bpymat.specular_color = (0, 0, 0)
    bpymat.alpha = 1
    return bpymat
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each candidate URL in order; return the first image that
    loads, or None if none could be loaded."""
    # The base directory is the same for every candidate URL,
    # so compute it once instead of once per iteration.
    dirname = os.path.dirname(node.getFilename())
    for f in ima_urls:
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            return bpyima

    return None
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Load the image referenced by an ImageTexture node.

    Returns the Blender image, or None when the node has no URL or
    no candidate URL could be loaded.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
            # in some cases we get a list of images.
        except Exception:
            # Was a bare 'except:'; narrowed so KeyboardInterrupt and
            # SystemExit are not swallowed.
            ima_urls = None
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None
    else:
        bpyima = appearance_LoadImageTextureFile(ima_urls, node)

        if not bpyima:
            print("ImportX3D warning: unable to load texture", ima_urls)
        else:
            # KNOWN BUG; PNGs with a transparent color are not perceived
            # as transparent. Need alpha channel.

            if bpyima.depth not in {32, 128}:
                bpyima.alpha_mode = 'NONE'
        return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    # Both USE-based caching and desc-based caching.
    # Works for both ImageTextures and PixelTextures.

    # USE-based caching
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    if desc and desc in texture_cache:
        bpyima = texture_cache[desc]
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima
        return bpyima

    # No cached texture, load it.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        repeat_s = tex_node.getFieldAsBool('repeatS', True, ancestry)
        bpyima.use_clamp_x = not repeat_s
        repeat_t = tex_node.getFieldAsBool('repeatT', True, ancestry)
        bpyima.use_clamp_y = not repeat_t
        # NOTE(review): use_clamp_x/use_clamp_y look like 2.7x-era Texture
        # attributes; confirm they still exist on Image in the current API.

        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima

        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    """Expand a cached material into the (material, image, has_alpha)
    triple returned by importShape_LoadAppearance.

    The texture-slot branch is disabled ('if 0'), so a cached material
    currently never reports an image or alpha.
    """
    if 0 and bpymat.texture_slots[0] is not None:
        bpyima = bpymat.texture_slots[0].texture.image
        has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        return (bpymat, bpyima, has_alpha)
    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (material desc, texture desc) key for material_cache,
    or None when desc-based caching must stay off for this appearance."""
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    # A node that is present but has no desc (VRML; nodes aren't
    # serialized yet - TODO) makes the key ambiguous.
    desc_missing = ((tex_node and tex_desc is None) or
                    (material and mat_desc is None))
    if not desc_missing:
        return (mat_desc, tex_desc)
    if not tex_node and not material:
        # Even for VRML, we cache the null material
        return ("Default", "Default")
    return None  # Desc-based caching is off
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    # Creates a Blender material object from appearance.
    # Returns (bpymat, bpyima, tex_has_alpha).
    bpyima = None
    tex_has_alpha = False

    if material:
        bpymat = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        bpymat = appearance_CreateDefaultMaterial()

    if tex_node:  # Texture caching inside there
        bpyima = appearance_LoadTexture(tex_node, ancestry, node)

    # The 'if False' blocks below are disabled 2.7x texture-slot code,
    # presumably kept for reference until a node-based path exists.
    if False and is_vcol:
        bpymat.use_vertex_color_paint = True

    if False and bpyima:
        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}

        texture = bpy.data.textures.new(bpyima.name, 'IMAGE')
        texture.image = bpyima

        mtex = bpymat.texture_slots.add()
        mtex.texture = texture

        mtex.texture_coords = 'UV'
        mtex.use_map_diffuse = True
        mtex.use = True

        if tex_has_alpha:
            bpymat.use_transparency = True
            mtex.use_map_alpha = True
            mtex.alpha_factor = 0.0

    return (bpymat, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    # Decode an X3D PixelTexture: the 'image' field is
    # (width, height, plane count) followed by one packed int per pixel.
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Conditional above the loop, for performance.
    # Blender wants per-channel floats in [0, 1], RGBA; the source packs
    # all planes of one pixel into a single integer, most significant first.
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
3016 # Called from importShape to insert a data object (typically a mesh)
3017 # into the scene
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap an imported datablock (mesh or text curve) in an object,
    apply material/UV/crease settings, and link it into the collection."""

    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.auto_smooth_angle = creaseAngle
            bpydata.use_auto_smooth = True

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'

            if texmtx:
                # Apply the TextureTransform matrix to every loop UV in place
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]

                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Create a Blender FONT curve from an X3D/VRML Text node."""
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    # A Text node may have no 'string' field; previously this crashed
    # with AttributeError on None.split. Import it as empty text instead.
    lines = [w.strip('"') for w in body.split('" "')] if body is not None else []

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(lines)
    bpytext.size = size
    return bpytext
3083 # -----------------------------------------------------------------------------------
# Dispatch table: geometry node spec name -> importer returning a
# bpy mesh/curve datablock. Used by importShape().
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
}
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import one Shape node: appearance plus a single geometry child."""
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # Shape was imported before: instance it via an object copy
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName() or 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = geom.getChildBySpec(['Color', 'ColorRGBA']) is not None

    if appr:
        (bpymat, bpyima, tex_has_alpha) = importShape_LoadAppearance(
            vrmlname, appr, ancestry, node, is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is None:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
        return

    bpydata = geom_fn(geom, ancestry)

    # There are no geometry importers that can legally return
    # no object. It's either a bpy object, or an exception
    importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec,
        node, bpymat, tex_has_alpha, texmtx,
        ancestry, global_matrix)
3168 # -----------------------------------------------------------------------------------
3169 # Lighting
def importLamp_PointLight(node, ancestry):
    """Translate a PointLight node into a Blender point light.
    Returns (light datablock, world matrix)."""
    vrmlname = node.getDefName() or 'PointLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # max intensity is documented as 1.0 but some files have higher
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    return bpylamp, Matrix.Translation(Vector(location))
def importLamp_DirectionalLight(node, ancestry):
    """Build a Blender sun-light datablock from a VRML/X3D DirectionalLight.

    Returns a (light, matrix) pair; the matrix rotates the lamp so its
    local -Z axis matches the node's 'direction' field.
    """
    name = node.getDefName() or 'DirectLight'

    # NOTE: ambientIntensity and the 'on' switch are not translated yet (TODO).
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    # Spec documents a 1.0 maximum, but files in the wild exceed it.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)

    light = bpy.data.lights.new(name, 'SUN')
    light.energy = intensity
    light.color = color

    # Blender lamps shine down their local -Z, with Y up.
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return light, mtx
3215 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Build a Blender spot-light datablock from a VRML/X3D SpotLight node.

    Returns a (light, matrix) pair; the matrix places the lamp at
    'location' and aims its local -Z along 'direction'.
    """
    name = node.getDefName() or 'SpotLight'

    # NOTE: ambientIntensity, attenuation and the 'on' switch are TODO.
    # Spec documents 1.0 maxima for the angles/intensity, but real files exceed them.
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # VRML cutOffAngle is a half-angle; Blender spot_size is the full cone.
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    light = bpy.data.lights.new(name, 'SPOT')
    light.energy = intensity
    light.distance = radius
    light.color = color
    light.spot_size = cutOffAngle

    # Map the beam/cutoff ratio onto Blender's 0..1 spot_blend.
    if beamWidth > cutOffAngle:
        blend = 0.0
    elif cutOffAngle == 0.0:  # this should never happen!
        blend = 0.5
    else:
        blend = beamWidth / cutOffAngle
    light.spot_blend = blend

    # Lamps point down -Z, Y up: translate, then track the direction vector.
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return light, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Import one VRML/X3D light node into *bpycollection*.

    spec selects the concrete importer ('PointLight', 'DirectionalLight'
    or 'SpotLight'); each returns a light datablock plus a local matrix
    that is combined with the node's ancestry transform.

    Raises ValueError for an unrecognised spec.
    """
    if spec == 'PointLight':
        bpylamp, mtx = importLamp_PointLight(node, ancestry)
    elif spec == 'DirectionalLight':
        bpylamp, mtx = importLamp_DirectionalLight(node, ancestry)
    elif spec == 'SpotLight':
        bpylamp, mtx = importLamp_SpotLight(node, ancestry)
    else:
        print("Error, not a lamp")
        # Was a bare ValueError; include the offending spec for diagnosis.
        raise ValueError("unsupported light type %r" % spec)

    # Cache the object on the node so routes/animation can find it later.
    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3273 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Import a VRML/X3D Viewpoint node as a Blender camera object."""
    name = node.getDefName() or 'Viewpoint'

    # Spec documents a 1.0 maximum, but files in the wild exceed it.
    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)  # read but currently unused

    camera = bpy.data.cameras.new(name)
    camera.angle = fieldOfView

    # Place at 'position', then apply the axis/angle 'orientation'.
    mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, camera)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Import a VRML/X3D Transform node as an empty (hierarchy placeholder)."""
    name = node.getDefName() or 'Transform'

    empty = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpycollection.objects.link(empty)
    empty.select_set(True)

    empty.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # Keep the empties small and unobtrusive in the viewport.
    empty.empty_display_type = 'PLAIN_AXES'
    empty.empty_display_size = 0.2
3315 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the F-Curve on *action* for (data_path, array_index),
    creating it first if it does not exist yet."""
    existing = next(
        (fcu for fcu in action.fcurves
         if fcu.data_path == data_path and fcu.array_index == array_index),
        None)
    if existing is not None:
        return existing
    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Bake a VRML PositionInterpolator into location F-Curves on *action*."""
    times = node.getFieldAsArray('key', 0, ancestry)
    values = node.getFieldAsArray('keyValue', 3, ancestry)

    # One curve per location channel (x, y, z).
    curves = tuple(action_fcurve_ensure(action, "location", axis)
                   for axis in range(3))

    for i, time in enumerate(times):
        try:
            x, y, z = values[i]
        except:
            # Mismatched key/keyValue lengths or malformed triple: skip key.
            continue

        for curve, component in zip(curves, (x, y, z)):
            curve.keyframe_points.insert(time, component)

    # VRML interpolators are piecewise linear between keys.
    for curve in curves:
        for kf in curve.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Bake a VRML OrientationInterpolator into rotation_euler F-Curves."""
    times = node.getFieldAsArray('key', 0, ancestry)
    values = node.getFieldAsArray('keyValue', 4, ancestry)

    # One curve per euler channel (x, y, z).
    curves = tuple(action_fcurve_ensure(action, "rotation_euler", axis)
                   for axis in range(3))

    for i, time in enumerate(times):
        try:
            x, y, z, w = values[i]
        except:
            # Mismatched key/keyValue lengths or malformed quadruple: skip key.
            continue

        # axis/angle -> rotation matrix -> euler, then key each channel.
        eul = translateRotation((x, y, z, w)).to_euler()
        for curve, component in zip(curves, (eul.x, eul.y, eul.z)):
            curve.keyframe_points.insert(time, component)

    # VRML interpolators are piecewise linear between keys.
    for curve in curves:
        for kf in curve.keyframe_points:
            kf.interpolation = 'LINEAR'
# Untested!
def translateScalarInterpolator(node, action, ancestry):
    """Bake a VRML interpolator routed to set_scale into scale F-Curves.

    Previously this read 'keyValue' grouped by 4 floats while unpacking 3
    (scale keys are SFVec3f triples), so every unpack failed and was
    swallowed; it also called keyframe_points.new(), which does not exist
    in the bpy API (only insert/add/remove) — the function could never
    actually insert a keyframe.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    # Scale keys are 3-component vectors; group the raw floats by 3 (was 4).
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            # Mismatched key/keyValue lengths or malformed triple: skip key.
            continue

        # FCurveKeyframePoints has insert(), not new().
        sca_x.keyframe_points.insert(time, x)
        sca_y.keyframe_points.insert(time, y)
        sca_z.keyframe_points.insert(time, z)

    # Match the position/orientation interpolators: linear between keys.
    for fcu in (sca_x, sca_y, sca_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """

    # XXX25 TODO
    # NOTE(review): the unconditional return below disables this function
    # entirely; everything after it is dead Blender 2.4x Ipo-API code
    # (action.addCurve / Blender.IpoCurve) kept only as a porting reference.
    if 1:
        return

    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    # cycleInterval, when given, overrides stopTime: stop = start + interval.
    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    # Blender frames are 1-based, hence the "1 +" offsets.
    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment
    """
    # Scans the node's ROUTE statements and bakes each referenced
    # interpolator into an action stored in the node's route-ipo dict,
    # keyed by the target (animated) node's DEF id.

    # Only root/file nodes carry raw fields; anything else has no routes.
    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # Lazily create one shared action per animated DEF id.
        try:
            action = routeIpoDict[act_id]
        except:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to each other

    ROUTE vpPI.value_changed TO champFly001.set_position
    ROUTE vpOI.value_changed TO champFly001.set_orientation
    ROUTE vpTs.fraction_changed TO vpPI.set_fraction
    ROUTE vpTs.fraction_changed TO vpOI.set_fraction
    ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    #value_changed
    set_position_node = None
    set_orientation_node = None
    time_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            # A ROUTE field looks like: ['ROUTE', 'src.event', 'TO', 'dst.event']
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except:
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                # Source is an interpolator; bake it into the target's action.
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                # TimeSensor binding; currently a no-op (see translateTimeSensor).
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
def load_web3d(
        bpycontext,
        filepath,
        *,
        PREF_FLAT=False,
        PREF_CIRCLE_DIV=16,
        global_matrix=None,
        HELPER_FUNC=None
        ):
    """Parse *filepath* (VRML .wrl or X3D .x3d) and build the scene.

    bpycontext: the Blender context supplying the target collection.
    PREF_FLAT: when True, Transform nodes are skipped and no object
        hierarchy is built.
    PREF_CIRCLE_DIV: tessellation detail for primitive geometry.
    global_matrix: extra transform applied to every imported object.
    HELPER_FUNC: optional callback(node, ancestry) giving external
        scripts first pick at each node.
    """
    # Used when adding blender primitives
    GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV

    bpycollection = bpycontext.collection
    #root_node = vrml_parse('/_Cylinder.wrl')
    if filepath.lower().endswith('.x3d'):
        root_node, msg = x3d_parse(filepath)
    else:
        root_node, msg = vrml_parse(filepath)

    if not root_node:
        print(msg)
        return

    if global_matrix is None:
        global_matrix = Matrix()

    # fill with tuples - (node, [parents-parent, parent])
    all_nodes = root_node.getSerialized([], [])

    for node, ancestry in all_nodes:
        #if 'castle.wrl' not in node.getFilename():
        #    continue

        spec = node.getSpec()
        '''
        prefix = node.getPrefix()
        if prefix=='PROTO':
            pass
        else
        '''
        if HELPER_FUNC and HELPER_FUNC(node, ancestry):
            # Note, include this function so the VRML/X3D importer can be extended
            # by an external script. - gets first pick
            pass
        if spec == 'Shape':
            importShape(bpycollection, node, ancestry, global_matrix)
        elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
            importLamp(bpycollection, node, spec, ancestry, global_matrix)
        elif spec == 'Viewpoint':
            importViewpoint(bpycollection, node, ancestry, global_matrix)
        elif spec == 'Transform':
            # Only use transform nodes when we are not importing a flat object hierarchy
            if not PREF_FLAT:
                importTransform(bpycollection, node, ancestry, global_matrix)
        '''
        # These are delt with later within importRoute
        elif spec=='PositionInterpolator':
            action = bpy.data.ipos.new('web3d_ipo', 'Object')
            translatePositionInterpolator(node, action)
        '''

    # After we import all nodes, route events - anim paths
    for node, ancestry in all_nodes:
        importRoute(node, ancestry)

    for node, ancestry in all_nodes:
        if node.isRoot():
            # we know that all nodes referenced from will be in
            # routeIpoDict so no need to run node.getDefDict() for every node.
            routeIpoDict = node.getRouteIpoDict()
            defDict = node.getDefDict()

            for key, action in routeIpoDict.items():

                # Assign anim curves
                node = defDict[key]
                if node.blendData is None:  # Add an object if we need one for animation
                    node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None)  # , name)
                    bpycollection.objects.link(node.blendObject)
                    # Was `bpyob.select_set(True)`: `bpyob` is never bound in
                    # this function, so creating an AnimOb raised a NameError.
                    node.blendObject.select_set(True)

                if node.blendData.animation_data is None:
                    node.blendData.animation_data_create()

                node.blendData.animation_data.action = action

    # Add in hierarchy
    if PREF_FLAT is False:
        child_dict = {}
        for node, ancestry in all_nodes:
            if node.blendObject:
                blendObject = None

                # Get the last parent
                i = len(ancestry)
                while i:
                    i -= 1
                    blendObject = ancestry[i].blendObject
                    if blendObject:
                        break

                if blendObject:
                    # Parent Slow, - 1 liner but works
                    # blendObject.makeParent([node.blendObject], 0, 1)

                    # Parent FAST
                    try:
                        child_dict[blendObject].append(node.blendObject)
                    except:
                        child_dict[blendObject] = [node.blendObject]

        # Parent
        for parent, children in child_dict.items():
            for c in children:
                c.parent = parent

        # update deps
        bpycontext.view_layer.update()
        del child_dict
def load_with_profiler(
        context,
        filepath,
        *,
        global_matrix=None
        ):
    """Run load_web3d under cProfile and print the hottest 10% of entries."""
    import cProfile
    import pstats

    profiler = cProfile.Profile()
    # The exec'd string must see this function's locals (context, filepath,
    # global_matrix), hence runctx with globals()/locals().
    profiler.runctx("load_web3d(context, filepath, PREF_FLAT=True, "
                    "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
                    globals(), locals())
    stats = pstats.Stats(profiler)
    stats.sort_stats("time")
    stats.print_stats(0.1)
    # stats.print_callers(0.1)
def load(context,
         filepath,
         *,
         global_matrix=None
         ):
    """Operator entry point: import *filepath* into *context*.

    Always imports a flat hierarchy; returns {'FINISHED'} so the
    calling Blender operator reports success.
    """
    # Swap in load_with_profiler(...) here when hunting for slow spots.
    load_web3d(
        context,
        filepath,
        PREF_FLAT=True,
        PREF_CIRCLE_DIV=16,
        global_matrix=global_matrix,
    )

    return {'FINISHED'}