# Source: blender-addons.git / io_scene_x3d / import_x3d.py
# (gitweb blob 228e050705801ecf21a23de65d6b12738a137c19)
1 # SPDX-License-Identifier: GPL-2.0-or-later
# Module-wide debug switch: when True the parser prints extra diagnostics
# and raises ValueError on malformed node endings instead of continuing.
DEBUG = False

# This should work without a blender at all
6 import os
7 import shlex
8 import math
9 from math import sin, cos, pi
10 from itertools import chain
# Module-level caches so repeated references to the same texture/material
# re-use one datablock instead of creating duplicates.
# NOTE(review): keys appear to come from vrmlNode.desc()/DEF names —
# confirm against the import code that populates these.
texture_cache = {}
material_cache = {}

EPSILON = 0.0000001  # Very crude.
def imageConvertCompat(path):
    """Convert image formats Blender can't load (currently only GIF) to PNG.

    Uses the external ImageMagick ``convert`` tool when available. On win32
    the path is returned untouched, and on any failure (tool missing,
    conversion failed) the original *path* is returned, so the result is
    always usable as an image path.

    :arg path: filesystem path of the source image.
    :return: path of the converted PNG if one was produced, else ``path``.
    """

    if os.sep == '\\':
        return path  # assume win32 has quicktime, dont convert

    if path.lower().endswith('.gif'):
        path_to = path[:-3] + 'png'

        '''
        if exists(path_to):
            return path_to
        '''
        # print('\n'+path+'\n'+path_to+'\n')

        # Run ImageMagick via an argument list so quotes / spaces / shell
        # metacharacters in the path cannot break or inject into the command
        # (the original used os.system with %-interpolated double quotes).
        import subprocess
        try:
            subprocess.call(['convert', path, path_to])  # for now just hope we have image magick
        except OSError:
            # 'convert' is not installed — fall through, return the original.
            pass

        if os.path.exists(path_to):
            return path_to

    return path
38 # notes
39 # transform are relative
40 # order doesn't matter for loc/size/rot
41 # right handed rotation
42 # angles are in radians
43 # rotation first defines axis then amount in radians
46 # =============================== VRML Specific
def vrml_split_fields(value):
    """
    Split a flat token list into per-field token lists.

    key 0.0 otherkey 1,2,3 opt1 opt1 0.0
    -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
    """
    def looks_like_key(tok):
        # A key is an unquoted word that is not a VRML boolean literal.
        return tok[0] != '"' and tok[0].isalpha() and tok.upper() not in {'TRUE', 'FALSE'}

    fields = []
    current = []

    for tok in value:
        if not looks_like_key(tok):
            # Plain value token — always belongs to the field in progress.
            current.append(tok)
            continue

        if not current:
            # First token of the very first field.
            current.append(tok)
            continue

        n = len(current)
        if n > 2 and current[-2] in {'DEF', 'USE'}:
            # Token right after a DEF/USE id stays in the same field.
            current.append(tok)
        elif (not looks_like_key(current[-1])) or (n == 3 and current[1] == 'IS'):
            # A key following a value (or a completed 'x IS y' triple)
            # starts a new field.
            fields.append(current)
            current = [tok]
        else:
            # Consecutive keys: some fields legitimately need several.
            current.append(tok)

    if current:
        fields.append(current)

    return fields
def vrmlFormat(data):
    """
    Keep this as a valid vrml file, but format in a way we can predict.

    Returns the file as a list of non-empty lines: comments stripped,
    one brace/bracket per line, one field per line, string literals
    protected from the bracket/comma rewriting.
    """
    # Strip all comments - # not in strings - warning multiline strings are ignored.
    def strip_comment(l):
        #l = ' '.join(l.split())
        l = l.strip()

        if l.startswith('#'):
            return ''

        i = l.find('#')

        if i == -1:
            return l

        # Most cases accounted for! if we have a comment at the end of the line do this...
        #j = l.find('url "')
        j = l.find('"')

        if j == -1:  # simple no strings
            return l[:i].strip()

        # Walk the line tracking whether we are inside a quoted string so a
        # '#' inside a string is not treated as a comment.
        q = False
        for i, c in enumerate(l):
            if c == '"':
                q = not q  # invert

            elif c == '#':
                if q is False:
                    # NOTE(review): slices to i - 1, dropping the character
                    # before the '#' as well — looks deliberate for the
                    # "space before comment" case, confirm.
                    return l[:i - 1]

        return l

    data = '\n'.join([strip_comment(l) for l in data.split('\n')])  # remove all whitespace

    EXTRACT_STRINGS = True  # only needed when strings or filename contains ,[]{} chars :/

    if EXTRACT_STRINGS:

        # We need this so we can detect URL's
        data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

        # Pull every quoted string out into string_ls, leaving an empty ""
        # placeholder, so the brace/comma rewriting below cannot corrupt
        # string contents. They are spliced back in afterwards, in order.
        string_ls = []

        #search = 'url "'
        search = '"'

        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search, last_i)
            if i != -1:

                start = i + len(search)  # first char after end of search
                end = data.find('"', start)
                if end != -1:
                    item = data[start:end]
                    string_ls.append(item)
                    data = data[:start] + data[end:]
                    ok = True  # keep looking

                    last_i = (end - len(item)) + 1
                    # print(last_i, item, '|' + data[last_i] + '|')

    # done with messy extracting strings part

    # Bad, dont take strings into account
    # NOTE(review): disabled in the original source (was wrapped in a
    # string literal) — comments are already stripped above:
    # data = data.replace('#', '\n#')
    # data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')])  # remove all whitespace

    data = data.replace('{', '\n{\n')
    data = data.replace('}', '\n}\n')
    data = data.replace('[', '\n[\n')
    data = data.replace(']', '\n]\n')
    data = data.replace(',', ' , ')  # make sure comma's separate

    # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
    # See T45195 for details.
    data = '\n'.join([' '.join(value) for l in data.split('\n') for value in vrml_split_fields(l.split())])

    if EXTRACT_STRINGS:
        # add strings back in

        search = '"'  # fill in these empty strings

        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search + '"', last_i)
            # print(i)
            if i != -1:
                start = i + len(search)  # first char after end of search
                item = string_ls.pop(0)
                # print(item)
                data = data[:start] + item + data[start:]

                last_i = start + len(item) + 1

                ok = True

    # More annoying obscure cases where USE or DEF are placed on a newline
    # data = data.replace('\nDEF ', ' DEF ')
    # data = data.replace('\nUSE ', ' USE ')

    data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

    # Better to parse the file accounting for multiline arrays

    data = data.replace(',\n', ' , ')  # remove line endings with commas
    data = data.replace(']', '\n]\n')  # very very annoying - but some comma's are at the end of the list, must run this again.

    return [l for l in data.split('\n') if l]
# Node categories produced by the parser.
NODE_NORMAL = 1  # {}
NODE_ARRAY = 2  # []
NODE_REFERENCE = 3  # USE foobar
# NODE_PROTO = 4 #
# NOTE(review): NODE_PROTO is commented out here but still referenced in
# getNodePreText(); reaching that branch would raise NameError — confirm
# whether it is unreachable (is_nodeline handles PROTO lines first).

# Module-global list of pre-formatted source lines shared by the whole
# parser; vrmlFormat() output is assigned into it with lines[:] = ...
lines = []
def getNodePreText(i, words):
    """Scan forward from line *i* collecting tokens into *words* until a
    node opening is classified.

    Returns (node_type, next_line_index), or (0, -1) when no node start
    could be recognised. Reads the module-global ``lines`` list.
    """
    # print(lines[i])
    use_node = False
    while len(words) < 5:

        if i >= len(lines):
            break

        elif lines[i].startswith('PROTO'):
            # NOTE(review): NODE_PROTO is commented out at module level —
            # this branch would raise NameError if ever reached; confirm
            # is_nodeline() always consumes PROTO lines first.
            return NODE_PROTO, i + 1

        elif lines[i] == '{':
            # words.append(lines[i])  # no need
            # print("OK")
            return NODE_NORMAL, i + 1
        elif lines[i].count('"') % 2 != 0:  # odd number of quotes? - part of a string.
            # print('ISSTRING')
            break
        else:
            new_words = lines[i].split()
            if 'USE' in new_words:
                use_node = True

            words.extend(new_words)
            i += 1

    # Check for USE node - no {
    # USE #id - should always be on the same line.
    if use_node:
        # print('LINE', i, words[:words.index('USE')+2])
        words[:] = words[:words.index('USE') + 2]
        if lines[i] == '{' and lines[i + 1] == '}':
            # USE sometimes has {} after it anyway
            i += 2
        return NODE_REFERENCE, i

    # print("error value!!!", words)
    return 0, -1
def is_nodeline(i, words):
    """Classify line *i* of the module-global ``lines`` as the start of a
    node.

    Fills *words* in place with the node's header tokens and returns
    (node_type, next_line_index); (0, 0) when this is not a node line.
    """

    if not lines[i][0].isalpha():
        return 0, 0

    #if lines[i].startswith('field'):
    #   return 0, 0

    # Is this a prototype??
    if lines[i].startswith('PROTO'):
        words[:] = lines[i].split()
        return NODE_NORMAL, i + 1  # TODO - assumes the next line is a '[\n', skip that
    if lines[i].startswith('EXTERNPROTO'):
        words[:] = lines[i].split()
        return NODE_ARRAY, i + 1  # TODO - assumes the next line is a '[\n', skip that

    # NOTE(review): neither is_protoline nor proto_field_defs is defined in
    # this chunk of the file — they must come from elsewhere in the module;
    # confirm before refactoring.
    proto_type, new_i = is_protoline(i, words, proto_field_defs)
    if new_i != -1:
        return proto_type, new_i

    # Simple "var [" type
    if lines[i + 1] == '[':
        if lines[i].count('"') % 2 == 0:
            words[:] = lines[i].split()
            return NODE_ARRAY, i + 2

    node_type, new_i = getNodePreText(i, words)

    if not node_type:
        if DEBUG:
            print("not node_type", lines[i])
        return 0, 0

    # Ok, we have a { after some values
    # Check the values are not fields
    for i, val in enumerate(words):
        if i != 0 and words[i - 1] in {'DEF', 'USE'}:
            # ignore anything after DEF, it is a ID and can contain any chars.
            pass
        elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}:
            pass
        else:
            # There is a number in one of the values, therefor we are not a node.
            return 0, 0

    #if node_type==NODE_REFERENCE:
    #    print(words, "REF_!!!!!!!")
    return node_type, new_i
def is_numline(i):
    """
    Does this line start with a number?
    """

    # Works but too slow.
    # NOTE(review): this variant was disabled in the original source
    # (wrapped in a string literal whose delimiter lines were lost in
    # extraction):
    # l = lines[i]
    # for w in l.split():
    #     if w == ',':
    #         pass
    #     else:
    #         try:
    #             float(w)
    #             return True
    #         except:
    #             return False
    # return False

    l = lines[i]

    line_start = 0

    if l.startswith(', '):
        line_start += 2

    # Only test the first whitespace-delimited token — cheap and sufficient.
    line_end = len(l) - 1
    line_end_new = l.find(' ', line_start)  # comma's always have a space before them

    if line_end_new != -1:
        line_end = line_end_new

    try:
        float(l[line_start:line_end])  # works for a float or int
        return True
    except:
        return False
class vrmlNode(object):
    """
    One node of the VRML parse tree.

    A node is created for every ``{}`` block, ``[]`` array and ``USE``
    reference found in the pre-formatted module-global ``lines`` list.
    Root nodes (created via setRoot) additionally own the DEF / ROUTE /
    PROTO namespaces; every other node reaches them by walking ``parent``.
    """
    __slots__ = ('id',
                 'fields',
                 'proto_node',
                 'proto_field_defs',
                 'proto_fields',
                 'node_type',
                 'parent',
                 'children',
                 'parent',  # NOTE(review): duplicate of the entry above — harmless but redundant
                 'array_data',
                 'reference',
                 'lineno',
                 'filename',
                 'blendObject',
                 'blendData',
                 'DEF_NAMESPACE',
                 'ROUTE_IPO_NAMESPACE',
                 'PROTO_NAMESPACE',
                 'x3dNode',
                 'parsed')

    def __init__(self, parent, node_type, lineno):
        self.id = None
        self.node_type = node_type
        self.parent = parent
        self.blendObject = None
        self.blendData = None
        self.x3dNode = None  # for x3d import only
        self.parsed = None  # We try to reuse objects in a smart way
        if parent:
            parent.children.append(self)

        self.lineno = lineno

        # This is only set from the root nodes.
        # Having a filename also denotes a root node
        self.filename = None
        self.proto_node = None  # proto field definition eg: "field SFColor seatColor .6 .6 .1"

        # Store in the root node because each inline file needs its own root node and its own namespace
        self.DEF_NAMESPACE = None
        self.ROUTE_IPO_NAMESPACE = None
        # NOTE(review): disabled in the original source — FIELD_NAMESPACE is
        # not listed in __slots__, so assigning it would raise AttributeError:
        # self.FIELD_NAMESPACE = None

        self.PROTO_NAMESPACE = None

        self.reference = None

        if node_type == NODE_REFERENCE:
            # For references, only the parent and ID are needed
            # the reference its self is assigned on parsing
            return

        self.fields = []  # fields have no order, in some cases rool level values are not unique so dont use a dict

        self.proto_field_defs = []  # proto field definition eg: "field SFColor seatColor .6 .6 .1"
        self.proto_fields = []  # proto field usage "diffuseColor IS seatColor"
        self.children = []
        self.array_data = []  # use for arrays of data - should only be for NODE_ARRAY types

    # Only available from the root node
    # NOTE(review): disabled in the original source (FIELD_NAMESPACE is
    # never initialized, see __init__):
    # def getFieldDict(self):
    #     if self.FIELD_NAMESPACE is not None:
    #         return self.FIELD_NAMESPACE
    #     else:
    #         return self.parent.getFieldDict()

    def getProtoDict(self):
        # Walk up to the root node that owns the PROTO namespace.
        if self.PROTO_NAMESPACE is not None:
            return self.PROTO_NAMESPACE
        else:
            return self.parent.getProtoDict()

    def getDefDict(self):
        # Walk up to the root node that owns the DEF namespace.
        if self.DEF_NAMESPACE is not None:
            return self.DEF_NAMESPACE
        else:
            return self.parent.getDefDict()

    def getRouteIpoDict(self):
        # Walk up to the root node that owns the ROUTE namespace.
        if self.ROUTE_IPO_NAMESPACE is not None:
            return self.ROUTE_IPO_NAMESPACE
        else:
            return self.parent.getRouteIpoDict()

    def setRoot(self, filename):
        """Mark this node as a root node: give it a filename and its own
        DEF/ROUTE/PROTO namespaces."""
        self.filename = filename
        # self.FIELD_NAMESPACE = {}
        self.DEF_NAMESPACE = {}
        self.ROUTE_IPO_NAMESPACE = {}
        self.PROTO_NAMESPACE = {}

    def isRoot(self):
        # Having a filename denotes a root node (see setRoot).
        if self.filename is None:
            return False
        else:
            return True

    def getFilename(self):
        # The filename lives on the nearest root ancestor.
        if self.filename:
            return self.filename
        elif self.parent:
            return self.parent.getFilename()
        else:
            return None

    def getRealNode(self):
        # For USE references, return the DEF'd node being referenced.
        if self.reference:
            return self.reference
        else:
            return self

    def getSpec(self):
        """Return the node spec (eg 'Transform', 'Shape'), the last token
        of the id tuple, or None."""
        self_real = self.getRealNode()
        try:
            return self_real.id[-1]  # its possible this node has no spec
        except:
            return None

    def findSpecRecursive(self, spec):
        """Depth-first search for a node whose spec matches *spec*.

        NOTE(review): when a match is found below the first level, the
        first-level ancestor (not the deep match) is returned — confirm
        whether that is intended by callers (EXTERNPROTO lookup).
        """
        self_real = self.getRealNode()
        if spec == self_real.getSpec():
            return self

        for child in self_real.children:
            if child.findSpecRecursive(spec):
                return child

        return None

    def getPrefix(self):
        # First token of the id tuple (eg 'DEF', a field name, ...).
        if self.id:
            return self.id[0]
        return None

    def getSpecialTypeName(self, typename):
        """Return the token that follows *typename* in the id tuple
        (eg the name after DEF / PROTO / EXTERNPROTO), or None."""
        self_real = self.getRealNode()
        try:
            return self_real.id[list(self_real.id).index(typename) + 1]
        except:
            return None

    def getDefName(self):
        return self.getSpecialTypeName('DEF')

    def getProtoName(self):
        return self.getSpecialTypeName('PROTO')

    def getExternprotoName(self):
        return self.getSpecialTypeName('EXTERNPROTO')

    def getChildrenBySpec(self, node_spec):  # spec could be Transform, Shape, Appearance
        self_real = self.getRealNode()
        # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
        if type(node_spec) == str:
            return [child for child in self_real.children if child.getSpec() == node_spec]
        else:
            # Check inside a list of optional types
            return [child for child in self_real.children if child.getSpec() in node_spec]

    def getChildrenBySpecCondition(self, cond):  # spec could be Transform, Shape, Appearance
        self_real = self.getRealNode()
        # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
        return [child for child in self_real.children if cond(child.getSpec())]

    def getChildBySpec(self, node_spec):  # spec could be Transform, Shape, Appearance
        # Use in cases where there is only ever 1 child of this type
        ls = self.getChildrenBySpec(node_spec)
        if ls:
            return ls[0]
        else:
            return None

    def getChildBySpecCondition(self, cond):  # spec could be Transform, Shape, Appearance
        # Use in cases where there is only ever 1 child of this type
        ls = self.getChildrenBySpecCondition(cond)
        if ls:
            return ls[0]
        else:
            return None

    def getChildrenByName(self, node_name):  # type could be geometry, children, appearance
        self_real = self.getRealNode()
        return [child for child in self_real.children if child.id if child.id[0] == node_name]

    def getChildByName(self, node_name):
        # First child whose id starts with *node_name*; implicitly returns
        # None when there is no match.
        self_real = self.getRealNode()
        for child in self_real.children:
            if child.id and child.id[0] == node_name:  # and child.id[-1]==node_spec:
                return child

    def getSerialized(self, results, ancestry):
        """ Return this node and all its children in a flat list """
        ancestry = ancestry[:]  # always use a copy

        # self_real = self.getRealNode()

        results.append((self, tuple(ancestry)))
        ancestry.append(self)
        for child in self.getRealNode().children:
            if child not in ancestry:
                # We dont want to load proto's, they are only references
                # We could enforce this elsewhere

                # Only add this in a very special case
                # where the parent of this object is not the real parent
                # - In this case we have added the proto as a child to a node instancing it.
                # This is a bit arbitrary, but its how Proto's are done with this importer.
                if child.getProtoName() is None and child.getExternprotoName() is None:
                    child.getSerialized(results, ancestry)
                else:

                    if DEBUG:
                        print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec())

                    self_spec = self.getSpec()

                    if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec:
                        #if DEBUG:
                        #    "FoundProto!"
                        child.getSerialized(results, ancestry)

        return results

    def searchNodeTypeID(self, node_spec, results):
        """Recursively collect every node whose spec is *node_spec* into
        *results* (also returned)."""
        self_real = self.getRealNode()
        # print(self.lineno, self.id)
        if self_real.id and self_real.id[-1] == node_spec:  # use last element, could also be only element
            results.append(self_real)
        for child in self_real.children:
            child.searchNodeTypeID(node_spec, results)
        return results

    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        """Look up *field* on this node, resolving PROTO 'IS' indirection
        through *ancestry*.

        Returns the field's value tokens (name stripped), a child node when
        AS_CHILD is True, or None when not found.
        NOTE(review): SPLIT_COMMAS is accepted but unused in this body —
        kept for interface compatibility; confirm against callers.
        """
        self_real = self.getRealNode()  # in case we're an instance

        for f in self_real.fields:
            # print(f)
            if f and f[0] == field:
                # print('\tfound field', f)

                if len(f) >= 3 and f[1] == 'IS':  # eg: 'diffuseColor IS legColor'
                    field_id = f[2]

                    # print("\n\n\n\n\n\nFOND IS!!!")
                    f_proto_lookup = None
                    f_proto_child_lookup = None
                    i = len(ancestry)
                    while i:
                        i -= 1
                        node = ancestry[i]
                        node = node.getRealNode()

                        # proto settings are stored in "self.proto_node"
                        if node.proto_node:
                            # Get the default value from the proto, this can be overwritten by the proto instance
                            # 'field SFColor legColor .8 .4 .7'
                            if AS_CHILD:
                                for child in node.proto_node.children:
                                    #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
                                    if child.id and ('point' in child.id or 'points' in child.id):
                                        f_proto_child_lookup = child

                            else:
                                for f_def in node.proto_node.proto_field_defs:
                                    if len(f_def) >= 4:
                                        if f_def[0] == 'field' and f_def[2] == field_id:
                                            f_proto_lookup = f_def[3:]

                        # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
                        # This is the setting as defined by the instance, including this setting is optional,
                        # and will override the default PROTO value
                        # eg: 'legColor 1 0 0'
                        if AS_CHILD:
                            for child in node.children:
                                if child.id and child.id[0] == field_id:
                                    f_proto_child_lookup = child
                        else:
                            for f_def in node.fields:
                                if len(f_def) >= 2:
                                    if f_def[0] == field_id:
                                        if DEBUG:
                                            print("getFieldName(), found proto", f_def)
                                        f_proto_lookup = f_def[1:]

                    if AS_CHILD:
                        if f_proto_child_lookup:
                            if DEBUG:
                                print("getFieldName() - AS_CHILD=True, child found")
                                print(f_proto_child_lookup)
                        return f_proto_child_lookup
                    else:
                        return f_proto_lookup
                else:
                    if AS_CHILD:
                        return None
                    else:
                        # Not using a proto
                        return f[1:]
        # print('\tfield not found', field)

        # See if this is a proto name
        if AS_CHILD:
            for child in self_real.children:
                if child.id and len(child.id) == 1 and child.id[0] == field:
                    return child

        return None

    def getFieldAsInt(self, field, default, ancestry):
        """Return *field* as an int, or *default* when missing/invalid."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if ',' in f:
            f = f[:f.index(',')]  # strip after the comma

        if len(f) != 1:
            print('\t"%s" wrong length for int conversion for field "%s"' % (f, field))
            return default

        try:
            return int(f[0])
        except:
            print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field))
            return default

    def getFieldAsFloat(self, field, default, ancestry):
        """Return *field* as a float, or *default* when missing/invalid."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if ',' in f:
            f = f[:f.index(',')]  # strip after the comma

        if len(f) != 1:
            print('\t"%s" wrong length for float conversion for field "%s"' % (f, field))
            return default

        try:
            return float(f[0])
        except:
            print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field))
            return default

    def getFieldAsFloatTuple(self, field, default, ancestry):
        """Return *field* as a list of floats, or *default*. Stops at the
        first non-float token (possibly a following field on the line)."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        # if ',' in f: f = f[:f.index(',')] # strip after the comma

        if len(f) < 1:
            print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field))
            return default

        ret = []
        for v in f:
            if v != ',':
                try:
                    ret.append(float(v))
                except:
                    break  # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
        # print(ret)

        if ret:
            return ret
        if not ret:
            print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field))
            return default

    def getFieldAsBool(self, field, default, ancestry):
        """Return *field* as a bool, or *default* when missing/invalid."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if ',' in f:
            f = f[:f.index(',')]  # strip after the comma

        if len(f) != 1:
            print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field))
            return default

        if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE':
            return True
        elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE':
            return False
        else:
            # NOTE(review): f has exactly one element here, so f[1] will
            # raise IndexError when this branch is reached — likely meant
            # f[0]; confirm and fix.
            print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field))
            return default

    def getFieldAsString(self, field, default, ancestry):
        """Return *field* as a string (quotes stripped for VRML), or
        *default* when missing/invalid."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if len(f) < 1:
            print('\t"%s" wrong length for string conversion for field "%s"' % (f, field))
            return default

        if len(f) > 1:
            # String may contain spaces
            st = ' '.join(f)
        else:
            st = f[0]

        # X3D HACK
        if self.x3dNode:
            return st

        if st[0] == '"' and st[-1] == '"':
            return st[1:-1]
        else:
            print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field))
            return default

    def getFieldAsArray(self, field, group, ancestry):
        """
        For this parser arrays are children

        Return *field* as a numeric array. *group* re-chunks the flat data
        into sub-lists of that length (0 = flat, -1 = as stored).
        """

        def array_as_number(array_string):
            # Try ints first (base auto-detected), then floats.
            array_data = []
            try:
                array_data = [int(val, 0) for val in array_string]
            except:
                try:
                    array_data = [float(val) for val in array_string]
                except:
                    print('\tWarning, could not parse array data from field')

            return array_data

        self_real = self.getRealNode()  # in case we're an instance

        child_array = self_real.getFieldName(field, ancestry, True, SPLIT_COMMAS=True)

        #if type(child_array)==list: # happens occasionally
        #    array_data = child_array

        if child_array is None:
            # For x3d, should work ok with vrml too
            # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
            data_split = self.getFieldName(field, ancestry, SPLIT_COMMAS=True)
            if not data_split:
                return []

            array_data = array_as_number(data_split)

        elif type(child_array) == list:
            # x3d creates these
            array_data = array_as_number(child_array)
        else:
            # print(child_array)
            # Normal vrml
            array_data = child_array.array_data

        # print('array_data', array_data)
        if group == -1 or len(array_data) == 0:
            return array_data

        # We want a flat list
        flat = True
        for item in array_data:
            if type(item) == list:
                flat = False
                break

        # make a flat array
        if flat:
            flat_array = array_data  # we are already flat.
        else:
            flat_array = []

            def extend_flat(ls):
                for item in ls:
                    if type(item) == list:
                        extend_flat(item)
                    else:
                        flat_array.append(item)

            extend_flat(array_data)

        # We requested a flat array
        if group == 0:
            return flat_array

        new_array = []
        sub_array = []

        for item in flat_array:
            sub_array.append(item)
            if len(sub_array) == group:
                new_array.append(sub_array)
                sub_array = []

        if sub_array:
            print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array)

        return new_array

    def getFieldAsStringArray(self, field, ancestry):
        """
        Get a list of strings
        """
        self_real = self.getRealNode()  # in case we're an instance

        child_array = None
        for child in self_real.children:
            if child.id and len(child.id) == 1 and child.id[0] == field:
                child_array = child
                break
        if not child_array:
            return []

        # each string gets its own list, remove ""'s
        try:
            new_array = [f[0][1:-1] for f in child_array.fields]
        except:
            print('\twarning, string array could not be made')
            new_array = []

        return new_array

    def getLevel(self):
        """Depth of this node in the tree (root = 0), used for repr indent."""
        # Ignore self_real
        level = 0
        p = self.parent
        while p:
            level += 1
            p = p.parent
            if not p:
                break

        return level

    def __repr__(self):
        level = self.getLevel()
        ind = '  ' * level
        if self.node_type == NODE_REFERENCE:
            brackets = ''
        elif self.node_type == NODE_NORMAL:
            brackets = '{}'
        else:
            brackets = '[]'

        if brackets:
            text = ind + brackets[0] + '\n'
        else:
            text = ''

        text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno)

        if self.node_type == NODE_REFERENCE:
            text += ind + "(reference node)\n"
            return text

        if self.proto_node:
            text += ind + 'PROTO NODE...\n'
            text += str(self.proto_node)
            text += ind + 'PROTO NODE_DONE\n'

        text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'

        for i, item in enumerate(self.fields):
            text += ind + 'FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n'

        for i, item in enumerate(self.proto_field_defs):
            text += ind + 'PROTO_FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n'
        #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'

        text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n'
        for i, child in enumerate(self.children):
            text += ind + ('CHILD%d:\n' % i)
            text += str(child)

        text += '\n' + ind + brackets[1]

        return text

    def parse(self, i, IS_PROTO_DATA=False):
        """Parse this node starting at line *i*, then resolve Inline /
        EXTERNPROTO URLs by parsing the referenced file in place.

        Returns the index of the first line after this node.
        """
        new_i = self.__parse(i, IS_PROTO_DATA)

        # print(self.id, self.getFilename())

        # Check if this node was an inline or externproto

        url_ls = []

        if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
            ancestry = []  # Warning! - PROTO's using this wont work at all.
            url = self.getFieldAsString('url', None, ancestry)
            if url:
                url_ls = [(url, None)]
            del ancestry

        elif self.getExternprotoName():
            # externproto
            url_ls = []
            for f in self.fields:

                if type(f) == str:
                    f = [f]

                for ff in f:
                    for f_split in ff.split('"'):
                        # print(f_split)
                        # "someextern.vrml#SomeID"
                        if '#' in f_split:

                            f_split, f_split_id = f_split.split('#')  # there should only be 1 # anyway

                            url_ls.append((f_split, f_split_id))
                        else:
                            url_ls.append((f_split, None))

        # Was either an Inline or an EXTERNPROTO
        if url_ls:

            # print(url_ls)

            for url, extern_key in url_ls:
                print(url)
                # Candidate locations: as given, relative to this file,
                # and by basename next to this file — each also tried
                # case-insensitively via bpy.path.resolve_ncase.
                urls = []
                urls.append(url)
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), url))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url)))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                try:
                    url = [url for url in urls if os.path.exists(url)][0]
                    url_found = True
                except:
                    url_found = False

                if not url_found:
                    print('\tWarning: Inline URL could not be found:', url)
                else:
                    if url == self.getFilename():
                        print('\tWarning: can\'t Inline yourself recursively:', url)
                    else:

                        try:
                            data = gzipOpen(url)
                        except:
                            print('\tWarning: can\'t open the file:', url)
                            data = None

                        if data:
                            # Tricky - inline another VRML
                            print('\tLoading Inline:"%s"...' % url)

                            # Watch it! - backup lines
                            lines_old = lines[:]

                            lines[:] = vrmlFormat(data)

                            lines.insert(0, '{')
                            lines.insert(0, 'root_node____')
                            lines.append('}')
                            # NOTE(review): debug dump, disabled in the
                            # original source:
                            # ff = open('/tmp/test.txt', 'w')
                            # ff.writelines([l + '\n' for l in lines])

                            child = vrmlNode(self, NODE_NORMAL, -1)
                            child.setRoot(url)  # initialized dicts
                            child.parse(0)

                            # if self.getExternprotoName():
                            if self.getExternprotoName():
                                if not extern_key:  # if none is specified - use the name
                                    extern_key = self.getSpec()

                                if extern_key:

                                    self.children.remove(child)
                                    child.parent = None

                                    extern_child = child.findSpecRecursive(extern_key)

                                    if extern_child:
                                        self.children.append(extern_child)
                                        extern_child.parent = self

                                        if DEBUG:
                                            print("\tEXTERNPROTO ID found!:", extern_key)
                                    else:
                                        print("\tEXTERNPROTO ID not found!:", extern_key)

                            # Watch it! - restore lines
                            lines[:] = lines_old

        return new_i

    def __parse(self, i, IS_PROTO_DATA=False):
        """Core recursive-descent parse of the module-global ``lines``,
        starting at index *i*. Returns the index after this node."""
        # NOTE(review): debug output, disabled in the original source:
        # print('parsing at', i, end="")
        # print(i, self.id, self.lineno)

        l = lines[i]

        if l == '[':
            # An anonymous list
            self.id = None
            i += 1
        else:
            words = []

            node_type, new_i = is_nodeline(i, words)
            if not node_type:  # fail for parsing new node.
                print("Failed to parse new node")
                raise ValueError

            if self.node_type == NODE_REFERENCE:
                # Only assign the reference and quit
                key = words[words.index('USE') + 1]
                self.id = (words[0],)

                self.reference = self.getDefDict()[key]
                return new_i

            self.id = tuple(words)

            # fill in DEF/USE
            key = self.getDefName()
            if key is not None:
                self.getDefDict()[key] = self

            key = self.getProtoName()
            if not key:
                key = self.getExternprotoName()

            proto_dict = self.getProtoDict()
            if key is not None:
                proto_dict[key] = self

                # Parse the proto nodes fields
                self.proto_node = vrmlNode(self, NODE_ARRAY, new_i)
                new_i = self.proto_node.parse(new_i)

                self.children.remove(self.proto_node)

                # print(self.proto_node)

                new_i += 1  # skip past the {

            else:  # If we're a proto instance, add the proto node as our child.
                spec = self.getSpec()
                try:
                    self.children.append(proto_dict[spec])
                    #pass
                except:
                    pass

                del spec

            del proto_dict, key

            i = new_i

        # print(self.id)
        ok = True
        while ok:
            if i >= len(lines):
                return len(lines) - 1

            l = lines[i]
            # print('\tDEBUG:', i, self.node_type, l)
            if l == '':
                i += 1
                continue

            if l == '}':
                if self.node_type != NODE_NORMAL:  # also ends proto nodes, we may want a type for these too.
                    print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1
            if l == ']':
                if self.node_type != NODE_ARRAY:
                    print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1

            node_type, new_i = is_nodeline(i, [])
            if node_type:  # check text\n{
                child = vrmlNode(self, node_type, i)
                i = child.parse(i)

            elif l == '[':  # some files have these anonymous lists
                child = vrmlNode(self, NODE_ARRAY, i)
                i = child.parse(i)

            elif is_numline(i):
                l_split = l.split(',')

                values = None
                # See if each item is a float?

                for num_type in (int, float):
                    try:
                        values = [num_type(v) for v in l_split]
                        break
                    except:
                        pass

                    try:
                        values = [[num_type(v) for v in segment.split()] for segment in l_split]
                        break
                    except:
                        pass

                if values is None:  # dont parse
                    values = l_split

                # This should not extend over multiple lines however it is possible
                # print(self.array_data)
                if values:
                    self.array_data.extend(values)
                i += 1
            else:
                words = l.split()
                if len(words) > 2 and words[1] == 'USE':
                    vrmlNode(self, NODE_REFERENCE, i)
                else:

                    # print("FIELD", i, l)

                    #words = l.split()
                    ### print('\t\ttag', i)
                    # this is a tag/
                    # print(words, i, l)
                    value = l
                    # print(i)
                    # javastrips can exist as values.
                    quote_count = l.count('"')
                    if quote_count % 2:  # odd number?
                        # print('MULTILINE')
                        while 1:
                            i += 1
                            l = lines[i]
                            quote_count = l.count('"')
                            if quote_count % 2:  # odd number?
                                value += '\n' + l[:l.rfind('"')]
                                break  # assume
                            else:
                                value += '\n' + l

                    # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
                    value_all = shlex.split(value, posix=False)

                    for value in vrml_split_fields(value_all):
                        # Split

                        if value[0] == 'field':
                            # field SFFloat creaseAngle 4
                            self.proto_field_defs.append(value)
                        else:
                            self.fields.append(value)
                i += 1

    # This is a prerequisite for DEF/USE-based material caching
    def canHaveReferences(self):
        return self.node_type == NODE_NORMAL and self.getDefName()

    # This is a prerequisite for raw XML-based material caching.
    # NOTE - crude, but working implementation for
    # material and texture caching, based on __repr__.
    # Doesn't do any XML, but is better than nothing.
    def desc(self):
        if "material" in self.id or "texture" in self.id:
            node = self.reference if self.node_type == NODE_REFERENCE else self
            return frozenset(line.strip() for line in repr(node).strip().split("\n"))
        else:
            return None
def gzipOpen(path):
    """Read a text file that may optionally be gzip-compressed.

    Tries gzip first; on failure falls back to reading the file as plain
    UTF-8 text (with surrogateescape so arbitrary bytes survive).
    Returns the decoded str contents, or None if the file could not be read.
    """
    import gzip

    data = None
    try:
        # gzip 'r' mode is binary; the bytes are decoded below.
        with gzip.open(path, 'r') as gz:
            data = gz.read()
    except Exception:
        # Not a gzip file (or a broken one) - fall through to plain text.
        pass

    if data is None:
        try:
            # Note: the old 'rU' mode was removed in Python 3.11 and made
            # open() raise ValueError; plain 'r' already uses universal
            # newlines, so behavior is unchanged on older Pythons.
            with open(path, 'r', encoding='utf-8', errors='surrogateescape') as filehandle:
                data = filehandle.read()
        except Exception:
            # Best effort: report and return None rather than raise.
            import traceback
            traceback.print_exc()
    else:
        data = data.decode(encoding='utf-8', errors='surrogateescape')

    return data
def vrml_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (vrmlNode, '') or (None, 'Error String')
    """
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Stripped above
    lines[:] = vrmlFormat(data)

    lines.insert(0, '{')
    lines.insert(0, 'dymmy_node')
    lines.append('}')

    # Use for testing our parsed output, so we can check on line numbers.
    # Guarded under DEBUG: previously this dump ran unconditionally, which
    # fails on systems without a writable /tmp (e.g. Windows).
    if DEBUG:
        ff = open('/tmp/test.txt', 'w')
        ff.writelines([l + '\n' for l in lines])
        ff.close()

    # Now evaluate it
    node_type, new_i = is_nodeline(0, [])
    if not node_type:
        return None, 'Error: VRML file has no starting Node'

    # Trick to make sure we get all root nodes.
    lines.insert(0, '{')
    lines.insert(0, 'root_node____')  # important the name starts with an ascii char
    lines.append('}')

    root = vrmlNode(None, NODE_NORMAL, -1)
    root.setRoot(path)  # we need to set the root so we have a namespace and know the path in case of inlineing

    # Parse recursively
    root.parse(0)

    # This prints a load of text
    if DEBUG:
        print(root)

    return root, ''
1318 # ====================== END VRML
1320 # ====================== X3d Support
1322 # Sane as vrml but replace the parser
class x3dNode(vrmlNode):
    # Same interface as vrmlNode, but backed by an xml.dom.minidom element
    # instead of parsed VRML text; reuses all the vrmlNode utility logic.

    def __init__(self, parent, node_type, x3dNode):
        vrmlNode.__init__(self, parent, node_type, -1)
        # The underlying DOM element for this node.
        self.x3dNode = x3dNode

    def parse(self, IS_PROTO_DATA=False):
        """Recursively wrap the DOM subtree in x3dNode children.

        Registers DEF names in the def-dict and resolves USE references;
        a USE whose target is unknown is removed from its parent with a
        console warning.
        """
        # print(self.x3dNode.tagName)
        # parse_position is injected by the SAX hook installed in x3d_parse().
        self.lineno = self.x3dNode.parse_position[0]

        define = self.x3dNode.getAttributeNode('DEF')
        if define:
            self.getDefDict()[define.value] = self
        else:
            use = self.x3dNode.getAttributeNode('USE')
            if use:
                try:
                    self.reference = self.getDefDict()[use.value]
                    self.node_type = NODE_REFERENCE
                except:
                    print('\tWarning: reference', use.value, 'not found')
                    self.parent.children.remove(self)

                # A USE node has no children of its own.
                return

        for x3dChildNode in self.x3dNode.childNodes:
            # Skip non-element DOM nodes (text, comments, CDATA).
            if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}:
                continue

            node_type = NODE_NORMAL
            # print(x3dChildNode, dir(x3dChildNode))
            if x3dChildNode.getAttributeNode('USE'):
                node_type = NODE_REFERENCE

            child = x3dNode(self, node_type, x3dChildNode)
            child.parse()

        # TODO - x3d Inline

    def getSpec(self):
        return self.x3dNode.tagName  # should match vrml spec

    # Used to retain object identifiers from X3D to Blender
    def getDefName(self):
        node_id = self.x3dNode.getAttributeNode('DEF')
        if node_id:
            return node_id.value
        node_id = self.x3dNode.getAttributeNode('USE')
        if node_id:
            return "USE_" + node_id.value
        return None

    # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
    # getFieldAsArray getFieldAsBool etc
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        # ancestry and AS_CHILD are ignored, only used for VRML now

        self_real = self.getRealNode()  # in case we're an instance
        # NOTE(review): self_real is unused below; the attribute lookup
        # presumably was meant to go through self_real.x3dNode - confirm
        # before changing, as it would alter behavior for USE instances.

        field_xml = self.x3dNode.getAttributeNode(field)
        if field_xml:
            value = field_xml.value

            # We may want to edit. for x3d specific stuff
            # Sucks a bit to return the field name in the list but vrml excepts this :/
            if SPLIT_COMMAS:
                value = value.replace(",", " ")
            return value.split()
        else:
            return None

    # This is a prerequisite for DEF/USE-based material caching
    def canHaveReferences(self):
        return self.x3dNode.getAttributeNode('DEF')

    # Raw-XML cache key; see vrmlNode.desc for the VRML counterpart.
    def desc(self):
        return self.getRealNode().x3dNode.toxml()
def x3d_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (x3dNode, '') or (None, 'Error String')
    """
    import xml.dom.minidom
    import xml.sax
    from xml.sax import handler

    # Do NOT minidom.parse(path) directly here: that would reject
    # gzip-compressed X3D files before the gzip-aware reader gets a chance,
    # and would parse every file twice. Read (and maybe decompress) first.
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Enable line number reporting in the parser - kinda brittle
    def set_content_handler(dom_handler):
        def startElementNS(name, tagName, attrs):
            orig_start_cb(name, tagName, attrs)
            cur_elem = dom_handler.elementStack[-1]
            # Stash the expat position so x3dNode.parse() can report it.
            cur_elem.parse_position = (parser._parser.CurrentLineNumber, parser._parser.CurrentColumnNumber)

        orig_start_cb = dom_handler.startElementNS
        dom_handler.startElementNS = startElementNS
        orig_set_content_handler(dom_handler)

    parser = xml.sax.make_parser()
    orig_set_content_handler = parser.setContentHandler
    # Never fetch external entities (also avoids network access on import).
    parser.setFeature(handler.feature_external_ges, False)
    parser.setFeature(handler.feature_external_pes, False)
    parser.setContentHandler = set_content_handler

    try:
        doc = xml.dom.minidom.parseString(data, parser)
    except Exception:
        return None, 'Could not parse this X3D file, XML error'

    try:
        x3dnode = doc.getElementsByTagName('X3D')[0]
    except:
        return None, 'Not a valid x3d document, cannot import'

    bpy.ops.object.select_all(action='DESELECT')

    root = x3dNode(None, NODE_NORMAL, x3dnode)
    root.setRoot(path)  # so images and Inline's we load have a relative path
    root.parse()

    return root, ''
# Developer test harness - disabled: it ran a `find` shell command via
# os.popen() at import time and fed hard-coded local paths to vrml_parse().
# Uncomment (remove the quotes) to batch-test the VRML parser locally.
'''
## f = open('/_Cylinder.wrl', 'r')
# f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
# vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
#vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')

import os
files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
files.sort()
tot = len(files)
for i, f in enumerate(files):
    #if i < 801:
    #    continue

    f = f.strip()
    print(f, i, tot)
    vrml_parse(f)
'''
1469 # NO BLENDER CODE ABOVE THIS LINE.
1470 # -----------------------------------------------------------------------------------
1471 import bpy
1472 from bpy_extras import image_utils, node_shader_utils
1473 from mathutils import Vector, Matrix, Quaternion
1475 GLOBALS = {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Convert a VRML axis-angle 4-tuple (x, y, z, angle) into a 4x4 rotation matrix."""
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 matrix with the given per-axis scale on the diagonal."""
    mat = Matrix()  # starts as the 4x4 identity
    for axis in range(3):
        mat[axis][axis] = sca[axis]
    return mat
def translateTransform(node, ancestry):
    """Build the local 4x4 matrix for a VRML/X3D Transform node.

    Composes translation, center, rotation, scaleOrientation and scale in
    the order mandated by the VRML97 spec; fields left at their defaults
    (returned as None) contribute nothing.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    cent_mat = Matrix.Translation(cent) if cent else None
    cent_imat = cent_mat.inverted() if cent_mat else None

    rot_mat = translateRotation(rot) if rot else None
    sca_mat = translateScale(sca) if sca else None

    scaori_mat = translateRotation(scaori) if scaori else None
    scaori_imat = scaori_mat.inverted() if scaori_mat else None

    tx_mat = Matrix.Translation(tx) if tx else None

    # T * C * R * SR * S * SR^-1 * C^-1, skipping the missing factors.
    new_mat = Matrix()
    for mtx in (tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Build a 4x4 matrix for a TextureTransform node (a 2D transform, Z used as the rotation axis)."""
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    # cent is at a corner by default
    cent_mat = Matrix.Translation(Vector(cent).to_3d()) if cent else None
    cent_imat = cent_mat.inverted() if cent_mat else None

    rot_mat = Matrix.Rotation(rot, 4, 'Z') if rot else None
    sca_mat = translateScale((sca[0], sca[1], 0.0)) if sca else None
    tx_mat = Matrix.Translation(Vector(tx).to_3d()) if tx else None

    # Composition order as specified in the VRML97 docs.
    new_mat = Matrix()
    for mtx in (cent_imat, sca_mat, rot_mat, cent_mat, tx_mat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate all ancestor Transform matrices (plus the node's own, if it
    is a Transform) on top of mtx, then move the result into world space."""
    transform_nodes = [anc for anc in ancestry if anc.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)
    # Innermost transform must be applied first.
    transform_nodes.reverse()

    if mtx is None:
        mtx = Matrix()

    for node_tx in transform_nodes:
        mtx = translateTransform(node_tx, ancestry) @ mtx

    # worldspace matrix
    return global_matrix @ mtx
1594 # -----------------------------------------------------------------------------------
1595 # Mesh import utilities
1597 # Assumes that the mesh has polygons.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Apply a Color/ColorRGBA child of geom as a vertex-color layer.

    Accepts per-vertex or per-loop color arrays; any other count is
    reported and skipped. Assumes the mesh already has polygons.
    """
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if not colors:
        return

    if colors.getSpec() == 'ColorRGBA':
        rgb = colors.getFieldAsArray('color', 4, ancestry)
    else:
        # Pad RGB to RGBA; array of arrays, no need to flatten here.
        rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

    lcol_layer = bpymesh.vertex_colors.new()

    if len(rgb) == len(bpymesh.vertices):
        # Per-vertex colors: expand to per-loop order, then flatten.
        rgb = tuple(chain(*[rgb[l.vertex_index] for l in bpymesh.loops]))
    elif len(rgb) == len(bpymesh.loops):
        rgb = tuple(chain(*rgb))
    else:
        print(
            "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
            (len(rgb), len(bpymesh.vertices), len(bpymesh.loops))
        )
        return

    lcol_layer.data.foreach_set("color", rgb)
1623 # Assumes that the vertices have not been rearranged compared to the
1624 # source file order # or in the order assumed by the spec (e. g. in
1625 # Elevation, in rows by x).
1626 # Assumes polygons have been set.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Feed a Normal child node into the mesh, per vertex or per polygon.

    Assumes the vertex order matches the source file and polygons are set.
    """
    normals = geom.getChildBySpec('Normal')
    if not normals:
        return

    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    if geom.getFieldAsBool('normalPerVertex', True, ancestry):
        bpymesh.vertices.foreach_set("normal", vectors)
    else:
        bpymesh.polygons.foreach_set("normal", vectors)
1640 # Reads the standard Coordinate object - common for all mesh elements
1641 # Feeds the vertices in the mesh.
1642 # Rearranging the vertex order is a bad idea - other elements
1643 # in X3D might rely on it, if you need to rearrange, please play with
1644 # vertex indices in the polygons instead.
1646 # Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
1647 # brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Read the standard Coordinate child node and fill in the mesh vertices.

    Vertex order is preserved: other X3D elements may index into it.
    """
    # We want points here as a flat array, but the caching logic in
    # IndexedFaceSet presumes a 2D one; the case for caching is stronger there.
    coord = geom.getChildBySpec('Coordinate')
    points = coord.getFieldAsArray('point', 0, ancestry)
    n_verts = len(points) // 3
    bpymesh.vertices.add(n_verts)
    bpymesh.vertices.foreach_set("co", points)
1658 # Assumes that the order of vertices matches the source file.
1659 # Relies upon texture coordinates in the X3D node; if a coordinate generation
1660 # algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
1661 # to be implemented by the geometry handler.
1663 # Texture transform is applied in ProcessObject.
def importMesh_ApplyUVs(bpymesh, geom, ancestry):
    """Apply a TextureCoordinate child node as a new UV layer.

    Relies on vertex order matching the source file; geometries with
    spec-defined UV generation (e.g. ElevationGrid) handle that themselves.
    Texture transform is applied in ProcessObject.
    """
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if not tex_coord:
        return

    uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not uvs:
        return

    layer_data = bpymesh.uv_layers.new().data
    flat_uvs = [component
                for poly in bpymesh.polygons
                for vidx in poly.vertices
                for component in uvs[vidx]]
    layer_data.foreach_set('uv', flat_uvs)
1680 # Common steps for all triangle meshes once the geometry has been set:
1681 # normals, vertex colors, and UVs.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry):
    """Common finishing steps once triangle geometry is set: normals,
    vertex colors, UVs, then validate/update. Returns the mesh."""
    for apply_step in (importMesh_ApplyNormals,
                       importMesh_ApplyColors,
                       importMesh_ApplyUVs):
        apply_step(bpymesh, geom, ancestry)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
1691 # Assumes that the mesh is stored as polygons and loops, and the premade array
1692 # of texture coordinates follows the loop array.
1693 # The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Set a premade flat per-loop UV array as a new UV layer.

    The loops array must be flat and follow the mesh's loop order.
    """
    uv_data = bpymesh.uv_layers.new().data
    uv_data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return the sequence unchanged for counter-clockwise winding, reversed otherwise."""
    if ccw:
        return r
    return r[::-1]
1702 # -----------------------------------------------------------------------------------
1703 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry):
    """Import an IndexedTriangleSet: every 3 entries of 'index' form one triangle."""
    # Ignoring solid; colorPerVertex is always true.
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    bpymesh = bpy.data.meshes.new(name="XXX")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces.
    index = geom.getFieldAsArray('index', 0, ancestry)
    num_polys = len(index) // 3
    if not ccw:
        # Swap the first two vertices of each triangle to flip the winding.
        index = [index[3 * tri + k] for tri in range(num_polys) for k in (1, 0, 2)]

    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
    bpymesh.polygons.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleStripSet(geom, ancestry):
    """Import an IndexedTriangleStripSet; -1 entries in 'index' separate strips."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    # Drop trailing separators so the counting below is exact.
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # A strip of k indices yields k - 2 triangles; each -1 gap costs 3.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # Yields vertex indices, three per triangle. 'odd' alternates the
        # winding within a strip, since consecutive strip triangles flip
        # orientation; it resets at each strip start.
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                # Strip separator: skip past it and restart the winding.
                i += 3
                odd = cw
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleFanSet(geom, ancestry):
    """Import an IndexedTriangleFanSet; -1 entries in 'index' separate fans."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    # Drop trailing separators so the counting below is exact.
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # A fan of k indices yields k - 2 triangles; each -1 gap costs 3.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # i is the absolute position of the current fan's pivot vertex,
        # j the offset of the triangle's second vertex within the fan.
        i = 0
        j = 1
        while True:
            yield index[i]
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                # Next fan starts just past the separator. This was
                # 'i = j + 2', which is only correct while i == 0 (the
                # first fan) and misplaced every fan from the third on;
                # the strip variant correctly uses '+=' for the same step.
                i += j + 2
                j = 1
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleSet(geom, ancestry):
    """Import a TriangleSet: vertices come in groups of three, one group per triangle."""
    # Ignoring solid; colorPerVertex is always true.
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    n = len(bpymesh.vertices)
    num_polys = n // 3
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    if ccw:
        fv = list(range(n))
    else:
        # Swap the first two vertices of each triangle to flip the winding.
        fv = [3 * tri + k for tri in range(n // 3) for k in (1, 0, 2)]
    bpymesh.polygons.foreach_set("vertices", fv)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleStripSet(geom, ancestry):
    """Import a TriangleStripSet; 'stripCount' gives the vertex count of each strip."""
    # Ignoring solid; colorPerVertex is always true.
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # Alternate the winding within each strip, since consecutive strip
        # triangles flip orientation.
        base = 0
        for count in counts:
            for j in range(count - 2):
                yield base + j + (j + cw) % 2
                yield base + j + 1 - (j + cw) % 2
                yield base + j + 2
            base += count
    bpymesh.polygons.foreach_set("vertices", list(triangles()))

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleFanSet(geom, ancestry):
    """Import a TriangleFanSet; 'fanCount' gives the vertex count of each fan."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # Was mistakenly named "TriangleStripSet" (copy-paste from the strip importer).
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # Each fan pivots on its first vertex; cw swaps the other two.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedFaceSet(geom, ancestry):
    """Import an IndexedFaceSet: arbitrary polygons via coordIndex, with
    optional per-vertex/per-face normals, colors and UVs.

    Saw the following structure in X3Ds: the first mesh has a huge set
    of vertices and a reasonably sized index. The rest of the meshes
    reference the Coordinate node from the first one, and have their
    own reasonably sized indices.

    In Blender, to the best of my knowledge, there's no way to reuse
    the vertex set between meshes. So we have culling logic instead -
    for each mesh, only leave vertices that are used for faces.
    """
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            # Cache for later USE references to this Coordinate node.
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Strip trailing face separators.
    while index and index[-1] == -1:
        del index[-1]

    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
        new_index = 0
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
            face = []
        else:
            if cull is not None:
                if not(i in cull):
                    # First time this vertex is used: assign it a new index.
                    culled_points.append(points[i])
                    cull[i] = new_index
                    uncull.append(i)
                    i = new_index
                    new_index += 1
                else:
                    i = cull[i]
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        # Returns, for each face, the list of indices into the respective
        # data array (normals/colors/UVs), honoring culling and winding.
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            # Faces use culled indices; map back to the original ones.
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f
                  for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces)
                  for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f
                   for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture coordinates (UVs)
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
        tex_index = processPerVertexIndex(tex_index)
        loops = [co for f in tex_index
                 for v in f
                 for co in tex_coord_points[v]]
    else:
        # No UVs supplied: generate planar UVs from the two largest
        # bounding-box axes (matching X3DOM's default behavior).
        x_min = y_min = z_min = math.inf
        x_max = y_max = z_max = -math.inf
        for f in faces:
            # Unused vertices don't participate in size; X3DOM does so
            for v in f:
                (x, y, z) = points[v]
                x_min = min(x_min, x)
                x_max = max(x_max, x)
                y_min = min(y_min, y)
                y_max = max(y_max, y)
                z_min = min(z_min, z)
                z_max = max(z_max, z)

        mins = (x_min, y_min, z_min)
        deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
        axes = [0, 1, 2]
        axes.sort(key=lambda a: (-deltas[a], a))
        # Tuple comparison breaks ties
        (s_axis, t_axis) = axes[0:2]
        s_min = mins[s_axis]
        ds = deltas[s_axis]
        t_min = mins[t_axis]
        dt = deltas[t_axis]

        # Avoid divide by zero T76303.
        if not (ds > 0.0):
            ds = 1.0
        if not (dt > 0.0):
            dt = 1.0

        def generatePointCoords(pt):
            return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
        loops = [co for f in faces
                 for v in f
                 for co in generatePointCoords(points[v])]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry):
    """Import an ElevationGrid: an xDimension-by-zDimension heightfield of
    quads, with optional normals, colors and UVs."""
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    co = [w for x in range(x_dim) for z in range(z_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    num_polys = (x_dim - 1) * (z_dim - 1)
    bpymesh.loops.add(num_polys * 4)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
    bpymesh.polygons.foreach_set("loop_total", (4,) * num_polys)
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it was important that the final vertex index was not 0
    # (Blender treated it as a triangle then).
    # So simply reversing the face was not an option.
    # With bmesh polygons, this has no importance anymore, but keep existing code for now.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.polygons.foreach_set("vertices", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
            # Array of arrays; no need to flatten
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.vertex_colors.new().data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * x_dim + x,
                                            z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                                            (z + 1) * x_dim + x + 1,
                                            (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)
                            for c in rgb[rgb_idx]])
        else:  # Coloring per face
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * (x_dim - 1) + x,) * 4
                            for c in rgb[rgb_idx]])

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    else:
        uvs = [(i / (x_dim - 1), j / (z_dim - 1))
               for i in range(x_dim)
               for j in range(z_dim)]

    d = bpymesh.uv_layers.new().data
    # Rather than repeat the face/vertex algorithm from above, we read
    # the vertex index back from polygon. Might be suboptimal.
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    # Fix: was foreach_set('uv', uv) - 'uv' is undefined here (NameError);
    # the flattened array above is 'uvs'.
    d.foreach_set('uv', uvs)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2137 def importMesh_Extrusion(geom, ancestry):
2138 # Interestingly, the spec doesn't allow for vertex/face colors in this
2139 # element, nor for normals.
2140 # Since coloring and normals are not supported here, and also large
2141 # polygons for caps might be required, we shall use from_pydata().
2143 ccw = geom.getFieldAsBool('ccw', True, ancestry)
2144 begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
2145 end_cap = geom.getFieldAsBool('endCap', True, ancestry)
2146 cross = geom.getFieldAsArray('crossSection', 2, ancestry)
2147 if not cross:
2148 cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2149 spine = geom.getFieldAsArray('spine', 3, ancestry)
2150 if not spine:
2151 spine = ((0, 0, 0), (0, 1, 0))
2152 orient = geom.getFieldAsArray('orientation', 4, ancestry)
2153 if orient:
2154 orient = [Quaternion(o[:3], o[3]).to_matrix()
2155 if o[3] else None for o in orient]
2156 scale = geom.getFieldAsArray('scale', 2, ancestry)
2157 if scale:
2158 scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
2159 if s[0] != 1 or s[1] != 1 else None for s in scale]
2161 # Special treatment for the closed spine and cross section.
2162 # Let's save some memory by not creating identical but distinct vertices;
2163 # later we'll introduce conditional logic to link the last vertex with
2164 # the first one where necessary.
2165 cross_closed = cross[0] == cross[-1]
2166 if cross_closed:
2167 cross = cross[:-1]
2168 nc = len(cross)
2169 cross = [Vector((c[0], 0, c[1])) for c in cross]
2170 ncf = nc if cross_closed else nc - 1
2171 # Face count along the cross; for closed cross, it's the same as the
2172 # respective vertex count
2174 spine_closed = spine[0] == spine[-1]
2175 if spine_closed:
2176 spine = spine[:-1]
2177 ns = len(spine)
2178 spine = [Vector(s) for s in spine]
2179 nsf = ns if spine_closed else ns - 1
2181 # This will be used for fallback, where the current spine point joins
2182 # two collinear spine segments. No need to recheck the case of the
2183 # closed spine/last-to-first point juncture; if there's an angle there,
2184 # it would kick in on the first iteration of the main loop by spine.
2185 def findFirstAngleNormal():
2186 for i in range(1, ns - 1):
2187 spt = spine[i]
2188 z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
2189 if z.length > EPSILON:
2190 return z
2191 # All the spines are collinear. Fallback to the rotated source
2192 # XZ plane.
2193 # TODO: handle the situation where the first two spine points match
2194 v = spine[1] - spine[0]
2195 orig_y = Vector((0, 1, 0))
2196 orig_z = Vector((0, 0, 1))
2197 if v.cross(orig_y).length >= EPSILON:
2198 # Spine at angle with global y - rotate the z accordingly
2199 orig_z.rotate(orig_y.rotation_difference(v))
2200 return orig_z
2202 verts = []
2203 z = None
2204 for i, spt in enumerate(spine):
2205 if (i > 0 and i < ns - 1) or spine_closed:
2206 snext = spine[(i + 1) % ns]
2207 sprev = spine[(i - 1 + ns) % ns]
2208 y = snext - sprev
2209 vnext = snext - spt
2210 vprev = sprev - spt
2211 try_z = vnext.cross(vprev)
2212 # Might be zero, then all kinds of fallback
2213 if try_z.length > EPSILON:
2214 if z is not None and try_z.dot(z) < 0:
2215 try_z.negate()
2216 z = try_z
2217 elif not z: # No z, and no previous z.
2218 # Look ahead, see if there's at least one point where
2219 # spines are not collinear.
2220 z = findFirstAngleNormal()
2221 elif i == 0: # And non-crossed
2222 snext = spine[i + 1]
2223 y = snext - spt
2224 z = findFirstAngleNormal()
2225 else: # last point and not crossed
2226 sprev = spine[i - 1]
2227 y = spt - sprev
2228 # If there's more than one point in the spine, z is already set.
2229 # One point in the spline is an error anyway.
2231 x = y.cross(z)
2232 m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
2233 # Columns are the unit vectors for the xz plane for the cross-section
2234 m.normalize()
2235 if orient:
2236 mrot = orient[i] if len(orient) > 1 else orient[0]
2237 if mrot:
2238 m @= mrot # Not sure about this. Counterexample???
2239 if scale:
2240 mscale = scale[i] if len(scale) > 1 else scale[0]
2241 if mscale:
2242 m @= mscale
2243 # First the cross-section 2-vector is scaled,
2244 # then applied to the xz plane unit vectors
2245 for cpt in cross:
2246 verts.append((spt + m @ cpt).to_tuple())
2247 # Could've done this with a single 4x4 matrix... Oh well
2249 # The method from_pydata() treats correctly quads with final vertex
2250 # index being zero.
2251 # So we just flip the vertices if ccw is off.
2253 faces = []
2254 if begin_cap:
2255 faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
2257 # Order of edges in the face: forward along cross, forward along spine,
2258 # backward along cross, backward along spine, flipped if now ccw.
2259 # This order is assumed later in the texture coordinate assignment;
2260 # please don't change without syncing.
2262 faces += [flip((
2263 s * nc + c,
2264 s * nc + (c + 1) % nc,
2265 (s + 1) * nc + (c + 1) % nc,
2266 (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
2268 if spine_closed:
2269 # The faces between the last and the first spine points
2270 b = (ns - 1) * nc
2271 faces += [flip((
2272 b + c,
2273 b + (c + 1) % nc,
2274 (c + 1) % nc,
2275 c), ccw) for c in range(ncf)]
2277 if end_cap:
2278 faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
2280 bpymesh = bpy.data.meshes.new(name="Extrusion")
2281 bpymesh.from_pydata(verts, [], faces)
2283 # The way we deal with textures in triangular meshes doesn't apply.
2284 # The structure of the loop array goes: cap, side, cap
2285 if begin_cap or end_cap: # Need dimensions
2286 x_min = x_max = z_min = z_max = None
2287 for c in cross:
2288 (x, z) = (c.x, c.z)
2289 if x_min is None or x < x_min:
2290 x_min = x
2291 if x_max is None or x > x_max:
2292 x_max = x
2293 if z_min is None or z < z_min:
2294 z_min = z
2295 if z_max is None or z > z_max:
2296 z_max = z
2297 dx = x_max - x_min
2298 dz = z_max - z_min
2299 cap_scale = dz if dz > dx else dx
2301 # Takes an index in the cross array, returns scaled
2302 # texture coords for cap texturing purposes
2303 def scaledLoopVertex(i):
2304 c = cross[i]
2305 return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
2307 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2309 loops = []
2310 mloops = bpymesh.loops
2311 if begin_cap: # vertex indices match the indices in cross
2312 # Rely on the loops in the mesh; don't repeat the face
2313 # generation logic here
2314 loops += [co for i in range(nc)
2315 for co in scaledLoopVertex(mloops[i].vertex_index)]
2317 # Sides
2318 # Same order of vertices as in face generation
2319 # We don't rely on the loops in the mesh; instead,
2320 # we repeat the face generation logic.
2321 loops += [co for s in range(nsf)
2322 for c in range(ncf)
2323 for v in flip(((c / ncf, s / nsf),
2324 ((c + 1) / ncf, s / nsf),
2325 ((c + 1) / ncf, (s + 1) / nsf),
2326 (c / ncf, (s + 1) / nsf)), ccw) for co in v]
2328 if end_cap:
2329 # Base loop index for end cap
2330 lb = ncf * nsf * 4 + (nc if begin_cap else 0)
2331 # Rely on the loops here too.
2332 loops += [co for i in range(nc) for co
2333 in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
2334 importMesh_ApplyTextureToLoops(bpymesh, loops)
2336 bpymesh.validate()
2337 bpymesh.update()
2338 return bpymesh
2341 # -----------------------------------------------------------------------------------
2342 # Line and point sets
def importMesh_LineSet(geom, ancestry):
    """Create a 3D poly curve from an X3D LineSet node, one spline per
    vertexCount entry."""
    # TODO: line display properties are ignored
    # Per-vertex color is ignored
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    # Source points have 3 components; Blender spline points take 4.
    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)
    base = 0
    for count in counts:
        spline = bpycurve.splines.new('POLY')
        spline.points.add(count - 1)  # a fresh spline already has one point
        flat = [comp
                for pt in src_points[base:base + count]
                for comp in (pt[0], pt[1], pt[2], 0)]
        spline.points.foreach_set('co', flat)
        base += count
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry):
    """Create a 3D poly curve from an IndexedLineSet node (VRML and X3D)."""
    # coord = geom.getChildByName('coord') # 'Coordinate'
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Split the flat index stream into polylines at every -1 terminator.
    lines = []
    line = []
    for idx in ils_lines:
        if idx == -1:
            lines.append(line)
            line = []
        else:
            line.append(int(idx))
    lines.append(line)

    # blender doesn't have per vertex color

    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for line in lines:
        if not line:
            continue
        spline = bpycurve.splines.new('POLY')
        spline.points.add(len(line) - 1)  # a fresh spline already has one point
        for idx, pt in zip(line, spline.points):
            pt.co[0:3] = points[idx]

    return bpycurve
def importMesh_PointSet(geom, ancestry):
    """Create a vertex-only mesh from a PointSet node (VRML and X3D)."""
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    # blender doesn't have per vertex color

    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    bpymesh.vertices.foreach_set("co", [comp for pt in points for comp in pt])

    # No need to validate — vertices only, no faces or edges.
    bpymesh.update()
    return bpymesh
2434 # -----------------------------------------------------------------------------------
2435 # Primitives
2436 # SA: they used to use bpy.ops for primitive creation. That was
2437 # unbelievably slow on complex scenes. I rewrote to generate meshes
2438 # by hand.
# Default tessellation level (segments/rings) for the round primitives below
# (Sphere, Cylinder, Cone) when the node carries no 'subdivision' field.
GLOBALS['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry):
    """Create a UV-sphere mesh (rings x segments) from an X3D Sphere node.

    Vertex layout: index 0 is the +y pole, index 1 the -y pole, then the
    non-polar vertices ring by ring, segment by segment. Faces go top
    triangle fan, quad rings, bottom triangle fan; UVs are assigned inline
    because the mixed tri/quad structure defeats a simple foreach_set.
    """
    # solid is ignored.
    # Extra field 'subdivision="n m"' attribute, specifying how many
    # rings and segments to use (X3DOM).
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    # Polygon bookkeeping: ns triangles per cap, quads everywhere else.
    num_poly = ns * nr
    num_tri = ns * 2
    num_quad = num_poly - num_tri
    num_loop = num_quad * 4 + num_tri * 3
    tf = bpymesh.polygons
    tf.add(num_poly)
    bpymesh.loops.add(num_loop)
    bpymesh.polygons.foreach_set("loop_start",
                                 tuple(range(0, ns * 3, 3)) +
                                 tuple(range(ns * 3, num_loop - ns * 3, 4)) +
                                 tuple(range(num_loop - ns * 3, num_loop, 3)))
    bpymesh.polygons.foreach_set("loop_total", (3,) * ns + (4,) * num_quad + (3,) * ns)

    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    tex = bpymesh.uv_layers.new().data

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        for lidx, uv in zip(tf[seg].loop_indices,
                            (((seg + 0.5) / ns, 1),
                             (seg / ns, 1 - 1 / nr),
                             ((seg + 1) / ns, 1 - 1 / nr))):
            tex[lidx].uv = uv
        for lidx, uv in zip(tf[fb + seg].loop_indices,
                            (((seg + 0.5) / ns, 0),
                             ((seg + 1) / ns, 1 / nr),
                             (seg / ns, 1 / nr))):
            tex[lidx].uv = uv

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            for lidx, uv in zip(tf[rfb + seg].loop_indices,
                                ((seg / ns, 1 - (ring + 1) / nr),
                                 (seg / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 1) / nr))):
                tex[lidx].uv = uv

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry):
    """Create a cylinder mesh along the y axis from an X3D Cylinder node.

    The side and each cap can be switched off via the X3D bool fields;
    the extra X3DOM 'subdivision' field sets the face count around.
    solid is ignored; there is no ccw field on this element.
    """
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2
    angle = 2 * pi / n
    half = height / 2

    # The seam is at x=0, z=-r; vertices go ccw -
    # to +x, to -z, to -x, back to -z. Top/bottom vertex pairs interleave.
    verts = []
    for i in range(n):
        x = -radius * sin(angle * i)
        z = -radius * cos(angle * i)
        verts.append((x, half, z))
        verts.append((x, -half, z))

    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces.extend((i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                     for i in range(n - 1))
        faces.append((1, 0, nn - 2, nn - 1))
    if top:
        faces.append(list(range(0, nn, 2)))
    if bottom:
        faces.append(list(range(nn - 1, -1, -2)))

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.

    bpymesh.validate()

    # The structure of the loop array goes: cap, side, cap.
    loops = []
    if side:
        loops.extend(co for i in range(n)
                     for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0))
    if top:
        loops.extend(0.5 + co / 2 for i in range(n)
                     for co in (-sin(angle * i), cos(angle * i)))
    if bottom:
        loops.extend(0.5 - co / 2 for i in range(n - 1, -1, -1)
                     for co in (sin(angle * i), cos(angle * i)))

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Create a cone mesh from an X3D Cone node: apex at y=+height/2,
    base ring at y=-height/2. Solid is ignored.

    Fix: the base-ring vertices were missing their y component (-d),
    yielding 2-tuples that break Mesh.from_pydata(), which expects
    3-component vertex coordinates.
    """
    # Extra parameter subdivision="n" - how many faces to use
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Apex first, then the base ring at y=-d.
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]
    faces = []

    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate()
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]
    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Create a box mesh centered at the origin from an X3D Box node.

    Reads the 'size' field (full extents, default 2x2x2) and builds the
    8 vertices / 6 quads directly, with a fixed per-face UV layout.
    """
    # Solid is ignored
    # No ccw in this element
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    # Halve the extents: vertices sit symmetrically around the origin.
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    # 6 quad faces, 4 loops each.
    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    # Fixed UV unwrap matching the face order above.
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2681 # -----------------------------------------------------------------------------------
2682 # Utilities for importShape
2685 # Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    """Create a Blender Principled material from an X3D Material node.

    Texture is applied later, in appearance_Create().
    All values between 0.0 and 1.0, defaults from VRML docs.
    Returns a PrincipledBSDFWrapper around the new material.

    Fix: the closing paren of the node_tree.links.new(...) call was
    missing, which made the function a syntax error.
    """
    mat_name = mat.getDefName()
    bpymat = bpy.data.materials.new(mat_name if mat_name else vrmlname)
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    # TODO: handle 'ambientIntensity'.
    #ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)

    diff_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
    bpymat_wrap.base_color = diff_color

    emit_color = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat_wrap.emission_color = emit_color

    # NOTE - 'shininess' is being handled as 1 - roughness for now.
    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat_wrap.roughness = 1.0 - shininess

    #bpymat.specular_hardness = int(1 + (510 * shininess))
    # 0-1 -> 1-511
    # TODO: handle 'specularColor'.
    #specular_color = mat.getFieldAsFloatTuple('specularColor',
    #                                          [0.0, 0.0, 0.0], ancestry)

    alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    bpymat_wrap.alpha = alpha
    if alpha < 1.0:
        bpymat.blend_method = "BLEND"
        bpymat.shadow_method = "HASHED"

    # NOTE - leaving this disabled for now
    if False and is_vcol:
        node_vertex_color = bpymat.node_tree.nodes.new("ShaderNodeVertexColor")
        node_vertex_color.location = (-200, 300)

        bpymat.node_tree.links.new(
            bpymat_wrap.node_principled_bsdf.inputs["Base Color"],
            node_vertex_color.outputs["Color"]
        )

    return bpymat_wrap
def appearance_CreateDefaultMaterial():
    """Create a material carrying the X3D default appearance.

    Used for shapes without an explicit Material definition
    (but possibly with a texture). Returns a PrincipledBSDFWrapper.
    """
    bpymat = bpy.data.materials.new("Material")
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    bpymat_wrap.base_color = (0.8, 0.8, 0.8, 1.0)
    bpymat_wrap.roughness = 0.8
    bpymat_wrap.alpha = 1.0
    #bpymat.mirror_color = (0, 0, 0)
    #bpymat.emit = 0

    # TODO: handle 'shininess' and 'specularColor'.
    #bpymat.specular_hardness = 103
    # 0-1 -> 1-511
    #bpymat.specular_color = (0, 0, 0)

    return bpymat_wrap
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each URL in turn; return the first image that loads, else None.

    Improvement: the node's directory is loop-invariant, so compute it
    once instead of on every iteration.
    """
    bpyima = None
    dirname = os.path.dirname(node.getFilename())
    for f in ima_urls:
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            break

    return bpyima
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Resolve an ImageTexture node's 'url' field and load the image.

    Returns the loaded bpy image or None. A quoted multi-URL string
    ('"foo" "bar"') is split into a list of candidates.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
            # in some cases we get a list of images.
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are not swallowed; field parsing failure just means no URL.
            ima_urls = None
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None
    else:
        bpyima = appearance_LoadImageTextureFile(ima_urls, node)

        if not bpyima:
            print("ImportX3D warning: unable to load texture", ima_urls)
        else:
            # KNOWN BUG; PNGs with a transparent color are not perceived
            # as transparent. Need alpha channel.

            if bpyima.depth not in {32, 128}:
                bpyima.alpha_mode = 'NONE'
        return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load (or fetch from cache) the image of an ImageTexture/PixelTexture.

    Two cache layers: USE-based (stored on the node as .parsed) and
    description-based (the module-level texture_cache, keyed by desc).
    """
    # USE-based caching
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    cached = texture_cache.get(desc) if desc else None
    if cached is not None:
        if tex_node.canHaveReferences():
            tex_node.parsed = cached
        return cached

    # No cached texture, load it.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima
        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    # Turn a cached material into the (material, image, has_alpha) triple
    # that importShape_LoadAppearance() returns.
    # NOTE(review): the branch below is deliberately dead ('if 0' never
    # evaluates the rest) — presumably kept from the pre-2.8 texture_slots
    # API for reference; confirm before re-enabling.
    if 0 and bpymat.texture_slots[0] is not None:
        bpyima = bpymat.texture_slots[0].texture.image
        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        return (bpymat, bpyima, tex_has_alpha)

    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (material desc, texture desc) key for material_cache.

    Returns None when either node is present but has no description
    (VRML nodes — no serialization yet), disabling desc-based caching.
    A missing node contributes the placeholder "Default".
    """
    if not material and not tex_node:
        # Even for VRML, we cache the null material
        return ("Default", "Default")

    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    # desc not available (in VRML)
    # TODO: serialize VRML nodes!!!
    desc_missing = ((tex_node and tex_desc is None) or
                    (material and mat_desc is None))
    if desc_missing:
        return None  # Desc-based caching is off
    return (mat_desc, tex_desc)
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Build a Blender material from an X3D appearance.

    Returns (material, image, tex_has_alpha); image is None when there is
    no texture node or loading failed.
    """
    if material:
        wrap = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        wrap = appearance_CreateDefaultMaterial()

    # Texture caching happens inside appearance_LoadTexture().
    bpyima = appearance_LoadTexture(tex_node, ancestry, node) if tex_node else None

    tex_has_alpha = False
    if bpyima:
        repeatS = tex_node.getFieldAsBool('repeatS', True, ancestry)
        repeatT = tex_node.getFieldAsBool('repeatT', True, ancestry)

        wrap.base_color_texture.image = bpyima

        # NOTE - not possible to handle x and y tiling individually.
        extension = "REPEAT" if repeatS or repeatT else "CLIP"
        wrap.base_color_texture.extension = extension

        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        if tex_has_alpha:
            wrap.alpha_texture.image = bpyima
            wrap.alpha_texture.extension = extension

    return (wrap.material, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.

    (Fix: the docstring's opening/closing triple quotes were lost,
    leaving the function body syntactically invalid.)
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Create a Blender image from an X3D PixelTexture node.

    The 'image' field is (width, height, plane_count, pixel, pixel, ...)
    with each pixel packed into one int, most significant byte first.
    plane_count selects grey (1), grey+alpha (2), RGB (3) or RGBA (4).
    """
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Conditional above the loop, for performance
    # Each branch unpacks the packed int into 4 float components (RGBA).
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
3007 # Called from importShape to insert a data object (typically a mesh)
3008 # into the scene
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap a geometry datablock in an object, apply material and texture
    transform, and link the object into the target collection."""
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.auto_smooth_angle = creaseAngle
            bpydata.use_auto_smooth = True

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'
                bpymat.shadow_method = 'HASHED'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]
                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Create a Blender FONT curve from an X3D/VRML Text node.

    Only the FontStyle 'size' field and the 'string' body are used.
    Robustness fix: a Text node without a 'string' field used to crash
    on None.split(); it now yields an empty text body.
    """
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    lines = [] if body is None else [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(lines)
    bpytext.size = size
    return bpytext
3075 # -----------------------------------------------------------------------------------
# Dispatch table mapping X3D/VRML geometry node names to importer functions.
# Used by importShape(); unlisted geometry types trigger a warning there.
# (Fix: the closing brace of the dict literal was lost.)
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
}
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import a Shape node: resolve its appearance and geometry children,
    build the datablock via geometry_importers, and add it to the scene.
    USE'd shapes are instanced by copying the existing blendObject."""
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # Shape was already imported (USE): instance the object.
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    bpyima = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)

    if appr:
        (bpymat, bpyima,
         tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
                                                     ancestry, node,
                                                     is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    bpydata = None
    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is not None:
        bpydata = geom_fn(geom, ancestry)

        # There are no geometry importers that can legally return
        # no object. It's either a bpy object, or an exception
        importShape_ProcessObject(
            bpycollection, vrmlname, bpydata, geom, geom_spec,
            node, bpymat, tex_has_alpha, texmtx,
            ancestry, global_matrix)
    else:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
3160 # -----------------------------------------------------------------------------------
3161 # Lighting
def importLamp_PointLight(node, ancestry):
    """Build a Blender POINT light and its translation matrix from a
    VRML/X3D PointLight node. Returns (light, matrix)."""
    vrmlname = node.getDefName() or 'PointLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # max intensity is documented to be 1.0 but some files have higher.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    mtx = Matrix.Translation(Vector(location))

    return bpylamp, mtx
def importLamp_DirectionalLight(node, ancestry):
    """Build a Blender sun light from a VRML/X3D DirectionalLight node.

    Returns (light-datablock, matrix) where the matrix orients the lamp
    along the node's 'direction'.
    """
    lamp_name = node.getDefName() or 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO

    lamp = bpy.data.lights.new(lamp_name, 'SUN')
    lamp.energy = intensity
    lamp.color = color

    # Blender lamps point down their local -Z axis, with Y up.
    orientation = Vector(direction).to_track_quat('-Z', 'Y')

    return lamp, orientation.to_matrix().to_4x4()
3207 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Build a Blender spot light from a VRML/X3D SpotLight node.

    Returns (light-datablock, matrix) combining the node's 'location'
    and 'direction'.
    """
    lamp_name = node.getDefName() or 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)  # max is documented to be 1.0 but some files have higher.
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0  # max is documented to be 1.0 but some files have higher.
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    lamp = bpy.data.lights.new(lamp_name, 'SPOT')
    lamp.energy = intensity
    lamp.distance = radius
    lamp.color = color
    lamp.spot_size = cutOffAngle

    # Map beamWidth/cutOffAngle onto Blender's 0..1 spot blend.
    if beamWidth > cutOffAngle:
        blend = 0.0
    elif cutOffAngle == 0.0:  # this should never happen!
        blend = 0.5
    else:
        blend = beamWidth / cutOffAngle
    lamp.spot_blend = blend

    # Convert: Blender lamps point down -Z, with Y up.
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return lamp, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Import one light node ('PointLight', 'DirectionalLight' or
    'SpotLight'), link the resulting object into bpycollection and place it.

    Raises ValueError for any other spec.
    """
    importer = {
        'PointLight': importLamp_PointLight,
        'DirectionalLight': importLamp_DirectionalLight,
        'SpotLight': importLamp_SpotLight,
    }.get(spec)

    if importer is None:
        print("Error, not a lamp")
        raise ValueError

    bpylamp, mtx = importer(node, ancestry)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3265 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Create a camera object from a VRML/X3D Viewpoint node and link it
    into bpycollection.
    """
    cam_name = node.getDefName() or 'Viewpoint'

    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)  # max is documented to be 1.0 but some files have higher.
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)  # read but currently unused

    camera = bpy.data.cameras.new(cam_name)
    camera.angle = fieldOfView

    # Place then rotate the camera per the viewpoint fields.
    local_mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    obj = node.blendData = node.blendObject = bpy.data.objects.new(cam_name, camera)
    bpycollection.objects.link(obj)
    obj.select_set(True)
    obj.matrix_world = getFinalMatrix(node, local_mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Create an empty object for a Transform node so that children can be
    parented under it (only used for non-flat imports).
    """
    empty_name = node.getDefName() or 'Transform'

    obj = node.blendData = node.blendObject = bpy.data.objects.new(empty_name, None)
    bpycollection.objects.link(obj)
    obj.select_set(True)

    obj.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # Keep the empties small and unobtrusive in the viewport.
    obj.empty_display_type = 'PLAIN_AXES'
    obj.empty_display_size = 0.2
3307 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the F-Curve on action matching (data_path, array_index),
    creating a new one if it does not exist yet.
    """
    existing = next(
        (fcu for fcu in action.fcurves
         if fcu.data_path == data_path and fcu.array_index == array_index),
        None)
    if existing is not None:
        return existing

    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Convert a VRML/X3D PositionInterpolator node into three location
    F-Curves on `action` with LINEAR interpolation.

    Malformed keys (missing entry or wrong component count) are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    loc_x = action_fcurve_ensure(action, "location", 0)
    loc_y = action_fcurve_ensure(action, "location", 1)
    loc_z = action_fcurve_ensure(action, "location", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        # Narrowed from a bare 'except': only skip keys whose value is
        # missing, not a sequence, or has the wrong number of components.
        except (IndexError, TypeError, ValueError):
            continue

        loc_x.keyframe_points.insert(time, x)
        loc_y.keyframe_points.insert(time, y)
        loc_z.keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear.
    for fcu in (loc_x, loc_y, loc_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Convert a VRML/X3D OrientationInterpolator node (axis + angle keys)
    into three rotation_euler F-Curves on `action` with LINEAR interpolation.

    Malformed keys (missing entry or wrong component count) are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
    rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
    rot_z = action_fcurve_ensure(action, "rotation_euler", 2)

    for i, time in enumerate(key):
        try:
            x, y, z, w = keyValue[i]
        # Narrowed from a bare 'except': only skip keys whose value is
        # missing, not a sequence, or has the wrong number of components.
        except (IndexError, TypeError, ValueError):
            continue

        # axis-angle -> rotation matrix -> euler, per channel.
        mtx = translateRotation((x, y, z, w))
        eul = mtx.to_euler()
        rot_x.keyframe_points.insert(time, eul.x)
        rot_y.keyframe_points.insert(time, eul.y)
        rot_z.keyframe_points.insert(time, eul.z)

    # VRML interpolators are piecewise linear.
    for fcu in (rot_x, rot_y, rot_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
3364 # Untested!
def translateScalarInterpolator(node, action, ancestry):
    """Convert an interpolator routed to 'set_scale' into three scale
    F-Curves on `action`, matching the position/orientation translators.

    NOTE(review): this was marked "Untested!" and was broken two ways:
    keyValue was read in 4-float chunks but unpacked into 3 names (so the
    bare except skipped every single key), and it called
    keyframe_points.new(), which is not a bpy API method (the siblings use
    insert()). Both are fixed below; it also now applies the same LINEAR
    interpolation pass as the other translators.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    # Scale keys are 3-component vectors (was 4, which broke unpacking).
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except (IndexError, TypeError, ValueError):
            continue

        sca_x.keyframe_points.insert(time, x)
        sca_y.keyframe_points.insert(time, y)
        sca_z.keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear.
    for fcu in (sca_x, sca_y, sca_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """

    # XXX25 TODO
    if 1:
        return

    # NOTE(review): everything below is unreachable dead code written against
    # the pre-2.5 Blender API (Blender.IpoCurve); kept as reference for the port.
    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    # cycleInterval, when present, overrides stopTime (stop = start + cycle).
    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment

    Scans the root node's ROUTE fields and translates interpolator nodes
    into F-Curves on per-target actions.
    """

    # Only root/file nodes carry raw 'fields'; skip everything else.
    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # Get (or lazily create) the shared action for one target id.
        try:
            action = routeIpoDict[act_id]
        except:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to each other

ROUTE vpPI.value_changed TO champFly001.set_position
ROUTE vpOI.value_changed TO champFly001.set_orientation
ROUTE vpTs.fraction_changed TO vpPI.set_fraction
ROUTE vpTs.fraction_changed TO vpOI.set_fraction
ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    #value_changed
    set_position_node = None
    set_orientation_node = None
    time_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            # ROUTE <from_id>.<from_type> TO <to_id>.<to_type>
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except:
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                # TimeSensor routes run the other way: the action lives on from_id.
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
def load_web3d(
        bpycontext,
        filepath,
        *,
        PREF_FLAT=False,
        PREF_CIRCLE_DIV=16,
        global_matrix=None,
        HELPER_FUNC=None,
        ):
    """Parse a VRML (.wrl) or X3D (.x3d) file and build the Blender scene.

    bpycontext: Blender context supplying the collection/view layer to fill.
    filepath: path to the file; .x3d goes through the XML parser, anything
        else through the VRML parser.
    PREF_FLAT: when True, skip Transform empties and object parenting.
    PREF_CIRCLE_DIV: tessellation detail used by primitive builders.
    global_matrix: optional root transform applied to every object.
    HELPER_FUNC: optional callback(node, ancestry) that gets first pick at
        every node, so an external script can extend the importer.
    """
    # Used when adding blender primitives
    GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV

    # NOTE - reset material cache
    # (otherwise we might get "StructRNA of type Material has been removed" errors)
    global material_cache
    material_cache = {}

    bpycollection = bpycontext.collection
    #root_node = vrml_parse('/_Cylinder.wrl')
    if filepath.lower().endswith('.x3d'):
        root_node, msg = x3d_parse(filepath)
    else:
        root_node, msg = vrml_parse(filepath)

    if not root_node:
        print(msg)
        return

    if global_matrix is None:
        global_matrix = Matrix()

    # fill with tuples - (node, [parents-parent, parent])
    all_nodes = root_node.getSerialized([], [])

    for node, ancestry in all_nodes:
        #if 'castle.wrl' not in node.getFilename():
        #    continue

        spec = node.getSpec()
        # Disabled: PROTO nodes are not handled yet.
        # prefix = node.getPrefix()
        # if prefix == 'PROTO':
        #     pass
        if HELPER_FUNC and HELPER_FUNC(node, ancestry):
            # Note, include this function so the VRML/X3D importer can be extended
            # by an external script. - gets first pick
            pass
        if spec == 'Shape':
            importShape(bpycollection, node, ancestry, global_matrix)
        elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
            importLamp(bpycollection, node, spec, ancestry, global_matrix)
        elif spec == 'Viewpoint':
            importViewpoint(bpycollection, node, ancestry, global_matrix)
        elif spec == 'Transform':
            # Only use transform nodes when we are not importing a flat object hierarchy
            if not PREF_FLAT:
                importTransform(bpycollection, node, ancestry, global_matrix)
        # Interpolator nodes (PositionInterpolator etc.) are dealt with later
        # within importRoute - they need ROUTEs to know which object they drive.

    # After we import all nodes, route events - anim paths
    for node, ancestry in all_nodes:
        importRoute(node, ancestry)

    for node, ancestry in all_nodes:
        if node.isRoot():
            # we know that all nodes referenced from will be in
            # routeIpoDict so no need to run node.getDefDict() for every node.
            routeIpoDict = node.getRouteIpoDict()
            defDict = node.getDefDict()

            for key, action in routeIpoDict.items():
                # Assign anim curves
                node = defDict[key]
                if node.blendData is None:  # Add an object if we need one for animation
                    node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None)  # , name)
                    bpycollection.objects.link(node.blendObject)
                    # Fix: select the object just created - the previous code
                    # called select_set() on 'bpyob', a name never assigned in
                    # this function (NameError when this branch ran).
                    node.blendObject.select_set(True)

                if node.blendData.animation_data is None:
                    node.blendData.animation_data_create()

                node.blendData.animation_data.action = action

    # Add in hierarchy
    if PREF_FLAT is False:
        child_dict = {}
        for node, ancestry in all_nodes:
            if node.blendObject:
                blendObject = None

                # Get the last (nearest) ancestor that produced an object.
                i = len(ancestry)
                while i:
                    i -= 1
                    blendObject = ancestry[i].blendObject
                    if blendObject:
                        break

                if blendObject:
                    # Group children per parent first; assigning .parent in one
                    # pass below is much faster than per-object makeParent calls.
                    child_dict.setdefault(blendObject, []).append(node.blendObject)

        # Parent
        for parent, children in child_dict.items():
            for c in children:
                c.parent = parent

        # update deps
        bpycontext.view_layer.update()
        del child_dict
def load_with_profiler(
        context,
        filepath,
        *,
        global_matrix=None,
        ):
    """Run load_web3d under cProfile and print the hottest 10% of entries,
    sorted by internal time. Debug helper only.
    """
    import cProfile
    import pstats

    profiler = cProfile.Profile()
    profiler.runctx("load_web3d(context, filepath, PREF_FLAT=True, "
                    "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
                    globals(), locals())

    stats = pstats.Stats(profiler)
    stats.sort_stats("time")
    stats.print_stats(0.1)
    # stats.print_callers(0.1)
def load(context, filepath, *, global_matrix=None):
    """Operator entry point: flat import with the default circle detail.

    Always returns {'FINISHED'} (parse errors are only printed upstream).
    """
    # Swap in load_with_profiler(...) here when profiling the importer.
    load_web3d(
        context,
        filepath,
        PREF_FLAT=True,
        PREF_CIRCLE_DIV=16,
        global_matrix=global_matrix,
    )
    return {'FINISHED'}