# io_scene_x3d/import_x3d.py (from blender-addons.git)
# X3D/VRML2 importer.
1 # SPDX-FileCopyrightText: 2011-2023 Blender Foundation
3 # SPDX-License-Identifier: GPL-2.0-or-later
5 DEBUG = False
7 # This should work without a blender at all
8 import os
9 import shlex
10 import math
11 from math import sin, cos, pi
12 from itertools import chain
14 texture_cache = {}
15 material_cache = {}
17 EPSILON = 0.0000001 # Very crude.
20 def imageConvertCompat(path):
22 if os.sep == '\\':
23 return path # assume win32 has quicktime, dont convert
25 if path.lower().endswith('.gif'):
26 path_to = path[:-3] + 'png'
28 '''
29 if exists(path_to):
30 return path_to
31 '''
32 # print('\n'+path+'\n'+path_to+'\n')
33 os.system('convert "%s" "%s"' % (path, path_to)) # for now just hope we have image magick
35 if os.path.exists(path_to):
36 return path_to
38 return path
# notes
# transforms are relative
# order doesn't matter for loc/size/rot
# right-handed rotation
# angles are in radians
# a rotation first defines the axis, then the amount in radians
48 # =============================== VRML Specific
50 def vrml_split_fields(value):
51 """
52 key 0.0 otherkey 1,2,3 opt1 opt1 0.0
53 -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
54 """
55 def iskey(k):
56 if k[0] != '"' and k[0].isalpha() and k.upper() not in {'TRUE', 'FALSE'}:
57 return True
58 return False
60 field_list = []
61 field_context = []
63 for v in value:
64 if iskey(v):
65 if field_context:
66 field_context_len = len(field_context)
67 if (field_context_len > 2) and (field_context[-2] in {'DEF', 'USE'}):
68 field_context.append(v)
69 elif (not iskey(field_context[-1])) or ((field_context_len == 3 and field_context[1] == 'IS')):
70 # this IS a key but the previous value was not a key, or it was a defined field.
71 field_list.append(field_context)
72 field_context = [v]
73 else:
74 # The last item was not a value, multiple keys are needed in some cases.
75 field_context.append(v)
76 else:
77 # Is empty, just add this on
78 field_context.append(v)
79 else:
80 # Add a value to the list
81 field_context.append(v)
83 if field_context:
84 field_list.append(field_context)
86 return field_list
89 def vrmlFormat(data):
90 """
91 Keep this as a valid vrml file, but format in a way we can predict.
92 """
93 # Strip all comments - # not in strings - warning multiline strings are ignored.
94 def strip_comment(l):
95 #l = ' '.join(l.split())
96 l = l.strip()
98 if l.startswith('#'):
99 return ''
101 i = l.find('#')
103 if i == -1:
104 return l
106 # Most cases accounted for! if we have a comment at the end of the line do this...
107 #j = l.find('url "')
108 j = l.find('"')
110 if j == -1: # simple no strings
111 return l[:i].strip()
113 q = False
114 for i, c in enumerate(l):
115 if c == '"':
116 q = not q # invert
118 elif c == '#':
119 if q is False:
120 return l[:i - 1]
122 return l
124 data = '\n'.join([strip_comment(l) for l in data.split('\n')]) # remove all whitespace
126 EXTRACT_STRINGS = True # only needed when strings or filename contains ,[]{} chars :/
128 if EXTRACT_STRINGS:
130 # We need this so we can detect URL's
131 data = '\n'.join([' '.join(l.split()) for l in data.split('\n')]) # remove all whitespace
133 string_ls = []
135 #search = 'url "'
136 search = '"'
138 ok = True
139 last_i = 0
140 while ok:
141 ok = False
142 i = data.find(search, last_i)
143 if i != -1:
145 start = i + len(search) # first char after end of search
146 end = data.find('"', start)
147 if end != -1:
148 item = data[start:end]
149 string_ls.append(item)
150 data = data[:start] + data[end:]
151 ok = True # keep looking
153 last_i = (end - len(item)) + 1
154 # print(last_i, item, '|' + data[last_i] + '|')
156 # done with messy extracting strings part
158 # Bad, dont take strings into account
160 data = data.replace('#', '\n#')
161 data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')]) # remove all whitespace
163 data = data.replace('{', '\n{\n')
164 data = data.replace('}', '\n}\n')
165 data = data.replace('[', '\n[\n')
166 data = data.replace(']', '\n]\n')
167 data = data.replace(',', ' , ') # make sure comma's separate
169 # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
170 # See T45195 for details.
171 data = '\n'.join([' '.join(value) for l in data.split('\n') for value in vrml_split_fields(l.split())])
173 if EXTRACT_STRINGS:
174 # add strings back in
176 search = '"' # fill in these empty strings
178 ok = True
179 last_i = 0
180 while ok:
181 ok = False
182 i = data.find(search + '"', last_i)
183 # print(i)
184 if i != -1:
185 start = i + len(search) # first char after end of search
186 item = string_ls.pop(0)
187 # print(item)
188 data = data[:start] + item + data[start:]
190 last_i = start + len(item) + 1
192 ok = True
194 # More annoying obscure cases where USE or DEF are placed on a newline
195 # data = data.replace('\nDEF ', ' DEF ')
196 # data = data.replace('\nUSE ', ' USE ')
198 data = '\n'.join([' '.join(l.split()) for l in data.split('\n')]) # remove all whitespace
200 # Better to parse the file accounting for multiline arrays
202 data = data.replace(',\n', ' , ') # remove line endings with commas
203 data = data.replace(']', '\n]\n') # very very annoying - but some comma's are at the end of the list, must run this again.
206 return [l for l in data.split('\n') if l]
208 NODE_NORMAL = 1 # {}
209 NODE_ARRAY = 2 # []
210 NODE_REFERENCE = 3 # USE foobar
211 # NODE_PROTO = 4 #
213 lines = []
216 def getNodePreText(i, words):
217 # print(lines[i])
218 use_node = False
219 while len(words) < 5:
221 if i >= len(lines):
222 break
224 elif lines[i].startswith('PROTO'):
225 return NODE_PROTO, i+1
227 elif lines[i] == '{':
228 # words.append(lines[i]) # no need
229 # print("OK")
230 return NODE_NORMAL, i + 1
231 elif lines[i].count('"') % 2 != 0: # odd number of quotes? - part of a string.
232 # print('ISSTRING')
233 break
234 else:
235 new_words = lines[i].split()
236 if 'USE' in new_words:
237 use_node = True
239 words.extend(new_words)
240 i += 1
242 # Check for USE node - no {
243 # USE #id - should always be on the same line.
244 if use_node:
245 # print('LINE', i, words[:words.index('USE')+2])
246 words[:] = words[:words.index('USE') + 2]
247 if lines[i] == '{' and lines[i + 1] == '}':
248 # USE sometimes has {} after it anyway
249 i += 2
250 return NODE_REFERENCE, i
252 # print("error value!!!", words)
253 return 0, -1
256 def is_nodeline(i, words):
258 if not lines[i][0].isalpha():
259 return 0, 0
261 #if lines[i].startswith('field'):
262 # return 0, 0
264 # Is this a prototype??
265 if lines[i].startswith('PROTO'):
266 words[:] = lines[i].split()
267 return NODE_NORMAL, i + 1 # TODO - assumes the next line is a '[\n', skip that
268 if lines[i].startswith('EXTERNPROTO'):
269 words[:] = lines[i].split()
270 return NODE_ARRAY, i + 1 # TODO - assumes the next line is a '[\n', skip that
273 proto_type, new_i = is_protoline(i, words, proto_field_defs)
274 if new_i != -1:
275 return proto_type, new_i
278 # Simple "var [" type
279 if lines[i + 1] == '[':
280 if lines[i].count('"') % 2 == 0:
281 words[:] = lines[i].split()
282 return NODE_ARRAY, i + 2
284 node_type, new_i = getNodePreText(i, words)
286 if not node_type:
287 if DEBUG:
288 print("not node_type", lines[i])
289 return 0, 0
291 # Ok, we have a { after some values
292 # Check the values are not fields
293 for i, val in enumerate(words):
294 if i != 0 and words[i - 1] in {'DEF', 'USE'}:
295 # ignore anything after DEF, it is a ID and can contain any chars.
296 pass
297 elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}:
298 pass
299 else:
300 # There is a number in one of the values, therefor we are not a node.
301 return 0, 0
303 #if node_type==NODE_REFERENCE:
304 # print(words, "REF_!!!!!!!")
305 return node_type, new_i
308 def is_numline(i):
310 Does this line start with a number?
313 # Works but too slow.
315 l = lines[i]
316 for w in l.split():
317 if w==',':
318 pass
319 else:
320 try:
321 float(w)
322 return True
324 except:
325 return False
327 return False
330 l = lines[i]
332 line_start = 0
334 if l.startswith(', '):
335 line_start += 2
337 line_end = len(l) - 1
338 line_end_new = l.find(' ', line_start) # comma's always have a space before them
340 if line_end_new != -1:
341 line_end = line_end_new
343 try:
344 float(l[line_start:line_end]) # works for a float or int
345 return True
346 except:
347 return False
350 class vrmlNode(object):
351 __slots__ = ('id',
352 'fields',
353 'proto_node',
354 'proto_field_defs',
355 'proto_fields',
356 'node_type',
357 'parent',
358 'children',
359 'parent',
360 'array_data',
361 'reference',
362 'lineno',
363 'filename',
364 'blendObject',
365 'blendData',
366 'DEF_NAMESPACE',
367 'ROUTE_IPO_NAMESPACE',
368 'PROTO_NAMESPACE',
369 'x3dNode',
370 'parsed')
372 def __init__(self, parent, node_type, lineno):
373 self.id = None
374 self.node_type = node_type
375 self.parent = parent
376 self.blendObject = None
377 self.blendData = None
378 self.x3dNode = None # for x3d import only
379 self.parsed = None # We try to reuse objects in a smart way
380 if parent:
381 parent.children.append(self)
383 self.lineno = lineno
385 # This is only set from the root nodes.
386 # Having a filename also denotes a root node
387 self.filename = None
388 self.proto_node = None # proto field definition eg: "field SFColor seatColor .6 .6 .1"
390 # Store in the root node because each inline file needs its own root node and its own namespace
391 self.DEF_NAMESPACE = None
392 self.ROUTE_IPO_NAMESPACE = None
394 self.FIELD_NAMESPACE = None
397 self.PROTO_NAMESPACE = None
399 self.reference = None
401 if node_type == NODE_REFERENCE:
402 # For references, only the parent and ID are needed
403 # the reference its self is assigned on parsing
404 return
406 self.fields = [] # fields have no order, in some cases rool level values are not unique so dont use a dict
408 self.proto_field_defs = [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
409 self.proto_fields = [] # proto field usage "diffuseColor IS seatColor"
410 self.children = []
411 self.array_data = [] # use for arrays of data - should only be for NODE_ARRAY types
413 # Only available from the root node
415 def getFieldDict(self):
416 if self.FIELD_NAMESPACE is not None:
417 return self.FIELD_NAMESPACE
418 else:
419 return self.parent.getFieldDict()
421 def getProtoDict(self):
422 if self.PROTO_NAMESPACE is not None:
423 return self.PROTO_NAMESPACE
424 else:
425 return self.parent.getProtoDict()
427 def getDefDict(self):
428 if self.DEF_NAMESPACE is not None:
429 return self.DEF_NAMESPACE
430 else:
431 return self.parent.getDefDict()
433 def getRouteIpoDict(self):
434 if self.ROUTE_IPO_NAMESPACE is not None:
435 return self.ROUTE_IPO_NAMESPACE
436 else:
437 return self.parent.getRouteIpoDict()
439 def setRoot(self, filename):
440 self.filename = filename
441 # self.FIELD_NAMESPACE = {}
442 self.DEF_NAMESPACE = {}
443 self.ROUTE_IPO_NAMESPACE = {}
444 self.PROTO_NAMESPACE = {}
446 def isRoot(self):
447 if self.filename is None:
448 return False
449 else:
450 return True
452 def getFilename(self):
453 if self.filename:
454 return self.filename
455 elif self.parent:
456 return self.parent.getFilename()
457 else:
458 return None
460 def getRealNode(self):
461 if self.reference:
462 return self.reference
463 else:
464 return self
466 def getSpec(self):
467 self_real = self.getRealNode()
468 try:
469 return self_real.id[-1] # its possible this node has no spec
470 except:
471 return None
473 def findSpecRecursive(self, spec):
474 self_real = self.getRealNode()
475 if spec == self_real.getSpec():
476 return self
478 for child in self_real.children:
479 if child.findSpecRecursive(spec):
480 return child
482 return None
484 def getPrefix(self):
485 if self.id:
486 return self.id[0]
487 return None
489 def getSpecialTypeName(self, typename):
490 self_real = self.getRealNode()
491 try:
492 return self_real.id[list(self_real.id).index(typename) + 1]
493 except:
494 return None
496 def getDefName(self):
497 return self.getSpecialTypeName('DEF')
499 def getProtoName(self):
500 return self.getSpecialTypeName('PROTO')
502 def getExternprotoName(self):
503 return self.getSpecialTypeName('EXTERNPROTO')
505 def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
506 self_real = self.getRealNode()
507 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
508 if type(node_spec) == str:
509 return [child for child in self_real.children if child.getSpec() == node_spec]
510 else:
511 # Check inside a list of optional types
512 return [child for child in self_real.children if child.getSpec() in node_spec]
514 def getChildrenBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
515 self_real = self.getRealNode()
516 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
517 return [child for child in self_real.children if cond(child.getSpec())]
519 def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
520 # Use in cases where there is only ever 1 child of this type
521 ls = self.getChildrenBySpec(node_spec)
522 if ls:
523 return ls[0]
524 else:
525 return None
527 def getChildBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
528 # Use in cases where there is only ever 1 child of this type
529 ls = self.getChildrenBySpecCondition(cond)
530 if ls:
531 return ls[0]
532 else:
533 return None
535 def getChildrenByName(self, node_name): # type could be geometry, children, appearance
536 self_real = self.getRealNode()
537 return [child for child in self_real.children if child.id if child.id[0] == node_name]
539 def getChildByName(self, node_name):
540 self_real = self.getRealNode()
541 for child in self_real.children:
542 if child.id and child.id[0] == node_name: # and child.id[-1]==node_spec:
543 return child
545 def getSerialized(self, results, ancestry):
546 """ Return this node and all its children in a flat list """
547 ancestry = ancestry[:] # always use a copy
549 # self_real = self.getRealNode()
551 results.append((self, tuple(ancestry)))
552 ancestry.append(self)
553 for child in self.getRealNode().children:
554 if child not in ancestry:
555 # We dont want to load proto's, they are only references
556 # We could enforce this elsewhere
558 # Only add this in a very special case
559 # where the parent of this object is not the real parent
560 # - In this case we have added the proto as a child to a node instancing it.
561 # This is a bit arbitrary, but its how Proto's are done with this importer.
562 if child.getProtoName() is None and child.getExternprotoName() is None:
563 child.getSerialized(results, ancestry)
564 else:
566 if DEBUG:
567 print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec())
569 self_spec = self.getSpec()
571 if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec:
572 #if DEBUG:
573 # "FoundProto!"
574 child.getSerialized(results, ancestry)
576 return results
578 def searchNodeTypeID(self, node_spec, results):
579 self_real = self.getRealNode()
580 # print(self.lineno, self.id)
581 if self_real.id and self_real.id[-1] == node_spec: # use last element, could also be only element
582 results.append(self_real)
583 for child in self_real.children:
584 child.searchNodeTypeID(node_spec, results)
585 return results
587 def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
588 self_real = self.getRealNode() # in case we're an instance
590 for f in self_real.fields:
591 # print(f)
592 if f and f[0] == field:
593 # print('\tfound field', f)
595 if len(f) >= 3 and f[1] == 'IS': # eg: 'diffuseColor IS legColor'
596 field_id = f[2]
598 # print("\n\n\n\n\n\nFOND IS!!!")
599 f_proto_lookup = None
600 f_proto_child_lookup = None
601 i = len(ancestry)
602 while i:
603 i -= 1
604 node = ancestry[i]
605 node = node.getRealNode()
607 # proto settings are stored in "self.proto_node"
608 if node.proto_node:
609 # Get the default value from the proto, this can be overwritten by the proto instance
610 # 'field SFColor legColor .8 .4 .7'
611 if AS_CHILD:
612 for child in node.proto_node.children:
613 #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
614 if child.id and ('point' in child.id or 'points' in child.id):
615 f_proto_child_lookup = child
617 else:
618 for f_def in node.proto_node.proto_field_defs:
619 if len(f_def) >= 4:
620 if f_def[0] == 'field' and f_def[2] == field_id:
621 f_proto_lookup = f_def[3:]
623 # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
624 # This is the setting as defined by the instance, including this setting is optional,
625 # and will override the default PROTO value
626 # eg: 'legColor 1 0 0'
627 if AS_CHILD:
628 for child in node.children:
629 if child.id and child.id[0] == field_id:
630 f_proto_child_lookup = child
631 else:
632 for f_def in node.fields:
633 if len(f_def) >= 2:
634 if f_def[0] == field_id:
635 if DEBUG:
636 print("getFieldName(), found proto", f_def)
637 f_proto_lookup = f_def[1:]
639 if AS_CHILD:
640 if f_proto_child_lookup:
641 if DEBUG:
642 print("getFieldName() - AS_CHILD=True, child found")
643 print(f_proto_child_lookup)
644 return f_proto_child_lookup
645 else:
646 return f_proto_lookup
647 else:
648 if AS_CHILD:
649 return None
650 else:
651 # Not using a proto
652 return f[1:]
653 # print('\tfield not found', field)
655 # See if this is a proto name
656 if AS_CHILD:
657 for child in self_real.children:
658 if child.id and len(child.id) == 1 and child.id[0] == field:
659 return child
661 return None
663 def getFieldAsInt(self, field, default, ancestry):
664 self_real = self.getRealNode() # in case we're an instance
666 f = self_real.getFieldName(field, ancestry)
667 if f is None:
668 return default
669 if ',' in f:
670 f = f[:f.index(',')] # strip after the comma
672 if len(f) != 1:
673 print('\t"%s" wrong length for int conversion for field "%s"' % (f, field))
674 return default
676 try:
677 return int(f[0])
678 except:
679 print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field))
680 return default
682 def getFieldAsFloat(self, field, default, ancestry):
683 self_real = self.getRealNode() # in case we're an instance
685 f = self_real.getFieldName(field, ancestry)
686 if f is None:
687 return default
688 if ',' in f:
689 f = f[:f.index(',')] # strip after the comma
691 if len(f) != 1:
692 print('\t"%s" wrong length for float conversion for field "%s"' % (f, field))
693 return default
695 try:
696 return float(f[0])
697 except:
698 print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field))
699 return default
701 def getFieldAsFloatTuple(self, field, default, ancestry):
702 self_real = self.getRealNode() # in case we're an instance
704 f = self_real.getFieldName(field, ancestry)
705 if f is None:
706 return default
707 # if ',' in f: f = f[:f.index(',')] # strip after the comma
709 if len(f) < 1:
710 print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field))
711 return default
713 ret = []
714 for v in f:
715 if v != ',':
716 try:
717 ret.append(float(v))
718 except:
719 break # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
720 # print(ret)
722 if ret:
723 return ret
724 if not ret:
725 print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field))
726 return default
728 def getFieldAsBool(self, field, default, ancestry):
729 self_real = self.getRealNode() # in case we're an instance
731 f = self_real.getFieldName(field, ancestry)
732 if f is None:
733 return default
734 if ',' in f:
735 f = f[:f.index(',')] # strip after the comma
737 if len(f) != 1:
738 print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field))
739 return default
741 if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE':
742 return True
743 elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE':
744 return False
745 else:
746 print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field))
747 return default
749 def getFieldAsString(self, field, default, ancestry):
750 self_real = self.getRealNode() # in case we're an instance
752 f = self_real.getFieldName(field, ancestry)
753 if f is None:
754 return default
755 if len(f) < 1:
756 print('\t"%s" wrong length for string conversion for field "%s"' % (f, field))
757 return default
759 if len(f) > 1:
760 # String may contain spaces
761 st = ' '.join(f)
762 else:
763 st = f[0]
765 # X3D HACK
766 if self.x3dNode:
767 return st
769 if st[0] == '"' and st[-1] == '"':
770 return st[1:-1]
771 else:
772 print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field))
773 return default
775 def getFieldAsArray(self, field, group, ancestry):
777 For this parser arrays are children
780 def array_as_number(array_string):
781 array_data = []
782 try:
783 array_data = [int(val, 0) for val in array_string]
784 except:
785 try:
786 array_data = [float(val) for val in array_string]
787 except:
788 print('\tWarning, could not parse array data from field')
790 return array_data
792 self_real = self.getRealNode() # in case we're an instance
794 child_array = self_real.getFieldName(field, ancestry, True, SPLIT_COMMAS=True)
796 #if type(child_array)==list: # happens occasionally
797 # array_data = child_array
799 if child_array is None:
800 # For x3d, should work ok with vrml too
801 # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
802 data_split = self.getFieldName(field, ancestry, SPLIT_COMMAS=True)
803 if not data_split:
804 return []
806 array_data = array_as_number(data_split)
808 elif type(child_array) == list:
809 # x3d creates these
810 array_data = array_as_number(child_array)
811 else:
812 # print(child_array)
813 # Normal vrml
814 array_data = child_array.array_data
816 # print('array_data', array_data)
817 if group == -1 or len(array_data) == 0:
818 return array_data
820 # We want a flat list
821 flat = True
822 for item in array_data:
823 if type(item) == list:
824 flat = False
825 break
827 # make a flat array
828 if flat:
829 flat_array = array_data # we are already flat.
830 else:
831 flat_array = []
833 def extend_flat(ls):
834 for item in ls:
835 if type(item) == list:
836 extend_flat(item)
837 else:
838 flat_array.append(item)
840 extend_flat(array_data)
842 # We requested a flat array
843 if group == 0:
844 return flat_array
846 new_array = []
847 sub_array = []
849 for item in flat_array:
850 sub_array.append(item)
851 if len(sub_array) == group:
852 new_array.append(sub_array)
853 sub_array = []
855 if sub_array:
856 print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array)
858 return new_array
860 def getFieldAsStringArray(self, field, ancestry):
862 Get a list of strings
864 self_real = self.getRealNode() # in case we're an instance
866 child_array = None
867 for child in self_real.children:
868 if child.id and len(child.id) == 1 and child.id[0] == field:
869 child_array = child
870 break
871 if not child_array:
872 return []
874 # each string gets its own list, remove ""'s
875 try:
876 new_array = [f[0][1:-1] for f in child_array.fields]
877 except:
878 print('\twarning, string array could not be made')
879 new_array = []
881 return new_array
883 def getLevel(self):
884 # Ignore self_real
885 level = 0
886 p = self.parent
887 while p:
888 level += 1
889 p = p.parent
890 if not p:
891 break
893 return level
895 def __repr__(self):
896 level = self.getLevel()
897 ind = ' ' * level
898 if self.node_type == NODE_REFERENCE:
899 brackets = ''
900 elif self.node_type == NODE_NORMAL:
901 brackets = '{}'
902 else:
903 brackets = '[]'
905 if brackets:
906 text = ind + brackets[0] + '\n'
907 else:
908 text = ''
910 text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno)
912 if self.node_type == NODE_REFERENCE:
913 text += ind + "(reference node)\n"
914 return text
916 if self.proto_node:
917 text += ind + 'PROTO NODE...\n'
918 text += str(self.proto_node)
919 text += ind + 'PROTO NODE_DONE\n'
921 text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'
923 for i, item in enumerate(self.fields):
924 text += ind + 'FIELD:\n'
925 text += ind + str(item) + '\n'
927 text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n'
929 for i, item in enumerate(self.proto_field_defs):
930 text += ind + 'PROTO_FIELD:\n'
931 text += ind + str(item) + '\n'
933 text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n'
934 #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'
936 text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n'
937 for i, child in enumerate(self.children):
938 text += ind + ('CHILD%d:\n' % i)
939 text += str(child)
941 text += '\n' + ind + brackets[1]
943 return text
945 def parse(self, i, IS_PROTO_DATA=False):
946 new_i = self.__parse(i, IS_PROTO_DATA)
948 # print(self.id, self.getFilename())
950 # Check if this node was an inline or externproto
952 url_ls = []
954 if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
955 ancestry = [] # Warning! - PROTO's using this wont work at all.
956 url = self.getFieldAsString('url', None, ancestry)
957 if url:
958 url_ls = [(url, None)]
959 del ancestry
961 elif self.getExternprotoName():
962 # externproto
963 url_ls = []
964 for f in self.fields:
966 if type(f) == str:
967 f = [f]
969 for ff in f:
970 for f_split in ff.split('"'):
971 # print(f_split)
972 # "someextern.vrml#SomeID"
973 if '#' in f_split:
975 f_split, f_split_id = f_split.split('#') # there should only be 1 # anyway
977 url_ls.append((f_split, f_split_id))
978 else:
979 url_ls.append((f_split, None))
981 # Was either an Inline or an EXTERNPROTO
982 if url_ls:
984 # print(url_ls)
986 for url, extern_key in url_ls:
987 print(url)
988 urls = []
989 urls.append(url)
990 urls.append(bpy.path.resolve_ncase(urls[-1]))
992 urls.append(os.path.join(os.path.dirname(self.getFilename()), url))
993 urls.append(bpy.path.resolve_ncase(urls[-1]))
995 urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url)))
996 urls.append(bpy.path.resolve_ncase(urls[-1]))
998 try:
999 url = [url for url in urls if os.path.exists(url)][0]
1000 url_found = True
1001 except:
1002 url_found = False
1004 if not url_found:
1005 print('\tWarning: Inline URL could not be found:', url)
1006 else:
1007 if url == self.getFilename():
1008 print('\tWarning: can\'t Inline yourself recursively:', url)
1009 else:
1011 try:
1012 data = gzipOpen(url)
1013 except:
1014 print('\tWarning: can\'t open the file:', url)
1015 data = None
1017 if data:
1018 # Tricky - inline another VRML
1019 print('\tLoading Inline:"%s"...' % url)
1021 # Watch it! - backup lines
1022 lines_old = lines[:]
1024 lines[:] = vrmlFormat(data)
1026 lines.insert(0, '{')
1027 lines.insert(0, 'root_node____')
1028 lines.append('}')
1030 ff = open('/tmp/test.txt', 'w')
1031 ff.writelines([l+'\n' for l in lines])
1034 child = vrmlNode(self, NODE_NORMAL, -1)
1035 child.setRoot(url) # initialized dicts
1036 child.parse(0)
1038 # if self.getExternprotoName():
1039 if self.getExternprotoName():
1040 if not extern_key: # if none is specified - use the name
1041 extern_key = self.getSpec()
1043 if extern_key:
1045 self.children.remove(child)
1046 child.parent = None
1048 extern_child = child.findSpecRecursive(extern_key)
1050 if extern_child:
1051 self.children.append(extern_child)
1052 extern_child.parent = self
1054 if DEBUG:
1055 print("\tEXTERNPROTO ID found!:", extern_key)
1056 else:
1057 print("\tEXTERNPROTO ID not found!:", extern_key)
1059 # Watch it! - restore lines
1060 lines[:] = lines_old
1062 return new_i
1064 def __parse(self, i, IS_PROTO_DATA=False):
1066 print('parsing at', i, end="")
1067 print(i, self.id, self.lineno)
1069 l = lines[i]
1071 if l == '[':
1072 # An anonymous list
1073 self.id = None
1074 i += 1
1075 else:
1076 words = []
1078 node_type, new_i = is_nodeline(i, words)
1079 if not node_type: # fail for parsing new node.
1080 print("Failed to parse new node")
1081 raise ValueError
1083 if self.node_type == NODE_REFERENCE:
1084 # Only assign the reference and quit
1085 key = words[words.index('USE') + 1]
1086 self.id = (words[0],)
1088 self.reference = self.getDefDict()[key]
1089 return new_i
1091 self.id = tuple(words)
1093 # fill in DEF/USE
1094 key = self.getDefName()
1095 if key is not None:
1096 self.getDefDict()[key] = self
1098 key = self.getProtoName()
1099 if not key:
1100 key = self.getExternprotoName()
1102 proto_dict = self.getProtoDict()
1103 if key is not None:
1104 proto_dict[key] = self
1106 # Parse the proto nodes fields
1107 self.proto_node = vrmlNode(self, NODE_ARRAY, new_i)
1108 new_i = self.proto_node.parse(new_i)
1110 self.children.remove(self.proto_node)
1112 # print(self.proto_node)
1114 new_i += 1 # skip past the {
1116 else: # If we're a proto instance, add the proto node as our child.
1117 spec = self.getSpec()
1118 try:
1119 self.children.append(proto_dict[spec])
1120 #pass
1121 except:
1122 pass
1124 del spec
1126 del proto_dict, key
1128 i = new_i
1130 # print(self.id)
1131 ok = True
1132 while ok:
1133 if i >= len(lines):
1134 return len(lines) - 1
1136 l = lines[i]
1137 # print('\tDEBUG:', i, self.node_type, l)
1138 if l == '':
1139 i += 1
1140 continue
1142 if l == '}':
1143 if self.node_type != NODE_NORMAL: # also ends proto nodes, we may want a type for these too.
1144 print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type))
1145 if DEBUG:
1146 raise ValueError
1147 ### print("returning", i)
1148 return i + 1
1149 if l == ']':
1150 if self.node_type != NODE_ARRAY:
1151 print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type))
1152 if DEBUG:
1153 raise ValueError
1154 ### print("returning", i)
1155 return i + 1
1157 node_type, new_i = is_nodeline(i, [])
1158 if node_type: # check text\n{
1159 child = vrmlNode(self, node_type, i)
1160 i = child.parse(i)
1162 elif l == '[': # some files have these anonymous lists
1163 child = vrmlNode(self, NODE_ARRAY, i)
1164 i = child.parse(i)
1166 elif is_numline(i):
1167 l_split = l.split(',')
1169 values = None
1170 # See if each item is a float?
1172 for num_type in (int, float):
1173 try:
1174 values = [num_type(v) for v in l_split]
1175 break
1176 except:
1177 pass
1179 try:
1180 values = [[num_type(v) for v in segment.split()] for segment in l_split]
1181 break
1182 except:
1183 pass
1185 if values is None: # dont parse
1186 values = l_split
1188 # This should not extend over multiple lines however it is possible
1189 # print(self.array_data)
1190 if values:
1191 self.array_data.extend(values)
1192 i += 1
1193 else:
1194 words = l.split()
1195 if len(words) > 2 and words[1] == 'USE':
1196 vrmlNode(self, NODE_REFERENCE, i)
1197 else:
1199 # print("FIELD", i, l)
1201 #words = l.split()
1202 ### print('\t\ttag', i)
1203 # this is a tag/
1204 # print(words, i, l)
1205 value = l
1206 # print(i)
1207 # javastrips can exist as values.
1208 quote_count = l.count('"')
1209 if quote_count % 2: # odd number?
1210 # print('MULTILINE')
1211 while 1:
1212 i += 1
1213 l = lines[i]
1214 quote_count = l.count('"')
1215 if quote_count % 2: # odd number?
1216 value += '\n' + l[:l.rfind('"')]
1217 break # assume
1218 else:
1219 value += '\n' + l
1221 # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
1222 value_all = shlex.split(value, posix=False)
1224 for value in vrml_split_fields(value_all):
1225 # Split
1227 if value[0] == 'field':
1228 # field SFFloat creaseAngle 4
1229 self.proto_field_defs.append(value)
1230 else:
1231 self.fields.append(value)
1232 i += 1
1234 # This is a prerequisite for DEF/USE-based material caching
1235 def canHaveReferences(self):
1236 return self.node_type == NODE_NORMAL and self.getDefName()
1238 # This is a prerequisite for raw XML-based material caching.
1239 # NOTE - crude, but working implementation for
1240 # material and texture caching, based on __repr__.
1241 # Doesn't do any XML, but is better than nothing.
1242 def desc(self):
1243 if "material" in self.id or "texture" in self.id:
1244 node = self.reference if self.node_type == NODE_REFERENCE else self
1245 return frozenset(line.strip() for line in repr(node).strip().split("\n"))
1246 else:
1247 return None
def gzipOpen(path):
    """Read a (possibly gzip-compressed) text file.

    Tries to read *path* as gzip first; on failure falls back to a plain
    text read.  Returns the contents as a str decoded as UTF-8 (with
    surrogateescape so arbitrary bytes survive a round-trip), or None
    when the file could not be read at all.
    """
    import gzip

    data = None
    try:
        # 'rb' yields raw bytes; decoding happens below so both the gzip
        # and plain-text paths share the same decode settings.
        with gzip.open(path, 'rb') as gz:
            data = gz.read()
    except (OSError, EOFError):
        # Not a gzip file (or truncated) - fall back to a plain read.
        pass

    if data is None:
        try:
            with open(path, 'r', encoding='utf-8', errors='surrogateescape') as fh:
                data = fh.read()
        except Exception:
            # Best effort: report and return None rather than raising.
            import traceback
            traceback.print_exc()
    else:
        data = data.decode(encoding='utf-8', errors='surrogateescape')

    return data
def vrml_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (vrmlNode, '') or (None, 'Error String')
    """
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Stripped above
    lines[:] = vrmlFormat(data)

    lines.insert(0, '{')
    lines.insert(0, 'dymmy_node')
    lines.append('}')

    # Use for testing our parsed output, so we can check on line numbers.
    # BUGFIX: was an unconditional, never-closed open() - now guarded by
    # DEBUG and closed via the context manager.
    if DEBUG:
        with open('/tmp/test.txt', 'w') as ff:
            ff.writelines([l + '\n' for l in lines])

    # Now evaluate it
    node_type, new_i = is_nodeline(0, [])
    if not node_type:
        return None, 'Error: VRML file has no starting Node'

    # Trick to make sure we get all root nodes.
    lines.insert(0, '{')
    lines.insert(0, 'root_node____')  # important the name starts with an ascii char
    lines.append('}')

    root = vrmlNode(None, NODE_NORMAL, -1)
    root.setRoot(path)  # we need to set the root so we have a namespace and know the path in case of inlineing

    # Parse recursively
    root.parse(0)

    # This prints a load of text
    if DEBUG:
        print(root)

    return root, ''
1320 # ====================== END VRML
1322 # ====================== X3d Support
# Sane as vrml but replace the parser
class x3dNode(vrmlNode):
    """vrmlNode subclass backed by an xml.dom.minidom element.

    Reuses the vrmlNode utility accessors (getFieldAsArray, getFieldAsBool,
    ...) by overriding only the low-level field lookup to read XML
    attributes instead of parsed VRML tokens.
    """

    def __init__(self, parent, node_type, x3dNode):
        vrmlNode.__init__(self, parent, node_type, -1)
        # The wrapped DOM element.
        self.x3dNode = x3dNode

    def parse(self, IS_PROTO_DATA=False):
        """Register DEF/USE names and recursively wrap child elements."""
        # parse_position is injected by the SAX hook set up in x3d_parse().
        self.lineno = self.x3dNode.parse_position[0]

        define = self.x3dNode.getAttributeNode('DEF')
        if define:
            self.getDefDict()[define.value] = self
        else:
            use = self.x3dNode.getAttributeNode('USE')
            if use:
                try:
                    self.reference = self.getDefDict()[use.value]
                    self.node_type = NODE_REFERENCE
                except KeyError:  # no DEF with that name was seen
                    print('\tWarning: reference', use.value, 'not found')
                    self.parent.children.remove(self)

                # USE nodes have no children of their own.
                return

        for x3dChildNode in self.x3dNode.childNodes:
            if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}:
                continue

            node_type = NODE_NORMAL
            if x3dChildNode.getAttributeNode('USE'):
                node_type = NODE_REFERENCE

            child = x3dNode(self, node_type, x3dChildNode)
            child.parse()

        # TODO - x3d Inline

    def getSpec(self):
        return self.x3dNode.tagName  # should match vrml spec

    # Used to retain object identifiers from X3D to Blender
    def getDefName(self):
        node_id = self.x3dNode.getAttributeNode('DEF')
        if node_id:
            return node_id.value
        node_id = self.x3dNode.getAttributeNode('USE')
        if node_id:
            return "USE_" + node_id.value
        return None

    # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
    # getFieldAsArray getFieldAsBool etc
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        # ancestry and AS_CHILD are ignored, only used for VRML now

        self_real = self.getRealNode()  # in case we're an instance
        # BUGFIX: look the attribute up on the real (DEF) node - previously
        # self_real was computed but never used, so fields of USE'd nodes
        # could not be resolved (the USE element carries no field attrs).
        field_xml = self_real.x3dNode.getAttributeNode(field)
        if field_xml:
            value = field_xml.value

            # We may want to edit. for x3d specific stuff
            # Sucks a bit to return the field name in the list but vrml excepts this :/
            if SPLIT_COMMAS:
                value = value.replace(",", " ")
            return value.split()
        else:
            return None

    def canHaveReferences(self):
        return self.x3dNode.getAttributeNode('DEF')

    def desc(self):
        return self.getRealNode().x3dNode.toxml()
def x3d_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (x3dNode, '') or (None, 'Error String')
    """
    import xml.dom.minidom
    import xml.sax
    from xml.sax import handler

    # Disabled double parse kept for reference:
    # try:    doc = xml.dom.minidom.parse(path)
    # except: return None, 'Could not parse this X3D file, XML error'

    # Could add a try/except here, but a console error is more useful.
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Enable line number reporting in the parser - kinda brittle
    def set_content_handler(dom_handler):
        def startElementNS(name, tagName, attrs):
            orig_start_cb(name, tagName, attrs)
            cur_elem = dom_handler.elementStack[-1]
            cur_elem.parse_position = (parser._parser.CurrentLineNumber, parser._parser.CurrentColumnNumber)

        orig_start_cb = dom_handler.startElementNS
        dom_handler.startElementNS = startElementNS
        orig_set_content_handler(dom_handler)

    parser = xml.sax.make_parser()
    orig_set_content_handler = parser.setContentHandler
    # Disable external entity resolution (safer, and faster offline).
    parser.setFeature(handler.feature_external_ges, False)
    parser.setFeature(handler.feature_external_pes, False)
    parser.setContentHandler = set_content_handler

    doc = xml.dom.minidom.parseString(data, parser)

    try:
        x3dnode = doc.getElementsByTagName('X3D')[0]
    except IndexError:
        return None, 'Not a valid x3d document, cannot import'

    # NOTE(review): bpy is imported at module level further down the file,
    # so this works at call time - but it does sit above the
    # "NO BLENDER CODE ABOVE THIS LINE" marker; confirm placement.
    bpy.ops.object.select_all(action='DESELECT')

    root = x3dNode(None, NODE_NORMAL, x3dnode)
    root.setRoot(path)  # so images and Inline's we load have a relative path
    root.parse()

    return root, ''
1453 ## f = open('/_Cylinder.wrl', 'r')
1454 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1455 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1456 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
1458 import os
1459 files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
1460 files.sort()
1461 tot = len(files)
1462 for i, f in enumerate(files):
1463 #if i < 801:
1464 # continue
1466 f = f.strip()
1467 print(f, i, tot)
1468 vrml_parse(f)
1471 # NO BLENDER CODE ABOVE THIS LINE.
1472 # -----------------------------------------------------------------------------------
1473 import bpy
1474 from bpy_extras import image_utils, node_shader_utils
1475 from mathutils import Vector, Matrix, Quaternion
# Module-wide importer settings; CIRCLE_DETAIL is the tessellation level
# used when generating circular geometry.
GLOBALS = {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Convert a VRML (axis-x, axis-y, axis-z, angle) rotation to a 4x4 matrix."""
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 scale matrix from a 3-component scale tuple."""
    mat = Matrix()  # identity 4x4
    for axis in range(3):
        mat[axis][axis] = sca[axis]
    return mat
def translateTransform(node, ancestry):
    """Build the local matrix for a VRML/X3D Transform node.

    Composition order follows the VRML97 spec:
    T * C * R * SR * S * -SR * -C (absent components act as identity).
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    cent_mat = cent_imat = None
    if cent:
        cent_mat = Matrix.Translation(cent)
        cent_imat = cent_mat.inverted()

    rot_mat = translateRotation(rot) if rot else None

    sca_mat = translateScale(sca) if sca else None

    scaori_mat = scaori_imat = None
    if scaori:
        scaori_mat = translateRotation(scaori)
        scaori_imat = scaori_mat.inverted()

    tx_mat = Matrix.Translation(tx) if tx else None

    # Multiply the components that are present, in spec order.
    new_mat = Matrix()
    for mtx in (tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Build a 4x4 matrix for a VRML TextureTransform node (2D, in xy)."""
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    cent_mat = cent_imat = None
    if cent:
        # cent is at a corner by default
        cent_mat = Matrix.Translation(Vector(cent).to_3d())
        cent_imat = cent_mat.inverted()

    rot_mat = Matrix.Rotation(rot, 4, 'Z') if rot else None

    sca_mat = translateScale((sca[0], sca[1], 0.0)) if sca else None

    tx_mat = Matrix.Translation(Vector(tx).to_3d()) if tx else None

    # as specified in VRML97 docs
    new_mat = Matrix()
    for mtx in (cent_imat, sca_mat, rot_mat, cent_mat, tx_mat):
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate all ancestor Transform matrices (outermost applied last)
    into a world-space matrix for *node*."""
    transform_nodes = [n for n in ancestry if n.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)
    transform_nodes.reverse()

    if mtx is None:
        mtx = Matrix()

    for node_tx in transform_nodes:
        mtx = translateTransform(node_tx, ancestry) @ mtx

    # worldspace matrix
    return global_matrix @ mtx
1596 # -----------------------------------------------------------------------------------
1597 # Mesh import utilities
# Assumes that the mesh has polygons.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Apply colors from a Color/ColorRGBA child of *geom* to *bpymesh*.

    Accepts either one color per vertex or one per loop; anything else is
    reported and skipped.  No-op when the geometry has no color node.
    """
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            # Colors already carry alpha.
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten.  Pad RGB with alpha = 1.
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]
        lcol_layer = bpymesh.vertex_colors.new()

        if len(rgb) == len(bpymesh.vertices):
            # One color per vertex: expand to per-loop order, then flatten.
            rgb = [rgb[l.vertex_index] for l in bpymesh.loops]
            rgb = tuple(chain(*rgb))
        elif len(rgb) == len(bpymesh.loops):
            rgb = tuple(chain(*rgb))
        else:
            print(
                "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
                (len(rgb), len(bpymesh.vertices), len(bpymesh.loops))
            )
            return

        lcol_layer.data.foreach_set("color", rgb)
# Assumes that the vertices have not been rearranged compared to the
# source file order # or in the order assumed by the spec (e. g. in
# Elevation, in rows by x).
# Assumes polygons have been set.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Apply custom normals from a Normal child node, per vertex or per
    polygon depending on 'normalPerVertex'.  No-op without a Normal node."""
    normals = geom.getChildBySpec('Normal')
    if not normals:
        return

    per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    if per_vertex:
        bpymesh.vertices.foreach_set("normal", vectors)
    else:
        bpymesh.polygons.foreach_set("normal", vectors)
# Reads the standard Coordinate object - common for all mesh elements
# Feeds the vertices in the mesh.
# Rearranging the vertex order is a bad idea - other elements
# in X3D might rely on it, if you need to rearrange, please play with
# vertex indices in the polygons instead.
#
# Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
# brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Fill *bpymesh* vertices from the geometry's Coordinate child node."""
    # We want points here as a flat array, but the caching logic in
    # IndexedFaceSet presumes a 2D one.
    # The case for caching is stronger over there.
    coord = geom.getChildBySpec('Coordinate')
    flat_points = coord.getFieldAsArray('point', 0, ancestry)
    bpymesh.vertices.add(len(flat_points) // 3)
    bpymesh.vertices.foreach_set("co", flat_points)
# Assumes that the order of vertices matches the source file.
# Relies upon texture coordinates in the X3D node; if a coordinate generation
# algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
# to be implemented by the geometry handler.
#
# Texture transform is applied in ProcessObject.
def importMesh_ApplyUVs(bpymesh, geom, ancestry):
    """Apply per-vertex UVs from a TextureCoordinate child node.
    No-op when the node or its points are absent."""
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if not tex_coord:
        return

    uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not uvs:
        return

    layer_data = bpymesh.uv_layers.new().data
    # Expand per-vertex UVs into flattened per-loop order.
    per_loop = [component
                for poly in bpymesh.polygons
                for vidx in poly.vertices
                for component in uvs[vidx]]
    layer_data.foreach_set('uv', per_loop)
# Common steps for all triangle meshes once the geometry has been set:
# normals, vertex colors, and UVs.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry):
    """Apply normals, colors, and UVs, then validate/update and return the mesh."""
    for apply_step in (importMesh_ApplyNormals,
                       importMesh_ApplyColors,
                       importMesh_ApplyUVs):
        apply_step(bpymesh, geom, ancestry)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
# Assumes that the mesh is stored as polygons and loops, and the premade array
# of texture coordinates follows the loop array.
# The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Write the flat per-loop UV array into a freshly created UV layer."""
    uv_data = bpymesh.uv_layers.new().data
    uv_data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return *r* unchanged for counter-clockwise winding, reversed otherwise."""
    if ccw:
        return r
    return r[::-1]
1704 # -----------------------------------------------------------------------------------
1705 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry):
    """Import an X3D IndexedTriangleSet node as a Blender mesh."""
    # Ignoring solid
    # colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    bpymesh = bpy.data.meshes.new(name="XXX")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the triangle indices; for clockwise sources, swap the first two
    # corners of every triangle to restore counter-clockwise winding.
    index = geom.getFieldAsArray('index', 0, ancestry)
    num_polys = len(index) // 3
    if not ccw:
        index = [index[3 * tri + corner]
                 for tri in range(num_polys)
                 for corner in (1, 0, 2)]

    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleStripSet(geom, ancestry):
    """Import an X3D IndexedTriangleStripSet (strips separated by -1)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    # Strip trailing separators so the gap count below is exact.
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # Each strip of n entries yields n - 2 triangles; each -1 costs 3.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # i is the absolute position of the current triangle's first entry;
        # 'odd' alternates the winding within a strip (offset by cw for
        # clockwise sources).
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                # Strip terminator: jump past it and restart the winding.
                i += 3
                odd = cw
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleFanSet(geom, ancestry):
    """Import an X3D IndexedTriangleFanSet (fans separated by -1)."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    # Strip trailing separators so the gap count below is exact.
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # Each fan of n entries yields n - 2 triangles; each -1 costs 3.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # i is the absolute position of the current fan's center vertex,
        # j the offset of the triangle's second corner within the fan.
        i = 0
        j = 1
        while True:
            yield index[i]
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                # BUGFIX: advance past the -1 separator relative to the
                # current fan.  The previous 'i = j + 2' was only correct
                # while i == 0, so the third and later fans read from the
                # wrong positions.
                i += j + 2
                j = 1
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleSet(geom, ancestry):
    """Import an X3D TriangleSet: every 3 consecutive vertices form a face."""
    # Ignoring solid
    # colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    vertex_count = len(bpymesh.vertices)
    num_polys = vertex_count // 3
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    if ccw:
        fv = list(range(vertex_count))
    else:
        # Clockwise winding: swap the first two corners of each triangle.
        fv = [3 * tri + corner
              for tri in range(vertex_count // 3)
              for corner in (1, 0, 2)]
    bpymesh.polygons.foreach_set("vertices", fv)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleStripSet(geom, ancestry):
    """Import an X3D TriangleStripSet using the 'stripCount' field."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    # Each strip of n vertices contributes n - 2 triangles.
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # b is the first vertex of the current strip; the winding
        # alternates with j (and is offset by cw for clockwise sources).
        b = 0
        for i in range(0, len(counts)):
            for j in range(0, counts[i] - 2):
                yield b + j + (j + cw) % 2
                yield b + j + 1 - (j + cw) % 2
                yield b + j + 2
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleFanSet(geom, ancestry):
    """Import an X3D TriangleFanSet using the 'fanCount' field."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # BUGFIX: the mesh was misnamed "TriangleStripSet" (copy-paste from the
    # strip importer above).
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    # Each fan of n vertices contributes n - 2 triangles.
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))

    def triangles():
        # b is the fan's center vertex; each triangle fans out from it.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedFaceSet(geom, ancestry):
    """Import an X3D IndexedFaceSet: arbitrary polygons via coordIndex,
    with optional normals, vertex/face colors, and texture coordinates."""
    # Saw the following structure in X3Ds: the first mesh has a huge set
    # of vertices and a reasonably sized index. The rest of the meshes
    # reference the Coordinate node from the first one, and have their
    # own reasonably sized indices.
    #
    # In Blender, to the best of my knowledge, there's no way to reuse
    # the vertex set between meshes. So we have culling logic instead -
    # for each mesh, only leave vertices that are used for faces.

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Strip trailing face separators.
    while index and index[-1] == -1:
        del index[-1]

    # Only cull when the coordinate set is much larger than this index.
    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
        new_index = 0
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
            face = []
        else:
            if cull is not None:
                if not(i in cull):
                    culled_points.append(points[i])
                    cull[i] = new_index
                    uncull.append(i)
                    i = new_index
                    new_index += 1
                else:
                    i = cull[i]
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        # Turn a flat -1-separated per-vertex index into per-face lists,
        # falling back to the (possibly remapped) coordinate index.
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f
                  for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces)
                  for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f
                   for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture coordinates (UVs)
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
        tex_index = processPerVertexIndex(tex_index)
        loops = [co for f in tex_index
                 for v in f
                 for co in tex_coord_points[v]]
    else:
        # No UVs in the file: project onto the two largest bounding-box
        # axes, matching X3DOM's default generation.
        x_min = y_min = z_min = math.inf
        x_max = y_max = z_max = -math.inf
        for f in faces:
            # Unused vertices don't participate in size; X3DOM does so
            for v in f:
                (x, y, z) = points[v]
                x_min = min(x_min, x)
                x_max = max(x_max, x)
                y_min = min(y_min, y)
                y_max = max(y_max, y)
                z_min = min(z_min, z)
                z_max = max(z_max, z)

        mins = (x_min, y_min, z_min)
        deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
        axes = [0, 1, 2]
        axes.sort(key=lambda a: (-deltas[a], a))
        # Tuple comparison breaks ties
        (s_axis, t_axis) = axes[0:2]
        s_min = mins[s_axis]
        ds = deltas[s_axis]
        t_min = mins[t_axis]
        dt = deltas[t_axis]

        # Avoid divide by zero T76303.
        if not (ds > 0.0):
            ds = 1.0
        if not (dt > 0.0):
            dt = 1.0

        def generatePointCoords(pt):
            return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
        loops = [co for f in faces
                 for v in f
                 for co in generatePointCoords(points[v])]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry):
    """Import an X3D ElevationGrid as a quad mesh.

    The height field lists rows along x (outer loop by z), so vertex
    index z * x_dim + x - used by the faces, colors, and UVs below -
    must address grid point (x, z).
    """
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    # BUGFIX: generate vertices z-major (outer z, inner x) so that the
    # z * x_dim + x indexing below lands on the right grid point; the
    # loops were previously nested the other way around.
    co = [w for z in range(z_dim) for x in range(x_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    num_polys = (x_dim - 1) * (z_dim - 1)
    bpymesh.loops.add(num_polys * 4)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it was important that the final vertex index was not 0
    # (Blender treated it as a triangle then).
    # So simply reversing the face was not an option.
    # With bmesh polygons, this has no importance anymore, but keep existing code for now.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.polygons.foreach_set("vertices", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
            # Array of arrays; no need to flatten
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.vertex_colors.new().data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * x_dim + x,
                                            z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                                            (z + 1) * x_dim + x + 1,
                                            (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)
                            for c in rgb[rgb_idx]])
        else:  # Coloring per face
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * (x_dim - 1) + x,) * 4
                            for c in rgb[rgb_idx]])

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    else:
        # Generate default UVs; z-major to match the vertex order above.
        # max(..., 1) guards degenerate single-row grids against a
        # ZeroDivisionError.
        uvs = [(x / max(x_dim - 1, 1), z / max(z_dim - 1, 1))
               for z in range(z_dim)
               for x in range(x_dim)]

    d = bpymesh.uv_layers.new().data
    # Rather than repeat the face/vertex algorithm from above, we read
    # the vertex index back from polygon. Might be suboptimal.
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    # BUGFIX: was foreach_set('uv', uv) - a NameError, the list is 'uvs'.
    d.foreach_set('uv', uvs)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2132 def importMesh_Extrusion(geom, ancestry):
2133 # Interestingly, the spec doesn't allow for vertex/face colors in this
2134 # element, nor for normals.
2135 # Since coloring and normals are not supported here, and also large
2136 # polygons for caps might be required, we shall use from_pydata().
2138 ccw = geom.getFieldAsBool('ccw', True, ancestry)
2139 begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
2140 end_cap = geom.getFieldAsBool('endCap', True, ancestry)
2141 cross = geom.getFieldAsArray('crossSection', 2, ancestry)
2142 if not cross:
2143 cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2144 spine = geom.getFieldAsArray('spine', 3, ancestry)
2145 if not spine:
2146 spine = ((0, 0, 0), (0, 1, 0))
2147 orient = geom.getFieldAsArray('orientation', 4, ancestry)
2148 if orient:
2149 orient = [Quaternion(o[:3], o[3]).to_matrix()
2150 if o[3] else None for o in orient]
2151 scale = geom.getFieldAsArray('scale', 2, ancestry)
2152 if scale:
2153 scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
2154 if s[0] != 1 or s[1] != 1 else None for s in scale]
2156 # Special treatment for the closed spine and cross section.
2157 # Let's save some memory by not creating identical but distinct vertices;
2158 # later we'll introduce conditional logic to link the last vertex with
2159 # the first one where necessary.
2160 cross_closed = cross[0] == cross[-1]
2161 if cross_closed:
2162 cross = cross[:-1]
2163 nc = len(cross)
2164 cross = [Vector((c[0], 0, c[1])) for c in cross]
2165 ncf = nc if cross_closed else nc - 1
2166 # Face count along the cross; for closed cross, it's the same as the
2167 # respective vertex count
2169 spine_closed = spine[0] == spine[-1]
2170 if spine_closed:
2171 spine = spine[:-1]
2172 ns = len(spine)
2173 spine = [Vector(s) for s in spine]
2174 nsf = ns if spine_closed else ns - 1
2176 # This will be used for fallback, where the current spine point joins
2177 # two collinear spine segments. No need to recheck the case of the
2178 # closed spine/last-to-first point juncture; if there's an angle there,
2179 # it would kick in on the first iteration of the main loop by spine.
2180 def findFirstAngleNormal():
2181 for i in range(1, ns - 1):
2182 spt = spine[i]
2183 z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
2184 if z.length > EPSILON:
2185 return z
2186 # All the spines are collinear. Fallback to the rotated source
2187 # XZ plane.
2188 # TODO: handle the situation where the first two spine points match
2189 v = spine[1] - spine[0]
2190 orig_y = Vector((0, 1, 0))
2191 orig_z = Vector((0, 0, 1))
2192 if v.cross(orig_y).length >= EPSILON:
2193 # Spine at angle with global y - rotate the z accordingly
2194 orig_z.rotate(orig_y.rotation_difference(v))
2195 return orig_z
2197 verts = []
2198 z = None
2199 for i, spt in enumerate(spine):
2200 if (i > 0 and i < ns - 1) or spine_closed:
2201 snext = spine[(i + 1) % ns]
2202 sprev = spine[(i - 1 + ns) % ns]
2203 y = snext - sprev
2204 vnext = snext - spt
2205 vprev = sprev - spt
2206 try_z = vnext.cross(vprev)
2207 # Might be zero, then all kinds of fallback
2208 if try_z.length > EPSILON:
2209 if z is not None and try_z.dot(z) < 0:
2210 try_z.negate()
2211 z = try_z
2212 elif not z: # No z, and no previous z.
2213 # Look ahead, see if there's at least one point where
2214 # spines are not collinear.
2215 z = findFirstAngleNormal()
2216 elif i == 0: # And non-crossed
2217 snext = spine[i + 1]
2218 y = snext - spt
2219 z = findFirstAngleNormal()
2220 else: # last point and not crossed
2221 sprev = spine[i - 1]
2222 y = spt - sprev
2223 # If there's more than one point in the spine, z is already set.
2224 # One point in the spline is an error anyway.
2226 x = y.cross(z)
2227 m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
2228 # Columns are the unit vectors for the xz plane for the cross-section
2229 m.normalize()
2230 if orient:
2231 mrot = orient[i] if len(orient) > 1 else orient[0]
2232 if mrot:
2233 m @= mrot # Not sure about this. Counterexample???
2234 if scale:
2235 mscale = scale[i] if len(scale) > 1 else scale[0]
2236 if mscale:
2237 m @= mscale
2238 # First the cross-section 2-vector is scaled,
2239 # then applied to the xz plane unit vectors
2240 for cpt in cross:
2241 verts.append((spt + m @ cpt).to_tuple())
2242 # Could've done this with a single 4x4 matrix... Oh well
2244 # The method from_pydata() treats correctly quads with final vertex
2245 # index being zero.
2246 # So we just flip the vertices if ccw is off.
2248 faces = []
2249 if begin_cap:
2250 faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
2252 # Order of edges in the face: forward along cross, forward along spine,
2253 # backward along cross, backward along spine, flipped if now ccw.
2254 # This order is assumed later in the texture coordinate assignment;
2255 # please don't change without syncing.
2257 faces += [flip((
2258 s * nc + c,
2259 s * nc + (c + 1) % nc,
2260 (s + 1) * nc + (c + 1) % nc,
2261 (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
2263 if spine_closed:
2264 # The faces between the last and the first spine points
2265 b = (ns - 1) * nc
2266 faces += [flip((
2267 b + c,
2268 b + (c + 1) % nc,
2269 (c + 1) % nc,
2270 c), ccw) for c in range(ncf)]
2272 if end_cap:
2273 faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
2275 bpymesh = bpy.data.meshes.new(name="Extrusion")
2276 bpymesh.from_pydata(verts, [], faces)
2278 # The way we deal with textures in triangular meshes doesn't apply.
2279 # The structure of the loop array goes: cap, side, cap
2280 if begin_cap or end_cap: # Need dimensions
2281 x_min = x_max = z_min = z_max = None
2282 for c in cross:
2283 (x, z) = (c.x, c.z)
2284 if x_min is None or x < x_min:
2285 x_min = x
2286 if x_max is None or x > x_max:
2287 x_max = x
2288 if z_min is None or z < z_min:
2289 z_min = z
2290 if z_max is None or z > z_max:
2291 z_max = z
2292 dx = x_max - x_min
2293 dz = z_max - z_min
2294 cap_scale = dz if dz > dx else dx
2296 # Takes an index in the cross array, returns scaled
2297 # texture coords for cap texturing purposes
2298 def scaledLoopVertex(i):
2299 c = cross[i]
2300 return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
2302 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2304 loops = []
2305 mloops = bpymesh.loops
2306 if begin_cap: # vertex indices match the indices in cross
2307 # Rely on the loops in the mesh; don't repeat the face
2308 # generation logic here
2309 loops += [co for i in range(nc)
2310 for co in scaledLoopVertex(mloops[i].vertex_index)]
2312 # Sides
2313 # Same order of vertices as in face generation
2314 # We don't rely on the loops in the mesh; instead,
2315 # we repeat the face generation logic.
2316 loops += [co for s in range(nsf)
2317 for c in range(ncf)
2318 for v in flip(((c / ncf, s / nsf),
2319 ((c + 1) / ncf, s / nsf),
2320 ((c + 1) / ncf, (s + 1) / nsf),
2321 (c / ncf, (s + 1) / nsf)), ccw) for co in v]
2323 if end_cap:
2324 # Base loop index for end cap
2325 lb = ncf * nsf * 4 + (nc if begin_cap else 0)
2326 # Rely on the loops here too.
2327 loops += [co for i in range(nc) for co
2328 in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
2329 importMesh_ApplyTextureToLoops(bpymesh, loops)
2331 bpymesh.validate()
2332 bpymesh.update()
2333 return bpymesh
2336 # -----------------------------------------------------------------------------------
2337 # Line and point sets
def importMesh_LineSet(geom, ancestry):
    """Build a 3D POLY curve from an X3D LineSet node.

    TODO: line display properties are ignored.
    Per-vertex color is ignored.
    """
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)

    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'

    base = 0
    for count in counts:
        spline = bpycurve.splines.new('POLY')
        spline.points.add(count - 1)  # a fresh spline already has one point
        # Source points are triples; Blender spline points are 4D, so
        # pad each with a trailing 0 and flatten for foreach_set().
        flat_co = [component
                   for pt in src_points[base:base + count]
                   for component in (pt[0], pt[1], pt[2], 0)]
        spline.points.foreach_set('co', flat_co)
        base += count
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry):
    """Build a 3D POLY curve from a VRML/X3D IndexedLineSet node.

    Returns None (with a warning) when the node carries no points.
    Per-vertex color is ignored; Blender curves have no such notion.
    """
    # getChildBySpec('Coordinate') works for both x3d and vrml input.
    coord = geom.getChildBySpec('Coordinate')
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Split the flat index stream into polylines; -1 is the separator.
    lines = []
    line = []
    for il in ils_lines:
        if il == -1:
            lines.append(line)
            line = []
        else:
            line.append(int(il))
    lines.append(line)

    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for line in lines:
        if not line:
            continue
        spline = bpycurve.splines.new('POLY')
        spline.points.add(len(line) - 1)  # the new spline has 1 point already
        for index, pt in zip(line, spline.points):
            pt.co[0:3] = points[index]

    return bpycurve
def importMesh_PointSet(geom, ancestry):
    """Build a vertex-only mesh from a VRML/X3D PointSet node.

    Per-vertex color ('color' child) is ignored.
    """
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    flat_co = [component for vertex in points for component in vertex]
    bpymesh.vertices.foreach_set("co", flat_co)

    # A mesh with only vertices needs no validation.
    bpymesh.update()
    return bpymesh
2429 # -----------------------------------------------------------------------------------
2430 # Primitives
2431 # SA: they used to use bpy.ops for primitive creation. That was
2432 # unbelievably slow on complex scenes. I rewrote to generate meshes
2433 # by hand.
2436 GLOBALS['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry):
    """Generate a UV-sphere mesh for an X3D/VRML Sphere node.

    Poles sit on the +y/-y axis. Face and UV assignment is interleaved
    because caps are triangles while side faces are quads.
    """
    # solid is ignored.
    # Extra field 'subdivision="n m"' attribute, specifying how many
    # rings and segments to use (X3DOM).
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    # 2 poles plus (nr - 1) rings of ns vertices each.
    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    # Triangular cap faces (3 loops each) sandwich the quad side faces
    # (4 loops each); loop_start reflects that 3/4/3 stride pattern.
    num_poly = ns * nr
    num_tri = ns * 2
    num_quad = num_poly - num_tri
    num_loop = num_quad * 4 + num_tri * 3
    tf = bpymesh.polygons
    tf.add(num_poly)
    bpymesh.loops.add(num_loop)
    bpymesh.polygons.foreach_set("loop_start",
                                 tuple(range(0, ns * 3, 3)) +
                                 tuple(range(ns * 3, num_loop - ns * 3, 4)) +
                                 tuple(range(num_loop - ns * 3, num_loop, 3)))

    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    tex = bpymesh.uv_layers.new().data

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        for lidx, uv in zip(tf[seg].loop_indices,
                            (((seg + 0.5) / ns, 1),
                             (seg / ns, 1 - 1 / nr),
                             ((seg + 1) / ns, 1 - 1 / nr))):
            tex[lidx].uv = uv
        for lidx, uv in zip(tf[fb + seg].loop_indices,
                            (((seg + 0.5) / ns, 0),
                             ((seg + 1) / ns, 1 / nr),
                             (seg / ns, 1 / nr))):
            tex[lidx].uv = uv

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            for lidx, uv in zip(tf[rfb + seg].loop_indices,
                                ((seg / ns, 1 - (ring + 1) / nr),
                                 (seg / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 1) / nr))):
                tex[lidx].uv = uv

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry):
    """Generate a cylinder mesh for an X3D/VRML Cylinder node.

    The axis is y; caps at y = +/- height/2. 'bottom'/'side'/'top'
    booleans switch the corresponding face groups on or off.
    """
    # solid is ignored
    # no ccw in this element
    # Extra parameter subdivision="n" - how many faces to use
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2
    yvalues = (height / 2, -height / 2)
    angle = 2 * pi / n

    # The seam is at x=0, z=-r, vertices go ccw -
    # to pos x, to neg z, to neg x, back to neg z
    # Vertices interleave top/bottom per segment: even index = top edge,
    # odd index = bottom edge.
    verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
             for i in range(n) for y in yvalues]
    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
    if top:
        faces += [[x for x in range(0, nn, 2)]]
    if bottom:
        faces += [[x for x in range(nn - 1, -1, -2)]]

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.

    bpymesh.validate()

    # The structure of the loop array goes: cap, side, cap.
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]

    if top:
        loops += [0.5 + co / 2 for i in range(n)
                  for co in (-sin(angle * i), cos(angle * i))]

    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Generate a cone mesh for an X3D/VRML Cone node.

    Apex at +y (height/2), base circle at -y. 'solid' is ignored.
    Extra parameter subdivision="n" - how many side faces to use.
    """
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Apex first, then the base circle (seam at x=0, z=-r, going ccw).
    # FIX: the base-circle vertices were missing their y component (-d),
    # yielding 2-tuples that from_pydata() cannot consume.
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]
    faces = []

    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate()
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]
    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Generate an axis-aligned box mesh for an X3D/VRML Box node.

    The 'size' field is the full extent per axis (default 2x2x2),
    halved below so vertices sit at +/- dx/dy/dz.
    """
    # Solid is ignored
    # No ccw in this element
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    # Six quads, four loops each.
    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    # Unit-square UVs per face; winding matches the vertex order above.
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2675 # -----------------------------------------------------------------------------------
2676 # Utilities for importShape
2679 # Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    """Create a Blender material from an X3D Material node.

    Texture is applied later, in appearance_Create().
    All values between 0.0 and 1.0, defaults from VRML docs.
    Returns a PrincipledBSDFWrapper around the new material.
    """
    mat_name = mat.getDefName()
    bpymat = bpy.data.materials.new(mat_name if mat_name else vrmlname)
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    # TODO: handle 'ambientIntensity'.
    #ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)

    diff_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
    bpymat_wrap.base_color = diff_color

    emit_color = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat_wrap.emission_color = emit_color

    # NOTE - 'shininess' is being handled as 1 - roughness for now.
    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat_wrap.roughness = 1.0 - shininess

    #bpymat.specular_hardness = int(1 + (510 * shininess))
    # 0-1 -> 1-511
    # TODO: handle 'specularColor'.
    #specular_color = mat.getFieldAsFloatTuple('specularColor',
    #                                          [0.0, 0.0, 0.0], ancestry)

    alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    bpymat_wrap.alpha = alpha
    if alpha < 1.0:
        bpymat.blend_method = "BLEND"
        bpymat.shadow_method = "HASHED"

    # NOTE - leaving this disabled for now
    if False and is_vcol:
        node_vertex_color = bpymat.node_tree.nodes.new("ShaderNodeVertexColor")
        node_vertex_color.location = (-200, 300)

        # FIX: this call was missing its closing parenthesis.
        bpymat.node_tree.links.new(
            bpymat_wrap.node_principled_bsdf.inputs["Base Color"],
            node_vertex_color.outputs["Color"],
        )

    return bpymat_wrap
def appearance_CreateDefaultMaterial():
    """Create a Blender material with the X3D default appearance.

    Used for shapes without an explicit material definition
    (but possibly with a texture).
    Returns a PrincipledBSDFWrapper around the new material.
    """
    bpymat = bpy.data.materials.new("Material")
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    # X3D defaults: 0.8 grey diffuse, shininess 0.2 -> roughness 0.8,
    # fully opaque.
    bpymat_wrap.base_color = (0.8, 0.8, 0.8)
    bpymat_wrap.roughness = 0.8
    bpymat_wrap.alpha = 1.0

    # TODO: handle 'shininess' and 'specularColor'.
    return bpymat_wrap
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each candidate URL in turn; return the first image that loads.

    Returns None when the list is empty or no URL yields an image.
    """
    # The containing file's directory is the same for every candidate;
    # compute it once instead of on every loop iteration.
    dirname = os.path.dirname(node.getFilename())
    for f in ima_urls:
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            return bpyima

    return None
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Load the image referenced by an ImageTexture node.

    Accepts a single quoted URL string or an MFString array of URLs.
    Returns the Blender image, or None when no URL is present or none
    of the URLs could be loaded.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            # in some cases we get a list of images.
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
        except Exception:
            # FIX: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit.
            ima_urls = None
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None
    else:
        bpyima = appearance_LoadImageTextureFile(ima_urls, node)

        if not bpyima:
            print("ImportX3D warning: unable to load texture", ima_urls)
        else:
            # KNOWN BUG; PNGs with a transparent color are not perceived
            # as transparent. Need alpha channel.
            if bpyima.depth not in {32, 128}:
                bpyima.alpha_mode = 'NONE'
        return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load (or fetch from cache) the image for a texture node.

    Combines USE-based and description-based caching. Works for both
    ImageTextures and PixelTextures.
    """
    # USE-based caching: a reference points at an already-parsed node.
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    cached = texture_cache.get(desc) if desc else None
    if cached is not None:
        if tex_node.canHaveReferences():
            tex_node.parsed = cached
        return cached

    # Cache miss - actually load the texture.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima
        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    # Expand a cached material into the (material, image, has_alpha)
    # triple that importShape_LoadAppearance() returns.
    # NOTE: the texture_slots branch below is deliberately disabled
    # ('if 0'); it predates the node-based material system and is kept
    # for reference only, so this always returns (bpymat, None, False).
    if 0 and bpymat.texture_slots[0] is not None:
        bpyima = bpymat.texture_slots[0].texture.image
        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        return (bpymat, bpyima, tex_has_alpha)

    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (material desc, texture desc) key for material_cache.

    Returns None when a present node cannot provide a description
    (typical for VRML - TODO: serialize VRML nodes!!!), which disables
    desc-based caching for that appearance. A missing node contributes
    "Default" to the key, so the null material is still cached.
    """
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    mat_desc_missing = bool(material) and mat_desc is None
    tex_desc_missing = bool(tex_node) and tex_desc is None

    if not mat_desc_missing and not tex_desc_missing:
        return (mat_desc, tex_desc)
    if not tex_node and not material:
        # Even for VRML, we cache the null material
        return ("Default", "Default")
    return None  # Desc-based caching is off
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Create a Blender material object from an X3D appearance.

    Returns (material, image, tex_has_alpha); image is None when there
    is no texture or it failed to load.
    """
    bpyima = None
    tex_has_alpha = False

    if material:
        bpymat_wrap = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        bpymat_wrap = appearance_CreateDefaultMaterial()

    if tex_node:  # Texture caching happens inside the loader
        bpyima = appearance_LoadTexture(tex_node, ancestry, node)
        if bpyima:
            bpymat_wrap.base_color_texture.image = bpyima

            # NOTE - not possible to handle x and y tiling individually.
            repeatS = tex_node.getFieldAsBool('repeatS', True, ancestry)
            repeatT = tex_node.getFieldAsBool('repeatT', True, ancestry)
            extension = "REPEAT" if repeatS or repeatT else "CLIP"
            bpymat_wrap.base_color_texture.extension = extension

            tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
            if tex_has_alpha:
                bpymat_wrap.alpha_texture.image = bpyima
                bpymat_wrap.alpha_texture.extension = extension

    return (bpymat_wrap.material, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # FIX: the docstring above had lost its triple-quote delimiters,
    # leaving the prose as bare (syntactically invalid) statements.

    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Create a Blender image from an X3D PixelTexture node.

    The 'image' field is [width, height, plane_count, pixel, pixel, ...]
    with each pixel packed into one integer, most significant byte first.
    """
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Unpack each packed pixel into RGBA floats in [0, 1].
    # Conditional above the loop, for performance
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
3001 # Called from importShape to insert a data object (typically a mesh)
3002 # into the scene
# Called from importShape to insert a data object (typically a mesh)
# into the scene
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap geometry data in a Blender object and link it into the scene.

    Applies smoothing (creaseAngle), the single material, and the
    optional texture transform; names the object '<vrmlname>_<spec>'.
    """
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.set_sharp_from_angle(angle=creaseAngle)
        else:
            bpydata.polygons.foreach_set("use_smooth", [False] * len(bpydata.polygons))

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'
                bpymat.shadow_method = 'HASHED'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]
                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Create a Blender FONT curve from an X3D/VRML Text node.

    MFString bodies ('"foo" "bar"') become separate lines of text.
    """
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    # FIX: a Text node may omit 'string' entirely; render empty text
    # instead of crashing on None.split().
    if body is None:
        strings = []
    else:
        # '"foo" "bar"' --> ['foo', 'bar']
        strings = [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(strings)
    bpytext.size = size
    return bpytext
3070 # -----------------------------------------------------------------------------------
# Maps geometry node specs to their importer functions; consulted by
# importShape(). Each importer takes (geom, ancestry) and returns a
# bpy data-block (mesh or curve).
# FIX: the closing brace of this dict literal was missing.
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
}
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import one Shape node: appearance + geometry -> scene object.

    USE'd shapes are satisfied by copying the already-built object.
    """
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # This shape was built before (USE); instance the existing data.
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    bpyima = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)

    if appr:
        (bpymat, bpyima,
         tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
                                                     ancestry, node,
                                                     is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    bpydata = None
    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is not None:
        bpydata = geom_fn(geom, ancestry)

        # There are no geometry importers that can legally return
        # no object. It's either a bpy object, or an exception
        importShape_ProcessObject(
            bpycollection, vrmlname, bpydata, geom, geom_spec,
            node, bpymat, tex_has_alpha, texmtx,
            ancestry, global_matrix)
    else:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
3155 # -----------------------------------------------------------------------------------
3156 # Lighting
def importLamp_PointLight(node, ancestry):
    """Create a Blender POINT light from a VRML/X3D PointLight node.

    Returns (light datablock, world-space transform matrix).
    """
    vrmlname = node.getDefName() or 'PointLight'

    # TODO: 'ambientIntensity', 'attenuation' and 'on' are ignored.
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # Intensity max is documented to be 1.0 but some files have higher.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.cutoff_distance = radius
    bpylamp.color = color

    mtx = Matrix.Translation(Vector(location))

    return bpylamp, mtx
def importLamp_DirectionalLight(node, ancestry):
    """Convert a VRML/X3D DirectionalLight node into a Blender SUN light.

    Returns (light datablock, orientation matrix).
    """
    vrmlname = node.getDefName() or 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    # Max intensity is documented to be 1.0 but some files have higher.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO

    bpylamp = bpy.data.lights.new(vrmlname, 'SUN')
    bpylamp.energy = intensity
    bpylamp.color = color

    # Blender lamps aim down their -Z axis with Y up.
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
3202 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Convert a VRML/X3D SpotLight node into a Blender SPOT light.

    Returns (light datablock, 4x4 placement matrix).
    """
    vrmlname = node.getDefName() or 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    # NOTE: default values for beamWidth and cutOffAngle look swapped in the VRML docs.
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # VRML cutOffAngle is a half-angle; Blender's spot_size is the full cone angle.
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    # Max intensity is documented to be 1.0 but some files have higher.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'SPOT')
    bpylamp.energy = intensity
    bpylamp.cutoff_distance = radius
    bpylamp.color = color
    bpylamp.spot_size = cutOffAngle

    if beamWidth > cutOffAngle:
        blend = 0.0
    elif cutOffAngle == 0.0:  # this should never happen!
        blend = 0.5
    else:
        blend = beamWidth / cutOffAngle
    bpylamp.spot_blend = blend

    # Blender lamps aim down their -Z axis with Y up.
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Import a VRML/X3D light node and link the resulting object into *bpycollection*.

    spec must be one of 'PointLight', 'DirectionalLight' or 'SpotLight';
    any other value raises ValueError (same exception type as before, now
    carrying the offending spec for easier debugging).
    """
    if spec == 'PointLight':
        bpylamp, mtx = importLamp_PointLight(node, ancestry)
    elif spec == 'DirectionalLight':
        bpylamp, mtx = importLamp_DirectionalLight(node, ancestry)
    elif spec == 'SpotLight':
        bpylamp, mtx = importLamp_SpotLight(node, ancestry)
    else:
        print("Error, not a lamp")
        raise ValueError("unsupported light type %r" % spec)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3260 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Create a Blender camera object from a VRML/X3D Viewpoint node."""
    name = node.getDefName() or 'Viewpoint'

    # Max fieldOfView is documented to be 1.0 but some files have higher.
    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    # NOTE: parsed but currently unused.
    description = node.getFieldAsString('description', '', ancestry)

    bpycam = bpy.data.cameras.new(name)
    bpycam.angle = fieldOfView

    mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Create an empty object standing in for a VRML/X3D Transform node."""
    name = node.getDefName() or 'Transform'

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # Keep the empties small so they are not too annoying in the viewport.
    bpyob.empty_display_type = 'PLAIN_AXES'
    bpyob.empty_display_size = 0.2
3302 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the F-Curve on *action* for (data_path, array_index), creating it if absent."""
    existing = next((fcu for fcu in action.fcurves
                     if fcu.data_path == data_path and fcu.array_index == array_index),
                    None)
    if existing is not None:
        return existing
    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Bake a VRML/X3D PositionInterpolator into location F-Curves on *action*.

    Keys whose keyValue entry is missing or malformed are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    loc_x = action_fcurve_ensure(action, "location", 0)
    loc_y = action_fcurve_ensure(action, "location", 1)
    loc_z = action_fcurve_ensure(action, "location", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Key without a matching (or well-formed) value: skip it rather
            # than abort the whole interpolator.
            continue

        loc_x.keyframe_points.insert(time, x)
        loc_y.keyframe_points.insert(time, y)
        loc_z.keyframe_points.insert(time, z)

    # VRML interpolators are linear between keys.
    for fcu in (loc_x, loc_y, loc_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Bake a VRML/X3D OrientationInterpolator into rotation_euler F-Curves.

    Axis/angle key values are converted through translateRotation() to Euler.
    Keys whose keyValue entry is missing or malformed are skipped.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
    rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
    rot_z = action_fcurve_ensure(action, "rotation_euler", 2)

    for i, time in enumerate(key):
        try:
            x, y, z, w = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Key without a matching (or well-formed) value: skip it.
            continue

        mtx = translateRotation((x, y, z, w))
        eul = mtx.to_euler()
        rot_x.keyframe_points.insert(time, eul.x)
        rot_y.keyframe_points.insert(time, eul.y)
        rot_z.keyframe_points.insert(time, eul.z)

    # VRML interpolators are linear between keys.
    for fcu in (rot_x, rot_y, rot_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
3359 # Untested!
def translateScalarInterpolator(node, action, ancestry):
    """Bake a scale interpolator into scale F-Curves on *action*.

    Fixes vs. the previous (untested) version:
    - keyValue was read 4 components wide but unpacked into 3, so every key
      hit the except clause and nothing was ever keyed; read 3-wide now.
    - keyframe_points.new() is not part of the current bpy API; use insert()
      like the other interpolator translators.
    - Apply LINEAR interpolation, consistent with the position/orientation
      translators (VRML interpolators are linear between keys).
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except (IndexError, ValueError, TypeError):
            # Key without a matching (or well-formed) value: skip it.
            continue

        sca_x.keyframe_points.insert(time, x)
        sca_y.keyframe_points.insert(time, y)
        sca_z.keyframe_points.insert(time, z)

    for fcu in (sca_x, sca_y, sca_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """

    # XXX25 TODO: disabled — the code below still targets the Blender 2.4x
    # Ipo API and cannot run on current bpy. Kept for reference.
    if 1:
        return

    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment
    """

    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # Lazily create one action per animated target id.
        try:
            action = routeIpoDict[act_id]
        except KeyError:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to each other

    ROUTE vpPI.value_changed TO champFly001.set_position
    ROUTE vpOI.value_changed TO champFly001.set_orientation
    ROUTE vpTs.fraction_changed TO vpPI.set_fraction
    ROUTE vpTs.fraction_changed TO vpOI.set_fraction
    ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    #value_changed
    set_position_node = None
    set_orientation_node = None
    time_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except (IndexError, ValueError):
                # Too few tokens, or a token without exactly one '.'.
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
def load_web3d(
        bpycontext,
        filepath,
        *,
        PREF_FLAT=False,
        PREF_CIRCLE_DIV=16,
        global_matrix=None,
        HELPER_FUNC=None,
        ):
    """Parse *filepath* (X3D or VRML, chosen by extension) and build the scene
    into the context's collection.

    PREF_FLAT: when True, Transform nodes are not imported and no object
    hierarchy is built. HELPER_FUNC(node, ancestry) lets an external script
    hook each node before the built-in importers see it.
    """
    # Used when adding blender primitives
    GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV

    # NOTE - reset material cache
    # (otherwise we might get "StructRNA of type Material has been removed" errors)
    global material_cache
    material_cache = {}

    bpyscene = bpycontext.scene
    bpycollection = bpycontext.collection
    #root_node = vrml_parse('/_Cylinder.wrl')
    if filepath.lower().endswith('.x3d'):
        root_node, msg = x3d_parse(filepath)
    else:
        root_node, msg = vrml_parse(filepath)

    if not root_node:
        print(msg)
        return

    if global_matrix is None:
        global_matrix = Matrix()

    # fill with tuples - (node, [parents-parent, parent])
    all_nodes = root_node.getSerialized([], [])

    for node, ancestry in all_nodes:
        #if 'castle.wrl' not in node.getFilename():
        #    continue

        spec = node.getSpec()
        # Disabled PROTO handling, kept for reference:
        # prefix = node.getPrefix()
        # if prefix == 'PROTO':
        #     pass
        if HELPER_FUNC and HELPER_FUNC(node, ancestry):
            # Note, include this function so the VRML/X3D importer can be extended
            # by an external script. - gets first pick
            pass
        if spec == 'Shape':
            importShape(bpycollection, node, ancestry, global_matrix)
        elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
            importLamp(bpycollection, node, spec, ancestry, global_matrix)
        elif spec == 'Viewpoint':
            importViewpoint(bpycollection, node, ancestry, global_matrix)
        elif spec == 'Transform':
            # Only use transform nodes when we are not importing a flat object hierarchy
            if not PREF_FLAT:
                importTransform(bpycollection, node, ancestry, global_matrix)
        # Disabled legacy branch (interpolators are dealt with later within
        # importRoute; bpy.data.ipos no longer exists in the bpy API):
        # elif spec == 'PositionInterpolator':
        #     action = bpy.data.ipos.new('web3d_ipo', 'Object')
        #     translatePositionInterpolator(node, action)

    # After we import all nodes, route events - anim paths
    for node, ancestry in all_nodes:
        importRoute(node, ancestry)

    for node, ancestry in all_nodes:
        if node.isRoot():
            # we know that all nodes referenced from will be in
            # routeIpoDict so no need to run node.getDefDict() for every node.
            routeIpoDict = node.getRouteIpoDict()
            defDict = node.getDefDict()

            for key, action in routeIpoDict.items():

                # Assign anim curves
                node = defDict[key]
                if node.blendData is None:  # Add an object if we need one for animation
                    bpyob = node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None)  # , name)
                    bpycollection.objects.link(bpyob)
                    bpyob.select_set(True)

                if node.blendData.animation_data is None:
                    node.blendData.animation_data_create()

                node.blendData.animation_data.action = action

    # Add in hierarchy
    if not PREF_FLAT:
        child_dict = {}
        for node, ancestry in all_nodes:
            if node.blendObject:
                blendObject = None

                # Get the last (nearest) ancestor that produced an object.
                i = len(ancestry)
                while i:
                    i -= 1
                    blendObject = ancestry[i].blendObject
                    if blendObject:
                        break

                if blendObject:
                    # Parent Slow, - 1 liner but works
                    # blendObject.makeParent([node.blendObject], 0, 1)

                    # Parent FAST
                    try:
                        child_dict[blendObject].append(node.blendObject)
                    except KeyError:
                        child_dict[blendObject] = [node.blendObject]

        # Parent
        for parent, children in child_dict.items():
            for c in children:
                c.parent = parent

        # update deps
        bpycontext.view_layer.update()
        del child_dict
def load_with_profiler(
        context,
        filepath,
        *,
        global_matrix=None,
        ):
    """Run load_web3d under cProfile and print the top 10% of stats by time."""
    import cProfile
    import pstats
    pro = cProfile.Profile()
    pro.runctx("load_web3d(context, filepath, PREF_FLAT=True, "
               "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
               globals(), locals())
    st = pstats.Stats(pro)
    st.sort_stats("time")
    st.print_stats(0.1)
    # st.print_callers(0.1)
def load(context,
         filepath,
         *,
         global_matrix=None,
         ):
    """Importer entry point: import *filepath* as a flat hierarchy.

    Returns {'FINISHED'} for the calling operator.
    """
    # loadWithProfiler(operator, context, filepath, global_matrix)
    load_web3d(context, filepath,
               PREF_FLAT=True,
               PREF_CIRCLE_DIV=16,
               global_matrix=global_matrix,
               )

    return {'FINISHED'}