Merge branch 'master' into blender2.8
[blender-addons.git] / io_scene_x3d / import_x3d.py
blobcffdabfeb3eb11d510d01f9243280ed78928abed
1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
19 # <pep8 compliant>
# Module-level debug switch: enables verbose parser diagnostics when True.
DEBUG = False

# This should work without a blender at all
import os
import shlex
import math
from math import sin, cos, pi

# Caches so repeated DEF/USE references reuse already-imported datablocks
# instead of recreating them on every instance.
texture_cache = {}
material_cache = {}

# Tolerance used for float comparisons during import.
EPSILON = 0.0000001  # Very crude.
def imageConvertCompat(path):
    """Convert a GIF image to PNG so Blender can load it.

    On Windows the path is returned untouched (the original importer assumed
    QuickTime handled GIFs there).  Elsewhere this shells out to
    ImageMagick's ``convert`` as a best effort; when conversion fails or
    ImageMagick is not installed, the original path is returned.

    :param path: filesystem path of the source image.
    :return: path of the converted PNG, or *path* when no conversion happened.
    """
    if os.sep == '\\':
        return path  # assume win32 has quicktime, dont convert

    if path.lower().endswith('.gif'):
        path_to = path[:-3] + 'png'

        '''
        if exists(path_to):
            return path_to
        '''
        # print('\n'+path+'\n'+path_to+'\n')

        # Use an argument list instead of a shell string so paths containing
        # quotes or shell metacharacters cannot break (or inject into) the
        # command; the old os.system() call interpolated the raw path into a
        # shell line.
        import subprocess
        try:
            subprocess.run(['convert', path, path_to])  # for now just hope we have image magick
        except OSError:
            pass  # 'convert' not installed; fall through and keep the GIF

        if os.path.exists(path_to):
            return path_to

    return path
# notes
# transforms are relative
# order doesn't matter for loc/size/rot
# right handed rotation
# angles are in radians
# rotation first defines axis then amount in radians
# =============================== VRML Specific
def vrml_split_fields(value):
    """
    key 0.0 otherkey 1,2,3 opt1 opt1 0.0
    -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
    """
    def is_field_key(tok):
        # A key is an unquoted token starting with a letter that is not a
        # boolean literal.
        return tok[0] != '"' and tok[0].isalpha() and tok.upper() not in {'TRUE', 'FALSE'}

    fields = []
    current = []

    for tok in value:
        if not is_field_key(tok):
            # Plain value token: keep accumulating into the current field.
            current.append(tok)
            continue

        if not current:
            # Very first token of the first field.
            current.append(tok)
            continue

        current_len = len(current)
        if current_len > 2 and current[-2] in {'DEF', 'USE'}:
            # Token follows a DEF/USE name, which may contain any chars:
            # treat it as part of this field.
            current.append(tok)
        elif (not is_field_key(current[-1])) or (current_len == 3 and current[1] == 'IS'):
            # This IS a key and the previous token was a value (or the field
            # was a completed "a IS b" mapping): start a new field.
            fields.append(current)
            current = [tok]
        else:
            # Consecutive keys are legal in some constructs; keep together.
            current.append(tok)

    if current:
        fields.append(current)

    return fields
def vrmlFormat(data):
    """
    Keep this as a valid vrml file, but format in a way we can predict.
    """
    # Strip all comments - # not in strings - warning multiline strings are ignored.
    def strip_comment(l):
        #l = ' '.join(l.split())
        l = l.strip()

        if l.startswith('#'):
            return ''

        i = l.find('#')

        if i == -1:
            return l

        # Most cases accounted for! if we have a comment at the end of the line do this...
        #j = l.find('url "')
        j = l.find('"')

        if j == -1:  # simple case: no strings on this line
            return l[:i].strip()

        # Line contains both a string and '#': walk it tracking quote state
        # and cut at the first unquoted '#'.
        q = False
        for i, c in enumerate(l):
            if c == '"':
                q = not q  # invert

            elif c == '#':
                if q is False:
                    return l[:i - 1]

        return l

    data = '\n'.join([strip_comment(l) for l in data.split('\n')])  # remove all comments

    EXTRACT_STRINGS = True  # only needed when strings or filenames contain ,[]{} chars :/

    if EXTRACT_STRINGS:

        # We need this so we can detect URL's
        data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

        string_ls = []

        #search = 'url "'
        search = '"'

        # Pull every quoted string out of the data (leaving empty "" pairs)
        # so the bracket/comma rewrites below cannot corrupt string contents.
        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search, last_i)
            if i != -1:

                start = i + len(search)  # first char after end of search
                end = data.find('"', start)
                if end != -1:
                    item = data[start:end]
                    string_ls.append(item)
                    data = data[:start] + data[end:]
                    ok = True  # keep looking

                    last_i = (end - len(item)) + 1
                    # print(last_i, item, '|' + data[last_i] + '|')

    # done with messy extracting strings part

    # Bad, dont take strings into account
    '''
    data = data.replace('#', '\n#')
    data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')])  # remove all whitespace
    '''

    # Give every bracket its own line and pad commas so tokens split cleanly.
    data = data.replace('{', '\n{\n')
    data = data.replace('}', '\n}\n')
    data = data.replace('[', '\n[\n')
    data = data.replace(']', '\n]\n')
    data = data.replace(',', ' , ')  # make sure comma's separate

    # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
    # See T45195 for details.
    data = '\n'.join([' '.join(value) for l in data.split('\n') for value in vrml_split_fields(l.split())])

    if EXTRACT_STRINGS:
        # add strings back in

        search = '"'  # fill in these empty strings

        ok = True
        last_i = 0
        while ok:
            ok = False
            i = data.find(search + '"', last_i)
            # print(i)
            if i != -1:
                start = i + len(search)  # first char after end of search
                item = string_ls.pop(0)
                # print(item)
                data = data[:start] + item + data[start:]

                last_i = start + len(item) + 1

                ok = True

    # More annoying obscure cases where USE or DEF are placed on a newline
    # data = data.replace('\nDEF ', ' DEF ')
    # data = data.replace('\nUSE ', ' USE ')

    data = '\n'.join([' '.join(l.split()) for l in data.split('\n')])  # remove all whitespace

    # Better to parse the file accounting for multiline arrays
    '''
    data = data.replace(',\n', ' , ')  # remove line endings with commas
    data = data.replace(']', '\n]\n')  # very very annoying - but some comma's are at the end of the list, must run this again.
    '''

    return [l for l in data.split('\n') if l]
# Node types returned by the line classifiers below.
NODE_NORMAL = 1  # {}
NODE_ARRAY = 2  # []
NODE_REFERENCE = 3  # USE foobar
# Defined (rather than commented out) because getNodePreText() returns it
# for PROTO lines; leaving it undefined made that branch raise NameError.
NODE_PROTO = 4

# Global line buffer for the file currently being parsed.  vrmlNode.parse()
# temporarily swaps its contents when recursing into Inline/EXTERNPROTO files.
lines = []
def getNodePreText(i, words):
    """Collect tokens from ``lines`` starting at index *i* until the opening
    of a node is recognized.

    Appends tokens to *words* (modified in place) and returns a
    ``(node_type, next_index)`` pair, or ``(0, -1)`` when no node start
    could be recognized within the token budget.
    """
    # print(lines[i])
    use_node = False
    while len(words) < 5:

        if i >= len(lines):
            break
        elif lines[i].startswith('PROTO'):
            # NOTE(review): requires NODE_PROTO to be defined at module
            # level — confirm against the NODE_* constants above.
            return NODE_PROTO, i + 1
        elif lines[i] == '{':
            # words.append(lines[i])  # no need
            # print("OK")
            return NODE_NORMAL, i + 1
        elif lines[i].count('"') % 2 != 0:  # odd number of quotes? - part of a string.
            # print('ISSTRING')
            break
        else:
            new_words = lines[i].split()
            if 'USE' in new_words:
                use_node = True

            words.extend(new_words)
            i += 1

    # Check for USE node - no {
    # USE #id - should always be on the same line.
    if use_node:
        # print('LINE', i, words[:words.index('USE')+2])
        words[:] = words[:words.index('USE') + 2]
        if lines[i] == '{' and lines[i + 1] == '}':
            # USE sometimes has {} after it anyway
            i += 2
        return NODE_REFERENCE, i

    # print("error value!!!", words)
    return 0, -1
def is_nodeline(i, words):
    """Classify the line at ``lines[i]``.

    Returns ``(node_type, next_index)`` where *node_type* is one of the
    NODE_* constants, or ``(0, 0)`` when the line does not start a node.
    *words* is filled in place with the node's header tokens.
    """
    if not lines[i][0].isalpha():
        return 0, 0

    #if lines[i].startswith('field'):
    #   return 0, 0

    # Is this a prototype??
    if lines[i].startswith('PROTO'):
        words[:] = lines[i].split()
        return NODE_NORMAL, i + 1  # TODO - assumes the next line is a '[\n', skip that
    if lines[i].startswith('EXTERNPROTO'):
        words[:] = lines[i].split()
        return NODE_ARRAY, i + 1  # TODO - assumes the next line is a '[\n', skip that

    # Disabled: is_protoline()/proto_field_defs are not in scope here.
    '''
    proto_type, new_i = is_protoline(i, words, proto_field_defs)
    if new_i != -1:
        return proto_type, new_i
    '''

    # Simple "var [" type
    if lines[i + 1] == '[':
        if lines[i].count('"') % 2 == 0:
            words[:] = lines[i].split()
            return NODE_ARRAY, i + 2

    node_type, new_i = getNodePreText(i, words)

    if not node_type:
        if DEBUG:
            print("not node_type", lines[i])
        return 0, 0

    # Ok, we have a { after some values
    # Check the values are not fields
    for i, val in enumerate(words):
        if i != 0 and words[i - 1] in {'DEF', 'USE'}:
            # ignore anything after DEF, it is a ID and can contain any chars.
            pass
        elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}:
            pass
        else:
            # There is a number in one of the values, therefor we are not a node.
            return 0, 0

    #if node_type==NODE_REFERENCE:
    #    print(words, "REF_!!!!!!!")
    return node_type, new_i
def is_numline(i):
    """
    Does this line start with a number?
    """

    # Works but too slow.
    '''
    l = lines[i]
    for w in l.split():
        if w == ',':
            pass
        else:
            try:
                float(w)
                return True

            except:
                return False

    return False
    '''

    l = lines[i]

    line_start = 0

    if l.startswith(', '):
        line_start += 2

    # NOTE(review): when the line has no space after line_start this slices
    # off the final character (len(l) - 1); appears to rely on the spacing
    # vrmlFormat() guarantees — confirm before reusing elsewhere.
    line_end = len(l) - 1
    line_end_new = l.find(' ', line_start)  # comma's always have a space before them

    if line_end_new != -1:
        line_end = line_end_new

    try:
        float(l[line_start:line_end])  # works for a float or int
        return True
    except:
        return False
class vrmlNode(object):
    """A single node in the parsed VRML/X3D scene graph.

    Root nodes (initialized via ``setRoot()``) own the per-file DEF/ROUTE/
    PROTO namespaces; every other node reaches them through its parent chain.
    """
    # NOTE: 'parent' was listed twice in the original tuple; duplicate
    # __slots__ entries are redundant, so it is declared only once here.
    __slots__ = ('id',
                 'fields',
                 'proto_node',
                 'proto_field_defs',
                 'proto_fields',
                 'node_type',
                 'parent',
                 'children',
                 'array_data',
                 'reference',
                 'lineno',
                 'filename',
                 'blendObject',
                 'blendData',
                 'DEF_NAMESPACE',
                 'ROUTE_IPO_NAMESPACE',
                 'PROTO_NAMESPACE',
                 'x3dNode',
                 'parsed')

    def __init__(self, parent, node_type, lineno):
        """Create a node of *node_type* at source line *lineno*, appending it
        to *parent*'s children when a parent is given."""
        self.id = None
        self.node_type = node_type
        self.parent = parent
        self.blendObject = None
        self.blendData = None
        self.x3dNode = None  # for x3d import only
        self.parsed = None  # We try to reuse objects in a smart way
        if parent:
            parent.children.append(self)

        self.lineno = lineno

        # This is only set from the root nodes.
        # Having a filename also denotes a root node
        self.filename = None
        self.proto_node = None  # proto field definition eg: "field SFColor seatColor .6 .6 .1"

        # Store in the root node because each inline file needs its own root node and its own namespace
        self.DEF_NAMESPACE = None
        self.ROUTE_IPO_NAMESPACE = None
        '''
        self.FIELD_NAMESPACE = None
        '''

        self.PROTO_NAMESPACE = None

        self.reference = None

        if node_type == NODE_REFERENCE:
            # For references, only the parent and ID are needed
            # the reference its self is assigned on parsing
            return

        self.fields = []  # fields have no order, in some cases root level values are not unique so dont use a dict

        self.proto_field_defs = []  # proto field definition eg: "field SFColor seatColor .6 .6 .1"
        self.proto_fields = []  # proto field usage "diffuseColor IS seatColor"
        self.children = []
        self.array_data = []  # use for arrays of data - should only be for NODE_ARRAY types
    # Only available from the root node
    '''
    def getFieldDict(self):
        if self.FIELD_NAMESPACE is not None:
            return self.FIELD_NAMESPACE
        else:
            return self.parent.getFieldDict()
    '''

    def getProtoDict(self):
        """Return the PROTO namespace, walking up to the root node that owns it."""
        if self.PROTO_NAMESPACE is not None:
            return self.PROTO_NAMESPACE
        else:
            return self.parent.getProtoDict()

    def getDefDict(self):
        """Return the DEF namespace, walking up to the root node that owns it."""
        if self.DEF_NAMESPACE is not None:
            return self.DEF_NAMESPACE
        else:
            return self.parent.getDefDict()

    def getRouteIpoDict(self):
        """Return the ROUTE/IPO namespace, walking up to the root node that owns it."""
        if self.ROUTE_IPO_NAMESPACE is not None:
            return self.ROUTE_IPO_NAMESPACE
        else:
            return self.parent.getRouteIpoDict()
454 def setRoot(self, filename):
455 self.filename = filename
456 # self.FIELD_NAMESPACE = {}
457 self.DEF_NAMESPACE = {}
458 self.ROUTE_IPO_NAMESPACE = {}
459 self.PROTO_NAMESPACE = {}
461 def isRoot(self):
462 if self.filename is None:
463 return False
464 else:
465 return True
467 def getFilename(self):
468 if self.filename:
469 return self.filename
470 elif self.parent:
471 return self.parent.getFilename()
472 else:
473 return None
475 def getRealNode(self):
476 if self.reference:
477 return self.reference
478 else:
479 return self
    def getSpec(self):
        """Return this node's spec (last id token, e.g. 'Transform'), or None."""
        self_real = self.getRealNode()
        try:
            return self_real.id[-1]  # its possible this node has no spec
        except:
            return None

    def findSpecRecursive(self, spec):
        """Search self and descendants for a node whose spec equals *spec*.

        NOTE(review): when the match sits deeper in the tree this returns the
        immediate child whose subtree matched, not the matching descendant —
        confirm this is intended (the EXTERNPROTO lookup in parse() uses it).
        """
        self_real = self.getRealNode()
        if spec == self_real.getSpec():
            return self

        for child in self_real.children:
            if child.findSpecRecursive(spec):
                return child

        return None

    def getPrefix(self):
        """Return the first id token (eg 'DEF'), or None when id is unset."""
        if self.id:
            return self.id[0]
        return None

    def getSpecialTypeName(self, typename):
        """Return the id token following *typename* (eg the name after 'DEF'), or None."""
        self_real = self.getRealNode()
        try:
            return self_real.id[list(self_real.id).index(typename) + 1]
        except:
            return None

    def getDefName(self):
        """Return this node's DEF name, or None."""
        return self.getSpecialTypeName('DEF')

    def getProtoName(self):
        """Return this node's PROTO name, or None."""
        return self.getSpecialTypeName('PROTO')

    def getExternprotoName(self):
        """Return this node's EXTERNPROTO name, or None."""
        return self.getSpecialTypeName('EXTERNPROTO')
520 def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
521 self_real = self.getRealNode()
522 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
523 if type(node_spec) == str:
524 return [child for child in self_real.children if child.getSpec() == node_spec]
525 else:
526 # Check inside a list of optional types
527 return [child for child in self_real.children if child.getSpec() in node_spec]
529 def getChildrenBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
530 self_real = self.getRealNode()
531 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
532 return [child for child in self_real.children if cond(child.getSpec())]
534 def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
535 # Use in cases where there is only ever 1 child of this type
536 ls = self.getChildrenBySpec(node_spec)
537 if ls:
538 return ls[0]
539 else:
540 return None
542 def getChildBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
543 # Use in cases where there is only ever 1 child of this type
544 ls = self.getChildrenBySpecCondition(cond)
545 if ls:
546 return ls[0]
547 else:
548 return None
    def getChildrenByName(self, node_name):  # type could be geometry, children, appearance
        """Return children whose first id token equals *node_name*."""
        self_real = self.getRealNode()
        return [child for child in self_real.children if child.id if child.id[0] == node_name]

    def getChildByName(self, node_name):
        """Return the first child whose first id token equals *node_name*,
        or None (implicitly) when there is no match."""
        self_real = self.getRealNode()
        for child in self_real.children:
            if child.id and child.id[0] == node_name:  # and child.id[-1]==node_spec:
                return child
    def getSerialized(self, results, ancestry):
        """Flatten this subtree into *results* as ``(node, ancestry_tuple)``
        pairs, depth-first.  PROTO/EXTERNPROTO children are only descended
        into when this node is an instance of them.  Returns *results*."""
        ancestry = ancestry[:]  # always use a copy

        # self_real = self.getRealNode()

        results.append((self, tuple(ancestry)))
        ancestry.append(self)
        for child in self.getRealNode().children:
            if child not in ancestry:
                # We dont want to load proto's, they are only references
                # We could enforce this elsewhere

                # Only add this in a very special case
                # where the parent of this object is not the real parent
                # - In this case we have added the proto as a child to a node instancing it.
                # This is a bit arbitary, but its how Proto's are done with this importer.
                if child.getProtoName() is None and child.getExternprotoName() is None:
                    child.getSerialized(results, ancestry)
                else:

                    if DEBUG:
                        print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec())

                    self_spec = self.getSpec()

                    if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec:
                        #if DEBUG:
                        #    "FoundProto!"
                        child.getSerialized(results, ancestry)

        return results
593 def searchNodeTypeID(self, node_spec, results):
594 self_real = self.getRealNode()
595 # print(self.lineno, self.id)
596 if self_real.id and self_real.id[-1] == node_spec: # use last element, could also be only element
597 results.append(self_real)
598 for child in self_real.children:
599 child.searchNodeTypeID(node_spec, results)
600 return results
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        """Look up *field* on this node, resolving PROTO 'IS' indirection.

        Returns the field's value tokens (a list), a child node when
        *AS_CHILD* is True, or None when the field is absent.  *ancestry*
        is the chain of nodes above this one; it is walked to find the
        PROTO definition/instance that supplies 'IS'-mapped values.

        NOTE(review): *SPLIT_COMMAS* is accepted but never used in this
        method (kept for call-site compatibility) — confirm.
        """
        self_real = self.getRealNode()  # in case we're an instance

        for f in self_real.fields:
            # print(f)
            if f and f[0] == field:
                # print('\tfound field', f)

                if len(f) >= 3 and f[1] == 'IS':  # eg: 'diffuseColor IS legColor'
                    field_id = f[2]

                    # print("\n\n\n\n\n\nFOND IS!!!")
                    f_proto_lookup = None
                    f_proto_child_lookup = None
                    i = len(ancestry)
                    while i:
                        i -= 1
                        node = ancestry[i]
                        node = node.getRealNode()

                        # proto settings are stored in "self.proto_node"
                        if node.proto_node:
                            # Get the default value from the proto, this can be overwridden by the proto instace
                            # 'field SFColor legColor .8 .4 .7'
                            if AS_CHILD:
                                for child in node.proto_node.children:
                                    #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
                                    if child.id and ('point' in child.id or 'points' in child.id):
                                        f_proto_child_lookup = child
                            else:
                                for f_def in node.proto_node.proto_field_defs:
                                    if len(f_def) >= 4:
                                        if f_def[0] == 'field' and f_def[2] == field_id:
                                            f_proto_lookup = f_def[3:]

                        # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
                        # This is the setting as defined by the instance, including this setting is optional,
                        # and will override the default PROTO value
                        # eg: 'legColor 1 0 0'
                        if AS_CHILD:
                            for child in node.children:
                                if child.id and child.id[0] == field_id:
                                    f_proto_child_lookup = child
                        else:
                            for f_def in node.fields:
                                if len(f_def) >= 2:
                                    if f_def[0] == field_id:
                                        if DEBUG:
                                            print("getFieldName(), found proto", f_def)
                                        f_proto_lookup = f_def[1:]

                    if AS_CHILD:
                        if f_proto_child_lookup:
                            if DEBUG:
                                print("getFieldName() - AS_CHILD=True, child found")
                                print(f_proto_child_lookup)
                        return f_proto_child_lookup
                    else:
                        return f_proto_lookup
                else:
                    if AS_CHILD:
                        return None
                    else:
                        # Not using a proto
                        return f[1:]
        # print('\tfield not found', field)

        # See if this is a proto name
        if AS_CHILD:
            for child in self_real.children:
                if child.id and len(child.id) == 1 and child.id[0] == field:
                    return child

        return None
    def getFieldAsInt(self, field, default, ancestry):
        """Return *field* parsed as a single int, or *default* on failure."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if ',' in f:
            f = f[:f.index(',')]  # strip after the comma

        if len(f) != 1:
            print('\t"%s" wrong length for int conversion for field "%s"' % (f, field))
            return default

        try:
            return int(f[0])
        except:
            print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field))
            return default

    def getFieldAsFloat(self, field, default, ancestry):
        """Return *field* parsed as a single float, or *default* on failure."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if ',' in f:
            f = f[:f.index(',')]  # strip after the comma

        if len(f) != 1:
            print('\t"%s" wrong length for float conversion for field "%s"' % (f, field))
            return default

        try:
            return float(f[0])
        except:
            print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field))
            return default
    def getFieldAsFloatTuple(self, field, default, ancestry):
        """Return *field* as a list of floats (commas skipped), or *default*
        when the field is missing or yields no floats."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        # if ',' in f: f = f[:f.index(',')]  # strip after the comma

        if len(f) < 1:
            print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field))
            return default

        ret = []
        for v in f:
            if v != ',':
                try:
                    ret.append(float(v))
                except:
                    break  # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
        # print(ret)

        if ret:
            return ret
        if not ret:
            print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field))
            return default
743 def getFieldAsBool(self, field, default, ancestry):
744 self_real = self.getRealNode() # in case we're an instance
746 f = self_real.getFieldName(field, ancestry)
747 if f is None:
748 return default
749 if ',' in f:
750 f = f[:f.index(',')] # strip after the comma
752 if len(f) != 1:
753 print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field))
754 return default
756 if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE':
757 return True
758 elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE':
759 return False
760 else:
761 print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field))
762 return default
    def getFieldAsString(self, field, default, ancestry):
        """Return *field* as a string (surrounding quotes stripped for VRML;
        returned verbatim for X3D nodes), or *default* on failure."""
        self_real = self.getRealNode()  # in case we're an instance

        f = self_real.getFieldName(field, ancestry)
        if f is None:
            return default
        if len(f) < 1:
            print('\t"%s" wrong length for string conversion for field "%s"' % (f, field))
            return default

        if len(f) > 1:
            # String may contain spaces
            st = ' '.join(f)
        else:
            st = f[0]

        # X3D HACK
        if self.x3dNode:
            return st

        if st[0] == '"' and st[-1] == '"':
            return st[1:-1]
        else:
            print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field))
            return default
    def getFieldAsArray(self, field, group, ancestry):
        """
        For this parser arrays are children.

        Return the numeric array stored under *field*, regrouped into
        sublists of length *group* (-1: as stored, 0: flattened).
        """
        def array_as_number(array_string):
            # Try ints first (base auto-detected), then floats; empty list on
            # total failure.
            array_data = []
            try:
                array_data = [int(val, 0) for val in array_string]
            except:
                try:
                    array_data = [float(val) for val in array_string]
                except:
                    print('\tWarning, could not parse array data from field')

            return array_data

        self_real = self.getRealNode()  # in case we're an instance

        child_array = self_real.getFieldName(field, ancestry, True, SPLIT_COMMAS=True)

        #if type(child_array)==list:  # happens occasionaly
        #    array_data = child_array

        if child_array is None:
            # For x3d, should work ok with vrml too
            # for x3d arrays are fields, vrml they are nodes, annoying but not tooo bad.
            data_split = self.getFieldName(field, ancestry, SPLIT_COMMAS=True)
            if not data_split:
                return []

            array_data = array_as_number(data_split)

        elif type(child_array) == list:
            # x3d creates these
            array_data = array_as_number(child_array)
        else:
            # print(child_array)
            # Normal vrml
            array_data = child_array.array_data

        # print('array_data', array_data)
        if group == -1 or len(array_data) == 0:
            return array_data

        # We want a flat list
        flat = True
        for item in array_data:
            if type(item) == list:
                flat = False
                break

        # make a flat array
        if flat:
            flat_array = array_data  # we are already flat.
        else:
            flat_array = []

            def extend_flat(ls):
                # Recursively flatten nested lists into flat_array.
                for item in ls:
                    if type(item) == list:
                        extend_flat(item)
                    else:
                        flat_array.append(item)

            extend_flat(array_data)

        # We requested a flat array
        if group == 0:
            return flat_array

        new_array = []
        sub_array = []

        for item in flat_array:
            sub_array.append(item)
            if len(sub_array) == group:
                new_array.append(sub_array)
                sub_array = []

        if sub_array:
            print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array)

        return new_array
    def getFieldAsStringArray(self, field, ancestry):
        """
        Get a list of strings: the fields of the child named *field*, with
        surrounding quotes stripped.  Returns [] when no such child exists.
        """
        self_real = self.getRealNode()  # in case we're an instance

        child_array = None
        for child in self_real.children:
            if child.id and len(child.id) == 1 and child.id[0] == field:
                child_array = child
                break
        if not child_array:
            return []

        # each string gets its own list, remove ""'s
        try:
            new_array = [f[0][1:-1] for f in child_array.fields]
        except:
            print('\twarning, string array could not be made')
            new_array = []

        return new_array
898 def getLevel(self):
899 # Ignore self_real
900 level = 0
901 p = self.parent
902 while p:
903 level += 1
904 p = p.parent
905 if not p:
906 break
908 return level
    def __repr__(self):
        """Indented, multi-line dump of this node and its subtree (debug aid)."""
        level = self.getLevel()
        ind = ' ' * level
        if self.node_type == NODE_REFERENCE:
            brackets = ''
        elif self.node_type == NODE_NORMAL:
            brackets = '{}'
        else:
            brackets = '[]'

        if brackets:
            text = ind + brackets[0] + '\n'
        else:
            text = ''

        text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno)

        if self.node_type == NODE_REFERENCE:
            # Reference nodes carry no fields/children of their own.
            text += ind + "(reference node)\n"
            return text

        if self.proto_node:
            text += ind + 'PROTO NODE...\n'
            text += str(self.proto_node)
            text += ind + 'PROTO NODE_DONE\n'

        text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'

        for i, item in enumerate(self.fields):
            text += ind + 'FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n'

        for i, item in enumerate(self.proto_field_defs):
            text += ind + 'PROTO_FIELD:\n'
            text += ind + str(item) + '\n'

        text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n'
        #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'

        text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n'
        for i, child in enumerate(self.children):
            text += ind + ('CHILD%d:\n' % i)
            text += str(child)

        text += '\n' + ind + brackets[1]

        return text
    def parse(self, i, IS_PROTO_DATA=False):
        """Parse this node starting at line index *i* of the global ``lines``
        buffer; returns the index of the first line after the node.

        After the structural parse, Inline nodes and EXTERNPROTO references
        are resolved by loading the referenced file into a temporary root
        node (``lines`` is swapped out and restored around the recursion).

        NOTE(review): relies on ``bpy`` being imported elsewhere in this
        module — confirm.
        """
        new_i = self.__parse(i, IS_PROTO_DATA)

        # print(self.id, self.getFilename())

        # Check if this node was an inline or externproto

        url_ls = []

        if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
            ancestry = []  # Warning! - PROTO's using this wont work at all.
            url = self.getFieldAsString('url', None, ancestry)
            if url:
                url_ls = [(url, None)]
            del ancestry

        elif self.getExternprotoName():
            # externproto
            url_ls = []
            for f in self.fields:

                if type(f) == str:
                    f = [f]

                for ff in f:
                    for f_split in ff.split('"'):
                        # print(f_split)
                        # "someextern.vrml#SomeID"
                        if '#' in f_split:

                            f_split, f_split_id = f_split.split('#')  # there should only be 1 # anyway

                            url_ls.append((f_split, f_split_id))
                        else:
                            url_ls.append((f_split, None))

        # Was either an Inline or an EXTERNPROTO
        if url_ls:

            # print(url_ls)

            for url, extern_key in url_ls:
                print(url)
                # Candidate locations: as given, relative to this file, and
                # by basename next to this file — each also case-resolved.
                urls = []
                urls.append(url)
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), url))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url)))
                urls.append(bpy.path.resolve_ncase(urls[-1]))

                try:
                    url = [url for url in urls if os.path.exists(url)][0]
                    url_found = True
                except:
                    url_found = False

                if not url_found:
                    print('\tWarning: Inline URL could not be found:', url)
                else:
                    if url == self.getFilename():
                        print('\tWarning: cant Inline yourself recursively:', url)
                    else:

                        try:
                            data = gzipOpen(url)
                        except:
                            print('\tWarning: cant open the file:', url)
                            data = None

                        if data:
                            # Tricky - inline another VRML
                            print('\tLoading Inline:"%s"...' % url)

                            # Watch it! - backup lines
                            lines_old = lines[:]

                            lines[:] = vrmlFormat(data)

                            lines.insert(0, '{')
                            lines.insert(0, 'root_node____')
                            lines.append('}')
                            '''
                            ff = open('/tmp/test.txt', 'w')
                            ff.writelines([l+'\n' for l in lines])
                            '''

                            child = vrmlNode(self, NODE_NORMAL, -1)
                            child.setRoot(url)  # initialized dicts
                            child.parse(0)

                            # if self.getExternprotoName():
                            if self.getExternprotoName():
                                if not extern_key:  # if none is spesified - use the name
                                    extern_key = self.getSpec()

                                if extern_key:

                                    # Detach the temporary root and graft only
                                    # the subtree matching the extern key.
                                    self.children.remove(child)
                                    child.parent = None

                                    extern_child = child.findSpecRecursive(extern_key)

                                    if extern_child:
                                        self.children.append(extern_child)
                                        extern_child.parent = self

                                        if DEBUG:
                                            print("\tEXTERNPROTO ID found!:", extern_key)
                                    else:
                                        print("\tEXTERNPROTO ID not found!:", extern_key)

                            # Watch it! - restore lines
                            lines[:] = lines_old

        return new_i
    def __parse(self, i, IS_PROTO_DATA=False):
        """Recursive-descent worker for parse(): reads this node's header,
        DEF/USE and PROTO bookkeeping, then its fields, arrays and children
        from the global ``lines`` buffer.  Returns the index of the line
        following this node."""
        '''
        print('parsing at', i, end="")
        print(i, self.id, self.lineno)
        '''

        l = lines[i]

        if l == '[':
            # An anonymous list
            self.id = None
            i += 1
        else:
            words = []

            node_type, new_i = is_nodeline(i, words)
            if not node_type:  # fail for parsing new node.
                print("Failed to parse new node")
                raise ValueError

            if self.node_type == NODE_REFERENCE:
                # Only assign the reference and quit
                key = words[words.index('USE') + 1]
                self.id = (words[0],)

                self.reference = self.getDefDict()[key]
                return new_i

            self.id = tuple(words)

            # fill in DEF/USE
            key = self.getDefName()
            if key is not None:
                self.getDefDict()[key] = self

            key = self.getProtoName()
            if not key:
                key = self.getExternprotoName()

            proto_dict = self.getProtoDict()
            if key is not None:
                proto_dict[key] = self

                # Parse the proto nodes fields
                self.proto_node = vrmlNode(self, NODE_ARRAY, new_i)
                new_i = self.proto_node.parse(new_i)

                # The proto field list is bookkeeping, not a scene child.
                self.children.remove(self.proto_node)

                # print(self.proto_node)

                new_i += 1  # skip past the {

            else:  # If we're a proto instance, add the proto node as our child.
                spec = self.getSpec()
                try:
                    self.children.append(proto_dict[spec])
                    #pass
                except:
                    pass

                del spec

            del proto_dict, key

        i = new_i

        # print(self.id)
        ok = True
        while ok:
            if i >= len(lines):
                return len(lines) - 1

            l = lines[i]
            # print('\tDEBUG:', i, self.node_type, l)
            if l == '':
                i += 1
                continue

            if l == '}':
                if self.node_type != NODE_NORMAL:  # also ends proto nodes, we may want a type for these too.
                    print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1
            if l == ']':
                if self.node_type != NODE_ARRAY:
                    print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type))
                    if DEBUG:
                        raise ValueError
                ### print("returning", i)
                return i + 1

            node_type, new_i = is_nodeline(i, [])
            if node_type:  # check text\n{
                child = vrmlNode(self, node_type, i)
                i = child.parse(i)

            elif l == '[':  # some files have these anonymous lists
                child = vrmlNode(self, NODE_ARRAY, i)
                i = child.parse(i)

            elif is_numline(i):
                l_split = l.split(',')

                values = None
                # See if each item is a float?

                for num_type in (int, float):
                    try:
                        values = [num_type(v) for v in l_split]
                        break
                    except:
                        pass

                    try:
                        values = [[num_type(v) for v in segment.split()] for segment in l_split]
                        break
                    except:
                        pass

                if values is None:  # dont parse
                    values = l_split

                # This should not extend over multiple lines however it is possible
                # print(self.array_data)
                if values:
                    self.array_data.extend(values)
                i += 1
            else:
                words = l.split()
                if len(words) > 2 and words[1] == 'USE':
                    vrmlNode(self, NODE_REFERENCE, i)
                else:

                    # print("FIELD", i, l)

                    #words = l.split()
                    ### print('\t\ttag', i)
                    # this is a tag/
                    # print(words, i, l)
                    value = l
                    # print(i)
                    # javastrips can exist as values.
                    quote_count = l.count('"')
                    if quote_count % 2:  # odd number?
                        # print('MULTILINE')
                        # Accumulate following lines until the string closes.
                        while 1:
                            i += 1
                            l = lines[i]
                            quote_count = l.count('"')
                            if quote_count % 2:  # odd number?
                                value += '\n' + l[:l.rfind('"')]
                                break  # assume
                            else:
                                value += '\n' + l

                    # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
                    value_all = shlex.split(value, posix=False)

                    for value in vrml_split_fields(value_all):
                        # Split

                        if value[0] == 'field':
                            # field SFFloat creaseAngle 4
                            self.proto_field_defs.append(value)
                        else:
                            self.fields.append(value)
                i += 1
1249 # This is a prerequisite for DEF/USE-based material caching
1250 def canHaveReferences(self):
1251 return self.node_type == NODE_NORMAL and self.getDefName()
1253 # This is a prerequisite for raw XML-based material caching. For now, only for X3D
1254 def desc(self):
1255 return None
def gzipOpen(path):
    """Read a text file that may or may not be gzip-compressed.

    Tries gzip first; on any gzip failure falls back to reading the file
    as plain UTF-8 text ('surrogateescape' keeps arbitrary bytes
    round-trippable). Returns the content as a str, or None when the
    file cannot be read at all.
    """
    import gzip
    import zlib

    data = None
    try:
        # 'with' closes the handle even on failure (the original leaked it).
        with gzip.open(path, 'rb') as gz:
            data = gz.read()
    except (OSError, EOFError, zlib.error):
        # Missing file, not a gzip file, or corrupt stream - fall through.
        pass

    if data is None:
        try:
            # 'r' already gives universal newlines; the old 'rU' mode was
            # deprecated and removed in Python 3.11.
            with open(path, 'r', encoding='utf-8', errors='surrogateescape') as fh:
                data = fh.read()
        except OSError:
            import traceback
            traceback.print_exc()
    else:
        data = data.decode(encoding='utf-8', errors='surrogateescape')

    return data
def vrml_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (vrmlNode, '') or (None, 'Error String')
    """
    data = gzipOpen(path)
    if data is None:
        return None, 'Failed to open file: ' + path

    # Refill the module-level token list in place (comments stripped above).
    lines[:] = vrmlFormat(data)

    # Wrap everything in a dummy node so parsing has a single entry scope.
    lines.insert(0, '{')
    lines.insert(0, 'dymmy_node')
    lines.append('}')

    # Use for testing our parsed output, so we can check on line numbers.
    if DEBUG:
        with open('/tmp/test.txt', 'w') as dump:
            dump.writelines(['%s\n' % l for l in lines])

    # Now evaluate it
    node_type, new_i = is_nodeline(0, [])
    if not node_type:
        return None, 'Error: VRML file has no starting Node'

    # Trick to make sure we get all root nodes.
    lines.insert(0, '{')
    lines.insert(0, 'root_node____')  # important the name starts with an ascii char
    lines.append('}')

    root = vrmlNode(None, NODE_NORMAL, -1)
    root.setRoot(path)  # we need to set the root so we have a namespace and know the path in case of inlineing

    # Parse recursively
    root.parse(0)

    # This prints a load of text
    if DEBUG:
        print(root)

    return root, ''
1328 # ====================== END VRML
1330 # ====================== X3d Support
# Same as vrml but replace the parser
class x3dNode(vrmlNode):
    """A vrmlNode backed by an xml.dom.minidom element from an X3D file.

    Reuses all the vrmlNode utility methods (getFieldAsFloat & co.) while
    sourcing field data from XML attributes instead of parsed VRML text.
    """

    def __init__(self, parent, node_type, x3dNode):
        vrmlNode.__init__(self, parent, node_type, -1)
        # The underlying minidom element for this node.
        self.x3dNode = x3dNode

    def parse(self, IS_PROTO_DATA=False):
        # Recursively wrap the DOM tree in x3dNode instances, registering
        # DEF names and resolving USE references along the way.
        # print(self.x3dNode.tagName)
        self.lineno = self.x3dNode.parse_position[0]

        define = self.x3dNode.getAttributeNode('DEF')
        if define:
            self.getDefDict()[define.value] = self
        else:
            use = self.x3dNode.getAttributeNode('USE')
            if use:
                try:
                    self.reference = self.getDefDict()[use.value]
                    self.node_type = NODE_REFERENCE
                except:
                    # Unknown DEF name: drop this node from the tree.
                    print('\tWarning: reference', use.value, 'not found')
                    self.parent.children.remove(self)

                # USE nodes never parse their own children.
                return

        for x3dChildNode in self.x3dNode.childNodes:
            # Skip non-element nodes; only real elements become children.
            if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}:
                continue

            node_type = NODE_NORMAL
            # print(x3dChildNode, dir(x3dChildNode))
            if x3dChildNode.getAttributeNode('USE'):
                node_type = NODE_REFERENCE

            child = x3dNode(self, node_type, x3dChildNode)
            child.parse()

        # TODO - x3d Inline

    def getSpec(self):
        # The node spec is simply the XML tag name.
        return self.x3dNode.tagName  # should match vrml spec

    # Used to retain object identifiers from X3D to Blender
    def getDefName(self):
        node_id = self.x3dNode.getAttributeNode('DEF')
        if node_id:
            return node_id.value
        node_id = self.x3dNode.getAttributeNode('USE')
        if node_id:
            return "USE_" + node_id.value
        return None

    # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
    # getFieldAsArray getFieldAsBool etc
    def getFieldName(self, field, ancestry, AS_CHILD=False, SPLIT_COMMAS=False):
        # ancestry and AS_CHILD are ignored, only used for VRML now
        self_real = self.getRealNode()  # in case we're an instance
        # NOTE(review): self_real is unused below; the attribute is read
        # from self.x3dNode - confirm whether the real node was intended.
        field_xml = self.x3dNode.getAttributeNode(field)
        if field_xml:
            value = field_xml.value

            # We may want to edit. for x3d specific stuff
            # Sucks a bit to return the field name in the list but vrml expects this :/
            if SPLIT_COMMAS:
                value = value.replace(",", " ")
            return value.split()
        else:
            return None

    def canHaveReferences(self):
        # DEF'd nodes may be referenced (and cached) elsewhere.
        return self.x3dNode.getAttributeNode('DEF')

    def desc(self):
        # Raw XML serialization - used as a material cache key.
        return self.getRealNode().x3dNode.toxml()
def x3d_parse(path):
    """
    Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
    Return root (x3dNode, '') or (None, 'Error String')
    """
    import xml.dom.minidom
    import xml.sax

    # Disabled pre-validation pass (would also break gzipped input):
    # try:    doc = xml.dom.minidom.parse(path)
    # except: return None, 'Could not parse this X3D file, XML error'

    # Could add a try/except here, but a console error is more useful.
    data = gzipOpen(path)

    if data is None:
        return None, 'Failed to open file: ' + path

    # Monkey-patch the SAX machinery so every DOM element records the
    # (line, column) it came from - kinda brittle.
    def set_content_handler(dom_handler):
        def startElementNS(name, tagName, attrs):
            original_start(name, tagName, attrs)
            dom_handler.elementStack[-1].parse_position = (
                parser._parser.CurrentLineNumber,
                parser._parser.CurrentColumnNumber,
            )

        original_start = dom_handler.startElementNS
        dom_handler.startElementNS = startElementNS
        original_set_content_handler(dom_handler)

    parser = xml.sax.make_parser()
    original_set_content_handler = parser.setContentHandler
    parser.setContentHandler = set_content_handler

    doc = xml.dom.minidom.parseString(data, parser)

    try:
        x3dnode = doc.getElementsByTagName('X3D')[0]
    except:
        return None, 'Not a valid x3d document, cannot import'

    # NOTE(review): this touches Blender state despite living above the
    # "NO BLENDER CODE ABOVE THIS LINE" marker - consider moving it to
    # load_web3d().
    bpy.ops.object.select_all(action='DESELECT')

    root = x3dNode(None, NODE_NORMAL, x3dnode)
    root.setRoot(path)  # so images and Inline's we load have a relative path
    root.parse()

    return root, ''
1458 ## f = open('/_Cylinder.wrl', 'r')
1459 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1460 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1461 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
1463 import os
1464 files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
1465 files.sort()
1466 tot = len(files)
1467 for i, f in enumerate(files):
1468 #if i < 801:
1469 # continue
1471 f = f.strip()
1472 print(f, i, tot)
1473 vrml_parse(f)
1476 # NO BLENDER CODE ABOVE THIS LINE.
1477 # -----------------------------------------------------------------------------------
1478 import bpy
1479 from bpy_extras import image_utils
1480 from mathutils import Vector, Matrix, Quaternion
1482 GLOBALS = {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Build a 4x4 rotation matrix from an (x, y, z, angle) axis/angle tuple."""
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 matrix scaling by (sx, sy, sz) on the main diagonal."""
    mat = Matrix()  # 4x4 identity by default
    for axis in (0, 1, 2):
        mat[axis][axis] = sca[axis]
    return mat
def translateTransform(node, ancestry):
    """Compose a VRML Transform node into a single 4x4 matrix.

    VRML97 order: T * C * R * SR * S * -SR * -C (translation, center,
    rotation, scaleOrientation, scale, then the inverses). Fields that
    are absent contribute nothing.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    if cent:
        cent_mat = Matrix.Translation(cent)
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    rot_mat = translateRotation(rot) if rot else None
    sca_mat = translateScale(sca) if sca else None

    if scaori:
        scaori_mat = translateRotation(scaori)
        scaori_imat = scaori_mat.inverted()
    else:
        scaori_mat = scaori_imat = None

    tx_mat = Matrix.Translation(tx) if tx else None

    # NOTE(review): Blender 2.80+ mathutils uses '@' for matrix
    # multiplication; '*' here predates that - confirm the target API.
    new_mat = Matrix()
    for mtx in (tx_mat, cent_mat, rot_mat, scaori_mat,
                sca_mat, scaori_imat, cent_imat):
        if mtx:
            new_mat = new_mat * mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Compose a TextureTransform node into a 4x4 matrix (Z axis unused)."""
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    if cent:
        # cent is at a corner by default
        cent_mat = Matrix.Translation(Vector(cent).to_3d())
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    rot_mat = Matrix.Rotation(rot, 4, 'Z') if rot else None
    sca_mat = translateScale((sca[0], sca[1], 0.0)) if sca else None
    tx_mat = Matrix.Translation(Vector(tx).to_3d()) if tx else None

    # Order as specified in the VRML97 docs: -C * S * R * C * T
    new_mat = Matrix()
    for mtx in (cent_imat, sca_mat, rot_mat, cent_mat, tx_mat):
        if mtx:
            new_mat = new_mat * mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate every ancestor Transform (deepest applied last) into mtx
    and convert the result to world space via global_matrix."""
    transform_nodes = [n for n in ancestry if n.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)
    transform_nodes.reverse()

    if mtx is None:
        mtx = Matrix()

    for tnode in transform_nodes:
        mtx = translateTransform(tnode, ancestry) * mtx

    # worldspace matrix
    return global_matrix * mtx
1601 # -----------------------------------------------------------------------------------
1602 # Mesh import utilities
1604 # Assumes that the mesh has tessfaces - doesn't support polygons.
1605 # Also assumes that tessfaces are all triangles.
1606 # Assumes that the sequence of the mesh vertices array matches
1607 # the source file. For indexed meshes, that's almost a given;
1608 # for nonindexed ones, this is a consideration.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Read a Color/ColorRGBA child of geom and paint per-vertex colors
    onto the mesh's triangular tessfaces (alpha is dropped)."""
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if not colors:
        return

    if colors.getSpec() == 'ColorRGBA':
        # Array of arrays; no need to flatten. Keep RGB only.
        rgb = [c[:3] for c in colors.getFieldAsArray('color', 4, ancestry)]
    else:
        rgb = colors.getFieldAsArray('color', 3, ancestry)

    tc = bpymesh.tessface_vertex_colors.new()
    # One pass per triangle corner: colorN takes the color of vertex N.
    for corner, attr in enumerate(("color1", "color2", "color3")):
        tc.data.foreach_set(attr,
                            [channel for face in bpymesh.tessfaces
                             for channel in rgb[face.vertices[corner]]])
# Assumes the vertex order still matches the source file (or the order
# assumed by the spec, e.g. Elevation rows), and that tessfaces are set;
# polygons are not supported here.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Apply a Normal child node's vectors per vertex or per tessface."""
    normals = geom.getChildBySpec('Normal')
    if not normals:
        return

    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    if geom.getFieldAsBool('normalPerVertex', True, ancestry):
        bpymesh.vertices.foreach_set("normal", vectors)
    else:
        bpymesh.tessfaces.foreach_set("normal", vectors)
# Reads the standard Coordinate object - common for all mesh elements -
# and feeds the vertices into the mesh. Do NOT rearrange vertex order:
# other X3D elements may index into it; adjust face indices instead.
# (Vertex culling in IndexedFaceSet is the one deliberate exception.)
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Fill bpymesh.vertices from geom's Coordinate node, in source order."""
    # Flat float array here; IndexedFaceSet caches the same data as a 2D
    # array, where the case for caching is stronger.
    coord = geom.getChildBySpec('Coordinate')
    flat = coord.getFieldAsArray('point', 0, ancestry)
    bpymesh.vertices.add(len(flat) // 3)
    bpymesh.vertices.foreach_set("co", flat)
# Assumes triangular tessfaces in source vertex order. Relies on explicit
# TextureCoordinate data; geometries with spec-defined generated UVs
# (e.g. ElevationGrid) must handle that themselves.
# Texture transform is applied in ProcessObject.
def importMesh_ApplyTextureToTessfaces(bpymesh, geom, ancestry, bpyima):
    """Assign bpyima and per-corner UVs to every triangular tessface."""
    if not bpyima:
        return

    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if not tex_coord:
        return

    coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not coord_points:
        return

    d = bpymesh.tessface_uv_textures.new().data
    for face in d:  # No foreach_set for nonscalars
        face.image = bpyima
    d.foreach_set('uv', [component
                         for face in bpymesh.tessfaces
                         for corner in range(3)
                         for component in coord_points[face.vertices[corner]]])
# Shared tail for every triangle mesh once geometry is in place:
# normals, vertex colors, texture, then validate/update.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima):
    """Apply normals/colors/texture and finalize; returns bpymesh."""
    for apply_step in (importMesh_ApplyNormals, importMesh_ApplyColors):
        apply_step(bpymesh, geom, ancestry)
    importMesh_ApplyTextureToTessfaces(bpymesh, geom, ancestry, bpyima)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
# For meshes stored as polygons + loops: 'loops' is a flat UV array whose
# order follows the mesh's loop array.
def importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops):
    """Assign bpyima and the flat per-loop UV array to a polygon mesh."""
    for f in bpymesh.uv_textures.new().data:
        f.image = bpyima
    bpymesh.uv_layers[0].data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return the sequence unchanged when ccw is truthy, otherwise
    reversed (slicing preserves the input's sequence type)."""
    if ccw:
        return r
    return r[::-1]
1718 # -----------------------------------------------------------------------------------
1719 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry, bpyima):
    """IndexedTriangleSet: every 3 entries of 'index' form one triangle.

    Returns the finalized Blender mesh.
    """
    # Ignoring solid
    # colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # Fix: the mesh was named with the "XXX" placeholder; name it after
    # the node type like every other importer here.
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    n = len(index) // 3
    if not ccw:
        # Swap the first two corners of each triangle to flip winding.
        index = [index[3 * i + j] for i in range(n) for j in (1, 0, 2)]
    bpymesh.tessfaces.add(n)
    bpymesh.tessfaces.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_IndexedTriangleStripSet(geom, ancestry, bpyima):
    """IndexedTriangleStripSet: 'index' holds -1-separated triangle strips;
    a strip of n vertices unrolls into n - 2 triangles with alternating
    winding. Returns the finalized Blender mesh."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    # NOTE(review): assumes a non-empty index array (index[-1] below).
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # Total faces = sum(strip_len - 2); each -1 costs its own slot plus
    # two warm-up slots for the next strip, hence 3 * ngaps.
    bpymesh.tessfaces.add(len(index) - 2 - 3 * ngaps)

    def triangles():
        # i: absolute position of the current triangle's first corner;
        # odd: per-triangle parity so winding stays constant along a strip.
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                # Strip separator: jump past the -1, restart parity.
                i += 3
                odd = cw
    bpymesh.tessfaces.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_IndexedTriangleFanSet(geom, ancestry, bpyima):
    """IndexedTriangleFanSet: 'index' holds -1-separated fans; a fan of
    n vertices yields n - 2 triangles sharing the fan's first (hub)
    vertex. Returns the finalized Blender mesh."""
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    bpymesh.tessfaces.add(len(index) - 2 - 3 * ngaps)

    def triangles():
        # i: absolute index of the current fan's hub vertex;
        # j: offset of the triangle's second corner within the fan.
        i = 0
        j = 1
        while True:
            yield index[i]
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                # Fan separator at i + j + 1: the next hub is just past it.
                # Fix: was 'i = j + 2', which is only correct while i == 0
                # and so corrupted the third and subsequent fans.
                i += j + 2
                j = 1
    bpymesh.tessfaces.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_TriangleSet(geom, ancestry, bpyima):
    """TriangleSet: vertices arrive in threes, each triple is one triangle."""
    # Ignoring solid; colorPerVertex is always true
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    n = len(bpymesh.vertices)
    bpymesh.tessfaces.add(n // 3)
    if ccw:
        fv = list(range(n))
    else:
        # Swap the first two corners of every triangle to flip winding.
        fv = [3 * tri + corner for tri in range(n // 3) for corner in (1, 0, 2)]
    bpymesh.tessfaces.foreach_set("vertices", fv)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_TriangleStripSet(geom, ancestry, bpyima):
    """TriangleStripSet: stripCount[k] consecutive vertices form strip k,
    unrolled into stripCount[k] - 2 triangles with alternating winding."""
    # Ignoring solid; colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    bpymesh.tessfaces.add(sum(n - 2 for n in counts))

    def triangles():
        base = 0
        for count in counts:
            for j in range(count - 2):
                # Parity keeps the winding constant along the strip.
                parity = (j + cw) % 2
                yield base + j + parity
                yield base + j + 1 - parity
                yield base + j + 2
            base += count
    bpymesh.tessfaces.foreach_set("vertices", list(triangles()))

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_TriangleFanSet(geom, ancestry, bpyima):
    """TriangleFanSet: fanCount[k] consecutive vertices form fan k,
    giving fanCount[k] - 2 triangles around the fan's first vertex.

    Returns the finalized Blender mesh.
    """
    # Ignoring solid
    # colorPerVertex is always true
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # Fix: the mesh was misnamed "TriangleStripSet" (copy-paste slip).
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    bpymesh.tessfaces.add(sum([n - 2 for n in counts]))

    def triangles():
        # b is the absolute index of the current fan's hub vertex.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.tessfaces.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
def importMesh_IndexedFaceSet(geom, ancestry, bpyima):
    """IndexedFaceSet importer: polygons via from_pydata, with optional
    vertex culling, per-vertex/per-face normals and colors, and UVs
    (explicit TextureCoordinate or the spec's bounding-box fallback)."""
    # Saw the following structure in X3Ds: the first mesh has a huge set
    # of vertices and a reasonably sized index. The rest of the meshes
    # reference the Coordinate node from the first one, and have their
    # own reasonably sized indices.
    #
    # In Blender, to the best of my knowledge, there's no way to reuse
    # the vertex set between meshes. So we have culling logic instead -
    # for each mesh, only leave vertices that are used for faces.

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            # Cache the 2D point array for later USE references.
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Drop trailing face separators.
    while index and index[-1] == -1:
        del index[-1]

    # Heuristic: cull only when the vertex set is much larger than the
    # index (i.e. this mesh uses a small part of a shared Coordinate).
    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
        new_index = 0
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
            face = []
        else:
            if cull is not None:
                if not(i in cull):
                    culled_points.append(points[i])
                    cull[i] = new_index
                    uncull.append(i)
                    i = new_index
                    new_index += 1
                else:
                    i = cull[i]
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        # Map a flat -1-separated per-vertex index onto the (possibly
        # culled, possibly flipped) face list.
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            # No explicit index, but faces were renumbered by culling:
            # translate back to the original vertex numbering.
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces) for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            # Array of arrays; no need to flatten
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        color_per_vertex = geom.getFieldAsBool('colorPerVertex',
                                               True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces) for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces) for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture
    if bpyima:
        tex_coord = geom.getChildBySpec('TextureCoordinate')
        if tex_coord:
            tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
            tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
            tex_index = processPerVertexIndex(tex_index)
            loops = [co for f in tex_index
                     for v in f for co in tex_coord_points[v]]
        else:
            # Fallback UV generation: project onto the two largest
            # bounding-box axes (s = largest, t = second largest).
            x_min = x_max = y_min = y_max = z_min = z_max = None
            for f in faces:
                # Unused vertices don't participate in size; X3DOM does so
                for v in f:
                    (x, y, z) = points[v]
                    if x_min is None or x < x_min:
                        x_min = x
                    if x_max is None or x > x_max:
                        x_max = x
                    if y_min is None or y < y_min:
                        y_min = y
                    if y_max is None or y > y_max:
                        y_max = y
                    if z_min is None or z < z_min:
                        z_min = z
                    if z_max is None or z > z_max:
                        z_max = z

            mins = (x_min, y_min, z_min)
            deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
            axes = [0, 1, 2]
            axes.sort(key=lambda a: (-deltas[a], a))
            # Tuple comparison breaks ties
            (s_axis, t_axis) = axes[0:2]
            s_min = mins[s_axis]
            ds = deltas[s_axis]
            t_min = mins[t_axis]
            dt = deltas[t_axis]

            def generatePointCoords(pt):
                return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
            loops = [co for f in faces for v in f
                     for co in generatePointCoords(points[v])]

        importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry, bpyima):
    """ElevationGrid: a height field of x_dim * z_dim samples with quad
    faces. Vertices are laid out row-major by z (index = z * x_dim + x),
    which is the layout the face, color and texture code below indexes
    with. Returns the finalized Blender mesh."""
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    # Fix: generate vertices z-major (z in the OUTER loop) so that vertex
    # index z * x_dim + x - the formula used by the faces/colors/UVs
    # below - addresses the (x, z) sample. The previous x-major loop was
    # only correct for square grids (x_dim == z_dim).
    co = [w for z in range(z_dim) for x in range(x_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    bpymesh.tessfaces.add((x_dim - 1) * (z_dim - 1))
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it's important that the final vertex index is not 0
    # (Blender treats it as a triangle then).
    # So simply reversing the face is not an option.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.tessfaces.foreach_set("vertices_raw", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
            # Array of arrays; no need to flatten
        else:
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.tessface_vertex_colors.new()
        tcd = tc.data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tcd.foreach_set("color1", [c for x in range(x_dim - 1)
                                       for z in range(z_dim - 1)
                                       for c in rgb[z * x_dim + x]])
            tcd.foreach_set("color2" if ccw else "color4",
                            [c for x in range(x_dim - 1)
                             for z in range(z_dim - 1)
                             for c in rgb[z * x_dim + x + 1]])
            tcd.foreach_set("color3", [c for x in range(x_dim - 1)
                                       for z in range(z_dim - 1)
                                       for c in rgb[(z + 1) * x_dim + x + 1]])
            tcd.foreach_set("color4" if ccw else "color2",
                            [c for x in range(x_dim - 1)
                             for z in range(z_dim - 1)
                             for c in rgb[(z + 1) * x_dim + x]])
        else:  # Coloring per face
            colors = [c for x in range(x_dim - 1)
                      for z in range(z_dim - 1) for c in rgb[z * (x_dim - 1) + x]]
            tcd.foreach_set("color1", colors)
            tcd.foreach_set("color2", colors)
            tcd.foreach_set("color3", colors)
            tcd.foreach_set("color4", colors)

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    if bpyima:
        tex_coord = geom.getChildBySpec('TextureCoordinate')
        if tex_coord:
            coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        else:
            # Fix: generate the default UVs z-major as well, so that
            # coord_points[z * x_dim + x] == (x/(x_dim-1), z/(z_dim-1)),
            # matching the tessface vertex indices read back below. The
            # previous i-outer loop laid them out x-major.
            coord_points = [(i / (x_dim - 1), j / (z_dim - 1))
                            for j in range(z_dim)
                            for i in range(x_dim)]

        d = bpymesh.tessface_uv_textures.new().data
        for face in d:  # No foreach_set for nonscalars
            face.image = bpyima
        # Rather than repeat the face/vertex algorithm from above, we read
        # the vertex index back from tessfaces. Might be suboptimal.
        uv = [i for face in bpymesh.tessfaces
              for vno in range(4)
              for i in coord_points[face.vertices[vno]]]
        d.foreach_set('uv_raw', uv)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2129 def importMesh_Extrusion(geom, ancestry, bpyima):
2130 # Interestingly, the spec doesn't allow for vertex/face colors in this
2131 # element, nor for normals.
2132 # Since coloring and normals are not supported here, and also large
2133 # polygons for caps might be required, we shall use from_pydata().
2135 ccw = geom.getFieldAsBool('ccw', True, ancestry)
2136 begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
2137 end_cap = geom.getFieldAsBool('endCap', True, ancestry)
2138 cross = geom.getFieldAsArray('crossSection', 2, ancestry)
2139 if not cross:
2140 cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2141 spine = geom.getFieldAsArray('spine', 3, ancestry)
2142 if not spine:
2143 spine = ((0, 0, 0), (0, 1, 0))
2144 orient = geom.getFieldAsArray('orientation', 4, ancestry)
2145 if orient:
2146 orient = [Quaternion(o[:3], o[3]).to_matrix()
2147 if o[3] else None for o in orient]
2148 scale = geom.getFieldAsArray('scale', 2, ancestry)
2149 if scale:
2150 scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
2151 if s[0] != 1 or s[1] != 1 else None for s in scale]
2153 # Special treatment for the closed spine and cross section.
2154 # Let's save some memory by not creating identical but distinct vertices;
2155 # later we'll introduce conditional logic to link the last vertex with
2156 # the first one where necessary.
2157 cross_closed = cross[0] == cross[-1]
2158 if cross_closed:
2159 cross = cross[:-1]
2160 nc = len(cross)
2161 cross = [Vector((c[0], 0, c[1])) for c in cross]
2162 ncf = nc if cross_closed else nc - 1
2163 # Face count along the cross; for closed cross, it's the same as the
2164 # respective vertex count
2166 spine_closed = spine[0] == spine[-1]
2167 if spine_closed:
2168 spine = spine[:-1]
2169 ns = len(spine)
2170 spine = [Vector(s) for s in spine]
2171 nsf = ns if spine_closed else ns - 1
2173 # This will be used for fallback, where the current spine point joins
2174 # two collinear spine segments. No need to recheck the case of the
2175 # closed spine/last-to-first point juncture; if there's an angle there,
2176 # it would kick in on the first iteration of the main loop by spine.
2177 def findFirstAngleNormal():
2178 for i in range(1, ns - 1):
2179 spt = spine[i]
2180 z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
2181 if z.length > EPSILON:
2182 return z
2183 # All the spines are collinear. Fallback to the rotated source
2184 # XZ plane.
2185 # TODO: handle the situation where the first two spine points match
2186 v = spine[1] - spine[0]
2187 orig_y = Vector((0, 1, 0))
2188 orig_z = Vector((0, 0, 1))
2189 if v.cross(orig_y).length >= EPSILON:
2190 # Spine at angle with global y - rotate the z accordingly
2191 orig_z.rotate(orig_y.rotation_difference(v))
2192 return orig_z
2194 verts = []
2195 z = None
2196 for i, spt in enumerate(spine):
2197 if (i > 0 and i < ns - 1) or spine_closed:
2198 snext = spine[(i + 1) % ns]
2199 sprev = spine[(i - 1 + ns) % ns]
2200 y = snext - sprev
2201 vnext = snext - spt
2202 vprev = sprev - spt
2203 try_z = vnext.cross(vprev)
2204 # Might be zero, then all kinds of fallback
2205 if try_z.length > EPSILON:
2206 if z is not None and try_z.dot(z) < 0:
2207 try_z.negate()
2208 z = try_z
2209 elif not z: # No z, and no previous z.
2210 # Look ahead, see if there's at least one point where
2211 # spines are not collinear.
2212 z = findFirstAngleNormal()
2213 elif i == 0: # And non-crossed
2214 snext = spine[i + 1]
2215 y = snext - spt
2216 z = findFirstAngleNormal()
2217 else: # last point and not crossed
2218 sprev = spine[i - 1]
2219 y = spt - sprev
2220 # If there's more than one point in the spine, z is already set.
2221 # One point in the spline is an error anyway.
2223 x = y.cross(z)
2224 m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
2225 # Columns are the unit vectors for the xz plane for the cross-section
2226 m.normalize()
2227 if orient:
2228 mrot = orient[i] if len(orient) > 1 else orient[0]
2229 if mrot:
2230 m *= mrot # Not sure about this. Counterexample???
2231 if scale:
2232 mscale = scale[i] if len(scale) > 1 else scale[0]
2233 if mscale:
2234 m *= mscale
2235 # First the cross-section 2-vector is scaled,
2236 # then applied to the xz plane unit vectors
2237 for cpt in cross:
2238 verts.append((spt + m * cpt).to_tuple())
2239 # Could've done this with a single 4x4 matrix... Oh well
2241 # The method from_pydata() treats correctly quads with final vertex
2242 # index being zero.
2243 # So we just flip the vertices if ccw is off.
2245 faces = []
2246 if begin_cap:
2247 faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
2249 # Order of edges in the face: forward along cross, forward along spine,
2250 # backward along cross, backward along spine, flipped if now ccw.
2251 # This order is assumed later in the texture coordinate assignment;
2252 # please don't change without syncing.
2254 faces += [flip((
2255 s * nc + c,
2256 s * nc + (c + 1) % nc,
2257 (s + 1) * nc + (c + 1) % nc,
2258 (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
2260 if spine_closed:
2261 # The faces between the last and the first spine poins
2262 b = (ns - 1) * nc
2263 faces += [flip((
2264 b + c,
2265 b + (c + 1) % nc,
2266 (c + 1) % nc,
2267 c), ccw) for c in range(ncf)]
2269 if end_cap:
2270 faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
2272 bpymesh = bpy.data.meshes.new(name="Extrusion")
2273 bpymesh.from_pydata(verts, [], faces)
2275 # Polygons and loops here, not tessfaces. The way we deal with
2276 # textures in triangular meshes doesn't apply.
2277 if bpyima:
2278 # The structure of the loop array goes: cap, side, cap
2279 if begin_cap or end_cap: # Need dimensions
2280 x_min = x_max = z_min = z_max = None
2281 for c in cross:
2282 (x, z) = (c.x, c.z)
2283 if x_min is None or x < x_min:
2284 x_min = x
2285 if x_max is None or x > x_max:
2286 x_max = x
2287 if z_min is None or z < z_min:
2288 z_min = z
2289 if z_max is None or z > z_max:
2290 z_max = z
2291 dx = x_max - x_min
2292 dz = z_max - z_min
2293 cap_scale = dz if dz > dx else dx
2295 # Takes an index in the cross array, returns scaled
2296 # texture coords for cap texturing purposes
2297 def scaledLoopVertex(i):
2298 c = cross[i]
2299 return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
2301 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2303 loops = []
2304 mloops = bpymesh.loops
2305 if begin_cap: # vertex indices match the indices in cross
2306 # Rely on the loops in the mesh; don't repeat the face
2307 # generation logic here
2308 loops += [co for i in range(nc)
2309 for co in scaledLoopVertex(mloops[i].vertex_index)]
2311 # Sides
2312 # Same order of vertices as in face generation
2313 # We don't rely on the loops in the mesh; instead,
2314 # we repeat the face generation logic.
2315 loops += [co for s in range(nsf)
2316 for c in range(ncf)
2317 for v in flip(((c / ncf, s / nsf),
2318 ((c + 1) / ncf, s / nsf),
2319 ((c + 1) / ncf, (s + 1) / nsf),
2320 (c / ncf, (s + 1) / nsf)), ccw) for co in v]
2322 if end_cap:
2323 # Base loop index for end cap
2324 lb = ncf * nsf * 4 + (nc if begin_cap else 0)
2325 # Rely on the loops here too.
2326 loops += [co for i in range(nc) for co
2327 in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
2328 importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)
2330 bpymesh.validate(True)
2331 bpymesh.update()
2332 return bpymesh
2335 # -----------------------------------------------------------------------------------
2336 # Line and point sets
def importMesh_LineSet(geom, ancestry, bpyima):
    """Import an X3D LineSet node as a Blender 3D poly curve.

    TODO: line display properties are ignored.
    Per-vertex color is ignored.
    """
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    # Source points are 3-component; Blender spline points are (x, y, z, w).
    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)
    base = 0
    for count in counts:
        spline = bpycurve.splines.new('POLY')
        spline.points.add(count - 1)  # a fresh spline already has one point
        flat = [co
                for pt in src_points[base:base + count]
                for co in (pt[0], pt[1], pt[2], 0)]
        spline.points.foreach_set('co', flat)
        base += count
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry, _):
    """Import an X3D/VRML IndexedLineSet as a Blender 3D poly curve."""
    # getChildBySpec('Coordinate') works for both X3D and VRML.
    coord = geom.getChildBySpec('Coordinate')
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Split the flat index stream into polylines; -1 terminates a line.
    lines = []
    line = []
    for index in ils_lines:
        if index == -1:
            lines.append(line)
            line = []
        else:
            line.append(int(index))
    lines.append(line)

    # Blender curves have no per-vertex color; any 'color' child is ignored.
    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for line in lines:
        if not line:
            continue
        spline = bpycurve.splines.new('POLY')
        spline.points.add(len(line) - 1)  # a fresh spline already has one point
        for index, pt in zip(line, spline.points):
            pt.co[0:3] = points[index]

    return bpycurve
def importMesh_PointSet(geom, ancestry, _):
    """Import an X3D/VRML PointSet as a vertices-only Blender mesh."""
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    points = coord.getFieldAsArray('point', 3, ancestry) if coord else []

    # Per-vertex color ('color' child) is ignored: no Blender equivalent.
    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    flat = []
    for pt in points:
        flat.extend(pt)
    bpymesh.vertices.foreach_set("co", flat)

    # A cloud of loose vertices needs no validation.
    bpymesh.update()
    return bpymesh
# -----------------------------------------------------------------------------------
# Primitives
# SA: they used to use bpy.ops for primitive creation. That was
# unbelievably slow on complex scenes. I rewrote to generate meshes
# by hand.

# Default segment/ring count for round primitives (Sphere, Cylinder, Cone)
# when the file carries no 'subdivision' hint.
GLOBALS['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry, bpyima):
    """Generate a UV-sphere mesh from an X3D Sphere node.

    'solid' is ignored. The nonstandard (X3DOM) attribute
    'subdivision="n m"' specifies how many rings and segments to use.
    """
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tesselation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    tf = bpymesh.tessfaces
    tf.add(ns * nr)
    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    if bpyima:
        tex = bpymesh.tessface_uv_textures.new().data
        for face in tex:  # No foreach_set for nonscalars
            face.image = bpyima

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        if bpyima:
            tex[seg].uv = (((seg + 0.5) / ns, 1),
                           (seg / ns, 1 - 1 / nr),
                           ((seg + 1) / ns, 1 - 1 / nr))
            tex[fb + seg].uv = (((seg + 0.5) / ns, 0),
                                ((seg + 1) / ns, 1 / nr),
                                (seg / ns, 1 / nr))

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices_raw = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            if bpyima:
                tex[rfb + seg].uv_raw = (seg / ns, 1 - (ring + 1) / nr,
                                         seg / ns, 1 - (ring + 2) / nr,
                                         (seg + 1) / ns, 1 - (ring + 2) / nr,
                                         (seg + 1) / ns, 1 - (ring + 1) / nr)

    bpymesh.validate(False)
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry, bpyima):
    """Generate a mesh from an X3D Cylinder node.

    'solid' is ignored; there is no 'ccw' field on this element.
    Extra parameter subdivision="n" - how many side faces to use.
    """
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2
    yvalues = (height / 2, -height / 2)
    angle = 2 * pi / n

    # The seam is at x=0, z=-r, vertices go ccw -
    # to pos x, to neg z, to neg x, back to neg z
    verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
             for i in range(n) for y in yvalues]
    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
    if top:
        faces += [[x for x in range(0, nn, 2)]]
    if bottom:
        faces += [[x for x in range(nn - 1, -1, -2)]]

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.

    bpymesh.validate(False)

    # Polygons here, not tessfaces
    # The structure of the loop array goes: cap, side, cap.
    if bpyima:
        loops = []
        if side:
            loops += [co for i in range(n)
                      for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]

        if top:
            loops += [0.5 + co / 2 for i in range(n)
                      for co in (-sin(angle * i), cos(angle * i))]

        if bottom:
            loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                      for co in (sin(angle * i), cos(angle * i))]

        importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry, bpyima):
    """Generate a mesh from an X3D Cone node.

    'solid' is ignored.
    Extra parameter subdivision="n" - how many side faces to use.
    """
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Apex first, then the base rim at y=-d; seam at x=0, z=-r.
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]
    faces = []

    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate(False)
    if bpyima:
        loops = []
        if side:
            loops += [co for i in range(n)
                      for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
        if bottom:
            loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                      for co in (sin(angle * i), cos(angle * i))]
        importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry, bpyima):
    """Generate a mesh from an X3D Box node.

    'solid' is ignored; there is no 'ccw' field on this element.
    """
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    # 'size' is the full extent; work with half-extents from here on.
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    bpymesh.tessfaces.add(6)
    bpymesh.tessfaces.foreach_set('vertices_raw', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate(False)
    if bpyima:
        d = bpymesh.tessface_uv_textures.new().data
        for face in d:  # No foreach_set for nonscalars
            face.image = bpyima
        d.foreach_set('uv_raw', (
            1, 0, 0, 0, 0, 1, 1, 1,
            0, 0, 0, 1, 1, 1, 1, 0,
            0, 0, 0, 1, 1, 1, 1, 0,
            0, 0, 0, 1, 1, 1, 1, 0,
            0, 0, 0, 1, 1, 1, 1, 0,
            1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2666 # -----------------------------------------------------------------------------------
2667 # Utilities for importShape
# Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    """Build a Blender material from an X3D <Material> node.

    The texture (if any) is attached later, in appearance_Create().
    Field values are all in [0.0, 1.0]; defaults follow the VRML docs.
    """
    bpymat = bpy.data.materials.new(vrmlname)
    bpymat.ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)
    bpymat.diffuse_color = mat.getFieldAsFloatTuple(
        'diffuseColor', [0.8, 0.8, 0.8], ancestry)

    # Blender has no emissive color; stash it in the mirror color and
    # approximate the effect with the scalar 'emit' (mean of the channels).
    emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat.mirror_color = emit
    bpymat.emit = (emit[0] + emit[1] + emit[2]) / 3.0

    # Map shininess 0-1 onto Blender specular hardness 1-511.
    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat.specular_hardness = int(1 + (510 * shininess))
    bpymat.specular_color = mat.getFieldAsFloatTuple(
        'specularColor', [0.0, 0.0, 0.0], ancestry)

    bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    if bpymat.alpha < 0.999:
        bpymat.use_transparency = True
    if is_vcol:
        bpymat.use_vertex_color_paint = True
    return bpymat
def appearance_CreateDefaultMaterial():
    """Create a material with the X3D default appearance values.

    Used for shapes without an explicit material definition
    (but possibly with a texture).
    """
    bpymat = bpy.data.materials.new("Material")
    bpymat.ambient = 0.2
    bpymat.diffuse_color = [0.8, 0.8, 0.8]

    # No emission by default; mirror color doubles as emissive storage.
    bpymat.mirror_color = (0, 0, 0)
    bpymat.emit = 0

    bpymat.specular_color = (0, 0, 0)
    bpymat.specular_hardness = 103  # default shininess 0.2 on the 1-511 scale
    bpymat.alpha = 1
    return bpymat
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each URL in ima_urls until one loads; return the image or None.

    URLs are resolved relative to the directory of the file that
    declared the node.
    """
    # The base directory is the same for every candidate URL - compute it
    # once instead of once per iteration.
    dirname = os.path.dirname(node.getFilename())
    for f in ima_urls:
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            return bpyima

    return None
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Load the Blender image for an X3D <ImageTexture>; return it or None.

    TODO: cache loaded textures...
    """
    # 'url' may be a single (possibly multi-valued) string, or a string array.
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
            # in some cases we get a list of images.
        except Exception:
            # Narrowed from a bare 'except:' so Ctrl-C / SystemExit still
            # propagate. A missing/malformed field means "no URL" below.
            ima_urls = None
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None
    else:
        bpyima = appearance_LoadImageTextureFile(ima_urls, node)

        if not bpyima:
            print("ImportX3D warning: unable to load texture", ima_urls)
        else:
            # KNOWN BUG; PNGs with a transparent color are not perceived
            # as transparent. Need alpha channel.
            bpyima.use_alpha = bpyima.depth in {32, 128}
        return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load the Blender image for a texture node, with caching.

    Uses both USE-based caching (the parsed image stored on the real
    node) and desc-based caching (texture_cache keyed by node desc).
    Works for both ImageTextures and PixelTextures.
    """
    # USE-based caching
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    if desc and desc in texture_cache:
        bpyima = texture_cache[desc]
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima
        return bpyima

    # No cached texture, load it.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        repeat_s = tex_node.getFieldAsBool('repeatS', True, ancestry)
        # NOTE(review): 'use_clight_x/y' looks like a typo for the clamp
        # flags; confirm against the bpy.types.Image API before relying
        # on the repeat behavior.
        bpyima.use_clight_x = not repeat_s
        repeat_t = tex_node.getFieldAsBool('repeatT', True, ancestry)
        bpyima.use_clight_y = not repeat_t

        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima

        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    """Expand a cached material into the (bpymat, bpyima, tex_has_alpha)
    triple that the appearance loaders return."""
    slot = bpymat.texture_slots[0]
    if slot is None:
        return (bpymat, None, False)
    bpyima = slot.texture.image
    return (bpymat, bpyima, bpyima.use_alpha)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the key for the description-based material cache.

    Returns (mat_desc, tex_desc) when descriptions are available (an
    absent node contributes "Default" - so the null material is cached
    even for VRML), or None when a present node has no desc (VRML
    nodes), which turns desc-based caching off for that appearance.
    TODO: serialize VRML nodes!!!
    """
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    # A present node without a desc disables caching. The original code
    # also had an 'elif not tex_node and not material' branch, but it was
    # unreachable: with both nodes absent, both descs are "Default" and
    # the first branch already returned ("Default", "Default").
    if (tex_node and tex_desc is None) or (material and mat_desc is None):
        return None  # Desc-based caching is off
    return (mat_desc, tex_desc)
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Create a Blender material object from an X3D appearance.

    Returns (bpymat, bpyima, tex_has_alpha). Texture loading (with its
    own caching) happens in appearance_LoadTexture.
    """
    bpyima = None
    tex_has_alpha = False

    if material:
        bpymat = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        bpymat = appearance_CreateDefaultMaterial()

    if tex_node:  # Texture caching inside there
        bpyima = appearance_LoadTexture(tex_node, ancestry, node)

    if is_vcol:
        bpymat.use_vertex_color_paint = True

    if bpyima:
        tex_has_alpha = bpyima.use_alpha

        texture = bpy.data.textures.new(bpyima.name, 'IMAGE')
        texture.image = bpyima

        mtex = bpymat.texture_slots.add()
        mtex.texture = texture

        mtex.texture_coords = 'UV'
        mtex.use_map_diffuse = True
        mtex.use = True

        # Let image alpha drive material transparency.
        if bpyima.use_alpha:
            bpymat.use_transparency = True
            mtex.use_map_alpha = True
            mtex.alpha_factor = 0.0

    return (bpymat, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, idential material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on apprearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Create a Blender image from an X3D <PixelTexture> node.

    The 'image' field layout is [width, height, planes, pixel...],
    each pixel being a packed integer with one byte per plane.
    """
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    bpyima.use_alpha = has_alpha

    # Unpack each packed pixel into RGBA floats in [0, 1].
    # Conditional above the loop, for performance
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/aplha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
# Called from importShape to insert a data object (typically a mesh)
# into the scene
def importShape_ProcessObject(
        bpyscene, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap a geometry datablock in an object, apply material, alpha
    blending and UV transform, and link the object into the scene."""
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.auto_smooth_angle = creaseAngle
            bpydata.use_auto_smooth = True

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.tessface_uv_textures:
            if has_alpha:  # set the faces alpha flag?
                # transp = Mesh.FaceTranspModes.ALPHA
                for f in bpydata.tessface_uv_textures.active.data:
                    f.blend_type = 'ALPHA'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for f in bpydata.tessface_uv_textures.active.data:
                    fuv = f.uv
                    for i, uv in enumerate(fuv):
                        uv_copy.x = uv[0]
                        uv_copy.y = uv[1]

                        fuv[i] = (uv_copy * texmtx)[0:2]
        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpyscene.objects.link(bpyob).select = True

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry, bpyima):
    """Import an X3D <Text> node as a Blender FONT curve."""
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    if body is None:
        # A Text node without a 'string' field used to crash on split();
        # treat it as empty text instead.
        lines = []
    else:
        # '"foo" "bar"' --> ['foo', 'bar']
        lines = [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(lines)
    bpytext.size = size
    return bpytext
3050 # -----------------------------------------------------------------------------------
# Maps a geometry node's spec name to its importer. Every importer takes
# (geom, ancestry, bpyima) and returns a Blender datablock (mesh or curve).
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
    }
def importShape(bpyscene, node, ancestry, global_matrix):
    """Import a Shape node: appearance plus a single geometry child."""
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # USE-d shape: copy the existing object, sharing its data.
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpyscene.objects.link(bpyob).select = True
        return

    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    bpyima = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)

    if appr:
        (bpymat, bpyima,
         tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
                                                     ancestry, node,
                                                     is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    bpydata = None
    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is not None:
        bpydata = geom_fn(geom, ancestry, bpyima)

        # There are no geometry importers that can legally return
        # no object. It's either a bpy object, or an exception
        importShape_ProcessObject(
            bpyscene, vrmlname, bpydata, geom, geom_spec,
            node, bpymat, tex_has_alpha, texmtx,
            ancestry, global_matrix)
    else:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
3134 # -----------------------------------------------------------------------------------
3135 # Lighting
def importLamp_PointLight(node, ancestry):
    """Translate an X3D PointLight into a (light datablock, matrix) pair."""
    vrmlname = node.getDefName() or 'PointLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # Spec caps intensity at 1.0, but files in the wild exceed it.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    # Point lights are placed, not aimed.
    mtx = Matrix.Translation(Vector(location))

    return bpylamp, mtx
def importLamp_DirectionalLight(node, ancestry):
    """Translate an X3D DirectionalLight into a (sun light, matrix) pair."""
    vrmlname = node.getDefName() or 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    # Spec caps intensity at 1.0, but files in the wild exceed it.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO

    bpylamp = bpy.data.lights.new(vrmlname, 'SUN')
    bpylamp.energy = intensity
    bpylamp.color = color

    # Blender lamps shine along their local -z, y up; aim along 'direction'.
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
3181 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Create a Blender spot light from a VRML/X3D SpotLight node.

    Returns (light-datablock, matrix) combining the node's location with
    its aim direction.
    """
    vrmlname = node.getDefName() or 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry)  # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry)  # TODO
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    # Doubled: VRML's cutOffAngle is measured from the beam axis,
    # Blender's spot_size is the full cone angle.
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    # Spec documents a max of 1.0, but files in the wild go higher; don't clamp.
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry)  # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'SPOT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color
    bpylamp.spot_size = cutOffAngle

    # Map the beamWidth/cutOffAngle pair onto Blender's soft-edge blend factor.
    if beamWidth > cutOffAngle:
        bpylamp.spot_blend = 0.0
    elif cutOffAngle == 0.0:  # this should never happen!
        bpylamp.spot_blend = 0.5
    else:
        bpylamp.spot_blend = beamWidth / cutOffAngle

    # Convert: lamps point down -Z, Y up.
    rot = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()
    mtx = Matrix.Translation(location) * rot

    return bpylamp, mtx
def importLamp(bpyscene, node, spec, ancestry, global_matrix):
    """Import one of the three VRML light node types and link the
    resulting lamp object into *bpyscene*.

    Raises ValueError when *spec* is not a known light type.
    """
    importer = {
        'PointLight': importLamp_PointLight,
        'DirectionalLight': importLamp_DirectionalLight,
        'SpotLight': importLamp_SpotLight,
    }.get(spec)

    if importer is None:
        print("Error, not a lamp")
        raise ValueError

    bpylamp, mtx = importer(node, ancestry)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpyscene.objects.link(bpyob).select = True

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3238 # -----------------------------------------------------------------------------------
def importViewpoint(bpyscene, node, ancestry, global_matrix):
    """Import a VRML/X3D Viewpoint node as a Blender camera object."""
    name = node.getDefName() or 'Viewpoint'

    # Spec documents limits, but files in the wild exceed them; don't clamp.
    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)  # parsed but not used yet

    bpycam = bpy.data.cameras.new(name)
    bpycam.angle = fieldOfView

    mtx = Matrix.Translation(Vector(position)) * translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
    bpyscene.objects.link(bpyob).select = True
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpyscene, node, ancestry, global_matrix):
    """Import a Transform node as an empty so children can parent to it."""
    name = node.getDefName() or 'Transform'

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpyscene.objects.link(bpyob).select = True

    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # Keep the empties small and unobtrusive in the viewport.
    bpyob.empty_draw_type = 'PLAIN_AXES'
    bpyob.empty_draw_size = 0.2
3278 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the F-curve on *action* for (data_path, array_index),
    creating it when it does not exist yet."""
    fcu = next((c for c in action.fcurves
                if c.data_path == data_path and c.array_index == array_index),
               None)
    if fcu is not None:
        return fcu
    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Bake a PositionInterpolator node into X/Y/Z location F-curves."""
    times = node.getFieldAsArray('key', 0, ancestry)
    values = node.getFieldAsArray('keyValue', 3, ancestry)

    curves = tuple(action_fcurve_ensure(action, "location", axis) for axis in range(3))

    for i, time in enumerate(times):
        try:
            x, y, z = values[i]
        except:
            # Skip keys with no matching complete value triple.
            continue

        curves[0].keyframe_points.insert(time, x)
        curves[1].keyframe_points.insert(time, y)
        curves[2].keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear.
    for fcu in curves:
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Bake an OrientationInterpolator (axis-angle keys) into Euler
    rotation F-curves."""
    times = node.getFieldAsArray('key', 0, ancestry)
    values = node.getFieldAsArray('keyValue', 4, ancestry)

    curves = tuple(action_fcurve_ensure(action, "rotation_euler", axis) for axis in range(3))

    for i, time in enumerate(times):
        try:
            x, y, z, w = values[i]
        except:
            # Skip keys with no matching complete axis-angle quadruple.
            continue

        # Axis-angle -> rotation matrix -> euler.
        eul = translateRotation((x, y, z, w)).to_euler()
        curves[0].keyframe_points.insert(time, eul.x)
        curves[1].keyframe_points.insert(time, eul.y)
        curves[2].keyframe_points.insert(time, eul.z)

    # VRML interpolators are piecewise linear.
    for fcu in curves:
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
3335 # Untested!
def translateScalarInterpolator(node, action, ancestry):
    """Bake a scale interpolator (routed from 'set_scale') into X/Y/Z
    scale F-curves on *action*.

    Fixes two defects in the previous version:
    - keyValue was read with a group size of 4 (copy-paste from the
      orientation interpolator) while the loop unpacks 3 components, so
      every unpack raised and all keys were silently dropped; scale
      values are triples, so read with group size 3.
    - FCurve keyframe_points has no ``new()`` method; ``insert()`` is the
      correct API, as used by the position/orientation interpolators.
    Also applies the same LINEAR interpolation pass as the sibling
    interpolators for consistency.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)  # was 4: wrong group size

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            continue

        sca_x.keyframe_points.insert(time, x)
        sca_y.keyframe_points.insert(time, y)
        sca_z.keyframe_points.insert(time, z)

    # VRML interpolators are piecewise linear (matches the other translators).
    for fcu in (sca_x, sca_y, sca_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """

    # XXX25 TODO -- stubbed out: everything below is dead Blender 2.4x
    # Ipo API code kept only as a reference for the eventual 2.5+ port.
    if 1:
        return

    # NOTE(review): 'Blender' (2.4x module) is not imported in this file;
    # this path would raise NameError if ever reached.
    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    # cycleInterval, when present, overrides stopTime.
    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    # Frames are 1-based, hence the 1+ offset.
    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment
    """

    # Only the root/file node carries ROUTE statements in its fields.
    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # One shared action per routed target id, created lazily.
        try:
            action = routeIpoDict[act_id]
        except:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to eachother

ROUTE vpPI.value_changed TO champFly001.set_position
ROUTE vpOI.value_changed TO champFly001.set_orientation
ROUTE vpTs.fraction_changed TO vpPI.set_fraction
ROUTE vpTs.fraction_changed TO vpOI.set_fraction
ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    #value_changed
    set_position_node = None
    set_orientation_node = None
    time_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            # A ROUTE field looks like: ['ROUTE', 'src.event', 'TO', 'dst.event']
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except:
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                # Interpolator output -> transform channel of the target.
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                # TimeSensor binding: here the action belongs to the source id.
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
def load_web3d(
        bpyscene,
        filepath,
        *,
        PREF_FLAT=False,
        PREF_CIRCLE_DIV=16,
        global_matrix=None,
        HELPER_FUNC=None
        ):
    """Parse *filepath* (VRML '.wrl' or X3D '.x3d' by extension) and import
    the scene graph into *bpyscene*.

    PREF_FLAT=True skips Transform empties and the final parenting pass
    (flat hierarchy).  HELPER_FUNC, when given, is called on every node
    before the built-in handlers so external scripts can extend the
    importer.
    """

    # Used when adding blender primitives
    GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV

    #root_node = vrml_parse('/_Cylinder.wrl')
    if filepath.lower().endswith('.x3d'):
        root_node, msg = x3d_parse(filepath)
    else:
        root_node, msg = vrml_parse(filepath)

    # Parse failure: report and bail without touching the scene.
    if not root_node:
        print(msg)
        return

    if global_matrix is None:
        global_matrix = Matrix()

    # fill with tuples - (node, [parents-parent, parent])
    all_nodes = root_node.getSerialized([], [])

    for node, ancestry in all_nodes:
        #if 'castle.wrl' not in node.getFilename():
        #    continue

        spec = node.getSpec()
        '''
        prefix = node.getPrefix()
        if prefix=='PROTO':
            pass
        else
        '''
        if HELPER_FUNC and HELPER_FUNC(node, ancestry):
            # Note, include this function so the VRML/X3D importer can be extended
            # by an external script. - gets first pick
            pass
        if spec == 'Shape':
            importShape(bpyscene, node, ancestry, global_matrix)
        elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
            importLamp(bpyscene, node, spec, ancestry, global_matrix)
        elif spec == 'Viewpoint':
            importViewpoint(bpyscene, node, ancestry, global_matrix)
        elif spec == 'Transform':
            # Only use transform nodes when we are not importing a flat object hierarchy
            if PREF_FLAT == False:
                importTransform(bpyscene, node, ancestry, global_matrix)
        '''
        # These are delt with later within importRoute
        elif spec=='PositionInterpolator':
            action = bpy.data.ipos.new('web3d_ipo', 'Object')
            translatePositionInterpolator(node, action)
        '''

    # After we import all nodes, route events - anim paths
    for node, ancestry in all_nodes:
        importRoute(node, ancestry)

    for node, ancestry in all_nodes:
        if node.isRoot():
            # we know that all nodes referenced from will be in
            # routeIpoDict so no need to run node.getDefDict() for every node.
            routeIpoDict = node.getRouteIpoDict()
            defDict = node.getDefDict()

            for key, action in routeIpoDict.items():

                # Assign anim curves
                node = defDict[key]
                if node.blendData is None:  # Add an object if we need one for animation
                    # NOTE(review): scene.objects.link() returning the linked
                    # object is 2.7x API -- confirm against the 2.8 collection
                    # API this branch is migrating to.
                    node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None)  # , name)
                    bpyscene.objects.link(node.blendObject).select = True

                if node.blendData.animation_data is None:
                    node.blendData.animation_data_create()

                node.blendData.animation_data.action = action

    # Add in hierarchy
    if PREF_FLAT is False:
        child_dict = {}
        for node, ancestry in all_nodes:
            if node.blendObject:
                blendObject = None

                # Get the last parent
                i = len(ancestry)
                while i:
                    i -= 1
                    blendObject = ancestry[i].blendObject
                    if blendObject:
                        break

                if blendObject:
                    # Parent Slow, - 1 liner but works
                    # blendObject.makeParent([node.blendObject], 0, 1)

                    # Parent FAST
                    try:
                        child_dict[blendObject].append(node.blendObject)
                    except:
                        child_dict[blendObject] = [node.blendObject]

        # Parent
        for parent, children in child_dict.items():
            for c in children:
                c.parent = parent

        # update deps
        bpyscene.update()
        del child_dict
def load_with_profiler(
        context,
        filepath,
        *,
        global_matrix=None
        ):
    """Run load_web3d under cProfile and print the slowest 10% of entries
    sorted by internal time.  Debug helper with the same arguments as
    load()."""
    import cProfile
    import pstats

    profiler = cProfile.Profile()
    profiler.runctx("load_web3d(context.scene, filepath, PREF_FLAT=True, "
                    "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
                    globals(), locals())

    stats = pstats.Stats(profiler)
    stats.sort_stats("time")
    stats.print_stats(0.1)
    # stats.print_callers(0.1)
def load(context,
         filepath,
         *,
         global_matrix=None
         ):
    """Operator entry point: import *filepath* into the active scene.

    Always imports a flat hierarchy (PREF_FLAT=True) with a circle detail
    of 16.  Returns {'FINISHED'} for the operator system.
    """

    # loadWithProfiler(operator, context, filepath, global_matrix)
    load_web3d(context.scene, filepath,
               PREF_FLAT=True,
               PREF_CIRCLE_DIV=16,
               global_matrix=global_matrix,
               )

    return {'FINISHED'}