1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
23 # This should work without a blender at all
27 from math
import sin
, cos
, pi
28 from itertools
import chain
# Tolerance used for near-zero floating point comparisons.  Very crude.
EPSILON = 1e-7
36 def imageConvertCompat(path
):
39 return path
# assume win32 has quicktime, dont convert
41 if path
.lower().endswith('.gif'):
42 path_to
= path
[:-3] + 'png'
48 # print('\n'+path+'\n'+path_to+'\n')
49 os
.system('convert "%s" "%s"' % (path
, path_to
)) # for now just hope we have image magick
51 if os
.path
.exists(path_to
):
57 # transform are relative
58 # order doesn't matter for loc/size/rot
59 # right handed rotation
60 # angles are in radians
61 # rotation first defines axis then amount in radians
64 # =============================== VRML Spesific
66 def vrml_split_fields(value
):
68 key 0.0 otherkey 1,2,3 opt1 opt1 0.0
69 -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
72 if k
[0] != '"' and k
[0].isalpha() and k
.upper() not in {'TRUE', 'FALSE'}:
82 field_context_len
= len(field_context
)
83 if (field_context_len
> 2) and (field_context
[-2] in {'DEF', 'USE'}):
84 field_context
.append(v
)
85 elif (not iskey(field_context
[-1])) or ((field_context_len
== 3 and field_context
[1] == 'IS')):
86 # this IS a key but the previous value was not a key, or it was a defined field.
87 field_list
.append(field_context
)
90 # The last item was not a value, multiple keys are needed in some cases.
91 field_context
.append(v
)
93 # Is empty, just add this on
94 field_context
.append(v
)
96 # Add a value to the list
97 field_context
.append(v
)
100 field_list
.append(field_context
)
105 def vrmlFormat(data
):
107 Keep this as a valid vrml file, but format in a way we can predict.
109 # Strip all comments - # not in strings - warning multiline strings are ignored.
110 def strip_comment(l
):
111 #l = ' '.join(l.split())
114 if l
.startswith('#'):
122 # Most cases accounted for! if we have a comment at the end of the line do this...
126 if j
== -1: # simple no strings
130 for i
, c
in enumerate(l
):
140 data
= '\n'.join([strip_comment(l
) for l
in data
.split('\n')]) # remove all whitespace
142 EXTRACT_STRINGS
= True # only needed when strings or filename contains ,[]{} chars :/
146 # We need this so we can detect URL's
147 data
= '\n'.join([' '.join(l
.split()) for l
in data
.split('\n')]) # remove all whitespace
158 i
= data
.find(search
, last_i
)
161 start
= i
+ len(search
) # first char after end of search
162 end
= data
.find('"', start
)
164 item
= data
[start
:end
]
165 string_ls
.append(item
)
166 data
= data
[:start
] + data
[end
:]
167 ok
= True # keep looking
169 last_i
= (end
- len(item
)) + 1
170 # print(last_i, item, '|' + data[last_i] + '|')
172 # done with messy extracting strings part
174 # Bad, dont take strings into account
176 data = data.replace('#', '\n#')
177 data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')]) # remove all whitespace
179 data
= data
.replace('{', '\n{\n')
180 data
= data
.replace('}', '\n}\n')
181 data
= data
.replace('[', '\n[\n')
182 data
= data
.replace(']', '\n]\n')
183 data
= data
.replace(',', ' , ') # make sure comma's separate
185 # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
186 # See T45195 for details.
187 data
= '\n'.join([' '.join(value
) for l
in data
.split('\n') for value
in vrml_split_fields(l
.split())])
190 # add strings back in
192 search
= '"' # fill in these empty strings
198 i
= data
.find(search
+ '"', last_i
)
201 start
= i
+ len(search
) # first char after end of search
202 item
= string_ls
.pop(0)
204 data
= data
[:start
] + item
+ data
[start
:]
206 last_i
= start
+ len(item
) + 1
210 # More annoying obscure cases where USE or DEF are placed on a newline
211 # data = data.replace('\nDEF ', ' DEF ')
212 # data = data.replace('\nUSE ', ' USE ')
214 data
= '\n'.join([' '.join(l
.split()) for l
in data
.split('\n')]) # remove all whitespace
216 # Better to parse the file accounting for multiline arrays
218 data = data.replace(',\n', ' , ') # remove line endings with commas
219 data = data.replace(']', '\n]\n') # very very annoying - but some comma's are at the end of the list, must run this again.
222 return [l
for l
in data
.split('\n') if l
]
# Node kind constant: a "USE foobar" reference back to a DEF'ed node.
NODE_REFERENCE = 3
232 def getNodePreText(i
, words
):
235 while len(words
) < 5:
240 elif lines[i].startswith('PROTO'):
241 return NODE_PROTO, i+1
243 elif lines
[i
] == '{':
244 # words.append(lines[i]) # no need
246 return NODE_NORMAL
, i
+ 1
247 elif lines
[i
].count('"') % 2 != 0: # odd number of quotes? - part of a string.
251 new_words
= lines
[i
].split()
252 if 'USE' in new_words
:
255 words
.extend(new_words
)
258 # Check for USE node - no {
259 # USE #id - should always be on the same line.
261 # print('LINE', i, words[:words.index('USE')+2])
262 words
[:] = words
[:words
.index('USE') + 2]
263 if lines
[i
] == '{' and lines
[i
+ 1] == '}':
264 # USE sometimes has {} after it anyway
266 return NODE_REFERENCE
, i
268 # print("error value!!!", words)
272 def is_nodeline(i
, words
):
274 if not lines
[i
][0].isalpha():
277 #if lines[i].startswith('field'):
280 # Is this a prototype??
281 if lines
[i
].startswith('PROTO'):
282 words
[:] = lines
[i
].split()
283 return NODE_NORMAL
, i
+ 1 # TODO - assumes the next line is a '[\n', skip that
284 if lines
[i
].startswith('EXTERNPROTO'):
285 words
[:] = lines
[i
].split()
286 return NODE_ARRAY
, i
+ 1 # TODO - assumes the next line is a '[\n', skip that
289 proto_type, new_i = is_protoline(i, words, proto_field_defs)
291 return proto_type, new_i
294 # Simple "var [" type
295 if lines
[i
+ 1] == '[':
296 if lines
[i
].count('"') % 2 == 0:
297 words
[:] = lines
[i
].split()
298 return NODE_ARRAY
, i
+ 2
300 node_type
, new_i
= getNodePreText(i
, words
)
304 print("not node_type", lines
[i
])
307 # Ok, we have a { after some values
308 # Check the values are not fields
309 for i
, val
in enumerate(words
):
310 if i
!= 0 and words
[i
- 1] in {'DEF', 'USE'}:
311 # ignore anything after DEF, it is a ID and can contain any chars.
313 elif val
[0].isalpha() and val
not in {'TRUE', 'FALSE'}:
316 # There is a number in one of the values, therefor we are not a node.
319 #if node_type==NODE_REFERENCE:
320 # print(words, "REF_!!!!!!!")
321 return node_type
, new_i
326 Does this line start with a number?
329 # Works but too slow.
350 if l
.startswith(', '):
353 line_end
= len(l
) - 1
354 line_end_new
= l
.find(' ', line_start
) # comma's always have a space before them
356 if line_end_new
!= -1:
357 line_end
= line_end_new
360 float(l
[line_start
:line_end
]) # works for a float or int
366 class vrmlNode(object):
383 'ROUTE_IPO_NAMESPACE',
388 def __init__(self
, parent
, node_type
, lineno
):
390 self
.node_type
= node_type
392 self
.blendObject
= None
393 self
.blendData
= None
394 self
.x3dNode
= None # for x3d import only
395 self
.parsed
= None # We try to reuse objects in a smart way
397 parent
.children
.append(self
)
401 # This is only set from the root nodes.
402 # Having a filename also denotes a root node
404 self
.proto_node
= None # proto field definition eg: "field SFColor seatColor .6 .6 .1"
406 # Store in the root node because each inline file needs its own root node and its own namespace
407 self
.DEF_NAMESPACE
= None
408 self
.ROUTE_IPO_NAMESPACE
= None
410 self.FIELD_NAMESPACE = None
413 self
.PROTO_NAMESPACE
= None
415 self
.reference
= None
417 if node_type
== NODE_REFERENCE
:
418 # For references, only the parent and ID are needed
419 # the reference its self is assigned on parsing
422 self
.fields
= [] # fields have no order, in some cases rool level values are not unique so dont use a dict
424 self
.proto_field_defs
= [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
425 self
.proto_fields
= [] # proto field usage "diffuseColor IS seatColor"
427 self
.array_data
= [] # use for arrays of data - should only be for NODE_ARRAY types
429 # Only available from the root node
431 def getFieldDict(self):
432 if self.FIELD_NAMESPACE is not None:
433 return self.FIELD_NAMESPACE
435 return self.parent.getFieldDict()
437 def getProtoDict(self
):
438 if self
.PROTO_NAMESPACE
is not None:
439 return self
.PROTO_NAMESPACE
441 return self
.parent
.getProtoDict()
443 def getDefDict(self
):
444 if self
.DEF_NAMESPACE
is not None:
445 return self
.DEF_NAMESPACE
447 return self
.parent
.getDefDict()
449 def getRouteIpoDict(self
):
450 if self
.ROUTE_IPO_NAMESPACE
is not None:
451 return self
.ROUTE_IPO_NAMESPACE
453 return self
.parent
.getRouteIpoDict()
455 def setRoot(self
, filename
):
456 self
.filename
= filename
457 # self.FIELD_NAMESPACE = {}
458 self
.DEF_NAMESPACE
= {}
459 self
.ROUTE_IPO_NAMESPACE
= {}
460 self
.PROTO_NAMESPACE
= {}
463 if self
.filename
is None:
468 def getFilename(self
):
472 return self
.parent
.getFilename()
476 def getRealNode(self
):
478 return self
.reference
483 self_real
= self
.getRealNode()
485 return self_real
.id[-1] # its possible this node has no spec
489 def findSpecRecursive(self
, spec
):
490 self_real
= self
.getRealNode()
491 if spec
== self_real
.getSpec():
494 for child
in self_real
.children
:
495 if child
.findSpecRecursive(spec
):
505 def getSpecialTypeName(self
, typename
):
506 self_real
= self
.getRealNode()
508 return self_real
.id[list(self_real
.id).index(typename
) + 1]
512 def getDefName(self
):
513 return self
.getSpecialTypeName('DEF')
515 def getProtoName(self
):
516 return self
.getSpecialTypeName('PROTO')
518 def getExternprotoName(self
):
519 return self
.getSpecialTypeName('EXTERNPROTO')
521 def getChildrenBySpec(self
, node_spec
): # spec could be Transform, Shape, Appearance
522 self_real
= self
.getRealNode()
523 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
524 if type(node_spec
) == str:
525 return [child
for child
in self_real
.children
if child
.getSpec() == node_spec
]
527 # Check inside a list of optional types
528 return [child
for child
in self_real
.children
if child
.getSpec() in node_spec
]
530 def getChildrenBySpecCondition(self
, cond
): # spec could be Transform, Shape, Appearance
531 self_real
= self
.getRealNode()
532 # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
533 return [child
for child
in self_real
.children
if cond(child
.getSpec())]
535 def getChildBySpec(self
, node_spec
): # spec could be Transform, Shape, Appearance
536 # Use in cases where there is only ever 1 child of this type
537 ls
= self
.getChildrenBySpec(node_spec
)
543 def getChildBySpecCondition(self
, cond
): # spec could be Transform, Shape, Appearance
544 # Use in cases where there is only ever 1 child of this type
545 ls
= self
.getChildrenBySpecCondition(cond
)
551 def getChildrenByName(self
, node_name
): # type could be geometry, children, appearance
552 self_real
= self
.getRealNode()
553 return [child
for child
in self_real
.children
if child
.id if child
.id[0] == node_name
]
555 def getChildByName(self
, node_name
):
556 self_real
= self
.getRealNode()
557 for child
in self_real
.children
:
558 if child
.id and child
.id[0] == node_name
: # and child.id[-1]==node_spec:
561 def getSerialized(self
, results
, ancestry
):
562 """ Return this node and all its children in a flat list """
563 ancestry
= ancestry
[:] # always use a copy
565 # self_real = self.getRealNode()
567 results
.append((self
, tuple(ancestry
)))
568 ancestry
.append(self
)
569 for child
in self
.getRealNode().children
:
570 if child
not in ancestry
:
571 # We dont want to load proto's, they are only references
572 # We could enforce this elsewhere
574 # Only add this in a very special case
575 # where the parent of this object is not the real parent
576 # - In this case we have added the proto as a child to a node instancing it.
577 # This is a bit arbitrary, but its how Proto's are done with this importer.
578 if child
.getProtoName() is None and child
.getExternprotoName() is None:
579 child
.getSerialized(results
, ancestry
)
583 print('getSerialized() is proto:', child
.getProtoName(), child
.getExternprotoName(), self
.getSpec())
585 self_spec
= self
.getSpec()
587 if child
.getProtoName() == self_spec
or child
.getExternprotoName() == self_spec
:
590 child
.getSerialized(results
, ancestry
)
594 def searchNodeTypeID(self
, node_spec
, results
):
595 self_real
= self
.getRealNode()
596 # print(self.lineno, self.id)
597 if self_real
.id and self_real
.id[-1] == node_spec
: # use last element, could also be only element
598 results
.append(self_real
)
599 for child
in self_real
.children
:
600 child
.searchNodeTypeID(node_spec
, results
)
603 def getFieldName(self
, field
, ancestry
, AS_CHILD
=False, SPLIT_COMMAS
=False):
604 self_real
= self
.getRealNode() # in case we're an instance
606 for f
in self_real
.fields
:
608 if f
and f
[0] == field
:
609 # print('\tfound field', f)
611 if len(f
) >= 3 and f
[1] == 'IS': # eg: 'diffuseColor IS legColor'
614 # print("\n\n\n\n\n\nFOND IS!!!")
615 f_proto_lookup
= None
616 f_proto_child_lookup
= None
621 node
= node
.getRealNode()
623 # proto settings are stored in "self.proto_node"
625 # Get the default value from the proto, this can be overwritten by the proto instance
626 # 'field SFColor legColor .8 .4 .7'
628 for child
in node
.proto_node
.children
:
629 #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
630 if child
.id and ('point' in child
.id or 'points' in child
.id):
631 f_proto_child_lookup
= child
634 for f_def
in node
.proto_node
.proto_field_defs
:
636 if f_def
[0] == 'field' and f_def
[2] == field_id
:
637 f_proto_lookup
= f_def
[3:]
639 # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
640 # This is the setting as defined by the instance, including this setting is optional,
641 # and will override the default PROTO value
642 # eg: 'legColor 1 0 0'
644 for child
in node
.children
:
645 if child
.id and child
.id[0] == field_id
:
646 f_proto_child_lookup
= child
648 for f_def
in node
.fields
:
650 if f_def
[0] == field_id
:
652 print("getFieldName(), found proto", f_def
)
653 f_proto_lookup
= f_def
[1:]
656 if f_proto_child_lookup
:
658 print("getFieldName() - AS_CHILD=True, child found")
659 print(f_proto_child_lookup
)
660 return f_proto_child_lookup
662 return f_proto_lookup
669 # print('\tfield not found', field)
671 # See if this is a proto name
673 for child
in self_real
.children
:
674 if child
.id and len(child
.id) == 1 and child
.id[0] == field
:
679 def getFieldAsInt(self
, field
, default
, ancestry
):
680 self_real
= self
.getRealNode() # in case we're an instance
682 f
= self_real
.getFieldName(field
, ancestry
)
686 f
= f
[:f
.index(',')] # strip after the comma
689 print('\t"%s" wrong length for int conversion for field "%s"' % (f
, field
))
695 print('\tvalue "%s" could not be used as an int for field "%s"' % (f
[0], field
))
698 def getFieldAsFloat(self
, field
, default
, ancestry
):
699 self_real
= self
.getRealNode() # in case we're an instance
701 f
= self_real
.getFieldName(field
, ancestry
)
705 f
= f
[:f
.index(',')] # strip after the comma
708 print('\t"%s" wrong length for float conversion for field "%s"' % (f
, field
))
714 print('\tvalue "%s" could not be used as a float for field "%s"' % (f
[0], field
))
717 def getFieldAsFloatTuple(self
, field
, default
, ancestry
):
718 self_real
= self
.getRealNode() # in case we're an instance
720 f
= self_real
.getFieldName(field
, ancestry
)
723 # if ',' in f: f = f[:f.index(',')] # strip after the comma
726 print('"%s" wrong length for float tuple conversion for field "%s"' % (f
, field
))
735 break # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
741 print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f
, field
))
744 def getFieldAsBool(self
, field
, default
, ancestry
):
745 self_real
= self
.getRealNode() # in case we're an instance
747 f
= self_real
.getFieldName(field
, ancestry
)
751 f
= f
[:f
.index(',')] # strip after the comma
754 print('\t"%s" wrong length for bool conversion for field "%s"' % (f
, field
))
757 if f
[0].upper() == '"TRUE"' or f
[0].upper() == 'TRUE':
759 elif f
[0].upper() == '"FALSE"' or f
[0].upper() == 'FALSE':
762 print('\t"%s" could not be used as a bool for field "%s"' % (f
[1], field
))
765 def getFieldAsString(self
, field
, default
, ancestry
):
766 self_real
= self
.getRealNode() # in case we're an instance
768 f
= self_real
.getFieldName(field
, ancestry
)
772 print('\t"%s" wrong length for string conversion for field "%s"' % (f
, field
))
776 # String may contain spaces
785 if st
[0] == '"' and st
[-1] == '"':
788 print('\tvalue "%s" could not be used as a string for field "%s"' % (f
[0], field
))
791 def getFieldAsArray(self
, field
, group
, ancestry
):
793 For this parser arrays are children
796 def array_as_number(array_string
):
799 array_data
= [int(val
, 0) for val
in array_string
]
802 array_data
= [float(val
) for val
in array_string
]
804 print('\tWarning, could not parse array data from field')
808 self_real
= self
.getRealNode() # in case we're an instance
810 child_array
= self_real
.getFieldName(field
, ancestry
, True, SPLIT_COMMAS
=True)
812 #if type(child_array)==list: # happens occasionally
813 # array_data = child_array
815 if child_array
is None:
816 # For x3d, should work ok with vrml too
817 # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
818 data_split
= self
.getFieldName(field
, ancestry
, SPLIT_COMMAS
=True)
822 array_data
= array_as_number(data_split
)
824 elif type(child_array
) == list:
826 array_data
= array_as_number(child_array
)
830 array_data
= child_array
.array_data
832 # print('array_data', array_data)
833 if group
== -1 or len(array_data
) == 0:
836 # We want a flat list
838 for item
in array_data
:
839 if type(item
) == list:
845 flat_array
= array_data
# we are already flat.
851 if type(item
) == list:
854 flat_array
.append(item
)
856 extend_flat(array_data
)
858 # We requested a flat array
865 for item
in flat_array
:
866 sub_array
.append(item
)
867 if len(sub_array
) == group
:
868 new_array
.append(sub_array
)
872 print('\twarning, array was not aligned to requested grouping', group
, 'remaining value', sub_array
)
876 def getFieldAsStringArray(self
, field
, ancestry
):
878 Get a list of strings
880 self_real
= self
.getRealNode() # in case we're an instance
883 for child
in self_real
.children
:
884 if child
.id and len(child
.id) == 1 and child
.id[0] == field
:
890 # each string gets its own list, remove ""'s
892 new_array
= [f
[0][1:-1] for f
in child_array
.fields
]
894 print('\twarning, string array could not be made')
912 level
= self
.getLevel()
914 if self
.node_type
== NODE_REFERENCE
:
916 elif self
.node_type
== NODE_NORMAL
:
922 text
= ind
+ brackets
[0] + '\n'
926 text
+= ind
+ 'ID: ' + str(self
.id) + ' ' + str(level
) + (' lineno %d\n' % self
.lineno
)
928 if self
.node_type
== NODE_REFERENCE
:
929 text
+= ind
+ "(reference node)\n"
933 text
+= ind
+ 'PROTO NODE...\n'
934 text
+= str(self
.proto_node
)
935 text
+= ind
+ 'PROTO NODE_DONE\n'
937 text
+= ind
+ 'FIELDS:' + str(len(self
.fields
)) + '\n'
939 for i
, item
in enumerate(self
.fields
):
940 text
+= ind
+ 'FIELD:\n'
941 text
+= ind
+ str(item
) + '\n'
943 text
+= ind
+ 'PROTO_FIELD_DEFS:' + str(len(self
.proto_field_defs
)) + '\n'
945 for i
, item
in enumerate(self
.proto_field_defs
):
946 text
+= ind
+ 'PROTO_FIELD:\n'
947 text
+= ind
+ str(item
) + '\n'
949 text
+= ind
+ 'ARRAY: ' + str(len(self
.array_data
)) + ' ' + str(self
.array_data
) + '\n'
950 #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'
952 text
+= ind
+ 'CHILDREN: ' + str(len(self
.children
)) + '\n'
953 for i
, child
in enumerate(self
.children
):
954 text
+= ind
+ ('CHILD%d:\n' % i
)
957 text
+= '\n' + ind
+ brackets
[1]
961 def parse(self
, i
, IS_PROTO_DATA
=False):
962 new_i
= self
.__parse
(i
, IS_PROTO_DATA
)
964 # print(self.id, self.getFilename())
966 # Check if this node was an inline or externproto
970 if self
.node_type
== NODE_NORMAL
and self
.getSpec() == 'Inline':
971 ancestry
= [] # Warning! - PROTO's using this wont work at all.
972 url
= self
.getFieldAsString('url', None, ancestry
)
974 url_ls
= [(url
, None)]
977 elif self
.getExternprotoName():
980 for f
in self
.fields
:
986 for f_split
in ff
.split('"'):
988 # "someextern.vrml#SomeID"
991 f_split
, f_split_id
= f_split
.split('#') # there should only be 1 # anyway
993 url_ls
.append((f_split
, f_split_id
))
995 url_ls
.append((f_split
, None))
997 # Was either an Inline or an EXTERNPROTO
1002 for url
, extern_key
in url_ls
:
1006 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
1008 urls
.append(os
.path
.join(os
.path
.dirname(self
.getFilename()), url
))
1009 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
1011 urls
.append(os
.path
.join(os
.path
.dirname(self
.getFilename()), os
.path
.basename(url
)))
1012 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
1015 url
= [url
for url
in urls
if os
.path
.exists(url
)][0]
1021 print('\tWarning: Inline URL could not be found:', url
)
1023 if url
== self
.getFilename():
1024 print('\tWarning: cant Inline yourself recursively:', url
)
1028 data
= gzipOpen(url
)
1030 print('\tWarning: cant open the file:', url
)
1034 # Tricky - inline another VRML
1035 print('\tLoading Inline:"%s"...' % url
)
1037 # Watch it! - backup lines
1038 lines_old
= lines
[:]
1040 lines
[:] = vrmlFormat(data
)
1042 lines
.insert(0, '{')
1043 lines
.insert(0, 'root_node____')
1046 ff = open('/tmp/test.txt', 'w')
1047 ff.writelines([l+'\n' for l in lines])
1050 child
= vrmlNode(self
, NODE_NORMAL
, -1)
1051 child
.setRoot(url
) # initialized dicts
1054 # if self.getExternprotoName():
1055 if self
.getExternprotoName():
1056 if not extern_key
: # if none is specified - use the name
1057 extern_key
= self
.getSpec()
1061 self
.children
.remove(child
)
1064 extern_child
= child
.findSpecRecursive(extern_key
)
1067 self
.children
.append(extern_child
)
1068 extern_child
.parent
= self
1071 print("\tEXTERNPROTO ID found!:", extern_key
)
1073 print("\tEXTERNPROTO ID not found!:", extern_key
)
1075 # Watch it! - restore lines
1076 lines
[:] = lines_old
1080 def __parse(self
, i
, IS_PROTO_DATA
=False):
1082 print('parsing at', i, end="")
1083 print(i, self.id, self.lineno)
1094 node_type
, new_i
= is_nodeline(i
, words
)
1095 if not node_type
: # fail for parsing new node.
1096 print("Failed to parse new node")
1099 if self
.node_type
== NODE_REFERENCE
:
1100 # Only assign the reference and quit
1101 key
= words
[words
.index('USE') + 1]
1102 self
.id = (words
[0],)
1104 self
.reference
= self
.getDefDict()[key
]
1107 self
.id = tuple(words
)
1110 key
= self
.getDefName()
1112 self
.getDefDict()[key
] = self
1114 key
= self
.getProtoName()
1116 key
= self
.getExternprotoName()
1118 proto_dict
= self
.getProtoDict()
1120 proto_dict
[key
] = self
1122 # Parse the proto nodes fields
1123 self
.proto_node
= vrmlNode(self
, NODE_ARRAY
, new_i
)
1124 new_i
= self
.proto_node
.parse(new_i
)
1126 self
.children
.remove(self
.proto_node
)
1128 # print(self.proto_node)
1130 new_i
+= 1 # skip past the {
1132 else: # If we're a proto instance, add the proto node as our child.
1133 spec
= self
.getSpec()
1135 self
.children
.append(proto_dict
[spec
])
1150 return len(lines
) - 1
1153 # print('\tDEBUG:', i, self.node_type, l)
1159 if self
.node_type
!= NODE_NORMAL
: # also ends proto nodes, we may want a type for these too.
1160 print('wrong node ending, expected an } ' + str(i
) + ' ' + str(self
.node_type
))
1163 ### print("returning", i)
1166 if self
.node_type
!= NODE_ARRAY
:
1167 print('wrong node ending, expected a ] ' + str(i
) + ' ' + str(self
.node_type
))
1170 ### print("returning", i)
1173 node_type
, new_i
= is_nodeline(i
, [])
1174 if node_type
: # check text\n{
1175 child
= vrmlNode(self
, node_type
, i
)
1178 elif l
== '[': # some files have these anonymous lists
1179 child
= vrmlNode(self
, NODE_ARRAY
, i
)
1183 l_split
= l
.split(',')
1186 # See if each item is a float?
1188 for num_type
in (int, float):
1190 values
= [num_type(v
) for v
in l_split
]
1196 values
= [[num_type(v
) for v
in segment
.split()] for segment
in l_split
]
1201 if values
is None: # dont parse
1204 # This should not extend over multiple lines however it is possible
1205 # print(self.array_data)
1207 self
.array_data
.extend(values
)
1211 if len(words
) > 2 and words
[1] == 'USE':
1212 vrmlNode(self
, NODE_REFERENCE
, i
)
1215 # print("FIELD", i, l)
1218 ### print('\t\ttag', i)
1220 # print(words, i, l)
1223 # javastrips can exist as values.
1224 quote_count
= l
.count('"')
1225 if quote_count
% 2: # odd number?
1226 # print('MULTILINE')
1230 quote_count
= l
.count('"')
1231 if quote_count
% 2: # odd number?
1232 value
+= '\n' + l
[:l
.rfind('"')]
1237 # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
1238 value_all
= shlex
.split(value
, posix
=False)
1240 for value
in vrml_split_fields(value_all
):
1243 if value
[0] == 'field':
1244 # field SFFloat creaseAngle 4
1245 self
.proto_field_defs
.append(value
)
1247 self
.fields
.append(value
)
1250 # This is a prerequisite for DEF/USE-based material caching
1251 def canHaveReferences(self
):
1252 return self
.node_type
== NODE_NORMAL
and self
.getDefName()
1254 # This is a prerequisite for raw XML-based material caching. For now, only for X3D
1264 data
= gzip
.open(path
, 'r').read()
1270 filehandle
= open(path
, 'rU', encoding
='utf-8', errors
='surrogateescape')
1271 data
= filehandle
.read()
1275 traceback
.print_exc()
1277 data
= data
.decode(encoding
='utf-8', errors
='surrogateescape')
1282 def vrml_parse(path
):
1284 Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
1285 Return root (vrmlNode, '') or (None, 'Error String')
1287 data
= gzipOpen(path
)
1290 return None, 'Failed to open file: ' + path
1293 lines
[:] = vrmlFormat(data
)
1295 lines
.insert(0, '{')
1296 lines
.insert(0, 'dymmy_node')
1298 # Use for testing our parsed output, so we can check on line numbers.
1301 ff = open('/tmp/test.txt', 'w')
1302 ff.writelines([l+'\n' for l in lines])
1307 node_type
, new_i
= is_nodeline(0, [])
1309 return None, 'Error: VRML file has no starting Node'
1311 # Trick to make sure we get all root nodes.
1312 lines
.insert(0, '{')
1313 lines
.insert(0, 'root_node____') # important the name starts with an ascii char
1316 root
= vrmlNode(None, NODE_NORMAL
, -1)
1317 root
.setRoot(path
) # we need to set the root so we have a namespace and know the path in case of inlineing
1322 # This prints a load of text
1329 # ====================== END VRML
1331 # ====================== X3d Support
1333 # Sane as vrml but replace the parser
1334 class x3dNode(vrmlNode
):
1335 def __init__(self
, parent
, node_type
, x3dNode
):
1336 vrmlNode
.__init
__(self
, parent
, node_type
, -1)
1337 self
.x3dNode
= x3dNode
1339 def parse(self
, IS_PROTO_DATA
=False):
1340 # print(self.x3dNode.tagName)
1341 self
.lineno
= self
.x3dNode
.parse_position
[0]
1343 define
= self
.x3dNode
.getAttributeNode('DEF')
1345 self
.getDefDict()[define
.value
] = self
1347 use
= self
.x3dNode
.getAttributeNode('USE')
1350 self
.reference
= self
.getDefDict()[use
.value
]
1351 self
.node_type
= NODE_REFERENCE
1353 print('\tWarning: reference', use
.value
, 'not found')
1354 self
.parent
.children
.remove(self
)
1358 for x3dChildNode
in self
.x3dNode
.childNodes
:
1359 if x3dChildNode
.nodeType
in {x3dChildNode
.TEXT_NODE
, x3dChildNode
.COMMENT_NODE
, x3dChildNode
.CDATA_SECTION_NODE
}:
1362 node_type
= NODE_NORMAL
1363 # print(x3dChildNode, dir(x3dChildNode))
1364 if x3dChildNode
.getAttributeNode('USE'):
1365 node_type
= NODE_REFERENCE
1367 child
= x3dNode(self
, node_type
, x3dChildNode
)
1373 return self
.x3dNode
.tagName
# should match vrml spec
1375 # Used to retain object identifiers from X3D to Blender
1376 def getDefName(self
):
1377 node_id
= self
.x3dNode
.getAttributeNode('DEF')
1379 return node_id
.value
1380 node_id
= self
.x3dNode
.getAttributeNode('USE')
1382 return "USE_" + node_id
.value
1385 # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
1386 # getFieldAsArray getFieldAsBool etc
1387 def getFieldName(self
, field
, ancestry
, AS_CHILD
=False, SPLIT_COMMAS
=False):
1388 # ancestry and AS_CHILD are ignored, only used for VRML now
1390 self_real
= self
.getRealNode() # in case we're an instance
1391 field_xml
= self
.x3dNode
.getAttributeNode(field
)
1393 value
= field_xml
.value
1395 # We may want to edit. for x3d specific stuff
1396 # Sucks a bit to return the field name in the list but vrml excepts this :/
1398 value
= value
.replace(",", " ")
1399 return value
.split()
1403 def canHaveReferences(self
):
1404 return self
.x3dNode
.getAttributeNode('DEF')
1407 return self
.getRealNode().x3dNode
.toxml()
1410 def x3d_parse(path
):
1412 Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
1413 Return root (x3dNode, '') or (None, 'Error String')
1415 import xml
.dom
.minidom
1417 from xml
.sax
import handler
1420 try: doc = xml.dom.minidom.parse(path)
1421 except: return None, 'Could not parse this X3D file, XML error'
1424 # Could add a try/except here, but a console error is more useful.
1425 data
= gzipOpen(path
)
1428 return None, 'Failed to open file: ' + path
1430 # Enable line number reporting in the parser - kinda brittle
1431 def set_content_handler(dom_handler
):
1432 def startElementNS(name
, tagName
, attrs
):
1433 orig_start_cb(name
, tagName
, attrs
)
1434 cur_elem
= dom_handler
.elementStack
[-1]
1435 cur_elem
.parse_position
= (parser
._parser
.CurrentLineNumber
, parser
._parser
.CurrentColumnNumber
)
1437 orig_start_cb
= dom_handler
.startElementNS
1438 dom_handler
.startElementNS
= startElementNS
1439 orig_set_content_handler(dom_handler
)
1441 parser
= xml
.sax
.make_parser()
1442 orig_set_content_handler
= parser
.setContentHandler
1443 parser
.setFeature(handler
.feature_external_ges
, False)
1444 parser
.setFeature(handler
.feature_external_pes
, False)
1445 parser
.setContentHandler
= set_content_handler
1447 doc
= xml
.dom
.minidom
.parseString(data
, parser
)
1450 x3dnode
= doc
.getElementsByTagName('X3D')[0]
1452 return None, 'Not a valid x3d document, cannot import'
1454 bpy
.ops
.object.select_all(action
='DESELECT')
1456 root
= x3dNode(None, NODE_NORMAL
, x3dnode
)
1457 root
.setRoot(path
) # so images and Inline's we load have a relative path
1462 ## f = open('/_Cylinder.wrl', 'r')
1463 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1464 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1465 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
1468 files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
1471 for i, f in enumerate(files):
1480 # NO BLENDER CODE ABOVE THIS LINE.
1481 # -----------------------------------------------------------------------------------
1483 from bpy_extras
import image_utils
1484 from mathutils
import Vector
, Matrix
, Quaternion
1486 GLOBALS
= {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Convert a VRML/X3D axis-angle rotation (x, y, z, angle-in-radians)
    into a 4x4 rotation Matrix."""
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
def translateScale(sca):
    """Build a 4x4 scale matrix from a 3-tuple of per-axis factors.

    NOTE(review): the diagonal assignments and return were restored from a
    gap in the source — verify against the upstream importer.
    """
    mat = Matrix()  # 4x4 identity by default
    mat[0][0] = sca[0]
    mat[1][1] = sca[1]
    mat[2][2] = sca[2]
    return mat
def translateTransform(node, ancestry):
    """Build the 4x4 matrix for a VRML/X3D Transform node.

    Composition (applied right to left) follows the VRML97 Transform spec:
    translation, center, rotation, scaleOrientation, scale,
    inverse scaleOrientation, inverse center. Absent fields contribute
    nothing.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0, 0.0)
    rot = node.getFieldAsFloatTuple('rotation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0, 1.0)
    scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry)  # (0.0, 0.0, 1.0, 0.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0, 0.0)

    if cent:
        cent_mat = Matrix.Translation(cent)
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    rot_mat = translateRotation(rot) if rot else None
    sca_mat = translateScale(sca) if sca else None

    if scaori:
        scaori_mat = translateRotation(scaori)
        scaori_imat = scaori_mat.inverted()
    else:
        scaori_mat = scaori_imat = None

    tx_mat = Matrix.Translation(tx) if tx else None

    new_mat = Matrix()
    # Multiply whichever component matrices are present, in spec order.
    mats = [tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat]
    for mtx in mats:
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def translateTexTransform(node, ancestry):
    """Build the 4x4 matrix for a VRML/X3D TextureTransform node.

    2D texture coords are handled as 3D with y unused; rotation is about Z.
    Composition order (-C, S, R, C, T) is as specified in the VRML97 docs.
    """
    cent = node.getFieldAsFloatTuple('center', None, ancestry)  # (0.0, 0.0)
    rot = node.getFieldAsFloat('rotation', None, ancestry)  # 0.0
    sca = node.getFieldAsFloatTuple('scale', None, ancestry)  # (1.0, 1.0)
    tx = node.getFieldAsFloatTuple('translation', None, ancestry)  # (0.0, 0.0)

    if cent:
        # cent is at a corner by default
        cent_mat = Matrix.Translation(Vector(cent).to_3d())
        cent_imat = cent_mat.inverted()
    else:
        cent_mat = cent_imat = None

    rot_mat = Matrix.Rotation(rot, 4, 'Z') if rot else None
    sca_mat = translateScale((sca[0], sca[1], 0.0)) if sca else None
    tx_mat = Matrix.Translation(Vector(tx).to_3d()) if tx else None

    new_mat = Matrix()
    # as specified in VRML97 docs
    mats = [cent_imat, sca_mat, rot_mat, cent_mat, tx_mat]
    for mtx in mats:
        if mtx:
            new_mat = new_mat @ mtx

    return new_mat
def getFinalMatrix(node, mtx, ancestry, global_matrix):
    """Accumulate every Transform along *ancestry* (plus *node* itself if
    it is a Transform) into *mtx*, then premultiply by *global_matrix*.

    Returns the resulting worldspace 4x4 matrix; *mtx* may be None.
    """
    transform_nodes = [node_tx for node_tx in ancestry
                       if node_tx.getSpec() == 'Transform']
    if node.getSpec() == 'Transform':
        transform_nodes.append(node)
    transform_nodes.reverse()

    if mtx is None:
        mtx = Matrix()

    # Innermost transform is applied first; walk outward.
    for node_tx in transform_nodes:
        mat = translateTransform(node_tx, ancestry)
        mtx = mat @ mtx

    # worldspace matrix
    mtx = global_matrix @ mtx

    return mtx
1605 # -----------------------------------------------------------------------------------
1606 # Mesh import utilities
1608 # Assumes that the mesh has polygons.
def importMesh_ApplyColors(bpymesh, geom, ancestry):
    """Apply a Color/ColorRGBA child of *geom* to *bpymesh* as a vertex
    color layer. Silently does nothing when the geometry has no colors;
    warns (and skips) when counts match neither vertices nor loops."""
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        lcol_layer = bpymesh.vertex_colors.new()

        if len(rgb) == len(bpymesh.vertices):
            # Per-vertex colors: expand to per-loop via each loop's vertex.
            rgb = [rgb[l.vertex_index] for l in bpymesh.loops]
            rgb = tuple(chain(*rgb))
        elif len(rgb) == len(bpymesh.loops):
            rgb = tuple(chain(*rgb))
        else:
            print(
                "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
                (len(rgb), len(bpymesh.vertices), len(bpymesh.loops))
            )
            return

        lcol_layer.data.foreach_set("color", rgb)
1634 # Assumes that the vertices have not been rearranged compared to the
1635 # source file order # or in the order assumed by the spec (e. g. in
1636 # Elevation, in rows by x).
1637 # Assumes polygons have been set.
def importMesh_ApplyNormals(bpymesh, geom, ancestry):
    """Apply a Normal child of *geom* to *bpymesh*, either per vertex or
    per polygon depending on normalPerVertex (default True).

    Assumes vertices are in source-file order and polygons are set.
    """
    normals = geom.getChildBySpec('Normal')
    if normals is None:
        return

    per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
    vectors = normals.getFieldAsArray('vector', 0, ancestry)
    if per_vertex:
        bpymesh.vertices.foreach_set("normal", vectors)
    else:
        bpymesh.polygons.foreach_set("normal", vectors)
1651 # Reads the standard Coordinate object - common for all mesh elements
1652 # Feeds the vertices in the mesh.
1653 # Rearranging the vertex order is a bad idea - other elements
1654 # in X3D might rely on it, if you need to rearrange, please play with
1655 # vertex indices in the polygons instead.
1657 # Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
1658 # brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Read the standard Coordinate child of *geom* and feed its points
    into bpymesh.vertices, preserving source order (other X3D elements
    may index into it)."""
    # We want points here as a flat array, but the caching logic in
    # IndexedFaceSet presumes a 2D one.
    # The case for caching is stronger over there.
    coord_node = geom.getChildBySpec('Coordinate')
    flat = coord_node.getFieldAsArray('point', 0, ancestry)
    bpymesh.vertices.add(len(flat) // 3)
    bpymesh.vertices.foreach_set("co", flat)
1669 # Assumes that the order of vertices matches the source file.
1670 # Relies upon texture coordinates in the X3D node; if a coordinate generation
1671 # algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
1672 # to be implemented by the geometry handler.
1674 # Texture transform is applied in ProcessObject.
def importMesh_ApplyUVs(bpymesh, geom, ancestry):
    """Apply a TextureCoordinate child of *geom* to *bpymesh* as a UV
    layer, expanding per-vertex UVs to per-loop via polygon vertices.

    Does nothing if the node or its points are absent. Texture transform
    is applied elsewhere (ProcessObject).
    """
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord is None:
        return

    uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    if not uvs:
        return

    d = bpymesh.uv_layers.new().data
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    d.foreach_set('uv', uvs)
1691 # Common steps for all triangle meshes once the geometry has been set:
1692 # normals, vertex colors, and UVs.
def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry):
    """Common final steps for all triangle meshes once geometry is set:
    normals, vertex colors, UVs, then validate/update. Returns the mesh.

    NOTE(review): the validate/update/return tail was restored from a gap
    in the source — verify against the upstream importer.
    """
    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    importMesh_ApplyColors(bpymesh, geom, ancestry)
    importMesh_ApplyUVs(bpymesh, geom, ancestry)
    bpymesh.validate()
    bpymesh.update()
    return bpymesh
1702 # Assumes that the mesh is stored as polygons and loops, and the premade array
1703 # of texture coordinates follows the loop array.
1704 # The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Create a new UV layer on *bpymesh* and fill it from *loops*, a flat
    (u0, v0, u1, v1, ...) array that follows the mesh's loop order."""
    uv_data = bpymesh.uv_layers.new().data
    uv_data.foreach_set('uv', loops)
def flip(r, ccw):
    """Return sequence *r* unchanged when *ccw* is true, reversed otherwise.

    Used to normalize face winding. The def line was restored from a gap
    in the source; the signature matches the visible call sites
    ``flip(face, ccw)``.
    """
    return r if ccw else r[::-1]
1713 # -----------------------------------------------------------------------------------
1714 # Now specific geometry importers
def importMesh_IndexedTriangleSet(geom, ancestry):
    """Import an X3D IndexedTriangleSet as a Blender mesh.

    Ignores solid; colorPerVertex is always true for this node.
    """
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    bpymesh = bpy.data.meshes.new(name="XXX")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    num_polys = len(index) // 3
    if not ccw:
        # Swap the first two vertices of every triangle to flip winding.
        index = [index[3 * i + j] for i in range(num_polys) for j in (1, 0, 2)]

    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)
    bpymesh.polygons.foreach_set("vertices", index)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleStripSet(geom, ancestry):
    """Import an X3D IndexedTriangleStripSet (strips separated by -1 in
    the index) as a Blender mesh. colorPerVertex is always true."""
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # Each strip of length k yields k-2 triangles; -1 markers cost 3 each.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # Generate triangle vertex indices, alternating winding within a
        # strip (odd toggles), restarting after every -1 separator.
        # NOTE(review): interior of this generator was restored from gaps
        # in the source — verify against the upstream importer.
        i = 0
        odd = cw
        while True:
            yield index[i + odd]
            yield index[i + 1 - odd]
            yield index[i + 2]
            odd = 1 - odd
            i += 1
            if i + 2 >= len(index):
                return
            if index[i + 2] == -1:
                i += 3
                odd = cw
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedTriangleFanSet(geom, ancestry):
    """Import an X3D IndexedTriangleFanSet (fans separated by -1 in the
    index) as a Blender mesh. colorPerVertex is always true."""
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)

    # Read the faces
    index = geom.getFieldAsArray('index', 0, ancestry)
    while index[-1] == -1:
        del index[-1]
    ngaps = sum(1 for i in index if i == -1)
    # Each fan of length k yields k-2 triangles; -1 markers cost 3 each.
    num_polys = len(index) - 2 - 3 * ngaps
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # i is the fan center index; j walks around the rim. Restart at
        # the next fan after each -1 separator.
        # NOTE(review): interior of this generator was restored from gaps
        # in the source — verify against the upstream importer.
        i = 0
        j = 1
        while True:
            yield index[i]
            yield index[i + j + cw]
            yield index[i + j + 1 - cw]
            j += 1
            if i + j + 1 >= len(index):
                return
            if index[i + j + 1] == -1:
                i = i + j + 2
                j = 1
    bpymesh.polygons.foreach_set("vertices", [f for f in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleSet(geom, ancestry):
    """Import an X3D TriangleSet (every 3 consecutive vertices form a
    triangle) as a Blender mesh. colorPerVertex is always true."""
    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    bpymesh = bpy.data.meshes.new(name="TriangleSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    n = len(bpymesh.vertices)
    num_polys = n // 3
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    if ccw:
        fv = [i for i in range(n)]
    else:
        # Swap the first two vertices of every triangle to flip winding.
        fv = [3 * i + j for i in range(n // 3) for j in (1, 0, 2)]
    bpymesh.polygons.foreach_set("vertices", fv)

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleStripSet(geom, ancestry):
    """Import an X3D TriangleStripSet (strip lengths in stripCount) as a
    Blender mesh. colorPerVertex is always true."""
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('stripCount', 0, ancestry)
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # b is the base vertex index of the current strip; winding
        # alternates within a strip via (j + cw) % 2.
        # NOTE(review): parts of this generator were restored from gaps in
        # the source — verify against the upstream importer.
        b = 0
        for i in range(0, len(counts)):
            for j in range(0, counts[i] - 2):
                yield b + j + (j + cw) % 2
                yield b + j + 1 - (j + cw) % 2
                yield b + j + 2
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])

    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_TriangleFanSet(geom, ancestry):
    """Import an X3D TriangleFanSet (fan lengths in fanCount) as a
    Blender mesh. colorPerVertex is always true."""
    cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
    # Fixed copy-paste: the datablock used to be named "TriangleStripSet".
    bpymesh = bpy.data.meshes.new(name="TriangleFanSet")
    importMesh_ReadVertices(bpymesh, geom, ancestry)
    counts = geom.getFieldAsArray('fanCount', 0, ancestry)
    num_polys = sum([n - 2 for n in counts])
    bpymesh.loops.add(num_polys * 3)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 3, 3))
    bpymesh.polygons.foreach_set("loop_total", (3,) * num_polys)

    def triangles():
        # b is the fan's center vertex index; j walks around the rim.
        # NOTE(review): parts of this generator were restored from gaps in
        # the source — verify against the upstream importer.
        b = 0
        for i in range(0, len(counts)):
            for j in range(1, counts[i] - 1):
                yield b
                yield b + j + cw
                yield b + j + 1 - cw
            b += counts[i]
    bpymesh.polygons.foreach_set("vertices", [x for x in triangles()])
    return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry)
def importMesh_IndexedFaceSet(geom, ancestry):
    """Import an X3D/VRML IndexedFaceSet as a Blender mesh.

    Handles shared (referenced) Coordinate nodes by caching the parsed
    point array, and culls unused vertices when the point set is much
    larger than the index.

    NOTE(review): this function was reconstructed from a gap-ridden
    source; verify branch-by-branch against the upstream importer.
    """
    # Saw the following structure in X3Ds: the first mesh has a huge set
    # of vertices and a reasonably sized index. The rest of the meshes
    # reference the Coordinate node from the first one, and have their
    # own reasonably sized indices.
    #
    # In Blender, to the best of my knowledge, there's no way to reuse
    # the vertex set between meshes. So we have culling logic instead -
    # for each mesh, only leave vertices that are used for faces.

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    coord = geom.getChildBySpec('Coordinate')
    if coord.reference:
        points = coord.getRealNode().parsed
        # We need unflattened coord array here, while
        # importMesh_ReadVertices uses flattened. Can't cache both :(
        # TODO: resolve that somehow, so that vertex set can be effectively
        # reused between different mesh types?
    else:
        points = coord.getFieldAsArray('point', 3, ancestry)
        if coord.canHaveReferences():
            coord.parsed = points
    index = geom.getFieldAsArray('coordIndex', 0, ancestry)

    # Strip trailing face separators so the last face is handled uniformly.
    while index and index[-1] == -1:
        del index[-1]

    if len(points) >= 2 * len(index):  # Need to cull
        culled_points = []
        cull = {}  # Maps old vertex indices to new ones
        uncull = []  # Maps new indices to the old ones
    else:
        uncull = cull = None

    faces = []
    face = []
    # Generate faces. Cull the vertices if necessary,
    # remembering the old->new mapping for the index-remap helpers below.
    for i in index:
        if i == -1:
            if face:
                faces.append(flip(face, ccw))
            face = []
        elif cull is not None:
            if i in cull:
                face.append(cull[i])
            else:
                new_i = len(culled_points)
                cull[i] = new_i
                culled_points.append(points[i])
                uncull.append(i)
                face.append(new_i)
        else:
            face.append(i)
    if face:
        faces.append(flip(face, ccw))  # The last face

    if cull:
        points = culled_points

    bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
    bpymesh.from_pydata(points, [], faces)
    # No validation here. It throws off the per-face stuff.

    # Similar treatment for normal and color indices

    def processPerVertexIndex(ind):
        if ind:
            # Deflatten into an array of arrays by face; the latter might
            # need to be flipped
            i = 0
            verts_by_face = []
            for f in faces:
                verts_by_face.append(flip(ind[i:i + len(f)], ccw))
                i += len(f) + 1
            return verts_by_face
        elif uncull:
            return [[uncull[v] for v in f] for f in faces]
        else:
            return faces  # Reuse coordIndex, as per the spec

    # Normals
    normals = geom.getChildBySpec('Normal')
    if normals:
        per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
        vectors = normals.getFieldAsArray('vector', 3, ancestry)
        normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
        if per_vertex:
            co = [co for f in processPerVertexIndex(normal_index)
                  for v in f
                  for co in vectors[v]]
            bpymesh.vertices.foreach_set("normal", co)
        else:
            co = [co for (i, f) in enumerate(faces)
                  for j in f
                  for co in vectors[normal_index[i] if normal_index else i]]
            bpymesh.polygons.foreach_set("normal", co)

    # Apply vertex/face colors
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            rgb = colors.getFieldAsArray('color', 4, ancestry)
        else:
            # Array of arrays; no need to flatten
            rgb = [c + [1.0] for c in colors.getFieldAsArray('color', 3, ancestry)]

        color_per_vertex = geom.getFieldAsBool('colorPerVertex', True, ancestry)
        color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)

        d = bpymesh.vertex_colors.new().data
        if color_per_vertex:
            cco = [cco for f in processPerVertexIndex(color_index)
                   for v in f
                   for cco in rgb[v]]
        elif color_index:  # Color per face with index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[color_index[i]]]
        else:  # Color per face without index
            cco = [cco for (i, f) in enumerate(faces)
                   for j in f
                   for cco in rgb[i]]
        d.foreach_set('color', cco)

    # Texture coordinates (UVs)
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
        tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
        tex_index = processPerVertexIndex(tex_index)
        loops = [co for f in tex_index
                 for v in f
                 for co in tex_coord_points[v]]
    else:
        # No UVs in the file: generate them from the bounding box, mapping
        # the two largest extents to S and T (the X3D default behavior).
        x_min = x_max = y_min = y_max = z_min = z_max = None
        for f in faces:
            # Unused vertices don't participate in size; X3DOM does so
            for v in f:
                (x, y, z) = points[v]
                if x_min is None or x < x_min:
                    x_min = x
                if x_max is None or x > x_max:
                    x_max = x
                if y_min is None or y < y_min:
                    y_min = y
                if y_max is None or y > y_max:
                    y_max = y
                if z_min is None or z < z_min:
                    z_min = z
                if z_max is None or z > z_max:
                    z_max = z

        mins = (x_min, y_min, z_min)
        deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
        axes = [0, 1, 2]
        axes.sort(key=lambda a: (-deltas[a], a))
        # Tuple comparison breaks ties
        (s_axis, t_axis) = axes[0:2]
        s_min = mins[s_axis]
        ds = deltas[s_axis]
        t_min = mins[t_axis]
        dt = deltas[t_axis]

        # Avoid divide by zero T76303.
        if ds <= 0.0:
            ds = 1.0
        if dt <= 0.0:
            dt = 1.0

        def generatePointCoords(pt):
            return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
        loops = [co for f in faces
                 for v in f
                 for co in generatePointCoords(points[v])]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_ElevationGrid(geom, ancestry):
    """Import an X3D ElevationGrid (a height field of quads) as a Blender
    mesh, with per-vertex or per-face colors and generated or explicit UVs.

    NOTE(review): reconstructed from a gap-ridden source — verify against
    the upstream importer, in particular the color component count.
    """
    height = geom.getFieldAsArray('height', 0, ancestry)
    x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
    x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
    z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
    z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
    ccw = geom.getFieldAsBool('ccw', True, ancestry)

    # The spec assumes a certain ordering of quads; outer loop by z, inner by x
    bpymesh = bpy.data.meshes.new(name="ElevationGrid")
    bpymesh.vertices.add(x_dim * z_dim)
    co = [w for x in range(x_dim) for z in range(z_dim)
          for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
    bpymesh.vertices.foreach_set("co", co)

    num_polys = (x_dim - 1) * (z_dim - 1)
    bpymesh.loops.add(num_polys * 4)
    bpymesh.polygons.add(num_polys)
    bpymesh.polygons.foreach_set("loop_start", range(0, num_polys * 4, 4))
    bpymesh.polygons.foreach_set("loop_total", (4,) * num_polys)
    # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
    # For quad tessfaces, it was important that the final vertex index was not 0
    # (Blender treated it as a triangle then).
    # So simply reversing the face was not an option.
    # With bmesh polygons, this has no importance anymore, but keep existing code for now.
    verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
             for i in (z * x_dim + x,
                       z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                       (z + 1) * x_dim + x + 1,
                       (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
    bpymesh.polygons.foreach_set("vertices", verts)

    importMesh_ApplyNormals(bpymesh, geom, ancestry)
    # ApplyColors won't work here; faces are quads, and also per-face
    # coloring should be supported
    colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
    if colors:
        if colors.getSpec() == 'ColorRGBA':
            # Drop alpha — TODO confirm the component count expected here.
            rgb = [c[:3] for c
                   in colors.getFieldAsArray('color', 4, ancestry)]
        else:
            # Array of arrays; no need to flatten
            rgb = colors.getFieldAsArray('color', 3, ancestry)

        tc = bpymesh.vertex_colors.new().data
        if geom.getFieldAsBool('colorPerVertex', True, ancestry):
            # Per-vertex coloring
            # Note the 2/4 flip here
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * x_dim + x,
                                            z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
                                            (z + 1) * x_dim + x + 1,
                                            (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)
                            for c in rgb[rgb_idx]])
        else:  # Coloring per face
            tc.foreach_set("color",
                           [c for x in range(x_dim - 1)
                            for z in range(z_dim - 1)
                            for rgb_idx in (z * (x_dim - 1) + x,) * 4
                            for c in rgb[rgb_idx]])

    # Textures also need special treatment; it's all quads,
    # and there's a builtin algorithm for coordinate generation
    tex_coord = geom.getChildBySpec('TextureCoordinate')
    if tex_coord:
        uvs = tex_coord.getFieldAsArray('point', 2, ancestry)
    else:
        uvs = [(i / (x_dim - 1), j / (z_dim - 1))
               for i in range(x_dim)
               for j in range(z_dim)]

    d = bpymesh.uv_layers.new().data
    # Rather than repeat the face/vertex algorithm from above, we read
    # the vertex index back from polygon. Might be suboptimal.
    uvs = [i for poly in bpymesh.polygons
           for vidx in poly.vertices
           for i in uvs[vidx]]
    d.foreach_set('uv', uvs)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Extrusion(geom, ancestry):
    """Import a VRML/X3D Extrusion (a 2D cross-section swept along a 3D
    spine, with optional per-point orientation and scale) as a Blender mesh.

    NOTE(review): reconstructed from a gap-ridden source — verify against
    the upstream importer, in particular the basis normalization step.
    """
    # Interestingly, the spec doesn't allow for vertex/face colors in this
    # element, nor for normals.
    # Since coloring and normals are not supported here, and also large
    # polygons for caps might be required, we shall use from_pydata().

    ccw = geom.getFieldAsBool('ccw', True, ancestry)
    begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
    end_cap = geom.getFieldAsBool('endCap', True, ancestry)
    cross = geom.getFieldAsArray('crossSection', 2, ancestry)
    if not cross:
        cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
    spine = geom.getFieldAsArray('spine', 3, ancestry)
    if not spine:
        spine = ((0, 0, 0), (0, 1, 0))
    orient = geom.getFieldAsArray('orientation', 4, ancestry)
    if orient:
        orient = [Quaternion(o[:3], o[3]).to_matrix()
                  if o[3] else None for o in orient]
    scale = geom.getFieldAsArray('scale', 2, ancestry)
    if scale:
        scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
                 if s[0] != 1 or s[1] != 1 else None for s in scale]

    # Special treatment for the closed spine and cross section.
    # Let's save some memory by not creating identical but distinct vertices;
    # later we'll introduce conditional logic to link the last vertex with
    # the first one where necessary.
    cross_closed = cross[0] == cross[-1]
    if cross_closed:
        cross = cross[:-1]
    nc = len(cross)
    cross = [Vector((c[0], 0, c[1])) for c in cross]
    ncf = nc if cross_closed else nc - 1
    # Face count along the cross; for closed cross, it's the same as the
    # respective vertex count

    spine_closed = spine[0] == spine[-1]
    if spine_closed:
        spine = spine[:-1]
    ns = len(spine)
    spine = [Vector(s) for s in spine]
    nsf = ns if spine_closed else ns - 1

    # This will be used for fallback, where the current spine point joins
    # two collinear spine segments. No need to recheck the case of the
    # closed spine/last-to-first point juncture; if there's an angle there,
    # it would kick in on the first iteration of the main loop by spine.
    def findFirstAngleNormal():
        for i in range(1, ns - 1):
            spt = spine[i]
            z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
            if z.length > EPSILON:
                return z
        # All the spines are collinear. Fallback to the rotated source
        # z axis.
        # TODO: handle the situation where the first two spine points match
        v = spine[1] - spine[0]
        orig_y = Vector((0, 1, 0))
        orig_z = Vector((0, 0, 1))
        if v.cross(orig_y).length >= EPSILON:
            # Spine at angle with global y - rotate the z accordingly
            orig_z.rotate(orig_y.rotation_difference(v))
        return orig_z

    verts = []
    z = None
    for i, spt in enumerate(spine):
        if (i > 0 and i < ns - 1) or spine_closed:
            snext = spine[(i + 1) % ns]
            sprev = spine[(i - 1 + ns) % ns]
            y = snext - sprev
            vnext = snext - spt
            vprev = sprev - spt
            try_z = vnext.cross(vprev)
            # Might be zero, then all kinds of fallback
            if try_z.length > EPSILON:
                if z is not None and try_z.dot(z) < 0:
                    try_z.negate()
                z = try_z
            elif not z:  # No z, and no previous z.
                # Look ahead, see if there's at least one point where
                # spines are not collinear.
                z = findFirstAngleNormal()
        elif i == 0:  # And non-crossed
            snext = spine[i + 1]
            y = snext - spt
            z = findFirstAngleNormal()
        else:  # last point and not crossed
            sprev = spine[i - 1]
            y = spt - sprev
            # If there's more than one point in the spine, z is already set.
            # One point in the spline is an error anyway.

        x = y.cross(z)
        for v in (x, y, z):
            v.normalize()
        m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
        # Columns are the unit vectors for the xz plane for the cross-section
        if orient:
            mrot = orient[i] if len(orient) > 1 else orient[0]
            if mrot:
                m @= mrot  # Not sure about this. Counterexample???
        if scale:
            mscale = scale[i] if len(scale) > 1 else scale[0]
            if mscale:
                m @= mscale
        # First the cross-section 2-vector is scaled,
        # then applied to the xz plane unit vectors
        for cpt in cross:
            verts.append((spt + m @ cpt).to_tuple())
        # Could've done this with a single 4x4 matrix... Oh well

    # The method from_pydata() treats correctly quads with final vertex
    # index 0.
    # So we just flip the vertices if ccw is off.

    faces = []
    if begin_cap:
        faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))

    # Order of edges in the face: forward along cross, forward along spine,
    # backward along cross, backward along spine, flipped if now ccw.
    # This order is assumed later in the texture coordinate assignment;
    # please don't change without syncing.

    faces += [flip((
        s * nc + c,
        s * nc + (c + 1) % nc,
        (s + 1) * nc + (c + 1) % nc,
        (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]

    if spine_closed:
        # The faces between the last and the first spine points
        b = (ns - 1) * nc
        faces += [flip((
            b + c,
            b + (c + 1) % nc,
            (c + 1) % nc,
            c), ccw) for c in range(ncf)]

    if end_cap:
        faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))

    bpymesh = bpy.data.meshes.new(name="Extrusion")
    bpymesh.from_pydata(verts, [], faces)

    # The way we deal with textures in triangular meshes doesn't apply.
    # The structure of the loop array goes: cap, side, cap
    if begin_cap or end_cap:  # Need dimensions
        x_min = x_max = z_min = z_max = None
        for c in cross:
            (x, z) = (c.x, c.z)
            if x_min is None or x < x_min:
                x_min = x
            if x_max is None or x > x_max:
                x_max = x
            if z_min is None or z < z_min:
                z_min = z
            if z_max is None or z > z_max:
                z_max = z
        dx = x_max - x_min
        dz = z_max - z_min
        cap_scale = dz if dz > dx else dx

    # Takes an index in the cross array, returns scaled
    # texture coords for cap texturing purposes
    def scaledLoopVertex(i):
        c = cross[i]
        return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale

    # X3DOM uses raw cap shape, not a scaled one. So we will, too.

    loops = []
    mloops = bpymesh.loops
    if begin_cap:  # vertex indices match the indices in cross
        # Rely on the loops in the mesh; don't repeat the face
        # generation logic here
        loops += [co for i in range(nc)
                  for co in scaledLoopVertex(mloops[i].vertex_index)]

    # Sides
    # Same order of vertices as in face generation
    # We don't rely on the loops in the mesh; instead,
    # we repeat the face generation logic.
    loops += [co for s in range(nsf)
              for c in range(ncf)
              for v in flip(((c / ncf, s / nsf),
                             ((c + 1) / ncf, s / nsf),
                             ((c + 1) / ncf, (s + 1) / nsf),
                             (c / ncf, (s + 1) / nsf)), ccw) for co in v]

    if end_cap:
        # Base loop index for end cap
        lb = ncf * nsf * 4 + (nc if begin_cap else 0)
        # Rely on the loops here too.
        loops += [co for i in range(nc) for co
                  in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
2357 # -----------------------------------------------------------------------------------
2358 # Line and point sets
def importMesh_LineSet(geom, ancestry):
    """Import an X3D LineSet as a Blender POLY curve, one spline per
    vertexCount entry."""
    # TODO: line display properties are ignored
    # Per-vertex color is ignored
    coord = geom.getChildBySpec('Coordinate')
    src_points = coord.getFieldAsArray('point', 3, ancestry)
    # Array of 3; Blender needs arrays of 4
    bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
    bpycurve.dimensions = '3D'
    counts = geom.getFieldAsArray('vertexCount', 0, ancestry)
    b = 0
    for n in counts:
        sp = bpycurve.splines.new('POLY')
        sp.points.add(n - 1)  # points already has one element

        def points():
            # Pad each 3D point with the 4th (weight) coordinate.
            for x in src_points[b:b + n]:
                yield x[0]
                yield x[1]
                yield x[2]
                yield 0
        sp.points.foreach_set('co', [x for x in points()])
        b += n
    return bpycurve
def importMesh_IndexedLineSet(geom, ancestry):
    """Import a VRML/X3D IndexedLineSet as a Blender POLY curve; lines
    are separated by -1 in coordIndex. Returns None when no points."""
    # VRML not x3d
    # coord = geom.getChildByName('coord') # 'Coordinate'
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    if coord:
        points = coord.getFieldAsArray('point', 3, ancestry)
    else:
        points = []

    if not points:
        print('\tWarning: IndexedLineSet had no points')
        return None

    ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)

    lines = []
    line = []

    for il in ils_lines:
        if il == -1:
            lines.append(line)
            line = []
        else:
            line.append(int(il))
    lines.append(line)

    # vcolor = geom.getChildByName('color')
    # blender doesn't have per vertex color

    bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
    bpycurve.dimensions = '3D'

    for line in lines:
        if len(line) < 2:
            continue
        # co = points[line[0]] # UNUSED
        nu = bpycurve.splines.new('POLY')
        nu.points.add(len(line) - 1)  # the new nu has 1 point to begin with
        for il, pt in zip(line, nu.points):
            pt.co[0:3] = points[il]

    return bpycurve
def importMesh_PointSet(geom, ancestry):
    """Import a VRML/X3D PointSet as a vertices-only Blender mesh."""
    # VRML not x3d
    coord = geom.getChildBySpec('Coordinate')  # works for x3d and vrml
    if coord:
        points = coord.getFieldAsArray('point', 3, ancestry)
    else:
        points = []

    # vcolor = geom.getChildByName('color')
    # blender doesn't have per vertex color

    bpymesh = bpy.data.meshes.new("PointSet")
    bpymesh.vertices.add(len(points))
    bpymesh.vertices.foreach_set("co", [a for v in points for a in v])

    # No need to validate
    bpymesh.update()
    return bpymesh
2450 # -----------------------------------------------------------------------------------
2452 # SA: they used to use bpy.ops for primitive creation. That was
2453 # unbelievably slow on complex scenes. I rewrote to generate meshes
2457 GLOBALS
['CIRCLE_DETAIL'] = 12
def importMesh_Sphere(geom, ancestry):
    """Import an X3D Sphere as a UV sphere mesh with per-loop texture
    coordinates.

    NOTE(review): reconstructed from a gap-ridden source — verify the
    vertex/face layout against the upstream importer.
    """
    # solid is ignored.
    # Extra field 'subdivision="n m"' attribute, specifying how many
    # rings and segments to use (X3DOM).
    r = geom.getFieldAsFloat('radius', 0.5, ancestry)
    subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
    if subdiv:
        if len(subdiv) == 1:
            nr = ns = subdiv[0]
        else:
            (nr, ns) = subdiv
    else:
        nr = ns = GLOBALS['CIRCLE_DETAIL']
        # used as both ring count and segment count
    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
    lou = 2 * pi / ns  # Unit angle of longitude (segments)

    bpymesh = bpy.data.meshes.new(name="Sphere")

    bpymesh.vertices.add(ns * (nr - 1) + 2)
    # The non-polar vertices go from x=0, negative z plane counterclockwise -
    # to -x, to +z, to +x, back to -z
    co = [0, r, 0, 0, -r, 0]  # +y and -y poles
    co += [r * coe for ring in range(1, nr) for seg in range(ns)
           for coe in (-sin(lou * seg) * sin(lau * ring),
                       cos(lau * ring),
                       -cos(lou * seg) * sin(lau * ring))]
    bpymesh.vertices.foreach_set('co', co)

    num_poly = ns * nr
    num_tri = ns * 2  # Top and bottom caps are triangles; sides are quads.
    num_quad = num_poly - num_tri
    num_loop = num_quad * 4 + num_tri * 3
    tf = bpymesh.polygons
    tf.add(num_poly)
    bpymesh.loops.add(num_loop)
    bpymesh.polygons.foreach_set("loop_start",
                                 tuple(range(0, ns * 3, 3)) +
                                 tuple(range(ns * 3, num_loop - ns * 3, 4)) +
                                 tuple(range(num_loop - ns * 3, num_loop, 3)))
    bpymesh.polygons.foreach_set("loop_total", (3,) * ns + (4,) * num_quad + (3,) * ns)

    vb = 2 + (nr - 2) * ns  # First vertex index for the bottom cap
    fb = (nr - 1) * ns  # First face index for the bottom cap

    # Because of tricky structure, assign texture coordinates along with
    # face creation. Can't easily do foreach_set, 'cause caps are triangles and
    # sides are quads.

    tex = bpymesh.uv_layers.new().data

    # Faces go in order: top cap, sides, bottom cap.
    # Sides go by ring then by segment.

    # Caps
    # Top cap face vertices go in order: down right up
    # (starting from +y pole)
    # Bottom cap goes: up left down (starting from -y pole)
    for seg in range(ns):
        tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
        tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
        for lidx, uv in zip(tf[seg].loop_indices,
                            (((seg + 0.5) / ns, 1),
                             (seg / ns, 1 - 1 / nr),
                             ((seg + 1) / ns, 1 - 1 / nr))):
            tex[lidx].uv = uv
        for lidx, uv in zip(tf[fb + seg].loop_indices,
                            (((seg + 0.5) / ns, 0),
                             ((seg + 1) / ns, 1 / nr),
                             (seg / ns, 1 / nr))):
            tex[lidx].uv = uv

    # Sides
    # Side face vertices go in order: down right up left
    for ring in range(nr - 2):
        tvb = 2 + ring * ns
        # First vertex index for the top edge of the ring
        bvb = tvb + ns
        # First vertex index for the bottom edge of the ring
        rfb = ns * (ring + 1)
        # First face index for the ring
        for seg in range(ns):
            nseg = (seg + 1) % ns
            tf[rfb + seg].vertices = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
            for lidx, uv in zip(tf[rfb + seg].loop_indices,
                                ((seg / ns, 1 - (ring + 1) / nr),
                                 (seg / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 2) / nr),
                                 ((seg + 1) / ns, 1 - (ring + 1) / nr))):
                tex[lidx].uv = uv

    bpymesh.validate()
    bpymesh.update()
    return bpymesh
def importMesh_Cylinder(geom, ancestry):
    """Import an X3D Cylinder as a Blender mesh with optional side, top
    and bottom faces, plus generated texture coordinates.

    NOTE(review): reconstructed from a gap-ridden source — verify against
    the upstream importer.
    """
    # solid is ignored
    # no ccw in this element
    # Extra parameter subdivision="n" - how many faces to use
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2
    yvalues = (height / 2, -height / 2)
    angle = 2 * pi / n

    # The seam is at x=0, z=-r, vertices go ccw -
    # to pos x, to neg z, to neg x, back to neg z
    verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
             for i in range(n) for y in yvalues]
    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
    if top:
        faces += [[x for x in range(0, nn, 2)]]
    if bottom:
        faces += [[x for x in range(nn - 1, -1, -2)]]

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.

    bpymesh.validate()

    # The structure of the loop array goes: cap, side, cap.
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]

    if top:
        loops += [0.5 + co / 2 for i in range(n)
                  for co in (-sin(angle * i), cos(angle * i))]

    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Create a Blender mesh for an X3D Cone node; returns the bpy mesh."""
    # Extra parameter subdivision="n" - how many faces to use
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Vertex 0 is the apex; the rest form the base rim, ccw, seam at -z.
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]

    faces = []
    if side:
        # Side face vertices go: up down right
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)

    bpymesh.validate()

    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Create a Blender mesh for an X3D Box node; returns the bpy mesh."""
    # No ccw in this element
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)

    # Size field is the full extent; vertices use half-extents.
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    # NOTE(review): the face index tuple was lost in the source mangling;
    # reconstructed as one quad per box side — verify against upstream.
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2697 # -----------------------------------------------------------------------------------
2698 # Utilities for importShape
2701 # Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    # Given an X3D material, creates a Blender material.
    # texture is applied later, in appearance_Create().
    # All values between 0.0 and 1.0, defaults from VRML docs.
    #
    # The body previously continued after the unconditional return with
    # pre-2.80 Material settings (ambient, emit, specular_hardness,
    # use_transparency, ...) that could never execute; that unreachable
    # dead code has been removed.  The material object itself is returned
    # so callers (appearance_Create) can cache and assign it.
    bpymat = bpy.data.materials.new(vrmlname)
    return bpymat  # XXX For now...
def appearance_CreateDefaultMaterial():
    # Just applies the X3D defaults. Used for shapes
    # without explicit material definition
    # (but possibly with a texture).
    #
    # As with appearance_CreateMaterial, the statements after the early
    # return (ambient/diffuse_color/mirror_color/specular settings from the
    # pre-2.80 Material API) were unreachable dead code and are removed.
    bpymat = bpy.data.materials.new("Material")
    return bpymat  # XXX For now...
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each candidate URL in turn; return the first image that loads, else None."""
    bpyima = None
    for f in ima_urls:
        # Relative paths are resolved against the file the node came from.
        dirname = os.path.dirname(node.getFilename())
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            break

    return bpyima
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Resolve an ImageTexture node's url field(s) and load the image.

    Returns the loaded bpy image, or None when no URL is present or
    loading fails.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
            # in some cases we get a list of images.
        except:
            pass
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None

    bpyima = appearance_LoadImageTextureFile(ima_urls, node)

    if not bpyima:
        print("ImportX3D warning: unable to load texture", ima_urls)
    else:
        # KNOWN BUG; PNGs with a transparent color are not perceived
        # as transparent. Need alpha channel.
        if bpyima.depth not in {32, 128}:
            bpyima.alpha_mode = 'NONE'
    return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load (or fetch from cache) the image for a texture node.

    Both USE-based caching and desc-based caching.
    Works for bother ImageTextures and PixelTextures.
    """
    # USE-based caching
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    if desc and desc in texture_cache:
        bpyima = texture_cache[desc]
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima
        return bpyima

    # No cached texture, load it.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        repeat_s = tex_node.getFieldAsBool('repeatS', True, ancestry)
        bpyima.use_clamp_x = not repeat_s
        repeat_t = tex_node.getFieldAsBool('repeatT', True, ancestry)
        bpyima.use_clamp_y = not repeat_t

        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima

        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    # Expand a cached material into the (bpymat, bpyima, tex_has_alpha)
    # triple returned by importShape_LoadAppearance.
    #
    # The original contained an `if 0 and bpymat.texture_slots[0] ...`
    # branch recovering the image and alpha flag from the material's
    # texture slot; `texture_slots` is a pre-2.80 API and the branch was
    # disabled (`if 0`), so it is removed here as unreachable dead code.
    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (material desc, texture desc) key for material_cache.

    Returns None when either present node lacks a description
    (desc-based caching is then off for this appearance).
    """
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    # Cache only when every node that exists also has a usable desc.
    if not((tex_node and tex_desc is None) or
           (material and mat_desc is None)):
        # desc not available (in VRML)
        # TODO: serialize VRML nodes!!!
        return (mat_desc, tex_desc)
    elif not tex_node and not material:
        # Even for VRML, we cache the null material
        return ("Default", "Default")

    return None  # Desc-based caching is off
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Creates a Blender material object from appearance.

    Returns (bpymat, bpyima, tex_has_alpha).
    """
    bpyima = None
    tex_has_alpha = False

    if material:
        bpymat = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        bpymat = appearance_CreateDefaultMaterial()

    if tex_node:  # Texture caching inside there
        bpyima = appearance_LoadTexture(tex_node, ancestry, node)

    # The two `if False` branches below are disabled legacy (pre-2.80)
    # texture-slot / vertex-paint wiring, kept for reference.
    if False and is_vcol:
        bpymat.use_vertex_color_paint = True

    if False and bpyima:
        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}

        texture = bpy.data.textures.new(bpyima.name, 'IMAGE')
        texture.image = bpyima

        mtex = bpymat.texture_slots.add()
        mtex.texture = texture

        mtex.texture_coords = 'UV'
        mtex.use_map_diffuse = True

        if tex_has_alpha:
            bpymat.use_transparency = True
            mtex.use_map_alpha = True
            mtex.alpha_factor = 0.0

    return (bpymat, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(
        vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Build a bpy image from an X3D PixelTexture's inline pixel array."""
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Conditional above the loop, for performance
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco
                         in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
# Called from importShape to insert a data object (typically a mesh)
# into the scene
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Wrap geometry data in a Blender object, apply material/UVs, link it."""
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.auto_smooth_angle = creaseAngle
            bpydata.use_auto_smooth = True

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]
                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Create a Blender FONT curve from an X3D Text node; returns it."""
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    body = [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(body)
    bpytext.size = size
    return bpytext
3083 # -----------------------------------------------------------------------------------
# Dispatch table: X3D geometry node spec -> importer function.
# Used by importShape() instead of a long if/elif chain.
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
    }
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import one Shape node: resolve appearance, dispatch geometry, link object."""
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # USE'd shape: instance the existing object's data.
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    bpyima = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)

    if appr:
        (bpymat, bpyima,
         tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
                                                     ancestry, node,
                                                     is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is not None:
        bpydata = geom_fn(geom, ancestry)

        # There are no geometry importers that can legally return
        # no object. It's either a bpy object, or an exception
        importShape_ProcessObject(
            bpycollection, vrmlname, bpydata, geom, geom_spec,
            node, bpymat, tex_has_alpha, texmtx,
            ancestry, global_matrix)
    else:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
3168 # -----------------------------------------------------------------------------------
def importLamp_PointLight(node, ancestry):
    """Translate a PointLight node; returns (bpy light, placement matrix)."""
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'PointLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    mtx = Matrix.Translation(Vector(location))

    return bpylamp, mtx
def importLamp_DirectionalLight(node, ancestry):
    """Translate a DirectionalLight node; returns (bpy light, orientation matrix)."""
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO

    bpylamp = bpy.data.lights.new(vrmlname, 'SUN')
    bpylamp.energy = intensity
    bpylamp.color = color

    # lamps have their direction as -z, yup
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
3215 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Translate a SpotLight node; returns (bpy light, placement matrix)."""
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)  # max is documented to be 1.0 but some files have higher.
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0  # max is documented to be 1.0 but some files have higher.
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'SPOT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color
    bpylamp.spot_size = cutOffAngle
    if beamWidth > cutOffAngle:
        bpylamp.spot_blend = 0.0
    else:
        if cutOffAngle == 0.0:  # this should never happen!
            bpylamp.spot_blend = 0.5
        else:
            bpylamp.spot_blend = beamWidth / cutOffAngle

    # lamps have their direction as -z, y==up
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Dispatch on the light node spec, then wrap the light in a scene object."""
    if spec == 'PointLight':
        bpylamp, mtx = importLamp_PointLight(node, ancestry)
    elif spec == 'DirectionalLight':
        bpylamp, mtx = importLamp_DirectionalLight(node, ancestry)
    elif spec == 'SpotLight':
        bpylamp, mtx = importLamp_SpotLight(node, ancestry)
    else:
        print("Error, not a lamp")
        raise ValueError

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3273 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Translate an X3D Viewpoint into a Blender camera object."""
    name = node.getDefName()
    if not name:
        name = 'Viewpoint'

    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)  # max is documented to be 1.0 but some files have higher.
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)

    bpycam = bpy.data.cameras.new(name)

    bpycam.angle = fieldOfView

    mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Create an empty carrying a Transform node's matrix (hierarchy mode only)."""
    name = node.getDefName()
    if not name:
        name = 'Transform'

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # so they are not too annoying
    bpyob.empty_display_type = 'PLAIN_AXES'
    bpyob.empty_display_size = 0.2
3315 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the action's F-curve for (data_path, array_index), creating it if absent."""
    for fcu in action.fcurves:
        if fcu.data_path == data_path and fcu.array_index == array_index:
            return fcu

    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Convert a PositionInterpolator into linear location F-curve keyframes."""
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    loc_x = action_fcurve_ensure(action, "location", 0)
    loc_y = action_fcurve_ensure(action, "location", 1)
    loc_z = action_fcurve_ensure(action, "location", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            # Malformed/short keyValue entry: skip this key.
            continue

        loc_x.keyframe_points.insert(time, x)
        loc_y.keyframe_points.insert(time, y)
        loc_z.keyframe_points.insert(time, z)

    for fcu in (loc_x, loc_y, loc_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Convert an OrientationInterpolator (axis-angle keys) into euler F-curves."""
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
    rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
    rot_z = action_fcurve_ensure(action, "rotation_euler", 2)

    for i, time in enumerate(key):
        try:
            x, y, z, w = keyValue[i]
        except:
            # Malformed/short keyValue entry: skip this key.
            continue

        # Axis-angle -> matrix -> euler, matching the importer's rotation helper.
        mtx = translateRotation((x, y, z, w))
        eul = mtx.to_euler()
        rot_x.keyframe_points.insert(time, eul.x)
        rot_y.keyframe_points.insert(time, eul.y)
        rot_z.keyframe_points.insert(time, eul.z)

    for fcu in (rot_x, rot_y, rot_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateScalarInterpolator(node, action, ancestry):
    """Convert a scalar interpolator route target into scale F-curve keys."""
    key = node.getFieldAsArray('key', 0, ancestry)
    # NOTE(review): stride 4 is what the source requests, yet the loop
    # unpacks 3 values per entry — looks suspicious; confirm against the
    # node's actual keyValue layout before changing.
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            # Malformed/short keyValue entry: skip this key.
            continue

        sca_x.keyframe_points.new(time, x)
        sca_y.keyframe_points.new(time, y)
        sca_z.keyframe_points.new(time, z)
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """

    # NOTE(review): the body below uses the legacy 2.4x API
    # (action.addCurve, Blender.IpoCurve) and cannot run on current
    # Blender; the early return keeps it disabled. The guard lines were
    # lost in the source mangling — confirm against upstream.
    if 1:
        return

    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment
    """

    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # One shared action per animated target id, created on demand.
        try:
            action = routeIpoDict[act_id]
        except:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to each other

ROUTE vpPI.value_changed TO champFly001.set_position
ROUTE vpOI.value_changed TO champFly001.set_orientation
ROUTE vpTs.fraction_changed TO vpPI.set_fraction
ROUTE vpTs.fraction_changed TO vpOI.set_fraction
ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    set_position_node = None
    set_orientation_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except:
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
3500 # Used when adding blender primitives
3501 GLOBALS
['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV
3503 bpyscene
= bpycontext
.scene
3504 bpycollection
= bpycontext
.collection
3505 #root_node = vrml_parse('/_Cylinder.wrl')
3506 if filepath
.lower().endswith('.x3d'):
3507 root_node
, msg
= x3d_parse(filepath
)
3509 root_node
, msg
= vrml_parse(filepath
)
3515 if global_matrix
is None:
3516 global_matrix
= Matrix()
3518 # fill with tuples - (node, [parents-parent, parent])
3519 all_nodes
= root_node
.getSerialized([], [])
3521 for node
, ancestry
in all_nodes
:
3522 #if 'castle.wrl' not in node.getFilename():
3525 spec
= node
.getSpec()
3527 prefix = node.getPrefix()
3532 if HELPER_FUNC
and HELPER_FUNC(node
, ancestry
):
3533 # Note, include this function so the VRML/X3D importer can be extended
3534 # by an external script. - gets first pick
3537 importShape(bpycollection
, node
, ancestry
, global_matrix
)
3538 elif spec
in {'PointLight', 'DirectionalLight', 'SpotLight'}:
3539 importLamp(bpycollection
, node
, spec
, ancestry
, global_matrix
)
3540 elif spec
== 'Viewpoint':
3541 importViewpoint(bpycollection
, node
, ancestry
, global_matrix
)
3542 elif spec
== 'Transform':
3543 # Only use transform nodes when we are not importing a flat object hierarchy
3544 if PREF_FLAT
== False:
3545 importTransform(bpycollection
, node
, ancestry
, global_matrix
)
3547 # These are delt with later within importRoute
3548 elif spec=='PositionInterpolator':
3549 action = bpy.data.ipos.new('web3d_ipo', 'Object')
3550 translatePositionInterpolator(node, action)
3553 # After we import all nodes, route events - anim paths
3554 for node
, ancestry
in all_nodes
:
3555 importRoute(node
, ancestry
)
3557 for node
, ancestry
in all_nodes
:
3559 # we know that all nodes referenced from will be in
3560 # routeIpoDict so no need to run node.getDefDict() for every node.
3561 routeIpoDict
= node
.getRouteIpoDict()
3562 defDict
= node
.getDefDict()
3564 for key
, action
in routeIpoDict
.items():
3566 # Assign anim curves
3568 if node
.blendData
is None: # Add an object if we need one for animation
3569 node
.blendData
= node
.blendObject
= bpy
.data
.objects
.new('AnimOb', None) # , name)
3570 bpycollection
.objects
.link(node
.blendObject
)
3571 bpyob
.select_set(True)
3573 if node
.blendData
.animation_data
is None:
3574 node
.blendData
.animation_data_create()
3576 node
.blendData
.animation_data
.action
= action
3579 if PREF_FLAT
is False:
3581 for node
, ancestry
in all_nodes
:
3582 if node
.blendObject
:
3585 # Get the last parent
3589 blendObject
= ancestry
[i
].blendObject
3594 # Parent Slow, - 1 liner but works
3595 # blendObject.makeParent([node.blendObject], 0, 1)
3599 child_dict
[blendObject
].append(node
.blendObject
)
3601 child_dict
[blendObject
] = [node
.blendObject
]
3604 for parent
, children
in child_dict
.items():
3609 bpycontext
.view_layer
.update()
3613 def load_with_profiler(
3621 pro
= cProfile
.Profile()
3622 pro
.runctx("load_web3d(context, filepath, PREF_FLAT=True, "
3623 "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
3624 globals(), locals())
3625 st
= pstats
.Stats(pro
)
3626 st
.sort_stats("time")
3628 # st.print_callers(0.1)
3637 # loadWithProfiler(operator, context, filepath, global_matrix)
3638 load_web3d(context
, filepath
,
3641 global_matrix
=global_matrix
,