1 # SPDX-FileCopyrightText: 2011-2023 Blender Foundation
3 # SPDX-License-Identifier: GPL-2.0-or-later
7 # This should work without a blender at all
11 from math
import sin
, cos
, pi
12 from itertools
import chain
# Tolerance for float comparisons in this importer. Very crude.
EPSILON = 0.0000001
20 def imageConvertCompat(path
):
23 return path
# assume win32 has quicktime, dont convert
25 if path
.lower().endswith('.gif'):
26 path_to
= path
[:-3] + 'png'
32 # print('\n'+path+'\n'+path_to+'\n')
33 os
.system('convert "%s" "%s"' % (path
, path_to
)) # for now just hope we have image magick
35 if os
.path
.exists(path_to
):
41 # transform are relative
42 # order doesn't matter for loc/size/rot
43 # right handed rotation
44 # angles are in radians
45 # rotation first defines axis then amount in radians
48 # =============================== VRML Specific
50 def vrml_split_fields(value
):
52 key 0.0 otherkey 1,2,3 opt1 opt1 0.0
53 -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0]
56 if k
[0] != '"' and k
[0].isalpha() and k
.upper() not in {'TRUE', 'FALSE'}:
66 field_context_len
= len(field_context
)
67 if (field_context_len
> 2) and (field_context
[-2] in {'DEF', 'USE'}):
68 field_context
.append(v
)
69 elif (not iskey(field_context
[-1])) or ((field_context_len
== 3 and field_context
[1] == 'IS')):
70 # this IS a key but the previous value was not a key, or it was a defined field.
71 field_list
.append(field_context
)
74 # The last item was not a value, multiple keys are needed in some cases.
75 field_context
.append(v
)
77 # Is empty, just add this on
78 field_context
.append(v
)
80 # Add a value to the list
81 field_context
.append(v
)
84 field_list
.append(field_context
)
91 Keep this as a valid vrml file, but format in a way we can predict.
93 # Strip all comments - # not in strings - warning multiline strings are ignored.
95 #l = ' '.join(l.split())
106 # Most cases accounted for! if we have a comment at the end of the line do this...
110 if j
== -1: # simple no strings
114 for i
, c
in enumerate(l
):
124 data
= '\n'.join([strip_comment(l
) for l
in data
.split('\n')]) # remove all whitespace
126 EXTRACT_STRINGS
= True # only needed when strings or filename contains ,[]{} chars :/
130 # We need this so we can detect URL's
131 data
= '\n'.join([' '.join(l
.split()) for l
in data
.split('\n')]) # remove all whitespace
142 i
= data
.find(search
, last_i
)
145 start
= i
+ len(search
) # first char after end of search
146 end
= data
.find('"', start
)
148 item
= data
[start
:end
]
149 string_ls
.append(item
)
150 data
= data
[:start
] + data
[end
:]
151 ok
= True # keep looking
153 last_i
= (end
- len(item
)) + 1
154 # print(last_i, item, '|' + data[last_i] + '|')
156 # done with messy extracting strings part
158 # Bad, dont take strings into account
160 data = data.replace('#', '\n#')
161 data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')]) # remove all whitespace
163 data
= data
.replace('{', '\n{\n')
164 data
= data
.replace('}', '\n}\n')
165 data
= data
.replace('[', '\n[\n')
166 data
= data
.replace(']', '\n]\n')
167 data
= data
.replace(',', ' , ') # make sure comma's separate
169 # We need to write one property (field) per line only, otherwise we fail later to detect correctly new nodes.
170 # See T45195 for details.
171 data
= '\n'.join([' '.join(value
) for l
in data
.split('\n') for value
in vrml_split_fields(l
.split())])
174 # add strings back in
176 search
= '"' # fill in these empty strings
182 i
= data
.find(search
+ '"', last_i
)
185 start
= i
+ len(search
) # first char after end of search
186 item
= string_ls
.pop(0)
188 data
= data
[:start
] + item
+ data
[start
:]
190 last_i
= start
+ len(item
) + 1
194 # More annoying obscure cases where USE or DEF are placed on a newline
195 # data = data.replace('\nDEF ', ' DEF ')
196 # data = data.replace('\nUSE ', ' USE ')
198 data
= '\n'.join([' '.join(l
.split()) for l
in data
.split('\n')]) # remove all whitespace
200 # Better to parse the file accounting for multiline arrays
202 data = data.replace(',\n', ' , ') # remove line endings with commas
203 data = data.replace(']', '\n]\n') # very very annoying - but some comma's are at the end of the list, must run this again.
206 return [l
for l
in data
.split('\n') if l
]
NODE_REFERENCE = 3  # node is a reference: "USE foobar" (resolved later via the DEF namespace)
216 def getNodePreText(i
, words
):
219 while len(words
) < 5:
224 elif lines[i].startswith('PROTO'):
225 return NODE_PROTO, i+1
227 elif lines
[i
] == '{':
228 # words.append(lines[i]) # no need
230 return NODE_NORMAL
, i
+ 1
231 elif lines
[i
].count('"') % 2 != 0: # odd number of quotes? - part of a string.
235 new_words
= lines
[i
].split()
236 if 'USE' in new_words
:
239 words
.extend(new_words
)
242 # Check for USE node - no {
243 # USE #id - should always be on the same line.
245 # print('LINE', i, words[:words.index('USE')+2])
246 words
[:] = words
[:words
.index('USE') + 2]
247 if lines
[i
] == '{' and lines
[i
+ 1] == '}':
248 # USE sometimes has {} after it anyway
250 return NODE_REFERENCE
, i
252 # print("error value!!!", words)
256 def is_nodeline(i
, words
):
258 if not lines
[i
][0].isalpha():
261 #if lines[i].startswith('field'):
264 # Is this a prototype??
265 if lines
[i
].startswith('PROTO'):
266 words
[:] = lines
[i
].split()
267 return NODE_NORMAL
, i
+ 1 # TODO - assumes the next line is a '[\n', skip that
268 if lines
[i
].startswith('EXTERNPROTO'):
269 words
[:] = lines
[i
].split()
270 return NODE_ARRAY
, i
+ 1 # TODO - assumes the next line is a '[\n', skip that
273 proto_type, new_i = is_protoline(i, words, proto_field_defs)
275 return proto_type, new_i
278 # Simple "var [" type
279 if lines
[i
+ 1] == '[':
280 if lines
[i
].count('"') % 2 == 0:
281 words
[:] = lines
[i
].split()
282 return NODE_ARRAY
, i
+ 2
284 node_type
, new_i
= getNodePreText(i
, words
)
288 print("not node_type", lines
[i
])
291 # Ok, we have a { after some values
292 # Check the values are not fields
293 for i
, val
in enumerate(words
):
294 if i
!= 0 and words
[i
- 1] in {'DEF', 'USE'}:
295 # ignore anything after DEF, it is a ID and can contain any chars.
297 elif val
[0].isalpha() and val
not in {'TRUE', 'FALSE'}:
300 # There is a number in one of the values, therefore we are not a node.
303 #if node_type==NODE_REFERENCE:
304 # print(words, "REF_!!!!!!!")
305 return node_type
, new_i
310 Does this line start with a number?
313 # Works but too slow.
334 if l
.startswith(', '):
337 line_end
= len(l
) - 1
338 line_end_new
= l
.find(' ', line_start
) # comma's always have a space before them
340 if line_end_new
!= -1:
341 line_end
= line_end_new
344 float(l
[line_start
:line_end
]) # works for a float or int
350 class vrmlNode(object):
367 'ROUTE_IPO_NAMESPACE',
372 def __init__(self
, parent
, node_type
, lineno
):
374 self
.node_type
= node_type
376 self
.blendObject
= None
377 self
.blendData
= None
378 self
.x3dNode
= None # for x3d import only
379 self
.parsed
= None # We try to reuse objects in a smart way
381 parent
.children
.append(self
)
385 # This is only set from the root nodes.
386 # Having a filename also denotes a root node
388 self
.proto_node
= None # proto field definition eg: "field SFColor seatColor .6 .6 .1"
390 # Store in the root node because each inline file needs its own root node and its own namespace
391 self
.DEF_NAMESPACE
= None
392 self
.ROUTE_IPO_NAMESPACE
= None
394 self.FIELD_NAMESPACE = None
397 self
.PROTO_NAMESPACE
= None
399 self
.reference
= None
401 if node_type
== NODE_REFERENCE
:
402 # For references, only the parent and ID are needed
403 # the reference its self is assigned on parsing
406 self
.fields
= [] # fields have no order, in some cases rool level values are not unique so dont use a dict
408 self
.proto_field_defs
= [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
409 self
.proto_fields
= [] # proto field usage "diffuseColor IS seatColor"
411 self
.array_data
= [] # use for arrays of data - should only be for NODE_ARRAY types
413 # Only available from the root node
def getFieldDict(self):
    """Return the field namespace dict; it lives on the root node only."""
    ns = self.FIELD_NAMESPACE
    if ns is not None:
        return ns
    # Not the root: delegate up the parent chain.
    return self.parent.getFieldDict()
def getProtoDict(self):
    """Return the PROTO namespace dict, owned by the root node."""
    ns = self.PROTO_NAMESPACE
    if ns is None:
        # Walk up until we reach the root, which owns the namespace.
        return self.parent.getProtoDict()
    return ns
def getDefDict(self):
    """Return the DEF namespace dict (DEF name -> node), owned by the root node."""
    if self.DEF_NAMESPACE is None:
        # Only root nodes hold the dict; recurse toward the root.
        return self.parent.getDefDict()
    return self.DEF_NAMESPACE
def getRouteIpoDict(self):
    """Return the ROUTE ipo namespace dict, owned by the root node."""
    namespace = self.ROUTE_IPO_NAMESPACE
    if namespace is not None:
        return namespace
    # Delegate upward; the root is guaranteed to have the dict set.
    return self.parent.getRouteIpoDict()
def setRoot(self, filename):
    """Turn this node into a root node.

    Having a filename denotes a root node; roots also own the per-file
    namespaces used for DEF/USE, ROUTE ipos and PROTO lookups.
    """
    self.filename = filename
    # self.FIELD_NAMESPACE = {}
    for namespace_attr in ('DEF_NAMESPACE', 'ROUTE_IPO_NAMESPACE', 'PROTO_NAMESPACE'):
        setattr(self, namespace_attr, {})
447 if self
.filename
is None:
452 def getFilename(self
):
456 return self
.parent
.getFilename()
460 def getRealNode(self
):
462 return self
.reference
467 self_real
= self
.getRealNode()
469 return self_real
.id[-1] # its possible this node has no spec
473 def findSpecRecursive(self
, spec
):
474 self_real
= self
.getRealNode()
475 if spec
== self_real
.getSpec():
478 for child
in self_real
.children
:
479 if child
.findSpecRecursive(spec
):
489 def getSpecialTypeName(self
, typename
):
490 self_real
= self
.getRealNode()
492 return self_real
.id[list(self_real
.id).index(typename
) + 1]
def getDefName(self):
    """Return the DEF identifier of this node (the ID token following 'DEF')."""
    return self.getSpecialTypeName('DEF')
def getProtoName(self):
    """Return the PROTO identifier of this node (the ID token following 'PROTO')."""
    return self.getSpecialTypeName('PROTO')
def getExternprotoName(self):
    """Return the EXTERNPROTO identifier of this node (the ID token following 'EXTERNPROTO')."""
    return self.getSpecialTypeName('EXTERNPROTO')
def getChildrenBySpec(self, node_spec):  # spec could be Transform, Shape, Appearance
    """Return all children whose spec matches node_spec (a str or a collection of specs)."""
    # Using getSpec() lets us match USE children that don't carry their spec in their own ID.
    children = self.getRealNode().children
    if type(node_spec) == str:
        return [c for c in children if c.getSpec() == node_spec]
    # node_spec is a collection of acceptable spec names.
    return [c for c in children if c.getSpec() in node_spec]
def getChildrenBySpecCondition(self, cond):  # spec could be Transform, Shape, Appearance
    """Return all children whose spec satisfies the predicate cond."""
    # getSpec() resolves the spec of USE children correctly.
    matches = []
    for child in self.getRealNode().children:
        if cond(child.getSpec()):
            matches.append(child)
    return matches
519 def getChildBySpec(self
, node_spec
): # spec could be Transform, Shape, Appearance
520 # Use in cases where there is only ever 1 child of this type
521 ls
= self
.getChildrenBySpec(node_spec
)
527 def getChildBySpecCondition(self
, cond
): # spec could be Transform, Shape, Appearance
528 # Use in cases where there is only ever 1 child of this type
529 ls
= self
.getChildrenBySpecCondition(cond
)
def getChildrenByName(self, node_name):  # type could be geometry, children, appearance
    """Return all children whose ID starts with node_name (children with no ID are skipped)."""
    result = []
    for child in self.getRealNode().children:
        if child.id and child.id[0] == node_name:
            result.append(child)
    return result
539 def getChildByName(self
, node_name
):
540 self_real
= self
.getRealNode()
541 for child
in self_real
.children
:
542 if child
.id and child
.id[0] == node_name
: # and child.id[-1]==node_spec:
545 def getSerialized(self
, results
, ancestry
):
546 """ Return this node and all its children in a flat list """
547 ancestry
= ancestry
[:] # always use a copy
549 # self_real = self.getRealNode()
551 results
.append((self
, tuple(ancestry
)))
552 ancestry
.append(self
)
553 for child
in self
.getRealNode().children
:
554 if child
not in ancestry
:
555 # We dont want to load proto's, they are only references
556 # We could enforce this elsewhere
558 # Only add this in a very special case
559 # where the parent of this object is not the real parent
560 # - In this case we have added the proto as a child to a node instancing it.
561 # This is a bit arbitrary, but its how Proto's are done with this importer.
562 if child
.getProtoName() is None and child
.getExternprotoName() is None:
563 child
.getSerialized(results
, ancestry
)
567 print('getSerialized() is proto:', child
.getProtoName(), child
.getExternprotoName(), self
.getSpec())
569 self_spec
= self
.getSpec()
571 if child
.getProtoName() == self_spec
or child
.getExternprotoName() == self_spec
:
574 child
.getSerialized(results
, ancestry
)
578 def searchNodeTypeID(self
, node_spec
, results
):
579 self_real
= self
.getRealNode()
580 # print(self.lineno, self.id)
581 if self_real
.id and self_real
.id[-1] == node_spec
: # use last element, could also be only element
582 results
.append(self_real
)
583 for child
in self_real
.children
:
584 child
.searchNodeTypeID(node_spec
, results
)
587 def getFieldName(self
, field
, ancestry
, AS_CHILD
=False, SPLIT_COMMAS
=False):
588 self_real
= self
.getRealNode() # in case we're an instance
590 for f
in self_real
.fields
:
592 if f
and f
[0] == field
:
593 # print('\tfound field', f)
595 if len(f
) >= 3 and f
[1] == 'IS': # eg: 'diffuseColor IS legColor'
598 # print("\n\n\n\n\n\nFOND IS!!!")
599 f_proto_lookup
= None
600 f_proto_child_lookup
= None
605 node
= node
.getRealNode()
607 # proto settings are stored in "self.proto_node"
609 # Get the default value from the proto, this can be overwritten by the proto instance
610 # 'field SFColor legColor .8 .4 .7'
612 for child
in node
.proto_node
.children
:
613 #if child.id and len(child.id) >= 3 and child.id[2]==field_id:
614 if child
.id and ('point' in child
.id or 'points' in child
.id):
615 f_proto_child_lookup
= child
618 for f_def
in node
.proto_node
.proto_field_defs
:
620 if f_def
[0] == 'field' and f_def
[2] == field_id
:
621 f_proto_lookup
= f_def
[3:]
623 # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent.
624 # This is the setting as defined by the instance, including this setting is optional,
625 # and will override the default PROTO value
626 # eg: 'legColor 1 0 0'
628 for child
in node
.children
:
629 if child
.id and child
.id[0] == field_id
:
630 f_proto_child_lookup
= child
632 for f_def
in node
.fields
:
634 if f_def
[0] == field_id
:
636 print("getFieldName(), found proto", f_def
)
637 f_proto_lookup
= f_def
[1:]
640 if f_proto_child_lookup
:
642 print("getFieldName() - AS_CHILD=True, child found")
643 print(f_proto_child_lookup
)
644 return f_proto_child_lookup
646 return f_proto_lookup
653 # print('\tfield not found', field)
655 # See if this is a proto name
657 for child
in self_real
.children
:
658 if child
.id and len(child
.id) == 1 and child
.id[0] == field
:
663 def getFieldAsInt(self
, field
, default
, ancestry
):
664 self_real
= self
.getRealNode() # in case we're an instance
666 f
= self_real
.getFieldName(field
, ancestry
)
670 f
= f
[:f
.index(',')] # strip after the comma
673 print('\t"%s" wrong length for int conversion for field "%s"' % (f
, field
))
679 print('\tvalue "%s" could not be used as an int for field "%s"' % (f
[0], field
))
682 def getFieldAsFloat(self
, field
, default
, ancestry
):
683 self_real
= self
.getRealNode() # in case we're an instance
685 f
= self_real
.getFieldName(field
, ancestry
)
689 f
= f
[:f
.index(',')] # strip after the comma
692 print('\t"%s" wrong length for float conversion for field "%s"' % (f
, field
))
698 print('\tvalue "%s" could not be used as a float for field "%s"' % (f
[0], field
))
701 def getFieldAsFloatTuple(self
, field
, default
, ancestry
):
702 self_real
= self
.getRealNode() # in case we're an instance
704 f
= self_real
.getFieldName(field
, ancestry
)
707 # if ',' in f: f = f[:f.index(',')] # strip after the comma
710 print('"%s" wrong length for float tuple conversion for field "%s"' % (f
, field
))
719 break # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO
725 print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f
, field
))
728 def getFieldAsBool(self
, field
, default
, ancestry
):
729 self_real
= self
.getRealNode() # in case we're an instance
731 f
= self_real
.getFieldName(field
, ancestry
)
735 f
= f
[:f
.index(',')] # strip after the comma
738 print('\t"%s" wrong length for bool conversion for field "%s"' % (f
, field
))
741 if f
[0].upper() == '"TRUE"' or f
[0].upper() == 'TRUE':
743 elif f
[0].upper() == '"FALSE"' or f
[0].upper() == 'FALSE':
746 print('\t"%s" could not be used as a bool for field "%s"' % (f
[1], field
))
749 def getFieldAsString(self
, field
, default
, ancestry
):
750 self_real
= self
.getRealNode() # in case we're an instance
752 f
= self_real
.getFieldName(field
, ancestry
)
756 print('\t"%s" wrong length for string conversion for field "%s"' % (f
, field
))
760 # String may contain spaces
769 if st
[0] == '"' and st
[-1] == '"':
772 print('\tvalue "%s" could not be used as a string for field "%s"' % (f
[0], field
))
775 def getFieldAsArray(self
, field
, group
, ancestry
):
777 For this parser arrays are children
780 def array_as_number(array_string
):
783 array_data
= [int(val
, 0) for val
in array_string
]
786 array_data
= [float(val
) for val
in array_string
]
788 print('\tWarning, could not parse array data from field')
792 self_real
= self
.getRealNode() # in case we're an instance
794 child_array
= self_real
.getFieldName(field
, ancestry
, True, SPLIT_COMMAS
=True)
796 #if type(child_array)==list: # happens occasionally
797 # array_data = child_array
799 if child_array
is None:
800 # For x3d, should work ok with vrml too
801 # for x3d arrays are fields, vrml they are nodes, annoying but not too bad.
802 data_split
= self
.getFieldName(field
, ancestry
, SPLIT_COMMAS
=True)
806 array_data
= array_as_number(data_split
)
808 elif type(child_array
) == list:
810 array_data
= array_as_number(child_array
)
814 array_data
= child_array
.array_data
816 # print('array_data', array_data)
817 if group
== -1 or len(array_data
) == 0:
820 # We want a flat list
822 for item
in array_data
:
823 if type(item
) == list:
829 flat_array
= array_data
# we are already flat.
835 if type(item
) == list:
838 flat_array
.append(item
)
840 extend_flat(array_data
)
842 # We requested a flat array
849 for item
in flat_array
:
850 sub_array
.append(item
)
851 if len(sub_array
) == group
:
852 new_array
.append(sub_array
)
856 print('\twarning, array was not aligned to requested grouping', group
, 'remaining value', sub_array
)
860 def getFieldAsStringArray(self
, field
, ancestry
):
862 Get a list of strings
864 self_real
= self
.getRealNode() # in case we're an instance
867 for child
in self_real
.children
:
868 if child
.id and len(child
.id) == 1 and child
.id[0] == field
:
874 # each string gets its own list, remove ""'s
876 new_array
= [f
[0][1:-1] for f
in child_array
.fields
]
878 print('\twarning, string array could not be made')
896 level
= self
.getLevel()
898 if self
.node_type
== NODE_REFERENCE
:
900 elif self
.node_type
== NODE_NORMAL
:
906 text
= ind
+ brackets
[0] + '\n'
910 text
+= ind
+ 'ID: ' + str(self
.id) + ' ' + str(level
) + (' lineno %d\n' % self
.lineno
)
912 if self
.node_type
== NODE_REFERENCE
:
913 text
+= ind
+ "(reference node)\n"
917 text
+= ind
+ 'PROTO NODE...\n'
918 text
+= str(self
.proto_node
)
919 text
+= ind
+ 'PROTO NODE_DONE\n'
921 text
+= ind
+ 'FIELDS:' + str(len(self
.fields
)) + '\n'
923 for i
, item
in enumerate(self
.fields
):
924 text
+= ind
+ 'FIELD:\n'
925 text
+= ind
+ str(item
) + '\n'
927 text
+= ind
+ 'PROTO_FIELD_DEFS:' + str(len(self
.proto_field_defs
)) + '\n'
929 for i
, item
in enumerate(self
.proto_field_defs
):
930 text
+= ind
+ 'PROTO_FIELD:\n'
931 text
+= ind
+ str(item
) + '\n'
933 text
+= ind
+ 'ARRAY: ' + str(len(self
.array_data
)) + ' ' + str(self
.array_data
) + '\n'
934 #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n'
936 text
+= ind
+ 'CHILDREN: ' + str(len(self
.children
)) + '\n'
937 for i
, child
in enumerate(self
.children
):
938 text
+= ind
+ ('CHILD%d:\n' % i
)
941 text
+= '\n' + ind
+ brackets
[1]
945 def parse(self
, i
, IS_PROTO_DATA
=False):
946 new_i
= self
.__parse
(i
, IS_PROTO_DATA
)
948 # print(self.id, self.getFilename())
950 # Check if this node was an inline or externproto
954 if self
.node_type
== NODE_NORMAL
and self
.getSpec() == 'Inline':
955 ancestry
= [] # Warning! - PROTO's using this wont work at all.
956 url
= self
.getFieldAsString('url', None, ancestry
)
958 url_ls
= [(url
, None)]
961 elif self
.getExternprotoName():
964 for f
in self
.fields
:
970 for f_split
in ff
.split('"'):
972 # "someextern.vrml#SomeID"
975 f_split
, f_split_id
= f_split
.split('#') # there should only be 1 # anyway
977 url_ls
.append((f_split
, f_split_id
))
979 url_ls
.append((f_split
, None))
981 # Was either an Inline or an EXTERNPROTO
986 for url
, extern_key
in url_ls
:
990 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
992 urls
.append(os
.path
.join(os
.path
.dirname(self
.getFilename()), url
))
993 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
995 urls
.append(os
.path
.join(os
.path
.dirname(self
.getFilename()), os
.path
.basename(url
)))
996 urls
.append(bpy
.path
.resolve_ncase(urls
[-1]))
999 url
= [url
for url
in urls
if os
.path
.exists(url
)][0]
1005 print('\tWarning: Inline URL could not be found:', url
)
1007 if url
== self
.getFilename():
1008 print('\tWarning: can\'t Inline yourself recursively:', url
)
1012 data
= gzipOpen(url
)
1014 print('\tWarning: can\'t open the file:', url
)
1018 # Tricky - inline another VRML
1019 print('\tLoading Inline:"%s"...' % url
)
1021 # Watch it! - backup lines
1022 lines_old
= lines
[:]
1024 lines
[:] = vrmlFormat(data
)
1026 lines
.insert(0, '{')
1027 lines
.insert(0, 'root_node____')
1030 ff = open('/tmp/test.txt', 'w')
1031 ff.writelines([l+'\n' for l in lines])
1034 child
= vrmlNode(self
, NODE_NORMAL
, -1)
1035 child
.setRoot(url
) # initialized dicts
1038 # if self.getExternprotoName():
1039 if self
.getExternprotoName():
1040 if not extern_key
: # if none is specified - use the name
1041 extern_key
= self
.getSpec()
1045 self
.children
.remove(child
)
1048 extern_child
= child
.findSpecRecursive(extern_key
)
1051 self
.children
.append(extern_child
)
1052 extern_child
.parent
= self
1055 print("\tEXTERNPROTO ID found!:", extern_key
)
1057 print("\tEXTERNPROTO ID not found!:", extern_key
)
1059 # Watch it! - restore lines
1060 lines
[:] = lines_old
1064 def __parse(self
, i
, IS_PROTO_DATA
=False):
1066 print('parsing at', i, end="")
1067 print(i, self.id, self.lineno)
1078 node_type
, new_i
= is_nodeline(i
, words
)
1079 if not node_type
: # fail for parsing new node.
1080 print("Failed to parse new node")
1083 if self
.node_type
== NODE_REFERENCE
:
1084 # Only assign the reference and quit
1085 key
= words
[words
.index('USE') + 1]
1086 self
.id = (words
[0],)
1088 self
.reference
= self
.getDefDict()[key
]
1091 self
.id = tuple(words
)
1094 key
= self
.getDefName()
1096 self
.getDefDict()[key
] = self
1098 key
= self
.getProtoName()
1100 key
= self
.getExternprotoName()
1102 proto_dict
= self
.getProtoDict()
1104 proto_dict
[key
] = self
1106 # Parse the proto nodes fields
1107 self
.proto_node
= vrmlNode(self
, NODE_ARRAY
, new_i
)
1108 new_i
= self
.proto_node
.parse(new_i
)
1110 self
.children
.remove(self
.proto_node
)
1112 # print(self.proto_node)
1114 new_i
+= 1 # skip past the {
1116 else: # If we're a proto instance, add the proto node as our child.
1117 spec
= self
.getSpec()
1119 self
.children
.append(proto_dict
[spec
])
1134 return len(lines
) - 1
1137 # print('\tDEBUG:', i, self.node_type, l)
1143 if self
.node_type
!= NODE_NORMAL
: # also ends proto nodes, we may want a type for these too.
1144 print('wrong node ending, expected an } ' + str(i
) + ' ' + str(self
.node_type
))
1147 ### print("returning", i)
1150 if self
.node_type
!= NODE_ARRAY
:
1151 print('wrong node ending, expected a ] ' + str(i
) + ' ' + str(self
.node_type
))
1154 ### print("returning", i)
1157 node_type
, new_i
= is_nodeline(i
, [])
1158 if node_type
: # check text\n{
1159 child
= vrmlNode(self
, node_type
, i
)
1162 elif l
== '[': # some files have these anonymous lists
1163 child
= vrmlNode(self
, NODE_ARRAY
, i
)
1167 l_split
= l
.split(',')
1170 # See if each item is a float?
1172 for num_type
in (int, float):
1174 values
= [num_type(v
) for v
in l_split
]
1180 values
= [[num_type(v
) for v
in segment
.split()] for segment
in l_split
]
1185 if values
is None: # dont parse
1188 # This should not extend over multiple lines however it is possible
1189 # print(self.array_data)
1191 self
.array_data
.extend(values
)
1195 if len(words
) > 2 and words
[1] == 'USE':
1196 vrmlNode(self
, NODE_REFERENCE
, i
)
1199 # print("FIELD", i, l)
1202 ### print('\t\ttag', i)
1204 # print(words, i, l)
1207 # javastrips can exist as values.
1208 quote_count
= l
.count('"')
1209 if quote_count
% 2: # odd number?
1210 # print('MULTILINE')
1214 quote_count
= l
.count('"')
1215 if quote_count
% 2: # odd number?
1216 value
+= '\n' + l
[:l
.rfind('"')]
1221 # use shlex so we get '"a b" "b v"' --> '"a b"', '"b v"'
1222 value_all
= shlex
.split(value
, posix
=False)
1224 for value
in vrml_split_fields(value_all
):
1227 if value
[0] == 'field':
1228 # field SFFloat creaseAngle 4
1229 self
.proto_field_defs
.append(value
)
1231 self
.fields
.append(value
)
# This is a prerequisite for DEF/USE-based material caching
def canHaveReferences(self):
    # Only a normal node that carries a DEF name can be the target of a USE reference.
    # NOTE: returns the DEF name (truthy str) or a falsy value, not a strict bool.
    return self.node_type == NODE_NORMAL and self.getDefName()
1238 # This is a prerequisite for raw XML-based material caching.
1239 # NOTE - crude, but working implementation for
1240 # material and texture caching, based on __repr__.
1241 # Doesn't do any XML, but is better than nothing.
1243 if "material" in self
.id or "texture" in self
.id:
1244 node
= self
.reference
if self
.node_type
== NODE_REFERENCE
else self
1245 return frozenset(line
.strip() for line
in repr(node
).strip().split("\n"))
1255 data
= gzip
.open(path
, 'r').read()
1261 filehandle
= open(path
, 'r', encoding
='utf-8', errors
='surrogateescape')
1262 data
= filehandle
.read()
1266 traceback
.print_exc()
1268 data
= data
.decode(encoding
='utf-8', errors
='surrogateescape')
1273 def vrml_parse(path
):
1275 Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
1276 Return root (vrmlNode, '') or (None, 'Error String')
1278 data
= gzipOpen(path
)
1281 return None, 'Failed to open file: ' + path
1284 lines
[:] = vrmlFormat(data
)
1286 lines
.insert(0, '{')
1287 lines
.insert(0, 'dymmy_node')
1289 # Use for testing our parsed output, so we can check on line numbers.
1292 ff = open('/tmp/test.txt', 'w')
1293 ff.writelines([l+'\n' for l in lines])
1298 node_type
, new_i
= is_nodeline(0, [])
1300 return None, 'Error: VRML file has no starting Node'
1302 # Trick to make sure we get all root nodes.
1303 lines
.insert(0, '{')
1304 lines
.insert(0, 'root_node____') # important the name starts with an ascii char
1307 root
= vrmlNode(None, NODE_NORMAL
, -1)
1308 root
.setRoot(path
) # we need to set the root so we have a namespace and know the path in case of inlineing
1313 # This prints a load of text
1320 # ====================== END VRML
1322 # ====================== X3d Support
1324 # Same as vrml but replace the parser
1325 class x3dNode(vrmlNode
):
def __init__(self, parent, node_type, x3dNode):
    # Reuse the VRML initializer; X3D has no text line numbers at this point, so pass -1
    # (parse() later fills self.lineno from the DOM node's parse_position).
    vrmlNode.__init__(self, parent, node_type, -1)
    # Keep the underlying XML DOM element; must be set after the base __init__,
    # which initializes the attribute.
    self.x3dNode = x3dNode
1330 def parse(self
, IS_PROTO_DATA
=False):
1331 # print(self.x3dNode.tagName)
1332 self
.lineno
= self
.x3dNode
.parse_position
[0]
1334 define
= self
.x3dNode
.getAttributeNode('DEF')
1336 self
.getDefDict()[define
.value
] = self
1338 use
= self
.x3dNode
.getAttributeNode('USE')
1341 self
.reference
= self
.getDefDict()[use
.value
]
1342 self
.node_type
= NODE_REFERENCE
1344 print('\tWarning: reference', use
.value
, 'not found')
1345 self
.parent
.children
.remove(self
)
1349 for x3dChildNode
in self
.x3dNode
.childNodes
:
1350 if x3dChildNode
.nodeType
in {x3dChildNode
.TEXT_NODE
, x3dChildNode
.COMMENT_NODE
, x3dChildNode
.CDATA_SECTION_NODE
}:
1353 node_type
= NODE_NORMAL
1354 # print(x3dChildNode, dir(x3dChildNode))
1355 if x3dChildNode
.getAttributeNode('USE'):
1356 node_type
= NODE_REFERENCE
1358 child
= x3dNode(self
, node_type
, x3dChildNode
)
1364 return self
.x3dNode
.tagName
# should match vrml spec
1366 # Used to retain object identifiers from X3D to Blender
1367 def getDefName(self
):
1368 node_id
= self
.x3dNode
.getAttributeNode('DEF')
1370 return node_id
.value
1371 node_id
= self
.x3dNode
.getAttributeNode('USE')
1373 return "USE_" + node_id
.value
1376 # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs
1377 # getFieldAsArray getFieldAsBool etc
1378 def getFieldName(self
, field
, ancestry
, AS_CHILD
=False, SPLIT_COMMAS
=False):
1379 # ancestry and AS_CHILD are ignored, only used for VRML now
1381 self_real
= self
.getRealNode() # in case we're an instance
1382 field_xml
= self
.x3dNode
.getAttributeNode(field
)
1384 value
= field_xml
.value
1386 # We may want to edit. for x3d specific stuff
1387 # Sucks a bit to return the field name in the list but vrml excepts this :/
1389 value
= value
.replace(",", " ")
1390 return value
.split()
def canHaveReferences(self):
    # An X3D element can be shared (USE'd) only when it declares a DEF attribute.
    # Returns the Attr node (truthy) or None, not a strict bool.
    return self.x3dNode.getAttributeNode('DEF')
1398 return self
.getRealNode().x3dNode
.toxml()
1401 def x3d_parse(path
):
1403 Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
1404 Return root (x3dNode, '') or (None, 'Error String')
1406 import xml
.dom
.minidom
1408 from xml
.sax
import handler
1411 try: doc = xml.dom.minidom.parse(path)
1412 except: return None, 'Could not parse this X3D file, XML error'
1415 # Could add a try/except here, but a console error is more useful.
1416 data
= gzipOpen(path
)
1419 return None, 'Failed to open file: ' + path
1421 # Enable line number reporting in the parser - kinda brittle
1422 def set_content_handler(dom_handler
):
1423 def startElementNS(name
, tagName
, attrs
):
1424 orig_start_cb(name
, tagName
, attrs
)
1425 cur_elem
= dom_handler
.elementStack
[-1]
1426 cur_elem
.parse_position
= (parser
._parser
.CurrentLineNumber
, parser
._parser
.CurrentColumnNumber
)
1428 orig_start_cb
= dom_handler
.startElementNS
1429 dom_handler
.startElementNS
= startElementNS
1430 orig_set_content_handler(dom_handler
)
1432 parser
= xml
.sax
.make_parser()
1433 orig_set_content_handler
= parser
.setContentHandler
1434 parser
.setFeature(handler
.feature_external_ges
, False)
1435 parser
.setFeature(handler
.feature_external_pes
, False)
1436 parser
.setContentHandler
= set_content_handler
1438 doc
= xml
.dom
.minidom
.parseString(data
, parser
)
1441 x3dnode
= doc
.getElementsByTagName('X3D')[0]
1443 return None, 'Not a valid x3d document, cannot import'
1445 bpy
.ops
.object.select_all(action
='DESELECT')
1447 root
= x3dNode(None, NODE_NORMAL
, x3dnode
)
1448 root
.setRoot(path
) # so images and Inline's we load have a relative path
1453 ## f = open('/_Cylinder.wrl', 'r')
1454 # f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r')
1455 # vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL')
1456 #vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL')
1459 files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
1462 for i, f in enumerate(files):
1471 # NO BLENDER CODE ABOVE THIS LINE.
1472 # -----------------------------------------------------------------------------------
1474 from bpy_extras
import image_utils
, node_shader_utils
1475 from mathutils
import Vector
, Matrix
, Quaternion
1477 GLOBALS
= {'CIRCLE_DETAIL': 16}
def translateRotation(rot):
    """Build a 4x4 rotation matrix from a VRML/X3D SFRotation value.

    *rot* is a 4-sequence: a rotation axis (x, y, z) followed by an
    angle in radians, as used by the Transform 'rotation' field.
    """
    axis = Vector(rot[:3])
    angle = rot[3]
    return Matrix.Rotation(angle, 4, axis)
1485 def translateScale(sca
):
1486 mat
= Matrix() # 4x4 default
1493 def translateTransform(node
, ancestry
):
1494 cent
= node
.getFieldAsFloatTuple('center', None, ancestry
) # (0.0, 0.0, 0.0)
1495 rot
= node
.getFieldAsFloatTuple('rotation', None, ancestry
) # (0.0, 0.0, 1.0, 0.0)
1496 sca
= node
.getFieldAsFloatTuple('scale', None, ancestry
) # (1.0, 1.0, 1.0)
1497 scaori
= node
.getFieldAsFloatTuple('scaleOrientation', None, ancestry
) # (0.0, 0.0, 1.0, 0.0)
1498 tx
= node
.getFieldAsFloatTuple('translation', None, ancestry
) # (0.0, 0.0, 0.0)
1501 cent_mat
= Matrix
.Translation(cent
)
1502 cent_imat
= cent_mat
.inverted()
1504 cent_mat
= cent_imat
= None
1507 rot_mat
= translateRotation(rot
)
1512 sca_mat
= translateScale(sca
)
1517 scaori_mat
= translateRotation(scaori
)
1518 scaori_imat
= scaori_mat
.inverted()
1520 scaori_mat
= scaori_imat
= None
1523 tx_mat
= Matrix
.Translation(tx
)
1529 mats
= [tx_mat
, cent_mat
, rot_mat
, scaori_mat
, sca_mat
, scaori_imat
, cent_imat
]
1532 new_mat
= new_mat
@ mtx
1537 def translateTexTransform(node
, ancestry
):
1538 cent
= node
.getFieldAsFloatTuple('center', None, ancestry
) # (0.0, 0.0)
1539 rot
= node
.getFieldAsFloat('rotation', None, ancestry
) # 0.0
1540 sca
= node
.getFieldAsFloatTuple('scale', None, ancestry
) # (1.0, 1.0)
1541 tx
= node
.getFieldAsFloatTuple('translation', None, ancestry
) # (0.0, 0.0)
1544 # cent is at a corner by default
1545 cent_mat
= Matrix
.Translation(Vector(cent
).to_3d())
1546 cent_imat
= cent_mat
.inverted()
1548 cent_mat
= cent_imat
= None
1551 rot_mat
= Matrix
.Rotation(rot
, 4, 'Z') # translateRotation(rot)
1556 sca_mat
= translateScale((sca
[0], sca
[1], 0.0))
1561 tx_mat
= Matrix
.Translation(Vector(tx
).to_3d())
1567 # as specified in VRML97 docs
1568 mats
= [cent_imat
, sca_mat
, rot_mat
, cent_mat
, tx_mat
]
1572 new_mat
= new_mat
@ mtx
1576 def getFinalMatrix(node
, mtx
, ancestry
, global_matrix
):
1578 transform_nodes
= [node_tx
for node_tx
in ancestry
if node_tx
.getSpec() == 'Transform']
1579 if node
.getSpec() == 'Transform':
1580 transform_nodes
.append(node
)
1581 transform_nodes
.reverse()
1586 for node_tx
in transform_nodes
:
1587 mat
= translateTransform(node_tx
, ancestry
)
1591 mtx
= global_matrix
@ mtx
1596 # -----------------------------------------------------------------------------------
1597 # Mesh import utilities
1599 # Assumes that the mesh has polygons.
1600 def importMesh_ApplyColors(bpymesh
, geom
, ancestry
):
1601 colors
= geom
.getChildBySpec(['ColorRGBA', 'Color'])
1603 if colors
.getSpec() == 'ColorRGBA':
1604 rgb
= colors
.getFieldAsArray('color', 4, ancestry
)
1606 # Array of arrays; no need to flatten
1607 rgb
= [c
+ [1.0] for c
in colors
.getFieldAsArray('color', 3, ancestry
)]
1608 lcol_layer
= bpymesh
.vertex_colors
.new()
1610 if len(rgb
) == len(bpymesh
.vertices
):
1611 rgb
= [rgb
[l
.vertex_index
] for l
in bpymesh
.loops
]
1612 rgb
= tuple(chain(*rgb
))
1613 elif len(rgb
) == len(bpymesh
.loops
):
1614 rgb
= tuple(chain(*rgb
))
1617 "WARNING not applying vertex colors, non matching numbers of vertices or loops (%d vs %d/%d)" %
1618 (len(rgb
), len(bpymesh
.vertices
), len(bpymesh
.loops
))
1622 lcol_layer
.data
.foreach_set("color", rgb
)
1625 # Assumes that the vertices have not been rearranged compared to the
1626 # source file order # or in the order assumed by the spec (e. g. in
1627 # Elevation, in rows by x).
1628 # Assumes polygons have been set.
1629 def importMesh_ApplyNormals(bpymesh
, geom
, ancestry
):
1630 normals
= geom
.getChildBySpec('Normal')
1634 per_vertex
= geom
.getFieldAsBool('normalPerVertex', True, ancestry
)
1635 vectors
= normals
.getFieldAsArray('vector', 0, ancestry
)
1637 bpymesh
.vertices
.foreach_set("normal", vectors
)
1639 bpymesh
.polygons
.foreach_set("normal", vectors
)
1642 # Reads the standard Coordinate object - common for all mesh elements
1643 # Feeds the vertices in the mesh.
1644 # Rearranging the vertex order is a bad idea - other elements
1645 # in X3D might rely on it, if you need to rearrange, please play with
1646 # vertex indices in the polygons instead.
1648 # Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
1649 # brought forth by a very specific issue.
def importMesh_ReadVertices(bpymesh, geom, ancestry):
    """Read the standard Coordinate child of *geom* and feed its points
    into the vertex array of *bpymesh*.

    Points are requested here as a flat float array, while the caching
    logic in IndexedFaceSet presumes a 2D one; the case for caching is
    stronger over there, so the two cannot share a cached array.
    """
    coord_node = geom.getChildBySpec('Coordinate')
    flat_points = coord_node.getFieldAsArray('point', 0, ancestry)
    # Three floats per vertex in the flat array.
    bpymesh.vertices.add(len(flat_points) // 3)
    bpymesh.vertices.foreach_set("co", flat_points)
1660 # Assumes that the order of vertices matches the source file.
1661 # Relies upon texture coordinates in the X3D node; if a coordinate generation
1662 # algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
1663 # to be implemented by the geometry handler.
1665 # Texture transform is applied in ProcessObject.
1666 def importMesh_ApplyUVs(bpymesh
, geom
, ancestry
):
1667 tex_coord
= geom
.getChildBySpec('TextureCoordinate')
1671 uvs
= tex_coord
.getFieldAsArray('point', 2, ancestry
)
1675 d
= bpymesh
.uv_layers
.new().data
1676 uvs
= [i
for poly
in bpymesh
.polygons
1677 for vidx
in poly
.vertices
1679 d
.foreach_set('uv', uvs
)
1682 # Common steps for all triangle meshes once the geometry has been set:
1683 # normals, vertex colors, and UVs.
1684 def importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
):
1685 importMesh_ApplyNormals(bpymesh
, geom
, ancestry
)
1686 importMesh_ApplyColors(bpymesh
, geom
, ancestry
)
1687 importMesh_ApplyUVs(bpymesh
, geom
, ancestry
)
1693 # Assumes that the mesh is stored as polygons and loops, and the premade array
1694 # of texture coordinates follows the loop array.
1695 # The loops array must be flat.
def importMesh_ApplyTextureToLoops(bpymesh, loops):
    """Assign premade UV coordinates to a fresh UV layer of *bpymesh*.

    Assumes the mesh is stored as polygons and loops, and that *loops*
    is a flat sequence of texture coordinates following the loop array
    order.
    """
    uv_layer = bpymesh.uv_layers.new()
    uv_layer.data.foreach_set('uv', loops)
1702 return r
if ccw
else r
[::-1]
1704 # -----------------------------------------------------------------------------------
1705 # Now specific geometry importers
1708 def importMesh_IndexedTriangleSet(geom
, ancestry
):
1710 # colorPerVertex is always true
1711 ccw
= geom
.getFieldAsBool('ccw', True, ancestry
)
1713 bpymesh
= bpy
.data
.meshes
.new(name
="XXX")
1714 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1717 index
= geom
.getFieldAsArray('index', 0, ancestry
)
1718 num_polys
= len(index
) // 3
1720 index
= [index
[3 * i
+ j
] for i
in range(num_polys
) for j
in (1, 0, 2)]
1722 bpymesh
.loops
.add(num_polys
* 3)
1723 bpymesh
.polygons
.add(num_polys
)
1724 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1725 bpymesh
.polygons
.foreach_set("vertices", index
)
1727 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1730 def importMesh_IndexedTriangleStripSet(geom
, ancestry
):
1732 # colorPerVertex is always true
1733 cw
= 0 if geom
.getFieldAsBool('ccw', True, ancestry
) else 1
1734 bpymesh
= bpy
.data
.meshes
.new(name
="IndexedTriangleStripSet")
1735 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1738 index
= geom
.getFieldAsArray('index', 0, ancestry
)
1739 while index
[-1] == -1:
1741 ngaps
= sum(1 for i
in index
if i
== -1)
1742 num_polys
= len(index
) - 2 - 3 * ngaps
1743 bpymesh
.loops
.add(num_polys
* 3)
1744 bpymesh
.polygons
.add(num_polys
)
1745 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1751 yield index
[i
+ odd
]
1752 yield index
[i
+ 1 - odd
]
1756 if i
+ 2 >= len(index
):
1758 if index
[i
+ 2] == -1:
1761 bpymesh
.polygons
.foreach_set("vertices", [f
for f
in triangles()])
1762 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1765 def importMesh_IndexedTriangleFanSet(geom
, ancestry
):
1767 # colorPerVertex is always true
1768 cw
= 0 if geom
.getFieldAsBool('ccw', True, ancestry
) else 1
1769 bpymesh
= bpy
.data
.meshes
.new(name
="IndexedTriangleFanSet")
1770 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1773 index
= geom
.getFieldAsArray('index', 0, ancestry
)
1774 while index
[-1] == -1:
1776 ngaps
= sum(1 for i
in index
if i
== -1)
1777 num_polys
= len(index
) - 2 - 3 * ngaps
1778 bpymesh
.loops
.add(num_polys
* 3)
1779 bpymesh
.polygons
.add(num_polys
)
1780 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1787 yield index
[i
+ j
+ cw
]
1788 yield index
[i
+ j
+ 1 - cw
]
1790 if i
+ j
+ 1 >= len(index
):
1792 if index
[i
+ j
+ 1] == -1:
1795 bpymesh
.polygons
.foreach_set("vertices", [f
for f
in triangles()])
1796 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1799 def importMesh_TriangleSet(geom
, ancestry
):
1801 # colorPerVertex is always true
1802 ccw
= geom
.getFieldAsBool('ccw', True, ancestry
)
1803 bpymesh
= bpy
.data
.meshes
.new(name
="TriangleSet")
1804 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1805 n
= len(bpymesh
.vertices
)
1807 bpymesh
.loops
.add(num_polys
* 3)
1808 bpymesh
.polygons
.add(num_polys
)
1809 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1812 fv
= [i
for i
in range(n
)]
1814 fv
= [3 * i
+ j
for i
in range(n
// 3) for j
in (1, 0, 2)]
1815 bpymesh
.polygons
.foreach_set("vertices", fv
)
1817 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1820 def importMesh_TriangleStripSet(geom
, ancestry
):
1822 # colorPerVertex is always true
1823 cw
= 0 if geom
.getFieldAsBool('ccw', True, ancestry
) else 1
1824 bpymesh
= bpy
.data
.meshes
.new(name
="TriangleStripSet")
1825 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1826 counts
= geom
.getFieldAsArray('stripCount', 0, ancestry
)
1827 num_polys
= sum([n
- 2 for n
in counts
])
1828 bpymesh
.loops
.add(num_polys
* 3)
1829 bpymesh
.polygons
.add(num_polys
)
1830 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1834 for i
in range(0, len(counts
)):
1835 for j
in range(0, counts
[i
] - 2):
1836 yield b
+ j
+ (j
+ cw
) % 2
1837 yield b
+ j
+ 1 - (j
+ cw
) % 2
1840 bpymesh
.polygons
.foreach_set("vertices", [x
for x
in triangles()])
1842 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1845 def importMesh_TriangleFanSet(geom
, ancestry
):
1847 # colorPerVertex is always true
1848 cw
= 0 if geom
.getFieldAsBool('ccw', True, ancestry
) else 1
1849 bpymesh
= bpy
.data
.meshes
.new(name
="TriangleStripSet")
1850 importMesh_ReadVertices(bpymesh
, geom
, ancestry
)
1851 counts
= geom
.getFieldAsArray('fanCount', 0, ancestry
)
1852 num_polys
= sum([n
- 2 for n
in counts
])
1853 bpymesh
.loops
.add(num_polys
* 3)
1854 bpymesh
.polygons
.add(num_polys
)
1855 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 3, 3))
1859 for i
in range(0, len(counts
)):
1860 for j
in range(1, counts
[i
] - 1):
1863 yield b
+ j
+ 1 - cw
1865 bpymesh
.polygons
.foreach_set("vertices", [x
for x
in triangles()])
1866 return importMesh_FinalizeTriangleMesh(bpymesh
, geom
, ancestry
)
1869 def importMesh_IndexedFaceSet(geom
, ancestry
):
1870 # Saw the following structure in X3Ds: the first mesh has a huge set
1871 # of vertices and a reasonably sized index. The rest of the meshes
1872 # reference the Coordinate node from the first one, and have their
1873 # own reasonably sized indices.
1875 # In Blender, to the best of my knowledge, there's no way to reuse
1876 # the vertex set between meshes. So we have culling logic instead -
1877 # for each mesh, only leave vertices that are used for faces.
1879 ccw
= geom
.getFieldAsBool('ccw', True, ancestry
)
1880 coord
= geom
.getChildBySpec('Coordinate')
1882 points
= coord
.getRealNode().parsed
1883 # We need unflattened coord array here, while
1884 # importMesh_ReadVertices uses flattened. Can't cache both :(
1885 # TODO: resolve that somehow, so that vertex set can be effectively
1886 # reused between different mesh types?
1888 points
= coord
.getFieldAsArray('point', 3, ancestry
)
1889 if coord
.canHaveReferences():
1890 coord
.parsed
= points
1891 index
= geom
.getFieldAsArray('coordIndex', 0, ancestry
)
1893 while index
and index
[-1] == -1:
1896 if len(points
) >= 2 * len(index
): # Need to cull
1898 cull
= {} # Maps old vertex indices to new ones
1899 uncull
= [] # Maps new indices to the old ones
1902 uncull
= cull
= None
1906 # Generate faces. Cull the vertices if necessary,
1910 faces
.append(flip(face
, ccw
))
1913 if cull
is not None:
1915 culled_points
.append(points
[i
])
1924 faces
.append(flip(face
, ccw
)) # The last face
1927 points
= culled_points
1929 bpymesh
= bpy
.data
.meshes
.new(name
="IndexedFaceSet")
1930 bpymesh
.from_pydata(points
, [], faces
)
1931 # No validation here. It throws off the per-face stuff.
1933 # Similar treatment for normal and color indices
1935 def processPerVertexIndex(ind
):
1937 # Deflatten into an array of arrays by face; the latter might
1938 # need to be flipped
1942 verts_by_face
.append(flip(ind
[i
:i
+ len(f
)], ccw
))
1944 return verts_by_face
1946 return [[uncull
[v
] for v
in f
] for f
in faces
]
1948 return faces
# Reuse coordIndex, as per the spec
1951 normals
= geom
.getChildBySpec('Normal')
1953 per_vertex
= geom
.getFieldAsBool('normalPerVertex', True, ancestry
)
1954 vectors
= normals
.getFieldAsArray('vector', 3, ancestry
)
1955 normal_index
= geom
.getFieldAsArray('normalIndex', 0, ancestry
)
1957 co
= [co
for f
in processPerVertexIndex(normal_index
)
1959 for co
in vectors
[v
]]
1960 bpymesh
.vertices
.foreach_set("normal", co
)
1962 co
= [co
for (i
, f
) in enumerate(faces
)
1964 for co
in vectors
[normal_index
[i
] if normal_index
else i
]]
1965 bpymesh
.polygons
.foreach_set("normal", co
)
1967 # Apply vertex/face colors
1968 colors
= geom
.getChildBySpec(['ColorRGBA', 'Color'])
1970 if colors
.getSpec() == 'ColorRGBA':
1971 rgb
= colors
.getFieldAsArray('color', 4, ancestry
)
1973 # Array of arrays; no need to flatten
1974 rgb
= [c
+ [1.0] for c
in colors
.getFieldAsArray('color', 3, ancestry
)]
1976 color_per_vertex
= geom
.getFieldAsBool('colorPerVertex', True, ancestry
)
1977 color_index
= geom
.getFieldAsArray('colorIndex', 0, ancestry
)
1979 d
= bpymesh
.vertex_colors
.new().data
1980 if color_per_vertex
:
1981 cco
= [cco
for f
in processPerVertexIndex(color_index
)
1984 elif color_index
: # Color per face with index
1985 cco
= [cco
for (i
, f
) in enumerate(faces
)
1987 for cco
in rgb
[color_index
[i
]]]
1988 else: # Color per face without index
1989 cco
= [cco
for (i
, f
) in enumerate(faces
)
1992 d
.foreach_set('color', cco
)
1994 # Texture coordinates (UVs)
1995 tex_coord
= geom
.getChildBySpec('TextureCoordinate')
1997 tex_coord_points
= tex_coord
.getFieldAsArray('point', 2, ancestry
)
1998 tex_index
= geom
.getFieldAsArray('texCoordIndex', 0, ancestry
)
1999 tex_index
= processPerVertexIndex(tex_index
)
2000 loops
= [co
for f
in tex_index
2002 for co
in tex_coord_points
[v
]]
2004 x_min
= y_min
= z_min
= math
.inf
2005 x_max
= y_max
= z_max
= -math
.inf
2007 # Unused vertices don't participate in size; X3DOM does so
2009 (x
, y
, z
) = points
[v
]
2010 x_min
= min(x_min
, x
)
2011 x_max
= max(x_max
, x
)
2012 y_min
= min(y_min
, y
)
2013 y_max
= max(y_max
, y
)
2014 z_min
= min(z_min
, z
)
2015 z_max
= max(z_max
, z
)
2017 mins
= (x_min
, y_min
, z_min
)
2018 deltas
= (x_max
- x_min
, y_max
- y_min
, z_max
- z_min
)
2020 axes
.sort(key
=lambda a
: (-deltas
[a
], a
))
2021 # Tuple comparison breaks ties
2022 (s_axis
, t_axis
) = axes
[0:2]
2023 s_min
= mins
[s_axis
]
2025 t_min
= mins
[t_axis
]
2028 # Avoid divide by zero T76303.
2034 def generatePointCoords(pt
):
2035 return (pt
[s_axis
] - s_min
) / ds
, (pt
[t_axis
] - t_min
) / dt
2036 loops
= [co
for f
in faces
2038 for co
in generatePointCoords(points
[v
])]
2040 importMesh_ApplyTextureToLoops(bpymesh
, loops
)
2047 def importMesh_ElevationGrid(geom
, ancestry
):
2048 height
= geom
.getFieldAsArray('height', 0, ancestry
)
2049 x_dim
= geom
.getFieldAsInt('xDimension', 0, ancestry
)
2050 x_spacing
= geom
.getFieldAsFloat('xSpacing', 1, ancestry
)
2051 z_dim
= geom
.getFieldAsInt('zDimension', 0, ancestry
)
2052 z_spacing
= geom
.getFieldAsFloat('zSpacing', 1, ancestry
)
2053 ccw
= geom
.getFieldAsBool('ccw', True, ancestry
)
2055 # The spec assumes a certain ordering of quads; outer loop by z, inner by x
2056 bpymesh
= bpy
.data
.meshes
.new(name
="ElevationGrid")
2057 bpymesh
.vertices
.add(x_dim
* z_dim
)
2058 co
= [w
for x
in range(x_dim
) for z
in range(z_dim
)
2059 for w
in (x
* x_spacing
, height
[x_dim
* z
+ x
], z
* z_spacing
)]
2060 bpymesh
.vertices
.foreach_set("co", co
)
2062 num_polys
= (x_dim
- 1) * (z_dim
- 1)
2063 bpymesh
.loops
.add(num_polys
* 4)
2064 bpymesh
.polygons
.add(num_polys
)
2065 bpymesh
.polygons
.foreach_set("loop_start", range(0, num_polys
* 4, 4))
2066 # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
2067 # For quad tessfaces, it was important that the final vertex index was not 0
2068 # (Blender treated it as a triangle then).
2069 # So simply reversing the face was not an option.
2070 # With bmesh polygons, this has no importance anymore, but keep existing code for now.
2071 verts
= [i
for x
in range(x_dim
- 1) for z
in range(z_dim
- 1)
2072 for i
in (z
* x_dim
+ x
,
2073 z
* x_dim
+ x
+ 1 if ccw
else (z
+ 1) * x_dim
+ x
,
2074 (z
+ 1) * x_dim
+ x
+ 1,
2075 (z
+ 1) * x_dim
+ x
if ccw
else z
* x_dim
+ x
+ 1)]
2076 bpymesh
.polygons
.foreach_set("vertices", verts
)
2078 importMesh_ApplyNormals(bpymesh
, geom
, ancestry
)
2079 # ApplyColors won't work here; faces are quads, and also per-face
2080 # coloring should be supported
2081 colors
= geom
.getChildBySpec(['ColorRGBA', 'Color'])
2083 if colors
.getSpec() == 'ColorRGBA':
2085 in colors
.getFieldAsArray('color', 4, ancestry
)]
2086 # Array of arrays; no need to flatten
2088 rgb
= colors
.getFieldAsArray('color', 3, ancestry
)
2090 tc
= bpymesh
.vertex_colors
.new().data
2091 if geom
.getFieldAsBool('colorPerVertex', True, ancestry
):
2092 # Per-vertex coloring
2093 # Note the 2/4 flip here
2094 tc
.foreach_set("color",
2095 [c
for x
in range(x_dim
- 1)
2096 for z
in range(z_dim
- 1)
2097 for rgb_idx
in (z
* x_dim
+ x
,
2098 z
* x_dim
+ x
+ 1 if ccw
else (z
+ 1) * x_dim
+ x
,
2099 (z
+ 1) * x_dim
+ x
+ 1,
2100 (z
+ 1) * x_dim
+ x
if ccw
else z
* x_dim
+ x
+ 1)
2101 for c
in rgb
[rgb_idx
]])
2102 else: # Coloring per face
2103 tc
.foreach_set("color",
2104 [c
for x
in range(x_dim
- 1)
2105 for z
in range(z_dim
- 1)
2106 for rgb_idx
in (z
* (x_dim
- 1) + x
,) * 4
2107 for c
in rgb
[rgb_idx
]])
2109 # Textures also need special treatment; it's all quads,
2110 # and there's a builtin algorithm for coordinate generation
2111 tex_coord
= geom
.getChildBySpec('TextureCoordinate')
2113 uvs
= tex_coord
.getFieldAsArray('point', 2, ancestry
)
2115 uvs
= [(i
/ (x_dim
- 1), j
/ (z_dim
- 1))
2116 for i
in range(x_dim
)
2117 for j
in range(z_dim
)]
2119 d
= bpymesh
.uv_layers
.new().data
2120 # Rather than repeat the face/vertex algorithm from above, we read
2121 # the vertex index back from polygon. Might be suboptimal.
2122 uvs
= [i
for poly
in bpymesh
.polygons
2123 for vidx
in poly
.vertices
2125 d
.foreach_set('uv', uv
)
2132 def importMesh_Extrusion(geom
, ancestry
):
2133 # Interestingly, the spec doesn't allow for vertex/face colors in this
2134 # element, nor for normals.
2135 # Since coloring and normals are not supported here, and also large
2136 # polygons for caps might be required, we shall use from_pydata().
2138 ccw
= geom
.getFieldAsBool('ccw', True, ancestry
)
2139 begin_cap
= geom
.getFieldAsBool('beginCap', True, ancestry
)
2140 end_cap
= geom
.getFieldAsBool('endCap', True, ancestry
)
2141 cross
= geom
.getFieldAsArray('crossSection', 2, ancestry
)
2143 cross
= ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
2144 spine
= geom
.getFieldAsArray('spine', 3, ancestry
)
2146 spine
= ((0, 0, 0), (0, 1, 0))
2147 orient
= geom
.getFieldAsArray('orientation', 4, ancestry
)
2149 orient
= [Quaternion(o
[:3], o
[3]).to_matrix()
2150 if o
[3] else None for o
in orient
]
2151 scale
= geom
.getFieldAsArray('scale', 2, ancestry
)
2153 scale
= [Matrix(((s
[0], 0, 0), (0, 1, 0), (0, 0, s
[1])))
2154 if s
[0] != 1 or s
[1] != 1 else None for s
in scale
]
2156 # Special treatment for the closed spine and cross section.
2157 # Let's save some memory by not creating identical but distinct vertices;
2158 # later we'll introduce conditional logic to link the last vertex with
2159 # the first one where necessary.
2160 cross_closed
= cross
[0] == cross
[-1]
2164 cross
= [Vector((c
[0], 0, c
[1])) for c
in cross
]
2165 ncf
= nc
if cross_closed
else nc
- 1
2166 # Face count along the cross; for closed cross, it's the same as the
2167 # respective vertex count
2169 spine_closed
= spine
[0] == spine
[-1]
2173 spine
= [Vector(s
) for s
in spine
]
2174 nsf
= ns
if spine_closed
else ns
- 1
2176 # This will be used for fallback, where the current spine point joins
2177 # two collinear spine segments. No need to recheck the case of the
2178 # closed spine/last-to-first point juncture; if there's an angle there,
2179 # it would kick in on the first iteration of the main loop by spine.
2180 def findFirstAngleNormal():
2181 for i
in range(1, ns
- 1):
2183 z
= (spine
[i
+ 1] - spt
).cross(spine
[i
- 1] - spt
)
2184 if z
.length
> EPSILON
:
2186 # All the spines are collinear. Fallback to the rotated source
2188 # TODO: handle the situation where the first two spine points match
2189 v
= spine
[1] - spine
[0]
2190 orig_y
= Vector((0, 1, 0))
2191 orig_z
= Vector((0, 0, 1))
2192 if v
.cross(orig_y
).length
>= EPSILON
:
2193 # Spine at angle with global y - rotate the z accordingly
2194 orig_z
.rotate(orig_y
.rotation_difference(v
))
2199 for i
, spt
in enumerate(spine
):
2200 if (i
> 0 and i
< ns
- 1) or spine_closed
:
2201 snext
= spine
[(i
+ 1) % ns
]
2202 sprev
= spine
[(i
- 1 + ns
) % ns
]
2206 try_z
= vnext
.cross(vprev
)
2207 # Might be zero, then all kinds of fallback
2208 if try_z
.length
> EPSILON
:
2209 if z
is not None and try_z
.dot(z
) < 0:
2212 elif not z
: # No z, and no previous z.
2213 # Look ahead, see if there's at least one point where
2214 # spines are not collinear.
2215 z
= findFirstAngleNormal()
2216 elif i
== 0: # And non-crossed
2217 snext
= spine
[i
+ 1]
2219 z
= findFirstAngleNormal()
2220 else: # last point and not crossed
2221 sprev
= spine
[i
- 1]
2223 # If there's more than one point in the spine, z is already set.
2224 # One point in the spline is an error anyway.
2227 m
= Matrix(((x
.x
, y
.x
, z
.x
), (x
.y
, y
.y
, z
.y
), (x
.z
, y
.z
, z
.z
)))
2228 # Columns are the unit vectors for the xz plane for the cross-section
2231 mrot
= orient
[i
] if len(orient
) > 1 else orient
[0]
2233 m
@= mrot
# Not sure about this. Counterexample???
2235 mscale
= scale
[i
] if len(scale
) > 1 else scale
[0]
2238 # First the cross-section 2-vector is scaled,
2239 # then applied to the xz plane unit vectors
2241 verts
.append((spt
+ m
@ cpt
).to_tuple())
2242 # Could've done this with a single 4x4 matrix... Oh well
2244 # The method from_pydata() treats correctly quads with final vertex
2246 # So we just flip the vertices if ccw is off.
2250 faces
.append(flip([x
for x
in range(nc
- 1, -1, -1)], ccw
))
2252 # Order of edges in the face: forward along cross, forward along spine,
2253 # backward along cross, backward along spine, flipped if now ccw.
2254 # This order is assumed later in the texture coordinate assignment;
2255 # please don't change without syncing.
2259 s
* nc
+ (c
+ 1) % nc
,
2260 (s
+ 1) * nc
+ (c
+ 1) % nc
,
2261 (s
+ 1) * nc
+ c
), ccw
) for s
in range(ns
- 1) for c
in range(ncf
)]
2264 # The faces between the last and the first spine points
2270 c
), ccw
) for c
in range(ncf
)]
2273 faces
.append(flip([(ns
- 1) * nc
+ x
for x
in range(0, nc
)], ccw
))
2275 bpymesh
= bpy
.data
.meshes
.new(name
="Extrusion")
2276 bpymesh
.from_pydata(verts
, [], faces
)
2278 # The way we deal with textures in triangular meshes doesn't apply.
2279 # The structure of the loop array goes: cap, side, cap
2280 if begin_cap
or end_cap
: # Need dimensions
2281 x_min
= x_max
= z_min
= z_max
= None
2284 if x_min
is None or x
< x_min
:
2286 if x_max
is None or x
> x_max
:
2288 if z_min
is None or z
< z_min
:
2290 if z_max
is None or z
> z_max
:
2294 cap_scale
= dz
if dz
> dx
else dx
2296 # Takes an index in the cross array, returns scaled
2297 # texture coords for cap texturing purposes
2298 def scaledLoopVertex(i
):
2300 return (c
.x
- x_min
) / cap_scale
, (c
.z
- z_min
) / cap_scale
2302 # X3DOM uses raw cap shape, not a scaled one. So we will, too.
2305 mloops
= bpymesh
.loops
2306 if begin_cap
: # vertex indices match the indices in cross
2307 # Rely on the loops in the mesh; don't repeat the face
2308 # generation logic here
2309 loops
+= [co
for i
in range(nc
)
2310 for co
in scaledLoopVertex(mloops
[i
].vertex_index
)]
2313 # Same order of vertices as in face generation
2314 # We don't rely on the loops in the mesh; instead,
2315 # we repeat the face generation logic.
2316 loops
+= [co
for s
in range(nsf
)
2318 for v
in flip(((c
/ ncf
, s
/ nsf
),
2319 ((c
+ 1) / ncf
, s
/ nsf
),
2320 ((c
+ 1) / ncf
, (s
+ 1) / nsf
),
2321 (c
/ ncf
, (s
+ 1) / nsf
)), ccw
) for co
in v
]
2324 # Base loop index for end cap
2325 lb
= ncf
* nsf
* 4 + (nc
if begin_cap
else 0)
2326 # Rely on the loops here too.
2327 loops
+= [co
for i
in range(nc
) for co
2328 in scaledLoopVertex(mloops
[lb
+ i
].vertex_index
% nc
)]
2329 importMesh_ApplyTextureToLoops(bpymesh
, loops
)
2336 # -----------------------------------------------------------------------------------
2337 # Line and point sets
2340 def importMesh_LineSet(geom
, ancestry
):
2341 # TODO: line display properties are ignored
2342 # Per-vertex color is ignored
2343 coord
= geom
.getChildBySpec('Coordinate')
2344 src_points
= coord
.getFieldAsArray('point', 3, ancestry
)
2345 # Array of 3; Blender needs arrays of 4
2346 bpycurve
= bpy
.data
.curves
.new("LineSet", 'CURVE')
2347 bpycurve
.dimensions
= '3D'
2348 counts
= geom
.getFieldAsArray('vertexCount', 0, ancestry
)
2351 sp
= bpycurve
.splines
.new('POLY')
2352 sp
.points
.add(n
- 1) # points already has one element
2355 for x
in src_points
[b
:b
+ n
]:
2360 sp
.points
.foreach_set('co', [x
for x
in points()])
2365 def importMesh_IndexedLineSet(geom
, ancestry
):
2367 # coord = geom.getChildByName('coord') # 'Coordinate'
2368 coord
= geom
.getChildBySpec('Coordinate') # works for x3d and vrml
2370 points
= coord
.getFieldAsArray('point', 3, ancestry
)
2375 print('\tWarning: IndexedLineSet had no points')
2378 ils_lines
= geom
.getFieldAsArray('coordIndex', 0, ancestry
)
2383 for il
in ils_lines
:
2388 line
.append(int(il
))
2391 # vcolor = geom.getChildByName('color')
2392 # blender doesn't have per vertex color
2394 bpycurve
= bpy
.data
.curves
.new('IndexedCurve', 'CURVE')
2395 bpycurve
.dimensions
= '3D'
2400 # co = points[line[0]] # UNUSED
2401 nu
= bpycurve
.splines
.new('POLY')
2402 nu
.points
.add(len(line
) - 1) # the new nu has 1 point to begin with
2403 for il
, pt
in zip(line
, nu
.points
):
2404 pt
.co
[0:3] = points
[il
]
2409 def importMesh_PointSet(geom
, ancestry
):
2411 coord
= geom
.getChildBySpec('Coordinate') # works for x3d and vrml
2413 points
= coord
.getFieldAsArray('point', 3, ancestry
)
2417 # vcolor = geom.getChildByName('color')
2418 # blender doesn't have per vertex color
2420 bpymesh
= bpy
.data
.meshes
.new("PointSet")
2421 bpymesh
.vertices
.add(len(points
))
2422 bpymesh
.vertices
.foreach_set("co", [a
for v
in points
for a
in v
])
2424 # No need to validate
2429 # -----------------------------------------------------------------------------------
2431 # SA: they used to use bpy.ops for primitive creation. That was
2432 # unbelievably slow on complex scenes. I rewrote to generate meshes
2436 GLOBALS
['CIRCLE_DETAIL'] = 12
2439 def importMesh_Sphere(geom
, ancestry
):
2441 # Extra field 'subdivision="n m"' attribute, specifying how many
2442 # rings and segments to use (X3DOM).
2443 r
= geom
.getFieldAsFloat('radius', 0.5, ancestry
)
2444 subdiv
= geom
.getFieldAsArray('subdivision', 0, ancestry
)
2446 if len(subdiv
) == 1:
2451 nr
= ns
= GLOBALS
['CIRCLE_DETAIL']
2452 # used as both ring count and segment count
2453 lau
= pi
/ nr
# Unit angle of latitude (rings) for the given tessellation
2454 lou
= 2 * pi
/ ns
# Unit angle of longitude (segments)
2456 bpymesh
= bpy
.data
.meshes
.new(name
="Sphere")
2458 bpymesh
.vertices
.add(ns
* (nr
- 1) + 2)
2459 # The non-polar vertices go from x=0, negative z plane counterclockwise -
2460 # to -x, to +z, to +x, back to -z
2461 co
= [0, r
, 0, 0, -r
, 0] # +y and -y poles
2462 co
+= [r
* coe
for ring
in range(1, nr
) for seg
in range(ns
)
2463 for coe
in (-sin(lou
* seg
) * sin(lau
* ring
),
2465 -cos(lou
* seg
) * sin(lau
* ring
))]
2466 bpymesh
.vertices
.foreach_set('co', co
)
2470 num_quad
= num_poly
- num_tri
2471 num_loop
= num_quad
* 4 + num_tri
* 3
2472 tf
= bpymesh
.polygons
2474 bpymesh
.loops
.add(num_loop
)
2475 bpymesh
.polygons
.foreach_set("loop_start",
2476 tuple(range(0, ns
* 3, 3)) +
2477 tuple(range(ns
* 3, num_loop
- ns
* 3, 4)) +
2478 tuple(range(num_loop
- ns
* 3, num_loop
, 3)))
2480 vb
= 2 + (nr
- 2) * ns
# First vertex index for the bottom cap
2481 fb
= (nr
- 1) * ns
# First face index for the bottom cap
2483 # Because of tricky structure, assign texture coordinates along with
2484 # face creation. Can't easily do foreach_set, 'cause caps are triangles and
2487 tex
= bpymesh
.uv_layers
.new().data
2489 # Faces go in order: top cap, sides, bottom cap.
2490 # Sides go by ring then by segment.
2493 # Top cap face vertices go in order: down right up
2494 # (starting from +y pole)
2495 # Bottom cap goes: up left down (starting from -y pole)
2496 for seg
in range(ns
):
2497 tf
[seg
].vertices
= (0, seg
+ 2, (seg
+ 1) % ns
+ 2)
2498 tf
[fb
+ seg
].vertices
= (1, vb
+ (seg
+ 1) % ns
, vb
+ seg
)
2499 for lidx
, uv
in zip(tf
[seg
].loop_indices
,
2500 (((seg
+ 0.5) / ns
, 1),
2501 (seg
/ ns
, 1 - 1 / nr
),
2502 ((seg
+ 1) / ns
, 1 - 1 / nr
))):
2504 for lidx
, uv
in zip(tf
[fb
+ seg
].loop_indices
,
2505 (((seg
+ 0.5) / ns
, 0),
2506 ((seg
+ 1) / ns
, 1 / nr
),
2507 (seg
/ ns
, 1 / nr
))):
2511 # Side face vertices go in order: down right up left
2512 for ring
in range(nr
- 2):
2514 # First vertex index for the top edge of the ring
2516 # First vertex index for the bottom edge of the ring
2517 rfb
= ns
* (ring
+ 1)
2518 # First face index for the ring
2519 for seg
in range(ns
):
2520 nseg
= (seg
+ 1) % ns
2521 tf
[rfb
+ seg
].vertices
= (tvb
+ seg
, bvb
+ seg
, bvb
+ nseg
, tvb
+ nseg
)
2522 for lidx
, uv
in zip(tf
[rfb
+ seg
].loop_indices
,
2523 ((seg
/ ns
, 1 - (ring
+ 1) / nr
),
2524 (seg
/ ns
, 1 - (ring
+ 2) / nr
),
2525 ((seg
+ 1) / ns
, 1 - (ring
+ 2) / nr
),
2526 ((seg
+ 1) / ns
, 1 - (ring
+ 1) / nr
))):
def importMesh_Cylinder(geom, ancestry):
    """Build a Blender mesh for an X3D/VRML Cylinder node.

    solid is ignored; there is no ccw in this element.
    Extra parameter subdivision="n" controls how many faces to use.
    """
    radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)
    top = geom.getFieldAsBool('top', True, ancestry)

    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)

    nn = n * 2
    angle = 2 * pi / n
    yvalues = (height / 2, -height / 2)

    # The seam is at x=0, z=-r; vertices go ccw -
    # to pos x, to neg z, to neg x, back to neg z.
    verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
             for i in range(n) for y in yvalues]

    faces = []
    if side:
        # Order of edges in side faces: up, left, down, right.
        # Texture coordinate logic depends on it.
        faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
                  for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
    if top:
        faces += [[x for x in range(0, nn, 2)]]
    if bottom:
        faces += [[x for x in range(nn - 1, -1, -2)]]

    bpymesh = bpy.data.meshes.new(name="Cylinder")
    bpymesh.from_pydata(verts, [], faces)
    # Tried constructing the mesh manually from polygons/loops/edges,
    # the difference in performance on Blender 2.74 (Win64) is negligible.
    bpymesh.validate()

    # The structure of the loop array goes: cap, side, cap.
    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]
    if top:
        loops += [0.5 + co / 2 for i in range(n)
                  for co in (-sin(angle * i), cos(angle * i))]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Cone(geom, ancestry):
    """Build a Blender mesh for an X3D/VRML Cone node.

    solid is ignored. Extra parameter subdivision="n" - how many faces to use.
    """
    n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
    radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
    height = geom.getFieldAsFloat('height', 2, ancestry)
    bottom = geom.getFieldAsBool('bottom', True, ancestry)
    side = geom.getFieldAsBool('side', True, ancestry)

    d = height / 2
    angle = 2 * pi / n

    # Apex first, then the base ring (ccw, seam at x=0, z=-r).
    verts = [(0, d, 0)]
    verts += [(-radius * sin(angle * i),
               -d,
               -radius * cos(angle * i)) for i in range(n)]

    faces = []
    # Side face vertices go: up down right
    if side:
        faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
    if bottom:
        faces += [[i for i in range(n, 0, -1)]]

    bpymesh = bpy.data.meshes.new(name="Cone")
    bpymesh.from_pydata(verts, [], faces)
    bpymesh.validate()

    loops = []
    if side:
        loops += [co for i in range(n)
                  for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
    if bottom:
        loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
                  for co in (sin(angle * i), cos(angle * i))]

    importMesh_ApplyTextureToLoops(bpymesh, loops)

    bpymesh.update()
    return bpymesh
def importMesh_Box(geom, ancestry):
    """Build a Blender mesh for an X3D/VRML Box node.

    solid is ignored; there is no ccw in this element.
    """
    (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
    dx /= 2
    dy /= 2
    dz /= 2

    bpymesh = bpy.data.meshes.new(name="Box")
    bpymesh.vertices.add(8)

    # xz plane at +y, ccw
    co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
          # xz plane at -y
          dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
    bpymesh.vertices.foreach_set('co', co)

    bpymesh.loops.add(6 * 4)
    bpymesh.polygons.add(6)
    bpymesh.polygons.foreach_set('loop_start', range(0, 6 * 4, 4))
    bpymesh.polygons.foreach_set('loop_total', (4,) * 6)
    # NOTE(review): face winding reconstructed from the reference importer;
    # the vertex index tuple fell in an extraction gap - confirm against upstream.
    bpymesh.polygons.foreach_set('vertices', (
        0, 1, 2, 3,   # +y
        4, 0, 3, 7,   # +x
        7, 3, 2, 6,   # -z
        6, 2, 1, 5,   # -x
        5, 1, 0, 4,   # +z
        7, 6, 5, 4))  # -y

    bpymesh.validate()
    d = bpymesh.uv_layers.new().data
    d.foreach_set('uv', (
        1, 0, 0, 0, 0, 1, 1, 1,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        0, 0, 0, 1, 1, 1, 1, 0,
        1, 0, 0, 0, 0, 1, 1, 1))

    bpymesh.update()
    return bpymesh
2675 # -----------------------------------------------------------------------------------
2676 # Utilities for importShape
2679 # Textures are processed elsewhere.
# Textures are processed elsewhere.
def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
    """Create a Blender material from an X3D Material node.

    The texture is applied later, in appearance_Create().
    All values are between 0.0 and 1.0; defaults come from the VRML docs.
    Returns a PrincipledBSDFWrapper around the new material.
    """
    mat_name = mat.getDefName()
    bpymat = bpy.data.materials.new(mat_name if mat_name else vrmlname)
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    # TODO: handle 'ambientIntensity'.
    #ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)

    diff_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
    bpymat_wrap.base_color = diff_color

    emit_color = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
    bpymat_wrap.emission_color = emit_color

    # NOTE - 'shininess' is being handled as 1 - roughness for now.
    shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
    bpymat_wrap.roughness = 1.0 - shininess

    #bpymat.specular_hardness = int(1 + (510 * shininess))

    # TODO: handle 'specularColor'.
    #specular_color = mat.getFieldAsFloatTuple('specularColor',
    #                                          [0.0, 0.0, 0.0], ancestry)

    alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
    bpymat_wrap.alpha = alpha
    if alpha < 1.0:
        bpymat.blend_method = "BLEND"
        bpymat.shadow_method = "HASHED"

    # NOTE - leaving this disabled for now
    if False and is_vcol:
        node_vertex_color = bpymat.node_tree.nodes.new("ShaderNodeVertexColor")
        node_vertex_color.location = (-200, 300)

        bpymat.node_tree.links.new(
            bpymat_wrap.node_principled_bsdf.inputs["Base Color"],
            node_vertex_color.outputs["Color"]
        )

    return bpymat_wrap
def appearance_CreateDefaultMaterial():
    """Create a material with the X3D defaults.

    Used for shapes without an explicit material definition
    (but possibly with a texture). Returns a PrincipledBSDFWrapper.
    """
    bpymat = bpy.data.materials.new("Material")
    bpymat_wrap = node_shader_utils.PrincipledBSDFWrapper(bpymat, is_readonly=False)

    bpymat_wrap.roughness = 0.8
    bpymat_wrap.base_color = (0.8, 0.8, 0.8)
    #bpymat.mirror_color = (0, 0, 0)

    # TODO: handle 'shininess' and 'specularColor'.
    #bpymat.specular_hardness = 103
    #bpymat.specular_color = (0, 0, 0)

    bpymat_wrap.alpha = 1.0
    return bpymat_wrap
def appearance_LoadImageTextureFile(ima_urls, node):
    """Try each URL in ima_urls until one loads; return the image or None.

    Paths are resolved relative to the directory of the file that
    declared the node.
    """
    bpyima = None
    for f in ima_urls:
        dirname = os.path.dirname(node.getFilename())
        bpyima = image_utils.load_image(f, dirname,
                                        place_holder=False,
                                        recursive=False,
                                        convert_callback=imageConvertCompat)
        if bpyima:
            break

    return bpyima
def appearance_LoadImageTexture(imageTexture, ancestry, node):
    """Resolve an ImageTexture node's url field and load the image.

    Returns the loaded bpy image or None.
    """
    # TODO: cache loaded textures...
    ima_urls = imageTexture.getFieldAsString('url', None, ancestry)

    if ima_urls is None:
        try:
            # in some cases we get a list of images.
            ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
        except:
            ima_urls = None
    else:
        if '" "' in ima_urls:
            # '"foo" "bar"' --> ['foo', 'bar']
            ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
        else:
            ima_urls = [ima_urls]
    # ima_urls is a list or None

    if ima_urls is None:
        print("\twarning, image with no URL, this is odd")
        return None

    bpyima = appearance_LoadImageTextureFile(ima_urls, node)

    if not bpyima:
        print("ImportX3D warning: unable to load texture", ima_urls)
    else:
        # KNOWN BUG; PNGs with a transparent color are not perceived
        # as transparent. Need alpha channel.
        if bpyima.depth not in {32, 128}:
            bpyima.alpha_mode = 'NONE'
    return bpyima
def appearance_LoadTexture(tex_node, ancestry, node):
    """Load (or fetch from cache) the texture image for a texture node.

    Both USE-based caching and desc-based caching.
    Works for both ImageTextures and PixelTextures.
    """
    # USE-based caching
    if tex_node.reference:
        return tex_node.getRealNode().parsed

    # Desc-based caching. It might misfire on multifile models, where the
    # same desc means different things in different files.
    # TODO: move caches to file level.
    desc = tex_node.desc()
    if desc and desc in texture_cache:
        bpyima = texture_cache[desc]
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima
        return bpyima

    # No cached texture, load it.
    if tex_node.getSpec() == 'ImageTexture':
        bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
    else:  # PixelTexture
        bpyima = appearance_LoadPixelTexture(tex_node, ancestry)

    if bpyima:  # Loading can still fail
        # Update the desc-based cache
        if desc:
            texture_cache[desc] = bpyima

        # Update the USE-based cache
        if tex_node.canHaveReferences():
            tex_node.parsed = bpyima

    return bpyima
def appearance_ExpandCachedMaterial(bpymat):
    """Expand a cached Blender material into the (bpymat, bpyima,
    tex_has_alpha) triple used by importShape.

    The pre-2.80 implementation recovered the image and its alpha mode
    from bpymat.texture_slots[0]; that branch was disabled with
    ``if 0 and ...`` because texture_slots no longer exists in the API,
    so the image/alpha information is not recoverable here and the
    dead code has been removed.
    """
    return (bpymat, None, False)
def appearance_MakeDescCacheKey(material, tex_node):
    """Build the (material desc, texture desc) key for the desc-based
    material cache, or return None when desc-based caching must be off.

    A missing node counts as the "Default" material/texture.
    """
    mat_desc = material.desc() if material else "Default"
    tex_desc = tex_node.desc() if tex_node else "Default"

    # desc not available (in VRML) disables caching for that node.
    # TODO: serialize VRML nodes!!!
    desc_missing = ((tex_node and tex_desc is None) or
                    (material and mat_desc is None))
    if not desc_missing:
        return (mat_desc, tex_desc)
    if not tex_node and not material:
        # Even for VRML, we cache the null material
        return ("Default", "Default")

    return None  # Desc-based caching is off
def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
    """Create a Blender material object from an X3D appearance.

    Returns (material, image, tex_has_alpha).
    """
    bpyima = None
    tex_has_alpha = False

    if material:
        bpymat_wrap = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
    else:
        bpymat_wrap = appearance_CreateDefaultMaterial()

    if tex_node:  # Texture caching inside there
        bpyima = appearance_LoadTexture(tex_node, ancestry, node)

    if bpyima:
        repeatS = tex_node.getFieldAsBool('repeatS', True, ancestry)
        repeatT = tex_node.getFieldAsBool('repeatT', True, ancestry)

        bpymat_wrap.base_color_texture.image = bpyima

        # NOTE - not possible to handle x and y tiling individually.
        extension = "REPEAT" if repeatS or repeatT else "CLIP"
        bpymat_wrap.base_color_texture.extension = extension

        tex_has_alpha = bpyima.alpha_mode not in {'NONE', 'CHANNEL_PACKED'}
        if tex_has_alpha:
            bpymat_wrap.alpha_texture.image = bpyima
            bpymat_wrap.alpha_texture.extension = extension

    return (bpymat_wrap.material, bpyima, tex_has_alpha)
def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
    """
    Material creation takes nontrivial time on large models.
    So we cache them aggressively.
    However, in Blender, texture is a part of material, while in
    X3D it's not. Blender's notion of material corresponds to
    X3D's notion of appearance.

    TextureTransform is not a part of material (at least
    not in the current implementation).

    USE on an Appearance node and USE on a Material node
    call for different approaches.

    Tools generate repeating, identical material definitions.
    Can't rely on USE alone. Repeating texture definitions
    are entirely possible, too.

    Vertex coloring is not a part of appearance, but Blender
    has a material flag for it. However, if a mesh has no vertex
    color layer, setting use_vertex_color_paint to true has no
    effect. So it's fine to reuse the same material for meshes
    with vertex colors and for ones without.
    It's probably an abuse of Blender of some level.

    So here's the caching structure:
    For USE on appearance, we store the material object
    in the appearance node.

    For USE on texture, we store the image object in the tex node.

    For USE on material with no texture, we store the material object
    in the material node.

    Also, we store textures by description in texture_cache.

    Also, we store materials by (material desc, texture desc)
    in material_cache.
    """
    # First, check entire-appearance cache
    if appr.reference and appr.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)

    tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
    # Other texture nodes are: MovieTexture, MultiTexture
    material = appr.getChildBySpec('Material')
    # We're ignoring FillProperties, LineProperties, and shaders

    # Check the USE-based material cache for textureless materials
    if material and material.reference and not tex_node and material.getRealNode().parsed:
        return appearance_ExpandCachedMaterial(material.getRealNode().parsed)

    # Now the description-based caching
    cache_key = appearance_MakeDescCacheKey(material, tex_node)

    if cache_key and cache_key in material_cache:
        bpymat = material_cache[cache_key]
        # Still want to make the material available for USE-based reuse
        if appr.canHaveReferences():
            appr.parsed = bpymat
        if material and material.canHaveReferences() and not tex_node:
            material.parsed = bpymat
        return appearance_ExpandCachedMaterial(bpymat)

    # Done checking full-material caches. Texture cache may still kick in.
    # Create the material already
    (bpymat, bpyima, tex_has_alpha) = appearance_Create(
        vrmlname, material, tex_node, ancestry, node, is_vcol)

    # Update the caches
    if appr.canHaveReferences():
        appr.parsed = bpymat

    if cache_key:
        material_cache[cache_key] = bpymat

    if material and material.canHaveReferences() and not tex_node:
        material.parsed = bpymat

    return (bpymat, bpyima, tex_has_alpha)
def appearance_LoadPixelTexture(pixelTexture, ancestry):
    """Decode an X3D PixelTexture node into a new Blender image.

    The 'image' field is (width, height, plane_count, pixel, pixel, ...);
    pixels are packed big-endian integers, one per texel.
    """
    image = pixelTexture.getFieldAsArray('image', 0, ancestry)
    (w, h, plane_count) = image[0:3]
    has_alpha = plane_count in {2, 4}
    pixels = image[3:]
    if len(pixels) != w * h:
        print("ImportX3D warning: pixel count in PixelTexture is off")

    bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
    if not has_alpha:
        bpyima.alpha_mode = 'NONE'

    # Conditional above the loop, for performance
    if plane_count == 3:  # RGB
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
    elif plane_count == 4:  # RGBA
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
    elif plane_count == 1:  # Intensity - does Blender even support that?
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel, pixel, pixel, 255)]
    elif plane_count == 2:  # Intensity/alpha
        bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
                         for cco in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
    bpyima.update()
    return bpyima
3001 # Called from importShape to insert a data object (typically a mesh)
def importShape_ProcessObject(
        bpycollection, vrmlname, bpydata, geom, geom_spec, node,
        bpymat, has_alpha, texmtx, ancestry,
        global_matrix):
    """Insert a data object (typically a mesh) created for a Shape into
    the scene: name it, attach the material, apply texture transform,
    and link the new object into the collection."""
    vrmlname += "_" + geom_spec
    bpydata.name = vrmlname

    if type(bpydata) == bpy.types.Mesh:
        # solid, as understood by the spec, is always true in Blender
        # solid=false, we don't support it yet.
        creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
        if creaseAngle is not None:
            bpydata.set_sharp_from_angle(angle=creaseAngle)
        else:
            bpydata.polygons.foreach_set("use_smooth", [False] * len(bpydata.polygons))

        # Only ever 1 material per shape
        if bpymat:
            bpydata.materials.append(bpymat)

        if bpydata.uv_layers:
            if has_alpha and bpymat:  # set the faces alpha flag?
                bpymat.blend_method = 'BLEND'
                bpymat.shadow_method = 'HASHED'

            if texmtx:
                # Apply texture transform?
                uv_copy = Vector()
                for l in bpydata.uv_layers.active.data:
                    luv = l.uv
                    uv_copy.x = luv[0]
                    uv_copy.y = luv[1]
                    l.uv[:] = (uv_copy @ texmtx)[0:2]

        # Done transforming the texture
        # TODO: check if per-polygon textures are supported here.
    elif type(bpydata) == bpy.types.TextCurve:
        # Text with textures??? Not sure...
        if bpymat:
            bpydata.materials.append(bpymat)

    # Can transform data or object, better the object so we can instance
    # the data.
    # bpymesh.transform(getFinalMatrix(node))
    bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    if DEBUG:
        bpyob["source_line_no"] = geom.lineno
def importText(geom, ancestry):
    """Build a Blender FONT curve for an X3D/VRML Text node."""
    fmt = geom.getChildBySpec('FontStyle')
    size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
    body = geom.getFieldAsString("string", None, ancestry)
    body = [w.strip('"') for w in body.split('" "')]

    bpytext = bpy.data.curves.new(name="Text", type='FONT')
    bpytext.offset_y = - size
    bpytext.body = "\n".join(body)
    bpytext.size = size
    return bpytext
3070 # -----------------------------------------------------------------------------------
# Dispatch table mapping a geometry node's spec to its importer.
# NOTE(review): the closing lines of this dict fell in an extraction gap;
# the 'Text' entry is reconstructed from the reference importer - confirm.
geometry_importers = {
    'IndexedFaceSet': importMesh_IndexedFaceSet,
    'IndexedTriangleSet': importMesh_IndexedTriangleSet,
    'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
    'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
    'IndexedLineSet': importMesh_IndexedLineSet,
    'TriangleSet': importMesh_TriangleSet,
    'TriangleStripSet': importMesh_TriangleStripSet,
    'TriangleFanSet': importMesh_TriangleFanSet,
    'LineSet': importMesh_LineSet,
    'ElevationGrid': importMesh_ElevationGrid,
    'Extrusion': importMesh_Extrusion,
    'PointSet': importMesh_PointSet,
    'Sphere': importMesh_Sphere,
    'Box': importMesh_Box,
    'Cylinder': importMesh_Cylinder,
    'Cone': importMesh_Cone,
    'Text': importText,
}
def importShape(bpycollection, node, ancestry, global_matrix):
    """Import one Shape node: resolve its appearance and geometry and
    link the resulting object into the collection."""
    # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
    def isGeometry(spec):
        return spec != "Appearance" and not spec.startswith("Metadata")

    bpyob = node.getRealNode().blendObject

    if bpyob is not None:
        # USE-d shape: instance the data by copying the object.
        bpyob = node.blendData = node.blendObject = bpyob.copy()
        # Could transform data, but better the object so we can instance the data
        bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
        bpycollection.objects.link(bpyob)
        bpyob.select_set(True)
        return

    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'Shape'

    appr = node.getChildBySpec('Appearance')
    geom = node.getChildBySpecCondition(isGeometry)
    if not geom:
        # Oh well, no geometry node in this shape
        return

    bpymat = None
    bpyima = None
    texmtx = None
    tex_has_alpha = False

    is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)

    if appr:
        (bpymat, bpyima,
         tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
                                                     ancestry, node,
                                                     is_vcol)

        textx = appr.getChildBySpec('TextureTransform')
        if textx:
            texmtx = translateTexTransform(textx, ancestry)

    geom_spec = geom.getSpec()

    # ccw is handled by every geometry importer separately; some
    # geometries are easier to flip than others
    geom_fn = geometry_importers.get(geom_spec)
    if geom_fn is not None:
        bpydata = geom_fn(geom, ancestry)

        # There are no geometry importers that can legally return
        # no object. It's either a bpy object, or an exception
        importShape_ProcessObject(
            bpycollection, vrmlname, bpydata, geom, geom_spec,
            node, bpymat, tex_has_alpha, texmtx,
            ancestry, global_matrix)
    else:
        print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
3155 # -----------------------------------------------------------------------------------
def importLamp_PointLight(node, ancestry):
    """Translate a VRML/X3D PointLight node into a Blender POINT light.

    Returns (bpylamp, mtx) where mtx is the light's placement matrix.
    """
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'PointLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'POINT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color

    mtx = Matrix.Translation(Vector(location))

    return bpylamp, mtx
def importLamp_DirectionalLight(node, ancestry):
    """Translate a VRML/X3D DirectionalLight node into a Blender SUN light.

    Returns (bpylamp, mtx) where mtx orients the lamp along 'direction'.
    """
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'DirectLight'

    # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO

    bpylamp = bpy.data.lights.new(vrmlname, 'SUN')
    bpylamp.energy = intensity
    bpylamp.color = color

    # lamps have their direction as -z, yup
    mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
3202 # looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
def importLamp_SpotLight(node, ancestry):
    """Translate a VRML/X3D SpotLight node into a Blender SPOT light.

    Returns (bpylamp, mtx) where mtx places and orients the lamp.
    """
    vrmlname = node.getDefName()
    if not vrmlname:
        vrmlname = 'SpotLight'

    # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
    # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
    beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry)  # max is documented to be 1.0 but some files have higher.
    color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
    cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0  # max is documented to be 1.0 but some files have higher.
    direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
    intensity = node.getFieldAsFloat('intensity', 1.0, ancestry)  # max is documented to be 1.0 but some files have higher.
    location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
    # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
    radius = node.getFieldAsFloat('radius', 100.0, ancestry)

    bpylamp = bpy.data.lights.new(vrmlname, 'SPOT')
    bpylamp.energy = intensity
    bpylamp.distance = radius
    bpylamp.color = color
    bpylamp.spot_size = cutOffAngle
    if beamWidth > cutOffAngle:
        bpylamp.spot_blend = 0.0
    else:
        if cutOffAngle == 0.0:  # this should never happen!
            bpylamp.spot_blend = 0.5
        else:
            bpylamp.spot_blend = beamWidth / cutOffAngle

    # lamps have their direction as -z, y==up
    mtx = Matrix.Translation(location) @ Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4()

    return bpylamp, mtx
def importLamp(bpycollection, node, spec, ancestry, global_matrix):
    """Dispatch a light node by spec, create the lamp object and link it."""
    if spec == 'PointLight':
        bpylamp, mtx = importLamp_PointLight(node, ancestry)
    elif spec == 'DirectionalLight':
        bpylamp, mtx = importLamp_DirectionalLight(node, ancestry)
    elif spec == 'SpotLight':
        bpylamp, mtx = importLamp_SpotLight(node, ancestry)
    else:
        print("Error, not a lamp")
        raise ValueError

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
3260 # -----------------------------------------------------------------------------------
def importViewpoint(bpycollection, node, ancestry, global_matrix):
    """Translate a VRML/X3D Viewpoint node into a Blender camera object."""
    name = node.getDefName()
    if not name:
        name = 'Viewpoint'

    fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry)  # max is documented to be 1.0 but some files have higher.
    # jump = node.getFieldAsBool('jump', True, ancestry)
    orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
    position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
    description = node.getFieldAsString('description', '', ancestry)

    bpycam = bpy.data.cameras.new(name)

    bpycam.angle = fieldOfView

    mtx = Matrix.Translation(Vector(position)) @ translateRotation(orientation)

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)
    bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
def importTransform(bpycollection, node, ancestry, global_matrix):
    """Translate a Transform node into an empty carrying the node's matrix."""
    name = node.getDefName()
    if not name:
        name = 'Transform'

    bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
    bpycollection.objects.link(bpyob)
    bpyob.select_set(True)

    bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)

    # so they are not too annoying
    bpyob.empty_display_type = 'PLAIN_AXES'
    bpyob.empty_display_size = 0.2
3302 #def importTimeSensor(node):
def action_fcurve_ensure(action, data_path, array_index):
    """Return the f-curve on *action* matching (data_path, array_index),
    creating it when no existing curve matches."""
    for fcu in action.fcurves:
        if fcu.data_path == data_path and fcu.array_index == array_index:
            return fcu

    return action.fcurves.new(data_path=data_path, index=array_index)
def translatePositionInterpolator(node, action, ancestry):
    """Write a PositionInterpolator's keys as linear location f-curves."""
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 3, ancestry)

    loc_x = action_fcurve_ensure(action, "location", 0)
    loc_y = action_fcurve_ensure(action, "location", 1)
    loc_z = action_fcurve_ensure(action, "location", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            # Malformed key/keyValue pairing; skip this key.
            continue

        loc_x.keyframe_points.insert(time, x)
        loc_y.keyframe_points.insert(time, y)
        loc_z.keyframe_points.insert(time, z)

    for fcu in (loc_x, loc_y, loc_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateOrientationInterpolator(node, action, ancestry):
    """Write an OrientationInterpolator's axis/angle keys as linear
    rotation_euler f-curves."""
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    rot_x = action_fcurve_ensure(action, "rotation_euler", 0)
    rot_y = action_fcurve_ensure(action, "rotation_euler", 1)
    rot_z = action_fcurve_ensure(action, "rotation_euler", 2)

    for i, time in enumerate(key):
        try:
            x, y, z, w = keyValue[i]
        except:
            # Malformed key/keyValue pairing; skip this key.
            continue

        # axis/angle -> matrix -> euler, one conversion per key
        mtx = translateRotation((x, y, z, w))
        eul = mtx.to_euler()
        rot_x.keyframe_points.insert(time, eul.x)
        rot_y.keyframe_points.insert(time, eul.y)
        rot_z.keyframe_points.insert(time, eul.z)

    for fcu in (rot_x, rot_y, rot_z):
        for kf in fcu.keyframe_points:
            kf.interpolation = 'LINEAR'
def translateScalarInterpolator(node, action, ancestry):
    """Write interpolator keys as scale f-curves.

    NOTE(review): despite the name this reads 3-component values and
    drives 'scale'; it also uses keyframe_points.new() while the other
    interpolators use .insert() - confirm against current bpy API.
    """
    key = node.getFieldAsArray('key', 0, ancestry)
    keyValue = node.getFieldAsArray('keyValue', 4, ancestry)

    sca_x = action_fcurve_ensure(action, "scale", 0)
    sca_y = action_fcurve_ensure(action, "scale", 1)
    sca_z = action_fcurve_ensure(action, "scale", 2)

    for i, time in enumerate(key):
        try:
            x, y, z = keyValue[i]
        except:
            # Malformed key/keyValue pairing; skip this key.
            continue

        sca_x.keyframe_points.new(time, x)
        sca_y.keyframe_points.new(time, y)
        sca_z.keyframe_points.new(time, z)
def translateTimeSensor(node, action, ancestry):
    """
    Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times
    to give different results, for now just do the basics
    """
    # NOTE(review): this body still uses the pre-2.5 Blender.IpoCurve API
    # (action.addCurve, IpoCurve.InterpTypes) and will raise NameError if
    # it is ever reached - confirm whether it should be ported or removed.
    time_cu = action.addCurve('Time')
    time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR

    cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)

    startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
    stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)

    if cycleInterval is not None:
        stopTime = startTime + cycleInterval

    loop = node.getFieldAsBool('loop', False, ancestry)

    time_cu.append((1 + startTime, 0.0))
    time_cu.append((1 + stopTime, 1.0 / 10.0))  # annoying, the UI uses /10

    if loop:
        time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC  # or - EXTRAP, CYCLIC_EXTRAP, CONST,
def importRoute(node, ancestry):
    """
    Animation route only at the moment
    """

    if not hasattr(node, 'fields'):
        return

    routeIpoDict = node.getRouteIpoDict()

    def getIpo(act_id):
        # Fetch (or lazily create) the action shared by all routes
        # that target the same node id.
        try:
            action = routeIpoDict[act_id]
        except:
            action = routeIpoDict[act_id] = bpy.data.actions.new('web3d_ipo')
        return action

    # for getting definitions
    defDict = node.getDefDict()
    """
    Handles routing nodes to each other

ROUTE vpPI.value_changed TO champFly001.set_position
ROUTE vpOI.value_changed TO champFly001.set_orientation
ROUTE vpTs.fraction_changed TO vpPI.set_fraction
ROUTE vpTs.fraction_changed TO vpOI.set_fraction
ROUTE champFly001.bindTime TO vpTs.set_startTime
    """

    #from_id, from_type = node.id[1].split('.')
    #to_id, to_type = node.id[3].split('.')

    # Currently unused: kept for the TODO of supporting combined routes.
    set_position_node = None
    set_orientation_node = None

    for field in node.fields:
        if field and field[0] == 'ROUTE':
            try:
                from_id, from_type = field[1].split('.')
                to_id, to_type = field[3].split('.')
            except:
                print("Warning, invalid ROUTE", field)
                continue

            if from_type == 'value_changed':
                if to_type == 'set_position':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translatePositionInterpolator(set_data_from_node, action, ancestry)

                if to_type in {'set_orientation', 'rotation'}:
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateOrientationInterpolator(set_data_from_node, action, ancestry)

                if to_type == 'set_scale':
                    action = getIpo(to_id)
                    set_data_from_node = defDict[from_id]
                    translateScalarInterpolator(set_data_from_node, action, ancestry)

            elif from_type == 'bindTime':
                action = getIpo(from_id)
                time_node = defDict[to_id]
                translateTimeSensor(time_node, action, ancestry)
3487 # Used when adding blender primitives
3488 GLOBALS
['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV
3490 # NOTE - reset material cache
3491 # (otherwise we might get "StructRNA of type Material has been removed" errors)
3492 global material_cache
3495 bpyscene
= bpycontext
.scene
3496 bpycollection
= bpycontext
.collection
3497 #root_node = vrml_parse('/_Cylinder.wrl')
3498 if filepath
.lower().endswith('.x3d'):
3499 root_node
, msg
= x3d_parse(filepath
)
3501 root_node
, msg
= vrml_parse(filepath
)
3507 if global_matrix
is None:
3508 global_matrix
= Matrix()
3510 # fill with tuples - (node, [parents-parent, parent])
3511 all_nodes
= root_node
.getSerialized([], [])
3513 for node
, ancestry
in all_nodes
:
3514 #if 'castle.wrl' not in node.getFilename():
3517 spec
= node
.getSpec()
3519 prefix = node.getPrefix()
3524 if HELPER_FUNC
and HELPER_FUNC(node
, ancestry
):
3525 # Note, include this function so the VRML/X3D importer can be extended
3526 # by an external script. - gets first pick
3529 importShape(bpycollection
, node
, ancestry
, global_matrix
)
3530 elif spec
in {'PointLight', 'DirectionalLight', 'SpotLight'}:
3531 importLamp(bpycollection
, node
, spec
, ancestry
, global_matrix
)
3532 elif spec
== 'Viewpoint':
3533 importViewpoint(bpycollection
, node
, ancestry
, global_matrix
)
3534 elif spec
== 'Transform':
3535 # Only use transform nodes when we are not importing a flat object hierarchy
3536 if PREF_FLAT
== False:
3537 importTransform(bpycollection
, node
, ancestry
, global_matrix
)
3539 # These are delt with later within importRoute
3540 elif spec=='PositionInterpolator':
3541 action = bpy.data.ipos.new('web3d_ipo', 'Object')
3542 translatePositionInterpolator(node, action)
3545 # After we import all nodes, route events - anim paths
3546 for node
, ancestry
in all_nodes
:
3547 importRoute(node
, ancestry
)
3549 for node
, ancestry
in all_nodes
:
3551 # we know that all nodes referenced from will be in
3552 # routeIpoDict so no need to run node.getDefDict() for every node.
3553 routeIpoDict
= node
.getRouteIpoDict()
3554 defDict
= node
.getDefDict()
3556 for key
, action
in routeIpoDict
.items():
3558 # Assign anim curves
3560 if node
.blendData
is None: # Add an object if we need one for animation
3561 bpyob
= node
.blendData
= node
.blendObject
= bpy
.data
.objects
.new('AnimOb', None) # , name)
3562 bpycollection
.objects
.link(bpyob
)
3563 bpyob
.select_set(True)
3565 if node
.blendData
.animation_data
is None:
3566 node
.blendData
.animation_data_create()
3568 node
.blendData
.animation_data
.action
= action
3571 if PREF_FLAT
is False:
3573 for node
, ancestry
in all_nodes
:
3574 if node
.blendObject
:
3577 # Get the last parent
3581 blendObject
= ancestry
[i
].blendObject
3586 # Parent Slow, - 1 liner but works
3587 # blendObject.makeParent([node.blendObject], 0, 1)
3591 child_dict
[blendObject
].append(node
.blendObject
)
3593 child_dict
[blendObject
] = [node
.blendObject
]
3596 for parent
, children
in child_dict
.items():
3601 bpycontext
.view_layer
.update()
3605 def load_with_profiler(
3613 pro
= cProfile
.Profile()
3614 pro
.runctx("load_web3d(context, filepath, PREF_FLAT=True, "
3615 "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
3616 globals(), locals())
3617 st
= pstats
.Stats(pro
)
3618 st
.sort_stats("time")
3620 # st.print_callers(0.1)
3629 # loadWithProfiler(operator, context, filepath, global_matrix)
3630 load_web3d(context
, filepath
,
3633 global_matrix
=global_matrix
,